/* Dead code elimination pass for the GNU compiler.
   Copyright (C) 2002-2013 Free Software Foundation, Inc.
   Contributed by Ben Elliston <bje@redhat.com>
   and Andrew MacLeod <amacleod@redhat.com>
   Adapted to use control dependence by Steven Bosscher, SUSE Labs.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* Dead code elimination.

   References:

     Building an Optimizing Compiler,
     Robert Morgan, Butterworth-Heinemann, 1998, Section 8.9.

     Advanced Compiler Design and Implementation,
     Steven Muchnick, Morgan Kaufmann, 1997, Section 18.10.

   Dead-code elimination is the removal of statements which have no
   impact on the program's output.  "Dead statements" have no impact
   on the program's output, while "necessary statements" may have
   an impact on it.

   The algorithm consists of three phases:
   1. Marking as necessary all statements known to be necessary,
      e.g. most function calls, writing a value to memory, etc.;
   2. Propagating necessity, e.g. marking as necessary the
      statements giving values to operands of necessary statements;
      and
   3. Removing dead statements.  */
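
/* For orientation, a minimal sketch of the mark-and-sweep worklist
   scheme described above.  This is illustrative pseudocode only, not
   part of the pass; names like is_obviously_necessary are
   hypothetical:

     worklist = { stmt : is_obviously_necessary (stmt) };    // phase 1
     while (worklist not empty)                              // phase 2
       {
         stmt = pop (worklist);
         for each operand USE of stmt:
           if (!marked (def_stmt (USE)))
             { mark (def_stmt (USE)); push (worklist, def_stmt (USE)); }
       }
     for each stmt: if (!marked (stmt)) remove (stmt);       // phase 3
*/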

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"

#include "tree.h"
#include "gimple-pretty-print.h"
#include "basic-block.h"
#include "tree-flow.h"
#include "gimple.h"
#include "tree-pass.h"
#include "flags.h"
#include "cfgloop.h"
#include "tree-scalar-evolution.h"

static struct stmt_stats
{
  int total;
  int total_phis;
  int removed;
  int removed_phis;
} stats;

#define STMT_NECESSARY GF_PLF_1

static vec<gimple> worklist;

/* Vector indicating an SSA name has already been processed and marked
   as necessary.  */
static sbitmap processed;

/* Vector indicating that the last statement of a basic block has already
   been marked as necessary.  */
static sbitmap last_stmt_necessary;

/* Vector indicating that BB contains statements that are live.  */
static sbitmap bb_contains_live_stmts;

/* Before we can determine whether a control branch is dead, we need to
   compute which blocks are control dependent on which edges.

   We expect each block to be control dependent on very few edges so we
   use a bitmap for each block recording its edges.  An array holds the
   bitmap.  The Ith bit in the bitmap is set if that block is dependent
   on the Ith edge.  */
static bitmap *control_dependence_map;

/* Vector indicating that a basic block has already had all the edges
   processed that it is control dependent on.  */
static sbitmap visited_control_parents;

/* TRUE if this pass alters the CFG (by removing control statements).
   FALSE otherwise.

   If this pass alters the CFG, then it will arrange for the dominators
   to be recomputed.  */
static bool cfg_altered;

/* Execute code that follows the macro for each edge (given number
   EDGE_NUMBER within the CODE) for which the block with index N is
   control dependent.  */
#define EXECUTE_IF_CONTROL_DEPENDENT(BI, N, EDGE_NUMBER)	\
  EXECUTE_IF_SET_IN_BITMAP (control_dependence_map[(N)], 0,	\
			    (EDGE_NUMBER), (BI))


/* Indicate block BB is control dependent on an edge with index EDGE_INDEX.  */
static inline void
set_control_dependence_map_bit (basic_block bb, int edge_index)
{
  if (bb == ENTRY_BLOCK_PTR)
    return;
  gcc_assert (bb != EXIT_BLOCK_PTR);
  bitmap_set_bit (control_dependence_map[bb->index], edge_index);
}

/* Clear all control dependences for block BB.  */
static inline void
clear_control_dependence_bitmap (basic_block bb)
{
  bitmap_clear (control_dependence_map[bb->index]);
}


/* Find the immediate postdominator PDOM of the specified basic block BLOCK.
   This function is necessary because some blocks have negative numbers.  */

static inline basic_block
find_pdom (basic_block block)
{
  gcc_assert (block != ENTRY_BLOCK_PTR);

  if (block == EXIT_BLOCK_PTR)
    return EXIT_BLOCK_PTR;
  else
    {
      basic_block bb = get_immediate_dominator (CDI_POST_DOMINATORS, block);
      if (! bb)
	return EXIT_BLOCK_PTR;
      return bb;
    }
}
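
/* A hypothetical example: in the CFG

     bb2 -> bb3, bb2 -> bb4, bb3 -> bb5, bb4 -> bb5, bb5 -> EXIT

   find_pdom (bb2) is bb5 (both branch arms rejoin there), and
   find_pdom (bb5) is EXIT_BLOCK_PTR.  The NULL check above also makes
   EXIT_BLOCK_PTR act as the postdominator when none is recorded.  */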


/* Determine all blocks' control dependences on the edge with index
   EDGE_INDEX in the edge list EL, a la Morgan, Section 3.6.  */

static void
find_control_dependence (struct edge_list *el, int edge_index)
{
  basic_block current_block;
  basic_block ending_block;

  gcc_assert (INDEX_EDGE_PRED_BB (el, edge_index) != EXIT_BLOCK_PTR);

  if (INDEX_EDGE_PRED_BB (el, edge_index) == ENTRY_BLOCK_PTR)
    ending_block = single_succ (ENTRY_BLOCK_PTR);
  else
    ending_block = find_pdom (INDEX_EDGE_PRED_BB (el, edge_index));

  for (current_block = INDEX_EDGE_SUCC_BB (el, edge_index);
       current_block != ending_block && current_block != EXIT_BLOCK_PTR;
       current_block = find_pdom (current_block))
    {
      edge e = INDEX_EDGE (el, edge_index);

      /* For abnormal edges, we don't make current_block control
	 dependent because instructions that throw are always necessary
	 anyway.  */
      if (e->flags & EDGE_ABNORMAL)
	continue;

      set_control_dependence_map_bit (current_block, edge_index);
    }
}
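
/* A hypothetical example of the walk above: for

     if (c)        <- bb2
       x = 1;      <- bb3
     y = x;        <- bb4

   consider the edge bb2->bb3.  find_pdom (bb2) is bb4, so the walk
   starts at bb3 and stops at bb4; only bb3 becomes control dependent
   on bb2->bb3.  The merge block bb4 executes regardless of the branch
   and therefore depends on neither outgoing edge of bb2.  */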


/* Record all blocks' control dependences on all edges in the edge
   list EL, a la Morgan, Section 3.6.  */

static void
find_all_control_dependences (struct edge_list *el)
{
  int i;

  for (i = 0; i < NUM_EDGES (el); ++i)
    find_control_dependence (el, i);
}

/* If STMT is not already marked necessary, mark it, and add it to the
   worklist if ADD_TO_WORKLIST is true.  */

static inline void
mark_stmt_necessary (gimple stmt, bool add_to_worklist)
{
  gcc_assert (stmt);

  if (gimple_plf (stmt, STMT_NECESSARY))
    return;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Marking useful stmt: ");
      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
      fprintf (dump_file, "\n");
    }

  gimple_set_plf (stmt, STMT_NECESSARY, true);
  if (add_to_worklist)
    worklist.safe_push (stmt);
  if (bb_contains_live_stmts && !is_gimple_debug (stmt))
    bitmap_set_bit (bb_contains_live_stmts, gimple_bb (stmt)->index);
}


/* Mark the statement defining operand OP as necessary.  */

static inline void
mark_operand_necessary (tree op)
{
  gimple stmt;
  int ver;

  gcc_assert (op);

  ver = SSA_NAME_VERSION (op);
  if (bitmap_bit_p (processed, ver))
    {
      stmt = SSA_NAME_DEF_STMT (op);
      gcc_assert (gimple_nop_p (stmt)
		  || gimple_plf (stmt, STMT_NECESSARY));
      return;
    }
  bitmap_set_bit (processed, ver);

  stmt = SSA_NAME_DEF_STMT (op);
  gcc_assert (stmt);

  if (gimple_plf (stmt, STMT_NECESSARY) || gimple_nop_p (stmt))
    return;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "marking necessary through ");
      print_generic_expr (dump_file, op, 0);
      fprintf (dump_file, " stmt ");
      print_gimple_stmt (dump_file, stmt, 0, 0);
    }

  gimple_set_plf (stmt, STMT_NECESSARY, true);
  if (bb_contains_live_stmts)
    bitmap_set_bit (bb_contains_live_stmts, gimple_bb (stmt)->index);
  worklist.safe_push (stmt);
}


/* Mark STMT as necessary if it obviously is.  Add it to the worklist if
   it can make other statements necessary.

   If AGGRESSIVE is false, control statements are conservatively marked as
   necessary.  */

static void
mark_stmt_if_obviously_necessary (gimple stmt, bool aggressive)
{
  /* With non-call exceptions, we have to assume that all statements could
     throw.  If a statement could throw, it can be deemed necessary.  */
  if (cfun->can_throw_non_call_exceptions
      && !cfun->can_delete_dead_exceptions
      && stmt_could_throw_p (stmt))
    {
      mark_stmt_necessary (stmt, true);
      return;
    }

  /* Statements that are implicitly live.  Most function calls, asm
     and return statements are required.  Labels and GIMPLE_BIND nodes
     are kept because they are control flow, and we have no way of
     knowing whether they can be removed.  DCE can eliminate all the
     other statements in a block, and CFG can then remove the block
     and labels.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_PREDICT:
    case GIMPLE_LABEL:
      mark_stmt_necessary (stmt, false);
      return;

    case GIMPLE_ASM:
    case GIMPLE_RESX:
    case GIMPLE_RETURN:
      mark_stmt_necessary (stmt, true);
      return;

    case GIMPLE_CALL:
      {
	tree callee = gimple_call_fndecl (stmt);
	if (callee != NULL_TREE
	    && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
	  switch (DECL_FUNCTION_CODE (callee))
	    {
	    case BUILT_IN_MALLOC:
	    case BUILT_IN_CALLOC:
	    case BUILT_IN_ALLOCA:
	    case BUILT_IN_ALLOCA_WITH_ALIGN:
	      return;

	    default:;
	    }
	/* Most, but not all, function calls are required.  Function calls
	   that produce no result and have no side effects (i.e. calls to
	   const or pure functions) are unnecessary.  */
	if (gimple_has_side_effects (stmt))
	  {
	    mark_stmt_necessary (stmt, true);
	    return;
	  }
	if (!gimple_call_lhs (stmt))
	  return;
	break;
      }

    case GIMPLE_DEBUG:
      /* Debug temps without a value are not useful.  ??? If we could
	 easily locate the debug temp bind stmt for a use thereof,
	 we could refrain from marking all debug temps here, and
	 mark them only if they're used.  */
      if (!gimple_debug_bind_p (stmt)
	  || gimple_debug_bind_has_value_p (stmt)
	  || TREE_CODE (gimple_debug_bind_get_var (stmt)) != DEBUG_EXPR_DECL)
	mark_stmt_necessary (stmt, false);
      return;

    case GIMPLE_GOTO:
      gcc_assert (!simple_goto_p (stmt));
      mark_stmt_necessary (stmt, true);
      return;

    case GIMPLE_COND:
      gcc_assert (EDGE_COUNT (gimple_bb (stmt)->succs) == 2);
      /* Fall through.  */

    case GIMPLE_SWITCH:
      if (! aggressive)
	mark_stmt_necessary (stmt, true);
      break;

    case GIMPLE_ASSIGN:
      if (TREE_CODE (gimple_assign_lhs (stmt)) == SSA_NAME
	  && TREE_CLOBBER_P (gimple_assign_rhs1 (stmt)))
	return;
      break;

    default:
      break;
    }

  /* If the statement has volatile operands, it needs to be preserved.
     Same for statements that can alter control flow in unpredictable
     ways.  */
  if (gimple_has_volatile_ops (stmt) || is_ctrl_altering_stmt (stmt))
    {
      mark_stmt_necessary (stmt, true);
      return;
    }

  if (stmt_may_clobber_global_p (stmt))
    {
      mark_stmt_necessary (stmt, true);
      return;
    }

  return;
}
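
/* An illustrative (hypothetical) GIMPLE fragment:

     x_1 = a_2 + b_3;                 <- not obviously necessary
     *p_4 = x_1;                      <- may clobber global memory: necessary
     foo (x_1);                       <- has side effects: necessary
     y_5 = __builtin_malloc (n_6);    <- skipped above; only live if used

   The first statement becomes necessary only later, when
   propagate_necessity processes the uses of x_1.  */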


/* Mark the last statement of BB as necessary.  */

static void
mark_last_stmt_necessary (basic_block bb)
{
  gimple stmt = last_stmt (bb);

  bitmap_set_bit (last_stmt_necessary, bb->index);
  bitmap_set_bit (bb_contains_live_stmts, bb->index);

  /* We actually mark the statement only if it is a control statement.  */
  if (stmt && is_ctrl_stmt (stmt))
    mark_stmt_necessary (stmt, true);
}


/* Mark control dependent edges of BB as necessary.  We have to do this only
   once for each basic block so we set the appropriate bit after we're done.

   When IGNORE_SELF is true, ignore BB in the list of control dependences.  */

static void
mark_control_dependent_edges_necessary (basic_block bb, struct edge_list *el,
					bool ignore_self)
{
  bitmap_iterator bi;
  unsigned edge_number;
  bool skipped = false;

  gcc_assert (bb != EXIT_BLOCK_PTR);

  if (bb == ENTRY_BLOCK_PTR)
    return;

  EXECUTE_IF_CONTROL_DEPENDENT (bi, bb->index, edge_number)
    {
      basic_block cd_bb = INDEX_EDGE_PRED_BB (el, edge_number);

      if (ignore_self && cd_bb == bb)
	{
	  skipped = true;
	  continue;
	}

      if (!bitmap_bit_p (last_stmt_necessary, cd_bb->index))
	mark_last_stmt_necessary (cd_bb);
    }

  if (!skipped)
    bitmap_set_bit (visited_control_parents, bb->index);
}


/* Find obviously necessary statements.  These are things like most function
   calls, and stores to file level variables.

   If EL is NULL, control statements are conservatively marked as
   necessary.  Otherwise it contains the list of edges used by control
   dependence analysis.  */

static void
find_obviously_necessary_stmts (struct edge_list *el)
{
  basic_block bb;
  gimple_stmt_iterator gsi;
  edge e;
  gimple phi, stmt;
  int flags;

  FOR_EACH_BB (bb)
    {
      /* PHI nodes are never inherently necessary.  */
      for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  phi = gsi_stmt (gsi);
	  gimple_set_plf (phi, STMT_NECESSARY, false);
	}

      /* Check all statements in the block.  */
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  stmt = gsi_stmt (gsi);
	  gimple_set_plf (stmt, STMT_NECESSARY, false);
	  mark_stmt_if_obviously_necessary (stmt, el != NULL);
	}
    }

  /* Pure and const functions are finite and thus have no infinite loops in
     them.  */
  flags = flags_from_decl_or_type (current_function_decl);
  if ((flags & (ECF_CONST|ECF_PURE)) && !(flags & ECF_LOOPING_CONST_OR_PURE))
    return;

  /* Prevent empty, possibly infinite loops from being removed.  */
  if (el)
    {
      loop_iterator li;
      struct loop *loop;
      scev_initialize ();
      if (mark_irreducible_loops ())
	FOR_EACH_BB (bb)
	  {
	    edge_iterator ei;
	    FOR_EACH_EDGE (e, ei, bb->succs)
	      if ((e->flags & EDGE_DFS_BACK)
		  && (e->flags & EDGE_IRREDUCIBLE_LOOP))
		{
	          if (dump_file)
	            fprintf (dump_file, "Marking back edge of irreducible loop %i->%i\n",
			     e->src->index, e->dest->index);
		  mark_control_dependent_edges_necessary (e->dest, el, false);
		}
	  }

      FOR_EACH_LOOP (li, loop, 0)
	if (!finite_loop_p (loop))
	  {
	    if (dump_file)
	      fprintf (dump_file, "cannot prove finiteness of loop %i\n", loop->num);
	    mark_control_dependent_edges_necessary (loop->latch, el, false);
	  }
      scev_finalize ();
    }
}


/* Return true if REF is based on an aliased base, otherwise false.  */

static bool
ref_may_be_aliased (tree ref)
{
  gcc_assert (TREE_CODE (ref) != WITH_SIZE_EXPR);
  while (handled_component_p (ref))
    ref = TREE_OPERAND (ref, 0);
  if (TREE_CODE (ref) == MEM_REF
      && TREE_CODE (TREE_OPERAND (ref, 0)) == ADDR_EXPR)
    ref = TREE_OPERAND (TREE_OPERAND (ref, 0), 0);
  return !(DECL_P (ref)
	   && !may_be_aliased (ref));
}
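
/* For example (hypothetical): given a local array "int a[10]" whose
   address is never taken, a reference like a[i_1] strips down to the
   DECL a and is not aliased; a reference like MEM[(int *)p_2] never
   reaches a DECL and so must be treated as potentially aliased.  */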

static bitmap visited = NULL;
static unsigned int longest_chain = 0;
static unsigned int total_chain = 0;
static unsigned int nr_walks = 0;
static bool chain_ovfl = false;

/* Worker for the walker that marks reaching definitions of REF,
   which is based on a non-aliased decl, necessary.  It returns
   true whenever the defining statement of the current VDEF is
   a kill for REF, as no dominating may-defs are necessary for REF
   anymore.  DATA points to the basic-block that contains the
   stmt that refers to REF.  */

static bool
mark_aliased_reaching_defs_necessary_1 (ao_ref *ref, tree vdef, void *data)
{
  gimple def_stmt = SSA_NAME_DEF_STMT (vdef);

  /* All stmts we visit are necessary.  */
  mark_operand_necessary (vdef);

  /* If the stmt lhs kills ref, then we can stop walking.  */
  if (gimple_has_lhs (def_stmt)
      && TREE_CODE (gimple_get_lhs (def_stmt)) != SSA_NAME
      /* The assignment is not necessarily carried out if it can throw
         and we can catch it in the current function where we could inspect
	 the previous value.
         ???  We only need to care about the RHS throwing.  For aggregate
	 assignments or similar calls and non-call exceptions the LHS
	 might throw as well.  */
      && !stmt_can_throw_internal (def_stmt))
    {
      tree base, lhs = gimple_get_lhs (def_stmt);
      HOST_WIDE_INT size, offset, max_size;
      ao_ref_base (ref);
      base = get_ref_base_and_extent (lhs, &offset, &size, &max_size);
      /* We can get MEM[symbol: sZ, index: D.8862_1] here,
	 so base == ref->base does not always hold.  */
      if (base == ref->base)
	{
	  /* For a must-alias check we need to be able to constrain
	     the accesses properly.  */
	  if (size != -1 && size == max_size
	      && ref->max_size != -1)
	    {
	      if (offset <= ref->offset
		  && offset + size >= ref->offset + ref->max_size)
		return true;
	    }
	  /* Or they need to be exactly the same.  */
	  else if (ref->ref
		   /* Make sure there is no induction variable involved
		      in the references (gcc.c-torture/execute/pr42142.c).
		      The simplest way is to check if the kill dominates
		      the use.  */
		   /* But when both are in the same block we cannot
		      easily tell whether we came from a backedge
		      unless we decide to compute stmt UIDs
		      (see PR58246).  */
		   && (basic_block) data != gimple_bb (def_stmt)
		   && dominated_by_p (CDI_DOMINATORS, (basic_block) data,
				      gimple_bb (def_stmt))
		   && operand_equal_p (ref->ref, lhs, 0))
	    return true;
	}
    }

  /* Otherwise keep walking.  */
  return false;
}

static void
mark_aliased_reaching_defs_necessary (gimple stmt, tree ref)
{
  unsigned int chain;
  ao_ref refd;
  gcc_assert (!chain_ovfl);
  ao_ref_init (&refd, ref);
  chain = walk_aliased_vdefs (&refd, gimple_vuse (stmt),
			      mark_aliased_reaching_defs_necessary_1,
			      gimple_bb (stmt), NULL);
  if (chain > longest_chain)
    longest_chain = chain;
  total_chain += chain;
  nr_walks++;
}
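
/* A hypothetical walk, for a load from a non-aliased local a:

     a = ...;      <- (3) full kill of a: marked, walk stops here
     a.f = ...;    <- (2) partial def: marked, walk continues
     x_1 = a;      <- (1) the load that starts the walk

   Stores above the kill need not be marked necessary for this use,
   which is exactly what returning true from the worker achieves.  */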

/* Worker for the walker that marks reaching definitions of REF, which
   is not based on a non-aliased decl.  For simplicity we need to end
   up marking all may-defs necessary that are not based on a non-aliased
   decl.  The only job of this walker is to skip may-defs based on
   a non-aliased decl.  */

static bool
mark_all_reaching_defs_necessary_1 (ao_ref *ref ATTRIBUTE_UNUSED,
				    tree vdef, void *data ATTRIBUTE_UNUSED)
{
  gimple def_stmt = SSA_NAME_DEF_STMT (vdef);

  /* We have to skip already visited (and thus necessary) statements
     to make the chaining work after we dropped back to simple mode.  */
  if (chain_ovfl
      && bitmap_bit_p (processed, SSA_NAME_VERSION (vdef)))
    {
      gcc_assert (gimple_nop_p (def_stmt)
		  || gimple_plf (def_stmt, STMT_NECESSARY));
      return false;
    }

  /* We want to skip stores to non-aliased variables.  */
  if (!chain_ovfl
      && gimple_assign_single_p (def_stmt))
    {
      tree lhs = gimple_assign_lhs (def_stmt);
      if (!ref_may_be_aliased (lhs))
	return false;
    }

  /* We want to skip statements that do not constitute stores but have
     a virtual definition.  */
  if (is_gimple_call (def_stmt))
    {
      tree callee = gimple_call_fndecl (def_stmt);
      if (callee != NULL_TREE
	  && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
	switch (DECL_FUNCTION_CODE (callee))
	  {
	  case BUILT_IN_MALLOC:
	  case BUILT_IN_CALLOC:
	  case BUILT_IN_ALLOCA:
	  case BUILT_IN_ALLOCA_WITH_ALIGN:
	  case BUILT_IN_FREE:
	    return false;

	  default:;
	  }
    }

  mark_operand_necessary (vdef);

  return false;
}

static void
mark_all_reaching_defs_necessary (gimple stmt)
{
  walk_aliased_vdefs (NULL, gimple_vuse (stmt),
		      mark_all_reaching_defs_necessary_1, NULL, &visited);
}

/* Return true if PHI has a single argument, or if all of its arguments
   are identical, so that it can be removed.  */
static bool
degenerate_phi_p (gimple phi)
{
  unsigned int i;
  tree op = gimple_phi_arg_def (phi, 0);
  for (i = 1; i < gimple_phi_num_args (phi); i++)
    if (gimple_phi_arg_def (phi, i) != op)
      return false;
  return true;
}
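
/* For example (hypothetical), x_4 = PHI <x_2, x_2, x_2> is degenerate
   and every use of x_4 could just as well use x_2 directly, whereas
   x_4 = PHI <x_2, x_3> is not.  */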

/* Propagate necessity using the operands of necessary statements.
   Process the uses on each statement in the worklist, and add all
   feeding statements which contribute to the calculation of this
   value to the worklist.

   In conservative mode, EL is NULL.  */

static void
propagate_necessity (struct edge_list *el)
{
  gimple stmt;
  bool aggressive = (el ? true : false);

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "\nProcessing worklist:\n");

  while (worklist.length () > 0)
    {
      /* Take STMT from worklist.  */
      stmt = worklist.pop ();

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "processing: ");
	  print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
	  fprintf (dump_file, "\n");
	}

      if (aggressive)
	{
	  /* Mark the last statement of the basic blocks on which the block
	     containing STMT is control dependent, but only if we haven't
	     already done so.  */
	  basic_block bb = gimple_bb (stmt);
	  if (bb != ENTRY_BLOCK_PTR
	      && !bitmap_bit_p (visited_control_parents, bb->index))
	    mark_control_dependent_edges_necessary (bb, el, false);
	}

      if (gimple_code (stmt) == GIMPLE_PHI
	  /* We do not process virtual PHI nodes nor do we track their
	     necessity.  */
	  && !virtual_operand_p (gimple_phi_result (stmt)))
	{
	  /* PHI nodes are somewhat special in that each PHI alternative has
	     data and control dependencies.  All the statements feeding the
	     PHI node's arguments are always necessary.  In aggressive mode,
	     we also consider the control dependent edges leading to the
	     predecessor block associated with each PHI alternative as
	     necessary.  */
	  size_t k;

	  for (k = 0; k < gimple_phi_num_args (stmt); k++)
            {
	      tree arg = PHI_ARG_DEF (stmt, k);
	      if (TREE_CODE (arg) == SSA_NAME)
		mark_operand_necessary (arg);
	    }

	  /* For PHI operands it matters from where the control flow arrives
	     in the BB.  Consider the following example:

	     a=exp1;
	     b=exp2;
	     if (test)
		;
	     else
		;
	     c=PHI(a,b)

	     We need to mark the control dependence of the empty basic
	     blocks, since they contain the computations of the PHI operands.

	     Doing so is too restrictive when the predecessor block is
	     inside a loop.  Consider:

	      if (b)
		{
		  int i;
		  for (i = 0; i<1000; ++i)
		    ;
		  j = 0;
		}
	      return j;

	     There is a PHI for j in the BB containing the return statement.
	     In this case the control dependence of the predecessor block
	     (which is inside the empty loop) also contains the block that
	     determines the number of iterations, and that would prevent
	     the empty loop from being removed.

	     This scenario can be avoided by splitting critical edges.
	     To avoid having to run the critical edge splitting pass, we
	     identify what the control dependence would look like if the
	     edge were split.

	     Consider the modified CFG created from the current CFG by
	     splitting edge B->C.  In the postdominance tree of the modified
	     CFG, C' is always a child of C.  There are two cases for what
	     the children of C' can look like:

		1) C' is a leaf

		   In this case the only basic block C' is control dependent
		   on is B.

		2) C' has a single child, which is B

		   In this case the control dependence of C' is the same as
		   the control dependence of B in the original CFG, except
		   for block B itself (since C' postdominates B in the
		   modified CFG).

	     Now how do we decide which case happens?  There are two basic
	     options:

		a) C postdominates B.  Then C immediately postdominates B,
		   and case 2 happens iff there is no other way from B to C
		   except the edge B->C.

		   There is another way from B to C iff there is a successor
		   of B that is not postdominated by B.  Testing this
		   condition is somewhat expensive, because we need to
		   iterate over all successors of B.  We are safe to assume
		   that this does not happen: we will mark B as needed when
		   processing the other path from B to C that is control
		   dependent on B, and marking the control dependencies of B
		   itself is harmless because they will be processed anyway
		   after processing the control statement in B.

		b) C does not postdominate B.  Then case 1 always happens,
		   since there is a path from C to the exit that does not go
		   through B, and thus also not through C'.  */

	  if (aggressive && !degenerate_phi_p (stmt))
	    {
	      for (k = 0; k < gimple_phi_num_args (stmt); k++)
		{
		  basic_block arg_bb = gimple_phi_arg_edge (stmt, k)->src;

		  if (gimple_bb (stmt)
		      != get_immediate_dominator (CDI_POST_DOMINATORS, arg_bb))
		    {
		      if (!bitmap_bit_p (last_stmt_necessary, arg_bb->index))
			mark_last_stmt_necessary (arg_bb);
		    }
		  else if (arg_bb != ENTRY_BLOCK_PTR
		           && !bitmap_bit_p (visited_control_parents,
					 arg_bb->index))
		    mark_control_dependent_edges_necessary (arg_bb, el, true);
		}
	    }
	}
      else
	{
	  /* Propagate through the operands.  Examine all the USE, VUSE and
	     VDEF operands in this statement.  Mark all the statements
	     which feed this statement's uses as necessary.  */
	  ssa_op_iter iter;
	  tree use;

	  /* If this is a call to free which is directly fed by an
	     allocation function, do not mark that necessary through
	     processing the argument.  */
	  if (gimple_call_builtin_p (stmt, BUILT_IN_FREE))
	    {
	      tree ptr = gimple_call_arg (stmt, 0);
	      gimple def_stmt;
	      tree def_callee;
	      /* If the pointer we free is defined by an allocation
		 function do not add the call to the worklist.  */
	      if (TREE_CODE (ptr) == SSA_NAME
		  && is_gimple_call (def_stmt = SSA_NAME_DEF_STMT (ptr))
		  && (def_callee = gimple_call_fndecl (def_stmt))
		  && DECL_BUILT_IN_CLASS (def_callee) == BUILT_IN_NORMAL
		  && (DECL_FUNCTION_CODE (def_callee) == BUILT_IN_MALLOC
		      || DECL_FUNCTION_CODE (def_callee) == BUILT_IN_CALLOC))
		continue;
	    }

	  FOR_EACH_SSA_TREE_OPERAND (use, stmt, iter, SSA_OP_USE)
	    mark_operand_necessary (use);

	  use = gimple_vuse (stmt);
	  if (!use)
	    continue;

	  /* If we dropped to simple mode make all immediately
	     reachable definitions necessary.  */
	  if (chain_ovfl)
	    {
	      mark_all_reaching_defs_necessary (stmt);
	      continue;
	    }

	  /* For statements that may load from memory (have a VUSE) we
	     have to mark all reaching (may-)definitions as necessary.
	     We partition this task into two cases:
	      1) explicit loads based on decls that are not aliased
	      2) implicit loads (like calls) and explicit loads not
	         based on decls that are not aliased (like indirect
		 references or loads from globals)
	     For 1) we mark all reaching may-defs as necessary, stopping
	     at dominating kills.  For 2) we want to mark all dominating
	     references necessary, except for the non-aliased ones, which
	     we handle in 1).  By keeping a global visited bitmap for the
	     references we walk for 2) we avoid quadratic behavior for
	     those.  */

	  if (is_gimple_call (stmt))
	    {
	      tree callee = gimple_call_fndecl (stmt);
	      unsigned i;

	      /* Calls to functions that are merely acting as barriers
		 or that only store to memory do not make any previous
		 stores necessary.  */
	      if (callee != NULL_TREE
		  && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL
		  && (DECL_FUNCTION_CODE (callee) == BUILT_IN_MEMSET
		      || DECL_FUNCTION_CODE (callee) == BUILT_IN_MEMSET_CHK
		      || DECL_FUNCTION_CODE (callee) == BUILT_IN_MALLOC
		      || DECL_FUNCTION_CODE (callee) == BUILT_IN_CALLOC
		      || DECL_FUNCTION_CODE (callee) == BUILT_IN_FREE
		      || DECL_FUNCTION_CODE (callee) == BUILT_IN_VA_END
		      || DECL_FUNCTION_CODE (callee) == BUILT_IN_ALLOCA
		      || (DECL_FUNCTION_CODE (callee)
			  == BUILT_IN_ALLOCA_WITH_ALIGN)
		      || DECL_FUNCTION_CODE (callee) == BUILT_IN_STACK_SAVE
		      || DECL_FUNCTION_CODE (callee) == BUILT_IN_STACK_RESTORE
		      || DECL_FUNCTION_CODE (callee) == BUILT_IN_ASSUME_ALIGNED))
		continue;

	      /* Calls implicitly load from memory; in addition their
	         arguments may explicitly perform memory loads.  */
	      mark_all_reaching_defs_necessary (stmt);
	      for (i = 0; i < gimple_call_num_args (stmt); ++i)
		{
		  tree arg = gimple_call_arg (stmt, i);
		  if (TREE_CODE (arg) == SSA_NAME
		      || is_gimple_min_invariant (arg))
		    continue;
		  if (TREE_CODE (arg) == WITH_SIZE_EXPR)
		    arg = TREE_OPERAND (arg, 0);
		  if (!ref_may_be_aliased (arg))
		    mark_aliased_reaching_defs_necessary (stmt, arg);
		}
	    }
	  else if (gimple_assign_single_p (stmt))
	    {
	      tree rhs;
	      /* If this is a load mark things necessary.  */
	      rhs = gimple_assign_rhs1 (stmt);
	      if (TREE_CODE (rhs) != SSA_NAME
		  && !is_gimple_min_invariant (rhs)
		  && TREE_CODE (rhs) != CONSTRUCTOR)
		{
		  if (!ref_may_be_aliased (rhs))
		    mark_aliased_reaching_defs_necessary (stmt, rhs);
		  else
		    mark_all_reaching_defs_necessary (stmt);
		}
	    }
	  else if (gimple_code (stmt) == GIMPLE_RETURN)
	    {
	      tree rhs = gimple_return_retval (stmt);
	      /* A return statement may perform a load.  */
	      if (rhs
		  && TREE_CODE (rhs) != SSA_NAME
		  && !is_gimple_min_invariant (rhs)
		  && TREE_CODE (rhs) != CONSTRUCTOR)
		{
		  if (!ref_may_be_aliased (rhs))
		    mark_aliased_reaching_defs_necessary (stmt, rhs);
		  else
		    mark_all_reaching_defs_necessary (stmt);
		}
	    }
	  else if (gimple_code (stmt) == GIMPLE_ASM)
	    {
	      unsigned i;
	      mark_all_reaching_defs_necessary (stmt);
	      /* Inputs may perform loads.  */
	      for (i = 0; i < gimple_asm_ninputs (stmt); ++i)
		{
		  tree op = TREE_VALUE (gimple_asm_input_op (stmt, i));
		  if (TREE_CODE (op) != SSA_NAME
		      && !is_gimple_min_invariant (op)
		      && TREE_CODE (op) != CONSTRUCTOR
		      && !ref_may_be_aliased (op))
		    mark_aliased_reaching_defs_necessary (stmt, op);
		}
	    }
	  else if (gimple_code (stmt) == GIMPLE_TRANSACTION)
	    {
	      /* The beginning of a transaction is a memory barrier.  */
	      /* ??? If we were really cool, we'd only be a barrier
		 for the memories touched within the transaction.  */
	      mark_all_reaching_defs_necessary (stmt);
	    }
	  else
	    gcc_unreachable ();

	  /* If we overused our alias oracle budget, drop to simple
	     mode.  The cost metric allows quadratic behavior
	     (number of uses times number of may-defs queries) up to
	     a constant maximal number of queries and after that falls back to
	     super-linear complexity.  */
	  if (/* Constant but quadratic for small functions.  */
	      total_chain > 128 * 128
	      /* Linear in the number of may-defs.  */
	      && total_chain > 32 * longest_chain
	      /* Linear in the number of uses.  */
	      && total_chain > nr_walks * 32)
	    {
	      chain_ovfl = true;
	      if (visited)
		bitmap_clear (visited);
	    }
	}
    }
}
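
/* The BUILT_IN_FREE special case above enables whole-pair removal.
   For instance (hypothetical):

     p_1 = __builtin_malloc (16);
     __builtin_free (p_1);

   Processing the free does not mark the malloc necessary through p_1,
   so the malloc stays dead if p_1 has no other uses;
   eliminate_unnecessary_stmts below then also unmarks a free whose
   argument's definition is being removed.  */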

/* Replace all uses of NAME by underlying variable and mark it
   for renaming.  This assumes the defining statement of NAME is
   going to be removed.  */

void
mark_virtual_operand_for_renaming (tree name)
{
  tree name_var = SSA_NAME_VAR (name);
  bool used = false;
  imm_use_iterator iter;
  use_operand_p use_p;
  gimple stmt;

  gcc_assert (VAR_DECL_IS_VIRTUAL_OPERAND (name_var));
  FOR_EACH_IMM_USE_STMT (stmt, iter, name)
    {
      FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
        SET_USE (use_p, name_var);
      used = true;
    }
  if (used)
    mark_virtual_operands_for_renaming (cfun);
}

/* Replace all uses of the virtual PHI result by its underlying variable
   and mark it for renaming.  This assumes the PHI node is going to be
   removed.  */

void
mark_virtual_phi_result_for_renaming (gimple phi)
{
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Marking result for renaming : ");
      print_gimple_stmt (dump_file, phi, 0, TDF_SLIM);
      fprintf (dump_file, "\n");
    }

  mark_virtual_operand_for_renaming (gimple_phi_result (phi));
}


/* Remove dead PHI nodes from block BB.  */

static bool
remove_dead_phis (basic_block bb)
{
  bool something_changed = false;
  gimple phi;
  gimple_stmt_iterator gsi;

  for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi);)
    {
      stats.total_phis++;
      phi = gsi_stmt (gsi);

      /* We do not track necessity of virtual PHI nodes.  Instead do
         very simple dead PHI removal here.  */
      if (virtual_operand_p (gimple_phi_result (phi)))
	{
	  /* Virtual PHI nodes with one or identical arguments
	     can be removed.  */
	  if (degenerate_phi_p (phi))
	    {
	      tree vdef = gimple_phi_result (phi);
	      tree vuse = gimple_phi_arg_def (phi, 0);

	      use_operand_p use_p;
	      imm_use_iterator iter;
	      gimple use_stmt;
	      FOR_EACH_IMM_USE_STMT (use_stmt, iter, vdef)
		FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
		  SET_USE (use_p, vuse);
	      if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (vdef)
	          && TREE_CODE (vuse) == SSA_NAME)
		SSA_NAME_OCCURS_IN_ABNORMAL_PHI (vuse) = 1;
	    }
	  else
	    gimple_set_plf (phi, STMT_NECESSARY, true);
	}

      if (!gimple_plf (phi, STMT_NECESSARY))
	{
	  something_changed = true;
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Deleting : ");
	      print_gimple_stmt (dump_file, phi, 0, TDF_SLIM);
	      fprintf (dump_file, "\n");
	    }

	  remove_phi_node (&gsi, true);
	  stats.removed_phis++;
	  continue;
	}

      gsi_next (&gsi);
    }
  return something_changed;
}

/* Forward edge E to the respective POST_DOM_BB and update PHIs.  */

static edge
forward_edge_to_pdom (edge e, basic_block post_dom_bb)
{
  gimple_stmt_iterator gsi;
  edge e2 = NULL;
  edge_iterator ei;

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "Redirecting edge %i->%i to %i\n", e->src->index,
	     e->dest->index, post_dom_bb->index);

  e2 = redirect_edge_and_branch (e, post_dom_bb);
  cfg_altered = true;

  /* If the edge was already there, no updating is necessary.  */
  if (e2 != e)
    return e2;

  if (!gimple_seq_empty_p (phi_nodes (post_dom_bb)))
    {
      /* We are sure that for every live PHI we are seeing a control
	 dependent BB.  This means that we can pick any edge to duplicate
	 PHI args from.  */
      FOR_EACH_EDGE (e2, ei, post_dom_bb->preds)
	if (e2 != e)
	  break;
      for (gsi = gsi_start_phis (post_dom_bb); !gsi_end_p (gsi);)
	{
	  gimple phi = gsi_stmt (gsi);
	  tree op;
	  source_location locus;

	  /* PHIs for virtuals have no control dependency relation on them.
	     We are lost here and must force renaming of the symbol.  */
	  if (virtual_operand_p (gimple_phi_result (phi)))
	    {
	      mark_virtual_phi_result_for_renaming (phi);
	      remove_phi_node (&gsi, true);
	      continue;
	    }

	  /* Dead PHIs do not imply control dependency.  */
          if (!gimple_plf (phi, STMT_NECESSARY))
	    {
	      gsi_next (&gsi);
	      continue;
	    }

	  op = gimple_phi_arg_def (phi, e2->dest_idx);
	  locus = gimple_phi_arg_location (phi, e2->dest_idx);
	  add_phi_arg (phi, op, e, locus);
	  /* The resulting PHI, if not dead, can only be degenerate.  */
	  gcc_assert (degenerate_phi_p (phi));
	  gsi_next (&gsi);
	}
    }
  return e;
}
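
/* A sketch of the situation handled above (hypothetical): when a dead
   conditional in bb2 is removed and its lone surviving edge is
   redirected to the postdominator bb5, any live PHI in bb5 gains an
   argument for the new edge, copied from an arbitrary existing edge.
   The assertion holds because a PHI that is still live once the
   branch is known dead must carry identical arguments on all edges.  */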

/* Remove dead statement pointed to by iterator I.  Receives the basic block BB
   containing I so that we don't have to look it up.  */

static void
remove_dead_stmt (gimple_stmt_iterator *i, basic_block bb)
{
  gimple stmt = gsi_stmt (*i);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Deleting : ");
      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
      fprintf (dump_file, "\n");
    }

  stats.removed++;

  /* If we have determined that a conditional branch statement contributes
     nothing to the program, then we not only remove it, but we also change
     the flow graph so that the current block will simply fall-thru to its
     immediate post-dominator.  The blocks we are circumventing will be
     removed by cleanup_tree_cfg if this change in the flow graph makes them
     unreachable.  */
  if (is_ctrl_stmt (stmt))
    {
      basic_block post_dom_bb;
      edge e, e2;
      edge_iterator ei;

      post_dom_bb = get_immediate_dominator (CDI_POST_DOMINATORS, bb);

      e = find_edge (bb, post_dom_bb);

      /* If the edge is already there, try to use it.  This avoids the need
         to update PHI nodes.  Also watch for cases where the post dominator
	 does not exist or is the exit block.  These can happen for infinite
	 loops as we create fake edges in the dominator tree.  */
      if (e)
        ;
      else if (! post_dom_bb || post_dom_bb == EXIT_BLOCK_PTR)
	e = EDGE_SUCC (bb, 0);
      else
        e = forward_edge_to_pdom (EDGE_SUCC (bb, 0), post_dom_bb);
      gcc_assert (e);
      e->probability = REG_BR_PROB_BASE;
      e->count = bb->count;

      /* The edge is no longer associated with a conditional, so it does
	 not have TRUE/FALSE flags.  */
      e->flags &= ~(EDGE_TRUE_VALUE | EDGE_FALSE_VALUE);

      /* The lone outgoing edge from BB will be a fallthru edge.  */
      e->flags |= EDGE_FALLTHRU;

      /* Remove the remaining outgoing edges.  */
      for (ei = ei_start (bb->succs); (e2 = ei_safe_edge (ei)); )
	if (e != e2)
	  {
	    cfg_altered = true;
            remove_edge (e2);
	  }
	else
	  ei_next (&ei);
    }

  /* If this is a store into a variable that is being optimized away,
     add a debug bind stmt if possible.  */
  if (MAY_HAVE_DEBUG_STMTS
      && gimple_assign_single_p (stmt)
      && is_gimple_val (gimple_assign_rhs1 (stmt)))
    {
      tree lhs = gimple_assign_lhs (stmt);
      if ((TREE_CODE (lhs) == VAR_DECL || TREE_CODE (lhs) == PARM_DECL)
	  && !DECL_IGNORED_P (lhs)
	  && is_gimple_reg_type (TREE_TYPE (lhs))
	  && !is_global_var (lhs)
	  && !DECL_HAS_VALUE_EXPR_P (lhs))
	{
	  tree rhs = gimple_assign_rhs1 (stmt);
	  gimple note
	    = gimple_build_debug_bind (lhs, unshare_expr (rhs), stmt);
	  gsi_insert_after (i, note, GSI_SAME_STMT);
	}
    }

  unlink_stmt_vdef (stmt);
  gsi_remove (i, true);
  release_defs (stmt);
}
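
/* For example (hypothetical), removing the dead store

     x = y_1;

   where x is a user variable that is otherwise optimized away leaves
   behind

     # DEBUG x => y_1

   so the debugger can still display the value x would have had.  */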

/* Eliminate unnecessary statements. Any instruction not marked as necessary
   contributes nothing to the program, and can be deleted.  */

static bool
eliminate_unnecessary_stmts (void)
{
  bool something_changed = false;
  basic_block bb;
  gimple_stmt_iterator gsi, psi;
  gimple stmt;
  tree call;
  vec<basic_block> h;

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "\nEliminating unnecessary statements:\n");

  clear_special_calls ();

  /* Walking basic blocks and statements in reverse order avoids
     releasing SSA names before any other DEFs that refer to them are
     released.  This helps avoid loss of debug information, as we get
     a chance to propagate all RHSs of removed SSAs into debug uses,
     rather than only the latest ones.  E.g., consider:

     x_3 = y_1 + z_2;
     a_5 = x_3 - b_4;
     # DEBUG a => a_5

     If we were to release x_3 before a_5, when we reached a_5 and
     tried to substitute it into the debug stmt, we'd see x_3 there,
     but x_3's DEF, type, etc would have already been disconnected.
     By going backwards, the debug stmt first changes to:

     # DEBUG a => x_3 - b_4

     and then to:

     # DEBUG a => y_1 + z_2 - b_4

     as desired.  */
  gcc_assert (dom_info_available_p (CDI_DOMINATORS));
  h = get_all_dominated_blocks (CDI_DOMINATORS, single_succ (ENTRY_BLOCK_PTR));

  while (h.length ())
    {
      bb = h.pop ();

      /* Remove dead statements.  */
      for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi); gsi = psi)
	{
	  stmt = gsi_stmt (gsi);

	  psi = gsi;
	  gsi_prev (&psi);

	  stats.total++;

	  /* We can mark a call to free as not necessary if the
	     defining statement of its argument is not necessary
	     (and thus is getting removed).  */
	  if (gimple_plf (stmt, STMT_NECESSARY)
	      && gimple_call_builtin_p (stmt, BUILT_IN_FREE))
	    {
	      tree ptr = gimple_call_arg (stmt, 0);
	      if (TREE_CODE (ptr) == SSA_NAME)
		{
		  gimple def_stmt = SSA_NAME_DEF_STMT (ptr);
		  if (!gimple_nop_p (def_stmt)
		      && !gimple_plf (def_stmt, STMT_NECESSARY))
		    gimple_set_plf (stmt, STMT_NECESSARY, false);
		}
	    }

	  /* If the statement is not necessary, remove it.  */
	  if (!gimple_plf (stmt, STMT_NECESSARY))
	    {
	      if (!is_gimple_debug (stmt))
		something_changed = true;
	      remove_dead_stmt (&gsi, bb);
	    }
	  else if (is_gimple_call (stmt))
	    {
	      tree name = gimple_call_lhs (stmt);

	      notice_special_calls (stmt);

	      /* When LHS of var = call (); is dead, simplify it into
		 call (); saving one operand.  */
	      if (name
		  && TREE_CODE (name) == SSA_NAME
		  && !bitmap_bit_p (processed, SSA_NAME_VERSION (name))
		  /* Avoid doing so for allocation calls which we
		     did not mark as necessary, it will confuse the
		     special logic we apply to malloc/free pair removal.  */
		  && (!(call = gimple_call_fndecl (stmt))
		      || DECL_BUILT_IN_CLASS (call) != BUILT_IN_NORMAL
		      || (DECL_FUNCTION_CODE (call) != BUILT_IN_MALLOC
			  && DECL_FUNCTION_CODE (call) != BUILT_IN_CALLOC
			  && DECL_FUNCTION_CODE (call) != BUILT_IN_ALLOCA
			  && (DECL_FUNCTION_CODE (call)
			      != BUILT_IN_ALLOCA_WITH_ALIGN))))
		{
		  something_changed = true;
		  if (dump_file && (dump_flags & TDF_DETAILS))
		    {
		      fprintf (dump_file, "Deleting LHS of call: ");
		      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
		      fprintf (dump_file, "\n");
		    }

		  gimple_call_set_lhs (stmt, NULL_TREE);
		  maybe_clean_or_replace_eh_stmt (stmt, stmt);
		  update_stmt (stmt);
		  release_ssa_name (name);
		}
	    }
	}
    }

  h.release ();

  /* Since we don't track liveness of virtual PHI nodes, it is possible that we
     rendered some PHI nodes unreachable while they are still in use.
     Mark them for renaming.  */
  if (cfg_altered)
    {
      basic_block prev_bb;

      find_unreachable_blocks ();

      /* Delete all unreachable basic blocks in reverse dominator order.  */
      for (bb = EXIT_BLOCK_PTR->prev_bb; bb != ENTRY_BLOCK_PTR; bb = prev_bb)
	{
	  prev_bb = bb->prev_bb;

	  if (!bitmap_bit_p (bb_contains_live_stmts, bb->index)
	      || !(bb->flags & BB_REACHABLE))
	    {
	      for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
		if (virtual_operand_p (gimple_phi_result (gsi_stmt (gsi))))
		  {
		    bool found = false;
		    imm_use_iterator iter;

		    FOR_EACH_IMM_USE_STMT (stmt, iter, gimple_phi_result (gsi_stmt (gsi)))
		      {
			if (!(gimple_bb (stmt)->flags & BB_REACHABLE))
			  continue;
			if (gimple_code (stmt) == GIMPLE_PHI
			    || gimple_plf (stmt, STMT_NECESSARY))
			  {
			    found = true;
			    BREAK_FROM_IMM_USE_STMT (iter);
			  }
		      }
		    if (found)
		      mark_virtual_phi_result_for_renaming (gsi_stmt (gsi));
		  }

	      if (!(bb->flags & BB_REACHABLE))
		{
		  /* Speed up the removal of blocks that don't
		     dominate others.  Walking backwards, this should
		     be the common case.  ??? Do we need to recompute
		     dominators because of cfg_altered?  */
		  if (!MAY_HAVE_DEBUG_STMTS
		      || !first_dom_son (CDI_DOMINATORS, bb))
		    delete_basic_block (bb);
		  else
		    {
		      h = get_all_dominated_blocks (CDI_DOMINATORS, bb);

		      while (h.length ())
			{
			  bb = h.pop ();
			  prev_bb = bb->prev_bb;
			  /* Rearrangements to the CFG may have failed
			     to update the dominators tree, so that
			     formerly-dominated blocks are now
			     otherwise reachable.  */
			  if (!!(bb->flags & BB_REACHABLE))
			    continue;
			  delete_basic_block (bb);
			}

		      h.release ();
		    }
		}
	    }
	}
    }
  FOR_EACH_BB (bb)
    {
      /* Remove dead PHI nodes.  */
      something_changed |= remove_dead_phis (bb);
    }

  return something_changed;
}
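
/* The LHS stripping above turns, e.g. (hypothetical),

     x_1 = foo ();    <- call necessary, x_1 otherwise unused

   into plain

     foo ();

   keeping the call's side effects while releasing the dead name.  */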


/* Print out removed statement statistics.  */

static void
print_stats (void)
{
  float percg;

  percg = ((float) stats.removed / (float) stats.total) * 100;
  fprintf (dump_file, "Removed %d of %d statements (%d%%)\n",
	   stats.removed, stats.total, (int) percg);

  if (stats.total_phis == 0)
    percg = 0;
  else
    percg = ((float) stats.removed_phis / (float) stats.total_phis) * 100;

  fprintf (dump_file, "Removed %d of %d PHI nodes (%d%%)\n",
	   stats.removed_phis, stats.total_phis, (int) percg);
}

/* Initialization for this pass.  Set up the used data structures.  */

static void
tree_dce_init (bool aggressive)
{
  memset ((void *) &stats, 0, sizeof (stats));

  if (aggressive)
    {
      int i;

      control_dependence_map = XNEWVEC (bitmap, last_basic_block);
      for (i = 0; i < last_basic_block; ++i)
	control_dependence_map[i] = BITMAP_ALLOC (NULL);

      last_stmt_necessary = sbitmap_alloc (last_basic_block);
      bitmap_clear (last_stmt_necessary);
      bb_contains_live_stmts = sbitmap_alloc (last_basic_block);
      bitmap_clear (bb_contains_live_stmts);
    }

  processed = sbitmap_alloc (num_ssa_names + 1);
  bitmap_clear (processed);

  worklist.create (64);
  cfg_altered = false;
}

/* Cleanup after this pass.  */

static void
tree_dce_done (bool aggressive)
{
  if (aggressive)
    {
      int i;

      for (i = 0; i < last_basic_block; ++i)
	BITMAP_FREE (control_dependence_map[i]);
      free (control_dependence_map);

      sbitmap_free (visited_control_parents);
      sbitmap_free (last_stmt_necessary);
      sbitmap_free (bb_contains_live_stmts);
      bb_contains_live_stmts = NULL;
    }

  sbitmap_free (processed);

  worklist.release ();
}

/* Main routine to eliminate dead code.

   AGGRESSIVE controls the aggressiveness of the algorithm.
   In conservative mode, we ignore control dependence and simply declare
   all but the most trivially dead branches necessary.  This mode is fast.
   In aggressive mode, control dependences are taken into account, which
   results in more dead code elimination, but at the cost of some time.

   FIXME: Aggressive mode before PRE doesn't work currently because
	  the dominance info is not invalidated after DCE1.  This is
	  not an issue right now because we only run aggressive DCE
	  as the last tree SSA pass, but keep this in mind when you
	  start experimenting with pass ordering.  */

static unsigned int
perform_tree_ssa_dce (bool aggressive)
{
  struct edge_list *el = NULL;
  bool something_changed = 0;

  calculate_dominance_info (CDI_DOMINATORS);

  /* Preheaders are needed for SCEV to work.
     Simple latches and recorded exits improve the chances that a loop
     will be proved finite in testcases such as loop-15.c and loop-24.c.  */
  if (aggressive)
    loop_optimizer_init (LOOPS_NORMAL
			 | LOOPS_HAVE_RECORDED_EXITS);

  tree_dce_init (aggressive);

  if (aggressive)
    {
      /* Compute control dependence.  */
      timevar_push (TV_CONTROL_DEPENDENCES);
      calculate_dominance_info (CDI_POST_DOMINATORS);
      el = create_edge_list ();
      find_all_control_dependences (el);
      timevar_pop (TV_CONTROL_DEPENDENCES);

      visited_control_parents = sbitmap_alloc (last_basic_block);
      bitmap_clear (visited_control_parents);

      mark_dfs_back_edges ();
    }

  find_obviously_necessary_stmts (el);

  if (aggressive)
    loop_optimizer_finalize ();

  longest_chain = 0;
  total_chain = 0;
  nr_walks = 0;
  chain_ovfl = false;
  visited = BITMAP_ALLOC (NULL);
  propagate_necessity (el);
  BITMAP_FREE (visited);

  something_changed |= eliminate_unnecessary_stmts ();
  something_changed |= cfg_altered;

  /* We do not update postdominators, so free them unconditionally.  */
  free_dominance_info (CDI_POST_DOMINATORS);

  /* If we removed paths in the CFG, then we need to update
     dominators as well.  I haven't investigated the possibility
     of incrementally updating dominators.  */
  if (cfg_altered)
    free_dominance_info (CDI_DOMINATORS);

  statistics_counter_event (cfun, "Statements deleted", stats.removed);
  statistics_counter_event (cfun, "PHI nodes deleted", stats.removed_phis);

  /* Debugging dumps.  */
  if (dump_file && (dump_flags & (TDF_STATS|TDF_DETAILS)))
    print_stats ();

  tree_dce_done (aggressive);

  free_edge_list (el);

  if (something_changed)
    return TODO_update_ssa | TODO_cleanup_cfg;
  return 0;
}

/* Pass entry points.  */
static unsigned int
tree_ssa_dce (void)
{
  return perform_tree_ssa_dce (/*aggressive=*/false);
}

static unsigned int
tree_ssa_dce_loop (void)
{
  unsigned int todo;
  todo = perform_tree_ssa_dce (/*aggressive=*/false);
  if (todo)
    {
      free_numbers_of_iterations_estimates ();
      scev_reset ();
    }
  return todo;
}

static unsigned int
tree_ssa_cd_dce (void)
{
  return perform_tree_ssa_dce (/*aggressive=*/optimize >= 2);
}

static bool
gate_dce (void)
{
  return flag_tree_dce != 0;
}

struct gimple_opt_pass pass_dce =
{
 {
  GIMPLE_PASS,
  "dce",				/* name */
  OPTGROUP_NONE,                        /* optinfo_flags */
  gate_dce,				/* gate */
  tree_ssa_dce,				/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_TREE_DCE,				/* tv_id */
  PROP_cfg | PROP_ssa,			/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_verify_ssa	                /* todo_flags_finish */
 }
};

struct gimple_opt_pass pass_dce_loop =
{
 {
  GIMPLE_PASS,
  "dceloop",				/* name */
  OPTGROUP_NONE,                        /* optinfo_flags */
  gate_dce,				/* gate */
  tree_ssa_dce_loop,			/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_TREE_DCE,				/* tv_id */
  PROP_cfg | PROP_ssa,			/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_verify_ssa	                /* todo_flags_finish */
 }
};

struct gimple_opt_pass pass_cd_dce =
{
 {
  GIMPLE_PASS,
  "cddce",				/* name */
  OPTGROUP_NONE,                        /* optinfo_flags */
  gate_dce,				/* gate */
  tree_ssa_cd_dce,			/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_TREE_CD_DCE,			/* tv_id */
  PROP_cfg | PROP_ssa,			/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_verify_ssa
  | TODO_verify_flow			/* todo_flags_finish */
 }
};