/* Dead code elimination pass for the GNU compiler.
   Copyright (C) 2002-2015 Free Software Foundation, Inc.
   Contributed by Ben Elliston <bje@redhat.com>
   and Andrew MacLeod <amacleod@redhat.com>
   Adapted to use control dependence by Steven Bosscher, SUSE Labs.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* Dead code elimination.

   References:

     Building an Optimizing Compiler,
     Robert Morgan, Butterworth-Heinemann, 1998, Section 8.9.

     Advanced Compiler Design and Implementation,
     Steven Muchnick, Morgan Kaufmann, 1997, Section 18.10.

   Dead-code elimination is the removal of statements which have no
   impact on the program's output.  "Dead statements" are those whose
   removal cannot change the output; "necessary statements" are those
   that may affect it.

   The algorithm consists of three phases:
   1. Marking as necessary all statements known to be necessary,
      e.g. most function calls, writing a value to memory, etc;
   2. Propagating necessary statements, e.g., the statements
      giving values to operands in necessary statements; and
   3. Removing dead statements.  */

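/* As an illustrative sketch only (not part of the pass), consider:

     int
     f (int x)
     {
       int t = x * 2;	<-- no uses, no side effects: dead
       return x + 1;
     }

   Phase 1 marks the return statement as obviously necessary, phase 2
   propagates necessity to the statements computing x + 1, and phase 3
   deletes the unmarked computation of t.  */
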
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "hash-set.h"
#include "machmode.h"
#include "vec.h"
#include "double-int.h"
#include "input.h"
#include "alias.h"
#include "symtab.h"
#include "wide-int.h"
#include "inchash.h"
#include "tree.h"
#include "fold-const.h"
#include "calls.h"
#include "gimple-pretty-print.h"
#include "predict.h"
#include "hard-reg-set.h"
#include "function.h"
#include "dominance.h"
#include "cfg.h"
#include "cfganal.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "tree-eh.h"
#include "gimple-expr.h"
#include "is-a.h"
#include "gimple.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "gimple-ssa.h"
#include "tree-cfg.h"
#include "tree-phinodes.h"
#include "ssa-iterators.h"
#include "stringpool.h"
#include "tree-ssanames.h"
#include "tree-ssa-loop-niter.h"
#include "tree-into-ssa.h"
#include "hashtab.h"
#include "rtl.h"
#include "flags.h"
#include "statistics.h"
#include "real.h"
#include "fixed-value.h"
#include "insn-config.h"
#include "expmed.h"
#include "dojump.h"
#include "explow.h"
#include "emit-rtl.h"
#include "varasm.h"
#include "stmt.h"
#include "expr.h"
#include "tree-dfa.h"
#include "tree-pass.h"
#include "cfgloop.h"
#include "tree-scalar-evolution.h"
#include "tree-chkp.h"
#include "tree-ssa-propagate.h"
#include "gimple-fold.h"

static struct stmt_stats
{
  int total;
  int total_phis;
  int removed;
  int removed_phis;
} stats;

#define STMT_NECESSARY GF_PLF_1

static vec<gimple> worklist;

/* Vector indicating an SSA name has already been processed and marked
   as necessary.  */
static sbitmap processed;

/* Vector indicating that the last statement of a basic block has already
   been marked as necessary.  */
static sbitmap last_stmt_necessary;

/* Vector indicating that BB contains statements that are live.  */
static sbitmap bb_contains_live_stmts;

/* Before we can determine whether a control branch is dead, we need to
   compute which blocks are control dependent on which edges.

   We expect each block to be control dependent on very few edges so we
   use a bitmap for each block recording its edges.  An array holds the
   bitmap.  The Ith bit in the bitmap is set if that block is dependent
   on the Ith edge.  */
static control_dependences *cd;
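
/* As an illustrative sketch only: in the diamond

     bb2:  if (p) goto bb3; else goto bb4;
     bb3:  ...; goto bb5;
     bb4:  ...; goto bb5;
     bb5:  join

   bb3 is control dependent on the edge bb2->bb3 and bb4 on the edge
   bb2->bb4, while bb5 post-dominates bb2 and is control dependent on
   neither; the bitmaps for bb3 and bb4 therefore each have exactly
   one bit set.  */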

/* Vector indicating that a basic block has already had all the edges
   processed that it is control dependent on.  */
static sbitmap visited_control_parents;

/* TRUE if this pass alters the CFG (by removing control statements).
   FALSE otherwise.

   If this pass alters the CFG, then it will arrange for the dominators
   to be recomputed.  */
static bool cfg_altered;


/* If STMT is not already marked necessary, mark it, and add it to the
   worklist if ADD_TO_WORKLIST is true.  */

static inline void
mark_stmt_necessary (gimple stmt, bool add_to_worklist)
{
  gcc_assert (stmt);

  if (gimple_plf (stmt, STMT_NECESSARY))
    return;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Marking useful stmt: ");
      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
      fprintf (dump_file, "\n");
    }

  gimple_set_plf (stmt, STMT_NECESSARY, true);
  if (add_to_worklist)
    worklist.safe_push (stmt);
  if (bb_contains_live_stmts && !is_gimple_debug (stmt))
    bitmap_set_bit (bb_contains_live_stmts, gimple_bb (stmt)->index);
}


/* Mark the statement defining operand OP as necessary.  */

static inline void
mark_operand_necessary (tree op)
{
  gimple stmt;
  int ver;

  gcc_assert (op);

  ver = SSA_NAME_VERSION (op);
  if (bitmap_bit_p (processed, ver))
    {
      stmt = SSA_NAME_DEF_STMT (op);
      gcc_assert (gimple_nop_p (stmt)
		  || gimple_plf (stmt, STMT_NECESSARY));
      return;
    }
  bitmap_set_bit (processed, ver);

  stmt = SSA_NAME_DEF_STMT (op);
  gcc_assert (stmt);

  if (gimple_plf (stmt, STMT_NECESSARY) || gimple_nop_p (stmt))
    return;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "marking necessary through ");
      print_generic_expr (dump_file, op, 0);
      fprintf (dump_file, " stmt ");
      print_gimple_stmt (dump_file, stmt, 0, 0);
    }

  gimple_set_plf (stmt, STMT_NECESSARY, true);
  if (bb_contains_live_stmts)
    bitmap_set_bit (bb_contains_live_stmts, gimple_bb (stmt)->index);
  worklist.safe_push (stmt);
}


/* Mark STMT as necessary if it obviously is.  Add it to the worklist if
   it can make other statements necessary.

   If AGGRESSIVE is false, control statements are conservatively marked as
   necessary.  */

static void
mark_stmt_if_obviously_necessary (gimple stmt, bool aggressive)
{
  /* With non-call exceptions, we have to assume that all statements could
     throw.  If a statement could throw, it can be deemed necessary.  */
  if (cfun->can_throw_non_call_exceptions
      && !cfun->can_delete_dead_exceptions
      && stmt_could_throw_p (stmt))
    {
      mark_stmt_necessary (stmt, true);
      return;
    }

  /* Statements that are implicitly live.  Most function calls, asm
     and return statements are required.  Labels and GIMPLE_BIND nodes
     are kept because they are control flow, and we have no way of
     knowing whether they can be removed.  DCE can eliminate all the
     other statements in a block, and CFG cleanup can then remove the
     block and labels.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_PREDICT:
    case GIMPLE_LABEL:
      mark_stmt_necessary (stmt, false);
      return;

    case GIMPLE_ASM:
    case GIMPLE_RESX:
    case GIMPLE_RETURN:
      mark_stmt_necessary (stmt, true);
      return;

    case GIMPLE_CALL:
      {
	tree callee = gimple_call_fndecl (stmt);
	if (callee != NULL_TREE
	    && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
	  switch (DECL_FUNCTION_CODE (callee))
	    {
	    case BUILT_IN_MALLOC:
	    case BUILT_IN_ALIGNED_ALLOC:
	    case BUILT_IN_CALLOC:
	    case BUILT_IN_ALLOCA:
	    case BUILT_IN_ALLOCA_WITH_ALIGN:
	      return;

	    default:;
	    }
	/* Most, but not all, function calls are required.  Function calls
	   that produce no result and have no side effects (i.e. const and
	   pure functions) are unnecessary.  */
	if (gimple_has_side_effects (stmt))
	  {
	    mark_stmt_necessary (stmt, true);
	    return;
	  }
	if (!gimple_call_lhs (stmt))
	  return;
	break;
      }

    case GIMPLE_DEBUG:
      /* Debug temps without a value are not useful.  ??? If we could
	 easily locate the debug temp bind stmt for a use thereof,
	 we could refrain from marking all debug temps here, and
	 mark them only if they're used.  */
      if (!gimple_debug_bind_p (stmt)
	  || gimple_debug_bind_has_value_p (stmt)
	  || TREE_CODE (gimple_debug_bind_get_var (stmt)) != DEBUG_EXPR_DECL)
	mark_stmt_necessary (stmt, false);
      return;

    case GIMPLE_GOTO:
      gcc_assert (!simple_goto_p (stmt));
      mark_stmt_necessary (stmt, true);
      return;

    case GIMPLE_COND:
      gcc_assert (EDGE_COUNT (gimple_bb (stmt)->succs) == 2);
      /* Fall through.  */

    case GIMPLE_SWITCH:
      if (! aggressive)
	mark_stmt_necessary (stmt, true);
      break;

    case GIMPLE_ASSIGN:
      if (gimple_clobber_p (stmt))
	return;
      break;

    default:
      break;
    }

  /* If the statement has volatile operands, it needs to be preserved.
     Same for statements that can alter control flow in unpredictable
     ways.  */
  if (gimple_has_volatile_ops (stmt) || is_ctrl_altering_stmt (stmt))
    {
      mark_stmt_necessary (stmt, true);
      return;
    }

  if (stmt_may_clobber_global_p (stmt))
    {
      mark_stmt_necessary (stmt, true);
      return;
    }

  return;
}


/* Mark the last statement of BB as necessary.  */

static void
mark_last_stmt_necessary (basic_block bb)
{
  gimple stmt = last_stmt (bb);

  bitmap_set_bit (last_stmt_necessary, bb->index);
  bitmap_set_bit (bb_contains_live_stmts, bb->index);

  /* We actually mark the statement only if it is a control statement.  */
  if (stmt && is_ctrl_stmt (stmt))
    mark_stmt_necessary (stmt, true);
}


/* Mark control dependent edges of BB as necessary.  We have to do this only
   once for each basic block so we set the appropriate bit after we're done.

   When IGNORE_SELF is true, ignore BB in the list of control dependences.  */

static void
mark_control_dependent_edges_necessary (basic_block bb, bool ignore_self)
{
  bitmap_iterator bi;
  unsigned edge_number;
  bool skipped = false;

  gcc_assert (bb != EXIT_BLOCK_PTR_FOR_FN (cfun));

  if (bb == ENTRY_BLOCK_PTR_FOR_FN (cfun))
    return;

  EXECUTE_IF_SET_IN_BITMAP (cd->get_edges_dependent_on (bb->index),
			    0, edge_number, bi)
    {
      basic_block cd_bb = cd->get_edge (edge_number)->src;

      if (ignore_self && cd_bb == bb)
	{
	  skipped = true;
	  continue;
	}

      if (!bitmap_bit_p (last_stmt_necessary, cd_bb->index))
	mark_last_stmt_necessary (cd_bb);
    }

  if (!skipped)
    bitmap_set_bit (visited_control_parents, bb->index);
}


/* Find obviously necessary statements.  These are things like most function
   calls, and stores to file level variables.

   If AGGRESSIVE is false, control statements are conservatively marked as
   necessary; otherwise they are left for control dependence analysis.  */

static void
find_obviously_necessary_stmts (bool aggressive)
{
  basic_block bb;
  gimple_stmt_iterator gsi;
  edge e;
  gimple phi, stmt;
  int flags;

  FOR_EACH_BB_FN (bb, cfun)
    {
      /* PHI nodes are never inherently necessary.  */
      for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  phi = gsi_stmt (gsi);
	  gimple_set_plf (phi, STMT_NECESSARY, false);
	}

      /* Check all statements in the block.  */
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  stmt = gsi_stmt (gsi);
	  gimple_set_plf (stmt, STMT_NECESSARY, false);
	  mark_stmt_if_obviously_necessary (stmt, aggressive);
	}
    }

  /* Pure and const functions are finite and thus have no infinite loops in
     them.  */
  flags = flags_from_decl_or_type (current_function_decl);
  if ((flags & (ECF_CONST|ECF_PURE)) && !(flags & ECF_LOOPING_CONST_OR_PURE))
    return;

  /* Prevent empty, possibly infinite loops from being removed.  */
  if (aggressive)
    {
      struct loop *loop;
      scev_initialize ();
      if (mark_irreducible_loops ())
	FOR_EACH_BB_FN (bb, cfun)
	  {
	    edge_iterator ei;
	    FOR_EACH_EDGE (e, ei, bb->succs)
	      if ((e->flags & EDGE_DFS_BACK)
		  && (e->flags & EDGE_IRREDUCIBLE_LOOP))
		{
		  if (dump_file)
		    fprintf (dump_file, "Marking back edge of irreducible loop %i->%i\n",
			     e->src->index, e->dest->index);
		  mark_control_dependent_edges_necessary (e->dest, false);
		}
	  }

      FOR_EACH_LOOP (loop, 0)
	if (!finite_loop_p (loop))
	  {
	    if (dump_file)
	      fprintf (dump_file, "cannot prove finiteness of loop %i\n", loop->num);
	    mark_control_dependent_edges_necessary (loop->latch, false);
	  }
      scev_finalize ();
    }
}


/* Return true if REF is based on an aliased base, otherwise false.  */

static bool
ref_may_be_aliased (tree ref)
{
  gcc_assert (TREE_CODE (ref) != WITH_SIZE_EXPR);
  while (handled_component_p (ref))
    ref = TREE_OPERAND (ref, 0);
  if (TREE_CODE (ref) == MEM_REF
      && TREE_CODE (TREE_OPERAND (ref, 0)) == ADDR_EXPR)
    ref = TREE_OPERAND (TREE_OPERAND (ref, 0), 0);
  return !(DECL_P (ref)
	   && !may_be_aliased (ref));
}

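/* Illustrative examples only (not part of the pass): given

     int a;  int *p = &a;  int b;

   where only A has its address taken, a reference to "a" or to "*p"
   may be aliased and the function returns true; a reference to "b",
   whose address is never taken, is based on a non-aliased decl and
   the function returns false.  */
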
static bitmap visited = NULL;
static unsigned int longest_chain = 0;
static unsigned int total_chain = 0;
static unsigned int nr_walks = 0;
static bool chain_ovfl = false;

/* Worker for the walker that marks reaching definitions of REF,
   which is based on a non-aliased decl, necessary.  It returns
   true whenever the defining statement of the current VDEF is
   a kill for REF, as no dominating may-defs are necessary for REF
   anymore.  DATA points to the basic-block that contains the
   stmt that refers to REF.  */

static bool
mark_aliased_reaching_defs_necessary_1 (ao_ref *ref, tree vdef, void *data)
{
  gimple def_stmt = SSA_NAME_DEF_STMT (vdef);

  /* All stmts we visit are necessary.  */
  mark_operand_necessary (vdef);

  /* If the stmt lhs kills ref, then we can stop walking.  */
  if (gimple_has_lhs (def_stmt)
      && TREE_CODE (gimple_get_lhs (def_stmt)) != SSA_NAME
      /* The assignment is not necessarily carried out if it can throw
	 and we can catch it in the current function where we could inspect
	 the previous value.
	 ???  We only need to care about the RHS throwing.  For aggregate
	 assignments or similar calls and non-call exceptions the LHS
	 might throw as well.  */
      && !stmt_can_throw_internal (def_stmt))
    {
      tree base, lhs = gimple_get_lhs (def_stmt);
      HOST_WIDE_INT size, offset, max_size;
      ao_ref_base (ref);
      base = get_ref_base_and_extent (lhs, &offset, &size, &max_size);
      /* We can get MEM[symbol: sZ, index: D.8862_1] here,
	 so base == ref->base does not always hold.  */
      if (base == ref->base)
	{
	  /* For a must-alias check we need to be able to constrain
	     the accesses properly.  */
	  if (size != -1 && size == max_size
	      && ref->max_size != -1)
	    {
	      if (offset <= ref->offset
		  && offset + size >= ref->offset + ref->max_size)
		return true;
	    }
	  /* Or they need to be exactly the same.  */
	  else if (ref->ref
		   /* Make sure there is no induction variable involved
		      in the references (gcc.c-torture/execute/pr42142.c).
		      The simplest way is to check if the kill dominates
		      the use.  */
		   /* But when both are in the same block we cannot
		      easily tell whether we came from a backedge
		      unless we decide to compute stmt UIDs
		      (see PR58246).  */
		   && (basic_block) data != gimple_bb (def_stmt)
		   && dominated_by_p (CDI_DOMINATORS, (basic_block) data,
				      gimple_bb (def_stmt))
		   && operand_equal_p (ref->ref, lhs, 0))
	    return true;
	}
    }

  /* Otherwise keep walking.  */
  return false;
}

static void
mark_aliased_reaching_defs_necessary (gimple stmt, tree ref)
{
  unsigned int chain;
  ao_ref refd;
  gcc_assert (!chain_ovfl);
  ao_ref_init (&refd, ref);
  chain = walk_aliased_vdefs (&refd, gimple_vuse (stmt),
			      mark_aliased_reaching_defs_necessary_1,
			      gimple_bb (stmt), NULL);
  if (chain > longest_chain)
    longest_chain = chain;
  total_chain += chain;
  nr_walks++;
}

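/* A sketch of the effect (illustrative only), with A and B
   non-aliased locals:

     b = 2;	<-- never reached by the walk below
     a = 1;	<-- marked necessary; its LHS kills A, walk stops
     x_1 = a;	<-- load whose reaching definitions we walk

   Walking backwards from the VUSE of the load, the store "a = 1" is
   marked necessary; because it is a must-def covering all of A, the
   walker returns true and no earlier may-defs are visited.  */
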
/* Worker for the walker that marks reaching definitions of REF, which
   is not based on a non-aliased decl.  For simplicity we need to end
   up marking all may-defs necessary that are not based on a non-aliased
   decl.  The only job of this walker is to skip may-defs based on
   a non-aliased decl.  */

static bool
mark_all_reaching_defs_necessary_1 (ao_ref *ref ATTRIBUTE_UNUSED,
				    tree vdef, void *data ATTRIBUTE_UNUSED)
{
  gimple def_stmt = SSA_NAME_DEF_STMT (vdef);

  /* We have to skip already visited (and thus necessary) statements
     to make the chaining work after we dropped back to simple mode.  */
  if (chain_ovfl
      && bitmap_bit_p (processed, SSA_NAME_VERSION (vdef)))
    {
      gcc_assert (gimple_nop_p (def_stmt)
		  || gimple_plf (def_stmt, STMT_NECESSARY));
      return false;
    }

  /* We want to skip stores to non-aliased variables.  */
  if (!chain_ovfl
      && gimple_assign_single_p (def_stmt))
    {
      tree lhs = gimple_assign_lhs (def_stmt);
      if (!ref_may_be_aliased (lhs))
	return false;
    }

  /* We want to skip statements that do not constitute stores but have
     a virtual definition.  */
  if (is_gimple_call (def_stmt))
    {
      tree callee = gimple_call_fndecl (def_stmt);
      if (callee != NULL_TREE
	  && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
	switch (DECL_FUNCTION_CODE (callee))
	  {
	  case BUILT_IN_MALLOC:
	  case BUILT_IN_ALIGNED_ALLOC:
	  case BUILT_IN_CALLOC:
	  case BUILT_IN_ALLOCA:
	  case BUILT_IN_ALLOCA_WITH_ALIGN:
	  case BUILT_IN_FREE:
	    return false;

	  default:;
	  }
    }

  mark_operand_necessary (vdef);

  return false;
}

static void
mark_all_reaching_defs_necessary (gimple stmt)
{
  walk_aliased_vdefs (NULL, gimple_vuse (stmt),
		      mark_all_reaching_defs_necessary_1, NULL, &visited);
}

/* Return true if PHI has a single argument or all identical arguments,
   and thus can be replaced by that argument.  */
static bool
degenerate_phi_p (gimple phi)
{
  unsigned int i;
  tree op = gimple_phi_arg_def (phi, 0);
  for (i = 1; i < gimple_phi_num_args (phi); i++)
    if (gimple_phi_arg_def (phi, i) != op)
      return false;
  return true;
}

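/* For example (illustrative only), the PHI node

     x_4 = PHI <a_2(3), a_2(4)>

   is degenerate: all arguments are a_2, so x_4 can simply be replaced
   by a_2.  A PHI with a single argument is trivially degenerate.  */
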
/* Propagate necessity using the operands of necessary statements.
   Process the uses on each statement in the worklist, and add all
   feeding statements which contribute to the calculation of this
   value to the worklist.

   In conservative mode (AGGRESSIVE is false), control dependences
   are ignored.  */

static void
propagate_necessity (bool aggressive)
{
  gimple stmt;

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "\nProcessing worklist:\n");

  while (worklist.length () > 0)
    {
      /* Take STMT from worklist.  */
      stmt = worklist.pop ();

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "processing: ");
	  print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
	  fprintf (dump_file, "\n");
	}

      if (aggressive)
	{
	  /* Mark the last statement of the basic blocks on which the block
	     containing STMT is control dependent, but only if we haven't
	     already done so.  */
	  basic_block bb = gimple_bb (stmt);
	  if (bb != ENTRY_BLOCK_PTR_FOR_FN (cfun)
	      && !bitmap_bit_p (visited_control_parents, bb->index))
	    mark_control_dependent_edges_necessary (bb, false);
	}

      if (gimple_code (stmt) == GIMPLE_PHI
	  /* We do not process virtual PHI nodes nor do we track their
	     necessity.  */
	  && !virtual_operand_p (gimple_phi_result (stmt)))
	{
	  /* PHI nodes are somewhat special in that each PHI alternative has
	     data and control dependencies.  All the statements feeding the
	     PHI node's arguments are always necessary.  In aggressive mode,
	     we also consider the control dependent edges leading to the
	     predecessor block associated with each PHI alternative as
	     necessary.  */
	  gphi *phi = as_a <gphi *> (stmt);
	  size_t k;

	  for (k = 0; k < gimple_phi_num_args (stmt); k++)
	    {
	      tree arg = PHI_ARG_DEF (stmt, k);
	      if (TREE_CODE (arg) == SSA_NAME)
		mark_operand_necessary (arg);
	    }

	  /* For PHI operands it matters from where the control flow arrives
	     to the BB.  Consider the following example:

	     a=exp1;
	     b=exp2;
	     if (test)
		;
	     else
		;
	     c=PHI(a,b)

	     We need to mark control dependence of the empty basic blocks,
	     since they contain computations of PHI operands.

	     Doing so is too restrictive in the case the predecessor block is
	     in the loop.  Consider:

	      if (b)
		{
		  int i;
		  for (i = 0; i<1000; ++i)
		    ;
		  j = 0;
		}
	      return j;

	     There is a PHI for J in the BB containing the return statement.
	     In this case the control dependence of the predecessor block
	     (that is within the empty loop) also contains the block
	     determining the number of iterations of the loop, which would
	     prevent removing the empty loop in this case.

	     This scenario can be avoided by splitting critical edges.
	     To save the critical edge splitting pass we identify how the
	     control dependence would look like if the edge was split.

	     Consider the modified CFG created from the current CFG by
	     splitting edge B->C.  In the postdominance tree of the modified
	     CFG, C' is always a child of C.  There are two cases how the
	     children of C' can look like:

		1) C' is leaf

		   In this case the only basic block C' is control dependent
		   on is B.

		2) C' has single child that is B

		   In this case control dependence of C' is same as control
		   dependence of B in original CFG except for block B itself.
		   (since C' postdominates B in modified CFG)

	     Now how to decide what case happens?  There are two basic options:

		a) C postdominates B.  Then C immediately postdominates B and
		   case 2 happens iff there is no other way from B to C except
		   the edge B->C.

		   There is another way from B to C iff there is a successor
		   of B that is not postdominated by B.  Testing this
		   condition is somewhat expensive, because we need to iterate
		   all successors of B.  We are safe to assume that this does
		   not happen: we will mark B as needed when processing the
		   other path from B to C that is control dependent on B and
		   marking control dependencies of B itself is harmless
		   because they will be processed anyway after processing the
		   control statement in B.

		b) C does not postdominate B.  Always case 1 happens since
		   there is a path from C to exit that does not go through B
		   and thus also C'.  */

	  if (aggressive && !degenerate_phi_p (stmt))
	    {
	      for (k = 0; k < gimple_phi_num_args (stmt); k++)
		{
		  basic_block arg_bb = gimple_phi_arg_edge (phi, k)->src;

		  if (gimple_bb (stmt)
		      != get_immediate_dominator (CDI_POST_DOMINATORS, arg_bb))
		    {
		      if (!bitmap_bit_p (last_stmt_necessary, arg_bb->index))
			mark_last_stmt_necessary (arg_bb);
		    }
		  else if (arg_bb != ENTRY_BLOCK_PTR_FOR_FN (cfun)
			   && !bitmap_bit_p (visited_control_parents,
					     arg_bb->index))
		    mark_control_dependent_edges_necessary (arg_bb, true);
		}
	    }
	}
      else
	{
	  /* Propagate through the operands.  Examine all the USE, VUSE and
	     VDEF operands in this statement.  Mark all the statements
	     which feed this statement's uses as necessary.  */
	  ssa_op_iter iter;
	  tree use;

	  /* If this is a call to free which is directly fed by an
	     allocation function do not mark that necessary through
	     processing the argument.  */
	  if (gimple_call_builtin_p (stmt, BUILT_IN_FREE))
	    {
	      tree ptr = gimple_call_arg (stmt, 0);
	      gimple def_stmt;
	      tree def_callee;
	      /* If the pointer we free is defined by an allocation
		 function do not add the call to the worklist.  */
	      if (TREE_CODE (ptr) == SSA_NAME
		  && is_gimple_call (def_stmt = SSA_NAME_DEF_STMT (ptr))
		  && (def_callee = gimple_call_fndecl (def_stmt))
		  && DECL_BUILT_IN_CLASS (def_callee) == BUILT_IN_NORMAL
		  && (DECL_FUNCTION_CODE (def_callee) == BUILT_IN_ALIGNED_ALLOC
		      || DECL_FUNCTION_CODE (def_callee) == BUILT_IN_MALLOC
		      || DECL_FUNCTION_CODE (def_callee) == BUILT_IN_CALLOC))
		{
		  gimple bounds_def_stmt;
		  tree bounds;

		  /* For instrumented calls we should also check that the used
		     bounds are returned by the same allocation call.  */
		  if (!gimple_call_with_bounds_p (stmt)
		      || ((bounds = gimple_call_arg (stmt, 1))
			  && TREE_CODE (bounds) == SSA_NAME
			  && (bounds_def_stmt = SSA_NAME_DEF_STMT (bounds))
			  && chkp_gimple_call_builtin_p (bounds_def_stmt,
							 BUILT_IN_CHKP_BNDRET)
			  && gimple_call_arg (bounds_def_stmt, 0) == ptr))
		    continue;
		}
	    }

	  FOR_EACH_SSA_TREE_OPERAND (use, stmt, iter, SSA_OP_USE)
	    mark_operand_necessary (use);

	  use = gimple_vuse (stmt);
	  if (!use)
	    continue;

	  /* If we dropped to simple mode make all immediately
	     reachable definitions necessary.  */
	  if (chain_ovfl)
	    {
	      mark_all_reaching_defs_necessary (stmt);
	      continue;
	    }

	  /* For statements that may load from memory (have a VUSE) we
	     have to mark all reaching (may-)definitions as necessary.
	     We partition this task into two cases:
	      1) explicit loads based on decls that are not aliased
	      2) implicit loads (like calls) and explicit loads not
		 based on decls that are not aliased (like indirect
		 references or loads from globals)
	     For 1) we mark all reaching may-defs as necessary, stopping
	     at dominating kills.  For 2) we want to mark all dominating
	     references necessary, except the non-aliased ones which are
	     handled by 1).  By keeping a global visited bitmap for the
	     references we walk for 2) we avoid quadratic behavior for
	     those.  */

	  if (is_gimple_call (stmt))
	    {
	      tree callee = gimple_call_fndecl (stmt);
	      unsigned i;

	      /* Calls to functions that are merely acting as barriers
		 or that only store to memory do not make any previous
		 stores necessary.  */
	      if (callee != NULL_TREE
		  && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL
		  && (DECL_FUNCTION_CODE (callee) == BUILT_IN_MEMSET
		      || DECL_FUNCTION_CODE (callee) == BUILT_IN_MEMSET_CHK
		      || DECL_FUNCTION_CODE (callee) == BUILT_IN_MALLOC
		      || DECL_FUNCTION_CODE (callee) == BUILT_IN_ALIGNED_ALLOC
		      || DECL_FUNCTION_CODE (callee) == BUILT_IN_CALLOC
		      || DECL_FUNCTION_CODE (callee) == BUILT_IN_FREE
		      || DECL_FUNCTION_CODE (callee) == BUILT_IN_VA_END
		      || DECL_FUNCTION_CODE (callee) == BUILT_IN_ALLOCA
		      || (DECL_FUNCTION_CODE (callee)
			  == BUILT_IN_ALLOCA_WITH_ALIGN)
		      || DECL_FUNCTION_CODE (callee) == BUILT_IN_STACK_SAVE
		      || DECL_FUNCTION_CODE (callee) == BUILT_IN_STACK_RESTORE
		      || DECL_FUNCTION_CODE (callee) == BUILT_IN_ASSUME_ALIGNED))
		continue;

	      /* Calls implicitly load from memory; in addition their
		 arguments may explicitly perform memory loads.  */
	      mark_all_reaching_defs_necessary (stmt);
	      for (i = 0; i < gimple_call_num_args (stmt); ++i)
		{
		  tree arg = gimple_call_arg (stmt, i);
		  if (TREE_CODE (arg) == SSA_NAME
		      || is_gimple_min_invariant (arg))
		    continue;
		  if (TREE_CODE (arg) == WITH_SIZE_EXPR)
		    arg = TREE_OPERAND (arg, 0);
		  if (!ref_may_be_aliased (arg))
		    mark_aliased_reaching_defs_necessary (stmt, arg);
		}
	    }
	  else if (gimple_assign_single_p (stmt))
	    {
	      tree rhs;
	      /* If this is a load mark things necessary.  */
	      rhs = gimple_assign_rhs1 (stmt);
	      if (TREE_CODE (rhs) != SSA_NAME
		  && !is_gimple_min_invariant (rhs)
		  && TREE_CODE (rhs) != CONSTRUCTOR)
		{
		  if (!ref_may_be_aliased (rhs))
		    mark_aliased_reaching_defs_necessary (stmt, rhs);
		  else
		    mark_all_reaching_defs_necessary (stmt);
		}
	    }
	  else if (greturn *return_stmt = dyn_cast <greturn *> (stmt))
	    {
	      tree rhs = gimple_return_retval (return_stmt);
	      /* A return statement may perform a load.  */
	      if (rhs
		  && TREE_CODE (rhs) != SSA_NAME
		  && !is_gimple_min_invariant (rhs)
		  && TREE_CODE (rhs) != CONSTRUCTOR)
		{
		  if (!ref_may_be_aliased (rhs))
		    mark_aliased_reaching_defs_necessary (stmt, rhs);
		  else
		    mark_all_reaching_defs_necessary (stmt);
		}
	    }
	  else if (gasm *asm_stmt = dyn_cast <gasm *> (stmt))
	    {
	      unsigned i;
	      mark_all_reaching_defs_necessary (stmt);
	      /* Inputs may perform loads.  */
	      for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
		{
		  tree op = TREE_VALUE (gimple_asm_input_op (asm_stmt, i));
		  if (TREE_CODE (op) != SSA_NAME
		      && !is_gimple_min_invariant (op)
		      && TREE_CODE (op) != CONSTRUCTOR
		      && !ref_may_be_aliased (op))
		    mark_aliased_reaching_defs_necessary (stmt, op);
		}
	    }
	  else if (gimple_code (stmt) == GIMPLE_TRANSACTION)
	    {
	      /* The beginning of a transaction is a memory barrier.  */
	      /* ??? If we were really cool, we'd only be a barrier
		 for the memories touched within the transaction.  */
	      mark_all_reaching_defs_necessary (stmt);
	    }
	  else
	    gcc_unreachable ();

	  /* If we over-used our alias oracle budget, drop to simple
	     mode.  The cost metric allows quadratic behavior
	     (number of uses times number of may-defs queries) up to
	     a constant maximal number of queries and after that falls back
	     to super-linear complexity.  */
	  if (/* Constant but quadratic for small functions.  */
	      total_chain > 128 * 128
	      /* Linear in the number of may-defs.  */
	      && total_chain > 32 * longest_chain
	      /* Linear in the number of uses.  */
	      && total_chain > nr_walks * 32)
	    {
	      chain_ovfl = true;
	      if (visited)
		bitmap_clear (visited);
	    }
	}
    }
}

/* Remove dead PHI nodes from block BB.  */

static bool
remove_dead_phis (basic_block bb)
{
  bool something_changed = false;
  gphi *phi;
  gphi_iterator gsi;

  for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi);)
    {
      stats.total_phis++;
      phi = gsi.phi ();

      /* We do not track necessity of virtual PHI nodes.  Instead do
	 very simple dead PHI removal here.  */
      if (virtual_operand_p (gimple_phi_result (phi)))
	{
	  /* Virtual PHI nodes with one or identical arguments
	     can be removed.  */
	  if (degenerate_phi_p (phi))
	    {
	      tree vdef = gimple_phi_result (phi);
	      tree vuse = gimple_phi_arg_def (phi, 0);

	      use_operand_p use_p;
	      imm_use_iterator iter;
	      gimple use_stmt;
	      FOR_EACH_IMM_USE_STMT (use_stmt, iter, vdef)
		FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
		  SET_USE (use_p, vuse);
	      if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (vdef)
		  && TREE_CODE (vuse) == SSA_NAME)
		SSA_NAME_OCCURS_IN_ABNORMAL_PHI (vuse) = 1;
	    }
	  else
	    gimple_set_plf (phi, STMT_NECESSARY, true);
	}

      if (!gimple_plf (phi, STMT_NECESSARY))
	{
	  something_changed = true;
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Deleting : ");
	      print_gimple_stmt (dump_file, phi, 0, TDF_SLIM);
	      fprintf (dump_file, "\n");
	    }

	  remove_phi_node (&gsi, true);
	  stats.removed_phis++;
	  continue;
	}

      gsi_next (&gsi);
    }
  return something_changed;
}

/* Forward edge E to respective POST_DOM_BB and update PHIs.  */

static edge
forward_edge_to_pdom (edge e, basic_block post_dom_bb)
{
  gphi_iterator gsi;
  edge e2 = NULL;
  edge_iterator ei;

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "Redirecting edge %i->%i to %i\n", e->src->index,
	     e->dest->index, post_dom_bb->index);

  e2 = redirect_edge_and_branch (e, post_dom_bb);
  cfg_altered = true;

  /* If the edge was already there, no updating is necessary.  */
  if (e2 != e)
    return e2;

  if (!gimple_seq_empty_p (phi_nodes (post_dom_bb)))
    {
      /* We are sure that for every live PHI we are seeing a control
	 dependent BB.  This means that we can pick any edge to duplicate
	 PHI args from.  */
      FOR_EACH_EDGE (e2, ei, post_dom_bb->preds)
	if (e2 != e)
	  break;
      for (gsi = gsi_start_phis (post_dom_bb); !gsi_end_p (gsi);)
	{
	  gphi *phi = gsi.phi ();
	  tree op;
	  source_location locus;

	  /* PHIs for virtuals have no control dependency relation on them.
	     We are lost here and must force renaming of the symbol.  */
	  if (virtual_operand_p (gimple_phi_result (phi)))
	    {
	      mark_virtual_phi_result_for_renaming (phi);
	      remove_phi_node (&gsi, true);
	      continue;
	    }

	  /* Dead PHIs do not imply control dependency.  */
	  if (!gimple_plf (phi, STMT_NECESSARY))
	    {
	      gsi_next (&gsi);
	      continue;
	    }

	  op = gimple_phi_arg_def (phi, e2->dest_idx);
	  locus = gimple_phi_arg_location (phi, e2->dest_idx);
	  add_phi_arg (phi, op, e, locus);
	  /* The resulting PHI, if not dead, can only be degenerate.  */
	  gcc_assert (degenerate_phi_p (phi));
	  gsi_next (&gsi);
	}
    }
  return e;
}

/* Remove dead statement pointed to by iterator I.  Receives the basic block BB
   containing I so that we don't have to look it up.  */

static void
remove_dead_stmt (gimple_stmt_iterator *i, basic_block bb)
{
  gimple stmt = gsi_stmt (*i);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Deleting : ");
      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
      fprintf (dump_file, "\n");
    }

  stats.removed++;

  /* If we have determined that a conditional branch statement contributes
     nothing to the program, then we not only remove it, but we also change
     the flow graph so that the current block will simply fall-thru to its
     immediate post-dominator.  The blocks we are circumventing will be
     removed by cleanup_tree_cfg if this change in the flow graph makes them
     unreachable.  */
  if (is_ctrl_stmt (stmt))
    {
      basic_block post_dom_bb;
      edge e, e2;
      edge_iterator ei;

      post_dom_bb = get_immediate_dominator (CDI_POST_DOMINATORS, bb);

      e = find_edge (bb, post_dom_bb);

      /* If the edge is already there, try to use it.  This avoids the need
	 to update PHI nodes.  Also watch for cases where the post dominator
	 does not exist or is the exit block.  These can happen for infinite
	 loops as we create fake edges in the dominator tree.  */
      if (e)
	;
      else if (! post_dom_bb || post_dom_bb == EXIT_BLOCK_PTR_FOR_FN (cfun))
	e = EDGE_SUCC (bb, 0);
      else
	e = forward_edge_to_pdom (EDGE_SUCC (bb, 0), post_dom_bb);
      gcc_assert (e);
      e->probability = REG_BR_PROB_BASE;
      e->count = bb->count;

      /* The edge is no longer associated with a conditional, so it does
	 not have TRUE/FALSE flags.  */
      e->flags &= ~(EDGE_TRUE_VALUE | EDGE_FALSE_VALUE);

      /* The lone outgoing edge from BB will be a fallthru edge.  */
      e->flags |= EDGE_FALLTHRU;

      /* Remove the remaining outgoing edges.  */
      for (ei = ei_start (bb->succs); (e2 = ei_safe_edge (ei)); )
	if (e != e2)
	  {
	    cfg_altered = true;
	    /* If we made a BB unconditionally exit a loop or removed
	       an entry into an irreducible region, then this transform
	       alters the set of BBs in the loop.  Schedule a fixup.  */
	    if (loop_exit_edge_p (bb->loop_father, e)
		|| (e2->dest->flags & BB_IRREDUCIBLE_LOOP))
	      loops_state_set (LOOPS_NEED_FIXUP);
	    remove_edge (e2);
	  }
	else
	  ei_next (&ei);
    }

  /* If this is a store into a variable that is being optimized away,
     add a debug bind stmt if possible.  */
  if (MAY_HAVE_DEBUG_STMTS
      && gimple_assign_single_p (stmt)
      && is_gimple_val (gimple_assign_rhs1 (stmt)))
    {
      tree lhs = gimple_assign_lhs (stmt);
      if ((TREE_CODE (lhs) == VAR_DECL || TREE_CODE (lhs) == PARM_DECL)
	  && !DECL_IGNORED_P (lhs)
	  && is_gimple_reg_type (TREE_TYPE (lhs))
	  && !is_global_var (lhs)
	  && !DECL_HAS_VALUE_EXPR_P (lhs))
	{
	  tree rhs = gimple_assign_rhs1 (stmt);
	  gdebug *note
	    = gimple_build_debug_bind (lhs, unshare_expr (rhs), stmt);
	  gsi_insert_after (i, note, GSI_SAME_STMT);
	}
    }

  unlink_stmt_vdef (stmt);
  gsi_remove (i, true);
  release_defs (stmt);
}
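
/* A sketch of the control statement case above (illustrative only):
   when the GIMPLE_COND in

     bb2:  if (p) goto bb3; else goto bb4;
     bb3:  ...
     bb4:  ...
     bb5:  join, immediate post-dominator of bb2

   is found dead, bb2 is left with a single fallthru edge to bb5; the
   edges towards bb3 and bb4 are removed, and blocks made unreachable
   by this are deleted later by cleanup_tree_cfg.  */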

/* Helper for maybe_optimize_arith_overflow.  Find in *TP any use of
   DATA (an SSA_NAME) other than through a REALPART_EXPR referencing
   it.  */

static tree
find_non_realpart_uses (tree *tp, int *walk_subtrees, void *data)
{
  if (TYPE_P (*tp) || TREE_CODE (*tp) == REALPART_EXPR)
    *walk_subtrees = 0;
  if (*tp == (tree) data)
    return *tp;
  return NULL_TREE;
}

/* If the IMAGPART_EXPR of the {ADD,SUB,MUL}_OVERFLOW result is never used,
   but REALPART_EXPR is, optimize the {ADD,SUB,MUL}_OVERFLOW internal calls
   into plain unsigned {PLUS,MINUS,MULT}_EXPR, and if needed reset debug
   uses.  */

static void
maybe_optimize_arith_overflow (gimple_stmt_iterator *gsi,
			       enum tree_code subcode)
{
  gimple stmt = gsi_stmt (*gsi);
  tree lhs = gimple_call_lhs (stmt);

  if (lhs == NULL || TREE_CODE (lhs) != SSA_NAME)
    return;

  imm_use_iterator imm_iter;
  use_operand_p use_p;
  bool has_debug_uses = false;
  bool has_realpart_uses = false;
  bool has_other_uses = false;
  FOR_EACH_IMM_USE_FAST (use_p, imm_iter, lhs)
    {
      gimple use_stmt = USE_STMT (use_p);
      if (is_gimple_debug (use_stmt))
	has_debug_uses = true;
      else if (is_gimple_assign (use_stmt)
	       && gimple_assign_rhs_code (use_stmt) == REALPART_EXPR
	       && TREE_OPERAND (gimple_assign_rhs1 (use_stmt), 0) == lhs)
	has_realpart_uses = true;
      else
	{
	  has_other_uses = true;
	  break;
	}
    }

  if (!has_realpart_uses || has_other_uses)
    return;

  tree arg0 = gimple_call_arg (stmt, 0);
  tree arg1 = gimple_call_arg (stmt, 1);
  location_t loc = gimple_location (stmt);
  tree type = TREE_TYPE (TREE_TYPE (lhs));
  tree utype = type;
  if (!TYPE_UNSIGNED (type))
    utype = build_nonstandard_integer_type (TYPE_PRECISION (type), 1);
  tree result = fold_build2_loc (loc, subcode, utype,
				 fold_convert_loc (loc, utype, arg0),
				 fold_convert_loc (loc, utype, arg1));
  result = fold_convert_loc (loc, type, result);

  if (has_debug_uses)
    {
      gimple use_stmt;
      FOR_EACH_IMM_USE_STMT (use_stmt, imm_iter, lhs)
	{
	  if (!gimple_debug_bind_p (use_stmt))
	    continue;
	  tree v = gimple_debug_bind_get_value (use_stmt);
	  if (walk_tree (&v, find_non_realpart_uses, lhs, NULL))
	    {
	      gimple_debug_bind_reset_value (use_stmt);
	      update_stmt (use_stmt);
	    }
	}
    }

  if (TREE_CODE (result) == INTEGER_CST && TREE_OVERFLOW (result))
    result = drop_tree_overflow (result);
  tree overflow = build_zero_cst (type);
  tree ctype = build_complex_type (type);
  if (TREE_CODE (result) == INTEGER_CST)
    result = build_complex (ctype, result, overflow);
  else
    result = build2_loc (gimple_location (stmt), COMPLEX_EXPR,
			 ctype, result, overflow);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Transforming call: ");
      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
      fprintf (dump_file, "because the overflow result is never used into: ");
      print_generic_stmt (dump_file, result, TDF_SLIM);
      fprintf (dump_file, "\n");
    }

  if (!update_call_from_tree (gsi, result))
    gimplify_and_update_call_from_tree (gsi, result);
}
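
/* A sketch of the transformation (illustrative only): when the
   IMAGPART_EXPR of the result is never used,

     _5 = ADD_OVERFLOW (x_2, y_3);
     _6 = REALPART_EXPR <_5>;

   is rewritten so that the addition is performed as a plain PLUS_EXPR
   in the corresponding unsigned type and repackaged as
   COMPLEX_EXPR <sum, 0>, from which _6 still extracts the real
   part.  */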

/* Eliminate unnecessary statements.  Any instruction not marked as necessary
   contributes nothing to the program, and can be deleted.  */

static bool
eliminate_unnecessary_stmts (void)
{
  bool something_changed = false;
  basic_block bb;
  gimple_stmt_iterator gsi, psi;
  gimple stmt;
  tree call;
  vec<basic_block> h;

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "\nEliminating unnecessary statements:\n");

  clear_special_calls ();

  /* Walking basic blocks and statements in reverse order avoids
     releasing SSA names before any other DEFs that refer to them are
     released.  This helps avoid loss of debug information, as we get
     a chance to propagate all RHSs of removed SSAs into debug uses,
     rather than only the latest ones.  E.g., consider:

     x_3 = y_1 + z_2;
     a_5 = x_3 - b_4;
     # DEBUG a => a_5

     If we were to release x_3 before a_5, when we reached a_5 and
     tried to substitute it into the debug stmt, we'd see x_3 there,
     but x_3's DEF, type, etc would have already been disconnected.
     By going backwards, the debug stmt first changes to:

     # DEBUG a => x_3 - b_4

     and then to:

     # DEBUG a => y_1 + z_2 - b_4

     as desired.  */
  gcc_assert (dom_info_available_p (CDI_DOMINATORS));
  h = get_all_dominated_blocks (CDI_DOMINATORS,
				single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)));

  while (h.length ())
    {
      bb = h.pop ();

      /* Remove dead statements.  */
      for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi); gsi = psi)
	{
	  stmt = gsi_stmt (gsi);

	  psi = gsi;
	  gsi_prev (&psi);

	  stats.total++;

	  /* We can mark a call to free as not necessary if the
	     defining statement of its argument is not necessary
	     (and thus is getting removed).  */
	  if (gimple_plf (stmt, STMT_NECESSARY)
	      && gimple_call_builtin_p (stmt, BUILT_IN_FREE))
	    {
	      tree ptr = gimple_call_arg (stmt, 0);
	      if (TREE_CODE (ptr) == SSA_NAME)
		{
		  gimple def_stmt = SSA_NAME_DEF_STMT (ptr);
		  if (!gimple_nop_p (def_stmt)
		      && !gimple_plf (def_stmt, STMT_NECESSARY))
		    gimple_set_plf (stmt, STMT_NECESSARY, false);
		}
	      /* We did not propagate necessity for free calls fed
		 by an allocation function, to allow unnecessary
		 alloc-free sequence elimination.  For instrumented
		 calls it also means we did not mark the bounds producer
		 as necessary and it is time to do it in case the free
		 call is not removed.  */
	      if (gimple_call_with_bounds_p (stmt))
		{
		  gimple bounds_def_stmt;
		  tree bounds = gimple_call_arg (stmt, 1);
		  gcc_assert (TREE_CODE (bounds) == SSA_NAME);
		  bounds_def_stmt = SSA_NAME_DEF_STMT (bounds);
		  if (bounds_def_stmt
		      && !gimple_plf (bounds_def_stmt, STMT_NECESSARY))
		    gimple_set_plf (bounds_def_stmt, STMT_NECESSARY,
				    gimple_plf (stmt, STMT_NECESSARY));
		}
	    }

	  /* If the statement is not necessary, remove it.  */
	  if (!gimple_plf (stmt, STMT_NECESSARY))
	    {
	      /* Keep clobbers whose SSA operands are still live.  */
	      if (gimple_clobber_p (stmt))
		{
		  ssa_op_iter iter;
		  use_operand_p use_p;
		  bool dead = false;
		  FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
		    {
		      tree name = USE_FROM_PTR (use_p);
		      if (!SSA_NAME_IS_DEFAULT_DEF (name)
			  && !bitmap_bit_p (processed, SSA_NAME_VERSION (name)))
			{
			  dead = true;
			  break;
			}
		    }
		  if (!dead)
		    continue;
		}
	      if (!is_gimple_debug (stmt))
		something_changed = true;
	      remove_dead_stmt (&gsi, bb);
	    }
	  else if (is_gimple_call (stmt))
	    {
	      tree name = gimple_call_lhs (stmt);

	      notice_special_calls (as_a <gcall *> (stmt));

	      /* When the LHS of var = call (); is dead, simplify it into
		 call (); saving one operand.  */
	      if (name
		  && TREE_CODE (name) == SSA_NAME
		  && !bitmap_bit_p (processed, SSA_NAME_VERSION (name))
		  /* Avoid doing so for allocation calls which we
		     did not mark as necessary; it will confuse the
		     special logic we apply to malloc/free pair removal.  */
		  && (!(call = gimple_call_fndecl (stmt))
		      || DECL_BUILT_IN_CLASS (call) != BUILT_IN_NORMAL
		      || (DECL_FUNCTION_CODE (call) != BUILT_IN_ALIGNED_ALLOC
			  && DECL_FUNCTION_CODE (call) != BUILT_IN_MALLOC
			  && DECL_FUNCTION_CODE (call) != BUILT_IN_CALLOC
			  && DECL_FUNCTION_CODE (call) != BUILT_IN_ALLOCA
			  && (DECL_FUNCTION_CODE (call)
			      != BUILT_IN_ALLOCA_WITH_ALIGN)))
		  /* Avoid doing so for bndret calls for the same reason.  */
		  && !chkp_gimple_call_builtin_p (stmt, BUILT_IN_CHKP_BNDRET))
		{
		  something_changed = true;
		  if (dump_file && (dump_flags & TDF_DETAILS))
		    {
		      fprintf (dump_file, "Deleting LHS of call: ");
		      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
		      fprintf (dump_file, "\n");
		    }

		  gimple_call_set_lhs (stmt, NULL_TREE);
		  maybe_clean_or_replace_eh_stmt (stmt, stmt);
		  update_stmt (stmt);
		  release_ssa_name (name);

		  /* GOMP_SIMD_LANE without a LHS is not needed.  */
		  if (gimple_call_internal_p (stmt)
		      && gimple_call_internal_fn (stmt) == IFN_GOMP_SIMD_LANE)
		    remove_dead_stmt (&gsi, bb);
		}
	      else if (gimple_call_internal_p (stmt))
		switch (gimple_call_internal_fn (stmt))
		  {
		  case IFN_ADD_OVERFLOW:
		    maybe_optimize_arith_overflow (&gsi, PLUS_EXPR);
		    break;
		  case IFN_SUB_OVERFLOW:
		    maybe_optimize_arith_overflow (&gsi, MINUS_EXPR);
		    break;
		  case IFN_MUL_OVERFLOW:
		    maybe_optimize_arith_overflow (&gsi, MULT_EXPR);
		    break;
		  default:
		    break;
		  }
	    }
	}
    }

  h.release ();

  /* Since we don't track liveness of virtual PHI nodes, it is possible that we
     rendered some PHI nodes unreachable while they are still in use.
     Mark them for renaming.  */
  if (cfg_altered)
    {
      basic_block prev_bb;

      find_unreachable_blocks ();

      /* Delete all unreachable basic blocks in reverse dominator order.  */
      for (bb = EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb;
	   bb != ENTRY_BLOCK_PTR_FOR_FN (cfun); bb = prev_bb)
	{
	  prev_bb = bb->prev_bb;

	  if (!bitmap_bit_p (bb_contains_live_stmts, bb->index)
	      || !(bb->flags & BB_REACHABLE))
	    {
	      for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
		   gsi_next (&gsi))
		if (virtual_operand_p (gimple_phi_result (gsi.phi ())))
		  {
		    bool found = false;
		    imm_use_iterator iter;

		    FOR_EACH_IMM_USE_STMT (stmt, iter,
					   gimple_phi_result (gsi.phi ()))
		      {
			if (!(gimple_bb (stmt)->flags & BB_REACHABLE))
			  continue;
			if (gimple_code (stmt) == GIMPLE_PHI
			    || gimple_plf (stmt, STMT_NECESSARY))
			  {
			    found = true;
			    BREAK_FROM_IMM_USE_STMT (iter);
			  }
		      }
		    if (found)
		      mark_virtual_phi_result_for_renaming (gsi.phi ());
		  }

	      if (!(bb->flags & BB_REACHABLE))
		{
		  /* Speed up the removal of blocks that don't
		     dominate others.  Walking backwards, this should
		     be the common case.  ??? Do we need to recompute
		     dominators because of cfg_altered?  */
		  if (!MAY_HAVE_DEBUG_STMTS
		      || !first_dom_son (CDI_DOMINATORS, bb))
		    delete_basic_block (bb);
		  else
		    {
		      h = get_all_dominated_blocks (CDI_DOMINATORS, bb);

		      while (h.length ())
			{
			  bb = h.pop ();
			  prev_bb = bb->prev_bb;
			  /* Rearrangements to the CFG may have failed
			     to update the dominators tree, so that
			     formerly-dominated blocks are now
			     otherwise reachable.  */
			  if (!!(bb->flags & BB_REACHABLE))
			    continue;
			  delete_basic_block (bb);
			}

		      h.release ();
		    }
		}
	    }
	}
    }
  FOR_EACH_BB_FN (bb, cfun)
    {
      /* Remove dead PHI nodes.  */
      something_changed |= remove_dead_phis (bb);
    }

  return something_changed;
}


/* Print out removed statement statistics.  */

static void
print_stats (void)
{
  float percg;

  percg = ((float) stats.removed / (float) stats.total) * 100;
  fprintf (dump_file, "Removed %d of %d statements (%d%%)\n",
	   stats.removed, stats.total, (int) percg);

  if (stats.total_phis == 0)
    percg = 0;
  else
    percg = ((float) stats.removed_phis / (float) stats.total_phis) * 100;

  fprintf (dump_file, "Removed %d of %d PHI nodes (%d%%)\n",
	   stats.removed_phis, stats.total_phis, (int) percg);
}

/* Initialization for this pass.  Set up the used data structures.  */

static void
tree_dce_init (bool aggressive)
{
  memset ((void *) &stats, 0, sizeof (stats));

  if (aggressive)
    {
      last_stmt_necessary = sbitmap_alloc (last_basic_block_for_fn (cfun));
      bitmap_clear (last_stmt_necessary);
      bb_contains_live_stmts = sbitmap_alloc (last_basic_block_for_fn (cfun));
      bitmap_clear (bb_contains_live_stmts);
    }

  processed = sbitmap_alloc (num_ssa_names + 1);
  bitmap_clear (processed);

  worklist.create (64);
  cfg_altered = false;
}

/* Cleanup after this pass.  */

static void
tree_dce_done (bool aggressive)
{
  if (aggressive)
    {
      delete cd;
      sbitmap_free (visited_control_parents);
      sbitmap_free (last_stmt_necessary);
      sbitmap_free (bb_contains_live_stmts);
      bb_contains_live_stmts = NULL;
    }

  sbitmap_free (processed);

  worklist.release ();
}

/* Main routine to eliminate dead code.

   AGGRESSIVE controls the aggressiveness of the algorithm.
   In conservative mode, we ignore control dependence and simply declare
   all but the most trivially dead branches necessary.  This mode is fast.
   In aggressive mode, control dependences are taken into account, which
   results in more dead code elimination, but at the cost of some time.

   FIXME: Aggressive mode before PRE doesn't work currently because
	  the dominance info is not invalidated after DCE1.  This is
	  not an issue right now because we only run aggressive DCE
	  as the last tree SSA pass, but keep this in mind when you
	  start experimenting with pass ordering.  */

static unsigned int
perform_tree_ssa_dce (bool aggressive)
{
  bool something_changed = false;

  calculate_dominance_info (CDI_DOMINATORS);

  /* Preheaders are needed for SCEV to work.
     Simple latches and recorded exits improve the chances that loops
     will be proved finite in testcases such as loop-15.c and loop-24.c.  */
  if (aggressive)
    loop_optimizer_init (LOOPS_NORMAL
			 | LOOPS_HAVE_RECORDED_EXITS);

  tree_dce_init (aggressive);

  if (aggressive)
    {
      /* Compute control dependence.  */
      calculate_dominance_info (CDI_POST_DOMINATORS);
      cd = new control_dependences (create_edge_list ());

      visited_control_parents =
	sbitmap_alloc (last_basic_block_for_fn (cfun));
      bitmap_clear (visited_control_parents);

      mark_dfs_back_edges ();
    }

  find_obviously_necessary_stmts (aggressive);

  if (aggressive)
    loop_optimizer_finalize ();

  longest_chain = 0;
  total_chain = 0;
  nr_walks = 0;
  chain_ovfl = false;
  visited = BITMAP_ALLOC (NULL);
  propagate_necessity (aggressive);
  BITMAP_FREE (visited);

  something_changed |= eliminate_unnecessary_stmts ();
  something_changed |= cfg_altered;

  /* We do not update postdominators, so free them unconditionally.  */
  free_dominance_info (CDI_POST_DOMINATORS);

  /* If we removed paths in the CFG, then we need to update
     dominators as well.  I haven't investigated the possibility
     of incrementally updating dominators.  */
  if (cfg_altered)
    free_dominance_info (CDI_DOMINATORS);

  statistics_counter_event (cfun, "Statements deleted", stats.removed);
  statistics_counter_event (cfun, "PHI nodes deleted", stats.removed_phis);

  /* Debugging dumps.  */
  if (dump_file && (dump_flags & (TDF_STATS|TDF_DETAILS)))
    print_stats ();

  tree_dce_done (aggressive);

  if (something_changed)
    {
      free_numbers_of_iterations_estimates ();
      if (scev_initialized_p ())
	scev_reset ();
      return TODO_update_ssa | TODO_cleanup_cfg;
    }
  return 0;
}

/* Pass entry points.  */
static unsigned int
tree_ssa_dce (void)
{
  return perform_tree_ssa_dce (/*aggressive=*/false);
}

static unsigned int
tree_ssa_cd_dce (void)
{
  return perform_tree_ssa_dce (/*aggressive=*/optimize >= 2);
}

namespace {

const pass_data pass_data_dce =
{
  GIMPLE_PASS, /* type */
  "dce", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_DCE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_dce : public gimple_opt_pass
{
public:
  pass_dce (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_dce, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_dce (m_ctxt); }
  virtual bool gate (function *) { return flag_tree_dce != 0; }
  virtual unsigned int execute (function *) { return tree_ssa_dce (); }

}; // class pass_dce

} // anon namespace

gimple_opt_pass *
make_pass_dce (gcc::context *ctxt)
{
  return new pass_dce (ctxt);
}

namespace {

const pass_data pass_data_cd_dce =
{
  GIMPLE_PASS, /* type */
  "cddce", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_CD_DCE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_cd_dce : public gimple_opt_pass
{
public:
  pass_cd_dce (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_cd_dce, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_cd_dce (m_ctxt); }
  virtual bool gate (function *) { return flag_tree_dce != 0; }
  virtual unsigned int execute (function *) { return tree_ssa_cd_dce (); }

}; // class pass_cd_dce

} // anon namespace

gimple_opt_pass *
make_pass_cd_dce (gcc::context *ctxt)
{
  return new pass_cd_dce (ctxt);
}