xref: /netbsd-src/external/gpl3/gcc.old/dist/gcc/tree-ssa-dce.c (revision 9fb66d812c00ebfb445c0b47dea128f32aa6fe96)
1 /* Dead code elimination pass for the GNU compiler.
2    Copyright (C) 2002-2019 Free Software Foundation, Inc.
3    Contributed by Ben Elliston <bje@redhat.com>
4    and Andrew MacLeod <amacleod@redhat.com>
5    Adapted to use control dependence by Steven Bosscher, SUSE Labs.
6 
7 This file is part of GCC.
8 
9 GCC is free software; you can redistribute it and/or modify it
10 under the terms of the GNU General Public License as published by the
11 Free Software Foundation; either version 3, or (at your option) any
12 later version.
13 
14 GCC is distributed in the hope that it will be useful, but WITHOUT
15 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
17 for more details.
18 
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3.  If not see
21 <http://www.gnu.org/licenses/>.  */
22 
23 /* Dead code elimination.
24 
25    References:
26 
27      Building an Optimizing Compiler,
28      Robert Morgan, Butterworth-Heinemann, 1998, Section 8.9.
29 
30      Advanced Compiler Design and Implementation,
31      Steven Muchnick, Morgan Kaufmann, 1997, Section 18.10.
32 
33    Dead-code elimination is the removal of statements which have no
34    impact on the program's output.  "Dead statements" have no impact
35    on the program's output, while "necessary statements" may have
36    impact on the output.
37 
38    The algorithm consists of three phases:
39    1. Marking as necessary all statements known to be necessary,
40       e.g. most function calls, writing a value to memory, etc;
41    2. Propagating necessary statements, e.g., the statements
42       giving values to operands in necessary statements; and
43    3. Removing dead statements.  */
44 
45 #include "config.h"
46 #include "system.h"
47 #include "coretypes.h"
48 #include "backend.h"
49 #include "rtl.h"
50 #include "tree.h"
51 #include "gimple.h"
52 #include "cfghooks.h"
53 #include "tree-pass.h"
54 #include "ssa.h"
55 #include "gimple-pretty-print.h"
56 #include "fold-const.h"
57 #include "calls.h"
58 #include "cfganal.h"
59 #include "tree-eh.h"
60 #include "gimplify.h"
61 #include "gimple-iterator.h"
62 #include "tree-cfg.h"
63 #include "tree-ssa-loop-niter.h"
64 #include "tree-into-ssa.h"
65 #include "tree-dfa.h"
66 #include "cfgloop.h"
67 #include "tree-scalar-evolution.h"
68 #include "tree-ssa-propagate.h"
69 #include "gimple-fold.h"
70 
71 static struct stmt_stats
72 {
73   int total;
74   int total_phis;
75   int removed;
76   int removed_phis;
77 } stats;
78 
79 #define STMT_NECESSARY GF_PLF_1
80 
81 static vec<gimple *> worklist;
82 
83 /* Vector indicating an SSA name has already been processed and marked
84    as necessary.  */
85 static sbitmap processed;
86 
87 /* Vector indicating that the last statement of a basic block has already
88    been marked as necessary.  */
89 static sbitmap last_stmt_necessary;
90 
91 /* Vector indicating that BB contains statements that are live.  */
92 static sbitmap bb_contains_live_stmts;
93 
94 /* Before we can determine whether a control branch is dead, we need to
95    compute which blocks are control dependent on which edges.
96 
97    We expect each block to be control dependent on very few edges so we
98    use a bitmap for each block recording its edges.  An array holds the
99    bitmap.  The Ith bit in the bitmap is set if that block is dependent
100    on the Ith edge.  */
101 static control_dependences *cd;
102 
103 /* Vector indicating that a basic block has already had all the edges
104    processed that it is control dependent on.  */
105 static sbitmap visited_control_parents;
106 
107 /* TRUE if this pass alters the CFG (by removing control statements).
108    FALSE otherwise.
109 
110    If this pass alters the CFG, then it will arrange for the dominators
111    to be recomputed.  */
112 static bool cfg_altered;
113 
114 /* When non-NULL holds map from basic block index into the postorder.  */
115 static int *bb_postorder;
116 
117 
118 /* If STMT is not already marked necessary, mark it, and add it to the
119    worklist if ADD_TO_WORKLIST is true.  */
120 
121 static inline void
122 mark_stmt_necessary (gimple *stmt, bool add_to_worklist)
123 {
124   gcc_assert (stmt);
125 
126   if (gimple_plf (stmt, STMT_NECESSARY))
127     return;
128 
129   if (dump_file && (dump_flags & TDF_DETAILS))
130     {
131       fprintf (dump_file, "Marking useful stmt: ");
132       print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
133       fprintf (dump_file, "\n");
134     }
135 
136   gimple_set_plf (stmt, STMT_NECESSARY, true);
137   if (add_to_worklist)
138     worklist.safe_push (stmt);
139   if (add_to_worklist && bb_contains_live_stmts && !is_gimple_debug (stmt))
140     bitmap_set_bit (bb_contains_live_stmts, gimple_bb (stmt)->index);
141 }
142 
143 
144 /* Mark the statement defining operand OP as necessary.  */
145 
146 static inline void
147 mark_operand_necessary (tree op)
148 {
149   gimple *stmt;
150   int ver;
151 
152   gcc_assert (op);
153 
154   ver = SSA_NAME_VERSION (op);
155   if (bitmap_bit_p (processed, ver))
156     {
157       stmt = SSA_NAME_DEF_STMT (op);
158       gcc_assert (gimple_nop_p (stmt)
159 		  || gimple_plf (stmt, STMT_NECESSARY));
160       return;
161     }
162   bitmap_set_bit (processed, ver);
163 
164   stmt = SSA_NAME_DEF_STMT (op);
165   gcc_assert (stmt);
166 
167   if (gimple_plf (stmt, STMT_NECESSARY) || gimple_nop_p (stmt))
168     return;
169 
170   if (dump_file && (dump_flags & TDF_DETAILS))
171     {
172       fprintf (dump_file, "marking necessary through ");
173       print_generic_expr (dump_file, op);
174       fprintf (dump_file, " stmt ");
175       print_gimple_stmt (dump_file, stmt, 0);
176     }
177 
178   gimple_set_plf (stmt, STMT_NECESSARY, true);
179   if (bb_contains_live_stmts)
180     bitmap_set_bit (bb_contains_live_stmts, gimple_bb (stmt)->index);
181   worklist.safe_push (stmt);
182 }
183 
184 
185 /* Mark STMT as necessary if it obviously is.  Add it to the worklist if
186    it can make other statements necessary.
187 
188    If AGGRESSIVE is false, control statements are conservatively marked as
189    necessary.  */
190 
191 static void
192 mark_stmt_if_obviously_necessary (gimple *stmt, bool aggressive)
193 {
194   /* With non-call exceptions, we have to assume that all statements could
195      throw.  If a statement could throw, it can be deemed necessary.  */
196   if (cfun->can_throw_non_call_exceptions
197       && !cfun->can_delete_dead_exceptions
198       && stmt_could_throw_p (cfun, stmt))
199     {
200       mark_stmt_necessary (stmt, true);
201       return;
202     }
203 
204   /* Statements that are implicitly live.  Most function calls, asm
205      and return statements are required.  Labels and GIMPLE_BIND nodes
206      are kept because they are control flow, and we have no way of
207      knowing whether they can be removed.  DCE can eliminate all the
208      other statements in a block, and CFG can then remove the block
209      and labels.  */
210   switch (gimple_code (stmt))
211     {
212     case GIMPLE_PREDICT:
213     case GIMPLE_LABEL:
214       mark_stmt_necessary (stmt, false);
215       return;
216 
217     case GIMPLE_ASM:
218     case GIMPLE_RESX:
219     case GIMPLE_RETURN:
220       mark_stmt_necessary (stmt, true);
221       return;
222 
223     case GIMPLE_CALL:
224       {
225 	tree callee = gimple_call_fndecl (stmt);
226 	if (callee != NULL_TREE
227 	    && fndecl_built_in_p (callee, BUILT_IN_NORMAL))
228 	  switch (DECL_FUNCTION_CODE (callee))
229 	    {
230 	    case BUILT_IN_MALLOC:
231 	    case BUILT_IN_ALIGNED_ALLOC:
232 	    case BUILT_IN_CALLOC:
233 	    CASE_BUILT_IN_ALLOCA:
234 	    case BUILT_IN_STRDUP:
235 	    case BUILT_IN_STRNDUP:
236 	      return;
237 
238 	    default:;
239 	    }
240 	/* Most, but not all function calls are required.  Function calls that
241 	   produce no result and have no side effects (i.e. const pure
242 	   functions) are unnecessary.  */
243 	if (gimple_has_side_effects (stmt))
244 	  {
245 	    mark_stmt_necessary (stmt, true);
246 	    return;
247 	  }
248 	if (!gimple_call_lhs (stmt))
249 	  return;
250 	break;
251       }
252 
253     case GIMPLE_DEBUG:
254       /* Debug temps without a value are not useful.  ??? If we could
255 	 easily locate the debug temp bind stmt for a use thereof,
256 	 would could refrain from marking all debug temps here, and
257 	 mark them only if they're used.  */
258       if (gimple_debug_nonbind_marker_p (stmt)
259 	  || !gimple_debug_bind_p (stmt)
260 	  || gimple_debug_bind_has_value_p (stmt)
261 	  || TREE_CODE (gimple_debug_bind_get_var (stmt)) != DEBUG_EXPR_DECL)
262 	mark_stmt_necessary (stmt, false);
263       return;
264 
265     case GIMPLE_GOTO:
266       gcc_assert (!simple_goto_p (stmt));
267       mark_stmt_necessary (stmt, true);
268       return;
269 
270     case GIMPLE_COND:
271       gcc_assert (EDGE_COUNT (gimple_bb (stmt)->succs) == 2);
272       /* Fall through.  */
273 
274     case GIMPLE_SWITCH:
275       if (! aggressive)
276 	mark_stmt_necessary (stmt, true);
277       break;
278 
279     case GIMPLE_ASSIGN:
280       if (gimple_clobber_p (stmt))
281 	return;
282       break;
283 
284     default:
285       break;
286     }
287 
288   /* If the statement has volatile operands, it needs to be preserved.
289      Same for statements that can alter control flow in unpredictable
290      ways.  */
291   if (gimple_has_volatile_ops (stmt) || is_ctrl_altering_stmt (stmt))
292     {
293       mark_stmt_necessary (stmt, true);
294       return;
295     }
296 
297   if (stmt_may_clobber_global_p (stmt))
298     {
299       mark_stmt_necessary (stmt, true);
300       return;
301     }
302 
303   return;
304 }
305 
306 
307 /* Mark the last statement of BB as necessary.  */
308 
309 static void
310 mark_last_stmt_necessary (basic_block bb)
311 {
312   gimple *stmt = last_stmt (bb);
313 
314   bitmap_set_bit (last_stmt_necessary, bb->index);
315   bitmap_set_bit (bb_contains_live_stmts, bb->index);
316 
317   /* We actually mark the statement only if it is a control statement.  */
318   if (stmt && is_ctrl_stmt (stmt))
319     mark_stmt_necessary (stmt, true);
320 }
321 
322 
323 /* Mark control dependent edges of BB as necessary.  We have to do this only
324    once for each basic block so we set the appropriate bit after we're done.
325 
326    When IGNORE_SELF is true, ignore BB in the list of control dependences.  */
327 
328 static void
329 mark_control_dependent_edges_necessary (basic_block bb, bool ignore_self)
330 {
331   bitmap_iterator bi;
332   unsigned edge_number;
333   bool skipped = false;
334 
335   gcc_assert (bb != EXIT_BLOCK_PTR_FOR_FN (cfun));
336 
337   if (bb == ENTRY_BLOCK_PTR_FOR_FN (cfun))
338     return;
339 
340   EXECUTE_IF_SET_IN_BITMAP (cd->get_edges_dependent_on (bb->index),
341 			    0, edge_number, bi)
342     {
343       basic_block cd_bb = cd->get_edge_src (edge_number);
344 
345       if (ignore_self && cd_bb == bb)
346 	{
347 	  skipped = true;
348 	  continue;
349 	}
350 
351       if (!bitmap_bit_p (last_stmt_necessary, cd_bb->index))
352 	mark_last_stmt_necessary (cd_bb);
353     }
354 
355   if (!skipped)
356     bitmap_set_bit (visited_control_parents, bb->index);
357 }
358 
359 
360 /* Find obviously necessary statements.  These are things like most function
361    calls, and stores to file level variables.
362 
363    If EL is NULL, control statements are conservatively marked as
364    necessary.  Otherwise it contains the list of edges used by control
365    dependence analysis.  */
366 
367 static void
368 find_obviously_necessary_stmts (bool aggressive)
369 {
370   basic_block bb;
371   gimple_stmt_iterator gsi;
372   edge e;
373   gimple *phi, *stmt;
374   int flags;
375 
376   FOR_EACH_BB_FN (bb, cfun)
377     {
378       /* PHI nodes are never inherently necessary.  */
379       for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
380 	{
381 	  phi = gsi_stmt (gsi);
382 	  gimple_set_plf (phi, STMT_NECESSARY, false);
383 	}
384 
385       /* Check all statements in the block.  */
386       for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
387 	{
388 	  stmt = gsi_stmt (gsi);
389 	  gimple_set_plf (stmt, STMT_NECESSARY, false);
390 	  mark_stmt_if_obviously_necessary (stmt, aggressive);
391 	}
392     }
393 
394   /* Pure and const functions are finite and thus have no infinite loops in
395      them.  */
396   flags = flags_from_decl_or_type (current_function_decl);
397   if ((flags & (ECF_CONST|ECF_PURE)) && !(flags & ECF_LOOPING_CONST_OR_PURE))
398     return;
399 
400   /* Prevent the empty possibly infinite loops from being removed.  */
401   if (aggressive)
402     {
403       struct loop *loop;
404       if (mark_irreducible_loops ())
405 	FOR_EACH_BB_FN (bb, cfun)
406 	  {
407 	    edge_iterator ei;
408 	    FOR_EACH_EDGE (e, ei, bb->succs)
409 	      if ((e->flags & EDGE_DFS_BACK)
410 		  && (e->flags & EDGE_IRREDUCIBLE_LOOP))
411 		{
412 	          if (dump_file)
413 	            fprintf (dump_file, "Marking back edge of irreducible loop %i->%i\n",
414 		    	     e->src->index, e->dest->index);
415 		  mark_control_dependent_edges_necessary (e->dest, false);
416 		}
417 	  }
418 
419       FOR_EACH_LOOP (loop, 0)
420 	if (!finite_loop_p (loop))
421 	  {
422 	    if (dump_file)
423 	      fprintf (dump_file, "cannot prove finiteness of loop %i\n", loop->num);
424 	    mark_control_dependent_edges_necessary (loop->latch, false);
425 	  }
426     }
427 }
428 
429 
430 /* Return true if REF is based on an aliased base, otherwise false.  */
431 
432 static bool
433 ref_may_be_aliased (tree ref)
434 {
435   gcc_assert (TREE_CODE (ref) != WITH_SIZE_EXPR);
436   while (handled_component_p (ref))
437     ref = TREE_OPERAND (ref, 0);
438   if (TREE_CODE (ref) == MEM_REF
439       && TREE_CODE (TREE_OPERAND (ref, 0)) == ADDR_EXPR)
440     ref = TREE_OPERAND (TREE_OPERAND (ref, 0), 0);
441   return !(DECL_P (ref)
442 	   && !may_be_aliased (ref));
443 }
444 
445 static bitmap visited = NULL;
446 static unsigned int longest_chain = 0;
447 static unsigned int total_chain = 0;
448 static unsigned int nr_walks = 0;
449 static bool chain_ovfl = false;
450 
451 /* Worker for the walker that marks reaching definitions of REF,
452    which is based on a non-aliased decl, necessary.  It returns
453    true whenever the defining statement of the current VDEF is
454    a kill for REF, as no dominating may-defs are necessary for REF
455    anymore.  DATA points to the basic-block that contains the
456    stmt that refers to REF.  */
457 
458 static bool
459 mark_aliased_reaching_defs_necessary_1 (ao_ref *ref, tree vdef, void *data)
460 {
461   gimple *def_stmt = SSA_NAME_DEF_STMT (vdef);
462 
463   /* All stmts we visit are necessary.  */
464   if (! gimple_clobber_p (def_stmt))
465     mark_operand_necessary (vdef);
466 
467   /* If the stmt lhs kills ref, then we can stop walking.  */
468   if (gimple_has_lhs (def_stmt)
469       && TREE_CODE (gimple_get_lhs (def_stmt)) != SSA_NAME
470       /* The assignment is not necessarily carried out if it can throw
471          and we can catch it in the current function where we could inspect
472 	 the previous value.
473          ???  We only need to care about the RHS throwing.  For aggregate
474 	 assignments or similar calls and non-call exceptions the LHS
475 	 might throw as well.  */
476       && !stmt_can_throw_internal (cfun, def_stmt))
477     {
478       tree base, lhs = gimple_get_lhs (def_stmt);
479       poly_int64 size, offset, max_size;
480       bool reverse;
481       ao_ref_base (ref);
482       base
483 	= get_ref_base_and_extent (lhs, &offset, &size, &max_size, &reverse);
484       /* We can get MEM[symbol: sZ, index: D.8862_1] here,
485 	 so base == refd->base does not always hold.  */
486       if (base == ref->base)
487 	{
488 	  /* For a must-alias check we need to be able to constrain
489 	     the accesses properly.  */
490 	  if (known_eq (size, max_size)
491 	      && known_subrange_p (ref->offset, ref->max_size, offset, size))
492 	    return true;
493 	  /* Or they need to be exactly the same.  */
494 	  else if (ref->ref
495 		   /* Make sure there is no induction variable involved
496 		      in the references (gcc.c-torture/execute/pr42142.c).
497 		      The simplest way is to check if the kill dominates
498 		      the use.  */
499 		   /* But when both are in the same block we cannot
500 		      easily tell whether we came from a backedge
501 		      unless we decide to compute stmt UIDs
502 		      (see PR58246).  */
503 		   && (basic_block) data != gimple_bb (def_stmt)
504 		   && dominated_by_p (CDI_DOMINATORS, (basic_block) data,
505 				      gimple_bb (def_stmt))
506 		   && operand_equal_p (ref->ref, lhs, 0))
507 	    return true;
508 	}
509     }
510 
511   /* Otherwise keep walking.  */
512   return false;
513 }
514 
515 static void
516 mark_aliased_reaching_defs_necessary (gimple *stmt, tree ref)
517 {
518   unsigned int chain;
519   ao_ref refd;
520   gcc_assert (!chain_ovfl);
521   ao_ref_init (&refd, ref);
522   chain = walk_aliased_vdefs (&refd, gimple_vuse (stmt),
523 			      mark_aliased_reaching_defs_necessary_1,
524 			      gimple_bb (stmt), NULL);
525   if (chain > longest_chain)
526     longest_chain = chain;
527   total_chain += chain;
528   nr_walks++;
529 }
530 
531 /* Worker for the walker that marks reaching definitions of REF, which
532    is not based on a non-aliased decl.  For simplicity we need to end
533    up marking all may-defs necessary that are not based on a non-aliased
534    decl.  The only job of this walker is to skip may-defs based on
535    a non-aliased decl.  */
536 
537 static bool
538 mark_all_reaching_defs_necessary_1 (ao_ref *ref ATTRIBUTE_UNUSED,
539 				    tree vdef, void *data ATTRIBUTE_UNUSED)
540 {
541   gimple *def_stmt = SSA_NAME_DEF_STMT (vdef);
542 
543   /* We have to skip already visited (and thus necessary) statements
544      to make the chaining work after we dropped back to simple mode.  */
545   if (chain_ovfl
546       && bitmap_bit_p (processed, SSA_NAME_VERSION (vdef)))
547     {
548       gcc_assert (gimple_nop_p (def_stmt)
549 		  || gimple_plf (def_stmt, STMT_NECESSARY));
550       return false;
551     }
552 
553   /* We want to skip stores to non-aliased variables.  */
554   if (!chain_ovfl
555       && gimple_assign_single_p (def_stmt))
556     {
557       tree lhs = gimple_assign_lhs (def_stmt);
558       if (!ref_may_be_aliased (lhs))
559 	return false;
560     }
561 
562   /* We want to skip statments that do not constitute stores but have
563      a virtual definition.  */
564   if (is_gimple_call (def_stmt))
565     {
566       tree callee = gimple_call_fndecl (def_stmt);
567       if (callee != NULL_TREE
568 	  && fndecl_built_in_p (callee, BUILT_IN_NORMAL))
569 	switch (DECL_FUNCTION_CODE (callee))
570 	  {
571 	  case BUILT_IN_MALLOC:
572 	  case BUILT_IN_ALIGNED_ALLOC:
573 	  case BUILT_IN_CALLOC:
574 	  CASE_BUILT_IN_ALLOCA:
575 	  case BUILT_IN_FREE:
576 	    return false;
577 
578 	  default:;
579 	  }
580     }
581 
582   if (! gimple_clobber_p (def_stmt))
583     mark_operand_necessary (vdef);
584 
585   return false;
586 }
587 
588 static void
589 mark_all_reaching_defs_necessary (gimple *stmt)
590 {
591   walk_aliased_vdefs (NULL, gimple_vuse (stmt),
592 		      mark_all_reaching_defs_necessary_1, NULL, &visited);
593 }
594 
595 /* Return true for PHI nodes with one or identical arguments
596    can be removed.  */
597 static bool
598 degenerate_phi_p (gimple *phi)
599 {
600   unsigned int i;
601   tree op = gimple_phi_arg_def (phi, 0);
602   for (i = 1; i < gimple_phi_num_args (phi); i++)
603     if (gimple_phi_arg_def (phi, i) != op)
604       return false;
605   return true;
606 }
607 
608 /* Propagate necessity using the operands of necessary statements.
609    Process the uses on each statement in the worklist, and add all
610    feeding statements which contribute to the calculation of this
611    value to the worklist.
612 
613    In conservative mode, EL is NULL.  */
614 
615 static void
616 propagate_necessity (bool aggressive)
617 {
618   gimple *stmt;
619 
620   if (dump_file && (dump_flags & TDF_DETAILS))
621     fprintf (dump_file, "\nProcessing worklist:\n");
622 
623   while (worklist.length () > 0)
624     {
625       /* Take STMT from worklist.  */
626       stmt = worklist.pop ();
627 
628       if (dump_file && (dump_flags & TDF_DETAILS))
629 	{
630 	  fprintf (dump_file, "processing: ");
631 	  print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
632 	  fprintf (dump_file, "\n");
633 	}
634 
635       if (aggressive)
636 	{
637 	  /* Mark the last statement of the basic blocks on which the block
638 	     containing STMT is control dependent, but only if we haven't
639 	     already done so.  */
640 	  basic_block bb = gimple_bb (stmt);
641 	  if (bb != ENTRY_BLOCK_PTR_FOR_FN (cfun)
642 	      && !bitmap_bit_p (visited_control_parents, bb->index))
643 	    mark_control_dependent_edges_necessary (bb, false);
644 	}
645 
646       if (gimple_code (stmt) == GIMPLE_PHI
647 	  /* We do not process virtual PHI nodes nor do we track their
648 	     necessity.  */
649 	  && !virtual_operand_p (gimple_phi_result (stmt)))
650 	{
651 	  /* PHI nodes are somewhat special in that each PHI alternative has
652 	     data and control dependencies.  All the statements feeding the
653 	     PHI node's arguments are always necessary.  In aggressive mode,
654 	     we also consider the control dependent edges leading to the
655 	     predecessor block associated with each PHI alternative as
656 	     necessary.  */
657 	  gphi *phi = as_a <gphi *> (stmt);
658 	  size_t k;
659 
660 	  for (k = 0; k < gimple_phi_num_args (stmt); k++)
661             {
662 	      tree arg = PHI_ARG_DEF (stmt, k);
663 	      if (TREE_CODE (arg) == SSA_NAME)
664 		mark_operand_necessary (arg);
665 	    }
666 
667 	  /* For PHI operands it matters from where the control flow arrives
668 	     to the BB.  Consider the following example:
669 
670 	     a=exp1;
671 	     b=exp2;
672 	     if (test)
673 		;
674 	     else
675 		;
676 	     c=PHI(a,b)
677 
678 	     We need to mark control dependence of the empty basic blocks, since they
679 	     contains computation of PHI operands.
680 
681 	     Doing so is too restrictive in the case the predecestor block is in
682 	     the loop. Consider:
683 
684 	      if (b)
685 		{
686 		  int i;
687 		  for (i = 0; i<1000; ++i)
688 		    ;
689 		  j = 0;
690 		}
691 	      return j;
692 
693 	     There is PHI for J in the BB containing return statement.
694 	     In this case the control dependence of predecestor block (that is
695 	     within the empty loop) also contains the block determining number
696 	     of iterations of the block that would prevent removing of empty
697 	     loop in this case.
698 
699 	     This scenario can be avoided by splitting critical edges.
700 	     To save the critical edge splitting pass we identify how the control
701 	     dependence would look like if the edge was split.
702 
703 	     Consider the modified CFG created from current CFG by splitting
704 	     edge B->C.  In the postdominance tree of modified CFG, C' is
705 	     always child of C.  There are two cases how chlids of C' can look
706 	     like:
707 
708 		1) C' is leaf
709 
710 		   In this case the only basic block C' is control dependent on is B.
711 
712 		2) C' has single child that is B
713 
714 		   In this case control dependence of C' is same as control
715 		   dependence of B in original CFG except for block B itself.
716 		   (since C' postdominate B in modified CFG)
717 
718 	     Now how to decide what case happens?  There are two basic options:
719 
720 		a) C postdominate B.  Then C immediately postdominate B and
721 		   case 2 happens iff there is no other way from B to C except
722 		   the edge B->C.
723 
724 		   There is other way from B to C iff there is succesor of B that
725 		   is not postdominated by B.  Testing this condition is somewhat
726 		   expensive, because we need to iterate all succesors of B.
727 		   We are safe to assume that this does not happen: we will mark B
728 		   as needed when processing the other path from B to C that is
729 		   conrol dependent on B and marking control dependencies of B
730 		   itself is harmless because they will be processed anyway after
731 		   processing control statement in B.
732 
733 		b) C does not postdominate B.  Always case 1 happens since there is
734 		   path from C to exit that does not go through B and thus also C'.  */
735 
736 	  if (aggressive && !degenerate_phi_p (stmt))
737 	    {
738 	      for (k = 0; k < gimple_phi_num_args (stmt); k++)
739 		{
740 		  basic_block arg_bb = gimple_phi_arg_edge (phi, k)->src;
741 
742 		  if (gimple_bb (stmt)
743 		      != get_immediate_dominator (CDI_POST_DOMINATORS, arg_bb))
744 		    {
745 		      if (!bitmap_bit_p (last_stmt_necessary, arg_bb->index))
746 			mark_last_stmt_necessary (arg_bb);
747 		    }
748 		  else if (arg_bb != ENTRY_BLOCK_PTR_FOR_FN (cfun)
749 		           && !bitmap_bit_p (visited_control_parents,
750 					 arg_bb->index))
751 		    mark_control_dependent_edges_necessary (arg_bb, true);
752 		}
753 	    }
754 	}
755       else
756 	{
757 	  /* Propagate through the operands.  Examine all the USE, VUSE and
758 	     VDEF operands in this statement.  Mark all the statements
759 	     which feed this statement's uses as necessary.  */
760 	  ssa_op_iter iter;
761 	  tree use;
762 
763 	  /* If this is a call to free which is directly fed by an
764 	     allocation function do not mark that necessary through
765 	     processing the argument.  */
766 	  if (gimple_call_builtin_p (stmt, BUILT_IN_FREE))
767 	    {
768 	      tree ptr = gimple_call_arg (stmt, 0);
769 	      gimple *def_stmt;
770 	      tree def_callee;
771 	      /* If the pointer we free is defined by an allocation
772 		 function do not add the call to the worklist.  */
773 	      if (TREE_CODE (ptr) == SSA_NAME
774 		  && is_gimple_call (def_stmt = SSA_NAME_DEF_STMT (ptr))
775 		  && (def_callee = gimple_call_fndecl (def_stmt))
776 		  && DECL_BUILT_IN_CLASS (def_callee) == BUILT_IN_NORMAL
777 		  && (DECL_FUNCTION_CODE (def_callee) == BUILT_IN_ALIGNED_ALLOC
778 		      || DECL_FUNCTION_CODE (def_callee) == BUILT_IN_MALLOC
779 		      || DECL_FUNCTION_CODE (def_callee) == BUILT_IN_CALLOC))
780 		continue;
781 	    }
782 
783 	  FOR_EACH_SSA_TREE_OPERAND (use, stmt, iter, SSA_OP_USE)
784 	    mark_operand_necessary (use);
785 
786 	  use = gimple_vuse (stmt);
787 	  if (!use)
788 	    continue;
789 
790 	  /* If we dropped to simple mode make all immediately
791 	     reachable definitions necessary.  */
792 	  if (chain_ovfl)
793 	    {
794 	      mark_all_reaching_defs_necessary (stmt);
795 	      continue;
796 	    }
797 
798 	  /* For statements that may load from memory (have a VUSE) we
799 	     have to mark all reaching (may-)definitions as necessary.
800 	     We partition this task into two cases:
801 	      1) explicit loads based on decls that are not aliased
802 	      2) implicit loads (like calls) and explicit loads not
803 	         based on decls that are not aliased (like indirect
804 		 references or loads from globals)
805 	     For 1) we mark all reaching may-defs as necessary, stopping
806 	     at dominating kills.  For 2) we want to mark all dominating
807 	     references necessary, but non-aliased ones which we handle
808 	     in 1).  By keeping a global visited bitmap for references
809 	     we walk for 2) we avoid quadratic behavior for those.  */
810 
811 	  if (is_gimple_call (stmt))
812 	    {
813 	      tree callee = gimple_call_fndecl (stmt);
814 	      unsigned i;
815 
816 	      /* Calls to functions that are merely acting as barriers
817 		 or that only store to memory do not make any previous
818 		 stores necessary.  */
819 	      if (callee != NULL_TREE
820 		  && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL
821 		  && (DECL_FUNCTION_CODE (callee) == BUILT_IN_MEMSET
822 		      || DECL_FUNCTION_CODE (callee) == BUILT_IN_MEMSET_CHK
823 		      || DECL_FUNCTION_CODE (callee) == BUILT_IN_MALLOC
824 		      || DECL_FUNCTION_CODE (callee) == BUILT_IN_ALIGNED_ALLOC
825 		      || DECL_FUNCTION_CODE (callee) == BUILT_IN_CALLOC
826 		      || DECL_FUNCTION_CODE (callee) == BUILT_IN_FREE
827 		      || DECL_FUNCTION_CODE (callee) == BUILT_IN_VA_END
828 		      || ALLOCA_FUNCTION_CODE_P (DECL_FUNCTION_CODE (callee))
829 		      || DECL_FUNCTION_CODE (callee) == BUILT_IN_STACK_SAVE
830 		      || DECL_FUNCTION_CODE (callee) == BUILT_IN_STACK_RESTORE
831 		      || DECL_FUNCTION_CODE (callee) == BUILT_IN_ASSUME_ALIGNED))
832 		continue;
833 
834 	      /* Calls implicitly load from memory, their arguments
835 	         in addition may explicitly perform memory loads.  */
836 	      mark_all_reaching_defs_necessary (stmt);
837 	      for (i = 0; i < gimple_call_num_args (stmt); ++i)
838 		{
839 		  tree arg = gimple_call_arg (stmt, i);
840 		  if (TREE_CODE (arg) == SSA_NAME
841 		      || is_gimple_min_invariant (arg))
842 		    continue;
843 		  if (TREE_CODE (arg) == WITH_SIZE_EXPR)
844 		    arg = TREE_OPERAND (arg, 0);
845 		  if (!ref_may_be_aliased (arg))
846 		    mark_aliased_reaching_defs_necessary (stmt, arg);
847 		}
848 	    }
849 	  else if (gimple_assign_single_p (stmt))
850 	    {
851 	      tree rhs;
852 	      /* If this is a load mark things necessary.  */
853 	      rhs = gimple_assign_rhs1 (stmt);
854 	      if (TREE_CODE (rhs) != SSA_NAME
855 		  && !is_gimple_min_invariant (rhs)
856 		  && TREE_CODE (rhs) != CONSTRUCTOR)
857 		{
858 		  if (!ref_may_be_aliased (rhs))
859 		    mark_aliased_reaching_defs_necessary (stmt, rhs);
860 		  else
861 		    mark_all_reaching_defs_necessary (stmt);
862 		}
863 	    }
864 	  else if (greturn *return_stmt = dyn_cast <greturn *> (stmt))
865 	    {
866 	      tree rhs = gimple_return_retval (return_stmt);
867 	      /* A return statement may perform a load.  */
868 	      if (rhs
869 		  && TREE_CODE (rhs) != SSA_NAME
870 		  && !is_gimple_min_invariant (rhs)
871 		  && TREE_CODE (rhs) != CONSTRUCTOR)
872 		{
873 		  if (!ref_may_be_aliased (rhs))
874 		    mark_aliased_reaching_defs_necessary (stmt, rhs);
875 		  else
876 		    mark_all_reaching_defs_necessary (stmt);
877 		}
878 	    }
879 	  else if (gasm *asm_stmt = dyn_cast <gasm *> (stmt))
880 	    {
881 	      unsigned i;
882 	      mark_all_reaching_defs_necessary (stmt);
883 	      /* Inputs may perform loads.  */
884 	      for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
885 		{
886 		  tree op = TREE_VALUE (gimple_asm_input_op (asm_stmt, i));
887 		  if (TREE_CODE (op) != SSA_NAME
888 		      && !is_gimple_min_invariant (op)
889 		      && TREE_CODE (op) != CONSTRUCTOR
890 		      && !ref_may_be_aliased (op))
891 		    mark_aliased_reaching_defs_necessary (stmt, op);
892 		}
893 	    }
894 	  else if (gimple_code (stmt) == GIMPLE_TRANSACTION)
895 	    {
896 	      /* The beginning of a transaction is a memory barrier.  */
897 	      /* ??? If we were really cool, we'd only be a barrier
898 		 for the memories touched within the transaction.  */
899 	      mark_all_reaching_defs_necessary (stmt);
900 	    }
901 	  else
902 	    gcc_unreachable ();
903 
904 	  /* If we over-used our alias oracle budget drop to simple
905 	     mode.  The cost metric allows quadratic behavior
906 	     (number of uses times number of may-defs queries) up to
907 	     a constant maximal number of queries and after that falls back to
908 	     super-linear complexity.  */
909 	  if (/* Constant but quadratic for small functions.  */
910 	      total_chain > 128 * 128
911 	      /* Linear in the number of may-defs.  */
912 	      && total_chain > 32 * longest_chain
913 	      /* Linear in the number of uses.  */
914 	      && total_chain > nr_walks * 32)
915 	    {
916 	      chain_ovfl = true;
917 	      if (visited)
918 		bitmap_clear (visited);
919 	    }
920 	}
921     }
922 }
923 
924 /* Remove dead PHI nodes from block BB.  */
925 
926 static bool
927 remove_dead_phis (basic_block bb)
928 {
929   bool something_changed = false;
930   gphi *phi;
931   gphi_iterator gsi;
932 
933   for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi);)
934     {
935       stats.total_phis++;
936       phi = gsi.phi ();
937 
938       /* We do not track necessity of virtual PHI nodes.  Instead do
939          very simple dead PHI removal here.  */
940       if (virtual_operand_p (gimple_phi_result (phi)))
941 	{
942 	  /* Virtual PHI nodes with one or identical arguments
943 	     can be removed.  */
944 	  if (degenerate_phi_p (phi))
945 	    {
946 	      tree vdef = gimple_phi_result (phi);
947 	      tree vuse = gimple_phi_arg_def (phi, 0);
948 
949 	      use_operand_p use_p;
950 	      imm_use_iterator iter;
951 	      gimple *use_stmt;
952 	      FOR_EACH_IMM_USE_STMT (use_stmt, iter, vdef)
953 		FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
954 		  SET_USE (use_p, vuse);
955 	      if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (vdef)
956 	          && TREE_CODE (vuse) == SSA_NAME)
957 		SSA_NAME_OCCURS_IN_ABNORMAL_PHI (vuse) = 1;
958 	    }
959 	  else
960 	    gimple_set_plf (phi, STMT_NECESSARY, true);
961 	}
962 
963       if (!gimple_plf (phi, STMT_NECESSARY))
964 	{
965 	  something_changed = true;
966 	  if (dump_file && (dump_flags & TDF_DETAILS))
967 	    {
968 	      fprintf (dump_file, "Deleting : ");
969 	      print_gimple_stmt (dump_file, phi, 0, TDF_SLIM);
970 	      fprintf (dump_file, "\n");
971 	    }
972 
973 	  remove_phi_node (&gsi, true);
974 	  stats.removed_phis++;
975 	  continue;
976 	}
977 
978       gsi_next (&gsi);
979     }
980   return something_changed;
981 }
982 
983 
984 /* Remove dead statement pointed to by iterator I.  Receives the basic block BB
985    containing I so that we don't have to look it up.  */
986 
987 static void
988 remove_dead_stmt (gimple_stmt_iterator *i, basic_block bb,
989 		  vec<edge> &to_remove_edges)
990 {
991   gimple *stmt = gsi_stmt (*i);
992 
993   if (dump_file && (dump_flags & TDF_DETAILS))
994     {
995       fprintf (dump_file, "Deleting : ");
996       print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
997       fprintf (dump_file, "\n");
998     }
999 
1000   stats.removed++;
1001 
1002   /* If we have determined that a conditional branch statement contributes
1003      nothing to the program, then we not only remove it, but we need to update
1004      the CFG.  We can chose any of edges out of BB as long as we are sure to not
1005      close infinite loops.  This is done by always choosing the edge closer to
1006      exit in inverted_post_order_compute order.  */
1007   if (is_ctrl_stmt (stmt))
1008     {
1009       edge_iterator ei;
1010       edge e = NULL, e2;
1011 
1012       /* See if there is only one non-abnormal edge.  */
1013       if (single_succ_p (bb))
1014         e = single_succ_edge (bb);
1015       /* Otherwise chose one that is closer to bb with live statement in it.
1016          To be able to chose one, we compute inverted post order starting from
1017 	 all BBs with live statements.  */
1018       if (!e)
1019 	{
1020 	  if (!bb_postorder)
1021 	    {
1022 	      auto_vec<int, 20> postorder;
1023 		 inverted_post_order_compute (&postorder,
1024 					      &bb_contains_live_stmts);
1025 	      bb_postorder = XNEWVEC (int, last_basic_block_for_fn (cfun));
1026 	      for (unsigned int i = 0; i < postorder.length (); ++i)
1027 		 bb_postorder[postorder[i]] = i;
1028 	    }
1029           FOR_EACH_EDGE (e2, ei, bb->succs)
1030 	    if (!e || e2->dest == EXIT_BLOCK_PTR_FOR_FN (cfun)
1031 		|| bb_postorder [e->dest->index]
1032 		   < bb_postorder [e2->dest->index])
1033 	      e = e2;
1034 	}
1035       gcc_assert (e);
1036       e->probability = profile_probability::always ();
1037 
1038       /* The edge is no longer associated with a conditional, so it does
1039 	 not have TRUE/FALSE flags.
1040 	 We are also safe to drop EH/ABNORMAL flags and turn them into
1041 	 normal control flow, because we know that all the destinations (including
1042 	 those odd edges) are equivalent for program execution.  */
1043       e->flags &= ~(EDGE_TRUE_VALUE | EDGE_FALSE_VALUE | EDGE_EH | EDGE_ABNORMAL);
1044 
1045       /* The lone outgoing edge from BB will be a fallthru edge.  */
1046       e->flags |= EDGE_FALLTHRU;
1047 
1048       /* Remove the remaining outgoing edges.  */
1049       FOR_EACH_EDGE (e2, ei, bb->succs)
1050 	if (e != e2)
1051 	  {
1052 	    /* If we made a BB unconditionally exit a loop or removed
1053 	       an entry into an irreducible region, then this transform
1054 	       alters the set of BBs in the loop.  Schedule a fixup.  */
1055 	    if (loop_exit_edge_p (bb->loop_father, e)
1056 		|| (e2->dest->flags & BB_IRREDUCIBLE_LOOP))
1057 	      loops_state_set (LOOPS_NEED_FIXUP);
1058 	    to_remove_edges.safe_push (e2);
1059 	  }
1060     }
1061 
1062   /* If this is a store into a variable that is being optimized away,
1063      add a debug bind stmt if possible.  */
1064   if (MAY_HAVE_DEBUG_BIND_STMTS
1065       && gimple_assign_single_p (stmt)
1066       && is_gimple_val (gimple_assign_rhs1 (stmt)))
1067     {
1068       tree lhs = gimple_assign_lhs (stmt);
1069       if ((VAR_P (lhs) || TREE_CODE (lhs) == PARM_DECL)
1070 	  && !DECL_IGNORED_P (lhs)
1071 	  && is_gimple_reg_type (TREE_TYPE (lhs))
1072 	  && !is_global_var (lhs)
1073 	  && !DECL_HAS_VALUE_EXPR_P (lhs))
1074 	{
1075 	  tree rhs = gimple_assign_rhs1 (stmt);
1076 	  gdebug *note
1077 	    = gimple_build_debug_bind (lhs, unshare_expr (rhs), stmt);
1078 	  gsi_insert_after (i, note, GSI_SAME_STMT);
1079 	}
1080     }
1081 
1082   unlink_stmt_vdef (stmt);
1083   gsi_remove (i, true);
1084   release_defs (stmt);
1085 }
1086 
1087 /* Helper for maybe_optimize_arith_overflow.  Find in *TP if there are any
1088    uses of data (SSA_NAME) other than REALPART_EXPR referencing it.  */
1089 
1090 static tree
1091 find_non_realpart_uses (tree *tp, int *walk_subtrees, void *data)
1092 {
1093   if (TYPE_P (*tp) || TREE_CODE (*tp) == REALPART_EXPR)
1094     *walk_subtrees = 0;
1095   if (*tp == (tree) data)
1096     return *tp;
1097   return NULL_TREE;
1098 }
1099 
1100 /* If the IMAGPART_EXPR of the {ADD,SUB,MUL}_OVERFLOW result is never used,
1101    but REALPART_EXPR is, optimize the {ADD,SUB,MUL}_OVERFLOW internal calls
1102    into plain unsigned {PLUS,MINUS,MULT}_EXPR, and if needed reset debug
1103    uses.  */
1104 
1105 static void
1106 maybe_optimize_arith_overflow (gimple_stmt_iterator *gsi,
1107 			       enum tree_code subcode)
1108 {
1109   gimple *stmt = gsi_stmt (*gsi);
1110   tree lhs = gimple_call_lhs (stmt);
1111 
1112   if (lhs == NULL || TREE_CODE (lhs) != SSA_NAME)
1113     return;
1114 
1115   imm_use_iterator imm_iter;
1116   use_operand_p use_p;
1117   bool has_debug_uses = false;
1118   bool has_realpart_uses = false;
1119   bool has_other_uses = false;
1120   FOR_EACH_IMM_USE_FAST (use_p, imm_iter, lhs)
1121     {
1122       gimple *use_stmt = USE_STMT (use_p);
1123       if (is_gimple_debug (use_stmt))
1124 	has_debug_uses = true;
1125       else if (is_gimple_assign (use_stmt)
1126 	       && gimple_assign_rhs_code (use_stmt) == REALPART_EXPR
1127 	       && TREE_OPERAND (gimple_assign_rhs1 (use_stmt), 0) == lhs)
1128 	has_realpart_uses = true;
1129       else
1130 	{
1131 	  has_other_uses = true;
1132 	  break;
1133 	}
1134     }
1135 
1136   if (!has_realpart_uses || has_other_uses)
1137     return;
1138 
1139   tree arg0 = gimple_call_arg (stmt, 0);
1140   tree arg1 = gimple_call_arg (stmt, 1);
1141   location_t loc = gimple_location (stmt);
1142   tree type = TREE_TYPE (TREE_TYPE (lhs));
1143   tree utype = type;
1144   if (!TYPE_UNSIGNED (type))
1145     utype = build_nonstandard_integer_type (TYPE_PRECISION (type), 1);
1146   tree result = fold_build2_loc (loc, subcode, utype,
1147 				 fold_convert_loc (loc, utype, arg0),
1148 				 fold_convert_loc (loc, utype, arg1));
1149   result = fold_convert_loc (loc, type, result);
1150 
1151   if (has_debug_uses)
1152     {
1153       gimple *use_stmt;
1154       FOR_EACH_IMM_USE_STMT (use_stmt, imm_iter, lhs)
1155 	{
1156 	  if (!gimple_debug_bind_p (use_stmt))
1157 	    continue;
1158 	  tree v = gimple_debug_bind_get_value (use_stmt);
1159 	  if (walk_tree (&v, find_non_realpart_uses, lhs, NULL))
1160 	    {
1161 	      gimple_debug_bind_reset_value (use_stmt);
1162 	      update_stmt (use_stmt);
1163 	    }
1164 	}
1165     }
1166 
1167   if (TREE_CODE (result) == INTEGER_CST && TREE_OVERFLOW (result))
1168     result = drop_tree_overflow (result);
1169   tree overflow = build_zero_cst (type);
1170   tree ctype = build_complex_type (type);
1171   if (TREE_CODE (result) == INTEGER_CST)
1172     result = build_complex (ctype, result, overflow);
1173   else
1174     result = build2_loc (gimple_location (stmt), COMPLEX_EXPR,
1175 			 ctype, result, overflow);
1176 
1177   if (dump_file && (dump_flags & TDF_DETAILS))
1178     {
1179       fprintf (dump_file, "Transforming call: ");
1180       print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
1181       fprintf (dump_file, "because the overflow result is never used into: ");
1182       print_generic_stmt (dump_file, result, TDF_SLIM);
1183       fprintf (dump_file, "\n");
1184     }
1185 
1186   if (!update_call_from_tree (gsi, result))
1187     gimplify_and_update_call_from_tree (gsi, result);
1188 }
1189 
1190 /* Eliminate unnecessary statements. Any instruction not marked as necessary
1191    contributes nothing to the program, and can be deleted.  */
1192 
1193 static bool
1194 eliminate_unnecessary_stmts (void)
1195 {
1196   bool something_changed = false;
1197   basic_block bb;
1198   gimple_stmt_iterator gsi, psi;
1199   gimple *stmt;
1200   tree call;
1201   vec<basic_block> h;
1202   auto_vec<edge> to_remove_edges;
1203 
1204   if (dump_file && (dump_flags & TDF_DETAILS))
1205     fprintf (dump_file, "\nEliminating unnecessary statements:\n");
1206 
1207   clear_special_calls ();
1208 
1209   /* Walking basic blocks and statements in reverse order avoids
1210      releasing SSA names before any other DEFs that refer to them are
1211      released.  This helps avoid loss of debug information, as we get
1212      a chance to propagate all RHSs of removed SSAs into debug uses,
1213      rather than only the latest ones.  E.g., consider:
1214 
1215      x_3 = y_1 + z_2;
1216      a_5 = x_3 - b_4;
1217      # DEBUG a => a_5
1218 
1219      If we were to release x_3 before a_5, when we reached a_5 and
1220      tried to substitute it into the debug stmt, we'd see x_3 there,
1221      but x_3's DEF, type, etc would have already been disconnected.
1222      By going backwards, the debug stmt first changes to:
1223 
1224      # DEBUG a => x_3 - b_4
1225 
1226      and then to:
1227 
1228      # DEBUG a => y_1 + z_2 - b_4
1229 
1230      as desired.  */
1231   gcc_assert (dom_info_available_p (CDI_DOMINATORS));
1232   h = get_all_dominated_blocks (CDI_DOMINATORS,
1233 				single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
1234 
1235   while (h.length ())
1236     {
1237       bb = h.pop ();
1238 
1239       /* Remove dead statements.  */
1240       auto_bitmap debug_seen;
1241       for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi); gsi = psi)
1242 	{
1243 	  stmt = gsi_stmt (gsi);
1244 
1245 	  psi = gsi;
1246 	  gsi_prev (&psi);
1247 
1248 	  stats.total++;
1249 
1250 	  /* We can mark a call to free as not necessary if the
1251 	     defining statement of its argument is not necessary
1252 	     (and thus is getting removed).  */
1253 	  if (gimple_plf (stmt, STMT_NECESSARY)
1254 	      && gimple_call_builtin_p (stmt, BUILT_IN_FREE))
1255 	    {
1256 	      tree ptr = gimple_call_arg (stmt, 0);
1257 	      if (TREE_CODE (ptr) == SSA_NAME)
1258 		{
1259 		  gimple *def_stmt = SSA_NAME_DEF_STMT (ptr);
1260 		  if (!gimple_nop_p (def_stmt)
1261 		      && !gimple_plf (def_stmt, STMT_NECESSARY))
1262 		    gimple_set_plf (stmt, STMT_NECESSARY, false);
1263 		}
1264 	    }
1265 
1266 	  /* If GSI is not necessary then remove it.  */
1267 	  if (!gimple_plf (stmt, STMT_NECESSARY))
1268 	    {
1269 	      /* Keep clobbers that we can keep live live.  */
1270 	      if (gimple_clobber_p (stmt))
1271 		{
1272 		  ssa_op_iter iter;
1273 		  use_operand_p use_p;
1274 		  bool dead = false;
1275 		  FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
1276 		    {
1277 		      tree name = USE_FROM_PTR (use_p);
1278 		      if (!SSA_NAME_IS_DEFAULT_DEF (name)
1279 			  && !bitmap_bit_p (processed, SSA_NAME_VERSION (name)))
1280 			{
1281 			  dead = true;
1282 			  break;
1283 			}
1284 		    }
1285 		  if (!dead)
1286 		    {
1287 		      bitmap_clear (debug_seen);
1288 		      continue;
1289 		    }
1290 		}
1291 	      if (!is_gimple_debug (stmt))
1292 		something_changed = true;
1293 	      remove_dead_stmt (&gsi, bb, to_remove_edges);
1294 	      continue;
1295 	    }
1296 	  else if (is_gimple_call (stmt))
1297 	    {
1298 	      tree name = gimple_call_lhs (stmt);
1299 
1300 	      notice_special_calls (as_a <gcall *> (stmt));
1301 
1302 	      /* When LHS of var = call (); is dead, simplify it into
1303 		 call (); saving one operand.  */
1304 	      if (name
1305 		  && TREE_CODE (name) == SSA_NAME
1306 		  && !bitmap_bit_p (processed, SSA_NAME_VERSION (name))
1307 		  /* Avoid doing so for allocation calls which we
1308 		     did not mark as necessary, it will confuse the
1309 		     special logic we apply to malloc/free pair removal.  */
1310 		  && (!(call = gimple_call_fndecl (stmt))
1311 		      || DECL_BUILT_IN_CLASS (call) != BUILT_IN_NORMAL
1312 		      || (DECL_FUNCTION_CODE (call) != BUILT_IN_ALIGNED_ALLOC
1313 			  && DECL_FUNCTION_CODE (call) != BUILT_IN_MALLOC
1314 			  && DECL_FUNCTION_CODE (call) != BUILT_IN_CALLOC
1315 			  && !ALLOCA_FUNCTION_CODE_P
1316 			      (DECL_FUNCTION_CODE (call)))))
1317 		{
1318 		  something_changed = true;
1319 		  if (dump_file && (dump_flags & TDF_DETAILS))
1320 		    {
1321 		      fprintf (dump_file, "Deleting LHS of call: ");
1322 		      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
1323 		      fprintf (dump_file, "\n");
1324 		    }
1325 
1326 		  gimple_call_set_lhs (stmt, NULL_TREE);
1327 		  maybe_clean_or_replace_eh_stmt (stmt, stmt);
1328 		  update_stmt (stmt);
1329 		  release_ssa_name (name);
1330 
1331 		  /* GOMP_SIMD_LANE (unless two argument) or ASAN_POISON
1332 		     without lhs is not needed.  */
1333 		  if (gimple_call_internal_p (stmt))
1334 		    switch (gimple_call_internal_fn (stmt))
1335 		      {
1336 		      case IFN_GOMP_SIMD_LANE:
1337 			if (gimple_call_num_args (stmt) >= 2
1338 			    && !integer_nonzerop (gimple_call_arg (stmt, 1)))
1339 			  break;
1340 			/* FALLTHRU */
1341 		      case IFN_ASAN_POISON:
1342 			remove_dead_stmt (&gsi, bb, to_remove_edges);
1343 			break;
1344 		      default:
1345 			break;
1346 		      }
1347 		}
1348 	      else if (gimple_call_internal_p (stmt))
1349 		switch (gimple_call_internal_fn (stmt))
1350 		  {
1351 		  case IFN_ADD_OVERFLOW:
1352 		    maybe_optimize_arith_overflow (&gsi, PLUS_EXPR);
1353 		    break;
1354 		  case IFN_SUB_OVERFLOW:
1355 		    maybe_optimize_arith_overflow (&gsi, MINUS_EXPR);
1356 		    break;
1357 		  case IFN_MUL_OVERFLOW:
1358 		    maybe_optimize_arith_overflow (&gsi, MULT_EXPR);
1359 		    break;
1360 		  default:
1361 		    break;
1362 		  }
1363 	    }
1364 	  else if (gimple_debug_bind_p (stmt))
1365 	    {
1366 	      /* We are only keeping the last debug-bind of a
1367 	         non-DEBUG_EXPR_DECL variable in a series of
1368 		 debug-bind stmts.  */
1369 	      tree var = gimple_debug_bind_get_var (stmt);
1370 	      if (TREE_CODE (var) != DEBUG_EXPR_DECL
1371 		  && !bitmap_set_bit (debug_seen, DECL_UID (var)))
1372 		remove_dead_stmt (&gsi, bb, to_remove_edges);
1373 	      continue;
1374 	    }
1375 	  bitmap_clear (debug_seen);
1376 	}
1377 
1378       /* Remove dead PHI nodes.  */
1379       something_changed |= remove_dead_phis (bb);
1380     }
1381 
1382   h.release ();
1383 
1384   /* Since we don't track liveness of virtual PHI nodes, it is possible that we
1385      rendered some PHI nodes unreachable while they are still in use.
1386      Mark them for renaming.  */
1387   if (!to_remove_edges.is_empty ())
1388     {
1389       basic_block prev_bb;
1390 
1391       /* Remove edges.  We've delayed this to not get bogus debug stmts
1392          during PHI node removal.  */
1393       for (unsigned i = 0; i < to_remove_edges.length (); ++i)
1394 	remove_edge (to_remove_edges[i]);
1395       cfg_altered = true;
1396 
1397       find_unreachable_blocks ();
1398 
1399       /* Delete all unreachable basic blocks in reverse dominator order.  */
1400       for (bb = EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb;
1401 	   bb != ENTRY_BLOCK_PTR_FOR_FN (cfun); bb = prev_bb)
1402 	{
1403 	  prev_bb = bb->prev_bb;
1404 
1405 	  if (!bitmap_bit_p (bb_contains_live_stmts, bb->index)
1406 	      || !(bb->flags & BB_REACHABLE))
1407 	    {
1408 	      for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
1409 		   gsi_next (&gsi))
1410 		if (virtual_operand_p (gimple_phi_result (gsi.phi ())))
1411 		  {
1412 		    bool found = false;
1413 		    imm_use_iterator iter;
1414 
1415 		    FOR_EACH_IMM_USE_STMT (stmt, iter,
1416 					   gimple_phi_result (gsi.phi ()))
1417 		      {
1418 			if (!(gimple_bb (stmt)->flags & BB_REACHABLE))
1419 			  continue;
1420 			if (gimple_code (stmt) == GIMPLE_PHI
1421 			    || gimple_plf (stmt, STMT_NECESSARY))
1422 			  {
1423 			    found = true;
1424 			    BREAK_FROM_IMM_USE_STMT (iter);
1425 			  }
1426 		      }
1427 		    if (found)
1428 		      mark_virtual_phi_result_for_renaming (gsi.phi ());
1429 		  }
1430 
1431 	      if (!(bb->flags & BB_REACHABLE))
1432 		{
1433 		  /* Speed up the removal of blocks that don't
1434 		     dominate others.  Walking backwards, this should
1435 		     be the common case.  ??? Do we need to recompute
1436 		     dominators because of cfg_altered?  */
1437 		  if (!first_dom_son (CDI_DOMINATORS, bb))
1438 		    delete_basic_block (bb);
1439 		  else
1440 		    {
1441 		      h = get_all_dominated_blocks (CDI_DOMINATORS, bb);
1442 
1443 		      while (h.length ())
1444 			{
1445 			  bb = h.pop ();
1446 			  prev_bb = bb->prev_bb;
1447 			  /* Rearrangements to the CFG may have failed
1448 			     to update the dominators tree, so that
1449 			     formerly-dominated blocks are now
1450 			     otherwise reachable.  */
1451 			  if (!!(bb->flags & BB_REACHABLE))
1452 			    continue;
1453 			  delete_basic_block (bb);
1454 			}
1455 
1456 		      h.release ();
1457 		    }
1458 		}
1459 	    }
1460 	}
1461     }
1462 
1463   if (bb_postorder)
1464     free (bb_postorder);
1465   bb_postorder = NULL;
1466 
1467   return something_changed;
1468 }
1469 
1470 
1471 /* Print out removed statement statistics.  */
1472 
1473 static void
1474 print_stats (void)
1475 {
1476   float percg;
1477 
1478   percg = ((float) stats.removed / (float) stats.total) * 100;
1479   fprintf (dump_file, "Removed %d of %d statements (%d%%)\n",
1480 	   stats.removed, stats.total, (int) percg);
1481 
1482   if (stats.total_phis == 0)
1483     percg = 0;
1484   else
1485     percg = ((float) stats.removed_phis / (float) stats.total_phis) * 100;
1486 
1487   fprintf (dump_file, "Removed %d of %d PHI nodes (%d%%)\n",
1488 	   stats.removed_phis, stats.total_phis, (int) percg);
1489 }
1490 
1491 /* Initialization for this pass.  Set up the used data structures.  */
1492 
1493 static void
1494 tree_dce_init (bool aggressive)
1495 {
1496   memset ((void *) &stats, 0, sizeof (stats));
1497 
1498   if (aggressive)
1499     {
1500       last_stmt_necessary = sbitmap_alloc (last_basic_block_for_fn (cfun));
1501       bitmap_clear (last_stmt_necessary);
1502       bb_contains_live_stmts = sbitmap_alloc (last_basic_block_for_fn (cfun));
1503       bitmap_clear (bb_contains_live_stmts);
1504     }
1505 
1506   processed = sbitmap_alloc (num_ssa_names + 1);
1507   bitmap_clear (processed);
1508 
1509   worklist.create (64);
1510   cfg_altered = false;
1511 }
1512 
1513 /* Cleanup after this pass.  */
1514 
1515 static void
1516 tree_dce_done (bool aggressive)
1517 {
1518   if (aggressive)
1519     {
1520       delete cd;
1521       sbitmap_free (visited_control_parents);
1522       sbitmap_free (last_stmt_necessary);
1523       sbitmap_free (bb_contains_live_stmts);
1524       bb_contains_live_stmts = NULL;
1525     }
1526 
1527   sbitmap_free (processed);
1528 
1529   worklist.release ();
1530 }
1531 
1532 /* Main routine to eliminate dead code.
1533 
1534    AGGRESSIVE controls the aggressiveness of the algorithm.
1535    In conservative mode, we ignore control dependence and simply declare
1536    all but the most trivially dead branches necessary.  This mode is fast.
1537    In aggressive mode, control dependences are taken into account, which
1538    results in more dead code elimination, but at the cost of some time.
1539 
1540    FIXME: Aggressive mode before PRE doesn't work currently because
1541 	  the dominance info is not invalidated after DCE1.  This is
1542 	  not an issue right now because we only run aggressive DCE
1543 	  as the last tree SSA pass, but keep this in mind when you
1544 	  start experimenting with pass ordering.  */
1545 
1546 static unsigned int
1547 perform_tree_ssa_dce (bool aggressive)
1548 {
1549   bool something_changed = 0;
1550 
1551   calculate_dominance_info (CDI_DOMINATORS);
1552 
1553   /* Preheaders are needed for SCEV to work.
1554      Simple lateches and recorded exits improve chances that loop will
1555      proved to be finite in testcases such as in loop-15.c and loop-24.c  */
1556   bool in_loop_pipeline = scev_initialized_p ();
1557   if (aggressive && ! in_loop_pipeline)
1558     {
1559       scev_initialize ();
1560       loop_optimizer_init (LOOPS_NORMAL
1561 			   | LOOPS_HAVE_RECORDED_EXITS);
1562     }
1563 
1564   tree_dce_init (aggressive);
1565 
1566   if (aggressive)
1567     {
1568       /* Compute control dependence.  */
1569       calculate_dominance_info (CDI_POST_DOMINATORS);
1570       cd = new control_dependences ();
1571 
1572       visited_control_parents =
1573 	sbitmap_alloc (last_basic_block_for_fn (cfun));
1574       bitmap_clear (visited_control_parents);
1575 
1576       mark_dfs_back_edges ();
1577     }
1578 
1579   find_obviously_necessary_stmts (aggressive);
1580 
1581   if (aggressive && ! in_loop_pipeline)
1582     {
1583       loop_optimizer_finalize ();
1584       scev_finalize ();
1585     }
1586 
1587   longest_chain = 0;
1588   total_chain = 0;
1589   nr_walks = 0;
1590   chain_ovfl = false;
1591   visited = BITMAP_ALLOC (NULL);
1592   propagate_necessity (aggressive);
1593   BITMAP_FREE (visited);
1594 
1595   something_changed |= eliminate_unnecessary_stmts ();
1596   something_changed |= cfg_altered;
1597 
1598   /* We do not update postdominators, so free them unconditionally.  */
1599   free_dominance_info (CDI_POST_DOMINATORS);
1600 
1601   /* If we removed paths in the CFG, then we need to update
1602      dominators as well.  I haven't investigated the possibility
1603      of incrementally updating dominators.  */
1604   if (cfg_altered)
1605     free_dominance_info (CDI_DOMINATORS);
1606 
1607   statistics_counter_event (cfun, "Statements deleted", stats.removed);
1608   statistics_counter_event (cfun, "PHI nodes deleted", stats.removed_phis);
1609 
1610   /* Debugging dumps.  */
1611   if (dump_file && (dump_flags & (TDF_STATS|TDF_DETAILS)))
1612     print_stats ();
1613 
1614   tree_dce_done (aggressive);
1615 
1616   if (something_changed)
1617     {
1618       free_numbers_of_iterations_estimates (cfun);
1619       if (in_loop_pipeline)
1620 	scev_reset ();
1621       return TODO_update_ssa | TODO_cleanup_cfg;
1622     }
1623   return 0;
1624 }
1625 
1626 /* Pass entry points.  */
1627 static unsigned int
1628 tree_ssa_dce (void)
1629 {
1630   return perform_tree_ssa_dce (/*aggressive=*/false);
1631 }
1632 
1633 static unsigned int
1634 tree_ssa_cd_dce (void)
1635 {
1636   return perform_tree_ssa_dce (/*aggressive=*/optimize >= 2);
1637 }
1638 
1639 namespace {
1640 
1641 const pass_data pass_data_dce =
1642 {
1643   GIMPLE_PASS, /* type */
1644   "dce", /* name */
1645   OPTGROUP_NONE, /* optinfo_flags */
1646   TV_TREE_DCE, /* tv_id */
1647   ( PROP_cfg | PROP_ssa ), /* properties_required */
1648   0, /* properties_provided */
1649   0, /* properties_destroyed */
1650   0, /* todo_flags_start */
1651   0, /* todo_flags_finish */
1652 };
1653 
1654 class pass_dce : public gimple_opt_pass
1655 {
1656 public:
1657   pass_dce (gcc::context *ctxt)
1658     : gimple_opt_pass (pass_data_dce, ctxt)
1659   {}
1660 
1661   /* opt_pass methods: */
1662   opt_pass * clone () { return new pass_dce (m_ctxt); }
1663   virtual bool gate (function *) { return flag_tree_dce != 0; }
1664   virtual unsigned int execute (function *) { return tree_ssa_dce (); }
1665 
1666 }; // class pass_dce
1667 
1668 } // anon namespace
1669 
1670 gimple_opt_pass *
1671 make_pass_dce (gcc::context *ctxt)
1672 {
1673   return new pass_dce (ctxt);
1674 }
1675 
1676 namespace {
1677 
1678 const pass_data pass_data_cd_dce =
1679 {
1680   GIMPLE_PASS, /* type */
1681   "cddce", /* name */
1682   OPTGROUP_NONE, /* optinfo_flags */
1683   TV_TREE_CD_DCE, /* tv_id */
1684   ( PROP_cfg | PROP_ssa ), /* properties_required */
1685   0, /* properties_provided */
1686   0, /* properties_destroyed */
1687   0, /* todo_flags_start */
1688   0, /* todo_flags_finish */
1689 };
1690 
1691 class pass_cd_dce : public gimple_opt_pass
1692 {
1693 public:
1694   pass_cd_dce (gcc::context *ctxt)
1695     : gimple_opt_pass (pass_data_cd_dce, ctxt)
1696   {}
1697 
1698   /* opt_pass methods: */
1699   opt_pass * clone () { return new pass_cd_dce (m_ctxt); }
1700   virtual bool gate (function *) { return flag_tree_dce != 0; }
1701   virtual unsigned int execute (function *) { return tree_ssa_cd_dce (); }
1702 
1703 }; // class pass_cd_dce
1704 
1705 } // anon namespace
1706 
1707 gimple_opt_pass *
1708 make_pass_cd_dce (gcc::context *ctxt)
1709 {
1710   return new pass_cd_dce (ctxt);
1711 }
1712 
1713 
1714 /* A cheap DCE interface.  WORKLIST is a list of possibly dead stmts and
1715    is consumed by this function.  The function has linear complexity in
1716    the number of dead stmts with a constant factor like the average SSA
1717    use operands number.  */
1718 
1719 void
1720 simple_dce_from_worklist (bitmap worklist)
1721 {
1722   while (! bitmap_empty_p (worklist))
1723     {
1724       /* Pop item.  */
1725       unsigned i = bitmap_first_set_bit (worklist);
1726       bitmap_clear_bit (worklist, i);
1727 
1728       tree def = ssa_name (i);
1729       /* Removed by somebody else or still in use.  */
1730       if (! def || ! has_zero_uses (def))
1731 	continue;
1732 
1733       gimple *t = SSA_NAME_DEF_STMT (def);
1734       if (gimple_has_side_effects (t))
1735 	continue;
1736 
1737       /* Add uses to the worklist.  */
1738       ssa_op_iter iter;
1739       use_operand_p use_p;
1740       FOR_EACH_PHI_OR_STMT_USE (use_p, t, iter, SSA_OP_USE)
1741 	{
1742 	  tree use = USE_FROM_PTR (use_p);
1743 	  if (TREE_CODE (use) == SSA_NAME
1744 	      && ! SSA_NAME_IS_DEFAULT_DEF (use))
1745 	    bitmap_set_bit (worklist, SSA_NAME_VERSION (use));
1746 	}
1747 
1748       /* Remove stmt.  */
1749       if (dump_file && (dump_flags & TDF_DETAILS))
1750 	{
1751 	  fprintf (dump_file, "Removing dead stmt:");
1752 	  print_gimple_stmt (dump_file, t, 0);
1753 	}
1754       gimple_stmt_iterator gsi = gsi_for_stmt (t);
1755       if (gimple_code (t) == GIMPLE_PHI)
1756 	remove_phi_node (&gsi, true);
1757       else
1758 	{
1759 	  gsi_remove (&gsi, true);
1760 	  release_defs (t);
1761 	}
1762     }
1763 }
1764