/* Dead code elimination pass for the GNU compiler.
   Copyright (C) 2002-2022 Free Software Foundation, Inc.
   Contributed by Ben Elliston <bje@redhat.com>
   and Andrew MacLeod <amacleod@redhat.com>
   Adapted to use control dependence by Steven Bosscher, SUSE Labs.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* Dead code elimination.

   References:

     Building an Optimizing Compiler,
     Robert Morgan, Butterworth-Heinemann, 1998, Section 8.9.

     Advanced Compiler Design and Implementation,
     Steven Muchnick, Morgan Kaufmann, 1997, Section 18.10.

   Dead-code elimination is the removal of statements which have no
   impact on the program's output: "dead statements" have no effect
   on the output, while "necessary statements" may have.

   The algorithm consists of three phases:
   1. Marking as necessary all statements known to be necessary,
      e.g. most function calls, writing a value to memory, etc;
   2. Propagating necessary statements, e.g., the statements
      giving values to operands in necessary statements; and
   3. Removing dead statements.  */
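
/* As an illustrative sketch (a hypothetical fragment, not taken from a
   testcase), given GIMPLE such as

     a_1 = x_0 + 1;
     b_2 = x_0 * 2;
     return b_2;

   phase 1 marks the return statement as inherently necessary, phase 2
   propagates necessity to the definition of b_2, and phase 3 removes
   the definition of a_1, which was never marked necessary.  */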

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "cfghooks.h"
#include "tree-pass.h"
#include "ssa.h"
#include "gimple-pretty-print.h"
#include "fold-const.h"
#include "calls.h"
#include "cfganal.h"
#include "tree-eh.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "tree-cfg.h"
#include "tree-ssa-loop-niter.h"
#include "tree-into-ssa.h"
#include "tree-dfa.h"
#include "cfgloop.h"
#include "tree-scalar-evolution.h"
#include "tree-ssa-propagate.h"
#include "gimple-fold.h"
#include "tree-ssa.h"

static struct stmt_stats
{
  int total;
  int total_phis;
  int removed;
  int removed_phis;
} stats;

#define STMT_NECESSARY GF_PLF_1

static vec<gimple *> worklist;

/* Vector indicating an SSA name has already been processed and marked
   as necessary.  */
static sbitmap processed;

/* Vector indicating that the last statement of a basic block has already
   been marked as necessary.  */
static sbitmap last_stmt_necessary;

/* Vector indicating that BB contains statements that are live.  */
static sbitmap bb_contains_live_stmts;

/* Before we can determine whether a control branch is dead, we need to
   compute which blocks are control dependent on which edges.

   We expect each block to be control dependent on very few edges so we
   use a bitmap for each block recording its edges.  An array holds the
   bitmap.  The Ith bit in the bitmap is set if that block is dependent
   on the Ith edge.  */
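/* For intuition, a hand-written example (not derived from a testcase):
   in

     if (p_1)        <- block A, ends in the condition
       x_2 = f ();   <- block B

   block B executes only when the A->B edge is taken, so B is control
   dependent on that edge and the corresponding bit is set in B's
   bitmap.  */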
static control_dependences *cd;

/* Vector indicating that a basic block has already had all the edges
   processed that it is control dependent on.  */
static sbitmap visited_control_parents;

/* TRUE if this pass alters the CFG (by removing control statements).
   FALSE otherwise.

   If this pass alters the CFG, then it will arrange for the dominators
   to be recomputed.  */
static bool cfg_altered;

/* When non-NULL holds map from basic block index into the postorder.  */
static int *bb_postorder;


/* True if we should treat any stmt with a vdef as necessary.  */

static inline bool
keep_all_vdefs_p ()
{
  return optimize_debug;
}

/* If STMT is not already marked necessary, mark it, and add it to the
   worklist if ADD_TO_WORKLIST is true.  */

static inline void
mark_stmt_necessary (gimple *stmt, bool add_to_worklist)
{
  gcc_assert (stmt);

  if (gimple_plf (stmt, STMT_NECESSARY))
    return;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Marking useful stmt: ");
      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
      fprintf (dump_file, "\n");
    }

  gimple_set_plf (stmt, STMT_NECESSARY, true);
  if (add_to_worklist)
    worklist.safe_push (stmt);
  if (add_to_worklist && bb_contains_live_stmts && !is_gimple_debug (stmt))
    bitmap_set_bit (bb_contains_live_stmts, gimple_bb (stmt)->index);
}


/* Mark the statement defining operand OP as necessary.  */

static inline void
mark_operand_necessary (tree op)
{
  gimple *stmt;
  int ver;

  gcc_assert (op);

  ver = SSA_NAME_VERSION (op);
  if (bitmap_bit_p (processed, ver))
    {
      stmt = SSA_NAME_DEF_STMT (op);
      gcc_assert (gimple_nop_p (stmt)
		  || gimple_plf (stmt, STMT_NECESSARY));
      return;
    }
  bitmap_set_bit (processed, ver);

  stmt = SSA_NAME_DEF_STMT (op);
  gcc_assert (stmt);

  if (gimple_plf (stmt, STMT_NECESSARY) || gimple_nop_p (stmt))
    return;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "marking necessary through ");
      print_generic_expr (dump_file, op);
      fprintf (dump_file, " stmt ");
      print_gimple_stmt (dump_file, stmt, 0);
    }

  gimple_set_plf (stmt, STMT_NECESSARY, true);
  if (bb_contains_live_stmts)
    bitmap_set_bit (bb_contains_live_stmts, gimple_bb (stmt)->index);
  worklist.safe_push (stmt);
}


/* Mark STMT as necessary if it obviously is.  Add it to the worklist if
   it can make other statements necessary.

   If AGGRESSIVE is false, control statements are conservatively marked as
   necessary.  */

static void
mark_stmt_if_obviously_necessary (gimple *stmt, bool aggressive)
{
  /* Statements that are implicitly live.  Most function calls, asm
     and return statements are required.  Labels and GIMPLE_BIND nodes
     are kept because they are control flow, and we have no way of
     knowing whether they can be removed.  DCE can eliminate all the
     other statements in a block, and CFG can then remove the block
     and labels.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_PREDICT:
    case GIMPLE_LABEL:
      mark_stmt_necessary (stmt, false);
      return;

    case GIMPLE_ASM:
    case GIMPLE_RESX:
    case GIMPLE_RETURN:
      mark_stmt_necessary (stmt, true);
      return;

    case GIMPLE_CALL:
      {
	tree callee = gimple_call_fndecl (stmt);
	if (callee != NULL_TREE
	    && fndecl_built_in_p (callee, BUILT_IN_NORMAL))
	  switch (DECL_FUNCTION_CODE (callee))
	    {
	    case BUILT_IN_MALLOC:
	    case BUILT_IN_ALIGNED_ALLOC:
	    case BUILT_IN_CALLOC:
	    CASE_BUILT_IN_ALLOCA:
	    case BUILT_IN_STRDUP:
	    case BUILT_IN_STRNDUP:
	    case BUILT_IN_GOMP_ALLOC:
	      return;

	    default:;
	    }

	if (callee != NULL_TREE
	    && flag_allocation_dce
	    && DECL_IS_REPLACEABLE_OPERATOR_NEW_P (callee))
	  return;

	/* IFN_GOACC_LOOP calls are necessary in that they are used to
	   represent parameters (i.e. step, bound) of a lowered OpenACC
	   partitioned loop.  But this kind of partitioned loop might not
	   survive aggressive loop removal because it has a loop exit and
	   is assumed to be finite.  Therefore, we need to explicitly mark
	   these calls.  (An example is libgomp.oacc-c-c++-common/pr84955.c.)  */
	if (gimple_call_internal_p (stmt, IFN_GOACC_LOOP))
	  {
	    mark_stmt_necessary (stmt, true);
	    return;
	  }
	break;
      }

    case GIMPLE_DEBUG:
      /* Debug temps without a value are not useful.  ??? If we could
	 easily locate the debug temp bind stmt for a use thereof,
	 we could refrain from marking all debug temps here, and
	 mark them only if they're used.  */
      if (gimple_debug_nonbind_marker_p (stmt)
	  || !gimple_debug_bind_p (stmt)
	  || gimple_debug_bind_has_value_p (stmt)
	  || TREE_CODE (gimple_debug_bind_get_var (stmt)) != DEBUG_EXPR_DECL)
	mark_stmt_necessary (stmt, false);
      return;

    case GIMPLE_GOTO:
      gcc_assert (!simple_goto_p (stmt));
      mark_stmt_necessary (stmt, true);
      return;

    case GIMPLE_COND:
      gcc_assert (EDGE_COUNT (gimple_bb (stmt)->succs) == 2);
      /* Fall through.  */

    case GIMPLE_SWITCH:
      if (! aggressive)
	mark_stmt_necessary (stmt, true);
      break;

    case GIMPLE_ASSIGN:
      /* Mark indirect CLOBBERs to be lazily removed if their SSA operands
	 do not prevail.  That also makes control flow leading to them
	 not necessary in aggressive mode.  */
      if (gimple_clobber_p (stmt) && !zero_ssa_operands (stmt, SSA_OP_USE))
	return;
      break;

    default:
      break;
    }

  /* If the statement has volatile operands, it needs to be preserved.
     Same for statements that can alter control flow in unpredictable
     ways.  */
  if (gimple_has_side_effects (stmt) || is_ctrl_altering_stmt (stmt))
    {
      mark_stmt_necessary (stmt, true);
      return;
    }

  /* If a statement could throw, it can be deemed necessary unless we
     are allowed to remove dead EH.  Test this after checking for
     new/delete operators since we always elide their EH.  */
  if (!cfun->can_delete_dead_exceptions
      && stmt_could_throw_p (cfun, stmt))
    {
      mark_stmt_necessary (stmt, true);
      return;
    }

  if ((gimple_vdef (stmt) && keep_all_vdefs_p ())
      || stmt_may_clobber_global_p (stmt, false))
    {
      mark_stmt_necessary (stmt, true);
      return;
    }

  return;
}


/* Mark the last statement of BB as necessary.  */

static void
mark_last_stmt_necessary (basic_block bb)
{
  gimple *stmt = last_stmt (bb);

  bitmap_set_bit (last_stmt_necessary, bb->index);
  bitmap_set_bit (bb_contains_live_stmts, bb->index);

  /* We actually mark the statement only if it is a control statement.  */
  if (stmt && is_ctrl_stmt (stmt))
    mark_stmt_necessary (stmt, true);
}


/* Mark control dependent edges of BB as necessary.  We have to do this only
   once for each basic block so we set the appropriate bit after we're done.

   When IGNORE_SELF is true, ignore BB in the list of control dependences.  */

static void
mark_control_dependent_edges_necessary (basic_block bb, bool ignore_self)
{
  bitmap_iterator bi;
  unsigned edge_number;
  bool skipped = false;

  gcc_assert (bb != EXIT_BLOCK_PTR_FOR_FN (cfun));

  if (bb == ENTRY_BLOCK_PTR_FOR_FN (cfun))
    return;

  EXECUTE_IF_SET_IN_BITMAP (cd->get_edges_dependent_on (bb->index),
			    0, edge_number, bi)
    {
      basic_block cd_bb = cd->get_edge_src (edge_number);

      if (ignore_self && cd_bb == bb)
	{
	  skipped = true;
	  continue;
	}

      if (!bitmap_bit_p (last_stmt_necessary, cd_bb->index))
	mark_last_stmt_necessary (cd_bb);
    }

  if (!skipped)
    bitmap_set_bit (visited_control_parents, bb->index);
}


/* Find obviously necessary statements.  These are things like most function
   calls, and stores to file level variables.

   If AGGRESSIVE is false, control statements are conservatively marked as
   necessary.  Otherwise control dependence analysis is used and only the
   conditions controlling possibly infinite loops are marked here.  */

static void
find_obviously_necessary_stmts (bool aggressive)
{
  basic_block bb;
  gimple_stmt_iterator gsi;
  edge e;
  gimple *phi, *stmt;
  int flags;

  FOR_EACH_BB_FN (bb, cfun)
    {
      /* PHI nodes are never inherently necessary.  */
      for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  phi = gsi_stmt (gsi);
	  gimple_set_plf (phi, STMT_NECESSARY, false);
	}

      /* Check all statements in the block.  */
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  stmt = gsi_stmt (gsi);
	  gimple_set_plf (stmt, STMT_NECESSARY, false);
	  mark_stmt_if_obviously_necessary (stmt, aggressive);
	}
    }

  /* Pure and const functions are finite and thus have no infinite loops in
     them.  */
  flags = flags_from_decl_or_type (current_function_decl);
  if ((flags & (ECF_CONST|ECF_PURE)) && !(flags & ECF_LOOPING_CONST_OR_PURE))
    return;

  /* Prevent empty, possibly infinite loops from being removed.  This is
     needed to make the logic in remove_dead_stmt work to identify the
     correct edge to keep when removing a controlling condition.  */
  if (aggressive)
    {
      if (mark_irreducible_loops ())
	FOR_EACH_BB_FN (bb, cfun)
	  {
	    edge_iterator ei;
	    FOR_EACH_EDGE (e, ei, bb->succs)
	      if ((e->flags & EDGE_DFS_BACK)
		  && (e->flags & EDGE_IRREDUCIBLE_LOOP))
		{
		  if (dump_file)
		    fprintf (dump_file, "Marking back edge of irreducible "
			     "loop %i->%i\n", e->src->index, e->dest->index);
		  mark_control_dependent_edges_necessary (e->dest, false);
		}
	  }

      for (auto loop : loops_list (cfun, 0))
	/* For loops without an exit do not mark any condition.  */
	if (loop->exits->next->e && !finite_loop_p (loop))
	  {
	    if (dump_file)
	      fprintf (dump_file, "cannot prove finiteness of loop %i\n",
		       loop->num);
	    mark_control_dependent_edges_necessary (loop->latch, false);
	  }
    }
}


/* Return true if REF is based on an aliased base, otherwise false.  */
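/* For instance (our illustration, under the usual aliasing rules): a
   reference to a local whose address is never taken is not aliased and
   yields false here, while an indirect reference like *p_1 or a
   reference to an addressable or global variable yields true.  */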

static bool
ref_may_be_aliased (tree ref)
{
  gcc_assert (TREE_CODE (ref) != WITH_SIZE_EXPR);
  while (handled_component_p (ref))
    ref = TREE_OPERAND (ref, 0);
  if ((TREE_CODE (ref) == MEM_REF || TREE_CODE (ref) == TARGET_MEM_REF)
      && TREE_CODE (TREE_OPERAND (ref, 0)) == ADDR_EXPR)
    ref = TREE_OPERAND (TREE_OPERAND (ref, 0), 0);
  return !(DECL_P (ref)
	   && !may_be_aliased (ref));
}

static bitmap visited = NULL;
static unsigned int longest_chain = 0;
static unsigned int total_chain = 0;
static unsigned int nr_walks = 0;
static bool chain_ovfl = false;

/* Worker for the walker that marks reaching definitions of REF,
   which is based on a non-aliased decl, necessary.  It returns
   true whenever the defining statement of the current VDEF is
   a kill for REF, as no dominating may-defs are necessary for REF
   anymore.  DATA points to the basic-block that contains the
   stmt that refers to REF.  */

static bool
mark_aliased_reaching_defs_necessary_1 (ao_ref *ref, tree vdef, void *data)
{
  gimple *def_stmt = SSA_NAME_DEF_STMT (vdef);

  /* All stmts we visit are necessary.  */
  if (! gimple_clobber_p (def_stmt))
    mark_operand_necessary (vdef);

  /* If the stmt lhs kills ref, then we can stop walking.  */
  if (gimple_has_lhs (def_stmt)
      && TREE_CODE (gimple_get_lhs (def_stmt)) != SSA_NAME
      /* The assignment is not necessarily carried out if it can throw
	 and we can catch it in the current function where we could inspect
	 the previous value.
	 ???  We only need to care about the RHS throwing.  For aggregate
	 assignments or similar calls and non-call exceptions the LHS
	 might throw as well.  */
      && !stmt_can_throw_internal (cfun, def_stmt))
    {
      tree base, lhs = gimple_get_lhs (def_stmt);
      poly_int64 size, offset, max_size;
      bool reverse;
      ao_ref_base (ref);
      base
	= get_ref_base_and_extent (lhs, &offset, &size, &max_size, &reverse);
      /* We can get MEM[symbol: sZ, index: D.8862_1] here,
	 so base == ref->base does not always hold.  */
      if (base == ref->base)
	{
	  /* For a must-alias check we need to be able to constrain
	     the accesses properly.  */
	  if (known_eq (size, max_size)
	      && known_subrange_p (ref->offset, ref->max_size, offset, size))
	    return true;
	  /* Or they need to be exactly the same.  */
	  else if (ref->ref
		   /* Make sure there is no induction variable involved
		      in the references (gcc.c-torture/execute/pr42142.c).
		      The simplest way is to check if the kill dominates
		      the use.  */
		   /* But when both are in the same block we cannot
		      easily tell whether we came from a backedge
		      unless we decide to compute stmt UIDs
		      (see PR58246).  */
		   && (basic_block) data != gimple_bb (def_stmt)
		   && dominated_by_p (CDI_DOMINATORS, (basic_block) data,
				      gimple_bb (def_stmt))
		   && operand_equal_p (ref->ref, lhs, 0))
	    return true;
	}
    }

  /* Otherwise keep walking.  */
  return false;
}

static void
mark_aliased_reaching_defs_necessary (gimple *stmt, tree ref)
{
  /* Should have been caught before calling this function.  */
  gcc_checking_assert (!keep_all_vdefs_p ());

  unsigned int chain;
  ao_ref refd;
  gcc_assert (!chain_ovfl);
  ao_ref_init (&refd, ref);
  chain = walk_aliased_vdefs (&refd, gimple_vuse (stmt),
			      mark_aliased_reaching_defs_necessary_1,
			      gimple_bb (stmt), NULL);
  if (chain > longest_chain)
    longest_chain = chain;
  total_chain += chain;
  nr_walks++;
}

/* Worker for the walker that marks reaching definitions of REF, which
   is not based on a non-aliased decl.  For simplicity we need to end
   up marking all may-defs necessary that are not based on a non-aliased
   decl.  The only job of this walker is to skip may-defs based on
   a non-aliased decl.  */

static bool
mark_all_reaching_defs_necessary_1 (ao_ref *ref ATTRIBUTE_UNUSED,
				    tree vdef, void *data ATTRIBUTE_UNUSED)
{
  gimple *def_stmt = SSA_NAME_DEF_STMT (vdef);

  /* We have to skip already visited (and thus necessary) statements
     to make the chaining work after we dropped back to simple mode.  */
  if (chain_ovfl
      && bitmap_bit_p (processed, SSA_NAME_VERSION (vdef)))
    {
      gcc_assert (gimple_nop_p (def_stmt)
		  || gimple_plf (def_stmt, STMT_NECESSARY));
      return false;
    }

  /* We want to skip stores to non-aliased variables.  */
  if (!chain_ovfl
      && gimple_assign_single_p (def_stmt))
    {
      tree lhs = gimple_assign_lhs (def_stmt);
      if (!ref_may_be_aliased (lhs))
	return false;
    }

  /* We want to skip statements that do not constitute stores but have
     a virtual definition.  */
  if (gcall *call = dyn_cast <gcall *> (def_stmt))
    {
      tree callee = gimple_call_fndecl (call);
      if (callee != NULL_TREE
	  && fndecl_built_in_p (callee, BUILT_IN_NORMAL))
	switch (DECL_FUNCTION_CODE (callee))
	  {
	  case BUILT_IN_MALLOC:
	  case BUILT_IN_ALIGNED_ALLOC:
	  case BUILT_IN_CALLOC:
	  CASE_BUILT_IN_ALLOCA:
	  case BUILT_IN_FREE:
	  case BUILT_IN_GOMP_ALLOC:
	  case BUILT_IN_GOMP_FREE:
	    return false;

	  default:;
	  }

      if (callee != NULL_TREE
	  && (DECL_IS_REPLACEABLE_OPERATOR_NEW_P (callee)
	      || DECL_IS_OPERATOR_DELETE_P (callee))
	  && gimple_call_from_new_or_delete (call))
	return false;
    }

  if (! gimple_clobber_p (def_stmt))
    mark_operand_necessary (vdef);

  return false;
}

static void
mark_all_reaching_defs_necessary (gimple *stmt)
{
  /* Should have been caught before calling this function.  */
  gcc_checking_assert (!keep_all_vdefs_p ());
  walk_aliased_vdefs (NULL, gimple_vuse (stmt),
		      mark_all_reaching_defs_necessary_1, NULL, &visited);
}

/* Return true for PHI nodes with one or identical arguments
   which can thus be removed.  */
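/* E.g. (a made-up instance) x_2 = PHI <a_1(3), a_1(4)> merges the same
   value on all incoming edges and is degenerate in this sense.  */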
static bool
degenerate_phi_p (gimple *phi)
{
  unsigned int i;
  tree op = gimple_phi_arg_def (phi, 0);
  for (i = 1; i < gimple_phi_num_args (phi); i++)
    if (gimple_phi_arg_def (phi, i) != op)
      return false;
  return true;
}

/* Return true if NEW_CALL and DELETE_CALL are a valid pair of new
   and delete operators.  */
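/* E.g. (our illustration): a call to the replaceable operator new is
   validly paired with a call to the matching operator delete, but not
   with the array form operator delete[]; the overload called below
   compares the mangled assembler names of the two decls.  */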

static bool
valid_new_delete_pair_p (gimple *new_call, gimple *delete_call)
{
  tree new_asm = DECL_ASSEMBLER_NAME (gimple_call_fndecl (new_call));
  tree delete_asm = DECL_ASSEMBLER_NAME (gimple_call_fndecl (delete_call));
  return valid_new_delete_pair_p (new_asm, delete_asm);
}

/* Propagate necessity using the operands of necessary statements.
   Process the uses on each statement in the worklist, and add all
   feeding statements which contribute to the calculation of this
   value to the worklist.

   In conservative mode, AGGRESSIVE is false and control statements
   have already been marked necessary up front.  */

static void
propagate_necessity (bool aggressive)
{
  gimple *stmt;

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "\nProcessing worklist:\n");

  while (worklist.length () > 0)
    {
      /* Take STMT from worklist.  */
      stmt = worklist.pop ();

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "processing: ");
	  print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
	  fprintf (dump_file, "\n");
	}

      if (aggressive)
	{
	  /* Mark the last statement of the basic blocks on which the block
	     containing STMT is control dependent, but only if we haven't
	     already done so.  */
	  basic_block bb = gimple_bb (stmt);
	  if (bb != ENTRY_BLOCK_PTR_FOR_FN (cfun)
	      && !bitmap_bit_p (visited_control_parents, bb->index))
	    mark_control_dependent_edges_necessary (bb, false);
	}

      if (gimple_code (stmt) == GIMPLE_PHI
	  /* We do not process virtual PHI nodes nor do we track their
	     necessity.  */
	  && !virtual_operand_p (gimple_phi_result (stmt)))
	{
	  /* PHI nodes are somewhat special in that each PHI alternative has
	     data and control dependencies.  All the statements feeding the
	     PHI node's arguments are always necessary.  In aggressive mode,
	     we also consider the control dependent edges leading to the
	     predecessor block associated with each PHI alternative as
	     necessary.  */
	  gphi *phi = as_a <gphi *> (stmt);
	  size_t k;

	  for (k = 0; k < gimple_phi_num_args (stmt); k++)
	    {
	      tree arg = PHI_ARG_DEF (stmt, k);
	      if (TREE_CODE (arg) == SSA_NAME)
		mark_operand_necessary (arg);
	    }

	  /* For PHI operands it matters from where the control flow arrives
	     to the BB.  Consider the following example:

	     a=exp1;
	     b=exp2;
	     if (test)
		;
	     else
		;
	     c=PHI(a,b)

	     We need to mark control dependence of the empty basic blocks,
	     since they contain computations of PHI operands.

	     Doing so is too restrictive in the case the predecessor block
	     is in a loop.  Consider:

	      if (b)
		{
		  int i;
		  for (i = 0; i<1000; ++i)
		    ;
		  j = 0;
		}
	      return j;

	     There is a PHI for J in the BB containing the return statement.
	     In this case the control dependence of the predecessor block
	     (that is within the empty loop) also contains the block
	     determining the number of iterations of the loop, which would
	     prevent removing the empty loop here.

	     This scenario can be avoided by splitting critical edges.
	     To save the critical edge splitting pass we identify how the
	     control dependence would look like if the edge was split.

	     Consider the modified CFG created from the current CFG by
	     splitting edge B->C.  In the postdominance tree of the modified
	     CFG, C' is always a child of C.  There are two cases how the
	     children of C' can look like:

		1) C' is leaf

		   In this case the only basic block C' is control dependent on is B.

		2) C' has single child that is B

		   In this case control dependence of C' is same as control
		   dependence of B in original CFG except for block B itself.
		   (since C' postdominates B in the modified CFG)

	     Now how to decide what case happens?  There are two basic options:

		a) C postdominates B.  Then C immediately postdominates B and
		   case 2 happens iff there is no other way from B to C except
		   the edge B->C.

		   There is another way from B to C iff there is a successor of B
		   that is not postdominated by B.  Testing this condition is
		   somewhat expensive, because we need to iterate over all
		   successors of B.  We are safe to assume that this does not
		   happen: we will mark B as needed when processing the other
		   path from B to C that is control dependent on B and marking
		   control dependencies of B itself is harmless because they
		   will be processed anyway after processing the control
		   statement in B.

		b) C does not postdominate B.  Always case 1 happens since there is
		   path from C to exit that does not go through B and thus also C'.  */

	  if (aggressive && !degenerate_phi_p (stmt))
	    {
	      for (k = 0; k < gimple_phi_num_args (stmt); k++)
		{
		  basic_block arg_bb = gimple_phi_arg_edge (phi, k)->src;

		  if (gimple_bb (stmt)
		      != get_immediate_dominator (CDI_POST_DOMINATORS, arg_bb))
		    {
		      if (!bitmap_bit_p (last_stmt_necessary, arg_bb->index))
			mark_last_stmt_necessary (arg_bb);
		    }
		  else if (arg_bb != ENTRY_BLOCK_PTR_FOR_FN (cfun)
			   && !bitmap_bit_p (visited_control_parents,
					     arg_bb->index))
		    mark_control_dependent_edges_necessary (arg_bb, true);
		}
	    }
	}
      else
	{
	  /* Propagate through the operands.  Examine all the USE, VUSE and
	     VDEF operands in this statement.  Mark all the statements
	     which feed this statement's uses as necessary.  */
	  ssa_op_iter iter;
	  tree use;

	  /* If this is a call to free which is directly fed by an
	     allocation function do not mark that necessary through
	     processing the argument.  */
	  bool is_delete_operator
	    = (is_gimple_call (stmt)
	       && gimple_call_from_new_or_delete (as_a <gcall *> (stmt))
	       && gimple_call_operator_delete_p (as_a <gcall *> (stmt)));
	  if (is_delete_operator
	      || gimple_call_builtin_p (stmt, BUILT_IN_FREE)
	      || gimple_call_builtin_p (stmt, BUILT_IN_GOMP_FREE))
	    {
	      tree ptr = gimple_call_arg (stmt, 0);
	      gcall *def_stmt;
	      tree def_callee;
	      /* If the pointer we free is defined by an allocation
		 function do not add the call to the worklist.  */
	      if (TREE_CODE (ptr) == SSA_NAME
		  && (def_stmt = dyn_cast <gcall *> (SSA_NAME_DEF_STMT (ptr)))
		  && (def_callee = gimple_call_fndecl (def_stmt))
		  && ((DECL_BUILT_IN_CLASS (def_callee) == BUILT_IN_NORMAL
		       && (DECL_FUNCTION_CODE (def_callee) == BUILT_IN_ALIGNED_ALLOC
			   || DECL_FUNCTION_CODE (def_callee) == BUILT_IN_MALLOC
			   || DECL_FUNCTION_CODE (def_callee) == BUILT_IN_CALLOC
			   || DECL_FUNCTION_CODE (def_callee) == BUILT_IN_GOMP_ALLOC))
		      || (DECL_IS_REPLACEABLE_OPERATOR_NEW_P (def_callee)
			  && gimple_call_from_new_or_delete (def_stmt))))
		{
		  if (is_delete_operator
		      && !valid_new_delete_pair_p (def_stmt, stmt))
		    mark_operand_necessary (gimple_call_arg (stmt, 0));

		  /* Delete operators can have alignment and (or) size
		     as next arguments.  When they are SSA_NAMEs, they
		     must be marked as necessary.  Similarly GOMP_free.  */
		  if (gimple_call_num_args (stmt) >= 2)
		    for (unsigned i = 1; i < gimple_call_num_args (stmt);
			 i++)
		      {
			tree arg = gimple_call_arg (stmt, i);
			if (TREE_CODE (arg) == SSA_NAME)
			  mark_operand_necessary (arg);
		      }

		  continue;
		}
	    }

	  FOR_EACH_SSA_TREE_OPERAND (use, stmt, iter, SSA_OP_USE)
	    mark_operand_necessary (use);

	  use = gimple_vuse (stmt);
	  if (!use)
	    continue;

	  /* No need to search for vdefs if we intrinsically keep them all.  */
	  if (keep_all_vdefs_p ())
	    continue;

	  /* If we dropped to simple mode make all immediately
	     reachable definitions necessary.  */
	  if (chain_ovfl)
	    {
	      mark_all_reaching_defs_necessary (stmt);
	      continue;
	    }

	  /* For statements that may load from memory (have a VUSE) we
	     have to mark all reaching (may-)definitions as necessary.
	     We partition this task into two cases:
	      1) explicit loads based on decls that are not aliased
	      2) implicit loads (like calls) and explicit loads not
		 based on decls that are not aliased (like indirect
		 references or loads from globals)
	     For 1) we mark all reaching may-defs as necessary, stopping
	     at dominating kills.  For 2) we want to mark all dominating
	     references necessary, but skip non-aliased ones, which we
	     handle in 1).  By keeping a global visited bitmap for references
	     we walk for 2) we avoid quadratic behavior for those.  */
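	  /* For instance (a schematic example of ours): a load from a
	     non-aliased local falls under 1) and the walk stops at a
	     dominating store to that local, while an indirect load like
	     *p_1 or an implicit load performed by a call falls under 2)
	     and keeps all dominating may-defs except stores to
	     non-aliased locals.  */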

	  if (gcall *call = dyn_cast <gcall *> (stmt))
	    {
	      tree callee = gimple_call_fndecl (call);
	      unsigned i;

	      /* Calls to functions that are merely acting as barriers
		 or that only store to memory do not make any previous
		 stores necessary.  */
	      if (callee != NULL_TREE
		  && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL
		  && (DECL_FUNCTION_CODE (callee) == BUILT_IN_MEMSET
		      || DECL_FUNCTION_CODE (callee) == BUILT_IN_MEMSET_CHK
		      || DECL_FUNCTION_CODE (callee) == BUILT_IN_MALLOC
		      || DECL_FUNCTION_CODE (callee) == BUILT_IN_ALIGNED_ALLOC
		      || DECL_FUNCTION_CODE (callee) == BUILT_IN_CALLOC
		      || DECL_FUNCTION_CODE (callee) == BUILT_IN_FREE
		      || DECL_FUNCTION_CODE (callee) == BUILT_IN_VA_END
		      || ALLOCA_FUNCTION_CODE_P (DECL_FUNCTION_CODE (callee))
		      || DECL_FUNCTION_CODE (callee) == BUILT_IN_STACK_SAVE
		      || DECL_FUNCTION_CODE (callee) == BUILT_IN_STACK_RESTORE
		      || DECL_FUNCTION_CODE (callee) == BUILT_IN_ASSUME_ALIGNED))
		continue;

	      if (callee != NULL_TREE
		  && (DECL_IS_REPLACEABLE_OPERATOR_NEW_P (callee)
		      || DECL_IS_OPERATOR_DELETE_P (callee))
		  && gimple_call_from_new_or_delete (call))
		continue;

	      /* Calls implicitly load from memory, their arguments
		 in addition may explicitly perform memory loads.  */
	      mark_all_reaching_defs_necessary (call);
	      for (i = 0; i < gimple_call_num_args (call); ++i)
		{
		  tree arg = gimple_call_arg (call, i);
		  if (TREE_CODE (arg) == SSA_NAME
		      || is_gimple_min_invariant (arg))
		    continue;
		  if (TREE_CODE (arg) == WITH_SIZE_EXPR)
		    arg = TREE_OPERAND (arg, 0);
		  if (!ref_may_be_aliased (arg))
		    mark_aliased_reaching_defs_necessary (call, arg);
		}
	    }
	  else if (gimple_assign_single_p (stmt))
	    {
	      tree rhs;
	      /* If this is a load mark things necessary.  */
	      rhs = gimple_assign_rhs1 (stmt);
	      if (TREE_CODE (rhs) != SSA_NAME
		  && !is_gimple_min_invariant (rhs)
		  && TREE_CODE (rhs) != CONSTRUCTOR)
		{
		  if (!ref_may_be_aliased (rhs))
		    mark_aliased_reaching_defs_necessary (stmt, rhs);
		  else
		    mark_all_reaching_defs_necessary (stmt);
		}
	    }
	  else if (greturn *return_stmt = dyn_cast <greturn *> (stmt))
	    {
	      tree rhs = gimple_return_retval (return_stmt);
	      /* A return statement may perform a load.  */
	      if (rhs
		  && TREE_CODE (rhs) != SSA_NAME
		  && !is_gimple_min_invariant (rhs)
		  && TREE_CODE (rhs) != CONSTRUCTOR)
		{
		  if (!ref_may_be_aliased (rhs))
		    mark_aliased_reaching_defs_necessary (stmt, rhs);
		  else
		    mark_all_reaching_defs_necessary (stmt);
		}
	    }
	  else if (gasm *asm_stmt = dyn_cast <gasm *> (stmt))
	    {
	      unsigned i;
	      mark_all_reaching_defs_necessary (stmt);
	      /* Inputs may perform loads.  */
	      for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
		{
		  tree op = TREE_VALUE (gimple_asm_input_op (asm_stmt, i));
		  if (TREE_CODE (op) != SSA_NAME
		      && !is_gimple_min_invariant (op)
		      && TREE_CODE (op) != CONSTRUCTOR
		      && !ref_may_be_aliased (op))
		    mark_aliased_reaching_defs_necessary (stmt, op);
		}
	    }
	  else if (gimple_code (stmt) == GIMPLE_TRANSACTION)
	    {
	      /* The beginning of a transaction is a memory barrier.  */
	      /* ??? If we were really cool, we'd only be a barrier
		 for the memories touched within the transaction.  */
	      mark_all_reaching_defs_necessary (stmt);
	    }
	  else
	    gcc_unreachable ();

	  /* If we over-used our alias oracle budget drop to simple
	     mode.  The cost metric allows quadratic behavior
	     (number of uses times number of may-defs queries) up to
	     a constant maximal number of queries and after that falls back to
	     super-linear complexity.  */
	  if (/* Constant but quadratic for small functions.  */
	      total_chain > 128 * 128
	      /* Linear in the number of may-defs.  */
	      && total_chain > 32 * longest_chain
	      /* Linear in the number of uses.  */
	      && total_chain > nr_walks * 32)
	    {
	      chain_ovfl = true;
	      if (visited)
		bitmap_clear (visited);
	    }
	}
    }
}

/* Remove dead PHI nodes from block BB.  */

static bool
remove_dead_phis (basic_block bb)
{
  bool something_changed = false;
  gphi *phi;
  gphi_iterator gsi;

  for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi);)
    {
      stats.total_phis++;
      phi = gsi.phi ();

      /* We do not track necessity of virtual PHI nodes.  Instead do
	 very simple dead PHI removal here.  */
      if (virtual_operand_p (gimple_phi_result (phi)))
	{
	  /* Virtual PHI nodes with one or identical arguments
	     can be removed.  */
	  if (degenerate_phi_p (phi))
	    {
	      tree vdef = gimple_phi_result (phi);
	      tree vuse = gimple_phi_arg_def (phi, 0);

	      use_operand_p use_p;
	      imm_use_iterator iter;
	      gimple *use_stmt;
	      FOR_EACH_IMM_USE_STMT (use_stmt, iter, vdef)
		FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
		  SET_USE (use_p, vuse);
	      if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (vdef)
		  && TREE_CODE (vuse) == SSA_NAME)
		SSA_NAME_OCCURS_IN_ABNORMAL_PHI (vuse) = 1;
	    }
	  else
	    gimple_set_plf (phi, STMT_NECESSARY, true);
	}

      if (!gimple_plf (phi, STMT_NECESSARY))
	{
	  something_changed = true;
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Deleting : ");
	      print_gimple_stmt (dump_file, phi, 0, TDF_SLIM);
	      fprintf (dump_file, "\n");
	    }

	  remove_phi_node (&gsi, true);
	  stats.removed_phis++;
	  continue;
	}

      gsi_next (&gsi);
    }
  return something_changed;
}


/* Remove dead statement pointed to by iterator I.  Receives the basic block BB
   containing I so that we don't have to look it up.  */

static void
remove_dead_stmt (gimple_stmt_iterator *i, basic_block bb,
		  vec<edge> &to_remove_edges)
{
  gimple *stmt = gsi_stmt (*i);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Deleting : ");
      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
      fprintf (dump_file, "\n");
    }

  stats.removed++;

  /* If we have determined that a conditional branch statement contributes
     nothing to the program, then we not only remove it, but we need to update
     the CFG.  We can choose any of the edges out of BB as long as we are sure
     to not close infinite loops.  This is done by always choosing the edge
     closer to exit in inverted_post_order_compute order.  */
  if (is_ctrl_stmt (stmt))
    {
      edge_iterator ei;
      edge e = NULL, e2;

      /* See if there is only one non-abnormal edge.  */
      if (single_succ_p (bb))
	e = single_succ_edge (bb);
      /* Otherwise choose one that is closer to a bb with a live statement
	 in it.  To be able to choose one, we compute the inverted post
	 order starting from all BBs with live statements.  */
      if (!e)
	{
	  if (!bb_postorder)
	    {
	      auto_vec<int, 20> postorder;
	      inverted_post_order_compute (&postorder,
					   &bb_contains_live_stmts);
	      bb_postorder = XNEWVEC (int, last_basic_block_for_fn (cfun));
	      for (unsigned int i = 0; i < postorder.length (); ++i)
		bb_postorder[postorder[i]] = i;
	    }
	  FOR_EACH_EDGE (e2, ei, bb->succs)
	    if (!e || e2->dest == EXIT_BLOCK_PTR_FOR_FN (cfun)
		|| bb_postorder [e->dest->index]
		   < bb_postorder [e2->dest->index])
	      e = e2;
	}
      gcc_assert (e);
      e->probability = profile_probability::always ();

      /* The edge is no longer associated with a conditional, so it does
	 not have TRUE/FALSE flags.
	 We are also safe to drop EH/ABNORMAL flags and turn them into
	 normal control flow, because we know that all the destinations
	 (including those odd edges) are equivalent for program execution.  */
      e->flags &= ~(EDGE_TRUE_VALUE | EDGE_FALSE_VALUE | EDGE_EH | EDGE_ABNORMAL);

      /* The lone outgoing edge from BB will be a fallthru edge.  */
      e->flags |= EDGE_FALLTHRU;

      /* Remove the remaining outgoing edges.  */
      FOR_EACH_EDGE (e2, ei, bb->succs)
	if (e != e2)
	  {
	    /* If we made a BB unconditionally exit a loop or removed
	       an entry into an irreducible region, then this transform
	       alters the set of BBs in the loop.  Schedule a fixup.  */
	    if (loop_exit_edge_p (bb->loop_father, e)
		|| (e2->dest->flags & BB_IRREDUCIBLE_LOOP))
	      loops_state_set (LOOPS_NEED_FIXUP);
	    to_remove_edges.safe_push (e2);
	  }
    }

  /* If this is a store into a variable that is being optimized away,
     add a debug bind stmt if possible.  */
  if (MAY_HAVE_DEBUG_BIND_STMTS
      && gimple_assign_single_p (stmt)
      && is_gimple_val (gimple_assign_rhs1 (stmt)))
    {
      tree lhs = gimple_assign_lhs (stmt);
      if ((VAR_P (lhs) || TREE_CODE (lhs) == PARM_DECL)
	  && !DECL_IGNORED_P (lhs)
	  && is_gimple_reg_type (TREE_TYPE (lhs))
	  && !is_global_var (lhs)
	  && !DECL_HAS_VALUE_EXPR_P (lhs))
	{
	  tree rhs = gimple_assign_rhs1 (stmt);
	  gdebug *note
	    = gimple_build_debug_bind (lhs, unshare_expr (rhs), stmt);
	  gsi_insert_after (i, note, GSI_SAME_STMT);
	}
    }

  unlink_stmt_vdef (stmt);
  gsi_remove (i, true);
  release_defs (stmt);
}

/* Helper for maybe_optimize_arith_overflow.  Find in *TP any use of
   DATA (an SSA_NAME) other than through a REALPART_EXPR referencing
   it.  */

static tree
find_non_realpart_uses (tree *tp, int *walk_subtrees, void *data)
{
  if (TYPE_P (*tp) || TREE_CODE (*tp) == REALPART_EXPR)
    *walk_subtrees = 0;
  if (*tp == (tree) data)
    return *tp;
  return NULL_TREE;
}

/* If the IMAGPART_EXPR of the {ADD,SUB,MUL}_OVERFLOW result is never used,
   but REALPART_EXPR is, optimize the {ADD,SUB,MUL}_OVERFLOW internal calls
   into plain unsigned {PLUS,MINUS,MULT}_EXPR, and if needed reset debug
   uses.  */
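/* Schematically (our illustration of the transform): when in

     _1 = .ADD_OVERFLOW (a_2, b_3);
     sum_4 = REALPART_EXPR <_1>;

   the IMAGPART_EXPR overflow flag of _1 is never used, the call is
   turned into a plain addition carried out in the corresponding
   unsigned type, with the dead overflow flag replaced by zero.  */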

static void
maybe_optimize_arith_overflow (gimple_stmt_iterator *gsi,
			       enum tree_code subcode)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree lhs = gimple_call_lhs (stmt);

  if (lhs == NULL || TREE_CODE (lhs) != SSA_NAME)
    return;

  imm_use_iterator imm_iter;
  use_operand_p use_p;
  bool has_debug_uses = false;
  bool has_realpart_uses = false;
  bool has_other_uses = false;
  FOR_EACH_IMM_USE_FAST (use_p, imm_iter, lhs)
    {
      gimple *use_stmt = USE_STMT (use_p);
      if (is_gimple_debug (use_stmt))
	has_debug_uses = true;
      else if (is_gimple_assign (use_stmt)
	       && gimple_assign_rhs_code (use_stmt) == REALPART_EXPR
	       && TREE_OPERAND (gimple_assign_rhs1 (use_stmt), 0) == lhs)
	has_realpart_uses = true;
      else
	{
	  has_other_uses = true;
	  break;
	}
    }

  if (!has_realpart_uses || has_other_uses)
    return;

  tree arg0 = gimple_call_arg (stmt, 0);
  tree arg1 = gimple_call_arg (stmt, 1);
  location_t loc = gimple_location (stmt);
  tree type = TREE_TYPE (TREE_TYPE (lhs));
  tree utype = type;
  if (!TYPE_UNSIGNED (type))
    utype = build_nonstandard_integer_type (TYPE_PRECISION (type), 1);
  tree result = fold_build2_loc (loc, subcode, utype,
				 fold_convert_loc (loc, utype, arg0),
				 fold_convert_loc (loc, utype, arg1));
  result = fold_convert_loc (loc, type, result);

  if (has_debug_uses)
    {
      gimple *use_stmt;
      FOR_EACH_IMM_USE_STMT (use_stmt, imm_iter, lhs)
	{
	  if (!gimple_debug_bind_p (use_stmt))
	    continue;
	  tree v = gimple_debug_bind_get_value (use_stmt);
	  if (walk_tree (&v, find_non_realpart_uses, lhs, NULL))
	    {
	      gimple_debug_bind_reset_value (use_stmt);
	      update_stmt (use_stmt);
	    }
	}
    }

  if (TREE_CODE (result) == INTEGER_CST && TREE_OVERFLOW (result))
    result = drop_tree_overflow (result);
  tree overflow = build_zero_cst (type);
  tree ctype = build_complex_type (type);
  if (TREE_CODE (result) == INTEGER_CST)
    result = build_complex (ctype, result, overflow);
  else
    result = build2_loc (gimple_location (stmt), COMPLEX_EXPR,
			 ctype, result, overflow);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Transforming call: ");
      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
      fprintf (dump_file, "because the overflow result is never used into: ");
      print_generic_stmt (dump_file, result, TDF_SLIM);
      fprintf (dump_file, "\n");
    }

  gimplify_and_update_call_from_tree (gsi, result);
}

/* Returns whether the control parents of BB are preserved.  */

static bool
control_parents_preserved_p (basic_block bb)
{
  /* If we marked the control parents from BB they are preserved.  */
  if (bitmap_bit_p (visited_control_parents, bb->index))
    return true;

  /* But they can also end up being marked from elsewhere.  */
  bitmap_iterator bi;
  unsigned edge_number;
  EXECUTE_IF_SET_IN_BITMAP (cd->get_edges_dependent_on (bb->index),
			    0, edge_number, bi)
    {
      basic_block cd_bb = cd->get_edge_src (edge_number);
      if (cd_bb != bb
	  && !bitmap_bit_p (last_stmt_necessary, cd_bb->index))
	return false;
    }
  /* And cache the result.  */
  bitmap_set_bit (visited_control_parents, bb->index);
  return true;
}

/* Eliminate unnecessary statements.  Any instruction not marked as necessary
   contributes nothing to the program, and can be deleted.  */

static bool
eliminate_unnecessary_stmts (bool aggressive)
{
  bool something_changed = false;
  basic_block bb;
  gimple_stmt_iterator gsi, psi;
  gimple *stmt;
  tree call;
  auto_vec<edge> to_remove_edges;

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "\nEliminating unnecessary statements:\n");

  clear_special_calls ();

  /* Walking basic blocks and statements in reverse order avoids
     releasing SSA names before any other DEFs that refer to them are
     released.  This helps avoid loss of debug information, as we get
     a chance to propagate all RHSs of removed SSAs into debug uses,
     rather than only the latest ones.  E.g., consider:

     x_3 = y_1 + z_2;
     a_5 = x_3 - b_4;
     # DEBUG a => a_5

     If we were to release x_3 before a_5, when we reached a_5 and
     tried to substitute it into the debug stmt, we'd see x_3 there,
     but x_3's DEF, type, etc would have already been disconnected.
     By going backwards, the debug stmt first changes to:

     # DEBUG a => x_3 - b_4

     and then to:

     # DEBUG a => y_1 + z_2 - b_4

     as desired.  */
  gcc_assert (dom_info_available_p (CDI_DOMINATORS));
  auto_vec<basic_block> h;
  h = get_all_dominated_blocks (CDI_DOMINATORS,
				single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)));

  while (h.length ())
    {
      bb = h.pop ();

      /* Remove dead statements.  */
      auto_bitmap debug_seen;
      for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi); gsi = psi)
	{
	  stmt = gsi_stmt (gsi);

	  psi = gsi;
	  gsi_prev (&psi);

	  stats.total++;

	  /* We can mark a call to free as not necessary if the
	     defining statement of its argument is not necessary
	     (and thus is getting removed).  */
	  if (gimple_plf (stmt, STMT_NECESSARY)
	      && (gimple_call_builtin_p (stmt, BUILT_IN_FREE)
		  || (is_gimple_call (stmt)
		      && gimple_call_from_new_or_delete (as_a <gcall *> (stmt))
		      && gimple_call_operator_delete_p (as_a <gcall *> (stmt)))))
	    {
	      tree ptr = gimple_call_arg (stmt, 0);
	      if (TREE_CODE (ptr) == SSA_NAME)
		{
		  gimple *def_stmt = SSA_NAME_DEF_STMT (ptr);
		  if (!gimple_nop_p (def_stmt)
		      && !gimple_plf (def_stmt, STMT_NECESSARY))
		    gimple_set_plf (stmt, STMT_NECESSARY, false);
		}
	    }

	  /* If GSI is not necessary then remove it.  */
	  if (!gimple_plf (stmt, STMT_NECESSARY))
	    {
	      /* Keep the clobbers that we are able to keep live.  */
	      if (gimple_clobber_p (stmt))
		{
		  ssa_op_iter iter;
		  use_operand_p use_p;
		  bool dead = false;
		  FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
		    {
		      tree name = USE_FROM_PTR (use_p);
		      if (!SSA_NAME_IS_DEFAULT_DEF (name)
			  && !bitmap_bit_p (processed, SSA_NAME_VERSION (name)))
			{
			  dead = true;
			  break;
			}
		    }
		  if (!dead
		      /* When doing CD-DCE we have to ensure all controls
			 of the stmt are still live.  */
		      && (!aggressive || control_parents_preserved_p (bb)))
		    {
		      bitmap_clear (debug_seen);
		      continue;
		    }
		}
	      if (!is_gimple_debug (stmt))
		something_changed = true;
	      remove_dead_stmt (&gsi, bb, to_remove_edges);
	      continue;
	    }
	  else if (is_gimple_call (stmt))
	    {
	      tree name = gimple_call_lhs (stmt);

	      notice_special_calls (as_a <gcall *> (stmt));

	      /* When the LHS of var = call (); is dead, simplify it into
		 call (); saving one operand.  */
	      if (name
		  && TREE_CODE (name) == SSA_NAME
		  && !bitmap_bit_p (processed, SSA_NAME_VERSION (name))
		  /* Avoid doing so for allocation calls which we
		     did not mark as necessary, it will confuse the
		     special logic we apply to malloc/free pair removal.  */
		  && (!(call = gimple_call_fndecl (stmt))
		      || ((DECL_BUILT_IN_CLASS (call) != BUILT_IN_NORMAL
			   || (DECL_FUNCTION_CODE (call) != BUILT_IN_ALIGNED_ALLOC
			       && DECL_FUNCTION_CODE (call) != BUILT_IN_MALLOC
			       && DECL_FUNCTION_CODE (call) != BUILT_IN_CALLOC
			       && !ALLOCA_FUNCTION_CODE_P
			       (DECL_FUNCTION_CODE (call))))
			  && !DECL_IS_REPLACEABLE_OPERATOR_NEW_P (call))))
		{
		  something_changed = true;
		  if (dump_file && (dump_flags & TDF_DETAILS))
		    {
		      fprintf (dump_file, "Deleting LHS of call: ");
		      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
		      fprintf (dump_file, "\n");
		    }

		  gimple_call_set_lhs (stmt, NULL_TREE);
		  maybe_clean_or_replace_eh_stmt (stmt, stmt);
		  update_stmt (stmt);
		  release_ssa_name (name);

		  /* GOMP_SIMD_LANE (unless it has three arguments) or
		     ASAN_POISON without a lhs is not needed.  */
		  if (gimple_call_internal_p (stmt))
		    switch (gimple_call_internal_fn (stmt))
		      {
		      case IFN_GOMP_SIMD_LANE:
			if (gimple_call_num_args (stmt) >= 3
			    && !integer_nonzerop (gimple_call_arg (stmt, 2)))
			  break;
			/* FALLTHRU */
		      case IFN_ASAN_POISON:
			remove_dead_stmt (&gsi, bb, to_remove_edges);
			break;
		      default:
			break;
		      }
		}
	      else if (gimple_call_internal_p (stmt))
		switch (gimple_call_internal_fn (stmt))
		  {
		  case IFN_ADD_OVERFLOW:
		    maybe_optimize_arith_overflow (&gsi, PLUS_EXPR);
		    break;
		  case IFN_SUB_OVERFLOW:
		    maybe_optimize_arith_overflow (&gsi, MINUS_EXPR);
		    break;
		  case IFN_MUL_OVERFLOW:
		    maybe_optimize_arith_overflow (&gsi, MULT_EXPR);
		    break;
		  default:
		    break;
		  }
	    }
	  else if (gimple_debug_bind_p (stmt))
	    {
	      /* We are only keeping the last debug-bind of a
		 non-DEBUG_EXPR_DECL variable in a series of
		 debug-bind stmts.  */
	      tree var = gimple_debug_bind_get_var (stmt);
	      if (TREE_CODE (var) != DEBUG_EXPR_DECL
		  && !bitmap_set_bit (debug_seen, DECL_UID (var)))
		remove_dead_stmt (&gsi, bb, to_remove_edges);
	      continue;
	    }
	  bitmap_clear (debug_seen);
	}

      /* Remove dead PHI nodes.  */
      something_changed |= remove_dead_phis (bb);
    }


  /* Since we don't track liveness of virtual PHI nodes, it is possible that we
     rendered some PHI nodes unreachable while they are still in use.
     Mark them for renaming.  */
  if (!to_remove_edges.is_empty ())
    {
      basic_block prev_bb;

      /* Remove edges.  We've delayed this to not get bogus debug stmts
	 during PHI node removal.  */
      for (unsigned i = 0; i < to_remove_edges.length (); ++i)
	remove_edge (to_remove_edges[i]);
      cfg_altered = true;

      find_unreachable_blocks ();

      /* Delete all unreachable basic blocks in reverse dominator order.  */
      for (bb = EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb;
	   bb != ENTRY_BLOCK_PTR_FOR_FN (cfun); bb = prev_bb)
	{
	  prev_bb = bb->prev_bb;

	  if (!bitmap_bit_p (bb_contains_live_stmts, bb->index)
	      || !(bb->flags & BB_REACHABLE))
	    {
	      for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
		   gsi_next (&gsi))
		if (virtual_operand_p (gimple_phi_result (gsi.phi ())))
		  {
		    bool found = false;
		    imm_use_iterator iter;

		    FOR_EACH_IMM_USE_STMT (stmt, iter,
					   gimple_phi_result (gsi.phi ()))
		      {
			if (!(gimple_bb (stmt)->flags & BB_REACHABLE))
			  continue;
			if (gimple_code (stmt) == GIMPLE_PHI
			    || gimple_plf (stmt, STMT_NECESSARY))
			  {
			    found = true;
			    break;
			  }
		      }
		    if (found)
		      mark_virtual_phi_result_for_renaming (gsi.phi ());
		  }

	      if (!(bb->flags & BB_REACHABLE))
		{
		  /* Speed up the removal of blocks that don't
		     dominate others.  Walking backwards, this should
		     be the common case.  ??? Do we need to recompute
		     dominators because of cfg_altered?  */
		  if (!first_dom_son (CDI_DOMINATORS, bb))
		    delete_basic_block (bb);
		  else
		    {
		      h = get_all_dominated_blocks (CDI_DOMINATORS, bb);

		      while (h.length ())
			{
			  bb = h.pop ();
			  prev_bb = bb->prev_bb;
			  /* Rearrangements to the CFG may have failed
			     to update the dominators tree, so that
			     formerly-dominated blocks are now
			     otherwise reachable.  */
			  if (!!(bb->flags & BB_REACHABLE))
			    continue;
			  delete_basic_block (bb);
			}

		      h.release ();
		    }
		}
	    }
	}
    }

  if (bb_postorder)
    free (bb_postorder);
  bb_postorder = NULL;

  return something_changed;
}
1584 
1585 
1586 /* Print out removed statement statistics.  */
1587 
1588 static void
1589 print_stats (void)
1590 {
1591   float percg;
1592   /* Avoid division by zero when no statements were processed.  */
1593   percg = stats.total ? ((float) stats.removed / (float) stats.total) * 100 : 0;
1594   fprintf (dump_file, "Removed %d of %d statements (%d%%)\n",
1595 	   stats.removed, stats.total, (int) percg);
1596 
1597   if (stats.total_phis == 0)
1598     percg = 0;
1599   else
1600     percg = ((float) stats.removed_phis / (float) stats.total_phis) * 100;
1601 
1602   fprintf (dump_file, "Removed %d of %d PHI nodes (%d%%)\n",
1603 	   stats.removed_phis, stats.total_phis, (int) percg);
1604 }
1605 
1606 /* Initialization for this pass.  Set up the used data structures.  */
1607 
1608 static void
1609 tree_dce_init (bool aggressive)
1610 {
1611   memset ((void *) &stats, 0, sizeof (stats));
1612 
1613   if (aggressive)
1614     {
1615       last_stmt_necessary = sbitmap_alloc (last_basic_block_for_fn (cfun));
1616       bitmap_clear (last_stmt_necessary);
1617       bb_contains_live_stmts = sbitmap_alloc (last_basic_block_for_fn (cfun));
1618       bitmap_clear (bb_contains_live_stmts);
1619     }
1620 
1621   processed = sbitmap_alloc (num_ssa_names + 1);
1622   bitmap_clear (processed);
1623 
1624   worklist.create (64);
1625   cfg_altered = false;
1626 }
1627 
1628 /* Cleanup after this pass.  */
1629 
1630 static void
1631 tree_dce_done (bool aggressive)
1632 {
1633   if (aggressive)
1634     {
1635       delete cd;
1636       sbitmap_free (visited_control_parents);
1637       sbitmap_free (last_stmt_necessary);
1638       sbitmap_free (bb_contains_live_stmts);
1639       bb_contains_live_stmts = NULL;
1640     }
1641 
1642   sbitmap_free (processed);
1643 
1644   worklist.release ();
1645 }
1646 
1647 /* Sort PHI argument values for make_forwarders_with_degenerate_phis.  */
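/* Entries are std::pair<edge, hashval_t>; sort by the argument hash
   first and break ties by the edge dest_idx so that the order is
   deterministic for equal hashes.  */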
1648 
1649 static int
1650 sort_phi_args (const void *a_, const void *b_)
1651 {
1652   auto *a = (const std::pair<edge, hashval_t> *) a_;
1653   auto *b = (const std::pair<edge, hashval_t> *) b_;
1654   hashval_t ha = a->second;
1655   hashval_t hb = b->second;
1656   if (ha < hb)
1657     return -1;
1658   else if (ha > hb)
1659     return 1;
1660   else if (a->first->dest_idx < b->first->dest_idx)
1661     return -1;
1662   else if (a->first->dest_idx > b->first->dest_idx)
1663     return 1;
1664   else
1665     return 0;
1666 }
1667 
1668 /* Look for non-virtual PHIs and make a forwarder block when all PHIs
1669    have the same argument on a set of edges.  This avoids considering
1670    the control dependences of individual edges for equal values and
1671    instead considers only those of the common set.  */
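/* An illustrative sketch (hypothetical GIMPLE, not from a testcase):
   for a block with incoming edges E1, E2 and E3 and

       x_1 = PHI <a_2(E1), a_2(E2), b_3(E3)>

   the set {E1, E2} is split off through a new forwarder block F,
   turning the PHI into

       x_1 = PHI <a_2(F), b_3(E3)>

   so a control dependence for the value a_2 is computed once for the
   common set instead of once per edge.  */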
1672 
1673 static unsigned
1674 make_forwarders_with_degenerate_phis (function *fn)
1675 {
1676   unsigned todo = 0;
1677 
1678   basic_block bb;
1679   FOR_EACH_BB_FN (bb, fn)
1680     {
1681       /* Only PHIs with three or more arguments have opportunities.  */
1682       if (EDGE_COUNT (bb->preds) < 3)
1683 	continue;
1684       /* Do not touch loop headers or blocks with abnormal predecessors.
1685 	 ???  This is to avoid creating valid loops here, see PR103458.
1686 	 We might want to improve things to either explicitly add those
1687 	 loops or at least consider blocks with no backedges.  */
1688       if (bb->loop_father->header == bb
1689 	  || bb_has_abnormal_pred (bb))
1690 	continue;
1691 
1692       /* Take one PHI node as a template to look for identical
1693 	 arguments.  Build a vector of candidates forming sets
1694 	 of argument edges with equal values.  Note that optimality
1695 	 depends on the particular choice of the template PHI,
1696 	 since equal arguments are unordered, which leaves other
1697 	 PHIs with more than one set of equal arguments within
1698 	 this argument range unsorted.  We'd have to break ties
1699 	 by looking at other PHI nodes.  */
1700       gphi_iterator gsi = gsi_start_nonvirtual_phis (bb);
1701       if (gsi_end_p (gsi))
1702 	continue;
1703       gphi *phi = gsi.phi ();
1704       auto_vec<std::pair<edge, hashval_t>, 8> args;
1705       bool need_resort = false;
1706       for (unsigned i = 0; i < gimple_phi_num_args (phi); ++i)
1707 	{
1708 	  edge e = gimple_phi_arg_edge (phi, i);
1709 	  /* Skip abnormal edges since we cannot redirect them.  */
1710 	  if (e->flags & EDGE_ABNORMAL)
1711 	    continue;
1712 	  /* Skip loop exit edges when we are in loop-closed SSA form
1713 	     since the forwarder we'd create does not have a PHI node.  */
1714 	  if (loops_state_satisfies_p (LOOP_CLOSED_SSA)
1715 	      && loop_exit_edge_p (e->src->loop_father, e))
1716 	    continue;
1717 
1718 	  tree arg = gimple_phi_arg_def (phi, i);
1719 	  if (!CONSTANT_CLASS_P (arg) && TREE_CODE (arg) != SSA_NAME)
1720 	    need_resort = true;
1721 	  args.safe_push (std::make_pair (e, iterative_hash_expr (arg, 0)));
1722 	}
1723       if (args.length () < 2)
1724 	continue;
1725       args.qsort (sort_phi_args);
1726       /* The above sorting can differ between -g and -g0, as e.g. decls
1727 	 can have different uids (-g could have bigger gaps between them).
1728 	 So, only use it to determine which args are equal, then change
1729 	 the second member from the hash value to the smallest dest_idx
1730 	 of the edges with an equal argument, and sort again.  If all the
1731 	 phi arguments are constants or SSA_NAMEs, there is no need for
1732 	 the second sort; the hash values are stable in that case.  */
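      /* E.g. (an illustrative sketch): four edges with dest_idx 0..3
	 and arguments a, b, a, b sort by hash to (0,h_a) (2,h_a)
	 (1,h_b) (3,h_b); the rewrite below turns the second members
	 into the group leader's dest_idx, giving (0,0) (2,0) (1,1)
	 (3,1), so the optional resort groups equal arguments
	 independently of the raw hash values.  */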
1733       hashval_t hash = args[0].second;
1734       args[0].second = args[0].first->dest_idx;
1735       bool any_equal = false;
1736       for (unsigned i = 1; i < args.length (); ++i)
1737 	if (hash == args[i].second
1738 	    && operand_equal_p (PHI_ARG_DEF_FROM_EDGE (phi, args[i - 1].first),
1739 				PHI_ARG_DEF_FROM_EDGE (phi, args[i].first)))
1740 	  {
1741 	    args[i].second = args[i - 1].second;
1742 	    any_equal = true;
1743 	  }
1744 	else
1745 	  {
1746 	    hash = args[i].second;
1747 	    args[i].second = args[i].first->dest_idx;
1748 	  }
1749       if (!any_equal)
1750 	continue;
1751       if (need_resort)
1752 	args.qsort (sort_phi_args);
1753 
1754       /* From the candidates vector now verify true candidates for
1755 	 forwarders and create them.  */
1756       gphi *vphi = get_virtual_phi (bb);
1757       unsigned start = 0;
1758       while (start < args.length () - 1)
1759 	{
1760 	  unsigned i;
1761 	  for (i = start + 1; i < args.length (); ++i)
1762 	    if (args[start].second != args[i].second)
1763 	      break;
1764 	  /* args[start]..args[i-1] are equal.  */
1765 	  if (start != i - 1)
1766 	    {
1767 	      /* Check all PHI nodes for argument equality.  */
1768 	      bool equal = true;
1769 	      gphi_iterator gsi2 = gsi;
1770 	      gsi_next (&gsi2);
1771 	      for (; !gsi_end_p (gsi2); gsi_next (&gsi2))
1772 		{
1773 		  gphi *phi2 = gsi2.phi ();
1774 		  if (virtual_operand_p (gimple_phi_result (phi2)))
1775 		    continue;
1776 		  tree start_arg
1777 		    = PHI_ARG_DEF_FROM_EDGE (phi2, args[start].first);
1778 		  for (unsigned j = start + 1; j < i; ++j)
1779 		    {
1780 		      if (!operand_equal_p (start_arg,
1781 					    PHI_ARG_DEF_FROM_EDGE
1782 					      (phi2, args[j].first)))
1783 			{
1784 			  /* Another PHI might have a shorter set of
1785 			     equivalent args.  Go for that.  */
1786 			  i = j;
1787 			  if (j == start + 1)
1788 			    equal = false;
1789 			  break;
1790 			}
1791 		    }
1792 		  if (!equal)
1793 		    break;
1794 		}
1795 	      if (equal)
1796 		{
1797 		  /* If we are asked to forward all edges, the block
1798 		     has only degenerate PHIs.  Do nothing in that case.  */
1799 		  if (start == 0
1800 		      && i == args.length ()
1801 		      && args.length () == gimple_phi_num_args (phi))
1802 		    break;
1803 		  /* Instead of using make_forwarder_block we are
1804 		     rolling our own variant, knowing that the forwarder
1805 		     does not need PHI nodes apart from possibly
1806 		     a virtual one.  */
1807 		  auto_vec<tree, 8> vphi_args;
1808 		  if (vphi)
1809 		    {
1810 		      vphi_args.reserve_exact (i - start);
1811 		      for (unsigned j = start; j < i; ++j)
1812 			vphi_args.quick_push
1813 			  (PHI_ARG_DEF_FROM_EDGE (vphi, args[j].first));
1814 		    }
1815 		  free_dominance_info (fn, CDI_DOMINATORS);
1816 		  basic_block forwarder = split_edge (args[start].first);
1817 		  for (unsigned j = start + 1; j < i; ++j)
1818 		    {
1819 		      edge e = args[j].first;
1820 		      redirect_edge_and_branch_force (e, forwarder);
1821 		      redirect_edge_var_map_clear (e);
1822 		    }
1823 		  if (vphi)
1824 		    {
1825 		      tree def = copy_ssa_name (vphi_args[0]);
1826 		      gphi *vphi_copy = create_phi_node (def, forwarder);
1827 		      for (unsigned j = start; j < i; ++j)
1828 			add_phi_arg (vphi_copy, vphi_args[j - start],
1829 				     args[j].first, UNKNOWN_LOCATION);
1830 		      SET_PHI_ARG_DEF
1831 			(vphi, single_succ_edge (forwarder)->dest_idx, def);
1832 		    }
1833 		  todo |= TODO_cleanup_cfg;
1834 		}
1835 	    }
1836 	  /* Continue searching for more opportunities.  */
1837 	  start = i;
1838 	}
1839     }
1840   return todo;
1841 }
1842 
1843 /* Main routine to eliminate dead code.
1844 
1845    AGGRESSIVE controls the aggressiveness of the algorithm.
1846    In conservative mode, we ignore control dependence and simply declare
1847    all but the most trivially dead branches necessary.  This mode is fast.
1848    In aggressive mode, control dependences are taken into account, which
1849    results in more dead code elimination, but at the cost of some time.
1850 
1851    FIXME: Aggressive mode before PRE doesn't work currently because
1852 	  the dominance info is not invalidated after DCE1.  This is
1853 	  not an issue right now because we only run aggressive DCE
1854 	  as the last tree SSA pass, but keep this in mind when you
1855 	  start experimenting with pass ordering.  */
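/* An illustrative sketch (hypothetical input, not a regression test):
   in

       if (flag_)    // flag_ has no other use
	 x_ = 1;     // x_ is never read

   both modes delete the dead store to x_; only the aggressive mode
   additionally uses control dependence to prove the GIMPLE_COND itself
   unnecessary and removes the branch.  */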
1856 
1857 static unsigned int
1858 perform_tree_ssa_dce (bool aggressive)
1859 {
1860   bool something_changed = false;
1861   unsigned todo = 0;
1862 
1863   /* Preheaders are needed for SCEV to work.
1864      Simple latches and recorded exits improve the chances that loops
1865      will be proved to be finite in testcases such as loop-15.c and loop-24.c.  */
1866   bool in_loop_pipeline = scev_initialized_p ();
1867   if (aggressive && ! in_loop_pipeline)
1868     {
1869       scev_initialize ();
1870       loop_optimizer_init (LOOPS_NORMAL
1871 			   | LOOPS_HAVE_RECORDED_EXITS);
1872     }
1873 
1874   if (aggressive)
1875     todo |= make_forwarders_with_degenerate_phis (cfun);
1876 
1877   calculate_dominance_info (CDI_DOMINATORS);
1878 
1879   tree_dce_init (aggressive);
1880 
1881   if (aggressive)
1882     {
1883       /* Compute control dependence.  */
1884       calculate_dominance_info (CDI_POST_DOMINATORS);
1885       cd = new control_dependences ();
1886 
1887       visited_control_parents =
1888 	sbitmap_alloc (last_basic_block_for_fn (cfun));
1889       bitmap_clear (visited_control_parents);
1890 
1891       mark_dfs_back_edges ();
1892     }
1893 
1894   find_obviously_necessary_stmts (aggressive);
1895 
1896   if (aggressive && ! in_loop_pipeline)
1897     {
1898       loop_optimizer_finalize ();
1899       scev_finalize ();
1900     }
1901 
1902   longest_chain = 0;
1903   total_chain = 0;
1904   nr_walks = 0;
1905   chain_ovfl = false;
1906   visited = BITMAP_ALLOC (NULL);
1907   propagate_necessity (aggressive);
1908   BITMAP_FREE (visited);
1909 
1910   something_changed |= eliminate_unnecessary_stmts (aggressive);
1911   something_changed |= cfg_altered;
1912 
1913   /* We do not update postdominators, so free them unconditionally.  */
1914   free_dominance_info (CDI_POST_DOMINATORS);
1915 
1916   /* If we removed paths in the CFG, then we need to update
1917      dominators as well.  I haven't investigated the possibility
1918      of incrementally updating dominators.  */
1919   if (cfg_altered)
1920     free_dominance_info (CDI_DOMINATORS);
1921 
1922   statistics_counter_event (cfun, "Statements deleted", stats.removed);
1923   statistics_counter_event (cfun, "PHI nodes deleted", stats.removed_phis);
1924 
1925   /* Debugging dumps.  */
1926   if (dump_file && (dump_flags & (TDF_STATS|TDF_DETAILS)))
1927     print_stats ();
1928 
1929   tree_dce_done (aggressive);
1930 
1931   if (something_changed)
1932     {
1933       free_numbers_of_iterations_estimates (cfun);
1934       if (in_loop_pipeline)
1935 	scev_reset ();
1936       todo |= TODO_update_ssa | TODO_cleanup_cfg;
1937     }
1938   return todo;
1939 }
1940 
1941 /* Pass entry points.  */
1942 static unsigned int
1943 tree_ssa_dce (void)
1944 {
1945   return perform_tree_ssa_dce (/*aggressive=*/false);
1946 }
1947 
1948 static unsigned int
1949 tree_ssa_cd_dce (void)
1950 {
1951   return perform_tree_ssa_dce (/*aggressive=*/optimize >= 2);
1952 }
1953 
1954 namespace {
1955 
1956 const pass_data pass_data_dce =
1957 {
1958   GIMPLE_PASS, /* type */
1959   "dce", /* name */
1960   OPTGROUP_NONE, /* optinfo_flags */
1961   TV_TREE_DCE, /* tv_id */
1962   ( PROP_cfg | PROP_ssa ), /* properties_required */
1963   0, /* properties_provided */
1964   0, /* properties_destroyed */
1965   0, /* todo_flags_start */
1966   0, /* todo_flags_finish */
1967 };
1968 
1969 class pass_dce : public gimple_opt_pass
1970 {
1971 public:
1972   pass_dce (gcc::context *ctxt)
1973     : gimple_opt_pass (pass_data_dce, ctxt)
1974   {}
1975 
1976   /* opt_pass methods: */
1977   opt_pass * clone () { return new pass_dce (m_ctxt); }
1978   virtual bool gate (function *) { return flag_tree_dce != 0; }
1979   virtual unsigned int execute (function *) { return tree_ssa_dce (); }
1980 
1981 }; // class pass_dce
1982 
1983 } // anon namespace
1984 
1985 gimple_opt_pass *
1986 make_pass_dce (gcc::context *ctxt)
1987 {
1988   return new pass_dce (ctxt);
1989 }
1990 
1991 namespace {
1992 
1993 const pass_data pass_data_cd_dce =
1994 {
1995   GIMPLE_PASS, /* type */
1996   "cddce", /* name */
1997   OPTGROUP_NONE, /* optinfo_flags */
1998   TV_TREE_CD_DCE, /* tv_id */
1999   ( PROP_cfg | PROP_ssa ), /* properties_required */
2000   0, /* properties_provided */
2001   0, /* properties_destroyed */
2002   0, /* todo_flags_start */
2003   0, /* todo_flags_finish */
2004 };
2005 
2006 class pass_cd_dce : public gimple_opt_pass
2007 {
2008 public:
2009   pass_cd_dce (gcc::context *ctxt)
2010     : gimple_opt_pass (pass_data_cd_dce, ctxt), update_address_taken_p (false)
2011   {}
2012 
2013   /* opt_pass methods: */
2014   opt_pass * clone () { return new pass_cd_dce (m_ctxt); }
2015   void set_pass_param (unsigned n, bool param)
2016     {
2017       gcc_assert (n == 0);
2018       update_address_taken_p = param;
2019     }
2020   virtual bool gate (function *) { return flag_tree_dce != 0; }
2021   virtual unsigned int execute (function *)
2022     {
2023       return (tree_ssa_cd_dce ()
2024 	      | (update_address_taken_p ? TODO_update_address_taken : 0));
2025     }
2026 
2027 private:
2028   bool update_address_taken_p;
2029 }; // class pass_cd_dce
2030 
2031 } // anon namespace
2032 
2033 gimple_opt_pass *
2034 make_pass_cd_dce (gcc::context *ctxt)
2035 {
2036   return new pass_cd_dce (ctxt);
2037 }
2038 
2039 
2040 /* A cheap DCE interface.  WORKLIST is a list of possibly dead stmts and
2041    is consumed by this function.  The function has linear complexity in
2042    the number of dead stmts, with a constant factor proportional to the
2043    average number of SSA use operands.  */
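/* A minimal usage sketch (hypothetical caller; 'dead_def' stands for
   some possibly-dead SSA definition).  The bits are consumed, but the
   bitmap itself remains owned by the caller:

       bitmap wl = BITMAP_ALLOC (NULL);
       bitmap_set_bit (wl, SSA_NAME_VERSION (dead_def));
       simple_dce_from_worklist (wl);
       BITMAP_FREE (wl);  */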
2044 
2045 void
2046 simple_dce_from_worklist (bitmap worklist)
2047 {
2048   while (! bitmap_empty_p (worklist))
2049     {
2050       /* Pop item.  */
2051       unsigned i = bitmap_first_set_bit (worklist);
2052       bitmap_clear_bit (worklist, i);
2053 
2054       tree def = ssa_name (i);
2055       /* Removed by somebody else or still in use.  */
2056       if (! def || ! has_zero_uses (def))
2057 	continue;
2058 
2059       gimple *t = SSA_NAME_DEF_STMT (def);
2060       if (gimple_has_side_effects (t))
2061 	continue;
2062 
2063       /* The defining statement needs to define only this name.
2064 	 ASM is the only statement that can define more than one
2065 	 name. */
2066       if (is_a<gasm *>(t)
2067 	  && !single_ssa_def_operand (t, SSA_OP_ALL_DEFS))
2068 	continue;
2069 
2070       /* Don't remove statements that are needed for non-call
2071 	 eh to work.  */
2072       if (stmt_unremovable_because_of_non_call_eh_p (cfun, t))
2073 	continue;
2074 
2075       /* Add uses to the worklist.  */
2076       ssa_op_iter iter;
2077       use_operand_p use_p;
2078       FOR_EACH_PHI_OR_STMT_USE (use_p, t, iter, SSA_OP_USE)
2079 	{
2080 	  tree use = USE_FROM_PTR (use_p);
2081 	  if (TREE_CODE (use) == SSA_NAME
2082 	      && ! SSA_NAME_IS_DEFAULT_DEF (use))
2083 	    bitmap_set_bit (worklist, SSA_NAME_VERSION (use));
2084 	}
2085 
2086       /* Remove stmt.  */
2087       if (dump_file && (dump_flags & TDF_DETAILS))
2088 	{
2089 	  fprintf (dump_file, "Removing dead stmt:");
2090 	  print_gimple_stmt (dump_file, t, 0);
2091 	}
2092       gimple_stmt_iterator gsi = gsi_for_stmt (t);
2093       if (gimple_code (t) == GIMPLE_PHI)
2094 	remove_phi_node (&gsi, true);
2095       else
2096 	{
2097 	  unlink_stmt_vdef (t);
2098 	  gsi_remove (&gsi, true);
2099 	  release_defs (t);
2100 	}
2101     }
2102 }
2103