xref: /netbsd-src/external/gpl3/gcc.old/dist/gcc/dce.c (revision d909946ca08dceb44d7d0f22ec9488679695d976)
1 /* RTL dead code elimination.
2    Copyright (C) 2005-2013 Free Software Foundation, Inc.
3 
4 This file is part of GCC.
5 
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10 
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
14 for more details.
15 
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3.  If not see
18 <http://www.gnu.org/licenses/>.  */
19 
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "hashtab.h"
24 #include "tm.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "regs.h"
28 #include "hard-reg-set.h"
29 #include "flags.h"
30 #include "except.h"
31 #include "df.h"
32 #include "cselib.h"
33 #include "dce.h"
34 #include "valtrack.h"
35 #include "tree-pass.h"
36 #include "dbgcnt.h"
37 #include "tm_p.h"
38 #include "emit-rtl.h"  /* FIXME: Can go away once crtl is moved to rtl.h.  */
39 
40 
41 /* -------------------------------------------------------------------------
42    Core mark/delete routines
43    ------------------------------------------------------------------------- */
44 
45 /* True if we are invoked while the df engine is running; in this case,
46    we don't want to reenter it.  */
47 static bool df_in_progress = false;
48 
49 /* True if we are allowed to alter the CFG in this pass.  */
50 static bool can_alter_cfg = false;
51 
52 /* Instructions that have been marked but whose dependencies have not
53    yet been processed.  */
54 static vec<rtx> worklist;
55 
56 /* Bitmap of instructions marked as needed indexed by INSN_UID.  */
57 static sbitmap marked;
58 
59 /* Bitmap obstacks used for block processing by the fast algorithm.  */
60 static bitmap_obstack dce_blocks_bitmap_obstack;
61 static bitmap_obstack dce_tmp_bitmap_obstack;
62 
63 static bool find_call_stack_args (rtx, bool, bool, bitmap);
64 
65 /* A subroutine for which BODY is part of the instruction being tested;
66    either the top-level pattern, or an element of a PARALLEL.  The
67    instruction is known not to be a bare USE or CLOBBER.  */
68 
69 static bool
70 deletable_insn_p_1 (rtx body)
71 {
72   switch (GET_CODE (body))
73     {
74     case PREFETCH:
75     case TRAP_IF:
76       /* The UNSPEC case was added here because the ia-64 claims that
77 	 USEs do not work after reload and generates UNSPECS rather
78 	 than USEs.  Since dce is run after reload we need to avoid
79 	 deleting these even if they are dead.  If it turns out that
80 	 USEs really do work after reload, the ia-64 should be
81 	 changed, and the UNSPEC case can be removed.  */
82     case UNSPEC:
83       return false;
84 
85     default:
86       return !volatile_refs_p (body);
87     }
88 }
89 
90 
/* Return true if INSN is a normal instruction that can be deleted by
   the DCE pass.  FAST says which DCE algorithm is running; ARG_STORES,
   if non-NULL, collects the UIDs of argument stores belonging to a
   deletable const/pure call (see find_call_stack_args).  */

static bool
deletable_insn_p (rtx insn, bool fast, bitmap arg_stores)
{
  rtx body, x;
  int i;

  if (CALL_P (insn)
      /* We cannot delete calls inside of the recursive dce because
	 this may cause basic blocks to be deleted and this messes up
	 the rest of the stack of optimization passes.  */
      && (!df_in_progress)
      /* We cannot delete pure or const sibling calls because it is
	 hard to see the result.  */
      && (!SIBLING_CALL_P (insn))
      /* We can delete dead const or pure calls as long as they do not
         infinite loop.  */
      && (RTL_CONST_OR_PURE_CALL_P (insn)
	  && !RTL_LOOPING_CONST_OR_PURE_CALL_P (insn)))
    /* A const/pure call is only deletable if all of its argument
       stores can be identified as well.  */
    return find_call_stack_args (insn, false, fast, arg_stores);

  /* Don't delete jumps, notes and the like.  */
  if (!NONJUMP_INSN_P (insn))
    return false;

  /* Don't delete insns that may throw if we cannot do so.  */
  if (!(cfun->can_delete_dead_exceptions && can_alter_cfg)
      && !insn_nothrow_p (insn))
    return false;

  /* If INSN sets a global_reg, leave it untouched.  */
  for (df_ref *def_rec = DF_INSN_DEFS (insn); *def_rec; def_rec++)
    if (HARD_REGISTER_NUM_P (DF_REF_REGNO (*def_rec))
	&& global_regs[DF_REF_REGNO (*def_rec)])
      return false;

  body = PATTERN (insn);
  switch (GET_CODE (body))
    {
    case USE:
    case VAR_LOCATION:
      return false;

    case CLOBBER:
      if (fast)
	{
	  /* A CLOBBER of a dead pseudo register serves no purpose.
	     That is not necessarily true for hard registers until
	     after reload.  */
	  x = XEXP (body, 0);
	  return REG_P (x) && (!HARD_REGISTER_P (x) || reload_completed);
	}
      else
	/* Because of the way that use-def chains are built, it is not
	   possible to tell if the clobber is dead because it can
	   never be the target of a use-def chain.  */
	return false;

    case PARALLEL:
      /* A PARALLEL is deletable only if every element is.  */
      for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
	if (!deletable_insn_p_1 (XVECEXP (body, 0, i)))
	  return false;
      return true;

    default:
      return deletable_insn_p_1 (body);
    }
}
161 
162 
163 /* Return true if INSN has been marked as needed.  */
164 
165 static inline int
166 marked_insn_p (rtx insn)
167 {
168   /* Artificial defs are always needed and they do not have an insn.
169      We should never see them here.  */
170   gcc_assert (insn);
171   return bitmap_bit_p (marked, INSN_UID (insn));
172 }
173 
174 
175 /* If INSN has not yet been marked as needed, mark it now, and add it to
176    the worklist.  */
177 
178 static void
179 mark_insn (rtx insn, bool fast)
180 {
181   if (!marked_insn_p (insn))
182     {
183       if (!fast)
184 	worklist.safe_push (insn);
185       bitmap_set_bit (marked, INSN_UID (insn));
186       if (dump_file)
187 	fprintf (dump_file, "  Adding insn %d to worklist\n", INSN_UID (insn));
188       if (CALL_P (insn)
189 	  && !df_in_progress
190 	  && !SIBLING_CALL_P (insn)
191 	  && (RTL_CONST_OR_PURE_CALL_P (insn)
192 	      && !RTL_LOOPING_CONST_OR_PURE_CALL_P (insn)))
193 	find_call_stack_args (insn, true, fast, NULL);
194     }
195 }
196 
197 
198 /* A note_stores callback used by mark_nonreg_stores.  DATA is the
199    instruction containing DEST.  */
200 
201 static void
202 mark_nonreg_stores_1 (rtx dest, const_rtx pattern, void *data)
203 {
204   if (GET_CODE (pattern) != CLOBBER && !REG_P (dest))
205     mark_insn ((rtx) data, true);
206 }
207 
208 
209 /* A note_stores callback used by mark_nonreg_stores.  DATA is the
210    instruction containing DEST.  */
211 
212 static void
213 mark_nonreg_stores_2 (rtx dest, const_rtx pattern, void *data)
214 {
215   if (GET_CODE (pattern) != CLOBBER && !REG_P (dest))
216     mark_insn ((rtx) data, false);
217 }
218 
219 
220 /* Mark INSN if BODY stores to a non-register destination.  */
221 
222 static void
223 mark_nonreg_stores (rtx body, rtx insn, bool fast)
224 {
225   if (fast)
226     note_stores (body, mark_nonreg_stores_1, insn);
227   else
228     note_stores (body, mark_nonreg_stores_2, insn);
229 }
230 
231 
232 /* Return true if store to MEM, starting OFF bytes from stack pointer,
233    is a call argument store, and clear corresponding bits from SP_BYTES
234    bitmap if it is.  */
235 
236 static bool
237 check_argument_store (rtx mem, HOST_WIDE_INT off, HOST_WIDE_INT min_sp_off,
238 		      HOST_WIDE_INT max_sp_off, bitmap sp_bytes)
239 {
240   HOST_WIDE_INT byte;
241   for (byte = off; byte < off + GET_MODE_SIZE (GET_MODE (mem)); byte++)
242     {
243       if (byte < min_sp_off
244 	  || byte >= max_sp_off
245 	  || !bitmap_clear_bit (sp_bytes, byte - min_sp_off))
246 	return false;
247     }
248   return true;
249 }
250 
251 
/* Try to find all stack stores of CALL_INSN arguments if
   ACCUMULATE_OUTGOING_ARGS.  If all stack stores have been found
   and it is therefore safe to eliminate the call, return true,
   otherwise return false.  This function should be first called
   with DO_MARK false, and only when the CALL_INSN is actually
   going to be marked called again with DO_MARK true.

   With DO_MARK false the argument-store UIDs are recorded in
   ARG_STORES; with DO_MARK true the stores are marked as needed via
   mark_insn and ARG_STORES may be NULL.  FAST selects between using
   UD chains (slow) and giving up on non-sp-based addresses (fast).  */

static bool
find_call_stack_args (rtx call_insn, bool do_mark, bool fast,
		      bitmap arg_stores)
{
  rtx p, insn, prev_insn;
  bool ret;
  HOST_WIDE_INT min_sp_off, max_sp_off;
  bitmap sp_bytes;

  gcc_assert (CALL_P (call_insn));
  if (!ACCUMULATE_OUTGOING_ARGS)
    return true;

  if (!do_mark)
    {
      gcc_assert (arg_stores);
      bitmap_clear (arg_stores);
    }

  min_sp_off = INTTYPE_MAXIMUM (HOST_WIDE_INT);
  max_sp_off = 0;

  /* First determine the minimum and maximum offset from sp for
     stored arguments.  */
  for (p = CALL_INSN_FUNCTION_USAGE (call_insn); p; p = XEXP (p, 1))
    if (GET_CODE (XEXP (p, 0)) == USE
	&& MEM_P (XEXP (XEXP (p, 0), 0)))
      {
	rtx mem = XEXP (XEXP (p, 0), 0), addr;
	HOST_WIDE_INT off = 0, size;
	/* Cannot reason about a store whose size is unknown.  */
	if (!MEM_SIZE_KNOWN_P (mem))
	  return false;
	size = MEM_SIZE (mem);
	addr = XEXP (mem, 0);
	/* Peel off a constant displacement: (plus (reg) (const_int)).  */
	if (GET_CODE (addr) == PLUS
	    && REG_P (XEXP (addr, 0))
	    && CONST_INT_P (XEXP (addr, 1)))
	  {
	    off = INTVAL (XEXP (addr, 1));
	    addr = XEXP (addr, 0);
	  }
	if (addr != stack_pointer_rtx)
	  {
	    if (!REG_P (addr))
	      return false;
	    /* If not fast, use chains to see if addr wasn't set to
	       sp + offset.  */
	    if (!fast)
	      {
		df_ref *use_rec;
		struct df_link *defs;
		rtx set;

		/* Find the use of ADDR in the call itself ...  */
		for (use_rec = DF_INSN_USES (call_insn); *use_rec; use_rec++)
		  if (rtx_equal_p (addr, DF_REF_REG (*use_rec)))
		    break;

		if (*use_rec == NULL)
		  return false;

		/* ... and follow its UD chain to a real (non-artificial)
		   definition.  */
		for (defs = DF_REF_CHAIN (*use_rec); defs; defs = defs->next)
		  if (! DF_REF_IS_ARTIFICIAL (defs->ref))
		    break;

		if (defs == NULL)
		  return false;

		set = single_set (DF_REF_INSN (defs->ref));
		if (!set)
		  return false;

		/* Only sp + constant is understood.  */
		if (GET_CODE (SET_SRC (set)) != PLUS
		    || XEXP (SET_SRC (set), 0) != stack_pointer_rtx
		    || !CONST_INT_P (XEXP (SET_SRC (set), 1)))
		  return false;

		off += INTVAL (XEXP (SET_SRC (set), 1));
	      }
	    else
	      return false;
	  }
	min_sp_off = MIN (min_sp_off, off);
	max_sp_off = MAX (max_sp_off, off + size);
      }

  /* No stack argument stores at all: nothing further to verify.  */
  if (min_sp_off >= max_sp_off)
    return true;
  sp_bytes = BITMAP_ALLOC (NULL);

  /* Set bits in SP_BYTES bitmap for bytes relative to sp + min_sp_off
     which contain arguments.  Checking has been done in the previous
     loop.  */
  for (p = CALL_INSN_FUNCTION_USAGE (call_insn); p; p = XEXP (p, 1))
    if (GET_CODE (XEXP (p, 0)) == USE
	&& MEM_P (XEXP (XEXP (p, 0), 0)))
      {
	rtx mem = XEXP (XEXP (p, 0), 0), addr;
	HOST_WIDE_INT off = 0, byte;
	addr = XEXP (mem, 0);
	if (GET_CODE (addr) == PLUS
	    && REG_P (XEXP (addr, 0))
	    && CONST_INT_P (XEXP (addr, 1)))
	  {
	    off = INTVAL (XEXP (addr, 1));
	    addr = XEXP (addr, 0);
	  }
	if (addr != stack_pointer_rtx)
	  {
	    /* The first loop already validated this chain, so the
	       lookups below cannot fail.  */
	    df_ref *use_rec;
	    struct df_link *defs;
	    rtx set;

	    for (use_rec = DF_INSN_USES (call_insn); *use_rec; use_rec++)
	      if (rtx_equal_p (addr, DF_REF_REG (*use_rec)))
		break;

	    for (defs = DF_REF_CHAIN (*use_rec); defs; defs = defs->next)
	      if (! DF_REF_IS_ARTIFICIAL (defs->ref))
		break;

	    set = single_set (DF_REF_INSN (defs->ref));
	    off += INTVAL (XEXP (SET_SRC (set), 1));
	  }
	for (byte = off; byte < off + MEM_SIZE (mem); byte++)
	  {
	    /* Overlapping argument areas would indicate a bug in the
	       first loop.  */
	    if (!bitmap_set_bit (sp_bytes, byte - min_sp_off))
	      gcc_unreachable ();
	  }
      }

  /* Walk backwards, looking for argument stores.  The search stops
     when seeing another call, sp adjustment or memory store other than
     argument store.  */
  ret = false;
  for (insn = PREV_INSN (call_insn); insn; insn = prev_insn)
    {
      rtx set, mem, addr;
      HOST_WIDE_INT off;

      /* Do not walk out of the call's basic block.  */
      if (insn == BB_HEAD (BLOCK_FOR_INSN (call_insn)))
	prev_insn = NULL_RTX;
      else
	prev_insn = PREV_INSN (insn);

      if (CALL_P (insn))
	break;

      if (!NONDEBUG_INSN_P (insn))
	continue;

      set = single_set (insn);
      if (!set || SET_DEST (set) == stack_pointer_rtx)
	break;

      if (!MEM_P (SET_DEST (set)))
	continue;

      mem = SET_DEST (set);
      addr = XEXP (mem, 0);
      off = 0;
      if (GET_CODE (addr) == PLUS
	  && REG_P (XEXP (addr, 0))
	  && CONST_INT_P (XEXP (addr, 1)))
	{
	  off = INTVAL (XEXP (addr, 1));
	  addr = XEXP (addr, 0);
	}
      if (addr != stack_pointer_rtx)
	{
	  if (!REG_P (addr))
	    break;
	  /* Same sp + offset resolution as above, but now for the
	     candidate store insn rather than the call.  */
	  if (!fast)
	    {
	      df_ref *use_rec;
	      struct df_link *defs;
	      rtx set;

	      for (use_rec = DF_INSN_USES (insn); *use_rec; use_rec++)
		if (rtx_equal_p (addr, DF_REF_REG (*use_rec)))
		  break;

	      if (*use_rec == NULL)
		break;

	      for (defs = DF_REF_CHAIN (*use_rec); defs; defs = defs->next)
		if (! DF_REF_IS_ARTIFICIAL (defs->ref))
		  break;

	      if (defs == NULL)
		break;

	      set = single_set (DF_REF_INSN (defs->ref));
	      if (!set)
		break;

	      if (GET_CODE (SET_SRC (set)) != PLUS
		  || XEXP (SET_SRC (set), 0) != stack_pointer_rtx
		  || !CONST_INT_P (XEXP (SET_SRC (set), 1)))
		break;

	      off += INTVAL (XEXP (SET_SRC (set), 1));
	    }
	  else
	    break;
	}

      if (GET_MODE_SIZE (GET_MODE (mem)) == 0
	  || !check_argument_store (mem, off, min_sp_off,
				    max_sp_off, sp_bytes))
	break;

      if (!deletable_insn_p (insn, fast, NULL))
	break;

      if (do_mark)
	mark_insn (insn, fast);
      else
	bitmap_set_bit (arg_stores, INSN_UID (insn));

      /* All argument bytes accounted for: the call is deletable.  */
      if (bitmap_empty_p (sp_bytes))
	{
	  ret = true;
	  break;
	}
    }

  BITMAP_FREE (sp_bytes);
  /* On failure, discard any partially-recorded argument stores.  */
  if (!ret && arg_stores)
    bitmap_clear (arg_stores);

  return ret;
}
491 
492 
493 /* Remove all REG_EQUAL and REG_EQUIV notes referring to the registers INSN
494    writes to.  */
495 
496 static void
497 remove_reg_equal_equiv_notes_for_defs (rtx insn)
498 {
499   df_ref *def_rec;
500 
501   for (def_rec = DF_INSN_DEFS (insn); *def_rec; def_rec++)
502     remove_reg_equal_equiv_notes_for_regno (DF_REF_REGNO (*def_rec));
503 }
504 
/* Scan all BBs for debug insns and reset those that reference values
   defined in unmarked insns, i.e. insns that are about to be deleted.  */

static void
reset_unmarked_insns_debug_uses (void)
{
  basic_block bb;
  rtx insn, next;

  FOR_EACH_BB_REVERSE (bb)
    FOR_BB_INSNS_REVERSE_SAFE (bb, insn, next)
      if (DEBUG_INSN_P (insn))
	{
	  df_ref *use_rec;

	  for (use_rec = DF_INSN_USES (insn); *use_rec; use_rec++)
	    {
	      df_ref use = *use_rec;
	      struct df_link *defs;
	      /* Look for a reaching definition in an unmarked insn.
		 The loop below exits early (DEFS non-NULL) exactly
		 when one is found.  */
	      for (defs = DF_REF_CHAIN (use); defs; defs = defs->next)
		{
		  rtx ref_insn;
		  if (DF_REF_IS_ARTIFICIAL (defs->ref))
		    continue;
		  ref_insn = DF_REF_INSN (defs->ref);
		  if (!marked_insn_p (ref_insn))
		    break;
		}
	      /* DEFS == NULL: every definition is kept, this use is
		 still valid.  */
	      if (!defs)
		continue;
	      /* ??? FIXME could we propagate the values assigned to
		 each of the DEFs?  */
	      INSN_VAR_LOCATION_LOC (insn) = gen_rtx_UNKNOWN_VAR_LOC ();
	      df_insn_rescan_debug_internal (insn);
	      break;
	    }
	}
}
543 
/* Delete every instruction that hasn't been marked.  No-op moves are
   deleted even when marked.  */

static void
delete_unmarked_insns (void)
{
  basic_block bb;
  rtx insn, next;
  bool must_clean = false;

  FOR_EACH_BB_REVERSE (bb)
    FOR_BB_INSNS_REVERSE_SAFE (bb, insn, next)
      if (NONDEBUG_INSN_P (insn))
	{
	  /* Always delete no-op moves.  NOTE: the empty statement here
	     is deliberate -- a marked no-op move falls through to the
	     deletion code below instead of being kept.  */
	  if (noop_move_p (insn))
	    ;

	  /* Otherwise rely only on the DCE algorithm.  */
	  else if (marked_insn_p (insn))
	    continue;

	  /* Beware that reaching a dbg counter limit here can result
	     in miscompiled file.  This occurs when a group of insns
	     must be deleted together, typically because the kept insn
	     depends on the output from the deleted insn.  Deleting
	     this insns in reverse order (both at the bb level and
	     when looking at the blocks) minimizes this, but does not
	     eliminate it, since it is possible for the using insn to
	     be top of a block and the producer to be at the bottom of
	     the block.  However, in most cases this will only result
	     in an uninitialized use of an insn that is dead anyway.

	     However, there is one rare case that will cause a
	     miscompile: deletion of non-looping pure and constant
	     calls on a machine where ACCUMULATE_OUTGOING_ARGS is true.
	     In this case it is possible to remove the call, but leave
	     the argument pushes to the stack.  Because of the changes
	     to the stack pointer, this will almost always lead to a
	     miscompile.  */
	  if (!dbg_cnt (dce))
	    continue;

	  if (dump_file)
	    fprintf (dump_file, "DCE: Deleting insn %d\n", INSN_UID (insn));

	  /* Before we delete the insn we have to remove the REG_EQUAL notes
	     for the destination regs in order to avoid dangling notes.  */
	  remove_reg_equal_equiv_notes_for_defs (insn);

	  /* If a pure or const call is deleted, this may make the cfg
	     have unreachable blocks.  We rememeber this and call
	     delete_unreachable_blocks at the end.  */
	  if (CALL_P (insn))
	    must_clean = true;

	  /* Now delete the insn.  */
	  delete_insn_and_edges (insn);
	}

  /* Deleted a pure or const call.  */
  if (must_clean)
    delete_unreachable_blocks ();
}
607 
608 
/* Go through the instructions and mark those whose necessity is not
   dependent on inter-instruction information.  Make sure all other
   instructions are not marked.  FAST selects the DCE algorithm and is
   forwarded to deletable_insn_p/mark_insn.  */

static void
prescan_insns_for_dce (bool fast)
{
  basic_block bb;
  rtx insn, prev;
  bitmap arg_stores = NULL;

  if (dump_file)
    fprintf (dump_file, "Finding needed instructions:\n");

  /* Argument stores of deletable calls are only tracked when calls
     can actually be deleted (not inside recursive dce) and when the
     target accumulates outgoing args on the stack.  */
  if (!df_in_progress && ACCUMULATE_OUTGOING_ARGS)
    arg_stores = BITMAP_ALLOC (NULL);

  FOR_EACH_BB (bb)
    {
      FOR_BB_INSNS_REVERSE_SAFE (bb, insn, prev)
	if (NONDEBUG_INSN_P (insn))
	  {
	    /* Don't mark argument stores now.  They will be marked
	       if needed when the associated CALL is marked.  */
	    if (arg_stores && bitmap_bit_p (arg_stores, INSN_UID (insn)))
	      continue;
	    if (deletable_insn_p (insn, fast, arg_stores))
	      mark_nonreg_stores (PATTERN (insn), insn, fast);
	    else
	      mark_insn (insn, fast);
	  }
      /* find_call_stack_args only looks at argument stores in the
	 same bb.  */
      if (arg_stores)
	bitmap_clear (arg_stores);
    }

  if (arg_stores)
    BITMAP_FREE (arg_stores);

  if (dump_file)
    fprintf (dump_file, "Finished finding needed instructions:\n");
}
652 
653 
654 /* UD-based DSE routines. */
655 
656 /* Mark instructions that define artificially-used registers, such as
657    the frame pointer and the stack pointer.  */
658 
659 static void
660 mark_artificial_uses (void)
661 {
662   basic_block bb;
663   struct df_link *defs;
664   df_ref *use_rec;
665 
666   FOR_ALL_BB (bb)
667     {
668       for (use_rec = df_get_artificial_uses (bb->index);
669 	   *use_rec; use_rec++)
670 	for (defs = DF_REF_CHAIN (*use_rec); defs; defs = defs->next)
671 	  if (! DF_REF_IS_ARTIFICIAL (defs->ref))
672 	    mark_insn (DF_REF_INSN (defs->ref), false);
673     }
674 }
675 
676 
677 /* Mark every instruction that defines a register value that INSN uses.  */
678 
679 static void
680 mark_reg_dependencies (rtx insn)
681 {
682   struct df_link *defs;
683   df_ref *use_rec;
684 
685   if (DEBUG_INSN_P (insn))
686     return;
687 
688   for (use_rec = DF_INSN_USES (insn); *use_rec; use_rec++)
689     {
690       df_ref use = *use_rec;
691       if (dump_file)
692 	{
693 	  fprintf (dump_file, "Processing use of ");
694 	  print_simple_rtl (dump_file, DF_REF_REG (use));
695 	  fprintf (dump_file, " in insn %d:\n", INSN_UID (insn));
696 	}
697       for (defs = DF_REF_CHAIN (use); defs; defs = defs->next)
698 	if (! DF_REF_IS_ARTIFICIAL (defs->ref))
699 	  mark_insn (DF_REF_INSN (defs->ref), false);
700     }
701 }
702 
703 
/* Initialize global variables for a new DCE pass.  FAST selects the
   algorithm: the fast variant uses block-local bitmaps, the slow one
   needs UD chains from the df engine.  */

static void
init_dce (bool fast)
{
  if (!df_in_progress)
    {
      if (!fast)
	{
	  /* The slow algorithm walks use-def chains, so request them
	     from df before analyzing.  */
	  df_set_flags (DF_RD_PRUNE_DEAD_DEFS);
	  df_chain_add_problem (DF_UD_CHAIN);
	}
      df_analyze ();
    }

  if (dump_file)
    df_dump (dump_file);

  if (fast)
    {
      bitmap_obstack_initialize (&dce_blocks_bitmap_obstack);
      bitmap_obstack_initialize (&dce_tmp_bitmap_obstack);
      can_alter_cfg = false;
    }
  else
    can_alter_cfg = true;

  /* One bit per insn UID; initially nothing is marked as needed.  */
  marked = sbitmap_alloc (get_max_uid () + 1);
  bitmap_clear (marked);
}
734 
735 
736 /* Free the data allocated by init_dce.  */
737 
738 static void
739 fini_dce (bool fast)
740 {
741   sbitmap_free (marked);
742 
743   if (fast)
744     {
745       bitmap_obstack_release (&dce_blocks_bitmap_obstack);
746       bitmap_obstack_release (&dce_tmp_bitmap_obstack);
747     }
748 }
749 
750 
/* UD-chain based DCE.  Entry point for the ud_dce pass; returns 0.  */

static unsigned int
rest_of_handle_ud_dce (void)
{
  rtx insn;

  init_dce (false);

  /* Seed the worklist with obviously-needed insns, then propagate
     neededness backwards through use-def chains to a fixed point.  */
  prescan_insns_for_dce (false);
  mark_artificial_uses ();
  while (worklist.length () > 0)
    {
      insn = worklist.pop ();
      mark_reg_dependencies (insn);
    }
  worklist.release ();

  if (MAY_HAVE_DEBUG_INSNS)
    reset_unmarked_insns_debug_uses ();

  /* Before any insns are deleted, we must remove the chains since
     they are not bidirectional.  */
  df_remove_problem (df_chain);
  delete_unmarked_insns ();

  fini_dce (false);
  return 0;
}
780 
781 
782 static bool
783 gate_ud_dce (void)
784 {
785   return optimize > 1 && flag_dce
786     && dbg_cnt (dce_ud);
787 }
788 
/* Pass descriptor for the UD-chain based DCE pass.  */
struct rtl_opt_pass pass_ud_rtl_dce =
{
 {
  RTL_PASS,
  "ud_dce",                             /* name */
  OPTGROUP_NONE,                        /* optinfo_flags */
  gate_ud_dce,                          /* gate */
  rest_of_handle_ud_dce,                /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_DCE,                               /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_df_finish | TODO_verify_rtl_sharing |
  TODO_ggc_collect                     /* todo_flags_finish */
 }
};
809 
810 
811 /* -------------------------------------------------------------------------
812    Fast DCE functions
813    ------------------------------------------------------------------------- */
814 
815 /* Process basic block BB.  Return true if the live_in set has
816    changed. REDO_OUT is true if the info at the bottom of the block
817    needs to be recalculated before starting.  AU is the proper set of
818    artificial uses.  Track global substitution of uses of dead pseudos
819    in debug insns using GLOBAL_DEBUG.  */
820 
821 static bool
822 word_dce_process_block (basic_block bb, bool redo_out,
823 			struct dead_debug_global *global_debug)
824 {
825   bitmap local_live = BITMAP_ALLOC (&dce_tmp_bitmap_obstack);
826   rtx insn;
827   bool block_changed;
828   struct dead_debug_local debug;
829 
830   if (redo_out)
831     {
832       /* Need to redo the live_out set of this block if when one of
833 	 the succs of this block has had a change in it live in
834 	 set.  */
835       edge e;
836       edge_iterator ei;
837       df_confluence_function_n con_fun_n = df_word_lr->problem->con_fun_n;
838       bitmap_clear (DF_WORD_LR_OUT (bb));
839       FOR_EACH_EDGE (e, ei, bb->succs)
840 	(*con_fun_n) (e);
841     }
842 
843   if (dump_file)
844     {
845       fprintf (dump_file, "processing block %d live out = ", bb->index);
846       df_print_word_regset (dump_file, DF_WORD_LR_OUT (bb));
847     }
848 
849   bitmap_copy (local_live, DF_WORD_LR_OUT (bb));
850   dead_debug_local_init (&debug, NULL, global_debug);
851 
852   FOR_BB_INSNS_REVERSE (bb, insn)
853     if (DEBUG_INSN_P (insn))
854       {
855 	df_ref *use_rec;
856 	for (use_rec = DF_INSN_USES (insn); *use_rec; use_rec++)
857 	  if (DF_REF_REGNO (*use_rec) >= FIRST_PSEUDO_REGISTER
858 	      && (GET_MODE_SIZE (GET_MODE (DF_REF_REAL_REG (*use_rec)))
859 		  == 2 * UNITS_PER_WORD)
860 	      && !bitmap_bit_p (local_live, 2 * DF_REF_REGNO (*use_rec))
861 	      && !bitmap_bit_p (local_live, 2 * DF_REF_REGNO (*use_rec) + 1))
862 	    dead_debug_add (&debug, *use_rec, DF_REF_REGNO (*use_rec));
863       }
864     else if (INSN_P (insn))
865       {
866 	bool any_changed;
867 
868 	/* No matter if the instruction is needed or not, we remove
869 	   any regno in the defs from the live set.  */
870 	any_changed = df_word_lr_simulate_defs (insn, local_live);
871 	if (any_changed)
872 	  mark_insn (insn, true);
873 
874 	/* On the other hand, we do not allow the dead uses to set
875 	   anything in local_live.  */
876 	if (marked_insn_p (insn))
877 	  df_word_lr_simulate_uses (insn, local_live);
878 
879 	/* Insert debug temps for dead REGs used in subsequent debug
880 	   insns.  We may have to emit a debug temp even if the insn
881 	   was marked, in case the debug use was after the point of
882 	   death.  */
883 	if (debug.used && !bitmap_empty_p (debug.used))
884 	  {
885 	    df_ref *def_rec;
886 
887 	    for (def_rec = DF_INSN_DEFS (insn); *def_rec; def_rec++)
888 	      dead_debug_insert_temp (&debug, DF_REF_REGNO (*def_rec), insn,
889 				      marked_insn_p (insn)
890 				      && !control_flow_insn_p (insn)
891 				      ? DEBUG_TEMP_AFTER_WITH_REG_FORCE
892 				      : DEBUG_TEMP_BEFORE_WITH_VALUE);
893 	  }
894 
895 	if (dump_file)
896 	  {
897 	    fprintf (dump_file, "finished processing insn %d live out = ",
898 		     INSN_UID (insn));
899 	    df_print_word_regset (dump_file, local_live);
900 	  }
901       }
902 
903   block_changed = !bitmap_equal_p (local_live, DF_WORD_LR_IN (bb));
904   if (block_changed)
905     bitmap_copy (DF_WORD_LR_IN (bb), local_live);
906 
907   dead_debug_local_finish (&debug, NULL);
908   BITMAP_FREE (local_live);
909   return block_changed;
910 }
911 
912 
913 /* Process basic block BB.  Return true if the live_in set has
914    changed. REDO_OUT is true if the info at the bottom of the block
915    needs to be recalculated before starting.  AU is the proper set of
916    artificial uses.  Track global substitution of uses of dead pseudos
917    in debug insns using GLOBAL_DEBUG.  */
918 
919 static bool
920 dce_process_block (basic_block bb, bool redo_out, bitmap au,
921 		   struct dead_debug_global *global_debug)
922 {
923   bitmap local_live = BITMAP_ALLOC (&dce_tmp_bitmap_obstack);
924   rtx insn;
925   bool block_changed;
926   df_ref *def_rec;
927   struct dead_debug_local debug;
928 
929   if (redo_out)
930     {
931       /* Need to redo the live_out set of this block if when one of
932 	 the succs of this block has had a change in it live in
933 	 set.  */
934       edge e;
935       edge_iterator ei;
936       df_confluence_function_n con_fun_n = df_lr->problem->con_fun_n;
937       bitmap_clear (DF_LR_OUT (bb));
938       FOR_EACH_EDGE (e, ei, bb->succs)
939 	(*con_fun_n) (e);
940     }
941 
942   if (dump_file)
943     {
944       fprintf (dump_file, "processing block %d lr out = ", bb->index);
945       df_print_regset (dump_file, DF_LR_OUT (bb));
946     }
947 
948   bitmap_copy (local_live, DF_LR_OUT (bb));
949 
950   df_simulate_initialize_backwards (bb, local_live);
951   dead_debug_local_init (&debug, NULL, global_debug);
952 
953   FOR_BB_INSNS_REVERSE (bb, insn)
954     if (DEBUG_INSN_P (insn))
955       {
956 	df_ref *use_rec;
957 	for (use_rec = DF_INSN_USES (insn); *use_rec; use_rec++)
958 	  if (!bitmap_bit_p (local_live, DF_REF_REGNO (*use_rec))
959 	      && !bitmap_bit_p (au, DF_REF_REGNO (*use_rec)))
960 	    dead_debug_add (&debug, *use_rec, DF_REF_REGNO (*use_rec));
961       }
962     else if (INSN_P (insn))
963       {
964 	bool needed = marked_insn_p (insn);
965 
966 	/* The insn is needed if there is someone who uses the output.  */
967 	if (!needed)
968 	  for (def_rec = DF_INSN_DEFS (insn); *def_rec; def_rec++)
969 	    if (bitmap_bit_p (local_live, DF_REF_REGNO (*def_rec))
970 		|| bitmap_bit_p (au, DF_REF_REGNO (*def_rec)))
971 	      {
972 		needed = true;
973 		mark_insn (insn, true);
974 		break;
975 	      }
976 
977 	/* No matter if the instruction is needed or not, we remove
978 	   any regno in the defs from the live set.  */
979 	df_simulate_defs (insn, local_live);
980 
981 	/* On the other hand, we do not allow the dead uses to set
982 	   anything in local_live.  */
983 	if (needed)
984 	  df_simulate_uses (insn, local_live);
985 
986 	/* Insert debug temps for dead REGs used in subsequent debug
987 	   insns.  We may have to emit a debug temp even if the insn
988 	   was marked, in case the debug use was after the point of
989 	   death.  */
990 	if (debug.used && !bitmap_empty_p (debug.used))
991 	  for (def_rec = DF_INSN_DEFS (insn); *def_rec; def_rec++)
992 	    dead_debug_insert_temp (&debug, DF_REF_REGNO (*def_rec), insn,
993 				    needed && !control_flow_insn_p (insn)
994 				    ? DEBUG_TEMP_AFTER_WITH_REG_FORCE
995 				    : DEBUG_TEMP_BEFORE_WITH_VALUE);
996       }
997 
998   dead_debug_local_finish (&debug, NULL);
999   df_simulate_finalize_backwards (bb, local_live);
1000 
1001   block_changed = !bitmap_equal_p (local_live, DF_LR_IN (bb));
1002   if (block_changed)
1003     bitmap_copy (DF_LR_IN (bb), local_live);
1004 
1005   BITMAP_FREE (local_live);
1006   return block_changed;
1007 }
1008 
1009 
/* Perform fast DCE once initialization is done.  If WORD_LEVEL is
   true, use the word level dce, otherwise do it at the pseudo
   level.

   The algorithm is a backward iteration over the CFG in postorder:
   each block is processed with dce_process_block /
   word_dce_process_block, and whenever the live-in set of a block
   changes we either mark a predecessor's out set for recomputation
   (REDO_OUT) or, if the predecessor was already processed this
   round (i.e. we closed a loop), request another global
   iteration.  */

static void
fast_dce (bool word_level)
{
  int *postorder = df_get_postorder (DF_BACKWARD);
  int n_blocks = df_get_n_blocks (DF_BACKWARD);
  /* The set of blocks that have been seen on this iteration.  */
  bitmap processed = BITMAP_ALLOC (&dce_blocks_bitmap_obstack);
  /* The set of blocks that need to have the out vectors reset because
     the in of one of their successors has changed.  */
  bitmap redo_out = BITMAP_ALLOC (&dce_blocks_bitmap_obstack);
  bitmap all_blocks = BITMAP_ALLOC (&dce_blocks_bitmap_obstack);
  bool global_changed = true;

  /* These regs are considered always live so if they end up dying
     because of some def, we need to bring them back again.  Calling
     df_simulate_fixup_sets has the disadvantage of calling
     bb_has_eh_pred once per insn, so we cache the information
     here.  */
  bitmap au = &df->regular_block_artificial_uses;
  bitmap au_eh = &df->eh_block_artificial_uses;
  int i;
  struct dead_debug_global global_debug;

  /* Initial marking of obviously-needed insns.  */
  prescan_insns_for_dce (true);

  for (i = 0; i < n_blocks; i++)
    bitmap_set_bit (all_blocks, postorder[i]);

  dead_debug_global_init (&global_debug, NULL);

  while (global_changed)
    {
      global_changed = false;

      for (i = 0; i < n_blocks; i++)
	{
	  int index = postorder[i];
	  basic_block bb = BASIC_BLOCK (index);
	  bool local_changed;

	  /* Entry/exit blocks carry no deletable insns; just mark
	     them processed.  */
	  if (index < NUM_FIXED_BLOCKS)
	    {
	      bitmap_set_bit (processed, index);
	      continue;
	    }

	  if (word_level)
	    local_changed
	      = word_dce_process_block (bb, bitmap_bit_p (redo_out, index),
					&global_debug);
	  else
	    local_changed
	      = dce_process_block (bb, bitmap_bit_p (redo_out, index),
				   bb_has_eh_pred (bb) ? au_eh : au,
				   &global_debug);
	  bitmap_set_bit (processed, index);

	  if (local_changed)
	    {
	      edge e;
	      edge_iterator ei;
	      FOR_EACH_EDGE (e, ei, bb->preds)
		if (bitmap_bit_p (processed, e->src->index))
		  /* Be tricky about when we need to iterate the
		     analysis.  We only have to redo the analysis if
		     the bitmaps change at the top of a block that is
		     the entry to a loop.  */
		  global_changed = true;
		else
		  bitmap_set_bit (redo_out, e->src->index);
	    }
	}

      if (global_changed)
	{
	  /* Turn off the RUN_DCE flag to prevent recursive calls to
	     dce.  */
	  int old_flag = df_clear_flags (DF_LR_RUN_DCE);

	  /* So something was deleted that requires a redo.  Do it on
	     the cheap.  */
	  delete_unmarked_insns ();
	  bitmap_clear (marked);
	  bitmap_clear (processed);
	  bitmap_clear (redo_out);

	  /* We do not need to rescan any instructions.  We only need
	     to redo the dataflow equations for the blocks that had a
	     change at the top of the block.  Then we need to redo the
	     iteration.  */
	  if (word_level)
	    df_analyze_problem (df_word_lr, all_blocks, postorder, n_blocks);
	  else
	    df_analyze_problem (df_lr, all_blocks, postorder, n_blocks);

	  /* Restore DF_LR_RUN_DCE if it was set on entry.  */
	  if (old_flag & DF_LR_RUN_DCE)
	    df_set_flags (DF_LR_RUN_DCE);

	  prescan_insns_for_dce (true);
	}
    }

  dead_debug_global_finish (&global_debug, NULL);

  /* The fixed point is reached; delete whatever is still unmarked.  */
  delete_unmarked_insns ();

  BITMAP_FREE (processed);
  BITMAP_FREE (redo_out);
  BITMAP_FREE (all_blocks);
}
1124 
1125 
1126 /* Fast register level DCE.  */
1127 
1128 static unsigned int
1129 rest_of_handle_fast_dce (void)
1130 {
1131   init_dce (true);
1132   fast_dce (false);
1133   fini_dce (true);
1134   return 0;
1135 }
1136 
1137 
1138 /* Fast byte level DCE.  */
1139 
1140 void
1141 run_word_dce (void)
1142 {
1143   int old_flags;
1144 
1145   if (!flag_dce)
1146     return;
1147 
1148   timevar_push (TV_DCE);
1149   old_flags = df_clear_flags (DF_DEFER_INSN_RESCAN + DF_NO_INSN_RESCAN);
1150   df_word_lr_add_problem ();
1151   init_dce (true);
1152   fast_dce (true);
1153   fini_dce (true);
1154   df_set_flags (old_flags);
1155   timevar_pop (TV_DCE);
1156 }
1157 
1158 
1159 /* This is an internal call that is used by the df live register
1160    problem to run fast dce as a side effect of creating the live
1161    information.  The stack is organized so that the lr problem is run,
1162    this pass is run, which updates the live info and the df scanning
1163    info, and then returns to allow the rest of the problems to be run.
1164 
1165    This can be called by elsewhere but it will not update the bit
1166    vectors for any other problems than LR.  */
1167 
1168 void
1169 run_fast_df_dce (void)
1170 {
1171   if (flag_dce)
1172     {
1173       /* If dce is able to delete something, it has to happen
1174 	 immediately.  Otherwise there will be problems handling the
1175 	 eq_notes.  */
1176       int old_flags =
1177 	df_clear_flags (DF_DEFER_INSN_RESCAN + DF_NO_INSN_RESCAN);
1178 
1179       df_in_progress = true;
1180       rest_of_handle_fast_dce ();
1181       df_in_progress = false;
1182 
1183       df_set_flags (old_flags);
1184     }
1185 }
1186 
1187 
1188 /* Run a fast DCE pass.  */
1189 
1190 void
1191 run_fast_dce (void)
1192 {
1193   if (flag_dce)
1194     rest_of_handle_fast_dce ();
1195 }
1196 
1197 
1198 static bool
1199 gate_fast_dce (void)
1200 {
1201   return optimize > 0 && flag_dce
1202     && dbg_cnt (dce_fast);
1203 }
1204 
/* Pass descriptor for the fast RTL DCE pass ("rtl_dce"), gated by
   gate_fast_dce and executed via rest_of_handle_fast_dce.  */
struct rtl_opt_pass pass_fast_rtl_dce =
{
 {
  RTL_PASS,
  "rtl_dce",                            /* name */
  OPTGROUP_NONE,                        /* optinfo_flags */
  gate_fast_dce,                        /* gate */
  rest_of_handle_fast_dce,              /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_DCE,                               /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_df_finish | TODO_verify_rtl_sharing |
  TODO_ggc_collect                      /* todo_flags_finish */
 }
};
1225