xref: /netbsd-src/external/gpl3/gcc.old/dist/gcc/dce.c (revision b7b7574d3bf8eeb51a1fa3977b59142ec6434a55)
1 /* RTL dead code elimination.
2    Copyright (C) 2005, 2006, 2007, 2008, 2009 Free Software Foundation, Inc.
3 
4 This file is part of GCC.
5 
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10 
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
14 for more details.
15 
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3.  If not see
18 <http://www.gnu.org/licenses/>.  */
19 
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "hashtab.h"
24 #include "tm.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "regs.h"
28 #include "hard-reg-set.h"
29 #include "flags.h"
30 #include "except.h"
31 #include "df.h"
32 #include "cselib.h"
33 #include "dce.h"
34 #include "timevar.h"
35 #include "tree-pass.h"
36 #include "dbgcnt.h"
37 #include "tm_p.h"
38 
39 
40 /* -------------------------------------------------------------------------
41    Core mark/delete routines
42    ------------------------------------------------------------------------- */
43 
/* True if we are invoked while the df engine is running; in this case,
   we don't want to reenter it.  */
static bool df_in_progress = false;

/* Instructions that have been marked but whose dependencies have not
   yet been processed.  */
static VEC(rtx,heap) *worklist;

/* Bitmap of instructions marked as needed indexed by INSN_UID.  */
static sbitmap marked;

/* Bitmap obstacks used for block processing by the fast algorithm.  */
static bitmap_obstack dce_blocks_bitmap_obstack;
static bitmap_obstack dce_tmp_bitmap_obstack;

/* Forward declaration; used by both deletable_insn_p and mark_insn.  */
static bool find_call_stack_args (rtx, bool, bool, bitmap);
60 
61 /* A subroutine for which BODY is part of the instruction being tested;
62    either the top-level pattern, or an element of a PARALLEL.  The
63    instruction is known not to be a bare USE or CLOBBER.  */
64 
65 static bool
66 deletable_insn_p_1 (rtx body)
67 {
68   switch (GET_CODE (body))
69     {
70     case PREFETCH:
71     case TRAP_IF:
72       /* The UNSPEC case was added here because the ia-64 claims that
73 	 USEs do not work after reload and generates UNSPECS rather
74 	 than USEs.  Since dce is run after reload we need to avoid
75 	 deleting these even if they are dead.  If it turns out that
76 	 USEs really do work after reload, the ia-64 should be
77 	 changed, and the UNSPEC case can be removed.  */
78     case UNSPEC:
79       return false;
80 
81     default:
82       return !volatile_refs_p (body);
83     }
84 }
85 
86 
/* Return true if INSN is a normal instruction that can be deleted by
   the DCE pass.  FAST is true for the fast (non-chain based)
   algorithm.  ARG_STORES, if non-NULL, receives the UIDs of stack
   argument stores belonging to a deletable const/pure call (see
   find_call_stack_args).  */

static bool
deletable_insn_p (rtx insn, bool fast, bitmap arg_stores)
{
  rtx body, x;
  int i;

  if (CALL_P (insn)
      /* We cannot delete calls inside of the recursive dce because
	 this may cause basic blocks to be deleted and this messes up
	 the rest of the stack of optimization passes.  */
      && (!df_in_progress)
      /* We cannot delete pure or const sibling calls because it is
	 hard to see the result.  */
      && (!SIBLING_CALL_P (insn))
      /* We can delete dead const or pure calls as long as they do not
         infinite loop.  */
      && (RTL_CONST_OR_PURE_CALL_P (insn)
	  && !RTL_LOOPING_CONST_OR_PURE_CALL_P (insn)))
    /* Such a call is deletable only if all of its stack argument
       stores can also be found (and, later, deleted with it).  */
    return find_call_stack_args (insn, false, fast, arg_stores);

  /* Don't delete jumps, notes and the like.  */
  if (!NONJUMP_INSN_P (insn))
    return false;

  /* Don't delete insns that can throw.  */
  if (!insn_nothrow_p (insn))
    return false;

  body = PATTERN (insn);
  switch (GET_CODE (body))
    {
    case USE:
    case VAR_LOCATION:
      return false;

    case CLOBBER:
      if (fast)
	{
	  /* A CLOBBER of a dead pseudo register serves no purpose.
	     That is not necessarily true for hard registers until
	     after reload.  */
	  x = XEXP (body, 0);
	  return REG_P (x) && (!HARD_REGISTER_P (x) || reload_completed);
	}
      else
	/* Because of the way that use-def chains are built, it is not
	   possible to tell if the clobber is dead because it can
	   never be the target of a use-def chain.  */
	return false;

    case PARALLEL:
      /* A PARALLEL is deletable only if every element of it is.  */
      for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
	if (!deletable_insn_p_1 (XVECEXP (body, 0, i)))
	  return false;
      return true;

    default:
      return deletable_insn_p_1 (body);
    }
}
150 
151 
152 /* Return true if INSN has been marked as needed.  */
153 
154 static inline int
155 marked_insn_p (rtx insn)
156 {
157   /* Artificial defs are always needed and they do not have an insn.
158      We should never see them here.  */
159   gcc_assert (insn);
160   return TEST_BIT (marked, INSN_UID (insn));
161 }
162 
163 
164 /* If INSN has not yet been marked as needed, mark it now, and add it to
165    the worklist.  */
166 
167 static void
168 mark_insn (rtx insn, bool fast)
169 {
170   if (!marked_insn_p (insn))
171     {
172       if (!fast)
173 	VEC_safe_push (rtx, heap, worklist, insn);
174       SET_BIT (marked, INSN_UID (insn));
175       if (dump_file)
176 	fprintf (dump_file, "  Adding insn %d to worklist\n", INSN_UID (insn));
177       if (CALL_P (insn)
178 	  && !df_in_progress
179 	  && !SIBLING_CALL_P (insn)
180 	  && (RTL_CONST_OR_PURE_CALL_P (insn)
181 	      && !RTL_LOOPING_CONST_OR_PURE_CALL_P (insn)))
182 	find_call_stack_args (insn, true, fast, NULL);
183     }
184 }
185 
186 
187 /* A note_stores callback used by mark_nonreg_stores.  DATA is the
188    instruction containing DEST.  */
189 
190 static void
191 mark_nonreg_stores_1 (rtx dest, const_rtx pattern, void *data)
192 {
193   if (GET_CODE (pattern) != CLOBBER && !REG_P (dest))
194     mark_insn ((rtx) data, true);
195 }
196 
197 
198 /* A note_stores callback used by mark_nonreg_stores.  DATA is the
199    instruction containing DEST.  */
200 
201 static void
202 mark_nonreg_stores_2 (rtx dest, const_rtx pattern, void *data)
203 {
204   if (GET_CODE (pattern) != CLOBBER && !REG_P (dest))
205     mark_insn ((rtx) data, false);
206 }
207 
208 
209 /* Mark INSN if BODY stores to a non-register destination.  */
210 
211 static void
212 mark_nonreg_stores (rtx body, rtx insn, bool fast)
213 {
214   if (fast)
215     note_stores (body, mark_nonreg_stores_1, insn);
216   else
217     note_stores (body, mark_nonreg_stores_2, insn);
218 }
219 
220 
/* Try to find all stack stores of CALL_INSN arguments if
   ACCUMULATE_OUTGOING_ARGS.  If all stack stores have been found
   and it is therefore safe to eliminate the call, return true,
   otherwise return false.  This function should be first called
   with DO_MARK false, and only when the CALL_INSN is actually
   going to be marked called again with DO_MARK true.

   With DO_MARK false, the UIDs of the argument stores found are
   collected in ARG_STORES so that prescan_insns_for_dce can skip
   them; with DO_MARK true the stores are marked as needed via
   mark_insn instead and ARG_STORES may be NULL.  FAST selects
   whether use-def chains are available for tracing an address
   register back to sp + offset.  */

static bool
find_call_stack_args (rtx call_insn, bool do_mark, bool fast,
		      bitmap arg_stores)
{
  rtx p, insn, prev_insn;
  bool ret;
  HOST_WIDE_INT min_sp_off, max_sp_off;
  bitmap sp_bytes;

  gcc_assert (CALL_P (call_insn));
  if (!ACCUMULATE_OUTGOING_ARGS)
    return true;

  if (!do_mark)
    {
      gcc_assert (arg_stores);
      bitmap_clear (arg_stores);
    }

  min_sp_off = INTTYPE_MAXIMUM (HOST_WIDE_INT);
  max_sp_off = 0;

  /* First determine the minimum and maximum offset from sp for
     stored arguments.  */
  for (p = CALL_INSN_FUNCTION_USAGE (call_insn); p; p = XEXP (p, 1))
    if (GET_CODE (XEXP (p, 0)) == USE
	&& MEM_P (XEXP (XEXP (p, 0), 0)))
      {
	rtx mem = XEXP (XEXP (p, 0), 0), addr, size;
	HOST_WIDE_INT off = 0;
	size = MEM_SIZE (mem);
	/* Without a known size we cannot account for every argument
	   byte, so give up.  */
	if (size == NULL_RTX)
	  return false;
	addr = XEXP (mem, 0);
	if (GET_CODE (addr) == PLUS
	    && REG_P (XEXP (addr, 0))
	    && CONST_INT_P (XEXP (addr, 1)))
	  {
	    off = INTVAL (XEXP (addr, 1));
	    addr = XEXP (addr, 0);
	  }
	if (addr != stack_pointer_rtx)
	  {
	    if (!REG_P (addr))
	      return false;
	    /* If not fast, use chains to see if addr wasn't set to
	       sp + offset.  */
	    if (!fast)
	      {
		df_ref *use_rec;
		struct df_link *defs;
		rtx set;

		for (use_rec = DF_INSN_USES (call_insn); *use_rec; use_rec++)
		  if (rtx_equal_p (addr, DF_REF_REG (*use_rec)))
		    break;

		if (*use_rec == NULL)
		  return false;

		/* Find the (single) non-artificial def reaching this
		   use of the address register.  */
		for (defs = DF_REF_CHAIN (*use_rec); defs; defs = defs->next)
		  if (! DF_REF_IS_ARTIFICIAL (defs->ref))
		    break;

		if (defs == NULL)
		  return false;

		set = single_set (DF_REF_INSN (defs->ref));
		if (!set)
		  return false;

		/* The def must have the shape reg = sp + const.  */
		if (GET_CODE (SET_SRC (set)) != PLUS
		    || XEXP (SET_SRC (set), 0) != stack_pointer_rtx
		    || !CONST_INT_P (XEXP (SET_SRC (set), 1)))
		  return false;

		off += INTVAL (XEXP (SET_SRC (set), 1));
	      }
	    else
	      return false;
	  }
	min_sp_off = MIN (min_sp_off, off);
	max_sp_off = MAX (max_sp_off, off + INTVAL (size));
      }

  /* No stack-stored arguments at all: the call is trivially safe.  */
  if (min_sp_off >= max_sp_off)
    return true;
  sp_bytes = BITMAP_ALLOC (NULL);

  /* Set bits in SP_BYTES bitmap for bytes relative to sp + min_sp_off
     which contain arguments.  Checking has been done in the previous
     loop.  */
  for (p = CALL_INSN_FUNCTION_USAGE (call_insn); p; p = XEXP (p, 1))
    if (GET_CODE (XEXP (p, 0)) == USE
	&& MEM_P (XEXP (XEXP (p, 0), 0)))
      {
	rtx mem = XEXP (XEXP (p, 0), 0), addr;
	HOST_WIDE_INT off = 0, byte;
	addr = XEXP (mem, 0);
	if (GET_CODE (addr) == PLUS
	    && REG_P (XEXP (addr, 0))
	    && CONST_INT_P (XEXP (addr, 1)))
	  {
	    off = INTVAL (XEXP (addr, 1));
	    addr = XEXP (addr, 0);
	  }
	if (addr != stack_pointer_rtx)
	  {
	    df_ref *use_rec;
	    struct df_link *defs;
	    rtx set;

	    for (use_rec = DF_INSN_USES (call_insn); *use_rec; use_rec++)
	      if (rtx_equal_p (addr, DF_REF_REG (*use_rec)))
		break;

	    for (defs = DF_REF_CHAIN (*use_rec); defs; defs = defs->next)
	      if (! DF_REF_IS_ARTIFICIAL (defs->ref))
		break;

	    set = single_set (DF_REF_INSN (defs->ref));
	    off += INTVAL (XEXP (SET_SRC (set), 1));
	  }
	for (byte = off; byte < off + INTVAL (MEM_SIZE (mem)); byte++)
	  {
	    /* Each argument byte must be covered exactly once.  */
	    if (!bitmap_set_bit (sp_bytes, byte - min_sp_off))
	      gcc_unreachable ();
	  }
      }

  /* Walk backwards, looking for argument stores.  The search stops
     when seeing another call, sp adjustment or memory store other than
     argument store.  */
  ret = false;
  for (insn = PREV_INSN (call_insn); insn; insn = prev_insn)
    {
      rtx set, mem, addr;
      HOST_WIDE_INT off, byte;

      /* Never walk above the head of the call's basic block.  */
      if (insn == BB_HEAD (BLOCK_FOR_INSN (call_insn)))
	prev_insn = NULL_RTX;
      else
	prev_insn = PREV_INSN (insn);

      if (CALL_P (insn))
	break;

      if (!INSN_P (insn))
	continue;

      set = single_set (insn);
      if (!set || SET_DEST (set) == stack_pointer_rtx)
	break;

      if (!MEM_P (SET_DEST (set)))
	continue;

      mem = SET_DEST (set);
      addr = XEXP (mem, 0);
      off = 0;
      if (GET_CODE (addr) == PLUS
	  && REG_P (XEXP (addr, 0))
	  && CONST_INT_P (XEXP (addr, 1)))
	{
	  off = INTVAL (XEXP (addr, 1));
	  addr = XEXP (addr, 0);
	}
      if (addr != stack_pointer_rtx)
	{
	  if (!REG_P (addr))
	    break;
	  /* As in the first loop, trace the address register back to
	     sp + offset via use-def chains when they are available.  */
	  if (!fast)
	    {
	      df_ref *use_rec;
	      struct df_link *defs;
	      rtx set;

	      for (use_rec = DF_INSN_USES (insn); *use_rec; use_rec++)
		if (rtx_equal_p (addr, DF_REF_REG (*use_rec)))
		  break;

	      if (*use_rec == NULL)
		break;

	      for (defs = DF_REF_CHAIN (*use_rec); defs; defs = defs->next)
		if (! DF_REF_IS_ARTIFICIAL (defs->ref))
		  break;

	      if (defs == NULL)
		break;

	      set = single_set (DF_REF_INSN (defs->ref));
	      if (!set)
		break;

	      if (GET_CODE (SET_SRC (set)) != PLUS
		  || XEXP (SET_SRC (set), 0) != stack_pointer_rtx
		  || !CONST_INT_P (XEXP (SET_SRC (set), 1)))
		break;

	      off += INTVAL (XEXP (SET_SRC (set), 1));
	    }
	  else
	    break;
	}

      if (GET_MODE_SIZE (GET_MODE (mem)) == 0)
	break;

      /* Clear the bytes covered by this store; a byte outside the
	 argument area (or already cleared) means this store is not a
	 pure argument store, so stop.  */
      for (byte = off; byte < off + GET_MODE_SIZE (GET_MODE (mem)); byte++)
	{
	  if (byte < min_sp_off
	      || byte >= max_sp_off
	      || !bitmap_clear_bit (sp_bytes, byte - min_sp_off))
	    break;
	}

      if (!deletable_insn_p (insn, fast, NULL))
	break;

      if (do_mark)
	mark_insn (insn, fast);
      else
	bitmap_set_bit (arg_stores, INSN_UID (insn));

      /* Every argument byte has been accounted for, so all argument
	 stores were found.  */
      if (bitmap_empty_p (sp_bytes))
	{
	  ret = true;
	  break;
	}
    }

  BITMAP_FREE (sp_bytes);
  if (!ret && arg_stores)
    bitmap_clear (arg_stores);

  return ret;
}
466 
467 
468 /* Remove all REG_EQUAL and REG_EQUIV notes referring to the registers INSN
469    writes to.  */
470 
471 static void
472 remove_reg_equal_equiv_notes_for_defs (rtx insn)
473 {
474   df_ref *def_rec;
475 
476   for (def_rec = DF_INSN_DEFS (insn); *def_rec; def_rec++)
477     remove_reg_equal_equiv_notes_for_regno (DF_REF_REGNO (*def_rec));
478 }
479 
480 
/* Delete every instruction that hasn't been marked.  */

static void
delete_unmarked_insns (void)
{
  basic_block bb;
  rtx insn, next;
  bool must_clean = false;

  FOR_EACH_BB_REVERSE (bb)
    FOR_BB_INSNS_REVERSE_SAFE (bb, insn, next)
      if (INSN_P (insn))
	{
	  /* Always delete no-op moves.  Note the deliberate empty
	     statement: a no-op move falls through to the deletion
	     code below even if it was marked.  */
	  if (noop_move_p (insn))
	    ;

	  /* Otherwise rely only on the DCE algorithm.  */
	  else if (marked_insn_p (insn))
	    continue;

	  /* Beware that reaching a dbg counter limit here can result
	     in miscompiled file.  This occurs when a group of insns
	     must be deleted together, typically because the kept insn
	     depends on the output from the deleted insn.  Deleting
	     this insns in reverse order (both at the bb level and
	     when looking at the blocks) minimizes this, but does not
	     eliminate it, since it is possible for the using insn to
	     be top of a block and the producer to be at the bottom of
	     the block.  However, in most cases this will only result
	     in an uninitialized use of an insn that is dead anyway.

	     However, there is one rare case that will cause a
	     miscompile: deletion of non-looping pure and constant
	     calls on a machine where ACCUMULATE_OUTGOING_ARGS is true.
	     In this case it is possible to remove the call, but leave
	     the argument pushes to the stack.  Because of the changes
	     to the stack pointer, this will almost always lead to a
	     miscompile.  */
	  if (!dbg_cnt (dce))
	    continue;

	  if (dump_file)
	    fprintf (dump_file, "DCE: Deleting insn %d\n", INSN_UID (insn));

	  /* Before we delete the insn we have to remove the REG_EQUAL notes
	     for the destination regs in order to avoid dangling notes.  */
	  remove_reg_equal_equiv_notes_for_defs (insn);

	  /* If a pure or const call is deleted, this may make the cfg
	     have unreachable blocks.  We remember this and call
	     delete_unreachable_blocks at the end.  */
	  if (CALL_P (insn))
	    must_clean = true;

	  /* Now delete the insn.  */
	  delete_insn_and_edges (insn);
	}

  /* Deleted a pure or const call.  */
  if (must_clean)
    delete_unreachable_blocks ();
}
544 
545 
/* Go through the instructions and mark those whose necessity is not
   dependent on inter-instruction information.  Make sure all other
   instructions are not marked.  */

static void
prescan_insns_for_dce (bool fast)
{
  basic_block bb;
  rtx insn, prev;
  bitmap arg_stores = NULL;

  if (dump_file)
    fprintf (dump_file, "Finding needed instructions:\n");

  /* Track argument stores of deletable const/pure calls so they can
     be skipped below.  Only relevant with ACCUMULATE_OUTGOING_ARGS
     and when calls may actually be deleted (i.e. not when invoked
     from inside the df engine).  */
  if (!df_in_progress && ACCUMULATE_OUTGOING_ARGS)
    arg_stores = BITMAP_ALLOC (NULL);

  FOR_EACH_BB (bb)
    {
      FOR_BB_INSNS_REVERSE_SAFE (bb, insn, prev)
	if (INSN_P (insn))
	  {
	    /* Don't mark argument stores now.  They will be marked
	       if needed when the associated CALL is marked.  */
	    if (arg_stores && bitmap_bit_p (arg_stores, INSN_UID (insn)))
	      continue;
	    if (deletable_insn_p (insn, fast, arg_stores))
	      mark_nonreg_stores (PATTERN (insn), insn, fast);
	    else
	      mark_insn (insn, fast);
	  }
      /* find_call_stack_args only looks at argument stores in the
	 same bb.  */
      if (arg_stores)
	bitmap_clear (arg_stores);
    }

  if (arg_stores)
    BITMAP_FREE (arg_stores);

  if (dump_file)
    fprintf (dump_file, "Finished finding needed instructions:\n");
}
589 
590 
/* UD-based DCE routines. */
592 
593 /* Mark instructions that define artificially-used registers, such as
594    the frame pointer and the stack pointer.  */
595 
596 static void
597 mark_artificial_uses (void)
598 {
599   basic_block bb;
600   struct df_link *defs;
601   df_ref *use_rec;
602 
603   FOR_ALL_BB (bb)
604     {
605       for (use_rec = df_get_artificial_uses (bb->index);
606 	   *use_rec; use_rec++)
607 	for (defs = DF_REF_CHAIN (*use_rec); defs; defs = defs->next)
608 	  if (! DF_REF_IS_ARTIFICIAL (defs->ref))
609 	    mark_insn (DF_REF_INSN (defs->ref), false);
610     }
611 }
612 
613 
614 /* Mark every instruction that defines a register value that INSN uses.  */
615 
616 static void
617 mark_reg_dependencies (rtx insn)
618 {
619   struct df_link *defs;
620   df_ref *use_rec;
621 
622   if (DEBUG_INSN_P (insn))
623     return;
624 
625   for (use_rec = DF_INSN_USES (insn); *use_rec; use_rec++)
626     {
627       df_ref use = *use_rec;
628       if (dump_file)
629 	{
630 	  fprintf (dump_file, "Processing use of ");
631 	  print_simple_rtl (dump_file, DF_REF_REG (use));
632 	  fprintf (dump_file, " in insn %d:\n", INSN_UID (insn));
633 	}
634       for (defs = DF_REF_CHAIN (use); defs; defs = defs->next)
635 	if (! DF_REF_IS_ARTIFICIAL (defs->ref))
636 	  mark_insn (DF_REF_INSN (defs->ref), false);
637     }
638 }
639 
640 
/* Initialize global variables for a new DCE pass.  FAST selects the
   fast algorithm (which needs the bitmap obstacks but no use-def
   chains).  */

static void
init_dce (bool fast)
{
  /* Unless DCE was invoked from inside the df engine, (re)compute the
     dataflow information; the slow variant additionally needs use-def
     chains.  */
  if (!df_in_progress)
    {
      if (!fast)
	df_chain_add_problem (DF_UD_CHAIN);
      df_analyze ();
    }

  if (dump_file)
    df_dump (dump_file);

  /* The fast algorithm's block processing allocates from these
     obstacks.  */
  if (fast)
    {
      bitmap_obstack_initialize (&dce_blocks_bitmap_obstack);
      bitmap_obstack_initialize (&dce_tmp_bitmap_obstack);
    }

  /* Allocate and clear the needed-insn bitmap, indexed by INSN_UID.  */
  marked = sbitmap_alloc (get_max_uid () + 1);
  sbitmap_zero (marked);
}
665 
666 
/* Free the data allocated by init_dce.  FAST must match the value
   passed to init_dce.  */

static void
fini_dce (bool fast)
{
  sbitmap_free (marked);

  /* The obstacks only exist for the fast algorithm.  */
  if (fast)
    {
      bitmap_obstack_release (&dce_blocks_bitmap_obstack);
      bitmap_obstack_release (&dce_tmp_bitmap_obstack);
    }
}
680 
681 
682 /* UD-chain based DCE.  */
683 
684 static unsigned int
685 rest_of_handle_ud_dce (void)
686 {
687   rtx insn;
688 
689   init_dce (false);
690 
691   prescan_insns_for_dce (false);
692   mark_artificial_uses ();
693   while (VEC_length (rtx, worklist) > 0)
694     {
695       insn = VEC_pop (rtx, worklist);
696       mark_reg_dependencies (insn);
697     }
698   VEC_free (rtx, heap, worklist);
699 
700   /* Before any insns are deleted, we must remove the chains since
701      they are not bidirectional.  */
702   df_remove_problem (df_chain);
703   delete_unmarked_insns ();
704 
705   fini_dce (false);
706   return 0;
707 }
708 
709 
710 static bool
711 gate_ud_dce (void)
712 {
713   return optimize > 1 && flag_dce
714     && dbg_cnt (dce_ud);
715 }
716 
/* Pass descriptor for the use-def chain based DCE pass.  */
struct rtl_opt_pass pass_ud_rtl_dce =
{
 {
  RTL_PASS,
  "ud dce",                             /* name */
  gate_ud_dce,                          /* gate */
  rest_of_handle_ud_dce,                /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_DCE,                               /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func |
  TODO_df_finish | TODO_verify_rtl_sharing |
  TODO_ggc_collect                     /* todo_flags_finish */
 }
};
737 
738 
739 /* -------------------------------------------------------------------------
740    Fast DCE functions
741    ------------------------------------------------------------------------- */
742 
/* Process basic block BB.  Return true if the live_in set has
   changed. REDO_OUT is true if the info at the bottom of the block
   needs to be recalculated before starting.  AU is the proper set of
   artificial uses. */

static bool
byte_dce_process_block (basic_block bb, bool redo_out, bitmap au)
{
  bitmap local_live = BITMAP_ALLOC (&dce_tmp_bitmap_obstack);
  rtx insn;
  bool block_changed;
  df_ref *def_rec;

  if (redo_out)
    {
      /* Need to redo the live_out set of this block if when one of
	 the succs of this block has had a change in it live in
	 set.  */
      edge e;
      edge_iterator ei;
      df_confluence_function_n con_fun_n = df_byte_lr->problem->con_fun_n;
      bitmap_clear (DF_BYTE_LR_OUT (bb));
      FOR_EACH_EDGE (e, ei, bb->succs)
	(*con_fun_n) (e);
    }

  if (dump_file)
    {
      fprintf (dump_file, "processing block %d live out = ", bb->index);
      df_print_byte_regset (dump_file, DF_BYTE_LR_OUT (bb));
    }

  /* Simulate liveness backwards from the block's live-out set.  */
  bitmap_copy (local_live, DF_BYTE_LR_OUT (bb));

  df_byte_lr_simulate_artificial_refs_at_end (bb, local_live);

  FOR_BB_INSNS_REVERSE (bb, insn)
    if (INSN_P (insn))
      {
	/* The insn is needed if there is someone who uses the output.  */
	for (def_rec = DF_INSN_DEFS (insn); *def_rec; def_rec++)
	  {
	    df_ref def = *def_rec;
	    unsigned int last;
	    unsigned int dregno = DF_REF_REGNO (def);
	    unsigned int start = df_byte_lr_get_regno_start (dregno);
	    unsigned int len = df_byte_lr_get_regno_len (dregno);

	    unsigned int sb;
	    unsigned int lb;
	    /* This is one of the only places where DF_MM_MAY should
	       be used for defs.  Need to make sure that we are
	       checking for all of the bits that may be used.  */

	    if (!df_compute_accessed_bytes (def, DF_MM_MAY, &sb, &lb))
	      {
		start += sb;
		len = lb - sb;
	      }

	    /* A def of an artificially-used register is always
	       needed.  */
	    if (bitmap_bit_p (au, dregno))
	      {
		mark_insn (insn, true);
		goto quickexit;
	      }

	    /* Needed if any byte of the defined range is live.  */
	    last = start + len;
	    while (start < last)
	      if (bitmap_bit_p (local_live, start++))
		{
		  mark_insn (insn, true);
		  goto quickexit;
		}
	  }

      quickexit:

	/* No matter if the instruction is needed or not, we remove
	   any regno in the defs from the live set.  */
	df_byte_lr_simulate_defs (insn, local_live);

	/* On the other hand, we do not allow the dead uses to set
	   anything in local_live.  */
	if (marked_insn_p (insn))
	  df_byte_lr_simulate_uses (insn, local_live);

	if (dump_file)
	  {
	    fprintf (dump_file, "finished processing insn %d live out = ",
		     INSN_UID (insn));
	    df_print_byte_regset (dump_file, local_live);
	  }
      }

  df_byte_lr_simulate_artificial_refs_at_top (bb, local_live);

  /* Report whether the computed live-in set changed, so the caller
     can iterate the dataflow solution.  */
  block_changed = !bitmap_equal_p (local_live, DF_BYTE_LR_IN (bb));
  if (block_changed)
    bitmap_copy (DF_BYTE_LR_IN (bb), local_live);
  BITMAP_FREE (local_live);
  return block_changed;
}
845 
846 
/* Process basic block BB.  Return true if the live_in set has
   changed. REDO_OUT is true if the info at the bottom of the block
   needs to be recalculated before starting.  AU is the proper set of
   artificial uses. */

static bool
dce_process_block (basic_block bb, bool redo_out, bitmap au)
{
  bitmap local_live = BITMAP_ALLOC (&dce_tmp_bitmap_obstack);
  rtx insn;
  bool block_changed;
  df_ref *def_rec;

  if (redo_out)
    {
      /* Need to redo the live_out set of this block if when one of
	 the succs of this block has had a change in it live in
	 set.  */
      edge e;
      edge_iterator ei;
      df_confluence_function_n con_fun_n = df_lr->problem->con_fun_n;
      bitmap_clear (DF_LR_OUT (bb));
      FOR_EACH_EDGE (e, ei, bb->succs)
	(*con_fun_n) (e);
    }

  if (dump_file)
    {
      fprintf (dump_file, "processing block %d lr out = ", bb->index);
      df_print_regset (dump_file, DF_LR_OUT (bb));
    }

  /* Simulate liveness backwards from the block's live-out set.  */
  bitmap_copy (local_live, DF_LR_OUT (bb));

  df_simulate_initialize_backwards (bb, local_live);

  FOR_BB_INSNS_REVERSE (bb, insn)
    if (INSN_P (insn))
      {
	bool needed = false;

	/* The insn is needed if there is someone who uses the output.  */
	for (def_rec = DF_INSN_DEFS (insn); *def_rec; def_rec++)
	  if (bitmap_bit_p (local_live, DF_REF_REGNO (*def_rec))
	      || bitmap_bit_p (au, DF_REF_REGNO (*def_rec)))
	    {
	      needed = true;
	      break;
	    }

	if (needed)
	  mark_insn (insn, true);

	/* No matter if the instruction is needed or not, we remove
	   any regno in the defs from the live set.  */
	df_simulate_defs (insn, local_live);

	/* On the other hand, we do not allow the dead uses to set
	   anything in local_live.  */
	if (marked_insn_p (insn))
	  df_simulate_uses (insn, local_live);
      }

  df_simulate_finalize_backwards (bb, local_live);

  /* Report whether the computed live-in set changed, so the caller
     can iterate the dataflow solution.  */
  block_changed = !bitmap_equal_p (local_live, DF_LR_IN (bb));
  if (block_changed)
    bitmap_copy (DF_LR_IN (bb), local_live);

  BITMAP_FREE (local_live);
  return block_changed;
}
919 
920 
/* Perform fast DCE once initialization is done.  If BYTE_LEVEL is
   true, use the byte level dce, otherwise do it at the pseudo
   level.  */

static void
fast_dce (bool byte_level)
{
  int *postorder = df_get_postorder (DF_BACKWARD);
  int n_blocks = df_get_n_blocks (DF_BACKWARD);
  /* The set of blocks that have been seen on this iteration.  */
  bitmap processed = BITMAP_ALLOC (&dce_blocks_bitmap_obstack);
  /* The set of blocks that need to have the out vectors reset because
     the in of one of their successors has changed.  */
  bitmap redo_out = BITMAP_ALLOC (&dce_blocks_bitmap_obstack);
  bitmap all_blocks = BITMAP_ALLOC (&dce_blocks_bitmap_obstack);
  bool global_changed = true;

  /* These regs are considered always live so if they end up dying
     because of some def, we need to bring the back again.  Calling
     df_simulate_fixup_sets has the disadvantage of calling
     bb_has_eh_pred once per insn, so we cache the information
     here.  */
  bitmap au = df->regular_block_artificial_uses;
  bitmap au_eh = df->eh_block_artificial_uses;
  int i;

  prescan_insns_for_dce (true);

  for (i = 0; i < n_blocks; i++)
    bitmap_set_bit (all_blocks, postorder[i]);

  /* Iterate to a fixed point: re-run the per-block analysis until no
     block's live-in set changes any more.  */
  while (global_changed)
    {
      global_changed = false;

      for (i = 0; i < n_blocks; i++)
	{
	  int index = postorder[i];
	  basic_block bb = BASIC_BLOCK (index);
	  bool local_changed;

	  /* Entry/exit blocks are never processed.  */
	  if (index < NUM_FIXED_BLOCKS)
	    {
	      bitmap_set_bit (processed, index);
	      continue;
	    }

	  if (byte_level)
	    local_changed
	      = byte_dce_process_block (bb, bitmap_bit_p (redo_out, index),
					  bb_has_eh_pred (bb) ? au_eh : au);
	  else
	    local_changed
	      = dce_process_block (bb, bitmap_bit_p (redo_out, index),
				   bb_has_eh_pred (bb) ? au_eh : au);
	  bitmap_set_bit (processed, index);

	  if (local_changed)
	    {
	      edge e;
	      edge_iterator ei;
	      FOR_EACH_EDGE (e, ei, bb->preds)
		if (bitmap_bit_p (processed, e->src->index))
		  /* Be tricky about when we need to iterate the
		     analysis.  We only have redo the analysis if the
		     bitmaps change at the top of a block that is the
		     entry to a loop.  */
		  global_changed = true;
		else
		  bitmap_set_bit (redo_out, e->src->index);
	    }
	}

      if (global_changed)
	{
	  /* Turn off the RUN_DCE flag to prevent recursive calls to
	     dce.  */
	  int old_flag = df_clear_flags (DF_LR_RUN_DCE);

	  /* So something was deleted that requires a redo.  Do it on
	     the cheap.  */
	  delete_unmarked_insns ();
	  sbitmap_zero (marked);
	  bitmap_clear (processed);
	  bitmap_clear (redo_out);

	  /* We do not need to rescan any instructions.  We only need
	     to redo the dataflow equations for the blocks that had a
	     change at the top of the block.  Then we need to redo the
	     iteration.  */
	  if (byte_level)
	    df_analyze_problem (df_byte_lr, all_blocks, postorder, n_blocks);
	  else
	    df_analyze_problem (df_lr, all_blocks, postorder, n_blocks);

	  if (old_flag & DF_LR_RUN_DCE)
	    df_set_flags (DF_LR_RUN_DCE);

	  prescan_insns_for_dce (true);
	}
    }

  delete_unmarked_insns ();

  BITMAP_FREE (processed);
  BITMAP_FREE (redo_out);
  BITMAP_FREE (all_blocks);
}
1029 
1030 
/* Fast register level DCE.  */

static unsigned int
rest_of_handle_fast_dce (void)
{
  init_dce (true);
  /* false: operate at the register level, not the byte level.  */
  fast_dce (false);
  fini_dce (true);
  return 0;
}
1041 
1042 
/* Fast byte level DCE.  */

static unsigned int
rest_of_handle_fast_byte_dce (void)
{
  /* The byte-level algorithm needs the byte-wise live register
     problem to have been computed.  */
  df_byte_lr_add_problem ();
  init_dce (true);
  fast_dce (true);
  fini_dce (true);
  return 0;
}
1054 
1055 
1056 /* This is an internal call that is used by the df live register
1057    problem to run fast dce as a side effect of creating the live
1058    information.  The stack is organized so that the lr problem is run,
1059    this pass is run, which updates the live info and the df scanning
1060    info, and then returns to allow the rest of the problems to be run.
1061 
1062    This can be called by elsewhere but it will not update the bit
1063    vectors for any other problems than LR.  */
1064 
1065 void
1066 run_fast_df_dce (void)
1067 {
1068   if (flag_dce)
1069     {
1070       /* If dce is able to delete something, it has to happen
1071 	 immediately.  Otherwise there will be problems handling the
1072 	 eq_notes.  */
1073       int old_flags =
1074 	df_clear_flags (DF_DEFER_INSN_RESCAN + DF_NO_INSN_RESCAN);
1075 
1076       df_in_progress = true;
1077       rest_of_handle_fast_dce ();
1078       df_in_progress = false;
1079 
1080       df_set_flags (old_flags);
1081     }
1082 }
1083 
1084 
1085 /* Run a fast DCE pass.  */
1086 
1087 void
1088 run_fast_dce (void)
1089 {
1090   if (flag_dce)
1091     rest_of_handle_fast_dce ();
1092 }
1093 
1094 
1095 static bool
1096 gate_fast_dce (void)
1097 {
1098   return optimize > 0 && flag_dce
1099     && dbg_cnt (dce_fast);
1100 }
1101 
/* Pass descriptor for the fast register-level DCE pass.  */
struct rtl_opt_pass pass_fast_rtl_dce =
{
 {
  RTL_PASS,
  "rtl dce",                            /* name */
  gate_fast_dce,                        /* gate */
  rest_of_handle_fast_dce,              /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_DCE,                               /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func |
  TODO_df_finish | TODO_verify_rtl_sharing |
  TODO_ggc_collect                      /* todo_flags_finish */
 }
};
1122 
/* Pass descriptor for the fast byte-level DCE pass.  */
struct rtl_opt_pass pass_fast_rtl_byte_dce =
{
 {
  RTL_PASS,
  "byte-dce",                           /* name */
  gate_fast_dce,                        /* gate */
  rest_of_handle_fast_byte_dce,         /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_DCE,                               /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func |
  TODO_df_finish | TODO_verify_rtl_sharing |
  TODO_ggc_collect                      /* todo_flags_finish */
 }
};
1143