/* RTL dead code elimination.
   Copyright (C) 2005-2020 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "rtl.h"
#include "tree.h"
#include "predict.h"
#include "df.h"
#include "memmodel.h"
#include "tm_p.h"
#include "emit-rtl.h"  /* FIXME: Can go away once crtl is moved to rtl.h.  */
#include "cfgrtl.h"
#include "cfgbuild.h"
#include "cfgcleanup.h"
#include "dce.h"
#include "valtrack.h"
#include "tree-pass.h"
#include "dbgcnt.h"
#include "rtl-iter.h"


/* -------------------------------------------------------------------------
   Core mark/delete routines
   ------------------------------------------------------------------------- */

/* True if we are invoked while the df engine is running; in this case,
   we don't want to reenter it.  */
static bool df_in_progress = false;

/* True if we are allowed to alter the CFG in this pass.  */
static bool can_alter_cfg = false;

/* Instructions that have been marked but whose dependencies have not
   yet been processed.  */
static vec<rtx_insn *> worklist;

/* Bitmap of instructions marked as needed indexed by INSN_UID.  */
static sbitmap marked;

/* Bitmap obstacks used for block processing by the fast algorithm.  */
static bitmap_obstack dce_blocks_bitmap_obstack;
static bitmap_obstack dce_tmp_bitmap_obstack;

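/* Forward declaration; deletable_insn_p and mark_insn below call this for
   const/pure calls whose argument stores may also be deletable.  */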
static bool find_call_stack_args (rtx_call_insn *, bool, bool, bitmap);

/* A subroutine for which BODY is part of the instruction being tested;
   either the top-level pattern, or an element of a PARALLEL.  The
   instruction is known not to be a bare USE or CLOBBER.  */

static bool
deletable_insn_p_1 (rtx body)
{
  switch (GET_CODE (body))
    {
    case PREFETCH:
    case TRAP_IF:
      /* The UNSPEC case was added here because the ia-64 claims that
         USEs do not work after reload and generates UNSPECS rather
         than USEs.  Since dce is run after reload we need to avoid
         deleting these even if they are dead.  If it turns out that
         USEs really do work after reload, the ia-64 should be
         changed, and the UNSPEC case can be removed.  */
    case UNSPEC:
      return false;

    default:
      return !volatile_refs_p (body);
    }
}

/* Don't delete calls that may throw if we cannot do so.  */

static bool
can_delete_call (rtx_insn *insn)
{
  if (cfun->can_delete_dead_exceptions && can_alter_cfg)
    return true;
  if (!insn_nothrow_p (insn))
    return false;
  if (can_alter_cfg)
    return true;
  /* If we can't alter cfg, even when the call can't throw exceptions, it
     might have EDGE_ABNORMAL_CALL edges and so we shouldn't delete such
     calls.  */
  gcc_assert (CALL_P (insn));
  if (BLOCK_FOR_INSN (insn) && BB_END (BLOCK_FOR_INSN (insn)) == insn)
    {
      edge e;
      edge_iterator ei;

      FOR_EACH_EDGE (e, ei, BLOCK_FOR_INSN (insn)->succs)
        if ((e->flags & EDGE_ABNORMAL_CALL) != 0)
          return false;
    }
  return true;
}

/* Return true if INSN is a normal instruction that can be deleted by
   the DCE pass.  */

static bool
deletable_insn_p (rtx_insn *insn, bool fast, bitmap arg_stores)
{
  rtx body, x;
  int i;
  df_ref def;

  if (CALL_P (insn)
      /* We cannot delete calls inside of the recursive dce because
         this may cause basic blocks to be deleted and this messes up
         the rest of the stack of optimization passes.  */
      && (!df_in_progress)
      /* We cannot delete pure or const sibling calls because it is
         hard to see the result.  */
      && (!SIBLING_CALL_P (insn))
      /* We can delete dead const or pure calls as long as they do not
         infinite loop.  */
      && (RTL_CONST_OR_PURE_CALL_P (insn)
          && !RTL_LOOPING_CONST_OR_PURE_CALL_P (insn))
      /* Don't delete calls that may throw if we cannot do so.  */
      && can_delete_call (insn))
    return find_call_stack_args (as_a <rtx_call_insn *> (insn), false,
                                 fast, arg_stores);

  /* Don't delete jumps, notes and the like.  */
  if (!NONJUMP_INSN_P (insn))
    return false;

  /* Don't delete insns that may throw if we cannot do so.  */
  if (!(cfun->can_delete_dead_exceptions && can_alter_cfg)
      && !insn_nothrow_p (insn))
    return false;

  /* If INSN sets a global_reg, leave it untouched.  */
  FOR_EACH_INSN_DEF (def, insn)
    if (HARD_REGISTER_NUM_P (DF_REF_REGNO (def))
        && global_regs[DF_REF_REGNO (def)])
      return false;
    /* Initialization of pseudo PIC register should never be removed.  */
    else if (DF_REF_REG (def) == pic_offset_table_rtx
             && REGNO (pic_offset_table_rtx) >= FIRST_PSEUDO_REGISTER)
      return false;

  /* Callee-save restores are needed.  */
  if (RTX_FRAME_RELATED_P (insn)
      && crtl->shrink_wrapped_separate
      && find_reg_note (insn, REG_CFA_RESTORE, NULL))
    return false;

  body = PATTERN (insn);
  switch (GET_CODE (body))
    {
    case USE:
    case VAR_LOCATION:
      return false;

    case CLOBBER:
      if (fast)
        {
          /* A CLOBBER of a dead pseudo register serves no purpose.
             That is not necessarily true for hard registers until
             after reload.  */
          x = XEXP (body, 0);
          return REG_P (x) && (!HARD_REGISTER_P (x) || reload_completed);
        }
      else
        /* Because of the way that use-def chains are built, it is not
           possible to tell if the clobber is dead because it can
           never be the target of a use-def chain.  */
        return false;

    case PARALLEL:
      for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
        if (!deletable_insn_p_1 (XVECEXP (body, 0, i)))
          return false;
      return true;

    default:
      return deletable_insn_p_1 (body);
    }
}


/* Return true if INSN has been marked as needed.  */

static inline int
marked_insn_p (rtx_insn *insn)
{
  /* Artificial defs are always needed and they do not have an insn.
     We should never see them here.  */
  gcc_assert (insn);
  return bitmap_bit_p (marked, INSN_UID (insn));
}


/* If INSN has not yet been marked as needed, mark it now, and add it to
   the worklist.  */

static void
mark_insn (rtx_insn *insn, bool fast)
{
  if (!marked_insn_p (insn))
    {
      if (!fast)
        worklist.safe_push (insn);
      bitmap_set_bit (marked, INSN_UID (insn));
      if (dump_file)
        fprintf (dump_file, "  Adding insn %d to worklist\n", INSN_UID (insn));
      if (CALL_P (insn)
          && !df_in_progress
          && !SIBLING_CALL_P (insn)
          && (RTL_CONST_OR_PURE_CALL_P (insn)
              && !RTL_LOOPING_CONST_OR_PURE_CALL_P (insn))
          && can_delete_call (insn))
        find_call_stack_args (as_a <rtx_call_insn *> (insn), true, fast, NULL);
    }
}


/* A note_stores callback used by mark_nonreg_stores.  DATA is the
   instruction containing DEST.  */

static void
mark_nonreg_stores_1 (rtx dest, const_rtx pattern, void *data)
{
  if (GET_CODE (pattern) != CLOBBER && !REG_P (dest))
    mark_insn ((rtx_insn *) data, true);
}


/* A note_stores callback used by mark_nonreg_stores.  DATA is the
   instruction containing DEST.  */

static void
mark_nonreg_stores_2 (rtx dest, const_rtx pattern, void *data)
{
  if (GET_CODE (pattern) != CLOBBER && !REG_P (dest))
    mark_insn ((rtx_insn *) data, false);
}


/* Mark INSN if it stores to a non-register destination.  */

static void
mark_nonreg_stores (rtx_insn *insn, bool fast)
{
  if (fast)
    note_stores (insn, mark_nonreg_stores_1, insn);
  else
    note_stores (insn, mark_nonreg_stores_2, insn);
}


/* Return true if a store of SIZE bytes, starting OFF bytes from the stack
   pointer, is a call argument store, and clear the corresponding bits from
   the SP_BYTES bitmap if it is.  */
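/* For example, a 4-byte store at OFF = 8 is only accepted while bytes
   8..11 are still set in SP_BYTES, i.e. no other store has claimed them.  */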

static bool
check_argument_store (HOST_WIDE_INT size, HOST_WIDE_INT off,
                      HOST_WIDE_INT min_sp_off, HOST_WIDE_INT max_sp_off,
                      bitmap sp_bytes)
{
  HOST_WIDE_INT byte;
  for (byte = off; byte < off + size; byte++)
    {
      if (byte < min_sp_off
          || byte >= max_sp_off
          || !bitmap_clear_bit (sp_bytes, byte - min_sp_off))
        return false;
    }
  return true;
}

/* If MEM has an sp address, return 0; if it has an sp + const address,
   return that const; if it has a reg address where the reg is set to
   sp + const and FAST is false, return that const; otherwise return
   INTTYPE_MINIMUM (HOST_WIDE_INT).  */
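/* For example, (mem:SI (plus:P (reg sp) (const_int 16))) yields 16 and
   (mem:SI (reg sp)) yields 0.  */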

static HOST_WIDE_INT
sp_based_mem_offset (rtx_call_insn *call_insn, const_rtx mem, bool fast)
{
  HOST_WIDE_INT off = 0;
  rtx addr = XEXP (mem, 0);
  if (GET_CODE (addr) == PLUS
      && REG_P (XEXP (addr, 0))
      && CONST_INT_P (XEXP (addr, 1)))
    {
      off = INTVAL (XEXP (addr, 1));
      addr = XEXP (addr, 0);
    }
  if (addr == stack_pointer_rtx)
    return off;

  if (!REG_P (addr) || fast)
    return INTTYPE_MINIMUM (HOST_WIDE_INT);

  /* If not fast, use chains to see if addr wasn't set to sp + offset.  */
  df_ref use;
  FOR_EACH_INSN_USE (use, call_insn)
    if (rtx_equal_p (addr, DF_REF_REG (use)))
      break;

  if (use == NULL)
    return INTTYPE_MINIMUM (HOST_WIDE_INT);

  struct df_link *defs;
  for (defs = DF_REF_CHAIN (use); defs; defs = defs->next)
    if (! DF_REF_IS_ARTIFICIAL (defs->ref))
      break;

  if (defs == NULL)
    return INTTYPE_MINIMUM (HOST_WIDE_INT);

  rtx set = single_set (DF_REF_INSN (defs->ref));
  if (!set)
    return INTTYPE_MINIMUM (HOST_WIDE_INT);

  if (GET_CODE (SET_SRC (set)) != PLUS
      || XEXP (SET_SRC (set), 0) != stack_pointer_rtx
      || !CONST_INT_P (XEXP (SET_SRC (set), 1)))
    return INTTYPE_MINIMUM (HOST_WIDE_INT);

  off += INTVAL (XEXP (SET_SRC (set), 1));
  return off;
}

/* Data for check_argument_load called via note_uses.  */
struct check_argument_load_data {
  bitmap sp_bytes;
  HOST_WIDE_INT min_sp_off, max_sp_off;
  rtx_call_insn *call_insn;
  bool fast;
  bool load_found;
};

/* Helper function for find_call_stack_args.  Check if there are
   any loads from the argument slots in between the const/pure call
   and the store to the argument slot; set LOAD_FOUND if any is found.  */

static void
check_argument_load (rtx *loc, void *data)
{
  struct check_argument_load_data *d
    = (struct check_argument_load_data *) data;
  subrtx_iterator::array_type array;
  FOR_EACH_SUBRTX (iter, array, *loc, NONCONST)
    {
      const_rtx mem = *iter;
      HOST_WIDE_INT size;
      if (MEM_P (mem)
          && MEM_SIZE_KNOWN_P (mem)
          && MEM_SIZE (mem).is_constant (&size))
        {
          HOST_WIDE_INT off = sp_based_mem_offset (d->call_insn, mem, d->fast);
          if (off != INTTYPE_MINIMUM (HOST_WIDE_INT)
              && off < d->max_sp_off
              && off + size > d->min_sp_off)
            for (HOST_WIDE_INT byte = MAX (off, d->min_sp_off);
                 byte < MIN (off + size, d->max_sp_off); byte++)
              if (bitmap_bit_p (d->sp_bytes, byte - d->min_sp_off))
                {
                  d->load_found = true;
                  return;
                }
        }
    }
}

/* Try to find all stack stores of CALL_INSN arguments if
   ACCUMULATE_OUTGOING_ARGS.  If all stack stores have been found
   and it is therefore safe to eliminate the call, return true,
   otherwise return false.  This function should first be called
   with DO_MARK false, and called again with DO_MARK true only when
   the CALL_INSN is actually going to be marked.  */

static bool
find_call_stack_args (rtx_call_insn *call_insn, bool do_mark, bool fast,
                      bitmap arg_stores)
{
  rtx p;
  rtx_insn *insn, *prev_insn;
  bool ret;
  HOST_WIDE_INT min_sp_off, max_sp_off;
  bitmap sp_bytes;

  gcc_assert (CALL_P (call_insn));
  if (!ACCUMULATE_OUTGOING_ARGS)
    return true;

  if (!do_mark)
    {
      gcc_assert (arg_stores);
      bitmap_clear (arg_stores);
    }

  min_sp_off = INTTYPE_MAXIMUM (HOST_WIDE_INT);
  max_sp_off = 0;

  /* First determine the minimum and maximum offset from sp for
     stored arguments.  */
  for (p = CALL_INSN_FUNCTION_USAGE (call_insn); p; p = XEXP (p, 1))
    if (GET_CODE (XEXP (p, 0)) == USE
        && MEM_P (XEXP (XEXP (p, 0), 0)))
      {
        rtx mem = XEXP (XEXP (p, 0), 0);
        HOST_WIDE_INT size;
        if (!MEM_SIZE_KNOWN_P (mem) || !MEM_SIZE (mem).is_constant (&size))
          return false;
        HOST_WIDE_INT off = sp_based_mem_offset (call_insn, mem, fast);
        if (off == INTTYPE_MINIMUM (HOST_WIDE_INT))
          return false;
        min_sp_off = MIN (min_sp_off, off);
        max_sp_off = MAX (max_sp_off, off + size);
      }

  if (min_sp_off >= max_sp_off)
    return true;
  sp_bytes = BITMAP_ALLOC (NULL);

  /* Set bits in SP_BYTES bitmap for bytes relative to sp + min_sp_off
     which contain arguments.  Checking has been done in the previous
     loop.  */
  for (p = CALL_INSN_FUNCTION_USAGE (call_insn); p; p = XEXP (p, 1))
    if (GET_CODE (XEXP (p, 0)) == USE
        && MEM_P (XEXP (XEXP (p, 0), 0)))
      {
        rtx mem = XEXP (XEXP (p, 0), 0);
        /* Checked in the previous iteration.  */
        HOST_WIDE_INT size = MEM_SIZE (mem).to_constant ();
        HOST_WIDE_INT off = sp_based_mem_offset (call_insn, mem, fast);
        gcc_checking_assert (off != INTTYPE_MINIMUM (HOST_WIDE_INT));
        for (HOST_WIDE_INT byte = off; byte < off + size; byte++)
          if (!bitmap_set_bit (sp_bytes, byte - min_sp_off))
            gcc_unreachable ();
      }

  /* Walk backwards, looking for argument stores.  The search stops
     when seeing another call, sp adjustment, memory store other than
     argument store or a read from an argument stack slot.  */
  struct check_argument_load_data data
    = { sp_bytes, min_sp_off, max_sp_off, call_insn, fast, false };
  ret = false;
  for (insn = PREV_INSN (call_insn); insn; insn = prev_insn)
    {
      if (insn == BB_HEAD (BLOCK_FOR_INSN (call_insn)))
        prev_insn = NULL;
      else
        prev_insn = PREV_INSN (insn);

      if (CALL_P (insn))
        break;

      if (!NONDEBUG_INSN_P (insn))
        continue;

      rtx set = single_set (insn);
      if (!set || SET_DEST (set) == stack_pointer_rtx)
        break;

      note_uses (&PATTERN (insn), check_argument_load, &data);
      if (data.load_found)
        break;

      if (!MEM_P (SET_DEST (set)))
        continue;

      rtx mem = SET_DEST (set);
      HOST_WIDE_INT off = sp_based_mem_offset (call_insn, mem, fast);
      if (off == INTTYPE_MINIMUM (HOST_WIDE_INT))
        break;

      HOST_WIDE_INT size;
      if (!MEM_SIZE_KNOWN_P (mem)
          || !MEM_SIZE (mem).is_constant (&size)
          || !check_argument_store (size, off, min_sp_off,
                                    max_sp_off, sp_bytes))
        break;

      if (!deletable_insn_p (insn, fast, NULL))
        break;

      if (do_mark)
        mark_insn (insn, fast);
      else
        bitmap_set_bit (arg_stores, INSN_UID (insn));

      if (bitmap_empty_p (sp_bytes))
        {
          ret = true;
          break;
        }
    }

  BITMAP_FREE (sp_bytes);
  if (!ret && arg_stores)
    bitmap_clear (arg_stores);

  return ret;
}


/* Remove all REG_EQUAL and REG_EQUIV notes referring to the registers INSN
   writes to.  */

static void
remove_reg_equal_equiv_notes_for_defs (rtx_insn *insn)
{
  df_ref def;

  FOR_EACH_INSN_DEF (def, insn)
    remove_reg_equal_equiv_notes_for_regno (DF_REF_REGNO (def));
}

/* Scan all BBs for debug insns and reset those that reference values
   defined in unmarked insns.  */

static void
reset_unmarked_insns_debug_uses (void)
{
  basic_block bb;
  rtx_insn *insn, *next;

  FOR_EACH_BB_REVERSE_FN (bb, cfun)
    FOR_BB_INSNS_REVERSE_SAFE (bb, insn, next)
      if (DEBUG_INSN_P (insn))
        {
          df_ref use;

          FOR_EACH_INSN_USE (use, insn)
            {
              struct df_link *defs;
              for (defs = DF_REF_CHAIN (use); defs; defs = defs->next)
                {
                  rtx_insn *ref_insn;
                  if (DF_REF_IS_ARTIFICIAL (defs->ref))
                    continue;
                  ref_insn = DF_REF_INSN (defs->ref);
                  if (!marked_insn_p (ref_insn))
                    break;
                }
              if (!defs)
                continue;
              /* ??? FIXME could we propagate the values assigned to
                 each of the DEFs?  */
              INSN_VAR_LOCATION_LOC (insn) = gen_rtx_UNKNOWN_VAR_LOC ();
              df_insn_rescan_debug_internal (insn);
              break;
            }
        }
}

/* Delete every instruction that hasn't been marked.  */

static void
delete_unmarked_insns (void)
{
  basic_block bb;
  rtx_insn *insn, *next;
  bool must_clean = false;

  FOR_EACH_BB_REVERSE_FN (bb, cfun)
    FOR_BB_INSNS_REVERSE_SAFE (bb, insn, next)
      if (NONDEBUG_INSN_P (insn))
        {
          rtx turn_into_use = NULL_RTX;

          /* Always delete no-op moves.  */
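          /* (A typical no-op move is a set of a register to itself left
             behind by earlier passes.)  */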
          if (noop_move_p (insn)
              /* Unless the no-op move can throw and we are not allowed
                 to alter cfg.  */
              && (!cfun->can_throw_non_call_exceptions
                  || (cfun->can_delete_dead_exceptions && can_alter_cfg)
                  || insn_nothrow_p (insn)))
            {
              if (RTX_FRAME_RELATED_P (insn))
                turn_into_use
                  = find_reg_note (insn, REG_CFA_RESTORE, NULL);
              if (turn_into_use && REG_P (XEXP (turn_into_use, 0)))
                turn_into_use = XEXP (turn_into_use, 0);
              else
                turn_into_use = NULL_RTX;
            }

          /* Otherwise rely only on the DCE algorithm.  */
          else if (marked_insn_p (insn))
            continue;

          /* Beware that reaching a dbg counter limit here can result
             in a miscompiled file.  This occurs when a group of insns
             must be deleted together, typically because the kept insn
             depends on the output from the deleted insn.  Deleting
             these insns in reverse order (both at the bb level and
             when looking at the blocks) minimizes this, but does not
             eliminate it, since it is possible for the using insn to
             be at the top of a block and the producer to be at the
             bottom of the block.  However, in most cases this will only
             result in an uninitialized use of an insn that is dead anyway.

             However, there is one rare case that will cause a
             miscompile: deletion of non-looping pure and constant
             calls on a machine where ACCUMULATE_OUTGOING_ARGS is true.
             In this case it is possible to remove the call, but leave
             the argument pushes to the stack.  Because of the changes
             to the stack pointer, this will almost always lead to a
             miscompile.  */
          if (!dbg_cnt (dce))
            continue;

          if (dump_file)
            fprintf (dump_file, "DCE: Deleting insn %d\n", INSN_UID (insn));

          /* Before we delete the insn we have to remove the REG_EQUAL notes
             for the destination regs in order to avoid dangling notes.  */
          remove_reg_equal_equiv_notes_for_defs (insn);

          if (turn_into_use)
            {
              /* Don't remove frame related noop moves if they carry a
                 REG_CFA_RESTORE note: while we don't need to emit any code,
                 we need the insn to emit the CFI restore note.  */
              PATTERN (insn)
                = gen_rtx_USE (GET_MODE (turn_into_use), turn_into_use);
              INSN_CODE (insn) = -1;
              df_insn_rescan (insn);
            }
          else
            /* Now delete the insn.  */
            must_clean |= delete_insn_and_edges (insn);
        }

  /* Deleted a pure or const call.  */
  if (must_clean)
    {
      gcc_assert (can_alter_cfg);
      delete_unreachable_blocks ();
      free_dominance_info (CDI_DOMINATORS);
    }
}


/* Go through the instructions and mark those whose necessity is not
   dependent on inter-instruction information.  Make sure all other
   instructions are not marked.  */

static void
prescan_insns_for_dce (bool fast)
{
  basic_block bb;
  rtx_insn *insn, *prev;
  bitmap arg_stores = NULL;

  if (dump_file)
    fprintf (dump_file, "Finding needed instructions:\n");

  if (!df_in_progress && ACCUMULATE_OUTGOING_ARGS)
    arg_stores = BITMAP_ALLOC (NULL);

  FOR_EACH_BB_FN (bb, cfun)
    {
      FOR_BB_INSNS_REVERSE_SAFE (bb, insn, prev)
        if (NONDEBUG_INSN_P (insn))
          {
            /* Don't mark argument stores now.  They will be marked
               if needed when the associated CALL is marked.  */
            if (arg_stores && bitmap_bit_p (arg_stores, INSN_UID (insn)))
              continue;
            if (deletable_insn_p (insn, fast, arg_stores))
              mark_nonreg_stores (insn, fast);
            else
              mark_insn (insn, fast);
          }
      /* find_call_stack_args only looks at argument stores in the
         same bb.  */
      if (arg_stores)
        bitmap_clear (arg_stores);
    }

  if (arg_stores)
    BITMAP_FREE (arg_stores);

  if (dump_file)
    fprintf (dump_file, "Finished finding needed instructions:\n");
}


/* UD-chain based DCE routines.  */

/* Mark instructions that define artificially-used registers, such as
   the frame pointer and the stack pointer.  */

static void
mark_artificial_uses (void)
{
  basic_block bb;
  struct df_link *defs;
  df_ref use;

  FOR_ALL_BB_FN (bb, cfun)
    FOR_EACH_ARTIFICIAL_USE (use, bb->index)
      for (defs = DF_REF_CHAIN (use); defs; defs = defs->next)
        if (!DF_REF_IS_ARTIFICIAL (defs->ref))
          mark_insn (DF_REF_INSN (defs->ref), false);
}


/* Mark every instruction that defines a register value that INSN uses.  */

static void
mark_reg_dependencies (rtx_insn *insn)
{
  struct df_link *defs;
  df_ref use;

  if (DEBUG_INSN_P (insn))
    return;

  FOR_EACH_INSN_USE (use, insn)
    {
      if (dump_file)
        {
          fprintf (dump_file, "Processing use of ");
          print_simple_rtl (dump_file, DF_REF_REG (use));
          fprintf (dump_file, " in insn %d:\n", INSN_UID (insn));
        }
      for (defs = DF_REF_CHAIN (use); defs; defs = defs->next)
        if (! DF_REF_IS_ARTIFICIAL (defs->ref))
          mark_insn (DF_REF_INSN (defs->ref), false);
    }
}


/* Initialize global variables for a new DCE pass.  */

static void
init_dce (bool fast)
{
  if (!df_in_progress)
    {
      if (!fast)
        {
          df_set_flags (DF_RD_PRUNE_DEAD_DEFS);
          df_chain_add_problem (DF_UD_CHAIN);
        }
      df_analyze ();
    }

  if (dump_file)
    df_dump (dump_file);

  if (fast)
    {
      bitmap_obstack_initialize (&dce_blocks_bitmap_obstack);
      bitmap_obstack_initialize (&dce_tmp_bitmap_obstack);
      can_alter_cfg = false;
    }
  else
    can_alter_cfg = true;

  marked = sbitmap_alloc (get_max_uid () + 1);
  bitmap_clear (marked);
}


/* Free the data allocated by init_dce.  */

static void
fini_dce (bool fast)
{
  sbitmap_free (marked);

  if (fast)
    {
      bitmap_obstack_release (&dce_blocks_bitmap_obstack);
      bitmap_obstack_release (&dce_tmp_bitmap_obstack);
    }
}


/* UD-chain based DCE.  */

static unsigned int
rest_of_handle_ud_dce (void)
{
  rtx_insn *insn;

  init_dce (false);

  prescan_insns_for_dce (false);
  mark_artificial_uses ();
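  /* Propagate the marks through the use-def chains: any insn that defines
     a register used by an already-marked insn is needed as well.  */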
  while (worklist.length () > 0)
    {
      insn = worklist.pop ();
      mark_reg_dependencies (insn);
    }
  worklist.release ();

  if (MAY_HAVE_DEBUG_BIND_INSNS)
    reset_unmarked_insns_debug_uses ();

  /* Before any insns are deleted, we must remove the chains since
     they are not bidirectional.  */
  df_remove_problem (df_chain);
  delete_unmarked_insns ();

  fini_dce (false);
  return 0;
}


namespace {

const pass_data pass_data_ud_rtl_dce =
{
  RTL_PASS, /* type */
  "ud_dce", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_DCE, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_df_finish, /* todo_flags_finish */
};

class pass_ud_rtl_dce : public rtl_opt_pass
{
public:
  pass_ud_rtl_dce (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_ud_rtl_dce, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *)
    {
      return optimize > 1 && flag_dce && dbg_cnt (dce_ud);
    }

  virtual unsigned int execute (function *)
    {
      return rest_of_handle_ud_dce ();
    }

}; // class pass_ud_rtl_dce

} // anon namespace

rtl_opt_pass *
make_pass_ud_rtl_dce (gcc::context *ctxt)
{
  return new pass_ud_rtl_dce (ctxt);
}


/* -------------------------------------------------------------------------
   Fast DCE functions
   ------------------------------------------------------------------------- */

/* Process basic block BB.  Return true if the live_in set has
   changed.  REDO_OUT is true if the info at the bottom of the block
   needs to be recalculated before starting.  Track global substitution
   of uses of dead pseudos in debug insns using GLOBAL_DEBUG.  */

static bool
word_dce_process_block (basic_block bb, bool redo_out,
                        struct dead_debug_global *global_debug)
{
  bitmap local_live = BITMAP_ALLOC (&dce_tmp_bitmap_obstack);
  rtx_insn *insn;
  bool block_changed;
  struct dead_debug_local debug;

  if (redo_out)
    {
      /* Need to redo the live_out set of this block when one of
         the succs of this block has had a change in its live_in
         set.  */
      edge e;
      edge_iterator ei;
      df_confluence_function_n con_fun_n = df_word_lr->problem->con_fun_n;
      bitmap_clear (DF_WORD_LR_OUT (bb));
      FOR_EACH_EDGE (e, ei, bb->succs)
        (*con_fun_n) (e);
    }

  if (dump_file)
    {
      fprintf (dump_file, "processing block %d live out = ", bb->index);
      df_print_word_regset (dump_file, DF_WORD_LR_OUT (bb));
    }

  bitmap_copy (local_live, DF_WORD_LR_OUT (bb));
  dead_debug_local_init (&debug, NULL, global_debug);

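  /* The word-level LR problem tracks two bits per double-word pseudo
     (bits 2 * REGNO and 2 * REGNO + 1, one per word), hence the
     2 * DF_REF_REGNO indexing below.  */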
  FOR_BB_INSNS_REVERSE (bb, insn)
    if (DEBUG_INSN_P (insn))
      {
        df_ref use;
        FOR_EACH_INSN_USE (use, insn)
          if (DF_REF_REGNO (use) >= FIRST_PSEUDO_REGISTER
              && known_eq (GET_MODE_SIZE (GET_MODE (DF_REF_REAL_REG (use))),
                           2 * UNITS_PER_WORD)
              && !bitmap_bit_p (local_live, 2 * DF_REF_REGNO (use))
              && !bitmap_bit_p (local_live, 2 * DF_REF_REGNO (use) + 1))
            dead_debug_add (&debug, use, DF_REF_REGNO (use));
      }
    else if (INSN_P (insn))
      {
        bool any_changed;

        /* No matter if the instruction is needed or not, we remove
           any regno in the defs from the live set.  */
        any_changed = df_word_lr_simulate_defs (insn, local_live);
        if (any_changed)
          mark_insn (insn, true);

        /* On the other hand, we do not allow the dead uses to set
           anything in local_live.  */
        if (marked_insn_p (insn))
          df_word_lr_simulate_uses (insn, local_live);

        /* Insert debug temps for dead REGs used in subsequent debug
           insns.  We may have to emit a debug temp even if the insn
           was marked, in case the debug use was after the point of
           death.  */
        if (debug.used && !bitmap_empty_p (debug.used))
          {
            df_ref def;

            FOR_EACH_INSN_DEF (def, insn)
              dead_debug_insert_temp (&debug, DF_REF_REGNO (def), insn,
                                      marked_insn_p (insn)
                                      && !control_flow_insn_p (insn)
                                      ? DEBUG_TEMP_AFTER_WITH_REG_FORCE
                                      : DEBUG_TEMP_BEFORE_WITH_VALUE);
          }

        if (dump_file)
          {
            fprintf (dump_file, "finished processing insn %d live out = ",
                     INSN_UID (insn));
            df_print_word_regset (dump_file, local_live);
          }
      }

  block_changed = !bitmap_equal_p (local_live, DF_WORD_LR_IN (bb));
  if (block_changed)
    bitmap_copy (DF_WORD_LR_IN (bb), local_live);

  dead_debug_local_finish (&debug, NULL);
  BITMAP_FREE (local_live);
  return block_changed;
}


/* Process basic block BB.  Return true if the live_in set has
   changed.  REDO_OUT is true if the info at the bottom of the block
   needs to be recalculated before starting.  AU is the proper set of
   artificial uses.  Track global substitution of uses of dead pseudos
   in debug insns using GLOBAL_DEBUG.  */

static bool
dce_process_block (basic_block bb, bool redo_out, bitmap au,
                   struct dead_debug_global *global_debug)
{
  bitmap local_live = BITMAP_ALLOC (&dce_tmp_bitmap_obstack);
  rtx_insn *insn;
  bool block_changed;
  df_ref def;
  struct dead_debug_local debug;

  if (redo_out)
    {
      /* Need to redo the live_out set of this block when one of
         the succs of this block has had a change in its live_in
         set.  */
      edge e;
      edge_iterator ei;
      df_confluence_function_n con_fun_n = df_lr->problem->con_fun_n;
      bitmap_clear (DF_LR_OUT (bb));
      FOR_EACH_EDGE (e, ei, bb->succs)
        (*con_fun_n) (e);
    }

  if (dump_file)
    {
      fprintf (dump_file, "processing block %d lr out = ", bb->index);
      df_print_regset (dump_file, DF_LR_OUT (bb));
    }

  bitmap_copy (local_live, DF_LR_OUT (bb));

  df_simulate_initialize_backwards (bb, local_live);
  dead_debug_local_init (&debug, NULL, global_debug);

  FOR_BB_INSNS_REVERSE (bb, insn)
    if (DEBUG_INSN_P (insn))
      {
        df_ref use;
        FOR_EACH_INSN_USE (use, insn)
          if (!bitmap_bit_p (local_live, DF_REF_REGNO (use))
              && !bitmap_bit_p (au, DF_REF_REGNO (use)))
            dead_debug_add (&debug, use, DF_REF_REGNO (use));
      }
    else if (INSN_P (insn))
      {
        bool needed = marked_insn_p (insn);

        /* The insn is needed if there is someone who uses the output.  */
        if (!needed)
          FOR_EACH_INSN_DEF (def, insn)
            if (bitmap_bit_p (local_live, DF_REF_REGNO (def))
                || bitmap_bit_p (au, DF_REF_REGNO (def)))
              {
                needed = true;
                mark_insn (insn, true);
                break;
              }

        /* No matter if the instruction is needed or not, we remove
           any regno in the defs from the live set.  */
        df_simulate_defs (insn, local_live);

        /* On the other hand, we do not allow the dead uses to set
           anything in local_live.  */
        if (needed)
          df_simulate_uses (insn, local_live);

        /* Insert debug temps for dead REGs used in subsequent debug
           insns.  We may have to emit a debug temp even if the insn
           was marked, in case the debug use was after the point of
           death.  */
        if (debug.used && !bitmap_empty_p (debug.used))
          FOR_EACH_INSN_DEF (def, insn)
            dead_debug_insert_temp (&debug, DF_REF_REGNO (def), insn,
                                    needed && !control_flow_insn_p (insn)
                                    ? DEBUG_TEMP_AFTER_WITH_REG_FORCE
                                    : DEBUG_TEMP_BEFORE_WITH_VALUE);
      }

  dead_debug_local_finish (&debug, NULL);
  df_simulate_finalize_backwards (bb, local_live);

  block_changed = !bitmap_equal_p (local_live, DF_LR_IN (bb));
  if (block_changed)
    bitmap_copy (DF_LR_IN (bb), local_live);

  BITMAP_FREE (local_live);
  return block_changed;
}


/* Perform fast DCE once initialization is done.  If WORD_LEVEL is
   true, use the word level dce, otherwise do it at the pseudo
   level.  */

static void
fast_dce (bool word_level)
{
  int *postorder = df_get_postorder (DF_BACKWARD);
  int n_blocks = df_get_n_blocks (DF_BACKWARD);
  /* The set of blocks that have been seen on this iteration.  */
  bitmap processed = BITMAP_ALLOC (&dce_blocks_bitmap_obstack);
  /* The set of blocks that need to have the out vectors reset because
     the in of one of their successors has changed.  */
  bitmap redo_out = BITMAP_ALLOC (&dce_blocks_bitmap_obstack);
  bitmap all_blocks = BITMAP_ALLOC (&dce_blocks_bitmap_obstack);
  bool global_changed = true;

  /* These regs are considered always live so if they end up dying
     because of some def, we need to bring them back again.  Calling
     df_simulate_fixup_sets has the disadvantage of calling
     bb_has_eh_pred once per insn, so we cache the information
     here.  */
  bitmap au = &df->regular_block_artificial_uses;
  bitmap au_eh = &df->eh_block_artificial_uses;
  int i;
  struct dead_debug_global global_debug;

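  /* Mark the trivially necessary insns up front; the backward walk over
     each block below marks anything else whose result is still live.  */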
  prescan_insns_for_dce (true);

  for (i = 0; i < n_blocks; i++)
    bitmap_set_bit (all_blocks, postorder[i]);

  dead_debug_global_init (&global_debug, NULL);

  while (global_changed)
    {
      global_changed = false;

      for (i = 0; i < n_blocks; i++)
        {
          int index = postorder[i];
          basic_block bb = BASIC_BLOCK_FOR_FN (cfun, index);
          bool local_changed;

          if (index < NUM_FIXED_BLOCKS)
            {
              bitmap_set_bit (processed, index);
              continue;
            }

          if (word_level)
            local_changed
              = word_dce_process_block (bb, bitmap_bit_p (redo_out, index),
                                        &global_debug);
          else
            local_changed
              = dce_process_block (bb, bitmap_bit_p (redo_out, index),
                                   bb_has_eh_pred (bb) ? au_eh : au,
                                   &global_debug);
          bitmap_set_bit (processed, index);

          if (local_changed)
            {
              edge e;
              edge_iterator ei;
              FOR_EACH_EDGE (e, ei, bb->preds)
                if (bitmap_bit_p (processed, e->src->index))
                  /* Be tricky about when we need to iterate the
                     analysis.  We only have to redo the analysis if the
                     bitmaps change at the top of a block that is the
                     entry to a loop.  */
                  global_changed = true;
                else
                  bitmap_set_bit (redo_out, e->src->index);
            }
        }

      if (global_changed)
        {
          /* Turn off the RUN_DCE flag to prevent recursive calls to
             dce.  */
          int old_flag = df_clear_flags (DF_LR_RUN_DCE);

          /* So something was deleted that requires a redo.  Do it on
             the cheap.  */
          delete_unmarked_insns ();
          bitmap_clear (marked);
          bitmap_clear (processed);
          bitmap_clear (redo_out);

          /* We do not need to rescan any instructions.  We only need
             to redo the dataflow equations for the blocks that had a
             change at the top of the block.  Then we need to redo the
             iteration.  */
          if (word_level)
            df_analyze_problem (df_word_lr, all_blocks, postorder, n_blocks);
          else
            df_analyze_problem (df_lr, all_blocks, postorder, n_blocks);

          if (old_flag & DF_LR_RUN_DCE)
            df_set_flags (DF_LR_RUN_DCE);

          prescan_insns_for_dce (true);
        }
    }

  dead_debug_global_finish (&global_debug, NULL);

  delete_unmarked_insns ();

  BITMAP_FREE (processed);
  BITMAP_FREE (redo_out);
  BITMAP_FREE (all_blocks);
}


/* Fast register level DCE.  */

static unsigned int
rest_of_handle_fast_dce (void)
{
  init_dce (true);
  fast_dce (false);
  fini_dce (true);
  return 0;
}


/* Fast word level DCE.  */

void
run_word_dce (void)
{
  int old_flags;

  if (!flag_dce)
    return;

  timevar_push (TV_DCE);
  old_flags = df_clear_flags (DF_DEFER_INSN_RESCAN + DF_NO_INSN_RESCAN);
  df_word_lr_add_problem ();
  init_dce (true);
  fast_dce (true);
  fini_dce (true);
  df_set_flags (old_flags);
  timevar_pop (TV_DCE);
}


/* This is an internal call that is used by the df live register
   problem to run fast dce as a side effect of creating the live
   information.  The stack is organized so that the lr problem is run,
   this pass is run, which updates the live info and the df scanning
   info, and then returns to allow the rest of the problems to be run.

   This can be called from elsewhere, but it will not update the bit
   vectors for any problems other than LR.  */

void
run_fast_df_dce (void)
{
  if (flag_dce)
    {
      /* If dce is able to delete something, it has to happen
         immediately.  Otherwise there will be problems handling the
         eq_notes.  */
      int old_flags =
        df_clear_flags (DF_DEFER_INSN_RESCAN + DF_NO_INSN_RESCAN);

      df_in_progress = true;
      rest_of_handle_fast_dce ();
      df_in_progress = false;

      df_set_flags (old_flags);
    }
}


/* Run a fast DCE pass.  */

void
run_fast_dce (void)
{
  if (flag_dce)
    rest_of_handle_fast_dce ();
}


namespace {

const pass_data pass_data_fast_rtl_dce =
{
  RTL_PASS, /* type */
  "rtl_dce", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_DCE, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_df_finish, /* todo_flags_finish */
};

class pass_fast_rtl_dce : public rtl_opt_pass
{
public:
  pass_fast_rtl_dce (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_fast_rtl_dce, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *)
    {
      return optimize > 0 && flag_dce && dbg_cnt (dce_fast);
    }

  virtual unsigned int execute (function *)
    {
      return rest_of_handle_fast_dce ();
    }

}; // class pass_fast_rtl_dce

} // anon namespace

rtl_opt_pass *
make_pass_fast_rtl_dce (gcc::context *ctxt)
{
  return new pass_fast_rtl_dce (ctxt);
}