/* Definitions for computing resource usage of specific insns.
   Copyright (C) 1999-2020 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "rtl.h"
#include "df.h"
#include "memmodel.h"
#include "tm_p.h"
#include "regs.h"
#include "emit-rtl.h"
#include "resource.h"
#include "insn-attr.h"
#include "function-abi.h"

/* This structure is used to record liveness information at the targets or
   fallthrough insns of branches.  We will most likely need the information
   at targets again, so save it in a hash table rather than recomputing it
   each time.  */

struct target_info
{
  int uid;                      /* INSN_UID of target.  */
  struct target_info *next;     /* Next info for same hash bucket.  */
  HARD_REG_SET live_regs;       /* Registers live at target.  */
  int block;                    /* Basic block number containing target.  */
  int bb_tick;                  /* Generation count of basic block info.  */
};

#define TARGET_HASH_PRIME 257

/* Indicates what resources are required at the beginning of the epilogue.  */
static struct resources start_of_epilogue_needs;

/* Indicates what resources are required at function end.  */
static struct resources end_of_function_needs;

/* Define the hash table itself.  */
static struct target_info **target_hash_table = NULL;

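/* A minimal sketch (for illustration only, not used by the code) of the
   chained lookup this table supports; the same loop appears verbatim in
   mark_target_live_regs and clear_hashed_info_for_insn below:

     struct target_info *tinfo
       = target_hash_table[INSN_UID (target) % TARGET_HASH_PRIME];
     while (tinfo && tinfo->uid != INSN_UID (target))
       tinfo = tinfo->next;

   Buckets are chained through the NEXT field, so collisions simply
   lengthen the per-bucket list.  */
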
/* For each basic block, we maintain a generation number of its basic
   block info, which is updated each time we move an insn from the
   target of a jump.  This is the generation number indexed by block
   number.  */

static int *bb_ticks;

/* Marks registers possibly live at the current place being scanned by
   mark_target_live_regs.  Also used by update_live_status.  */

static HARD_REG_SET current_live_regs;

/* Marks registers for which we have seen a REG_DEAD note but no assignment.
   Also only used by the next two functions.  */

static HARD_REG_SET pending_dead_regs;

static void update_live_status (rtx, const_rtx, void *);
static int find_basic_block (rtx_insn *, int);
static rtx_insn *next_insn_no_annul (rtx_insn *);
static rtx_insn *find_dead_or_set_registers (rtx_insn *, struct resources*,
                                             rtx *, int, struct resources,
                                             struct resources);

/* Utility function called from mark_target_live_regs via note_stores.
   It deadens any CLOBBERed registers and livens any SET registers.  */

static void
update_live_status (rtx dest, const_rtx x, void *data ATTRIBUTE_UNUSED)
{
  int first_regno, last_regno;
  int i;

  if (!REG_P (dest)
      && (GET_CODE (dest) != SUBREG || !REG_P (SUBREG_REG (dest))))
    return;

  if (GET_CODE (dest) == SUBREG)
    {
      first_regno = subreg_regno (dest);
      last_regno = first_regno + subreg_nregs (dest);
    }
  else
    {
      first_regno = REGNO (dest);
      last_regno = END_REGNO (dest);
    }

  if (GET_CODE (x) == CLOBBER)
    for (i = first_regno; i < last_regno; i++)
      CLEAR_HARD_REG_BIT (current_live_regs, i);
  else
    for (i = first_regno; i < last_regno; i++)
      {
        SET_HARD_REG_BIT (current_live_regs, i);
        CLEAR_HARD_REG_BIT (pending_dead_regs, i);
      }
}

/* Find the number of the basic block with correct live register
   information that starts closest to INSN.  Return -1 if we couldn't
   find such a basic block or the beginning is more than
   SEARCH_LIMIT instructions before INSN.  Use SEARCH_LIMIT = -1 for
   an unlimited search.

   The delay slot filling code destroys the control-flow graph so,
   instead of finding the basic block containing INSN, we search
   backwards toward a BARRIER where the live register information is
   correct.  */

static int
find_basic_block (rtx_insn *insn, int search_limit)
{
  /* Scan backwards to the previous BARRIER.  Then see if we can find a
     label that starts a basic block.  Return the basic block number.  */
  for (insn = prev_nonnote_insn (insn);
       insn && !BARRIER_P (insn) && search_limit != 0;
       insn = prev_nonnote_insn (insn), --search_limit)
    ;

  /* The closest BARRIER is too far away.  */
  if (search_limit == 0)
    return -1;

  /* The start of the function.  */
  else if (insn == 0)
    return ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb->index;

  /* See if any of the upcoming CODE_LABELs start a basic block.  If we reach
     anything other than a CODE_LABEL or note, we can't find this code.  */
  for (insn = next_nonnote_insn (insn);
       insn && LABEL_P (insn);
       insn = next_nonnote_insn (insn))
    if (BLOCK_FOR_INSN (insn))
      return BLOCK_FOR_INSN (insn)->index;

  return -1;
}
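
/* Callers in this file bound the backward scan with
   param_max_delay_slot_live_search; a usage sketch, mirroring
   mark_target_live_regs and incr_ticks_for_insn below:

     int b = find_basic_block (insn, param_max_delay_slot_live_search);
     if (b != -1)
       ...consult the LR info of basic block B...  */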

/* Similar to next_insn, but ignores insns in the delay slots of
   an annulled branch.  */

static rtx_insn *
next_insn_no_annul (rtx_insn *insn)
{
  if (insn)
    {
      /* If INSN is an annulled branch, skip any insns from the target
         of the branch.  */
      if (JUMP_P (insn)
          && INSN_ANNULLED_BRANCH_P (insn)
          && NEXT_INSN (PREV_INSN (insn)) != insn)
        {
          rtx_insn *next = NEXT_INSN (insn);

          while ((NONJUMP_INSN_P (next) || JUMP_P (next) || CALL_P (next))
                 && INSN_FROM_TARGET_P (next))
            {
              insn = next;
              next = NEXT_INSN (insn);
            }
        }

      insn = NEXT_INSN (insn);
      if (insn && NONJUMP_INSN_P (insn)
          && GET_CODE (PATTERN (insn)) == SEQUENCE)
        insn = as_a <rtx_sequence *> (PATTERN (insn))->insn (0);
    }

  return insn;
}

/* Given X, some rtl, and RES, a pointer to a `struct resource', mark
   which resources are referenced by the insn.  If INCLUDE_DELAYED_EFFECTS
   is TRUE, resources used by the called routine will be included for
   CALL_INSNs.  */

void
mark_referenced_resources (rtx x, struct resources *res,
                           bool include_delayed_effects)
{
  enum rtx_code code = GET_CODE (x);
  int i, j;
  unsigned int r;
  const char *format_ptr;

  /* Handle leaf items for which we set resource flags.  Also, special-case
     CALL, SET and CLOBBER operators.  */
  switch (code)
    {
    case CONST:
    CASE_CONST_ANY:
    case PC:
    case SYMBOL_REF:
    case LABEL_REF:
    case DEBUG_INSN:
      return;

    case SUBREG:
      if (!REG_P (SUBREG_REG (x)))
        mark_referenced_resources (SUBREG_REG (x), res, false);
      else
        {
          unsigned int regno = subreg_regno (x);
          unsigned int last_regno = regno + subreg_nregs (x);

          gcc_assert (last_regno <= FIRST_PSEUDO_REGISTER);
          for (r = regno; r < last_regno; r++)
            SET_HARD_REG_BIT (res->regs, r);
        }
      return;

    case REG:
      gcc_assert (HARD_REGISTER_P (x));
      add_to_hard_reg_set (&res->regs, GET_MODE (x), REGNO (x));
      return;

    case MEM:
      /* If this memory shouldn't change, it really isn't referencing
         memory.  */
      if (! MEM_READONLY_P (x))
        res->memory = 1;
      res->volatil |= MEM_VOLATILE_P (x);

      /* Mark registers used to access memory.  */
      mark_referenced_resources (XEXP (x, 0), res, false);
      return;

    case CC0:
      res->cc = 1;
      return;

    case UNSPEC_VOLATILE:
    case TRAP_IF:
    case ASM_INPUT:
      /* Traditional asm's are always volatile.  */
      res->volatil = 1;
      break;

    case ASM_OPERANDS:
      res->volatil |= MEM_VOLATILE_P (x);

      /* For all ASM_OPERANDS, we must traverse the vector of input operands.
         We cannot just fall through here since then we would be confused
         by the ASM_INPUT rtx inside ASM_OPERANDS, which does not indicate
         a traditional asm, unlike its normal usage.  */

      for (i = 0; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
        mark_referenced_resources (ASM_OPERANDS_INPUT (x, i), res, false);
      return;

    case CALL:
      /* The first operand will be a (MEM (xxx)) but doesn't really reference
         memory.  The second operand may be referenced, though.  */
      mark_referenced_resources (XEXP (XEXP (x, 0), 0), res, false);
      mark_referenced_resources (XEXP (x, 1), res, false);
      return;

    case SET:
      /* Usually, the first operand of SET is set, not referenced.  But
         registers used to access memory are referenced.  SET_DEST is
         also referenced if it is a ZERO_EXTRACT.  */

      mark_referenced_resources (SET_SRC (x), res, false);

      x = SET_DEST (x);
      if (GET_CODE (x) == ZERO_EXTRACT
          || GET_CODE (x) == STRICT_LOW_PART)
        mark_referenced_resources (x, res, false);
      else if (GET_CODE (x) == SUBREG)
        x = SUBREG_REG (x);
      if (MEM_P (x))
        mark_referenced_resources (XEXP (x, 0), res, false);
      return;

    case CLOBBER:
      return;

    case CALL_INSN:
      if (include_delayed_effects)
        {
          /* A CALL references memory, the frame pointer if it exists, the
             stack pointer, any global registers and any registers given in
             USE insns immediately in front of the CALL.

             However, we may have moved some of the parameter loading insns
             into the delay slot of this CALL.  If so, the USE's for them
             don't count and should be skipped.  */
          rtx_insn *insn = PREV_INSN (as_a <rtx_insn *> (x));
          rtx_sequence *sequence = 0;
          int seq_size = 0;
          int i;

          /* If we are part of a delay slot sequence, point at the SEQUENCE.  */
          if (NEXT_INSN (insn) != x)
            {
              sequence = as_a <rtx_sequence *> (PATTERN (NEXT_INSN (insn)));
              seq_size = sequence->len ();
              gcc_assert (GET_CODE (sequence) == SEQUENCE);
            }

          res->memory = 1;
          SET_HARD_REG_BIT (res->regs, STACK_POINTER_REGNUM);
          if (frame_pointer_needed)
            {
              SET_HARD_REG_BIT (res->regs, FRAME_POINTER_REGNUM);
              if (!HARD_FRAME_POINTER_IS_FRAME_POINTER)
                SET_HARD_REG_BIT (res->regs, HARD_FRAME_POINTER_REGNUM);
            }

          for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
            if (global_regs[i])
              SET_HARD_REG_BIT (res->regs, i);

          /* Check for a REG_SETJMP.  If it exists, then we must
             assume that this call can need any register.

             This is done to be more conservative about how we handle setjmp.
             We assume that they both use and set all registers.  Using all
             registers ensures that a register will not be considered dead
             just because it crosses a setjmp call.  A register should be
             considered dead only if the setjmp call returns nonzero.  */
          if (find_reg_note (x, REG_SETJMP, NULL))
            SET_HARD_REG_SET (res->regs);

          {
            rtx link;

            for (link = CALL_INSN_FUNCTION_USAGE (x);
                 link;
                 link = XEXP (link, 1))
              if (GET_CODE (XEXP (link, 0)) == USE)
                {
                  for (i = 1; i < seq_size; i++)
                    {
                      rtx slot_pat = PATTERN (sequence->element (i));
                      if (GET_CODE (slot_pat) == SET
                          && rtx_equal_p (SET_DEST (slot_pat),
                                          XEXP (XEXP (link, 0), 0)))
                        break;
                    }
                  if (i >= seq_size)
                    mark_referenced_resources (XEXP (XEXP (link, 0), 0),
                                               res, false);
                }
          }
        }

      /* ... fall through to other INSN processing ...  */
      gcc_fallthrough ();

    case INSN:
    case JUMP_INSN:

      if (GET_CODE (PATTERN (x)) == COND_EXEC)
        /* In addition to the usual references, also consider all outputs
           as referenced, to compensate for mark_set_resources treating
           them as killed.  This is similar to ZERO_EXTRACT / STRICT_LOW_PART
           handling, except that we have a partial incidence instead of a
           partial width.  */
        mark_set_resources (x, res, 0,
                            include_delayed_effects
                            ? MARK_SRC_DEST_CALL : MARK_SRC_DEST);

      if (! include_delayed_effects
          && INSN_REFERENCES_ARE_DELAYED (as_a <rtx_insn *> (x)))
        return;

      /* No special processing, just speed up.  */
      mark_referenced_resources (PATTERN (x), res, include_delayed_effects);
      return;

    default:
      break;
    }

  /* Process each sub-expression and flag what it needs.  */
  format_ptr = GET_RTX_FORMAT (code);
  for (i = 0; i < GET_RTX_LENGTH (code); i++)
    switch (*format_ptr++)
      {
      case 'e':
        mark_referenced_resources (XEXP (x, i), res, include_delayed_effects);
        break;

      case 'E':
        for (j = 0; j < XVECLEN (x, i); j++)
          mark_referenced_resources (XVECEXP (x, i, j), res,
                                     include_delayed_effects);
        break;
      }
}
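
/* A minimal usage sketch (illustrative only; SOME_REG stands for whatever
   register a caller cares about): callers clear a resource set and then
   accumulate what an insn reads into it, as done throughout this file:

     struct resources needed;
     CLEAR_RESOURCE (&needed);
     mark_referenced_resources (insn, &needed, true);
     if (TEST_HARD_REG_BIT (needed.regs, REGNO (some_reg)))
       ...INSN reads SOME_REG...  */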

/* A subroutine of mark_target_live_regs.  Search forward from TARGET
   looking for registers that are set before they are used.  These are dead.
   Stop after passing a few conditional jumps, and/or a small
   number of unconditional branches.  */

static rtx_insn *
find_dead_or_set_registers (rtx_insn *target, struct resources *res,
                            rtx *jump_target, int jump_count,
                            struct resources set, struct resources needed)
{
  HARD_REG_SET scratch;
  rtx_insn *insn;
  rtx_insn *next_insn;
  rtx_insn *jump_insn = 0;
  int i;

  for (insn = target; insn; insn = next_insn)
    {
      rtx_insn *this_insn = insn;

      next_insn = NEXT_INSN (insn);

      /* If this instruction can throw an exception, then we don't
         know where we might end up next.  That means that we have to
         assume that whatever we have already marked as live really is
         live.  */
      if (can_throw_internal (insn))
        break;

      switch (GET_CODE (insn))
        {
        case CODE_LABEL:
          /* After a label, any pending dead registers that weren't yet
             used can be made dead.  */
          pending_dead_regs &= ~needed.regs;
          res->regs &= ~pending_dead_regs;
          CLEAR_HARD_REG_SET (pending_dead_regs);

          continue;

        case BARRIER:
        case NOTE:
        case DEBUG_INSN:
          continue;

        case INSN:
          if (GET_CODE (PATTERN (insn)) == USE)
            {
              /* If INSN is a USE made by update_block, we care about the
                 underlying insn.  Any registers set by the underlying insn
                 are live since the insn is being done somewhere else.  */
              if (INSN_P (XEXP (PATTERN (insn), 0)))
                mark_set_resources (XEXP (PATTERN (insn), 0), res, 0,
                                    MARK_SRC_DEST_CALL);

              /* All other USE insns are to be ignored.  */
              continue;
            }
          else if (GET_CODE (PATTERN (insn)) == CLOBBER)
            continue;
          else if (rtx_sequence *seq =
                     dyn_cast <rtx_sequence *> (PATTERN (insn)))
            {
              /* An unconditional jump can be used to fill the delay slot
                 of a call, so search for a JUMP_INSN in any position.  */
              for (i = 0; i < seq->len (); i++)
                {
                  this_insn = seq->insn (i);
                  if (JUMP_P (this_insn))
                    break;
                }
            }

        default:
          break;
        }

      if (rtx_jump_insn *this_jump_insn =
            dyn_cast <rtx_jump_insn *> (this_insn))
        {
          if (jump_count++ < 10)
            {
              if (any_uncondjump_p (this_jump_insn)
                  || ANY_RETURN_P (PATTERN (this_jump_insn)))
                {
                  rtx lab_or_return = this_jump_insn->jump_label ();
                  if (ANY_RETURN_P (lab_or_return))
                    next_insn = NULL;
                  else
                    next_insn = as_a <rtx_insn *> (lab_or_return);
                  if (jump_insn == 0)
                    {
                      jump_insn = insn;
                      if (jump_target)
                        *jump_target = JUMP_LABEL (this_jump_insn);
                    }
                }
              else if (any_condjump_p (this_jump_insn))
                {
                  struct resources target_set, target_res;
                  struct resources fallthrough_res;

                  /* We can handle conditional branches here by following
                     both paths, and then IOR the results of the two paths
                     together, which will give us registers that are dead
                     on both paths.  Since this is expensive, we give it
                     a much higher cost than unconditional branches.  The
                     cost was chosen so that we will follow at most 1
                     conditional branch.  */

                  jump_count += 4;
                  if (jump_count >= 10)
                    break;

                  mark_referenced_resources (insn, &needed, true);

                  /* For an annulled branch, mark_set_resources ignores slots
                     filled by instructions from the target.  This is correct
                     if the branch is not taken.  Since we are following both
                     paths from the branch, we must also compute correct info
                     if the branch is taken.  We do this by inverting all of
                     the INSN_FROM_TARGET_P bits, calling mark_set_resources,
                     and then inverting the INSN_FROM_TARGET_P bits again.  */

                  if (GET_CODE (PATTERN (insn)) == SEQUENCE
                      && INSN_ANNULLED_BRANCH_P (this_jump_insn))
                    {
                      rtx_sequence *seq = as_a <rtx_sequence *> (PATTERN (insn));
                      for (i = 1; i < seq->len (); i++)
                        INSN_FROM_TARGET_P (seq->element (i))
                          = ! INSN_FROM_TARGET_P (seq->element (i));

                      target_set = set;
                      mark_set_resources (insn, &target_set, 0,
                                          MARK_SRC_DEST_CALL);

                      for (i = 1; i < seq->len (); i++)
                        INSN_FROM_TARGET_P (seq->element (i))
                          = ! INSN_FROM_TARGET_P (seq->element (i));

                      mark_set_resources (insn, &set, 0, MARK_SRC_DEST_CALL);
                    }
                  else
                    {
                      mark_set_resources (insn, &set, 0, MARK_SRC_DEST_CALL);
                      target_set = set;
                    }

                  target_res = *res;
                  scratch = target_set.regs & ~needed.regs;
                  target_res.regs &= ~scratch;

                  fallthrough_res = *res;
                  scratch = set.regs & ~needed.regs;
                  fallthrough_res.regs &= ~scratch;

                  if (!ANY_RETURN_P (this_jump_insn->jump_label ()))
                    find_dead_or_set_registers
                      (this_jump_insn->jump_target (),
                       &target_res, 0, jump_count, target_set, needed);
                  find_dead_or_set_registers (next_insn,
                                              &fallthrough_res, 0, jump_count,
                                              set, needed);
                  fallthrough_res.regs |= target_res.regs;
                  res->regs &= fallthrough_res.regs;
                  break;
                }
              else
                break;
            }
          else
            {
              /* Don't try this optimization if we expired our jump count
                 above, since that would mean there may be an infinite loop
                 in the function being compiled.  */
              jump_insn = 0;
              break;
            }
        }

      mark_referenced_resources (insn, &needed, true);
      mark_set_resources (insn, &set, 0, MARK_SRC_DEST_CALL);

      scratch = set.regs & ~needed.regs;
      res->regs &= ~scratch;
    }

  return jump_insn;
}
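
/* In dataflow terms the loop above computes, per insn,

     live := live - (set - needed)

   i.e. a register drops out of the live set as soon as it is written
   before being read; this is exactly the "scratch" computation

     scratch = set.regs & ~needed.regs;
     res->regs &= ~scratch;

   performed at the bottom of the loop.  */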

/* Given X, a part of an insn, and a pointer to a `struct resource',
   RES, indicate which resources are modified by the insn.  If
   MARK_TYPE is MARK_SRC_DEST_CALL, also mark resources potentially
   set by the called routine.

   If IN_DEST is nonzero, it means we are inside a SET.  Otherwise,
   objects are being referenced instead of set.

   We never mark the insn as modifying the condition code unless it explicitly
   SETs CC0 even though this is not totally correct.  The reason for this is
   that we require a SET of CC0 to immediately precede the reference to CC0.
   So if some other insn sets CC0 as a side-effect, we know it cannot affect
   our computation and thus may be placed in a delay slot.  */

void
mark_set_resources (rtx x, struct resources *res, int in_dest,
                    enum mark_resource_type mark_type)
{
  enum rtx_code code;
  int i, j;
  unsigned int r;
  const char *format_ptr;

 restart:

  code = GET_CODE (x);

  switch (code)
    {
    case NOTE:
    case BARRIER:
    case CODE_LABEL:
    case USE:
    CASE_CONST_ANY:
    case LABEL_REF:
    case SYMBOL_REF:
    case CONST:
    case PC:
    case DEBUG_INSN:
      /* These don't set any resources.  */
      return;

    case CC0:
      if (in_dest)
        res->cc = 1;
      return;

    case CALL_INSN:
      /* Called routine modifies the condition code, memory, any registers
         that aren't saved across calls, global registers and anything
         explicitly CLOBBERed immediately after the CALL_INSN.  */

      if (mark_type == MARK_SRC_DEST_CALL)
        {
          rtx_call_insn *call_insn = as_a <rtx_call_insn *> (x);
          rtx link;

          res->cc = res->memory = 1;

          res->regs |= insn_callee_abi (call_insn).full_reg_clobbers ();

          for (link = CALL_INSN_FUNCTION_USAGE (call_insn);
               link; link = XEXP (link, 1))
            if (GET_CODE (XEXP (link, 0)) == CLOBBER)
              mark_set_resources (SET_DEST (XEXP (link, 0)), res, 1,
                                  MARK_SRC_DEST);

          /* Check for a REG_SETJMP.  If it exists, then we must
             assume that this call can clobber any register.  */
          if (find_reg_note (call_insn, REG_SETJMP, NULL))
            SET_HARD_REG_SET (res->regs);
        }

      /* ... and also what its RTL says it modifies, if anything.  */
      gcc_fallthrough ();

    case JUMP_INSN:
    case INSN:

      /* An insn consisting of just a CLOBBER (or USE) is just for flow
         and doesn't actually do anything, so we ignore it.  */

      if (mark_type != MARK_SRC_DEST_CALL
          && INSN_SETS_ARE_DELAYED (as_a <rtx_insn *> (x)))
        return;

      x = PATTERN (x);
      if (GET_CODE (x) != USE && GET_CODE (x) != CLOBBER)
        goto restart;
      return;

    case SET:
      /* If the source of a SET is a CALL, this is actually done by
         the called routine.  So only include it if we are to include the
         effects of the calling routine.  */

      mark_set_resources (SET_DEST (x), res,
                          (mark_type == MARK_SRC_DEST_CALL
                           || GET_CODE (SET_SRC (x)) != CALL),
                          mark_type);

      mark_set_resources (SET_SRC (x), res, 0, MARK_SRC_DEST);
      return;

    case CLOBBER:
      mark_set_resources (XEXP (x, 0), res, 1, MARK_SRC_DEST);
      return;

    case SEQUENCE:
      {
        rtx_sequence *seq = as_a <rtx_sequence *> (x);
        rtx control = seq->element (0);
        bool annul_p = JUMP_P (control) && INSN_ANNULLED_BRANCH_P (control);

        mark_set_resources (control, res, 0, mark_type);
        for (i = seq->len () - 1; i >= 0; --i)
          {
            rtx elt = seq->element (i);
            if (!annul_p && INSN_FROM_TARGET_P (elt))
              mark_set_resources (elt, res, 0, mark_type);
          }
      }
      return;

    case POST_INC:
    case PRE_INC:
    case POST_DEC:
    case PRE_DEC:
      mark_set_resources (XEXP (x, 0), res, 1, MARK_SRC_DEST);
      return;

    case PRE_MODIFY:
    case POST_MODIFY:
      mark_set_resources (XEXP (x, 0), res, 1, MARK_SRC_DEST);
      mark_set_resources (XEXP (XEXP (x, 1), 0), res, 0, MARK_SRC_DEST);
      mark_set_resources (XEXP (XEXP (x, 1), 1), res, 0, MARK_SRC_DEST);
      return;

    case SIGN_EXTRACT:
    case ZERO_EXTRACT:
      mark_set_resources (XEXP (x, 0), res, in_dest, MARK_SRC_DEST);
      mark_set_resources (XEXP (x, 1), res, 0, MARK_SRC_DEST);
      mark_set_resources (XEXP (x, 2), res, 0, MARK_SRC_DEST);
      return;

    case MEM:
      if (in_dest)
        {
          res->memory = 1;
          res->volatil |= MEM_VOLATILE_P (x);
        }

      mark_set_resources (XEXP (x, 0), res, 0, MARK_SRC_DEST);
      return;

    case SUBREG:
      if (in_dest)
        {
          if (!REG_P (SUBREG_REG (x)))
            mark_set_resources (SUBREG_REG (x), res, in_dest, mark_type);
          else
            {
              unsigned int regno = subreg_regno (x);
              unsigned int last_regno = regno + subreg_nregs (x);

              gcc_assert (last_regno <= FIRST_PSEUDO_REGISTER);
              for (r = regno; r < last_regno; r++)
                SET_HARD_REG_BIT (res->regs, r);
            }
        }
      return;

    case REG:
      if (in_dest)
        {
          gcc_assert (HARD_REGISTER_P (x));
          add_to_hard_reg_set (&res->regs, GET_MODE (x), REGNO (x));
        }
      return;

    case UNSPEC_VOLATILE:
    case ASM_INPUT:
      /* Traditional asm's are always volatile.  */
      res->volatil = 1;
      return;

    case TRAP_IF:
      res->volatil = 1;
      break;

    case ASM_OPERANDS:
      res->volatil |= MEM_VOLATILE_P (x);

      /* For all ASM_OPERANDS, we must traverse the vector of input operands.
         We cannot just fall through here since then we would be confused
         by the ASM_INPUT rtx inside ASM_OPERANDS, which does not indicate
         a traditional asm, unlike its normal usage.  */

      for (i = 0; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
        mark_set_resources (ASM_OPERANDS_INPUT (x, i), res, in_dest,
                            MARK_SRC_DEST);
      return;

    default:
      break;
    }

  /* Process each sub-expression and flag what it needs.  */
  format_ptr = GET_RTX_FORMAT (code);
  for (i = 0; i < GET_RTX_LENGTH (code); i++)
    switch (*format_ptr++)
      {
      case 'e':
        mark_set_resources (XEXP (x, i), res, in_dest, mark_type);
        break;

      case 'E':
        for (j = 0; j < XVECLEN (x, i); j++)
          mark_set_resources (XVECEXP (x, i, j), res, in_dest, mark_type);
        break;
      }
}
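
/* A minimal sketch (illustrative only; CANDIDATE and LIVE stand in for a
   caller's insn and live set) of how the two marking routines are used
   together to test whether an insn conflicts with live resources:

     struct resources set, needed;
     CLEAR_RESOURCE (&set);
     CLEAR_RESOURCE (&needed);
     mark_set_resources (candidate, &set, 0, MARK_SRC_DEST_CALL);
     mark_referenced_resources (candidate, &needed, true);
     if (hard_reg_set_intersect_p (set.regs, live.regs))
       ...CANDIDATE clobbers something live...  */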

/* Return TRUE if INSN is a return, possibly with a filled delay slot.  */

static bool
return_insn_p (const_rtx insn)
{
  if (JUMP_P (insn) && ANY_RETURN_P (PATTERN (insn)))
    return true;

  if (NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
    return return_insn_p (XVECEXP (PATTERN (insn), 0, 0));

  return false;
}

/* Set the resources that are live at TARGET.

   If TARGET is zero, we refer to the end of the current function and can
   return our precomputed value.

   Otherwise, we try to find out what is live by consulting the basic block
   information.  This is tricky, because we must consider the actions of
   reload and jump optimization, which occur after the basic block information
   has been computed.

   Accordingly, we proceed as follows:

   We find the previous BARRIER and look at all immediately following labels
   (with no intervening active insns) to see if any of them start a basic
   block.  If we hit the start of the function first, we use block 0.

   Once we have found a basic block and a corresponding first insn, we can
   accurately compute the live status (by starting at a label following a
   BARRIER, we are immune to actions taken by reload and jump.)  Then we
   scan all insns between that point and our target.  For each CLOBBER (or
   for call-clobbered regs when we pass a CALL_INSN), mark the appropriate
   registers as dead.  For a SET, mark them as live.

   We have to be careful when using REG_DEAD notes because they are not
   updated by such things as find_equiv_reg.  So keep track of registers
   marked as dead that haven't been assigned to, and mark them dead at the
   next CODE_LABEL since reload and jump won't propagate values across labels.

   If we cannot find the start of a basic block (should be a very rare
   case, if it can happen at all), mark everything as potentially live.

   Next, scan forward from TARGET looking for things set or clobbered
   before they are used.  These are not live.

   Because we can be called many times on the same target, save our results
   in a hash table indexed by INSN_UID.  This is only done if the function
   init_resource_info () was invoked before we are called.  */

void
mark_target_live_regs (rtx_insn *insns, rtx target_maybe_return,
                       struct resources *res)
{
  int b = -1;
  unsigned int i;
  struct target_info *tinfo = NULL;
  rtx_insn *insn;
  rtx jump_target;
  HARD_REG_SET scratch;
  struct resources set, needed;

  /* Handle end of function.  */
  if (target_maybe_return == 0 || ANY_RETURN_P (target_maybe_return))
    {
      *res = end_of_function_needs;
      return;
    }

  /* We've handled the case of RETURN/SIMPLE_RETURN; we should now have an
     instruction.  */
  rtx_insn *target = as_a <rtx_insn *> (target_maybe_return);

  /* Handle return insn.  */
  if (return_insn_p (target))
    {
      *res = end_of_function_needs;
      mark_referenced_resources (target, res, false);
      return;
    }

  /* We have to assume memory is needed, but the CC isn't.  */
  res->memory = 1;
  res->volatil = 0;
  res->cc = 0;

  /* See if we have computed this value already.  */
  if (target_hash_table != NULL)
    {
      for (tinfo = target_hash_table[INSN_UID (target) % TARGET_HASH_PRIME];
           tinfo; tinfo = tinfo->next)
        if (tinfo->uid == INSN_UID (target))
          break;

      /* Start by getting the basic block number.  If we have saved
         information, we can get it from there unless the insn at the
         start of the basic block has been deleted.  */
      if (tinfo && tinfo->block != -1
          && ! BB_HEAD (BASIC_BLOCK_FOR_FN (cfun, tinfo->block))->deleted ())
        b = tinfo->block;
    }

  if (b == -1)
    b = find_basic_block (target, param_max_delay_slot_live_search);

  if (target_hash_table != NULL)
    {
      if (tinfo)
        {
          /* If the information is up-to-date, use it.  Otherwise, we will
             update it below.  */
          if (b == tinfo->block && b != -1 && tinfo->bb_tick == bb_ticks[b])
            {
              res->regs = tinfo->live_regs;
              return;
            }
        }
      else
        {
          /* Allocate a place to put our results and chain it into the
             hash table.  */
          tinfo = XNEW (struct target_info);
          tinfo->uid = INSN_UID (target);
          tinfo->block = b;
          tinfo->next
            = target_hash_table[INSN_UID (target) % TARGET_HASH_PRIME];
          target_hash_table[INSN_UID (target) % TARGET_HASH_PRIME] = tinfo;
        }
    }

  CLEAR_HARD_REG_SET (pending_dead_regs);

  /* If we found a basic block, get the live registers from it and update
     them with anything set or killed between its start and the insn before
     TARGET; this custom life analysis is really about registers so we need
     to use the LR problem.  Otherwise, we must assume everything is live.  */
  if (b != -1)
    {
      regset regs_live = DF_LR_IN (BASIC_BLOCK_FOR_FN (cfun, b));
      rtx_insn *start_insn, *stop_insn;
      df_ref def;

      /* Compute hard regs live at start of block.  */
      REG_SET_TO_HARD_REG_SET (current_live_regs, regs_live);
      FOR_EACH_ARTIFICIAL_DEF (def, b)
        if (DF_REF_FLAGS (def) & DF_REF_AT_TOP)
          SET_HARD_REG_BIT (current_live_regs, DF_REF_REGNO (def));

      /* Get starting and ending insn, handling the case where each might
         be a SEQUENCE.  */
      start_insn = (b == ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb->index ?
                    insns : BB_HEAD (BASIC_BLOCK_FOR_FN (cfun, b)));
      stop_insn = target;

      if (NONJUMP_INSN_P (start_insn)
          && GET_CODE (PATTERN (start_insn)) == SEQUENCE)
        start_insn = as_a <rtx_sequence *> (PATTERN (start_insn))->insn (0);

      if (NONJUMP_INSN_P (stop_insn)
          && GET_CODE (PATTERN (stop_insn)) == SEQUENCE)
        stop_insn = next_insn (PREV_INSN (stop_insn));

      for (insn = start_insn; insn != stop_insn;
           insn = next_insn_no_annul (insn))
        {
          rtx link;
          rtx_insn *real_insn = insn;
          enum rtx_code code = GET_CODE (insn);

          if (DEBUG_INSN_P (insn))
            continue;

          /* If this insn is from the target of a branch, it isn't going to
             be used in the sequel.  If it is used in both cases, this
             test will not be true.  */
          if ((code == INSN || code == JUMP_INSN || code == CALL_INSN)
              && INSN_FROM_TARGET_P (insn))
            continue;

          /* If this insn is a USE made by update_block, we care about the
             underlying insn.  */
          if (code == INSN
              && GET_CODE (PATTERN (insn)) == USE
              && INSN_P (XEXP (PATTERN (insn), 0)))
            real_insn = as_a <rtx_insn *> (XEXP (PATTERN (insn), 0));

          if (CALL_P (real_insn))
            {
              /* Values in call-clobbered registers survive a COND_EXEC CALL
                 if that is not executed; this matters for resource use
                 because they may be used by a complementarily (or more
                 strictly) predicated instruction, or if the CALL is
                 NORETURN.  */
              if (GET_CODE (PATTERN (real_insn)) != COND_EXEC)
                {
                  HARD_REG_SET regs_invalidated_by_this_call
                    = insn_callee_abi (real_insn).full_reg_clobbers ();
                  /* CALL clobbers all call-used regs that aren't fixed except
                     sp, ap, and fp.  Do this before setting the result of the
                     call live.  */
                  current_live_regs &= ~regs_invalidated_by_this_call;
                }

              /* A CALL_INSN sets any global register live, since it may
                 have been modified by the call.  */
              for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
                if (global_regs[i])
                  SET_HARD_REG_BIT (current_live_regs, i);
            }

          /* Mark anything killed in an insn to be deadened at the next
             label.  Ignore USE insns; the only REG_DEAD notes will be for
             parameters.  But they might be early.  A CALL_INSN will usually
             clobber registers used for parameters.  It isn't worth bothering
             with the unlikely case when it won't.  */
          if ((NONJUMP_INSN_P (real_insn)
               && GET_CODE (PATTERN (real_insn)) != USE
               && GET_CODE (PATTERN (real_insn)) != CLOBBER)
              || JUMP_P (real_insn)
              || CALL_P (real_insn))
            {
              for (link = REG_NOTES (real_insn); link; link = XEXP (link, 1))
                if (REG_NOTE_KIND (link) == REG_DEAD
                    && REG_P (XEXP (link, 0))
                    && REGNO (XEXP (link, 0)) < FIRST_PSEUDO_REGISTER)
                  add_to_hard_reg_set (&pending_dead_regs,
                                       GET_MODE (XEXP (link, 0)),
                                       REGNO (XEXP (link, 0)));

              note_stores (real_insn, update_live_status, NULL);

              /* If any registers were unused after this insn, kill them.
                 These notes will always be accurate.  */
              for (link = REG_NOTES (real_insn); link; link = XEXP (link, 1))
                if (REG_NOTE_KIND (link) == REG_UNUSED
                    && REG_P (XEXP (link, 0))
                    && REGNO (XEXP (link, 0)) < FIRST_PSEUDO_REGISTER)
                  remove_from_hard_reg_set (&current_live_regs,
                                            GET_MODE (XEXP (link, 0)),
                                            REGNO (XEXP (link, 0)));
            }

          else if (LABEL_P (real_insn))
            {
              basic_block bb;

              /* A label clobbers the pending dead registers since neither
                 reload nor jump will propagate a value across a label.  */
              current_live_regs &= ~pending_dead_regs;
              CLEAR_HARD_REG_SET (pending_dead_regs);

              /* We must conservatively assume that all registers that used
                 to be live here still are.  The fallthrough edge may have
                 left a live register uninitialized.  */
              bb = BLOCK_FOR_INSN (real_insn);
              if (bb)
                {
                  HARD_REG_SET extra_live;

                  REG_SET_TO_HARD_REG_SET (extra_live, DF_LR_IN (bb));
                  current_live_regs |= extra_live;
                }
            }

          /* The beginning of the epilogue corresponds to the end of the
             RTL chain when there are no epilogue insns.  Certain resources
             are implicitly required at that point.  */
          else if (NOTE_P (real_insn)
                   && NOTE_KIND (real_insn) == NOTE_INSN_EPILOGUE_BEG)
            current_live_regs |= start_of_epilogue_needs.regs;
        }

      res->regs = current_live_regs;
      if (tinfo != NULL)
        {
          tinfo->block = b;
          tinfo->bb_tick = bb_ticks[b];
        }
    }
  else
    /* We didn't find the start of a basic block.  Assume everything
       in use.  This should happen only extremely rarely.  */
    SET_HARD_REG_SET (res->regs);

  CLEAR_RESOURCE (&set);
  CLEAR_RESOURCE (&needed);

  rtx_insn *jump_insn = find_dead_or_set_registers (target, res, &jump_target,
                                                    0, set, needed);

  /* If we hit an unconditional branch, we have another way of finding out
     what is live: we can see what is live at the branch target and include
     anything used but not set before the branch.  We add the live
     resources found using the test below to those found until now.  */

  if (jump_insn)
    {
      struct resources new_resources;
      rtx_insn *stop_insn = next_active_insn (jump_insn);

      if (!ANY_RETURN_P (jump_target))
        jump_target = next_active_insn (as_a <rtx_insn *> (jump_target));
      mark_target_live_regs (insns, jump_target, &new_resources);
      CLEAR_RESOURCE (&set);
      CLEAR_RESOURCE (&needed);

      /* Include JUMP_INSN in the needed registers.  */
      for (insn = target; insn != stop_insn; insn = next_active_insn (insn))
        {
          mark_referenced_resources (insn, &needed, true);

          scratch = needed.regs & ~set.regs;
          new_resources.regs |= scratch;

          mark_set_resources (insn, &set, 0, MARK_SRC_DEST_CALL);
        }

      res->regs |= new_resources.regs;
    }

  if (tinfo != NULL)
    tinfo->live_regs = res->regs;
}
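
/* A minimal usage sketch (illustrative only; TARGET and REG stand in for
   the insn and register a caller cares about, as in the delay-slot filler):

     struct resources live;
     mark_target_live_regs (get_insns (), target, &live);
     if (TEST_HARD_REG_BIT (live.regs, REGNO (reg)))
       ...REG is live at TARGET, so don't clobber it...  */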

/* Initialize the resources required by mark_target_live_regs ().
   This should be invoked before the first call to mark_target_live_regs.  */

void
init_resource_info (rtx_insn *epilogue_insn)
{
  int i;
  basic_block bb;

  /* Indicate what resources are required to be valid at the end of the
     current function.  The condition code never is and memory always is.
     The stack pointer is needed unless EXIT_IGNORE_STACK is true
     and there is an epilogue that restores the original stack pointer
     from the frame pointer.  Registers used to return the function value
     are needed.  Registers holding global variables are needed.  */

  end_of_function_needs.cc = 0;
  end_of_function_needs.memory = 1;
  CLEAR_HARD_REG_SET (end_of_function_needs.regs);

  if (frame_pointer_needed)
    {
      SET_HARD_REG_BIT (end_of_function_needs.regs, FRAME_POINTER_REGNUM);
      if (!HARD_FRAME_POINTER_IS_FRAME_POINTER)
        SET_HARD_REG_BIT (end_of_function_needs.regs,
                          HARD_FRAME_POINTER_REGNUM);
    }
  if (!(frame_pointer_needed
        && EXIT_IGNORE_STACK
        && epilogue_insn
        && !crtl->sp_is_unchanging))
    SET_HARD_REG_BIT (end_of_function_needs.regs, STACK_POINTER_REGNUM);

  if (crtl->return_rtx != 0)
    mark_referenced_resources (crtl->return_rtx,
                               &end_of_function_needs, true);

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    if (global_regs[i] || EPILOGUE_USES (i))
      SET_HARD_REG_BIT (end_of_function_needs.regs, i);

  /* The registers required to be live at the end of the function are
     represented in the flow information as being dead just prior to
     reaching the end of the function.  For example, the return of a value
     might be represented by a USE of the return register immediately
     followed by an unconditional jump to the return label where the
     return label is the end of the RTL chain.  The end of the RTL chain
     is then taken to mean that the return register is live.

     This sequence is no longer maintained when epilogue instructions are
     added to the RTL chain.  To reconstruct the original meaning, the
     start of the epilogue (NOTE_INSN_EPILOGUE_BEG) is regarded as the
     point where these registers become live (start_of_epilogue_needs).
     If epilogue instructions are present, the registers set by those
     instructions won't have been processed by flow.  Thus, those
     registers are additionally required at the end of the RTL chain
     (end_of_function_needs).  */

  start_of_epilogue_needs = end_of_function_needs;

  while ((epilogue_insn = next_nonnote_insn (epilogue_insn)))
    {
      mark_set_resources (epilogue_insn, &end_of_function_needs, 0,
                          MARK_SRC_DEST_CALL);
      if (return_insn_p (epilogue_insn))
        break;
    }

  /* Allocate and initialize the tables used by mark_target_live_regs.  */
  target_hash_table = XCNEWVEC (struct target_info *, TARGET_HASH_PRIME);
  bb_ticks = XCNEWVEC (int, last_basic_block_for_fn (cfun));

  /* Set the BLOCK_FOR_INSN of each label that starts a basic block.  */
  FOR_EACH_BB_FN (bb, cfun)
    if (LABEL_P (BB_HEAD (bb)))
      BLOCK_FOR_INSN (BB_HEAD (bb)) = bb;
}
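
/* The expected call sequence, mirroring the comments above (a sketch):

     init_resource_info (epilogue_insn);   // once, before any queries
     ...mark_target_live_regs () queries, incr_ticks_for_insn (), etc...
     free_resource_info ();                // once, after the last query  */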

/* Free up the resources allocated to mark_target_live_regs ().  This
   should be invoked after the last call to mark_target_live_regs ().  */

void
free_resource_info (void)
{
  basic_block bb;

  if (target_hash_table != NULL)
    {
      int i;

      for (i = 0; i < TARGET_HASH_PRIME; ++i)
        {
          struct target_info *ti = target_hash_table[i];

          while (ti)
            {
              struct target_info *next = ti->next;
              free (ti);
              ti = next;
            }
        }

      free (target_hash_table);
      target_hash_table = NULL;
    }

  if (bb_ticks != NULL)
    {
      free (bb_ticks);
      bb_ticks = NULL;
    }

  FOR_EACH_BB_FN (bb, cfun)
    if (LABEL_P (BB_HEAD (bb)))
      BLOCK_FOR_INSN (BB_HEAD (bb)) = NULL;
}

/* Clear any hashed information that we have stored for INSN.  */

void
clear_hashed_info_for_insn (rtx_insn *insn)
{
  struct target_info *tinfo;

  if (target_hash_table != NULL)
    {
      for (tinfo = target_hash_table[INSN_UID (insn) % TARGET_HASH_PRIME];
           tinfo; tinfo = tinfo->next)
        if (tinfo->uid == INSN_UID (insn))
          break;

      if (tinfo)
        tinfo->block = -1;
    }
}

/* Clear any hashed information that we have stored for instructions
   between INSN and the next BARRIER that follow a JUMP or a LABEL.  */

void
clear_hashed_info_until_next_barrier (rtx_insn *insn)
{
  while (insn && !BARRIER_P (insn))
    {
      if (JUMP_P (insn) || LABEL_P (insn))
        {
          rtx_insn *next = next_active_insn (insn);
          if (next)
            clear_hashed_info_for_insn (next);
        }

      insn = next_nonnote_insn (insn);
    }
}

/* Increment the tick count for the basic block that contains INSN.  */

void
incr_ticks_for_insn (rtx_insn *insn)
{
  int b = find_basic_block (insn, param_max_delay_slot_live_search);

  if (b != -1)
    bb_ticks[b]++;
}
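
/* Bumping bb_ticks[b] lazily invalidates any cached liveness for targets
   in block B: mark_target_live_regs only reuses a hash entry when
   tinfo->bb_tick == bb_ticks[tinfo->block], so stale entries are
   recomputed on their next lookup rather than eagerly flushed.  */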

/* Add TRIAL to the set of resources used at the end of the current
   function.  */
void
mark_end_of_function_resources (rtx trial, bool include_delayed_effects)
{
  mark_referenced_resources (trial, &end_of_function_needs,
                             include_delayed_effects);
}