xref: /netbsd-src/external/gpl3/gcc.old/dist/gcc/combine-stack-adj.c (revision 8feb0f0b7eaff0608f8350bbfa3098827b4bb91b)
11debfc3dSmrg /* Combine stack adjustments.
2*8feb0f0bSmrg    Copyright (C) 1987-2020 Free Software Foundation, Inc.
31debfc3dSmrg 
41debfc3dSmrg This file is part of GCC.
51debfc3dSmrg 
61debfc3dSmrg GCC is free software; you can redistribute it and/or modify it under
71debfc3dSmrg the terms of the GNU General Public License as published by the Free
81debfc3dSmrg Software Foundation; either version 3, or (at your option) any later
91debfc3dSmrg version.
101debfc3dSmrg 
111debfc3dSmrg GCC is distributed in the hope that it will be useful, but WITHOUT ANY
121debfc3dSmrg WARRANTY; without even the implied warranty of MERCHANTABILITY or
131debfc3dSmrg FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
141debfc3dSmrg for more details.
151debfc3dSmrg 
161debfc3dSmrg You should have received a copy of the GNU General Public License
171debfc3dSmrg along with GCC; see the file COPYING3.  If not see
181debfc3dSmrg <http://www.gnu.org/licenses/>.  */
191debfc3dSmrg 
201debfc3dSmrg /* Track stack adjustments and stack memory references.  Attempt to
211debfc3dSmrg    reduce the number of stack adjustments by back-propagating across
221debfc3dSmrg    the memory references.
231debfc3dSmrg 
241debfc3dSmrg    This is intended primarily for use with targets that do not define
251debfc3dSmrg    ACCUMULATE_OUTGOING_ARGS.  It is of significantly more value to
261debfc3dSmrg    targets that define PREFERRED_STACK_BOUNDARY more aligned than
271debfc3dSmrg    STACK_BOUNDARY (e.g. x86), or if not all registers can be pushed
281debfc3dSmrg    (e.g. x86 fp regs) which would ordinarily have to be implemented
291debfc3dSmrg    as a sub/mov pair due to restrictions in calls.c.
301debfc3dSmrg 
311debfc3dSmrg    Propagation stops when any of the insns that need adjusting are
321debfc3dSmrg    (a) no longer valid because we've exceeded their range, (b) a
331debfc3dSmrg    non-trivial push instruction, or (c) a call instruction.
341debfc3dSmrg 
351debfc3dSmrg    Restriction B is based on the assumption that push instructions
361debfc3dSmrg    are smaller or faster.  If a port really wants to remove all
371debfc3dSmrg    pushes, it should have defined ACCUMULATE_OUTGOING_ARGS.  The
381debfc3dSmrg    one exception that is made is for an add immediately followed
391debfc3dSmrg    by a push.  */
401debfc3dSmrg 
411debfc3dSmrg #include "config.h"
421debfc3dSmrg #include "system.h"
431debfc3dSmrg #include "coretypes.h"
441debfc3dSmrg #include "backend.h"
451debfc3dSmrg #include "rtl.h"
461debfc3dSmrg #include "df.h"
471debfc3dSmrg #include "insn-config.h"
481debfc3dSmrg #include "memmodel.h"
491debfc3dSmrg #include "emit-rtl.h"
501debfc3dSmrg #include "recog.h"
511debfc3dSmrg #include "cfgrtl.h"
521debfc3dSmrg #include "tree-pass.h"
531debfc3dSmrg #include "rtl-iter.h"
541debfc3dSmrg 
551debfc3dSmrg 
561debfc3dSmrg /* This structure records two kinds of stack references between stack
571debfc3dSmrg    adjusting instructions: stack references in memory addresses for
581debfc3dSmrg    regular insns and all stack references for debug insns.  */
591debfc3dSmrg 
struct csa_reflist
{
  /* Constant displacement from the stack pointer for this reference
     (0 for a bare (mem (sp)) or a direct REG reference).  */
  HOST_WIDE_INT sp_offset;
  /* The insn containing the reference.  */
  rtx_insn *insn;
  /* Location within the insn's pattern of the stack reference, so it
     can be rewritten in place via validate_change.  */
  rtx *ref;
  /* Next node in this singly-linked list.  */
  struct csa_reflist *next;
};
671debfc3dSmrg 
/* Forward declarations for the static helpers defined below.  */
static int stack_memref_p (rtx);
static rtx single_set_for_csa (rtx_insn *);
static void free_csa_reflist (struct csa_reflist *);
static struct csa_reflist *record_one_stack_ref (rtx_insn *, rtx *,
						 struct csa_reflist *);
static int try_apply_stack_adjustment (rtx_insn *, struct csa_reflist *,
				       HOST_WIDE_INT, HOST_WIDE_INT);
static void combine_stack_adjustments_for_block (basic_block);
761debfc3dSmrg 
771debfc3dSmrg 
781debfc3dSmrg /* Main entry point for stack adjustment combination.  */
791debfc3dSmrg 
801debfc3dSmrg static void
combine_stack_adjustments(void)811debfc3dSmrg combine_stack_adjustments (void)
821debfc3dSmrg {
831debfc3dSmrg   basic_block bb;
841debfc3dSmrg 
851debfc3dSmrg   FOR_EACH_BB_FN (bb, cfun)
861debfc3dSmrg     combine_stack_adjustments_for_block (bb);
871debfc3dSmrg }
881debfc3dSmrg 
891debfc3dSmrg /* Recognize a MEM of the form (sp) or (plus sp const).  */
901debfc3dSmrg 
911debfc3dSmrg static int
stack_memref_p(rtx x)921debfc3dSmrg stack_memref_p (rtx x)
931debfc3dSmrg {
941debfc3dSmrg   if (!MEM_P (x))
951debfc3dSmrg     return 0;
961debfc3dSmrg   x = XEXP (x, 0);
971debfc3dSmrg 
981debfc3dSmrg   if (x == stack_pointer_rtx)
991debfc3dSmrg     return 1;
1001debfc3dSmrg   if (GET_CODE (x) == PLUS
1011debfc3dSmrg       && XEXP (x, 0) == stack_pointer_rtx
1021debfc3dSmrg       && CONST_INT_P (XEXP (x, 1)))
1031debfc3dSmrg     return 1;
1041debfc3dSmrg 
1051debfc3dSmrg   return 0;
1061debfc3dSmrg }
1071debfc3dSmrg 
1081debfc3dSmrg /* Recognize either normal single_set or the hack in i386.md for
1091debfc3dSmrg    tying fp and sp adjustments.  */
1101debfc3dSmrg 
static rtx
single_set_for_csa (rtx_insn *insn)
{
  int i;
  rtx tmp = single_set (insn);
  /* The common case: a genuine single_set insn.  */
  if (tmp)
    return tmp;

  if (!NONJUMP_INSN_P (insn)
      || GET_CODE (PATTERN (insn)) != PARALLEL)
    return NULL_RTX;

  tmp = PATTERN (insn);
  /* Element 0 of the PARALLEL must be a SET; that is what we will
     return as the effective single set.  */
  if (GET_CODE (XVECEXP (tmp, 0, 0)) != SET)
    return NULL_RTX;

  /* Every remaining element must be effect-free with respect to the
     set in element 0: a CLOBBER, a USE, or a no-op SET.  */
  for (i = 1; i < XVECLEN (tmp, 0); ++i)
    {
      rtx this_rtx = XVECEXP (tmp, 0, i);

      /* The special case is allowing a no-op set.  */
      if (GET_CODE (this_rtx) == SET
	  && SET_SRC (this_rtx) == SET_DEST (this_rtx))
	;
      else if (GET_CODE (this_rtx) != CLOBBER
	       && GET_CODE (this_rtx) != USE)
	return NULL_RTX;
    }

  return XVECEXP (tmp, 0, 0);
}
1421debfc3dSmrg 
1431debfc3dSmrg /* Free the list of csa_reflist nodes.  */
1441debfc3dSmrg 
1451debfc3dSmrg static void
free_csa_reflist(struct csa_reflist * reflist)1461debfc3dSmrg free_csa_reflist (struct csa_reflist *reflist)
1471debfc3dSmrg {
1481debfc3dSmrg   struct csa_reflist *next;
1491debfc3dSmrg   for (; reflist ; reflist = next)
1501debfc3dSmrg     {
1511debfc3dSmrg       next = reflist->next;
1521debfc3dSmrg       free (reflist);
1531debfc3dSmrg     }
1541debfc3dSmrg }
1551debfc3dSmrg 
1561debfc3dSmrg /* Create a new csa_reflist node from the given stack reference.
1571debfc3dSmrg    It is already known that the reference is either a MEM satisfying the
1581debfc3dSmrg    predicate stack_memref_p or a REG representing the stack pointer.  */
1591debfc3dSmrg 
1601debfc3dSmrg static struct csa_reflist *
record_one_stack_ref(rtx_insn * insn,rtx * ref,struct csa_reflist * next_reflist)1611debfc3dSmrg record_one_stack_ref (rtx_insn *insn, rtx *ref, struct csa_reflist *next_reflist)
1621debfc3dSmrg {
1631debfc3dSmrg   struct csa_reflist *ml;
1641debfc3dSmrg 
1651debfc3dSmrg   ml = XNEW (struct csa_reflist);
1661debfc3dSmrg 
1671debfc3dSmrg   if (REG_P (*ref) || XEXP (*ref, 0) == stack_pointer_rtx)
1681debfc3dSmrg     ml->sp_offset = 0;
1691debfc3dSmrg   else
1701debfc3dSmrg     ml->sp_offset = INTVAL (XEXP (XEXP (*ref, 0), 1));
1711debfc3dSmrg 
1721debfc3dSmrg   ml->insn = insn;
1731debfc3dSmrg   ml->ref = ref;
1741debfc3dSmrg   ml->next = next_reflist;
1751debfc3dSmrg 
1761debfc3dSmrg   return ml;
1771debfc3dSmrg }
1781debfc3dSmrg 
1791debfc3dSmrg /* We only know how to adjust the CFA; no other frame-related changes
1801debfc3dSmrg    may appear in any insn to be deleted.  */
1811debfc3dSmrg 
static bool
no_unhandled_cfa (rtx_insn *insn)
{
  /* Non-frame-related insns never carry CFA information.  */
  if (!RTX_FRAME_RELATED_P (insn))
    return true;

  /* No CFA notes at all is a legacy interpretation like
     FRAME_RELATED_EXPR, and is context sensitive within
     the prologue state machine.  We can't handle that here.  */
  bool has_cfa_adjust = false;

  for (rtx link = REG_NOTES (insn); link; link = XEXP (link, 1))
    switch (REG_NOTE_KIND (link))
      {
      default:
        break;
      /* A pure CFA adjustment is the one kind of note we know how to
	 merge (see maybe_merge_cfa_adjust).  */
      case REG_CFA_ADJUST_CFA:
	has_cfa_adjust = true;
	break;

      /* Any other frame-related note means this insn makes a change
	 we cannot re-express, so it must not be deleted/merged.  */
      case REG_FRAME_RELATED_EXPR:
      case REG_CFA_DEF_CFA:
      case REG_CFA_OFFSET:
      case REG_CFA_REGISTER:
      case REG_CFA_EXPRESSION:
      case REG_CFA_RESTORE:
      case REG_CFA_SET_VDRAP:
      case REG_CFA_WINDOW_SAVE:
      case REG_CFA_FLUSH_QUEUE:
      case REG_CFA_TOGGLE_RA_MANGLE:
	return false;
      }

  return has_cfa_adjust;
}
2171debfc3dSmrg 
2181debfc3dSmrg /* Attempt to apply ADJUST to the stack adjusting insn INSN, as well
2191debfc3dSmrg    as each of the memories and stack references in REFLIST.  Return true
2201debfc3dSmrg    on success.  */
2211debfc3dSmrg 
static int
try_apply_stack_adjustment (rtx_insn *insn, struct csa_reflist *reflist,
			    HOST_WIDE_INT new_adjust, HOST_WIDE_INT delta)
{
  struct csa_reflist *ml;
  rtx set;

  set = single_set_for_csa (insn);
  /* All changes below are queued with in_group=1 and committed (or
     rolled back) atomically by apply_change_group.  */
  if (MEM_P (SET_DEST (set)))
    /* INSN is a push: rewrite its destination to address the stack
       pointer directly (the adjustment is being folded away).  */
    validate_change (insn, &SET_DEST (set),
		     replace_equiv_address (SET_DEST (set), stack_pointer_rtx),
		     1);
  else
    /* INSN is sp = sp + C: replace the constant with NEW_ADJUST.  */
    validate_change (insn, &XEXP (SET_SRC (set), 1), GEN_INT (new_adjust), 1);

  /* Rewrite every recorded stack reference to compensate for moving
     the adjustment by DELTA.  */
  for (ml = reflist; ml ; ml = ml->next)
    {
      rtx new_addr = plus_constant (Pmode, stack_pointer_rtx,
				    ml->sp_offset - delta);
      rtx new_val;

      if (MEM_P (*ml->ref))
	new_val = replace_equiv_address_nv (*ml->ref, new_addr);
      else if (GET_MODE (*ml->ref) == GET_MODE (stack_pointer_rtx))
	new_val = new_addr;
      else
	/* A stack-pointer reference in a narrower mode (e.g. from a
	   debug insn); wrap the new address in a matching subreg.  */
	new_val = lowpart_subreg (GET_MODE (*ml->ref), new_addr,
				  GET_MODE (new_addr));
      validate_change (ml->insn, ml->ref, new_val, 1);
    }

  if (apply_change_group ())
    {
      /* Succeeded.  Update our knowledge of the stack references.  */
      for (ml = reflist; ml ; ml = ml->next)
	ml->sp_offset -= delta;

      return 1;
    }
  else
    /* apply_change_group reverted everything; nothing changed.  */
    return 0;
}
2641debfc3dSmrg 
2651debfc3dSmrg /* For non-debug insns, record all stack memory references in INSN
2661debfc3dSmrg    and return true if there were no other (unrecorded) references to the
2671debfc3dSmrg    stack pointer.  For debug insns, record all stack references regardless
2681debfc3dSmrg    of context and unconditionally return true.  */
2691debfc3dSmrg 
static bool
record_stack_refs (rtx_insn *insn, struct csa_reflist **reflist)
{
  subrtx_ptr_iterator::array_type array;
  /* Walk every sub-rtx location of the pattern so matches can later
     be rewritten in place through the stored rtx pointers.  */
  FOR_EACH_SUBRTX_PTR (iter, array, &PATTERN (insn), NONCONST)
    {
      rtx *loc = *iter;
      rtx x = *loc;
      switch (GET_CODE (x))
	{
	case MEM:
	  if (!reg_mentioned_p (stack_pointer_rtx, x))
	    /* Nothing stack-related inside; skip the whole MEM.  */
	    iter.skip_subrtxes ();
	  /* We are not able to handle correctly all possible memrefs
	     containing stack pointer, so this check is necessary.  */
	  else if (stack_memref_p (x))
	    {
	      *reflist = record_one_stack_ref (insn, loc, *reflist);
	      iter.skip_subrtxes ();
	    }
	  /* Try harder for DEBUG_INSNs, handle e.g.
	     (mem (mem (sp + 16) + 4).  */
	  else if (!DEBUG_INSN_P (insn))
	    return false;
	  break;

	case REG:
	  /* ??? We want be able to handle non-memory stack pointer
	     references later.  For now just discard all insns referring to
	     stack pointer outside mem expressions.  We would probably
	     want to teach validate_replace to simplify expressions first.

	     We can't just compare with STACK_POINTER_RTX because the
	     reference to the stack pointer might be in some other mode.
	     In particular, an explicit clobber in an asm statement will
	     result in a QImode clobber.

	     In DEBUG_INSNs, we want to replace all occurrences, otherwise
	     they will cause -fcompare-debug failures.  */
	  if (REGNO (x) == STACK_POINTER_REGNUM)
	    {
	      if (!DEBUG_INSN_P (insn))
		return false;
	      *reflist = record_one_stack_ref (insn, loc, *reflist);
	    }
	  break;

	default:
	  break;
	}
    }
  /* Either all stack references were recorded, or this is a debug
     insn which is recorded unconditionally.  */
  return true;
}
3231debfc3dSmrg 
3241debfc3dSmrg /* If INSN has a REG_ARGS_SIZE note, move it to LAST.
3251debfc3dSmrg    AFTER is true iff LAST follows INSN in the instruction stream.  */
3261debfc3dSmrg 
3271debfc3dSmrg static void
maybe_move_args_size_note(rtx_insn * last,rtx_insn * insn,bool after)3281debfc3dSmrg maybe_move_args_size_note (rtx_insn *last, rtx_insn *insn, bool after)
3291debfc3dSmrg {
3301debfc3dSmrg   rtx note, last_note;
3311debfc3dSmrg 
3321debfc3dSmrg   note = find_reg_note (insn, REG_ARGS_SIZE, NULL_RTX);
3331debfc3dSmrg   if (note == NULL)
3341debfc3dSmrg     return;
3351debfc3dSmrg 
3361debfc3dSmrg   last_note = find_reg_note (last, REG_ARGS_SIZE, NULL_RTX);
3371debfc3dSmrg   if (last_note)
3381debfc3dSmrg     {
3391debfc3dSmrg       /* The ARGS_SIZE notes are *not* cumulative.  They represent an
3401debfc3dSmrg 	 absolute value, and the "most recent" note wins.  */
3411debfc3dSmrg       if (!after)
3421debfc3dSmrg         XEXP (last_note, 0) = XEXP (note, 0);
3431debfc3dSmrg     }
3441debfc3dSmrg   else
3451debfc3dSmrg     add_reg_note (last, REG_ARGS_SIZE, XEXP (note, 0));
3461debfc3dSmrg }
3471debfc3dSmrg 
3481debfc3dSmrg /* Merge any REG_CFA_ADJUST_CFA note from SRC into DST.
3491debfc3dSmrg    AFTER is true iff DST follows SRC in the instruction stream.  */
3501debfc3dSmrg 
static void
maybe_merge_cfa_adjust (rtx_insn *dst, rtx_insn *src, bool after)
{
  rtx snote = NULL, dnote = NULL;
  rtx sexp, dexp;
  rtx exp1, exp2;

  /* Only frame-related insns can carry a CFA note worth merging.  */
  if (RTX_FRAME_RELATED_P (src))
    snote = find_reg_note (src, REG_CFA_ADJUST_CFA, NULL_RTX);
  if (snote == NULL)
    return;
  sexp = XEXP (snote, 0);

  if (RTX_FRAME_RELATED_P (dst))
    dnote = find_reg_note (dst, REG_CFA_ADJUST_CFA, NULL_RTX);
  if (dnote == NULL)
    {
      /* DST has no CFA note yet; just transfer SRC's expression.  */
      add_reg_note (dst, REG_CFA_ADJUST_CFA, sexp);
      return;
    }
  dexp = XEXP (dnote, 0);

  gcc_assert (GET_CODE (sexp) == SET);
  gcc_assert (GET_CODE (dexp) == SET);

  /* EXP1 is the chronologically later adjustment, EXP2 the earlier
     one: substitute EXP2's value into EXP1 so the composition of the
     two adjustments is expressed as a single SET.  */
  if (after)
    exp1 = dexp, exp2 = sexp;
  else
    exp1 = sexp, exp2 = dexp;

  SET_SRC (exp1) = simplify_replace_rtx (SET_SRC (exp1), SET_DEST (exp2),
					 SET_SRC (exp2));
  XEXP (dnote, 0) = exp1;
}
3851debfc3dSmrg 
3861debfc3dSmrg /* Return the next (or previous) active insn within BB.  */
3871debfc3dSmrg 
3881debfc3dSmrg static rtx_insn *
prev_active_insn_bb(basic_block bb,rtx_insn * insn)3891debfc3dSmrg prev_active_insn_bb (basic_block bb, rtx_insn *insn)
3901debfc3dSmrg {
3911debfc3dSmrg   for (insn = PREV_INSN (insn);
3921debfc3dSmrg        insn != PREV_INSN (BB_HEAD (bb));
3931debfc3dSmrg        insn = PREV_INSN (insn))
3941debfc3dSmrg     if (active_insn_p (insn))
3951debfc3dSmrg       return insn;
3961debfc3dSmrg   return NULL;
3971debfc3dSmrg }
3981debfc3dSmrg 
3991debfc3dSmrg static rtx_insn *
next_active_insn_bb(basic_block bb,rtx_insn * insn)4001debfc3dSmrg next_active_insn_bb (basic_block bb, rtx_insn *insn)
4011debfc3dSmrg {
4021debfc3dSmrg   for (insn = NEXT_INSN (insn);
4031debfc3dSmrg        insn != NEXT_INSN (BB_END (bb));
4041debfc3dSmrg        insn = NEXT_INSN (insn))
4051debfc3dSmrg     if (active_insn_p (insn))
4061debfc3dSmrg       return insn;
4071debfc3dSmrg   return NULL;
4081debfc3dSmrg }
4091debfc3dSmrg 
4101debfc3dSmrg /* If INSN has a REG_ARGS_SIZE note, if possible move it to PREV.  Otherwise
4111debfc3dSmrg    search for a nearby candidate within BB where we can stick the note.  */
4121debfc3dSmrg 
static void
force_move_args_size_note (basic_block bb, rtx_insn *prev, rtx_insn *insn)
{
  rtx note;
  rtx_insn *test, *next_candidate, *prev_candidate;

  /* If PREV exists, tail-call to the logic in the other function.  */
  if (prev)
    {
      maybe_move_args_size_note (prev, insn, false);
      return;
    }

  /* First, make sure there's anything that needs doing.  */
  note = find_reg_note (insn, REG_ARGS_SIZE, NULL_RTX);
  if (note == NULL)
    return;

  /* We need to find a spot between the previous and next exception points
     where we can place the note and "properly" deallocate the arguments.  */
  next_candidate = prev_candidate = NULL;

  /* It is often the case that we have insns in the order:
	call
	add sp (previous deallocation)
	sub sp (align for next arglist)
	push arg
     and the add/sub cancel.  Therefore we begin by searching forward.  */

  test = insn;
  while ((test = next_active_insn_bb (bb, test)) != NULL)
    {
      /* Found an existing note: nothing to do.  */
      if (find_reg_note (test, REG_ARGS_SIZE, NULL_RTX))
        return;
      /* Found something that affects unwinding.  Stop searching.  */
      if (CALL_P (test) || !insn_nothrow_p (test))
	break;
      if (next_candidate == NULL)
	next_candidate = test;
    }

  /* Now search backward for a spot before the deleted adjustment.  */
  test = insn;
  while ((test = prev_active_insn_bb (bb, test)) != NULL)
    {
      rtx tnote;
      /* Found a place that seems logical to adjust the stack.  */
      tnote = find_reg_note (test, REG_ARGS_SIZE, NULL_RTX);
      if (tnote)
	{
	  /* Overwrite: ARGS_SIZE notes are absolute, latest wins.  */
	  XEXP (tnote, 0) = XEXP (note, 0);
	  return;
	}
      if (prev_candidate == NULL)
	prev_candidate = test;
      /* Found something that affects unwinding.  Stop searching.  */
      if (CALL_P (test) || !insn_nothrow_p (test))
	break;
    }

  /* Prefer a spot before INSN; fall back to one after it.  */
  if (prev_candidate)
    test = prev_candidate;
  else if (next_candidate)
    test = next_candidate;
  else
    {
      /* ??? We *must* have a place, lest we ICE on the lost adjustment.
	 Options are: dummy clobber insn, nop, or prevent the removal of
	 the sp += 0 insn.  */
      /* TODO: Find another way to indicate to the dwarf2 code that we
	 have not in fact lost an adjustment.  */
      test = emit_insn_before (gen_rtx_CLOBBER (VOIDmode, const0_rtx), insn);
    }
  add_reg_note (test, REG_ARGS_SIZE, XEXP (note, 0));
}
4881debfc3dSmrg 
4891debfc3dSmrg /* Subroutine of combine_stack_adjustments, called for each basic block.  */
4901debfc3dSmrg 
static void
combine_stack_adjustments_for_block (basic_block bb)
{
  HOST_WIDE_INT last_sp_adjust = 0;	/* Constant added by LAST_SP_SET.  */
  rtx_insn *last_sp_set = NULL;		/* Pending sp += const insn.  */
  rtx_insn *last2_sp_set = NULL;	/* The sp adjustment seen before that.  */
  struct csa_reflist *reflist = NULL;	/* Stack refs seen since LAST_SP_SET.  */
  rtx_insn *insn, *next;
  rtx set;
  bool end_of_block = false;

  for (insn = BB_HEAD (bb); !end_of_block ; insn = next)
    {
      /* NEXT is captured up front because INSN may be deleted below.  */
      end_of_block = insn == BB_END (bb);
      next = NEXT_INSN (insn);

      if (! INSN_P (insn))
	continue;

      set = single_set_for_csa (insn);
      /* Stack-checking probes must not be rewritten or merged.  */
      if (set && find_reg_note (insn, REG_STACK_CHECK, NULL_RTX))
	set = NULL_RTX;
      if (set)
	{
	  rtx dest = SET_DEST (set);
	  rtx src = SET_SRC (set);

	  /* Find constant additions to the stack pointer.  */
	  if (dest == stack_pointer_rtx
	      && GET_CODE (src) == PLUS
	      && XEXP (src, 0) == stack_pointer_rtx
	      && CONST_INT_P (XEXP (src, 1)))
	    {
	      HOST_WIDE_INT this_adjust = INTVAL (XEXP (src, 1));

	      /* If we've not seen an adjustment previously, record
		 it now and continue.  */
	      if (! last_sp_set)
		{
		  last_sp_set = insn;
		  last_sp_adjust = this_adjust;
		  continue;
		}

	      /* If not all recorded refs can be adjusted, or the
		 adjustment is now too large for a constant addition,
		 we cannot merge the two stack adjustments.

		 Also we need to be careful to not move stack pointer
		 such that we create stack accesses outside the allocated
		 area.  We can combine an allocation into the first insn,
		 or a deallocation into the second insn.  We cannot
		 combine an allocation followed by a deallocation.

		 The only somewhat frequent occurrence of the later is when
		 a function allocates a stack frame but does not use it.
		 For this case, we would need to analyze rtl stream to be
		 sure that allocated area is really unused.  This means not
		 only checking the memory references, but also all registers
		 or global memory references possibly containing a stack
		 frame address.

		 Perhaps the best way to address this problem is to teach
		 gcc not to allocate stack for objects never used.  */

	      /* Combine an allocation into the first instruction.  */
	      if (STACK_GROWS_DOWNWARD ? this_adjust <= 0 : this_adjust >= 0)
		{
		  if (no_unhandled_cfa (insn)
		      && try_apply_stack_adjustment (last_sp_set, reflist,
						     last_sp_adjust
						     + this_adjust,
						     this_adjust))
		    {
		      /* It worked!  */
		      maybe_move_args_size_note (last_sp_set, insn, false);
		      maybe_merge_cfa_adjust (last_sp_set, insn, false);
		      delete_insn (insn);
		      last_sp_adjust += this_adjust;
		      continue;
		    }
		}

	      /* Otherwise we have a deallocation.  Do not combine with
		 a previous allocation.  Combine into the second insn.  */
	      else if (STACK_GROWS_DOWNWARD
		       ? last_sp_adjust >= 0 : last_sp_adjust <= 0)
		{
		  if (no_unhandled_cfa (last_sp_set)
		      && try_apply_stack_adjustment (insn, reflist,
						     last_sp_adjust
						     + this_adjust,
						     -last_sp_adjust))
		    {
		      /* It worked!  */
		      maybe_move_args_size_note (insn, last_sp_set, true);
		      maybe_merge_cfa_adjust (insn, last_sp_set, true);
		      delete_insn (last_sp_set);
		      last_sp_set = insn;
		      last_sp_adjust += this_adjust;
		      free_csa_reflist (reflist);
		      reflist = NULL;
		      continue;
		    }
		}

	      /* Combination failed.  Restart processing from here.  If
		 deallocation+allocation conspired to cancel, we can
		 delete the old deallocation insn.  */
	      if (last_sp_set)
		{
		  if (last_sp_adjust == 0 && no_unhandled_cfa (last_sp_set))
		    {
		      maybe_move_args_size_note (insn, last_sp_set, true);
		      maybe_merge_cfa_adjust (insn, last_sp_set, true);
		      delete_insn (last_sp_set);
		    }
		  else
		    last2_sp_set = last_sp_set;
		}
	      free_csa_reflist (reflist);
	      reflist = NULL;
	      last_sp_set = insn;
	      last_sp_adjust = this_adjust;
	      continue;
	    }

	  /* Find a store with pre-(dec|inc)rement or pre-modify of exactly
	     the previous adjustment and turn it into a simple store.  This
	     is equivalent to anticipating the stack adjustment so this must
	     be an allocation.  */
	  if (MEM_P (dest)
	      && ((STACK_GROWS_DOWNWARD
		   ? (GET_CODE (XEXP (dest, 0)) == PRE_DEC
		      && known_eq (last_sp_adjust,
				   GET_MODE_SIZE (GET_MODE (dest))))
		   : (GET_CODE (XEXP (dest, 0)) == PRE_INC
		      && known_eq (-last_sp_adjust,
				   GET_MODE_SIZE (GET_MODE (dest)))))
		  || ((STACK_GROWS_DOWNWARD
		       ? last_sp_adjust >= 0 : last_sp_adjust <= 0)
		      && GET_CODE (XEXP (dest, 0)) == PRE_MODIFY
		      && GET_CODE (XEXP (XEXP (dest, 0), 1)) == PLUS
		      && XEXP (XEXP (XEXP (dest, 0), 1), 0)
			 == stack_pointer_rtx
		      && GET_CODE (XEXP (XEXP (XEXP (dest, 0), 1), 1))
		         == CONST_INT
		      && INTVAL (XEXP (XEXP (XEXP (dest, 0), 1), 1))
		         == -last_sp_adjust))
	      && XEXP (XEXP (dest, 0), 0) == stack_pointer_rtx
	      && !reg_mentioned_p (stack_pointer_rtx, src)
	      && memory_address_p (GET_MODE (dest), stack_pointer_rtx)
	      && try_apply_stack_adjustment (insn, reflist, 0,
					     -last_sp_adjust))
	    {
	      if (last2_sp_set)
		maybe_move_args_size_note (last2_sp_set, last_sp_set, false);
	      else
	        maybe_move_args_size_note (insn, last_sp_set, true);
	      delete_insn (last_sp_set);
	      free_csa_reflist (reflist);
	      reflist = NULL;
	      last_sp_set = NULL;
	      last_sp_adjust = 0;
	      continue;
	    }
	}

      /* A non-call insn whose stack references can all be recorded is
	 safe to look past; keep collecting.  */
      if (!CALL_P (insn) && last_sp_set
	  && record_stack_refs (insn, &reflist))
	continue;

      /* Otherwise, we were not able to process the instruction.
	 Do not continue collecting data across such a one.  */
      if (last_sp_set
	  && (CALL_P (insn)
	      || reg_mentioned_p (stack_pointer_rtx, PATTERN (insn))))
	{
	  if (last_sp_set && last_sp_adjust == 0)
	    {
	      force_move_args_size_note (bb, last2_sp_set, last_sp_set);
	      delete_insn (last_sp_set);
	    }
	  free_csa_reflist (reflist);
	  reflist = NULL;
	  last2_sp_set = NULL;
	  last_sp_set = NULL;
	  last_sp_adjust = 0;
	}
    }

  /* A pending sp += 0 at the end of the block can be deleted once its
     REG_ARGS_SIZE note has been rehomed.  */
  if (last_sp_set && last_sp_adjust == 0)
    {
      force_move_args_size_note (bb, last2_sp_set, last_sp_set);
      delete_insn (last_sp_set);
    }

  if (reflist)
    free_csa_reflist (reflist);
}
6911debfc3dSmrg 
static unsigned int
rest_of_handle_stack_adjustments (void)
{
  /* Request the DF note problem before solving, so the dataflow
     information consulted by the pass is current.  */
  df_note_add_problem ();
  df_analyze ();
  combine_stack_adjustments ();
  /* No TODO flags beyond those in pass_data.  */
  return 0;
}
7001debfc3dSmrg 
namespace {

/* Static metadata describing the "csa" RTL pass to the pass manager.  */
const pass_data pass_data_stack_adjustments =
{
  RTL_PASS, /* type */
  "csa", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_COMBINE_STACK_ADJUST, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_df_finish, /* todo_flags_finish */
};

/* Pass object wrapping combine_stack_adjustments.  */
class pass_stack_adjustments : public rtl_opt_pass
{
public:
  pass_stack_adjustments (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_stack_adjustments, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *);
  virtual unsigned int execute (function *)
    {
      return rest_of_handle_stack_adjustments ();
    }

}; // class pass_stack_adjustments

/* Decide whether the pass should run for the current function.  */
bool
pass_stack_adjustments::gate (function *)
{
  /* This is kind of a heuristic.  We need to run combine_stack_adjustments
     even for machines with possibly nonzero TARGET_RETURN_POPS_ARGS
     and ACCUMULATE_OUTGOING_ARGS.  We expect that only ports having
     push instructions will have popping returns.  */
#ifndef PUSH_ROUNDING
  if (ACCUMULATE_OUTGOING_ARGS)
    return false;
#endif
  return flag_combine_stack_adjustments;
}

} // anon namespace
7471debfc3dSmrg 
/* Factory function called by the pass manager to instantiate the pass.  */
rtl_opt_pass *
make_pass_stack_adjustments (gcc::context *ctxt)
{
  return new pass_stack_adjustments (ctxt);
}
753