1*e4b17023SJohn Marino /* Combine stack adjustments.
2*e4b17023SJohn Marino Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997,
3*e4b17023SJohn Marino 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009,
4*e4b17023SJohn Marino 2010, 2012 Free Software Foundation, Inc.
5*e4b17023SJohn Marino
6*e4b17023SJohn Marino This file is part of GCC.
7*e4b17023SJohn Marino
8*e4b17023SJohn Marino GCC is free software; you can redistribute it and/or modify it under
9*e4b17023SJohn Marino the terms of the GNU General Public License as published by the Free
10*e4b17023SJohn Marino Software Foundation; either version 3, or (at your option) any later
11*e4b17023SJohn Marino version.
12*e4b17023SJohn Marino
13*e4b17023SJohn Marino GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14*e4b17023SJohn Marino WARRANTY; without even the implied warranty of MERCHANTABILITY or
15*e4b17023SJohn Marino FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16*e4b17023SJohn Marino for more details.
17*e4b17023SJohn Marino
18*e4b17023SJohn Marino You should have received a copy of the GNU General Public License
19*e4b17023SJohn Marino along with GCC; see the file COPYING3. If not see
20*e4b17023SJohn Marino <http://www.gnu.org/licenses/>. */
21*e4b17023SJohn Marino
22*e4b17023SJohn Marino /* Track stack adjustments and stack memory references. Attempt to
23*e4b17023SJohn Marino reduce the number of stack adjustments by back-propagating across
24*e4b17023SJohn Marino the memory references.
25*e4b17023SJohn Marino
26*e4b17023SJohn Marino This is intended primarily for use with targets that do not define
27*e4b17023SJohn Marino ACCUMULATE_OUTGOING_ARGS. It is of significantly more value to
28*e4b17023SJohn Marino targets that define PREFERRED_STACK_BOUNDARY more aligned than
29*e4b17023SJohn Marino STACK_BOUNDARY (e.g. x86), or if not all registers can be pushed
30*e4b17023SJohn Marino (e.g. x86 fp regs) which would ordinarily have to be implemented
31*e4b17023SJohn Marino as a sub/mov pair due to restrictions in calls.c.
32*e4b17023SJohn Marino
33*e4b17023SJohn Marino Propagation stops when any of the insns that need adjusting are
34*e4b17023SJohn Marino (a) no longer valid because we've exceeded their range, (b) a
35*e4b17023SJohn Marino non-trivial push instruction, or (c) a call instruction.
36*e4b17023SJohn Marino
37*e4b17023SJohn Marino Restriction B is based on the assumption that push instructions
38*e4b17023SJohn Marino are smaller or faster. If a port really wants to remove all
39*e4b17023SJohn Marino pushes, it should have defined ACCUMULATE_OUTGOING_ARGS. The
40*e4b17023SJohn Marino one exception that is made is for an add immediately followed
41*e4b17023SJohn Marino by a push. */
42*e4b17023SJohn Marino
43*e4b17023SJohn Marino #include "config.h"
44*e4b17023SJohn Marino #include "system.h"
45*e4b17023SJohn Marino #include "coretypes.h"
46*e4b17023SJohn Marino #include "tm.h"
47*e4b17023SJohn Marino #include "rtl.h"
48*e4b17023SJohn Marino #include "tm_p.h"
49*e4b17023SJohn Marino #include "insn-config.h"
50*e4b17023SJohn Marino #include "recog.h"
51*e4b17023SJohn Marino #include "output.h"
52*e4b17023SJohn Marino #include "regs.h"
53*e4b17023SJohn Marino #include "hard-reg-set.h"
54*e4b17023SJohn Marino #include "flags.h"
55*e4b17023SJohn Marino #include "function.h"
56*e4b17023SJohn Marino #include "expr.h"
57*e4b17023SJohn Marino #include "basic-block.h"
58*e4b17023SJohn Marino #include "df.h"
59*e4b17023SJohn Marino #include "except.h"
60*e4b17023SJohn Marino #include "reload.h"
61*e4b17023SJohn Marino #include "timevar.h"
62*e4b17023SJohn Marino #include "tree-pass.h"
63*e4b17023SJohn Marino
64*e4b17023SJohn Marino
/* Turn STACK_GROWS_DOWNWARD into a boolean.  The target either defines
   it (with an empty or nonzero expansion) or leaves it undefined, so
   normalize to 1/0 for use in ordinary C conditionals below.  */
#ifdef STACK_GROWS_DOWNWARD
#undef STACK_GROWS_DOWNWARD
#define STACK_GROWS_DOWNWARD 1
#else
#define STACK_GROWS_DOWNWARD 0
#endif
72*e4b17023SJohn Marino
/* This structure records two kinds of stack references between stack
   adjusting instructions: stack references in memory addresses for
   regular insns and all stack references for debug insns.  */

struct csa_reflist
{
  /* Constant offset from the stack pointer in the recorded reference
     (0 for a bare (mem (sp)) or a bare sp REG).  */
  HOST_WIDE_INT sp_offset;
  /* The insn containing the reference and the location of the
     reference rtx within that insn's pattern.  */
  rtx insn, *ref;
  /* Next node in the singly linked list.  */
  struct csa_reflist *next;
};
83*e4b17023SJohn Marino
/* Forward declarations for the file-local helpers defined below.  */
static int stack_memref_p (rtx);
static rtx single_set_for_csa (rtx);
static void free_csa_reflist (struct csa_reflist *);
static struct csa_reflist *record_one_stack_ref (rtx, rtx *,
						 struct csa_reflist *);
static int try_apply_stack_adjustment (rtx, struct csa_reflist *,
				       HOST_WIDE_INT, HOST_WIDE_INT);
static void combine_stack_adjustments_for_block (basic_block);
static int record_stack_refs (rtx *, void *);
93*e4b17023SJohn Marino
94*e4b17023SJohn Marino
95*e4b17023SJohn Marino /* Main entry point for stack adjustment combination. */
96*e4b17023SJohn Marino
97*e4b17023SJohn Marino static void
combine_stack_adjustments(void)98*e4b17023SJohn Marino combine_stack_adjustments (void)
99*e4b17023SJohn Marino {
100*e4b17023SJohn Marino basic_block bb;
101*e4b17023SJohn Marino
102*e4b17023SJohn Marino FOR_EACH_BB (bb)
103*e4b17023SJohn Marino combine_stack_adjustments_for_block (bb);
104*e4b17023SJohn Marino }
105*e4b17023SJohn Marino
106*e4b17023SJohn Marino /* Recognize a MEM of the form (sp) or (plus sp const). */
107*e4b17023SJohn Marino
108*e4b17023SJohn Marino static int
stack_memref_p(rtx x)109*e4b17023SJohn Marino stack_memref_p (rtx x)
110*e4b17023SJohn Marino {
111*e4b17023SJohn Marino if (!MEM_P (x))
112*e4b17023SJohn Marino return 0;
113*e4b17023SJohn Marino x = XEXP (x, 0);
114*e4b17023SJohn Marino
115*e4b17023SJohn Marino if (x == stack_pointer_rtx)
116*e4b17023SJohn Marino return 1;
117*e4b17023SJohn Marino if (GET_CODE (x) == PLUS
118*e4b17023SJohn Marino && XEXP (x, 0) == stack_pointer_rtx
119*e4b17023SJohn Marino && CONST_INT_P (XEXP (x, 1)))
120*e4b17023SJohn Marino return 1;
121*e4b17023SJohn Marino
122*e4b17023SJohn Marino return 0;
123*e4b17023SJohn Marino }
124*e4b17023SJohn Marino
125*e4b17023SJohn Marino /* Recognize either normal single_set or the hack in i386.md for
126*e4b17023SJohn Marino tying fp and sp adjustments. */
127*e4b17023SJohn Marino
128*e4b17023SJohn Marino static rtx
single_set_for_csa(rtx insn)129*e4b17023SJohn Marino single_set_for_csa (rtx insn)
130*e4b17023SJohn Marino {
131*e4b17023SJohn Marino int i;
132*e4b17023SJohn Marino rtx tmp = single_set (insn);
133*e4b17023SJohn Marino if (tmp)
134*e4b17023SJohn Marino return tmp;
135*e4b17023SJohn Marino
136*e4b17023SJohn Marino if (!NONJUMP_INSN_P (insn)
137*e4b17023SJohn Marino || GET_CODE (PATTERN (insn)) != PARALLEL)
138*e4b17023SJohn Marino return NULL_RTX;
139*e4b17023SJohn Marino
140*e4b17023SJohn Marino tmp = PATTERN (insn);
141*e4b17023SJohn Marino if (GET_CODE (XVECEXP (tmp, 0, 0)) != SET)
142*e4b17023SJohn Marino return NULL_RTX;
143*e4b17023SJohn Marino
144*e4b17023SJohn Marino for (i = 1; i < XVECLEN (tmp, 0); ++i)
145*e4b17023SJohn Marino {
146*e4b17023SJohn Marino rtx this_rtx = XVECEXP (tmp, 0, i);
147*e4b17023SJohn Marino
148*e4b17023SJohn Marino /* The special case is allowing a no-op set. */
149*e4b17023SJohn Marino if (GET_CODE (this_rtx) == SET
150*e4b17023SJohn Marino && SET_SRC (this_rtx) == SET_DEST (this_rtx))
151*e4b17023SJohn Marino ;
152*e4b17023SJohn Marino else if (GET_CODE (this_rtx) != CLOBBER
153*e4b17023SJohn Marino && GET_CODE (this_rtx) != USE)
154*e4b17023SJohn Marino return NULL_RTX;
155*e4b17023SJohn Marino }
156*e4b17023SJohn Marino
157*e4b17023SJohn Marino return XVECEXP (tmp, 0, 0);
158*e4b17023SJohn Marino }
159*e4b17023SJohn Marino
160*e4b17023SJohn Marino /* Free the list of csa_reflist nodes. */
161*e4b17023SJohn Marino
162*e4b17023SJohn Marino static void
free_csa_reflist(struct csa_reflist * reflist)163*e4b17023SJohn Marino free_csa_reflist (struct csa_reflist *reflist)
164*e4b17023SJohn Marino {
165*e4b17023SJohn Marino struct csa_reflist *next;
166*e4b17023SJohn Marino for (; reflist ; reflist = next)
167*e4b17023SJohn Marino {
168*e4b17023SJohn Marino next = reflist->next;
169*e4b17023SJohn Marino free (reflist);
170*e4b17023SJohn Marino }
171*e4b17023SJohn Marino }
172*e4b17023SJohn Marino
173*e4b17023SJohn Marino /* Create a new csa_reflist node from the given stack reference.
174*e4b17023SJohn Marino It is already known that the reference is either a MEM satisfying the
175*e4b17023SJohn Marino predicate stack_memref_p or a REG representing the stack pointer. */
176*e4b17023SJohn Marino
177*e4b17023SJohn Marino static struct csa_reflist *
record_one_stack_ref(rtx insn,rtx * ref,struct csa_reflist * next_reflist)178*e4b17023SJohn Marino record_one_stack_ref (rtx insn, rtx *ref, struct csa_reflist *next_reflist)
179*e4b17023SJohn Marino {
180*e4b17023SJohn Marino struct csa_reflist *ml;
181*e4b17023SJohn Marino
182*e4b17023SJohn Marino ml = XNEW (struct csa_reflist);
183*e4b17023SJohn Marino
184*e4b17023SJohn Marino if (REG_P (*ref) || XEXP (*ref, 0) == stack_pointer_rtx)
185*e4b17023SJohn Marino ml->sp_offset = 0;
186*e4b17023SJohn Marino else
187*e4b17023SJohn Marino ml->sp_offset = INTVAL (XEXP (XEXP (*ref, 0), 1));
188*e4b17023SJohn Marino
189*e4b17023SJohn Marino ml->insn = insn;
190*e4b17023SJohn Marino ml->ref = ref;
191*e4b17023SJohn Marino ml->next = next_reflist;
192*e4b17023SJohn Marino
193*e4b17023SJohn Marino return ml;
194*e4b17023SJohn Marino }
195*e4b17023SJohn Marino
/* Attempt to apply ADJUST to the stack adjusting insn INSN, as well
   as each of the memories and stack references in REFLIST.  Return true
   on success.  All changes are queued with validate_change and either
   committed or rolled back as a group by apply_change_group, so a
   partial failure leaves the insn stream untouched.  */

static int
try_apply_stack_adjustment (rtx insn, struct csa_reflist *reflist,
			    HOST_WIDE_INT new_adjust, HOST_WIDE_INT delta)
{
  struct csa_reflist *ml;
  rtx set;

  set = single_set_for_csa (insn);
  if (MEM_P (SET_DEST (set)))
    /* INSN is a push being turned into a plain store at (sp): rewrite
       its destination address to the bare stack pointer.  */
    validate_change (insn, &SET_DEST (set),
		     replace_equiv_address (SET_DEST (set), stack_pointer_rtx),
		     1);
  else
    /* INSN is sp = sp + const; replace the constant with NEW_ADJUST.  */
    validate_change (insn, &XEXP (SET_SRC (set), 1), GEN_INT (new_adjust), 1);

  /* Rewrite each recorded reference to compensate for moving the
     adjustment by DELTA.  */
  for (ml = reflist; ml ; ml = ml->next)
    {
      rtx new_addr = plus_constant (stack_pointer_rtx, ml->sp_offset - delta);
      rtx new_val;

      if (MEM_P (*ml->ref))
	new_val = replace_equiv_address_nv (*ml->ref, new_addr);
      else if (GET_MODE (*ml->ref) == GET_MODE (stack_pointer_rtx))
	new_val = new_addr;
      else
	/* A stack pointer reference in a narrower mode (e.g. from a
	   debug insn): take the low part in the reference's mode.  */
	new_val = lowpart_subreg (GET_MODE (*ml->ref), new_addr,
				  GET_MODE (new_addr));
      validate_change (ml->insn, ml->ref, new_val, 1);
    }

  if (apply_change_group ())
    {
      /* Succeeded.  Update our knowledge of the stack references.  */
      for (ml = reflist; ml ; ml = ml->next)
	ml->sp_offset -= delta;

      return 1;
    }
  else
    return 0;
}
241*e4b17023SJohn Marino
/* Called via for_each_rtx and used to record all stack memory and other
   references in the insn and discard all other stack pointer references.  */
struct record_stack_refs_data
{
  /* The insn currently being scanned.  */
  rtx insn;
  /* Accumulated list of stack references found so far.  */
  struct csa_reflist *reflist;
};
249*e4b17023SJohn Marino
/* for_each_rtx callback: record stack references found in *XP into
   DATA (a struct record_stack_refs_data).  Returns 0 to continue the
   walk, -1 to skip the sub-rtxes of *XP, and 1 to abort the walk
   (meaning the insn cannot be handled).  */

static int
record_stack_refs (rtx *xp, void *data)
{
  rtx x = *xp;
  struct record_stack_refs_data *d =
    (struct record_stack_refs_data *) data;
  if (!x)
    return 0;
  switch (GET_CODE (x))
    {
    case MEM:
      /* A MEM not involving sp is irrelevant; don't descend into it.  */
      if (!reg_mentioned_p (stack_pointer_rtx, x))
	return -1;
      /* We are not able to handle correctly all possible memrefs containing
         stack pointer, so this check is necessary.  */
      if (stack_memref_p (x))
	{
	  d->reflist = record_one_stack_ref (d->insn, xp, d->reflist);
	  return -1;
	}
      /* Try harder for DEBUG_INSNs, handle e.g. (mem (mem (sp + 16) + 4)).
	 For a regular insn an unhandled sp-containing MEM aborts the walk.  */
      return !DEBUG_INSN_P (d->insn);
    case REG:
      /* ??? We want be able to handle non-memory stack pointer
	 references later.  For now just discard all insns referring to
	 stack pointer outside mem expressions.  We would probably
	 want to teach validate_replace to simplify expressions first.

	 We can't just compare with STACK_POINTER_RTX because the
	 reference to the stack pointer might be in some other mode.
	 In particular, an explicit clobber in an asm statement will
	 result in a QImode clobber.

	 In DEBUG_INSNs, we want to replace all occurrences, otherwise
	 they will cause -fcompare-debug failures.  */
      if (REGNO (x) == STACK_POINTER_REGNUM)
	{
	  if (!DEBUG_INSN_P (d->insn))
	    return 1;
	  d->reflist = record_one_stack_ref (d->insn, xp, d->reflist);
	  return -1;
	}
      break;
    default:
      break;
    }
  return 0;
}
298*e4b17023SJohn Marino
299*e4b17023SJohn Marino /* If INSN has a REG_ARGS_SIZE note, move it to LAST.
300*e4b17023SJohn Marino AFTER is true iff LAST follows INSN in the instruction stream. */
301*e4b17023SJohn Marino
302*e4b17023SJohn Marino static void
maybe_move_args_size_note(rtx last,rtx insn,bool after)303*e4b17023SJohn Marino maybe_move_args_size_note (rtx last, rtx insn, bool after)
304*e4b17023SJohn Marino {
305*e4b17023SJohn Marino rtx note, last_note;
306*e4b17023SJohn Marino
307*e4b17023SJohn Marino note = find_reg_note (insn, REG_ARGS_SIZE, NULL_RTX);
308*e4b17023SJohn Marino if (note == NULL)
309*e4b17023SJohn Marino return;
310*e4b17023SJohn Marino
311*e4b17023SJohn Marino last_note = find_reg_note (last, REG_ARGS_SIZE, NULL_RTX);
312*e4b17023SJohn Marino if (last_note)
313*e4b17023SJohn Marino {
314*e4b17023SJohn Marino /* The ARGS_SIZE notes are *not* cumulative. They represent an
315*e4b17023SJohn Marino absolute value, and the "most recent" note wins. */
316*e4b17023SJohn Marino if (!after)
317*e4b17023SJohn Marino XEXP (last_note, 0) = XEXP (note, 0);
318*e4b17023SJohn Marino }
319*e4b17023SJohn Marino else
320*e4b17023SJohn Marino add_reg_note (last, REG_ARGS_SIZE, XEXP (note, 0));
321*e4b17023SJohn Marino }
322*e4b17023SJohn Marino
323*e4b17023SJohn Marino /* Return the next (or previous) active insn within BB. */
324*e4b17023SJohn Marino
325*e4b17023SJohn Marino static rtx
prev_active_insn_bb(basic_block bb,rtx insn)326*e4b17023SJohn Marino prev_active_insn_bb (basic_block bb, rtx insn)
327*e4b17023SJohn Marino {
328*e4b17023SJohn Marino for (insn = PREV_INSN (insn);
329*e4b17023SJohn Marino insn != PREV_INSN (BB_HEAD (bb));
330*e4b17023SJohn Marino insn = PREV_INSN (insn))
331*e4b17023SJohn Marino if (active_insn_p (insn))
332*e4b17023SJohn Marino return insn;
333*e4b17023SJohn Marino return NULL_RTX;
334*e4b17023SJohn Marino }
335*e4b17023SJohn Marino
336*e4b17023SJohn Marino static rtx
next_active_insn_bb(basic_block bb,rtx insn)337*e4b17023SJohn Marino next_active_insn_bb (basic_block bb, rtx insn)
338*e4b17023SJohn Marino {
339*e4b17023SJohn Marino for (insn = NEXT_INSN (insn);
340*e4b17023SJohn Marino insn != NEXT_INSN (BB_END (bb));
341*e4b17023SJohn Marino insn = NEXT_INSN (insn))
342*e4b17023SJohn Marino if (active_insn_p (insn))
343*e4b17023SJohn Marino return insn;
344*e4b17023SJohn Marino return NULL_RTX;
345*e4b17023SJohn Marino }
346*e4b17023SJohn Marino
/* If INSN has a REG_ARGS_SIZE note, if possible move it to PREV.  Otherwise
   search for a nearby candidate within BB where we can stick the note.  */

static void
force_move_args_size_note (basic_block bb, rtx prev, rtx insn)
{
  rtx note, test, next_candidate, prev_candidate;

  /* If PREV exists, tail-call to the logic in the other function.  */
  if (prev)
    {
      maybe_move_args_size_note (prev, insn, false);
      return;
    }

  /* First, make sure there's anything that needs doing.  */
  note = find_reg_note (insn, REG_ARGS_SIZE, NULL_RTX);
  if (note == NULL)
    return;

  /* We need to find a spot between the previous and next exception points
     where we can place the note and "properly" deallocate the arguments.  */
  next_candidate = prev_candidate = NULL;

  /* It is often the case that we have insns in the order:
	call
	add sp (previous deallocation)
	sub sp (align for next arglist)
	push arg
     and the add/sub cancel.  Therefore we begin by searching forward.  */

  test = insn;
  while ((test = next_active_insn_bb (bb, test)) != NULL)
    {
      /* Found an existing note: nothing to do.  */
      if (find_reg_note (test, REG_ARGS_SIZE, NULL_RTX))
	return;
      /* Found something that affects unwinding.  Stop searching.  */
      if (CALL_P (test) || !insn_nothrow_p (test))
	break;
      if (next_candidate == NULL)
	next_candidate = test;
    }

  /* Now search backward for a note to overwrite or a candidate insn.  */
  test = insn;
  while ((test = prev_active_insn_bb (bb, test)) != NULL)
    {
      rtx tnote;
      /* Found a place that seems logical to adjust the stack.  */
      tnote = find_reg_note (test, REG_ARGS_SIZE, NULL_RTX);
      if (tnote)
	{
	  XEXP (tnote, 0) = XEXP (note, 0);
	  return;
	}
      if (prev_candidate == NULL)
	prev_candidate = test;
      /* Found something that affects unwinding.  Stop searching.  */
      if (CALL_P (test) || !insn_nothrow_p (test))
	break;
    }

  /* Prefer a candidate before INSN, then one after; as a last resort
     emit a dummy clobber just to have an insn to carry the note.  */
  if (prev_candidate)
    test = prev_candidate;
  else if (next_candidate)
    test = next_candidate;
  else
    {
      /* ??? We *must* have a place, lest we ICE on the lost adjustment.
	 Options are: dummy clobber insn, nop, or prevent the removal of
	 the sp += 0 insn.  */
      /* TODO: Find another way to indicate to the dwarf2 code that we
	 have not in fact lost an adjustment.  */
      test = emit_insn_before (gen_rtx_CLOBBER (VOIDmode, const0_rtx), insn);
    }
  add_reg_note (test, REG_ARGS_SIZE, XEXP (note, 0));
}
424*e4b17023SJohn Marino
/* Subroutine of combine_stack_adjustments, called for each basic block.
   Scans the block forward, tracking the most recent sp += const insn
   (LAST_SP_SET) and all stack references seen since it (REFLIST), and
   tries to merge consecutive adjustments or fold an adjustment into a
   following push.  NOTE(review): at the bottom of the loop, the inner
   "last_sp_set &&" re-check is redundant (the enclosing if already
   guarantees it) but harmless.  */

static void
combine_stack_adjustments_for_block (basic_block bb)
{
  HOST_WIDE_INT last_sp_adjust = 0;	/* Net constant of LAST_SP_SET.  */
  rtx last_sp_set = NULL_RTX;		/* Pending sp += const insn.  */
  rtx last2_sp_set = NULL_RTX;		/* The sp-set insn before that.  */
  struct csa_reflist *reflist = NULL;	/* Stack refs since LAST_SP_SET.  */
  rtx insn, next, set;
  struct record_stack_refs_data data;
  bool end_of_block = false;

  for (insn = BB_HEAD (bb); !end_of_block ; insn = next)
    {
      end_of_block = insn == BB_END (bb);
      next = NEXT_INSN (insn);

      if (! INSN_P (insn))
	continue;

      set = single_set_for_csa (insn);
      if (set)
	{
	  rtx dest = SET_DEST (set);
	  rtx src = SET_SRC (set);

	  /* Find constant additions to the stack pointer.  */
	  if (dest == stack_pointer_rtx
	      && GET_CODE (src) == PLUS
	      && XEXP (src, 0) == stack_pointer_rtx
	      && CONST_INT_P (XEXP (src, 1)))
	    {
	      HOST_WIDE_INT this_adjust = INTVAL (XEXP (src, 1));

	      /* If we've not seen an adjustment previously, record
		 it now and continue.  */
	      if (! last_sp_set)
		{
		  last_sp_set = insn;
		  last_sp_adjust = this_adjust;
		  continue;
		}

	      /* If not all recorded refs can be adjusted, or the
		 adjustment is now too large for a constant addition,
		 we cannot merge the two stack adjustments.

		 Also we need to be careful to not move stack pointer
		 such that we create stack accesses outside the allocated
		 area.  We can combine an allocation into the first insn,
		 or a deallocation into the second insn.  We can not
		 combine an allocation followed by a deallocation.

		 The only somewhat frequent occurrence of the later is when
		 a function allocates a stack frame but does not use it.
		 For this case, we would need to analyze rtl stream to be
		 sure that allocated area is really unused.  This means not
		 only checking the memory references, but also all registers
		 or global memory references possibly containing a stack
		 frame address.

		 Perhaps the best way to address this problem is to teach
		 gcc not to allocate stack for objects never used.  */

	      /* Combine an allocation into the first instruction.  */
	      if (STACK_GROWS_DOWNWARD ? this_adjust <= 0 : this_adjust >= 0)
		{
		  if (try_apply_stack_adjustment (last_sp_set, reflist,
						  last_sp_adjust + this_adjust,
						  this_adjust))
		    {
		      /* It worked!  */
		      maybe_move_args_size_note (last_sp_set, insn, false);
		      delete_insn (insn);
		      last_sp_adjust += this_adjust;
		      continue;
		    }
		}

	      /* Otherwise we have a deallocation.  Do not combine with
		 a previous allocation.  Combine into the second insn.  */
	      else if (STACK_GROWS_DOWNWARD
		       ? last_sp_adjust >= 0 : last_sp_adjust <= 0)
		{
		  if (try_apply_stack_adjustment (insn, reflist,
						  last_sp_adjust + this_adjust,
						  -last_sp_adjust))
		    {
		      /* It worked!  */
		      maybe_move_args_size_note (insn, last_sp_set, true);
		      delete_insn (last_sp_set);
		      last_sp_set = insn;
		      last_sp_adjust += this_adjust;
		      free_csa_reflist (reflist);
		      reflist = NULL;
		      continue;
		    }
		}

	      /* Combination failed.  Restart processing from here.  If
		 deallocation+allocation conspired to cancel, we can
		 delete the old deallocation insn.  */
	      if (last_sp_set)
		{
		  if (last_sp_adjust == 0)
		    {
		      maybe_move_args_size_note (insn, last_sp_set, true);
		      delete_insn (last_sp_set);
		    }
		  else
		    last2_sp_set = last_sp_set;
		}
	      free_csa_reflist (reflist);
	      reflist = NULL;
	      last_sp_set = insn;
	      last_sp_adjust = this_adjust;
	      continue;
	    }

	  /* Find a store with pre-(dec|inc)rement or pre-modify of exactly
	     the previous adjustment and turn it into a simple store.  This
	     is equivalent to anticipating the stack adjustment so this must
	     be an allocation.  */
	  if (MEM_P (dest)
	      && ((STACK_GROWS_DOWNWARD
		   ? (GET_CODE (XEXP (dest, 0)) == PRE_DEC
		      && last_sp_adjust
			 == (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (dest)))
		   : (GET_CODE (XEXP (dest, 0)) == PRE_INC
		      && last_sp_adjust
			 == -(HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (dest))))
		  || ((STACK_GROWS_DOWNWARD
		       ? last_sp_adjust >= 0 : last_sp_adjust <= 0)
		      && GET_CODE (XEXP (dest, 0)) == PRE_MODIFY
		      && GET_CODE (XEXP (XEXP (dest, 0), 1)) == PLUS
		      && XEXP (XEXP (XEXP (dest, 0), 1), 0)
			 == stack_pointer_rtx
		      && GET_CODE (XEXP (XEXP (XEXP (dest, 0), 1), 1))
			 == CONST_INT
		      && INTVAL (XEXP (XEXP (XEXP (dest, 0), 1), 1))
			 == -last_sp_adjust))
	      && XEXP (XEXP (dest, 0), 0) == stack_pointer_rtx
	      && !reg_mentioned_p (stack_pointer_rtx, src)
	      && memory_address_p (GET_MODE (dest), stack_pointer_rtx)
	      && try_apply_stack_adjustment (insn, reflist, 0,
					     -last_sp_adjust))
	    {
	      /* The adjustment has been folded into the push; delete
		 the now-dead sp-set and reset all tracking state.  */
	      if (last2_sp_set)
		maybe_move_args_size_note (last2_sp_set, last_sp_set, false);
	      else
		maybe_move_args_size_note (insn, last_sp_set, true);
	      delete_insn (last_sp_set);
	      free_csa_reflist (reflist);
	      reflist = NULL;
	      last_sp_set = NULL_RTX;
	      last_sp_adjust = 0;
	      continue;
	    }
	}

      /* Not an sp adjustment: try to record this insn's stack
	 references so a later merge can rewrite them.  */
      data.insn = insn;
      data.reflist = reflist;
      if (!CALL_P (insn) && last_sp_set
	  && !for_each_rtx (&PATTERN (insn), record_stack_refs, &data))
	{
	  reflist = data.reflist;
	  continue;
	}
      reflist = data.reflist;

      /* Otherwise, we were not able to process the instruction.
	 Do not continue collecting data across such a one.  */
      if (last_sp_set
	  && (CALL_P (insn)
	      || reg_mentioned_p (stack_pointer_rtx, PATTERN (insn))))
	{
	  if (last_sp_set && last_sp_adjust == 0)
	    {
	      force_move_args_size_note (bb, last2_sp_set, last_sp_set);
	      delete_insn (last_sp_set);
	    }
	  free_csa_reflist (reflist);
	  reflist = NULL;
	  last2_sp_set = NULL_RTX;
	  last_sp_set = NULL_RTX;
	  last_sp_adjust = 0;
	}
    }

  /* A pending sp += 0 at the end of the block is dead; delete it
     after salvaging its REG_ARGS_SIZE note.  */
  if (last_sp_set && last_sp_adjust == 0)
    {
      force_move_args_size_note (bb, last2_sp_set, last_sp_set);
      delete_insn (last_sp_set);
    }

  if (reflist)
    free_csa_reflist (reflist);
}
624*e4b17023SJohn Marino
625*e4b17023SJohn Marino
626*e4b17023SJohn Marino static bool
gate_handle_stack_adjustments(void)627*e4b17023SJohn Marino gate_handle_stack_adjustments (void)
628*e4b17023SJohn Marino {
629*e4b17023SJohn Marino return flag_combine_stack_adjustments;
630*e4b17023SJohn Marino }
631*e4b17023SJohn Marino
/* Pass execute hook: clean up the CFG, then run the combiner when the
   target's argument-passing scheme can profit from it.  Returns 0
   (no additional TODO flags).  */

static unsigned int
rest_of_handle_stack_adjustments (void)
{
  cleanup_cfg (flag_crossjumping ? CLEANUP_CROSSJUMP : 0);

  /* This is kind of a heuristic.  We need to run combine_stack_adjustments
     even for machines with possibly nonzero TARGET_RETURN_POPS_ARGS
     and ACCUMULATE_OUTGOING_ARGS.  We expect that only ports having
     push instructions will have popping returns.  */
#ifndef PUSH_ROUNDING
  if (!ACCUMULATE_OUTGOING_ARGS)
#endif
    {
      /* The combiner relies on up-to-date dataflow with death notes.  */
      df_note_add_problem ();
      df_analyze ();
      combine_stack_adjustments ();
    }
  return 0;
}
651*e4b17023SJohn Marino
/* Pass descriptor for the "csa" (combine stack adjustments) RTL pass.  */

struct rtl_opt_pass pass_stack_adjustments =
{
 {
  RTL_PASS,
  "csa",				/* name */
  gate_handle_stack_adjustments,	/* gate */
  rest_of_handle_stack_adjustments,	/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_COMBINE_STACK_ADJUST,		/* tv_id */
  0,					/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_df_finish | TODO_verify_rtl_sharing |
  TODO_ggc_collect,			/* todo_flags_finish */
 }
};
671