/* Combine stack adjustments.
   Copyright (C) 1987-2015 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* Track stack adjustments and stack memory references.  Attempt to
   reduce the number of stack adjustments by back-propagating across
   the memory references.

   This is intended primarily for use with targets that do not define
   ACCUMULATE_OUTGOING_ARGS.  It is of significantly more value to
   targets that define PREFERRED_STACK_BOUNDARY more aligned than
   STACK_BOUNDARY (e.g. x86), or to targets where not all registers
   can be pushed (e.g. x86 fp regs) and the store would ordinarily
   have to be implemented as a sub/mov pair due to restrictions in
   calls.c.

   Propagation stops when any of the insns that need adjusting are
   (a) no longer valid because we've exceeded their range, (b) a
   non-trivial push instruction, or (c) a call instruction.

   Restriction (b) is based on the assumption that push instructions
   are smaller or faster.  If a port really wants to remove all
   pushes, it should have defined ACCUMULATE_OUTGOING_ARGS.  The
   one exception that is made is for an add immediately followed
   by a push.  */

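/* For illustration only (this sequence is hypothetical and not taken
   from any particular target), the pass can transform

	(set sp (plus sp (const_int -16)))
	(set (mem (plus sp (const_int 8))) (reg r1))
	(set sp (plus sp (const_int -32)))

   into

	(set sp (plus sp (const_int -48)))
	(set (mem (plus sp (const_int 40))) (reg r1))

   by folding the second adjustment into the first and re-expressing the
   intervening stack reference relative to the combined stack pointer.  */
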
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "tm_p.h"
#include "insn-config.h"
#include "recog.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "flags.h"
#include "hashtab.h"
#include "hash-set.h"
#include "vec.h"
#include "machmode.h"
#include "input.h"
#include "function.h"
#include "symtab.h"
#include "statistics.h"
#include "double-int.h"
#include "real.h"
#include "fixed-value.h"
#include "alias.h"
#include "wide-int.h"
#include "inchash.h"
#include "tree.h"
#include "expmed.h"
#include "dojump.h"
#include "explow.h"
#include "calls.h"
#include "emit-rtl.h"
#include "varasm.h"
#include "stmt.h"
#include "expr.h"
#include "predict.h"
#include "dominance.h"
#include "cfg.h"
#include "cfgrtl.h"
#include "basic-block.h"
#include "df.h"
#include "except.h"
#include "reload.h"
#include "tree-pass.h"
#include "rtl-iter.h"

/* Turn STACK_GROWS_DOWNWARD into a boolean.  */
#ifdef STACK_GROWS_DOWNWARD
#undef STACK_GROWS_DOWNWARD
#define STACK_GROWS_DOWNWARD 1
#else
#define STACK_GROWS_DOWNWARD 0
#endif

/* This structure records two kinds of stack references between stack
   adjusting instructions: stack references in memory addresses for
   regular insns and all stack references for debug insns.  */
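/* SP_OFFSET is the constant offset from the stack pointer within the
   recorded address (zero when the reference is the stack pointer itself
   or a plain (mem sp)).  REF points at the location inside INSN's
   pattern that will be rewritten when a combined adjustment is applied;
   NEXT chains the list.  */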

struct csa_reflist
{
  HOST_WIDE_INT sp_offset;
  rtx_insn *insn;
  rtx *ref;
  struct csa_reflist *next;
};

static int stack_memref_p (rtx);
static rtx single_set_for_csa (rtx_insn *);
static void free_csa_reflist (struct csa_reflist *);
static struct csa_reflist *record_one_stack_ref (rtx_insn *, rtx *,
						 struct csa_reflist *);
static int try_apply_stack_adjustment (rtx_insn *, struct csa_reflist *,
				       HOST_WIDE_INT, HOST_WIDE_INT);
static void combine_stack_adjustments_for_block (basic_block);


/* Main entry point for stack adjustment combination.  */

static void
combine_stack_adjustments (void)
{
  basic_block bb;

  FOR_EACH_BB_FN (bb, cfun)
    combine_stack_adjustments_for_block (bb);
}

/* Recognize a MEM whose address is the stack pointer or has the form
   (plus sp const_int).  */

static int
stack_memref_p (rtx x)
{
  if (!MEM_P (x))
    return 0;
  x = XEXP (x, 0);

  if (x == stack_pointer_rtx)
    return 1;
  if (GET_CODE (x) == PLUS
      && XEXP (x, 0) == stack_pointer_rtx
      && CONST_INT_P (XEXP (x, 1)))
    return 1;

  return 0;
}

/* Recognize either normal single_set or the hack in i386.md for
   tying fp and sp adjustments.  */
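/* In the latter case the pattern is a PARALLEL whose first element is
   the interesting SET and whose remaining elements are only no-op sets
   (SET_SRC identical to SET_DEST), CLOBBERs, or USEs; anything else
   makes us give up and return NULL_RTX.  */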

static rtx
single_set_for_csa (rtx_insn *insn)
{
  int i;
  rtx tmp = single_set (insn);
  if (tmp)
    return tmp;

  if (!NONJUMP_INSN_P (insn)
      || GET_CODE (PATTERN (insn)) != PARALLEL)
    return NULL_RTX;

  tmp = PATTERN (insn);
  if (GET_CODE (XVECEXP (tmp, 0, 0)) != SET)
    return NULL_RTX;

  for (i = 1; i < XVECLEN (tmp, 0); ++i)
    {
      rtx this_rtx = XVECEXP (tmp, 0, i);

      /* The special case is allowing a no-op set.  */
      if (GET_CODE (this_rtx) == SET
	  && SET_SRC (this_rtx) == SET_DEST (this_rtx))
	;
      else if (GET_CODE (this_rtx) != CLOBBER
	       && GET_CODE (this_rtx) != USE)
	return NULL_RTX;
    }

  return XVECEXP (tmp, 0, 0);
}

/* Free the list of csa_reflist nodes.  */

static void
free_csa_reflist (struct csa_reflist *reflist)
{
  struct csa_reflist *next;
  for (; reflist ; reflist = next)
    {
      next = reflist->next;
      free (reflist);
    }
}

/* Create a new csa_reflist node from the given stack reference.
   It is already known that the reference is either a MEM satisfying the
   predicate stack_memref_p or a REG representing the stack pointer.  */

static struct csa_reflist *
record_one_stack_ref (rtx_insn *insn, rtx *ref, struct csa_reflist *next_reflist)
{
  struct csa_reflist *ml;

  ml = XNEW (struct csa_reflist);

  if (REG_P (*ref) || XEXP (*ref, 0) == stack_pointer_rtx)
    ml->sp_offset = 0;
  else
    ml->sp_offset = INTVAL (XEXP (XEXP (*ref, 0), 1));

  ml->insn = insn;
  ml->ref = ref;
  ml->next = next_reflist;

  return ml;
}

/* We only know how to adjust the CFA; no other frame-related changes
   may appear in any insn to be deleted.  */
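/* Concretely: return true if INSN is not frame-related at all, or if
   it carries a REG_CFA_ADJUST_CFA note and no other CFA-class note.
   A frame-related insn with no CFA notes is rejected because its
   meaning depends on the prologue state machine.  */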

static bool
no_unhandled_cfa (rtx_insn *insn)
{
  if (!RTX_FRAME_RELATED_P (insn))
    return true;

  /* No CFA notes at all is a legacy interpretation like
     FRAME_RELATED_EXPR, and is context sensitive within
     the prologue state machine.  We can't handle that here.  */
  bool has_cfa_adjust = false;

  for (rtx link = REG_NOTES (insn); link; link = XEXP (link, 1))
    switch (REG_NOTE_KIND (link))
      {
      default:
        break;
      case REG_CFA_ADJUST_CFA:
	has_cfa_adjust = true;
	break;

      case REG_FRAME_RELATED_EXPR:
      case REG_CFA_DEF_CFA:
      case REG_CFA_OFFSET:
      case REG_CFA_REGISTER:
      case REG_CFA_EXPRESSION:
      case REG_CFA_RESTORE:
      case REG_CFA_SET_VDRAP:
      case REG_CFA_WINDOW_SAVE:
      case REG_CFA_FLUSH_QUEUE:
	return false;
      }

  return has_cfa_adjust;
}

/* Attempt to apply NEW_ADJUST to the stack adjusting insn INSN, and to
   adjust each of the memories and stack references in REFLIST by DELTA.
   Return nonzero on success.  */
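/* For example (values are illustrative), when an allocation of -32 is
   folded into an earlier adjustment of -16, the callers pass
   NEW_ADJUST = -48 and DELTA = -32: INSN's constant becomes -48 and a
   recorded reference at (mem (plus sp 8)) is rewritten to
   (mem (plus sp 40)), i.e. each recorded offset becomes
   sp_offset - DELTA.  When INSN is instead a push being turned into a
   plain store, its destination address is simply replaced by the stack
   pointer and NEW_ADJUST is not used.  */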

static int
try_apply_stack_adjustment (rtx_insn *insn, struct csa_reflist *reflist,
			    HOST_WIDE_INT new_adjust, HOST_WIDE_INT delta)
{
  struct csa_reflist *ml;
  rtx set;

  set = single_set_for_csa (insn);
  if (MEM_P (SET_DEST (set)))
    validate_change (insn, &SET_DEST (set),
		     replace_equiv_address (SET_DEST (set), stack_pointer_rtx),
		     1);
  else
    validate_change (insn, &XEXP (SET_SRC (set), 1), GEN_INT (new_adjust), 1);

  for (ml = reflist; ml ; ml = ml->next)
    {
      rtx new_addr = plus_constant (Pmode, stack_pointer_rtx,
				    ml->sp_offset - delta);
      rtx new_val;

      if (MEM_P (*ml->ref))
	new_val = replace_equiv_address_nv (*ml->ref, new_addr);
      else if (GET_MODE (*ml->ref) == GET_MODE (stack_pointer_rtx))
	new_val = new_addr;
      else
	new_val = lowpart_subreg (GET_MODE (*ml->ref), new_addr,
				  GET_MODE (new_addr));
      validate_change (ml->insn, ml->ref, new_val, 1);
    }

  if (apply_change_group ())
    {
      /* Succeeded.  Update our knowledge of the stack references.  */
      for (ml = reflist; ml ; ml = ml->next)
	ml->sp_offset -= delta;

      return 1;
    }
  else
    return 0;
}

/* For non-debug insns, record all stack memory references in INSN
   and return true if there were no other (unrecorded) references to the
   stack pointer.  For debug insns, record all stack references regardless
   of context and unconditionally return true.  */
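/* For instance (illustrative), (set (reg r1) (mem (plus sp (const_int 8))))
   records the MEM and returns true, while a non-debug
   (set (reg r1) (reg sp)) returns false because non-memory uses of the
   stack pointer are not handled here.  */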

static bool
record_stack_refs (rtx_insn *insn, struct csa_reflist **reflist)
{
  subrtx_ptr_iterator::array_type array;
  FOR_EACH_SUBRTX_PTR (iter, array, &PATTERN (insn), NONCONST)
    {
      rtx *loc = *iter;
      rtx x = *loc;
      switch (GET_CODE (x))
	{
	case MEM:
	  if (!reg_mentioned_p (stack_pointer_rtx, x))
	    iter.skip_subrtxes ();
	  /* We are not able to handle correctly all possible memrefs
	     containing the stack pointer, so this check is necessary.  */
	  else if (stack_memref_p (x))
	    {
	      *reflist = record_one_stack_ref (insn, loc, *reflist);
	      iter.skip_subrtxes ();
	    }
	  /* Try harder for DEBUG_INSNs, handle e.g.
	     (mem (mem (sp + 16) + 4)).  */
	  else if (!DEBUG_INSN_P (insn))
	    return false;
	  break;

	case REG:
	  /* ??? We want to be able to handle non-memory stack pointer
	     references later.  For now just discard all insns referring to
	     the stack pointer outside mem expressions.  We would probably
	     want to teach validate_replace to simplify expressions first.

	     We can't just compare with STACK_POINTER_RTX because the
	     reference to the stack pointer might be in some other mode.
	     In particular, an explicit clobber in an asm statement will
	     result in a QImode clobber.

	     In DEBUG_INSNs, we want to replace all occurrences, otherwise
	     they will cause -fcompare-debug failures.  */
	  if (REGNO (x) == STACK_POINTER_REGNUM)
	    {
	      if (!DEBUG_INSN_P (insn))
		return false;
	      *reflist = record_one_stack_ref (insn, loc, *reflist);
	    }
	  break;

	default:
	  break;
	}
    }
  return true;
}

/* If INSN has a REG_ARGS_SIZE note, move it to LAST.
   AFTER is true iff LAST follows INSN in the instruction stream.  */

static void
maybe_move_args_size_note (rtx_insn *last, rtx_insn *insn, bool after)
{
  rtx note, last_note;

  note = find_reg_note (insn, REG_ARGS_SIZE, NULL_RTX);
  if (note == NULL)
    return;

  last_note = find_reg_note (last, REG_ARGS_SIZE, NULL_RTX);
  if (last_note)
    {
      /* The ARGS_SIZE notes are *not* cumulative.  They represent an
	 absolute value, and the "most recent" note wins.  */
      if (!after)
        XEXP (last_note, 0) = XEXP (note, 0);
    }
  else
    add_reg_note (last, REG_ARGS_SIZE, XEXP (note, 0));
}

/* Merge any REG_CFA_ADJUST_CFA note from SRC into DST.
   AFTER is true iff DST follows SRC in the instruction stream.  */
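/* As an illustration (not from any particular target), if SRC carries
   the note (set sp (plus sp (const_int -16))) and DST, which follows it,
   carries (set sp (plus sp (const_int -32))), DST's note is rewritten by
   substituting SRC's source for the stack pointer and simplifying,
   yielding (set sp (plus sp (const_int -48))).  */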

static void
maybe_merge_cfa_adjust (rtx_insn *dst, rtx_insn *src, bool after)
{
  rtx snote = NULL, dnote = NULL;
  rtx sexp, dexp;
  rtx exp1, exp2;

  if (RTX_FRAME_RELATED_P (src))
    snote = find_reg_note (src, REG_CFA_ADJUST_CFA, NULL_RTX);
  if (snote == NULL)
    return;
  sexp = XEXP (snote, 0);

  if (RTX_FRAME_RELATED_P (dst))
    dnote = find_reg_note (dst, REG_CFA_ADJUST_CFA, NULL_RTX);
  if (dnote == NULL)
    {
      add_reg_note (dst, REG_CFA_ADJUST_CFA, sexp);
      return;
    }
  dexp = XEXP (dnote, 0);

  gcc_assert (GET_CODE (sexp) == SET);
  gcc_assert (GET_CODE (dexp) == SET);

  if (after)
    exp1 = dexp, exp2 = sexp;
  else
    exp1 = sexp, exp2 = dexp;

  SET_SRC (exp1) = simplify_replace_rtx (SET_SRC (exp1), SET_DEST (exp2),
					 SET_SRC (exp2));
  XEXP (dnote, 0) = exp1;
}

/* Return the previous active insn within BB, or NULL if there is none.  */

static rtx_insn *
prev_active_insn_bb (basic_block bb, rtx_insn *insn)
{
  for (insn = PREV_INSN (insn);
       insn != PREV_INSN (BB_HEAD (bb));
       insn = PREV_INSN (insn))
    if (active_insn_p (insn))
      return insn;
  return NULL;
}

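/* Likewise, return the next active insn within BB, or NULL if there
   is none.  */
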
static rtx_insn *
next_active_insn_bb (basic_block bb, rtx_insn *insn)
{
  for (insn = NEXT_INSN (insn);
       insn != NEXT_INSN (BB_END (bb));
       insn = NEXT_INSN (insn))
    if (active_insn_p (insn))
      return insn;
  return NULL;
}

/* If INSN has a REG_ARGS_SIZE note, move it to PREV if possible.  Otherwise
   search for a nearby candidate within BB where we can stick the note.  */

static void
force_move_args_size_note (basic_block bb, rtx_insn *prev, rtx_insn *insn)
{
  rtx note;
  rtx_insn *test, *next_candidate, *prev_candidate;

  /* If PREV exists, tail-call to the logic in the other function.  */
  if (prev)
    {
      maybe_move_args_size_note (prev, insn, false);
      return;
    }

  /* First, make sure there's anything that needs doing.  */
  note = find_reg_note (insn, REG_ARGS_SIZE, NULL_RTX);
  if (note == NULL)
    return;

  /* We need to find a spot between the previous and next exception points
     where we can place the note and "properly" deallocate the arguments.  */
  next_candidate = prev_candidate = NULL;

  /* It is often the case that we have insns in the order:
	call
	add sp (previous deallocation)
	sub sp (align for next arglist)
	push arg
     and the add/sub cancel.  Therefore we begin by searching forward.  */

  test = insn;
  while ((test = next_active_insn_bb (bb, test)) != NULL)
    {
      /* Found an existing note: nothing to do.  */
      if (find_reg_note (test, REG_ARGS_SIZE, NULL_RTX))
        return;
      /* Found something that affects unwinding.  Stop searching.  */
      if (CALL_P (test) || !insn_nothrow_p (test))
	break;
      if (next_candidate == NULL)
	next_candidate = test;
    }

  test = insn;
  while ((test = prev_active_insn_bb (bb, test)) != NULL)
    {
      rtx tnote;
      /* Found a place that seems logical to adjust the stack.  */
      tnote = find_reg_note (test, REG_ARGS_SIZE, NULL_RTX);
      if (tnote)
	{
	  XEXP (tnote, 0) = XEXP (note, 0);
	  return;
	}
      if (prev_candidate == NULL)
	prev_candidate = test;
      /* Found something that affects unwinding.  Stop searching.  */
      if (CALL_P (test) || !insn_nothrow_p (test))
	break;
    }

  if (prev_candidate)
    test = prev_candidate;
  else if (next_candidate)
    test = next_candidate;
  else
    {
      /* ??? We *must* have a place, lest we ICE on the lost adjustment.
	 Options are: dummy clobber insn, nop, or prevent the removal of
	 the sp += 0 insn.  */
      /* TODO: Find another way to indicate to the dwarf2 code that we
	 have not in fact lost an adjustment.  */
      test = emit_insn_before (gen_rtx_CLOBBER (VOIDmode, const0_rtx), insn);
    }
  add_reg_note (test, REG_ARGS_SIZE, XEXP (note, 0));
}

/* Subroutine of combine_stack_adjustments, called for each basic block.  */
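/* Throughout the scan, LAST_SP_SET is the most recent still-pending stack
   adjustment insn and LAST_SP_ADJUST is the net constant it applies.
   LAST2_SP_SET remembers an earlier adjustment that is kept only as a
   landing spot for REG_ARGS_SIZE notes, and REFLIST records the stack
   references seen since LAST_SP_SET that would need rewriting if the
   adjustments are combined.  */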

static void
combine_stack_adjustments_for_block (basic_block bb)
{
  HOST_WIDE_INT last_sp_adjust = 0;
  rtx_insn *last_sp_set = NULL;
  rtx_insn *last2_sp_set = NULL;
  struct csa_reflist *reflist = NULL;
  rtx_insn *insn, *next;
  rtx set;
  bool end_of_block = false;

  for (insn = BB_HEAD (bb); !end_of_block ; insn = next)
    {
      end_of_block = insn == BB_END (bb);
      next = NEXT_INSN (insn);

      if (! INSN_P (insn))
	continue;

      set = single_set_for_csa (insn);
      if (set)
	{
	  rtx dest = SET_DEST (set);
	  rtx src = SET_SRC (set);

	  /* Find constant additions to the stack pointer.  */
	  if (dest == stack_pointer_rtx
	      && GET_CODE (src) == PLUS
	      && XEXP (src, 0) == stack_pointer_rtx
	      && CONST_INT_P (XEXP (src, 1)))
	    {
	      HOST_WIDE_INT this_adjust = INTVAL (XEXP (src, 1));

	      /* If we've not seen an adjustment previously, record
		 it now and continue.  */
	      if (! last_sp_set)
		{
		  last_sp_set = insn;
		  last_sp_adjust = this_adjust;
		  continue;
		}

	      /* If not all recorded refs can be adjusted, or the
		 adjustment is now too large for a constant addition,
		 we cannot merge the two stack adjustments.

		 Also we need to be careful not to move the stack pointer
		 such that we create stack accesses outside the allocated
		 area.  We can combine an allocation into the first insn,
		 or a deallocation into the second insn.  We cannot
		 combine an allocation followed by a deallocation.

		 The only somewhat frequent occurrence of the latter is when
		 a function allocates a stack frame but does not use it.
		 For this case, we would need to analyze the rtl stream to be
		 sure that the allocated area is really unused.  This means
		 not only checking the memory references, but also all
		 registers or global memory references possibly containing a
		 stack frame address.

		 Perhaps the best way to address this problem is to teach
		 gcc not to allocate stack for objects never used.  */

	      /* Combine an allocation into the first instruction.  */
	      if (STACK_GROWS_DOWNWARD ? this_adjust <= 0 : this_adjust >= 0)
		{
		  if (no_unhandled_cfa (insn)
		      && try_apply_stack_adjustment (last_sp_set, reflist,
						     last_sp_adjust
						     + this_adjust,
						     this_adjust))
		    {
		      /* It worked!  */
		      maybe_move_args_size_note (last_sp_set, insn, false);
		      maybe_merge_cfa_adjust (last_sp_set, insn, false);
		      delete_insn (insn);
		      last_sp_adjust += this_adjust;
		      continue;
		    }
		}

	      /* Otherwise we have a deallocation.  Do not combine with
		 a previous allocation.  Combine into the second insn.  */
	      else if (STACK_GROWS_DOWNWARD
		       ? last_sp_adjust >= 0 : last_sp_adjust <= 0)
		{
		  if (no_unhandled_cfa (last_sp_set)
		      && try_apply_stack_adjustment (insn, reflist,
						     last_sp_adjust
						     + this_adjust,
						     -last_sp_adjust))
		    {
		      /* It worked!  */
		      maybe_move_args_size_note (insn, last_sp_set, true);
		      maybe_merge_cfa_adjust (insn, last_sp_set, true);
		      delete_insn (last_sp_set);
		      last_sp_set = insn;
		      last_sp_adjust += this_adjust;
		      free_csa_reflist (reflist);
		      reflist = NULL;
		      continue;
		    }
		}

	      /* Combination failed.  Restart processing from here.  If
		 deallocation+allocation conspired to cancel, we can
		 delete the old deallocation insn.  */
	      if (last_sp_set)
		{
		  if (last_sp_adjust == 0 && no_unhandled_cfa (last_sp_set))
		    {
		      maybe_move_args_size_note (insn, last_sp_set, true);
		      maybe_merge_cfa_adjust (insn, last_sp_set, true);
		      delete_insn (last_sp_set);
		    }
		  else
		    last2_sp_set = last_sp_set;
		}
	      free_csa_reflist (reflist);
	      reflist = NULL;
	      last_sp_set = insn;
	      last_sp_adjust = this_adjust;
	      continue;
	    }

	  /* Find a store with pre-(dec|inc)rement or pre-modify of exactly
	     the previous adjustment and turn it into a simple store.  This
	     is equivalent to anticipating the stack adjustment so this must
	     be an allocation.  */
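	  /* For instance (illustrative, downward-growing stack): with a
	     pending sp += 4 and a 4-byte SImode push, the insn
	     (set (mem (pre_dec sp)) (reg r1)) is rewritten to
	     (set (mem sp) (reg r1)) and the earlier sp += 4 insn is
	     deleted, since the push re-allocates exactly the bytes the
	     earlier adjustment released.  */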
	  if (MEM_P (dest)
	      && ((STACK_GROWS_DOWNWARD
		   ? (GET_CODE (XEXP (dest, 0)) == PRE_DEC
		      && last_sp_adjust
			 == (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (dest)))
		   : (GET_CODE (XEXP (dest, 0)) == PRE_INC
		      && last_sp_adjust
		         == -(HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (dest))))
		  || ((STACK_GROWS_DOWNWARD
		       ? last_sp_adjust >= 0 : last_sp_adjust <= 0)
		      && GET_CODE (XEXP (dest, 0)) == PRE_MODIFY
		      && GET_CODE (XEXP (XEXP (dest, 0), 1)) == PLUS
		      && XEXP (XEXP (XEXP (dest, 0), 1), 0)
			 == stack_pointer_rtx
		      && GET_CODE (XEXP (XEXP (XEXP (dest, 0), 1), 1))
		         == CONST_INT
		      && INTVAL (XEXP (XEXP (XEXP (dest, 0), 1), 1))
		         == -last_sp_adjust))
	      && XEXP (XEXP (dest, 0), 0) == stack_pointer_rtx
	      && !reg_mentioned_p (stack_pointer_rtx, src)
	      && memory_address_p (GET_MODE (dest), stack_pointer_rtx)
	      && try_apply_stack_adjustment (insn, reflist, 0,
					     -last_sp_adjust))
	    {
	      if (last2_sp_set)
		maybe_move_args_size_note (last2_sp_set, last_sp_set, false);
	      else
	        maybe_move_args_size_note (insn, last_sp_set, true);
	      delete_insn (last_sp_set);
	      free_csa_reflist (reflist);
	      reflist = NULL;
	      last_sp_set = NULL;
	      last_sp_adjust = 0;
	      continue;
	    }
	}

      if (!CALL_P (insn) && last_sp_set
	  && record_stack_refs (insn, &reflist))
	continue;

      /* Otherwise, we were not able to process the instruction.
	 Do not continue collecting data across such an insn.  */
      if (last_sp_set
	  && (CALL_P (insn)
	      || reg_mentioned_p (stack_pointer_rtx, PATTERN (insn))))
	{
	  if (last_sp_set && last_sp_adjust == 0)
	    {
	      force_move_args_size_note (bb, last2_sp_set, last_sp_set);
	      delete_insn (last_sp_set);
	    }
	  free_csa_reflist (reflist);
	  reflist = NULL;
	  last2_sp_set = NULL;
	  last_sp_set = NULL;
	  last_sp_adjust = 0;
	}
    }

  if (last_sp_set && last_sp_adjust == 0)
    {
      force_move_args_size_note (bb, last2_sp_set, last_sp_set);
      delete_insn (last_sp_set);
    }

  if (reflist)
    free_csa_reflist (reflist);
}

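/* Run the pass: add the df note problem, re-analyze, and then combine
   stack adjustments in each basic block of the current function.  */
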
static unsigned int
rest_of_handle_stack_adjustments (void)
{
  df_note_add_problem ();
  df_analyze ();
  combine_stack_adjustments ();
  return 0;
}

namespace {

const pass_data pass_data_stack_adjustments =
{
  RTL_PASS, /* type */
  "csa", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_COMBINE_STACK_ADJUST, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_df_finish, /* todo_flags_finish */
};

class pass_stack_adjustments : public rtl_opt_pass
{
public:
  pass_stack_adjustments (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_stack_adjustments, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *);
  virtual unsigned int execute (function *)
    {
      return rest_of_handle_stack_adjustments ();
    }

}; // class pass_stack_adjustments

bool
pass_stack_adjustments::gate (function *)
{
  /* This is kind of a heuristic.  We need to run combine_stack_adjustments
     even for machines with possibly nonzero TARGET_RETURN_POPS_ARGS
     and ACCUMULATE_OUTGOING_ARGS.  We expect that only ports having
     push instructions will have popping returns.  */
#ifndef PUSH_ROUNDING
  if (ACCUMULATE_OUTGOING_ARGS)
    return false;
#endif
  return flag_combine_stack_adjustments;
}

} // anon namespace

rtl_opt_pass *
make_pass_stack_adjustments (gcc::context *ctxt)
{
  return new pass_stack_adjustments (ctxt);
}