/* Subroutines for manipulating rtx's in semantically interesting ways.
   Copyright (C) 1987, 1991, 1994, 1995, 1996, 1997, 1998, 1999, 2000,
   2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */


#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "diagnostic-core.h"
#include "rtl.h"
#include "tree.h"
#include "tm_p.h"
#include "flags.h"
#include "except.h"
#include "function.h"
#include "expr.h"
#include "optabs.h"
#include "libfuncs.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "ggc.h"
#include "recog.h"
#include "langhooks.h"
#include "target.h"
#include "common/common-target.h"
#include "output.h"

static rtx break_out_memory_refs (rtx);

/* Truncate and perhaps sign-extend C as appropriate for MODE.  */

HOST_WIDE_INT
trunc_int_for_mode (HOST_WIDE_INT c, enum machine_mode mode)
{
  int width = GET_MODE_PRECISION (mode);

  /* You want to truncate to a _what_?  */
  gcc_assert (SCALAR_INT_MODE_P (mode));

  /* Canonicalize BImode to 0 and STORE_FLAG_VALUE.  */
  if (mode == BImode)
    return c & 1 ? STORE_FLAG_VALUE : 0;

  /* Sign-extend for the requested mode.  */
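
  /* The three steps below mask C to WIDTH bits and then sign-extend it:
     XORing the masked value with the sign bit and subtracting the sign
     bit flips the top bit into a sign.  For example, with WIDTH == 8,
     c == 0xff is masked to 0xff, XORed to 0x7f, and 0x7f - 0x80 == -1.  */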
  if (width < HOST_BITS_PER_WIDE_INT)
    {
      HOST_WIDE_INT sign = 1;
      sign <<= width - 1;
      c &= (sign << 1) - 1;
      c ^= sign;
      c -= sign;
    }

  return c;
}

/* Return an rtx for the sum of X and the integer C.  */
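/* For example, adding 4 to (const_int 8) folds to (const_int 12), while
   adding 4 to a SYMBOL_REF yields the canonical
   (const (plus (symbol_ref ...) (const_int 4))) form.  */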

rtx
plus_constant (rtx x, HOST_WIDE_INT c)
{
  RTX_CODE code;
  rtx y;
  enum machine_mode mode;
  rtx tem;
  int all_constant = 0;

  if (c == 0)
    return x;

 restart:

  code = GET_CODE (x);
  mode = GET_MODE (x);
  y = x;

  switch (code)
    {
    case CONST_INT:
      return GEN_INT (INTVAL (x) + c);

    case CONST_DOUBLE:
      {
        unsigned HOST_WIDE_INT l1 = CONST_DOUBLE_LOW (x);
        HOST_WIDE_INT h1 = CONST_DOUBLE_HIGH (x);
        unsigned HOST_WIDE_INT l2 = c;
        HOST_WIDE_INT h2 = c < 0 ? ~0 : 0;
        unsigned HOST_WIDE_INT lv;
        HOST_WIDE_INT hv;

        add_double (l1, h1, l2, h2, &lv, &hv);

        return immed_double_const (lv, hv, VOIDmode);
      }

    case MEM:
      /* If this is a reference to the constant pool, try replacing it with
         a reference to a new constant.  If the resulting address isn't
         valid, don't return it because we have no way to validize it.  */
      if (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
          && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)))
        {
          tem
            = force_const_mem (GET_MODE (x),
                               plus_constant (get_pool_constant (XEXP (x, 0)),
                                              c));
          if (memory_address_p (GET_MODE (tem), XEXP (tem, 0)))
            return tem;
        }
      break;

    case CONST:
      /* If adding to something entirely constant, set a flag
         so that we can add a CONST around the result.  */
      x = XEXP (x, 0);
      all_constant = 1;
      goto restart;

    case SYMBOL_REF:
    case LABEL_REF:
      all_constant = 1;
      break;

    case PLUS:
      /* The interesting case is adding the integer to a sum.  Look for
         a constant term in the sum and combine it with C.  For an integer
         constant term, we make a combined integer.  For a constant term
         that is not an explicit integer, we cannot really combine, but
         group them together anyway.

         Restart or use a recursive call in case the remaining operand is
         something that we handle specially, such as a SYMBOL_REF.

         We may not immediately return from the recursive call here, lest
         all_constant be lost.  */

      if (CONST_INT_P (XEXP (x, 1)))
        {
          c += INTVAL (XEXP (x, 1));

          if (GET_MODE (x) != VOIDmode)
            c = trunc_int_for_mode (c, GET_MODE (x));

          x = XEXP (x, 0);
          goto restart;
        }
      else if (CONSTANT_P (XEXP (x, 1)))
        {
          x = gen_rtx_PLUS (mode, XEXP (x, 0), plus_constant (XEXP (x, 1), c));
          c = 0;
        }
      else if (find_constant_term_loc (&y))
        {
          /* We need to be careful since X may be shared and we can't
             modify it in place.  */
          rtx copy = copy_rtx (x);
          rtx *const_loc = find_constant_term_loc (&copy);

          *const_loc = plus_constant (*const_loc, c);
          x = copy;
          c = 0;
        }
      break;

    default:
      break;
    }

  if (c != 0)
    x = gen_rtx_PLUS (mode, x, GEN_INT (c));

  if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == LABEL_REF)
    return x;
  else if (all_constant)
    return gen_rtx_CONST (mode, x);
  else
    return x;
}

/* If X is a sum, return a new sum like X but lacking any constant terms.
   Add all the removed constant terms into *CONSTPTR.
   X itself is not altered.  The result != X if and only if
   it is not isomorphic to X.  */
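/* For example, starting with *CONSTPTR == const0_rtx,
   (plus (plus (reg) (const_int 8)) (const_int -3)) comes back as (reg)
   with *CONSTPTR set to (const_int 5).  */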

rtx
eliminate_constant_term (rtx x, rtx *constptr)
{
  rtx x0, x1;
  rtx tem;

  if (GET_CODE (x) != PLUS)
    return x;

  /* First handle constants appearing at this level explicitly.  */
  if (CONST_INT_P (XEXP (x, 1))
      && 0 != (tem = simplify_binary_operation (PLUS, GET_MODE (x), *constptr,
                                                XEXP (x, 1)))
      && CONST_INT_P (tem))
    {
      *constptr = tem;
      return eliminate_constant_term (XEXP (x, 0), constptr);
    }

  tem = const0_rtx;
  x0 = eliminate_constant_term (XEXP (x, 0), &tem);
  x1 = eliminate_constant_term (XEXP (x, 1), &tem);
  if ((x1 != XEXP (x, 1) || x0 != XEXP (x, 0))
      && 0 != (tem = simplify_binary_operation (PLUS, GET_MODE (x),
                                                *constptr, tem))
      && CONST_INT_P (tem))
    {
      *constptr = tem;
      return gen_rtx_PLUS (GET_MODE (x), x0, x1);
    }

  return x;
}

/* Return an rtx for the size in bytes of the value of EXP.  */

rtx
expr_size (tree exp)
{
  tree size;

  if (TREE_CODE (exp) == WITH_SIZE_EXPR)
    size = TREE_OPERAND (exp, 1);
  else
    {
      size = tree_expr_size (exp);
      gcc_assert (size);
      gcc_assert (size == SUBSTITUTE_PLACEHOLDER_IN_EXPR (size, exp));
    }

  return expand_expr (size, NULL_RTX, TYPE_MODE (sizetype), EXPAND_NORMAL);
}

/* Return a wide integer for the size in bytes of the value of EXP, or -1
   if the size can vary or is larger than an integer.  */

HOST_WIDE_INT
int_expr_size (tree exp)
{
  tree size;

  if (TREE_CODE (exp) == WITH_SIZE_EXPR)
    size = TREE_OPERAND (exp, 1);
  else
    {
      size = tree_expr_size (exp);
      gcc_assert (size);
    }

  if (size == 0 || !host_integerp (size, 0))
    return -1;

  return tree_low_cst (size, 0);
}

/* Return a copy of X in which all memory references
   and all constants that involve symbol refs
   have been replaced with new temporary registers.
   Also emit code to load the memory locations and constants
   into those registers.

   If X contains no such constants or memory references,
   X itself (not a copy) is returned.

   If a constant is found in the address that is not a legitimate constant
   in an insn, it is left alone in the hope that it might be valid in the
   address.

   X may contain no arithmetic except addition, subtraction and
   multiplication.  Values returned by expand_expr with 1 for sum_ok fit
   this constraint.  */
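/* For example, (plus (mem (reg A)) (const_int 4)) becomes
   (plus (reg R) (const_int 4)), where R is a fresh pseudo loaded from
   the MEM by an emitted move.  */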

static rtx
break_out_memory_refs (rtx x)
{
  if (MEM_P (x)
      || (CONSTANT_P (x) && CONSTANT_ADDRESS_P (x)
          && GET_MODE (x) != VOIDmode))
    x = force_reg (GET_MODE (x), x);
  else if (GET_CODE (x) == PLUS || GET_CODE (x) == MINUS
           || GET_CODE (x) == MULT)
    {
      rtx op0 = break_out_memory_refs (XEXP (x, 0));
      rtx op1 = break_out_memory_refs (XEXP (x, 1));

      if (op0 != XEXP (x, 0) || op1 != XEXP (x, 1))
        x = simplify_gen_binary (GET_CODE (x), GET_MODE (x), op0, op1);
    }

  return x;
}

/* Given X, a memory address in address space AS' pointer mode, convert it to
   an address in the address space's address mode, or vice versa (TO_MODE says
   which way).  We take advantage of the fact that pointers are not allowed to
   overflow by commuting arithmetic operations over conversions so that address
   arithmetic insns can be used.  */
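/* For example, a CONST_INT is simply truncated, zero-extended or
   sign-extended as POINTERS_EXTEND_UNSIGNED directs, and a SYMBOL_REF is
   re-created with TO_MODE as its mode.  */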

rtx
convert_memory_address_addr_space (enum machine_mode to_mode ATTRIBUTE_UNUSED,
                                   rtx x, addr_space_t as ATTRIBUTE_UNUSED)
{
#ifndef POINTERS_EXTEND_UNSIGNED
  gcc_assert (GET_MODE (x) == to_mode || GET_MODE (x) == VOIDmode);
  return x;
#else /* defined(POINTERS_EXTEND_UNSIGNED) */
  enum machine_mode pointer_mode, address_mode, from_mode;
  rtx temp;
  enum rtx_code code;

  /* If X already has the right mode, just return it.  */
  if (GET_MODE (x) == to_mode)
    return x;

  pointer_mode = targetm.addr_space.pointer_mode (as);
  address_mode = targetm.addr_space.address_mode (as);
  from_mode = to_mode == pointer_mode ? address_mode : pointer_mode;

  /* Here we handle some special cases.  If none of them apply, fall through
     to the default case.  */
  switch (GET_CODE (x))
    {
    case CONST_INT:
    case CONST_DOUBLE:
      if (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (from_mode))
        code = TRUNCATE;
      else if (POINTERS_EXTEND_UNSIGNED < 0)
        break;
      else if (POINTERS_EXTEND_UNSIGNED > 0)
        code = ZERO_EXTEND;
      else
        code = SIGN_EXTEND;
      temp = simplify_unary_operation (code, to_mode, x, from_mode);
      if (temp)
        return temp;
      break;

    case SUBREG:
      if ((SUBREG_PROMOTED_VAR_P (x) || REG_POINTER (SUBREG_REG (x)))
          && GET_MODE (SUBREG_REG (x)) == to_mode)
        return SUBREG_REG (x);
      break;

    case LABEL_REF:
      temp = gen_rtx_LABEL_REF (to_mode, XEXP (x, 0));
      LABEL_REF_NONLOCAL_P (temp) = LABEL_REF_NONLOCAL_P (x);
      return temp;
      break;

    case SYMBOL_REF:
      temp = shallow_copy_rtx (x);
      PUT_MODE (temp, to_mode);
      return temp;
      break;

    case CONST:
      return gen_rtx_CONST (to_mode,
                            convert_memory_address_addr_space
                              (to_mode, XEXP (x, 0), as));
      break;

    case PLUS:
    case MULT:
      /* FIXME: For addition, we used to permute the conversion and
         addition operation only if one operand is a constant and
         converting the constant does not change it or if one operand
         is a constant and we are using a ptr_extend instruction
         (POINTERS_EXTEND_UNSIGNED < 0) even if the resulting address
         may overflow/underflow.  We relax the condition to include
         zero-extend (POINTERS_EXTEND_UNSIGNED > 0) since the other
         parts of the compiler depend on it.  See PR 49721.

         We can always safely permute them if we are making the address
         narrower.  */
      if (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (from_mode)
          || (GET_CODE (x) == PLUS
              && CONST_INT_P (XEXP (x, 1))
              && (POINTERS_EXTEND_UNSIGNED != 0
                  || XEXP (x, 1) == convert_memory_address_addr_space
                                      (to_mode, XEXP (x, 1), as))))
        return gen_rtx_fmt_ee (GET_CODE (x), to_mode,
                               convert_memory_address_addr_space
                                 (to_mode, XEXP (x, 0), as),
                               XEXP (x, 1));
      break;

    default:
      break;
    }

  return convert_modes (to_mode, from_mode,
                        x, POINTERS_EXTEND_UNSIGNED);
#endif /* defined(POINTERS_EXTEND_UNSIGNED) */
}

/* Return something equivalent to X but valid as a memory address for
   something of mode MODE in the named address space AS.  When X is not
   itself valid, this works by copying X or subexpressions of it into
   registers.  */
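/* For example, while CSE is still expected, an address such as
   (plus (reg) (symbol_ref)) has its SYMBOL_REF copied into a pseudo by
   break_out_memory_refs, so the resulting (plus (reg) (reg))
   subexpressions become visible for sharing.  */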

rtx
memory_address_addr_space (enum machine_mode mode, rtx x, addr_space_t as)
{
  rtx oldx = x;
  enum machine_mode address_mode = targetm.addr_space.address_mode (as);

  x = convert_memory_address_addr_space (address_mode, x, as);

  /* By passing constant addresses through registers
     we get a chance to cse them.  */
  if (! cse_not_expected && CONSTANT_P (x) && CONSTANT_ADDRESS_P (x))
    x = force_reg (address_mode, x);

  /* We get better cse by rejecting indirect addressing at this stage.
     Let the combiner create indirect addresses where appropriate.
     For now, generate the code so that the subexpressions useful to share
     are visible.  But not if cse won't be done!  */
  else
    {
      if (! cse_not_expected && !REG_P (x))
        x = break_out_memory_refs (x);

      /* At this point, any valid address is accepted.  */
      if (memory_address_addr_space_p (mode, x, as))
        goto done;

      /* If it was valid before but breaking out memory refs invalidated it,
         use it the old way.  */
      if (memory_address_addr_space_p (mode, oldx, as))
        {
          x = oldx;
          goto done;
        }

      /* Perform machine-dependent transformations on X
         in certain cases.  This is not necessary since the code
         below can handle all possible cases, but machine-dependent
         transformations can make better code.  */
      {
        rtx orig_x = x;
        x = targetm.addr_space.legitimize_address (x, oldx, mode, as);
        if (orig_x != x && memory_address_addr_space_p (mode, x, as))
          goto done;
      }

      /* PLUS and MULT can appear in special ways
         as the result of attempts to make an address usable for indexing.
         Usually they are dealt with by calling force_operand, below.
         But a sum containing constant terms is special
         if removing them makes the sum a valid address:
         then we generate that address in a register
         and index off of it.  We do this because it often makes
         shorter code, and because the addresses thus generated
         in registers often become common subexpressions.  */
      if (GET_CODE (x) == PLUS)
        {
          rtx constant_term = const0_rtx;
          rtx y = eliminate_constant_term (x, &constant_term);
          if (constant_term == const0_rtx
              || ! memory_address_addr_space_p (mode, y, as))
            x = force_operand (x, NULL_RTX);
          else
            {
              y = gen_rtx_PLUS (GET_MODE (x), copy_to_reg (y), constant_term);
              if (! memory_address_addr_space_p (mode, y, as))
                x = force_operand (x, NULL_RTX);
              else
                x = y;
            }
        }

      else if (GET_CODE (x) == MULT || GET_CODE (x) == MINUS)
        x = force_operand (x, NULL_RTX);

      /* If we have a register that's an invalid address,
         it must be a hard reg of the wrong class.  Copy it to a pseudo.  */
      else if (REG_P (x))
        x = copy_to_reg (x);

      /* Last resort: copy the value to a register, since
         the register is a valid address.  */
      else
        x = force_reg (address_mode, x);
    }

 done:

  gcc_assert (memory_address_addr_space_p (mode, x, as));
  /* If we didn't change the address, we are done.  Otherwise, mark
     a reg as a pointer if we have REG or REG + CONST_INT.  */
  if (oldx == x)
    return x;
  else if (REG_P (x))
    mark_reg_pointer (x, BITS_PER_UNIT);
  else if (GET_CODE (x) == PLUS
           && REG_P (XEXP (x, 0))
           && CONST_INT_P (XEXP (x, 1)))
    mark_reg_pointer (XEXP (x, 0), BITS_PER_UNIT);

  /* OLDX may have been the address on a temporary.  Update the address
     to indicate that X is now used.  */
  update_temp_slot_address (oldx, x);

  return x;
}

/* Convert a mem ref into one with a valid memory address.
   Pass through anything else unchanged.  */

rtx
validize_mem (rtx ref)
{
  if (!MEM_P (ref))
    return ref;
  ref = use_anchored_address (ref);
  if (memory_address_addr_space_p (GET_MODE (ref), XEXP (ref, 0),
                                   MEM_ADDR_SPACE (ref)))
    return ref;

  /* Don't alter REF itself, since that is probably a stack slot.  */
  return replace_equiv_address (ref, XEXP (ref, 0));
}

/* If X is a memory reference to a member of an object block, try rewriting
   it to use an anchor instead.  Return the new memory reference on success
   and the old one on failure.  */
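/* For example, a reference to an object placed 12 bytes into an anchored
   block becomes a reference to the block's section anchor plus 12, so
   several nearby accesses can share one base address.  */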

rtx
use_anchored_address (rtx x)
{
  rtx base;
  HOST_WIDE_INT offset;

  if (!flag_section_anchors)
    return x;

  if (!MEM_P (x))
    return x;

  /* Split the address into a base and offset.  */
  base = XEXP (x, 0);
  offset = 0;
  if (GET_CODE (base) == CONST
      && GET_CODE (XEXP (base, 0)) == PLUS
      && CONST_INT_P (XEXP (XEXP (base, 0), 1)))
    {
      offset += INTVAL (XEXP (XEXP (base, 0), 1));
      base = XEXP (XEXP (base, 0), 0);
    }

  /* Check whether BASE is suitable for anchors.  */
  if (GET_CODE (base) != SYMBOL_REF
      || !SYMBOL_REF_HAS_BLOCK_INFO_P (base)
      || SYMBOL_REF_ANCHOR_P (base)
      || SYMBOL_REF_BLOCK (base) == NULL
      || !targetm.use_anchors_for_symbol_p (base))
    return x;

  /* Decide where BASE is going to be.  */
  place_block_symbol (base);

  /* Get the anchor we need to use.  */
  offset += SYMBOL_REF_BLOCK_OFFSET (base);
  base = get_section_anchor (SYMBOL_REF_BLOCK (base), offset,
                             SYMBOL_REF_TLS_MODEL (base));

  /* Work out the offset from the anchor.  */
  offset -= SYMBOL_REF_BLOCK_OFFSET (base);

  /* If we're going to run a CSE pass, force the anchor into a register.
     We will then be able to reuse registers for several accesses, if the
     target costs say that that's worthwhile.  */
  if (!cse_not_expected)
    base = force_reg (GET_MODE (base), base);

  return replace_equiv_address (x, plus_constant (base, offset));
}

/* Copy the value or contents of X to a new temp reg and return that reg.  */

rtx
copy_to_reg (rtx x)
{
  rtx temp = gen_reg_rtx (GET_MODE (x));

  /* If not an operand, must be an address with PLUS and MULT so
     do the computation.  */
  if (! general_operand (x, VOIDmode))
    x = force_operand (x, temp);

  if (x != temp)
    emit_move_insn (temp, x);

  return temp;
}

/* Like copy_to_reg but always give the new register mode Pmode
   in case X is a constant.  */

rtx
copy_addr_to_reg (rtx x)
{
  return copy_to_mode_reg (Pmode, x);
}

/* Like copy_to_reg but always give the new register mode MODE
   in case X is a constant.  */

rtx
copy_to_mode_reg (enum machine_mode mode, rtx x)
{
  rtx temp = gen_reg_rtx (mode);

  /* If not an operand, must be an address with PLUS and MULT so
     do the computation.  */
  if (! general_operand (x, VOIDmode))
    x = force_operand (x, temp);

  gcc_assert (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode);
  if (x != temp)
    emit_move_insn (temp, x);
  return temp;
}

/* Load X into a register if it is not already one.
   Use mode MODE for the register.
   X should be valid for mode MODE, but it may be a constant which
   is valid for all integer modes; that's why caller must specify MODE.

   The caller must not alter the value in the register we return,
   since we mark it as a "constant" register.  */
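/* For example, forcing a SYMBOL_REF emits a move into a fresh pseudo,
   attaches a REG_EQUAL note recording the constant, and marks the pseudo
   as a pointer whose alignment is taken from the symbol's decl.  */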

rtx
force_reg (enum machine_mode mode, rtx x)
{
  rtx temp, insn, set;

  if (REG_P (x))
    return x;

  if (general_operand (x, mode))
    {
      temp = gen_reg_rtx (mode);
      insn = emit_move_insn (temp, x);
    }
  else
    {
      temp = force_operand (x, NULL_RTX);
      if (REG_P (temp))
        insn = get_last_insn ();
      else
        {
          rtx temp2 = gen_reg_rtx (mode);
          insn = emit_move_insn (temp2, temp);
          temp = temp2;
        }
    }

  /* Let optimizers know that TEMP's value never changes
     and that X can be substituted for it.  Don't get confused
     if INSN set something else (such as a SUBREG of TEMP).  */
  if (CONSTANT_P (x)
      && (set = single_set (insn)) != 0
      && SET_DEST (set) == temp
      && ! rtx_equal_p (x, SET_SRC (set)))
    set_unique_reg_note (insn, REG_EQUAL, x);

  /* Let optimizers know that TEMP is a pointer, and if so, the
     known alignment of that pointer.  */
  {
    unsigned align = 0;
    if (GET_CODE (x) == SYMBOL_REF)
      {
        align = BITS_PER_UNIT;
        if (SYMBOL_REF_DECL (x) && DECL_P (SYMBOL_REF_DECL (x)))
          align = DECL_ALIGN (SYMBOL_REF_DECL (x));
      }
    else if (GET_CODE (x) == LABEL_REF)
      align = BITS_PER_UNIT;
    else if (GET_CODE (x) == CONST
             && GET_CODE (XEXP (x, 0)) == PLUS
             && GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF
             && CONST_INT_P (XEXP (XEXP (x, 0), 1)))
      {
        rtx s = XEXP (XEXP (x, 0), 0);
        rtx c = XEXP (XEXP (x, 0), 1);
        unsigned sa, ca;

        sa = BITS_PER_UNIT;
        if (SYMBOL_REF_DECL (s) && DECL_P (SYMBOL_REF_DECL (s)))
          sa = DECL_ALIGN (SYMBOL_REF_DECL (s));

        if (INTVAL (c) == 0)
          align = sa;
        else
          {
            ca = ctz_hwi (INTVAL (c)) * BITS_PER_UNIT;
            align = MIN (sa, ca);
          }
      }

    if (align || (MEM_P (x) && MEM_POINTER (x)))
      mark_reg_pointer (temp, align);
  }

  return temp;
}

/* If X is a memory ref, copy its contents to a new temp reg and return
   that reg.  Otherwise, return X.  */

rtx
force_not_mem (rtx x)
{
  rtx temp;

  if (!MEM_P (x) || GET_MODE (x) == BLKmode)
    return x;

  temp = gen_reg_rtx (GET_MODE (x));

  if (MEM_POINTER (x))
    REG_POINTER (temp) = 1;

  emit_move_insn (temp, x);
  return temp;
}

/* Copy X to TARGET (if it's nonzero and a reg)
   or to a new temp reg and return that reg.
   MODE is the mode to use for X in case it is a constant.  */

rtx
copy_to_suggested_reg (rtx x, rtx target, enum machine_mode mode)
{
  rtx temp;

  if (target && REG_P (target))
    temp = target;
  else
    temp = gen_reg_rtx (mode);

  emit_move_insn (temp, x);
  return temp;
}

/* Return the mode to use to pass or return a scalar of TYPE and MODE.
   PUNSIGNEDP points to the signedness of the type and may be adjusted
   to show what signedness to use on extension operations.

   FOR_RETURN is nonzero if the caller is promoting the return value
   of FNDECL, else it is for promoting args.  */

enum machine_mode
promote_function_mode (const_tree type, enum machine_mode mode, int *punsignedp,
                       const_tree funtype, int for_return)
{
  /* Called without a type node for a libcall.  */
  if (type == NULL_TREE)
    {
      if (INTEGRAL_MODE_P (mode))
        return targetm.calls.promote_function_mode (NULL_TREE, mode,
                                                    punsignedp, funtype,
                                                    for_return);
      else
        return mode;
    }

  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE:   case ENUMERAL_TYPE:   case BOOLEAN_TYPE:
    case REAL_TYPE:      case OFFSET_TYPE:     case FIXED_POINT_TYPE:
    case POINTER_TYPE:   case REFERENCE_TYPE:
      return targetm.calls.promote_function_mode (type, mode, punsignedp,
                                                  funtype, for_return);

    default:
      return mode;
    }
}

/* Return the mode to use to store a scalar of TYPE and MODE.
   PUNSIGNEDP points to the signedness of the type and may be adjusted
   to show what signedness to use on extension operations.  */

enum machine_mode
promote_mode (const_tree type ATTRIBUTE_UNUSED, enum machine_mode mode,
              int *punsignedp ATTRIBUTE_UNUSED)
{
#ifdef PROMOTE_MODE
  enum tree_code code;
  int unsignedp;
#endif

  /* For libcalls this is invoked without TYPE from the backend's
     TARGET_PROMOTE_FUNCTION_MODE hooks.  Don't do anything in that
     case.  */
  if (type == NULL_TREE)
    return mode;

  /* FIXME: this is the same logic that was there until GCC 4.4, but we
     probably want to test POINTERS_EXTEND_UNSIGNED even if PROMOTE_MODE
     is not defined.  The affected targets are M32C, S390, SPARC.  */
#ifdef PROMOTE_MODE
  code = TREE_CODE (type);
  unsignedp = *punsignedp;

  switch (code)
    {
    case INTEGER_TYPE:   case ENUMERAL_TYPE:   case BOOLEAN_TYPE:
    case REAL_TYPE:      case OFFSET_TYPE:     case FIXED_POINT_TYPE:
      PROMOTE_MODE (mode, unsignedp, type);
      *punsignedp = unsignedp;
      return mode;
      break;

#ifdef POINTERS_EXTEND_UNSIGNED
    case REFERENCE_TYPE:
    case POINTER_TYPE:
      *punsignedp = POINTERS_EXTEND_UNSIGNED;
      return targetm.addr_space.address_mode
               (TYPE_ADDR_SPACE (TREE_TYPE (type)));
      break;
#endif

    default:
      return mode;
    }
#else
  return mode;
#endif
}


/* Use one of promote_mode or promote_function_mode to find the promoted
   mode of DECL.  If PUNSIGNEDP is not NULL, store there the unsignedness
   of DECL after promotion.  */

enum machine_mode
promote_decl_mode (const_tree decl, int *punsignedp)
{
  tree type = TREE_TYPE (decl);
  int unsignedp = TYPE_UNSIGNED (type);
  enum machine_mode mode = DECL_MODE (decl);
  enum machine_mode pmode;

  if (TREE_CODE (decl) == RESULT_DECL
      || TREE_CODE (decl) == PARM_DECL)
    pmode = promote_function_mode (type, mode, &unsignedp,
                                   TREE_TYPE (current_function_decl), 2);
  else
    pmode = promote_mode (type, mode, &unsignedp);

  if (punsignedp)
    *punsignedp = unsignedp;
  return pmode;
}


/* Controls the behaviour of {anti_,}adjust_stack.  */
static bool suppress_reg_args_size;

/* A helper for adjust_stack and anti_adjust_stack.  */

static void
adjust_stack_1 (rtx adjust, bool anti_p)
{
  rtx temp, insn;

#ifndef STACK_GROWS_DOWNWARD
  /* Hereafter anti_p means subtract_p.  */
  anti_p = !anti_p;
#endif

  temp = expand_binop (Pmode,
                       anti_p ? sub_optab : add_optab,
                       stack_pointer_rtx, adjust, stack_pointer_rtx, 0,
                       OPTAB_LIB_WIDEN);

  if (temp != stack_pointer_rtx)
    insn = emit_move_insn (stack_pointer_rtx, temp);
  else
    {
      insn = get_last_insn ();
      temp = single_set (insn);
      gcc_assert (temp != NULL && SET_DEST (temp) == stack_pointer_rtx);
    }

  if (!suppress_reg_args_size)
    add_reg_note (insn, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta));
}

/* Adjust the stack pointer by ADJUST (an rtx for a number of bytes).
   This pops when ADJUST is positive.  ADJUST need not be constant.  */

void
adjust_stack (rtx adjust)
{
  if (adjust == const0_rtx)
    return;

  /* We expect all variable sized adjustments to be multiple of
     PREFERRED_STACK_BOUNDARY.  */
  if (CONST_INT_P (adjust))
    stack_pointer_delta -= INTVAL (adjust);

  adjust_stack_1 (adjust, false);
}

/* Adjust the stack pointer by minus ADJUST (an rtx for a number of bytes).
   This pushes when ADJUST is positive.  ADJUST need not be constant.  */

void
anti_adjust_stack (rtx adjust)
{
  if (adjust == const0_rtx)
    return;

  /* We expect all variable sized adjustments to be multiple of
     PREFERRED_STACK_BOUNDARY.  */
  if (CONST_INT_P (adjust))
    stack_pointer_delta += INTVAL (adjust);

  adjust_stack_1 (adjust, true);
}

/* Round the size of a block to be pushed up to the boundary required
   by this machine.  SIZE is the desired size, which need not be constant.  */
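/* For example, with a preferred stack boundary of 16 bytes, a constant
   SIZE of 13 is rounded up to 16; a variable SIZE becomes
   (SIZE + 15) / 16 * 16, computed with unsigned arithmetic.  */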

static rtx
round_push (rtx size)
{
  rtx align_rtx, alignm1_rtx;

  if (!SUPPORTS_STACK_ALIGNMENT
      || crtl->preferred_stack_boundary == MAX_SUPPORTED_STACK_ALIGNMENT)
    {
      int align = crtl->preferred_stack_boundary / BITS_PER_UNIT;

      if (align == 1)
        return size;

      if (CONST_INT_P (size))
        {
          HOST_WIDE_INT new_size = (INTVAL (size) + align - 1) / align * align;

          if (INTVAL (size) != new_size)
            size = GEN_INT (new_size);
          return size;
        }

      align_rtx = GEN_INT (align);
      alignm1_rtx = GEN_INT (align - 1);
    }
  else
    {
      /* If crtl->preferred_stack_boundary might still grow, use
         virtual_preferred_stack_boundary_rtx instead.  This will be
         substituted by the right value in vregs pass and optimized
         during combine.  */
      align_rtx = virtual_preferred_stack_boundary_rtx;
      alignm1_rtx = force_operand (plus_constant (align_rtx, -1), NULL_RTX);
    }

  /* CEIL_DIV_EXPR needs to worry about the addition overflowing,
     but we know it can't.  So add ourselves and then do
     TRUNC_DIV_EXPR.  */
  size = expand_binop (Pmode, add_optab, size, alignm1_rtx,
                       NULL_RTX, 1, OPTAB_LIB_WIDEN);
  size = expand_divmod (0, TRUNC_DIV_EXPR, Pmode, size, align_rtx,
                        NULL_RTX, 1);
  size = expand_mult (Pmode, size, align_rtx, NULL_RTX, 1);

  return size;
}

/* Save the stack pointer for the purpose in SAVE_LEVEL.  PSAVE is a pointer
   to a previously-created save area.  If no save area has been allocated,
   this function will allocate one.  If a save area is specified, it
   must be of the proper mode.  */
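/* For example, a SAVE_NONLOCAL save area is allocated as a stack slot of
   STACK_SAVEAREA_MODE (SAVE_NONLOCAL), while SAVE_BLOCK and SAVE_FUNCTION
   areas can live in a pseudo register.  */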
1002*e4b17023SJohn Marino
1003*e4b17023SJohn Marino void
emit_stack_save(enum save_level save_level,rtx * psave)1004*e4b17023SJohn Marino emit_stack_save (enum save_level save_level, rtx *psave)
1005*e4b17023SJohn Marino {
1006*e4b17023SJohn Marino rtx sa = *psave;
1007*e4b17023SJohn Marino /* The default is that we use a move insn and save in a Pmode object. */
1008*e4b17023SJohn Marino rtx (*fcn) (rtx, rtx) = gen_move_insn;
1009*e4b17023SJohn Marino enum machine_mode mode = STACK_SAVEAREA_MODE (save_level);
1010*e4b17023SJohn Marino
1011*e4b17023SJohn Marino /* See if this machine has anything special to do for this kind of save. */
1012*e4b17023SJohn Marino switch (save_level)
1013*e4b17023SJohn Marino {
1014*e4b17023SJohn Marino #ifdef HAVE_save_stack_block
1015*e4b17023SJohn Marino case SAVE_BLOCK:
1016*e4b17023SJohn Marino if (HAVE_save_stack_block)
1017*e4b17023SJohn Marino fcn = gen_save_stack_block;
1018*e4b17023SJohn Marino break;
1019*e4b17023SJohn Marino #endif
1020*e4b17023SJohn Marino #ifdef HAVE_save_stack_function
1021*e4b17023SJohn Marino case SAVE_FUNCTION:
1022*e4b17023SJohn Marino if (HAVE_save_stack_function)
1023*e4b17023SJohn Marino fcn = gen_save_stack_function;
1024*e4b17023SJohn Marino break;
1025*e4b17023SJohn Marino #endif
1026*e4b17023SJohn Marino #ifdef HAVE_save_stack_nonlocal
1027*e4b17023SJohn Marino case SAVE_NONLOCAL:
1028*e4b17023SJohn Marino if (HAVE_save_stack_nonlocal)
1029*e4b17023SJohn Marino fcn = gen_save_stack_nonlocal;
1030*e4b17023SJohn Marino break;
1031*e4b17023SJohn Marino #endif
1032*e4b17023SJohn Marino default:
1033*e4b17023SJohn Marino break;
1034*e4b17023SJohn Marino }
1035*e4b17023SJohn Marino
1036*e4b17023SJohn Marino /* If there is no save area and we have to allocate one, do so. Otherwise
1037*e4b17023SJohn Marino verify the save area is the proper mode. */
1038*e4b17023SJohn Marino
1039*e4b17023SJohn Marino if (sa == 0)
1040*e4b17023SJohn Marino {
1041*e4b17023SJohn Marino if (mode != VOIDmode)
1042*e4b17023SJohn Marino {
1043*e4b17023SJohn Marino if (save_level == SAVE_NONLOCAL)
1044*e4b17023SJohn Marino *psave = sa = assign_stack_local (mode, GET_MODE_SIZE (mode), 0);
1045*e4b17023SJohn Marino else
1046*e4b17023SJohn Marino *psave = sa = gen_reg_rtx (mode);
1047*e4b17023SJohn Marino }
1048*e4b17023SJohn Marino }
1049*e4b17023SJohn Marino
1050*e4b17023SJohn Marino do_pending_stack_adjust ();
1051*e4b17023SJohn Marino if (sa != 0)
1052*e4b17023SJohn Marino sa = validize_mem (sa);
1053*e4b17023SJohn Marino emit_insn (fcn (sa, stack_pointer_rtx));
1054*e4b17023SJohn Marino }

/* Restore the stack pointer for the purpose in SAVE_LEVEL.  SA is the save
   area made by emit_stack_save.  If it is zero, we have nothing to do.  */

void
emit_stack_restore (enum save_level save_level, rtx sa)
{
  /* The default is that we use a move insn.  */
  rtx (*fcn) (rtx, rtx) = gen_move_insn;

  /* If stack_realign_drap, the x86 backend emits a prologue that aligns both
     STACK_POINTER and HARD_FRAME_POINTER.
     If stack_realign_fp, the x86 backend emits a prologue that aligns only
     STACK_POINTER.  This renders the HARD_FRAME_POINTER unusable for
     accessing aligned variables, which is reflected in ix86_can_eliminate.
     We normally still have the realigned STACK_POINTER that we can use.
     But if there is a stack restore still present at reload, it can trigger
     mark_not_eliminable for the STACK_POINTER, leaving no way to eliminate
     FRAME_POINTER into a hard reg.
     To prevent this situation, we force need_drap if we emit a stack
     restore.  */
  if (SUPPORTS_STACK_ALIGNMENT)
    crtl->need_drap = true;

  /* See if this machine has anything special to do for this kind of save.  */
  switch (save_level)
    {
#ifdef HAVE_restore_stack_block
    case SAVE_BLOCK:
      if (HAVE_restore_stack_block)
        fcn = gen_restore_stack_block;
      break;
#endif
#ifdef HAVE_restore_stack_function
    case SAVE_FUNCTION:
      if (HAVE_restore_stack_function)
        fcn = gen_restore_stack_function;
      break;
#endif
#ifdef HAVE_restore_stack_nonlocal
    case SAVE_NONLOCAL:
      if (HAVE_restore_stack_nonlocal)
        fcn = gen_restore_stack_nonlocal;
      break;
#endif
    default:
      break;
    }

  if (sa != 0)
    {
      sa = validize_mem (sa);
      /* These clobbers prevent the scheduler from moving
         references to variable arrays below the code
         that deletes (pops) the arrays.  */
      emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
      emit_clobber (gen_rtx_MEM (BLKmode, stack_pointer_rtx));
    }

  discard_pending_stack_adjust ();

  emit_insn (fcn (stack_pointer_rtx, sa));
}

/* Invoke emit_stack_save on the nonlocal_goto_save_area for the current
   function.  This function should be called whenever we allocate or
   deallocate dynamic stack space.  */

void
update_nonlocal_goto_save_area (void)
{
  tree t_save;
  rtx r_save;

  /* The nonlocal_goto_save_area object is an array of N pointers.  The
     first one is used for the frame pointer save; the rest are sized by
     STACK_SAVEAREA_MODE.  Create a reference to array index 1, the first
     of the stack save area slots.  */
  t_save = build4 (ARRAY_REF,
                   TREE_TYPE (TREE_TYPE (cfun->nonlocal_goto_save_area)),
                   cfun->nonlocal_goto_save_area,
                   integer_one_node, NULL_TREE, NULL_TREE);
  r_save = expand_expr (t_save, NULL_RTX, VOIDmode, EXPAND_WRITE);

  emit_stack_save (SAVE_NONLOCAL, &r_save);
}
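
/* Sketch of the save area layout this maintains, following the comment
   above (slot meanings only; the array type comes from the front end):

     nonlocal_goto_save_area[0]    frame pointer save slot
     nonlocal_goto_save_area[1]    stack save slot written here
     nonlocal_goto_save_area[2..]  further slots, STACK_SAVEAREA_MODE sized

   A nonlocal goto receiver reloads the frame and stack pointers from
   these slots to re-establish the saving function's frame.  */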

/* Return an rtx representing the address of an area of memory dynamically
   pushed on the stack.

   Any required stack pointer alignment is preserved.

   SIZE is an rtx representing the size of the area.

   SIZE_ALIGN is the alignment (in bits) that we know SIZE has.  This
   parameter may be zero.  If so, a proper value will be extracted
   from SIZE if it is constant, otherwise BITS_PER_UNIT will be assumed.

   REQUIRED_ALIGN is the alignment (in bits) required for the region
   of memory.

   If CANNOT_ACCUMULATE is set to TRUE, the caller guarantees that the
   stack space allocated by the generated code cannot accumulate with
   itself in the course of the execution of the function.  It is always
   safe to pass FALSE here, and the following criterion is sufficient in
   order to pass TRUE: every path in the CFG that starts at the allocation
   point and loops back to it executes the associated deallocation code.  */

rtx
allocate_dynamic_stack_space (rtx size, unsigned size_align,
                              unsigned required_align, bool cannot_accumulate)
{
  HOST_WIDE_INT stack_usage_size = -1;
  rtx final_label, final_target, target;
  unsigned extra_align = 0;
  bool must_align;

  /* If we're asking for zero bytes, it doesn't matter what we point
     to since we can't dereference it.  But return a reasonable
     address anyway.  */
  if (size == const0_rtx)
    return virtual_stack_dynamic_rtx;

  /* Otherwise, show we're calling alloca or equivalent.  */
  cfun->calls_alloca = 1;

  /* If stack usage info is requested, look into the size we are passed.
     We need to do so this early to avoid the obfuscation that may be
     introduced later by the various alignment operations.  */
  if (flag_stack_usage_info)
    {
      if (CONST_INT_P (size))
        stack_usage_size = INTVAL (size);
      else if (REG_P (size))
        {
          /* Look into the last emitted insn and see if we can deduce
             something for the register.  */
          rtx insn, set, note;
          insn = get_last_insn ();
          if ((set = single_set (insn)) && rtx_equal_p (SET_DEST (set), size))
            {
              if (CONST_INT_P (SET_SRC (set)))
                stack_usage_size = INTVAL (SET_SRC (set));
              else if ((note = find_reg_equal_equiv_note (insn))
                       && CONST_INT_P (XEXP (note, 0)))
                stack_usage_size = INTVAL (XEXP (note, 0));
            }
        }

      /* If the size is not constant, we can't say anything.  */
      if (stack_usage_size == -1)
        {
          current_function_has_unbounded_dynamic_stack_size = 1;
          stack_usage_size = 0;
        }
    }

  /* Ensure the size is in the proper mode.  */
  if (GET_MODE (size) != VOIDmode && GET_MODE (size) != Pmode)
    size = convert_to_mode (Pmode, size, 1);

  /* Adjust SIZE_ALIGN, if needed.  */
  if (CONST_INT_P (size))
    {
      unsigned HOST_WIDE_INT lsb;

      lsb = INTVAL (size);
      lsb &= -lsb;

      /* Watch out for overflow truncating to "unsigned".  */
      if (lsb > UINT_MAX / BITS_PER_UNIT)
        size_align = 1u << (HOST_BITS_PER_INT - 1);
      else
        size_align = (unsigned)lsb * BITS_PER_UNIT;
    }
  else if (size_align < BITS_PER_UNIT)
    size_align = BITS_PER_UNIT;
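
  /* Worked example (illustrative): for size == 24 (binary 11000),
     lsb = 24 & -24 = 8, so SIZE is known to be a multiple of 8 bytes
     and size_align becomes 8 * BITS_PER_UNIT = 64 bits on a
     byte-addressed target.  The lsb &= -lsb idiom isolates the lowest
     set bit, the largest power of two guaranteed to divide a constant.  */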

  /* We can't attempt to minimize the necessary alignment, because we
     don't know the final value of preferred_stack_boundary yet while
     executing this code.  */
  if (crtl->preferred_stack_boundary < PREFERRED_STACK_BOUNDARY)
    crtl->preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;

  /* We will need to ensure that the address we return is aligned to
     REQUIRED_ALIGN.  If STACK_DYNAMIC_OFFSET is defined, we don't
     always know its final value at this point in the compilation (it
     might depend on the size of the outgoing parameter lists, for
     example), so we must align the value to be returned in that case.
     (Note that STACK_DYNAMIC_OFFSET will have a default nonzero value if
     STACK_POINTER_OFFSET or ACCUMULATE_OUTGOING_ARGS is defined.)
     We must also do an alignment operation on the returned value if
     the stack pointer alignment is less strict than REQUIRED_ALIGN.

     If we have to align, we must leave space in SIZE for the hole
     that might result from the alignment operation.  */

  must_align = (crtl->preferred_stack_boundary < required_align);
  if (must_align)
    {
      if (required_align > PREFERRED_STACK_BOUNDARY)
        extra_align = PREFERRED_STACK_BOUNDARY;
      else if (required_align > STACK_BOUNDARY)
        extra_align = STACK_BOUNDARY;
      else
        extra_align = BITS_PER_UNIT;
    }

  /* ??? STACK_POINTER_OFFSET is always defined now.  */
#if defined (STACK_DYNAMIC_OFFSET) || defined (STACK_POINTER_OFFSET)
  must_align = true;
  extra_align = BITS_PER_UNIT;
#endif

  if (must_align)
    {
      unsigned extra = (required_align - extra_align) / BITS_PER_UNIT;

      size = plus_constant (size, extra);
      size = force_operand (size, NULL_RTX);

      if (flag_stack_usage_info)
        stack_usage_size += extra;

      if (extra && size_align > extra_align)
        size_align = extra_align;
    }
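
  /* Illustrative numbers: with required_align = 256 bits (32 bytes) and
     extra_align = 8 bits (no useful guarantee from the stack pointer),
     extra = (256 - 8) / 8 = 31 bytes.  Over-allocating by 31 bytes
     guarantees that some 32-byte-aligned address lies wholly within the
     block; the rounding code near the end of this function computes it.  */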

  /* Round the size to a multiple of the required stack alignment.
     Since the stack is presumed to be rounded before this allocation,
     this will maintain the required alignment.

     If the stack grows downward, we could save an insn by subtracting
     SIZE from the stack pointer and then aligning the stack pointer.
     The problem with this is that the stack pointer may be unaligned
     between the execution of the subtraction and alignment insns and
     some machines do not allow this.  Even on those that do, some
     signal handlers malfunction if a signal should occur between those
     insns.  Since this is an extremely rare event, we have no reliable
     way of knowing which systems have this problem.  So we avoid even
     momentarily mis-aligning the stack.  */
  if (size_align % MAX_SUPPORTED_STACK_ALIGNMENT != 0)
    {
      size = round_push (size);

      if (flag_stack_usage_info)
        {
          int align = crtl->preferred_stack_boundary / BITS_PER_UNIT;
          stack_usage_size = (stack_usage_size + align - 1) / align * align;
        }
    }
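
  /* The statement above is the usual round-up-to-a-multiple idiom.  For
     instance, with stack_usage_size = 100 and align = 16:
     (100 + 15) / 16 * 16 = 7 * 16 = 112, the smallest multiple of 16
     that is >= 100.  */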

  target = gen_reg_rtx (Pmode);

  /* The size is supposed to be fully adjusted at this point so record it
     if stack usage info is requested.  */
  if (flag_stack_usage_info)
    {
      current_function_dynamic_stack_size += stack_usage_size;

      /* ??? This is gross but the only safe stance in the absence
         of stack usage oriented flow analysis.  */
      if (!cannot_accumulate)
        current_function_has_unbounded_dynamic_stack_size = 1;
    }

  final_label = NULL_RTX;
  final_target = NULL_RTX;

  /* If we are splitting the stack, we need to ask the backend whether
     there is enough room on the current stack.  If there isn't, or if
     the backend doesn't know how to tell us, then we need to call a
     function to allocate memory in some other way.  This memory will
     be released when we release the current stack segment.  The
     effect is that stack allocation becomes less efficient, but at
     least it doesn't cause a stack overflow.  */
  if (flag_split_stack)
    {
      rtx available_label, ask, space, func;

      available_label = NULL_RTX;

#ifdef HAVE_split_stack_space_check
      if (HAVE_split_stack_space_check)
        {
          available_label = gen_label_rtx ();

          /* This instruction will branch to AVAILABLE_LABEL if there
             are SIZE bytes available on the stack.  */
          emit_insn (gen_split_stack_space_check (size, available_label));
        }
#endif

      /* The __morestack_allocate_stack_space function will allocate
         memory using malloc.  If the alignment of the memory returned
         by malloc does not meet REQUIRED_ALIGN, we increase SIZE to
         make sure we allocate enough space.  */
      if (MALLOC_ABI_ALIGNMENT >= required_align)
        ask = size;
      else
        {
          ask = expand_binop (Pmode, add_optab, size,
                              GEN_INT (required_align / BITS_PER_UNIT - 1),
                              NULL_RTX, 1, OPTAB_LIB_WIDEN);
          must_align = true;
        }

      func = init_one_libfunc ("__morestack_allocate_stack_space");

      space = emit_library_call_value (func, target, LCT_NORMAL, Pmode,
                                       1, ask, Pmode);

      if (available_label == NULL_RTX)
        return space;

      final_target = gen_reg_rtx (Pmode);

      emit_move_insn (final_target, space);

      final_label = gen_label_rtx ();
      emit_jump (final_label);

      emit_label (available_label);
    }
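
  /* Shape of the code emitted by the block above, as a sketch (the
     labels are the local rtx variables, not real symbols):

       if (split_stack_space_check (size))  goto available;
       space = __morestack_allocate_stack_space (ask);
       final_target = space;
       goto final;
     available:
       ... the normal stack-pointer allocation below ...
     final:

     The fall-through path after AVAILABLE_LABEL is the ordinary
     allocation code that follows, and FINAL_LABEL is emitted near the
     end of this function.  */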

  do_pending_stack_adjust ();

  /* We ought always to be called at the top level, and the stack ought
     to be properly aligned.  */
  gcc_assert (!(stack_pointer_delta
                % (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT)));

  /* If needed, check that we have the required amount of stack.  Take into
     account what has already been checked.  */
  if (STACK_CHECK_MOVING_SP)
    ;
  else if (flag_stack_check == GENERIC_STACK_CHECK)
    probe_stack_range (STACK_OLD_CHECK_PROTECT + STACK_CHECK_MAX_FRAME_SIZE,
                       size);
  else if (flag_stack_check == STATIC_BUILTIN_STACK_CHECK)
    probe_stack_range (STACK_CHECK_PROTECT, size);

  /* Don't let anti_adjust_stack emit notes.  */
  suppress_reg_args_size = true;

  /* Perform the required allocation from the stack.  Some systems do
     this differently than simply incrementing/decrementing from the
     stack pointer, such as acquiring the space by calling malloc().  */
#ifdef HAVE_allocate_stack
  if (HAVE_allocate_stack)
    {
      struct expand_operand ops[2];
      /* We don't have to check against the predicate for operand 0 since
         TARGET is known to be a pseudo of the proper mode, which must
         be valid for the operand.  */
      create_fixed_operand (&ops[0], target);
      create_convert_operand_to (&ops[1], size, STACK_SIZE_MODE, true);
      expand_insn (CODE_FOR_allocate_stack, 2, ops);
    }
  else
#endif
    {
      int saved_stack_pointer_delta;

#ifndef STACK_GROWS_DOWNWARD
      emit_move_insn (target, virtual_stack_dynamic_rtx);
#endif

      /* Check stack bounds if necessary.  */
      if (crtl->limit_stack)
        {
          rtx available;
          rtx space_available = gen_label_rtx ();
#ifdef STACK_GROWS_DOWNWARD
          available = expand_binop (Pmode, sub_optab,
                                    stack_pointer_rtx, stack_limit_rtx,
                                    NULL_RTX, 1, OPTAB_WIDEN);
#else
          available = expand_binop (Pmode, sub_optab,
                                    stack_limit_rtx, stack_pointer_rtx,
                                    NULL_RTX, 1, OPTAB_WIDEN);
#endif
          emit_cmp_and_jump_insns (available, size, GEU, NULL_RTX, Pmode, 1,
                                   space_available);
#ifdef HAVE_trap
          if (HAVE_trap)
            emit_insn (gen_trap ());
          else
#endif
            error ("stack limits not supported on this target");
          emit_barrier ();
          emit_label (space_available);
        }
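
      /* In C-like pseudocode, the limit check just emitted amounts to,
         on a downward-growing stack:

           available = (unsigned) (sp - stack_limit);
           if (available >= size)
             goto space_available;
           trap ();   (or a compile-time error if no trap insn exists)
         space_available:

         i.e. the allocation proceeds only when the distance to the
         limit can absorb SIZE bytes.  */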

      saved_stack_pointer_delta = stack_pointer_delta;

      if (flag_stack_check && STACK_CHECK_MOVING_SP)
        anti_adjust_stack_and_probe (size, false);
      else
        anti_adjust_stack (size);

      /* Even if size is constant, don't modify stack_pointer_delta.
         The constant size alloca should preserve
         crtl->preferred_stack_boundary alignment.  */
      stack_pointer_delta = saved_stack_pointer_delta;

#ifdef STACK_GROWS_DOWNWARD
      emit_move_insn (target, virtual_stack_dynamic_rtx);
#endif
    }

  suppress_reg_args_size = false;

  /* Finish up the split stack handling.  */
  if (final_label != NULL_RTX)
    {
      gcc_assert (flag_split_stack);
      emit_move_insn (final_target, target);
      emit_label (final_label);
      target = final_target;
    }

  if (must_align)
    {
      /* CEIL_DIV_EXPR needs to worry about the addition overflowing,
         but we know it can't.  So add ourselves and then do
         TRUNC_DIV_EXPR.  */
      target = expand_binop (Pmode, add_optab, target,
                             GEN_INT (required_align / BITS_PER_UNIT - 1),
                             NULL_RTX, 1, OPTAB_LIB_WIDEN);
      target = expand_divmod (0, TRUNC_DIV_EXPR, Pmode, target,
                              GEN_INT (required_align / BITS_PER_UNIT),
                              NULL_RTX, 1);
      target = expand_mult (Pmode, target,
                            GEN_INT (required_align / BITS_PER_UNIT),
                            NULL_RTX, 1);
    }
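
  /* Worked example of the add/divide/multiply round-up: aligning to
     required_align = 128 bits (16 bytes) with target = 0x1007 gives
     0x1007 + 15 = 0x1016, then 0x1016 / 16 * 16 = 0x1010, the next
     16-byte boundary at or above 0x1007.  For a power-of-two alignment
     the unsigned division and multiplication typically reduce to shifts
     when expanded.  */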

  /* Now that we've committed to a return value, mark its alignment.  */
  mark_reg_pointer (target, required_align);

  /* Record the new stack level for nonlocal gotos.  */
  if (cfun->nonlocal_goto_save_area != 0)
    update_nonlocal_goto_save_area ();

  return target;
}

/* A front end may want to override GCC's stack checking by providing a
   run-time routine to call to check the stack, so provide a mechanism for
   calling that routine.  */

static GTY(()) rtx stack_check_libfunc;

void
set_stack_check_libfunc (const char *libfunc_name)
{
  gcc_assert (stack_check_libfunc == NULL_RTX);
  stack_check_libfunc = gen_rtx_SYMBOL_REF (Pmode, libfunc_name);
}
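
/* Usage sketch (the routine name here is hypothetical; the actual
   symbol depends on the front end and its runtime library): a front
   end would register its checker once, early in compilation, e.g.

     set_stack_check_libfunc ("_my_stack_check");

   after which probe_stack_range below calls that routine instead of
   emitting inline probes.  The assert above enforces that the hook is
   set at most once.  */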

/* Emit one stack probe at ADDRESS, an address within the stack.  */

void
emit_stack_probe (rtx address)
{
  rtx memref = gen_rtx_MEM (word_mode, address);

  MEM_VOLATILE_P (memref) = 1;

  /* See if we have an insn to probe the stack.  */
#ifdef HAVE_probe_stack
  if (HAVE_probe_stack)
    emit_insn (gen_probe_stack (memref));
  else
#endif
    emit_move_insn (memref, const0_rtx);
}

/* Probe a range of stack addresses from FIRST to FIRST+SIZE, inclusive.
   FIRST is a constant and SIZE is a Pmode RTX.  These are offsets from
   the current stack pointer.  STACK_GROWS_DOWNWARD says whether to add
   or subtract them from the stack pointer.  */

#define PROBE_INTERVAL (1 << STACK_CHECK_PROBE_INTERVAL_EXP)

#ifdef STACK_GROWS_DOWNWARD
#define STACK_GROW_OP MINUS
#define STACK_GROW_OPTAB sub_optab
#define STACK_GROW_OFF(off) -(off)
#else
#define STACK_GROW_OP PLUS
#define STACK_GROW_OPTAB add_optab
#define STACK_GROW_OFF(off) (off)
#endif
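
/* For example, on a downward-growing stack the address FIRST bytes into
   the unprobed region is built as

     gen_rtx_fmt_ee (STACK_GROW_OP, Pmode, stack_pointer_rtx,
                     GEN_INT (first))

   which yields (minus sp (const_int FIRST)); on an upward-growing stack
   the same call yields (plus sp (const_int FIRST)).  The macros let the
   code below stay direction-agnostic.  */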

void
probe_stack_range (HOST_WIDE_INT first, rtx size)
{
  /* First ensure SIZE is Pmode.  */
  if (GET_MODE (size) != VOIDmode && GET_MODE (size) != Pmode)
    size = convert_to_mode (Pmode, size, 1);

  /* Next see if we have a function to check the stack.  */
  if (stack_check_libfunc)
    {
      rtx addr = memory_address (Pmode,
                                 gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
                                                 stack_pointer_rtx,
                                                 plus_constant (size, first)));
      emit_library_call (stack_check_libfunc, LCT_NORMAL, VOIDmode, 1, addr,
                         Pmode);
    }

  /* Next see if we have an insn to check the stack.  */
#ifdef HAVE_check_stack
  else if (HAVE_check_stack)
    {
      struct expand_operand ops[1];
      rtx addr = memory_address (Pmode,
                                 gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
                                                 stack_pointer_rtx,
                                                 plus_constant (size, first)));
      bool success;
      create_input_operand (&ops[0], addr, Pmode);
      success = maybe_expand_insn (CODE_FOR_check_stack, 1, ops);
      gcc_assert (success);
    }
#endif

  /* Otherwise we have to generate explicit probes.  If we have a constant
     small number of them to generate, that's the easy case.  */
  else if (CONST_INT_P (size) && INTVAL (size) < 7 * PROBE_INTERVAL)
    {
      HOST_WIDE_INT isize = INTVAL (size), i;
      rtx addr;

      /* Probe at FIRST + N * PROBE_INTERVAL for values of N from 1 until
         it exceeds SIZE.  If only one probe is needed, this will not
         generate any code.  Then probe at FIRST + SIZE.  */
      for (i = PROBE_INTERVAL; i < isize; i += PROBE_INTERVAL)
        {
          addr = memory_address (Pmode,
                                 plus_constant (stack_pointer_rtx,
                                                STACK_GROW_OFF (first + i)));
          emit_stack_probe (addr);
        }

      addr = memory_address (Pmode,
                             plus_constant (stack_pointer_rtx,
                                            STACK_GROW_OFF (first + isize)));
      emit_stack_probe (addr);
    }
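
      /* Probe schedule example: with PROBE_INTERVAL = 4096, first = 0
         and size = 10000, the loop probes at offsets 4096 and 8192 and
         the final probe lands at 10000, so no point in the range is
         ever more than one interval away from a probed address.  */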

  /* In the variable case, do the same as above, but in a loop.  Note that we
     must be extra careful with variables wrapping around because we might be
     at the very top (or the very bottom) of the address space and we have to
     be able to handle this case properly; in particular, we use an equality
     test for the loop condition.  */
  else
    {
      rtx rounded_size, rounded_size_op, test_addr, last_addr, temp;
      rtx loop_lab = gen_label_rtx ();
      rtx end_lab = gen_label_rtx ();


      /* Step 1: round SIZE to the previous multiple of the interval.  */

      /* ROUNDED_SIZE = SIZE & -PROBE_INTERVAL  */
      rounded_size
        = simplify_gen_binary (AND, Pmode, size, GEN_INT (-PROBE_INTERVAL));
      rounded_size_op = force_operand (rounded_size, NULL_RTX);


      /* Step 2: compute initial and final value of the loop counter.  */

      /* TEST_ADDR = SP + FIRST.  */
      test_addr = force_operand (gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
                                                 stack_pointer_rtx,
                                                 GEN_INT (first)), NULL_RTX);

      /* LAST_ADDR = SP + FIRST + ROUNDED_SIZE.  */
      last_addr = force_operand (gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
                                                 test_addr,
                                                 rounded_size_op), NULL_RTX);


      /* Step 3: the loop

         while (TEST_ADDR != LAST_ADDR)
           {
             TEST_ADDR = TEST_ADDR + PROBE_INTERVAL
             probe at TEST_ADDR
           }

         probes at FIRST + N * PROBE_INTERVAL for values of N from 1
         until it is equal to ROUNDED_SIZE.  */

      emit_label (loop_lab);

      /* Jump to END_LAB if TEST_ADDR == LAST_ADDR.  */
      emit_cmp_and_jump_insns (test_addr, last_addr, EQ, NULL_RTX, Pmode, 1,
                               end_lab);

      /* TEST_ADDR = TEST_ADDR + PROBE_INTERVAL.  */
      temp = expand_binop (Pmode, STACK_GROW_OPTAB, test_addr,
                           GEN_INT (PROBE_INTERVAL), test_addr,
                           1, OPTAB_WIDEN);

      gcc_assert (temp == test_addr);

      /* Probe at TEST_ADDR.  */
      emit_stack_probe (test_addr);

      emit_jump (loop_lab);

      emit_label (end_lab);


      /* Step 4: probe at FIRST + SIZE if we cannot assert at compile-time
         that SIZE is equal to ROUNDED_SIZE.  */

      /* TEMP = SIZE - ROUNDED_SIZE.  */
      temp = simplify_gen_binary (MINUS, Pmode, size, rounded_size);
      if (temp != const0_rtx)
        {
          rtx addr;

          if (CONST_INT_P (temp))
            {
              /* Use [base + disp] addressing mode if supported.  */
              HOST_WIDE_INT offset = INTVAL (temp);
              addr = memory_address (Pmode,
                                     plus_constant (last_addr,
                                                    STACK_GROW_OFF (offset)));
            }
          else
            {
              /* Manual CSE if the difference is not known at compile-time.  */
              temp = gen_rtx_MINUS (Pmode, size, rounded_size_op);
              addr = memory_address (Pmode,
                                     gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
                                                     last_addr, temp));
            }

          emit_stack_probe (addr);
        }
    }
}
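
/* Worked trace of the variable case (illustrative): with
   PROBE_INTERVAL = 4096 and a runtime size of 10000,
   rounded_size = 10000 & -4096 = 8192, so the loop probes one and then
   two intervals past TEST_ADDR's origin, and step 4 issues one extra
   probe 10000 - 8192 = 1808 bytes beyond LAST_ADDR.  The equality loop
   condition keeps this correct even if the addresses wrap around the
   ends of the address space.  */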

/* Adjust the stack pointer by minus SIZE (an rtx for a number of bytes)
   while probing it.  This pushes when SIZE is positive.  SIZE need not
   be constant.  If ADJUST_BACK is true, adjust back the stack pointer
   by plus SIZE at the end.  */

void
anti_adjust_stack_and_probe (rtx size, bool adjust_back)
{
  /* We skip the probe for the first interval + a small dope of 4 words and
     probe that many bytes past the specified size to maintain a protection
     area at the bottom of the stack.  */
  const int dope = 4 * UNITS_PER_WORD;

  /* First ensure SIZE is Pmode.  */
  if (GET_MODE (size) != VOIDmode && GET_MODE (size) != Pmode)
    size = convert_to_mode (Pmode, size, 1);

  /* If we have a constant small number of probes to generate, that's the
     easy case.  */
  if (CONST_INT_P (size) && INTVAL (size) < 7 * PROBE_INTERVAL)
    {
      HOST_WIDE_INT isize = INTVAL (size), i;
      bool first_probe = true;

      /* Adjust SP and probe at PROBE_INTERVAL + N * PROBE_INTERVAL for
         values of N from 1 until it exceeds SIZE.  If only one probe is
         needed, this will not generate any code.  Then adjust and probe
         to PROBE_INTERVAL + SIZE.  */
      for (i = PROBE_INTERVAL; i < isize; i += PROBE_INTERVAL)
        {
          if (first_probe)
            {
              anti_adjust_stack (GEN_INT (2 * PROBE_INTERVAL + dope));
              first_probe = false;
            }
          else
            anti_adjust_stack (GEN_INT (PROBE_INTERVAL));
          emit_stack_probe (stack_pointer_rtx);
        }

      if (first_probe)
        anti_adjust_stack (plus_constant (size, PROBE_INTERVAL + dope));
      else
        anti_adjust_stack (plus_constant (size, PROBE_INTERVAL - i));
      emit_stack_probe (stack_pointer_rtx);
    }
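
      /* Worked trace (illustrative; assumes a downward-growing stack
         and a 64-bit word, so dope = 32): for size = 10000 and
         PROBE_INTERVAL = 4096,

           sp -= 2*4096 + 32;            probe     (i = 4096)
           sp -= 4096;                   probe     (i = 8192)
           sp -= 10000 + 4096 - 12288;   probe     (final 1808 bytes)

         for a total adjustment of 10000 + 4096 + 32, i.e. SIZE plus the
         skipped first interval plus the dope, which is exactly what the
         adjust-back at the end of the function undoes.  */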

  /* In the variable case, do the same as above, but in a loop.  Note that we
     must be extra careful with variables wrapping around because we might be
     at the very top (or the very bottom) of the address space and we have to
     be able to handle this case properly; in particular, we use an equality
     test for the loop condition.  */
  else
    {
      rtx rounded_size, rounded_size_op, last_addr, temp;
      rtx loop_lab = gen_label_rtx ();
      rtx end_lab = gen_label_rtx ();


      /* Step 1: round SIZE to the previous multiple of the interval.  */

      /* ROUNDED_SIZE = SIZE & -PROBE_INTERVAL  */
      rounded_size
        = simplify_gen_binary (AND, Pmode, size, GEN_INT (-PROBE_INTERVAL));
      rounded_size_op = force_operand (rounded_size, NULL_RTX);


      /* Step 2: compute initial and final value of the loop counter.  */

      /* SP = SP_0 + PROBE_INTERVAL.  */
      anti_adjust_stack (GEN_INT (PROBE_INTERVAL + dope));

      /* LAST_ADDR = SP_0 + PROBE_INTERVAL + ROUNDED_SIZE.  */
      last_addr = force_operand (gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
                                                 stack_pointer_rtx,
                                                 rounded_size_op), NULL_RTX);


      /* Step 3: the loop

         while (SP != LAST_ADDR)
           {
             SP = SP + PROBE_INTERVAL
             probe at SP
           }

         adjusts SP and probes at PROBE_INTERVAL + N * PROBE_INTERVAL for
         values of N from 1 until it is equal to ROUNDED_SIZE.  */

      emit_label (loop_lab);

      /* Jump to END_LAB if SP == LAST_ADDR.  */
      emit_cmp_and_jump_insns (stack_pointer_rtx, last_addr, EQ, NULL_RTX,
                               Pmode, 1, end_lab);

      /* SP = SP + PROBE_INTERVAL and probe at SP.  */
      anti_adjust_stack (GEN_INT (PROBE_INTERVAL));
      emit_stack_probe (stack_pointer_rtx);

      emit_jump (loop_lab);

      emit_label (end_lab);


      /* Step 4: adjust SP and probe at PROBE_INTERVAL + SIZE if we cannot
         assert at compile-time that SIZE is equal to ROUNDED_SIZE.  */

      /* TEMP = SIZE - ROUNDED_SIZE.  */
      temp = simplify_gen_binary (MINUS, Pmode, size, rounded_size);
      if (temp != const0_rtx)
        {
          /* Manual CSE if the difference is not known at compile-time.  */
          if (GET_CODE (temp) != CONST_INT)
            temp = gen_rtx_MINUS (Pmode, size, rounded_size_op);
          anti_adjust_stack (temp);
          emit_stack_probe (stack_pointer_rtx);
        }
    }

  /* Adjust back and account for the additional first interval.  */
  if (adjust_back)
    adjust_stack (plus_constant (size, PROBE_INTERVAL + dope));
  else
    adjust_stack (GEN_INT (PROBE_INTERVAL + dope));
}

/* Return an rtx representing the register or memory location
   in which a scalar value of data type VALTYPE
   was returned by a function call to function FUNC.
   FUNC is the FUNCTION_DECL if the precise function is known,
   otherwise 0; FNTYPE is its FUNCTION_TYPE node.
   OUTGOING is 1 if on a machine with register windows this function
   should return the register in which the function will put its result
   and 0 otherwise.  */

rtx
hard_function_value (const_tree valtype, const_tree func, const_tree fntype,
                     int outgoing ATTRIBUTE_UNUSED)
{
  rtx val;

  val = targetm.calls.function_value (valtype, func ? func : fntype, outgoing);

  if (REG_P (val)
      && GET_MODE (val) == BLKmode)
    {
      unsigned HOST_WIDE_INT bytes = int_size_in_bytes (valtype);
      enum machine_mode tmpmode;

      /* int_size_in_bytes can return -1.  We don't need a check here
         since the value of bytes will then be large enough that no
         mode will match anyway.  */

      for (tmpmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmpmode != VOIDmode;
           tmpmode = GET_MODE_WIDER_MODE (tmpmode))
        {
          /* Have we found a large enough mode?  */
          if (GET_MODE_SIZE (tmpmode) >= bytes)
            break;
        }

      /* No suitable mode found.  */
      gcc_assert (tmpmode != VOIDmode);

      PUT_MODE (val, tmpmode);
    }
  return val;
}
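
/* Illustrative walk of the BLKmode fixup above: if the target returns a
   3-byte struct in a register but reports BLKmode for it, the loop
   scans the integer modes from the narrowest upward (e.g. QImode,
   HImode, SImode on a typical target) and stops at the first whose
   size is at least 3 bytes, typically SImode, then retags the return
   register with that mode.  */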

/* Return an rtx representing the register or memory location
   in which a scalar value of mode MODE was returned by a library call.  */

rtx
hard_libcall_value (enum machine_mode mode, rtx fun)
{
  return targetm.calls.libcall_value (mode, fun);
}

/* Look up the tree code for a given rtx code
   to provide the arithmetic operation for REAL_ARITHMETIC.
   The function returns an int because the caller may not know
   what `enum tree_code' means.  */

int
rtx_to_tree_code (enum rtx_code code)
{
  enum tree_code tcode;

  switch (code)
    {
    case PLUS:
      tcode = PLUS_EXPR;
      break;
    case MINUS:
      tcode = MINUS_EXPR;
      break;
    case MULT:
      tcode = MULT_EXPR;
      break;
    case DIV:
      tcode = RDIV_EXPR;
      break;
    case SMIN:
      tcode = MIN_EXPR;
      break;
    case SMAX:
      tcode = MAX_EXPR;
      break;
    default:
      tcode = LAST_AND_UNUSED_TREE_CODE;
      break;
    }
  return ((int) tcode);
}

#include "gt-explow.h"