1*e4b17023SJohn Marino /* Convert function calls to rtl insns, for GNU C compiler.
2*e4b17023SJohn Marino Copyright (C) 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3*e4b17023SJohn Marino 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010,
4*e4b17023SJohn Marino 2011, 2012 Free Software Foundation, Inc.
5*e4b17023SJohn Marino
6*e4b17023SJohn Marino This file is part of GCC.
7*e4b17023SJohn Marino
8*e4b17023SJohn Marino GCC is free software; you can redistribute it and/or modify it under
9*e4b17023SJohn Marino the terms of the GNU General Public License as published by the Free
10*e4b17023SJohn Marino Software Foundation; either version 3, or (at your option) any later
11*e4b17023SJohn Marino version.
12*e4b17023SJohn Marino
13*e4b17023SJohn Marino GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14*e4b17023SJohn Marino WARRANTY; without even the implied warranty of MERCHANTABILITY or
15*e4b17023SJohn Marino FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16*e4b17023SJohn Marino for more details.
17*e4b17023SJohn Marino
18*e4b17023SJohn Marino You should have received a copy of the GNU General Public License
19*e4b17023SJohn Marino along with GCC; see the file COPYING3. If not see
20*e4b17023SJohn Marino <http://www.gnu.org/licenses/>. */
21*e4b17023SJohn Marino
22*e4b17023SJohn Marino #include "config.h"
23*e4b17023SJohn Marino #include "system.h"
24*e4b17023SJohn Marino #include "coretypes.h"
25*e4b17023SJohn Marino #include "tm.h"
26*e4b17023SJohn Marino #include "rtl.h"
27*e4b17023SJohn Marino #include "tree.h"
28*e4b17023SJohn Marino #include "gimple.h"
29*e4b17023SJohn Marino #include "flags.h"
30*e4b17023SJohn Marino #include "expr.h"
31*e4b17023SJohn Marino #include "optabs.h"
32*e4b17023SJohn Marino #include "libfuncs.h"
33*e4b17023SJohn Marino #include "function.h"
34*e4b17023SJohn Marino #include "regs.h"
35*e4b17023SJohn Marino #include "diagnostic-core.h"
36*e4b17023SJohn Marino #include "output.h"
37*e4b17023SJohn Marino #include "tm_p.h"
38*e4b17023SJohn Marino #include "timevar.h"
39*e4b17023SJohn Marino #include "sbitmap.h"
40*e4b17023SJohn Marino #include "langhooks.h"
41*e4b17023SJohn Marino #include "target.h"
42*e4b17023SJohn Marino #include "cgraph.h"
43*e4b17023SJohn Marino #include "except.h"
44*e4b17023SJohn Marino #include "dbgcnt.h"
45*e4b17023SJohn Marino #include "tree-flow.h"
46*e4b17023SJohn Marino
47*e4b17023SJohn Marino /* Like PREFERRED_STACK_BOUNDARY but in units of bytes, not bits. */
48*e4b17023SJohn Marino #define STACK_BYTES (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT)
49*e4b17023SJohn Marino
50*e4b17023SJohn Marino /* Data structure and subroutines used within expand_call. */
51*e4b17023SJohn Marino
struct arg_data
{
  /* Tree node for this argument.  */
  tree tree_value;
  /* Mode for value; TYPE_MODE unless promoted.  */
  enum machine_mode mode;
  /* Current RTL value for argument, or 0 if it isn't precomputed.  */
  rtx value;
  /* Initially-computed RTL value for argument; only for const functions.  */
  rtx initial_value;
  /* Register to pass this argument in, 0 if passed on stack, or a
     PARALLEL if the arg is to be copied into multiple non-contiguous
     registers.  */
  rtx reg;
  /* Register to pass this argument in when generating tail call sequence.
     This is not the same register as for normal calls on machines with
     register windows.  */
  rtx tail_call_reg;
  /* If REG is a PARALLEL, this is a copy of VALUE pulled into the correct
     form for emit_group_move.  */
  rtx parallel_value;
  /* If REG was promoted from the actual mode of the argument expression,
     indicates whether the promotion is sign- or zero-extended.  */
  int unsignedp;
  /* Number of bytes to put in registers.  0 means put the whole arg
     in registers.  Also 0 if not passed in registers.  */
  int partial;
  /* Nonzero if argument must be passed on stack.
     Note that some arguments may be passed on the stack
     even though pass_on_stack is zero, just because FUNCTION_ARG says so.
     pass_on_stack identifies arguments that *cannot* go in registers.  */
  int pass_on_stack;
  /* Some fields packaged up for locate_and_pad_parm.  */
  struct locate_and_pad_arg_data locate;
  /* Location on the stack at which parameter should be stored.  The store
     has already been done if STACK == VALUE.  */
  rtx stack;
  /* Location on the stack of the start of this argument slot.  This can
     differ from STACK if this arg pads downward.  This location is known
     to be aligned to TARGET_FUNCTION_ARG_BOUNDARY.  */
  rtx stack_slot;
  /* Place that this stack area has been saved, if needed.  */
  rtx save_area;
  /* If an argument's alignment does not permit direct copying into registers,
     copy in smaller-sized pieces into pseudos.  These are stored in a
     block pointed to by this field.  The next field says how many
     word-sized pseudos we made.  */
  rtx *aligned_regs;
  int n_aligned_regs;
};
102*e4b17023SJohn Marino
/* A vector of one char per byte of stack space.  A byte is nonzero if
   the corresponding stack location has been used.
   This vector is used to prevent a function call within an argument from
   clobbering any stack already set up.  */
static char *stack_usage_map;

/* Size of STACK_USAGE_MAP.  */
static int highest_outgoing_arg_in_use;

/* A bitmap of virtual-incoming stack space.  Bit is set if the corresponding
   stack location's tail call argument has been already stored into the stack.
   This bitmap is used to prevent sibling call optimization if function tries
   to use parent's incoming argument slots when they have been already
   overwritten with tail call arguments.  */
static sbitmap stored_args_map;

/* stack_arg_under_construction is nonzero when an argument may be
   initialized with a constructor call (including a C function that
   returns a BLKmode struct) and expand_call must take special action
   to make sure the object being constructed does not overlap the
   argument list for the constructor call.  */
static int stack_arg_under_construction;
125*e4b17023SJohn Marino
/* Forward declarations for the static helpers defined later in this file.  */
static void emit_call_1 (rtx, tree, tree, tree, HOST_WIDE_INT, HOST_WIDE_INT,
			 HOST_WIDE_INT, rtx, rtx, int, rtx, int,
			 cumulative_args_t);
static void precompute_register_parameters (int, struct arg_data *, int *);
static int store_one_arg (struct arg_data *, rtx, int, int, int);
static void store_unaligned_arguments_into_pseudos (struct arg_data *, int);
static int finalize_must_preallocate (int, int, struct arg_data *,
				      struct args_size *);
static void precompute_arguments (int, struct arg_data *);
static int compute_argument_block_size (int, struct args_size *, tree, tree, int);
static void initialize_argument_information (int, struct arg_data *,
					     struct args_size *, int,
					     tree, tree,
					     tree, tree, cumulative_args_t, int,
					     rtx *, int *, int *, int *,
					     bool *, bool);
static void compute_argument_addresses (struct arg_data *, rtx, int);
static rtx rtx_for_function_call (tree, tree);
static void load_register_parameters (struct arg_data *, int, rtx *, int,
				      int, int *);
static rtx emit_library_call_value_1 (int, rtx, rtx, enum libcall_type,
				      enum machine_mode, int, va_list);
static int special_function_p (const_tree, int);
static int check_sibcall_argument_overlap_1 (rtx);
static int check_sibcall_argument_overlap (rtx, struct arg_data *, int);

static int combine_pending_stack_adjustment_and_call (int, struct args_size *,
						      unsigned int);
static tree split_complex_types (tree);

/* Helpers for saving/restoring the fixed register-parameter stack area
   exist only on targets that define REG_PARM_STACK_SPACE.  */
#ifdef REG_PARM_STACK_SPACE
static rtx save_fixed_argument_area (int, rtx, int *, int *);
static void restore_fixed_argument_area (rtx, rtx, int, int);
#endif
160*e4b17023SJohn Marino
161*e4b17023SJohn Marino /* Force FUNEXP into a form suitable for the address of a CALL,
162*e4b17023SJohn Marino and return that as an rtx. Also load the static chain register
163*e4b17023SJohn Marino if FNDECL is a nested function.
164*e4b17023SJohn Marino
165*e4b17023SJohn Marino CALL_FUSAGE points to a variable holding the prospective
166*e4b17023SJohn Marino CALL_INSN_FUNCTION_USAGE information. */
167*e4b17023SJohn Marino
168*e4b17023SJohn Marino rtx
prepare_call_address(tree fndecl,rtx funexp,rtx static_chain_value,rtx * call_fusage,int reg_parm_seen,int sibcallp)169*e4b17023SJohn Marino prepare_call_address (tree fndecl, rtx funexp, rtx static_chain_value,
170*e4b17023SJohn Marino rtx *call_fusage, int reg_parm_seen, int sibcallp)
171*e4b17023SJohn Marino {
172*e4b17023SJohn Marino /* Make a valid memory address and copy constants through pseudo-regs,
173*e4b17023SJohn Marino but not for a constant address if -fno-function-cse. */
174*e4b17023SJohn Marino if (GET_CODE (funexp) != SYMBOL_REF)
175*e4b17023SJohn Marino /* If we are using registers for parameters, force the
176*e4b17023SJohn Marino function address into a register now. */
177*e4b17023SJohn Marino funexp = ((reg_parm_seen
178*e4b17023SJohn Marino && targetm.small_register_classes_for_mode_p (FUNCTION_MODE))
179*e4b17023SJohn Marino ? force_not_mem (memory_address (FUNCTION_MODE, funexp))
180*e4b17023SJohn Marino : memory_address (FUNCTION_MODE, funexp));
181*e4b17023SJohn Marino else if (! sibcallp)
182*e4b17023SJohn Marino {
183*e4b17023SJohn Marino #ifndef NO_FUNCTION_CSE
184*e4b17023SJohn Marino if (optimize && ! flag_no_function_cse)
185*e4b17023SJohn Marino funexp = force_reg (Pmode, funexp);
186*e4b17023SJohn Marino #endif
187*e4b17023SJohn Marino }
188*e4b17023SJohn Marino
189*e4b17023SJohn Marino if (static_chain_value != 0)
190*e4b17023SJohn Marino {
191*e4b17023SJohn Marino rtx chain;
192*e4b17023SJohn Marino
193*e4b17023SJohn Marino gcc_assert (fndecl);
194*e4b17023SJohn Marino chain = targetm.calls.static_chain (fndecl, false);
195*e4b17023SJohn Marino static_chain_value = convert_memory_address (Pmode, static_chain_value);
196*e4b17023SJohn Marino
197*e4b17023SJohn Marino emit_move_insn (chain, static_chain_value);
198*e4b17023SJohn Marino if (REG_P (chain))
199*e4b17023SJohn Marino use_reg (call_fusage, chain);
200*e4b17023SJohn Marino }
201*e4b17023SJohn Marino
202*e4b17023SJohn Marino return funexp;
203*e4b17023SJohn Marino }
204*e4b17023SJohn Marino
/* Generate instructions to call function FUNEXP,
   and optionally pop the results.
   The CALL_INSN is the first insn generated.

   FNDECL is the declaration node of the function.  This is given to the
   hook TARGET_RETURN_POPS_ARGS to determine whether this function pops
   its own args.

   FUNTYPE is the data type of the function.  This is given to the hook
   TARGET_RETURN_POPS_ARGS to determine whether this function pops its
   own args.  We used to allow an identifier for library functions, but
   that doesn't work when the return type is an aggregate type and the
   calling convention says that the pointer to this aggregate is to be
   popped by the callee.

   STACK_SIZE is the number of bytes of arguments on the stack,
   ROUNDED_STACK_SIZE is that number rounded up to
   PREFERRED_STACK_BOUNDARY; zero if the size is variable.  This is
   both to put into the call insn and to generate explicit popping
   code if necessary.

   STRUCT_VALUE_SIZE is the number of bytes wanted in a structure value.
   It is zero if this call doesn't want a structure value.

   NEXT_ARG_REG is the rtx that results from executing
     targetm.calls.function_arg (&args_so_far, VOIDmode, void_type_node, true)
   just after all the args have had their registers assigned.
   This could be whatever you like, but normally it is the first
   arg-register beyond those used for args in this call,
   or 0 if all the arg-registers are used in this call.
   It is passed on to `gen_call' so you can put this info in the call insn.

   VALREG is a hard register in which a value is returned,
   or 0 if the call does not return a value.

   OLD_INHIBIT_DEFER_POP is the value that `inhibit_defer_pop' had before
   the args to this call were processed.
   We restore `inhibit_defer_pop' to that value.

   CALL_FUSAGE is either empty or an EXPR_LIST of USE expressions that
   denote registers used by the called function.

   ECF_FLAGS is a mask of ECF_* properties (const, pure, noreturn, sibcall,
   returns-twice, ...) used to pick the call pattern and annotate the insn.  */

static void
emit_call_1 (rtx funexp, tree fntree ATTRIBUTE_UNUSED, tree fndecl ATTRIBUTE_UNUSED,
	     tree funtype ATTRIBUTE_UNUSED,
	     HOST_WIDE_INT stack_size ATTRIBUTE_UNUSED,
	     HOST_WIDE_INT rounded_stack_size,
	     HOST_WIDE_INT struct_value_size ATTRIBUTE_UNUSED,
	     rtx next_arg_reg ATTRIBUTE_UNUSED, rtx valreg,
	     int old_inhibit_defer_pop, rtx call_fusage, int ecf_flags,
	     cumulative_args_t args_so_far ATTRIBUTE_UNUSED)
{
  rtx rounded_stack_size_rtx = GEN_INT (rounded_stack_size);
  rtx call_insn, call, funmem;
  int already_popped = 0;
  /* Number of argument bytes the callee itself pops on return.  */
  HOST_WIDE_INT n_popped
    = targetm.calls.return_pops_args (fndecl, funtype, stack_size);

#ifdef CALL_POPS_ARGS
  n_popped += CALL_POPS_ARGS (*get_cumulative_args (args_so_far));
#endif

  /* Ensure address is valid.  SYMBOL_REF is already valid, so no need,
     and we don't want to load it into a register as an optimization,
     because prepare_call_address already did it if it should be done.  */
  if (GET_CODE (funexp) != SYMBOL_REF)
    funexp = memory_address (FUNCTION_MODE, funexp);

  funmem = gen_rtx_MEM (FUNCTION_MODE, funexp);
  if (fndecl && TREE_CODE (fndecl) == FUNCTION_DECL)
    {
      tree t = fndecl;

      /* Although a built-in FUNCTION_DECL and its non-__builtin
	 counterpart compare equal and get a shared mem_attrs, they
	 produce different dump output in compare-debug compilations,
	 if an entry gets garbage collected in one compilation, then
	 adds a different (but equivalent) entry, while the other
	 doesn't run the garbage collector at the same spot and then
	 shares the mem_attr with the equivalent entry.  */
      if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL)
	{
	  tree t2 = builtin_decl_explicit (DECL_FUNCTION_CODE (t));
	  if (t2)
	    t = t2;
	}

      set_mem_expr (funmem, t);
    }
  else if (fntree)
    set_mem_expr (funmem, build_simple_mem_ref (CALL_EXPR_FN (fntree)));

  /* Pick a call pattern.  The four alternatives below form one long
     if/else chain stitched together across the #endif boundaries, so
     exactly one pattern (or gcc_unreachable) is selected.  */
#if defined (HAVE_sibcall_pop) && defined (HAVE_sibcall_value_pop)
  if ((ecf_flags & ECF_SIBCALL)
      && HAVE_sibcall_pop && HAVE_sibcall_value_pop
      && (n_popped > 0 || stack_size == 0))
    {
      rtx n_pop = GEN_INT (n_popped);
      rtx pat;

      /* If this subroutine pops its own args, record that in the call insn
	 if possible, for the sake of frame pointer elimination.  */

      if (valreg)
	pat = GEN_SIBCALL_VALUE_POP (valreg, funmem, rounded_stack_size_rtx,
				     next_arg_reg, n_pop);
      else
	pat = GEN_SIBCALL_POP (funmem, rounded_stack_size_rtx, next_arg_reg,
			       n_pop);

      emit_call_insn (pat);
      already_popped = 1;
    }
  else
#endif

#if defined (HAVE_call_pop) && defined (HAVE_call_value_pop)
  /* If the target has "call" or "call_value" insns, then prefer them
     if no arguments are actually popped.  If the target does not have
     "call" or "call_value" insns, then we must use the popping versions
     even if the call has no arguments to pop.  */
#if defined (HAVE_call) && defined (HAVE_call_value)
  if (HAVE_call && HAVE_call_value && HAVE_call_pop && HAVE_call_value_pop
      && n_popped > 0)
#else
  if (HAVE_call_pop && HAVE_call_value_pop)
#endif
    {
      rtx n_pop = GEN_INT (n_popped);
      rtx pat;

      /* If this subroutine pops its own args, record that in the call insn
	 if possible, for the sake of frame pointer elimination.  */

      if (valreg)
	pat = GEN_CALL_VALUE_POP (valreg, funmem, rounded_stack_size_rtx,
				  next_arg_reg, n_pop);
      else
	pat = GEN_CALL_POP (funmem, rounded_stack_size_rtx, next_arg_reg,
			    n_pop);

      emit_call_insn (pat);
      already_popped = 1;
    }
  else
#endif

#if defined (HAVE_sibcall) && defined (HAVE_sibcall_value)
  if ((ecf_flags & ECF_SIBCALL)
      && HAVE_sibcall && HAVE_sibcall_value)
    {
      if (valreg)
	emit_call_insn (GEN_SIBCALL_VALUE (valreg, funmem,
					   rounded_stack_size_rtx,
					   next_arg_reg, NULL_RTX));
      else
	emit_call_insn (GEN_SIBCALL (funmem, rounded_stack_size_rtx,
				     next_arg_reg,
				     GEN_INT (struct_value_size)));
    }
  else
#endif

#if defined (HAVE_call) && defined (HAVE_call_value)
  if (HAVE_call && HAVE_call_value)
    {
      if (valreg)
	emit_call_insn (GEN_CALL_VALUE (valreg, funmem, rounded_stack_size_rtx,
					next_arg_reg, NULL_RTX));
      else
	emit_call_insn (GEN_CALL (funmem, rounded_stack_size_rtx, next_arg_reg,
				  GEN_INT (struct_value_size)));
    }
  else
#endif
    gcc_unreachable ();

  /* Find the call we just emitted.  */
  call_insn = last_call_insn ();

  /* Some targets create a fresh MEM instead of reusing the one provided
     above.  Set its MEM_EXPR.  */
  call = PATTERN (call_insn);
  if (GET_CODE (call) == PARALLEL)
    call = XVECEXP (call, 0, 0);
  if (GET_CODE (call) == SET)
    call = SET_SRC (call);
  if (GET_CODE (call) == CALL
      && MEM_P (XEXP (call, 0))
      && MEM_EXPR (XEXP (call, 0)) == NULL_TREE
      && MEM_EXPR (funmem) != NULL_TREE)
    set_mem_expr (XEXP (call, 0), MEM_EXPR (funmem));

  /* Put the register usage information there.  */
  add_function_usage_to (call_insn, call_fusage);

  /* If this is a const call, then set the insn's unchanging bit.  */
  if (ecf_flags & ECF_CONST)
    RTL_CONST_CALL_P (call_insn) = 1;

  /* If this is a pure call, then set the insn's unchanging bit.  */
  if (ecf_flags & ECF_PURE)
    RTL_PURE_CALL_P (call_insn) = 1;

  /* If this is a const or pure call that may nonetheless loop forever,
     mark the insn so the RTL optimizers keep it alive.  */
  if (ecf_flags & ECF_LOOPING_CONST_OR_PURE)
    RTL_LOOPING_CONST_OR_PURE_CALL_P (call_insn) = 1;

  /* Create a nothrow REG_EH_REGION note, if needed.  */
  make_reg_eh_region_note (call_insn, ecf_flags, 0);

  if (ecf_flags & ECF_NORETURN)
    add_reg_note (call_insn, REG_NORETURN, const0_rtx);

  if (ecf_flags & ECF_RETURNS_TWICE)
    {
      add_reg_note (call_insn, REG_SETJMP, const0_rtx);
      cfun->calls_setjmp = 1;
    }

  SIBLING_CALL_P (call_insn) = ((ecf_flags & ECF_SIBCALL) != 0);

  /* Restore this now, so that we do defer pops for this call's args
     if the context of the call as a whole permits.  */
  inhibit_defer_pop = old_inhibit_defer_pop;

  if (n_popped > 0)
    {
      if (!already_popped)
	CALL_INSN_FUNCTION_USAGE (call_insn)
	  = gen_rtx_EXPR_LIST (VOIDmode,
			       gen_rtx_CLOBBER (VOIDmode, stack_pointer_rtx),
			       CALL_INSN_FUNCTION_USAGE (call_insn));
      rounded_stack_size -= n_popped;
      rounded_stack_size_rtx = GEN_INT (rounded_stack_size);
      stack_pointer_delta -= n_popped;

      add_reg_note (call_insn, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta));

      /* If popping is needed, stack realign must use DRAP.  */
      if (SUPPORTS_STACK_ALIGNMENT)
	crtl->need_drap = true;
    }
  /* For noreturn calls when not accumulating outgoing args force
     REG_ARGS_SIZE note to prevent crossjumping of calls with different
     args sizes.  */
  else if (!ACCUMULATE_OUTGOING_ARGS && (ecf_flags & ECF_NORETURN) != 0)
    add_reg_note (call_insn, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta));

  if (!ACCUMULATE_OUTGOING_ARGS)
    {
      /* If returning from the subroutine does not automatically pop the args,
	 we need an instruction to pop them sooner or later.
	 Perhaps do it now; perhaps just record how much space to pop later.

	 If returning from the subroutine does pop the args, indicate that the
	 stack pointer will be changed.  */

      if (rounded_stack_size != 0)
	{
	  if (ecf_flags & ECF_NORETURN)
	    /* Just pretend we did the pop.  */
	    stack_pointer_delta -= rounded_stack_size;
	  else if (flag_defer_pop && inhibit_defer_pop == 0
		   && ! (ecf_flags & (ECF_CONST | ECF_PURE)))
	    pending_stack_adjust += rounded_stack_size;
	  else
	    adjust_stack (rounded_stack_size_rtx);
	}
    }
  /* When we accumulate outgoing args, we must avoid any stack manipulations.
     Restore the stack pointer to its original value now.  Usually
     ACCUMULATE_OUTGOING_ARGS targets don't get here, but there are exceptions.
     On i386 ACCUMULATE_OUTGOING_ARGS can be enabled on demand, and
     popping variants of functions exist as well.

     ??? We may optimize similar to defer_pop above, but it is
     probably not worthwhile.

     ??? It will be worthwhile to enable combine_stack_adjustments even for
     such machines.  */
  else if (n_popped)
    anti_adjust_stack (GEN_INT (n_popped));
}
489*e4b17023SJohn Marino
490*e4b17023SJohn Marino /* Determine if the function identified by NAME and FNDECL is one with
491*e4b17023SJohn Marino special properties we wish to know about.
492*e4b17023SJohn Marino
493*e4b17023SJohn Marino For example, if the function might return more than one time (setjmp), then
494*e4b17023SJohn Marino set RETURNS_TWICE to a nonzero value.
495*e4b17023SJohn Marino
496*e4b17023SJohn Marino Similarly set NORETURN if the function is in the longjmp family.
497*e4b17023SJohn Marino
498*e4b17023SJohn Marino Set MAY_BE_ALLOCA for any memory allocation function that might allocate
499*e4b17023SJohn Marino space from the stack such as alloca. */
500*e4b17023SJohn Marino
501*e4b17023SJohn Marino static int
special_function_p(const_tree fndecl,int flags)502*e4b17023SJohn Marino special_function_p (const_tree fndecl, int flags)
503*e4b17023SJohn Marino {
504*e4b17023SJohn Marino if (fndecl && DECL_NAME (fndecl)
505*e4b17023SJohn Marino && IDENTIFIER_LENGTH (DECL_NAME (fndecl)) <= 17
506*e4b17023SJohn Marino /* Exclude functions not at the file scope, or not `extern',
507*e4b17023SJohn Marino since they are not the magic functions we would otherwise
508*e4b17023SJohn Marino think they are.
509*e4b17023SJohn Marino FIXME: this should be handled with attributes, not with this
510*e4b17023SJohn Marino hacky imitation of DECL_ASSEMBLER_NAME. It's (also) wrong
511*e4b17023SJohn Marino because you can declare fork() inside a function if you
512*e4b17023SJohn Marino wish. */
513*e4b17023SJohn Marino && (DECL_CONTEXT (fndecl) == NULL_TREE
514*e4b17023SJohn Marino || TREE_CODE (DECL_CONTEXT (fndecl)) == TRANSLATION_UNIT_DECL)
515*e4b17023SJohn Marino && TREE_PUBLIC (fndecl))
516*e4b17023SJohn Marino {
517*e4b17023SJohn Marino const char *name = IDENTIFIER_POINTER (DECL_NAME (fndecl));
518*e4b17023SJohn Marino const char *tname = name;
519*e4b17023SJohn Marino
520*e4b17023SJohn Marino /* We assume that alloca will always be called by name. It
521*e4b17023SJohn Marino makes no sense to pass it as a pointer-to-function to
522*e4b17023SJohn Marino anything that does not understand its behavior. */
523*e4b17023SJohn Marino if (((IDENTIFIER_LENGTH (DECL_NAME (fndecl)) == 6
524*e4b17023SJohn Marino && name[0] == 'a'
525*e4b17023SJohn Marino && ! strcmp (name, "alloca"))
526*e4b17023SJohn Marino || (IDENTIFIER_LENGTH (DECL_NAME (fndecl)) == 16
527*e4b17023SJohn Marino && name[0] == '_'
528*e4b17023SJohn Marino && ! strcmp (name, "__builtin_alloca"))))
529*e4b17023SJohn Marino flags |= ECF_MAY_BE_ALLOCA;
530*e4b17023SJohn Marino
531*e4b17023SJohn Marino /* Disregard prefix _, __, __x or __builtin_. */
532*e4b17023SJohn Marino if (name[0] == '_')
533*e4b17023SJohn Marino {
534*e4b17023SJohn Marino if (name[1] == '_'
535*e4b17023SJohn Marino && name[2] == 'b'
536*e4b17023SJohn Marino && !strncmp (name + 3, "uiltin_", 7))
537*e4b17023SJohn Marino tname += 10;
538*e4b17023SJohn Marino else if (name[1] == '_' && name[2] == 'x')
539*e4b17023SJohn Marino tname += 3;
540*e4b17023SJohn Marino else if (name[1] == '_')
541*e4b17023SJohn Marino tname += 2;
542*e4b17023SJohn Marino else
543*e4b17023SJohn Marino tname += 1;
544*e4b17023SJohn Marino }
545*e4b17023SJohn Marino
546*e4b17023SJohn Marino if (tname[0] == 's')
547*e4b17023SJohn Marino {
548*e4b17023SJohn Marino if ((tname[1] == 'e'
549*e4b17023SJohn Marino && (! strcmp (tname, "setjmp")
550*e4b17023SJohn Marino || ! strcmp (tname, "setjmp_syscall")))
551*e4b17023SJohn Marino || (tname[1] == 'i'
552*e4b17023SJohn Marino && ! strcmp (tname, "sigsetjmp"))
553*e4b17023SJohn Marino || (tname[1] == 'a'
554*e4b17023SJohn Marino && ! strcmp (tname, "savectx")))
555*e4b17023SJohn Marino flags |= ECF_RETURNS_TWICE;
556*e4b17023SJohn Marino
557*e4b17023SJohn Marino if (tname[1] == 'i'
558*e4b17023SJohn Marino && ! strcmp (tname, "siglongjmp"))
559*e4b17023SJohn Marino flags |= ECF_NORETURN;
560*e4b17023SJohn Marino }
561*e4b17023SJohn Marino else if ((tname[0] == 'q' && tname[1] == 's'
562*e4b17023SJohn Marino && ! strcmp (tname, "qsetjmp"))
563*e4b17023SJohn Marino || (tname[0] == 'v' && tname[1] == 'f'
564*e4b17023SJohn Marino && ! strcmp (tname, "vfork"))
565*e4b17023SJohn Marino || (tname[0] == 'g' && tname[1] == 'e'
566*e4b17023SJohn Marino && !strcmp (tname, "getcontext")))
567*e4b17023SJohn Marino flags |= ECF_RETURNS_TWICE;
568*e4b17023SJohn Marino
569*e4b17023SJohn Marino else if (tname[0] == 'l' && tname[1] == 'o'
570*e4b17023SJohn Marino && ! strcmp (tname, "longjmp"))
571*e4b17023SJohn Marino flags |= ECF_NORETURN;
572*e4b17023SJohn Marino }
573*e4b17023SJohn Marino
574*e4b17023SJohn Marino return flags;
575*e4b17023SJohn Marino }
576*e4b17023SJohn Marino
577*e4b17023SJohn Marino /* Return nonzero when FNDECL represents a call to setjmp. */
578*e4b17023SJohn Marino
579*e4b17023SJohn Marino int
setjmp_call_p(const_tree fndecl)580*e4b17023SJohn Marino setjmp_call_p (const_tree fndecl)
581*e4b17023SJohn Marino {
582*e4b17023SJohn Marino if (DECL_IS_RETURNS_TWICE (fndecl))
583*e4b17023SJohn Marino return ECF_RETURNS_TWICE;
584*e4b17023SJohn Marino return special_function_p (fndecl, 0) & ECF_RETURNS_TWICE;
585*e4b17023SJohn Marino }
586*e4b17023SJohn Marino
587*e4b17023SJohn Marino
588*e4b17023SJohn Marino /* Return true if STMT is an alloca call. */
589*e4b17023SJohn Marino
590*e4b17023SJohn Marino bool
gimple_alloca_call_p(const_gimple stmt)591*e4b17023SJohn Marino gimple_alloca_call_p (const_gimple stmt)
592*e4b17023SJohn Marino {
593*e4b17023SJohn Marino tree fndecl;
594*e4b17023SJohn Marino
595*e4b17023SJohn Marino if (!is_gimple_call (stmt))
596*e4b17023SJohn Marino return false;
597*e4b17023SJohn Marino
598*e4b17023SJohn Marino fndecl = gimple_call_fndecl (stmt);
599*e4b17023SJohn Marino if (fndecl && (special_function_p (fndecl, 0) & ECF_MAY_BE_ALLOCA))
600*e4b17023SJohn Marino return true;
601*e4b17023SJohn Marino
602*e4b17023SJohn Marino return false;
603*e4b17023SJohn Marino }
604*e4b17023SJohn Marino
/* Return true when EXP contains an alloca call.  */
606*e4b17023SJohn Marino
607*e4b17023SJohn Marino bool
alloca_call_p(const_tree exp)608*e4b17023SJohn Marino alloca_call_p (const_tree exp)
609*e4b17023SJohn Marino {
610*e4b17023SJohn Marino if (TREE_CODE (exp) == CALL_EXPR
611*e4b17023SJohn Marino && TREE_CODE (CALL_EXPR_FN (exp)) == ADDR_EXPR
612*e4b17023SJohn Marino && (TREE_CODE (TREE_OPERAND (CALL_EXPR_FN (exp), 0)) == FUNCTION_DECL)
613*e4b17023SJohn Marino && (special_function_p (TREE_OPERAND (CALL_EXPR_FN (exp), 0), 0)
614*e4b17023SJohn Marino & ECF_MAY_BE_ALLOCA))
615*e4b17023SJohn Marino return true;
616*e4b17023SJohn Marino return false;
617*e4b17023SJohn Marino }
618*e4b17023SJohn Marino
619*e4b17023SJohn Marino /* Return TRUE if FNDECL is either a TM builtin or a TM cloned
620*e4b17023SJohn Marino function. Return FALSE otherwise. */
621*e4b17023SJohn Marino
static bool
is_tm_builtin (const_tree fndecl)
{
  if (fndecl == NULL)
    return false;

  /* A function the TM pass has cloned for instrumentation counts as a
     TM function.  */
  if (decl_is_tm_clone (fndecl))
    return true;

  /* Otherwise, recognize the transactional-memory builtins by their
     function code.  The list below must enumerate every TM builtin;
     anything else falls through to `default' and returns false.  */
  if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
    {
      switch (DECL_FUNCTION_CODE (fndecl))
	{
	case BUILT_IN_TM_COMMIT:
	case BUILT_IN_TM_COMMIT_EH:
	case BUILT_IN_TM_ABORT:
	case BUILT_IN_TM_IRREVOCABLE:
	case BUILT_IN_TM_GETTMCLONE_IRR:
	case BUILT_IN_TM_MEMCPY:
	case BUILT_IN_TM_MEMMOVE:
	case BUILT_IN_TM_MEMSET:
	CASE_BUILT_IN_TM_STORE (1):
	CASE_BUILT_IN_TM_STORE (2):
	CASE_BUILT_IN_TM_STORE (4):
	CASE_BUILT_IN_TM_STORE (8):
	CASE_BUILT_IN_TM_STORE (FLOAT):
	CASE_BUILT_IN_TM_STORE (DOUBLE):
	CASE_BUILT_IN_TM_STORE (LDOUBLE):
	CASE_BUILT_IN_TM_STORE (M64):
	CASE_BUILT_IN_TM_STORE (M128):
	CASE_BUILT_IN_TM_STORE (M256):
	CASE_BUILT_IN_TM_LOAD (1):
	CASE_BUILT_IN_TM_LOAD (2):
	CASE_BUILT_IN_TM_LOAD (4):
	CASE_BUILT_IN_TM_LOAD (8):
	CASE_BUILT_IN_TM_LOAD (FLOAT):
	CASE_BUILT_IN_TM_LOAD (DOUBLE):
	CASE_BUILT_IN_TM_LOAD (LDOUBLE):
	CASE_BUILT_IN_TM_LOAD (M64):
	CASE_BUILT_IN_TM_LOAD (M128):
	CASE_BUILT_IN_TM_LOAD (M256):
	case BUILT_IN_TM_LOG:
	case BUILT_IN_TM_LOG_1:
	case BUILT_IN_TM_LOG_2:
	case BUILT_IN_TM_LOG_4:
	case BUILT_IN_TM_LOG_8:
	case BUILT_IN_TM_LOG_FLOAT:
	case BUILT_IN_TM_LOG_DOUBLE:
	case BUILT_IN_TM_LOG_LDOUBLE:
	case BUILT_IN_TM_LOG_M64:
	case BUILT_IN_TM_LOG_M128:
	case BUILT_IN_TM_LOG_M256:
	  return true;
	default:
	  break;
	}
    }
  return false;
}
681*e4b17023SJohn Marino
682*e4b17023SJohn Marino /* Detect flags (function attributes) from the function decl or type node. */
683*e4b17023SJohn Marino
int
flags_from_decl_or_type (const_tree exp)
{
  int flags = 0;

  if (DECL_P (exp))
    {
      /* The function exp may have the `malloc' attribute.  */
      if (DECL_IS_MALLOC (exp))
	flags |= ECF_MALLOC;

      /* The function exp may have the `returns_twice' attribute.  */
      if (DECL_IS_RETURNS_TWICE (exp))
	flags |= ECF_RETURNS_TWICE;

      /* Process the pure and const attributes.  */
      if (TREE_READONLY (exp))
	flags |= ECF_CONST;
      if (DECL_PURE_P (exp))
	flags |= ECF_PURE;
      if (DECL_LOOPING_CONST_OR_PURE_P (exp))
	flags |= ECF_LOOPING_CONST_OR_PURE;

      if (DECL_IS_NOVOPS (exp))
	flags |= ECF_NOVOPS;
      if (lookup_attribute ("leaf", DECL_ATTRIBUTES (exp)))
	flags |= ECF_LEAF;

      if (TREE_NOTHROW (exp))
	flags |= ECF_NOTHROW;

      /* When transactional memory is enabled, classify the callee for
	 the TM passes.  Note this relies on ECF_CONST/ECF_NOVOPS having
	 already been accumulated above.  */
      if (flag_tm)
	{
	  if (is_tm_builtin (exp))
	    flags |= ECF_TM_BUILTIN;
	  else if ((flags & (ECF_CONST|ECF_NOVOPS)) != 0
		   || lookup_attribute ("transaction_pure",
					TYPE_ATTRIBUTES (TREE_TYPE (exp))))
	    flags |= ECF_TM_PURE;
	}

      /* Finally fold in flags deduced from well-known names
	 (setjmp/alloca and friends); this takes and returns the
	 accumulated FLAGS.  */
      flags = special_function_p (exp, flags);
    }
  else if (TYPE_P (exp))
    {
      if (TYPE_READONLY (exp))
	flags |= ECF_CONST;

      if (flag_tm
	  && ((flags & ECF_CONST) != 0
	      || lookup_attribute ("transaction_pure", TYPE_ATTRIBUTES (exp))))
	flags |= ECF_TM_PURE;
    }

  /* TREE_THIS_VOLATILE on a decl or type marks a non-returning
     function; combined with const/pure it means "may loop forever"
     rather than "no side effects".  This applies to both branches
     above.  */
  if (TREE_THIS_VOLATILE (exp))
    {
      flags |= ECF_NORETURN;
      if (flags & (ECF_CONST|ECF_PURE))
	flags |= ECF_LOOPING_CONST_OR_PURE;
    }

  return flags;
}
747*e4b17023SJohn Marino
748*e4b17023SJohn Marino /* Detect flags from a CALL_EXPR. */
749*e4b17023SJohn Marino
750*e4b17023SJohn Marino int
call_expr_flags(const_tree t)751*e4b17023SJohn Marino call_expr_flags (const_tree t)
752*e4b17023SJohn Marino {
753*e4b17023SJohn Marino int flags;
754*e4b17023SJohn Marino tree decl = get_callee_fndecl (t);
755*e4b17023SJohn Marino
756*e4b17023SJohn Marino if (decl)
757*e4b17023SJohn Marino flags = flags_from_decl_or_type (decl);
758*e4b17023SJohn Marino else
759*e4b17023SJohn Marino {
760*e4b17023SJohn Marino t = TREE_TYPE (CALL_EXPR_FN (t));
761*e4b17023SJohn Marino if (t && TREE_CODE (t) == POINTER_TYPE)
762*e4b17023SJohn Marino flags = flags_from_decl_or_type (TREE_TYPE (t));
763*e4b17023SJohn Marino else
764*e4b17023SJohn Marino flags = 0;
765*e4b17023SJohn Marino }
766*e4b17023SJohn Marino
767*e4b17023SJohn Marino return flags;
768*e4b17023SJohn Marino }
769*e4b17023SJohn Marino
770*e4b17023SJohn Marino /* Precompute all register parameters as described by ARGS, storing values
771*e4b17023SJohn Marino into fields within the ARGS array.
772*e4b17023SJohn Marino
773*e4b17023SJohn Marino NUM_ACTUALS indicates the total number elements in the ARGS array.
774*e4b17023SJohn Marino
775*e4b17023SJohn Marino Set REG_PARM_SEEN if we encounter a register parameter. */
776*e4b17023SJohn Marino
static void
precompute_register_parameters (int num_actuals, struct arg_data *args,
				int *reg_parm_seen)
{
  int i;

  *reg_parm_seen = 0;

  for (i = 0; i < num_actuals; i++)
    if (args[i].reg != 0 && ! args[i].pass_on_stack)
      {
	*reg_parm_seen = 1;

	/* Expand the argument now if it has not been computed yet.
	   The temp slots are preserved so the value stays live until
	   the call itself is emitted.  */
	if (args[i].value == 0)
	  {
	    push_temp_slots ();
	    args[i].value = expand_normal (args[i].tree_value);
	    preserve_temp_slots (args[i].value);
	    pop_temp_slots ();
	  }

	/* If we are to promote the function arg to a wider mode,
	   do it now.  */

	if (args[i].mode != TYPE_MODE (TREE_TYPE (args[i].tree_value)))
	  args[i].value
	    = convert_modes (args[i].mode,
			     TYPE_MODE (TREE_TYPE (args[i].tree_value)),
			     args[i].value, args[i].unsignedp);

	/* If the value is a non-legitimate constant, force it into a
	   pseudo now.  TLS symbols sometimes need a call to resolve.  */
	if (CONSTANT_P (args[i].value)
	    && !targetm.legitimate_constant_p (args[i].mode, args[i].value))
	  args[i].value = force_reg (args[i].mode, args[i].value);

	/* If we're going to have to load the value by parts, pull the
	   parts into pseudos.  The part extraction process can involve
	   non-trivial computation.  */
	if (GET_CODE (args[i].reg) == PARALLEL)
	  {
	    tree type = TREE_TYPE (args[i].tree_value);
	    args[i].parallel_value
	      = emit_group_load_into_temps (args[i].reg, args[i].value,
					    type, int_size_in_bytes (type));
	  }

	/* If the value is expensive, and we are inside an appropriately
	   short loop, put the value into a pseudo and then put the pseudo
	   into the hard reg.

	   For small register classes, also do this if this call uses
	   register parameters.  This is to avoid reload conflicts while
	   loading the parameters registers.  */

	else if ((! (REG_P (args[i].value)
		     || (GET_CODE (args[i].value) == SUBREG
			 && REG_P (SUBREG_REG (args[i].value)))))
		 && args[i].mode != BLKmode
		 && set_src_cost (args[i].value, optimize_insn_for_speed_p ())
		    > COSTS_N_INSNS (1)
		 && ((*reg_parm_seen
		      && targetm.small_register_classes_for_mode_p (args[i].mode))
		     || optimize))
	  args[i].value = copy_to_mode_reg (args[i].mode, args[i].value);
      }
}
844*e4b17023SJohn Marino
845*e4b17023SJohn Marino #ifdef REG_PARM_STACK_SPACE
846*e4b17023SJohn Marino
847*e4b17023SJohn Marino /* The argument list is the property of the called routine and it
848*e4b17023SJohn Marino may clobber it. If the fixed area has been used for previous
849*e4b17023SJohn Marino parameters, we must save and restore it. */
850*e4b17023SJohn Marino
static rtx
save_fixed_argument_area (int reg_parm_stack_space, rtx argblock, int *low_to_save, int *high_to_save)
{
  int low;
  int high;

  /* Compute the boundary of the area that needs to be saved, if any.  */
  high = reg_parm_stack_space;
#ifdef ARGS_GROW_DOWNWARD
  high += 1;
#endif
  if (high > highest_outgoing_arg_in_use)
    high = highest_outgoing_arg_in_use;

  /* Find the first in-use byte; if none is found the area is clean and
     nothing needs saving.  */
  for (low = 0; low < high; low++)
    if (stack_usage_map[low] != 0)
      {
	int num_to_save;
	enum machine_mode save_mode;
	int delta;
	rtx stack_area;
	rtx save_area;

	/* Shrink HIGH down to the last in-use byte; the loop condition
	   guarantees at least stack_usage_map[low] is nonzero, so this
	   terminates.  */
	while (stack_usage_map[--high] == 0)
	  ;

	/* Report the bounds back to the caller for the later restore.  */
	*low_to_save = low;
	*high_to_save = high;

	num_to_save = high - low + 1;
	save_mode = mode_for_size (num_to_save * BITS_PER_UNIT, MODE_INT, 1);

	/* If we don't have the required alignment, must do this
	   in BLKmode.  */
	if ((low & (MIN (GET_MODE_SIZE (save_mode),
			 BIGGEST_ALIGNMENT / UNITS_PER_WORD) - 1)))
	  save_mode = BLKmode;

#ifdef ARGS_GROW_DOWNWARD
	delta = -high;
#else
	delta = low;
#endif
	stack_area = gen_rtx_MEM (save_mode,
				  memory_address (save_mode,
						  plus_constant (argblock,
								 delta)));

	set_mem_align (stack_area, PARM_BOUNDARY);
	/* BLKmode saves go to a stack temporary; anything else fits in
	   a single pseudo register.  */
	if (save_mode == BLKmode)
	  {
	    save_area = assign_stack_temp (BLKmode, num_to_save, 0);
	    emit_block_move (validize_mem (save_area), stack_area,
			     GEN_INT (num_to_save), BLOCK_OP_CALL_PARM);
	  }
	else
	  {
	    save_area = gen_reg_rtx (save_mode);
	    emit_move_insn (save_area, stack_area);
	  }

	return save_area;
      }

  return NULL_RTX;
}
917*e4b17023SJohn Marino
918*e4b17023SJohn Marino static void
restore_fixed_argument_area(rtx save_area,rtx argblock,int high_to_save,int low_to_save)919*e4b17023SJohn Marino restore_fixed_argument_area (rtx save_area, rtx argblock, int high_to_save, int low_to_save)
920*e4b17023SJohn Marino {
921*e4b17023SJohn Marino enum machine_mode save_mode = GET_MODE (save_area);
922*e4b17023SJohn Marino int delta;
923*e4b17023SJohn Marino rtx stack_area;
924*e4b17023SJohn Marino
925*e4b17023SJohn Marino #ifdef ARGS_GROW_DOWNWARD
926*e4b17023SJohn Marino delta = -high_to_save;
927*e4b17023SJohn Marino #else
928*e4b17023SJohn Marino delta = low_to_save;
929*e4b17023SJohn Marino #endif
930*e4b17023SJohn Marino stack_area = gen_rtx_MEM (save_mode,
931*e4b17023SJohn Marino memory_address (save_mode,
932*e4b17023SJohn Marino plus_constant (argblock, delta)));
933*e4b17023SJohn Marino set_mem_align (stack_area, PARM_BOUNDARY);
934*e4b17023SJohn Marino
935*e4b17023SJohn Marino if (save_mode != BLKmode)
936*e4b17023SJohn Marino emit_move_insn (stack_area, save_area);
937*e4b17023SJohn Marino else
938*e4b17023SJohn Marino emit_block_move (stack_area, validize_mem (save_area),
939*e4b17023SJohn Marino GEN_INT (high_to_save - low_to_save + 1),
940*e4b17023SJohn Marino BLOCK_OP_CALL_PARM);
941*e4b17023SJohn Marino }
942*e4b17023SJohn Marino #endif /* REG_PARM_STACK_SPACE */
943*e4b17023SJohn Marino
944*e4b17023SJohn Marino /* If any elements in ARGS refer to parameters that are to be passed in
945*e4b17023SJohn Marino registers, but not in memory, and whose alignment does not permit a
946*e4b17023SJohn Marino direct copy into registers. Copy the values into a group of pseudos
947*e4b17023SJohn Marino which we will later copy into the appropriate hard registers.
948*e4b17023SJohn Marino
949*e4b17023SJohn Marino Pseudos for each unaligned argument will be stored into the array
950*e4b17023SJohn Marino args[argnum].aligned_regs. The caller is responsible for deallocating
951*e4b17023SJohn Marino the aligned_regs array if it is nonzero. */
952*e4b17023SJohn Marino
static void
store_unaligned_arguments_into_pseudos (struct arg_data *args, int num_actuals)
{
  int i, j;

  /* Only BLKmode register arguments living in under-aligned memory
     need this treatment.  */
  for (i = 0; i < num_actuals; i++)
    if (args[i].reg != 0 && ! args[i].pass_on_stack
	&& args[i].mode == BLKmode
	&& MEM_P (args[i].value)
	&& (MEM_ALIGN (args[i].value)
	    < (unsigned int) MIN (BIGGEST_ALIGNMENT, BITS_PER_WORD)))
      {
	int bytes = int_size_in_bytes (TREE_TYPE (args[i].tree_value));
	int endian_correction = 0;

	/* For a partially-in-registers argument, only the register part
	   needs pseudos; args[i].partial is in bytes and must be a
	   whole number of words.  */
	if (args[i].partial)
	  {
	    gcc_assert (args[i].partial % UNITS_PER_WORD == 0);
	    args[i].n_aligned_regs = args[i].partial / UNITS_PER_WORD;
	  }
	else
	  {
	    args[i].n_aligned_regs
	      = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
	  }

	args[i].aligned_regs = XNEWVEC (rtx, args[i].n_aligned_regs);

	/* Structures smaller than a word are normally aligned to the
	   least significant byte.  On a BYTES_BIG_ENDIAN machine,
	   this means we must skip the empty high order bytes when
	   calculating the bit offset.  */
	if (bytes < UNITS_PER_WORD
#ifdef BLOCK_REG_PADDING
	    && (BLOCK_REG_PADDING (args[i].mode,
				   TREE_TYPE (args[i].tree_value), 1)
		== downward)
#else
	    && BYTES_BIG_ENDIAN
#endif
	    )
	  endian_correction = BITS_PER_WORD - bytes * BITS_PER_UNIT;

	/* Copy the argument word by word into fresh pseudos.  */
	for (j = 0; j < args[i].n_aligned_regs; j++)
	  {
	    rtx reg = gen_reg_rtx (word_mode);
	    rtx word = operand_subword_force (args[i].value, j, BLKmode);
	    int bitsize = MIN (bytes * BITS_PER_UNIT, BITS_PER_WORD);

	    args[i].aligned_regs[j] = reg;
	    word = extract_bit_field (word, bitsize, 0, 1, false, NULL_RTX,
				      word_mode, word_mode);

	    /* There is no need to restrict this code to loading items
	       in TYPE_ALIGN sized hunks.  The bitfield instructions can
	       load up entire word sized registers efficiently.

	       ??? This may not be needed anymore.
	       We use to emit a clobber here but that doesn't let later
	       passes optimize the instructions we emit.  By storing 0 into
	       the register later passes know the first AND to zero out the
	       bitfield being set in the register is unnecessary.  The store
	       of 0 will be deleted as will at least the first AND.  */

	    emit_move_insn (reg, const0_rtx);

	    bytes -= bitsize / BITS_PER_UNIT;
	    store_bit_field (reg, bitsize, endian_correction, 0, 0,
			     word_mode, word);
	  }
      }
}
1025*e4b17023SJohn Marino
1026*e4b17023SJohn Marino /* Fill in ARGS_SIZE and ARGS array based on the parameters found in
1027*e4b17023SJohn Marino CALL_EXPR EXP.
1028*e4b17023SJohn Marino
1029*e4b17023SJohn Marino NUM_ACTUALS is the total number of parameters.
1030*e4b17023SJohn Marino
1031*e4b17023SJohn Marino N_NAMED_ARGS is the total number of named arguments.
1032*e4b17023SJohn Marino
1033*e4b17023SJohn Marino STRUCT_VALUE_ADDR_VALUE is the implicit argument for a struct return
1034*e4b17023SJohn Marino value, or null.
1035*e4b17023SJohn Marino
1036*e4b17023SJohn Marino FNDECL is the tree code for the target of this call (if known)
1037*e4b17023SJohn Marino
1038*e4b17023SJohn Marino ARGS_SO_FAR holds state needed by the target to know where to place
1039*e4b17023SJohn Marino the next argument.
1040*e4b17023SJohn Marino
1041*e4b17023SJohn Marino REG_PARM_STACK_SPACE is the number of bytes of stack space reserved
1042*e4b17023SJohn Marino for arguments which are passed in registers.
1043*e4b17023SJohn Marino
   OLD_STACK_LEVEL is a pointer to an rtx which holds the old stack level
1045*e4b17023SJohn Marino and may be modified by this routine.
1046*e4b17023SJohn Marino
1047*e4b17023SJohn Marino OLD_PENDING_ADJ, MUST_PREALLOCATE and FLAGS are pointers to integer
   flags which may be modified by this routine.
1049*e4b17023SJohn Marino
1050*e4b17023SJohn Marino MAY_TAILCALL is cleared if we encounter an invisible pass-by-reference
1051*e4b17023SJohn Marino that requires allocation of stack space.
1052*e4b17023SJohn Marino
1053*e4b17023SJohn Marino CALL_FROM_THUNK_P is true if this call is the jump from a thunk to
1054*e4b17023SJohn Marino the thunked-to function. */
1055*e4b17023SJohn Marino
1056*e4b17023SJohn Marino static void
initialize_argument_information(int num_actuals ATTRIBUTE_UNUSED,struct arg_data * args,struct args_size * args_size,int n_named_args ATTRIBUTE_UNUSED,tree exp,tree struct_value_addr_value,tree fndecl,tree fntype,cumulative_args_t args_so_far,int reg_parm_stack_space,rtx * old_stack_level,int * old_pending_adj,int * must_preallocate,int * ecf_flags,bool * may_tailcall,bool call_from_thunk_p)1057*e4b17023SJohn Marino initialize_argument_information (int num_actuals ATTRIBUTE_UNUSED,
1058*e4b17023SJohn Marino struct arg_data *args,
1059*e4b17023SJohn Marino struct args_size *args_size,
1060*e4b17023SJohn Marino int n_named_args ATTRIBUTE_UNUSED,
1061*e4b17023SJohn Marino tree exp, tree struct_value_addr_value,
1062*e4b17023SJohn Marino tree fndecl, tree fntype,
1063*e4b17023SJohn Marino cumulative_args_t args_so_far,
1064*e4b17023SJohn Marino int reg_parm_stack_space,
1065*e4b17023SJohn Marino rtx *old_stack_level, int *old_pending_adj,
1066*e4b17023SJohn Marino int *must_preallocate, int *ecf_flags,
1067*e4b17023SJohn Marino bool *may_tailcall, bool call_from_thunk_p)
1068*e4b17023SJohn Marino {
1069*e4b17023SJohn Marino CUMULATIVE_ARGS *args_so_far_pnt = get_cumulative_args (args_so_far);
1070*e4b17023SJohn Marino location_t loc = EXPR_LOCATION (exp);
1071*e4b17023SJohn Marino /* 1 if scanning parms front to back, -1 if scanning back to front. */
1072*e4b17023SJohn Marino int inc;
1073*e4b17023SJohn Marino
1074*e4b17023SJohn Marino /* Count arg position in order args appear. */
1075*e4b17023SJohn Marino int argpos;
1076*e4b17023SJohn Marino
1077*e4b17023SJohn Marino int i;
1078*e4b17023SJohn Marino
1079*e4b17023SJohn Marino args_size->constant = 0;
1080*e4b17023SJohn Marino args_size->var = 0;
1081*e4b17023SJohn Marino
1082*e4b17023SJohn Marino /* In this loop, we consider args in the order they are written.
1083*e4b17023SJohn Marino We fill up ARGS from the front or from the back if necessary
1084*e4b17023SJohn Marino so that in any case the first arg to be pushed ends up at the front. */
1085*e4b17023SJohn Marino
1086*e4b17023SJohn Marino if (PUSH_ARGS_REVERSED)
1087*e4b17023SJohn Marino {
1088*e4b17023SJohn Marino i = num_actuals - 1, inc = -1;
1089*e4b17023SJohn Marino /* In this case, must reverse order of args
1090*e4b17023SJohn Marino so that we compute and push the last arg first. */
1091*e4b17023SJohn Marino }
1092*e4b17023SJohn Marino else
1093*e4b17023SJohn Marino {
1094*e4b17023SJohn Marino i = 0, inc = 1;
1095*e4b17023SJohn Marino }
1096*e4b17023SJohn Marino
1097*e4b17023SJohn Marino /* First fill in the actual arguments in the ARGS array, splitting
1098*e4b17023SJohn Marino complex arguments if necessary. */
1099*e4b17023SJohn Marino {
1100*e4b17023SJohn Marino int j = i;
1101*e4b17023SJohn Marino call_expr_arg_iterator iter;
1102*e4b17023SJohn Marino tree arg;
1103*e4b17023SJohn Marino
1104*e4b17023SJohn Marino if (struct_value_addr_value)
1105*e4b17023SJohn Marino {
1106*e4b17023SJohn Marino args[j].tree_value = struct_value_addr_value;
1107*e4b17023SJohn Marino j += inc;
1108*e4b17023SJohn Marino }
1109*e4b17023SJohn Marino FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
1110*e4b17023SJohn Marino {
1111*e4b17023SJohn Marino tree argtype = TREE_TYPE (arg);
1112*e4b17023SJohn Marino if (targetm.calls.split_complex_arg
1113*e4b17023SJohn Marino && argtype
1114*e4b17023SJohn Marino && TREE_CODE (argtype) == COMPLEX_TYPE
1115*e4b17023SJohn Marino && targetm.calls.split_complex_arg (argtype))
1116*e4b17023SJohn Marino {
1117*e4b17023SJohn Marino tree subtype = TREE_TYPE (argtype);
1118*e4b17023SJohn Marino args[j].tree_value = build1 (REALPART_EXPR, subtype, arg);
1119*e4b17023SJohn Marino j += inc;
1120*e4b17023SJohn Marino args[j].tree_value = build1 (IMAGPART_EXPR, subtype, arg);
1121*e4b17023SJohn Marino }
1122*e4b17023SJohn Marino else
1123*e4b17023SJohn Marino args[j].tree_value = arg;
1124*e4b17023SJohn Marino j += inc;
1125*e4b17023SJohn Marino }
1126*e4b17023SJohn Marino }
1127*e4b17023SJohn Marino
1128*e4b17023SJohn Marino /* I counts args in order (to be) pushed; ARGPOS counts in order written. */
1129*e4b17023SJohn Marino for (argpos = 0; argpos < num_actuals; i += inc, argpos++)
1130*e4b17023SJohn Marino {
1131*e4b17023SJohn Marino tree type = TREE_TYPE (args[i].tree_value);
1132*e4b17023SJohn Marino int unsignedp;
1133*e4b17023SJohn Marino enum machine_mode mode;
1134*e4b17023SJohn Marino
1135*e4b17023SJohn Marino /* Replace erroneous argument with constant zero. */
1136*e4b17023SJohn Marino if (type == error_mark_node || !COMPLETE_TYPE_P (type))
1137*e4b17023SJohn Marino args[i].tree_value = integer_zero_node, type = integer_type_node;
1138*e4b17023SJohn Marino
1139*e4b17023SJohn Marino /* If TYPE is a transparent union or record, pass things the way
1140*e4b17023SJohn Marino we would pass the first field of the union or record. We have
1141*e4b17023SJohn Marino already verified that the modes are the same. */
1142*e4b17023SJohn Marino if ((TREE_CODE (type) == UNION_TYPE || TREE_CODE (type) == RECORD_TYPE)
1143*e4b17023SJohn Marino && TYPE_TRANSPARENT_AGGR (type))
1144*e4b17023SJohn Marino type = TREE_TYPE (first_field (type));
1145*e4b17023SJohn Marino
1146*e4b17023SJohn Marino /* Decide where to pass this arg.
1147*e4b17023SJohn Marino
1148*e4b17023SJohn Marino args[i].reg is nonzero if all or part is passed in registers.
1149*e4b17023SJohn Marino
1150*e4b17023SJohn Marino args[i].partial is nonzero if part but not all is passed in registers,
1151*e4b17023SJohn Marino and the exact value says how many bytes are passed in registers.
1152*e4b17023SJohn Marino
1153*e4b17023SJohn Marino args[i].pass_on_stack is nonzero if the argument must at least be
1154*e4b17023SJohn Marino computed on the stack. It may then be loaded back into registers
1155*e4b17023SJohn Marino if args[i].reg is nonzero.
1156*e4b17023SJohn Marino
1157*e4b17023SJohn Marino These decisions are driven by the FUNCTION_... macros and must agree
1158*e4b17023SJohn Marino with those made by function.c. */
1159*e4b17023SJohn Marino
1160*e4b17023SJohn Marino /* See if this argument should be passed by invisible reference. */
1161*e4b17023SJohn Marino if (pass_by_reference (args_so_far_pnt, TYPE_MODE (type),
1162*e4b17023SJohn Marino type, argpos < n_named_args))
1163*e4b17023SJohn Marino {
1164*e4b17023SJohn Marino bool callee_copies;
1165*e4b17023SJohn Marino tree base = NULL_TREE;
1166*e4b17023SJohn Marino
1167*e4b17023SJohn Marino callee_copies
1168*e4b17023SJohn Marino = reference_callee_copied (args_so_far_pnt, TYPE_MODE (type),
1169*e4b17023SJohn Marino type, argpos < n_named_args);
1170*e4b17023SJohn Marino
1171*e4b17023SJohn Marino /* If we're compiling a thunk, pass through invisible references
1172*e4b17023SJohn Marino instead of making a copy. */
1173*e4b17023SJohn Marino if (call_from_thunk_p
1174*e4b17023SJohn Marino || (callee_copies
1175*e4b17023SJohn Marino && !TREE_ADDRESSABLE (type)
1176*e4b17023SJohn Marino && (base = get_base_address (args[i].tree_value))
1177*e4b17023SJohn Marino && TREE_CODE (base) != SSA_NAME
1178*e4b17023SJohn Marino && (!DECL_P (base) || MEM_P (DECL_RTL (base)))))
1179*e4b17023SJohn Marino {
1180*e4b17023SJohn Marino mark_addressable (args[i].tree_value);
1181*e4b17023SJohn Marino
1182*e4b17023SJohn Marino /* We can't use sibcalls if a callee-copied argument is
1183*e4b17023SJohn Marino stored in the current function's frame. */
1184*e4b17023SJohn Marino if (!call_from_thunk_p && DECL_P (base) && !TREE_STATIC (base))
1185*e4b17023SJohn Marino *may_tailcall = false;
1186*e4b17023SJohn Marino
1187*e4b17023SJohn Marino args[i].tree_value = build_fold_addr_expr_loc (loc,
1188*e4b17023SJohn Marino args[i].tree_value);
1189*e4b17023SJohn Marino type = TREE_TYPE (args[i].tree_value);
1190*e4b17023SJohn Marino
1191*e4b17023SJohn Marino if (*ecf_flags & ECF_CONST)
1192*e4b17023SJohn Marino *ecf_flags &= ~(ECF_CONST | ECF_LOOPING_CONST_OR_PURE);
1193*e4b17023SJohn Marino }
1194*e4b17023SJohn Marino else
1195*e4b17023SJohn Marino {
1196*e4b17023SJohn Marino /* We make a copy of the object and pass the address to the
1197*e4b17023SJohn Marino function being called. */
1198*e4b17023SJohn Marino rtx copy;
1199*e4b17023SJohn Marino
1200*e4b17023SJohn Marino if (!COMPLETE_TYPE_P (type)
1201*e4b17023SJohn Marino || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST
1202*e4b17023SJohn Marino || (flag_stack_check == GENERIC_STACK_CHECK
1203*e4b17023SJohn Marino && compare_tree_int (TYPE_SIZE_UNIT (type),
1204*e4b17023SJohn Marino STACK_CHECK_MAX_VAR_SIZE) > 0))
1205*e4b17023SJohn Marino {
1206*e4b17023SJohn Marino /* This is a variable-sized object. Make space on the stack
1207*e4b17023SJohn Marino for it. */
1208*e4b17023SJohn Marino rtx size_rtx = expr_size (args[i].tree_value);
1209*e4b17023SJohn Marino
1210*e4b17023SJohn Marino if (*old_stack_level == 0)
1211*e4b17023SJohn Marino {
1212*e4b17023SJohn Marino emit_stack_save (SAVE_BLOCK, old_stack_level);
1213*e4b17023SJohn Marino *old_pending_adj = pending_stack_adjust;
1214*e4b17023SJohn Marino pending_stack_adjust = 0;
1215*e4b17023SJohn Marino }
1216*e4b17023SJohn Marino
1217*e4b17023SJohn Marino /* We can pass TRUE as the 4th argument because we just
1218*e4b17023SJohn Marino saved the stack pointer and will restore it right after
1219*e4b17023SJohn Marino the call. */
1220*e4b17023SJohn Marino copy = allocate_dynamic_stack_space (size_rtx,
1221*e4b17023SJohn Marino TYPE_ALIGN (type),
1222*e4b17023SJohn Marino TYPE_ALIGN (type),
1223*e4b17023SJohn Marino true);
1224*e4b17023SJohn Marino copy = gen_rtx_MEM (BLKmode, copy);
1225*e4b17023SJohn Marino set_mem_attributes (copy, type, 1);
1226*e4b17023SJohn Marino }
1227*e4b17023SJohn Marino else
1228*e4b17023SJohn Marino copy = assign_temp (type, 0, 1, 0);
1229*e4b17023SJohn Marino
1230*e4b17023SJohn Marino store_expr (args[i].tree_value, copy, 0, false);
1231*e4b17023SJohn Marino
1232*e4b17023SJohn Marino /* Just change the const function to pure and then let
1233*e4b17023SJohn Marino the next test clear the pure based on
1234*e4b17023SJohn Marino callee_copies. */
1235*e4b17023SJohn Marino if (*ecf_flags & ECF_CONST)
1236*e4b17023SJohn Marino {
1237*e4b17023SJohn Marino *ecf_flags &= ~ECF_CONST;
1238*e4b17023SJohn Marino *ecf_flags |= ECF_PURE;
1239*e4b17023SJohn Marino }
1240*e4b17023SJohn Marino
1241*e4b17023SJohn Marino if (!callee_copies && *ecf_flags & ECF_PURE)
1242*e4b17023SJohn Marino *ecf_flags &= ~(ECF_PURE | ECF_LOOPING_CONST_OR_PURE);
1243*e4b17023SJohn Marino
1244*e4b17023SJohn Marino args[i].tree_value
1245*e4b17023SJohn Marino = build_fold_addr_expr_loc (loc, make_tree (type, copy));
1246*e4b17023SJohn Marino type = TREE_TYPE (args[i].tree_value);
1247*e4b17023SJohn Marino *may_tailcall = false;
1248*e4b17023SJohn Marino }
1249*e4b17023SJohn Marino }
1250*e4b17023SJohn Marino
1251*e4b17023SJohn Marino unsignedp = TYPE_UNSIGNED (type);
1252*e4b17023SJohn Marino mode = promote_function_mode (type, TYPE_MODE (type), &unsignedp,
1253*e4b17023SJohn Marino fndecl ? TREE_TYPE (fndecl) : fntype, 0);
1254*e4b17023SJohn Marino
1255*e4b17023SJohn Marino args[i].unsignedp = unsignedp;
1256*e4b17023SJohn Marino args[i].mode = mode;
1257*e4b17023SJohn Marino
1258*e4b17023SJohn Marino args[i].reg = targetm.calls.function_arg (args_so_far, mode, type,
1259*e4b17023SJohn Marino argpos < n_named_args);
1260*e4b17023SJohn Marino
1261*e4b17023SJohn Marino /* If this is a sibling call and the machine has register windows, the
1262*e4b17023SJohn Marino register window has to be unwinded before calling the routine, so
1263*e4b17023SJohn Marino arguments have to go into the incoming registers. */
1264*e4b17023SJohn Marino if (targetm.calls.function_incoming_arg != targetm.calls.function_arg)
1265*e4b17023SJohn Marino args[i].tail_call_reg
1266*e4b17023SJohn Marino = targetm.calls.function_incoming_arg (args_so_far, mode, type,
1267*e4b17023SJohn Marino argpos < n_named_args);
1268*e4b17023SJohn Marino else
1269*e4b17023SJohn Marino args[i].tail_call_reg = args[i].reg;
1270*e4b17023SJohn Marino
1271*e4b17023SJohn Marino if (args[i].reg)
1272*e4b17023SJohn Marino args[i].partial
1273*e4b17023SJohn Marino = targetm.calls.arg_partial_bytes (args_so_far, mode, type,
1274*e4b17023SJohn Marino argpos < n_named_args);
1275*e4b17023SJohn Marino
1276*e4b17023SJohn Marino args[i].pass_on_stack = targetm.calls.must_pass_in_stack (mode, type);
1277*e4b17023SJohn Marino
1278*e4b17023SJohn Marino /* If FUNCTION_ARG returned a (parallel [(expr_list (nil) ...) ...]),
1279*e4b17023SJohn Marino it means that we are to pass this arg in the register(s) designated
1280*e4b17023SJohn Marino by the PARALLEL, but also to pass it in the stack. */
1281*e4b17023SJohn Marino if (args[i].reg && GET_CODE (args[i].reg) == PARALLEL
1282*e4b17023SJohn Marino && XEXP (XVECEXP (args[i].reg, 0, 0), 0) == 0)
1283*e4b17023SJohn Marino args[i].pass_on_stack = 1;
1284*e4b17023SJohn Marino
1285*e4b17023SJohn Marino /* If this is an addressable type, we must preallocate the stack
1286*e4b17023SJohn Marino since we must evaluate the object into its final location.
1287*e4b17023SJohn Marino
1288*e4b17023SJohn Marino If this is to be passed in both registers and the stack, it is simpler
1289*e4b17023SJohn Marino to preallocate. */
1290*e4b17023SJohn Marino if (TREE_ADDRESSABLE (type)
1291*e4b17023SJohn Marino || (args[i].pass_on_stack && args[i].reg != 0))
1292*e4b17023SJohn Marino *must_preallocate = 1;
1293*e4b17023SJohn Marino
1294*e4b17023SJohn Marino /* Compute the stack-size of this argument. */
1295*e4b17023SJohn Marino if (args[i].reg == 0 || args[i].partial != 0
1296*e4b17023SJohn Marino || reg_parm_stack_space > 0
1297*e4b17023SJohn Marino || args[i].pass_on_stack)
1298*e4b17023SJohn Marino locate_and_pad_parm (mode, type,
1299*e4b17023SJohn Marino #ifdef STACK_PARMS_IN_REG_PARM_AREA
1300*e4b17023SJohn Marino 1,
1301*e4b17023SJohn Marino #else
1302*e4b17023SJohn Marino args[i].reg != 0,
1303*e4b17023SJohn Marino #endif
1304*e4b17023SJohn Marino args[i].pass_on_stack ? 0 : args[i].partial,
1305*e4b17023SJohn Marino fndecl, args_size, &args[i].locate);
1306*e4b17023SJohn Marino #ifdef BLOCK_REG_PADDING
1307*e4b17023SJohn Marino else
1308*e4b17023SJohn Marino /* The argument is passed entirely in registers. See at which
1309*e4b17023SJohn Marino end it should be padded. */
1310*e4b17023SJohn Marino args[i].locate.where_pad =
1311*e4b17023SJohn Marino BLOCK_REG_PADDING (mode, type,
1312*e4b17023SJohn Marino int_size_in_bytes (type) <= UNITS_PER_WORD);
1313*e4b17023SJohn Marino #endif
1314*e4b17023SJohn Marino
1315*e4b17023SJohn Marino /* Update ARGS_SIZE, the total stack space for args so far. */
1316*e4b17023SJohn Marino
1317*e4b17023SJohn Marino args_size->constant += args[i].locate.size.constant;
1318*e4b17023SJohn Marino if (args[i].locate.size.var)
1319*e4b17023SJohn Marino ADD_PARM_SIZE (*args_size, args[i].locate.size.var);
1320*e4b17023SJohn Marino
1321*e4b17023SJohn Marino /* Increment ARGS_SO_FAR, which has info about which arg-registers
1322*e4b17023SJohn Marino have been used, etc. */
1323*e4b17023SJohn Marino
1324*e4b17023SJohn Marino targetm.calls.function_arg_advance (args_so_far, TYPE_MODE (type),
1325*e4b17023SJohn Marino type, argpos < n_named_args);
1326*e4b17023SJohn Marino }
1327*e4b17023SJohn Marino }
1328*e4b17023SJohn Marino
1329*e4b17023SJohn Marino /* Update ARGS_SIZE to contain the total size for the argument block.
1330*e4b17023SJohn Marino Return the original constant component of the argument block's size.
1331*e4b17023SJohn Marino
1332*e4b17023SJohn Marino REG_PARM_STACK_SPACE holds the number of bytes of stack space reserved
1333*e4b17023SJohn Marino for arguments passed in registers. */
1334*e4b17023SJohn Marino
static int
compute_argument_block_size (int reg_parm_stack_space,
			     struct args_size *args_size,
			     tree fndecl ATTRIBUTE_UNUSED,
			     tree fntype ATTRIBUTE_UNUSED,
			     int preferred_stack_boundary ATTRIBUTE_UNUSED)
{
  /* The caller gets back the constant component as it was before any
     rounding or minimum-size adjustment below.  */
  int unadjusted_args_size = args_size->constant;

  /* For accumulate outgoing args mode we don't need to align, since the frame
     will be already aligned.  Align to STACK_BOUNDARY in order to prevent
     backends from generating misaligned frame sizes.  */
  if (ACCUMULATE_OUTGOING_ARGS && preferred_stack_boundary > STACK_BOUNDARY)
    preferred_stack_boundary = STACK_BOUNDARY;

  /* Compute the actual size of the argument block required.  The variable
     and constant sizes must be combined, the size may have to be rounded,
     and there may be a minimum required size.  */

  if (args_size->var)
    {
      /* Variable-sized block: fold the constant part into the size tree
	 and do all further arithmetic symbolically.  */
      args_size->var = ARGS_SIZE_TREE (*args_size);
      args_size->constant = 0;

      /* Convert the boundary from bits to bytes for the rounding below.  */
      preferred_stack_boundary /= BITS_PER_UNIT;
      if (preferred_stack_boundary > 1)
	{
	  /* We don't handle this case yet.  To handle it correctly we have
	     to add the delta, round and subtract the delta.
	     Currently no machine description requires this support.  */
	  gcc_assert (!(stack_pointer_delta & (preferred_stack_boundary - 1)));
	  args_size->var = round_up (args_size->var, preferred_stack_boundary);
	}

      if (reg_parm_stack_space > 0)
	{
	  /* Require at least the space the target reserves for register
	     parameters.  */
	  args_size->var
	    = size_binop (MAX_EXPR, args_size->var,
			  ssize_int (reg_parm_stack_space));

	  /* The area corresponding to register parameters is not to count in
	     the size of the block we need.  So make the adjustment.  */
	  if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl))))
	    args_size->var
	      = size_binop (MINUS_EXPR, args_size->var,
			    ssize_int (reg_parm_stack_space));
	}
    }
  else
    {
      preferred_stack_boundary /= BITS_PER_UNIT;
      if (preferred_stack_boundary < 1)
	preferred_stack_boundary = 1;
      /* Round CONSTANT + STACK_POINTER_DELTA up to the boundary, then
	 strip the delta back out so only the block itself is counted.  */
      args_size->constant = (((args_size->constant
			       + stack_pointer_delta
			       + preferred_stack_boundary - 1)
			      / preferred_stack_boundary
			      * preferred_stack_boundary)
			     - stack_pointer_delta);

      /* Require at least the space the target reserves for register
	 parameters.  */
      args_size->constant = MAX (args_size->constant,
				 reg_parm_stack_space);

      /* The register-parameter area does not count toward the block size
	 unless the target says outgoing space must include it.  */
      if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl))))
	args_size->constant -= reg_parm_stack_space;
    }
  return unadjusted_args_size;
}
1403*e4b17023SJohn Marino
1404*e4b17023SJohn Marino /* Precompute parameters as needed for a function call.
1405*e4b17023SJohn Marino
1406*e4b17023SJohn Marino FLAGS is mask of ECF_* constants.
1407*e4b17023SJohn Marino
1408*e4b17023SJohn Marino NUM_ACTUALS is the number of arguments.
1409*e4b17023SJohn Marino
1410*e4b17023SJohn Marino ARGS is an array containing information for each argument; this
1411*e4b17023SJohn Marino routine fills in the INITIAL_VALUE and VALUE fields for each
1412*e4b17023SJohn Marino precomputed argument. */
1413*e4b17023SJohn Marino
static void
precompute_arguments (int num_actuals, struct arg_data *args)
{
  int i;

  /* If this is a libcall, then precompute all arguments so that we do not
     get extraneous instructions emitted as part of the libcall sequence.  */

  /* If we preallocated the stack space, and some arguments must be passed
     on the stack, then we must precompute any parameter which contains a
     function call which will store arguments on the stack.
     Otherwise, evaluating the parameter may clobber previous parameters
     which have already been stored into the stack.  (we have code to avoid
     such case by saving the outgoing stack arguments, but it results in
     worse code)  */
  if (!ACCUMULATE_OUTGOING_ARGS)
    return;

  for (i = 0; i < num_actuals; i++)
    {
      tree type;
      enum machine_mode mode;

      /* Only nested calls can clobber outgoing argument slots; any other
	 expression is left to be expanded in place later.  */
      if (TREE_CODE (args[i].tree_value) != CALL_EXPR)
	continue;

      /* If this is an addressable type, we cannot pre-evaluate it.  */
      type = TREE_TYPE (args[i].tree_value);
      gcc_assert (!TREE_ADDRESSABLE (type));

      /* Expand the call now and record its result for both fields.  */
      args[i].initial_value = args[i].value
	= expand_normal (args[i].tree_value);

      mode = TYPE_MODE (type);
      if (mode != args[i].mode)
	{
	  /* The argument is to be passed in a promoted mode; widen the
	     precomputed value accordingly.  */
	  int unsignedp = args[i].unsignedp;
	  args[i].value
	    = convert_modes (args[i].mode, mode,
			     args[i].value, args[i].unsignedp);

	  /* CSE will replace this only if it contains args[i].value
	     pseudo, so convert it down to the declared mode using
	     a SUBREG.  */
	  if (REG_P (args[i].value)
	      && GET_MODE_CLASS (args[i].mode) == MODE_INT
	      && promote_mode (type, mode, &unsignedp) != args[i].mode)
	    {
	      args[i].initial_value
		= gen_lowpart_SUBREG (mode, args[i].value);
	      SUBREG_PROMOTED_VAR_P (args[i].initial_value) = 1;
	      SUBREG_PROMOTED_UNSIGNED_SET (args[i].initial_value,
					    args[i].unsignedp);
	    }
	}
    }
}
1471*e4b17023SJohn Marino
1472*e4b17023SJohn Marino /* Given the current state of MUST_PREALLOCATE and information about
1473*e4b17023SJohn Marino arguments to a function call in NUM_ACTUALS, ARGS and ARGS_SIZE,
1474*e4b17023SJohn Marino compute and return the final value for MUST_PREALLOCATE. */
1475*e4b17023SJohn Marino
1476*e4b17023SJohn Marino static int
finalize_must_preallocate(int must_preallocate,int num_actuals,struct arg_data * args,struct args_size * args_size)1477*e4b17023SJohn Marino finalize_must_preallocate (int must_preallocate, int num_actuals,
1478*e4b17023SJohn Marino struct arg_data *args, struct args_size *args_size)
1479*e4b17023SJohn Marino {
1480*e4b17023SJohn Marino /* See if we have or want to preallocate stack space.
1481*e4b17023SJohn Marino
1482*e4b17023SJohn Marino If we would have to push a partially-in-regs parm
1483*e4b17023SJohn Marino before other stack parms, preallocate stack space instead.
1484*e4b17023SJohn Marino
1485*e4b17023SJohn Marino If the size of some parm is not a multiple of the required stack
1486*e4b17023SJohn Marino alignment, we must preallocate.
1487*e4b17023SJohn Marino
1488*e4b17023SJohn Marino If the total size of arguments that would otherwise create a copy in
1489*e4b17023SJohn Marino a temporary (such as a CALL) is more than half the total argument list
1490*e4b17023SJohn Marino size, preallocation is faster.
1491*e4b17023SJohn Marino
1492*e4b17023SJohn Marino Another reason to preallocate is if we have a machine (like the m88k)
1493*e4b17023SJohn Marino where stack alignment is required to be maintained between every
1494*e4b17023SJohn Marino pair of insns, not just when the call is made. However, we assume here
1495*e4b17023SJohn Marino that such machines either do not have push insns (and hence preallocation
1496*e4b17023SJohn Marino would occur anyway) or the problem is taken care of with
1497*e4b17023SJohn Marino PUSH_ROUNDING. */
1498*e4b17023SJohn Marino
1499*e4b17023SJohn Marino if (! must_preallocate)
1500*e4b17023SJohn Marino {
1501*e4b17023SJohn Marino int partial_seen = 0;
1502*e4b17023SJohn Marino int copy_to_evaluate_size = 0;
1503*e4b17023SJohn Marino int i;
1504*e4b17023SJohn Marino
1505*e4b17023SJohn Marino for (i = 0; i < num_actuals && ! must_preallocate; i++)
1506*e4b17023SJohn Marino {
1507*e4b17023SJohn Marino if (args[i].partial > 0 && ! args[i].pass_on_stack)
1508*e4b17023SJohn Marino partial_seen = 1;
1509*e4b17023SJohn Marino else if (partial_seen && args[i].reg == 0)
1510*e4b17023SJohn Marino must_preallocate = 1;
1511*e4b17023SJohn Marino
1512*e4b17023SJohn Marino if (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode
1513*e4b17023SJohn Marino && (TREE_CODE (args[i].tree_value) == CALL_EXPR
1514*e4b17023SJohn Marino || TREE_CODE (args[i].tree_value) == TARGET_EXPR
1515*e4b17023SJohn Marino || TREE_CODE (args[i].tree_value) == COND_EXPR
1516*e4b17023SJohn Marino || TREE_ADDRESSABLE (TREE_TYPE (args[i].tree_value))))
1517*e4b17023SJohn Marino copy_to_evaluate_size
1518*e4b17023SJohn Marino += int_size_in_bytes (TREE_TYPE (args[i].tree_value));
1519*e4b17023SJohn Marino }
1520*e4b17023SJohn Marino
1521*e4b17023SJohn Marino if (copy_to_evaluate_size * 2 >= args_size->constant
1522*e4b17023SJohn Marino && args_size->constant > 0)
1523*e4b17023SJohn Marino must_preallocate = 1;
1524*e4b17023SJohn Marino }
1525*e4b17023SJohn Marino return must_preallocate;
1526*e4b17023SJohn Marino }
1527*e4b17023SJohn Marino
1528*e4b17023SJohn Marino /* If we preallocated stack space, compute the address of each argument
1529*e4b17023SJohn Marino and store it into the ARGS array.
1530*e4b17023SJohn Marino
1531*e4b17023SJohn Marino We need not ensure it is a valid memory address here; it will be
1532*e4b17023SJohn Marino validized when it is used.
1533*e4b17023SJohn Marino
1534*e4b17023SJohn Marino ARGBLOCK is an rtx for the address of the outgoing arguments. */
1535*e4b17023SJohn Marino
static void
compute_argument_addresses (struct arg_data *args, rtx argblock, int num_actuals)
{
  if (argblock)
    {
      rtx arg_reg = argblock;
      int i, arg_offset = 0;

      /* ARGBLOCK may be a bare base rtx or (PLUS base const); split it
	 into a base and a constant displacement.  */
      if (GET_CODE (argblock) == PLUS)
	arg_reg = XEXP (argblock, 0), arg_offset = INTVAL (XEXP (argblock, 1));

      for (i = 0; i < num_actuals; i++)
	{
	  rtx offset = ARGS_SIZE_RTX (args[i].locate.offset);
	  rtx slot_offset = ARGS_SIZE_RTX (args[i].locate.slot_offset);
	  rtx addr;
	  unsigned int align, boundary;
	  unsigned int units_on_stack = 0;
	  enum machine_mode partial_mode = VOIDmode;

	  /* Skip this parm if it will not be passed on the stack.  */
	  if (! args[i].pass_on_stack
	      && args[i].reg != 0
	      && args[i].partial == 0)
	    continue;

	  /* First build ARGS[I].stack: the MEM addressing the value's
	     location within the argument block.  */
	  if (CONST_INT_P (offset))
	    addr = plus_constant (arg_reg, INTVAL (offset));
	  else
	    addr = gen_rtx_PLUS (Pmode, arg_reg, offset);

	  addr = plus_constant (addr, arg_offset);

	  if (args[i].partial != 0)
	    {
	      /* Only part of the parameter is being passed on the stack.
		 Generate a simple memory reference of the correct size.  */
	      units_on_stack = args[i].locate.size.constant;
	      partial_mode = mode_for_size (units_on_stack * BITS_PER_UNIT,
					    MODE_INT, 1);
	      args[i].stack = gen_rtx_MEM (partial_mode, addr);
	      set_mem_size (args[i].stack, units_on_stack);
	    }
	  else
	    {
	      args[i].stack = gen_rtx_MEM (args[i].mode, addr);
	      set_mem_attributes (args[i].stack,
				  TREE_TYPE (args[i].tree_value), 1);
	    }
	  /* Compute the alignment the value actually has at OFFSET.  */
	  align = BITS_PER_UNIT;
	  boundary = args[i].locate.boundary;
	  if (args[i].locate.where_pad != downward)
	    align = boundary;
	  else if (CONST_INT_P (offset))
	    {
	      /* Padded downward: the usable alignment is the largest
		 power of two dividing both the slot boundary and the
		 byte offset of the value within it.  */
	      align = INTVAL (offset) * BITS_PER_UNIT | boundary;
	      align = align & -align;
	    }
	  set_mem_align (args[i].stack, align);

	  /* Now build ARGS[I].stack_slot: the MEM for the whole slot,
	     whose address may differ from the value's when padded.  */
	  if (CONST_INT_P (slot_offset))
	    addr = plus_constant (arg_reg, INTVAL (slot_offset));
	  else
	    addr = gen_rtx_PLUS (Pmode, arg_reg, slot_offset);

	  addr = plus_constant (addr, arg_offset);

	  if (args[i].partial != 0)
	    {
	      /* Only part of the parameter is being passed on the stack.
		 Generate a simple memory reference of the correct size.
	      */
	      args[i].stack_slot = gen_rtx_MEM (partial_mode, addr);
	      set_mem_size (args[i].stack_slot, units_on_stack);
	    }
	  else
	    {
	      args[i].stack_slot = gen_rtx_MEM (args[i].mode, addr);
	      set_mem_attributes (args[i].stack_slot,
				  TREE_TYPE (args[i].tree_value), 1);
	    }
	  set_mem_align (args[i].stack_slot, args[i].locate.boundary);

	  /* Function incoming arguments may overlap with sibling call
	     outgoing arguments and we cannot allow reordering of reads
	     from function arguments with stores to outgoing arguments
	     of sibling calls.  */
	  set_mem_alias_set (args[i].stack, 0);
	  set_mem_alias_set (args[i].stack_slot, 0);
	}
    }
}
1628*e4b17023SJohn Marino
1629*e4b17023SJohn Marino /* Given a FNDECL and EXP, return an rtx suitable for use as a target address
1630*e4b17023SJohn Marino in a call instruction.
1631*e4b17023SJohn Marino
1632*e4b17023SJohn Marino FNDECL is the tree node for the target function. For an indirect call
1633*e4b17023SJohn Marino FNDECL will be NULL_TREE.
1634*e4b17023SJohn Marino
1635*e4b17023SJohn Marino ADDR is the operand 0 of CALL_EXPR for this call. */
1636*e4b17023SJohn Marino
1637*e4b17023SJohn Marino static rtx
rtx_for_function_call(tree fndecl,tree addr)1638*e4b17023SJohn Marino rtx_for_function_call (tree fndecl, tree addr)
1639*e4b17023SJohn Marino {
1640*e4b17023SJohn Marino rtx funexp;
1641*e4b17023SJohn Marino
1642*e4b17023SJohn Marino /* Get the function to call, in the form of RTL. */
1643*e4b17023SJohn Marino if (fndecl)
1644*e4b17023SJohn Marino {
1645*e4b17023SJohn Marino /* If this is the first use of the function, see if we need to
1646*e4b17023SJohn Marino make an external definition for it. */
1647*e4b17023SJohn Marino if (!TREE_USED (fndecl) && fndecl != current_function_decl)
1648*e4b17023SJohn Marino {
1649*e4b17023SJohn Marino assemble_external (fndecl);
1650*e4b17023SJohn Marino TREE_USED (fndecl) = 1;
1651*e4b17023SJohn Marino }
1652*e4b17023SJohn Marino
1653*e4b17023SJohn Marino /* Get a SYMBOL_REF rtx for the function address. */
1654*e4b17023SJohn Marino funexp = XEXP (DECL_RTL (fndecl), 0);
1655*e4b17023SJohn Marino }
1656*e4b17023SJohn Marino else
1657*e4b17023SJohn Marino /* Generate an rtx (probably a pseudo-register) for the address. */
1658*e4b17023SJohn Marino {
1659*e4b17023SJohn Marino push_temp_slots ();
1660*e4b17023SJohn Marino funexp = expand_normal (addr);
1661*e4b17023SJohn Marino pop_temp_slots (); /* FUNEXP can't be BLKmode. */
1662*e4b17023SJohn Marino }
1663*e4b17023SJohn Marino return funexp;
1664*e4b17023SJohn Marino }
1665*e4b17023SJohn Marino
/* Internal state for internal_arg_pointer_based_exp and its helpers.  */
static struct
{
  /* Last insn that has been scanned by internal_arg_pointer_based_exp_scan,
     or NULL_RTX if none has been scanned yet.  */
  rtx scan_start;
  /* Vector indexed by REGNO - FIRST_PSEUDO_REGISTER, recording if a pseudo is
     based on crtl->args.internal_arg_pointer.  The element is NULL_RTX if the
     pseudo isn't based on it, a CONST_INT offset if the pseudo is based on it
     with fixed offset, or PC if this is with variable or unknown offset.  */
  VEC(rtx, heap) *cache;
} internal_arg_pointer_exp_state;

/* Forward declaration; the scan helper below uses this before it is
   defined.  */
static rtx internal_arg_pointer_based_exp (rtx, bool);
1680*e4b17023SJohn Marino
1681*e4b17023SJohn Marino /* Helper function for internal_arg_pointer_based_exp. Scan insns in
1682*e4b17023SJohn Marino the tail call sequence, starting with first insn that hasn't been
1683*e4b17023SJohn Marino scanned yet, and note for each pseudo on the LHS whether it is based
1684*e4b17023SJohn Marino on crtl->args.internal_arg_pointer or not, and what offset from that
1685*e4b17023SJohn Marino that pointer it has. */
1686*e4b17023SJohn Marino
static void
internal_arg_pointer_based_exp_scan (void)
{
  rtx insn, scan_start = internal_arg_pointer_exp_state.scan_start;

  /* Resume after the insn the previous invocation stopped at, or start
     from the first insn if nothing has been scanned yet.  */
  if (scan_start == NULL_RTX)
    insn = get_insns ();
  else
    insn = NEXT_INSN (scan_start);

  while (insn)
    {
      rtx set = single_set (insn);
      /* Only single sets of pseudo registers are of interest.  */
      if (set && REG_P (SET_DEST (set)) && !HARD_REGISTER_P (SET_DEST (set)))
	{
	  rtx val = NULL_RTX;
	  unsigned int idx = REGNO (SET_DEST (set)) - FIRST_PSEUDO_REGISTER;
	  /* Punt on pseudos set multiple times.  */
	  if (idx < VEC_length (rtx, internal_arg_pointer_exp_state.cache)
	      && (VEC_index (rtx, internal_arg_pointer_exp_state.cache, idx)
		  != NULL_RTX))
	    val = pc_rtx;
	  else
	    val = internal_arg_pointer_based_exp (SET_SRC (set), false);
	  if (val != NULL_RTX)
	    {
	      /* Grow the cache on demand; new slots are cleared to
		 NULL_RTX, i.e. "not based on the arg pointer".  */
	      if (idx
		  >= VEC_length (rtx, internal_arg_pointer_exp_state.cache))
		VEC_safe_grow_cleared (rtx, heap,
				       internal_arg_pointer_exp_state.cache,
				       idx + 1);
	      VEC_replace (rtx, internal_arg_pointer_exp_state.cache,
			   idx, val);
	    }
	}
      /* Remember the last insn processed so a later call can resume
	 right after it.  */
      if (NEXT_INSN (insn) == NULL_RTX)
	scan_start = insn;
      insn = NEXT_INSN (insn);
    }

  internal_arg_pointer_exp_state.scan_start = scan_start;
}
1729*e4b17023SJohn Marino
1730*e4b17023SJohn Marino /* Helper function for internal_arg_pointer_based_exp, called through
1731*e4b17023SJohn Marino for_each_rtx. Return 1 if *LOC is a register based on
1732*e4b17023SJohn Marino crtl->args.internal_arg_pointer. Return -1 if *LOC is not based on it
1733*e4b17023SJohn Marino and the subexpressions need not be examined. Otherwise return 0. */
1734*e4b17023SJohn Marino
1735*e4b17023SJohn Marino static int
internal_arg_pointer_based_exp_1(rtx * loc,void * data ATTRIBUTE_UNUSED)1736*e4b17023SJohn Marino internal_arg_pointer_based_exp_1 (rtx *loc, void *data ATTRIBUTE_UNUSED)
1737*e4b17023SJohn Marino {
1738*e4b17023SJohn Marino if (REG_P (*loc) && internal_arg_pointer_based_exp (*loc, false) != NULL_RTX)
1739*e4b17023SJohn Marino return 1;
1740*e4b17023SJohn Marino if (MEM_P (*loc))
1741*e4b17023SJohn Marino return -1;
1742*e4b17023SJohn Marino return 0;
1743*e4b17023SJohn Marino }
1744*e4b17023SJohn Marino
1745*e4b17023SJohn Marino /* Compute whether RTL is based on crtl->args.internal_arg_pointer. Return
1746*e4b17023SJohn Marino NULL_RTX if RTL isn't based on it, a CONST_INT offset if RTL is based on
1747*e4b17023SJohn Marino it with fixed offset, or PC if this is with variable or unknown offset.
1748*e4b17023SJohn Marino TOPLEVEL is true if the function is invoked at the topmost level. */
1749*e4b17023SJohn Marino
1750*e4b17023SJohn Marino static rtx
internal_arg_pointer_based_exp(rtx rtl,bool toplevel)1751*e4b17023SJohn Marino internal_arg_pointer_based_exp (rtx rtl, bool toplevel)
1752*e4b17023SJohn Marino {
1753*e4b17023SJohn Marino if (CONSTANT_P (rtl))
1754*e4b17023SJohn Marino return NULL_RTX;
1755*e4b17023SJohn Marino
1756*e4b17023SJohn Marino if (rtl == crtl->args.internal_arg_pointer)
1757*e4b17023SJohn Marino return const0_rtx;
1758*e4b17023SJohn Marino
1759*e4b17023SJohn Marino if (REG_P (rtl) && HARD_REGISTER_P (rtl))
1760*e4b17023SJohn Marino return NULL_RTX;
1761*e4b17023SJohn Marino
1762*e4b17023SJohn Marino if (GET_CODE (rtl) == PLUS && CONST_INT_P (XEXP (rtl, 1)))
1763*e4b17023SJohn Marino {
1764*e4b17023SJohn Marino rtx val = internal_arg_pointer_based_exp (XEXP (rtl, 0), toplevel);
1765*e4b17023SJohn Marino if (val == NULL_RTX || val == pc_rtx)
1766*e4b17023SJohn Marino return val;
1767*e4b17023SJohn Marino return plus_constant (val, INTVAL (XEXP (rtl, 1)));
1768*e4b17023SJohn Marino }
1769*e4b17023SJohn Marino
1770*e4b17023SJohn Marino /* When called at the topmost level, scan pseudo assignments in between the
1771*e4b17023SJohn Marino last scanned instruction in the tail call sequence and the latest insn
1772*e4b17023SJohn Marino in that sequence. */
1773*e4b17023SJohn Marino if (toplevel)
1774*e4b17023SJohn Marino internal_arg_pointer_based_exp_scan ();
1775*e4b17023SJohn Marino
1776*e4b17023SJohn Marino if (REG_P (rtl))
1777*e4b17023SJohn Marino {
1778*e4b17023SJohn Marino unsigned int idx = REGNO (rtl) - FIRST_PSEUDO_REGISTER;
1779*e4b17023SJohn Marino if (idx < VEC_length (rtx, internal_arg_pointer_exp_state.cache))
1780*e4b17023SJohn Marino return VEC_index (rtx, internal_arg_pointer_exp_state.cache, idx);
1781*e4b17023SJohn Marino
1782*e4b17023SJohn Marino return NULL_RTX;
1783*e4b17023SJohn Marino }
1784*e4b17023SJohn Marino
1785*e4b17023SJohn Marino if (for_each_rtx (&rtl, internal_arg_pointer_based_exp_1, NULL))
1786*e4b17023SJohn Marino return pc_rtx;
1787*e4b17023SJohn Marino
1788*e4b17023SJohn Marino return NULL_RTX;
1789*e4b17023SJohn Marino }
1790*e4b17023SJohn Marino
/* Return true if and only if SIZE storage units (usually bytes)
   starting from address ADDR overlap with already clobbered argument
   area.  This function is used to determine if we should give up a
   sibcall.  */

static bool
mem_overlaps_already_clobbered_arg_p (rtx addr, unsigned HOST_WIDE_INT size)
{
  /* Offset of ADDR from the incoming argument pointer, once known.  */
  HOST_WIDE_INT i;
  rtx val;

  /* Nothing has been stored into the argument area yet, so nothing
     can overlap.  */
  if (sbitmap_empty_p (stored_args_map))
    return false;
  val = internal_arg_pointer_based_exp (addr, true);
  if (val == NULL_RTX)
    /* ADDR is not based on the argument pointer at all.  */
    return false;
  else if (val == pc_rtx)
    /* Based on it, but at a variable or unknown offset: be
       conservative and report an overlap.  */
    return true;
  else
    i = INTVAL (val);
#ifdef STACK_GROWS_DOWNWARD
  i -= crtl->args.pretend_args_size;
#else
  i += crtl->args.pretend_args_size;
#endif

#ifdef ARGS_GROW_DOWNWARD
  i = -i - size;
#endif
  if (size > 0)
    {
      unsigned HOST_WIDE_INT k;

      /* Check every unit of the [i, i+size) range against the bitmap
	 of slots clobbered by already-stored tail call arguments.  */
      for (k = 0; k < size; k++)
	if (i + k < stored_args_map->n_bits
	    && TEST_BIT (stored_args_map, i + k))
	  return true;
    }

  return false;
}
1832*e4b17023SJohn Marino
/* Do the register loads required for any wholly-register parms or any
   parms which are passed both on the stack and in a register.  Their
   expressions were already evaluated.

   Mark all register-parms as living through the call, putting these USE
   insns in the CALL_INSN_FUNCTION_USAGE field.

   When IS_SIBCALL, perform the check_sibcall_argument_overlap
   checking, setting *SIBCALL_FAILURE if appropriate.  */

static void
load_register_parameters (struct arg_data *args, int num_actuals,
			  rtx *call_fusage, int flags, int is_sibcall,
			  int *sibcall_failure)
{
  int i, j;

  for (i = 0; i < num_actuals; i++)
    {
      /* Sibling calls may use a different register than normal calls
	 would for the same argument.  */
      rtx reg = ((flags & ECF_SIBCALL)
		 ? args[i].tail_call_reg : args[i].reg);
      if (reg)
	{
	  int partial = args[i].partial;
	  int nregs;
	  int size = 0;
	  /* Remember where the insn stream ends so the overlap check
	     below can examine just the insns emitted for this arg.  */
	  rtx before_arg = get_last_insn ();
	  /* Set non-negative if we must move a word at a time, even if
	     just one word (e.g, partial == 4 && mode == DFmode).  Set
	     to -1 if we just use a normal move insn.  This value can be
	     zero if the argument is a zero size structure.  */
	  nregs = -1;
	  if (GET_CODE (reg) == PARALLEL)
	    ;
	  else if (partial)
	    {
	      gcc_assert (partial % UNITS_PER_WORD == 0);
	      nregs = partial / UNITS_PER_WORD;
	    }
	  else if (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode)
	    {
	      size = int_size_in_bytes (TREE_TYPE (args[i].tree_value));
	      nregs = (size + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
	    }
	  else
	    size = GET_MODE_SIZE (args[i].mode);

	  /* Handle calls that pass values in multiple non-contiguous
	     locations.  The Irix 6 ABI has examples of this.  */

	  if (GET_CODE (reg) == PARALLEL)
	    emit_group_move (reg, args[i].parallel_value);

	  /* If simple case, just do move.  If normal partial, store_one_arg
	     has already loaded the register for us.  In all other cases,
	     load the register(s) from memory.  */

	  else if (nregs == -1)
	    {
	      emit_move_insn (reg, args[i].value);
#ifdef BLOCK_REG_PADDING
	      /* Handle case where we have a value that needs shifting
		 up to the msb.  eg. a QImode value and we're padding
		 upward on a BYTES_BIG_ENDIAN machine.  */
	      if (size < UNITS_PER_WORD
		  && (args[i].locate.where_pad
		      == (BYTES_BIG_ENDIAN ? upward : downward)))
		{
		  rtx x;
		  int shift = (UNITS_PER_WORD - size) * BITS_PER_UNIT;

		  /* Assigning REG here rather than a temp makes CALL_FUSAGE
		     report the whole reg as used.  Strictly speaking, the
		     call only uses SIZE bytes at the msb end, but it doesn't
		     seem worth generating rtl to say that.  */
		  reg = gen_rtx_REG (word_mode, REGNO (reg));
		  x = expand_shift (LSHIFT_EXPR, word_mode, reg, shift, reg, 1);
		  if (x != reg)
		    emit_move_insn (reg, x);
		}
#endif
	    }

	  /* If we have pre-computed the values to put in the registers in
	     the case of non-aligned structures, copy them in now.  */

	  else if (args[i].n_aligned_regs != 0)
	    for (j = 0; j < args[i].n_aligned_regs; j++)
	      emit_move_insn (gen_rtx_REG (word_mode, REGNO (reg) + j),
			      args[i].aligned_regs[j]);

	  else if (partial == 0 || args[i].pass_on_stack)
	    {
	      rtx mem = validize_mem (args[i].value);

	      /* Check for overlap with already clobbered argument area,
		 providing that this has non-zero size.  */
	      if (is_sibcall
		  && (size == 0
		      || mem_overlaps_already_clobbered_arg_p
			 (XEXP (args[i].value, 0), size)))
		*sibcall_failure = 1;

	      /* Handle a BLKmode that needs shifting.  */
	      if (nregs == 1 && size < UNITS_PER_WORD
#ifdef BLOCK_REG_PADDING
		  && args[i].locate.where_pad == downward
#else
		  && BYTES_BIG_ENDIAN
#endif
		 )
		{
		  rtx tem = operand_subword_force (mem, 0, args[i].mode);
		  rtx ri = gen_rtx_REG (word_mode, REGNO (reg));
		  rtx x = gen_reg_rtx (word_mode);
		  int shift = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
		  enum tree_code dir = BYTES_BIG_ENDIAN ? RSHIFT_EXPR
							: LSHIFT_EXPR;

		  emit_move_insn (x, tem);
		  x = expand_shift (dir, word_mode, x, shift, ri, 1);
		  if (x != ri)
		    emit_move_insn (ri, x);
		}
	      else
		move_block_to_reg (REGNO (reg), mem, nregs, args[i].mode);
	    }

	  /* When a parameter is a block, and perhaps in other cases, it is
	     possible that it did a load from an argument slot that was
	     already clobbered.  */
	  if (is_sibcall
	      && check_sibcall_argument_overlap (before_arg, &args[i], 0))
	    *sibcall_failure = 1;

	  /* Handle calls that pass values in multiple non-contiguous
	     locations.  The Irix 6 ABI has examples of this.  */
	  if (GET_CODE (reg) == PARALLEL)
	    use_group_regs (call_fusage, reg);
	  else if (nregs == -1)
	    use_reg_mode (call_fusage, reg,
			  TYPE_MODE (TREE_TYPE (args[i].tree_value)));
	  else if (nregs > 0)
	    use_regs (call_fusage, REGNO (reg), nregs);
	}
    }
}
1980*e4b17023SJohn Marino
1981*e4b17023SJohn Marino /* We need to pop PENDING_STACK_ADJUST bytes. But, if the arguments
1982*e4b17023SJohn Marino wouldn't fill up an even multiple of PREFERRED_UNIT_STACK_BOUNDARY
1983*e4b17023SJohn Marino bytes, then we would need to push some additional bytes to pad the
1984*e4b17023SJohn Marino arguments. So, we compute an adjust to the stack pointer for an
1985*e4b17023SJohn Marino amount that will leave the stack under-aligned by UNADJUSTED_ARGS_SIZE
1986*e4b17023SJohn Marino bytes. Then, when the arguments are pushed the stack will be perfectly
1987*e4b17023SJohn Marino aligned. ARGS_SIZE->CONSTANT is set to the number of bytes that should
1988*e4b17023SJohn Marino be popped after the call. Returns the adjustment. */
1989*e4b17023SJohn Marino
1990*e4b17023SJohn Marino static int
combine_pending_stack_adjustment_and_call(int unadjusted_args_size,struct args_size * args_size,unsigned int preferred_unit_stack_boundary)1991*e4b17023SJohn Marino combine_pending_stack_adjustment_and_call (int unadjusted_args_size,
1992*e4b17023SJohn Marino struct args_size *args_size,
1993*e4b17023SJohn Marino unsigned int preferred_unit_stack_boundary)
1994*e4b17023SJohn Marino {
1995*e4b17023SJohn Marino /* The number of bytes to pop so that the stack will be
1996*e4b17023SJohn Marino under-aligned by UNADJUSTED_ARGS_SIZE bytes. */
1997*e4b17023SJohn Marino HOST_WIDE_INT adjustment;
1998*e4b17023SJohn Marino /* The alignment of the stack after the arguments are pushed, if we
1999*e4b17023SJohn Marino just pushed the arguments without adjust the stack here. */
2000*e4b17023SJohn Marino unsigned HOST_WIDE_INT unadjusted_alignment;
2001*e4b17023SJohn Marino
2002*e4b17023SJohn Marino unadjusted_alignment
2003*e4b17023SJohn Marino = ((stack_pointer_delta + unadjusted_args_size)
2004*e4b17023SJohn Marino % preferred_unit_stack_boundary);
2005*e4b17023SJohn Marino
2006*e4b17023SJohn Marino /* We want to get rid of as many of the PENDING_STACK_ADJUST bytes
2007*e4b17023SJohn Marino as possible -- leaving just enough left to cancel out the
2008*e4b17023SJohn Marino UNADJUSTED_ALIGNMENT. In other words, we want to ensure that the
2009*e4b17023SJohn Marino PENDING_STACK_ADJUST is non-negative, and congruent to
2010*e4b17023SJohn Marino -UNADJUSTED_ALIGNMENT modulo the PREFERRED_UNIT_STACK_BOUNDARY. */
2011*e4b17023SJohn Marino
2012*e4b17023SJohn Marino /* Begin by trying to pop all the bytes. */
2013*e4b17023SJohn Marino unadjusted_alignment
2014*e4b17023SJohn Marino = (unadjusted_alignment
2015*e4b17023SJohn Marino - (pending_stack_adjust % preferred_unit_stack_boundary));
2016*e4b17023SJohn Marino adjustment = pending_stack_adjust;
2017*e4b17023SJohn Marino /* Push enough additional bytes that the stack will be aligned
2018*e4b17023SJohn Marino after the arguments are pushed. */
2019*e4b17023SJohn Marino if (preferred_unit_stack_boundary > 1)
2020*e4b17023SJohn Marino {
2021*e4b17023SJohn Marino if (unadjusted_alignment > 0)
2022*e4b17023SJohn Marino adjustment -= preferred_unit_stack_boundary - unadjusted_alignment;
2023*e4b17023SJohn Marino else
2024*e4b17023SJohn Marino adjustment += unadjusted_alignment;
2025*e4b17023SJohn Marino }
2026*e4b17023SJohn Marino
2027*e4b17023SJohn Marino /* Now, sets ARGS_SIZE->CONSTANT so that we pop the right number of
2028*e4b17023SJohn Marino bytes after the call. The right number is the entire
2029*e4b17023SJohn Marino PENDING_STACK_ADJUST less our ADJUSTMENT plus the amount required
2030*e4b17023SJohn Marino by the arguments in the first place. */
2031*e4b17023SJohn Marino args_size->constant
2032*e4b17023SJohn Marino = pending_stack_adjust - adjustment + unadjusted_args_size;
2033*e4b17023SJohn Marino
2034*e4b17023SJohn Marino return adjustment;
2035*e4b17023SJohn Marino }
2036*e4b17023SJohn Marino
2037*e4b17023SJohn Marino /* Scan X expression if it does not dereference any argument slots
2038*e4b17023SJohn Marino we already clobbered by tail call arguments (as noted in stored_args_map
2039*e4b17023SJohn Marino bitmap).
2040*e4b17023SJohn Marino Return nonzero if X expression dereferences such argument slots,
2041*e4b17023SJohn Marino zero otherwise. */
2042*e4b17023SJohn Marino
2043*e4b17023SJohn Marino static int
check_sibcall_argument_overlap_1(rtx x)2044*e4b17023SJohn Marino check_sibcall_argument_overlap_1 (rtx x)
2045*e4b17023SJohn Marino {
2046*e4b17023SJohn Marino RTX_CODE code;
2047*e4b17023SJohn Marino int i, j;
2048*e4b17023SJohn Marino const char *fmt;
2049*e4b17023SJohn Marino
2050*e4b17023SJohn Marino if (x == NULL_RTX)
2051*e4b17023SJohn Marino return 0;
2052*e4b17023SJohn Marino
2053*e4b17023SJohn Marino code = GET_CODE (x);
2054*e4b17023SJohn Marino
2055*e4b17023SJohn Marino /* We need not check the operands of the CALL expression itself. */
2056*e4b17023SJohn Marino if (code == CALL)
2057*e4b17023SJohn Marino return 0;
2058*e4b17023SJohn Marino
2059*e4b17023SJohn Marino if (code == MEM)
2060*e4b17023SJohn Marino return mem_overlaps_already_clobbered_arg_p (XEXP (x, 0),
2061*e4b17023SJohn Marino GET_MODE_SIZE (GET_MODE (x)));
2062*e4b17023SJohn Marino
2063*e4b17023SJohn Marino /* Scan all subexpressions. */
2064*e4b17023SJohn Marino fmt = GET_RTX_FORMAT (code);
2065*e4b17023SJohn Marino for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
2066*e4b17023SJohn Marino {
2067*e4b17023SJohn Marino if (*fmt == 'e')
2068*e4b17023SJohn Marino {
2069*e4b17023SJohn Marino if (check_sibcall_argument_overlap_1 (XEXP (x, i)))
2070*e4b17023SJohn Marino return 1;
2071*e4b17023SJohn Marino }
2072*e4b17023SJohn Marino else if (*fmt == 'E')
2073*e4b17023SJohn Marino {
2074*e4b17023SJohn Marino for (j = 0; j < XVECLEN (x, i); j++)
2075*e4b17023SJohn Marino if (check_sibcall_argument_overlap_1 (XVECEXP (x, i, j)))
2076*e4b17023SJohn Marino return 1;
2077*e4b17023SJohn Marino }
2078*e4b17023SJohn Marino }
2079*e4b17023SJohn Marino return 0;
2080*e4b17023SJohn Marino }
2081*e4b17023SJohn Marino
2082*e4b17023SJohn Marino /* Scan sequence after INSN if it does not dereference any argument slots
2083*e4b17023SJohn Marino we already clobbered by tail call arguments (as noted in stored_args_map
2084*e4b17023SJohn Marino bitmap). If MARK_STORED_ARGS_MAP, add stack slots for ARG to
2085*e4b17023SJohn Marino stored_args_map bitmap afterwards (when ARG is a register MARK_STORED_ARGS_MAP
2086*e4b17023SJohn Marino should be 0). Return nonzero if sequence after INSN dereferences such argument
2087*e4b17023SJohn Marino slots, zero otherwise. */
2088*e4b17023SJohn Marino
2089*e4b17023SJohn Marino static int
check_sibcall_argument_overlap(rtx insn,struct arg_data * arg,int mark_stored_args_map)2090*e4b17023SJohn Marino check_sibcall_argument_overlap (rtx insn, struct arg_data *arg, int mark_stored_args_map)
2091*e4b17023SJohn Marino {
2092*e4b17023SJohn Marino int low, high;
2093*e4b17023SJohn Marino
2094*e4b17023SJohn Marino if (insn == NULL_RTX)
2095*e4b17023SJohn Marino insn = get_insns ();
2096*e4b17023SJohn Marino else
2097*e4b17023SJohn Marino insn = NEXT_INSN (insn);
2098*e4b17023SJohn Marino
2099*e4b17023SJohn Marino for (; insn; insn = NEXT_INSN (insn))
2100*e4b17023SJohn Marino if (INSN_P (insn)
2101*e4b17023SJohn Marino && check_sibcall_argument_overlap_1 (PATTERN (insn)))
2102*e4b17023SJohn Marino break;
2103*e4b17023SJohn Marino
2104*e4b17023SJohn Marino if (mark_stored_args_map)
2105*e4b17023SJohn Marino {
2106*e4b17023SJohn Marino #ifdef ARGS_GROW_DOWNWARD
2107*e4b17023SJohn Marino low = -arg->locate.slot_offset.constant - arg->locate.size.constant;
2108*e4b17023SJohn Marino #else
2109*e4b17023SJohn Marino low = arg->locate.slot_offset.constant;
2110*e4b17023SJohn Marino #endif
2111*e4b17023SJohn Marino
2112*e4b17023SJohn Marino for (high = low + arg->locate.size.constant; low < high; low++)
2113*e4b17023SJohn Marino SET_BIT (stored_args_map, low);
2114*e4b17023SJohn Marino }
2115*e4b17023SJohn Marino return insn != NULL_RTX;
2116*e4b17023SJohn Marino }
2117*e4b17023SJohn Marino
2118*e4b17023SJohn Marino /* Given that a function returns a value of mode MODE at the most
2119*e4b17023SJohn Marino significant end of hard register VALUE, shift VALUE left or right
2120*e4b17023SJohn Marino as specified by LEFT_P. Return true if some action was needed. */
2121*e4b17023SJohn Marino
2122*e4b17023SJohn Marino bool
shift_return_value(enum machine_mode mode,bool left_p,rtx value)2123*e4b17023SJohn Marino shift_return_value (enum machine_mode mode, bool left_p, rtx value)
2124*e4b17023SJohn Marino {
2125*e4b17023SJohn Marino HOST_WIDE_INT shift;
2126*e4b17023SJohn Marino
2127*e4b17023SJohn Marino gcc_assert (REG_P (value) && HARD_REGISTER_P (value));
2128*e4b17023SJohn Marino shift = GET_MODE_BITSIZE (GET_MODE (value)) - GET_MODE_BITSIZE (mode);
2129*e4b17023SJohn Marino if (shift == 0)
2130*e4b17023SJohn Marino return false;
2131*e4b17023SJohn Marino
2132*e4b17023SJohn Marino /* Use ashr rather than lshr for right shifts. This is for the benefit
2133*e4b17023SJohn Marino of the MIPS port, which requires SImode values to be sign-extended
2134*e4b17023SJohn Marino when stored in 64-bit registers. */
2135*e4b17023SJohn Marino if (!force_expand_binop (GET_MODE (value), left_p ? ashl_optab : ashr_optab,
2136*e4b17023SJohn Marino value, GEN_INT (shift), value, 1, OPTAB_WIDEN))
2137*e4b17023SJohn Marino gcc_unreachable ();
2138*e4b17023SJohn Marino return true;
2139*e4b17023SJohn Marino }
2140*e4b17023SJohn Marino
2141*e4b17023SJohn Marino /* If X is a likely-spilled register value, copy it to a pseudo
2142*e4b17023SJohn Marino register and return that register. Return X otherwise. */
2143*e4b17023SJohn Marino
2144*e4b17023SJohn Marino static rtx
avoid_likely_spilled_reg(rtx x)2145*e4b17023SJohn Marino avoid_likely_spilled_reg (rtx x)
2146*e4b17023SJohn Marino {
2147*e4b17023SJohn Marino rtx new_rtx;
2148*e4b17023SJohn Marino
2149*e4b17023SJohn Marino if (REG_P (x)
2150*e4b17023SJohn Marino && HARD_REGISTER_P (x)
2151*e4b17023SJohn Marino && targetm.class_likely_spilled_p (REGNO_REG_CLASS (REGNO (x))))
2152*e4b17023SJohn Marino {
2153*e4b17023SJohn Marino /* Make sure that we generate a REG rather than a CONCAT.
2154*e4b17023SJohn Marino Moves into CONCATs can need nontrivial instructions,
2155*e4b17023SJohn Marino and the whole point of this function is to avoid
2156*e4b17023SJohn Marino using the hard register directly in such a situation. */
2157*e4b17023SJohn Marino generating_concat_p = 0;
2158*e4b17023SJohn Marino new_rtx = gen_reg_rtx (GET_MODE (x));
2159*e4b17023SJohn Marino generating_concat_p = 1;
2160*e4b17023SJohn Marino emit_move_insn (new_rtx, x);
2161*e4b17023SJohn Marino return new_rtx;
2162*e4b17023SJohn Marino }
2163*e4b17023SJohn Marino return x;
2164*e4b17023SJohn Marino }
2165*e4b17023SJohn Marino
2166*e4b17023SJohn Marino /* Generate all the code for a CALL_EXPR exp
2167*e4b17023SJohn Marino and return an rtx for its value.
2168*e4b17023SJohn Marino Store the value in TARGET (specified as an rtx) if convenient.
2169*e4b17023SJohn Marino If the value is stored in TARGET then TARGET is returned.
2170*e4b17023SJohn Marino If IGNORE is nonzero, then we ignore the value of the function call. */
2171*e4b17023SJohn Marino
2172*e4b17023SJohn Marino rtx
expand_call(tree exp,rtx target,int ignore)2173*e4b17023SJohn Marino expand_call (tree exp, rtx target, int ignore)
2174*e4b17023SJohn Marino {
2175*e4b17023SJohn Marino /* Nonzero if we are currently expanding a call. */
2176*e4b17023SJohn Marino static int currently_expanding_call = 0;
2177*e4b17023SJohn Marino
2178*e4b17023SJohn Marino /* RTX for the function to be called. */
2179*e4b17023SJohn Marino rtx funexp;
2180*e4b17023SJohn Marino /* Sequence of insns to perform a normal "call". */
2181*e4b17023SJohn Marino rtx normal_call_insns = NULL_RTX;
2182*e4b17023SJohn Marino /* Sequence of insns to perform a tail "call". */
2183*e4b17023SJohn Marino rtx tail_call_insns = NULL_RTX;
2184*e4b17023SJohn Marino /* Data type of the function. */
2185*e4b17023SJohn Marino tree funtype;
2186*e4b17023SJohn Marino tree type_arg_types;
2187*e4b17023SJohn Marino tree rettype;
2188*e4b17023SJohn Marino /* Declaration of the function being called,
2189*e4b17023SJohn Marino or 0 if the function is computed (not known by name). */
2190*e4b17023SJohn Marino tree fndecl = 0;
2191*e4b17023SJohn Marino /* The type of the function being called. */
2192*e4b17023SJohn Marino tree fntype;
2193*e4b17023SJohn Marino bool try_tail_call = CALL_EXPR_TAILCALL (exp);
2194*e4b17023SJohn Marino int pass;
2195*e4b17023SJohn Marino
2196*e4b17023SJohn Marino /* Register in which non-BLKmode value will be returned,
2197*e4b17023SJohn Marino or 0 if no value or if value is BLKmode. */
2198*e4b17023SJohn Marino rtx valreg;
2199*e4b17023SJohn Marino /* Address where we should return a BLKmode value;
2200*e4b17023SJohn Marino 0 if value not BLKmode. */
2201*e4b17023SJohn Marino rtx structure_value_addr = 0;
2202*e4b17023SJohn Marino /* Nonzero if that address is being passed by treating it as
2203*e4b17023SJohn Marino an extra, implicit first parameter. Otherwise,
2204*e4b17023SJohn Marino it is passed by being copied directly into struct_value_rtx. */
2205*e4b17023SJohn Marino int structure_value_addr_parm = 0;
2206*e4b17023SJohn Marino /* Holds the value of implicit argument for the struct value. */
2207*e4b17023SJohn Marino tree structure_value_addr_value = NULL_TREE;
2208*e4b17023SJohn Marino /* Size of aggregate value wanted, or zero if none wanted
2209*e4b17023SJohn Marino or if we are using the non-reentrant PCC calling convention
2210*e4b17023SJohn Marino or expecting the value in registers. */
2211*e4b17023SJohn Marino HOST_WIDE_INT struct_value_size = 0;
2212*e4b17023SJohn Marino /* Nonzero if called function returns an aggregate in memory PCC style,
2213*e4b17023SJohn Marino by returning the address of where to find it. */
2214*e4b17023SJohn Marino int pcc_struct_value = 0;
2215*e4b17023SJohn Marino rtx struct_value = 0;
2216*e4b17023SJohn Marino
2217*e4b17023SJohn Marino /* Number of actual parameters in this call, including struct value addr. */
2218*e4b17023SJohn Marino int num_actuals;
2219*e4b17023SJohn Marino /* Number of named args. Args after this are anonymous ones
2220*e4b17023SJohn Marino and they must all go on the stack. */
2221*e4b17023SJohn Marino int n_named_args;
2222*e4b17023SJohn Marino /* Number of complex actual arguments that need to be split. */
2223*e4b17023SJohn Marino int num_complex_actuals = 0;
2224*e4b17023SJohn Marino
2225*e4b17023SJohn Marino /* Vector of information about each argument.
2226*e4b17023SJohn Marino Arguments are numbered in the order they will be pushed,
2227*e4b17023SJohn Marino not the order they are written. */
2228*e4b17023SJohn Marino struct arg_data *args;
2229*e4b17023SJohn Marino
2230*e4b17023SJohn Marino /* Total size in bytes of all the stack-parms scanned so far. */
2231*e4b17023SJohn Marino struct args_size args_size;
2232*e4b17023SJohn Marino struct args_size adjusted_args_size;
2233*e4b17023SJohn Marino /* Size of arguments before any adjustments (such as rounding). */
2234*e4b17023SJohn Marino int unadjusted_args_size;
2235*e4b17023SJohn Marino /* Data on reg parms scanned so far. */
2236*e4b17023SJohn Marino CUMULATIVE_ARGS args_so_far_v;
2237*e4b17023SJohn Marino cumulative_args_t args_so_far;
2238*e4b17023SJohn Marino /* Nonzero if a reg parm has been scanned. */
2239*e4b17023SJohn Marino int reg_parm_seen;
2240*e4b17023SJohn Marino /* Nonzero if this is an indirect function call. */
2241*e4b17023SJohn Marino
2242*e4b17023SJohn Marino /* Nonzero if we must avoid push-insns in the args for this call.
2243*e4b17023SJohn Marino If stack space is allocated for register parameters, but not by the
2244*e4b17023SJohn Marino caller, then it is preallocated in the fixed part of the stack frame.
2245*e4b17023SJohn Marino So the entire argument block must then be preallocated (i.e., we
2246*e4b17023SJohn Marino ignore PUSH_ROUNDING in that case). */
2247*e4b17023SJohn Marino
2248*e4b17023SJohn Marino int must_preallocate = !PUSH_ARGS;
2249*e4b17023SJohn Marino
2250*e4b17023SJohn Marino /* Size of the stack reserved for parameter registers. */
2251*e4b17023SJohn Marino int reg_parm_stack_space = 0;
2252*e4b17023SJohn Marino
2253*e4b17023SJohn Marino /* Address of space preallocated for stack parms
2254*e4b17023SJohn Marino (on machines that lack push insns), or 0 if space not preallocated. */
2255*e4b17023SJohn Marino rtx argblock = 0;
2256*e4b17023SJohn Marino
2257*e4b17023SJohn Marino /* Mask of ECF_ flags. */
2258*e4b17023SJohn Marino int flags = 0;
2259*e4b17023SJohn Marino #ifdef REG_PARM_STACK_SPACE
2260*e4b17023SJohn Marino /* Define the boundary of the register parm stack space that needs to be
2261*e4b17023SJohn Marino saved, if any. */
2262*e4b17023SJohn Marino int low_to_save, high_to_save;
2263*e4b17023SJohn Marino rtx save_area = 0; /* Place that it is saved */
2264*e4b17023SJohn Marino #endif
2265*e4b17023SJohn Marino
2266*e4b17023SJohn Marino int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
2267*e4b17023SJohn Marino char *initial_stack_usage_map = stack_usage_map;
2268*e4b17023SJohn Marino char *stack_usage_map_buf = NULL;
2269*e4b17023SJohn Marino
2270*e4b17023SJohn Marino int old_stack_allocated;
2271*e4b17023SJohn Marino
2272*e4b17023SJohn Marino /* State variables to track stack modifications. */
2273*e4b17023SJohn Marino rtx old_stack_level = 0;
2274*e4b17023SJohn Marino int old_stack_arg_under_construction = 0;
2275*e4b17023SJohn Marino int old_pending_adj = 0;
2276*e4b17023SJohn Marino int old_inhibit_defer_pop = inhibit_defer_pop;
2277*e4b17023SJohn Marino
2278*e4b17023SJohn Marino /* Some stack pointer alterations we make are performed via
2279*e4b17023SJohn Marino allocate_dynamic_stack_space. This modifies the stack_pointer_delta,
2280*e4b17023SJohn Marino which we then also need to save/restore along the way. */
2281*e4b17023SJohn Marino int old_stack_pointer_delta = 0;
2282*e4b17023SJohn Marino
2283*e4b17023SJohn Marino rtx call_fusage;
2284*e4b17023SJohn Marino tree addr = CALL_EXPR_FN (exp);
2285*e4b17023SJohn Marino int i;
2286*e4b17023SJohn Marino /* The alignment of the stack, in bits. */
2287*e4b17023SJohn Marino unsigned HOST_WIDE_INT preferred_stack_boundary;
2288*e4b17023SJohn Marino /* The alignment of the stack, in bytes. */
2289*e4b17023SJohn Marino unsigned HOST_WIDE_INT preferred_unit_stack_boundary;
2290*e4b17023SJohn Marino /* The static chain value to use for this call. */
2291*e4b17023SJohn Marino rtx static_chain_value;
2292*e4b17023SJohn Marino /* See if this is a "nothrow" function call. */
2293*e4b17023SJohn Marino if (TREE_NOTHROW (exp))
2294*e4b17023SJohn Marino flags |= ECF_NOTHROW;
2295*e4b17023SJohn Marino
2296*e4b17023SJohn Marino /* See if we can find a DECL-node for the actual function, and get the
2297*e4b17023SJohn Marino function attributes (flags) from the function decl or type node. */
2298*e4b17023SJohn Marino fndecl = get_callee_fndecl (exp);
2299*e4b17023SJohn Marino if (fndecl)
2300*e4b17023SJohn Marino {
2301*e4b17023SJohn Marino fntype = TREE_TYPE (fndecl);
2302*e4b17023SJohn Marino flags |= flags_from_decl_or_type (fndecl);
2303*e4b17023SJohn Marino }
2304*e4b17023SJohn Marino else
2305*e4b17023SJohn Marino {
2306*e4b17023SJohn Marino fntype = TREE_TYPE (TREE_TYPE (addr));
2307*e4b17023SJohn Marino flags |= flags_from_decl_or_type (fntype);
2308*e4b17023SJohn Marino }
2309*e4b17023SJohn Marino rettype = TREE_TYPE (exp);
2310*e4b17023SJohn Marino
2311*e4b17023SJohn Marino struct_value = targetm.calls.struct_value_rtx (fntype, 0);
2312*e4b17023SJohn Marino
2313*e4b17023SJohn Marino /* Warn if this value is an aggregate type,
2314*e4b17023SJohn Marino regardless of which calling convention we are using for it. */
2315*e4b17023SJohn Marino if (AGGREGATE_TYPE_P (rettype))
2316*e4b17023SJohn Marino warning (OPT_Waggregate_return, "function call has aggregate value");
2317*e4b17023SJohn Marino
2318*e4b17023SJohn Marino /* If the result of a non looping pure or const function call is
2319*e4b17023SJohn Marino ignored (or void), and none of its arguments are volatile, we can
2320*e4b17023SJohn Marino avoid expanding the call and just evaluate the arguments for
2321*e4b17023SJohn Marino side-effects. */
2322*e4b17023SJohn Marino if ((flags & (ECF_CONST | ECF_PURE))
2323*e4b17023SJohn Marino && (!(flags & ECF_LOOPING_CONST_OR_PURE))
2324*e4b17023SJohn Marino && (ignore || target == const0_rtx
2325*e4b17023SJohn Marino || TYPE_MODE (rettype) == VOIDmode))
2326*e4b17023SJohn Marino {
2327*e4b17023SJohn Marino bool volatilep = false;
2328*e4b17023SJohn Marino tree arg;
2329*e4b17023SJohn Marino call_expr_arg_iterator iter;
2330*e4b17023SJohn Marino
2331*e4b17023SJohn Marino FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
2332*e4b17023SJohn Marino if (TREE_THIS_VOLATILE (arg))
2333*e4b17023SJohn Marino {
2334*e4b17023SJohn Marino volatilep = true;
2335*e4b17023SJohn Marino break;
2336*e4b17023SJohn Marino }
2337*e4b17023SJohn Marino
2338*e4b17023SJohn Marino if (! volatilep)
2339*e4b17023SJohn Marino {
2340*e4b17023SJohn Marino FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
2341*e4b17023SJohn Marino expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
2342*e4b17023SJohn Marino return const0_rtx;
2343*e4b17023SJohn Marino }
2344*e4b17023SJohn Marino }
2345*e4b17023SJohn Marino
2346*e4b17023SJohn Marino #ifdef REG_PARM_STACK_SPACE
2347*e4b17023SJohn Marino reg_parm_stack_space = REG_PARM_STACK_SPACE (!fndecl ? fntype : fndecl);
2348*e4b17023SJohn Marino #endif
2349*e4b17023SJohn Marino
2350*e4b17023SJohn Marino if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl)))
2351*e4b17023SJohn Marino && reg_parm_stack_space > 0 && PUSH_ARGS)
2352*e4b17023SJohn Marino must_preallocate = 1;
2353*e4b17023SJohn Marino
2354*e4b17023SJohn Marino /* Set up a place to return a structure. */
2355*e4b17023SJohn Marino
2356*e4b17023SJohn Marino /* Cater to broken compilers. */
2357*e4b17023SJohn Marino if (aggregate_value_p (exp, fntype))
2358*e4b17023SJohn Marino {
2359*e4b17023SJohn Marino /* This call returns a big structure. */
2360*e4b17023SJohn Marino flags &= ~(ECF_CONST | ECF_PURE | ECF_LOOPING_CONST_OR_PURE);
2361*e4b17023SJohn Marino
2362*e4b17023SJohn Marino #ifdef PCC_STATIC_STRUCT_RETURN
2363*e4b17023SJohn Marino {
2364*e4b17023SJohn Marino pcc_struct_value = 1;
2365*e4b17023SJohn Marino }
2366*e4b17023SJohn Marino #else /* not PCC_STATIC_STRUCT_RETURN */
2367*e4b17023SJohn Marino {
2368*e4b17023SJohn Marino struct_value_size = int_size_in_bytes (rettype);
2369*e4b17023SJohn Marino
2370*e4b17023SJohn Marino if (target && MEM_P (target) && CALL_EXPR_RETURN_SLOT_OPT (exp))
2371*e4b17023SJohn Marino structure_value_addr = XEXP (target, 0);
2372*e4b17023SJohn Marino else
2373*e4b17023SJohn Marino {
2374*e4b17023SJohn Marino /* For variable-sized objects, we must be called with a target
2375*e4b17023SJohn Marino specified. If we were to allocate space on the stack here,
2376*e4b17023SJohn Marino we would have no way of knowing when to free it. */
2377*e4b17023SJohn Marino rtx d = assign_temp (rettype, 0, 1, 1);
2378*e4b17023SJohn Marino
2379*e4b17023SJohn Marino mark_temp_addr_taken (d);
2380*e4b17023SJohn Marino structure_value_addr = XEXP (d, 0);
2381*e4b17023SJohn Marino target = 0;
2382*e4b17023SJohn Marino }
2383*e4b17023SJohn Marino }
2384*e4b17023SJohn Marino #endif /* not PCC_STATIC_STRUCT_RETURN */
2385*e4b17023SJohn Marino }
2386*e4b17023SJohn Marino
2387*e4b17023SJohn Marino /* Figure out the amount to which the stack should be aligned. */
2388*e4b17023SJohn Marino preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
2389*e4b17023SJohn Marino if (fndecl)
2390*e4b17023SJohn Marino {
2391*e4b17023SJohn Marino struct cgraph_rtl_info *i = cgraph_rtl_info (fndecl);
2392*e4b17023SJohn Marino /* Without automatic stack alignment, we can't increase preferred
2393*e4b17023SJohn Marino stack boundary. With automatic stack alignment, it is
2394*e4b17023SJohn Marino unnecessary since unless we can guarantee that all callers will
2395*e4b17023SJohn Marino align the outgoing stack properly, callee has to align its
2396*e4b17023SJohn Marino stack anyway. */
2397*e4b17023SJohn Marino if (i
2398*e4b17023SJohn Marino && i->preferred_incoming_stack_boundary
2399*e4b17023SJohn Marino && i->preferred_incoming_stack_boundary < preferred_stack_boundary)
2400*e4b17023SJohn Marino preferred_stack_boundary = i->preferred_incoming_stack_boundary;
2401*e4b17023SJohn Marino }
2402*e4b17023SJohn Marino
2403*e4b17023SJohn Marino /* Operand 0 is a pointer-to-function; get the type of the function. */
2404*e4b17023SJohn Marino funtype = TREE_TYPE (addr);
2405*e4b17023SJohn Marino gcc_assert (POINTER_TYPE_P (funtype));
2406*e4b17023SJohn Marino funtype = TREE_TYPE (funtype);
2407*e4b17023SJohn Marino
2408*e4b17023SJohn Marino /* Count whether there are actual complex arguments that need to be split
2409*e4b17023SJohn Marino into their real and imaginary parts. Munge the type_arg_types
2410*e4b17023SJohn Marino appropriately here as well. */
2411*e4b17023SJohn Marino if (targetm.calls.split_complex_arg)
2412*e4b17023SJohn Marino {
2413*e4b17023SJohn Marino call_expr_arg_iterator iter;
2414*e4b17023SJohn Marino tree arg;
2415*e4b17023SJohn Marino FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
2416*e4b17023SJohn Marino {
2417*e4b17023SJohn Marino tree type = TREE_TYPE (arg);
2418*e4b17023SJohn Marino if (type && TREE_CODE (type) == COMPLEX_TYPE
2419*e4b17023SJohn Marino && targetm.calls.split_complex_arg (type))
2420*e4b17023SJohn Marino num_complex_actuals++;
2421*e4b17023SJohn Marino }
2422*e4b17023SJohn Marino type_arg_types = split_complex_types (TYPE_ARG_TYPES (funtype));
2423*e4b17023SJohn Marino }
2424*e4b17023SJohn Marino else
2425*e4b17023SJohn Marino type_arg_types = TYPE_ARG_TYPES (funtype);
2426*e4b17023SJohn Marino
2427*e4b17023SJohn Marino if (flags & ECF_MAY_BE_ALLOCA)
2428*e4b17023SJohn Marino cfun->calls_alloca = 1;
2429*e4b17023SJohn Marino
2430*e4b17023SJohn Marino /* If struct_value_rtx is 0, it means pass the address
2431*e4b17023SJohn Marino as if it were an extra parameter. Put the argument expression
2432*e4b17023SJohn Marino in structure_value_addr_value. */
2433*e4b17023SJohn Marino if (structure_value_addr && struct_value == 0)
2434*e4b17023SJohn Marino {
2435*e4b17023SJohn Marino /* If structure_value_addr is a REG other than
2436*e4b17023SJohn Marino virtual_outgoing_args_rtx, we can always use it. If it
2437*e4b17023SJohn Marino is not a REG, we must always copy it into a register.
2438*e4b17023SJohn Marino If it is virtual_outgoing_args_rtx, we must copy it to another
2439*e4b17023SJohn Marino register in some cases. */
2440*e4b17023SJohn Marino rtx temp = (!REG_P (structure_value_addr)
2441*e4b17023SJohn Marino || (ACCUMULATE_OUTGOING_ARGS
2442*e4b17023SJohn Marino && stack_arg_under_construction
2443*e4b17023SJohn Marino && structure_value_addr == virtual_outgoing_args_rtx)
2444*e4b17023SJohn Marino ? copy_addr_to_reg (convert_memory_address
2445*e4b17023SJohn Marino (Pmode, structure_value_addr))
2446*e4b17023SJohn Marino : structure_value_addr);
2447*e4b17023SJohn Marino
2448*e4b17023SJohn Marino structure_value_addr_value =
2449*e4b17023SJohn Marino make_tree (build_pointer_type (TREE_TYPE (funtype)), temp);
2450*e4b17023SJohn Marino structure_value_addr_parm = 1;
2451*e4b17023SJohn Marino }
2452*e4b17023SJohn Marino
2453*e4b17023SJohn Marino /* Count the arguments and set NUM_ACTUALS. */
2454*e4b17023SJohn Marino num_actuals =
2455*e4b17023SJohn Marino call_expr_nargs (exp) + num_complex_actuals + structure_value_addr_parm;
2456*e4b17023SJohn Marino
2457*e4b17023SJohn Marino /* Compute number of named args.
2458*e4b17023SJohn Marino First, do a raw count of the args for INIT_CUMULATIVE_ARGS. */
2459*e4b17023SJohn Marino
2460*e4b17023SJohn Marino if (type_arg_types != 0)
2461*e4b17023SJohn Marino n_named_args
2462*e4b17023SJohn Marino = (list_length (type_arg_types)
2463*e4b17023SJohn Marino /* Count the struct value address, if it is passed as a parm. */
2464*e4b17023SJohn Marino + structure_value_addr_parm);
2465*e4b17023SJohn Marino else
2466*e4b17023SJohn Marino /* If we know nothing, treat all args as named. */
2467*e4b17023SJohn Marino n_named_args = num_actuals;
2468*e4b17023SJohn Marino
2469*e4b17023SJohn Marino /* Start updating where the next arg would go.
2470*e4b17023SJohn Marino
2471*e4b17023SJohn Marino On some machines (such as the PA) indirect calls have a different
2472*e4b17023SJohn Marino calling convention than normal calls. The fourth argument in
2473*e4b17023SJohn Marino INIT_CUMULATIVE_ARGS tells the backend if this is an indirect call
2474*e4b17023SJohn Marino or not. */
2475*e4b17023SJohn Marino INIT_CUMULATIVE_ARGS (args_so_far_v, funtype, NULL_RTX, fndecl, n_named_args);
2476*e4b17023SJohn Marino args_so_far = pack_cumulative_args (&args_so_far_v);
2477*e4b17023SJohn Marino
2478*e4b17023SJohn Marino /* Now possibly adjust the number of named args.
2479*e4b17023SJohn Marino Normally, don't include the last named arg if anonymous args follow.
2480*e4b17023SJohn Marino We do include the last named arg if
2481*e4b17023SJohn Marino targetm.calls.strict_argument_naming() returns nonzero.
2482*e4b17023SJohn Marino (If no anonymous args follow, the result of list_length is actually
2483*e4b17023SJohn Marino one too large. This is harmless.)
2484*e4b17023SJohn Marino
2485*e4b17023SJohn Marino If targetm.calls.pretend_outgoing_varargs_named() returns
2486*e4b17023SJohn Marino nonzero, and targetm.calls.strict_argument_naming() returns zero,
2487*e4b17023SJohn Marino this machine will be able to place unnamed args that were passed
2488*e4b17023SJohn Marino in registers into the stack. So treat all args as named. This
2489*e4b17023SJohn Marino allows the insns emitting for a specific argument list to be
2490*e4b17023SJohn Marino independent of the function declaration.
2491*e4b17023SJohn Marino
2492*e4b17023SJohn Marino If targetm.calls.pretend_outgoing_varargs_named() returns zero,
2493*e4b17023SJohn Marino we do not have any reliable way to pass unnamed args in
2494*e4b17023SJohn Marino registers, so we must force them into memory. */
2495*e4b17023SJohn Marino
2496*e4b17023SJohn Marino if (type_arg_types != 0
2497*e4b17023SJohn Marino && targetm.calls.strict_argument_naming (args_so_far))
2498*e4b17023SJohn Marino ;
2499*e4b17023SJohn Marino else if (type_arg_types != 0
2500*e4b17023SJohn Marino && ! targetm.calls.pretend_outgoing_varargs_named (args_so_far))
2501*e4b17023SJohn Marino /* Don't include the last named arg. */
2502*e4b17023SJohn Marino --n_named_args;
2503*e4b17023SJohn Marino else
2504*e4b17023SJohn Marino /* Treat all args as named. */
2505*e4b17023SJohn Marino n_named_args = num_actuals;
2506*e4b17023SJohn Marino
2507*e4b17023SJohn Marino /* Make a vector to hold all the information about each arg. */
2508*e4b17023SJohn Marino args = XALLOCAVEC (struct arg_data, num_actuals);
2509*e4b17023SJohn Marino memset (args, 0, num_actuals * sizeof (struct arg_data));
2510*e4b17023SJohn Marino
2511*e4b17023SJohn Marino /* Build up entries in the ARGS array, compute the size of the
2512*e4b17023SJohn Marino arguments into ARGS_SIZE, etc. */
2513*e4b17023SJohn Marino initialize_argument_information (num_actuals, args, &args_size,
2514*e4b17023SJohn Marino n_named_args, exp,
2515*e4b17023SJohn Marino structure_value_addr_value, fndecl, fntype,
2516*e4b17023SJohn Marino args_so_far, reg_parm_stack_space,
2517*e4b17023SJohn Marino &old_stack_level, &old_pending_adj,
2518*e4b17023SJohn Marino &must_preallocate, &flags,
2519*e4b17023SJohn Marino &try_tail_call, CALL_FROM_THUNK_P (exp));
2520*e4b17023SJohn Marino
2521*e4b17023SJohn Marino if (args_size.var)
2522*e4b17023SJohn Marino must_preallocate = 1;
2523*e4b17023SJohn Marino
2524*e4b17023SJohn Marino /* Now make final decision about preallocating stack space. */
2525*e4b17023SJohn Marino must_preallocate = finalize_must_preallocate (must_preallocate,
2526*e4b17023SJohn Marino num_actuals, args,
2527*e4b17023SJohn Marino &args_size);
2528*e4b17023SJohn Marino
2529*e4b17023SJohn Marino /* If the structure value address will reference the stack pointer, we
2530*e4b17023SJohn Marino must stabilize it. We don't need to do this if we know that we are
2531*e4b17023SJohn Marino not going to adjust the stack pointer in processing this call. */
2532*e4b17023SJohn Marino
2533*e4b17023SJohn Marino if (structure_value_addr
2534*e4b17023SJohn Marino && (reg_mentioned_p (virtual_stack_dynamic_rtx, structure_value_addr)
2535*e4b17023SJohn Marino || reg_mentioned_p (virtual_outgoing_args_rtx,
2536*e4b17023SJohn Marino structure_value_addr))
2537*e4b17023SJohn Marino && (args_size.var
2538*e4b17023SJohn Marino || (!ACCUMULATE_OUTGOING_ARGS && args_size.constant)))
2539*e4b17023SJohn Marino structure_value_addr = copy_to_reg (structure_value_addr);
2540*e4b17023SJohn Marino
2541*e4b17023SJohn Marino /* Tail calls can make things harder to debug, and we've traditionally
2542*e4b17023SJohn Marino pushed these optimizations into -O2. Don't try if we're already
2543*e4b17023SJohn Marino expanding a call, as that means we're an argument. Don't try if
2544*e4b17023SJohn Marino there's cleanups, as we know there's code to follow the call. */
2545*e4b17023SJohn Marino
2546*e4b17023SJohn Marino if (currently_expanding_call++ != 0
2547*e4b17023SJohn Marino || !flag_optimize_sibling_calls
2548*e4b17023SJohn Marino || args_size.var
2549*e4b17023SJohn Marino || dbg_cnt (tail_call) == false)
2550*e4b17023SJohn Marino try_tail_call = 0;
2551*e4b17023SJohn Marino
2552*e4b17023SJohn Marino /* Remaining reasons for the tail call optimization to fail. */
2553*e4b17023SJohn Marino if (
2554*e4b17023SJohn Marino #ifdef HAVE_sibcall_epilogue
2555*e4b17023SJohn Marino !HAVE_sibcall_epilogue
2556*e4b17023SJohn Marino #else
2557*e4b17023SJohn Marino 1
2558*e4b17023SJohn Marino #endif
2559*e4b17023SJohn Marino || !try_tail_call
2560*e4b17023SJohn Marino /* Doing sibling call optimization needs some work, since
2561*e4b17023SJohn Marino structure_value_addr can be allocated on the stack.
2562*e4b17023SJohn Marino It does not seem worth the effort since few optimizable
2563*e4b17023SJohn Marino sibling calls will return a structure. */
2564*e4b17023SJohn Marino || structure_value_addr != NULL_RTX
2565*e4b17023SJohn Marino #ifdef REG_PARM_STACK_SPACE
2566*e4b17023SJohn Marino /* If outgoing reg parm stack space changes, we can not do sibcall. */
2567*e4b17023SJohn Marino || (OUTGOING_REG_PARM_STACK_SPACE (funtype)
2568*e4b17023SJohn Marino != OUTGOING_REG_PARM_STACK_SPACE (TREE_TYPE (current_function_decl)))
2569*e4b17023SJohn Marino || (reg_parm_stack_space != REG_PARM_STACK_SPACE (fndecl))
2570*e4b17023SJohn Marino #endif
2571*e4b17023SJohn Marino /* Check whether the target is able to optimize the call
2572*e4b17023SJohn Marino into a sibcall. */
2573*e4b17023SJohn Marino || !targetm.function_ok_for_sibcall (fndecl, exp)
2574*e4b17023SJohn Marino /* Functions that do not return exactly once may not be sibcall
2575*e4b17023SJohn Marino optimized. */
2576*e4b17023SJohn Marino || (flags & (ECF_RETURNS_TWICE | ECF_NORETURN))
2577*e4b17023SJohn Marino || TYPE_VOLATILE (TREE_TYPE (TREE_TYPE (addr)))
2578*e4b17023SJohn Marino /* If the called function is nested in the current one, it might access
2579*e4b17023SJohn Marino some of the caller's arguments, but could clobber them beforehand if
2580*e4b17023SJohn Marino the argument areas are shared. */
2581*e4b17023SJohn Marino || (fndecl && decl_function_context (fndecl) == current_function_decl)
2582*e4b17023SJohn Marino /* If this function requires more stack slots than the current
2583*e4b17023SJohn Marino function, we cannot change it into a sibling call.
2584*e4b17023SJohn Marino crtl->args.pretend_args_size is not part of the
2585*e4b17023SJohn Marino stack allocated by our caller. */
2586*e4b17023SJohn Marino || args_size.constant > (crtl->args.size
2587*e4b17023SJohn Marino - crtl->args.pretend_args_size)
2588*e4b17023SJohn Marino /* If the callee pops its own arguments, then it must pop exactly
2589*e4b17023SJohn Marino the same number of arguments as the current function. */
2590*e4b17023SJohn Marino || (targetm.calls.return_pops_args (fndecl, funtype, args_size.constant)
2591*e4b17023SJohn Marino != targetm.calls.return_pops_args (current_function_decl,
2592*e4b17023SJohn Marino TREE_TYPE (current_function_decl),
2593*e4b17023SJohn Marino crtl->args.size))
2594*e4b17023SJohn Marino || !lang_hooks.decls.ok_for_sibcall (fndecl))
2595*e4b17023SJohn Marino try_tail_call = 0;
2596*e4b17023SJohn Marino
2597*e4b17023SJohn Marino /* Check if caller and callee disagree in promotion of function
2598*e4b17023SJohn Marino return value. */
2599*e4b17023SJohn Marino if (try_tail_call)
2600*e4b17023SJohn Marino {
2601*e4b17023SJohn Marino enum machine_mode caller_mode, caller_promoted_mode;
2602*e4b17023SJohn Marino enum machine_mode callee_mode, callee_promoted_mode;
2603*e4b17023SJohn Marino int caller_unsignedp, callee_unsignedp;
2604*e4b17023SJohn Marino tree caller_res = DECL_RESULT (current_function_decl);
2605*e4b17023SJohn Marino
2606*e4b17023SJohn Marino caller_unsignedp = TYPE_UNSIGNED (TREE_TYPE (caller_res));
2607*e4b17023SJohn Marino caller_mode = DECL_MODE (caller_res);
2608*e4b17023SJohn Marino callee_unsignedp = TYPE_UNSIGNED (TREE_TYPE (funtype));
2609*e4b17023SJohn Marino callee_mode = TYPE_MODE (TREE_TYPE (funtype));
2610*e4b17023SJohn Marino caller_promoted_mode
2611*e4b17023SJohn Marino = promote_function_mode (TREE_TYPE (caller_res), caller_mode,
2612*e4b17023SJohn Marino &caller_unsignedp,
2613*e4b17023SJohn Marino TREE_TYPE (current_function_decl), 1);
2614*e4b17023SJohn Marino callee_promoted_mode
2615*e4b17023SJohn Marino = promote_function_mode (TREE_TYPE (funtype), callee_mode,
2616*e4b17023SJohn Marino &callee_unsignedp,
2617*e4b17023SJohn Marino funtype, 1);
2618*e4b17023SJohn Marino if (caller_mode != VOIDmode
2619*e4b17023SJohn Marino && (caller_promoted_mode != callee_promoted_mode
2620*e4b17023SJohn Marino || ((caller_mode != caller_promoted_mode
2621*e4b17023SJohn Marino || callee_mode != callee_promoted_mode)
2622*e4b17023SJohn Marino && (caller_unsignedp != callee_unsignedp
2623*e4b17023SJohn Marino || GET_MODE_BITSIZE (caller_mode)
2624*e4b17023SJohn Marino < GET_MODE_BITSIZE (callee_mode)))))
2625*e4b17023SJohn Marino try_tail_call = 0;
2626*e4b17023SJohn Marino }
2627*e4b17023SJohn Marino
2628*e4b17023SJohn Marino /* Ensure current function's preferred stack boundary is at least
2629*e4b17023SJohn Marino what we need. Stack alignment may also increase preferred stack
2630*e4b17023SJohn Marino boundary. */
2631*e4b17023SJohn Marino if (crtl->preferred_stack_boundary < preferred_stack_boundary)
2632*e4b17023SJohn Marino crtl->preferred_stack_boundary = preferred_stack_boundary;
2633*e4b17023SJohn Marino else
2634*e4b17023SJohn Marino preferred_stack_boundary = crtl->preferred_stack_boundary;
2635*e4b17023SJohn Marino
2636*e4b17023SJohn Marino preferred_unit_stack_boundary = preferred_stack_boundary / BITS_PER_UNIT;
2637*e4b17023SJohn Marino
2638*e4b17023SJohn Marino /* We want to make two insn chains; one for a sibling call, the other
2639*e4b17023SJohn Marino for a normal call. We will select one of the two chains after
2640*e4b17023SJohn Marino initial RTL generation is complete. */
2641*e4b17023SJohn Marino for (pass = try_tail_call ? 0 : 1; pass < 2; pass++)
2642*e4b17023SJohn Marino {
2643*e4b17023SJohn Marino int sibcall_failure = 0;
2644*e4b17023SJohn Marino /* We want to emit any pending stack adjustments before the tail
2645*e4b17023SJohn Marino recursion "call". That way we know any adjustment after the tail
2646*e4b17023SJohn Marino recursion call can be ignored if we indeed use the tail
2647*e4b17023SJohn Marino call expansion. */
2648*e4b17023SJohn Marino int save_pending_stack_adjust = 0;
2649*e4b17023SJohn Marino int save_stack_pointer_delta = 0;
2650*e4b17023SJohn Marino rtx insns;
2651*e4b17023SJohn Marino rtx before_call, next_arg_reg, after_args;
2652*e4b17023SJohn Marino
2653*e4b17023SJohn Marino if (pass == 0)
2654*e4b17023SJohn Marino {
2655*e4b17023SJohn Marino /* State variables we need to save and restore between
2656*e4b17023SJohn Marino iterations. */
2657*e4b17023SJohn Marino save_pending_stack_adjust = pending_stack_adjust;
2658*e4b17023SJohn Marino save_stack_pointer_delta = stack_pointer_delta;
2659*e4b17023SJohn Marino }
2660*e4b17023SJohn Marino if (pass)
2661*e4b17023SJohn Marino flags &= ~ECF_SIBCALL;
2662*e4b17023SJohn Marino else
2663*e4b17023SJohn Marino flags |= ECF_SIBCALL;
2664*e4b17023SJohn Marino
2665*e4b17023SJohn Marino /* Other state variables that we must reinitialize each time
2666*e4b17023SJohn Marino through the loop (that are not initialized by the loop itself). */
2667*e4b17023SJohn Marino argblock = 0;
2668*e4b17023SJohn Marino call_fusage = 0;
2669*e4b17023SJohn Marino
2670*e4b17023SJohn Marino /* Start a new sequence for the normal call case.
2671*e4b17023SJohn Marino
2672*e4b17023SJohn Marino From this point on, if the sibling call fails, we want to set
2673*e4b17023SJohn Marino sibcall_failure instead of continuing the loop. */
2674*e4b17023SJohn Marino start_sequence ();
2675*e4b17023SJohn Marino
2676*e4b17023SJohn Marino /* Don't let pending stack adjusts add up to too much.
2677*e4b17023SJohn Marino Also, do all pending adjustments now if there is any chance
2678*e4b17023SJohn Marino this might be a call to alloca or if we are expanding a sibling
2679*e4b17023SJohn Marino call sequence.
2680*e4b17023SJohn Marino Also do the adjustments before a throwing call, otherwise
2681*e4b17023SJohn Marino exception handling can fail; PR 19225. */
2682*e4b17023SJohn Marino if (pending_stack_adjust >= 32
2683*e4b17023SJohn Marino || (pending_stack_adjust > 0
2684*e4b17023SJohn Marino && (flags & ECF_MAY_BE_ALLOCA))
2685*e4b17023SJohn Marino || (pending_stack_adjust > 0
2686*e4b17023SJohn Marino && flag_exceptions && !(flags & ECF_NOTHROW))
2687*e4b17023SJohn Marino || pass == 0)
2688*e4b17023SJohn Marino do_pending_stack_adjust ();
2689*e4b17023SJohn Marino
2690*e4b17023SJohn Marino /* Precompute any arguments as needed. */
2691*e4b17023SJohn Marino if (pass)
2692*e4b17023SJohn Marino precompute_arguments (num_actuals, args);
2693*e4b17023SJohn Marino
2694*e4b17023SJohn Marino /* Now we are about to start emitting insns that can be deleted
2695*e4b17023SJohn Marino if a libcall is deleted. */
2696*e4b17023SJohn Marino if (pass && (flags & ECF_MALLOC))
2697*e4b17023SJohn Marino start_sequence ();
2698*e4b17023SJohn Marino
2699*e4b17023SJohn Marino if (pass == 0 && crtl->stack_protect_guard)
2700*e4b17023SJohn Marino stack_protect_epilogue ();
2701*e4b17023SJohn Marino
2702*e4b17023SJohn Marino adjusted_args_size = args_size;
2703*e4b17023SJohn Marino /* Compute the actual size of the argument block required. The variable
2704*e4b17023SJohn Marino and constant sizes must be combined, the size may have to be rounded,
2705*e4b17023SJohn Marino and there may be a minimum required size. When generating a sibcall
2706*e4b17023SJohn Marino pattern, do not round up, since we'll be re-using whatever space our
2707*e4b17023SJohn Marino caller provided. */
2708*e4b17023SJohn Marino unadjusted_args_size
2709*e4b17023SJohn Marino = compute_argument_block_size (reg_parm_stack_space,
2710*e4b17023SJohn Marino &adjusted_args_size,
2711*e4b17023SJohn Marino fndecl, fntype,
2712*e4b17023SJohn Marino (pass == 0 ? 0
2713*e4b17023SJohn Marino : preferred_stack_boundary));
2714*e4b17023SJohn Marino
2715*e4b17023SJohn Marino old_stack_allocated = stack_pointer_delta - pending_stack_adjust;
2716*e4b17023SJohn Marino
2717*e4b17023SJohn Marino /* The argument block when performing a sibling call is the
2718*e4b17023SJohn Marino incoming argument block. */
2719*e4b17023SJohn Marino if (pass == 0)
2720*e4b17023SJohn Marino {
2721*e4b17023SJohn Marino argblock = crtl->args.internal_arg_pointer;
2722*e4b17023SJohn Marino argblock
2723*e4b17023SJohn Marino #ifdef STACK_GROWS_DOWNWARD
2724*e4b17023SJohn Marino = plus_constant (argblock, crtl->args.pretend_args_size);
2725*e4b17023SJohn Marino #else
2726*e4b17023SJohn Marino = plus_constant (argblock, -crtl->args.pretend_args_size);
2727*e4b17023SJohn Marino #endif
2728*e4b17023SJohn Marino stored_args_map = sbitmap_alloc (args_size.constant);
2729*e4b17023SJohn Marino sbitmap_zero (stored_args_map);
2730*e4b17023SJohn Marino }
2731*e4b17023SJohn Marino
2732*e4b17023SJohn Marino /* If we have no actual push instructions, or shouldn't use them,
2733*e4b17023SJohn Marino make space for all args right now. */
2734*e4b17023SJohn Marino else if (adjusted_args_size.var != 0)
2735*e4b17023SJohn Marino {
2736*e4b17023SJohn Marino if (old_stack_level == 0)
2737*e4b17023SJohn Marino {
2738*e4b17023SJohn Marino emit_stack_save (SAVE_BLOCK, &old_stack_level);
2739*e4b17023SJohn Marino old_stack_pointer_delta = stack_pointer_delta;
2740*e4b17023SJohn Marino old_pending_adj = pending_stack_adjust;
2741*e4b17023SJohn Marino pending_stack_adjust = 0;
2742*e4b17023SJohn Marino /* stack_arg_under_construction says whether a stack arg is
2743*e4b17023SJohn Marino being constructed at the old stack level. Pushing the stack
2744*e4b17023SJohn Marino gets a clean outgoing argument block. */
2745*e4b17023SJohn Marino old_stack_arg_under_construction = stack_arg_under_construction;
2746*e4b17023SJohn Marino stack_arg_under_construction = 0;
2747*e4b17023SJohn Marino }
2748*e4b17023SJohn Marino argblock = push_block (ARGS_SIZE_RTX (adjusted_args_size), 0, 0);
2749*e4b17023SJohn Marino if (flag_stack_usage_info)
2750*e4b17023SJohn Marino current_function_has_unbounded_dynamic_stack_size = 1;
2751*e4b17023SJohn Marino }
2752*e4b17023SJohn Marino else
2753*e4b17023SJohn Marino {
2754*e4b17023SJohn Marino /* Note that we must go through the motions of allocating an argument
2755*e4b17023SJohn Marino block even if the size is zero because we may be storing args
2756*e4b17023SJohn Marino in the area reserved for register arguments, which may be part of
2757*e4b17023SJohn Marino the stack frame. */
2758*e4b17023SJohn Marino
2759*e4b17023SJohn Marino int needed = adjusted_args_size.constant;
2760*e4b17023SJohn Marino
2761*e4b17023SJohn Marino /* Store the maximum argument space used. It will be pushed by
2762*e4b17023SJohn Marino the prologue (if ACCUMULATE_OUTGOING_ARGS, or stack overflow
2763*e4b17023SJohn Marino checking). */
2764*e4b17023SJohn Marino
2765*e4b17023SJohn Marino if (needed > crtl->outgoing_args_size)
2766*e4b17023SJohn Marino crtl->outgoing_args_size = needed;
2767*e4b17023SJohn Marino
2768*e4b17023SJohn Marino if (must_preallocate)
2769*e4b17023SJohn Marino {
2770*e4b17023SJohn Marino if (ACCUMULATE_OUTGOING_ARGS)
2771*e4b17023SJohn Marino {
2772*e4b17023SJohn Marino /* Since the stack pointer will never be pushed, it is
2773*e4b17023SJohn Marino possible for the evaluation of a parm to clobber
2774*e4b17023SJohn Marino something we have already written to the stack.
2775*e4b17023SJohn Marino Since most function calls on RISC machines do not use
2776*e4b17023SJohn Marino the stack, this is uncommon, but must work correctly.
2777*e4b17023SJohn Marino
2778*e4b17023SJohn Marino Therefore, we save any area of the stack that was already
2779*e4b17023SJohn Marino written and that we are using. Here we set up to do this
2780*e4b17023SJohn Marino by making a new stack usage map from the old one. The
2781*e4b17023SJohn Marino actual save will be done by store_one_arg.
2782*e4b17023SJohn Marino
2783*e4b17023SJohn Marino Another approach might be to try to reorder the argument
2784*e4b17023SJohn Marino evaluations to avoid this conflicting stack usage. */
2785*e4b17023SJohn Marino
2786*e4b17023SJohn Marino /* Since we will be writing into the entire argument area,
2787*e4b17023SJohn Marino the map must be allocated for its entire size, not just
2788*e4b17023SJohn Marino the part that is the responsibility of the caller. */
2789*e4b17023SJohn Marino if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl))))
2790*e4b17023SJohn Marino needed += reg_parm_stack_space;
2791*e4b17023SJohn Marino
2792*e4b17023SJohn Marino #ifdef ARGS_GROW_DOWNWARD
2793*e4b17023SJohn Marino highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
2794*e4b17023SJohn Marino needed + 1);
2795*e4b17023SJohn Marino #else
2796*e4b17023SJohn Marino highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
2797*e4b17023SJohn Marino needed);
2798*e4b17023SJohn Marino #endif
2799*e4b17023SJohn Marino free (stack_usage_map_buf);
2800*e4b17023SJohn Marino stack_usage_map_buf = XNEWVEC (char, highest_outgoing_arg_in_use);
2801*e4b17023SJohn Marino stack_usage_map = stack_usage_map_buf;
2802*e4b17023SJohn Marino
2803*e4b17023SJohn Marino if (initial_highest_arg_in_use)
2804*e4b17023SJohn Marino memcpy (stack_usage_map, initial_stack_usage_map,
2805*e4b17023SJohn Marino initial_highest_arg_in_use);
2806*e4b17023SJohn Marino
2807*e4b17023SJohn Marino if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
2808*e4b17023SJohn Marino memset (&stack_usage_map[initial_highest_arg_in_use], 0,
2809*e4b17023SJohn Marino (highest_outgoing_arg_in_use
2810*e4b17023SJohn Marino - initial_highest_arg_in_use));
2811*e4b17023SJohn Marino needed = 0;
2812*e4b17023SJohn Marino
2813*e4b17023SJohn Marino /* The address of the outgoing argument list must not be
2814*e4b17023SJohn Marino copied to a register here, because argblock would be left
2815*e4b17023SJohn Marino pointing to the wrong place after the call to
2816*e4b17023SJohn Marino allocate_dynamic_stack_space below. */
2817*e4b17023SJohn Marino
2818*e4b17023SJohn Marino argblock = virtual_outgoing_args_rtx;
2819*e4b17023SJohn Marino }
2820*e4b17023SJohn Marino else
2821*e4b17023SJohn Marino {
2822*e4b17023SJohn Marino if (inhibit_defer_pop == 0)
2823*e4b17023SJohn Marino {
2824*e4b17023SJohn Marino /* Try to reuse some or all of the pending_stack_adjust
2825*e4b17023SJohn Marino to get this space. */
2826*e4b17023SJohn Marino needed
2827*e4b17023SJohn Marino = (combine_pending_stack_adjustment_and_call
2828*e4b17023SJohn Marino (unadjusted_args_size,
2829*e4b17023SJohn Marino &adjusted_args_size,
2830*e4b17023SJohn Marino preferred_unit_stack_boundary));
2831*e4b17023SJohn Marino
2832*e4b17023SJohn Marino /* combine_pending_stack_adjustment_and_call computes
2833*e4b17023SJohn Marino an adjustment before the arguments are allocated.
2834*e4b17023SJohn Marino Account for them and see whether or not the stack
2835*e4b17023SJohn Marino needs to go up or down. */
2836*e4b17023SJohn Marino needed = unadjusted_args_size - needed;
2837*e4b17023SJohn Marino
2838*e4b17023SJohn Marino if (needed < 0)
2839*e4b17023SJohn Marino {
2840*e4b17023SJohn Marino /* We're releasing stack space. */
2841*e4b17023SJohn Marino /* ??? We can avoid any adjustment at all if we're
2842*e4b17023SJohn Marino already aligned. FIXME. */
2843*e4b17023SJohn Marino pending_stack_adjust = -needed;
2844*e4b17023SJohn Marino do_pending_stack_adjust ();
2845*e4b17023SJohn Marino needed = 0;
2846*e4b17023SJohn Marino }
2847*e4b17023SJohn Marino else
2848*e4b17023SJohn Marino /* We need to allocate space. We'll do that in
2849*e4b17023SJohn Marino push_block below. */
2850*e4b17023SJohn Marino pending_stack_adjust = 0;
2851*e4b17023SJohn Marino }
2852*e4b17023SJohn Marino
2853*e4b17023SJohn Marino /* Special case this because overhead of `push_block' in
2854*e4b17023SJohn Marino this case is non-trivial. */
2855*e4b17023SJohn Marino if (needed == 0)
2856*e4b17023SJohn Marino argblock = virtual_outgoing_args_rtx;
2857*e4b17023SJohn Marino else
2858*e4b17023SJohn Marino {
2859*e4b17023SJohn Marino argblock = push_block (GEN_INT (needed), 0, 0);
2860*e4b17023SJohn Marino #ifdef ARGS_GROW_DOWNWARD
2861*e4b17023SJohn Marino argblock = plus_constant (argblock, needed);
2862*e4b17023SJohn Marino #endif
2863*e4b17023SJohn Marino }
2864*e4b17023SJohn Marino
2865*e4b17023SJohn Marino /* We only really need to call `copy_to_reg' in the case
2866*e4b17023SJohn Marino where push insns are going to be used to pass ARGBLOCK
2867*e4b17023SJohn Marino to a function call in ARGS. In that case, the stack
2868*e4b17023SJohn Marino pointer changes value from the allocation point to the
2869*e4b17023SJohn Marino call point, and hence the value of
2870*e4b17023SJohn Marino VIRTUAL_OUTGOING_ARGS_RTX changes as well. But might
2871*e4b17023SJohn Marino as well always do it. */
2872*e4b17023SJohn Marino argblock = copy_to_reg (argblock);
2873*e4b17023SJohn Marino }
2874*e4b17023SJohn Marino }
2875*e4b17023SJohn Marino }
2876*e4b17023SJohn Marino
2877*e4b17023SJohn Marino if (ACCUMULATE_OUTGOING_ARGS)
2878*e4b17023SJohn Marino {
2879*e4b17023SJohn Marino /* The save/restore code in store_one_arg handles all
2880*e4b17023SJohn Marino cases except one: a constructor call (including a C
2881*e4b17023SJohn Marino function returning a BLKmode struct) to initialize
2882*e4b17023SJohn Marino an argument. */
2883*e4b17023SJohn Marino if (stack_arg_under_construction)
2884*e4b17023SJohn Marino {
2885*e4b17023SJohn Marino rtx push_size
2886*e4b17023SJohn Marino = GEN_INT (adjusted_args_size.constant
2887*e4b17023SJohn Marino + (OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype
2888*e4b17023SJohn Marino : TREE_TYPE (fndecl))) ? 0
2889*e4b17023SJohn Marino : reg_parm_stack_space));
2890*e4b17023SJohn Marino if (old_stack_level == 0)
2891*e4b17023SJohn Marino {
2892*e4b17023SJohn Marino emit_stack_save (SAVE_BLOCK, &old_stack_level);
2893*e4b17023SJohn Marino old_stack_pointer_delta = stack_pointer_delta;
2894*e4b17023SJohn Marino old_pending_adj = pending_stack_adjust;
2895*e4b17023SJohn Marino pending_stack_adjust = 0;
2896*e4b17023SJohn Marino /* stack_arg_under_construction says whether a stack
2897*e4b17023SJohn Marino arg is being constructed at the old stack level.
2898*e4b17023SJohn Marino Pushing the stack gets a clean outgoing argument
2899*e4b17023SJohn Marino block. */
2900*e4b17023SJohn Marino old_stack_arg_under_construction
2901*e4b17023SJohn Marino = stack_arg_under_construction;
2902*e4b17023SJohn Marino stack_arg_under_construction = 0;
2903*e4b17023SJohn Marino /* Make a new map for the new argument list. */
2904*e4b17023SJohn Marino free (stack_usage_map_buf);
2905*e4b17023SJohn Marino stack_usage_map_buf = XCNEWVEC (char, highest_outgoing_arg_in_use);
2906*e4b17023SJohn Marino stack_usage_map = stack_usage_map_buf;
2907*e4b17023SJohn Marino highest_outgoing_arg_in_use = 0;
2908*e4b17023SJohn Marino }
2909*e4b17023SJohn Marino /* We can pass TRUE as the 4th argument because we just
2910*e4b17023SJohn Marino saved the stack pointer and will restore it right after
2911*e4b17023SJohn Marino the call. */
2912*e4b17023SJohn Marino allocate_dynamic_stack_space (push_size, 0,
2913*e4b17023SJohn Marino BIGGEST_ALIGNMENT, true);
2914*e4b17023SJohn Marino }
2915*e4b17023SJohn Marino
2916*e4b17023SJohn Marino /* If argument evaluation might modify the stack pointer,
2917*e4b17023SJohn Marino copy the address of the argument list to a register. */
2918*e4b17023SJohn Marino for (i = 0; i < num_actuals; i++)
2919*e4b17023SJohn Marino if (args[i].pass_on_stack)
2920*e4b17023SJohn Marino {
2921*e4b17023SJohn Marino argblock = copy_addr_to_reg (argblock);
2922*e4b17023SJohn Marino break;
2923*e4b17023SJohn Marino }
2924*e4b17023SJohn Marino }
2925*e4b17023SJohn Marino
2926*e4b17023SJohn Marino compute_argument_addresses (args, argblock, num_actuals);
2927*e4b17023SJohn Marino
2928*e4b17023SJohn Marino /* If we push args individually in reverse order, perform stack alignment
2929*e4b17023SJohn Marino before the first push (the last arg). */
2930*e4b17023SJohn Marino if (PUSH_ARGS_REVERSED && argblock == 0
2931*e4b17023SJohn Marino && adjusted_args_size.constant != unadjusted_args_size)
2932*e4b17023SJohn Marino {
2933*e4b17023SJohn Marino /* When the stack adjustment is pending, we get better code
2934*e4b17023SJohn Marino by combining the adjustments. */
2935*e4b17023SJohn Marino if (pending_stack_adjust
2936*e4b17023SJohn Marino && ! inhibit_defer_pop)
2937*e4b17023SJohn Marino {
2938*e4b17023SJohn Marino pending_stack_adjust
2939*e4b17023SJohn Marino = (combine_pending_stack_adjustment_and_call
2940*e4b17023SJohn Marino (unadjusted_args_size,
2941*e4b17023SJohn Marino &adjusted_args_size,
2942*e4b17023SJohn Marino preferred_unit_stack_boundary));
2943*e4b17023SJohn Marino do_pending_stack_adjust ();
2944*e4b17023SJohn Marino }
2945*e4b17023SJohn Marino else if (argblock == 0)
2946*e4b17023SJohn Marino anti_adjust_stack (GEN_INT (adjusted_args_size.constant
2947*e4b17023SJohn Marino - unadjusted_args_size));
2948*e4b17023SJohn Marino }
2949*e4b17023SJohn Marino /* Now that the stack is properly aligned, pops can't safely
2950*e4b17023SJohn Marino be deferred during the evaluation of the arguments. */
2951*e4b17023SJohn Marino NO_DEFER_POP;
2952*e4b17023SJohn Marino
2953*e4b17023SJohn Marino /* Record the maximum pushed stack space size. We need to delay
2954*e4b17023SJohn Marino doing it this far to take into account the optimization done
2955*e4b17023SJohn Marino by combine_pending_stack_adjustment_and_call. */
2956*e4b17023SJohn Marino if (flag_stack_usage_info
2957*e4b17023SJohn Marino && !ACCUMULATE_OUTGOING_ARGS
2958*e4b17023SJohn Marino && pass
2959*e4b17023SJohn Marino && adjusted_args_size.var == 0)
2960*e4b17023SJohn Marino {
2961*e4b17023SJohn Marino int pushed = adjusted_args_size.constant + pending_stack_adjust;
2962*e4b17023SJohn Marino if (pushed > current_function_pushed_stack_size)
2963*e4b17023SJohn Marino current_function_pushed_stack_size = pushed;
2964*e4b17023SJohn Marino }
2965*e4b17023SJohn Marino
2966*e4b17023SJohn Marino funexp = rtx_for_function_call (fndecl, addr);
2967*e4b17023SJohn Marino
2968*e4b17023SJohn Marino /* Figure out the register where the value, if any, will come back. */
2969*e4b17023SJohn Marino valreg = 0;
2970*e4b17023SJohn Marino if (TYPE_MODE (rettype) != VOIDmode
2971*e4b17023SJohn Marino && ! structure_value_addr)
2972*e4b17023SJohn Marino {
2973*e4b17023SJohn Marino if (pcc_struct_value)
2974*e4b17023SJohn Marino valreg = hard_function_value (build_pointer_type (rettype),
2975*e4b17023SJohn Marino fndecl, NULL, (pass == 0));
2976*e4b17023SJohn Marino else
2977*e4b17023SJohn Marino valreg = hard_function_value (rettype, fndecl, fntype,
2978*e4b17023SJohn Marino (pass == 0));
2979*e4b17023SJohn Marino
2980*e4b17023SJohn Marino /* If VALREG is a PARALLEL whose first member has a zero
2981*e4b17023SJohn Marino offset, use that. This is for targets such as m68k that
2982*e4b17023SJohn Marino return the same value in multiple places. */
2983*e4b17023SJohn Marino if (GET_CODE (valreg) == PARALLEL)
2984*e4b17023SJohn Marino {
2985*e4b17023SJohn Marino rtx elem = XVECEXP (valreg, 0, 0);
2986*e4b17023SJohn Marino rtx where = XEXP (elem, 0);
2987*e4b17023SJohn Marino rtx offset = XEXP (elem, 1);
2988*e4b17023SJohn Marino if (offset == const0_rtx
2989*e4b17023SJohn Marino && GET_MODE (where) == GET_MODE (valreg))
2990*e4b17023SJohn Marino valreg = where;
2991*e4b17023SJohn Marino }
2992*e4b17023SJohn Marino }
2993*e4b17023SJohn Marino
2994*e4b17023SJohn Marino /* Precompute all register parameters. It isn't safe to compute anything
2995*e4b17023SJohn Marino once we have started filling any specific hard regs. */
2996*e4b17023SJohn Marino precompute_register_parameters (num_actuals, args, ®_parm_seen);
2997*e4b17023SJohn Marino
2998*e4b17023SJohn Marino if (CALL_EXPR_STATIC_CHAIN (exp))
2999*e4b17023SJohn Marino static_chain_value = expand_normal (CALL_EXPR_STATIC_CHAIN (exp));
3000*e4b17023SJohn Marino else
3001*e4b17023SJohn Marino static_chain_value = 0;
3002*e4b17023SJohn Marino
3003*e4b17023SJohn Marino #ifdef REG_PARM_STACK_SPACE
3004*e4b17023SJohn Marino /* Save the fixed argument area if it's part of the caller's frame and
3005*e4b17023SJohn Marino is clobbered by argument setup for this call. */
3006*e4b17023SJohn Marino if (ACCUMULATE_OUTGOING_ARGS && pass)
3007*e4b17023SJohn Marino save_area = save_fixed_argument_area (reg_parm_stack_space, argblock,
3008*e4b17023SJohn Marino &low_to_save, &high_to_save);
3009*e4b17023SJohn Marino #endif
3010*e4b17023SJohn Marino
3011*e4b17023SJohn Marino /* Now store (and compute if necessary) all non-register parms.
3012*e4b17023SJohn Marino These come before register parms, since they can require block-moves,
3013*e4b17023SJohn Marino which could clobber the registers used for register parms.
3014*e4b17023SJohn Marino Parms which have partial registers are not stored here,
3015*e4b17023SJohn Marino but we do preallocate space here if they want that. */
3016*e4b17023SJohn Marino
3017*e4b17023SJohn Marino for (i = 0; i < num_actuals; i++)
3018*e4b17023SJohn Marino {
3019*e4b17023SJohn Marino if (args[i].reg == 0 || args[i].pass_on_stack)
3020*e4b17023SJohn Marino {
3021*e4b17023SJohn Marino rtx before_arg = get_last_insn ();
3022*e4b17023SJohn Marino
3023*e4b17023SJohn Marino if (store_one_arg (&args[i], argblock, flags,
3024*e4b17023SJohn Marino adjusted_args_size.var != 0,
3025*e4b17023SJohn Marino reg_parm_stack_space)
3026*e4b17023SJohn Marino || (pass == 0
3027*e4b17023SJohn Marino && check_sibcall_argument_overlap (before_arg,
3028*e4b17023SJohn Marino &args[i], 1)))
3029*e4b17023SJohn Marino sibcall_failure = 1;
3030*e4b17023SJohn Marino }
3031*e4b17023SJohn Marino
3032*e4b17023SJohn Marino if (args[i].stack)
3033*e4b17023SJohn Marino call_fusage
3034*e4b17023SJohn Marino = gen_rtx_EXPR_LIST (TYPE_MODE (TREE_TYPE (args[i].tree_value)),
3035*e4b17023SJohn Marino gen_rtx_USE (VOIDmode, args[i].stack),
3036*e4b17023SJohn Marino call_fusage);
3037*e4b17023SJohn Marino }
3038*e4b17023SJohn Marino
3039*e4b17023SJohn Marino /* If we have a parm that is passed in registers but not in memory
3040*e4b17023SJohn Marino and whose alignment does not permit a direct copy into registers,
3041*e4b17023SJohn Marino make a group of pseudos that correspond to each register that we
3042*e4b17023SJohn Marino will later fill. */
3043*e4b17023SJohn Marino if (STRICT_ALIGNMENT)
3044*e4b17023SJohn Marino store_unaligned_arguments_into_pseudos (args, num_actuals);
3045*e4b17023SJohn Marino
3046*e4b17023SJohn Marino /* Now store any partially-in-registers parm.
3047*e4b17023SJohn Marino This is the last place a block-move can happen. */
3048*e4b17023SJohn Marino if (reg_parm_seen)
3049*e4b17023SJohn Marino for (i = 0; i < num_actuals; i++)
3050*e4b17023SJohn Marino if (args[i].partial != 0 && ! args[i].pass_on_stack)
3051*e4b17023SJohn Marino {
3052*e4b17023SJohn Marino rtx before_arg = get_last_insn ();
3053*e4b17023SJohn Marino
3054*e4b17023SJohn Marino if (store_one_arg (&args[i], argblock, flags,
3055*e4b17023SJohn Marino adjusted_args_size.var != 0,
3056*e4b17023SJohn Marino reg_parm_stack_space)
3057*e4b17023SJohn Marino || (pass == 0
3058*e4b17023SJohn Marino && check_sibcall_argument_overlap (before_arg,
3059*e4b17023SJohn Marino &args[i], 1)))
3060*e4b17023SJohn Marino sibcall_failure = 1;
3061*e4b17023SJohn Marino }
3062*e4b17023SJohn Marino
3063*e4b17023SJohn Marino /* If we pushed args in forward order, perform stack alignment
3064*e4b17023SJohn Marino after pushing the last arg. */
3065*e4b17023SJohn Marino if (!PUSH_ARGS_REVERSED && argblock == 0)
3066*e4b17023SJohn Marino anti_adjust_stack (GEN_INT (adjusted_args_size.constant
3067*e4b17023SJohn Marino - unadjusted_args_size));
3068*e4b17023SJohn Marino
3069*e4b17023SJohn Marino /* If register arguments require space on the stack and stack space
3070*e4b17023SJohn Marino was not preallocated, allocate stack space here for arguments
3071*e4b17023SJohn Marino passed in registers. */
3072*e4b17023SJohn Marino if (OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl)))
3073*e4b17023SJohn Marino && !ACCUMULATE_OUTGOING_ARGS
3074*e4b17023SJohn Marino && must_preallocate == 0 && reg_parm_stack_space > 0)
3075*e4b17023SJohn Marino anti_adjust_stack (GEN_INT (reg_parm_stack_space));
3076*e4b17023SJohn Marino
3077*e4b17023SJohn Marino /* Pass the function the address in which to return a
3078*e4b17023SJohn Marino structure value. */
3079*e4b17023SJohn Marino if (pass != 0 && structure_value_addr && ! structure_value_addr_parm)
3080*e4b17023SJohn Marino {
3081*e4b17023SJohn Marino structure_value_addr
3082*e4b17023SJohn Marino = convert_memory_address (Pmode, structure_value_addr);
3083*e4b17023SJohn Marino emit_move_insn (struct_value,
3084*e4b17023SJohn Marino force_reg (Pmode,
3085*e4b17023SJohn Marino force_operand (structure_value_addr,
3086*e4b17023SJohn Marino NULL_RTX)));
3087*e4b17023SJohn Marino
3088*e4b17023SJohn Marino if (REG_P (struct_value))
3089*e4b17023SJohn Marino use_reg (&call_fusage, struct_value);
3090*e4b17023SJohn Marino }
3091*e4b17023SJohn Marino
3092*e4b17023SJohn Marino after_args = get_last_insn ();
3093*e4b17023SJohn Marino funexp = prepare_call_address (fndecl, funexp, static_chain_value,
3094*e4b17023SJohn Marino &call_fusage, reg_parm_seen, pass == 0);
3095*e4b17023SJohn Marino
3096*e4b17023SJohn Marino load_register_parameters (args, num_actuals, &call_fusage, flags,
3097*e4b17023SJohn Marino pass == 0, &sibcall_failure);
3098*e4b17023SJohn Marino
3099*e4b17023SJohn Marino /* Save a pointer to the last insn before the call, so that we can
3100*e4b17023SJohn Marino later safely search backwards to find the CALL_INSN. */
3101*e4b17023SJohn Marino before_call = get_last_insn ();
3102*e4b17023SJohn Marino
3103*e4b17023SJohn Marino /* Set up next argument register. For sibling calls on machines
3104*e4b17023SJohn Marino with register windows this should be the incoming register. */
3105*e4b17023SJohn Marino if (pass == 0)
3106*e4b17023SJohn Marino next_arg_reg = targetm.calls.function_incoming_arg (args_so_far,
3107*e4b17023SJohn Marino VOIDmode,
3108*e4b17023SJohn Marino void_type_node,
3109*e4b17023SJohn Marino true);
3110*e4b17023SJohn Marino else
3111*e4b17023SJohn Marino next_arg_reg = targetm.calls.function_arg (args_so_far,
3112*e4b17023SJohn Marino VOIDmode, void_type_node,
3113*e4b17023SJohn Marino true);
3114*e4b17023SJohn Marino
3115*e4b17023SJohn Marino /* All arguments and registers used for the call must be set up by
3116*e4b17023SJohn Marino now! */
3117*e4b17023SJohn Marino
3118*e4b17023SJohn Marino /* Stack must be properly aligned now. */
3119*e4b17023SJohn Marino gcc_assert (!pass
3120*e4b17023SJohn Marino || !(stack_pointer_delta % preferred_unit_stack_boundary));
3121*e4b17023SJohn Marino
3122*e4b17023SJohn Marino /* Generate the actual call instruction. */
3123*e4b17023SJohn Marino emit_call_1 (funexp, exp, fndecl, funtype, unadjusted_args_size,
3124*e4b17023SJohn Marino adjusted_args_size.constant, struct_value_size,
3125*e4b17023SJohn Marino next_arg_reg, valreg, old_inhibit_defer_pop, call_fusage,
3126*e4b17023SJohn Marino flags, args_so_far);
3127*e4b17023SJohn Marino
3128*e4b17023SJohn Marino /* If the call setup or the call itself overlaps with anything
3129*e4b17023SJohn Marino of the argument setup we probably clobbered our call address.
3130*e4b17023SJohn Marino In that case we can't do sibcalls. */
3131*e4b17023SJohn Marino if (pass == 0
3132*e4b17023SJohn Marino && check_sibcall_argument_overlap (after_args, 0, 0))
3133*e4b17023SJohn Marino sibcall_failure = 1;
3134*e4b17023SJohn Marino
3135*e4b17023SJohn Marino /* If a non-BLKmode value is returned at the most significant end
3136*e4b17023SJohn Marino of a register, shift the register right by the appropriate amount
3137*e4b17023SJohn Marino and update VALREG accordingly. BLKmode values are handled by the
3138*e4b17023SJohn Marino group load/store machinery below. */
3139*e4b17023SJohn Marino if (!structure_value_addr
3140*e4b17023SJohn Marino && !pcc_struct_value
3141*e4b17023SJohn Marino && TYPE_MODE (rettype) != BLKmode
3142*e4b17023SJohn Marino && targetm.calls.return_in_msb (rettype))
3143*e4b17023SJohn Marino {
3144*e4b17023SJohn Marino if (shift_return_value (TYPE_MODE (rettype), false, valreg))
3145*e4b17023SJohn Marino sibcall_failure = 1;
3146*e4b17023SJohn Marino valreg = gen_rtx_REG (TYPE_MODE (rettype), REGNO (valreg));
3147*e4b17023SJohn Marino }
3148*e4b17023SJohn Marino
3149*e4b17023SJohn Marino if (pass && (flags & ECF_MALLOC))
3150*e4b17023SJohn Marino {
3151*e4b17023SJohn Marino rtx temp = gen_reg_rtx (GET_MODE (valreg));
3152*e4b17023SJohn Marino rtx last, insns;
3153*e4b17023SJohn Marino
3154*e4b17023SJohn Marino /* The return value from a malloc-like function is a pointer. */
3155*e4b17023SJohn Marino if (TREE_CODE (rettype) == POINTER_TYPE)
3156*e4b17023SJohn Marino mark_reg_pointer (temp, BIGGEST_ALIGNMENT);
3157*e4b17023SJohn Marino
3158*e4b17023SJohn Marino emit_move_insn (temp, valreg);
3159*e4b17023SJohn Marino
3160*e4b17023SJohn Marino /* The return value from a malloc-like function can not alias
3161*e4b17023SJohn Marino anything else. */
3162*e4b17023SJohn Marino last = get_last_insn ();
3163*e4b17023SJohn Marino add_reg_note (last, REG_NOALIAS, temp);
3164*e4b17023SJohn Marino
3165*e4b17023SJohn Marino /* Write out the sequence. */
3166*e4b17023SJohn Marino insns = get_insns ();
3167*e4b17023SJohn Marino end_sequence ();
3168*e4b17023SJohn Marino emit_insn (insns);
3169*e4b17023SJohn Marino valreg = temp;
3170*e4b17023SJohn Marino }
3171*e4b17023SJohn Marino
3172*e4b17023SJohn Marino /* For calls to `setjmp', etc., inform
3173*e4b17023SJohn Marino function.c:setjmp_warnings that it should complain if
3174*e4b17023SJohn Marino nonvolatile values are live. For functions that cannot
3175*e4b17023SJohn Marino return, inform flow that control does not fall through. */
3176*e4b17023SJohn Marino
3177*e4b17023SJohn Marino if ((flags & ECF_NORETURN) || pass == 0)
3178*e4b17023SJohn Marino {
3179*e4b17023SJohn Marino /* The barrier must be emitted
3180*e4b17023SJohn Marino immediately after the CALL_INSN. Some ports emit more
3181*e4b17023SJohn Marino than just a CALL_INSN above, so we must search for it here. */
3182*e4b17023SJohn Marino
3183*e4b17023SJohn Marino rtx last = get_last_insn ();
3184*e4b17023SJohn Marino while (!CALL_P (last))
3185*e4b17023SJohn Marino {
3186*e4b17023SJohn Marino last = PREV_INSN (last);
3187*e4b17023SJohn Marino /* There was no CALL_INSN? */
3188*e4b17023SJohn Marino gcc_assert (last != before_call);
3189*e4b17023SJohn Marino }
3190*e4b17023SJohn Marino
3191*e4b17023SJohn Marino emit_barrier_after (last);
3192*e4b17023SJohn Marino
3193*e4b17023SJohn Marino /* Stack adjustments after a noreturn call are dead code.
3194*e4b17023SJohn Marino However when NO_DEFER_POP is in effect, we must preserve
3195*e4b17023SJohn Marino stack_pointer_delta. */
3196*e4b17023SJohn Marino if (inhibit_defer_pop == 0)
3197*e4b17023SJohn Marino {
3198*e4b17023SJohn Marino stack_pointer_delta = old_stack_allocated;
3199*e4b17023SJohn Marino pending_stack_adjust = 0;
3200*e4b17023SJohn Marino }
3201*e4b17023SJohn Marino }
3202*e4b17023SJohn Marino
3203*e4b17023SJohn Marino /* If value type not void, return an rtx for the value. */
3204*e4b17023SJohn Marino
3205*e4b17023SJohn Marino if (TYPE_MODE (rettype) == VOIDmode
3206*e4b17023SJohn Marino || ignore)
3207*e4b17023SJohn Marino target = const0_rtx;
3208*e4b17023SJohn Marino else if (structure_value_addr)
3209*e4b17023SJohn Marino {
3210*e4b17023SJohn Marino if (target == 0 || !MEM_P (target))
3211*e4b17023SJohn Marino {
3212*e4b17023SJohn Marino target
3213*e4b17023SJohn Marino = gen_rtx_MEM (TYPE_MODE (rettype),
3214*e4b17023SJohn Marino memory_address (TYPE_MODE (rettype),
3215*e4b17023SJohn Marino structure_value_addr));
3216*e4b17023SJohn Marino set_mem_attributes (target, rettype, 1);
3217*e4b17023SJohn Marino }
3218*e4b17023SJohn Marino }
3219*e4b17023SJohn Marino else if (pcc_struct_value)
3220*e4b17023SJohn Marino {
3221*e4b17023SJohn Marino /* This is the special C++ case where we need to
3222*e4b17023SJohn Marino know what the true target was. We take care to
3223*e4b17023SJohn Marino never use this value more than once in one expression. */
3224*e4b17023SJohn Marino target = gen_rtx_MEM (TYPE_MODE (rettype),
3225*e4b17023SJohn Marino copy_to_reg (valreg));
3226*e4b17023SJohn Marino set_mem_attributes (target, rettype, 1);
3227*e4b17023SJohn Marino }
3228*e4b17023SJohn Marino /* Handle calls that return values in multiple non-contiguous locations.
3229*e4b17023SJohn Marino The Irix 6 ABI has examples of this. */
3230*e4b17023SJohn Marino else if (GET_CODE (valreg) == PARALLEL)
3231*e4b17023SJohn Marino {
3232*e4b17023SJohn Marino if (target == 0)
3233*e4b17023SJohn Marino {
3234*e4b17023SJohn Marino /* This will only be assigned once, so it can be readonly. */
3235*e4b17023SJohn Marino tree nt = build_qualified_type (rettype,
3236*e4b17023SJohn Marino (TYPE_QUALS (rettype)
3237*e4b17023SJohn Marino | TYPE_QUAL_CONST));
3238*e4b17023SJohn Marino
3239*e4b17023SJohn Marino target = assign_temp (nt, 0, 1, 1);
3240*e4b17023SJohn Marino }
3241*e4b17023SJohn Marino
3242*e4b17023SJohn Marino if (! rtx_equal_p (target, valreg))
3243*e4b17023SJohn Marino emit_group_store (target, valreg, rettype,
3244*e4b17023SJohn Marino int_size_in_bytes (rettype));
3245*e4b17023SJohn Marino
3246*e4b17023SJohn Marino /* We can not support sibling calls for this case. */
3247*e4b17023SJohn Marino sibcall_failure = 1;
3248*e4b17023SJohn Marino }
3249*e4b17023SJohn Marino else if (target
3250*e4b17023SJohn Marino && GET_MODE (target) == TYPE_MODE (rettype)
3251*e4b17023SJohn Marino && GET_MODE (target) == GET_MODE (valreg))
3252*e4b17023SJohn Marino {
3253*e4b17023SJohn Marino bool may_overlap = false;
3254*e4b17023SJohn Marino
3255*e4b17023SJohn Marino /* We have to copy a return value in a CLASS_LIKELY_SPILLED hard
3256*e4b17023SJohn Marino reg to a plain register. */
3257*e4b17023SJohn Marino if (!REG_P (target) || HARD_REGISTER_P (target))
3258*e4b17023SJohn Marino valreg = avoid_likely_spilled_reg (valreg);
3259*e4b17023SJohn Marino
3260*e4b17023SJohn Marino /* If TARGET is a MEM in the argument area, and we have
3261*e4b17023SJohn Marino saved part of the argument area, then we can't store
3262*e4b17023SJohn Marino directly into TARGET as it may get overwritten when we
3263*e4b17023SJohn Marino restore the argument save area below. Don't work too
3264*e4b17023SJohn Marino hard though and simply force TARGET to a register if it
3265*e4b17023SJohn Marino is a MEM; the optimizer is quite likely to sort it out. */
3266*e4b17023SJohn Marino if (ACCUMULATE_OUTGOING_ARGS && pass && MEM_P (target))
3267*e4b17023SJohn Marino for (i = 0; i < num_actuals; i++)
3268*e4b17023SJohn Marino if (args[i].save_area)
3269*e4b17023SJohn Marino {
3270*e4b17023SJohn Marino may_overlap = true;
3271*e4b17023SJohn Marino break;
3272*e4b17023SJohn Marino }
3273*e4b17023SJohn Marino
3274*e4b17023SJohn Marino if (may_overlap)
3275*e4b17023SJohn Marino target = copy_to_reg (valreg);
3276*e4b17023SJohn Marino else
3277*e4b17023SJohn Marino {
3278*e4b17023SJohn Marino /* TARGET and VALREG cannot be equal at this point
3279*e4b17023SJohn Marino because the latter would not have
3280*e4b17023SJohn Marino REG_FUNCTION_VALUE_P true, while the former would if
3281*e4b17023SJohn Marino it were referring to the same register.
3282*e4b17023SJohn Marino
3283*e4b17023SJohn Marino If they refer to the same register, this move will be
3284*e4b17023SJohn Marino a no-op, except when function inlining is being
3285*e4b17023SJohn Marino done. */
3286*e4b17023SJohn Marino emit_move_insn (target, valreg);
3287*e4b17023SJohn Marino
3288*e4b17023SJohn Marino /* If we are setting a MEM, this code must be executed.
3289*e4b17023SJohn Marino Since it is emitted after the call insn, sibcall
3290*e4b17023SJohn Marino optimization cannot be performed in that case. */
3291*e4b17023SJohn Marino if (MEM_P (target))
3292*e4b17023SJohn Marino sibcall_failure = 1;
3293*e4b17023SJohn Marino }
3294*e4b17023SJohn Marino }
3295*e4b17023SJohn Marino else if (TYPE_MODE (rettype) == BLKmode)
3296*e4b17023SJohn Marino {
3297*e4b17023SJohn Marino rtx val = valreg;
3298*e4b17023SJohn Marino if (GET_MODE (val) != BLKmode)
3299*e4b17023SJohn Marino val = avoid_likely_spilled_reg (val);
3300*e4b17023SJohn Marino target = copy_blkmode_from_reg (target, val, rettype);
3301*e4b17023SJohn Marino
3302*e4b17023SJohn Marino /* We can not support sibling calls for this case. */
3303*e4b17023SJohn Marino sibcall_failure = 1;
3304*e4b17023SJohn Marino }
3305*e4b17023SJohn Marino else
3306*e4b17023SJohn Marino target = copy_to_reg (avoid_likely_spilled_reg (valreg));
3307*e4b17023SJohn Marino
3308*e4b17023SJohn Marino /* If we promoted this return value, make the proper SUBREG.
3309*e4b17023SJohn Marino TARGET might be const0_rtx here, so be careful. */
3310*e4b17023SJohn Marino if (REG_P (target)
3311*e4b17023SJohn Marino && TYPE_MODE (rettype) != BLKmode
3312*e4b17023SJohn Marino && GET_MODE (target) != TYPE_MODE (rettype))
3313*e4b17023SJohn Marino {
3314*e4b17023SJohn Marino tree type = rettype;
3315*e4b17023SJohn Marino int unsignedp = TYPE_UNSIGNED (type);
3316*e4b17023SJohn Marino int offset = 0;
3317*e4b17023SJohn Marino enum machine_mode pmode;
3318*e4b17023SJohn Marino
3319*e4b17023SJohn Marino /* Ensure we promote as expected, and get the new unsignedness. */
3320*e4b17023SJohn Marino pmode = promote_function_mode (type, TYPE_MODE (type), &unsignedp,
3321*e4b17023SJohn Marino funtype, 1);
3322*e4b17023SJohn Marino gcc_assert (GET_MODE (target) == pmode);
3323*e4b17023SJohn Marino
3324*e4b17023SJohn Marino if ((WORDS_BIG_ENDIAN || BYTES_BIG_ENDIAN)
3325*e4b17023SJohn Marino && (GET_MODE_SIZE (GET_MODE (target))
3326*e4b17023SJohn Marino > GET_MODE_SIZE (TYPE_MODE (type))))
3327*e4b17023SJohn Marino {
3328*e4b17023SJohn Marino offset = GET_MODE_SIZE (GET_MODE (target))
3329*e4b17023SJohn Marino - GET_MODE_SIZE (TYPE_MODE (type));
3330*e4b17023SJohn Marino if (! BYTES_BIG_ENDIAN)
3331*e4b17023SJohn Marino offset = (offset / UNITS_PER_WORD) * UNITS_PER_WORD;
3332*e4b17023SJohn Marino else if (! WORDS_BIG_ENDIAN)
3333*e4b17023SJohn Marino offset %= UNITS_PER_WORD;
3334*e4b17023SJohn Marino }
3335*e4b17023SJohn Marino
3336*e4b17023SJohn Marino target = gen_rtx_SUBREG (TYPE_MODE (type), target, offset);
3337*e4b17023SJohn Marino SUBREG_PROMOTED_VAR_P (target) = 1;
3338*e4b17023SJohn Marino SUBREG_PROMOTED_UNSIGNED_SET (target, unsignedp);
3339*e4b17023SJohn Marino }
3340*e4b17023SJohn Marino
3341*e4b17023SJohn Marino /* If size of args is variable or this was a constructor call for a stack
3342*e4b17023SJohn Marino argument, restore saved stack-pointer value. */
3343*e4b17023SJohn Marino
3344*e4b17023SJohn Marino if (old_stack_level)
3345*e4b17023SJohn Marino {
3346*e4b17023SJohn Marino rtx prev = get_last_insn ();
3347*e4b17023SJohn Marino
3348*e4b17023SJohn Marino emit_stack_restore (SAVE_BLOCK, old_stack_level);
3349*e4b17023SJohn Marino stack_pointer_delta = old_stack_pointer_delta;
3350*e4b17023SJohn Marino
3351*e4b17023SJohn Marino fixup_args_size_notes (prev, get_last_insn (), stack_pointer_delta);
3352*e4b17023SJohn Marino
3353*e4b17023SJohn Marino pending_stack_adjust = old_pending_adj;
3354*e4b17023SJohn Marino old_stack_allocated = stack_pointer_delta - pending_stack_adjust;
3355*e4b17023SJohn Marino stack_arg_under_construction = old_stack_arg_under_construction;
3356*e4b17023SJohn Marino highest_outgoing_arg_in_use = initial_highest_arg_in_use;
3357*e4b17023SJohn Marino stack_usage_map = initial_stack_usage_map;
3358*e4b17023SJohn Marino sibcall_failure = 1;
3359*e4b17023SJohn Marino }
3360*e4b17023SJohn Marino else if (ACCUMULATE_OUTGOING_ARGS && pass)
3361*e4b17023SJohn Marino {
3362*e4b17023SJohn Marino #ifdef REG_PARM_STACK_SPACE
3363*e4b17023SJohn Marino if (save_area)
3364*e4b17023SJohn Marino restore_fixed_argument_area (save_area, argblock,
3365*e4b17023SJohn Marino high_to_save, low_to_save);
3366*e4b17023SJohn Marino #endif
3367*e4b17023SJohn Marino
3368*e4b17023SJohn Marino /* If we saved any argument areas, restore them. */
3369*e4b17023SJohn Marino for (i = 0; i < num_actuals; i++)
3370*e4b17023SJohn Marino if (args[i].save_area)
3371*e4b17023SJohn Marino {
3372*e4b17023SJohn Marino enum machine_mode save_mode = GET_MODE (args[i].save_area);
3373*e4b17023SJohn Marino rtx stack_area
3374*e4b17023SJohn Marino = gen_rtx_MEM (save_mode,
3375*e4b17023SJohn Marino memory_address (save_mode,
3376*e4b17023SJohn Marino XEXP (args[i].stack_slot, 0)));
3377*e4b17023SJohn Marino
3378*e4b17023SJohn Marino if (save_mode != BLKmode)
3379*e4b17023SJohn Marino emit_move_insn (stack_area, args[i].save_area);
3380*e4b17023SJohn Marino else
3381*e4b17023SJohn Marino emit_block_move (stack_area, args[i].save_area,
3382*e4b17023SJohn Marino GEN_INT (args[i].locate.size.constant),
3383*e4b17023SJohn Marino BLOCK_OP_CALL_PARM);
3384*e4b17023SJohn Marino }
3385*e4b17023SJohn Marino
3386*e4b17023SJohn Marino highest_outgoing_arg_in_use = initial_highest_arg_in_use;
3387*e4b17023SJohn Marino stack_usage_map = initial_stack_usage_map;
3388*e4b17023SJohn Marino }
3389*e4b17023SJohn Marino
3390*e4b17023SJohn Marino /* If this was alloca, record the new stack level for nonlocal gotos.
3391*e4b17023SJohn Marino Check for the handler slots since we might not have a save area
3392*e4b17023SJohn Marino for non-local gotos. */
3393*e4b17023SJohn Marino
3394*e4b17023SJohn Marino if ((flags & ECF_MAY_BE_ALLOCA) && cfun->nonlocal_goto_save_area != 0)
3395*e4b17023SJohn Marino update_nonlocal_goto_save_area ();
3396*e4b17023SJohn Marino
3397*e4b17023SJohn Marino /* Free up storage we no longer need. */
3398*e4b17023SJohn Marino for (i = 0; i < num_actuals; ++i)
3399*e4b17023SJohn Marino free (args[i].aligned_regs);
3400*e4b17023SJohn Marino
3401*e4b17023SJohn Marino insns = get_insns ();
3402*e4b17023SJohn Marino end_sequence ();
3403*e4b17023SJohn Marino
3404*e4b17023SJohn Marino if (pass == 0)
3405*e4b17023SJohn Marino {
3406*e4b17023SJohn Marino tail_call_insns = insns;
3407*e4b17023SJohn Marino
3408*e4b17023SJohn Marino /* Restore the pending stack adjustment now that we have
3409*e4b17023SJohn Marino finished generating the sibling call sequence. */
3410*e4b17023SJohn Marino
3411*e4b17023SJohn Marino pending_stack_adjust = save_pending_stack_adjust;
3412*e4b17023SJohn Marino stack_pointer_delta = save_stack_pointer_delta;
3413*e4b17023SJohn Marino
3414*e4b17023SJohn Marino /* Prepare arg structure for next iteration. */
3415*e4b17023SJohn Marino for (i = 0; i < num_actuals; i++)
3416*e4b17023SJohn Marino {
3417*e4b17023SJohn Marino args[i].value = 0;
3418*e4b17023SJohn Marino args[i].aligned_regs = 0;
3419*e4b17023SJohn Marino args[i].stack = 0;
3420*e4b17023SJohn Marino }
3421*e4b17023SJohn Marino
3422*e4b17023SJohn Marino sbitmap_free (stored_args_map);
3423*e4b17023SJohn Marino internal_arg_pointer_exp_state.scan_start = NULL_RTX;
3424*e4b17023SJohn Marino VEC_free (rtx, heap, internal_arg_pointer_exp_state.cache);
3425*e4b17023SJohn Marino }
3426*e4b17023SJohn Marino else
3427*e4b17023SJohn Marino {
3428*e4b17023SJohn Marino normal_call_insns = insns;
3429*e4b17023SJohn Marino
3430*e4b17023SJohn Marino /* Verify that we've deallocated all the stack we used. */
3431*e4b17023SJohn Marino gcc_assert ((flags & ECF_NORETURN)
3432*e4b17023SJohn Marino || (old_stack_allocated
3433*e4b17023SJohn Marino == stack_pointer_delta - pending_stack_adjust));
3434*e4b17023SJohn Marino }
3435*e4b17023SJohn Marino
3436*e4b17023SJohn Marino /* If something prevents making this a sibling call,
3437*e4b17023SJohn Marino zero out the sequence. */
3438*e4b17023SJohn Marino if (sibcall_failure)
3439*e4b17023SJohn Marino tail_call_insns = NULL_RTX;
3440*e4b17023SJohn Marino else
3441*e4b17023SJohn Marino break;
3442*e4b17023SJohn Marino }
3443*e4b17023SJohn Marino
3444*e4b17023SJohn Marino /* If tail call production succeeded, we need to remove REG_EQUIV notes on
3445*e4b17023SJohn Marino arguments too, as argument area is now clobbered by the call. */
3446*e4b17023SJohn Marino if (tail_call_insns)
3447*e4b17023SJohn Marino {
3448*e4b17023SJohn Marino emit_insn (tail_call_insns);
3449*e4b17023SJohn Marino crtl->tail_call_emit = true;
3450*e4b17023SJohn Marino }
3451*e4b17023SJohn Marino else
3452*e4b17023SJohn Marino emit_insn (normal_call_insns);
3453*e4b17023SJohn Marino
3454*e4b17023SJohn Marino currently_expanding_call--;
3455*e4b17023SJohn Marino
3456*e4b17023SJohn Marino free (stack_usage_map_buf);
3457*e4b17023SJohn Marino
3458*e4b17023SJohn Marino return target;
3459*e4b17023SJohn Marino }
3460*e4b17023SJohn Marino
3461*e4b17023SJohn Marino /* A sibling call sequence invalidates any REG_EQUIV notes made for
3462*e4b17023SJohn Marino this function's incoming arguments.
3463*e4b17023SJohn Marino
3464*e4b17023SJohn Marino At the start of RTL generation we know the only REG_EQUIV notes
3465*e4b17023SJohn Marino in the rtl chain are those for incoming arguments, so we can look
3466*e4b17023SJohn Marino for REG_EQUIV notes between the start of the function and the
3467*e4b17023SJohn Marino NOTE_INSN_FUNCTION_BEG.
3468*e4b17023SJohn Marino
3469*e4b17023SJohn Marino This is (slight) overkill. We could keep track of the highest
3470*e4b17023SJohn Marino argument we clobber and be more selective in removing notes, but it
3471*e4b17023SJohn Marino does not seem to be worth the effort. */
3472*e4b17023SJohn Marino
3473*e4b17023SJohn Marino void
fixup_tail_calls(void)3474*e4b17023SJohn Marino fixup_tail_calls (void)
3475*e4b17023SJohn Marino {
3476*e4b17023SJohn Marino rtx insn;
3477*e4b17023SJohn Marino
3478*e4b17023SJohn Marino for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
3479*e4b17023SJohn Marino {
3480*e4b17023SJohn Marino rtx note;
3481*e4b17023SJohn Marino
3482*e4b17023SJohn Marino /* There are never REG_EQUIV notes for the incoming arguments
3483*e4b17023SJohn Marino after the NOTE_INSN_FUNCTION_BEG note, so stop if we see it. */
3484*e4b17023SJohn Marino if (NOTE_P (insn)
3485*e4b17023SJohn Marino && NOTE_KIND (insn) == NOTE_INSN_FUNCTION_BEG)
3486*e4b17023SJohn Marino break;
3487*e4b17023SJohn Marino
3488*e4b17023SJohn Marino note = find_reg_note (insn, REG_EQUIV, 0);
3489*e4b17023SJohn Marino if (note)
3490*e4b17023SJohn Marino remove_note (insn, note);
3491*e4b17023SJohn Marino note = find_reg_note (insn, REG_EQUIV, 0);
3492*e4b17023SJohn Marino gcc_assert (!note);
3493*e4b17023SJohn Marino }
3494*e4b17023SJohn Marino }
3495*e4b17023SJohn Marino
3496*e4b17023SJohn Marino /* Traverse a list of TYPES and expand all complex types into their
3497*e4b17023SJohn Marino components. */
3498*e4b17023SJohn Marino static tree
split_complex_types(tree types)3499*e4b17023SJohn Marino split_complex_types (tree types)
3500*e4b17023SJohn Marino {
3501*e4b17023SJohn Marino tree p;
3502*e4b17023SJohn Marino
3503*e4b17023SJohn Marino /* Before allocating memory, check for the common case of no complex. */
3504*e4b17023SJohn Marino for (p = types; p; p = TREE_CHAIN (p))
3505*e4b17023SJohn Marino {
3506*e4b17023SJohn Marino tree type = TREE_VALUE (p);
3507*e4b17023SJohn Marino if (TREE_CODE (type) == COMPLEX_TYPE
3508*e4b17023SJohn Marino && targetm.calls.split_complex_arg (type))
3509*e4b17023SJohn Marino goto found;
3510*e4b17023SJohn Marino }
3511*e4b17023SJohn Marino return types;
3512*e4b17023SJohn Marino
3513*e4b17023SJohn Marino found:
3514*e4b17023SJohn Marino types = copy_list (types);
3515*e4b17023SJohn Marino
3516*e4b17023SJohn Marino for (p = types; p; p = TREE_CHAIN (p))
3517*e4b17023SJohn Marino {
3518*e4b17023SJohn Marino tree complex_type = TREE_VALUE (p);
3519*e4b17023SJohn Marino
3520*e4b17023SJohn Marino if (TREE_CODE (complex_type) == COMPLEX_TYPE
3521*e4b17023SJohn Marino && targetm.calls.split_complex_arg (complex_type))
3522*e4b17023SJohn Marino {
3523*e4b17023SJohn Marino tree next, imag;
3524*e4b17023SJohn Marino
3525*e4b17023SJohn Marino /* Rewrite complex type with component type. */
3526*e4b17023SJohn Marino TREE_VALUE (p) = TREE_TYPE (complex_type);
3527*e4b17023SJohn Marino next = TREE_CHAIN (p);
3528*e4b17023SJohn Marino
3529*e4b17023SJohn Marino /* Add another component type for the imaginary part. */
3530*e4b17023SJohn Marino imag = build_tree_list (NULL_TREE, TREE_VALUE (p));
3531*e4b17023SJohn Marino TREE_CHAIN (p) = imag;
3532*e4b17023SJohn Marino TREE_CHAIN (imag) = next;
3533*e4b17023SJohn Marino
3534*e4b17023SJohn Marino /* Skip the newly created node. */
3535*e4b17023SJohn Marino p = TREE_CHAIN (p);
3536*e4b17023SJohn Marino }
3537*e4b17023SJohn Marino }
3538*e4b17023SJohn Marino
3539*e4b17023SJohn Marino return types;
3540*e4b17023SJohn Marino }
3541*e4b17023SJohn Marino
3542*e4b17023SJohn Marino /* Output a library call to function FUN (a SYMBOL_REF rtx).
3543*e4b17023SJohn Marino The RETVAL parameter specifies whether return value needs to be saved, other
3544*e4b17023SJohn Marino parameters are documented in the emit_library_call function below. */
3545*e4b17023SJohn Marino
3546*e4b17023SJohn Marino static rtx
emit_library_call_value_1(int retval,rtx orgfun,rtx value,enum libcall_type fn_type,enum machine_mode outmode,int nargs,va_list p)3547*e4b17023SJohn Marino emit_library_call_value_1 (int retval, rtx orgfun, rtx value,
3548*e4b17023SJohn Marino enum libcall_type fn_type,
3549*e4b17023SJohn Marino enum machine_mode outmode, int nargs, va_list p)
3550*e4b17023SJohn Marino {
3551*e4b17023SJohn Marino /* Total size in bytes of all the stack-parms scanned so far. */
3552*e4b17023SJohn Marino struct args_size args_size;
3553*e4b17023SJohn Marino /* Size of arguments before any adjustments (such as rounding). */
3554*e4b17023SJohn Marino struct args_size original_args_size;
3555*e4b17023SJohn Marino int argnum;
3556*e4b17023SJohn Marino rtx fun;
3557*e4b17023SJohn Marino /* Todo, choose the correct decl type of orgfun. Sadly this information
3558*e4b17023SJohn Marino isn't present here, so we default to native calling abi here. */
3559*e4b17023SJohn Marino tree fndecl ATTRIBUTE_UNUSED = NULL_TREE; /* library calls default to host calling abi ? */
3560*e4b17023SJohn Marino tree fntype ATTRIBUTE_UNUSED = NULL_TREE; /* library calls default to host calling abi ? */
3561*e4b17023SJohn Marino int inc;
3562*e4b17023SJohn Marino int count;
3563*e4b17023SJohn Marino rtx argblock = 0;
3564*e4b17023SJohn Marino CUMULATIVE_ARGS args_so_far_v;
3565*e4b17023SJohn Marino cumulative_args_t args_so_far;
3566*e4b17023SJohn Marino struct arg
3567*e4b17023SJohn Marino {
3568*e4b17023SJohn Marino rtx value;
3569*e4b17023SJohn Marino enum machine_mode mode;
3570*e4b17023SJohn Marino rtx reg;
3571*e4b17023SJohn Marino int partial;
3572*e4b17023SJohn Marino struct locate_and_pad_arg_data locate;
3573*e4b17023SJohn Marino rtx save_area;
3574*e4b17023SJohn Marino };
3575*e4b17023SJohn Marino struct arg *argvec;
3576*e4b17023SJohn Marino int old_inhibit_defer_pop = inhibit_defer_pop;
3577*e4b17023SJohn Marino rtx call_fusage = 0;
3578*e4b17023SJohn Marino rtx mem_value = 0;
3579*e4b17023SJohn Marino rtx valreg;
3580*e4b17023SJohn Marino int pcc_struct_value = 0;
3581*e4b17023SJohn Marino int struct_value_size = 0;
3582*e4b17023SJohn Marino int flags;
3583*e4b17023SJohn Marino int reg_parm_stack_space = 0;
3584*e4b17023SJohn Marino int needed;
3585*e4b17023SJohn Marino rtx before_call;
3586*e4b17023SJohn Marino tree tfom; /* type_for_mode (outmode, 0) */
3587*e4b17023SJohn Marino
3588*e4b17023SJohn Marino #ifdef REG_PARM_STACK_SPACE
3589*e4b17023SJohn Marino /* Define the boundary of the register parm stack space that needs to be
3590*e4b17023SJohn Marino save, if any. */
3591*e4b17023SJohn Marino int low_to_save = 0, high_to_save = 0;
3592*e4b17023SJohn Marino rtx save_area = 0; /* Place that it is saved. */
3593*e4b17023SJohn Marino #endif
3594*e4b17023SJohn Marino
3595*e4b17023SJohn Marino /* Size of the stack reserved for parameter registers. */
3596*e4b17023SJohn Marino int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
3597*e4b17023SJohn Marino char *initial_stack_usage_map = stack_usage_map;
3598*e4b17023SJohn Marino char *stack_usage_map_buf = NULL;
3599*e4b17023SJohn Marino
3600*e4b17023SJohn Marino rtx struct_value = targetm.calls.struct_value_rtx (0, 0);
3601*e4b17023SJohn Marino
3602*e4b17023SJohn Marino #ifdef REG_PARM_STACK_SPACE
3603*e4b17023SJohn Marino reg_parm_stack_space = REG_PARM_STACK_SPACE ((tree) 0);
3604*e4b17023SJohn Marino #endif
3605*e4b17023SJohn Marino
3606*e4b17023SJohn Marino /* By default, library functions can not throw. */
3607*e4b17023SJohn Marino flags = ECF_NOTHROW;
3608*e4b17023SJohn Marino
3609*e4b17023SJohn Marino switch (fn_type)
3610*e4b17023SJohn Marino {
3611*e4b17023SJohn Marino case LCT_NORMAL:
3612*e4b17023SJohn Marino break;
3613*e4b17023SJohn Marino case LCT_CONST:
3614*e4b17023SJohn Marino flags |= ECF_CONST;
3615*e4b17023SJohn Marino break;
3616*e4b17023SJohn Marino case LCT_PURE:
3617*e4b17023SJohn Marino flags |= ECF_PURE;
3618*e4b17023SJohn Marino break;
3619*e4b17023SJohn Marino case LCT_NORETURN:
3620*e4b17023SJohn Marino flags |= ECF_NORETURN;
3621*e4b17023SJohn Marino break;
3622*e4b17023SJohn Marino case LCT_THROW:
3623*e4b17023SJohn Marino flags = ECF_NORETURN;
3624*e4b17023SJohn Marino break;
3625*e4b17023SJohn Marino case LCT_RETURNS_TWICE:
3626*e4b17023SJohn Marino flags = ECF_RETURNS_TWICE;
3627*e4b17023SJohn Marino break;
3628*e4b17023SJohn Marino }
3629*e4b17023SJohn Marino fun = orgfun;
3630*e4b17023SJohn Marino
3631*e4b17023SJohn Marino /* Ensure current function's preferred stack boundary is at least
3632*e4b17023SJohn Marino what we need. */
3633*e4b17023SJohn Marino if (crtl->preferred_stack_boundary < PREFERRED_STACK_BOUNDARY)
3634*e4b17023SJohn Marino crtl->preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
3635*e4b17023SJohn Marino
3636*e4b17023SJohn Marino /* If this kind of value comes back in memory,
3637*e4b17023SJohn Marino decide where in memory it should come back. */
3638*e4b17023SJohn Marino if (outmode != VOIDmode)
3639*e4b17023SJohn Marino {
3640*e4b17023SJohn Marino tfom = lang_hooks.types.type_for_mode (outmode, 0);
3641*e4b17023SJohn Marino if (aggregate_value_p (tfom, 0))
3642*e4b17023SJohn Marino {
3643*e4b17023SJohn Marino #ifdef PCC_STATIC_STRUCT_RETURN
3644*e4b17023SJohn Marino rtx pointer_reg
3645*e4b17023SJohn Marino = hard_function_value (build_pointer_type (tfom), 0, 0, 0);
3646*e4b17023SJohn Marino mem_value = gen_rtx_MEM (outmode, pointer_reg);
3647*e4b17023SJohn Marino pcc_struct_value = 1;
3648*e4b17023SJohn Marino if (value == 0)
3649*e4b17023SJohn Marino value = gen_reg_rtx (outmode);
3650*e4b17023SJohn Marino #else /* not PCC_STATIC_STRUCT_RETURN */
3651*e4b17023SJohn Marino struct_value_size = GET_MODE_SIZE (outmode);
3652*e4b17023SJohn Marino if (value != 0 && MEM_P (value))
3653*e4b17023SJohn Marino mem_value = value;
3654*e4b17023SJohn Marino else
3655*e4b17023SJohn Marino mem_value = assign_temp (tfom, 0, 1, 1);
3656*e4b17023SJohn Marino #endif
3657*e4b17023SJohn Marino /* This call returns a big structure. */
3658*e4b17023SJohn Marino flags &= ~(ECF_CONST | ECF_PURE | ECF_LOOPING_CONST_OR_PURE);
3659*e4b17023SJohn Marino }
3660*e4b17023SJohn Marino }
3661*e4b17023SJohn Marino else
3662*e4b17023SJohn Marino tfom = void_type_node;
3663*e4b17023SJohn Marino
3664*e4b17023SJohn Marino /* ??? Unfinished: must pass the memory address as an argument. */
3665*e4b17023SJohn Marino
3666*e4b17023SJohn Marino /* Copy all the libcall-arguments out of the varargs data
3667*e4b17023SJohn Marino and into a vector ARGVEC.
3668*e4b17023SJohn Marino
3669*e4b17023SJohn Marino Compute how to pass each argument. We only support a very small subset
3670*e4b17023SJohn Marino of the full argument passing conventions to limit complexity here since
3671*e4b17023SJohn Marino library functions shouldn't have many args. */
3672*e4b17023SJohn Marino
3673*e4b17023SJohn Marino argvec = XALLOCAVEC (struct arg, nargs + 1);
3674*e4b17023SJohn Marino memset (argvec, 0, (nargs + 1) * sizeof (struct arg));
3675*e4b17023SJohn Marino
3676*e4b17023SJohn Marino #ifdef INIT_CUMULATIVE_LIBCALL_ARGS
3677*e4b17023SJohn Marino INIT_CUMULATIVE_LIBCALL_ARGS (args_so_far_v, outmode, fun);
3678*e4b17023SJohn Marino #else
3679*e4b17023SJohn Marino INIT_CUMULATIVE_ARGS (args_so_far_v, NULL_TREE, fun, 0, nargs);
3680*e4b17023SJohn Marino #endif
3681*e4b17023SJohn Marino args_so_far = pack_cumulative_args (&args_so_far_v);
3682*e4b17023SJohn Marino
3683*e4b17023SJohn Marino args_size.constant = 0;
3684*e4b17023SJohn Marino args_size.var = 0;
3685*e4b17023SJohn Marino
3686*e4b17023SJohn Marino count = 0;
3687*e4b17023SJohn Marino
3688*e4b17023SJohn Marino push_temp_slots ();
3689*e4b17023SJohn Marino
3690*e4b17023SJohn Marino /* If there's a structure value address to be passed,
3691*e4b17023SJohn Marino either pass it in the special place, or pass it as an extra argument. */
3692*e4b17023SJohn Marino if (mem_value && struct_value == 0 && ! pcc_struct_value)
3693*e4b17023SJohn Marino {
3694*e4b17023SJohn Marino rtx addr = XEXP (mem_value, 0);
3695*e4b17023SJohn Marino
3696*e4b17023SJohn Marino nargs++;
3697*e4b17023SJohn Marino
3698*e4b17023SJohn Marino /* Make sure it is a reasonable operand for a move or push insn. */
3699*e4b17023SJohn Marino if (!REG_P (addr) && !MEM_P (addr)
3700*e4b17023SJohn Marino && !(CONSTANT_P (addr)
3701*e4b17023SJohn Marino && targetm.legitimate_constant_p (Pmode, addr)))
3702*e4b17023SJohn Marino addr = force_operand (addr, NULL_RTX);
3703*e4b17023SJohn Marino
3704*e4b17023SJohn Marino argvec[count].value = addr;
3705*e4b17023SJohn Marino argvec[count].mode = Pmode;
3706*e4b17023SJohn Marino argvec[count].partial = 0;
3707*e4b17023SJohn Marino
3708*e4b17023SJohn Marino argvec[count].reg = targetm.calls.function_arg (args_so_far,
3709*e4b17023SJohn Marino Pmode, NULL_TREE, true);
3710*e4b17023SJohn Marino gcc_assert (targetm.calls.arg_partial_bytes (args_so_far, Pmode,
3711*e4b17023SJohn Marino NULL_TREE, 1) == 0);
3712*e4b17023SJohn Marino
3713*e4b17023SJohn Marino locate_and_pad_parm (Pmode, NULL_TREE,
3714*e4b17023SJohn Marino #ifdef STACK_PARMS_IN_REG_PARM_AREA
3715*e4b17023SJohn Marino 1,
3716*e4b17023SJohn Marino #else
3717*e4b17023SJohn Marino argvec[count].reg != 0,
3718*e4b17023SJohn Marino #endif
3719*e4b17023SJohn Marino 0, NULL_TREE, &args_size, &argvec[count].locate);
3720*e4b17023SJohn Marino
3721*e4b17023SJohn Marino if (argvec[count].reg == 0 || argvec[count].partial != 0
3722*e4b17023SJohn Marino || reg_parm_stack_space > 0)
3723*e4b17023SJohn Marino args_size.constant += argvec[count].locate.size.constant;
3724*e4b17023SJohn Marino
3725*e4b17023SJohn Marino targetm.calls.function_arg_advance (args_so_far, Pmode, (tree) 0, true);
3726*e4b17023SJohn Marino
3727*e4b17023SJohn Marino count++;
3728*e4b17023SJohn Marino }
3729*e4b17023SJohn Marino
3730*e4b17023SJohn Marino for (; count < nargs; count++)
3731*e4b17023SJohn Marino {
3732*e4b17023SJohn Marino rtx val = va_arg (p, rtx);
3733*e4b17023SJohn Marino enum machine_mode mode = (enum machine_mode) va_arg (p, int);
3734*e4b17023SJohn Marino int unsigned_p = 0;
3735*e4b17023SJohn Marino
3736*e4b17023SJohn Marino /* We cannot convert the arg value to the mode the library wants here;
3737*e4b17023SJohn Marino must do it earlier where we know the signedness of the arg. */
3738*e4b17023SJohn Marino gcc_assert (mode != BLKmode
3739*e4b17023SJohn Marino && (GET_MODE (val) == mode || GET_MODE (val) == VOIDmode));
3740*e4b17023SJohn Marino
3741*e4b17023SJohn Marino /* Make sure it is a reasonable operand for a move or push insn. */
3742*e4b17023SJohn Marino if (!REG_P (val) && !MEM_P (val)
3743*e4b17023SJohn Marino && !(CONSTANT_P (val) && targetm.legitimate_constant_p (mode, val)))
3744*e4b17023SJohn Marino val = force_operand (val, NULL_RTX);
3745*e4b17023SJohn Marino
3746*e4b17023SJohn Marino if (pass_by_reference (&args_so_far_v, mode, NULL_TREE, 1))
3747*e4b17023SJohn Marino {
3748*e4b17023SJohn Marino rtx slot;
3749*e4b17023SJohn Marino int must_copy
3750*e4b17023SJohn Marino = !reference_callee_copied (&args_so_far_v, mode, NULL_TREE, 1);
3751*e4b17023SJohn Marino
3752*e4b17023SJohn Marino /* If this was a CONST function, it is now PURE since it now
3753*e4b17023SJohn Marino reads memory. */
3754*e4b17023SJohn Marino if (flags & ECF_CONST)
3755*e4b17023SJohn Marino {
3756*e4b17023SJohn Marino flags &= ~ECF_CONST;
3757*e4b17023SJohn Marino flags |= ECF_PURE;
3758*e4b17023SJohn Marino }
3759*e4b17023SJohn Marino
3760*e4b17023SJohn Marino if (MEM_P (val) && !must_copy)
3761*e4b17023SJohn Marino {
3762*e4b17023SJohn Marino tree val_expr = MEM_EXPR (val);
3763*e4b17023SJohn Marino if (val_expr)
3764*e4b17023SJohn Marino mark_addressable (val_expr);
3765*e4b17023SJohn Marino slot = val;
3766*e4b17023SJohn Marino }
3767*e4b17023SJohn Marino else
3768*e4b17023SJohn Marino {
3769*e4b17023SJohn Marino slot = assign_temp (lang_hooks.types.type_for_mode (mode, 0),
3770*e4b17023SJohn Marino 0, 1, 1);
3771*e4b17023SJohn Marino emit_move_insn (slot, val);
3772*e4b17023SJohn Marino }
3773*e4b17023SJohn Marino
3774*e4b17023SJohn Marino call_fusage = gen_rtx_EXPR_LIST (VOIDmode,
3775*e4b17023SJohn Marino gen_rtx_USE (VOIDmode, slot),
3776*e4b17023SJohn Marino call_fusage);
3777*e4b17023SJohn Marino if (must_copy)
3778*e4b17023SJohn Marino call_fusage = gen_rtx_EXPR_LIST (VOIDmode,
3779*e4b17023SJohn Marino gen_rtx_CLOBBER (VOIDmode,
3780*e4b17023SJohn Marino slot),
3781*e4b17023SJohn Marino call_fusage);
3782*e4b17023SJohn Marino
3783*e4b17023SJohn Marino mode = Pmode;
3784*e4b17023SJohn Marino val = force_operand (XEXP (slot, 0), NULL_RTX);
3785*e4b17023SJohn Marino }
3786*e4b17023SJohn Marino
3787*e4b17023SJohn Marino mode = promote_function_mode (NULL_TREE, mode, &unsigned_p, NULL_TREE, 0);
3788*e4b17023SJohn Marino argvec[count].mode = mode;
3789*e4b17023SJohn Marino argvec[count].value = convert_modes (mode, GET_MODE (val), val, unsigned_p);
3790*e4b17023SJohn Marino argvec[count].reg = targetm.calls.function_arg (args_so_far, mode,
3791*e4b17023SJohn Marino NULL_TREE, true);
3792*e4b17023SJohn Marino
3793*e4b17023SJohn Marino argvec[count].partial
3794*e4b17023SJohn Marino = targetm.calls.arg_partial_bytes (args_so_far, mode, NULL_TREE, 1);
3795*e4b17023SJohn Marino
3796*e4b17023SJohn Marino if (argvec[count].reg == 0
3797*e4b17023SJohn Marino || argvec[count].partial != 0
3798*e4b17023SJohn Marino || reg_parm_stack_space > 0)
3799*e4b17023SJohn Marino {
3800*e4b17023SJohn Marino locate_and_pad_parm (mode, NULL_TREE,
3801*e4b17023SJohn Marino #ifdef STACK_PARMS_IN_REG_PARM_AREA
3802*e4b17023SJohn Marino 1,
3803*e4b17023SJohn Marino #else
3804*e4b17023SJohn Marino argvec[count].reg != 0,
3805*e4b17023SJohn Marino #endif
3806*e4b17023SJohn Marino argvec[count].partial,
3807*e4b17023SJohn Marino NULL_TREE, &args_size, &argvec[count].locate);
3808*e4b17023SJohn Marino args_size.constant += argvec[count].locate.size.constant;
3809*e4b17023SJohn Marino gcc_assert (!argvec[count].locate.size.var);
3810*e4b17023SJohn Marino }
3811*e4b17023SJohn Marino #ifdef BLOCK_REG_PADDING
3812*e4b17023SJohn Marino else
3813*e4b17023SJohn Marino /* The argument is passed entirely in registers. See at which
3814*e4b17023SJohn Marino end it should be padded. */
3815*e4b17023SJohn Marino argvec[count].locate.where_pad =
3816*e4b17023SJohn Marino BLOCK_REG_PADDING (mode, NULL_TREE,
3817*e4b17023SJohn Marino GET_MODE_SIZE (mode) <= UNITS_PER_WORD);
3818*e4b17023SJohn Marino #endif
3819*e4b17023SJohn Marino
3820*e4b17023SJohn Marino targetm.calls.function_arg_advance (args_so_far, mode, (tree) 0, true);
3821*e4b17023SJohn Marino }
3822*e4b17023SJohn Marino
3823*e4b17023SJohn Marino /* If this machine requires an external definition for library
3824*e4b17023SJohn Marino functions, write one out. */
3825*e4b17023SJohn Marino assemble_external_libcall (fun);
3826*e4b17023SJohn Marino
3827*e4b17023SJohn Marino original_args_size = args_size;
3828*e4b17023SJohn Marino args_size.constant = (((args_size.constant
3829*e4b17023SJohn Marino + stack_pointer_delta
3830*e4b17023SJohn Marino + STACK_BYTES - 1)
3831*e4b17023SJohn Marino / STACK_BYTES
3832*e4b17023SJohn Marino * STACK_BYTES)
3833*e4b17023SJohn Marino - stack_pointer_delta);
3834*e4b17023SJohn Marino
3835*e4b17023SJohn Marino args_size.constant = MAX (args_size.constant,
3836*e4b17023SJohn Marino reg_parm_stack_space);
3837*e4b17023SJohn Marino
3838*e4b17023SJohn Marino if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl))))
3839*e4b17023SJohn Marino args_size.constant -= reg_parm_stack_space;
3840*e4b17023SJohn Marino
3841*e4b17023SJohn Marino if (args_size.constant > crtl->outgoing_args_size)
3842*e4b17023SJohn Marino crtl->outgoing_args_size = args_size.constant;
3843*e4b17023SJohn Marino
3844*e4b17023SJohn Marino if (flag_stack_usage_info && !ACCUMULATE_OUTGOING_ARGS)
3845*e4b17023SJohn Marino {
3846*e4b17023SJohn Marino int pushed = args_size.constant + pending_stack_adjust;
3847*e4b17023SJohn Marino if (pushed > current_function_pushed_stack_size)
3848*e4b17023SJohn Marino current_function_pushed_stack_size = pushed;
3849*e4b17023SJohn Marino }
3850*e4b17023SJohn Marino
3851*e4b17023SJohn Marino if (ACCUMULATE_OUTGOING_ARGS)
3852*e4b17023SJohn Marino {
3853*e4b17023SJohn Marino /* Since the stack pointer will never be pushed, it is possible for
3854*e4b17023SJohn Marino the evaluation of a parm to clobber something we have already
3855*e4b17023SJohn Marino written to the stack. Since most function calls on RISC machines
3856*e4b17023SJohn Marino do not use the stack, this is uncommon, but must work correctly.
3857*e4b17023SJohn Marino
3858*e4b17023SJohn Marino Therefore, we save any area of the stack that was already written
3859*e4b17023SJohn Marino and that we are using. Here we set up to do this by making a new
3860*e4b17023SJohn Marino stack usage map from the old one.
3861*e4b17023SJohn Marino
3862*e4b17023SJohn Marino Another approach might be to try to reorder the argument
3863*e4b17023SJohn Marino evaluations to avoid this conflicting stack usage. */
3864*e4b17023SJohn Marino
3865*e4b17023SJohn Marino needed = args_size.constant;
3866*e4b17023SJohn Marino
3867*e4b17023SJohn Marino /* Since we will be writing into the entire argument area, the
3868*e4b17023SJohn Marino map must be allocated for its entire size, not just the part that
3869*e4b17023SJohn Marino is the responsibility of the caller. */
3870*e4b17023SJohn Marino if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl))))
3871*e4b17023SJohn Marino needed += reg_parm_stack_space;
3872*e4b17023SJohn Marino
3873*e4b17023SJohn Marino #ifdef ARGS_GROW_DOWNWARD
3874*e4b17023SJohn Marino highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
3875*e4b17023SJohn Marino needed + 1);
3876*e4b17023SJohn Marino #else
3877*e4b17023SJohn Marino highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
3878*e4b17023SJohn Marino needed);
3879*e4b17023SJohn Marino #endif
3880*e4b17023SJohn Marino stack_usage_map_buf = XNEWVEC (char, highest_outgoing_arg_in_use);
3881*e4b17023SJohn Marino stack_usage_map = stack_usage_map_buf;
3882*e4b17023SJohn Marino
3883*e4b17023SJohn Marino if (initial_highest_arg_in_use)
3884*e4b17023SJohn Marino memcpy (stack_usage_map, initial_stack_usage_map,
3885*e4b17023SJohn Marino initial_highest_arg_in_use);
3886*e4b17023SJohn Marino
3887*e4b17023SJohn Marino if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
3888*e4b17023SJohn Marino memset (&stack_usage_map[initial_highest_arg_in_use], 0,
3889*e4b17023SJohn Marino highest_outgoing_arg_in_use - initial_highest_arg_in_use);
3890*e4b17023SJohn Marino needed = 0;
3891*e4b17023SJohn Marino
3892*e4b17023SJohn Marino /* We must be careful to use virtual regs before they're instantiated,
3893*e4b17023SJohn Marino and real regs afterwards. Loop optimization, for example, can create
3894*e4b17023SJohn Marino new libcalls after we've instantiated the virtual regs, and if we
3895*e4b17023SJohn Marino use virtuals anyway, they won't match the rtl patterns. */
3896*e4b17023SJohn Marino
3897*e4b17023SJohn Marino if (virtuals_instantiated)
3898*e4b17023SJohn Marino argblock = plus_constant (stack_pointer_rtx, STACK_POINTER_OFFSET);
3899*e4b17023SJohn Marino else
3900*e4b17023SJohn Marino argblock = virtual_outgoing_args_rtx;
3901*e4b17023SJohn Marino }
3902*e4b17023SJohn Marino else
3903*e4b17023SJohn Marino {
3904*e4b17023SJohn Marino if (!PUSH_ARGS)
3905*e4b17023SJohn Marino argblock = push_block (GEN_INT (args_size.constant), 0, 0);
3906*e4b17023SJohn Marino }
3907*e4b17023SJohn Marino
3908*e4b17023SJohn Marino /* If we push args individually in reverse order, perform stack alignment
3909*e4b17023SJohn Marino before the first push (the last arg). */
3910*e4b17023SJohn Marino if (argblock == 0 && PUSH_ARGS_REVERSED)
3911*e4b17023SJohn Marino anti_adjust_stack (GEN_INT (args_size.constant
3912*e4b17023SJohn Marino - original_args_size.constant));
3913*e4b17023SJohn Marino
3914*e4b17023SJohn Marino if (PUSH_ARGS_REVERSED)
3915*e4b17023SJohn Marino {
3916*e4b17023SJohn Marino inc = -1;
3917*e4b17023SJohn Marino argnum = nargs - 1;
3918*e4b17023SJohn Marino }
3919*e4b17023SJohn Marino else
3920*e4b17023SJohn Marino {
3921*e4b17023SJohn Marino inc = 1;
3922*e4b17023SJohn Marino argnum = 0;
3923*e4b17023SJohn Marino }
3924*e4b17023SJohn Marino
3925*e4b17023SJohn Marino #ifdef REG_PARM_STACK_SPACE
3926*e4b17023SJohn Marino if (ACCUMULATE_OUTGOING_ARGS)
3927*e4b17023SJohn Marino {
3928*e4b17023SJohn Marino /* The argument list is the property of the called routine and it
3929*e4b17023SJohn Marino may clobber it. If the fixed area has been used for previous
3930*e4b17023SJohn Marino parameters, we must save and restore it. */
3931*e4b17023SJohn Marino save_area = save_fixed_argument_area (reg_parm_stack_space, argblock,
3932*e4b17023SJohn Marino &low_to_save, &high_to_save);
3933*e4b17023SJohn Marino }
3934*e4b17023SJohn Marino #endif
3935*e4b17023SJohn Marino
3936*e4b17023SJohn Marino /* Push the args that need to be pushed. */
3937*e4b17023SJohn Marino
3938*e4b17023SJohn Marino /* ARGNUM indexes the ARGVEC array in the order in which the arguments
3939*e4b17023SJohn Marino are to be pushed. */
3940*e4b17023SJohn Marino for (count = 0; count < nargs; count++, argnum += inc)
3941*e4b17023SJohn Marino {
3942*e4b17023SJohn Marino enum machine_mode mode = argvec[argnum].mode;
3943*e4b17023SJohn Marino rtx val = argvec[argnum].value;
3944*e4b17023SJohn Marino rtx reg = argvec[argnum].reg;
3945*e4b17023SJohn Marino int partial = argvec[argnum].partial;
3946*e4b17023SJohn Marino unsigned int parm_align = argvec[argnum].locate.boundary;
3947*e4b17023SJohn Marino int lower_bound = 0, upper_bound = 0, i;
3948*e4b17023SJohn Marino
3949*e4b17023SJohn Marino if (! (reg != 0 && partial == 0))
3950*e4b17023SJohn Marino {
3951*e4b17023SJohn Marino rtx use;
3952*e4b17023SJohn Marino
3953*e4b17023SJohn Marino if (ACCUMULATE_OUTGOING_ARGS)
3954*e4b17023SJohn Marino {
3955*e4b17023SJohn Marino /* If this is being stored into a pre-allocated, fixed-size,
3956*e4b17023SJohn Marino stack area, save any previous data at that location. */
3957*e4b17023SJohn Marino
3958*e4b17023SJohn Marino #ifdef ARGS_GROW_DOWNWARD
3959*e4b17023SJohn Marino /* stack_slot is negative, but we want to index stack_usage_map
3960*e4b17023SJohn Marino with positive values. */
3961*e4b17023SJohn Marino upper_bound = -argvec[argnum].locate.slot_offset.constant + 1;
3962*e4b17023SJohn Marino lower_bound = upper_bound - argvec[argnum].locate.size.constant;
3963*e4b17023SJohn Marino #else
3964*e4b17023SJohn Marino lower_bound = argvec[argnum].locate.slot_offset.constant;
3965*e4b17023SJohn Marino upper_bound = lower_bound + argvec[argnum].locate.size.constant;
3966*e4b17023SJohn Marino #endif
3967*e4b17023SJohn Marino
3968*e4b17023SJohn Marino i = lower_bound;
3969*e4b17023SJohn Marino /* Don't worry about things in the fixed argument area;
3970*e4b17023SJohn Marino it has already been saved. */
3971*e4b17023SJohn Marino if (i < reg_parm_stack_space)
3972*e4b17023SJohn Marino i = reg_parm_stack_space;
3973*e4b17023SJohn Marino while (i < upper_bound && stack_usage_map[i] == 0)
3974*e4b17023SJohn Marino i++;
3975*e4b17023SJohn Marino
3976*e4b17023SJohn Marino if (i < upper_bound)
3977*e4b17023SJohn Marino {
3978*e4b17023SJohn Marino /* We need to make a save area. */
3979*e4b17023SJohn Marino unsigned int size
3980*e4b17023SJohn Marino = argvec[argnum].locate.size.constant * BITS_PER_UNIT;
3981*e4b17023SJohn Marino enum machine_mode save_mode
3982*e4b17023SJohn Marino = mode_for_size (size, MODE_INT, 1);
3983*e4b17023SJohn Marino rtx adr
3984*e4b17023SJohn Marino = plus_constant (argblock,
3985*e4b17023SJohn Marino argvec[argnum].locate.offset.constant);
3986*e4b17023SJohn Marino rtx stack_area
3987*e4b17023SJohn Marino = gen_rtx_MEM (save_mode, memory_address (save_mode, adr));
3988*e4b17023SJohn Marino
3989*e4b17023SJohn Marino if (save_mode == BLKmode)
3990*e4b17023SJohn Marino {
3991*e4b17023SJohn Marino argvec[argnum].save_area
3992*e4b17023SJohn Marino = assign_stack_temp (BLKmode,
3993*e4b17023SJohn Marino argvec[argnum].locate.size.constant,
3994*e4b17023SJohn Marino 0);
3995*e4b17023SJohn Marino
3996*e4b17023SJohn Marino emit_block_move (validize_mem (argvec[argnum].save_area),
3997*e4b17023SJohn Marino stack_area,
3998*e4b17023SJohn Marino GEN_INT (argvec[argnum].locate.size.constant),
3999*e4b17023SJohn Marino BLOCK_OP_CALL_PARM);
4000*e4b17023SJohn Marino }
4001*e4b17023SJohn Marino else
4002*e4b17023SJohn Marino {
4003*e4b17023SJohn Marino argvec[argnum].save_area = gen_reg_rtx (save_mode);
4004*e4b17023SJohn Marino
4005*e4b17023SJohn Marino emit_move_insn (argvec[argnum].save_area, stack_area);
4006*e4b17023SJohn Marino }
4007*e4b17023SJohn Marino }
4008*e4b17023SJohn Marino }
4009*e4b17023SJohn Marino
4010*e4b17023SJohn Marino emit_push_insn (val, mode, NULL_TREE, NULL_RTX, parm_align,
4011*e4b17023SJohn Marino partial, reg, 0, argblock,
4012*e4b17023SJohn Marino GEN_INT (argvec[argnum].locate.offset.constant),
4013*e4b17023SJohn Marino reg_parm_stack_space,
4014*e4b17023SJohn Marino ARGS_SIZE_RTX (argvec[argnum].locate.alignment_pad));
4015*e4b17023SJohn Marino
4016*e4b17023SJohn Marino /* Now mark the segment we just used. */
4017*e4b17023SJohn Marino if (ACCUMULATE_OUTGOING_ARGS)
4018*e4b17023SJohn Marino for (i = lower_bound; i < upper_bound; i++)
4019*e4b17023SJohn Marino stack_usage_map[i] = 1;
4020*e4b17023SJohn Marino
4021*e4b17023SJohn Marino NO_DEFER_POP;
4022*e4b17023SJohn Marino
4023*e4b17023SJohn Marino /* Indicate argument access so that alias.c knows that these
4024*e4b17023SJohn Marino values are live. */
4025*e4b17023SJohn Marino if (argblock)
4026*e4b17023SJohn Marino use = plus_constant (argblock,
4027*e4b17023SJohn Marino argvec[argnum].locate.offset.constant);
4028*e4b17023SJohn Marino else
4029*e4b17023SJohn Marino /* When arguments are pushed, trying to tell alias.c where
4030*e4b17023SJohn Marino exactly this argument is won't work, because the
4031*e4b17023SJohn Marino auto-increment causes confusion. So we merely indicate
4032*e4b17023SJohn Marino that we access something with a known mode somewhere on
4033*e4b17023SJohn Marino the stack. */
4034*e4b17023SJohn Marino use = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
4035*e4b17023SJohn Marino gen_rtx_SCRATCH (Pmode));
4036*e4b17023SJohn Marino use = gen_rtx_MEM (argvec[argnum].mode, use);
4037*e4b17023SJohn Marino use = gen_rtx_USE (VOIDmode, use);
4038*e4b17023SJohn Marino call_fusage = gen_rtx_EXPR_LIST (VOIDmode, use, call_fusage);
4039*e4b17023SJohn Marino }
4040*e4b17023SJohn Marino }
4041*e4b17023SJohn Marino
4042*e4b17023SJohn Marino /* If we pushed args in forward order, perform stack alignment
4043*e4b17023SJohn Marino after pushing the last arg. */
4044*e4b17023SJohn Marino if (argblock == 0 && !PUSH_ARGS_REVERSED)
4045*e4b17023SJohn Marino anti_adjust_stack (GEN_INT (args_size.constant
4046*e4b17023SJohn Marino - original_args_size.constant));
4047*e4b17023SJohn Marino
4048*e4b17023SJohn Marino if (PUSH_ARGS_REVERSED)
4049*e4b17023SJohn Marino argnum = nargs - 1;
4050*e4b17023SJohn Marino else
4051*e4b17023SJohn Marino argnum = 0;
4052*e4b17023SJohn Marino
4053*e4b17023SJohn Marino fun = prepare_call_address (NULL, fun, NULL, &call_fusage, 0, 0);
4054*e4b17023SJohn Marino
4055*e4b17023SJohn Marino /* Now load any reg parms into their regs. */
4056*e4b17023SJohn Marino
4057*e4b17023SJohn Marino /* ARGNUM indexes the ARGVEC array in the order in which the arguments
4058*e4b17023SJohn Marino are to be pushed. */
4059*e4b17023SJohn Marino for (count = 0; count < nargs; count++, argnum += inc)
4060*e4b17023SJohn Marino {
4061*e4b17023SJohn Marino enum machine_mode mode = argvec[argnum].mode;
4062*e4b17023SJohn Marino rtx val = argvec[argnum].value;
4063*e4b17023SJohn Marino rtx reg = argvec[argnum].reg;
4064*e4b17023SJohn Marino int partial = argvec[argnum].partial;
4065*e4b17023SJohn Marino #ifdef BLOCK_REG_PADDING
4066*e4b17023SJohn Marino int size = 0;
4067*e4b17023SJohn Marino #endif
4068*e4b17023SJohn Marino
4069*e4b17023SJohn Marino /* Handle calls that pass values in multiple non-contiguous
4070*e4b17023SJohn Marino locations. The PA64 has examples of this for library calls. */
4071*e4b17023SJohn Marino if (reg != 0 && GET_CODE (reg) == PARALLEL)
4072*e4b17023SJohn Marino emit_group_load (reg, val, NULL_TREE, GET_MODE_SIZE (mode));
4073*e4b17023SJohn Marino else if (reg != 0 && partial == 0)
4074*e4b17023SJohn Marino {
4075*e4b17023SJohn Marino emit_move_insn (reg, val);
4076*e4b17023SJohn Marino #ifdef BLOCK_REG_PADDING
4077*e4b17023SJohn Marino size = GET_MODE_SIZE (argvec[argnum].mode);
4078*e4b17023SJohn Marino
4079*e4b17023SJohn Marino /* Copied from load_register_parameters. */
4080*e4b17023SJohn Marino
4081*e4b17023SJohn Marino /* Handle case where we have a value that needs shifting
4082*e4b17023SJohn Marino up to the msb. eg. a QImode value and we're padding
4083*e4b17023SJohn Marino upward on a BYTES_BIG_ENDIAN machine. */
4084*e4b17023SJohn Marino if (size < UNITS_PER_WORD
4085*e4b17023SJohn Marino && (argvec[argnum].locate.where_pad
4086*e4b17023SJohn Marino == (BYTES_BIG_ENDIAN ? upward : downward)))
4087*e4b17023SJohn Marino {
4088*e4b17023SJohn Marino rtx x;
4089*e4b17023SJohn Marino int shift = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
4090*e4b17023SJohn Marino
4091*e4b17023SJohn Marino /* Assigning REG here rather than a temp makes CALL_FUSAGE
4092*e4b17023SJohn Marino report the whole reg as used. Strictly speaking, the
4093*e4b17023SJohn Marino call only uses SIZE bytes at the msb end, but it doesn't
4094*e4b17023SJohn Marino seem worth generating rtl to say that. */
4095*e4b17023SJohn Marino reg = gen_rtx_REG (word_mode, REGNO (reg));
4096*e4b17023SJohn Marino x = expand_shift (LSHIFT_EXPR, word_mode, reg, shift, reg, 1);
4097*e4b17023SJohn Marino if (x != reg)
4098*e4b17023SJohn Marino emit_move_insn (reg, x);
4099*e4b17023SJohn Marino }
4100*e4b17023SJohn Marino #endif
4101*e4b17023SJohn Marino }
4102*e4b17023SJohn Marino
4103*e4b17023SJohn Marino NO_DEFER_POP;
4104*e4b17023SJohn Marino }
4105*e4b17023SJohn Marino
4106*e4b17023SJohn Marino /* Any regs containing parms remain in use through the call. */
4107*e4b17023SJohn Marino for (count = 0; count < nargs; count++)
4108*e4b17023SJohn Marino {
4109*e4b17023SJohn Marino rtx reg = argvec[count].reg;
4110*e4b17023SJohn Marino if (reg != 0 && GET_CODE (reg) == PARALLEL)
4111*e4b17023SJohn Marino use_group_regs (&call_fusage, reg);
4112*e4b17023SJohn Marino else if (reg != 0)
4113*e4b17023SJohn Marino {
4114*e4b17023SJohn Marino int partial = argvec[count].partial;
4115*e4b17023SJohn Marino if (partial)
4116*e4b17023SJohn Marino {
4117*e4b17023SJohn Marino int nregs;
4118*e4b17023SJohn Marino gcc_assert (partial % UNITS_PER_WORD == 0);
4119*e4b17023SJohn Marino nregs = partial / UNITS_PER_WORD;
4120*e4b17023SJohn Marino use_regs (&call_fusage, REGNO (reg), nregs);
4121*e4b17023SJohn Marino }
4122*e4b17023SJohn Marino else
4123*e4b17023SJohn Marino use_reg (&call_fusage, reg);
4124*e4b17023SJohn Marino }
4125*e4b17023SJohn Marino }
4126*e4b17023SJohn Marino
4127*e4b17023SJohn Marino /* Pass the function the address in which to return a structure value. */
4128*e4b17023SJohn Marino if (mem_value != 0 && struct_value != 0 && ! pcc_struct_value)
4129*e4b17023SJohn Marino {
4130*e4b17023SJohn Marino emit_move_insn (struct_value,
4131*e4b17023SJohn Marino force_reg (Pmode,
4132*e4b17023SJohn Marino force_operand (XEXP (mem_value, 0),
4133*e4b17023SJohn Marino NULL_RTX)));
4134*e4b17023SJohn Marino if (REG_P (struct_value))
4135*e4b17023SJohn Marino use_reg (&call_fusage, struct_value);
4136*e4b17023SJohn Marino }
4137*e4b17023SJohn Marino
4138*e4b17023SJohn Marino /* Don't allow popping to be deferred, since then
4139*e4b17023SJohn Marino cse'ing of library calls could delete a call and leave the pop. */
4140*e4b17023SJohn Marino NO_DEFER_POP;
4141*e4b17023SJohn Marino valreg = (mem_value == 0 && outmode != VOIDmode
4142*e4b17023SJohn Marino ? hard_libcall_value (outmode, orgfun) : NULL_RTX);
4143*e4b17023SJohn Marino
4144*e4b17023SJohn Marino /* Stack must be properly aligned now. */
4145*e4b17023SJohn Marino gcc_assert (!(stack_pointer_delta
4146*e4b17023SJohn Marino & (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT - 1)));
4147*e4b17023SJohn Marino
4148*e4b17023SJohn Marino before_call = get_last_insn ();
4149*e4b17023SJohn Marino
4150*e4b17023SJohn Marino /* We pass the old value of inhibit_defer_pop + 1 to emit_call_1, which
4151*e4b17023SJohn Marino will set inhibit_defer_pop to that value. */
4152*e4b17023SJohn Marino /* The return type is needed to decide how many bytes the function pops.
4153*e4b17023SJohn Marino Signedness plays no role in that, so for simplicity, we pretend it's
4154*e4b17023SJohn Marino always signed. We also assume that the list of arguments passed has
4155*e4b17023SJohn Marino no impact, so we pretend it is unknown. */
4156*e4b17023SJohn Marino
4157*e4b17023SJohn Marino emit_call_1 (fun, NULL,
4158*e4b17023SJohn Marino get_identifier (XSTR (orgfun, 0)),
4159*e4b17023SJohn Marino build_function_type (tfom, NULL_TREE),
4160*e4b17023SJohn Marino original_args_size.constant, args_size.constant,
4161*e4b17023SJohn Marino struct_value_size,
4162*e4b17023SJohn Marino targetm.calls.function_arg (args_so_far,
4163*e4b17023SJohn Marino VOIDmode, void_type_node, true),
4164*e4b17023SJohn Marino valreg,
4165*e4b17023SJohn Marino old_inhibit_defer_pop + 1, call_fusage, flags, args_so_far);
4166*e4b17023SJohn Marino
4167*e4b17023SJohn Marino /* Right-shift returned value if necessary. */
4168*e4b17023SJohn Marino if (!pcc_struct_value
4169*e4b17023SJohn Marino && TYPE_MODE (tfom) != BLKmode
4170*e4b17023SJohn Marino && targetm.calls.return_in_msb (tfom))
4171*e4b17023SJohn Marino {
4172*e4b17023SJohn Marino shift_return_value (TYPE_MODE (tfom), false, valreg);
4173*e4b17023SJohn Marino valreg = gen_rtx_REG (TYPE_MODE (tfom), REGNO (valreg));
4174*e4b17023SJohn Marino }
4175*e4b17023SJohn Marino
4176*e4b17023SJohn Marino /* For calls to `setjmp', etc., inform function.c:setjmp_warnings
4177*e4b17023SJohn Marino that it should complain if nonvolatile values are live. For
4178*e4b17023SJohn Marino functions that cannot return, inform flow that control does not
4179*e4b17023SJohn Marino fall through. */
4180*e4b17023SJohn Marino
4181*e4b17023SJohn Marino if (flags & ECF_NORETURN)
4182*e4b17023SJohn Marino {
4183*e4b17023SJohn Marino /* The barrier note must be emitted
4184*e4b17023SJohn Marino immediately after the CALL_INSN. Some ports emit more than
4185*e4b17023SJohn Marino just a CALL_INSN above, so we must search for it here. */
4186*e4b17023SJohn Marino
4187*e4b17023SJohn Marino rtx last = get_last_insn ();
4188*e4b17023SJohn Marino while (!CALL_P (last))
4189*e4b17023SJohn Marino {
4190*e4b17023SJohn Marino last = PREV_INSN (last);
4191*e4b17023SJohn Marino /* There was no CALL_INSN? */
4192*e4b17023SJohn Marino gcc_assert (last != before_call);
4193*e4b17023SJohn Marino }
4194*e4b17023SJohn Marino
4195*e4b17023SJohn Marino emit_barrier_after (last);
4196*e4b17023SJohn Marino }
4197*e4b17023SJohn Marino
4198*e4b17023SJohn Marino /* Now restore inhibit_defer_pop to its actual original value. */
4199*e4b17023SJohn Marino OK_DEFER_POP;
4200*e4b17023SJohn Marino
4201*e4b17023SJohn Marino pop_temp_slots ();
4202*e4b17023SJohn Marino
4203*e4b17023SJohn Marino /* Copy the value to the right place. */
4204*e4b17023SJohn Marino if (outmode != VOIDmode && retval)
4205*e4b17023SJohn Marino {
4206*e4b17023SJohn Marino if (mem_value)
4207*e4b17023SJohn Marino {
4208*e4b17023SJohn Marino if (value == 0)
4209*e4b17023SJohn Marino value = mem_value;
4210*e4b17023SJohn Marino if (value != mem_value)
4211*e4b17023SJohn Marino emit_move_insn (value, mem_value);
4212*e4b17023SJohn Marino }
4213*e4b17023SJohn Marino else if (GET_CODE (valreg) == PARALLEL)
4214*e4b17023SJohn Marino {
4215*e4b17023SJohn Marino if (value == 0)
4216*e4b17023SJohn Marino value = gen_reg_rtx (outmode);
4217*e4b17023SJohn Marino emit_group_store (value, valreg, NULL_TREE, GET_MODE_SIZE (outmode));
4218*e4b17023SJohn Marino }
4219*e4b17023SJohn Marino else
4220*e4b17023SJohn Marino {
4221*e4b17023SJohn Marino /* Convert to the proper mode if a promotion has been active. */
4222*e4b17023SJohn Marino if (GET_MODE (valreg) != outmode)
4223*e4b17023SJohn Marino {
4224*e4b17023SJohn Marino int unsignedp = TYPE_UNSIGNED (tfom);
4225*e4b17023SJohn Marino
4226*e4b17023SJohn Marino gcc_assert (promote_function_mode (tfom, outmode, &unsignedp,
4227*e4b17023SJohn Marino fndecl ? TREE_TYPE (fndecl) : fntype, 1)
4228*e4b17023SJohn Marino == GET_MODE (valreg));
4229*e4b17023SJohn Marino valreg = convert_modes (outmode, GET_MODE (valreg), valreg, 0);
4230*e4b17023SJohn Marino }
4231*e4b17023SJohn Marino
4232*e4b17023SJohn Marino if (value != 0)
4233*e4b17023SJohn Marino emit_move_insn (value, valreg);
4234*e4b17023SJohn Marino else
4235*e4b17023SJohn Marino value = valreg;
4236*e4b17023SJohn Marino }
4237*e4b17023SJohn Marino }
4238*e4b17023SJohn Marino
4239*e4b17023SJohn Marino if (ACCUMULATE_OUTGOING_ARGS)
4240*e4b17023SJohn Marino {
4241*e4b17023SJohn Marino #ifdef REG_PARM_STACK_SPACE
4242*e4b17023SJohn Marino if (save_area)
4243*e4b17023SJohn Marino restore_fixed_argument_area (save_area, argblock,
4244*e4b17023SJohn Marino high_to_save, low_to_save);
4245*e4b17023SJohn Marino #endif
4246*e4b17023SJohn Marino
4247*e4b17023SJohn Marino /* If we saved any argument areas, restore them. */
4248*e4b17023SJohn Marino for (count = 0; count < nargs; count++)
4249*e4b17023SJohn Marino if (argvec[count].save_area)
4250*e4b17023SJohn Marino {
4251*e4b17023SJohn Marino enum machine_mode save_mode = GET_MODE (argvec[count].save_area);
4252*e4b17023SJohn Marino rtx adr = plus_constant (argblock,
4253*e4b17023SJohn Marino argvec[count].locate.offset.constant);
4254*e4b17023SJohn Marino rtx stack_area = gen_rtx_MEM (save_mode,
4255*e4b17023SJohn Marino memory_address (save_mode, adr));
4256*e4b17023SJohn Marino
4257*e4b17023SJohn Marino if (save_mode == BLKmode)
4258*e4b17023SJohn Marino emit_block_move (stack_area,
4259*e4b17023SJohn Marino validize_mem (argvec[count].save_area),
4260*e4b17023SJohn Marino GEN_INT (argvec[count].locate.size.constant),
4261*e4b17023SJohn Marino BLOCK_OP_CALL_PARM);
4262*e4b17023SJohn Marino else
4263*e4b17023SJohn Marino emit_move_insn (stack_area, argvec[count].save_area);
4264*e4b17023SJohn Marino }
4265*e4b17023SJohn Marino
4266*e4b17023SJohn Marino highest_outgoing_arg_in_use = initial_highest_arg_in_use;
4267*e4b17023SJohn Marino stack_usage_map = initial_stack_usage_map;
4268*e4b17023SJohn Marino }
4269*e4b17023SJohn Marino
4270*e4b17023SJohn Marino free (stack_usage_map_buf);
4271*e4b17023SJohn Marino
4272*e4b17023SJohn Marino return value;
4273*e4b17023SJohn Marino
4274*e4b17023SJohn Marino }
4275*e4b17023SJohn Marino
4276*e4b17023SJohn Marino /* Output a library call to function FUN (a SYMBOL_REF rtx)
4277*e4b17023SJohn Marino (emitting the queue unless NO_QUEUE is nonzero),
4278*e4b17023SJohn Marino for a value of mode OUTMODE,
4279*e4b17023SJohn Marino with NARGS different arguments, passed as alternating rtx values
4280*e4b17023SJohn Marino and machine_modes to convert them to.
4281*e4b17023SJohn Marino
4282*e4b17023SJohn Marino FN_TYPE should be LCT_NORMAL for `normal' calls, LCT_CONST for
4283*e4b17023SJohn Marino `const' calls, LCT_PURE for `pure' calls, or other LCT_ value for
4284*e4b17023SJohn Marino other types of library calls. */
4285*e4b17023SJohn Marino
4286*e4b17023SJohn Marino void
emit_library_call(rtx orgfun,enum libcall_type fn_type,enum machine_mode outmode,int nargs,...)4287*e4b17023SJohn Marino emit_library_call (rtx orgfun, enum libcall_type fn_type,
4288*e4b17023SJohn Marino enum machine_mode outmode, int nargs, ...)
4289*e4b17023SJohn Marino {
4290*e4b17023SJohn Marino va_list p;
4291*e4b17023SJohn Marino
4292*e4b17023SJohn Marino va_start (p, nargs);
4293*e4b17023SJohn Marino emit_library_call_value_1 (0, orgfun, NULL_RTX, fn_type, outmode, nargs, p);
4294*e4b17023SJohn Marino va_end (p);
4295*e4b17023SJohn Marino }
4296*e4b17023SJohn Marino
4297*e4b17023SJohn Marino /* Like emit_library_call except that an extra argument, VALUE,
4298*e4b17023SJohn Marino comes second and says where to store the result.
4299*e4b17023SJohn Marino (If VALUE is zero, this function chooses a convenient way
4300*e4b17023SJohn Marino to return the value.
4301*e4b17023SJohn Marino
4302*e4b17023SJohn Marino This function returns an rtx for where the value is to be found.
4303*e4b17023SJohn Marino If VALUE is nonzero, VALUE is returned. */
4304*e4b17023SJohn Marino
4305*e4b17023SJohn Marino rtx
emit_library_call_value(rtx orgfun,rtx value,enum libcall_type fn_type,enum machine_mode outmode,int nargs,...)4306*e4b17023SJohn Marino emit_library_call_value (rtx orgfun, rtx value,
4307*e4b17023SJohn Marino enum libcall_type fn_type,
4308*e4b17023SJohn Marino enum machine_mode outmode, int nargs, ...)
4309*e4b17023SJohn Marino {
4310*e4b17023SJohn Marino rtx result;
4311*e4b17023SJohn Marino va_list p;
4312*e4b17023SJohn Marino
4313*e4b17023SJohn Marino va_start (p, nargs);
4314*e4b17023SJohn Marino result = emit_library_call_value_1 (1, orgfun, value, fn_type, outmode,
4315*e4b17023SJohn Marino nargs, p);
4316*e4b17023SJohn Marino va_end (p);
4317*e4b17023SJohn Marino
4318*e4b17023SJohn Marino return result;
4319*e4b17023SJohn Marino }
4320*e4b17023SJohn Marino
4321*e4b17023SJohn Marino /* Store a single argument for a function call
4322*e4b17023SJohn Marino into the register or memory area where it must be passed.
4323*e4b17023SJohn Marino *ARG describes the argument value and where to pass it.
4324*e4b17023SJohn Marino
4325*e4b17023SJohn Marino ARGBLOCK is the address of the stack-block for all the arguments,
4326*e4b17023SJohn Marino or 0 on a machine where arguments are pushed individually.
4327*e4b17023SJohn Marino
4328*e4b17023SJohn Marino ECF_MAY_BE_ALLOCA set in FLAGS says this could be a call to `alloca'
4329*e4b17023SJohn Marino so we must be careful about how the stack is used.
4330*e4b17023SJohn Marino
4331*e4b17023SJohn Marino VARIABLE_SIZE nonzero says that this was a variable-sized outgoing
4332*e4b17023SJohn Marino argument stack. This is used if ACCUMULATE_OUTGOING_ARGS to indicate
4333*e4b17023SJohn Marino that we need not worry about saving and restoring the stack.
4334*e4b17023SJohn Marino
4335*e4b17023SJohn Marino FNDECL is the declaration of the function we are calling.
4336*e4b17023SJohn Marino
4337*e4b17023SJohn Marino Return nonzero if this arg should cause sibcall failure,
4338*e4b17023SJohn Marino zero otherwise. */
4339*e4b17023SJohn Marino
4340*e4b17023SJohn Marino static int
store_one_arg(struct arg_data * arg,rtx argblock,int flags,int variable_size ATTRIBUTE_UNUSED,int reg_parm_stack_space)4341*e4b17023SJohn Marino store_one_arg (struct arg_data *arg, rtx argblock, int flags,
4342*e4b17023SJohn Marino int variable_size ATTRIBUTE_UNUSED, int reg_parm_stack_space)
4343*e4b17023SJohn Marino {
4344*e4b17023SJohn Marino tree pval = arg->tree_value;
4345*e4b17023SJohn Marino rtx reg = 0;
4346*e4b17023SJohn Marino int partial = 0;
4347*e4b17023SJohn Marino int used = 0;
4348*e4b17023SJohn Marino int i, lower_bound = 0, upper_bound = 0;
4349*e4b17023SJohn Marino int sibcall_failure = 0;
4350*e4b17023SJohn Marino
4351*e4b17023SJohn Marino if (TREE_CODE (pval) == ERROR_MARK)
4352*e4b17023SJohn Marino return 1;
4353*e4b17023SJohn Marino
4354*e4b17023SJohn Marino /* Push a new temporary level for any temporaries we make for
4355*e4b17023SJohn Marino this argument. */
4356*e4b17023SJohn Marino push_temp_slots ();
4357*e4b17023SJohn Marino
4358*e4b17023SJohn Marino if (ACCUMULATE_OUTGOING_ARGS && !(flags & ECF_SIBCALL))
4359*e4b17023SJohn Marino {
4360*e4b17023SJohn Marino /* If this is being stored into a pre-allocated, fixed-size, stack area,
4361*e4b17023SJohn Marino save any previous data at that location. */
4362*e4b17023SJohn Marino if (argblock && ! variable_size && arg->stack)
4363*e4b17023SJohn Marino {
4364*e4b17023SJohn Marino #ifdef ARGS_GROW_DOWNWARD
4365*e4b17023SJohn Marino /* stack_slot is negative, but we want to index stack_usage_map
4366*e4b17023SJohn Marino with positive values. */
4367*e4b17023SJohn Marino if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
4368*e4b17023SJohn Marino upper_bound = -INTVAL (XEXP (XEXP (arg->stack_slot, 0), 1)) + 1;
4369*e4b17023SJohn Marino else
4370*e4b17023SJohn Marino upper_bound = 0;
4371*e4b17023SJohn Marino
4372*e4b17023SJohn Marino lower_bound = upper_bound - arg->locate.size.constant;
4373*e4b17023SJohn Marino #else
4374*e4b17023SJohn Marino if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
4375*e4b17023SJohn Marino lower_bound = INTVAL (XEXP (XEXP (arg->stack_slot, 0), 1));
4376*e4b17023SJohn Marino else
4377*e4b17023SJohn Marino lower_bound = 0;
4378*e4b17023SJohn Marino
4379*e4b17023SJohn Marino upper_bound = lower_bound + arg->locate.size.constant;
4380*e4b17023SJohn Marino #endif
4381*e4b17023SJohn Marino
4382*e4b17023SJohn Marino i = lower_bound;
4383*e4b17023SJohn Marino /* Don't worry about things in the fixed argument area;
4384*e4b17023SJohn Marino it has already been saved. */
4385*e4b17023SJohn Marino if (i < reg_parm_stack_space)
4386*e4b17023SJohn Marino i = reg_parm_stack_space;
4387*e4b17023SJohn Marino while (i < upper_bound && stack_usage_map[i] == 0)
4388*e4b17023SJohn Marino i++;
4389*e4b17023SJohn Marino
4390*e4b17023SJohn Marino if (i < upper_bound)
4391*e4b17023SJohn Marino {
4392*e4b17023SJohn Marino /* We need to make a save area. */
4393*e4b17023SJohn Marino unsigned int size = arg->locate.size.constant * BITS_PER_UNIT;
4394*e4b17023SJohn Marino enum machine_mode save_mode = mode_for_size (size, MODE_INT, 1);
4395*e4b17023SJohn Marino rtx adr = memory_address (save_mode, XEXP (arg->stack_slot, 0));
4396*e4b17023SJohn Marino rtx stack_area = gen_rtx_MEM (save_mode, adr);
4397*e4b17023SJohn Marino
4398*e4b17023SJohn Marino if (save_mode == BLKmode)
4399*e4b17023SJohn Marino {
4400*e4b17023SJohn Marino tree ot = TREE_TYPE (arg->tree_value);
4401*e4b17023SJohn Marino tree nt = build_qualified_type (ot, (TYPE_QUALS (ot)
4402*e4b17023SJohn Marino | TYPE_QUAL_CONST));
4403*e4b17023SJohn Marino
4404*e4b17023SJohn Marino arg->save_area = assign_temp (nt, 0, 1, 1);
4405*e4b17023SJohn Marino preserve_temp_slots (arg->save_area);
4406*e4b17023SJohn Marino emit_block_move (validize_mem (arg->save_area), stack_area,
4407*e4b17023SJohn Marino GEN_INT (arg->locate.size.constant),
4408*e4b17023SJohn Marino BLOCK_OP_CALL_PARM);
4409*e4b17023SJohn Marino }
4410*e4b17023SJohn Marino else
4411*e4b17023SJohn Marino {
4412*e4b17023SJohn Marino arg->save_area = gen_reg_rtx (save_mode);
4413*e4b17023SJohn Marino emit_move_insn (arg->save_area, stack_area);
4414*e4b17023SJohn Marino }
4415*e4b17023SJohn Marino }
4416*e4b17023SJohn Marino }
4417*e4b17023SJohn Marino }
4418*e4b17023SJohn Marino
4419*e4b17023SJohn Marino /* If this isn't going to be placed on both the stack and in registers,
4420*e4b17023SJohn Marino set up the register and number of words. */
4421*e4b17023SJohn Marino if (! arg->pass_on_stack)
4422*e4b17023SJohn Marino {
4423*e4b17023SJohn Marino if (flags & ECF_SIBCALL)
4424*e4b17023SJohn Marino reg = arg->tail_call_reg;
4425*e4b17023SJohn Marino else
4426*e4b17023SJohn Marino reg = arg->reg;
4427*e4b17023SJohn Marino partial = arg->partial;
4428*e4b17023SJohn Marino }
4429*e4b17023SJohn Marino
4430*e4b17023SJohn Marino /* Being passed entirely in a register. We shouldn't be called in
4431*e4b17023SJohn Marino this case. */
4432*e4b17023SJohn Marino gcc_assert (reg == 0 || partial != 0);
4433*e4b17023SJohn Marino
4434*e4b17023SJohn Marino /* If this arg needs special alignment, don't load the registers
4435*e4b17023SJohn Marino here. */
4436*e4b17023SJohn Marino if (arg->n_aligned_regs != 0)
4437*e4b17023SJohn Marino reg = 0;
4438*e4b17023SJohn Marino
4439*e4b17023SJohn Marino /* If this is being passed partially in a register, we can't evaluate
4440*e4b17023SJohn Marino it directly into its stack slot. Otherwise, we can. */
4441*e4b17023SJohn Marino if (arg->value == 0)
4442*e4b17023SJohn Marino {
4443*e4b17023SJohn Marino /* stack_arg_under_construction is nonzero if a function argument is
4444*e4b17023SJohn Marino being evaluated directly into the outgoing argument list and
4445*e4b17023SJohn Marino expand_call must take special action to preserve the argument list
4446*e4b17023SJohn Marino if it is called recursively.
4447*e4b17023SJohn Marino
4448*e4b17023SJohn Marino For scalar function arguments stack_usage_map is sufficient to
4449*e4b17023SJohn Marino determine which stack slots must be saved and restored. Scalar
4450*e4b17023SJohn Marino arguments in general have pass_on_stack == 0.
4451*e4b17023SJohn Marino
4452*e4b17023SJohn Marino If this argument is initialized by a function which takes the
4453*e4b17023SJohn Marino address of the argument (a C++ constructor or a C function
4454*e4b17023SJohn Marino returning a BLKmode structure), then stack_usage_map is
4455*e4b17023SJohn Marino insufficient and expand_call must push the stack around the
4456*e4b17023SJohn Marino function call. Such arguments have pass_on_stack == 1.
4457*e4b17023SJohn Marino
4458*e4b17023SJohn Marino Note that it is always safe to set stack_arg_under_construction,
4459*e4b17023SJohn Marino but this generates suboptimal code if set when not needed. */
4460*e4b17023SJohn Marino
4461*e4b17023SJohn Marino if (arg->pass_on_stack)
4462*e4b17023SJohn Marino stack_arg_under_construction++;
4463*e4b17023SJohn Marino
4464*e4b17023SJohn Marino arg->value = expand_expr (pval,
4465*e4b17023SJohn Marino (partial
4466*e4b17023SJohn Marino || TYPE_MODE (TREE_TYPE (pval)) != arg->mode)
4467*e4b17023SJohn Marino ? NULL_RTX : arg->stack,
4468*e4b17023SJohn Marino VOIDmode, EXPAND_STACK_PARM);
4469*e4b17023SJohn Marino
4470*e4b17023SJohn Marino /* If we are promoting object (or for any other reason) the mode
4471*e4b17023SJohn Marino doesn't agree, convert the mode. */
4472*e4b17023SJohn Marino
4473*e4b17023SJohn Marino if (arg->mode != TYPE_MODE (TREE_TYPE (pval)))
4474*e4b17023SJohn Marino arg->value = convert_modes (arg->mode, TYPE_MODE (TREE_TYPE (pval)),
4475*e4b17023SJohn Marino arg->value, arg->unsignedp);
4476*e4b17023SJohn Marino
4477*e4b17023SJohn Marino if (arg->pass_on_stack)
4478*e4b17023SJohn Marino stack_arg_under_construction--;
4479*e4b17023SJohn Marino }
4480*e4b17023SJohn Marino
4481*e4b17023SJohn Marino /* Check for overlap with already clobbered argument area. */
4482*e4b17023SJohn Marino if ((flags & ECF_SIBCALL)
4483*e4b17023SJohn Marino && MEM_P (arg->value)
4484*e4b17023SJohn Marino && mem_overlaps_already_clobbered_arg_p (XEXP (arg->value, 0),
4485*e4b17023SJohn Marino arg->locate.size.constant))
4486*e4b17023SJohn Marino sibcall_failure = 1;
4487*e4b17023SJohn Marino
4488*e4b17023SJohn Marino /* Don't allow anything left on stack from computation
4489*e4b17023SJohn Marino of argument to alloca. */
4490*e4b17023SJohn Marino if (flags & ECF_MAY_BE_ALLOCA)
4491*e4b17023SJohn Marino do_pending_stack_adjust ();
4492*e4b17023SJohn Marino
4493*e4b17023SJohn Marino if (arg->value == arg->stack)
4494*e4b17023SJohn Marino /* If the value is already in the stack slot, we are done. */
4495*e4b17023SJohn Marino ;
4496*e4b17023SJohn Marino else if (arg->mode != BLKmode)
4497*e4b17023SJohn Marino {
4498*e4b17023SJohn Marino int size;
4499*e4b17023SJohn Marino unsigned int parm_align;
4500*e4b17023SJohn Marino
4501*e4b17023SJohn Marino /* Argument is a scalar, not entirely passed in registers.
4502*e4b17023SJohn Marino (If part is passed in registers, arg->partial says how much
4503*e4b17023SJohn Marino and emit_push_insn will take care of putting it there.)
4504*e4b17023SJohn Marino
4505*e4b17023SJohn Marino Push it, and if its size is less than the
4506*e4b17023SJohn Marino amount of space allocated to it,
4507*e4b17023SJohn Marino also bump stack pointer by the additional space.
4508*e4b17023SJohn Marino Note that in C the default argument promotions
4509*e4b17023SJohn Marino will prevent such mismatches. */
4510*e4b17023SJohn Marino
4511*e4b17023SJohn Marino size = GET_MODE_SIZE (arg->mode);
4512*e4b17023SJohn Marino /* Compute how much space the push instruction will push.
4513*e4b17023SJohn Marino On many machines, pushing a byte will advance the stack
4514*e4b17023SJohn Marino pointer by a halfword. */
4515*e4b17023SJohn Marino #ifdef PUSH_ROUNDING
4516*e4b17023SJohn Marino size = PUSH_ROUNDING (size);
4517*e4b17023SJohn Marino #endif
4518*e4b17023SJohn Marino used = size;
4519*e4b17023SJohn Marino
4520*e4b17023SJohn Marino /* Compute how much space the argument should get:
4521*e4b17023SJohn Marino round up to a multiple of the alignment for arguments. */
4522*e4b17023SJohn Marino if (none != FUNCTION_ARG_PADDING (arg->mode, TREE_TYPE (pval)))
4523*e4b17023SJohn Marino used = (((size + PARM_BOUNDARY / BITS_PER_UNIT - 1)
4524*e4b17023SJohn Marino / (PARM_BOUNDARY / BITS_PER_UNIT))
4525*e4b17023SJohn Marino * (PARM_BOUNDARY / BITS_PER_UNIT));
4526*e4b17023SJohn Marino
4527*e4b17023SJohn Marino /* Compute the alignment of the pushed argument. */
4528*e4b17023SJohn Marino parm_align = arg->locate.boundary;
4529*e4b17023SJohn Marino if (FUNCTION_ARG_PADDING (arg->mode, TREE_TYPE (pval)) == downward)
4530*e4b17023SJohn Marino {
4531*e4b17023SJohn Marino int pad = used - size;
4532*e4b17023SJohn Marino if (pad)
4533*e4b17023SJohn Marino {
4534*e4b17023SJohn Marino unsigned int pad_align = (pad & -pad) * BITS_PER_UNIT;
4535*e4b17023SJohn Marino parm_align = MIN (parm_align, pad_align);
4536*e4b17023SJohn Marino }
4537*e4b17023SJohn Marino }
4538*e4b17023SJohn Marino
4539*e4b17023SJohn Marino /* This isn't already where we want it on the stack, so put it there.
4540*e4b17023SJohn Marino This can either be done with push or copy insns. */
4541*e4b17023SJohn Marino emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), NULL_RTX,
4542*e4b17023SJohn Marino parm_align, partial, reg, used - size, argblock,
4543*e4b17023SJohn Marino ARGS_SIZE_RTX (arg->locate.offset), reg_parm_stack_space,
4544*e4b17023SJohn Marino ARGS_SIZE_RTX (arg->locate.alignment_pad));
4545*e4b17023SJohn Marino
4546*e4b17023SJohn Marino /* Unless this is a partially-in-register argument, the argument is now
4547*e4b17023SJohn Marino in the stack. */
4548*e4b17023SJohn Marino if (partial == 0)
4549*e4b17023SJohn Marino arg->value = arg->stack;
4550*e4b17023SJohn Marino }
4551*e4b17023SJohn Marino else
4552*e4b17023SJohn Marino {
4553*e4b17023SJohn Marino /* BLKmode, at least partly to be pushed. */
4554*e4b17023SJohn Marino
4555*e4b17023SJohn Marino unsigned int parm_align;
4556*e4b17023SJohn Marino int excess;
4557*e4b17023SJohn Marino rtx size_rtx;
4558*e4b17023SJohn Marino
4559*e4b17023SJohn Marino /* Pushing a nonscalar.
4560*e4b17023SJohn Marino If part is passed in registers, PARTIAL says how much
4561*e4b17023SJohn Marino and emit_push_insn will take care of putting it there. */
4562*e4b17023SJohn Marino
4563*e4b17023SJohn Marino /* Round its size up to a multiple
4564*e4b17023SJohn Marino of the allocation unit for arguments. */
4565*e4b17023SJohn Marino
4566*e4b17023SJohn Marino if (arg->locate.size.var != 0)
4567*e4b17023SJohn Marino {
4568*e4b17023SJohn Marino excess = 0;
4569*e4b17023SJohn Marino size_rtx = ARGS_SIZE_RTX (arg->locate.size);
4570*e4b17023SJohn Marino }
4571*e4b17023SJohn Marino else
4572*e4b17023SJohn Marino {
4573*e4b17023SJohn Marino /* PUSH_ROUNDING has no effect on us, because emit_push_insn
4574*e4b17023SJohn Marino for BLKmode is careful to avoid it. */
4575*e4b17023SJohn Marino excess = (arg->locate.size.constant
4576*e4b17023SJohn Marino - int_size_in_bytes (TREE_TYPE (pval))
4577*e4b17023SJohn Marino + partial);
4578*e4b17023SJohn Marino size_rtx = expand_expr (size_in_bytes (TREE_TYPE (pval)),
4579*e4b17023SJohn Marino NULL_RTX, TYPE_MODE (sizetype),
4580*e4b17023SJohn Marino EXPAND_NORMAL);
4581*e4b17023SJohn Marino }
4582*e4b17023SJohn Marino
4583*e4b17023SJohn Marino parm_align = arg->locate.boundary;
4584*e4b17023SJohn Marino
4585*e4b17023SJohn Marino /* When an argument is padded down, the block is aligned to
4586*e4b17023SJohn Marino PARM_BOUNDARY, but the actual argument isn't. */
4587*e4b17023SJohn Marino if (FUNCTION_ARG_PADDING (arg->mode, TREE_TYPE (pval)) == downward)
4588*e4b17023SJohn Marino {
4589*e4b17023SJohn Marino if (arg->locate.size.var)
4590*e4b17023SJohn Marino parm_align = BITS_PER_UNIT;
4591*e4b17023SJohn Marino else if (excess)
4592*e4b17023SJohn Marino {
4593*e4b17023SJohn Marino unsigned int excess_align = (excess & -excess) * BITS_PER_UNIT;
4594*e4b17023SJohn Marino parm_align = MIN (parm_align, excess_align);
4595*e4b17023SJohn Marino }
4596*e4b17023SJohn Marino }
4597*e4b17023SJohn Marino
4598*e4b17023SJohn Marino if ((flags & ECF_SIBCALL) && MEM_P (arg->value))
4599*e4b17023SJohn Marino {
4600*e4b17023SJohn Marino /* emit_push_insn might not work properly if arg->value and
4601*e4b17023SJohn Marino argblock + arg->locate.offset areas overlap. */
4602*e4b17023SJohn Marino rtx x = arg->value;
4603*e4b17023SJohn Marino int i = 0;
4604*e4b17023SJohn Marino
4605*e4b17023SJohn Marino if (XEXP (x, 0) == crtl->args.internal_arg_pointer
4606*e4b17023SJohn Marino || (GET_CODE (XEXP (x, 0)) == PLUS
4607*e4b17023SJohn Marino && XEXP (XEXP (x, 0), 0) ==
4608*e4b17023SJohn Marino crtl->args.internal_arg_pointer
4609*e4b17023SJohn Marino && CONST_INT_P (XEXP (XEXP (x, 0), 1))))
4610*e4b17023SJohn Marino {
4611*e4b17023SJohn Marino if (XEXP (x, 0) != crtl->args.internal_arg_pointer)
4612*e4b17023SJohn Marino i = INTVAL (XEXP (XEXP (x, 0), 1));
4613*e4b17023SJohn Marino
4614*e4b17023SJohn Marino /* expand_call should ensure this. */
4615*e4b17023SJohn Marino gcc_assert (!arg->locate.offset.var
4616*e4b17023SJohn Marino && arg->locate.size.var == 0
4617*e4b17023SJohn Marino && CONST_INT_P (size_rtx));
4618*e4b17023SJohn Marino
4619*e4b17023SJohn Marino if (arg->locate.offset.constant > i)
4620*e4b17023SJohn Marino {
4621*e4b17023SJohn Marino if (arg->locate.offset.constant < i + INTVAL (size_rtx))
4622*e4b17023SJohn Marino sibcall_failure = 1;
4623*e4b17023SJohn Marino }
4624*e4b17023SJohn Marino else if (arg->locate.offset.constant < i)
4625*e4b17023SJohn Marino {
4626*e4b17023SJohn Marino /* Use arg->locate.size.constant instead of size_rtx
4627*e4b17023SJohn Marino because we only care about the part of the argument
4628*e4b17023SJohn Marino on the stack. */
4629*e4b17023SJohn Marino if (i < (arg->locate.offset.constant
4630*e4b17023SJohn Marino + arg->locate.size.constant))
4631*e4b17023SJohn Marino sibcall_failure = 1;
4632*e4b17023SJohn Marino }
4633*e4b17023SJohn Marino else
4634*e4b17023SJohn Marino {
4635*e4b17023SJohn Marino /* Even though they appear to be at the same location,
4636*e4b17023SJohn Marino if part of the outgoing argument is in registers,
4637*e4b17023SJohn Marino they aren't really at the same location. Check for
4638*e4b17023SJohn Marino this by making sure that the incoming size is the
4639*e4b17023SJohn Marino same as the outgoing size. */
4640*e4b17023SJohn Marino if (arg->locate.size.constant != INTVAL (size_rtx))
4641*e4b17023SJohn Marino sibcall_failure = 1;
4642*e4b17023SJohn Marino }
4643*e4b17023SJohn Marino }
4644*e4b17023SJohn Marino }
4645*e4b17023SJohn Marino
4646*e4b17023SJohn Marino emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), size_rtx,
4647*e4b17023SJohn Marino parm_align, partial, reg, excess, argblock,
4648*e4b17023SJohn Marino ARGS_SIZE_RTX (arg->locate.offset), reg_parm_stack_space,
4649*e4b17023SJohn Marino ARGS_SIZE_RTX (arg->locate.alignment_pad));
4650*e4b17023SJohn Marino
4651*e4b17023SJohn Marino /* Unless this is a partially-in-register argument, the argument is now
4652*e4b17023SJohn Marino in the stack.
4653*e4b17023SJohn Marino
4654*e4b17023SJohn Marino ??? Unlike the case above, in which we want the actual
4655*e4b17023SJohn Marino address of the data, so that we can load it directly into a
4656*e4b17023SJohn Marino register, here we want the address of the stack slot, so that
4657*e4b17023SJohn Marino it's properly aligned for word-by-word copying or something
4658*e4b17023SJohn Marino like that. It's not clear that this is always correct. */
4659*e4b17023SJohn Marino if (partial == 0)
4660*e4b17023SJohn Marino arg->value = arg->stack_slot;
4661*e4b17023SJohn Marino }
4662*e4b17023SJohn Marino
4663*e4b17023SJohn Marino if (arg->reg && GET_CODE (arg->reg) == PARALLEL)
4664*e4b17023SJohn Marino {
4665*e4b17023SJohn Marino tree type = TREE_TYPE (arg->tree_value);
4666*e4b17023SJohn Marino arg->parallel_value
4667*e4b17023SJohn Marino = emit_group_load_into_temps (arg->reg, arg->value, type,
4668*e4b17023SJohn Marino int_size_in_bytes (type));
4669*e4b17023SJohn Marino }
4670*e4b17023SJohn Marino
4671*e4b17023SJohn Marino /* Mark all slots this store used. */
4672*e4b17023SJohn Marino if (ACCUMULATE_OUTGOING_ARGS && !(flags & ECF_SIBCALL)
4673*e4b17023SJohn Marino && argblock && ! variable_size && arg->stack)
4674*e4b17023SJohn Marino for (i = lower_bound; i < upper_bound; i++)
4675*e4b17023SJohn Marino stack_usage_map[i] = 1;
4676*e4b17023SJohn Marino
4677*e4b17023SJohn Marino /* Once we have pushed something, pops can't safely
4678*e4b17023SJohn Marino be deferred during the rest of the arguments. */
4679*e4b17023SJohn Marino NO_DEFER_POP;
4680*e4b17023SJohn Marino
4681*e4b17023SJohn Marino /* Free any temporary slots made in processing this argument. Show
4682*e4b17023SJohn Marino that we might have taken the address of something and pushed that
4683*e4b17023SJohn Marino as an operand. */
4684*e4b17023SJohn Marino preserve_temp_slots (NULL_RTX);
4685*e4b17023SJohn Marino free_temp_slots ();
4686*e4b17023SJohn Marino pop_temp_slots ();
4687*e4b17023SJohn Marino
4688*e4b17023SJohn Marino return sibcall_failure;
4689*e4b17023SJohn Marino }
4690*e4b17023SJohn Marino
4691*e4b17023SJohn Marino /* Nonzero if we do not know how to pass TYPE solely in registers. */
4692*e4b17023SJohn Marino
4693*e4b17023SJohn Marino bool
must_pass_in_stack_var_size(enum machine_mode mode ATTRIBUTE_UNUSED,const_tree type)4694*e4b17023SJohn Marino must_pass_in_stack_var_size (enum machine_mode mode ATTRIBUTE_UNUSED,
4695*e4b17023SJohn Marino const_tree type)
4696*e4b17023SJohn Marino {
4697*e4b17023SJohn Marino if (!type)
4698*e4b17023SJohn Marino return false;
4699*e4b17023SJohn Marino
4700*e4b17023SJohn Marino /* If the type has variable size... */
4701*e4b17023SJohn Marino if (TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
4702*e4b17023SJohn Marino return true;
4703*e4b17023SJohn Marino
4704*e4b17023SJohn Marino /* If the type is marked as addressable (it is required
4705*e4b17023SJohn Marino to be constructed into the stack)... */
4706*e4b17023SJohn Marino if (TREE_ADDRESSABLE (type))
4707*e4b17023SJohn Marino return true;
4708*e4b17023SJohn Marino
4709*e4b17023SJohn Marino return false;
4710*e4b17023SJohn Marino }
4711*e4b17023SJohn Marino
4712*e4b17023SJohn Marino /* Another version of the TARGET_MUST_PASS_IN_STACK hook. This one
4713*e4b17023SJohn Marino takes trailing padding of a structure into account. */
4714*e4b17023SJohn Marino /* ??? Should be able to merge these two by examining BLOCK_REG_PADDING. */
4715*e4b17023SJohn Marino
4716*e4b17023SJohn Marino bool
must_pass_in_stack_var_size_or_pad(enum machine_mode mode,const_tree type)4717*e4b17023SJohn Marino must_pass_in_stack_var_size_or_pad (enum machine_mode mode, const_tree type)
4718*e4b17023SJohn Marino {
4719*e4b17023SJohn Marino if (!type)
4720*e4b17023SJohn Marino return false;
4721*e4b17023SJohn Marino
4722*e4b17023SJohn Marino /* If the type has variable size... */
4723*e4b17023SJohn Marino if (TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
4724*e4b17023SJohn Marino return true;
4725*e4b17023SJohn Marino
4726*e4b17023SJohn Marino /* If the type is marked as addressable (it is required
4727*e4b17023SJohn Marino to be constructed into the stack)... */
4728*e4b17023SJohn Marino if (TREE_ADDRESSABLE (type))
4729*e4b17023SJohn Marino return true;
4730*e4b17023SJohn Marino
4731*e4b17023SJohn Marino /* If the padding and mode of the type is such that a copy into
4732*e4b17023SJohn Marino a register would put it into the wrong part of the register. */
4733*e4b17023SJohn Marino if (mode == BLKmode
4734*e4b17023SJohn Marino && int_size_in_bytes (type) % (PARM_BOUNDARY / BITS_PER_UNIT)
4735*e4b17023SJohn Marino && (FUNCTION_ARG_PADDING (mode, type)
4736*e4b17023SJohn Marino == (BYTES_BIG_ENDIAN ? upward : downward)))
4737*e4b17023SJohn Marino return true;
4738*e4b17023SJohn Marino
4739*e4b17023SJohn Marino return false;
4740*e4b17023SJohn Marino }
4741