/* Convert function calls to rtl insns, for GNU C compiler.
   Copyright (C) 1989-2018 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "predict.h"
#include "memmodel.h"
#include "tm_p.h"
#include "stringpool.h"
#include "expmed.h"
#include "optabs.h"
#include "emit-rtl.h"
#include "cgraph.h"
#include "diagnostic-core.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "varasm.h"
#include "internal-fn.h"
#include "dojump.h"
#include "explow.h"
#include "calls.h"
#include "expr.h"
#include "output.h"
#include "langhooks.h"
#include "except.h"
#include "dbgcnt.h"
#include "rtl-iter.h"
#include "tree-chkp.h"
#include "tree-vrp.h"
#include "tree-ssanames.h"
#include "tree-ssa-strlen.h"
#include "rtl-chkp.h"
#include "intl.h"
#include "stringpool.h"
#include "attribs.h"
#include "builtins.h"
#include "gimple-fold.h"

/* Like PREFERRED_STACK_BOUNDARY but in units of bytes, not bits.  */
#define STACK_BYTES (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT)
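
/* A worked example (the values are target-dependent): on a target where
   PREFERRED_STACK_BOUNDARY is 128 and BITS_PER_UNIT is 8, STACK_BYTES
   evaluates to 128 / 8 == 16, i.e. a 16-byte stack alignment.  */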

/* Data structure and subroutines used within expand_call.  */

struct arg_data
{
  /* Tree node for this argument.  */
  tree tree_value;
  /* Mode for value; TYPE_MODE unless promoted.  */
  machine_mode mode;
  /* Current RTL value for argument, or 0 if it isn't precomputed.  */
  rtx value;
  /* Initially-computed RTL value for argument; only for const functions.  */
  rtx initial_value;
  /* Register to pass this argument in, 0 if passed on stack, or a
     PARALLEL if the arg is to be copied into multiple non-contiguous
     registers.  */
  rtx reg;
  /* Register to pass this argument in when generating tail call sequence.
     This is not the same register as for normal calls on machines with
     register windows.  */
  rtx tail_call_reg;
  /* If REG is a PARALLEL, this is a copy of VALUE pulled into the correct
     form for emit_group_move.  */
  rtx parallel_value;
  /* If value is passed in neither reg nor stack, this field holds a number
     of a special slot to be used.  */
  rtx special_slot;
  /* For pointer bounds, holds the index of the parm the bounds are bound
     to.  -1 if there is no such pointer.  */
  int pointer_arg;
  /* If pointer_arg refers to a structure, then pointer_offset holds the
     offset of the pointer in this structure.  */
  int pointer_offset;
  /* If REG was promoted from the actual mode of the argument expression,
     indicates whether the promotion is sign- or zero-extended.  */
  int unsignedp;
  /* Number of bytes to put in registers.  0 means put the whole arg
     in registers.  Also 0 if not passed in registers.  */
  int partial;
  /* Nonzero if argument must be passed on stack.
     Note that some arguments may be passed on the stack
     even though pass_on_stack is zero, just because FUNCTION_ARG says so.
     pass_on_stack identifies arguments that *cannot* go in registers.  */
  int pass_on_stack;
  /* Some fields packaged up for locate_and_pad_parm.  */
  struct locate_and_pad_arg_data locate;
  /* Location on the stack at which parameter should be stored.  The store
     has already been done if STACK == VALUE.  */
  rtx stack;
  /* Location on the stack of the start of this argument slot.  This can
     differ from STACK if this arg pads downward.  This location is known
     to be aligned to TARGET_FUNCTION_ARG_BOUNDARY.  */
  rtx stack_slot;
  /* Place that this stack area has been saved, if needed.  */
  rtx save_area;
  /* If an argument's alignment does not permit direct copying into registers,
     copy in smaller-sized pieces into pseudos.  These are stored in a
     block pointed to by this field.  The next field says how many
     word-sized pseudos we made.  */
  rtx *aligned_regs;
  int n_aligned_regs;
};

/* A vector of one char per byte of stack space.  A byte is nonzero if
   the corresponding stack location has been used.
   This vector is used to prevent a function call within an argument from
   clobbering any stack already set up.  */
static char *stack_usage_map;

/* Size of STACK_USAGE_MAP.  */
static unsigned int highest_outgoing_arg_in_use;

/* Assume that any stack location at this byte index is used,
   without checking the contents of stack_usage_map.  */
static unsigned HOST_WIDE_INT stack_usage_watermark = HOST_WIDE_INT_M1U;

/* A bitmap of virtual-incoming stack space.  A bit is set if the
   corresponding stack location's tail call argument has already been
   stored into the stack.  This bitmap is used to prevent sibling call
   optimization if the function tries to use the parent's incoming
   argument slots when they have already been overwritten with tail
   call arguments.  */
static sbitmap stored_args_map;

/* Assume that any virtual-incoming location at this byte index has been
   stored, without checking the contents of stored_args_map.  */
static unsigned HOST_WIDE_INT stored_args_watermark;

/* stack_arg_under_construction is nonzero when an argument may be
   initialized with a constructor call (including a C function that
   returns a BLKmode struct) and expand_call must take special action
   to make sure the object being constructed does not overlap the
   argument list for the constructor call.  */
static int stack_arg_under_construction;

static void precompute_register_parameters (int, struct arg_data *, int *);
static void store_bounds (struct arg_data *, struct arg_data *);
static int store_one_arg (struct arg_data *, rtx, int, int, int);
static void store_unaligned_arguments_into_pseudos (struct arg_data *, int);
static int finalize_must_preallocate (int, int, struct arg_data *,
                                      struct args_size *);
static void precompute_arguments (int, struct arg_data *);
static void compute_argument_addresses (struct arg_data *, rtx, int);
static rtx rtx_for_function_call (tree, tree);
static void load_register_parameters (struct arg_data *, int, rtx *, int,
                                      int, int *);
static int special_function_p (const_tree, int);
static int check_sibcall_argument_overlap_1 (rtx);
static int check_sibcall_argument_overlap (rtx_insn *, struct arg_data *, int);

static tree split_complex_types (tree);

#ifdef REG_PARM_STACK_SPACE
static rtx save_fixed_argument_area (int, rtx, int *, int *);
static void restore_fixed_argument_area (rtx, rtx, int, int);
#endif

/* Return true if bytes [LOWER_BOUND, UPPER_BOUND) of the outgoing
   stack region might already be in use.  */

static bool
stack_region_maybe_used_p (poly_uint64 lower_bound, poly_uint64 upper_bound,
                           unsigned int reg_parm_stack_space)
{
  unsigned HOST_WIDE_INT const_lower, const_upper;
  const_lower = constant_lower_bound (lower_bound);
  if (!upper_bound.is_constant (&const_upper))
    const_upper = HOST_WIDE_INT_M1U;

  if (const_upper > stack_usage_watermark)
    return true;

  /* Don't worry about things in the fixed argument area;
     it has already been saved.  */
  const_lower = MAX (const_lower, reg_parm_stack_space);
  const_upper = MIN (const_upper, highest_outgoing_arg_in_use);
  for (unsigned HOST_WIDE_INT i = const_lower; i < const_upper; ++i)
    if (stack_usage_map[i])
      return true;
  return false;
}

/* Record that bytes [LOWER_BOUND, UPPER_BOUND) of the outgoing
   stack region are now in use.  */

static void
mark_stack_region_used (poly_uint64 lower_bound, poly_uint64 upper_bound)
{
  unsigned HOST_WIDE_INT const_lower, const_upper;
  const_lower = constant_lower_bound (lower_bound);
  if (upper_bound.is_constant (&const_upper))
    for (unsigned HOST_WIDE_INT i = const_lower; i < const_upper; ++i)
      stack_usage_map[i] = 1;
  else
    stack_usage_watermark = MIN (stack_usage_watermark, const_lower);
}
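
/* Taken together, the two functions above implement a conservative
   map-plus-watermark scheme: a region whose bounds are compile-time
   constants is tracked byte-by-byte in stack_usage_map, while a region
   whose upper bound is not constant simply lowers stack_usage_watermark
   to its constant lower bound, after which stack_region_maybe_used_p
   treats every byte at or above the watermark as in use.  */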

/* Force FUNEXP into a form suitable for the address of a CALL,
   and return that as an rtx.  Also load the static chain register
   if FNDECL is a nested function.

   CALL_FUSAGE points to a variable holding the prospective
   CALL_INSN_FUNCTION_USAGE information.  */

rtx
prepare_call_address (tree fndecl_or_type, rtx funexp, rtx static_chain_value,
                      rtx *call_fusage, int reg_parm_seen, int flags)
{
  /* Make a valid memory address and copy constants through pseudo-regs,
     but not for a constant address if -fno-function-cse.  */
  if (GET_CODE (funexp) != SYMBOL_REF)
    {
      /* If it's an indirect call by descriptor, generate code to perform
         runtime identification of the pointer and load the descriptor.  */
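      /* A sketch of the scheme implemented below, assuming bit_val == 1:
         a descriptor is a pair of pointers, the static chain value
         followed by the code address, and a "function pointer" with the
         low bit set really points one byte past the start of a
         descriptor:

           if (funexp & 1)
             {
               chain  = ((void **) (funexp - 1))[0];
               funexp = ((void **) (funexp - 1))[1];
             }  */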
      if ((flags & ECF_BY_DESCRIPTOR) && !flag_trampolines)
        {
          const int bit_val = targetm.calls.custom_function_descriptors;
          rtx call_lab = gen_label_rtx ();

          gcc_assert (fndecl_or_type && TYPE_P (fndecl_or_type));
          fndecl_or_type
            = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, NULL_TREE,
                          fndecl_or_type);
          DECL_STATIC_CHAIN (fndecl_or_type) = 1;
          rtx chain = targetm.calls.static_chain (fndecl_or_type, false);

          if (GET_MODE (funexp) != Pmode)
            funexp = convert_memory_address (Pmode, funexp);

          /* Avoid long live ranges around function calls.  */
          funexp = copy_to_mode_reg (Pmode, funexp);

          if (REG_P (chain))
            emit_insn (gen_rtx_CLOBBER (VOIDmode, chain));

          /* Emit the runtime identification pattern.  */
          rtx mask = gen_rtx_AND (Pmode, funexp, GEN_INT (bit_val));
          emit_cmp_and_jump_insns (mask, const0_rtx, EQ, NULL_RTX, Pmode, 1,
                                   call_lab);
          /* Statically predict the branch as very likely taken.  */
          rtx_insn *insn = get_last_insn ();
          if (JUMP_P (insn))
            predict_insn_def (insn, PRED_BUILTIN_EXPECT, TAKEN);

          /* Load the descriptor.  */
          rtx mem = gen_rtx_MEM (ptr_mode,
                                 plus_constant (Pmode, funexp, - bit_val));
          MEM_NOTRAP_P (mem) = 1;
          mem = convert_memory_address (Pmode, mem);
          emit_move_insn (chain, mem);

          mem = gen_rtx_MEM (ptr_mode,
                             plus_constant (Pmode, funexp,
                                            POINTER_SIZE / BITS_PER_UNIT
                                              - bit_val));
          MEM_NOTRAP_P (mem) = 1;
          mem = convert_memory_address (Pmode, mem);
          emit_move_insn (funexp, mem);

          emit_label (call_lab);

          if (REG_P (chain))
            {
              use_reg (call_fusage, chain);
              STATIC_CHAIN_REG_P (chain) = 1;
            }

          /* Make sure we're not going to be overwritten below.  */
          gcc_assert (!static_chain_value);
        }

      /* If we are using registers for parameters, force the
         function address into a register now.  */
      funexp = ((reg_parm_seen
                 && targetm.small_register_classes_for_mode_p (FUNCTION_MODE))
                ? force_not_mem (memory_address (FUNCTION_MODE, funexp))
                : memory_address (FUNCTION_MODE, funexp));
    }
  else
    {
      /* funexp could be a SYMBOL_REF that represents a function pointer
         of ptr_mode.  In this case, it should be converted into address
         mode to be a valid address for a memory rtx pattern.  See PR 64971.  */
      if (GET_MODE (funexp) != Pmode)
        funexp = convert_memory_address (Pmode, funexp);

      if (!(flags & ECF_SIBCALL))
        {
          if (!NO_FUNCTION_CSE && optimize && ! flag_no_function_cse)
            funexp = force_reg (Pmode, funexp);
        }
    }

  if (static_chain_value != 0
      && (TREE_CODE (fndecl_or_type) != FUNCTION_DECL
          || DECL_STATIC_CHAIN (fndecl_or_type)))
    {
      rtx chain;

      chain = targetm.calls.static_chain (fndecl_or_type, false);
      static_chain_value = convert_memory_address (Pmode, static_chain_value);

      emit_move_insn (chain, static_chain_value);
      if (REG_P (chain))
        {
          use_reg (call_fusage, chain);
          STATIC_CHAIN_REG_P (chain) = 1;
        }
    }

  return funexp;
}

/* Generate instructions to call function FUNEXP,
   and optionally pop the results.
   The CALL_INSN is the first insn generated.

   FNDECL is the declaration node of the function.  This is given to the
   hook TARGET_RETURN_POPS_ARGS to determine whether this function pops
   its own args.

   FUNTYPE is the data type of the function.  This is given to the hook
   TARGET_RETURN_POPS_ARGS to determine whether this function pops its
   own args.  We used to allow an identifier for library functions, but
   that doesn't work when the return type is an aggregate type and the
   calling convention says that the pointer to this aggregate is to be
   popped by the callee.

   STACK_SIZE is the number of bytes of arguments on the stack,
   ROUNDED_STACK_SIZE is that number rounded up to
   PREFERRED_STACK_BOUNDARY; zero if the size is variable.  This is
   both to put into the call insn and to generate explicit popping
   code if necessary.

   STRUCT_VALUE_SIZE is the number of bytes wanted in a structure value.
   It is zero if this call doesn't want a structure value.

   NEXT_ARG_REG is the rtx that results from executing
     targetm.calls.function_arg (&args_so_far, VOIDmode, void_type_node, true)
   just after all the args have had their registers assigned.
   This could be whatever you like, but normally it is the first
   arg-register beyond those used for args in this call,
   or 0 if all the arg-registers are used in this call.
   It is passed on to `gen_call' so you can put this info in the call insn.

   VALREG is a hard register in which a value is returned,
   or 0 if the call does not return a value.

   OLD_INHIBIT_DEFER_POP is the value that `inhibit_defer_pop' had before
   the args to this call were processed.
   We restore `inhibit_defer_pop' to that value.

   CALL_FUSAGE is either empty or an EXPR_LIST of USE expressions that
   denote registers used by the called function.  */

static void
emit_call_1 (rtx funexp, tree fntree ATTRIBUTE_UNUSED, tree fndecl ATTRIBUTE_UNUSED,
             tree funtype ATTRIBUTE_UNUSED,
             poly_int64 stack_size ATTRIBUTE_UNUSED,
             poly_int64 rounded_stack_size,
             poly_int64 struct_value_size ATTRIBUTE_UNUSED,
             rtx next_arg_reg ATTRIBUTE_UNUSED, rtx valreg,
             int old_inhibit_defer_pop, rtx call_fusage, int ecf_flags,
             cumulative_args_t args_so_far ATTRIBUTE_UNUSED)
{
  rtx rounded_stack_size_rtx = gen_int_mode (rounded_stack_size, Pmode);
  rtx call, funmem, pat;
  int already_popped = 0;
  poly_int64 n_popped = 0;

  /* Sibling call patterns never pop arguments (no sibcall(_value)_pop
     patterns exist).  Any popping that the callee does on return will
     be from our caller's frame rather than ours.  */
  if (!(ecf_flags & ECF_SIBCALL))
    {
      n_popped += targetm.calls.return_pops_args (fndecl, funtype, stack_size);

#ifdef CALL_POPS_ARGS
      n_popped += CALL_POPS_ARGS (*get_cumulative_args (args_so_far));
#endif
    }

  /* Ensure address is valid.  SYMBOL_REF is already valid, so no need,
     and we don't want to load it into a register as an optimization,
     because prepare_call_address already did it if it should be done.  */
  if (GET_CODE (funexp) != SYMBOL_REF)
    funexp = memory_address (FUNCTION_MODE, funexp);

  funmem = gen_rtx_MEM (FUNCTION_MODE, funexp);
  if (fndecl && TREE_CODE (fndecl) == FUNCTION_DECL)
    {
      tree t = fndecl;

      /* Although a built-in FUNCTION_DECL and its non-__builtin
         counterpart compare equal and get a shared mem_attrs, they
         produce different dump output in compare-debug compilations,
         if an entry gets garbage collected in one compilation, then
         adds a different (but equivalent) entry, while the other
         doesn't run the garbage collector at the same spot and then
         shares the mem_attr with the equivalent entry.  */
      if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL)
        {
          tree t2 = builtin_decl_explicit (DECL_FUNCTION_CODE (t));
          if (t2)
            t = t2;
        }

      set_mem_expr (funmem, t);
    }
  else if (fntree)
    set_mem_expr (funmem, build_simple_mem_ref (CALL_EXPR_FN (fntree)));

  if (ecf_flags & ECF_SIBCALL)
    {
      if (valreg)
        pat = targetm.gen_sibcall_value (valreg, funmem,
                                         rounded_stack_size_rtx,
                                         next_arg_reg, NULL_RTX);
      else
        pat = targetm.gen_sibcall (funmem, rounded_stack_size_rtx,
                                   next_arg_reg,
                                   gen_int_mode (struct_value_size, Pmode));
    }
  /* If the target has "call" or "call_value" insns, then prefer them
     if no arguments are actually popped.  If the target does not have
     "call" or "call_value" insns, then we must use the popping versions
     even if the call has no arguments to pop.  */
  else if (maybe_ne (n_popped, 0)
           || !(valreg
                ? targetm.have_call_value ()
                : targetm.have_call ()))
    {
      rtx n_pop = gen_int_mode (n_popped, Pmode);

      /* If this subroutine pops its own args, record that in the call insn
         if possible, for the sake of frame pointer elimination.  */

      if (valreg)
        pat = targetm.gen_call_value_pop (valreg, funmem,
                                          rounded_stack_size_rtx,
                                          next_arg_reg, n_pop);
      else
        pat = targetm.gen_call_pop (funmem, rounded_stack_size_rtx,
                                    next_arg_reg, n_pop);

      already_popped = 1;
    }
  else
    {
      if (valreg)
        pat = targetm.gen_call_value (valreg, funmem, rounded_stack_size_rtx,
                                      next_arg_reg, NULL_RTX);
      else
        pat = targetm.gen_call (funmem, rounded_stack_size_rtx, next_arg_reg,
                                gen_int_mode (struct_value_size, Pmode));
    }
  emit_insn (pat);

  /* Find the call we just emitted.  */
  rtx_call_insn *call_insn = last_call_insn ();

  /* Some targets create a fresh MEM instead of reusing the one provided
     above.  Set its MEM_EXPR.  */
  call = get_call_rtx_from (call_insn);
  if (call
      && MEM_EXPR (XEXP (call, 0)) == NULL_TREE
      && MEM_EXPR (funmem) != NULL_TREE)
    set_mem_expr (XEXP (call, 0), MEM_EXPR (funmem));

  /* Mark instrumented calls.  */
  if (call && fntree)
    CALL_EXPR_WITH_BOUNDS_P (call) = CALL_WITH_BOUNDS_P (fntree);

  /* Put the register usage information there.  */
  add_function_usage_to (call_insn, call_fusage);

  /* If this is a const call, then set the insn's unchanging bit.  */
  if (ecf_flags & ECF_CONST)
    RTL_CONST_CALL_P (call_insn) = 1;

  /* If this is a pure call, then set the insn's unchanging bit.  */
  if (ecf_flags & ECF_PURE)
    RTL_PURE_CALL_P (call_insn) = 1;

  /* If this is a const or pure call that may loop, then set the insn's
     looping const-or-pure bit.  */
  if (ecf_flags & ECF_LOOPING_CONST_OR_PURE)
    RTL_LOOPING_CONST_OR_PURE_CALL_P (call_insn) = 1;

  /* Create a nothrow REG_EH_REGION note, if needed.  */
  make_reg_eh_region_note (call_insn, ecf_flags, 0);

  if (ecf_flags & ECF_NORETURN)
    add_reg_note (call_insn, REG_NORETURN, const0_rtx);

  if (ecf_flags & ECF_RETURNS_TWICE)
    {
      add_reg_note (call_insn, REG_SETJMP, const0_rtx);
      cfun->calls_setjmp = 1;
    }

  SIBLING_CALL_P (call_insn) = ((ecf_flags & ECF_SIBCALL) != 0);

  /* Restore this now, so that we do defer pops for this call's args
     if the context of the call as a whole permits.  */
  inhibit_defer_pop = old_inhibit_defer_pop;

  if (maybe_ne (n_popped, 0))
    {
      if (!already_popped)
        CALL_INSN_FUNCTION_USAGE (call_insn)
          = gen_rtx_EXPR_LIST (VOIDmode,
                               gen_rtx_CLOBBER (VOIDmode, stack_pointer_rtx),
                               CALL_INSN_FUNCTION_USAGE (call_insn));
      rounded_stack_size -= n_popped;
      rounded_stack_size_rtx = gen_int_mode (rounded_stack_size, Pmode);
      stack_pointer_delta -= n_popped;

      add_args_size_note (call_insn, stack_pointer_delta);

      /* If popping is needed, stack realignment must use DRAP.  */
      if (SUPPORTS_STACK_ALIGNMENT)
        crtl->need_drap = true;
    }
  /* For noreturn calls when not accumulating outgoing args, force a
     REG_ARGS_SIZE note to prevent crossjumping of calls with different
     arg sizes.  */
  else if (!ACCUMULATE_OUTGOING_ARGS && (ecf_flags & ECF_NORETURN) != 0)
    add_args_size_note (call_insn, stack_pointer_delta);

  if (!ACCUMULATE_OUTGOING_ARGS)
    {
      /* If returning from the subroutine does not automatically pop the args,
         we need an instruction to pop them sooner or later.
         Perhaps do it now; perhaps just record how much space to pop later.

         If returning from the subroutine does pop the args, indicate that the
         stack pointer will be changed.  */

      if (maybe_ne (rounded_stack_size, 0))
        {
          if (ecf_flags & ECF_NORETURN)
            /* Just pretend we did the pop.  */
            stack_pointer_delta -= rounded_stack_size;
          else if (flag_defer_pop && inhibit_defer_pop == 0
                   && ! (ecf_flags & (ECF_CONST | ECF_PURE)))
            pending_stack_adjust += rounded_stack_size;
          else
            adjust_stack (rounded_stack_size_rtx);
        }
    }
  /* When we accumulate outgoing args, we must avoid any stack manipulations.
     Restore the stack pointer to its original value now.  Usually
     ACCUMULATE_OUTGOING_ARGS targets don't get here, but there are exceptions.
     On i386 ACCUMULATE_OUTGOING_ARGS can be enabled on demand, and
     popping variants of functions exist as well.

     ??? We may optimize similar to defer_pop above, but it is
     probably not worthwhile.

     ??? It will be worthwhile to enable combine_stack_adjustments even for
     such machines.  */
  else if (maybe_ne (n_popped, 0))
    anti_adjust_stack (gen_int_mode (n_popped, Pmode));
}

/* Determine if the function identified by FNDECL is one with
   special properties we wish to know about.  Modify FLAGS accordingly.

   For example, if the function might return more than one time (setjmp), then
   set ECF_RETURNS_TWICE.

   Set ECF_MAY_BE_ALLOCA for any memory allocation function that might allocate
   space from the stack such as alloca.  */

static int
special_function_p (const_tree fndecl, int flags)
{
  tree name_decl = DECL_NAME (fndecl);

  /* For instrumentation clones we want to derive flags
     from the original name.  */
  if (cgraph_node::get (fndecl)
      && cgraph_node::get (fndecl)->instrumentation_clone)
    name_decl = DECL_NAME (cgraph_node::get (fndecl)->orig_decl);

  if (fndecl && name_decl
      && IDENTIFIER_LENGTH (name_decl) <= 11
      /* Exclude functions not at the file scope, or not `extern',
         since they are not the magic functions we would otherwise
         think they are.
         FIXME: this should be handled with attributes, not with this
         hacky imitation of DECL_ASSEMBLER_NAME.  It's (also) wrong
         because you can declare fork() inside a function if you
         wish.  */
      && (DECL_CONTEXT (fndecl) == NULL_TREE
          || TREE_CODE (DECL_CONTEXT (fndecl)) == TRANSLATION_UNIT_DECL)
      && TREE_PUBLIC (fndecl))
    {
      const char *name = IDENTIFIER_POINTER (name_decl);
      const char *tname = name;

      /* We assume that alloca will always be called by name.  It
         makes no sense to pass it as a pointer-to-function to
         anything that does not understand its behavior.  */
      if (IDENTIFIER_LENGTH (name_decl) == 6
          && name[0] == 'a'
          && ! strcmp (name, "alloca"))
        flags |= ECF_MAY_BE_ALLOCA;

      /* Disregard prefix _ or __.  */
      if (name[0] == '_')
        {
          if (name[1] == '_')
            tname += 2;
          else
            tname += 1;
        }

      /* ECF_RETURNS_TWICE is safe even for -ffreestanding.  */
      if (! strcmp (tname, "setjmp")
          || ! strcmp (tname, "sigsetjmp")
          || ! strcmp (name, "savectx")
          || ! strcmp (name, "vfork")
          || ! strcmp (name, "getcontext"))
        flags |= ECF_RETURNS_TWICE;
    }

  if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && ALLOCA_FUNCTION_CODE_P (DECL_FUNCTION_CODE (fndecl)))
    flags |= ECF_MAY_BE_ALLOCA;

  return flags;
}

/* Similar to special_function_p; return a set of ERF_ flags for the
   function FNDECL.  */
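/* For example, under the mapping below a "fn spec" string whose first
   character is "1" means the function returns its first argument
   (ERF_RETURNS_ARG with argument index 0), while "m" means the result
   does not alias anything else, as for malloc (ERF_NOALIAS).  */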
static int
decl_return_flags (tree fndecl)
{
  tree attr;
  tree type = TREE_TYPE (fndecl);
  if (!type)
    return 0;

  attr = lookup_attribute ("fn spec", TYPE_ATTRIBUTES (type));
  if (!attr)
    return 0;

  attr = TREE_VALUE (TREE_VALUE (attr));
  if (!attr || TREE_STRING_LENGTH (attr) < 1)
    return 0;

  switch (TREE_STRING_POINTER (attr)[0])
    {
    case '1':
    case '2':
    case '3':
    case '4':
      return ERF_RETURNS_ARG | (TREE_STRING_POINTER (attr)[0] - '1');

    case 'm':
      return ERF_NOALIAS;

    case '.':
    default:
      return 0;
    }
}

/* Return nonzero when FNDECL represents a call to setjmp.  */

int
setjmp_call_p (const_tree fndecl)
{
  if (DECL_IS_RETURNS_TWICE (fndecl))
    return ECF_RETURNS_TWICE;
  return special_function_p (fndecl, 0) & ECF_RETURNS_TWICE;
}

/* Return true if STMT may be an alloca call.  */

bool
gimple_maybe_alloca_call_p (const gimple *stmt)
{
  tree fndecl;

  if (!is_gimple_call (stmt))
    return false;

  fndecl = gimple_call_fndecl (stmt);
  if (fndecl && (special_function_p (fndecl, 0) & ECF_MAY_BE_ALLOCA))
    return true;

  return false;
}

/* Return true if STMT is a builtin alloca call.  */

bool
gimple_alloca_call_p (const gimple *stmt)
{
  tree fndecl;

  if (!is_gimple_call (stmt))
    return false;

  fndecl = gimple_call_fndecl (stmt);
  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (fndecl))
      {
      CASE_BUILT_IN_ALLOCA:
        return gimple_call_num_args (stmt) > 0;
      default:
        break;
      }

  return false;
}

/* Return true when EXP is a builtin alloca call.  */

bool
alloca_call_p (const_tree exp)
{
  tree fndecl;
  if (TREE_CODE (exp) == CALL_EXPR
      && (fndecl = get_callee_fndecl (exp))
      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (fndecl))
      {
      CASE_BUILT_IN_ALLOCA:
        return true;
      default:
        break;
      }

  return false;
}

/* Return TRUE if FNDECL is either a TM builtin or a TM cloned
   function.  Return FALSE otherwise.  */

static bool
is_tm_builtin (const_tree fndecl)
{
  if (fndecl == NULL)
    return false;

  if (decl_is_tm_clone (fndecl))
    return true;

  if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
    {
      switch (DECL_FUNCTION_CODE (fndecl))
        {
        case BUILT_IN_TM_COMMIT:
        case BUILT_IN_TM_COMMIT_EH:
        case BUILT_IN_TM_ABORT:
        case BUILT_IN_TM_IRREVOCABLE:
        case BUILT_IN_TM_GETTMCLONE_IRR:
        case BUILT_IN_TM_MEMCPY:
        case BUILT_IN_TM_MEMMOVE:
        case BUILT_IN_TM_MEMSET:
        CASE_BUILT_IN_TM_STORE (1):
        CASE_BUILT_IN_TM_STORE (2):
        CASE_BUILT_IN_TM_STORE (4):
        CASE_BUILT_IN_TM_STORE (8):
        CASE_BUILT_IN_TM_STORE (FLOAT):
        CASE_BUILT_IN_TM_STORE (DOUBLE):
        CASE_BUILT_IN_TM_STORE (LDOUBLE):
        CASE_BUILT_IN_TM_STORE (M64):
        CASE_BUILT_IN_TM_STORE (M128):
        CASE_BUILT_IN_TM_STORE (M256):
        CASE_BUILT_IN_TM_LOAD (1):
        CASE_BUILT_IN_TM_LOAD (2):
        CASE_BUILT_IN_TM_LOAD (4):
        CASE_BUILT_IN_TM_LOAD (8):
        CASE_BUILT_IN_TM_LOAD (FLOAT):
        CASE_BUILT_IN_TM_LOAD (DOUBLE):
        CASE_BUILT_IN_TM_LOAD (LDOUBLE):
        CASE_BUILT_IN_TM_LOAD (M64):
        CASE_BUILT_IN_TM_LOAD (M128):
        CASE_BUILT_IN_TM_LOAD (M256):
        case BUILT_IN_TM_LOG:
        case BUILT_IN_TM_LOG_1:
        case BUILT_IN_TM_LOG_2:
        case BUILT_IN_TM_LOG_4:
        case BUILT_IN_TM_LOG_8:
        case BUILT_IN_TM_LOG_FLOAT:
        case BUILT_IN_TM_LOG_DOUBLE:
        case BUILT_IN_TM_LOG_LDOUBLE:
        case BUILT_IN_TM_LOG_M64:
        case BUILT_IN_TM_LOG_M128:
        case BUILT_IN_TM_LOG_M256:
          return true;
        default:
          break;
        }
    }
  return false;
}

/* Detect flags (function attributes) from the function decl or type node.  */

int
flags_from_decl_or_type (const_tree exp)
{
  int flags = 0;

  if (DECL_P (exp))
    {
      /* The function exp may have the `malloc' attribute.  */
      if (DECL_IS_MALLOC (exp))
        flags |= ECF_MALLOC;

      /* The function exp may have the `returns_twice' attribute.  */
      if (DECL_IS_RETURNS_TWICE (exp))
        flags |= ECF_RETURNS_TWICE;

      /* Process the pure and const attributes.  */
      if (TREE_READONLY (exp))
        flags |= ECF_CONST;
      if (DECL_PURE_P (exp))
        flags |= ECF_PURE;
      if (DECL_LOOPING_CONST_OR_PURE_P (exp))
        flags |= ECF_LOOPING_CONST_OR_PURE;

      if (DECL_IS_NOVOPS (exp))
        flags |= ECF_NOVOPS;
      if (lookup_attribute ("leaf", DECL_ATTRIBUTES (exp)))
        flags |= ECF_LEAF;
      if (lookup_attribute ("cold", DECL_ATTRIBUTES (exp)))
        flags |= ECF_COLD;

      if (TREE_NOTHROW (exp))
        flags |= ECF_NOTHROW;

      if (flag_tm)
        {
          if (is_tm_builtin (exp))
            flags |= ECF_TM_BUILTIN;
          else if ((flags & (ECF_CONST|ECF_NOVOPS)) != 0
                   || lookup_attribute ("transaction_pure",
                                        TYPE_ATTRIBUTES (TREE_TYPE (exp))))
            flags |= ECF_TM_PURE;
        }

      flags = special_function_p (exp, flags);
    }
  else if (TYPE_P (exp))
    {
      if (TYPE_READONLY (exp))
        flags |= ECF_CONST;

      if (flag_tm
          && ((flags & ECF_CONST) != 0
              || lookup_attribute ("transaction_pure", TYPE_ATTRIBUTES (exp))))
        flags |= ECF_TM_PURE;
    }
  else
    gcc_unreachable ();

  if (TREE_THIS_VOLATILE (exp))
    {
      flags |= ECF_NORETURN;
      if (flags & (ECF_CONST|ECF_PURE))
        flags |= ECF_LOOPING_CONST_OR_PURE;
    }

  return flags;
}

/* Detect flags from a CALL_EXPR.  */

int
call_expr_flags (const_tree t)
{
  int flags;
  tree decl = get_callee_fndecl (t);

  if (decl)
    flags = flags_from_decl_or_type (decl);
  else if (CALL_EXPR_FN (t) == NULL_TREE)
    flags = internal_fn_flags (CALL_EXPR_IFN (t));
  else
    {
      tree type = TREE_TYPE (CALL_EXPR_FN (t));
      if (type && TREE_CODE (type) == POINTER_TYPE)
        flags = flags_from_decl_or_type (TREE_TYPE (type));
      else
        flags = 0;
      if (CALL_EXPR_BY_DESCRIPTOR (t))
        flags |= ECF_BY_DESCRIPTOR;
    }

  return flags;
}

/* Return true if TYPE should be passed by invisible reference.  */

bool
pass_by_reference (CUMULATIVE_ARGS *ca, machine_mode mode,
                   tree type, bool named_arg)
{
  if (type)
    {
      /* If this type contains non-trivial constructors, then it is
         forbidden for the middle-end to create any new copies.  */
      if (TREE_ADDRESSABLE (type))
        return true;

      /* GCC post 3.4 passes *all* variable sized types by reference.  */
      if (!TYPE_SIZE (type) || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
        return true;

      /* If a record type should be passed the same as its first (and only)
         member, use the type and mode of that member.  */
      if (TREE_CODE (type) == RECORD_TYPE && TYPE_TRANSPARENT_AGGR (type))
        {
          type = TREE_TYPE (first_field (type));
          mode = TYPE_MODE (type);
        }
    }

  return targetm.calls.pass_by_reference (pack_cumulative_args (ca), mode,
                                          type, named_arg);
}

/* Return true if TYPE, which is passed by reference, should be callee
   copied instead of caller copied.  */

bool
reference_callee_copied (CUMULATIVE_ARGS *ca, machine_mode mode,
                         tree type, bool named_arg)
{
  if (type && TREE_ADDRESSABLE (type))
    return false;
  return targetm.calls.callee_copies (pack_cumulative_args (ca), mode, type,
                                      named_arg);
}

/* Precompute all register parameters as described by ARGS, storing values
   into fields within the ARGS array.

   NUM_ACTUALS indicates the total number of elements in the ARGS array.

   Set REG_PARM_SEEN if we encounter a register parameter.  */

static void
precompute_register_parameters (int num_actuals, struct arg_data *args,
                                int *reg_parm_seen)
{
  int i;

  *reg_parm_seen = 0;

  for (i = 0; i < num_actuals; i++)
    if (args[i].reg != 0 && ! args[i].pass_on_stack)
      {
        *reg_parm_seen = 1;

        if (args[i].value == 0)
          {
            push_temp_slots ();
            args[i].value = expand_normal (args[i].tree_value);
            preserve_temp_slots (args[i].value);
            pop_temp_slots ();
          }

        /* If we are to promote the function arg to a wider mode,
           do it now.  */

        if (args[i].mode != TYPE_MODE (TREE_TYPE (args[i].tree_value)))
          args[i].value
            = convert_modes (args[i].mode,
                             TYPE_MODE (TREE_TYPE (args[i].tree_value)),
                             args[i].value, args[i].unsignedp);

        /* If the value is a non-legitimate constant, force it into a
           pseudo now.  TLS symbols sometimes need a call to resolve.  */
        if (CONSTANT_P (args[i].value)
            && !targetm.legitimate_constant_p (args[i].mode, args[i].value))
          args[i].value = force_reg (args[i].mode, args[i].value);

        /* If we're going to have to load the value by parts, pull the
           parts into pseudos.  The part extraction process can involve
           non-trivial computation.  */
        if (GET_CODE (args[i].reg) == PARALLEL)
          {
            tree type = TREE_TYPE (args[i].tree_value);
            args[i].parallel_value
              = emit_group_load_into_temps (args[i].reg, args[i].value,
                                            type, int_size_in_bytes (type));
          }

        /* If the value is expensive, and we are inside an appropriately
           short loop, put the value into a pseudo and then put the pseudo
           into the hard reg.

           For small register classes, also do this if this call uses
           register parameters.  This is to avoid reload conflicts while
           loading the parameter registers.  */

        else if ((! (REG_P (args[i].value)
                     || (GET_CODE (args[i].value) == SUBREG
                         && REG_P (SUBREG_REG (args[i].value)))))
                 && args[i].mode != BLKmode
                 && (set_src_cost (args[i].value, args[i].mode,
                                   optimize_insn_for_speed_p ())
                     > COSTS_N_INSNS (1))
                 && ((*reg_parm_seen
                      && targetm.small_register_classes_for_mode_p (args[i].mode))
                     || optimize))
          args[i].value = copy_to_mode_reg (args[i].mode, args[i].value);
      }
}

#ifdef REG_PARM_STACK_SPACE

/* The argument list is the property of the called routine and it
   may clobber it.  If the fixed area has been used for previous
   parameters, we must save and restore it.  */

static rtx
save_fixed_argument_area (int reg_parm_stack_space, rtx argblock, int *low_to_save, int *high_to_save)
{
  unsigned int low;
  unsigned int high;

  /* Compute the boundary of the area that needs to be saved, if any.  */
  high = reg_parm_stack_space;
  if (ARGS_GROW_DOWNWARD)
    high += 1;

  if (high > highest_outgoing_arg_in_use)
    high = highest_outgoing_arg_in_use;

  for (low = 0; low < high; low++)
    if (stack_usage_map[low] != 0 || low >= stack_usage_watermark)
      {
        int num_to_save;
        machine_mode save_mode;
        int delta;
        rtx addr;
        rtx stack_area;
        rtx save_area;

        while (stack_usage_map[--high] == 0)
          ;

        *low_to_save = low;
        *high_to_save = high;

        num_to_save = high - low + 1;

        /* If we don't have the required alignment, we must do this
           in BLKmode.  */
        scalar_int_mode imode;
        if (int_mode_for_size (num_to_save * BITS_PER_UNIT, 1).exists (&imode)
            && (low & (MIN (GET_MODE_SIZE (imode),
                            BIGGEST_ALIGNMENT / UNITS_PER_WORD) - 1)) == 0)
          save_mode = imode;
        else
          save_mode = BLKmode;

        if (ARGS_GROW_DOWNWARD)
          delta = -high;
        else
          delta = low;

        addr = plus_constant (Pmode, argblock, delta);
        stack_area = gen_rtx_MEM (save_mode, memory_address (save_mode, addr));

        set_mem_align (stack_area, PARM_BOUNDARY);
        if (save_mode == BLKmode)
          {
            save_area = assign_stack_temp (BLKmode, num_to_save);
            emit_block_move (validize_mem (save_area), stack_area,
                             GEN_INT (num_to_save), BLOCK_OP_CALL_PARM);
          }
        else
          {
            save_area = gen_reg_rtx (save_mode);
            emit_move_insn (save_area, stack_area);
          }

        return save_area;
      }

  return NULL_RTX;
}

static void
restore_fixed_argument_area (rtx save_area, rtx argblock, int high_to_save, int low_to_save)
{
  machine_mode save_mode = GET_MODE (save_area);
  int delta;
  rtx addr, stack_area;

  if (ARGS_GROW_DOWNWARD)
    delta = -high_to_save;
  else
    delta = low_to_save;

  addr = plus_constant (Pmode, argblock, delta);
  stack_area = gen_rtx_MEM (save_mode, memory_address (save_mode, addr));
  set_mem_align (stack_area, PARM_BOUNDARY);

  if (save_mode != BLKmode)
    emit_move_insn (stack_area, save_area);
  else
    emit_block_move (stack_area, validize_mem (save_area),
                     GEN_INT (high_to_save - low_to_save + 1),
                     BLOCK_OP_CALL_PARM);
}
#endif /* REG_PARM_STACK_SPACE */

/* If any elements in ARGS refer to parameters that are to be passed in
   registers, but not in memory, and whose alignment does not permit a
   direct copy into registers, copy the values into a group of pseudos
   which we will later copy into the appropriate hard registers.

   Pseudos for each unaligned argument will be stored into the array
   args[argnum].aligned_regs.  The caller is responsible for deallocating
   the aligned_regs array if it is nonzero.  */

static void
store_unaligned_arguments_into_pseudos (struct arg_data *args, int num_actuals)
{
  int i, j;

  for (i = 0; i < num_actuals; i++)
    if (args[i].reg != 0 && ! args[i].pass_on_stack
        && GET_CODE (args[i].reg) != PARALLEL
        && args[i].mode == BLKmode
        && MEM_P (args[i].value)
        && (MEM_ALIGN (args[i].value)
            < (unsigned int) MIN (BIGGEST_ALIGNMENT, BITS_PER_WORD)))
      {
        int bytes = int_size_in_bytes (TREE_TYPE (args[i].tree_value));
        int endian_correction = 0;

        if (args[i].partial)
          {
            gcc_assert (args[i].partial % UNITS_PER_WORD == 0);
            args[i].n_aligned_regs = args[i].partial / UNITS_PER_WORD;
          }
        else
          {
            args[i].n_aligned_regs
              = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
          }

        args[i].aligned_regs = XNEWVEC (rtx, args[i].n_aligned_regs);

        /* Structures smaller than a word are normally aligned to the
           least significant byte.  On a BYTES_BIG_ENDIAN machine,
           this means we must skip the empty high order bytes when
           calculating the bit offset.  */
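        /* As a worked example (assuming a 32-bit BITS_PER_WORD for
           illustration): a 3-byte structure padded downward occupies
           the most significant bytes of its word, so the correction
           computed below is 32 - 3 * 8 == 8 bits.  */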
        if (bytes < UNITS_PER_WORD
#ifdef BLOCK_REG_PADDING
            && (BLOCK_REG_PADDING (args[i].mode,
                                   TREE_TYPE (args[i].tree_value), 1)
                == PAD_DOWNWARD)
#else
            && BYTES_BIG_ENDIAN
#endif
            )
          endian_correction = BITS_PER_WORD - bytes * BITS_PER_UNIT;

        for (j = 0; j < args[i].n_aligned_regs; j++)
          {
            rtx reg = gen_reg_rtx (word_mode);
            rtx word = operand_subword_force (args[i].value, j, BLKmode);
            int bitsize = MIN (bytes * BITS_PER_UNIT, BITS_PER_WORD);

            args[i].aligned_regs[j] = reg;
            word = extract_bit_field (word, bitsize, 0, 1, NULL_RTX,
                                      word_mode, word_mode, false, NULL);

            /* There is no need to restrict this code to loading items
               in TYPE_ALIGN sized hunks.  The bitfield instructions can
               load up entire word sized registers efficiently.

               ??? This may not be needed anymore.
               We used to emit a clobber here, but that doesn't let later
               passes optimize the instructions we emit.  By storing 0 into
               the register, later passes know the first AND to zero out the
               bitfield being set in the register is unnecessary.  The store
               of 0 will be deleted, as will at least the first AND.  */

            emit_move_insn (reg, const0_rtx);

            bytes -= bitsize / BITS_PER_UNIT;
            store_bit_field (reg, bitsize, endian_correction, 0, 0,
                             word_mode, word, false);
          }
      }
}

/* The limit set by -Walloc-size-larger-than=.  */
122638fd1498Szrj static GTY(()) tree alloc_object_size_limit;
122738fd1498Szrj
122838fd1498Szrj /* Initialize ALLOC_OBJECT_SIZE_LIMIT based on the -Walloc-size-larger-than=
122938fd1498Szrj setting if the option is specified, or to the maximum object size if it
123038fd1498Szrj is not. Return the initialized value. */
123138fd1498Szrj
123238fd1498Szrj static tree
123338fd1498Szrj alloc_max_size (void)
123438fd1498Szrj {
1235*58e805e6Szrj if (alloc_object_size_limit)
1236*58e805e6Szrj return alloc_object_size_limit;
1237*58e805e6Szrj
123838fd1498Szrj alloc_object_size_limit = max_object_size ();
123938fd1498Szrj
1240*58e805e6Szrj if (!warn_alloc_size_limit)
1241*58e805e6Szrj return alloc_object_size_limit;
1242*58e805e6Szrj
1243*58e805e6Szrj const char *optname = "-Walloc-size-larger-than=";
1244*58e805e6Szrj
124538fd1498Szrj char *end = NULL;
124638fd1498Szrj errno = 0;
124738fd1498Szrj unsigned HOST_WIDE_INT unit = 1;
124838fd1498Szrj unsigned HOST_WIDE_INT limit
124938fd1498Szrj = strtoull (warn_alloc_size_limit, &end, 10);
125038fd1498Szrj
1251*58e805e6Szrj   /* If the value is too large to be represented, strtoull sets
1252*58e805e6Szrj      LIMIT to the maximum representable value and sets errno to
1253*58e805e6Szrj      ERANGE; that maximum is then used.  */
1254*58e805e6Szrj
125538fd1498Szrj if (end && *end)
125638fd1498Szrj {
125738fd1498Szrj /* Numeric option arguments are at most INT_MAX. Make it
125838fd1498Szrj possible to specify a larger value by accepting common
125938fd1498Szrj suffixes. */
126038fd1498Szrj if (!strcmp (end, "kB"))
126138fd1498Szrj unit = 1000;
1262*58e805e6Szrj else if (!strcasecmp (end, "KiB") || !strcmp (end, "KB"))
126338fd1498Szrj unit = 1024;
126438fd1498Szrj else if (!strcmp (end, "MB"))
126538fd1498Szrj unit = HOST_WIDE_INT_UC (1000) * 1000;
126638fd1498Szrj else if (!strcasecmp (end, "MiB"))
126738fd1498Szrj unit = HOST_WIDE_INT_UC (1024) * 1024;
126838fd1498Szrj else if (!strcasecmp (end, "GB"))
126938fd1498Szrj unit = HOST_WIDE_INT_UC (1000) * 1000 * 1000;
127038fd1498Szrj else if (!strcasecmp (end, "GiB"))
127138fd1498Szrj unit = HOST_WIDE_INT_UC (1024) * 1024 * 1024;
127238fd1498Szrj else if (!strcasecmp (end, "TB"))
127338fd1498Szrj unit = HOST_WIDE_INT_UC (1000) * 1000 * 1000 * 1000;
127438fd1498Szrj else if (!strcasecmp (end, "TiB"))
127538fd1498Szrj unit = HOST_WIDE_INT_UC (1024) * 1024 * 1024 * 1024;
127638fd1498Szrj else if (!strcasecmp (end, "PB"))
127738fd1498Szrj unit = HOST_WIDE_INT_UC (1000) * 1000 * 1000 * 1000 * 1000;
127838fd1498Szrj else if (!strcasecmp (end, "PiB"))
127938fd1498Szrj unit = HOST_WIDE_INT_UC (1024) * 1024 * 1024 * 1024 * 1024;
128038fd1498Szrj else if (!strcasecmp (end, "EB"))
128138fd1498Szrj unit = HOST_WIDE_INT_UC (1000) * 1000 * 1000 * 1000 * 1000
128238fd1498Szrj * 1000;
128338fd1498Szrj else if (!strcasecmp (end, "EiB"))
128438fd1498Szrj unit = HOST_WIDE_INT_UC (1024) * 1024 * 1024 * 1024 * 1024
128538fd1498Szrj * 1024;
128638fd1498Szrj else
1287*58e805e6Szrj {
1288*58e805e6Szrj /* This could mean an unknown suffix or a bad prefix, like
1289*58e805e6Szrj "+-1". */
1290*58e805e6Szrj warning_at (UNKNOWN_LOCATION, 0,
1291*58e805e6Szrj "invalid argument %qs to %qs",
1292*58e805e6Szrj warn_alloc_size_limit, optname);
1293*58e805e6Szrj
1294*58e805e6Szrj /* Ignore the limit extracted by strtoull. */
129538fd1498Szrj unit = 0;
129638fd1498Szrj }
1297*58e805e6Szrj }
129838fd1498Szrj
129938fd1498Szrj if (unit)
130038fd1498Szrj {
130138fd1498Szrj widest_int w = wi::mul (limit, unit);
130238fd1498Szrj if (w < wi::to_widest (alloc_object_size_limit))
130338fd1498Szrj alloc_object_size_limit
130438fd1498Szrj = wide_int_to_tree (ptrdiff_type_node, w);
1305*58e805e6Szrj else
1306*58e805e6Szrj alloc_object_size_limit = build_all_ones_cst (size_type_node);
130738fd1498Szrj }
1308*58e805e6Szrj
1309*58e805e6Szrj
131038fd1498Szrj return alloc_object_size_limit;
131138fd1498Szrj }
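/* Illustrative sketch of the parsing above (not from the original
   source): -Walloc-size-larger-than=1MiB leaves LIMIT == 1 and
   END == "MiB", so UNIT becomes 1024 * 1024 and the stored limit is
   wide_int_to_tree (ptrdiff_type_node, 1048576).  A plain argument
   such as 4096 leaves *END == '\0' and UNIT == 1, giving 4096.  */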
131238fd1498Szrj
131338fd1498Szrj /* Return true when EXP's range can be determined and set RANGE[] to it
131438fd1498Szrj    after adjusting it if necessary to make it represent a valid size
131538fd1498Szrj    of an object, or a valid size argument to an allocation function declared
131638fd1498Szrj with attribute alloc_size (whose argument may be signed), or to a string
131738fd1498Szrj manipulation function like memset. When ALLOW_ZERO is true, allow
131838fd1498Szrj returning a range of [0, 0] for a size in an anti-range [1, N] where
131938fd1498Szrj N > PTRDIFF_MAX. A zero range is a (nearly) invalid argument to
132038fd1498Szrj allocation functions like malloc but it is a valid argument to
132138fd1498Szrj functions like memset. */
132238fd1498Szrj
132338fd1498Szrj bool
132438fd1498Szrj get_size_range (tree exp, tree range[2], bool allow_zero /* = false */)
132538fd1498Szrj {
132638fd1498Szrj if (tree_fits_uhwi_p (exp))
132738fd1498Szrj {
132838fd1498Szrj /* EXP is a constant. */
132938fd1498Szrj range[0] = range[1] = exp;
133038fd1498Szrj return true;
133138fd1498Szrj }
133238fd1498Szrj
133338fd1498Szrj tree exptype = TREE_TYPE (exp);
133438fd1498Szrj bool integral = INTEGRAL_TYPE_P (exptype);
133538fd1498Szrj
133638fd1498Szrj wide_int min, max;
133738fd1498Szrj enum value_range_type range_type;
133838fd1498Szrj
133938fd1498Szrj if (TREE_CODE (exp) == SSA_NAME && integral)
134038fd1498Szrj range_type = get_range_info (exp, &min, &max);
134138fd1498Szrj else
134238fd1498Szrj range_type = VR_VARYING;
134338fd1498Szrj
134438fd1498Szrj if (range_type == VR_VARYING)
134538fd1498Szrj {
134638fd1498Szrj if (integral)
134738fd1498Szrj {
134838fd1498Szrj /* Use the full range of the type of the expression when
134938fd1498Szrj no value range information is available. */
135038fd1498Szrj range[0] = TYPE_MIN_VALUE (exptype);
135138fd1498Szrj range[1] = TYPE_MAX_VALUE (exptype);
135238fd1498Szrj return true;
135338fd1498Szrj }
135438fd1498Szrj
135538fd1498Szrj range[0] = NULL_TREE;
135638fd1498Szrj range[1] = NULL_TREE;
135738fd1498Szrj return false;
135838fd1498Szrj }
135938fd1498Szrj
136038fd1498Szrj unsigned expprec = TYPE_PRECISION (exptype);
136138fd1498Szrj
136238fd1498Szrj bool signed_p = !TYPE_UNSIGNED (exptype);
136338fd1498Szrj
136438fd1498Szrj if (range_type == VR_ANTI_RANGE)
136538fd1498Szrj {
136638fd1498Szrj if (signed_p)
136738fd1498Szrj {
136838fd1498Szrj if (wi::les_p (max, 0))
136938fd1498Szrj {
137038fd1498Szrj /* EXP is not in a strictly negative range. That means
137138fd1498Szrj it must be in some (not necessarily strictly) positive
137238fd1498Szrj range which includes zero. Since in signed to unsigned
137338fd1498Szrj conversions negative values end up converted to large
137438fd1498Szrj positive values, and otherwise they are not valid sizes,
137538fd1498Szrj the resulting range is in both cases [0, TYPE_MAX]. */
137638fd1498Szrj min = wi::zero (expprec);
137738fd1498Szrj max = wi::to_wide (TYPE_MAX_VALUE (exptype));
137838fd1498Szrj }
137938fd1498Szrj else if (wi::les_p (min - 1, 0))
138038fd1498Szrj {
138138fd1498Szrj /* EXP is not in a negative-positive range. That means EXP
138238fd1498Szrj is either negative, or greater than max. Since negative
138338fd1498Szrj sizes are invalid make the range [MAX + 1, TYPE_MAX]. */
138438fd1498Szrj min = max + 1;
138538fd1498Szrj max = wi::to_wide (TYPE_MAX_VALUE (exptype));
138638fd1498Szrj }
138738fd1498Szrj else
138838fd1498Szrj {
138938fd1498Szrj max = min - 1;
139038fd1498Szrj min = wi::zero (expprec);
139138fd1498Szrj }
139238fd1498Szrj }
139338fd1498Szrj else if (wi::eq_p (0, min - 1))
139438fd1498Szrj {
139538fd1498Szrj /* EXP is unsigned and not in the range [1, MAX]. That means
139638fd1498Szrj it's either zero or greater than MAX. Even though 0 would
139738fd1498Szrj normally be detected by -Walloc-zero, unless ALLOW_ZERO
139838fd1498Szrj is true, set the range to [MAX, TYPE_MAX] so that when MAX
139938fd1498Szrj is greater than the limit the whole range is diagnosed. */
140038fd1498Szrj if (allow_zero)
140138fd1498Szrj min = max = wi::zero (expprec);
140238fd1498Szrj else
140338fd1498Szrj {
140438fd1498Szrj min = max + 1;
140538fd1498Szrj max = wi::to_wide (TYPE_MAX_VALUE (exptype));
140638fd1498Szrj }
140738fd1498Szrj }
140838fd1498Szrj else
140938fd1498Szrj {
141038fd1498Szrj max = min - 1;
141138fd1498Szrj min = wi::zero (expprec);
141238fd1498Szrj }
141338fd1498Szrj }
141438fd1498Szrj
141538fd1498Szrj range[0] = wide_int_to_tree (exptype, min);
141638fd1498Szrj range[1] = wide_int_to_tree (exptype, max);
141738fd1498Szrj
141838fd1498Szrj return true;
141938fd1498Szrj }
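/* Worked sketch (an illustration, assuming a 32-bit unsigned type
   for EXP): for an SSA name with the anti-range ~[1, 7] the branch
   above yields MIN == 8 (MAX + 1) and MAX == UINT_MAX, i.e.
   RANGE == [8, 4294967295]; with ALLOW_ZERO the result is instead
   RANGE == [0, 0], which is valid for memset but not for malloc.  */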
142038fd1498Szrj
142138fd1498Szrj /* Diagnose a call EXP to function FN decorated with attribute alloc_size
142238fd1498Szrj whose argument numbers given by IDX with values given by ARGS exceed
142338fd1498Szrj    the maximum object size or cause an unsigned overflow (wrapping) when
142438fd1498Szrj multiplied. When ARGS[0] is null the function does nothing. ARGS[1]
142538fd1498Szrj may be null for functions like malloc, and non-null for those like
142638fd1498Szrj calloc that are decorated with a two-argument attribute alloc_size. */
142738fd1498Szrj
142838fd1498Szrj void
142938fd1498Szrj maybe_warn_alloc_args_overflow (tree fn, tree exp, tree args[2], int idx[2])
143038fd1498Szrj {
143138fd1498Szrj /* The range each of the (up to) two arguments is known to be in. */
143238fd1498Szrj tree argrange[2][2] = { { NULL_TREE, NULL_TREE }, { NULL_TREE, NULL_TREE } };
143338fd1498Szrj
143438fd1498Szrj /* Maximum object size set by -Walloc-size-larger-than= or SIZE_MAX / 2. */
143538fd1498Szrj tree maxobjsize = alloc_max_size ();
143638fd1498Szrj
143738fd1498Szrj location_t loc = EXPR_LOCATION (exp);
143838fd1498Szrj
143938fd1498Szrj bool warned = false;
144038fd1498Szrj
144138fd1498Szrj /* Validate each argument individually. */
144238fd1498Szrj for (unsigned i = 0; i != 2 && args[i]; ++i)
144338fd1498Szrj {
144438fd1498Szrj if (TREE_CODE (args[i]) == INTEGER_CST)
144538fd1498Szrj {
144638fd1498Szrj argrange[i][0] = args[i];
144738fd1498Szrj argrange[i][1] = args[i];
144838fd1498Szrj
144938fd1498Szrj if (tree_int_cst_lt (args[i], integer_zero_node))
145038fd1498Szrj {
145138fd1498Szrj warned = warning_at (loc, OPT_Walloc_size_larger_than_,
145238fd1498Szrj "%Kargument %i value %qE is negative",
145338fd1498Szrj exp, idx[i] + 1, args[i]);
145438fd1498Szrj }
145538fd1498Szrj else if (integer_zerop (args[i]))
145638fd1498Szrj {
145738fd1498Szrj /* Avoid issuing -Walloc-zero for allocation functions other
145838fd1498Szrj than __builtin_alloca that are declared with attribute
145938fd1498Szrj returns_nonnull because there's no portability risk. This
146038fd1498Szrj avoids warning for such calls to libiberty's xmalloc and
146138fd1498Szrj friends.
146238fd1498Szrj 	     Also avoid issuing the warning for calls to a function named
146338fd1498Szrj 	     "alloca".  */
146438fd1498Szrj if ((DECL_FUNCTION_CODE (fn) == BUILT_IN_ALLOCA
146538fd1498Szrj && IDENTIFIER_LENGTH (DECL_NAME (fn)) != 6)
146638fd1498Szrj || (DECL_FUNCTION_CODE (fn) != BUILT_IN_ALLOCA
146738fd1498Szrj && !lookup_attribute ("returns_nonnull",
146838fd1498Szrj TYPE_ATTRIBUTES (TREE_TYPE (fn)))))
146938fd1498Szrj warned = warning_at (loc, OPT_Walloc_zero,
147038fd1498Szrj "%Kargument %i value is zero",
147138fd1498Szrj exp, idx[i] + 1);
147238fd1498Szrj }
147338fd1498Szrj else if (tree_int_cst_lt (maxobjsize, args[i]))
147438fd1498Szrj {
147538fd1498Szrj /* G++ emits calls to ::operator new[](SIZE_MAX) in C++98
147638fd1498Szrj mode and with -fno-exceptions as a way to indicate array
147738fd1498Szrj size overflow. There's no good way to detect C++98 here
147838fd1498Szrj so avoid diagnosing these calls for all C++ modes. */
147938fd1498Szrj if (i == 0
148038fd1498Szrj && !args[1]
148138fd1498Szrj && lang_GNU_CXX ()
148238fd1498Szrj && DECL_IS_OPERATOR_NEW (fn)
148338fd1498Szrj && integer_all_onesp (args[i]))
148438fd1498Szrj continue;
148538fd1498Szrj
148638fd1498Szrj warned = warning_at (loc, OPT_Walloc_size_larger_than_,
148738fd1498Szrj "%Kargument %i value %qE exceeds "
148838fd1498Szrj "maximum object size %E",
148938fd1498Szrj exp, idx[i] + 1, args[i], maxobjsize);
149038fd1498Szrj }
149138fd1498Szrj }
149238fd1498Szrj else if (TREE_CODE (args[i]) == SSA_NAME
149338fd1498Szrj && get_size_range (args[i], argrange[i]))
149438fd1498Szrj {
149538fd1498Szrj /* Verify that the argument's range is not negative (including
149638fd1498Szrj upper bound of zero). */
149738fd1498Szrj if (tree_int_cst_lt (argrange[i][0], integer_zero_node)
149838fd1498Szrj && tree_int_cst_le (argrange[i][1], integer_zero_node))
149938fd1498Szrj {
150038fd1498Szrj warned = warning_at (loc, OPT_Walloc_size_larger_than_,
150138fd1498Szrj "%Kargument %i range [%E, %E] is negative",
150238fd1498Szrj exp, idx[i] + 1,
150338fd1498Szrj argrange[i][0], argrange[i][1]);
150438fd1498Szrj }
150538fd1498Szrj else if (tree_int_cst_lt (maxobjsize, argrange[i][0]))
150638fd1498Szrj {
150738fd1498Szrj warned = warning_at (loc, OPT_Walloc_size_larger_than_,
150838fd1498Szrj "%Kargument %i range [%E, %E] exceeds "
150938fd1498Szrj "maximum object size %E",
151038fd1498Szrj exp, idx[i] + 1,
151138fd1498Szrj argrange[i][0], argrange[i][1],
151238fd1498Szrj maxobjsize);
151338fd1498Szrj }
151438fd1498Szrj }
151538fd1498Szrj }
151638fd1498Szrj
151738fd1498Szrj   if (!argrange[0][0])
151838fd1498Szrj return;
151938fd1498Szrj
152038fd1498Szrj /* For a two-argument alloc_size, validate the product of the two
152138fd1498Szrj arguments if both of their values or ranges are known. */
152238fd1498Szrj if (!warned && tree_fits_uhwi_p (argrange[0][0])
152338fd1498Szrj && argrange[1][0] && tree_fits_uhwi_p (argrange[1][0])
152438fd1498Szrj && !integer_onep (argrange[0][0])
152538fd1498Szrj && !integer_onep (argrange[1][0]))
152638fd1498Szrj {
152738fd1498Szrj /* Check for overflow in the product of a function decorated with
152838fd1498Szrj attribute alloc_size (X, Y). */
152938fd1498Szrj unsigned szprec = TYPE_PRECISION (size_type_node);
153038fd1498Szrj wide_int x = wi::to_wide (argrange[0][0], szprec);
153138fd1498Szrj wide_int y = wi::to_wide (argrange[1][0], szprec);
153238fd1498Szrj
153338fd1498Szrj bool vflow;
153438fd1498Szrj wide_int prod = wi::umul (x, y, &vflow);
153538fd1498Szrj
153638fd1498Szrj if (vflow)
153738fd1498Szrj warned = warning_at (loc, OPT_Walloc_size_larger_than_,
153838fd1498Szrj "%Kproduct %<%E * %E%> of arguments %i and %i "
153938fd1498Szrj "exceeds %<SIZE_MAX%>",
154038fd1498Szrj exp, argrange[0][0], argrange[1][0],
154138fd1498Szrj idx[0] + 1, idx[1] + 1);
154238fd1498Szrj else if (wi::ltu_p (wi::to_wide (maxobjsize, szprec), prod))
154338fd1498Szrj warned = warning_at (loc, OPT_Walloc_size_larger_than_,
154438fd1498Szrj "%Kproduct %<%E * %E%> of arguments %i and %i "
154538fd1498Szrj "exceeds maximum object size %E",
154638fd1498Szrj exp, argrange[0][0], argrange[1][0],
154738fd1498Szrj idx[0] + 1, idx[1] + 1,
154838fd1498Szrj maxobjsize);
154938fd1498Szrj
155038fd1498Szrj if (warned)
155138fd1498Szrj {
155238fd1498Szrj /* Print the full range of each of the two arguments to make
155338fd1498Szrj it clear when it is, in fact, in a range and not constant. */
155438fd1498Szrj if (argrange[0][0] != argrange [0][1])
155538fd1498Szrj inform (loc, "argument %i in the range [%E, %E]",
155638fd1498Szrj idx[0] + 1, argrange[0][0], argrange[0][1]);
155738fd1498Szrj if (argrange[1][0] != argrange [1][1])
155838fd1498Szrj inform (loc, "argument %i in the range [%E, %E]",
155938fd1498Szrj idx[1] + 1, argrange[1][0], argrange[1][1]);
156038fd1498Szrj }
156138fd1498Szrj }
156238fd1498Szrj
156338fd1498Szrj if (warned)
156438fd1498Szrj {
156538fd1498Szrj location_t fnloc = DECL_SOURCE_LOCATION (fn);
156638fd1498Szrj
156738fd1498Szrj if (DECL_IS_BUILTIN (fn))
156838fd1498Szrj inform (loc,
156938fd1498Szrj "in a call to built-in allocation function %qD", fn);
157038fd1498Szrj else
157138fd1498Szrj inform (fnloc,
157238fd1498Szrj "in a call to allocation function %qD declared here", fn);
157338fd1498Szrj }
157438fd1498Szrj }
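/* Sketch of the product check above on a hypothetical declaration
   (not from the original source):

     void *my_calloc (size_t, size_t) __attribute__ ((alloc_size (1, 2)));

   With a 32-bit size_t, a call my_calloc (0x10000, 0x10000) makes
   wi::umul set VFLOW, since 2^16 * 2^16 == 2^32 wraps to zero, so the
   "exceeds SIZE_MAX" warning triggers even though each argument is
   individually valid.  */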
157538fd1498Szrj
157638fd1498Szrj /* If EXPR refers to a character array or pointer declared with attribute
157738fd1498Szrj    nonstring, return a decl for that array or pointer and set *REF to
157838fd1498Szrj the referenced enclosing object or pointer. Otherwise returns
157938fd1498Szrj null. */
158038fd1498Szrj
158138fd1498Szrj tree
158238fd1498Szrj get_attr_nonstring_decl (tree expr, tree *ref)
158338fd1498Szrj {
158438fd1498Szrj tree decl = expr;
158538fd1498Szrj if (TREE_CODE (decl) == SSA_NAME)
158638fd1498Szrj {
158738fd1498Szrj gimple *def = SSA_NAME_DEF_STMT (decl);
158838fd1498Szrj
158938fd1498Szrj if (is_gimple_assign (def))
159038fd1498Szrj {
159138fd1498Szrj tree_code code = gimple_assign_rhs_code (def);
159238fd1498Szrj if (code == ADDR_EXPR
159338fd1498Szrj || code == COMPONENT_REF
159438fd1498Szrj || code == VAR_DECL)
159538fd1498Szrj decl = gimple_assign_rhs1 (def);
159638fd1498Szrj }
159738fd1498Szrj else if (tree var = SSA_NAME_VAR (decl))
159838fd1498Szrj decl = var;
159938fd1498Szrj }
160038fd1498Szrj
160138fd1498Szrj if (TREE_CODE (decl) == ADDR_EXPR)
160238fd1498Szrj decl = TREE_OPERAND (decl, 0);
160338fd1498Szrj
160438fd1498Szrj if (ref)
160538fd1498Szrj *ref = decl;
160638fd1498Szrj
1607*58e805e6Szrj if (TREE_CODE (decl) == ARRAY_REF)
1608*58e805e6Szrj decl = TREE_OPERAND (decl, 0);
1609*58e805e6Szrj else if (TREE_CODE (decl) == COMPONENT_REF)
161038fd1498Szrj decl = TREE_OPERAND (decl, 1);
1611*58e805e6Szrj else if (TREE_CODE (decl) == MEM_REF)
1612*58e805e6Szrj return get_attr_nonstring_decl (TREE_OPERAND (decl, 0), ref);
161338fd1498Szrj
161438fd1498Szrj if (DECL_P (decl)
161538fd1498Szrj && lookup_attribute ("nonstring", DECL_ATTRIBUTES (decl)))
161638fd1498Szrj return decl;
161738fd1498Szrj
161838fd1498Szrj return NULL_TREE;
161938fd1498Szrj }
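/* Usage sketch with a hypothetical declaration (not from the original
   source):

     char buf[8] __attribute__ ((nonstring));

   For EXPR == buf[i], *REF is set to the ARRAY_REF and the function
   strips it to the VAR_DECL for BUF, whose DECL_ATTRIBUTES carry
   "nonstring", so BUF is returned; for an unannotated array the
   attribute lookup fails and the result is NULL_TREE.  */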
162038fd1498Szrj
162138fd1498Szrj /* Warn about passing a non-string array/pointer to a function that
162238fd1498Szrj expects a nul-terminated string argument. */
162338fd1498Szrj
162438fd1498Szrj void
162538fd1498Szrj maybe_warn_nonstring_arg (tree fndecl, tree exp)
162638fd1498Szrj {
162738fd1498Szrj if (!fndecl || DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_NORMAL)
162838fd1498Szrj return;
162938fd1498Szrj
1630*58e805e6Szrj if (!warn_stringop_overflow)
1631*58e805e6Szrj return;
1632*58e805e6Szrj
163338fd1498Szrj bool with_bounds = CALL_WITH_BOUNDS_P (exp);
163438fd1498Szrj
163538fd1498Szrj unsigned nargs = call_expr_nargs (exp);
163638fd1498Szrj
163738fd1498Szrj /* The bound argument to a bounded string function like strncpy. */
163838fd1498Szrj tree bound = NULL_TREE;
163938fd1498Szrj
1640*58e805e6Szrj /* The range of lengths of a string argument to one of the comparison
1641*58e805e6Szrj functions. If the length is less than the bound it is used instead. */
1642*58e805e6Szrj tree lenrng[2] = { NULL_TREE, NULL_TREE };
1643*58e805e6Szrj
164438fd1498Szrj /* It's safe to call "bounded" string functions with a non-string
164538fd1498Szrj argument since the functions provide an explicit bound for this
1646*58e805e6Szrj purpose. The exception is strncat where the bound may refer to
1647*58e805e6Szrj either the destination or the source. */
1648*58e805e6Szrj int fncode = DECL_FUNCTION_CODE (fndecl);
1649*58e805e6Szrj switch (fncode)
165038fd1498Szrj {
1651*58e805e6Szrj case BUILT_IN_STRCMP:
165238fd1498Szrj case BUILT_IN_STRNCMP:
165338fd1498Szrj case BUILT_IN_STRNCASECMP:
1654*58e805e6Szrj {
1655*58e805e6Szrj /* For these, if one argument refers to one or more of a set
1656*58e805e6Szrj of string constants or arrays of known size, determine
1657*58e805e6Szrj the range of their known or possible lengths and use it
1658*58e805e6Szrj conservatively as the bound for the unbounded function,
1659*58e805e6Szrj and to adjust the range of the bound of the bounded ones. */
1660*58e805e6Szrj unsigned stride = with_bounds ? 2 : 1;
1661*58e805e6Szrj for (unsigned argno = 0;
1662*58e805e6Szrj argno < MIN (nargs, 2 * stride)
1663*58e805e6Szrj && !(lenrng[1] && TREE_CODE (lenrng[1]) == INTEGER_CST);
1664*58e805e6Szrj argno += stride)
1665*58e805e6Szrj {
1666*58e805e6Szrj tree arg = CALL_EXPR_ARG (exp, argno);
1667*58e805e6Szrj if (!get_attr_nonstring_decl (arg))
1668*58e805e6Szrj get_range_strlen (arg, lenrng);
1669*58e805e6Szrj }
1670*58e805e6Szrj }
1671*58e805e6Szrj /* Fall through. */
1672*58e805e6Szrj
1673*58e805e6Szrj case BUILT_IN_STRNCAT:
1674*58e805e6Szrj case BUILT_IN_STPNCPY:
1675*58e805e6Szrj case BUILT_IN_STPNCPY_CHK:
167638fd1498Szrj case BUILT_IN_STRNCPY:
167738fd1498Szrj case BUILT_IN_STRNCPY_CHK:
167838fd1498Szrj {
167938fd1498Szrj unsigned argno = with_bounds ? 4 : 2;
168038fd1498Szrj if (argno < nargs)
168138fd1498Szrj bound = CALL_EXPR_ARG (exp, argno);
168238fd1498Szrj break;
168338fd1498Szrj }
168438fd1498Szrj
168538fd1498Szrj case BUILT_IN_STRNDUP:
168638fd1498Szrj {
168738fd1498Szrj unsigned argno = with_bounds ? 2 : 1;
168838fd1498Szrj if (argno < nargs)
168938fd1498Szrj bound = CALL_EXPR_ARG (exp, argno);
169038fd1498Szrj break;
169138fd1498Szrj }
169238fd1498Szrj
169338fd1498Szrj default:
169438fd1498Szrj break;
169538fd1498Szrj }
169638fd1498Szrj
169738fd1498Szrj /* Determine the range of the bound argument (if specified). */
169838fd1498Szrj tree bndrng[2] = { NULL_TREE, NULL_TREE };
169938fd1498Szrj if (bound)
170038fd1498Szrj get_size_range (bound, bndrng);
170138fd1498Szrj
1702*58e805e6Szrj if (lenrng[1] && TREE_CODE (lenrng[1]) == INTEGER_CST)
1703*58e805e6Szrj {
1704*58e805e6Szrj /* Add one for the nul. */
1705*58e805e6Szrj lenrng[1] = const_binop (PLUS_EXPR, TREE_TYPE (lenrng[1]),
1706*58e805e6Szrj lenrng[1], size_one_node);
1707*58e805e6Szrj
1708*58e805e6Szrj if (!bndrng[0])
1709*58e805e6Szrj {
1710*58e805e6Szrj /* Conservatively use the upper bound of the lengths for
1711*58e805e6Szrj both the lower and the upper bound of the operation. */
1712*58e805e6Szrj bndrng[0] = lenrng[1];
1713*58e805e6Szrj bndrng[1] = lenrng[1];
1714*58e805e6Szrj bound = void_type_node;
1715*58e805e6Szrj }
1716*58e805e6Szrj else
1717*58e805e6Szrj {
1718*58e805e6Szrj 	  /* Replace the bound on the operation with the upper bound
1719*58e805e6Szrj of the length of the string if the latter is smaller. */
1720*58e805e6Szrj if (tree_int_cst_lt (lenrng[1], bndrng[0]))
1721*58e805e6Szrj bndrng[0] = lenrng[1];
1722*58e805e6Szrj else if (tree_int_cst_lt (lenrng[1], bndrng[1]))
1723*58e805e6Szrj bndrng[1] = lenrng[1];
1724*58e805e6Szrj }
1725*58e805e6Szrj }
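/* Numeric sketch of the adjustment above (not from the original
   source): for strncmp (s, "ab", 5), get_range_strlen yields
   LENRNG[1] == 2, and adding one for the nul gives 3; since 3 is
   less than BNDRNG[0] == 5, BNDRNG becomes [3, 5] and the checks
   below compare the array size against the lower bound 3 rather
   than the explicit bound 5.  */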
1726*58e805e6Szrj
172738fd1498Szrj /* Iterate over the built-in function's formal arguments and check
172838fd1498Szrj each const char* against the actual argument. If the actual
172938fd1498Szrj argument is declared attribute non-string issue a warning unless
173038fd1498Szrj the argument's maximum length is bounded. */
173138fd1498Szrj function_args_iterator it;
173238fd1498Szrj function_args_iter_init (&it, TREE_TYPE (fndecl));
173338fd1498Szrj
173438fd1498Szrj for (unsigned argno = 0; ; ++argno, function_args_iter_next (&it))
173538fd1498Szrj {
173638fd1498Szrj /* Avoid iterating past the declared argument in a call
173738fd1498Szrj 	 to a function declared without a prototype.  */
173838fd1498Szrj if (argno >= nargs)
173938fd1498Szrj break;
174038fd1498Szrj
174138fd1498Szrj tree argtype = function_args_iter_cond (&it);
174238fd1498Szrj if (!argtype)
174338fd1498Szrj break;
174438fd1498Szrj
174538fd1498Szrj if (TREE_CODE (argtype) != POINTER_TYPE)
174638fd1498Szrj continue;
174738fd1498Szrj
174838fd1498Szrj argtype = TREE_TYPE (argtype);
174938fd1498Szrj
175038fd1498Szrj if (TREE_CODE (argtype) != INTEGER_TYPE
175138fd1498Szrj || !TYPE_READONLY (argtype))
175238fd1498Szrj continue;
175338fd1498Szrj
175438fd1498Szrj argtype = TYPE_MAIN_VARIANT (argtype);
175538fd1498Szrj if (argtype != char_type_node)
175638fd1498Szrj continue;
175738fd1498Szrj
175838fd1498Szrj tree callarg = CALL_EXPR_ARG (exp, argno);
175938fd1498Szrj if (TREE_CODE (callarg) == ADDR_EXPR)
176038fd1498Szrj callarg = TREE_OPERAND (callarg, 0);
176138fd1498Szrj
176238fd1498Szrj /* See if the destination is declared with attribute "nonstring". */
176338fd1498Szrj tree decl = get_attr_nonstring_decl (callarg);
176438fd1498Szrj if (!decl)
176538fd1498Szrj continue;
176638fd1498Szrj
1767*58e805e6Szrj /* The maximum number of array elements accessed. */
176838fd1498Szrj offset_int wibnd = 0;
1769*58e805e6Szrj
1770*58e805e6Szrj if (argno && fncode == BUILT_IN_STRNCAT)
1771*58e805e6Szrj {
1772*58e805e6Szrj 	  /* See if the bound in strncat is derived from the strlen
1773*58e805e6Szrj 	     of the destination (as it's expected to be).
1774*58e805e6Szrj If so, reset BOUND and FNCODE to trigger a warning. */
1775*58e805e6Szrj tree dstarg = CALL_EXPR_ARG (exp, 0);
1776*58e805e6Szrj if (is_strlen_related_p (dstarg, bound))
1777*58e805e6Szrj {
1778*58e805e6Szrj /* The bound applies to the destination, not to the source,
1779*58e805e6Szrj so reset these to trigger a warning without mentioning
1780*58e805e6Szrj the bound. */
1781*58e805e6Szrj bound = NULL;
1782*58e805e6Szrj fncode = 0;
1783*58e805e6Szrj }
1784*58e805e6Szrj else if (bndrng[1])
1785*58e805e6Szrj /* Use the upper bound of the range for strncat. */
1786*58e805e6Szrj wibnd = wi::to_offset (bndrng[1]);
1787*58e805e6Szrj }
1788*58e805e6Szrj else if (bndrng[0])
1789*58e805e6Szrj /* Use the lower bound of the range for functions other than
1790*58e805e6Szrj strncat. */
179138fd1498Szrj wibnd = wi::to_offset (bndrng[0]);
179238fd1498Szrj
1793*58e805e6Szrj /* Determine the size of the argument array if it is one. */
179438fd1498Szrj offset_int asize = wibnd;
1795*58e805e6Szrj bool known_size = false;
1796*58e805e6Szrj tree type = TREE_TYPE (decl);
179738fd1498Szrj
1798*58e805e6Szrj /* Determine the array size. For arrays of unknown bound and
1799*58e805e6Szrj pointers reset BOUND to trigger the appropriate warning. */
180038fd1498Szrj if (TREE_CODE (type) == ARRAY_TYPE)
1801*58e805e6Szrj {
180238fd1498Szrj if (tree arrbnd = TYPE_DOMAIN (type))
180338fd1498Szrj {
180438fd1498Szrj if ((arrbnd = TYPE_MAX_VALUE (arrbnd)))
1805*58e805e6Szrj {
180638fd1498Szrj asize = wi::to_offset (arrbnd) + 1;
1807*58e805e6Szrj known_size = true;
180838fd1498Szrj }
1809*58e805e6Szrj }
1810*58e805e6Szrj else if (bound == void_type_node)
1811*58e805e6Szrj bound = NULL_TREE;
1812*58e805e6Szrj }
1813*58e805e6Szrj else if (bound == void_type_node)
1814*58e805e6Szrj bound = NULL_TREE;
181538fd1498Szrj
181638fd1498Szrj location_t loc = EXPR_LOCATION (exp);
181738fd1498Szrj
1818*58e805e6Szrj       /* In a call to strncat with a bound in a range whose lower but
1819*58e805e6Szrj 	 not upper bound is less than the array size, reset ASIZE to
1820*58e805e6Szrj 	 be the same as the bound, and clear BOUND and FNCODE, to
1821*58e805e6Szrj 	 trigger the appropriate warning below.  */
1822*58e805e6Szrj if (fncode == BUILT_IN_STRNCAT
1823*58e805e6Szrj && bndrng[0] != bndrng[1]
1824*58e805e6Szrj && wi::ltu_p (wi::to_offset (bndrng[0]), asize)
1825*58e805e6Szrj && (!known_size
1826*58e805e6Szrj || wi::ltu_p (asize, wibnd)))
1827*58e805e6Szrj {
1828*58e805e6Szrj asize = wibnd;
1829*58e805e6Szrj bound = NULL_TREE;
1830*58e805e6Szrj fncode = 0;
1831*58e805e6Szrj }
1832*58e805e6Szrj
183338fd1498Szrj bool warned = false;
183438fd1498Szrj
183538fd1498Szrj if (wi::ltu_p (asize, wibnd))
1836*58e805e6Szrj {
1837*58e805e6Szrj if (bndrng[0] == bndrng[1])
183838fd1498Szrj warned = warning_at (loc, OPT_Wstringop_overflow_,
1839*58e805e6Szrj "%qD argument %i declared attribute "
1840*58e805e6Szrj "%<nonstring%> is smaller than the specified "
1841*58e805e6Szrj "bound %wu",
1842*58e805e6Szrj fndecl, argno + 1, wibnd.to_uhwi ());
1843*58e805e6Szrj else if (wi::ltu_p (asize, wi::to_offset (bndrng[0])))
1844*58e805e6Szrj warned = warning_at (loc, OPT_Wstringop_overflow_,
1845*58e805e6Szrj "%qD argument %i declared attribute "
1846*58e805e6Szrj "%<nonstring%> is smaller than "
1847*58e805e6Szrj "the specified bound [%E, %E]",
1848*58e805e6Szrj fndecl, argno + 1, bndrng[0], bndrng[1]);
1849*58e805e6Szrj else
1850*58e805e6Szrj warned = warning_at (loc, OPT_Wstringop_overflow_,
1851*58e805e6Szrj "%qD argument %i declared attribute "
1852*58e805e6Szrj "%<nonstring%> may be smaller than "
1853*58e805e6Szrj "the specified bound [%E, %E]",
1854*58e805e6Szrj fndecl, argno + 1, bndrng[0], bndrng[1]);
1855*58e805e6Szrj }
1856*58e805e6Szrj else if (fncode == BUILT_IN_STRNCAT)
1857*58e805e6Szrj ; /* Avoid warning for calls to strncat() when the bound
1858*58e805e6Szrj is equal to the size of the non-string argument. */
185938fd1498Szrj else if (!bound)
186038fd1498Szrj warned = warning_at (loc, OPT_Wstringop_overflow_,
186138fd1498Szrj "%qD argument %i declared attribute %<nonstring%>",
186238fd1498Szrj fndecl, argno + 1);
186338fd1498Szrj
186438fd1498Szrj if (warned)
186538fd1498Szrj inform (DECL_SOURCE_LOCATION (decl),
186638fd1498Szrj "argument %qD declared here", decl);
186738fd1498Szrj }
186838fd1498Szrj }
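/* End-to-end sketch with hypothetical code (not from the original
   source):

     char s[4] __attribute__ ((nonstring));
     strncpy (d, s, 6);

   The const char * source parameter selects argument 2, ASIZE == 4,
   BNDRNG == [6, 6], and wi::ltu_p (4, 6) holds, so the first
   warning_at above reports that the nonstring argument is smaller
   than the specified bound 6.  */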
186938fd1498Szrj
187038fd1498Szrj /* Issue an error if CALL_EXPR was flagged as requiring
187138fd1498Szrj    tail-call optimization.  */
187238fd1498Szrj
187338fd1498Szrj static void
187438fd1498Szrj maybe_complain_about_tail_call (tree call_expr, const char *reason)
187538fd1498Szrj {
187638fd1498Szrj gcc_assert (TREE_CODE (call_expr) == CALL_EXPR);
187738fd1498Szrj if (!CALL_EXPR_MUST_TAIL_CALL (call_expr))
187838fd1498Szrj return;
187938fd1498Szrj
188038fd1498Szrj error_at (EXPR_LOCATION (call_expr), "cannot tail-call: %s", reason);
188138fd1498Szrj }
188238fd1498Szrj
188338fd1498Szrj /* Fill in ARGS_SIZE and ARGS array based on the parameters found in
188438fd1498Szrj CALL_EXPR EXP.
188538fd1498Szrj
188638fd1498Szrj NUM_ACTUALS is the total number of parameters.
188738fd1498Szrj
188838fd1498Szrj N_NAMED_ARGS is the total number of named arguments.
188938fd1498Szrj
189038fd1498Szrj STRUCT_VALUE_ADDR_VALUE is the implicit argument for a struct return
189138fd1498Szrj value, or null.
189238fd1498Szrj
189338fd1498Szrj    FNDECL is the tree node for the target of this call (if known).
189438fd1498Szrj
189538fd1498Szrj ARGS_SO_FAR holds state needed by the target to know where to place
189638fd1498Szrj the next argument.
189738fd1498Szrj
189838fd1498Szrj REG_PARM_STACK_SPACE is the number of bytes of stack space reserved
189938fd1498Szrj for arguments which are passed in registers.
190038fd1498Szrj
190138fd1498Szrj    OLD_STACK_LEVEL is a pointer to an rtx which holds the old stack level
190238fd1498Szrj and may be modified by this routine.
190338fd1498Szrj
190438fd1498Szrj OLD_PENDING_ADJ, MUST_PREALLOCATE and FLAGS are pointers to integer
190538fd1498Szrj flags which may be modified by this routine.
190638fd1498Szrj
190738fd1498Szrj MAY_TAILCALL is cleared if we encounter an invisible pass-by-reference
190838fd1498Szrj that requires allocation of stack space.
190938fd1498Szrj
191038fd1498Szrj CALL_FROM_THUNK_P is true if this call is the jump from a thunk to
191138fd1498Szrj the thunked-to function. */
191238fd1498Szrj
191338fd1498Szrj static void
191438fd1498Szrj initialize_argument_information (int num_actuals ATTRIBUTE_UNUSED,
191538fd1498Szrj struct arg_data *args,
191638fd1498Szrj struct args_size *args_size,
191738fd1498Szrj int n_named_args ATTRIBUTE_UNUSED,
191838fd1498Szrj tree exp, tree struct_value_addr_value,
191938fd1498Szrj tree fndecl, tree fntype,
192038fd1498Szrj cumulative_args_t args_so_far,
192138fd1498Szrj int reg_parm_stack_space,
192238fd1498Szrj rtx *old_stack_level,
192338fd1498Szrj poly_int64_pod *old_pending_adj,
192438fd1498Szrj int *must_preallocate, int *ecf_flags,
192538fd1498Szrj bool *may_tailcall, bool call_from_thunk_p)
192638fd1498Szrj {
192738fd1498Szrj CUMULATIVE_ARGS *args_so_far_pnt = get_cumulative_args (args_so_far);
192838fd1498Szrj location_t loc = EXPR_LOCATION (exp);
192938fd1498Szrj
193038fd1498Szrj /* Count arg position in order args appear. */
193138fd1498Szrj int argpos;
193238fd1498Szrj
193338fd1498Szrj int i;
193438fd1498Szrj
193538fd1498Szrj args_size->constant = 0;
193638fd1498Szrj args_size->var = 0;
193738fd1498Szrj
193838fd1498Szrj bitmap_obstack_initialize (NULL);
193938fd1498Szrj
194038fd1498Szrj /* In this loop, we consider args in the order they are written.
194138fd1498Szrj We fill up ARGS from the back. */
194238fd1498Szrj
194338fd1498Szrj i = num_actuals - 1;
194438fd1498Szrj {
194538fd1498Szrj int j = i, ptr_arg = -1;
194638fd1498Szrj call_expr_arg_iterator iter;
194738fd1498Szrj tree arg;
194838fd1498Szrj bitmap slots = NULL;
194938fd1498Szrj
195038fd1498Szrj if (struct_value_addr_value)
195138fd1498Szrj {
195238fd1498Szrj args[j].tree_value = struct_value_addr_value;
195338fd1498Szrj j--;
195438fd1498Szrj
195538fd1498Szrj 	/* If we pass a structure address then we need to
195638fd1498Szrj 	   create bounds for it.  Since the created bounds are
195738fd1498Szrj 	   a call statement, we expand it right here to avoid
195838fd1498Szrj 	   fixing up all the other places where it may be expanded.  */
195938fd1498Szrj if (CALL_WITH_BOUNDS_P (exp))
196038fd1498Szrj {
196138fd1498Szrj args[j].value = gen_reg_rtx (targetm.chkp_bound_mode ());
196238fd1498Szrj args[j].tree_value
196338fd1498Szrj = chkp_make_bounds_for_struct_addr (struct_value_addr_value);
196438fd1498Szrj expand_expr_real (args[j].tree_value, args[j].value, VOIDmode,
196538fd1498Szrj EXPAND_NORMAL, 0, false);
196638fd1498Szrj args[j].pointer_arg = j + 1;
196738fd1498Szrj j--;
196838fd1498Szrj }
196938fd1498Szrj }
197038fd1498Szrj argpos = 0;
197138fd1498Szrj FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
197238fd1498Szrj {
197338fd1498Szrj tree argtype = TREE_TYPE (arg);
197438fd1498Szrj
197538fd1498Szrj /* Remember last param with pointer and associate it
197638fd1498Szrj with following pointer bounds. */
197738fd1498Szrj if (CALL_WITH_BOUNDS_P (exp)
197838fd1498Szrj && chkp_type_has_pointer (argtype))
197938fd1498Szrj {
198038fd1498Szrj if (slots)
198138fd1498Szrj BITMAP_FREE (slots);
198238fd1498Szrj ptr_arg = j;
198338fd1498Szrj if (!BOUNDED_TYPE_P (argtype))
198438fd1498Szrj {
198538fd1498Szrj slots = BITMAP_ALLOC (NULL);
198638fd1498Szrj chkp_find_bound_slots (argtype, slots);
198738fd1498Szrj }
198838fd1498Szrj }
198938fd1498Szrj else if (CALL_WITH_BOUNDS_P (exp)
199038fd1498Szrj && pass_by_reference (NULL, TYPE_MODE (argtype), argtype,
199138fd1498Szrj argpos < n_named_args))
199238fd1498Szrj {
199338fd1498Szrj if (slots)
199438fd1498Szrj BITMAP_FREE (slots);
199538fd1498Szrj ptr_arg = j;
199638fd1498Szrj }
199738fd1498Szrj else if (POINTER_BOUNDS_TYPE_P (argtype))
199838fd1498Szrj {
199938fd1498Szrj /* We expect bounds in instrumented calls only.
200038fd1498Szrj 	       Otherwise it is a sign we lost the flag due to some optimization
200138fd1498Szrj and may emit call args incorrectly. */
200238fd1498Szrj gcc_assert (CALL_WITH_BOUNDS_P (exp));
200338fd1498Szrj
200438fd1498Szrj /* For structures look for the next available pointer. */
200538fd1498Szrj if (ptr_arg != -1 && slots)
200638fd1498Szrj {
200738fd1498Szrj unsigned bnd_no = bitmap_first_set_bit (slots);
200838fd1498Szrj args[j].pointer_offset =
200938fd1498Szrj bnd_no * POINTER_SIZE / BITS_PER_UNIT;
201038fd1498Szrj
201138fd1498Szrj bitmap_clear_bit (slots, bnd_no);
201238fd1498Szrj
201338fd1498Szrj /* Check we have no more pointers in the structure. */
201438fd1498Szrj if (bitmap_empty_p (slots))
201538fd1498Szrj BITMAP_FREE (slots);
201638fd1498Szrj }
201738fd1498Szrj args[j].pointer_arg = ptr_arg;
201838fd1498Szrj
201938fd1498Szrj 	    /* Check that we covered all pointers in the previous
202038fd1498Szrj 	       non-bounds arg.  */
202138fd1498Szrj if (!slots)
202238fd1498Szrj ptr_arg = -1;
202338fd1498Szrj }
202438fd1498Szrj else
202538fd1498Szrj ptr_arg = -1;
202638fd1498Szrj
202738fd1498Szrj if (targetm.calls.split_complex_arg
202838fd1498Szrj && argtype
202938fd1498Szrj && TREE_CODE (argtype) == COMPLEX_TYPE
203038fd1498Szrj && targetm.calls.split_complex_arg (argtype))
203138fd1498Szrj {
203238fd1498Szrj tree subtype = TREE_TYPE (argtype);
203338fd1498Szrj args[j].tree_value = build1 (REALPART_EXPR, subtype, arg);
203438fd1498Szrj j--;
203538fd1498Szrj args[j].tree_value = build1 (IMAGPART_EXPR, subtype, arg);
203638fd1498Szrj }
203738fd1498Szrj else
203838fd1498Szrj args[j].tree_value = arg;
203938fd1498Szrj j--;
204038fd1498Szrj argpos++;
204138fd1498Szrj }
204238fd1498Szrj
204338fd1498Szrj if (slots)
204438fd1498Szrj BITMAP_FREE (slots);
204538fd1498Szrj }
204638fd1498Szrj
204738fd1498Szrj bitmap_obstack_release (NULL);
204838fd1498Szrj
204938fd1498Szrj /* Extract attribute alloc_size and if set, store the indices of
205038fd1498Szrj the corresponding arguments in ALLOC_IDX, and then the actual
205138fd1498Szrj argument(s) at those indices in ALLOC_ARGS. */
205238fd1498Szrj int alloc_idx[2] = { -1, -1 };
205338fd1498Szrj if (tree alloc_size
205438fd1498Szrj = (fndecl ? lookup_attribute ("alloc_size",
205538fd1498Szrj TYPE_ATTRIBUTES (TREE_TYPE (fndecl)))
205638fd1498Szrj : NULL_TREE))
205738fd1498Szrj {
205838fd1498Szrj tree args = TREE_VALUE (alloc_size);
205938fd1498Szrj alloc_idx[0] = TREE_INT_CST_LOW (TREE_VALUE (args)) - 1;
206038fd1498Szrj if (TREE_CHAIN (args))
206138fd1498Szrj alloc_idx[1] = TREE_INT_CST_LOW (TREE_VALUE (TREE_CHAIN (args))) - 1;
206238fd1498Szrj }
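/* For example (a sketch with a hypothetical declaration): given

     void *my_calloc (size_t, size_t) __attribute__ ((alloc_size (1, 2)));

   ALLOC_IDX becomes { 0, 1 } and the loop below records the matching
   actual arguments in ALLOC_ARGS for the overflow check performed by
   maybe_warn_alloc_args_overflow at the end of this function.  */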
206338fd1498Szrj
206438fd1498Szrj /* Array for up to the two attribute alloc_size arguments. */
206538fd1498Szrj tree alloc_args[] = { NULL_TREE, NULL_TREE };
206638fd1498Szrj
206738fd1498Szrj /* I counts args in order (to be) pushed; ARGPOS counts in order written. */
206838fd1498Szrj for (argpos = 0; argpos < num_actuals; i--, argpos++)
206938fd1498Szrj {
207038fd1498Szrj tree type = TREE_TYPE (args[i].tree_value);
207138fd1498Szrj int unsignedp;
207238fd1498Szrj machine_mode mode;
207338fd1498Szrj
207438fd1498Szrj /* Replace erroneous argument with constant zero. */
207538fd1498Szrj if (type == error_mark_node || !COMPLETE_TYPE_P (type))
207638fd1498Szrj args[i].tree_value = integer_zero_node, type = integer_type_node;
207738fd1498Szrj
207838fd1498Szrj /* If TYPE is a transparent union or record, pass things the way
207938fd1498Szrj we would pass the first field of the union or record. We have
208038fd1498Szrj already verified that the modes are the same. */
208138fd1498Szrj if ((TREE_CODE (type) == UNION_TYPE || TREE_CODE (type) == RECORD_TYPE)
208238fd1498Szrj && TYPE_TRANSPARENT_AGGR (type))
208338fd1498Szrj type = TREE_TYPE (first_field (type));
208438fd1498Szrj
208538fd1498Szrj /* Decide where to pass this arg.
208638fd1498Szrj
208738fd1498Szrj args[i].reg is nonzero if all or part is passed in registers.
208838fd1498Szrj
208938fd1498Szrj args[i].partial is nonzero if part but not all is passed in registers,
209038fd1498Szrj and the exact value says how many bytes are passed in registers.
209138fd1498Szrj
209238fd1498Szrj args[i].pass_on_stack is nonzero if the argument must at least be
209338fd1498Szrj computed on the stack. It may then be loaded back into registers
209438fd1498Szrj if args[i].reg is nonzero.
209538fd1498Szrj
209638fd1498Szrj These decisions are driven by the FUNCTION_... macros and must agree
209738fd1498Szrj with those made by function.c. */
209838fd1498Szrj
209938fd1498Szrj /* See if this argument should be passed by invisible reference. */
210038fd1498Szrj if (pass_by_reference (args_so_far_pnt, TYPE_MODE (type),
210138fd1498Szrj type, argpos < n_named_args))
210238fd1498Szrj {
210338fd1498Szrj bool callee_copies;
210438fd1498Szrj tree base = NULL_TREE;
210538fd1498Szrj
210638fd1498Szrj callee_copies
210738fd1498Szrj = reference_callee_copied (args_so_far_pnt, TYPE_MODE (type),
210838fd1498Szrj type, argpos < n_named_args);
210938fd1498Szrj
211038fd1498Szrj /* If we're compiling a thunk, pass through invisible references
211138fd1498Szrj instead of making a copy. */
211238fd1498Szrj if (call_from_thunk_p
211338fd1498Szrj || (callee_copies
211438fd1498Szrj && !TREE_ADDRESSABLE (type)
211538fd1498Szrj && (base = get_base_address (args[i].tree_value))
211638fd1498Szrj && TREE_CODE (base) != SSA_NAME
211738fd1498Szrj && (!DECL_P (base) || MEM_P (DECL_RTL (base)))))
211838fd1498Szrj {
211938fd1498Szrj /* We may have turned the parameter value into an SSA name.
212038fd1498Szrj Go back to the original parameter so we can take the
212138fd1498Szrj address. */
212238fd1498Szrj if (TREE_CODE (args[i].tree_value) == SSA_NAME)
212338fd1498Szrj {
212438fd1498Szrj gcc_assert (SSA_NAME_IS_DEFAULT_DEF (args[i].tree_value));
212538fd1498Szrj args[i].tree_value = SSA_NAME_VAR (args[i].tree_value);
212638fd1498Szrj gcc_assert (TREE_CODE (args[i].tree_value) == PARM_DECL);
212738fd1498Szrj }
212838fd1498Szrj 	  /* Argument setup code may have copied the value to a register.  We
212938fd1498Szrj revert that optimization now because the tail call code must
213038fd1498Szrj use the original location. */
213138fd1498Szrj if (TREE_CODE (args[i].tree_value) == PARM_DECL
213238fd1498Szrj && !MEM_P (DECL_RTL (args[i].tree_value))
213338fd1498Szrj && DECL_INCOMING_RTL (args[i].tree_value)
213438fd1498Szrj && MEM_P (DECL_INCOMING_RTL (args[i].tree_value)))
213538fd1498Szrj set_decl_rtl (args[i].tree_value,
213638fd1498Szrj DECL_INCOMING_RTL (args[i].tree_value));
213738fd1498Szrj
213838fd1498Szrj mark_addressable (args[i].tree_value);
213938fd1498Szrj
214038fd1498Szrj /* We can't use sibcalls if a callee-copied argument is
214138fd1498Szrj stored in the current function's frame. */
214238fd1498Szrj if (!call_from_thunk_p && DECL_P (base) && !TREE_STATIC (base))
214338fd1498Szrj {
214438fd1498Szrj *may_tailcall = false;
214538fd1498Szrj maybe_complain_about_tail_call (exp,
214638fd1498Szrj "a callee-copied argument is"
214738fd1498Szrj " stored in the current"
214838fd1498Szrj " function's frame");
214938fd1498Szrj }
215038fd1498Szrj
215138fd1498Szrj args[i].tree_value = build_fold_addr_expr_loc (loc,
215238fd1498Szrj args[i].tree_value);
215338fd1498Szrj type = TREE_TYPE (args[i].tree_value);
215438fd1498Szrj
215538fd1498Szrj if (*ecf_flags & ECF_CONST)
215638fd1498Szrj *ecf_flags &= ~(ECF_CONST | ECF_LOOPING_CONST_OR_PURE);
215738fd1498Szrj }
215838fd1498Szrj else
215938fd1498Szrj {
216038fd1498Szrj /* We make a copy of the object and pass the address to the
216138fd1498Szrj function being called. */
216238fd1498Szrj rtx copy;
216338fd1498Szrj
216438fd1498Szrj if (!COMPLETE_TYPE_P (type)
216538fd1498Szrj || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST
216638fd1498Szrj || (flag_stack_check == GENERIC_STACK_CHECK
216738fd1498Szrj && compare_tree_int (TYPE_SIZE_UNIT (type),
216838fd1498Szrj STACK_CHECK_MAX_VAR_SIZE) > 0))
216938fd1498Szrj {
217038fd1498Szrj /* This is a variable-sized object. Make space on the stack
217138fd1498Szrj for it. */
217238fd1498Szrj rtx size_rtx = expr_size (args[i].tree_value);
217338fd1498Szrj
217438fd1498Szrj if (*old_stack_level == 0)
217538fd1498Szrj {
217638fd1498Szrj emit_stack_save (SAVE_BLOCK, old_stack_level);
217738fd1498Szrj *old_pending_adj = pending_stack_adjust;
217838fd1498Szrj pending_stack_adjust = 0;
217938fd1498Szrj }
218038fd1498Szrj
218138fd1498Szrj /* We can pass TRUE as the 4th argument because we just
218238fd1498Szrj saved the stack pointer and will restore it right after
218338fd1498Szrj the call. */
218438fd1498Szrj copy = allocate_dynamic_stack_space (size_rtx,
218538fd1498Szrj TYPE_ALIGN (type),
218638fd1498Szrj TYPE_ALIGN (type),
218738fd1498Szrj max_int_size_in_bytes
218838fd1498Szrj (type),
218938fd1498Szrj true);
219038fd1498Szrj copy = gen_rtx_MEM (BLKmode, copy);
219138fd1498Szrj set_mem_attributes (copy, type, 1);
219238fd1498Szrj }
219338fd1498Szrj else
219438fd1498Szrj copy = assign_temp (type, 1, 0);
219538fd1498Szrj
219638fd1498Szrj store_expr (args[i].tree_value, copy, 0, false, false);
219738fd1498Szrj
219838fd1498Szrj /* Just change the const function to pure and then let
219938fd1498Szrj the next test clear the pure based on
220038fd1498Szrj callee_copies. */
220138fd1498Szrj if (*ecf_flags & ECF_CONST)
220238fd1498Szrj {
220338fd1498Szrj *ecf_flags &= ~ECF_CONST;
220438fd1498Szrj *ecf_flags |= ECF_PURE;
220538fd1498Szrj }
220638fd1498Szrj
220738fd1498Szrj if (!callee_copies && *ecf_flags & ECF_PURE)
220838fd1498Szrj *ecf_flags &= ~(ECF_PURE | ECF_LOOPING_CONST_OR_PURE);
220938fd1498Szrj
221038fd1498Szrj args[i].tree_value
221138fd1498Szrj = build_fold_addr_expr_loc (loc, make_tree (type, copy));
221238fd1498Szrj type = TREE_TYPE (args[i].tree_value);
221338fd1498Szrj *may_tailcall = false;
221438fd1498Szrj maybe_complain_about_tail_call (exp,
221538fd1498Szrj "argument must be passed"
221638fd1498Szrj " by copying");
221738fd1498Szrj }
221838fd1498Szrj }
221938fd1498Szrj
222038fd1498Szrj unsignedp = TYPE_UNSIGNED (type);
222138fd1498Szrj mode = promote_function_mode (type, TYPE_MODE (type), &unsignedp,
222238fd1498Szrj fndecl ? TREE_TYPE (fndecl) : fntype, 0);
222338fd1498Szrj
222438fd1498Szrj args[i].unsignedp = unsignedp;
222538fd1498Szrj args[i].mode = mode;
222638fd1498Szrj
222738fd1498Szrj targetm.calls.warn_parameter_passing_abi (args_so_far, type);
222838fd1498Szrj
222938fd1498Szrj args[i].reg = targetm.calls.function_arg (args_so_far, mode, type,
223038fd1498Szrj argpos < n_named_args);
223138fd1498Szrj
223238fd1498Szrj if (args[i].reg && CONST_INT_P (args[i].reg))
223338fd1498Szrj {
223438fd1498Szrj args[i].special_slot = args[i].reg;
223538fd1498Szrj args[i].reg = NULL;
223638fd1498Szrj }
223738fd1498Szrj
223838fd1498Szrj /* If this is a sibling call and the machine has register windows, the
223938fd1498Szrj 	 register window has to be unwound before calling the routine, so
224038fd1498Szrj arguments have to go into the incoming registers. */
224138fd1498Szrj if (targetm.calls.function_incoming_arg != targetm.calls.function_arg)
224238fd1498Szrj args[i].tail_call_reg
224338fd1498Szrj = targetm.calls.function_incoming_arg (args_so_far, mode, type,
224438fd1498Szrj argpos < n_named_args);
224538fd1498Szrj else
224638fd1498Szrj args[i].tail_call_reg = args[i].reg;
224738fd1498Szrj
224838fd1498Szrj if (args[i].reg)
224938fd1498Szrj args[i].partial
225038fd1498Szrj = targetm.calls.arg_partial_bytes (args_so_far, mode, type,
225138fd1498Szrj argpos < n_named_args);
225238fd1498Szrj
225338fd1498Szrj args[i].pass_on_stack = targetm.calls.must_pass_in_stack (mode, type);
225438fd1498Szrj
225538fd1498Szrj /* If FUNCTION_ARG returned a (parallel [(expr_list (nil) ...) ...]),
225638fd1498Szrj it means that we are to pass this arg in the register(s) designated
225738fd1498Szrj by the PARALLEL, but also to pass it in the stack. */
225838fd1498Szrj if (args[i].reg && GET_CODE (args[i].reg) == PARALLEL
225938fd1498Szrj && XEXP (XVECEXP (args[i].reg, 0, 0), 0) == 0)
226038fd1498Szrj args[i].pass_on_stack = 1;
226138fd1498Szrj
226238fd1498Szrj /* If this is an addressable type, we must preallocate the stack
226338fd1498Szrj since we must evaluate the object into its final location.
226438fd1498Szrj
226538fd1498Szrj If this is to be passed in both registers and the stack, it is simpler
226638fd1498Szrj to preallocate. */
226738fd1498Szrj if (TREE_ADDRESSABLE (type)
226838fd1498Szrj || (args[i].pass_on_stack && args[i].reg != 0))
226938fd1498Szrj *must_preallocate = 1;
227038fd1498Szrj
227138fd1498Szrj /* No stack allocation and padding for bounds. */
227238fd1498Szrj if (POINTER_BOUNDS_P (args[i].tree_value))
227338fd1498Szrj ;
227438fd1498Szrj /* Compute the stack-size of this argument. */
227538fd1498Szrj else if (args[i].reg == 0 || args[i].partial != 0
227638fd1498Szrj || reg_parm_stack_space > 0
227738fd1498Szrj || args[i].pass_on_stack)
227838fd1498Szrj locate_and_pad_parm (mode, type,
227938fd1498Szrj #ifdef STACK_PARMS_IN_REG_PARM_AREA
228038fd1498Szrj 1,
228138fd1498Szrj #else
228238fd1498Szrj args[i].reg != 0,
228338fd1498Szrj #endif
228438fd1498Szrj reg_parm_stack_space,
228538fd1498Szrj args[i].pass_on_stack ? 0 : args[i].partial,
228638fd1498Szrj fndecl, args_size, &args[i].locate);
228738fd1498Szrj #ifdef BLOCK_REG_PADDING
228838fd1498Szrj else
228938fd1498Szrj /* The argument is passed entirely in registers. See at which
229038fd1498Szrj end it should be padded. */
229138fd1498Szrj args[i].locate.where_pad =
229238fd1498Szrj BLOCK_REG_PADDING (mode, type,
229338fd1498Szrj int_size_in_bytes (type) <= UNITS_PER_WORD);
229438fd1498Szrj #endif
229538fd1498Szrj
229638fd1498Szrj /* Update ARGS_SIZE, the total stack space for args so far. */
229738fd1498Szrj
229838fd1498Szrj args_size->constant += args[i].locate.size.constant;
229938fd1498Szrj if (args[i].locate.size.var)
230038fd1498Szrj ADD_PARM_SIZE (*args_size, args[i].locate.size.var);
230138fd1498Szrj
230238fd1498Szrj /* Increment ARGS_SO_FAR, which has info about which arg-registers
230338fd1498Szrj have been used, etc. */
230438fd1498Szrj
230538fd1498Szrj targetm.calls.function_arg_advance (args_so_far, TYPE_MODE (type),
230638fd1498Szrj type, argpos < n_named_args);
230738fd1498Szrj
230838fd1498Szrj /* Store argument values for functions decorated with attribute
230938fd1498Szrj alloc_size. */
231038fd1498Szrj if (argpos == alloc_idx[0])
231138fd1498Szrj alloc_args[0] = args[i].tree_value;
231238fd1498Szrj else if (argpos == alloc_idx[1])
231338fd1498Szrj alloc_args[1] = args[i].tree_value;
231438fd1498Szrj }
231538fd1498Szrj
231638fd1498Szrj if (alloc_args[0])
231738fd1498Szrj {
231838fd1498Szrj /* Check the arguments of functions decorated with attribute
231938fd1498Szrj alloc_size. */
232038fd1498Szrj maybe_warn_alloc_args_overflow (fndecl, exp, alloc_args, alloc_idx);
232138fd1498Szrj }
232238fd1498Szrj
232338fd1498Szrj /* Detect passing non-string arguments to functions expecting
232438fd1498Szrj nul-terminated strings. */
232538fd1498Szrj maybe_warn_nonstring_arg (fndecl, exp);
232638fd1498Szrj }
232738fd1498Szrj
232838fd1498Szrj /* Update ARGS_SIZE to contain the total size for the argument block.
232938fd1498Szrj Return the original constant component of the argument block's size.
233038fd1498Szrj
233138fd1498Szrj REG_PARM_STACK_SPACE holds the number of bytes of stack space reserved
233238fd1498Szrj for arguments passed in registers. */
233338fd1498Szrj
233438fd1498Szrj static poly_int64
233538fd1498Szrj compute_argument_block_size (int reg_parm_stack_space,
233638fd1498Szrj struct args_size *args_size,
233738fd1498Szrj tree fndecl ATTRIBUTE_UNUSED,
233838fd1498Szrj tree fntype ATTRIBUTE_UNUSED,
233938fd1498Szrj int preferred_stack_boundary ATTRIBUTE_UNUSED)
234038fd1498Szrj {
234138fd1498Szrj poly_int64 unadjusted_args_size = args_size->constant;
234238fd1498Szrj
234338fd1498Szrj /* For accumulate outgoing args mode we don't need to align, since the frame
234438fd1498Szrj will be already aligned. Align to STACK_BOUNDARY in order to prevent
234538fd1498Szrj backends from generating misaligned frame sizes. */
234638fd1498Szrj if (ACCUMULATE_OUTGOING_ARGS && preferred_stack_boundary > STACK_BOUNDARY)
234738fd1498Szrj preferred_stack_boundary = STACK_BOUNDARY;
234838fd1498Szrj
234938fd1498Szrj /* Compute the actual size of the argument block required. The variable
235038fd1498Szrj and constant sizes must be combined, the size may have to be rounded,
235138fd1498Szrj and there may be a minimum required size. */
235238fd1498Szrj
235338fd1498Szrj if (args_size->var)
235438fd1498Szrj {
235538fd1498Szrj args_size->var = ARGS_SIZE_TREE (*args_size);
235638fd1498Szrj args_size->constant = 0;
235738fd1498Szrj
235838fd1498Szrj preferred_stack_boundary /= BITS_PER_UNIT;
235938fd1498Szrj if (preferred_stack_boundary > 1)
236038fd1498Szrj {
236138fd1498Szrj /* We don't handle this case yet. To handle it correctly we have
236238fd1498Szrj to add the delta, round and subtract the delta.
236338fd1498Szrj Currently no machine description requires this support. */
236438fd1498Szrj gcc_assert (multiple_p (stack_pointer_delta,
236538fd1498Szrj preferred_stack_boundary));
236638fd1498Szrj args_size->var = round_up (args_size->var, preferred_stack_boundary);
236738fd1498Szrj }
236838fd1498Szrj
236938fd1498Szrj if (reg_parm_stack_space > 0)
237038fd1498Szrj {
237138fd1498Szrj args_size->var
237238fd1498Szrj = size_binop (MAX_EXPR, args_size->var,
237338fd1498Szrj ssize_int (reg_parm_stack_space));
237438fd1498Szrj
237538fd1498Szrj /* The area corresponding to register parameters is not to count in
237638fd1498Szrj the size of the block we need. So make the adjustment. */
237738fd1498Szrj if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl))))
237838fd1498Szrj args_size->var
237938fd1498Szrj = size_binop (MINUS_EXPR, args_size->var,
238038fd1498Szrj ssize_int (reg_parm_stack_space));
238138fd1498Szrj }
238238fd1498Szrj }
238338fd1498Szrj else
238438fd1498Szrj {
238538fd1498Szrj preferred_stack_boundary /= BITS_PER_UNIT;
238638fd1498Szrj if (preferred_stack_boundary < 1)
238738fd1498Szrj preferred_stack_boundary = 1;
238838fd1498Szrj args_size->constant = (aligned_upper_bound (args_size->constant
238938fd1498Szrj + stack_pointer_delta,
239038fd1498Szrj preferred_stack_boundary)
239138fd1498Szrj - stack_pointer_delta);
239238fd1498Szrj
239338fd1498Szrj args_size->constant = upper_bound (args_size->constant,
239438fd1498Szrj reg_parm_stack_space);
239538fd1498Szrj
239638fd1498Szrj if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl))))
239738fd1498Szrj args_size->constant -= reg_parm_stack_space;
239838fd1498Szrj }
239938fd1498Szrj return unadjusted_args_size;
240038fd1498Szrj }
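/* Numeric sketch of the constant path above (not from the original
   source): with ARGS_SIZE->constant == 13, STACK_POINTER_DELTA == 0,
   a preferred boundary of 16 bytes and no register-parameter area,
   aligned_upper_bound rounds 13 up to 16, so the block occupies 16
   bytes while the returned unadjusted size remains 13.  */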
240138fd1498Szrj
240238fd1498Szrj /* Precompute parameters as needed for a function call.
240338fd1498Szrj
240438fd1498Szrj FLAGS is mask of ECF_* constants.
240538fd1498Szrj
240638fd1498Szrj NUM_ACTUALS is the number of arguments.
240738fd1498Szrj
240838fd1498Szrj ARGS is an array containing information for each argument; this
240938fd1498Szrj routine fills in the INITIAL_VALUE and VALUE fields for each
241038fd1498Szrj precomputed argument. */
241138fd1498Szrj
241238fd1498Szrj static void
241338fd1498Szrj precompute_arguments (int num_actuals, struct arg_data *args)
241438fd1498Szrj {
241538fd1498Szrj int i;
241638fd1498Szrj
241738fd1498Szrj /* If this is a libcall, then precompute all arguments so that we do not
241838fd1498Szrj get extraneous instructions emitted as part of the libcall sequence. */
241938fd1498Szrj
242038fd1498Szrj /* If we preallocated the stack space, and some arguments must be passed
242138fd1498Szrj on the stack, then we must precompute any parameter which contains a
242238fd1498Szrj function call which will store arguments on the stack.
242338fd1498Szrj Otherwise, evaluating the parameter may clobber previous parameters
242438fd1498Szrj    which have already been stored into the stack.  (We have code to avoid
242538fd1498Szrj    such a case by saving the outgoing stack arguments, but it results in
242638fd1498Szrj    worse code.)  */
242738fd1498Szrj if (!ACCUMULATE_OUTGOING_ARGS)
242838fd1498Szrj return;
242938fd1498Szrj
243038fd1498Szrj for (i = 0; i < num_actuals; i++)
243138fd1498Szrj {
243238fd1498Szrj tree type;
243338fd1498Szrj machine_mode mode;
243438fd1498Szrj
243538fd1498Szrj if (TREE_CODE (args[i].tree_value) != CALL_EXPR)
243638fd1498Szrj continue;
243738fd1498Szrj
243838fd1498Szrj /* If this is an addressable type, we cannot pre-evaluate it. */
243938fd1498Szrj type = TREE_TYPE (args[i].tree_value);
244038fd1498Szrj gcc_assert (!TREE_ADDRESSABLE (type));
244138fd1498Szrj
244238fd1498Szrj args[i].initial_value = args[i].value
244338fd1498Szrj = expand_normal (args[i].tree_value);
244438fd1498Szrj
244538fd1498Szrj mode = TYPE_MODE (type);
244638fd1498Szrj if (mode != args[i].mode)
244738fd1498Szrj {
244838fd1498Szrj int unsignedp = args[i].unsignedp;
244938fd1498Szrj args[i].value
245038fd1498Szrj = convert_modes (args[i].mode, mode,
245138fd1498Szrj args[i].value, args[i].unsignedp);
245238fd1498Szrj
245338fd1498Szrj /* CSE will replace this only if it contains args[i].value
245438fd1498Szrj pseudo, so convert it down to the declared mode using
245538fd1498Szrj a SUBREG. */
245638fd1498Szrj if (REG_P (args[i].value)
245738fd1498Szrj && GET_MODE_CLASS (args[i].mode) == MODE_INT
245838fd1498Szrj && promote_mode (type, mode, &unsignedp) != args[i].mode)
245938fd1498Szrj {
246038fd1498Szrj args[i].initial_value
246138fd1498Szrj = gen_lowpart_SUBREG (mode, args[i].value);
246238fd1498Szrj SUBREG_PROMOTED_VAR_P (args[i].initial_value) = 1;
246338fd1498Szrj SUBREG_PROMOTED_SET (args[i].initial_value, args[i].unsignedp);
246438fd1498Szrj }
246538fd1498Szrj }
246638fd1498Szrj }
246738fd1498Szrj }
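/* Hypothetical example of the promotion handling above: a signed char
   argument that the ABI promotes to SImode has TYPE_MODE QImode but
   args[i].mode SImode.  The value is expanded in QImode, converted up
   with convert_modes, and initial_value may keep a promoted QImode
   SUBREG of the SImode pseudo so that CSE can still recognize uses of
   the narrow value.  */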
246838fd1498Szrj
246938fd1498Szrj /* Given the current state of MUST_PREALLOCATE and information about
247038fd1498Szrj arguments to a function call in NUM_ACTUALS, ARGS and ARGS_SIZE,
247138fd1498Szrj compute and return the final value for MUST_PREALLOCATE. */
247238fd1498Szrj
247338fd1498Szrj static int
247438fd1498Szrj finalize_must_preallocate (int must_preallocate, int num_actuals,
247538fd1498Szrj struct arg_data *args, struct args_size *args_size)
247638fd1498Szrj {
247738fd1498Szrj /* See if we have or want to preallocate stack space.
247838fd1498Szrj
247938fd1498Szrj If we would have to push a partially-in-regs parm
248038fd1498Szrj before other stack parms, preallocate stack space instead.
248138fd1498Szrj
248238fd1498Szrj If the size of some parm is not a multiple of the required stack
248338fd1498Szrj alignment, we must preallocate.
248438fd1498Szrj
248538fd1498Szrj If the total size of arguments that would otherwise create a copy in
248638fd1498Szrj a temporary (such as a CALL) is more than half the total argument list
248738fd1498Szrj size, preallocation is faster.
248838fd1498Szrj
248938fd1498Szrj Another reason to preallocate is if we have a machine (like the m88k)
249038fd1498Szrj where stack alignment is required to be maintained between every
249138fd1498Szrj pair of insns, not just when the call is made. However, we assume here
249238fd1498Szrj that such machines either do not have push insns (and hence preallocation
249338fd1498Szrj would occur anyway) or the problem is taken care of with
249438fd1498Szrj PUSH_ROUNDING. */
249538fd1498Szrj
249638fd1498Szrj if (! must_preallocate)
249738fd1498Szrj {
249838fd1498Szrj int partial_seen = 0;
249938fd1498Szrj poly_int64 copy_to_evaluate_size = 0;
250038fd1498Szrj int i;
250138fd1498Szrj
250238fd1498Szrj for (i = 0; i < num_actuals && ! must_preallocate; i++)
250338fd1498Szrj {
250438fd1498Szrj if (args[i].partial > 0 && ! args[i].pass_on_stack)
250538fd1498Szrj partial_seen = 1;
250638fd1498Szrj else if (partial_seen && args[i].reg == 0)
250738fd1498Szrj must_preallocate = 1;
250838fd1498Szrj /* We preallocate if there are bounds passed in the
250938fd1498Szrj bounds table, so that the address used for bounds
251038fd1498Szrj association is precomputed. */
251138fd1498Szrj else if (POINTER_BOUNDS_P (args[i].tree_value)
251238fd1498Szrj && !args[i].reg)
251338fd1498Szrj must_preallocate = 1;
251438fd1498Szrj
251538fd1498Szrj if (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode
251638fd1498Szrj && (TREE_CODE (args[i].tree_value) == CALL_EXPR
251738fd1498Szrj || TREE_CODE (args[i].tree_value) == TARGET_EXPR
251838fd1498Szrj || TREE_CODE (args[i].tree_value) == COND_EXPR
251938fd1498Szrj || TREE_ADDRESSABLE (TREE_TYPE (args[i].tree_value))))
252038fd1498Szrj copy_to_evaluate_size
252138fd1498Szrj += int_size_in_bytes (TREE_TYPE (args[i].tree_value));
252238fd1498Szrj }
252338fd1498Szrj
252438fd1498Szrj if (maybe_ne (args_size->constant, 0)
252538fd1498Szrj && maybe_ge (copy_to_evaluate_size * 2, args_size->constant))
252638fd1498Szrj must_preallocate = 1;
252738fd1498Szrj }
252838fd1498Szrj return must_preallocate;
252938fd1498Szrj }
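/* A toy version of the "more than half" heuristic above, for the
   constant-size case (hypothetical sketch, not part of GCC):  */
#if 0
static int
copies_dominate_p (long copy_to_evaluate_size, long args_size_constant)
{
  /* Mirrors the maybe_ne/maybe_ge pair above: preallocate when the
     bytes needing a temporary copy reach half of all argument bytes.  */
  return (args_size_constant != 0
          && copy_to_evaluate_size * 2 >= args_size_constant);
}
#endif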
253038fd1498Szrj
253138fd1498Szrj /* If we preallocated stack space, compute the address of each argument
253238fd1498Szrj and store it into the ARGS array.
253338fd1498Szrj
253438fd1498Szrj We need not ensure it is a valid memory address here; it will be
253538fd1498Szrj validized when it is used.
253638fd1498Szrj
253738fd1498Szrj ARGBLOCK is an rtx for the address of the outgoing arguments. */
253838fd1498Szrj
253938fd1498Szrj static void
254038fd1498Szrj compute_argument_addresses (struct arg_data *args, rtx argblock, int num_actuals)
254138fd1498Szrj {
254238fd1498Szrj if (argblock)
254338fd1498Szrj {
254438fd1498Szrj rtx arg_reg = argblock;
254538fd1498Szrj int i;
254638fd1498Szrj poly_int64 arg_offset = 0;
254738fd1498Szrj
254838fd1498Szrj if (GET_CODE (argblock) == PLUS)
254938fd1498Szrj {
255038fd1498Szrj arg_reg = XEXP (argblock, 0);
255138fd1498Szrj arg_offset = rtx_to_poly_int64 (XEXP (argblock, 1));
255238fd1498Szrj }
255338fd1498Szrj
255438fd1498Szrj for (i = 0; i < num_actuals; i++)
255538fd1498Szrj {
255638fd1498Szrj rtx offset = ARGS_SIZE_RTX (args[i].locate.offset);
255738fd1498Szrj rtx slot_offset = ARGS_SIZE_RTX (args[i].locate.slot_offset);
255838fd1498Szrj rtx addr;
255938fd1498Szrj unsigned int align, boundary;
256038fd1498Szrj poly_uint64 units_on_stack = 0;
256138fd1498Szrj machine_mode partial_mode = VOIDmode;
256238fd1498Szrj
256338fd1498Szrj /* Skip this parm if it will not be passed on the stack. */
256438fd1498Szrj if (! args[i].pass_on_stack
256538fd1498Szrj && args[i].reg != 0
256638fd1498Szrj && args[i].partial == 0)
256738fd1498Szrj continue;
256838fd1498Szrj
256938fd1498Szrj if (TYPE_EMPTY_P (TREE_TYPE (args[i].tree_value)))
257038fd1498Szrj continue;
257138fd1498Szrj
257238fd1498Szrj /* Pointer Bounds are never passed on the stack. */
257338fd1498Szrj if (POINTER_BOUNDS_P (args[i].tree_value))
257438fd1498Szrj continue;
257538fd1498Szrj
257638fd1498Szrj addr = simplify_gen_binary (PLUS, Pmode, arg_reg, offset);
257738fd1498Szrj addr = plus_constant (Pmode, addr, arg_offset);
257838fd1498Szrj
257938fd1498Szrj if (args[i].partial != 0)
258038fd1498Szrj {
258138fd1498Szrj /* Only part of the parameter is being passed on the stack.
258238fd1498Szrj Generate a simple memory reference of the correct size. */
258338fd1498Szrj units_on_stack = args[i].locate.size.constant;
258438fd1498Szrj poly_uint64 bits_on_stack = units_on_stack * BITS_PER_UNIT;
258538fd1498Szrj partial_mode = int_mode_for_size (bits_on_stack, 1).else_blk ();
258638fd1498Szrj args[i].stack = gen_rtx_MEM (partial_mode, addr);
258738fd1498Szrj set_mem_size (args[i].stack, units_on_stack);
258838fd1498Szrj }
258938fd1498Szrj else
259038fd1498Szrj {
259138fd1498Szrj args[i].stack = gen_rtx_MEM (args[i].mode, addr);
259238fd1498Szrj set_mem_attributes (args[i].stack,
259338fd1498Szrj TREE_TYPE (args[i].tree_value), 1);
259438fd1498Szrj }
259538fd1498Szrj align = BITS_PER_UNIT;
259638fd1498Szrj boundary = args[i].locate.boundary;
259738fd1498Szrj poly_int64 offset_val;
259838fd1498Szrj if (args[i].locate.where_pad != PAD_DOWNWARD)
259938fd1498Szrj align = boundary;
260038fd1498Szrj else if (poly_int_rtx_p (offset, &offset_val))
260138fd1498Szrj {
260238fd1498Szrj align = least_bit_hwi (boundary);
260338fd1498Szrj unsigned int offset_align
260438fd1498Szrj = known_alignment (offset_val) * BITS_PER_UNIT;
260538fd1498Szrj if (offset_align != 0)
260638fd1498Szrj align = MIN (align, offset_align);
260738fd1498Szrj }
260838fd1498Szrj set_mem_align (args[i].stack, align);
260938fd1498Szrj
261038fd1498Szrj addr = simplify_gen_binary (PLUS, Pmode, arg_reg, slot_offset);
261138fd1498Szrj addr = plus_constant (Pmode, addr, arg_offset);
261238fd1498Szrj
261338fd1498Szrj if (args[i].partial != 0)
261438fd1498Szrj {
261538fd1498Szrj /* Only part of the parameter is being passed on the stack.
261638fd1498Szrj Generate a simple memory reference of the correct size. */
261838fd1498Szrj args[i].stack_slot = gen_rtx_MEM (partial_mode, addr);
261938fd1498Szrj set_mem_size (args[i].stack_slot, units_on_stack);
262038fd1498Szrj }
262138fd1498Szrj else
262238fd1498Szrj {
262338fd1498Szrj args[i].stack_slot = gen_rtx_MEM (args[i].mode, addr);
262438fd1498Szrj set_mem_attributes (args[i].stack_slot,
262538fd1498Szrj TREE_TYPE (args[i].tree_value), 1);
262638fd1498Szrj }
262738fd1498Szrj set_mem_align (args[i].stack_slot, args[i].locate.boundary);
262838fd1498Szrj
262938fd1498Szrj /* Function incoming arguments may overlap with sibling call
263038fd1498Szrj outgoing arguments and we cannot allow reordering of reads
263138fd1498Szrj from function arguments with stores to outgoing arguments
263238fd1498Szrj of sibling calls. */
263338fd1498Szrj set_mem_alias_set (args[i].stack, 0);
263438fd1498Szrj set_mem_alias_set (args[i].stack_slot, 0);
263538fd1498Szrj }
263638fd1498Szrj }
263738fd1498Szrj }
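/* E.g. (hypothetical 32-bit target): for a 12-byte struct with
   args[i].partial == 8, eight bytes travel in registers and
   locate.size.constant is 4, so args[i].stack becomes a 4-byte SImode
   MEM rather than a BLKmode reference covering the whole struct.  */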
263838fd1498Szrj
263938fd1498Szrj /* Given a FNDECL and EXP, return an rtx suitable for use as a target address
264038fd1498Szrj in a call instruction.
264138fd1498Szrj
264238fd1498Szrj FNDECL is the tree node for the target function. For an indirect call
264338fd1498Szrj FNDECL will be NULL_TREE.
264438fd1498Szrj
264538fd1498Szrj ADDR is the operand 0 of CALL_EXPR for this call. */
264638fd1498Szrj
264738fd1498Szrj static rtx
264838fd1498Szrj rtx_for_function_call (tree fndecl, tree addr)
264938fd1498Szrj {
265038fd1498Szrj rtx funexp;
265138fd1498Szrj
265238fd1498Szrj /* Get the function to call, in the form of RTL. */
265338fd1498Szrj if (fndecl)
265438fd1498Szrj {
265538fd1498Szrj if (!TREE_USED (fndecl) && fndecl != current_function_decl)
265638fd1498Szrj TREE_USED (fndecl) = 1;
265738fd1498Szrj
265838fd1498Szrj /* Get a SYMBOL_REF rtx for the function address. */
265938fd1498Szrj funexp = XEXP (DECL_RTL (fndecl), 0);
266038fd1498Szrj }
266138fd1498Szrj else
266238fd1498Szrj /* Generate an rtx (probably a pseudo-register) for the address. */
266338fd1498Szrj {
266438fd1498Szrj push_temp_slots ();
266538fd1498Szrj funexp = expand_normal (addr);
266638fd1498Szrj pop_temp_slots (); /* FUNEXP can't be BLKmode. */
266738fd1498Szrj }
266838fd1498Szrj return funexp;
266938fd1498Szrj }
267038fd1498Szrj
267138fd1498Szrj /* Return the static chain for this function, if any. */
267238fd1498Szrj
267338fd1498Szrj rtx
267438fd1498Szrj rtx_for_static_chain (const_tree fndecl_or_type, bool incoming_p)
267538fd1498Szrj {
267638fd1498Szrj if (DECL_P (fndecl_or_type) && !DECL_STATIC_CHAIN (fndecl_or_type))
267738fd1498Szrj return NULL;
267838fd1498Szrj
267938fd1498Szrj return targetm.calls.static_chain (fndecl_or_type, incoming_p);
268038fd1498Szrj }
268138fd1498Szrj
268238fd1498Szrj /* Internal state for internal_arg_pointer_based_exp and its helpers. */
268338fd1498Szrj static struct
268438fd1498Szrj {
268538fd1498Szrj /* Last insn that has been scanned by internal_arg_pointer_based_exp_scan,
268638fd1498Szrj or NULL_RTX if none has been scanned yet. */
268738fd1498Szrj rtx_insn *scan_start;
268838fd1498Szrj /* Vector indexed by REGNO - FIRST_PSEUDO_REGISTER, recording if a pseudo is
268938fd1498Szrj based on crtl->args.internal_arg_pointer. The element is NULL_RTX if the
269038fd1498Szrj pseudo isn't based on it, a CONST_INT offset if the pseudo is based on it
269138fd1498Szrj with a fixed offset, or PC if based on it with a variable or unknown offset. */
269238fd1498Szrj vec<rtx> cache;
269338fd1498Szrj } internal_arg_pointer_exp_state;
269438fd1498Szrj
269538fd1498Szrj static rtx internal_arg_pointer_based_exp (const_rtx, bool);
269638fd1498Szrj
269738fd1498Szrj /* Helper function for internal_arg_pointer_based_exp. Scan insns in
269838fd1498Szrj the tail call sequence, starting with the first insn that hasn't been
269938fd1498Szrj scanned yet, and note for each pseudo on the LHS whether it is based
270038fd1498Szrj on crtl->args.internal_arg_pointer or not, and what offset from
270138fd1498Szrj that pointer it has. */
270238fd1498Szrj
270338fd1498Szrj static void
270438fd1498Szrj internal_arg_pointer_based_exp_scan (void)
270538fd1498Szrj {
270638fd1498Szrj rtx_insn *insn, *scan_start = internal_arg_pointer_exp_state.scan_start;
270738fd1498Szrj
270838fd1498Szrj if (scan_start == NULL_RTX)
270938fd1498Szrj insn = get_insns ();
271038fd1498Szrj else
271138fd1498Szrj insn = NEXT_INSN (scan_start);
271238fd1498Szrj
271338fd1498Szrj while (insn)
271438fd1498Szrj {
271538fd1498Szrj rtx set = single_set (insn);
271638fd1498Szrj if (set && REG_P (SET_DEST (set)) && !HARD_REGISTER_P (SET_DEST (set)))
271738fd1498Szrj {
271838fd1498Szrj rtx val = NULL_RTX;
271938fd1498Szrj unsigned int idx = REGNO (SET_DEST (set)) - FIRST_PSEUDO_REGISTER;
272038fd1498Szrj /* Punt on pseudos set multiple times. */
272138fd1498Szrj if (idx < internal_arg_pointer_exp_state.cache.length ()
272238fd1498Szrj && (internal_arg_pointer_exp_state.cache[idx]
272338fd1498Szrj != NULL_RTX))
272438fd1498Szrj val = pc_rtx;
272538fd1498Szrj else
272638fd1498Szrj val = internal_arg_pointer_based_exp (SET_SRC (set), false);
272738fd1498Szrj if (val != NULL_RTX)
272838fd1498Szrj {
272938fd1498Szrj if (idx >= internal_arg_pointer_exp_state.cache.length ())
273038fd1498Szrj internal_arg_pointer_exp_state.cache
273138fd1498Szrj .safe_grow_cleared (idx + 1);
273238fd1498Szrj internal_arg_pointer_exp_state.cache[idx] = val;
273338fd1498Szrj }
273438fd1498Szrj }
273538fd1498Szrj if (NEXT_INSN (insn) == NULL_RTX)
273638fd1498Szrj scan_start = insn;
273738fd1498Szrj insn = NEXT_INSN (insn);
273838fd1498Szrj }
273938fd1498Szrj
274038fd1498Szrj internal_arg_pointer_exp_state.scan_start = scan_start;
274138fd1498Szrj }
274238fd1498Szrj
274338fd1498Szrj /* Compute whether RTL is based on crtl->args.internal_arg_pointer. Return
274438fd1498Szrj NULL_RTX if RTL isn't based on it, a CONST_INT offset if RTL is based on
274538fd1498Szrj it with a fixed offset, or PC if based on it with a variable or unknown offset.
274638fd1498Szrj TOPLEVEL is true if the function is invoked at the topmost level. */
274738fd1498Szrj
274838fd1498Szrj static rtx
274938fd1498Szrj internal_arg_pointer_based_exp (const_rtx rtl, bool toplevel)
275038fd1498Szrj {
275138fd1498Szrj if (CONSTANT_P (rtl))
275238fd1498Szrj return NULL_RTX;
275338fd1498Szrj
275438fd1498Szrj if (rtl == crtl->args.internal_arg_pointer)
275538fd1498Szrj return const0_rtx;
275638fd1498Szrj
275738fd1498Szrj if (REG_P (rtl) && HARD_REGISTER_P (rtl))
275838fd1498Szrj return NULL_RTX;
275938fd1498Szrj
276038fd1498Szrj poly_int64 offset;
276138fd1498Szrj if (GET_CODE (rtl) == PLUS && poly_int_rtx_p (XEXP (rtl, 1), &offset))
276238fd1498Szrj {
276338fd1498Szrj rtx val = internal_arg_pointer_based_exp (XEXP (rtl, 0), toplevel);
276438fd1498Szrj if (val == NULL_RTX || val == pc_rtx)
276538fd1498Szrj return val;
276638fd1498Szrj return plus_constant (Pmode, val, offset);
276738fd1498Szrj }
276838fd1498Szrj
276938fd1498Szrj /* When called at the topmost level, scan pseudo assignments in between the
277038fd1498Szrj last scanned instruction in the tail call sequence and the latest insn
277138fd1498Szrj in that sequence. */
277238fd1498Szrj if (toplevel)
277338fd1498Szrj internal_arg_pointer_based_exp_scan ();
277438fd1498Szrj
277538fd1498Szrj if (REG_P (rtl))
277638fd1498Szrj {
277738fd1498Szrj unsigned int idx = REGNO (rtl) - FIRST_PSEUDO_REGISTER;
277838fd1498Szrj if (idx < internal_arg_pointer_exp_state.cache.length ())
277938fd1498Szrj return internal_arg_pointer_exp_state.cache[idx];
278038fd1498Szrj
278138fd1498Szrj return NULL_RTX;
278238fd1498Szrj }
278338fd1498Szrj
278438fd1498Szrj subrtx_iterator::array_type array;
278538fd1498Szrj FOR_EACH_SUBRTX (iter, array, rtl, NONCONST)
278638fd1498Szrj {
278738fd1498Szrj const_rtx x = *iter;
278838fd1498Szrj if (REG_P (x) && internal_arg_pointer_based_exp (x, false) != NULL_RTX)
278938fd1498Szrj return pc_rtx;
279038fd1498Szrj if (MEM_P (x))
279138fd1498Szrj iter.skip_subrtxes ();
279238fd1498Szrj }
279338fd1498Szrj
279438fd1498Szrj return NULL_RTX;
279538fd1498Szrj }
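/* A toy model of the classification above, using a tiny expression tree
   instead of RTL (hypothetical sketch): KNOWN offsets propagate through
   a PLUS with a constant operand.  The real function can additionally
   prove "unrelated" by scanning sub-rtxes; this sketch conservatively
   reports such cases as UNKNOWN (the pc_rtx case).  */
#if 0
enum ap_class { AP_NONE, AP_KNOWN, AP_UNKNOWN };

struct toy_expr
{
  enum { TOY_ARG_POINTER, TOY_CONST, TOY_PLUS, TOY_OTHER } kind;
  long offset;                  /* For TOY_CONST.  */
  struct toy_expr *op0, *op1;   /* For TOY_PLUS.  */
};

static enum ap_class
toy_classify (const struct toy_expr *e, long *offset_out)
{
  if (e->kind == TOY_ARG_POINTER)
    {
      *offset_out = 0;          /* Based with offset 0 (const0_rtx).  */
      return AP_KNOWN;
    }
  if (e->kind == TOY_CONST)
    return AP_NONE;             /* Constants can't alias the args.  */
  if (e->kind == TOY_PLUS && e->op1->kind == TOY_CONST)
    {
      enum ap_class c = toy_classify (e->op0, offset_out);
      if (c == AP_KNOWN)
        *offset_out += e->op1->offset;
      return c;
    }
  return AP_UNKNOWN;            /* Variable or unknown offset.  */
}
#endif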
279638fd1498Szrj
279738fd1498Szrj /* Return true if SIZE bytes starting from address ADDR might overlap an
279838fd1498Szrj already-clobbered argument area. This function is used to determine
279938fd1498Szrj if we should give up a sibcall. */
280038fd1498Szrj
280138fd1498Szrj static bool
280238fd1498Szrj mem_might_overlap_already_clobbered_arg_p (rtx addr, poly_uint64 size)
280338fd1498Szrj {
280438fd1498Szrj poly_int64 i;
280538fd1498Szrj unsigned HOST_WIDE_INT start, end;
280638fd1498Szrj rtx val;
280738fd1498Szrj
280838fd1498Szrj if (bitmap_empty_p (stored_args_map)
280938fd1498Szrj && stored_args_watermark == HOST_WIDE_INT_M1U)
281038fd1498Szrj return false;
281138fd1498Szrj val = internal_arg_pointer_based_exp (addr, true);
281238fd1498Szrj if (val == NULL_RTX)
281338fd1498Szrj return false;
281438fd1498Szrj else if (!poly_int_rtx_p (val, &i))
281538fd1498Szrj return true;
281638fd1498Szrj
281738fd1498Szrj if (known_eq (size, 0U))
281838fd1498Szrj return false;
281938fd1498Szrj
282038fd1498Szrj if (STACK_GROWS_DOWNWARD)
282138fd1498Szrj i -= crtl->args.pretend_args_size;
282238fd1498Szrj else
282338fd1498Szrj i += crtl->args.pretend_args_size;
282438fd1498Szrj
282538fd1498Szrj if (ARGS_GROW_DOWNWARD)
282638fd1498Szrj i = -i - size;
282738fd1498Szrj
282838fd1498Szrj /* We can ignore any references to the function's pretend args,
282938fd1498Szrj which at this point would manifest as negative values of I. */
283038fd1498Szrj if (known_le (i, 0) && known_le (size, poly_uint64 (-i)))
283138fd1498Szrj return false;
283238fd1498Szrj
283338fd1498Szrj start = maybe_lt (i, 0) ? 0 : constant_lower_bound (i);
283438fd1498Szrj if (!(i + size).is_constant (&end))
283538fd1498Szrj end = HOST_WIDE_INT_M1U;
283638fd1498Szrj
283738fd1498Szrj if (end > stored_args_watermark)
283838fd1498Szrj return true;
283938fd1498Szrj
284038fd1498Szrj end = MIN (end, SBITMAP_SIZE (stored_args_map));
284138fd1498Szrj for (unsigned HOST_WIDE_INT k = start; k < end; ++k)
284238fd1498Szrj if (bitmap_bit_p (stored_args_map, k))
284338fd1498Szrj return true;
284438fd1498Szrj
284538fd1498Szrj return false;
284638fd1498Szrj }
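/* Simplified constant-offset version of the interval test above
   (hypothetical sketch; poly_int values, the HOST_WIDE_INT_M1U fallback
   and the stack-direction adjustments are omitted):  */
#if 0
static bool
toy_overlap_p (const unsigned char *byte_map, unsigned long map_size,
               unsigned long watermark, long i, unsigned long size)
{
  if (size == 0)
    return false;
  if (i < 0 && size <= (unsigned long) -i)
    return false;               /* Entirely within the pretend args.  */
  unsigned long start = i < 0 ? 0 : (unsigned long) i;
  unsigned long end = (unsigned long) (i + (long) size);
  if (end > watermark)
    return true;                /* Past the all-clobbered watermark.  */
  if (end > map_size)
    end = map_size;
  for (unsigned long k = start; k < end; ++k)
    if (byte_map[k])
      return true;              /* Byte K was stored by a tail-call arg.  */
  return false;
}
#endif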
284738fd1498Szrj
284838fd1498Szrj /* Do the register loads required for any wholly-register parms or any
284938fd1498Szrj parms which are passed both on the stack and in a register. Their
285038fd1498Szrj expressions were already evaluated.
285138fd1498Szrj
285238fd1498Szrj Mark all register-parms as living through the call, putting these USE
285338fd1498Szrj insns in the CALL_INSN_FUNCTION_USAGE field.
285438fd1498Szrj
285538fd1498Szrj When IS_SIBCALL, perform the check_sibcall_argument_overlap
285638fd1498Szrj checking, setting *SIBCALL_FAILURE if appropriate. */
285738fd1498Szrj
285838fd1498Szrj static void
285938fd1498Szrj load_register_parameters (struct arg_data *args, int num_actuals,
286038fd1498Szrj rtx *call_fusage, int flags, int is_sibcall,
286138fd1498Szrj int *sibcall_failure)
286238fd1498Szrj {
286338fd1498Szrj int i, j;
286438fd1498Szrj
286538fd1498Szrj for (i = 0; i < num_actuals; i++)
286638fd1498Szrj {
286738fd1498Szrj rtx reg = ((flags & ECF_SIBCALL)
286838fd1498Szrj ? args[i].tail_call_reg : args[i].reg);
286938fd1498Szrj if (reg)
287038fd1498Szrj {
287138fd1498Szrj int partial = args[i].partial;
287238fd1498Szrj int nregs;
287338fd1498Szrj poly_int64 size = 0;
287438fd1498Szrj HOST_WIDE_INT const_size = 0;
287538fd1498Szrj rtx_insn *before_arg = get_last_insn ();
287638fd1498Szrj /* Set non-negative if we must move a word at a time, even if
287738fd1498Szrj just one word (e.g., partial == 4 && mode == DFmode). Set
287838fd1498Szrj to -1 if we just use a normal move insn. This value can be
287938fd1498Szrj zero if the argument is a zero-sized structure. */
288038fd1498Szrj nregs = -1;
288138fd1498Szrj if (GET_CODE (reg) == PARALLEL)
288238fd1498Szrj ;
288338fd1498Szrj else if (partial)
288438fd1498Szrj {
288538fd1498Szrj gcc_assert (partial % UNITS_PER_WORD == 0);
288638fd1498Szrj nregs = partial / UNITS_PER_WORD;
288738fd1498Szrj }
288838fd1498Szrj else if (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode)
288938fd1498Szrj {
289038fd1498Szrj /* Variable-sized parameters should be described by a
289138fd1498Szrj PARALLEL instead. */
289238fd1498Szrj const_size = int_size_in_bytes (TREE_TYPE (args[i].tree_value));
289338fd1498Szrj gcc_assert (const_size >= 0);
289438fd1498Szrj nregs = (const_size + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
289538fd1498Szrj size = const_size;
289638fd1498Szrj }
289738fd1498Szrj else
289838fd1498Szrj size = GET_MODE_SIZE (args[i].mode);
289938fd1498Szrj
290038fd1498Szrj /* Handle calls that pass values in multiple non-contiguous
290138fd1498Szrj locations. The Irix 6 ABI has examples of this. */
290238fd1498Szrj
290338fd1498Szrj if (GET_CODE (reg) == PARALLEL)
290438fd1498Szrj emit_group_move (reg, args[i].parallel_value);
290538fd1498Szrj
290638fd1498Szrj /* If simple case, just do move. If normal partial, store_one_arg
290738fd1498Szrj has already loaded the register for us. In all other cases,
290838fd1498Szrj load the register(s) from memory. */
290938fd1498Szrj
291038fd1498Szrj else if (nregs == -1)
291138fd1498Szrj {
291238fd1498Szrj emit_move_insn (reg, args[i].value);
291338fd1498Szrj #ifdef BLOCK_REG_PADDING
291438fd1498Szrj /* Handle the case where we have a value that needs shifting
291538fd1498Szrj up to the MSB, e.g. a QImode value being padded
291638fd1498Szrj upward on a BYTES_BIG_ENDIAN machine. */
291738fd1498Szrj if (args[i].locate.where_pad
291838fd1498Szrj == (BYTES_BIG_ENDIAN ? PAD_UPWARD : PAD_DOWNWARD))
291938fd1498Szrj {
292038fd1498Szrj gcc_checking_assert (ordered_p (size, UNITS_PER_WORD));
292138fd1498Szrj if (maybe_lt (size, UNITS_PER_WORD))
292238fd1498Szrj {
292338fd1498Szrj rtx x;
292438fd1498Szrj poly_int64 shift
292538fd1498Szrj = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
292638fd1498Szrj
292738fd1498Szrj /* Assigning REG here rather than a temp makes
292838fd1498Szrj CALL_FUSAGE report the whole reg as used.
292938fd1498Szrj Strictly speaking, the call only uses SIZE
293038fd1498Szrj bytes at the msb end, but it doesn't seem worth
293138fd1498Szrj generating rtl to say that. */
293238fd1498Szrj reg = gen_rtx_REG (word_mode, REGNO (reg));
293338fd1498Szrj x = expand_shift (LSHIFT_EXPR, word_mode,
293438fd1498Szrj reg, shift, reg, 1);
293538fd1498Szrj if (x != reg)
293638fd1498Szrj emit_move_insn (reg, x);
293738fd1498Szrj }
293838fd1498Szrj }
293938fd1498Szrj #endif
294038fd1498Szrj }
294138fd1498Szrj
294238fd1498Szrj /* If we have pre-computed the values to put in the registers in
294338fd1498Szrj the case of non-aligned structures, copy them in now. */
294438fd1498Szrj
294538fd1498Szrj else if (args[i].n_aligned_regs != 0)
294638fd1498Szrj for (j = 0; j < args[i].n_aligned_regs; j++)
294738fd1498Szrj emit_move_insn (gen_rtx_REG (word_mode, REGNO (reg) + j),
294838fd1498Szrj args[i].aligned_regs[j]);
294938fd1498Szrj
295038fd1498Szrj else if (partial == 0 || args[i].pass_on_stack)
295138fd1498Szrj {
295238fd1498Szrj /* SIZE and CONST_SIZE are 0 for partial arguments and
295338fd1498Szrj the size of a BLKmode type otherwise. */
295438fd1498Szrj gcc_checking_assert (known_eq (size, const_size));
295538fd1498Szrj rtx mem = validize_mem (copy_rtx (args[i].value));
295638fd1498Szrj
295738fd1498Szrj /* Check for overlap with the already-clobbered argument area,
295838fd1498Szrj provided that it has a non-zero size. */
295938fd1498Szrj if (is_sibcall
296038fd1498Szrj && const_size != 0
296138fd1498Szrj && (mem_might_overlap_already_clobbered_arg_p
296238fd1498Szrj (XEXP (args[i].value, 0), const_size)))
296338fd1498Szrj *sibcall_failure = 1;
296438fd1498Szrj
296538fd1498Szrj if (const_size % UNITS_PER_WORD == 0
296638fd1498Szrj || MEM_ALIGN (mem) % BITS_PER_WORD == 0)
296738fd1498Szrj move_block_to_reg (REGNO (reg), mem, nregs, args[i].mode);
296838fd1498Szrj else
296938fd1498Szrj {
297038fd1498Szrj if (nregs > 1)
297138fd1498Szrj move_block_to_reg (REGNO (reg), mem, nregs - 1,
297238fd1498Szrj args[i].mode);
297338fd1498Szrj rtx dest = gen_rtx_REG (word_mode, REGNO (reg) + nregs - 1);
297438fd1498Szrj unsigned int bitoff = (nregs - 1) * BITS_PER_WORD;
297538fd1498Szrj unsigned int bitsize = const_size * BITS_PER_UNIT - bitoff;
297638fd1498Szrj rtx x = extract_bit_field (mem, bitsize, bitoff, 1, dest,
297738fd1498Szrj word_mode, word_mode, false,
297838fd1498Szrj NULL);
297938fd1498Szrj if (BYTES_BIG_ENDIAN)
298038fd1498Szrj x = expand_shift (LSHIFT_EXPR, word_mode, x,
298138fd1498Szrj BITS_PER_WORD - bitsize, dest, 1);
298238fd1498Szrj if (x != dest)
298338fd1498Szrj emit_move_insn (dest, x);
298438fd1498Szrj }
298538fd1498Szrj
298638fd1498Szrj /* Handle a BLKmode that needs shifting. */
298738fd1498Szrj if (nregs == 1 && const_size < UNITS_PER_WORD
298838fd1498Szrj #ifdef BLOCK_REG_PADDING
298938fd1498Szrj && args[i].locate.where_pad == PAD_DOWNWARD
299038fd1498Szrj #else
299138fd1498Szrj && BYTES_BIG_ENDIAN
299238fd1498Szrj #endif
299338fd1498Szrj )
299438fd1498Szrj {
299538fd1498Szrj rtx dest = gen_rtx_REG (word_mode, REGNO (reg));
299638fd1498Szrj int shift = (UNITS_PER_WORD - const_size) * BITS_PER_UNIT;
299738fd1498Szrj enum tree_code dir = (BYTES_BIG_ENDIAN
299838fd1498Szrj ? RSHIFT_EXPR : LSHIFT_EXPR);
299938fd1498Szrj rtx x;
300038fd1498Szrj
300138fd1498Szrj x = expand_shift (dir, word_mode, dest, shift, dest, 1);
300238fd1498Szrj if (x != dest)
300338fd1498Szrj emit_move_insn (dest, x);
300438fd1498Szrj }
300538fd1498Szrj }
300638fd1498Szrj
300738fd1498Szrj /* When a parameter is a block, and perhaps in other cases, it is
300838fd1498Szrj possible that it did a load from an argument slot that was
300938fd1498Szrj already clobbered. */
301038fd1498Szrj if (is_sibcall
301138fd1498Szrj && check_sibcall_argument_overlap (before_arg, &args[i], 0))
301238fd1498Szrj *sibcall_failure = 1;
301338fd1498Szrj
301438fd1498Szrj /* Handle calls that pass values in multiple non-contiguous
301538fd1498Szrj locations. The Irix 6 ABI has examples of this. */
301638fd1498Szrj if (GET_CODE (reg) == PARALLEL)
301738fd1498Szrj use_group_regs (call_fusage, reg);
301838fd1498Szrj else if (nregs == -1)
301938fd1498Szrj use_reg_mode (call_fusage, reg,
302038fd1498Szrj TYPE_MODE (TREE_TYPE (args[i].tree_value)));
302138fd1498Szrj else if (nregs > 0)
302238fd1498Szrj use_regs (call_fusage, REGNO (reg), nregs);
302338fd1498Szrj }
302438fd1498Szrj }
302538fd1498Szrj }
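/* E.g. (hypothetical big-endian target, 4-byte words): a 3-byte BLKmode
   argument occupies a single register; extract_bit_field loads its 24
   bits, and when the ABI pads downward the value is then shifted right
   by 8 bits so the callee finds it at the least significant end.  */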
302638fd1498Szrj
302738fd1498Szrj /* We need to pop PENDING_STACK_ADJUST bytes. But, if the arguments
302838fd1498Szrj wouldn't fill up an even multiple of PREFERRED_UNIT_STACK_BOUNDARY
302938fd1498Szrj bytes, then we would need to push some additional bytes to pad the
303038fd1498Szrj arguments. So, we try to compute an adjustment to the stack pointer for an
303138fd1498Szrj amount that will leave the stack under-aligned by UNADJUSTED_ARGS_SIZE
303238fd1498Szrj bytes. Then, when the arguments are pushed the stack will be perfectly
303338fd1498Szrj aligned.
303438fd1498Szrj
303538fd1498Szrj Return true if this optimization is possible, storing the adjustment
303638fd1498Szrj in ADJUSTMENT_OUT and setting ARGS_SIZE->CONSTANT to the number of
303738fd1498Szrj bytes that should be popped after the call. */
303838fd1498Szrj
303938fd1498Szrj static bool
304038fd1498Szrj combine_pending_stack_adjustment_and_call (poly_int64_pod *adjustment_out,
304138fd1498Szrj poly_int64 unadjusted_args_size,
304238fd1498Szrj struct args_size *args_size,
304338fd1498Szrj unsigned int preferred_unit_stack_boundary)
304438fd1498Szrj {
304538fd1498Szrj /* The number of bytes to pop so that the stack will be
304638fd1498Szrj under-aligned by UNADJUSTED_ARGS_SIZE bytes. */
304738fd1498Szrj poly_int64 adjustment;
304838fd1498Szrj /* The alignment of the stack after the arguments are pushed, if we
304938fd1498Szrj just pushed the arguments without adjusting the stack here. */
305038fd1498Szrj unsigned HOST_WIDE_INT unadjusted_alignment;
305138fd1498Szrj
305238fd1498Szrj if (!known_misalignment (stack_pointer_delta + unadjusted_args_size,
305338fd1498Szrj preferred_unit_stack_boundary,
305438fd1498Szrj &unadjusted_alignment))
305538fd1498Szrj return false;
305638fd1498Szrj
305738fd1498Szrj /* We want to get rid of as many of the PENDING_STACK_ADJUST bytes
305838fd1498Szrj as possible -- leaving just enough left to cancel out the
305938fd1498Szrj UNADJUSTED_ALIGNMENT. In other words, we want to ensure that the
306038fd1498Szrj PENDING_STACK_ADJUST is non-negative, and congruent to
306138fd1498Szrj -UNADJUSTED_ALIGNMENT modulo the PREFERRED_UNIT_STACK_BOUNDARY. */
306238fd1498Szrj
306338fd1498Szrj /* Begin by trying to pop all the bytes. */
306438fd1498Szrj unsigned HOST_WIDE_INT tmp_misalignment;
306538fd1498Szrj if (!known_misalignment (pending_stack_adjust,
306638fd1498Szrj preferred_unit_stack_boundary,
306738fd1498Szrj &tmp_misalignment))
306838fd1498Szrj return false;
306938fd1498Szrj unadjusted_alignment -= tmp_misalignment;
307038fd1498Szrj adjustment = pending_stack_adjust;
307138fd1498Szrj /* Push enough additional bytes that the stack will be aligned
307238fd1498Szrj after the arguments are pushed. */
307338fd1498Szrj if (preferred_unit_stack_boundary > 1 && unadjusted_alignment)
307438fd1498Szrj adjustment -= preferred_unit_stack_boundary - unadjusted_alignment;
307538fd1498Szrj
307638fd1498Szrj /* We need to know whether the adjusted argument size
307738fd1498Szrj (UNADJUSTED_ARGS_SIZE - ADJUSTMENT) constitutes an allocation
307838fd1498Szrj or a deallocation. */
307938fd1498Szrj if (!ordered_p (adjustment, unadjusted_args_size))
308038fd1498Szrj return false;
308138fd1498Szrj
308238fd1498Szrj /* Now, set ARGS_SIZE->CONSTANT so that we pop the right number of
308338fd1498Szrj bytes after the call. The right number is the entire
308438fd1498Szrj PENDING_STACK_ADJUST less our ADJUSTMENT plus the amount required
308538fd1498Szrj by the arguments in the first place. */
308638fd1498Szrj args_size->constant
308738fd1498Szrj = pending_stack_adjust - adjustment + unadjusted_args_size;
308838fd1498Szrj
308938fd1498Szrj *adjustment_out = adjustment;
309038fd1498Szrj return true;
309138fd1498Szrj }
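/* Worked example with hypothetical numbers: a preferred unit boundary of
   16 bytes, stack_pointer_delta == 12, unadjusted_args_size == 8 and
   pending_stack_adjust == 32.  Pushing the arguments with no adjustment
   would leave the stack (12 + 8) % 16 == 4 bytes past a boundary, so
   instead of popping all 32 pending bytes we pop 32 - (16 - 4) == 20.
   Then 12 - 20 + 8 == 0: pushing the arguments realigns the stack
   exactly, and ARGS_SIZE->CONSTANT becomes 32 - 20 + 8 == 20 bytes to
   pop after the call.  */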
309238fd1498Szrj
309338fd1498Szrj /* Scan expression X to check whether it dereferences any argument slots
309438fd1498Szrj that we already clobbered with tail call arguments (as noted in the
309538fd1498Szrj stored_args_map bitmap).
309638fd1498Szrj Return nonzero if X dereferences such an argument slot,
309738fd1498Szrj zero otherwise. */
309838fd1498Szrj
309938fd1498Szrj static int
310038fd1498Szrj check_sibcall_argument_overlap_1 (rtx x)
310138fd1498Szrj {
310238fd1498Szrj RTX_CODE code;
310338fd1498Szrj int i, j;
310438fd1498Szrj const char *fmt;
310538fd1498Szrj
310638fd1498Szrj if (x == NULL_RTX)
310738fd1498Szrj return 0;
310838fd1498Szrj
310938fd1498Szrj code = GET_CODE (x);
311038fd1498Szrj
311138fd1498Szrj /* We need not check the operands of the CALL expression itself. */
311238fd1498Szrj if (code == CALL)
311338fd1498Szrj return 0;
311438fd1498Szrj
311538fd1498Szrj if (code == MEM)
311638fd1498Szrj return (mem_might_overlap_already_clobbered_arg_p
311738fd1498Szrj (XEXP (x, 0), GET_MODE_SIZE (GET_MODE (x))));
311838fd1498Szrj
311938fd1498Szrj /* Scan all subexpressions. */
312038fd1498Szrj fmt = GET_RTX_FORMAT (code);
312138fd1498Szrj for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
312238fd1498Szrj {
312338fd1498Szrj if (*fmt == 'e')
312438fd1498Szrj {
312538fd1498Szrj if (check_sibcall_argument_overlap_1 (XEXP (x, i)))
312638fd1498Szrj return 1;
312738fd1498Szrj }
312838fd1498Szrj else if (*fmt == 'E')
312938fd1498Szrj {
313038fd1498Szrj for (j = 0; j < XVECLEN (x, i); j++)
313138fd1498Szrj if (check_sibcall_argument_overlap_1 (XVECEXP (x, i, j)))
313238fd1498Szrj return 1;
313338fd1498Szrj }
313438fd1498Szrj }
313538fd1498Szrj return 0;
313638fd1498Szrj }
313738fd1498Szrj
313838fd1498Szrj /* Scan the sequence after INSN to check whether it dereferences any
313938fd1498Szrj argument slots that we already clobbered with tail call arguments (as
314038fd1498Szrj noted in the stored_args_map bitmap). If MARK_STORED_ARGS_MAP, add the
314138fd1498Szrj stack slots for ARG to the stored_args_map bitmap afterwards (when ARG
314238fd1498Szrj is a register, MARK_STORED_ARGS_MAP should be 0). Return nonzero if the
314338fd1498Szrj sequence after INSN dereferences such argument slots, zero otherwise. */
314438fd1498Szrj
314538fd1498Szrj static int
314638fd1498Szrj check_sibcall_argument_overlap (rtx_insn *insn, struct arg_data *arg,
314738fd1498Szrj int mark_stored_args_map)
314838fd1498Szrj {
314938fd1498Szrj poly_uint64 low, high;
315038fd1498Szrj unsigned HOST_WIDE_INT const_low, const_high;
315138fd1498Szrj
315238fd1498Szrj if (insn == NULL_RTX)
315338fd1498Szrj insn = get_insns ();
315438fd1498Szrj else
315538fd1498Szrj insn = NEXT_INSN (insn);
315638fd1498Szrj
315738fd1498Szrj for (; insn; insn = NEXT_INSN (insn))
315838fd1498Szrj if (INSN_P (insn)
315938fd1498Szrj && check_sibcall_argument_overlap_1 (PATTERN (insn)))
316038fd1498Szrj break;
316138fd1498Szrj
316238fd1498Szrj if (mark_stored_args_map)
316338fd1498Szrj {
316438fd1498Szrj if (ARGS_GROW_DOWNWARD)
316538fd1498Szrj low = -arg->locate.slot_offset.constant - arg->locate.size.constant;
316638fd1498Szrj else
316738fd1498Szrj low = arg->locate.slot_offset.constant;
316838fd1498Szrj high = low + arg->locate.size.constant;
316938fd1498Szrj
317038fd1498Szrj const_low = constant_lower_bound (low);
317138fd1498Szrj if (high.is_constant (&const_high))
317238fd1498Szrj for (unsigned HOST_WIDE_INT i = const_low; i < const_high; ++i)
317338fd1498Szrj bitmap_set_bit (stored_args_map, i);
317438fd1498Szrj else
317538fd1498Szrj stored_args_watermark = MIN (stored_args_watermark, const_low);
317638fd1498Szrj }
317738fd1498Szrj return insn != NULL_RTX;
317838fd1498Szrj }
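/* E.g. (hypothetical): an argument stored at slot offset 8 with size 8
   marks bytes 8..15 in stored_args_map; for a variable-sized argument
   the upper bound is unknown, so stored_args_watermark instead drops to
   the low offset and everything above it is treated as clobbered.  */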
317938fd1498Szrj
318038fd1498Szrj /* Given that a function returns a value of mode MODE at the most
318138fd1498Szrj significant end of hard register VALUE, shift VALUE left or right
318238fd1498Szrj as specified by LEFT_P. Return true if some action was needed. */
318338fd1498Szrj
318438fd1498Szrj bool
318538fd1498Szrj shift_return_value (machine_mode mode, bool left_p, rtx value)
318638fd1498Szrj {
318738fd1498Szrj gcc_assert (REG_P (value) && HARD_REGISTER_P (value));
318838fd1498Szrj machine_mode value_mode = GET_MODE (value);
318938fd1498Szrj poly_int64 shift = GET_MODE_BITSIZE (value_mode) - GET_MODE_BITSIZE (mode);
319038fd1498Szrj
319138fd1498Szrj if (known_eq (shift, 0))
319238fd1498Szrj return false;
319338fd1498Szrj
319438fd1498Szrj /* Use ashr rather than lshr for right shifts. This is for the benefit
319538fd1498Szrj of the MIPS port, which requires SImode values to be sign-extended
319638fd1498Szrj when stored in 64-bit registers. */
319738fd1498Szrj if (!force_expand_binop (value_mode, left_p ? ashl_optab : ashr_optab,
319838fd1498Szrj value, gen_int_shift_amount (value_mode, shift),
319938fd1498Szrj value, 1, OPTAB_WIDEN))
320038fd1498Szrj gcc_unreachable ();
320138fd1498Szrj return true;
320238fd1498Szrj }
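/* E.g. (hypothetical): a 64-bit target that returns SImode values at
   the most significant end of a DImode register gives SHIFT == 32;
   LEFT_P shifts the value up into place before returning, and !LEFT_P
   uses an arithmetic right shift to recover it, preserving the sign
   extension that ports such as MIPS require.  */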
320338fd1498Szrj
320438fd1498Szrj /* If X is a likely-spilled register value, copy it to a pseudo
320538fd1498Szrj register and return that register. Return X otherwise. */
320638fd1498Szrj
320738fd1498Szrj static rtx
320838fd1498Szrj avoid_likely_spilled_reg (rtx x)
320938fd1498Szrj {
321038fd1498Szrj rtx new_rtx;
321138fd1498Szrj
321238fd1498Szrj if (REG_P (x)
321338fd1498Szrj && HARD_REGISTER_P (x)
321438fd1498Szrj && targetm.class_likely_spilled_p (REGNO_REG_CLASS (REGNO (x))))
321538fd1498Szrj {
321638fd1498Szrj /* Make sure that we generate a REG rather than a CONCAT.
321738fd1498Szrj Moves into CONCATs can need nontrivial instructions,
321838fd1498Szrj and the whole point of this function is to avoid
321938fd1498Szrj using the hard register directly in such a situation. */
322038fd1498Szrj generating_concat_p = 0;
322138fd1498Szrj new_rtx = gen_reg_rtx (GET_MODE (x));
322238fd1498Szrj generating_concat_p = 1;
322338fd1498Szrj emit_move_insn (new_rtx, x);
322438fd1498Szrj return new_rtx;
322538fd1498Szrj }
322638fd1498Szrj return x;
322738fd1498Szrj }
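/* E.g. (hypothetical): a value returned in a likely-spilled hard
   register (which classes those are is target-specific) is copied into
   a fresh pseudo here, so that keeping it live across later code does
   not constrain the register allocator.  */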
322838fd1498Szrj
322938fd1498Szrj /* Helper function for expand_call.
323038fd1498Szrj Return false if EXP cannot be implemented as a sibling call. */
323138fd1498Szrj
323238fd1498Szrj static bool
323338fd1498Szrj can_implement_as_sibling_call_p (tree exp,
323438fd1498Szrj rtx structure_value_addr,
323538fd1498Szrj tree funtype,
323638fd1498Szrj int reg_parm_stack_space ATTRIBUTE_UNUSED,
323738fd1498Szrj tree fndecl,
323838fd1498Szrj int flags,
323938fd1498Szrj tree addr,
324038fd1498Szrj const args_size &args_size)
324138fd1498Szrj {
324238fd1498Szrj if (!targetm.have_sibcall_epilogue ())
324338fd1498Szrj {
324438fd1498Szrj maybe_complain_about_tail_call
324538fd1498Szrj (exp,
324638fd1498Szrj "machine description does not have"
324738fd1498Szrj " a sibcall_epilogue instruction pattern");
324838fd1498Szrj return false;
324938fd1498Szrj }
325038fd1498Szrj
325138fd1498Szrj /* Doing sibling call optimization needs some work, since
325238fd1498Szrj structure_value_addr can be allocated on the stack.
325338fd1498Szrj It does not seem worth the effort since few optimizable
325438fd1498Szrj sibling calls will return a structure. */
325538fd1498Szrj if (structure_value_addr != NULL_RTX)
325638fd1498Szrj {
325738fd1498Szrj maybe_complain_about_tail_call (exp, "callee returns a structure");
325838fd1498Szrj return false;
325938fd1498Szrj }
326038fd1498Szrj
326138fd1498Szrj #ifdef REG_PARM_STACK_SPACE
326238fd1498Szrj /* If the outgoing reg parm stack space changes, we cannot do a sibcall. */
326338fd1498Szrj if (OUTGOING_REG_PARM_STACK_SPACE (funtype)
326438fd1498Szrj != OUTGOING_REG_PARM_STACK_SPACE (TREE_TYPE (current_function_decl))
326538fd1498Szrj || (reg_parm_stack_space != REG_PARM_STACK_SPACE (current_function_decl)))
326638fd1498Szrj {
326738fd1498Szrj maybe_complain_about_tail_call (exp,
326838fd1498Szrj "inconsistent size of stack space"
326938fd1498Szrj " allocated for arguments which are"
327038fd1498Szrj " passed in registers");
327138fd1498Szrj return false;
327238fd1498Szrj }
327338fd1498Szrj #endif
327438fd1498Szrj
327538fd1498Szrj /* Check whether the target is able to optimize the call
327638fd1498Szrj into a sibcall. */
327738fd1498Szrj if (!targetm.function_ok_for_sibcall (fndecl, exp))
327838fd1498Szrj {
327938fd1498Szrj maybe_complain_about_tail_call (exp,
328038fd1498Szrj "target is not able to optimize the"
328138fd1498Szrj " call into a sibling call");
328238fd1498Szrj return false;
328338fd1498Szrj }
328438fd1498Szrj
328538fd1498Szrj /* Functions that do not return exactly once may not be sibcall
328638fd1498Szrj optimized. */
328738fd1498Szrj if (flags & ECF_RETURNS_TWICE)
328838fd1498Szrj {
328938fd1498Szrj maybe_complain_about_tail_call (exp, "callee returns twice");
329038fd1498Szrj return false;
329138fd1498Szrj }
329238fd1498Szrj if (flags & ECF_NORETURN)
329338fd1498Szrj {
329438fd1498Szrj maybe_complain_about_tail_call (exp, "callee does not return");
329538fd1498Szrj return false;
329638fd1498Szrj }
329738fd1498Szrj
329838fd1498Szrj if (TYPE_VOLATILE (TREE_TYPE (TREE_TYPE (addr))))
329938fd1498Szrj {
330038fd1498Szrj maybe_complain_about_tail_call (exp, "volatile function type");
330138fd1498Szrj return false;
330238fd1498Szrj }
330338fd1498Szrj
330438fd1498Szrj /* If the called function is nested in the current one, it might access
330538fd1498Szrj some of the caller's arguments, but could clobber them beforehand if
330638fd1498Szrj the argument areas are shared. */
330738fd1498Szrj if (fndecl && decl_function_context (fndecl) == current_function_decl)
330838fd1498Szrj {
330938fd1498Szrj maybe_complain_about_tail_call (exp, "nested function");
331038fd1498Szrj return false;
331138fd1498Szrj }
331238fd1498Szrj
331338fd1498Szrj /* If this function requires more stack slots than the current
331438fd1498Szrj function, we cannot change it into a sibling call.
331538fd1498Szrj crtl->args.pretend_args_size is not part of the
331638fd1498Szrj stack allocated by our caller. */
331738fd1498Szrj if (maybe_gt (args_size.constant,
331838fd1498Szrj crtl->args.size - crtl->args.pretend_args_size))
331938fd1498Szrj {
332038fd1498Szrj maybe_complain_about_tail_call (exp,
332138fd1498Szrj "callee required more stack slots"
332238fd1498Szrj " than the caller");
332338fd1498Szrj return false;
332438fd1498Szrj }
332538fd1498Szrj
332638fd1498Szrj /* If the callee pops its own arguments, then it must pop exactly
332738fd1498Szrj the same number of arguments as the current function. */
332838fd1498Szrj if (maybe_ne (targetm.calls.return_pops_args (fndecl, funtype,
332938fd1498Szrj args_size.constant),
333038fd1498Szrj targetm.calls.return_pops_args (current_function_decl,
333138fd1498Szrj TREE_TYPE
333238fd1498Szrj (current_function_decl),
333338fd1498Szrj crtl->args.size)))
333438fd1498Szrj {
333538fd1498Szrj maybe_complain_about_tail_call (exp,
333638fd1498Szrj "inconsistent number of"
333738fd1498Szrj " popped arguments");
333838fd1498Szrj return false;
333938fd1498Szrj }
334038fd1498Szrj
334138fd1498Szrj if (!lang_hooks.decls.ok_for_sibcall (fndecl))
334238fd1498Szrj {
334338fd1498Szrj maybe_complain_about_tail_call (exp, "frontend does not support"
334438fd1498Szrj " sibling call");
334538fd1498Szrj return false;
334638fd1498Szrj }
334738fd1498Szrj
334838fd1498Szrj /* All checks passed. */
334938fd1498Szrj return true;
335038fd1498Szrj }
335138fd1498Szrj
335238fd1498Szrj /* Generate all the code for a CALL_EXPR exp
335338fd1498Szrj and return an rtx for its value.
335438fd1498Szrj Store the value in TARGET (specified as an rtx) if convenient.
335538fd1498Szrj If the value is stored in TARGET then TARGET is returned.
335638fd1498Szrj If IGNORE is nonzero, then we ignore the value of the function call. */
335738fd1498Szrj
335838fd1498Szrj rtx
335938fd1498Szrj expand_call (tree exp, rtx target, int ignore)
336038fd1498Szrj {
336138fd1498Szrj /* Nonzero if we are currently expanding a call. */
336238fd1498Szrj static int currently_expanding_call = 0;
336338fd1498Szrj
336438fd1498Szrj /* RTX for the function to be called. */
336538fd1498Szrj rtx funexp;
336638fd1498Szrj /* Sequence of insns to perform a normal "call". */
336738fd1498Szrj rtx_insn *normal_call_insns = NULL;
336838fd1498Szrj /* Sequence of insns to perform a tail "call". */
336938fd1498Szrj rtx_insn *tail_call_insns = NULL;
337038fd1498Szrj /* Data type of the function. */
337138fd1498Szrj tree funtype;
337238fd1498Szrj tree type_arg_types;
337338fd1498Szrj tree rettype;
337438fd1498Szrj /* Declaration of the function being called,
337538fd1498Szrj or 0 if the function is computed (not known by name). */
337638fd1498Szrj tree fndecl = 0;
337738fd1498Szrj /* The type of the function being called. */
337838fd1498Szrj tree fntype;
337938fd1498Szrj bool try_tail_call = CALL_EXPR_TAILCALL (exp);
338038fd1498Szrj bool must_tail_call = CALL_EXPR_MUST_TAIL_CALL (exp);
338138fd1498Szrj int pass;
338238fd1498Szrj
338338fd1498Szrj /* Register in which non-BLKmode value will be returned,
338438fd1498Szrj or 0 if no value or if value is BLKmode. */
338538fd1498Szrj rtx valreg;
338638fd1498Szrj /* Register(s) in which bounds are returned. */
338738fd1498Szrj rtx valbnd = NULL;
338838fd1498Szrj /* Address where we should return a BLKmode value;
338938fd1498Szrj 0 if value not BLKmode. */
339038fd1498Szrj rtx structure_value_addr = 0;
339138fd1498Szrj /* Nonzero if that address is being passed by treating it as
339238fd1498Szrj an extra, implicit first parameter. Otherwise,
339338fd1498Szrj it is passed by being copied directly into struct_value_rtx. */
339438fd1498Szrj int structure_value_addr_parm = 0;
339538fd1498Szrj /* Holds the value of implicit argument for the struct value. */
339638fd1498Szrj tree structure_value_addr_value = NULL_TREE;
339738fd1498Szrj /* Size of aggregate value wanted, or zero if none wanted
339838fd1498Szrj or if we are using the non-reentrant PCC calling convention
339938fd1498Szrj or expecting the value in registers. */
340038fd1498Szrj poly_int64 struct_value_size = 0;
340138fd1498Szrj /* Nonzero if called function returns an aggregate in memory PCC style,
340238fd1498Szrj by returning the address of where to find it. */
340338fd1498Szrj int pcc_struct_value = 0;
340438fd1498Szrj rtx struct_value = 0;
340538fd1498Szrj
340638fd1498Szrj /* Number of actual parameters in this call, including struct value addr. */
340738fd1498Szrj int num_actuals;
340838fd1498Szrj /* Number of named args. Args after this are anonymous ones
340938fd1498Szrj and they must all go on the stack. */
341038fd1498Szrj int n_named_args;
341138fd1498Szrj /* Number of complex actual arguments that need to be split. */
341238fd1498Szrj int num_complex_actuals = 0;
341338fd1498Szrj
341438fd1498Szrj /* Vector of information about each argument.
341538fd1498Szrj Arguments are numbered in the order they will be pushed,
341638fd1498Szrj not the order they are written. */
341738fd1498Szrj struct arg_data *args;
341838fd1498Szrj
341938fd1498Szrj /* Total size in bytes of all the stack-parms scanned so far. */
342038fd1498Szrj struct args_size args_size;
342138fd1498Szrj struct args_size adjusted_args_size;
342238fd1498Szrj /* Size of arguments before any adjustments (such as rounding). */
342338fd1498Szrj poly_int64 unadjusted_args_size;
342438fd1498Szrj /* Data on reg parms scanned so far. */
342538fd1498Szrj CUMULATIVE_ARGS args_so_far_v;
342638fd1498Szrj cumulative_args_t args_so_far;
342738fd1498Szrj /* Nonzero if a reg parm has been scanned. */
342838fd1498Szrj int reg_parm_seen;
342938fd1498Szrj /* Nonzero if this is an indirect function call. */
343038fd1498Szrj
343138fd1498Szrj /* Nonzero if we must avoid push-insns in the args for this call.
343238fd1498Szrj If stack space is allocated for register parameters, but not by the
343338fd1498Szrj caller, then it is preallocated in the fixed part of the stack frame.
343438fd1498Szrj So the entire argument block must then be preallocated (i.e., we
343538fd1498Szrj ignore PUSH_ROUNDING in that case). */
343638fd1498Szrj
343738fd1498Szrj int must_preallocate = !PUSH_ARGS;
343838fd1498Szrj
343938fd1498Szrj /* Size of the stack reserved for parameter registers. */
344038fd1498Szrj int reg_parm_stack_space = 0;
344138fd1498Szrj
344238fd1498Szrj /* Address of space preallocated for stack parms
344338fd1498Szrj (on machines that lack push insns), or 0 if space not preallocated. */
344438fd1498Szrj rtx argblock = 0;
344538fd1498Szrj
344638fd1498Szrj /* Mask of ECF_ and ERF_ flags. */
344738fd1498Szrj int flags = 0;
344838fd1498Szrj int return_flags = 0;
344938fd1498Szrj #ifdef REG_PARM_STACK_SPACE
345038fd1498Szrj /* Define the boundary of the register parm stack space that needs to be
345138fd1498Szrj saved, if any. */
345238fd1498Szrj int low_to_save, high_to_save;
345338fd1498Szrj rtx save_area = 0; /* Place that it is saved */
345438fd1498Szrj #endif
345538fd1498Szrj
345638fd1498Szrj unsigned int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
345738fd1498Szrj char *initial_stack_usage_map = stack_usage_map;
345838fd1498Szrj unsigned HOST_WIDE_INT initial_stack_usage_watermark = stack_usage_watermark;
345938fd1498Szrj char *stack_usage_map_buf = NULL;
346038fd1498Szrj
346138fd1498Szrj poly_int64 old_stack_allocated;
346238fd1498Szrj
346338fd1498Szrj /* State variables to track stack modifications. */
346438fd1498Szrj rtx old_stack_level = 0;
346538fd1498Szrj int old_stack_arg_under_construction = 0;
346638fd1498Szrj poly_int64 old_pending_adj = 0;
346738fd1498Szrj int old_inhibit_defer_pop = inhibit_defer_pop;
346838fd1498Szrj
346938fd1498Szrj /* Some stack pointer alterations we make are performed via
347038fd1498Szrj allocate_dynamic_stack_space. This modifies the stack_pointer_delta,
347138fd1498Szrj which we then also need to save/restore along the way. */
347238fd1498Szrj poly_int64 old_stack_pointer_delta = 0;
347338fd1498Szrj
347438fd1498Szrj rtx call_fusage;
347538fd1498Szrj tree addr = CALL_EXPR_FN (exp);
347638fd1498Szrj int i;
347738fd1498Szrj /* The alignment of the stack, in bits. */
347838fd1498Szrj unsigned HOST_WIDE_INT preferred_stack_boundary;
347938fd1498Szrj /* The alignment of the stack, in bytes. */
348038fd1498Szrj unsigned HOST_WIDE_INT preferred_unit_stack_boundary;
348138fd1498Szrj /* The static chain value to use for this call. */
348238fd1498Szrj rtx static_chain_value;
348338fd1498Szrj /* See if this is "nothrow" function call. */
348438fd1498Szrj if (TREE_NOTHROW (exp))
348538fd1498Szrj flags |= ECF_NOTHROW;
348638fd1498Szrj
348738fd1498Szrj /* See if we can find a DECL-node for the actual function, and get the
348838fd1498Szrj function attributes (flags) from the function decl or type node. */
348938fd1498Szrj fndecl = get_callee_fndecl (exp);
349038fd1498Szrj if (fndecl)
349138fd1498Szrj {
349238fd1498Szrj fntype = TREE_TYPE (fndecl);
349338fd1498Szrj flags |= flags_from_decl_or_type (fndecl);
349438fd1498Szrj return_flags |= decl_return_flags (fndecl);
349538fd1498Szrj }
349638fd1498Szrj else
349738fd1498Szrj {
349838fd1498Szrj fntype = TREE_TYPE (TREE_TYPE (addr));
349938fd1498Szrj flags |= flags_from_decl_or_type (fntype);
350038fd1498Szrj if (CALL_EXPR_BY_DESCRIPTOR (exp))
350138fd1498Szrj flags |= ECF_BY_DESCRIPTOR;
350238fd1498Szrj }
350338fd1498Szrj rettype = TREE_TYPE (exp);
350438fd1498Szrj
350538fd1498Szrj struct_value = targetm.calls.struct_value_rtx (fntype, 0);
350638fd1498Szrj
350738fd1498Szrj /* Warn if this value is an aggregate type,
350838fd1498Szrj regardless of which calling convention we are using for it. */
350938fd1498Szrj if (AGGREGATE_TYPE_P (rettype))
351038fd1498Szrj warning (OPT_Waggregate_return, "function call has aggregate value");
351138fd1498Szrj
351238fd1498Szrj /* If the result of a non-looping pure or const function call is
351338fd1498Szrj ignored (or void), and none of its arguments are volatile, we can
351438fd1498Szrj avoid expanding the call and just evaluate the arguments for
351538fd1498Szrj side-effects. */
351638fd1498Szrj if ((flags & (ECF_CONST | ECF_PURE))
351738fd1498Szrj && (!(flags & ECF_LOOPING_CONST_OR_PURE))
351838fd1498Szrj && (ignore || target == const0_rtx
351938fd1498Szrj || TYPE_MODE (rettype) == VOIDmode))
352038fd1498Szrj {
352138fd1498Szrj bool volatilep = false;
352238fd1498Szrj tree arg;
352338fd1498Szrj call_expr_arg_iterator iter;
352438fd1498Szrj
352538fd1498Szrj FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
352638fd1498Szrj if (TREE_THIS_VOLATILE (arg))
352738fd1498Szrj {
352838fd1498Szrj volatilep = true;
352938fd1498Szrj break;
353038fd1498Szrj }
353138fd1498Szrj
353238fd1498Szrj if (! volatilep)
353338fd1498Szrj {
353438fd1498Szrj FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
353538fd1498Szrj expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
353638fd1498Szrj return const0_rtx;
353738fd1498Szrj }
353838fd1498Szrj }
353938fd1498Szrj
354038fd1498Szrj #ifdef REG_PARM_STACK_SPACE
354138fd1498Szrj reg_parm_stack_space = REG_PARM_STACK_SPACE (!fndecl ? fntype : fndecl);
354238fd1498Szrj #endif
354338fd1498Szrj
354438fd1498Szrj if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl)))
354538fd1498Szrj && reg_parm_stack_space > 0 && PUSH_ARGS)
354638fd1498Szrj must_preallocate = 1;
354738fd1498Szrj
354838fd1498Szrj /* Set up a place to return a structure. */
354938fd1498Szrj
355038fd1498Szrj /* Cater to broken compilers. */
355138fd1498Szrj if (aggregate_value_p (exp, fntype))
355238fd1498Szrj {
355338fd1498Szrj /* This call returns a big structure. */
355438fd1498Szrj flags &= ~(ECF_CONST | ECF_PURE | ECF_LOOPING_CONST_OR_PURE);
355538fd1498Szrj
355638fd1498Szrj #ifdef PCC_STATIC_STRUCT_RETURN
355738fd1498Szrj {
355838fd1498Szrj pcc_struct_value = 1;
355938fd1498Szrj }
356038fd1498Szrj #else /* not PCC_STATIC_STRUCT_RETURN */
356138fd1498Szrj {
356238fd1498Szrj if (!poly_int_tree_p (TYPE_SIZE_UNIT (rettype), &struct_value_size))
356338fd1498Szrj struct_value_size = -1;
356438fd1498Szrj
356538fd1498Szrj /* Even if it is semantically safe to use the target as the return
356638fd1498Szrj slot, it may not be sufficiently aligned for the return type. */
356738fd1498Szrj if (CALL_EXPR_RETURN_SLOT_OPT (exp)
356838fd1498Szrj && target
356938fd1498Szrj && MEM_P (target)
357038fd1498Szrj /* If rettype is addressable, we may not create a temporary.
357138fd1498Szrj If target is properly aligned at runtime and the compiler
357238fd1498Szrj just doesn't know about it, it will work fine, otherwise it
357338fd1498Szrj will be UB. */
357438fd1498Szrj && (TREE_ADDRESSABLE (rettype)
357538fd1498Szrj || !(MEM_ALIGN (target) < TYPE_ALIGN (rettype)
357638fd1498Szrj && targetm.slow_unaligned_access (TYPE_MODE (rettype),
357738fd1498Szrj MEM_ALIGN (target)))))
357838fd1498Szrj structure_value_addr = XEXP (target, 0);
357938fd1498Szrj else
358038fd1498Szrj {
358138fd1498Szrj /* For variable-sized objects, we must be called with a target
358238fd1498Szrj specified. If we were to allocate space on the stack here,
358338fd1498Szrj we would have no way of knowing when to free it. */
358438fd1498Szrj rtx d = assign_temp (rettype, 1, 1);
358538fd1498Szrj structure_value_addr = XEXP (d, 0);
358638fd1498Szrj target = 0;
358738fd1498Szrj }
358838fd1498Szrj }
358938fd1498Szrj #endif /* not PCC_STATIC_STRUCT_RETURN */
359038fd1498Szrj }
359138fd1498Szrj
359238fd1498Szrj /* Figure out the amount to which the stack should be aligned. */
359338fd1498Szrj preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
359438fd1498Szrj if (fndecl)
359538fd1498Szrj {
359638fd1498Szrj struct cgraph_rtl_info *i = cgraph_node::rtl_info (fndecl);
359738fd1498Szrj /* Without automatic stack alignment, we can't increase preferred
359838fd1498Szrj stack boundary. With automatic stack alignment, it is
359938fd1498Szrj unnecessary: unless we can guarantee that all callers will
360038fd1498Szrj align the outgoing stack properly, the callee has to align its
360138fd1498Szrj stack anyway. */
360238fd1498Szrj if (i
360338fd1498Szrj && i->preferred_incoming_stack_boundary
360438fd1498Szrj && i->preferred_incoming_stack_boundary < preferred_stack_boundary)
360538fd1498Szrj preferred_stack_boundary = i->preferred_incoming_stack_boundary;
360638fd1498Szrj }
360738fd1498Szrj
360838fd1498Szrj /* Operand 0 is a pointer-to-function; get the type of the function. */
360938fd1498Szrj funtype = TREE_TYPE (addr);
361038fd1498Szrj gcc_assert (POINTER_TYPE_P (funtype));
361138fd1498Szrj funtype = TREE_TYPE (funtype);
361238fd1498Szrj
361338fd1498Szrj /* Count whether there are actual complex arguments that need to be split
361438fd1498Szrj into their real and imaginary parts. Munge the type_arg_types
361538fd1498Szrj appropriately here as well. */
361638fd1498Szrj if (targetm.calls.split_complex_arg)
361738fd1498Szrj {
361838fd1498Szrj call_expr_arg_iterator iter;
361938fd1498Szrj tree arg;
362038fd1498Szrj FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
362138fd1498Szrj {
362238fd1498Szrj tree type = TREE_TYPE (arg);
362338fd1498Szrj if (type && TREE_CODE (type) == COMPLEX_TYPE
362438fd1498Szrj && targetm.calls.split_complex_arg (type))
362538fd1498Szrj num_complex_actuals++;
362638fd1498Szrj }
362738fd1498Szrj type_arg_types = split_complex_types (TYPE_ARG_TYPES (funtype));
362838fd1498Szrj }
362938fd1498Szrj else
363038fd1498Szrj type_arg_types = TYPE_ARG_TYPES (funtype);
363138fd1498Szrj
363238fd1498Szrj if (flags & ECF_MAY_BE_ALLOCA)
363338fd1498Szrj cfun->calls_alloca = 1;
363438fd1498Szrj
363538fd1498Szrj /* If struct_value_rtx is 0, it means pass the address
363638fd1498Szrj as if it were an extra parameter. Put the argument expression
363738fd1498Szrj in structure_value_addr_value. */
363838fd1498Szrj if (structure_value_addr && struct_value == 0)
363938fd1498Szrj {
364038fd1498Szrj /* If structure_value_addr is a REG other than
364138fd1498Szrj virtual_outgoing_args_rtx, we can always use it. If it
364238fd1498Szrj is not a REG, we must always copy it into a register.
364338fd1498Szrj If it is virtual_outgoing_args_rtx, we must copy it to another
364438fd1498Szrj register in some cases. */
364538fd1498Szrj rtx temp = (!REG_P (structure_value_addr)
364638fd1498Szrj || (ACCUMULATE_OUTGOING_ARGS
364738fd1498Szrj && stack_arg_under_construction
364838fd1498Szrj && structure_value_addr == virtual_outgoing_args_rtx)
364938fd1498Szrj ? copy_addr_to_reg (convert_memory_address
365038fd1498Szrj (Pmode, structure_value_addr))
365138fd1498Szrj : structure_value_addr);
365238fd1498Szrj
365338fd1498Szrj structure_value_addr_value =
365438fd1498Szrj make_tree (build_pointer_type (TREE_TYPE (funtype)), temp);
365538fd1498Szrj structure_value_addr_parm = CALL_WITH_BOUNDS_P (exp) ? 2 : 1;
365638fd1498Szrj }
365738fd1498Szrj
365838fd1498Szrj /* Count the arguments and set NUM_ACTUALS. */
365938fd1498Szrj num_actuals =
366038fd1498Szrj call_expr_nargs (exp) + num_complex_actuals + structure_value_addr_parm;
366138fd1498Szrj
366238fd1498Szrj /* Compute number of named args.
366338fd1498Szrj First, do a raw count of the args for INIT_CUMULATIVE_ARGS. */
366438fd1498Szrj
366538fd1498Szrj if (type_arg_types != 0)
366638fd1498Szrj n_named_args
366738fd1498Szrj = (list_length (type_arg_types)
366838fd1498Szrj /* Count the struct value address, if it is passed as a parm. */
366938fd1498Szrj + structure_value_addr_parm);
367038fd1498Szrj else
367138fd1498Szrj /* If we know nothing, treat all args as named. */
367238fd1498Szrj n_named_args = num_actuals;
367338fd1498Szrj
367438fd1498Szrj /* Start updating where the next arg would go.
367538fd1498Szrj
367638fd1498Szrj On some machines (such as the PA) indirect calls have a different
367738fd1498Szrj calling convention than normal calls. The fourth argument in
367838fd1498Szrj INIT_CUMULATIVE_ARGS tells the backend if this is an indirect call
367938fd1498Szrj or not. */
368038fd1498Szrj INIT_CUMULATIVE_ARGS (args_so_far_v, funtype, NULL_RTX, fndecl, n_named_args);
368138fd1498Szrj args_so_far = pack_cumulative_args (&args_so_far_v);
368238fd1498Szrj
368338fd1498Szrj /* Now possibly adjust the number of named args.
368438fd1498Szrj Normally, don't include the last named arg if anonymous args follow.
368538fd1498Szrj We do include the last named arg if
368638fd1498Szrj targetm.calls.strict_argument_naming() returns nonzero.
368738fd1498Szrj (If no anonymous args follow, the result of list_length is actually
368838fd1498Szrj one too large. This is harmless.)
368938fd1498Szrj
369038fd1498Szrj If targetm.calls.pretend_outgoing_varargs_named() returns
369138fd1498Szrj nonzero, and targetm.calls.strict_argument_naming() returns zero,
369238fd1498Szrj this machine will be able to place unnamed args that were passed
369338fd1498Szrj in registers into the stack. So treat all args as named. This
369438fd1498Szrj allows the insns emitted for a specific argument list to be
369538fd1498Szrj independent of the function declaration.
369638fd1498Szrj
369738fd1498Szrj If targetm.calls.pretend_outgoing_varargs_named() returns zero,
369838fd1498Szrj we do not have any reliable way to pass unnamed args in
369938fd1498Szrj registers, so we must force them into memory. */
370038fd1498Szrj
370138fd1498Szrj if (type_arg_types != 0
370238fd1498Szrj && targetm.calls.strict_argument_naming (args_so_far))
370338fd1498Szrj ;
370438fd1498Szrj else if (type_arg_types != 0
370538fd1498Szrj && ! targetm.calls.pretend_outgoing_varargs_named (args_so_far))
370638fd1498Szrj /* Don't include the last named arg. */
370738fd1498Szrj --n_named_args;
370838fd1498Szrj else
370938fd1498Szrj /* Treat all args as named. */
371038fd1498Szrj n_named_args = num_actuals;
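/* Worked example (annotation, not original source): for a call to
   int printf (const char *, ...) with three actual arguments,
   list_length (type_arg_types) is 1.  With strict argument naming
   n_named_args stays 1; if the target cannot pretend outgoing
   varargs are named it drops to 0; otherwise all three actuals are
   treated as named.  */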
371138fd1498Szrj
371238fd1498Szrj /* Make a vector to hold all the information about each arg. */
371338fd1498Szrj args = XCNEWVEC (struct arg_data, num_actuals);
371438fd1498Szrj
371538fd1498Szrj /* Build up entries in the ARGS array, compute the size of the
371638fd1498Szrj arguments into ARGS_SIZE, etc. */
371738fd1498Szrj initialize_argument_information (num_actuals, args, &args_size,
371838fd1498Szrj n_named_args, exp,
371938fd1498Szrj structure_value_addr_value, fndecl, fntype,
372038fd1498Szrj args_so_far, reg_parm_stack_space,
372138fd1498Szrj &old_stack_level, &old_pending_adj,
372238fd1498Szrj &must_preallocate, &flags,
372338fd1498Szrj &try_tail_call, CALL_FROM_THUNK_P (exp));
372438fd1498Szrj
372538fd1498Szrj if (args_size.var)
372638fd1498Szrj must_preallocate = 1;
372738fd1498Szrj
372838fd1498Szrj /* Now make final decision about preallocating stack space. */
372938fd1498Szrj must_preallocate = finalize_must_preallocate (must_preallocate,
373038fd1498Szrj num_actuals, args,
373138fd1498Szrj &args_size);
373238fd1498Szrj
373338fd1498Szrj /* If the structure value address will reference the stack pointer, we
373438fd1498Szrj must stabilize it. We don't need to do this if we know that we are
373538fd1498Szrj not going to adjust the stack pointer in processing this call. */
373638fd1498Szrj
373738fd1498Szrj if (structure_value_addr
373838fd1498Szrj && (reg_mentioned_p (virtual_stack_dynamic_rtx, structure_value_addr)
373938fd1498Szrj || reg_mentioned_p (virtual_outgoing_args_rtx,
374038fd1498Szrj structure_value_addr))
374138fd1498Szrj && (args_size.var
374238fd1498Szrj || (!ACCUMULATE_OUTGOING_ARGS
374338fd1498Szrj && maybe_ne (args_size.constant, 0))))
374438fd1498Szrj structure_value_addr = copy_to_reg (structure_value_addr);
374538fd1498Szrj
374638fd1498Szrj /* Tail calls can make things harder to debug, and we've traditionally
374738fd1498Szrj pushed these optimizations into -O2. Don't try if we're already
374838fd1498Szrj expanding a call, as that means we're an argument. Don't try if
374938fd1498Szrj there are cleanups, as we know there's code to follow the call. */
375038fd1498Szrj
375138fd1498Szrj if (currently_expanding_call++ != 0
375238fd1498Szrj || !flag_optimize_sibling_calls
375338fd1498Szrj || args_size.var
375438fd1498Szrj || dbg_cnt (tail_call) == false)
375538fd1498Szrj try_tail_call = 0;
375638fd1498Szrj
375738fd1498Szrj /* If the user has marked the function as requiring tail-call
375838fd1498Szrj optimization, attempt it. */
375938fd1498Szrj if (must_tail_call)
376038fd1498Szrj try_tail_call = 1;
376138fd1498Szrj
376238fd1498Szrj /* Check the remaining reasons why a tail call optimization might fail. */
376338fd1498Szrj if (try_tail_call)
376438fd1498Szrj try_tail_call = can_implement_as_sibling_call_p (exp,
376538fd1498Szrj structure_value_addr,
376638fd1498Szrj funtype,
376738fd1498Szrj reg_parm_stack_space,
376838fd1498Szrj fndecl,
376938fd1498Szrj flags, addr, args_size);
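/* Sketch of a call that can survive these checks (annotation, not
   original source):

       int g (int);
       int f (int x) { return g (x + 1); }

   The call to g is in tail position, its arguments fit in f's own
   incoming argument area, and no cleanups follow, so pass 0 of the
   expansion loop below may emit it as a sibcall.  */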
377038fd1498Szrj
377138fd1498Szrj /* Check if caller and callee disagree in promotion of function
377238fd1498Szrj return value. */
377338fd1498Szrj if (try_tail_call)
377438fd1498Szrj {
377538fd1498Szrj machine_mode caller_mode, caller_promoted_mode;
377638fd1498Szrj machine_mode callee_mode, callee_promoted_mode;
377738fd1498Szrj int caller_unsignedp, callee_unsignedp;
377838fd1498Szrj tree caller_res = DECL_RESULT (current_function_decl);
377938fd1498Szrj
378038fd1498Szrj caller_unsignedp = TYPE_UNSIGNED (TREE_TYPE (caller_res));
378138fd1498Szrj caller_mode = DECL_MODE (caller_res);
378238fd1498Szrj callee_unsignedp = TYPE_UNSIGNED (TREE_TYPE (funtype));
378338fd1498Szrj callee_mode = TYPE_MODE (TREE_TYPE (funtype));
378438fd1498Szrj caller_promoted_mode
378538fd1498Szrj = promote_function_mode (TREE_TYPE (caller_res), caller_mode,
378638fd1498Szrj &caller_unsignedp,
378738fd1498Szrj TREE_TYPE (current_function_decl), 1);
378838fd1498Szrj callee_promoted_mode
378938fd1498Szrj = promote_function_mode (TREE_TYPE (funtype), callee_mode,
379038fd1498Szrj &callee_unsignedp,
379138fd1498Szrj funtype, 1);
379238fd1498Szrj if (caller_mode != VOIDmode
379338fd1498Szrj && (caller_promoted_mode != callee_promoted_mode
379438fd1498Szrj || ((caller_mode != caller_promoted_mode
379538fd1498Szrj || callee_mode != callee_promoted_mode)
379638fd1498Szrj && (caller_unsignedp != callee_unsignedp
379738fd1498Szrj || partial_subreg_p (caller_mode, callee_mode)))))
379838fd1498Szrj {
379938fd1498Szrj try_tail_call = 0;
380038fd1498Szrj maybe_complain_about_tail_call (exp,
380138fd1498Szrj "caller and callee disagree in"
380238fd1498Szrj " promotion of function"
380338fd1498Szrj " return value");
380438fd1498Szrj }
380538fd1498Szrj }
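/* Example of the mismatch guarded against above (annotation, not
   original source): on a target that promotes sub-word return values
   to word mode, a caller returning signed short and a callee
   returning unsigned short both promote to SImode but with different
   extensions; a sibcall would hand our caller a value extended the
   wrong way, so it is refused.  */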
380638fd1498Szrj
380738fd1498Szrj /* Ensure current function's preferred stack boundary is at least
380838fd1498Szrj what we need. Stack alignment may also increase preferred stack
380938fd1498Szrj boundary. */
381038fd1498Szrj if (crtl->preferred_stack_boundary < preferred_stack_boundary)
381138fd1498Szrj crtl->preferred_stack_boundary = preferred_stack_boundary;
381238fd1498Szrj else
381338fd1498Szrj preferred_stack_boundary = crtl->preferred_stack_boundary;
381438fd1498Szrj
381538fd1498Szrj preferred_unit_stack_boundary = preferred_stack_boundary / BITS_PER_UNIT;
381638fd1498Szrj
381738fd1498Szrj /* We want to make two insn chains; one for a sibling call, the other
381838fd1498Szrj for a normal call. We will select one of the two chains after
381938fd1498Szrj initial RTL generation is complete. */
382038fd1498Szrj for (pass = try_tail_call ? 0 : 1; pass < 2; pass++)
382138fd1498Szrj {
382238fd1498Szrj int sibcall_failure = 0;
382338fd1498Szrj /* We want to emit any pending stack adjustments before the tail
382438fd1498Szrj recursion "call". That way we know any adjustment after the tail
382538fd1498Szrj recursion call can be ignored if we indeed use the tail
382638fd1498Szrj call expansion. */
382738fd1498Szrj saved_pending_stack_adjust save;
382838fd1498Szrj rtx_insn *insns, *before_call, *after_args;
382938fd1498Szrj rtx next_arg_reg;
383038fd1498Szrj
383138fd1498Szrj if (pass == 0)
383238fd1498Szrj {
383338fd1498Szrj /* State variables we need to save and restore between
383438fd1498Szrj iterations. */
383538fd1498Szrj save_pending_stack_adjust (&save);
383638fd1498Szrj }
383738fd1498Szrj if (pass)
383838fd1498Szrj flags &= ~ECF_SIBCALL;
383938fd1498Szrj else
384038fd1498Szrj flags |= ECF_SIBCALL;
384138fd1498Szrj
384238fd1498Szrj /* Other state variables that we must reinitialize each time
384338fd1498Szrj through the loop (that are not initialized by the loop itself). */
384438fd1498Szrj argblock = 0;
384538fd1498Szrj call_fusage = 0;
384638fd1498Szrj
384738fd1498Szrj /* Start a new sequence for the normal call case.
384838fd1498Szrj
384938fd1498Szrj From this point on, if the sibling call fails, we want to set
385038fd1498Szrj sibcall_failure instead of continuing the loop. */
385138fd1498Szrj start_sequence ();
385238fd1498Szrj
385338fd1498Szrj /* Don't let pending stack adjusts add up to too much.
385438fd1498Szrj Also, do all pending adjustments now if there is any chance
385538fd1498Szrj this might be a call to alloca or if we are expanding a sibling
385638fd1498Szrj call sequence.
385738fd1498Szrj Also do the adjustments before a throwing call, otherwise
385838fd1498Szrj exception handling can fail; PR 19225. */
385938fd1498Szrj if (maybe_ge (pending_stack_adjust, 32)
386038fd1498Szrj || (maybe_ne (pending_stack_adjust, 0)
386138fd1498Szrj && (flags & ECF_MAY_BE_ALLOCA))
386238fd1498Szrj || (maybe_ne (pending_stack_adjust, 0)
386338fd1498Szrj && flag_exceptions && !(flags & ECF_NOTHROW))
386438fd1498Szrj || pass == 0)
386538fd1498Szrj do_pending_stack_adjust ();
386638fd1498Szrj
386738fd1498Szrj /* Precompute any arguments as needed. */
386838fd1498Szrj if (pass)
386938fd1498Szrj precompute_arguments (num_actuals, args);
387038fd1498Szrj
387138fd1498Szrj /* Now we are about to start emitting insns that can be deleted
387238fd1498Szrj if a libcall is deleted. */
387338fd1498Szrj if (pass && (flags & ECF_MALLOC))
387438fd1498Szrj start_sequence ();
387538fd1498Szrj
387638fd1498Szrj if (pass == 0
387738fd1498Szrj && crtl->stack_protect_guard
387838fd1498Szrj && targetm.stack_protect_runtime_enabled_p ())
387938fd1498Szrj stack_protect_epilogue ();
388038fd1498Szrj
388138fd1498Szrj adjusted_args_size = args_size;
388238fd1498Szrj /* Compute the actual size of the argument block required. The variable
388338fd1498Szrj and constant sizes must be combined, the size may have to be rounded,
388438fd1498Szrj and there may be a minimum required size. When generating a sibcall
388538fd1498Szrj pattern, do not round up, since we'll be re-using whatever space our
388638fd1498Szrj caller provided. */
388738fd1498Szrj unadjusted_args_size
388838fd1498Szrj = compute_argument_block_size (reg_parm_stack_space,
388938fd1498Szrj &adjusted_args_size,
389038fd1498Szrj fndecl, fntype,
389138fd1498Szrj (pass == 0 ? 0
389238fd1498Szrj : preferred_stack_boundary));
389338fd1498Szrj
389438fd1498Szrj old_stack_allocated = stack_pointer_delta - pending_stack_adjust;
389538fd1498Szrj
389638fd1498Szrj /* The argument block when performing a sibling call is the
389738fd1498Szrj incoming argument block. */
389838fd1498Szrj if (pass == 0)
389938fd1498Szrj {
390038fd1498Szrj argblock = crtl->args.internal_arg_pointer;
390138fd1498Szrj if (STACK_GROWS_DOWNWARD)
390238fd1498Szrj argblock
390338fd1498Szrj = plus_constant (Pmode, argblock, crtl->args.pretend_args_size);
390438fd1498Szrj else
390538fd1498Szrj argblock
390638fd1498Szrj = plus_constant (Pmode, argblock, -crtl->args.pretend_args_size);
390738fd1498Szrj
390838fd1498Szrj HOST_WIDE_INT map_size = constant_lower_bound (args_size.constant);
390938fd1498Szrj stored_args_map = sbitmap_alloc (map_size);
391038fd1498Szrj bitmap_clear (stored_args_map);
391138fd1498Szrj stored_args_watermark = HOST_WIDE_INT_M1U;
391238fd1498Szrj }
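/* Annotation, not original source: for a sibcall the callee reuses
   the caller's incoming argument area, so stored_args_map records
   which bytes of that area argument setup has overwritten; if a
   later argument still needs to be read from an overwritten slot,
   check_sibcall_argument_overlap forces sibcall_failure.  */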
391338fd1498Szrj
391438fd1498Szrj /* If we have no actual push instructions, or shouldn't use them,
391538fd1498Szrj make space for all args right now. */
391638fd1498Szrj else if (adjusted_args_size.var != 0)
391738fd1498Szrj {
391838fd1498Szrj if (old_stack_level == 0)
391938fd1498Szrj {
392038fd1498Szrj emit_stack_save (SAVE_BLOCK, &old_stack_level);
392138fd1498Szrj old_stack_pointer_delta = stack_pointer_delta;
392238fd1498Szrj old_pending_adj = pending_stack_adjust;
392338fd1498Szrj pending_stack_adjust = 0;
392438fd1498Szrj /* stack_arg_under_construction says whether a stack arg is
392538fd1498Szrj being constructed at the old stack level. Pushing the stack
392638fd1498Szrj gets a clean outgoing argument block. */
392738fd1498Szrj old_stack_arg_under_construction = stack_arg_under_construction;
392838fd1498Szrj stack_arg_under_construction = 0;
392938fd1498Szrj }
393038fd1498Szrj argblock = push_block (ARGS_SIZE_RTX (adjusted_args_size), 0, 0);
393138fd1498Szrj if (flag_stack_usage_info)
393238fd1498Szrj current_function_has_unbounded_dynamic_stack_size = 1;
393338fd1498Szrj }
393438fd1498Szrj else
393538fd1498Szrj {
393638fd1498Szrj /* Note that we must go through the motions of allocating an argument
393738fd1498Szrj block even if the size is zero because we may be storing args
393838fd1498Szrj in the area reserved for register arguments, which may be part of
393938fd1498Szrj the stack frame. */
394038fd1498Szrj
394138fd1498Szrj poly_int64 needed = adjusted_args_size.constant;
394238fd1498Szrj
394338fd1498Szrj /* Store the maximum argument space used. It will be pushed by
394438fd1498Szrj the prologue (if ACCUMULATE_OUTGOING_ARGS, or stack overflow
394538fd1498Szrj checking). */
394638fd1498Szrj
394738fd1498Szrj crtl->outgoing_args_size = upper_bound (crtl->outgoing_args_size,
394838fd1498Szrj needed);
394938fd1498Szrj
395038fd1498Szrj if (must_preallocate)
395138fd1498Szrj {
395238fd1498Szrj if (ACCUMULATE_OUTGOING_ARGS)
395338fd1498Szrj {
395438fd1498Szrj /* Since the stack pointer will never be pushed, it is
395538fd1498Szrj possible for the evaluation of a parm to clobber
395638fd1498Szrj something we have already written to the stack.
395738fd1498Szrj Since most function calls on RISC machines do not use
395838fd1498Szrj the stack, this is uncommon, but must work correctly.
395938fd1498Szrj
396038fd1498Szrj Therefore, we save any area of the stack that was already
396138fd1498Szrj written and that we are using. Here we set up to do this
396238fd1498Szrj by making a new stack usage map from the old one. The
396338fd1498Szrj actual save will be done by store_one_arg.
396438fd1498Szrj
396538fd1498Szrj Another approach might be to try to reorder the argument
396638fd1498Szrj evaluations to avoid this conflicting stack usage. */
396738fd1498Szrj
396838fd1498Szrj /* Since we will be writing into the entire argument area,
396938fd1498Szrj the map must be allocated for its entire size, not just
397038fd1498Szrj the part that is the responsibility of the caller. */
397138fd1498Szrj if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl))))
397238fd1498Szrj needed += reg_parm_stack_space;
397338fd1498Szrj
397438fd1498Szrj poly_int64 limit = needed;
397538fd1498Szrj if (ARGS_GROW_DOWNWARD)
397638fd1498Szrj limit += 1;
397738fd1498Szrj
397838fd1498Szrj /* For polynomial sizes, this is the maximum possible
397938fd1498Szrj size needed for arguments with a constant size
398038fd1498Szrj and offset. */
398138fd1498Szrj HOST_WIDE_INT const_limit = constant_lower_bound (limit);
398238fd1498Szrj highest_outgoing_arg_in_use
398338fd1498Szrj = MAX (initial_highest_arg_in_use, const_limit);
398438fd1498Szrj
398538fd1498Szrj free (stack_usage_map_buf);
398638fd1498Szrj stack_usage_map_buf = XNEWVEC (char, highest_outgoing_arg_in_use);
398738fd1498Szrj stack_usage_map = stack_usage_map_buf;
398838fd1498Szrj
398938fd1498Szrj if (initial_highest_arg_in_use)
399038fd1498Szrj memcpy (stack_usage_map, initial_stack_usage_map,
399138fd1498Szrj initial_highest_arg_in_use);
399238fd1498Szrj
399338fd1498Szrj if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
399438fd1498Szrj memset (&stack_usage_map[initial_highest_arg_in_use], 0,
399538fd1498Szrj (highest_outgoing_arg_in_use
399638fd1498Szrj - initial_highest_arg_in_use));
399738fd1498Szrj needed = 0;
399838fd1498Szrj
399938fd1498Szrj /* The address of the outgoing argument list must not be
400038fd1498Szrj copied to a register here, because argblock would be left
400138fd1498Szrj pointing to the wrong place after the call to
400238fd1498Szrj allocate_dynamic_stack_space below. */
400338fd1498Szrj
400438fd1498Szrj argblock = virtual_outgoing_args_rtx;
400538fd1498Szrj }
400638fd1498Szrj else
400738fd1498Szrj {
400838fd1498Szrj /* Try to reuse some or all of the pending_stack_adjust
400938fd1498Szrj to get this space. */
401038fd1498Szrj if (inhibit_defer_pop == 0
401138fd1498Szrj && (combine_pending_stack_adjustment_and_call
401238fd1498Szrj (&needed,
401338fd1498Szrj unadjusted_args_size,
401438fd1498Szrj &adjusted_args_size,
401538fd1498Szrj preferred_unit_stack_boundary)))
401638fd1498Szrj {
401738fd1498Szrj /* combine_pending_stack_adjustment_and_call computes
401838fd1498Szrj an adjustment before the arguments are allocated.
401938fd1498Szrj Account for them and see whether or not the stack
402038fd1498Szrj needs to go up or down. */
402138fd1498Szrj needed = unadjusted_args_size - needed;
402238fd1498Szrj
402338fd1498Szrj /* Checked by
402438fd1498Szrj combine_pending_stack_adjustment_and_call. */
402538fd1498Szrj gcc_checking_assert (ordered_p (needed, 0));
402638fd1498Szrj if (maybe_lt (needed, 0))
402738fd1498Szrj {
402838fd1498Szrj /* We're releasing stack space. */
402938fd1498Szrj /* ??? We can avoid any adjustment at all if we're
403038fd1498Szrj already aligned. FIXME. */
403138fd1498Szrj pending_stack_adjust = -needed;
403238fd1498Szrj do_pending_stack_adjust ();
403338fd1498Szrj needed = 0;
403438fd1498Szrj }
403538fd1498Szrj else
403638fd1498Szrj /* We need to allocate space. We'll do that in
403738fd1498Szrj push_block below. */
403838fd1498Szrj pending_stack_adjust = 0;
403938fd1498Szrj }
404038fd1498Szrj
404138fd1498Szrj /* Special case this because overhead of `push_block' in
404238fd1498Szrj this case is non-trivial. */
404338fd1498Szrj if (known_eq (needed, 0))
404438fd1498Szrj argblock = virtual_outgoing_args_rtx;
404538fd1498Szrj else
404638fd1498Szrj {
404738fd1498Szrj rtx needed_rtx = gen_int_mode (needed, Pmode);
404838fd1498Szrj argblock = push_block (needed_rtx, 0, 0);
404938fd1498Szrj if (ARGS_GROW_DOWNWARD)
405038fd1498Szrj argblock = plus_constant (Pmode, argblock, needed);
405138fd1498Szrj }
405238fd1498Szrj
405338fd1498Szrj /* We only really need to call `copy_to_reg' in the case
405438fd1498Szrj where push insns are going to be used to pass ARGBLOCK
405538fd1498Szrj to a function call in ARGS. In that case, the stack
405638fd1498Szrj pointer changes value from the allocation point to the
405738fd1498Szrj call point, and hence the value of
405838fd1498Szrj VIRTUAL_OUTGOING_ARGS_RTX changes as well. But might
405938fd1498Szrj as well always do it. */
406038fd1498Szrj argblock = copy_to_reg (argblock);
406138fd1498Szrj }
406238fd1498Szrj }
406338fd1498Szrj }
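/* Summary of the preallocation cases above (annotation, not original
   source): with ACCUMULATE_OUTGOING_ARGS the argument block stays at
   virtual_outgoing_args_rtx and stack_usage_map tracks live bytes so
   store_one_arg can save and restore anything it would clobber;
   otherwise any pending stack adjustment is recycled first and
   push_block allocates whatever space is still missing.  */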
406438fd1498Szrj
406538fd1498Szrj if (ACCUMULATE_OUTGOING_ARGS)
406638fd1498Szrj {
406738fd1498Szrj /* The save/restore code in store_one_arg handles all
406838fd1498Szrj cases except one: a constructor call (including a C
406938fd1498Szrj function returning a BLKmode struct) to initialize
407038fd1498Szrj an argument. */
407138fd1498Szrj if (stack_arg_under_construction)
407238fd1498Szrj {
407338fd1498Szrj rtx push_size
407438fd1498Szrj = (gen_int_mode
407538fd1498Szrj (adjusted_args_size.constant
407638fd1498Szrj + (OUTGOING_REG_PARM_STACK_SPACE (!fndecl ? fntype
407738fd1498Szrj : TREE_TYPE (fndecl))
407838fd1498Szrj ? 0 : reg_parm_stack_space), Pmode));
407938fd1498Szrj if (old_stack_level == 0)
408038fd1498Szrj {
408138fd1498Szrj emit_stack_save (SAVE_BLOCK, &old_stack_level);
408238fd1498Szrj old_stack_pointer_delta = stack_pointer_delta;
408338fd1498Szrj old_pending_adj = pending_stack_adjust;
408438fd1498Szrj pending_stack_adjust = 0;
408538fd1498Szrj /* stack_arg_under_construction says whether a stack
408638fd1498Szrj arg is being constructed at the old stack level.
408738fd1498Szrj Pushing the stack gets a clean outgoing argument
408838fd1498Szrj block. */
408938fd1498Szrj old_stack_arg_under_construction
409038fd1498Szrj = stack_arg_under_construction;
409138fd1498Szrj stack_arg_under_construction = 0;
409238fd1498Szrj /* Make a new map for the new argument list. */
409338fd1498Szrj free (stack_usage_map_buf);
409438fd1498Szrj stack_usage_map_buf = XCNEWVEC (char, highest_outgoing_arg_in_use);
409538fd1498Szrj stack_usage_map = stack_usage_map_buf;
409638fd1498Szrj highest_outgoing_arg_in_use = 0;
409738fd1498Szrj stack_usage_watermark = HOST_WIDE_INT_M1U;
409838fd1498Szrj }
409938fd1498Szrj /* We can pass TRUE as the last argument because we just
410038fd1498Szrj saved the stack pointer and will restore it right after
410138fd1498Szrj the call. */
410238fd1498Szrj allocate_dynamic_stack_space (push_size, 0, BIGGEST_ALIGNMENT,
410338fd1498Szrj -1, true);
410438fd1498Szrj }
410538fd1498Szrj
410638fd1498Szrj /* If argument evaluation might modify the stack pointer,
410738fd1498Szrj copy the address of the argument list to a register. */
410838fd1498Szrj for (i = 0; i < num_actuals; i++)
410938fd1498Szrj if (args[i].pass_on_stack)
411038fd1498Szrj {
411138fd1498Szrj argblock = copy_addr_to_reg (argblock);
411238fd1498Szrj break;
411338fd1498Szrj }
411438fd1498Szrj }
411538fd1498Szrj
411638fd1498Szrj compute_argument_addresses (args, argblock, num_actuals);
411738fd1498Szrj
411838fd1498Szrj /* Stack is properly aligned, pops can't safely be deferred during
411938fd1498Szrj the evaluation of the arguments. */
412038fd1498Szrj NO_DEFER_POP;
412138fd1498Szrj
412238fd1498Szrj /* Precompute all register parameters. It isn't safe to compute
412338fd1498Szrj anything once we have started filling any specific hard regs.
412438fd1498Szrj TLS symbols sometimes need a call to resolve. Precompute
412538fd1498Szrj register parameters before any stack pointer manipulation
412638fd1498Szrj to avoid unaligned stack in the called function. */
412738fd1498Szrj precompute_register_parameters (num_actuals, args, &reg_parm_seen);
412838fd1498Szrj
412938fd1498Szrj OK_DEFER_POP;
413038fd1498Szrj
413138fd1498Szrj /* Perform stack alignment before the first push (the last arg). */
413238fd1498Szrj if (argblock == 0
413338fd1498Szrj && maybe_gt (adjusted_args_size.constant, reg_parm_stack_space)
413438fd1498Szrj && maybe_ne (adjusted_args_size.constant, unadjusted_args_size))
413538fd1498Szrj {
413638fd1498Szrj /* When the stack adjustment is pending, we get better code
413738fd1498Szrj by combining the adjustments. */
413838fd1498Szrj if (maybe_ne (pending_stack_adjust, 0)
413938fd1498Szrj && ! inhibit_defer_pop
414038fd1498Szrj && (combine_pending_stack_adjustment_and_call
414138fd1498Szrj (&pending_stack_adjust,
414238fd1498Szrj unadjusted_args_size,
414338fd1498Szrj &adjusted_args_size,
414438fd1498Szrj preferred_unit_stack_boundary)))
414538fd1498Szrj do_pending_stack_adjust ();
414638fd1498Szrj else if (argblock == 0)
414738fd1498Szrj anti_adjust_stack (gen_int_mode (adjusted_args_size.constant
414838fd1498Szrj - unadjusted_args_size,
414938fd1498Szrj Pmode));
415038fd1498Szrj }
415138fd1498Szrj /* Now that the stack is properly aligned, pops can't safely
415238fd1498Szrj be deferred during the evaluation of the arguments. */
415338fd1498Szrj NO_DEFER_POP;
415438fd1498Szrj
415538fd1498Szrj /* Record the maximum pushed stack space size. We need to delay
415638fd1498Szrj doing it this far to take into account the optimization done
415738fd1498Szrj by combine_pending_stack_adjustment_and_call. */
415838fd1498Szrj if (flag_stack_usage_info
415938fd1498Szrj && !ACCUMULATE_OUTGOING_ARGS
416038fd1498Szrj && pass
416138fd1498Szrj && adjusted_args_size.var == 0)
416238fd1498Szrj {
416338fd1498Szrj poly_int64 pushed = (adjusted_args_size.constant
416438fd1498Szrj + pending_stack_adjust);
416538fd1498Szrj current_function_pushed_stack_size
416638fd1498Szrj = upper_bound (current_function_pushed_stack_size, pushed);
416738fd1498Szrj }
416838fd1498Szrj
416938fd1498Szrj funexp = rtx_for_function_call (fndecl, addr);
417038fd1498Szrj
417138fd1498Szrj if (CALL_EXPR_STATIC_CHAIN (exp))
417238fd1498Szrj static_chain_value = expand_normal (CALL_EXPR_STATIC_CHAIN (exp));
417338fd1498Szrj else
417438fd1498Szrj static_chain_value = 0;
417538fd1498Szrj
417638fd1498Szrj #ifdef REG_PARM_STACK_SPACE
417738fd1498Szrj /* Save the fixed argument area if it's part of the caller's frame and
417838fd1498Szrj is clobbered by argument setup for this call. */
417938fd1498Szrj if (ACCUMULATE_OUTGOING_ARGS && pass)
418038fd1498Szrj save_area = save_fixed_argument_area (reg_parm_stack_space, argblock,
418138fd1498Szrj &low_to_save, &high_to_save);
418238fd1498Szrj #endif
418338fd1498Szrj
418438fd1498Szrj /* Now store (and compute if necessary) all non-register parms.
418538fd1498Szrj These come before register parms, since they can require block-moves,
418638fd1498Szrj which could clobber the registers used for register parms.
418738fd1498Szrj Parms which have partial registers are not stored here,
418838fd1498Szrj but we do preallocate space here if they want that. */
418938fd1498Szrj
419038fd1498Szrj for (i = 0; i < num_actuals; i++)
419138fd1498Szrj {
419238fd1498Szrj /* Delay bounds until all other args are stored. */
419338fd1498Szrj if (POINTER_BOUNDS_P (args[i].tree_value))
419438fd1498Szrj continue;
419538fd1498Szrj else if (args[i].reg == 0 || args[i].pass_on_stack)
419638fd1498Szrj {
419738fd1498Szrj rtx_insn *before_arg = get_last_insn ();
419838fd1498Szrj
419938fd1498Szrj /* We don't allow passing huge (> 2^30 B) arguments
420038fd1498Szrj by value. It would cause an overflow later on. */
420138fd1498Szrj if (constant_lower_bound (adjusted_args_size.constant)
420238fd1498Szrj >= (1 << (HOST_BITS_PER_INT - 2)))
420338fd1498Szrj {
420438fd1498Szrj sorry ("passing too large argument on stack");
420538fd1498Szrj continue;
420638fd1498Szrj }
420738fd1498Szrj
420838fd1498Szrj if (store_one_arg (&args[i], argblock, flags,
420938fd1498Szrj adjusted_args_size.var != 0,
421038fd1498Szrj reg_parm_stack_space)
421138fd1498Szrj || (pass == 0
421238fd1498Szrj && check_sibcall_argument_overlap (before_arg,
421338fd1498Szrj &args[i], 1)))
421438fd1498Szrj sibcall_failure = 1;
421538fd1498Szrj }
421638fd1498Szrj
421738fd1498Szrj if (args[i].stack)
421838fd1498Szrj call_fusage
421938fd1498Szrj = gen_rtx_EXPR_LIST (TYPE_MODE (TREE_TYPE (args[i].tree_value)),
422038fd1498Szrj gen_rtx_USE (VOIDmode, args[i].stack),
422138fd1498Szrj call_fusage);
422238fd1498Szrj }
422338fd1498Szrj
422438fd1498Szrj /* If we have a parm that is passed in registers but not in memory
422538fd1498Szrj and whose alignment does not permit a direct copy into registers,
422638fd1498Szrj make a group of pseudos that correspond to each register that we
422738fd1498Szrj will later fill. */
422838fd1498Szrj if (STRICT_ALIGNMENT)
422938fd1498Szrj store_unaligned_arguments_into_pseudos (args, num_actuals);
423038fd1498Szrj
423138fd1498Szrj /* Now store any partially-in-registers parm.
423238fd1498Szrj This is the last place a block-move can happen. */
423338fd1498Szrj if (reg_parm_seen)
423438fd1498Szrj for (i = 0; i < num_actuals; i++)
423538fd1498Szrj if (args[i].partial != 0 && ! args[i].pass_on_stack)
423638fd1498Szrj {
423738fd1498Szrj rtx_insn *before_arg = get_last_insn ();
423838fd1498Szrj
423938fd1498Szrj /* On targets with weird calling conventions (e.g. PA) it's
424038fd1498Szrj hard to ensure that all cases of argument overlap between
424138fd1498Szrj stack and registers work. Play it safe and bail out. */
424238fd1498Szrj if (ARGS_GROW_DOWNWARD && !STACK_GROWS_DOWNWARD)
424338fd1498Szrj {
424438fd1498Szrj sibcall_failure = 1;
424538fd1498Szrj break;
424638fd1498Szrj }
424738fd1498Szrj
424838fd1498Szrj if (store_one_arg (&args[i], argblock, flags,
424938fd1498Szrj adjusted_args_size.var != 0,
425038fd1498Szrj reg_parm_stack_space)
425138fd1498Szrj || (pass == 0
425238fd1498Szrj && check_sibcall_argument_overlap (before_arg,
425338fd1498Szrj &args[i], 1)))
425438fd1498Szrj sibcall_failure = 1;
425538fd1498Szrj }
425638fd1498Szrj
425738fd1498Szrj bool any_regs = false;
425838fd1498Szrj for (i = 0; i < num_actuals; i++)
425938fd1498Szrj if (args[i].reg != NULL_RTX)
426038fd1498Szrj {
426138fd1498Szrj any_regs = true;
426238fd1498Szrj targetm.calls.call_args (args[i].reg, funtype);
426338fd1498Szrj }
426438fd1498Szrj if (!any_regs)
426538fd1498Szrj targetm.calls.call_args (pc_rtx, funtype);
426638fd1498Szrj
426738fd1498Szrj /* Figure out the register where the value, if any, will come back. */
426838fd1498Szrj valreg = 0;
426938fd1498Szrj valbnd = 0;
427038fd1498Szrj if (TYPE_MODE (rettype) != VOIDmode
427138fd1498Szrj && ! structure_value_addr)
427238fd1498Szrj {
427338fd1498Szrj if (pcc_struct_value)
427438fd1498Szrj {
427538fd1498Szrj valreg = hard_function_value (build_pointer_type (rettype),
427638fd1498Szrj fndecl, NULL, (pass == 0));
427738fd1498Szrj if (CALL_WITH_BOUNDS_P (exp))
427838fd1498Szrj valbnd = targetm.calls.
427938fd1498Szrj chkp_function_value_bounds (build_pointer_type (rettype),
428038fd1498Szrj fndecl, (pass == 0));
428138fd1498Szrj }
428238fd1498Szrj else
428338fd1498Szrj {
428438fd1498Szrj valreg = hard_function_value (rettype, fndecl, fntype,
428538fd1498Szrj (pass == 0));
428638fd1498Szrj if (CALL_WITH_BOUNDS_P (exp))
428738fd1498Szrj valbnd = targetm.calls.chkp_function_value_bounds (rettype,
428838fd1498Szrj fndecl,
428938fd1498Szrj (pass == 0));
429038fd1498Szrj }
429138fd1498Szrj
429238fd1498Szrj /* If VALREG is a PARALLEL whose first member has a zero
429338fd1498Szrj offset, use that. This is for targets such as m68k that
429438fd1498Szrj return the same value in multiple places. */
429538fd1498Szrj if (GET_CODE (valreg) == PARALLEL)
429638fd1498Szrj {
429738fd1498Szrj rtx elem = XVECEXP (valreg, 0, 0);
429838fd1498Szrj rtx where = XEXP (elem, 0);
429938fd1498Szrj rtx offset = XEXP (elem, 1);
430038fd1498Szrj if (offset == const0_rtx
430138fd1498Szrj && GET_MODE (where) == GET_MODE (valreg))
430238fd1498Szrj valreg = where;
430338fd1498Szrj }
430438fd1498Szrj }
430538fd1498Szrj
430638fd1498Szrj /* Store all bounds not passed in registers. */
430738fd1498Szrj for (i = 0; i < num_actuals; i++)
430838fd1498Szrj {
430938fd1498Szrj if (POINTER_BOUNDS_P (args[i].tree_value)
431038fd1498Szrj && !args[i].reg)
431138fd1498Szrj store_bounds (&args[i],
431238fd1498Szrj args[i].pointer_arg == -1
431338fd1498Szrj ? NULL
431438fd1498Szrj : &args[args[i].pointer_arg]);
431538fd1498Szrj }
431638fd1498Szrj
431738fd1498Szrj /* If register arguments require space on the stack and stack space
431838fd1498Szrj was not preallocated, allocate stack space here for arguments
431938fd1498Szrj passed in registers. */
432038fd1498Szrj if (OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl)))
432138fd1498Szrj && !ACCUMULATE_OUTGOING_ARGS
432238fd1498Szrj && must_preallocate == 0 && reg_parm_stack_space > 0)
432338fd1498Szrj anti_adjust_stack (GEN_INT (reg_parm_stack_space));
432438fd1498Szrj
432538fd1498Szrj /* Pass the function the address in which to return a
432638fd1498Szrj structure value. */
432738fd1498Szrj if (pass != 0 && structure_value_addr && ! structure_value_addr_parm)
432838fd1498Szrj {
432938fd1498Szrj structure_value_addr
433038fd1498Szrj = convert_memory_address (Pmode, structure_value_addr);
433138fd1498Szrj emit_move_insn (struct_value,
433238fd1498Szrj force_reg (Pmode,
433338fd1498Szrj force_operand (structure_value_addr,
433438fd1498Szrj NULL_RTX)));
433538fd1498Szrj
433638fd1498Szrj if (REG_P (struct_value))
433738fd1498Szrj use_reg (&call_fusage, struct_value);
433838fd1498Szrj }
433938fd1498Szrj
434038fd1498Szrj after_args = get_last_insn ();
434138fd1498Szrj funexp = prepare_call_address (fndecl ? fndecl : fntype, funexp,
434238fd1498Szrj static_chain_value, &call_fusage,
434338fd1498Szrj reg_parm_seen, flags);
434438fd1498Szrj
434538fd1498Szrj load_register_parameters (args, num_actuals, &call_fusage, flags,
434638fd1498Szrj pass == 0, &sibcall_failure);
434738fd1498Szrj
434838fd1498Szrj /* Save a pointer to the last insn before the call, so that we can
434938fd1498Szrj later safely search backwards to find the CALL_INSN. */
435038fd1498Szrj before_call = get_last_insn ();
435138fd1498Szrj
435238fd1498Szrj /* Set up next argument register. For sibling calls on machines
435338fd1498Szrj with register windows this should be the incoming register. */
435438fd1498Szrj if (pass == 0)
435538fd1498Szrj next_arg_reg = targetm.calls.function_incoming_arg (args_so_far,
435638fd1498Szrj VOIDmode,
435738fd1498Szrj void_type_node,
435838fd1498Szrj true);
435938fd1498Szrj else
436038fd1498Szrj next_arg_reg = targetm.calls.function_arg (args_so_far,
436138fd1498Szrj VOIDmode, void_type_node,
436238fd1498Szrj true);
436338fd1498Szrj
436438fd1498Szrj if (pass == 1 && (return_flags & ERF_RETURNS_ARG))
436538fd1498Szrj {
436638fd1498Szrj int arg_nr = return_flags & ERF_RETURN_ARG_MASK;
436738fd1498Szrj arg_nr = num_actuals - arg_nr - 1;
436838fd1498Szrj if (arg_nr >= 0
436938fd1498Szrj && arg_nr < num_actuals
437038fd1498Szrj && args[arg_nr].reg
437138fd1498Szrj && valreg
437238fd1498Szrj && REG_P (valreg)
437338fd1498Szrj && GET_MODE (args[arg_nr].reg) == GET_MODE (valreg))
437438fd1498Szrj call_fusage
437538fd1498Szrj = gen_rtx_EXPR_LIST (TYPE_MODE (TREE_TYPE (args[arg_nr].tree_value)),
437638fd1498Szrj gen_rtx_SET (valreg, args[arg_nr].reg),
437738fd1498Szrj call_fusage);
437838fd1498Szrj }
437938fd1498Szrj /* All arguments and registers used for the call must be set up by
438038fd1498Szrj now! */
438138fd1498Szrj
438238fd1498Szrj /* Stack must be properly aligned now. */
438338fd1498Szrj gcc_assert (!pass
438438fd1498Szrj || multiple_p (stack_pointer_delta,
438538fd1498Szrj preferred_unit_stack_boundary));
438638fd1498Szrj
438738fd1498Szrj /* Generate the actual call instruction. */
438838fd1498Szrj emit_call_1 (funexp, exp, fndecl, funtype, unadjusted_args_size,
438938fd1498Szrj adjusted_args_size.constant, struct_value_size,
439038fd1498Szrj next_arg_reg, valreg, old_inhibit_defer_pop, call_fusage,
439138fd1498Szrj flags, args_so_far);
439238fd1498Szrj
439338fd1498Szrj if (flag_ipa_ra)
439438fd1498Szrj {
439538fd1498Szrj rtx_call_insn *last;
439638fd1498Szrj rtx datum = NULL_RTX;
439738fd1498Szrj if (fndecl != NULL_TREE)
439838fd1498Szrj {
439938fd1498Szrj datum = XEXP (DECL_RTL (fndecl), 0);
440038fd1498Szrj gcc_assert (datum != NULL_RTX
440138fd1498Szrj && GET_CODE (datum) == SYMBOL_REF);
440238fd1498Szrj }
440338fd1498Szrj last = last_call_insn ();
440438fd1498Szrj add_reg_note (last, REG_CALL_DECL, datum);
440538fd1498Szrj }
440638fd1498Szrj
440738fd1498Szrj /* If the call setup or the call itself overlaps with anything
440838fd1498Szrj of the argument setup we probably clobbered our call address.
440938fd1498Szrj In that case we can't do sibcalls. */
441038fd1498Szrj if (pass == 0
441138fd1498Szrj && check_sibcall_argument_overlap (after_args, 0, 0))
441238fd1498Szrj sibcall_failure = 1;
441338fd1498Szrj
441438fd1498Szrj /* If a non-BLKmode value is returned at the most significant end
441538fd1498Szrj of a register, shift the register right by the appropriate amount
441638fd1498Szrj and update VALREG accordingly. BLKmode values are handled by the
441738fd1498Szrj group load/store machinery below. */
441838fd1498Szrj if (!structure_value_addr
441938fd1498Szrj && !pcc_struct_value
442038fd1498Szrj && TYPE_MODE (rettype) != VOIDmode
442138fd1498Szrj && TYPE_MODE (rettype) != BLKmode
442238fd1498Szrj && REG_P (valreg)
442338fd1498Szrj && targetm.calls.return_in_msb (rettype))
442438fd1498Szrj {
442538fd1498Szrj if (shift_return_value (TYPE_MODE (rettype), false, valreg))
442638fd1498Szrj sibcall_failure = 1;
442738fd1498Szrj valreg = gen_rtx_REG (TYPE_MODE (rettype), REGNO (valreg));
442838fd1498Szrj }
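/* Concrete case (annotation, not original source): on a big-endian
   ABI that returns a narrow value in the most significant end of a
   word register, the shift above moves the value down to the least
   significant end, and VALREG is re-created in the value's own mode
   so the extraction code below sees it where it expects.  */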
442938fd1498Szrj
443038fd1498Szrj if (pass && (flags & ECF_MALLOC))
443138fd1498Szrj {
443238fd1498Szrj rtx temp = gen_reg_rtx (GET_MODE (valreg));
443338fd1498Szrj rtx_insn *last, *insns;
443438fd1498Szrj
443538fd1498Szrj /* The return value from a malloc-like function is a pointer. */
443638fd1498Szrj if (TREE_CODE (rettype) == POINTER_TYPE)
443738fd1498Szrj mark_reg_pointer (temp, MALLOC_ABI_ALIGNMENT);
443838fd1498Szrj
443938fd1498Szrj emit_move_insn (temp, valreg);
444038fd1498Szrj
444138fd1498Szrj /* The return value from a malloc-like function cannot alias
444238fd1498Szrj anything else. */
444338fd1498Szrj last = get_last_insn ();
444438fd1498Szrj add_reg_note (last, REG_NOALIAS, temp);
444538fd1498Szrj
444638fd1498Szrj /* Write out the sequence. */
444738fd1498Szrj insns = get_insns ();
444838fd1498Szrj end_sequence ();
444938fd1498Szrj emit_insn (insns);
445038fd1498Szrj valreg = temp;
445138fd1498Szrj }
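/* Rationale (annotation): copying VALREG into a fresh pseudo tagged
   with REG_NOALIAS lets alias analysis assume the result of a
   malloc-like call overlaps no pre-existing object, which is exactly
   what the ECF_MALLOC flag promises.  */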
445238fd1498Szrj
445338fd1498Szrj /* For calls to `setjmp', etc., inform
445438fd1498Szrj function.c:setjmp_warnings that it should complain if
445538fd1498Szrj nonvolatile values are live. For functions that cannot
445638fd1498Szrj return, inform flow that control does not fall through. */
445738fd1498Szrj
445838fd1498Szrj if ((flags & ECF_NORETURN) || pass == 0)
445938fd1498Szrj {
446038fd1498Szrj /* The barrier must be emitted
446138fd1498Szrj immediately after the CALL_INSN. Some ports emit more
446238fd1498Szrj than just a CALL_INSN above, so we must search for it here. */
446338fd1498Szrj
446438fd1498Szrj rtx_insn *last = get_last_insn ();
446538fd1498Szrj while (!CALL_P (last))
446638fd1498Szrj {
446738fd1498Szrj last = PREV_INSN (last);
446838fd1498Szrj /* There was no CALL_INSN? */
446938fd1498Szrj gcc_assert (last != before_call);
447038fd1498Szrj }
447138fd1498Szrj
447238fd1498Szrj emit_barrier_after (last);
447338fd1498Szrj
447438fd1498Szrj /* Stack adjustments after a noreturn call are dead code.
447538fd1498Szrj However when NO_DEFER_POP is in effect, we must preserve
447638fd1498Szrj stack_pointer_delta. */
447738fd1498Szrj if (inhibit_defer_pop == 0)
447838fd1498Szrj {
447938fd1498Szrj stack_pointer_delta = old_stack_allocated;
448038fd1498Szrj pending_stack_adjust = 0;
448138fd1498Szrj }
448238fd1498Szrj }
448338fd1498Szrj
448438fd1498Szrj /* If value type not void, return an rtx for the value. */
448538fd1498Szrj
448638fd1498Szrj if (TYPE_MODE (rettype) == VOIDmode
448738fd1498Szrj || ignore)
448838fd1498Szrj target = const0_rtx;
448938fd1498Szrj else if (structure_value_addr)
449038fd1498Szrj {
449138fd1498Szrj if (target == 0 || !MEM_P (target))
449238fd1498Szrj {
449338fd1498Szrj target
449438fd1498Szrj = gen_rtx_MEM (TYPE_MODE (rettype),
449538fd1498Szrj memory_address (TYPE_MODE (rettype),
449638fd1498Szrj structure_value_addr));
449738fd1498Szrj set_mem_attributes (target, rettype, 1);
449838fd1498Szrj }
449938fd1498Szrj }
450038fd1498Szrj else if (pcc_struct_value)
450138fd1498Szrj {
450238fd1498Szrj /* This is the special C++ case where we need to
450338fd1498Szrj know what the true target was. We take care to
450438fd1498Szrj never use this value more than once in one expression. */
450538fd1498Szrj target = gen_rtx_MEM (TYPE_MODE (rettype),
450638fd1498Szrj copy_to_reg (valreg));
450738fd1498Szrj set_mem_attributes (target, rettype, 1);
450838fd1498Szrj }
450938fd1498Szrj /* Handle calls that return values in multiple non-contiguous locations.
451038fd1498Szrj The Irix 6 ABI has examples of this. */
451138fd1498Szrj else if (GET_CODE (valreg) == PARALLEL)
451238fd1498Szrj {
451338fd1498Szrj if (target == 0)
451438fd1498Szrj target = emit_group_move_into_temps (valreg);
451538fd1498Szrj else if (rtx_equal_p (target, valreg))
451638fd1498Szrj ;
451738fd1498Szrj else if (GET_CODE (target) == PARALLEL)
451838fd1498Szrj /* Handle the result of an emit_group_move_into_temps
451938fd1498Szrj call in the previous pass. */
452038fd1498Szrj emit_group_move (target, valreg);
452138fd1498Szrj else
452238fd1498Szrj emit_group_store (target, valreg, rettype,
452338fd1498Szrj int_size_in_bytes (rettype));
452438fd1498Szrj }
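/* Example (annotation, not original source): a small structure
   holding a float and an int may come back with the float in a
   floating-point register and the int in a general register; VALREG
   is then a PARALLEL of (register, offset) pairs and
   emit_group_store scatters the pieces into TARGET.  */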
452538fd1498Szrj else if (target
452638fd1498Szrj && GET_MODE (target) == TYPE_MODE (rettype)
452738fd1498Szrj && GET_MODE (target) == GET_MODE (valreg))
452838fd1498Szrj {
452938fd1498Szrj bool may_overlap = false;
453038fd1498Szrj
453138fd1498Szrj /* We have to copy a return value in a CLASS_LIKELY_SPILLED hard
453238fd1498Szrj reg to a plain register. */
453338fd1498Szrj if (!REG_P (target) || HARD_REGISTER_P (target))
453438fd1498Szrj valreg = avoid_likely_spilled_reg (valreg);
453538fd1498Szrj
453638fd1498Szrj /* If TARGET is a MEM in the argument area, and we have
453738fd1498Szrj saved part of the argument area, then we can't store
453838fd1498Szrj directly into TARGET as it may get overwritten when we
453938fd1498Szrj restore the argument save area below. Don't work too
454038fd1498Szrj hard though and simply force TARGET to a register if it
454138fd1498Szrj is a MEM; the optimizer is quite likely to sort it out. */
454238fd1498Szrj if (ACCUMULATE_OUTGOING_ARGS && pass && MEM_P (target))
454338fd1498Szrj for (i = 0; i < num_actuals; i++)
454438fd1498Szrj if (args[i].save_area)
454538fd1498Szrj {
454638fd1498Szrj may_overlap = true;
454738fd1498Szrj break;
454838fd1498Szrj }
454938fd1498Szrj
455038fd1498Szrj if (may_overlap)
455138fd1498Szrj target = copy_to_reg (valreg);
455238fd1498Szrj else
455338fd1498Szrj {
455438fd1498Szrj /* TARGET and VALREG cannot be equal at this point
455538fd1498Szrj because the latter would not have
455638fd1498Szrj REG_FUNCTION_VALUE_P true, while the former would if
455738fd1498Szrj it were referring to the same register.
455838fd1498Szrj
455938fd1498Szrj If they refer to the same register, this move will be
456038fd1498Szrj a no-op, except when function inlining is being
456138fd1498Szrj done. */
456238fd1498Szrj emit_move_insn (target, valreg);
456338fd1498Szrj
456438fd1498Szrj /* If we are setting a MEM, this code must be executed.
456538fd1498Szrj Since it is emitted after the call insn, sibcall
456638fd1498Szrj optimization cannot be performed in that case. */
456738fd1498Szrj if (MEM_P (target))
456838fd1498Szrj sibcall_failure = 1;
456938fd1498Szrj }
457038fd1498Szrj }
457138fd1498Szrj else
457238fd1498Szrj target = copy_to_reg (avoid_likely_spilled_reg (valreg));
457338fd1498Szrj
457438fd1498Szrj /* If we promoted this return value, make the proper SUBREG.
457538fd1498Szrj TARGET might be const0_rtx here, so be careful. */
457638fd1498Szrj if (REG_P (target)
457738fd1498Szrj && TYPE_MODE (rettype) != BLKmode
457838fd1498Szrj && GET_MODE (target) != TYPE_MODE (rettype))
457938fd1498Szrj {
458038fd1498Szrj tree type = rettype;
458138fd1498Szrj int unsignedp = TYPE_UNSIGNED (type);
458238fd1498Szrj machine_mode pmode;
458338fd1498Szrj
458438fd1498Szrj /* Ensure we promote as expected, and get the new unsignedness. */
458538fd1498Szrj pmode = promote_function_mode (type, TYPE_MODE (type), &unsignedp,
458638fd1498Szrj funtype, 1);
458738fd1498Szrj gcc_assert (GET_MODE (target) == pmode);
458838fd1498Szrj
458938fd1498Szrj poly_uint64 offset = subreg_lowpart_offset (TYPE_MODE (type),
459038fd1498Szrj GET_MODE (target));
459138fd1498Szrj target = gen_rtx_SUBREG (TYPE_MODE (type), target, offset);
459238fd1498Szrj SUBREG_PROMOTED_VAR_P (target) = 1;
459338fd1498Szrj SUBREG_PROMOTED_SET (target, unsignedp);
459438fd1498Szrj }
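/* Example (annotation): if the ABI promotes a short return value to
   SImode, TARGET is the SImode copy; the lowpart SUBREG built above
   re-types it as HImode while SUBREG_PROMOTED_VAR_P records that the
   upper bits are already correctly extended, letting later code skip
   a redundant extension.  */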
459538fd1498Szrj
459638fd1498Szrj /* If size of args is variable or this was a constructor call for a stack
459738fd1498Szrj argument, restore saved stack-pointer value. */
459838fd1498Szrj
459938fd1498Szrj if (old_stack_level)
460038fd1498Szrj {
460138fd1498Szrj rtx_insn *prev = get_last_insn ();
460238fd1498Szrj
460338fd1498Szrj emit_stack_restore (SAVE_BLOCK, old_stack_level);
460438fd1498Szrj stack_pointer_delta = old_stack_pointer_delta;
460538fd1498Szrj
460638fd1498Szrj fixup_args_size_notes (prev, get_last_insn (), stack_pointer_delta);
460738fd1498Szrj
460838fd1498Szrj pending_stack_adjust = old_pending_adj;
460938fd1498Szrj old_stack_allocated = stack_pointer_delta - pending_stack_adjust;
461038fd1498Szrj stack_arg_under_construction = old_stack_arg_under_construction;
461138fd1498Szrj highest_outgoing_arg_in_use = initial_highest_arg_in_use;
461238fd1498Szrj stack_usage_map = initial_stack_usage_map;
461338fd1498Szrj stack_usage_watermark = initial_stack_usage_watermark;
461438fd1498Szrj sibcall_failure = 1;
461538fd1498Szrj }
461638fd1498Szrj else if (ACCUMULATE_OUTGOING_ARGS && pass)
461738fd1498Szrj {
461838fd1498Szrj #ifdef REG_PARM_STACK_SPACE
461938fd1498Szrj if (save_area)
462038fd1498Szrj restore_fixed_argument_area (save_area, argblock,
462138fd1498Szrj high_to_save, low_to_save);
462238fd1498Szrj #endif
462338fd1498Szrj
462438fd1498Szrj /* If we saved any argument areas, restore them. */
462538fd1498Szrj for (i = 0; i < num_actuals; i++)
462638fd1498Szrj if (args[i].save_area)
462738fd1498Szrj {
462838fd1498Szrj machine_mode save_mode = GET_MODE (args[i].save_area);
462938fd1498Szrj rtx stack_area
463038fd1498Szrj = gen_rtx_MEM (save_mode,
463138fd1498Szrj memory_address (save_mode,
463238fd1498Szrj XEXP (args[i].stack_slot, 0)));
463338fd1498Szrj
463438fd1498Szrj if (save_mode != BLKmode)
463538fd1498Szrj emit_move_insn (stack_area, args[i].save_area);
463638fd1498Szrj else
463738fd1498Szrj emit_block_move (stack_area, args[i].save_area,
463838fd1498Szrj (gen_int_mode
463938fd1498Szrj (args[i].locate.size.constant, Pmode)),
464038fd1498Szrj BLOCK_OP_CALL_PARM);
464138fd1498Szrj }
464238fd1498Szrj
464338fd1498Szrj highest_outgoing_arg_in_use = initial_highest_arg_in_use;
464438fd1498Szrj stack_usage_map = initial_stack_usage_map;
464538fd1498Szrj stack_usage_watermark = initial_stack_usage_watermark;
464638fd1498Szrj }
464738fd1498Szrj
464838fd1498Szrj /* If this was alloca, record the new stack level. */
464938fd1498Szrj if (flags & ECF_MAY_BE_ALLOCA)
465038fd1498Szrj record_new_stack_level ();
465138fd1498Szrj
465238fd1498Szrj /* Free up storage we no longer need. */
465338fd1498Szrj for (i = 0; i < num_actuals; ++i)
465438fd1498Szrj free (args[i].aligned_regs);
465538fd1498Szrj
465638fd1498Szrj targetm.calls.end_call_args ();
465738fd1498Szrj
465838fd1498Szrj insns = get_insns ();
465938fd1498Szrj end_sequence ();
466038fd1498Szrj
466138fd1498Szrj if (pass == 0)
466238fd1498Szrj {
466338fd1498Szrj tail_call_insns = insns;
466438fd1498Szrj
466538fd1498Szrj /* Restore the pending stack adjustment now that we have
466638fd1498Szrj finished generating the sibling call sequence. */
466738fd1498Szrj
466838fd1498Szrj restore_pending_stack_adjust (&save);
466938fd1498Szrj
467038fd1498Szrj /* Prepare arg structure for next iteration. */
467138fd1498Szrj for (i = 0; i < num_actuals; i++)
467238fd1498Szrj {
467338fd1498Szrj args[i].value = 0;
467438fd1498Szrj args[i].aligned_regs = 0;
467538fd1498Szrj args[i].stack = 0;
467638fd1498Szrj }
467738fd1498Szrj
467838fd1498Szrj sbitmap_free (stored_args_map);
467938fd1498Szrj internal_arg_pointer_exp_state.scan_start = NULL;
468038fd1498Szrj internal_arg_pointer_exp_state.cache.release ();
468138fd1498Szrj }
468238fd1498Szrj else
468338fd1498Szrj {
468438fd1498Szrj normal_call_insns = insns;
468538fd1498Szrj
468638fd1498Szrj /* Verify that we've deallocated all the stack we used. */
468738fd1498Szrj gcc_assert ((flags & ECF_NORETURN)
468838fd1498Szrj || known_eq (old_stack_allocated,
468938fd1498Szrj stack_pointer_delta
469038fd1498Szrj - pending_stack_adjust));
469138fd1498Szrj }
469238fd1498Szrj
469338fd1498Szrj /* If something prevents making this a sibling call,
469438fd1498Szrj zero out the sequence. */
469538fd1498Szrj if (sibcall_failure)
469638fd1498Szrj tail_call_insns = NULL;
469738fd1498Szrj else
469838fd1498Szrj break;
469938fd1498Szrj }
470038fd1498Szrj
470138fd1498Szrj /* If tail call production succeeded, we need to remove REG_EQUIV notes on
470238fd1498Szrj arguments too, as argument area is now clobbered by the call. */
470338fd1498Szrj if (tail_call_insns)
470438fd1498Szrj {
470538fd1498Szrj emit_insn (tail_call_insns);
470638fd1498Szrj crtl->tail_call_emit = true;
470738fd1498Szrj }
470838fd1498Szrj else
470938fd1498Szrj {
471038fd1498Szrj emit_insn (normal_call_insns);
471138fd1498Szrj if (try_tail_call)
471238fd1498Szrj /* Ideally we'd emit a message for all of the ways that it could
471338fd1498Szrj have failed. */
471438fd1498Szrj maybe_complain_about_tail_call (exp, "tail call production failed");
471538fd1498Szrj }
471638fd1498Szrj
471738fd1498Szrj currently_expanding_call--;
471838fd1498Szrj
471938fd1498Szrj free (stack_usage_map_buf);
472038fd1498Szrj free (args);
472138fd1498Szrj
472238fd1498Szrj /* Join result with returned bounds so caller may use them if needed. */
472338fd1498Szrj target = chkp_join_splitted_slot (target, valbnd);
472438fd1498Szrj
472538fd1498Szrj return target;
472638fd1498Szrj }
472738fd1498Szrj
472838fd1498Szrj /* A sibling call sequence invalidates any REG_EQUIV notes made for
472938fd1498Szrj this function's incoming arguments.
473038fd1498Szrj
473138fd1498Szrj At the start of RTL generation we know the only REG_EQUIV notes
473238fd1498Szrj in the rtl chain are those for incoming arguments, so we can look
473338fd1498Szrj for REG_EQUIV notes between the start of the function and the
473438fd1498Szrj NOTE_INSN_FUNCTION_BEG.
473538fd1498Szrj
473638fd1498Szrj This is (slight) overkill. We could keep track of the highest
473738fd1498Szrj argument we clobber and be more selective in removing notes, but it
473838fd1498Szrj does not seem to be worth the effort. */
473938fd1498Szrj
474038fd1498Szrj void
474138fd1498Szrj fixup_tail_calls (void)
474238fd1498Szrj {
474338fd1498Szrj rtx_insn *insn;
474438fd1498Szrj
474538fd1498Szrj for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
474638fd1498Szrj {
474738fd1498Szrj rtx note;
474838fd1498Szrj
474938fd1498Szrj /* There are never REG_EQUIV notes for the incoming arguments
475038fd1498Szrj after the NOTE_INSN_FUNCTION_BEG note, so stop if we see it. */
475138fd1498Szrj if (NOTE_P (insn)
475238fd1498Szrj && NOTE_KIND (insn) == NOTE_INSN_FUNCTION_BEG)
475338fd1498Szrj break;
475438fd1498Szrj
475538fd1498Szrj note = find_reg_note (insn, REG_EQUIV, 0);
475638fd1498Szrj if (note)
475738fd1498Szrj remove_note (insn, note);
475838fd1498Szrj note = find_reg_note (insn, REG_EQUIV, 0);
475938fd1498Szrj gcc_assert (!note);
476038fd1498Szrj }
476138fd1498Szrj }
476238fd1498Szrj
476338fd1498Szrj /* Traverse a list of TYPES and expand all complex types into their
476438fd1498Szrj components. */
476538fd1498Szrj static tree
476638fd1498Szrj split_complex_types (tree types)
476738fd1498Szrj {
476838fd1498Szrj tree p;
476938fd1498Szrj
477038fd1498Szrj /* Before allocating memory, check for the common case of no complex. */
477138fd1498Szrj for (p = types; p; p = TREE_CHAIN (p))
477238fd1498Szrj {
477338fd1498Szrj tree type = TREE_VALUE (p);
477438fd1498Szrj if (TREE_CODE (type) == COMPLEX_TYPE
477538fd1498Szrj && targetm.calls.split_complex_arg (type))
477638fd1498Szrj goto found;
477738fd1498Szrj }
477838fd1498Szrj return types;
477938fd1498Szrj
478038fd1498Szrj found:
478138fd1498Szrj types = copy_list (types);
478238fd1498Szrj
478338fd1498Szrj for (p = types; p; p = TREE_CHAIN (p))
478438fd1498Szrj {
478538fd1498Szrj tree complex_type = TREE_VALUE (p);
478638fd1498Szrj
478738fd1498Szrj if (TREE_CODE (complex_type) == COMPLEX_TYPE
478838fd1498Szrj && targetm.calls.split_complex_arg (complex_type))
478938fd1498Szrj {
479038fd1498Szrj tree next, imag;
479138fd1498Szrj
479238fd1498Szrj /* Rewrite complex type with component type. */
479338fd1498Szrj TREE_VALUE (p) = TREE_TYPE (complex_type);
479438fd1498Szrj next = TREE_CHAIN (p);
479538fd1498Szrj
479638fd1498Szrj /* Add another component type for the imaginary part. */
479738fd1498Szrj imag = build_tree_list (NULL_TREE, TREE_VALUE (p));
479838fd1498Szrj TREE_CHAIN (p) = imag;
479938fd1498Szrj TREE_CHAIN (imag) = next;
480038fd1498Szrj
480138fd1498Szrj /* Skip the newly created node. */
480238fd1498Szrj p = TREE_CHAIN (p);
480338fd1498Szrj }
480438fd1498Szrj }
480538fd1498Szrj
480638fd1498Szrj return types;
480738fd1498Szrj }
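/* Example of the transformation above (annotation, not original
   source):

       (complex double, int)  =>  (double, double, int)

   Each COMPLEX_TYPE node the target wants split is rewritten to its
   component type, and a new list node for the imaginary part is
   spliced in immediately after it.  */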
480838fd1498Szrj
480938fd1498Szrj /* Output a library call to function ORGFUN (a SYMBOL_REF rtx)
481038fd1498Szrj for a value of mode OUTMODE,
481138fd1498Szrj with NARGS different arguments, passed as ARGS.
481238fd1498Szrj Store the return value if RETVAL is nonzero: store it in VALUE if
481338fd1498Szrj VALUE is nonnull, otherwise pick a convenient location. In either
481438fd1498Szrj case return the location of the stored value.
481538fd1498Szrj
481638fd1498Szrj FN_TYPE should be LCT_NORMAL for `normal' calls, LCT_CONST for
481738fd1498Szrj `const' calls, LCT_PURE for `pure' calls, or another LCT_ value for
481838fd1498Szrj other types of library calls. */
481938fd1498Szrj
482038fd1498Szrj rtx
482138fd1498Szrj emit_library_call_value_1 (int retval, rtx orgfun, rtx value,
482238fd1498Szrj enum libcall_type fn_type,
482338fd1498Szrj machine_mode outmode, int nargs, rtx_mode_t *args)
482438fd1498Szrj {
482538fd1498Szrj /* Total size in bytes of all the stack-parms scanned so far. */
482638fd1498Szrj struct args_size args_size;
482738fd1498Szrj /* Size of arguments before any adjustments (such as rounding). */
482838fd1498Szrj struct args_size original_args_size;
482938fd1498Szrj int argnum;
483038fd1498Szrj rtx fun;
483138fd1498Szrj /* TODO: choose the correct decl type of orgfun.  Sadly this information
483238fd1498Szrj isn't present here, so we default to the native calling ABI. */
483338fd1498Szrj tree fndecl ATTRIBUTE_UNUSED = NULL_TREE; /* library calls default to host calling abi ? */
483438fd1498Szrj tree fntype ATTRIBUTE_UNUSED = NULL_TREE; /* library calls default to host calling abi ? */
483538fd1498Szrj int count;
483638fd1498Szrj rtx argblock = 0;
483738fd1498Szrj CUMULATIVE_ARGS args_so_far_v;
483838fd1498Szrj cumulative_args_t args_so_far;
483938fd1498Szrj struct arg
484038fd1498Szrj {
484138fd1498Szrj rtx value;
484238fd1498Szrj machine_mode mode;
484338fd1498Szrj rtx reg;
484438fd1498Szrj int partial;
484538fd1498Szrj struct locate_and_pad_arg_data locate;
484638fd1498Szrj rtx save_area;
484738fd1498Szrj };
484838fd1498Szrj struct arg *argvec;
484938fd1498Szrj int old_inhibit_defer_pop = inhibit_defer_pop;
485038fd1498Szrj rtx call_fusage = 0;
485138fd1498Szrj rtx mem_value = 0;
485238fd1498Szrj rtx valreg;
485338fd1498Szrj int pcc_struct_value = 0;
485438fd1498Szrj poly_int64 struct_value_size = 0;
485538fd1498Szrj int flags;
485638fd1498Szrj int reg_parm_stack_space = 0;
485738fd1498Szrj poly_int64 needed;
485838fd1498Szrj rtx_insn *before_call;
485938fd1498Szrj bool have_push_fusage;
486038fd1498Szrj tree tfom; /* type_for_mode (outmode, 0) */
486138fd1498Szrj
486238fd1498Szrj #ifdef REG_PARM_STACK_SPACE
486338fd1498Szrj /* Define the boundary of the register parm stack space that needs to be
486438fd1498Szrj saved, if any. */
486538fd1498Szrj int low_to_save = 0, high_to_save = 0;
486638fd1498Szrj rtx save_area = 0; /* Place that it is saved. */
486738fd1498Szrj #endif
486838fd1498Szrj
486938fd1498Szrj /* Size of the stack reserved for parameter registers. */
487038fd1498Szrj unsigned int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
487138fd1498Szrj char *initial_stack_usage_map = stack_usage_map;
487238fd1498Szrj unsigned HOST_WIDE_INT initial_stack_usage_watermark = stack_usage_watermark;
487338fd1498Szrj char *stack_usage_map_buf = NULL;
487438fd1498Szrj
487538fd1498Szrj rtx struct_value = targetm.calls.struct_value_rtx (0, 0);
487638fd1498Szrj
487738fd1498Szrj #ifdef REG_PARM_STACK_SPACE
487838fd1498Szrj reg_parm_stack_space = REG_PARM_STACK_SPACE ((tree) 0);
487938fd1498Szrj #endif
488038fd1498Szrj
488138fd1498Szrj /* By default, library functions cannot throw. */
488238fd1498Szrj flags = ECF_NOTHROW;
488338fd1498Szrj
488438fd1498Szrj switch (fn_type)
488538fd1498Szrj {
488638fd1498Szrj case LCT_NORMAL:
488738fd1498Szrj break;
488838fd1498Szrj case LCT_CONST:
488938fd1498Szrj flags |= ECF_CONST;
489038fd1498Szrj break;
489138fd1498Szrj case LCT_PURE:
489238fd1498Szrj flags |= ECF_PURE;
489338fd1498Szrj break;
489438fd1498Szrj case LCT_NORETURN:
489538fd1498Szrj flags |= ECF_NORETURN;
489638fd1498Szrj break;
489738fd1498Szrj case LCT_THROW:
489838fd1498Szrj flags &= ~ECF_NOTHROW;
489938fd1498Szrj break;
490038fd1498Szrj case LCT_RETURNS_TWICE:
490138fd1498Szrj flags = ECF_RETURNS_TWICE;
490238fd1498Szrj break;
490338fd1498Szrj }
490438fd1498Szrj fun = orgfun;
490538fd1498Szrj
490638fd1498Szrj /* Ensure current function's preferred stack boundary is at least
490738fd1498Szrj what we need. */
490838fd1498Szrj if (crtl->preferred_stack_boundary < PREFERRED_STACK_BOUNDARY)
490938fd1498Szrj crtl->preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
491038fd1498Szrj
491138fd1498Szrj /* If this kind of value comes back in memory,
491238fd1498Szrj decide where in memory it should come back. */
491338fd1498Szrj if (outmode != VOIDmode)
491438fd1498Szrj {
491538fd1498Szrj tfom = lang_hooks.types.type_for_mode (outmode, 0);
491638fd1498Szrj if (aggregate_value_p (tfom, 0))
491738fd1498Szrj {
491838fd1498Szrj #ifdef PCC_STATIC_STRUCT_RETURN
491938fd1498Szrj rtx pointer_reg
492038fd1498Szrj = hard_function_value (build_pointer_type (tfom), 0, 0, 0);
492138fd1498Szrj mem_value = gen_rtx_MEM (outmode, pointer_reg);
492238fd1498Szrj pcc_struct_value = 1;
492338fd1498Szrj if (value == 0)
492438fd1498Szrj value = gen_reg_rtx (outmode);
492538fd1498Szrj #else /* not PCC_STATIC_STRUCT_RETURN */
492638fd1498Szrj struct_value_size = GET_MODE_SIZE (outmode);
492738fd1498Szrj if (value != 0 && MEM_P (value))
492838fd1498Szrj mem_value = value;
492938fd1498Szrj else
493038fd1498Szrj mem_value = assign_temp (tfom, 1, 1);
493138fd1498Szrj #endif
493238fd1498Szrj /* This call returns a big structure. */
493338fd1498Szrj flags &= ~(ECF_CONST | ECF_PURE | ECF_LOOPING_CONST_OR_PURE);
493438fd1498Szrj }
493538fd1498Szrj }
493638fd1498Szrj else
493738fd1498Szrj tfom = void_type_node;
493838fd1498Szrj
493938fd1498Szrj /* ??? Unfinished: must pass the memory address as an argument. */
494038fd1498Szrj
494138fd1498Szrj /* Copy all the libcall-arguments out of the varargs data
494238fd1498Szrj and into a vector ARGVEC.
494338fd1498Szrj
494438fd1498Szrj Compute how to pass each argument. We only support a very small subset
494538fd1498Szrj of the full argument passing conventions to limit complexity here since
494638fd1498Szrj library functions shouldn't have many args. */
494738fd1498Szrj
494838fd1498Szrj argvec = XALLOCAVEC (struct arg, nargs + 1);
494938fd1498Szrj memset (argvec, 0, (nargs + 1) * sizeof (struct arg));
495038fd1498Szrj
495138fd1498Szrj #ifdef INIT_CUMULATIVE_LIBCALL_ARGS
495238fd1498Szrj INIT_CUMULATIVE_LIBCALL_ARGS (args_so_far_v, outmode, fun);
495338fd1498Szrj #else
495438fd1498Szrj INIT_CUMULATIVE_ARGS (args_so_far_v, NULL_TREE, fun, 0, nargs);
495538fd1498Szrj #endif
495638fd1498Szrj args_so_far = pack_cumulative_args (&args_so_far_v);
495738fd1498Szrj
495838fd1498Szrj args_size.constant = 0;
495938fd1498Szrj args_size.var = 0;
496038fd1498Szrj
496138fd1498Szrj count = 0;
496238fd1498Szrj
496338fd1498Szrj push_temp_slots ();
496438fd1498Szrj
496538fd1498Szrj /* If there's a structure value address to be passed,
496638fd1498Szrj either pass it in the special place, or pass it as an extra argument. */
496738fd1498Szrj if (mem_value && struct_value == 0 && ! pcc_struct_value)
496838fd1498Szrj {
496938fd1498Szrj rtx addr = XEXP (mem_value, 0);
497038fd1498Szrj
497138fd1498Szrj nargs++;
497238fd1498Szrj
497338fd1498Szrj /* Make sure it is a reasonable operand for a move or push insn. */
497438fd1498Szrj if (!REG_P (addr) && !MEM_P (addr)
497538fd1498Szrj && !(CONSTANT_P (addr)
497638fd1498Szrj && targetm.legitimate_constant_p (Pmode, addr)))
497738fd1498Szrj addr = force_operand (addr, NULL_RTX);
497838fd1498Szrj
497938fd1498Szrj argvec[count].value = addr;
498038fd1498Szrj argvec[count].mode = Pmode;
498138fd1498Szrj argvec[count].partial = 0;
498238fd1498Szrj
498338fd1498Szrj argvec[count].reg = targetm.calls.function_arg (args_so_far,
498438fd1498Szrj Pmode, NULL_TREE, true);
498538fd1498Szrj gcc_assert (targetm.calls.arg_partial_bytes (args_so_far, Pmode,
498638fd1498Szrj NULL_TREE, 1) == 0);
498738fd1498Szrj
498838fd1498Szrj locate_and_pad_parm (Pmode, NULL_TREE,
498938fd1498Szrj #ifdef STACK_PARMS_IN_REG_PARM_AREA
499038fd1498Szrj 1,
499138fd1498Szrj #else
499238fd1498Szrj argvec[count].reg != 0,
499338fd1498Szrj #endif
499438fd1498Szrj reg_parm_stack_space, 0,
499538fd1498Szrj NULL_TREE, &args_size, &argvec[count].locate);
499638fd1498Szrj
499738fd1498Szrj if (argvec[count].reg == 0 || argvec[count].partial != 0
499838fd1498Szrj || reg_parm_stack_space > 0)
499938fd1498Szrj args_size.constant += argvec[count].locate.size.constant;
500038fd1498Szrj
500138fd1498Szrj targetm.calls.function_arg_advance (args_so_far, Pmode, (tree) 0, true);
500238fd1498Szrj
500338fd1498Szrj count++;
500438fd1498Szrj }
500538fd1498Szrj
500638fd1498Szrj for (unsigned int i = 0; count < nargs; i++, count++)
500738fd1498Szrj {
500838fd1498Szrj rtx val = args[i].first;
500938fd1498Szrj machine_mode mode = args[i].second;
501038fd1498Szrj int unsigned_p = 0;
501138fd1498Szrj
501238fd1498Szrj /* We cannot convert the arg value to the mode the library wants here;
501338fd1498Szrj we must do it earlier, where we know the signedness of the arg. */
501438fd1498Szrj gcc_assert (mode != BLKmode
501538fd1498Szrj && (GET_MODE (val) == mode || GET_MODE (val) == VOIDmode));
501638fd1498Szrj
501738fd1498Szrj /* Make sure it is a reasonable operand for a move or push insn. */
501838fd1498Szrj if (!REG_P (val) && !MEM_P (val)
501938fd1498Szrj && !(CONSTANT_P (val) && targetm.legitimate_constant_p (mode, val)))
502038fd1498Szrj val = force_operand (val, NULL_RTX);
502138fd1498Szrj
502238fd1498Szrj if (pass_by_reference (&args_so_far_v, mode, NULL_TREE, 1))
502338fd1498Szrj {
502438fd1498Szrj rtx slot;
502538fd1498Szrj int must_copy
502638fd1498Szrj = !reference_callee_copied (&args_so_far_v, mode, NULL_TREE, 1);
502738fd1498Szrj
502838fd1498Szrj /* If this was a CONST function, it is now PURE since it now
502938fd1498Szrj reads memory. */
503038fd1498Szrj if (flags & ECF_CONST)
503138fd1498Szrj {
503238fd1498Szrj flags &= ~ECF_CONST;
503338fd1498Szrj flags |= ECF_PURE;
503438fd1498Szrj }
503538fd1498Szrj
503638fd1498Szrj if (MEM_P (val) && !must_copy)
503738fd1498Szrj {
503838fd1498Szrj tree val_expr = MEM_EXPR (val);
503938fd1498Szrj if (val_expr)
504038fd1498Szrj mark_addressable (val_expr);
504138fd1498Szrj slot = val;
504238fd1498Szrj }
504338fd1498Szrj else
504438fd1498Szrj {
504538fd1498Szrj slot = assign_temp (lang_hooks.types.type_for_mode (mode, 0),
504638fd1498Szrj 1, 1);
504738fd1498Szrj emit_move_insn (slot, val);
504838fd1498Szrj }
504938fd1498Szrj
505038fd1498Szrj call_fusage = gen_rtx_EXPR_LIST (VOIDmode,
505138fd1498Szrj gen_rtx_USE (VOIDmode, slot),
505238fd1498Szrj call_fusage);
505338fd1498Szrj if (must_copy)
505438fd1498Szrj call_fusage = gen_rtx_EXPR_LIST (VOIDmode,
505538fd1498Szrj gen_rtx_CLOBBER (VOIDmode,
505638fd1498Szrj slot),
505738fd1498Szrj call_fusage);
505838fd1498Szrj
505938fd1498Szrj mode = Pmode;
506038fd1498Szrj val = force_operand (XEXP (slot, 0), NULL_RTX);
506138fd1498Szrj }
506238fd1498Szrj
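/* Illustrative note: on targets that promote small integer arguments,
   promote_function_mode below widens e.g. a QImode MODE to word_mode,
   and convert_modes then yields the correspondingly sign- or
   zero-extended VAL.  */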
506338fd1498Szrj mode = promote_function_mode (NULL_TREE, mode, &unsigned_p, NULL_TREE, 0);
506438fd1498Szrj argvec[count].mode = mode;
506538fd1498Szrj argvec[count].value = convert_modes (mode, GET_MODE (val), val, unsigned_p);
506638fd1498Szrj argvec[count].reg = targetm.calls.function_arg (args_so_far, mode,
506738fd1498Szrj NULL_TREE, true);
506838fd1498Szrj
506938fd1498Szrj argvec[count].partial
507038fd1498Szrj = targetm.calls.arg_partial_bytes (args_so_far, mode, NULL_TREE, 1);
507138fd1498Szrj
507238fd1498Szrj if (argvec[count].reg == 0
507338fd1498Szrj || argvec[count].partial != 0
507438fd1498Szrj || reg_parm_stack_space > 0)
507538fd1498Szrj {
507638fd1498Szrj locate_and_pad_parm (mode, NULL_TREE,
507738fd1498Szrj #ifdef STACK_PARMS_IN_REG_PARM_AREA
507838fd1498Szrj 1,
507938fd1498Szrj #else
508038fd1498Szrj argvec[count].reg != 0,
508138fd1498Szrj #endif
508238fd1498Szrj reg_parm_stack_space, argvec[count].partial,
508338fd1498Szrj NULL_TREE, &args_size, &argvec[count].locate);
508438fd1498Szrj args_size.constant += argvec[count].locate.size.constant;
508538fd1498Szrj gcc_assert (!argvec[count].locate.size.var);
508638fd1498Szrj }
508738fd1498Szrj #ifdef BLOCK_REG_PADDING
508838fd1498Szrj else
508938fd1498Szrj /* The argument is passed entirely in registers. See at which
509038fd1498Szrj end it should be padded. */
509138fd1498Szrj argvec[count].locate.where_pad =
509238fd1498Szrj BLOCK_REG_PADDING (mode, NULL_TREE,
509338fd1498Szrj known_le (GET_MODE_SIZE (mode), UNITS_PER_WORD));
509438fd1498Szrj #endif
509538fd1498Szrj
509638fd1498Szrj targetm.calls.function_arg_advance (args_so_far, mode, (tree) 0, true);
509738fd1498Szrj }
509838fd1498Szrj
509938fd1498Szrj /* If this machine requires an external definition for library
510038fd1498Szrj functions, write one out. */
510138fd1498Szrj assemble_external_libcall (fun);
510238fd1498Szrj
510338fd1498Szrj original_args_size = args_size;
510438fd1498Szrj args_size.constant = (aligned_upper_bound (args_size.constant
510538fd1498Szrj + stack_pointer_delta,
510638fd1498Szrj STACK_BYTES)
510738fd1498Szrj - stack_pointer_delta);
510838fd1498Szrj
510938fd1498Szrj args_size.constant = upper_bound (args_size.constant,
511038fd1498Szrj reg_parm_stack_space);
511138fd1498Szrj
511238fd1498Szrj if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl))))
511338fd1498Szrj args_size.constant -= reg_parm_stack_space;
511438fd1498Szrj
511538fd1498Szrj crtl->outgoing_args_size = upper_bound (crtl->outgoing_args_size,
511638fd1498Szrj args_size.constant);
511738fd1498Szrj
511838fd1498Szrj if (flag_stack_usage_info && !ACCUMULATE_OUTGOING_ARGS)
511938fd1498Szrj {
512038fd1498Szrj poly_int64 pushed = args_size.constant + pending_stack_adjust;
512138fd1498Szrj current_function_pushed_stack_size
512238fd1498Szrj = upper_bound (current_function_pushed_stack_size, pushed);
512338fd1498Szrj }
512438fd1498Szrj
512538fd1498Szrj if (ACCUMULATE_OUTGOING_ARGS)
512638fd1498Szrj {
512738fd1498Szrj /* Since the stack pointer will never be pushed, it is possible for
512838fd1498Szrj the evaluation of a parm to clobber something we have already
512938fd1498Szrj written to the stack. Since most function calls on RISC machines
513038fd1498Szrj do not use the stack, this is uncommon, but must work correctly.
513138fd1498Szrj
513238fd1498Szrj Therefore, we save any area of the stack that was already written
513338fd1498Szrj and that we are using. Here we set up to do this by making a new
513438fd1498Szrj stack usage map from the old one.
513538fd1498Szrj
513638fd1498Szrj Another approach might be to try to reorder the argument
513738fd1498Szrj evaluations to avoid this conflicting stack usage. */
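
/* Illustrative hazard: once one argument has been stored at its slot,
   computing a later argument may itself involve a call, and that
   inner call can reuse the same outgoing-area bytes for its own
   arguments; the save areas set up below protect any already-written
   bytes.  */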
513838fd1498Szrj
513938fd1498Szrj needed = args_size.constant;
514038fd1498Szrj
514138fd1498Szrj /* Since we will be writing into the entire argument area, the
514238fd1498Szrj map must be allocated for its entire size, not just the part that
514338fd1498Szrj is the responsibility of the caller. */
514438fd1498Szrj if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl))))
514538fd1498Szrj needed += reg_parm_stack_space;
514638fd1498Szrj
514738fd1498Szrj poly_int64 limit = needed;
514838fd1498Szrj if (ARGS_GROW_DOWNWARD)
514938fd1498Szrj limit += 1;
515038fd1498Szrj
515138fd1498Szrj /* For polynomial sizes, this is the maximum possible size needed
515238fd1498Szrj for arguments with a constant size and offset. */
515338fd1498Szrj HOST_WIDE_INT const_limit = constant_lower_bound (limit);
515438fd1498Szrj highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
515538fd1498Szrj const_limit);
515638fd1498Szrj
515738fd1498Szrj stack_usage_map_buf = XNEWVEC (char, highest_outgoing_arg_in_use);
515838fd1498Szrj stack_usage_map = stack_usage_map_buf;
515938fd1498Szrj
516038fd1498Szrj if (initial_highest_arg_in_use)
516138fd1498Szrj memcpy (stack_usage_map, initial_stack_usage_map,
516238fd1498Szrj initial_highest_arg_in_use);
516338fd1498Szrj
516438fd1498Szrj if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
516538fd1498Szrj memset (&stack_usage_map[initial_highest_arg_in_use], 0,
516638fd1498Szrj highest_outgoing_arg_in_use - initial_highest_arg_in_use);
516738fd1498Szrj needed = 0;
516838fd1498Szrj
516938fd1498Szrj /* We must be careful to use virtual regs before they're instantiated,
517038fd1498Szrj and real regs afterwards. Loop optimization, for example, can create
517138fd1498Szrj new libcalls after we've instantiated the virtual regs, and if we
517238fd1498Szrj use virtuals anyway, they won't match the rtl patterns. */
517338fd1498Szrj
517438fd1498Szrj if (virtuals_instantiated)
517538fd1498Szrj argblock = plus_constant (Pmode, stack_pointer_rtx,
517638fd1498Szrj STACK_POINTER_OFFSET);
517738fd1498Szrj else
517838fd1498Szrj argblock = virtual_outgoing_args_rtx;
517938fd1498Szrj }
518038fd1498Szrj else
518138fd1498Szrj {
518238fd1498Szrj if (!PUSH_ARGS)
518338fd1498Szrj argblock = push_block (gen_int_mode (args_size.constant, Pmode), 0, 0);
518438fd1498Szrj }
518538fd1498Szrj
518638fd1498Szrj /* We push args individually in reverse order; perform stack alignment
518738fd1498Szrj before the first push (the last arg). */
518838fd1498Szrj if (argblock == 0)
518938fd1498Szrj anti_adjust_stack (gen_int_mode (args_size.constant
519038fd1498Szrj - original_args_size.constant,
519138fd1498Szrj Pmode));
519238fd1498Szrj
519338fd1498Szrj argnum = nargs - 1;
519438fd1498Szrj
519538fd1498Szrj #ifdef REG_PARM_STACK_SPACE
519638fd1498Szrj if (ACCUMULATE_OUTGOING_ARGS)
519738fd1498Szrj {
519838fd1498Szrj /* The argument list is the property of the called routine and it
519938fd1498Szrj may clobber it. If the fixed area has been used for previous
520038fd1498Szrj parameters, we must save and restore it. */
520138fd1498Szrj save_area = save_fixed_argument_area (reg_parm_stack_space, argblock,
520238fd1498Szrj &low_to_save, &high_to_save);
520338fd1498Szrj }
520438fd1498Szrj #endif
520538fd1498Szrj
520638fd1498Szrj /* When expanding a normal call, args are stored in push order,
520738fd1498Szrj which is the reverse of what we have here. */
520838fd1498Szrj bool any_regs = false;
520938fd1498Szrj for (int i = nargs; i-- > 0; )
521038fd1498Szrj if (argvec[i].reg != NULL_RTX)
521138fd1498Szrj {
521238fd1498Szrj targetm.calls.call_args (argvec[i].reg, NULL_TREE);
521338fd1498Szrj any_regs = true;
521438fd1498Szrj }
521538fd1498Szrj if (!any_regs)
521638fd1498Szrj targetm.calls.call_args (pc_rtx, NULL_TREE);
521738fd1498Szrj
521838fd1498Szrj /* Push the args that need to be pushed. */
521938fd1498Szrj
522038fd1498Szrj have_push_fusage = false;
522138fd1498Szrj
522238fd1498Szrj /* ARGNUM indexes the ARGVEC array in the order in which the arguments
522338fd1498Szrj are to be pushed. */
522438fd1498Szrj for (count = 0; count < nargs; count++, argnum--)
522538fd1498Szrj {
522638fd1498Szrj machine_mode mode = argvec[argnum].mode;
522738fd1498Szrj rtx val = argvec[argnum].value;
522838fd1498Szrj rtx reg = argvec[argnum].reg;
522938fd1498Szrj int partial = argvec[argnum].partial;
523038fd1498Szrj unsigned int parm_align = argvec[argnum].locate.boundary;
523138fd1498Szrj poly_int64 lower_bound = 0, upper_bound = 0;
523238fd1498Szrj
523338fd1498Szrj if (! (reg != 0 && partial == 0))
523438fd1498Szrj {
523538fd1498Szrj rtx use;
523638fd1498Szrj
523738fd1498Szrj if (ACCUMULATE_OUTGOING_ARGS)
523838fd1498Szrj {
523938fd1498Szrj /* If this is being stored into a pre-allocated, fixed-size,
524038fd1498Szrj stack area, save any previous data at that location. */
524138fd1498Szrj
524238fd1498Szrj if (ARGS_GROW_DOWNWARD)
524338fd1498Szrj {
524438fd1498Szrj /* stack_slot is negative, but we want to index stack_usage_map
524538fd1498Szrj with positive values. */
524638fd1498Szrj upper_bound = -argvec[argnum].locate.slot_offset.constant + 1;
524738fd1498Szrj lower_bound = upper_bound - argvec[argnum].locate.size.constant;
524838fd1498Szrj }
524938fd1498Szrj else
525038fd1498Szrj {
525138fd1498Szrj lower_bound = argvec[argnum].locate.slot_offset.constant;
525238fd1498Szrj upper_bound = lower_bound + argvec[argnum].locate.size.constant;
525338fd1498Szrj }
525438fd1498Szrj
525538fd1498Szrj if (stack_region_maybe_used_p (lower_bound, upper_bound,
525638fd1498Szrj reg_parm_stack_space))
525738fd1498Szrj {
525838fd1498Szrj /* We need to make a save area. */
525938fd1498Szrj poly_uint64 size
526038fd1498Szrj = argvec[argnum].locate.size.constant * BITS_PER_UNIT;
526138fd1498Szrj machine_mode save_mode
526238fd1498Szrj = int_mode_for_size (size, 1).else_blk ();
526338fd1498Szrj rtx adr
526438fd1498Szrj = plus_constant (Pmode, argblock,
526538fd1498Szrj argvec[argnum].locate.offset.constant);
526638fd1498Szrj rtx stack_area
526738fd1498Szrj = gen_rtx_MEM (save_mode, memory_address (save_mode, adr));
526838fd1498Szrj
526938fd1498Szrj if (save_mode == BLKmode)
527038fd1498Szrj {
527138fd1498Szrj argvec[argnum].save_area
527238fd1498Szrj = assign_stack_temp (BLKmode,
527338fd1498Szrj argvec[argnum].locate.size.constant);
527538fd1498Szrj
527638fd1498Szrj emit_block_move (validize_mem
527738fd1498Szrj (copy_rtx (argvec[argnum].save_area)),
527838fd1498Szrj stack_area,
527938fd1498Szrj (gen_int_mode
528038fd1498Szrj (argvec[argnum].locate.size.constant,
528138fd1498Szrj Pmode)),
528238fd1498Szrj BLOCK_OP_CALL_PARM);
528338fd1498Szrj }
528438fd1498Szrj else
528538fd1498Szrj {
528638fd1498Szrj argvec[argnum].save_area = gen_reg_rtx (save_mode);
528738fd1498Szrj
528838fd1498Szrj emit_move_insn (argvec[argnum].save_area, stack_area);
528938fd1498Szrj }
529038fd1498Szrj }
529138fd1498Szrj }
529238fd1498Szrj
529338fd1498Szrj emit_push_insn (val, mode, NULL_TREE, NULL_RTX, parm_align,
529438fd1498Szrj partial, reg, 0, argblock,
529538fd1498Szrj (gen_int_mode
529638fd1498Szrj (argvec[argnum].locate.offset.constant, Pmode)),
529738fd1498Szrj reg_parm_stack_space,
529838fd1498Szrj ARGS_SIZE_RTX (argvec[argnum].locate.alignment_pad), false);
529938fd1498Szrj
530038fd1498Szrj /* Now mark the segment we just used. */
530138fd1498Szrj if (ACCUMULATE_OUTGOING_ARGS)
530238fd1498Szrj mark_stack_region_used (lower_bound, upper_bound);
530338fd1498Szrj
530438fd1498Szrj NO_DEFER_POP;
530538fd1498Szrj
530638fd1498Szrj /* Indicate argument access so that alias.c knows that these
530738fd1498Szrj values are live. */
530838fd1498Szrj if (argblock)
530938fd1498Szrj use = plus_constant (Pmode, argblock,
531038fd1498Szrj argvec[argnum].locate.offset.constant);
531138fd1498Szrj else if (have_push_fusage)
531238fd1498Szrj continue;
531338fd1498Szrj else
531438fd1498Szrj {
531538fd1498Szrj /* When arguments are pushed, trying to tell alias.c where
531638fd1498Szrj exactly this argument is won't work, because the
531738fd1498Szrj auto-increment causes confusion. So we merely indicate
531838fd1498Szrj that we access something with a known mode somewhere on
531938fd1498Szrj the stack. */
532038fd1498Szrj use = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
532138fd1498Szrj gen_rtx_SCRATCH (Pmode));
532238fd1498Szrj have_push_fusage = true;
532338fd1498Szrj }
532438fd1498Szrj use = gen_rtx_MEM (argvec[argnum].mode, use);
532538fd1498Szrj use = gen_rtx_USE (VOIDmode, use);
532638fd1498Szrj call_fusage = gen_rtx_EXPR_LIST (VOIDmode, use, call_fusage);
532738fd1498Szrj }
532838fd1498Szrj }
532938fd1498Szrj
533038fd1498Szrj argnum = nargs - 1;
533138fd1498Szrj
533238fd1498Szrj fun = prepare_call_address (NULL, fun, NULL, &call_fusage, 0, 0);
533338fd1498Szrj
533438fd1498Szrj /* Now load any reg parms into their regs. */
533538fd1498Szrj
533638fd1498Szrj /* ARGNUM indexes the ARGVEC array in the order in which the arguments
533738fd1498Szrj are to be pushed. */
533838fd1498Szrj for (count = 0; count < nargs; count++, argnum--)
533938fd1498Szrj {
534038fd1498Szrj machine_mode mode = argvec[argnum].mode;
534138fd1498Szrj rtx val = argvec[argnum].value;
534238fd1498Szrj rtx reg = argvec[argnum].reg;
534338fd1498Szrj int partial = argvec[argnum].partial;
534438fd1498Szrj
534538fd1498Szrj /* Handle calls that pass values in multiple non-contiguous
534638fd1498Szrj locations. The PA64 has examples of this for library calls. */
534738fd1498Szrj if (reg != 0 && GET_CODE (reg) == PARALLEL)
534838fd1498Szrj emit_group_load (reg, val, NULL_TREE, GET_MODE_SIZE (mode));
534938fd1498Szrj else if (reg != 0 && partial == 0)
535038fd1498Szrj {
535138fd1498Szrj emit_move_insn (reg, val);
535238fd1498Szrj #ifdef BLOCK_REG_PADDING
535338fd1498Szrj poly_int64 size = GET_MODE_SIZE (argvec[argnum].mode);
535438fd1498Szrj
535538fd1498Szrj /* Copied from load_register_parameters. */
535638fd1498Szrj
535738fd1498Szrj /* Handle the case where we have a value that needs shifting
535838fd1498Szrj up to the msb, e.g. a QImode value being padded
535938fd1498Szrj upward on a BYTES_BIG_ENDIAN machine. */
536038fd1498Szrj if (known_lt (size, UNITS_PER_WORD)
536138fd1498Szrj && (argvec[argnum].locate.where_pad
536238fd1498Szrj == (BYTES_BIG_ENDIAN ? PAD_UPWARD : PAD_DOWNWARD)))
536338fd1498Szrj {
536438fd1498Szrj rtx x;
536538fd1498Szrj poly_int64 shift = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
536638fd1498Szrj
536738fd1498Szrj /* Assigning REG here rather than a temp makes CALL_FUSAGE
536838fd1498Szrj report the whole reg as used. Strictly speaking, the
536938fd1498Szrj call only uses SIZE bytes at the msb end, but it doesn't
537038fd1498Szrj seem worth generating rtl to say that. */
537138fd1498Szrj reg = gen_rtx_REG (word_mode, REGNO (reg));
537238fd1498Szrj x = expand_shift (LSHIFT_EXPR, word_mode, reg, shift, reg, 1);
537338fd1498Szrj if (x != reg)
537438fd1498Szrj emit_move_insn (reg, x);
537538fd1498Szrj }
537638fd1498Szrj #endif
537738fd1498Szrj }
537838fd1498Szrj
537938fd1498Szrj NO_DEFER_POP;
538038fd1498Szrj }
538138fd1498Szrj
538238fd1498Szrj /* Any regs containing parms remain in use through the call. */
538338fd1498Szrj for (count = 0; count < nargs; count++)
538438fd1498Szrj {
538538fd1498Szrj rtx reg = argvec[count].reg;
538638fd1498Szrj if (reg != 0 && GET_CODE (reg) == PARALLEL)
538738fd1498Szrj use_group_regs (&call_fusage, reg);
538838fd1498Szrj else if (reg != 0)
538938fd1498Szrj {
539038fd1498Szrj int partial = argvec[count].partial;
539138fd1498Szrj if (partial)
539238fd1498Szrj {
539338fd1498Szrj int nregs;
539438fd1498Szrj gcc_assert (partial % UNITS_PER_WORD == 0);
539538fd1498Szrj nregs = partial / UNITS_PER_WORD;
539638fd1498Szrj use_regs (&call_fusage, REGNO (reg), nregs);
539738fd1498Szrj }
539838fd1498Szrj else
539938fd1498Szrj use_reg (&call_fusage, reg);
540038fd1498Szrj }
540138fd1498Szrj }
540238fd1498Szrj
540338fd1498Szrj /* Pass the function the address in which to return a structure value. */
540438fd1498Szrj if (mem_value != 0 && struct_value != 0 && ! pcc_struct_value)
540538fd1498Szrj {
540638fd1498Szrj emit_move_insn (struct_value,
540738fd1498Szrj force_reg (Pmode,
540838fd1498Szrj force_operand (XEXP (mem_value, 0),
540938fd1498Szrj NULL_RTX)));
541038fd1498Szrj if (REG_P (struct_value))
541138fd1498Szrj use_reg (&call_fusage, struct_value);
541238fd1498Szrj }
541338fd1498Szrj
541438fd1498Szrj /* Don't allow popping to be deferred, since then
541538fd1498Szrj cse'ing of library calls could delete a call and leave the pop. */
541638fd1498Szrj NO_DEFER_POP;
541738fd1498Szrj valreg = (mem_value == 0 && outmode != VOIDmode
541838fd1498Szrj ? hard_libcall_value (outmode, orgfun) : NULL_RTX);
541938fd1498Szrj
542038fd1498Szrj /* Stack must be properly aligned now. */
542138fd1498Szrj gcc_assert (multiple_p (stack_pointer_delta,
542238fd1498Szrj PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT));
542338fd1498Szrj
542438fd1498Szrj before_call = get_last_insn ();
542538fd1498Szrj
542638fd1498Szrj /* We pass the old value of inhibit_defer_pop + 1 to emit_call_1, which
542738fd1498Szrj will set inhibit_defer_pop to that value. */
542838fd1498Szrj /* The return type is needed to decide how many bytes the function pops.
542938fd1498Szrj Signedness plays no role in that, so for simplicity, we pretend it's
543038fd1498Szrj always signed. We also assume that the list of arguments passed has
543138fd1498Szrj no impact, so we pretend it is unknown. */
543238fd1498Szrj
543338fd1498Szrj emit_call_1 (fun, NULL,
543438fd1498Szrj get_identifier (XSTR (orgfun, 0)),
543538fd1498Szrj build_function_type (tfom, NULL_TREE),
543638fd1498Szrj original_args_size.constant, args_size.constant,
543738fd1498Szrj struct_value_size,
543838fd1498Szrj targetm.calls.function_arg (args_so_far,
543938fd1498Szrj VOIDmode, void_type_node, true),
544038fd1498Szrj valreg,
544138fd1498Szrj old_inhibit_defer_pop + 1, call_fusage, flags, args_so_far);
544238fd1498Szrj
544338fd1498Szrj if (flag_ipa_ra)
544438fd1498Szrj {
544538fd1498Szrj rtx datum = orgfun;
544638fd1498Szrj gcc_assert (GET_CODE (datum) == SYMBOL_REF);
544738fd1498Szrj rtx_call_insn *last = last_call_insn ();
544838fd1498Szrj add_reg_note (last, REG_CALL_DECL, datum);
544938fd1498Szrj }
545038fd1498Szrj
545138fd1498Szrj /* Right-shift returned value if necessary. */
545238fd1498Szrj if (!pcc_struct_value
545338fd1498Szrj && TYPE_MODE (tfom) != BLKmode
545438fd1498Szrj && targetm.calls.return_in_msb (tfom))
545538fd1498Szrj {
545638fd1498Szrj shift_return_value (TYPE_MODE (tfom), false, valreg);
545738fd1498Szrj valreg = gen_rtx_REG (TYPE_MODE (tfom), REGNO (valreg));
545838fd1498Szrj }
545938fd1498Szrj
546038fd1498Szrj targetm.calls.end_call_args ();
546138fd1498Szrj
546238fd1498Szrj /* For calls to `setjmp', etc., inform function.c:setjmp_warnings
546338fd1498Szrj that it should complain if nonvolatile values are live. For
546438fd1498Szrj functions that cannot return, inform flow that control does not
546538fd1498Szrj fall through. */
546638fd1498Szrj if (flags & ECF_NORETURN)
546738fd1498Szrj {
546838fd1498Szrj /* The barrier note must be emitted
546938fd1498Szrj immediately after the CALL_INSN. Some ports emit more than
547038fd1498Szrj just a CALL_INSN above, so we must search for it here. */
547138fd1498Szrj rtx_insn *last = get_last_insn ();
547238fd1498Szrj while (!CALL_P (last))
547338fd1498Szrj {
547438fd1498Szrj last = PREV_INSN (last);
547538fd1498Szrj /* There was no CALL_INSN? */
547638fd1498Szrj gcc_assert (last != before_call);
547738fd1498Szrj }
547838fd1498Szrj
547938fd1498Szrj emit_barrier_after (last);
548038fd1498Szrj }
548138fd1498Szrj
548238fd1498Szrj /* Consider that "regular" libcalls, i.e. all of them except for LCT_THROW
548338fd1498Szrj and LCT_RETURNS_TWICE, cannot perform non-local gotos. */
548438fd1498Szrj if (flags & ECF_NOTHROW)
548538fd1498Szrj {
548638fd1498Szrj rtx_insn *last = get_last_insn ();
548738fd1498Szrj while (!CALL_P (last))
548838fd1498Szrj {
548938fd1498Szrj last = PREV_INSN (last);
549038fd1498Szrj /* There was no CALL_INSN? */
549138fd1498Szrj gcc_assert (last != before_call);
549238fd1498Szrj }
549338fd1498Szrj
549438fd1498Szrj make_reg_eh_region_note_nothrow_nononlocal (last);
549538fd1498Szrj }
549638fd1498Szrj
549738fd1498Szrj /* Now restore inhibit_defer_pop to its actual original value. */
549838fd1498Szrj OK_DEFER_POP;
549938fd1498Szrj
550038fd1498Szrj pop_temp_slots ();
550138fd1498Szrj
550238fd1498Szrj /* Copy the value to the right place. */
550338fd1498Szrj if (outmode != VOIDmode && retval)
550438fd1498Szrj {
550538fd1498Szrj if (mem_value)
550638fd1498Szrj {
550738fd1498Szrj if (value == 0)
550838fd1498Szrj value = mem_value;
550938fd1498Szrj if (value != mem_value)
551038fd1498Szrj emit_move_insn (value, mem_value);
551138fd1498Szrj }
551238fd1498Szrj else if (GET_CODE (valreg) == PARALLEL)
551338fd1498Szrj {
551438fd1498Szrj if (value == 0)
551538fd1498Szrj value = gen_reg_rtx (outmode);
551638fd1498Szrj emit_group_store (value, valreg, NULL_TREE, GET_MODE_SIZE (outmode));
551738fd1498Szrj }
551838fd1498Szrj else
551938fd1498Szrj {
552038fd1498Szrj /* Convert to the proper mode if a promotion has been active. */
552138fd1498Szrj if (GET_MODE (valreg) != outmode)
552238fd1498Szrj {
552338fd1498Szrj int unsignedp = TYPE_UNSIGNED (tfom);
552438fd1498Szrj
552538fd1498Szrj gcc_assert (promote_function_mode (tfom, outmode, &unsignedp,
552638fd1498Szrj fndecl ? TREE_TYPE (fndecl) : fntype, 1)
552738fd1498Szrj == GET_MODE (valreg));
552838fd1498Szrj valreg = convert_modes (outmode, GET_MODE (valreg), valreg, 0);
552938fd1498Szrj }
553038fd1498Szrj
553138fd1498Szrj if (value != 0)
553238fd1498Szrj emit_move_insn (value, valreg);
553338fd1498Szrj else
553438fd1498Szrj value = valreg;
553538fd1498Szrj }
553638fd1498Szrj }
553738fd1498Szrj
553838fd1498Szrj if (ACCUMULATE_OUTGOING_ARGS)
553938fd1498Szrj {
554038fd1498Szrj #ifdef REG_PARM_STACK_SPACE
554138fd1498Szrj if (save_area)
554238fd1498Szrj restore_fixed_argument_area (save_area, argblock,
554338fd1498Szrj high_to_save, low_to_save);
554438fd1498Szrj #endif
554538fd1498Szrj
554638fd1498Szrj /* If we saved any argument areas, restore them. */
554738fd1498Szrj for (count = 0; count < nargs; count++)
554838fd1498Szrj if (argvec[count].save_area)
554938fd1498Szrj {
555038fd1498Szrj machine_mode save_mode = GET_MODE (argvec[count].save_area);
555138fd1498Szrj rtx adr = plus_constant (Pmode, argblock,
555238fd1498Szrj argvec[count].locate.offset.constant);
555338fd1498Szrj rtx stack_area = gen_rtx_MEM (save_mode,
555438fd1498Szrj memory_address (save_mode, adr));
555538fd1498Szrj
555638fd1498Szrj if (save_mode == BLKmode)
555738fd1498Szrj emit_block_move (stack_area,
555838fd1498Szrj validize_mem
555938fd1498Szrj (copy_rtx (argvec[count].save_area)),
556038fd1498Szrj (gen_int_mode
556138fd1498Szrj (argvec[count].locate.size.constant, Pmode)),
556238fd1498Szrj BLOCK_OP_CALL_PARM);
556338fd1498Szrj else
556438fd1498Szrj emit_move_insn (stack_area, argvec[count].save_area);
556538fd1498Szrj }
556638fd1498Szrj
556738fd1498Szrj highest_outgoing_arg_in_use = initial_highest_arg_in_use;
556838fd1498Szrj stack_usage_map = initial_stack_usage_map;
556938fd1498Szrj stack_usage_watermark = initial_stack_usage_watermark;
557038fd1498Szrj }
557138fd1498Szrj
557238fd1498Szrj free (stack_usage_map_buf);
557338fd1498Szrj
557438fd1498Szrj return value;
557638fd1498Szrj }
557738fd1498Szrj
557838fd1498Szrj
557938fd1498Szrj /* Store pointer bounds argument ARG into Bounds Table entry
558038fd1498Szrj associated with PARM. */
558138fd1498Szrj static void
558238fd1498Szrj store_bounds (struct arg_data *arg, struct arg_data *parm)
558338fd1498Szrj {
558438fd1498Szrj rtx slot = NULL, ptr = NULL, addr = NULL;
558538fd1498Szrj
558638fd1498Szrj /* We may pass bounds not associated with any pointer. */
558738fd1498Szrj if (!parm)
558838fd1498Szrj {
558938fd1498Szrj gcc_assert (arg->special_slot);
559038fd1498Szrj slot = arg->special_slot;
559138fd1498Szrj ptr = const0_rtx;
559238fd1498Szrj }
559338fd1498Szrj /* Find pointer associated with bounds and where it is
559438fd1498Szrj passed. */
559538fd1498Szrj else
559638fd1498Szrj {
559738fd1498Szrj if (!parm->reg)
559838fd1498Szrj {
559938fd1498Szrj gcc_assert (!arg->special_slot);
560038fd1498Szrj
560138fd1498Szrj addr = adjust_address (parm->stack, Pmode, arg->pointer_offset);
560238fd1498Szrj }
560338fd1498Szrj else if (REG_P (parm->reg))
560438fd1498Szrj {
560538fd1498Szrj gcc_assert (arg->special_slot);
560638fd1498Szrj slot = arg->special_slot;
560738fd1498Szrj
560838fd1498Szrj if (MEM_P (parm->value))
560938fd1498Szrj addr = adjust_address (parm->value, Pmode, arg->pointer_offset);
561038fd1498Szrj else if (REG_P (parm->value))
561138fd1498Szrj ptr = gen_rtx_SUBREG (Pmode, parm->value, arg->pointer_offset);
561238fd1498Szrj else
561338fd1498Szrj {
561438fd1498Szrj gcc_assert (!arg->pointer_offset);
561538fd1498Szrj ptr = parm->value;
561638fd1498Szrj }
561738fd1498Szrj }
561838fd1498Szrj else
561938fd1498Szrj {
562038fd1498Szrj gcc_assert (GET_CODE (parm->reg) == PARALLEL);
562138fd1498Szrj
562238fd1498Szrj gcc_assert (arg->special_slot);
562338fd1498Szrj slot = arg->special_slot;
562438fd1498Szrj
562538fd1498Szrj if (parm->parallel_value)
562638fd1498Szrj ptr = chkp_get_value_with_offs (parm->parallel_value,
562738fd1498Szrj GEN_INT (arg->pointer_offset));
562838fd1498Szrj else
562938fd1498Szrj gcc_unreachable ();
563038fd1498Szrj }
563138fd1498Szrj }
563238fd1498Szrj
563338fd1498Szrj /* Expand bounds. */
563438fd1498Szrj if (!arg->value)
563538fd1498Szrj arg->value = expand_normal (arg->tree_value);
563638fd1498Szrj
563738fd1498Szrj targetm.calls.store_bounds_for_arg (ptr, addr, arg->value, slot);
563838fd1498Szrj }
563938fd1498Szrj
564038fd1498Szrj /* Store a single argument for a function call
564138fd1498Szrj into the register or memory area where it must be passed.
564238fd1498Szrj *ARG describes the argument value and where to pass it.
564338fd1498Szrj
564438fd1498Szrj ARGBLOCK is the address of the stack-block for all the arguments,
564538fd1498Szrj or 0 on a machine where arguments are pushed individually.
564638fd1498Szrj
564738fd1498Szrj MAY_BE_ALLOCA nonzero says this could be a call to `alloca'
564838fd1498Szrj so we must be careful about how the stack is used. */
564938fd1498Szrj
565038fd1498Szrj VARIABLE_SIZE nonzero says that this was a variable-sized outgoing
565138fd1498Szrj argument stack. This is used if ACCUMULATE_OUTGOING_ARGS to indicate
565238fd1498Szrj that we need not worry about saving and restoring the stack.
565338fd1498Szrj
565438fd1498Szrj FNDECL is the declaration of the function we are calling.
565538fd1498Szrj
565638fd1498Szrj Return nonzero if this arg should cause sibcall failure,
565738fd1498Szrj zero otherwise. */
565838fd1498Szrj
565938fd1498Szrj static int
566038fd1498Szrj store_one_arg (struct arg_data *arg, rtx argblock, int flags,
566138fd1498Szrj int variable_size ATTRIBUTE_UNUSED, int reg_parm_stack_space)
566238fd1498Szrj {
566338fd1498Szrj tree pval = arg->tree_value;
566438fd1498Szrj rtx reg = 0;
566538fd1498Szrj int partial = 0;
566638fd1498Szrj poly_int64 used = 0;
566738fd1498Szrj poly_int64 lower_bound = 0, upper_bound = 0;
566838fd1498Szrj int sibcall_failure = 0;
566938fd1498Szrj
567038fd1498Szrj if (TREE_CODE (pval) == ERROR_MARK)
567138fd1498Szrj return 1;
567238fd1498Szrj
567338fd1498Szrj /* Push a new temporary level for any temporaries we make for
567438fd1498Szrj this argument. */
567538fd1498Szrj push_temp_slots ();
567638fd1498Szrj
567738fd1498Szrj if (ACCUMULATE_OUTGOING_ARGS && !(flags & ECF_SIBCALL))
567838fd1498Szrj {
567938fd1498Szrj /* If this is being stored into a pre-allocated, fixed-size, stack area,
568038fd1498Szrj save any previous data at that location. */
568138fd1498Szrj if (argblock && ! variable_size && arg->stack)
568238fd1498Szrj {
568338fd1498Szrj if (ARGS_GROW_DOWNWARD)
568438fd1498Szrj {
568538fd1498Szrj /* stack_slot is negative, but we want to index stack_usage_map
568638fd1498Szrj with positive values. */
568738fd1498Szrj if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
568838fd1498Szrj {
568938fd1498Szrj rtx offset = XEXP (XEXP (arg->stack_slot, 0), 1);
569038fd1498Szrj upper_bound = -rtx_to_poly_int64 (offset) + 1;
569138fd1498Szrj }
569238fd1498Szrj else
569338fd1498Szrj upper_bound = 0;
569438fd1498Szrj
569538fd1498Szrj lower_bound = upper_bound - arg->locate.size.constant;
569638fd1498Szrj }
569738fd1498Szrj else
569838fd1498Szrj {
569938fd1498Szrj if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
570038fd1498Szrj {
570138fd1498Szrj rtx offset = XEXP (XEXP (arg->stack_slot, 0), 1);
570238fd1498Szrj lower_bound = rtx_to_poly_int64 (offset);
570338fd1498Szrj }
570438fd1498Szrj else
570538fd1498Szrj lower_bound = 0;
570638fd1498Szrj
570738fd1498Szrj upper_bound = lower_bound + arg->locate.size.constant;
570838fd1498Szrj }
570938fd1498Szrj
571038fd1498Szrj if (stack_region_maybe_used_p (lower_bound, upper_bound,
571138fd1498Szrj reg_parm_stack_space))
571238fd1498Szrj {
571338fd1498Szrj /* We need to make a save area. */
571438fd1498Szrj poly_uint64 size = arg->locate.size.constant * BITS_PER_UNIT;
571538fd1498Szrj machine_mode save_mode
571638fd1498Szrj = int_mode_for_size (size, 1).else_blk ();
571738fd1498Szrj rtx adr = memory_address (save_mode, XEXP (arg->stack_slot, 0));
571838fd1498Szrj rtx stack_area = gen_rtx_MEM (save_mode, adr);
571938fd1498Szrj
572038fd1498Szrj if (save_mode == BLKmode)
572138fd1498Szrj {
572238fd1498Szrj arg->save_area
572338fd1498Szrj = assign_temp (TREE_TYPE (arg->tree_value), 1, 1);
572438fd1498Szrj preserve_temp_slots (arg->save_area);
572538fd1498Szrj emit_block_move (validize_mem (copy_rtx (arg->save_area)),
572638fd1498Szrj stack_area,
572738fd1498Szrj (gen_int_mode
572838fd1498Szrj (arg->locate.size.constant, Pmode)),
572938fd1498Szrj BLOCK_OP_CALL_PARM);
573038fd1498Szrj }
573138fd1498Szrj else
573238fd1498Szrj {
573338fd1498Szrj arg->save_area = gen_reg_rtx (save_mode);
573438fd1498Szrj emit_move_insn (arg->save_area, stack_area);
573538fd1498Szrj }
573638fd1498Szrj }
573738fd1498Szrj }
573838fd1498Szrj }
573938fd1498Szrj
574038fd1498Szrj /* If this isn't going to be placed on both the stack and in registers,
574138fd1498Szrj set up the register and number of words. */
574238fd1498Szrj if (! arg->pass_on_stack)
574338fd1498Szrj {
574438fd1498Szrj if (flags & ECF_SIBCALL)
574538fd1498Szrj reg = arg->tail_call_reg;
574638fd1498Szrj else
574738fd1498Szrj reg = arg->reg;
574838fd1498Szrj partial = arg->partial;
574938fd1498Szrj }
575038fd1498Szrj
575138fd1498Szrj /* Being passed entirely in a register. We shouldn't be called in
575238fd1498Szrj this case. */
575338fd1498Szrj gcc_assert (reg == 0 || partial != 0);
575438fd1498Szrj
575538fd1498Szrj /* If this arg needs special alignment, don't load the registers
575638fd1498Szrj here. */
575738fd1498Szrj if (arg->n_aligned_regs != 0)
575838fd1498Szrj reg = 0;
575938fd1498Szrj
576038fd1498Szrj /* If this is being passed partially in a register, we can't evaluate
576138fd1498Szrj it directly into its stack slot. Otherwise, we can. */
576238fd1498Szrj if (arg->value == 0)
576338fd1498Szrj {
576438fd1498Szrj /* stack_arg_under_construction is nonzero if a function argument is
576538fd1498Szrj being evaluated directly into the outgoing argument list and
576638fd1498Szrj expand_call must take special action to preserve the argument list
576738fd1498Szrj if it is called recursively.
576838fd1498Szrj
576938fd1498Szrj For scalar function arguments stack_usage_map is sufficient to
577038fd1498Szrj determine which stack slots must be saved and restored. Scalar
577138fd1498Szrj arguments in general have pass_on_stack == 0.
577238fd1498Szrj
577338fd1498Szrj If this argument is initialized by a function which takes the
577438fd1498Szrj address of the argument (a C++ constructor or a C function
577538fd1498Szrj returning a BLKmode structure), then stack_usage_map is
577638fd1498Szrj insufficient and expand_call must push the stack around the
577738fd1498Szrj function call. Such arguments have pass_on_stack == 1.
577838fd1498Szrj
577938fd1498Szrj Note that it is always safe to set stack_arg_under_construction,
578038fd1498Szrj but this generates suboptimal code if set when not needed. */
578138fd1498Szrj
578238fd1498Szrj if (arg->pass_on_stack)
578338fd1498Szrj stack_arg_under_construction++;
578438fd1498Szrj
578538fd1498Szrj arg->value = expand_expr (pval,
578638fd1498Szrj (partial
578738fd1498Szrj || TYPE_MODE (TREE_TYPE (pval)) != arg->mode)
578838fd1498Szrj ? NULL_RTX : arg->stack,
578938fd1498Szrj VOIDmode, EXPAND_STACK_PARM);
579038fd1498Szrj
579138fd1498Szrj /* If we are promoting the object (or if for any other reason
579238fd1498Szrj the mode doesn't agree), convert the mode. */
579338fd1498Szrj
579438fd1498Szrj if (arg->mode != TYPE_MODE (TREE_TYPE (pval)))
579538fd1498Szrj arg->value = convert_modes (arg->mode, TYPE_MODE (TREE_TYPE (pval)),
579638fd1498Szrj arg->value, arg->unsignedp);
579738fd1498Szrj
579838fd1498Szrj if (arg->pass_on_stack)
579938fd1498Szrj stack_arg_under_construction--;
580038fd1498Szrj }
580138fd1498Szrj
580238fd1498Szrj /* Check for overlap with already clobbered argument area. */
580338fd1498Szrj if ((flags & ECF_SIBCALL)
580438fd1498Szrj && MEM_P (arg->value)
580538fd1498Szrj && mem_might_overlap_already_clobbered_arg_p (XEXP (arg->value, 0),
580638fd1498Szrj arg->locate.size.constant))
580738fd1498Szrj sibcall_failure = 1;
580838fd1498Szrj
580938fd1498Szrj /* Don't allow anything left on stack from computation
581038fd1498Szrj of argument to alloca. */
581138fd1498Szrj if (flags & ECF_MAY_BE_ALLOCA)
581238fd1498Szrj do_pending_stack_adjust ();
581338fd1498Szrj
581438fd1498Szrj if (arg->value == arg->stack)
581538fd1498Szrj /* If the value is already in the stack slot, we are done. */
581638fd1498Szrj ;
581738fd1498Szrj else if (arg->mode != BLKmode)
581838fd1498Szrj {
581938fd1498Szrj unsigned int parm_align;
582038fd1498Szrj
582138fd1498Szrj /* Argument is a scalar, not entirely passed in registers.
582238fd1498Szrj (If part is passed in registers, arg->partial says how much
582338fd1498Szrj and emit_push_insn will take care of putting it there.)
582438fd1498Szrj
582538fd1498Szrj Push it, and if its size is less than the
582638fd1498Szrj amount of space allocated to it,
582738fd1498Szrj also bump stack pointer by the additional space.
582838fd1498Szrj Note that in C the default argument promotions
582938fd1498Szrj will prevent such mismatches. */
583038fd1498Szrj
583138fd1498Szrj poly_int64 size = (TYPE_EMPTY_P (TREE_TYPE (pval))
583238fd1498Szrj ? 0 : GET_MODE_SIZE (arg->mode));
583338fd1498Szrj
583438fd1498Szrj /* Compute how much space the push instruction will push.
583538fd1498Szrj On many machines, pushing a byte will advance the stack
583638fd1498Szrj pointer by a halfword. */
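/* For instance (illustrative), a port might define
     #define PUSH_ROUNDING(BYTES) (((BYTES) + 1) & ~1)
   so that even a one-byte push advances the stack pointer by two.  */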
583738fd1498Szrj #ifdef PUSH_ROUNDING
583838fd1498Szrj size = PUSH_ROUNDING (size);
583938fd1498Szrj #endif
584038fd1498Szrj used = size;
584138fd1498Szrj
584238fd1498Szrj /* Compute how much space the argument should get:
584338fd1498Szrj round up to a multiple of the alignment for arguments. */
584438fd1498Szrj if (targetm.calls.function_arg_padding (arg->mode, TREE_TYPE (pval))
584538fd1498Szrj != PAD_NONE)
584638fd1498Szrj /* At the moment we don't (need to) support ABIs for which the
584738fd1498Szrj padding isn't known at compile time. In principle it should
584838fd1498Szrj be easy to add though. */
584938fd1498Szrj used = force_align_up (size, PARM_BOUNDARY / BITS_PER_UNIT);
585038fd1498Szrj
585138fd1498Szrj /* Compute the alignment of the pushed argument. */
585238fd1498Szrj parm_align = arg->locate.boundary;
585338fd1498Szrj if (targetm.calls.function_arg_padding (arg->mode, TREE_TYPE (pval))
585438fd1498Szrj == PAD_DOWNWARD)
585538fd1498Szrj {
585638fd1498Szrj poly_int64 pad = used - size;
585738fd1498Szrj unsigned int pad_align = known_alignment (pad) * BITS_PER_UNIT;
585838fd1498Szrj if (pad_align != 0)
585938fd1498Szrj parm_align = MIN (parm_align, pad_align);
586038fd1498Szrj }
586138fd1498Szrj
586238fd1498Szrj /* This isn't already where we want it on the stack, so put it there.
586338fd1498Szrj This can either be done with push or copy insns. */
586438fd1498Szrj if (maybe_ne (used, 0)
586538fd1498Szrj && !emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval),
586638fd1498Szrj NULL_RTX, parm_align, partial, reg, used - size,
586738fd1498Szrj argblock, ARGS_SIZE_RTX (arg->locate.offset),
586838fd1498Szrj reg_parm_stack_space,
586938fd1498Szrj ARGS_SIZE_RTX (arg->locate.alignment_pad), true))
587038fd1498Szrj sibcall_failure = 1;
587138fd1498Szrj
587238fd1498Szrj /* Unless this is a partially-in-register argument, the argument is now
587338fd1498Szrj in the stack. */
587438fd1498Szrj if (partial == 0)
587538fd1498Szrj arg->value = arg->stack;
587638fd1498Szrj }
587738fd1498Szrj else
587838fd1498Szrj {
587938fd1498Szrj /* BLKmode, at least partly to be pushed. */
588038fd1498Szrj
588138fd1498Szrj unsigned int parm_align;
588238fd1498Szrj poly_int64 excess;
588338fd1498Szrj rtx size_rtx;
588438fd1498Szrj
588538fd1498Szrj /* Pushing a nonscalar.
588638fd1498Szrj If part is passed in registers, PARTIAL says how much
588738fd1498Szrj and emit_push_insn will take care of putting it there. */
588838fd1498Szrj
588938fd1498Szrj /* Round its size up to a multiple
589038fd1498Szrj of the allocation unit for arguments. */
589138fd1498Szrj
589238fd1498Szrj if (arg->locate.size.var != 0)
589338fd1498Szrj {
589438fd1498Szrj excess = 0;
589538fd1498Szrj size_rtx = ARGS_SIZE_RTX (arg->locate.size);
589638fd1498Szrj }
589738fd1498Szrj else
589838fd1498Szrj {
589938fd1498Szrj /* PUSH_ROUNDING has no effect on us, because emit_push_insn
590038fd1498Szrj for BLKmode is careful to avoid it. */
590138fd1498Szrj excess = (arg->locate.size.constant
590238fd1498Szrj - arg_int_size_in_bytes (TREE_TYPE (pval))
590338fd1498Szrj + partial);
590438fd1498Szrj size_rtx = expand_expr (arg_size_in_bytes (TREE_TYPE (pval)),
590538fd1498Szrj NULL_RTX, TYPE_MODE (sizetype),
590638fd1498Szrj EXPAND_NORMAL);
590738fd1498Szrj }
590838fd1498Szrj
590938fd1498Szrj parm_align = arg->locate.boundary;
591038fd1498Szrj
591138fd1498Szrj /* When an argument is padded down, the block is aligned to
591238fd1498Szrj PARM_BOUNDARY, but the actual argument isn't. */
591338fd1498Szrj if (targetm.calls.function_arg_padding (arg->mode, TREE_TYPE (pval))
591438fd1498Szrj == PAD_DOWNWARD)
591538fd1498Szrj {
591638fd1498Szrj if (arg->locate.size.var)
591738fd1498Szrj parm_align = BITS_PER_UNIT;
591838fd1498Szrj else
591938fd1498Szrj {
592038fd1498Szrj unsigned int excess_align
592138fd1498Szrj = known_alignment (excess) * BITS_PER_UNIT;
592238fd1498Szrj if (excess_align != 0)
592338fd1498Szrj parm_align = MIN (parm_align, excess_align);
592438fd1498Szrj }
592538fd1498Szrj }
592638fd1498Szrj
592738fd1498Szrj if ((flags & ECF_SIBCALL) && MEM_P (arg->value))
592838fd1498Szrj {
592938fd1498Szrj /* emit_push_insn might not work properly if arg->value and
593038fd1498Szrj argblock + arg->locate.offset areas overlap. */
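/* Concretely (illustrative): for a sibling call the outgoing slots
   alias the caller's incoming argument area, so ARG->VALUE may still
   live inside the region about to be overwritten; the address checks
   below detect such an overlap and set SIBCALL_FAILURE so that
   expand_call falls back to a normal call.  */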
593138fd1498Szrj rtx x = arg->value;
593238fd1498Szrj poly_int64 i = 0;
593338fd1498Szrj
593438fd1498Szrj if (XEXP (x, 0) == crtl->args.internal_arg_pointer
593538fd1498Szrj || (GET_CODE (XEXP (x, 0)) == PLUS
593638fd1498Szrj && XEXP (XEXP (x, 0), 0) ==
593738fd1498Szrj crtl->args.internal_arg_pointer
593838fd1498Szrj && CONST_INT_P (XEXP (XEXP (x, 0), 1))))
593938fd1498Szrj {
594038fd1498Szrj if (XEXP (x, 0) != crtl->args.internal_arg_pointer)
594138fd1498Szrj i = rtx_to_poly_int64 (XEXP (XEXP (x, 0), 1));
594238fd1498Szrj
594338fd1498Szrj /* arg.locate doesn't contain the pretend_args_size offset,
594438fd1498Szrj it's part of argblock. Ensure we don't count it in I. */
594538fd1498Szrj if (STACK_GROWS_DOWNWARD)
594638fd1498Szrj i -= crtl->args.pretend_args_size;
594738fd1498Szrj else
594838fd1498Szrj i += crtl->args.pretend_args_size;
594938fd1498Szrj
595038fd1498Szrj /* expand_call should ensure this. */
595138fd1498Szrj gcc_assert (!arg->locate.offset.var
595238fd1498Szrj && arg->locate.size.var == 0);
595338fd1498Szrj poly_int64 size_val = rtx_to_poly_int64 (size_rtx);
595438fd1498Szrj
595538fd1498Szrj if (known_eq (arg->locate.offset.constant, i))
595638fd1498Szrj {
595738fd1498Szrj /* Even though they appear to be at the same location,
595838fd1498Szrj if part of the outgoing argument is in registers,
595938fd1498Szrj they aren't really at the same location. Check for
596038fd1498Szrj this by making sure that the incoming size is the
596138fd1498Szrj same as the outgoing size. */
596238fd1498Szrj if (maybe_ne (arg->locate.size.constant, size_val))
596338fd1498Szrj sibcall_failure = 1;
596438fd1498Szrj }
596538fd1498Szrj else if (maybe_in_range_p (arg->locate.offset.constant,
596638fd1498Szrj i, size_val))
596738fd1498Szrj sibcall_failure = 1;
596838fd1498Szrj /* Use arg->locate.size.constant instead of size_rtx
596938fd1498Szrj because we only care about the part of the argument
597038fd1498Szrj on the stack. */
597138fd1498Szrj else if (maybe_in_range_p (i, arg->locate.offset.constant,
597238fd1498Szrj arg->locate.size.constant))
597338fd1498Szrj sibcall_failure = 1;
597438fd1498Szrj }
597538fd1498Szrj }
597638fd1498Szrj
597738fd1498Szrj if (!CONST_INT_P (size_rtx) || INTVAL (size_rtx) != 0)
597838fd1498Szrj emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), size_rtx,
597938fd1498Szrj parm_align, partial, reg, excess, argblock,
598038fd1498Szrj ARGS_SIZE_RTX (arg->locate.offset),
598138fd1498Szrj reg_parm_stack_space,
598238fd1498Szrj ARGS_SIZE_RTX (arg->locate.alignment_pad), false);
598338fd1498Szrj
598438fd1498Szrj /* Unless this is a partially-in-register argument, the argument is now
598538fd1498Szrj in the stack.
598638fd1498Szrj
598738fd1498Szrj ??? Unlike the case above, in which we want the actual
598838fd1498Szrj address of the data, so that we can load it directly into a
598938fd1498Szrj register, here we want the address of the stack slot, so that
599038fd1498Szrj it's properly aligned for word-by-word copying or something
599138fd1498Szrj like that. It's not clear that this is always correct. */
599238fd1498Szrj if (partial == 0)
599338fd1498Szrj arg->value = arg->stack_slot;
599438fd1498Szrj }
599538fd1498Szrj
599638fd1498Szrj if (arg->reg && GET_CODE (arg->reg) == PARALLEL)
599738fd1498Szrj {
599838fd1498Szrj tree type = TREE_TYPE (arg->tree_value);
599938fd1498Szrj arg->parallel_value
600038fd1498Szrj = emit_group_load_into_temps (arg->reg, arg->value, type,
600138fd1498Szrj int_size_in_bytes (type));
600238fd1498Szrj }
600338fd1498Szrj
600438fd1498Szrj /* Mark all slots this store used. */
600538fd1498Szrj if (ACCUMULATE_OUTGOING_ARGS && !(flags & ECF_SIBCALL)
600638fd1498Szrj && argblock && ! variable_size && arg->stack)
600738fd1498Szrj mark_stack_region_used (lower_bound, upper_bound);
600838fd1498Szrj
600938fd1498Szrj /* Once we have pushed something, pops can't safely
601038fd1498Szrj be deferred during the rest of the arguments. */
601138fd1498Szrj NO_DEFER_POP;
601238fd1498Szrj
601338fd1498Szrj /* Free any temporary slots made in processing this argument. */
601438fd1498Szrj pop_temp_slots ();
601538fd1498Szrj
601638fd1498Szrj return sibcall_failure;
601738fd1498Szrj }
601838fd1498Szrj
601938fd1498Szrj /* Nonzero if we do not know how to pass TYPE solely in registers. */
602038fd1498Szrj
602138fd1498Szrj bool
602238fd1498Szrj must_pass_in_stack_var_size (machine_mode mode ATTRIBUTE_UNUSED,
602338fd1498Szrj const_tree type)
602438fd1498Szrj {
602538fd1498Szrj if (!type)
602638fd1498Szrj return false;
602738fd1498Szrj
602838fd1498Szrj /* If the type has variable size... */
602938fd1498Szrj if (TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
603038fd1498Szrj return true;
603138fd1498Szrj
603238fd1498Szrj /* If the type is marked as addressable (it is required
603338fd1498Szrj to be constructed into the stack)... */
603438fd1498Szrj if (TREE_ADDRESSABLE (type))
603538fd1498Szrj return true;
603638fd1498Szrj
603738fd1498Szrj return false;
603838fd1498Szrj }
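
/* Illustrative examples of the two tests above: an object whose size is
   known only at run time (e.g. an Ada unconstrained record) fails the
   INTEGER_CST check, while a C++ class with a nontrivial copy
   constructor is marked TREE_ADDRESSABLE by its front end and must
   likewise be constructed in memory rather than copied through
   registers.  */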
603938fd1498Szrj
604038fd1498Szrj /* Another version of the TARGET_MUST_PASS_IN_STACK hook. This one
604138fd1498Szrj takes trailing padding of a structure into account. */
604238fd1498Szrj /* ??? Should be able to merge these two by examining BLOCK_REG_PADDING. */
604338fd1498Szrj
604438fd1498Szrj bool
604538fd1498Szrj must_pass_in_stack_var_size_or_pad (machine_mode mode, const_tree type)
604638fd1498Szrj {
604738fd1498Szrj if (!type)
604838fd1498Szrj return false;
604938fd1498Szrj
605038fd1498Szrj /* If the type has variable size... */
605138fd1498Szrj if (TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
605238fd1498Szrj return true;
605338fd1498Szrj
605438fd1498Szrj /* If the type is marked as addressable (it is required
605538fd1498Szrj to be constructed into the stack)... */
605638fd1498Szrj if (TREE_ADDRESSABLE (type))
605738fd1498Szrj return true;
605838fd1498Szrj
605938fd1498Szrj if (TYPE_EMPTY_P (type))
606038fd1498Szrj return false;
606138fd1498Szrj
606238fd1498Szrj /* If the padding and mode of the type is such that a copy into
606338fd1498Szrj a register would put it into the wrong part of the register. */
606438fd1498Szrj if (mode == BLKmode
606538fd1498Szrj && int_size_in_bytes (type) % (PARM_BOUNDARY / BITS_PER_UNIT)
606638fd1498Szrj && (targetm.calls.function_arg_padding (mode, type)
606738fd1498Szrj == (BYTES_BIG_ENDIAN ? PAD_UPWARD : PAD_DOWNWARD)))
606838fd1498Szrj return true;
606938fd1498Szrj
607038fd1498Szrj return false;
607138fd1498Szrj }
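
/* Worked example (illustrative) for the padding test above: take a
   3-byte BLKmode struct, e.g.

     struct s3 { char a, b, c; };

   on a big-endian target whose padding hook returns PAD_UPWARD.  Its
   bytes sit at one end of the padded 4-byte slot, but a word-sized
   register copy would place them at the other end, so the struct
   must be passed on the stack.  */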
607238fd1498Szrj
607338fd1498Szrj /* Tell the garbage collector about GTY markers in this source file. */
607438fd1498Szrj #include "gt-calls.h"