/* A pass for lowering trees to RTL.
   Copyright (C) 2004-2018 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "cfghooks.h"
#include "tree-pass.h"
#include "memmodel.h"
#include "tm_p.h"
#include "ssa.h"
#include "optabs.h"
#include "regs.h" /* For reg_renumber.  */
#include "emit-rtl.h"
#include "recog.h"
#include "cgraph.h"
#include "diagnostic.h"
#include "fold-const.h"
#include "varasm.h"
#include "stor-layout.h"
#include "stmt.h"
#include "print-tree.h"
#include "cfgrtl.h"
#include "cfganal.h"
#include "cfgbuild.h"
#include "cfgcleanup.h"
#include "dojump.h"
#include "explow.h"
#include "calls.h"
#include "expr.h"
#include "internal-fn.h"
#include "tree-eh.h"
#include "gimple-iterator.h"
#include "gimple-expr.h"
#include "gimple-walk.h"
#include "tree-cfg.h"
#include "tree-dfa.h"
#include "tree-ssa.h"
#include "except.h"
#include "gimple-pretty-print.h"
#include "toplev.h"
#include "debug.h"
#include "params.h"
#include "tree-inline.h"
#include "value-prof.h"
#include "tree-ssa-live.h"
#include "tree-outof-ssa.h"
#include "cfgloop.h"
#include "insn-attr.h" /* For INSN_SCHEDULING.  */
#include "stringpool.h"
#include "attribs.h"
#include "asan.h"
#include "tree-ssa-address.h"
#include "output.h"
#include "builtins.h"
#include "tree-chkp.h"
#include "rtl-chkp.h"

/* Some systems use __main in a way incompatible with its use in gcc; in these
   cases use the macro NAME__MAIN to give a quoted symbol and SYMBOL__MAIN to
   give the same symbol without quotes for an alternative entry point.  You
   must define both, or neither.  */
#ifndef NAME__MAIN
#define NAME__MAIN "__main"
#endif

/* This variable holds information helping the rewriting of SSA trees
   into RTL.  */
struct ssaexpand SA;

/* This variable holds the currently expanded gimple statement for purposes
   of communicating the profile info to the builtin expanders.  */
gimple *currently_expanding_gimple_stmt;

static rtx expand_debug_expr (tree);

static bool defer_stack_allocation (tree, bool);

static void record_alignment_for_reg_var (unsigned int);

/* Return an expression tree corresponding to the RHS of GIMPLE
   statement STMT.  */

tree
gimple_assign_rhs_to_tree (gimple *stmt)
{
  tree t;
  enum gimple_rhs_class grhs_class;

  grhs_class = get_gimple_rhs_class (gimple_expr_code (stmt));

  if (grhs_class == GIMPLE_TERNARY_RHS)
    t = build3 (gimple_assign_rhs_code (stmt),
                TREE_TYPE (gimple_assign_lhs (stmt)),
                gimple_assign_rhs1 (stmt),
                gimple_assign_rhs2 (stmt),
                gimple_assign_rhs3 (stmt));
  else if (grhs_class == GIMPLE_BINARY_RHS)
    t = build2 (gimple_assign_rhs_code (stmt),
                TREE_TYPE (gimple_assign_lhs (stmt)),
                gimple_assign_rhs1 (stmt),
                gimple_assign_rhs2 (stmt));
  else if (grhs_class == GIMPLE_UNARY_RHS)
    t = build1 (gimple_assign_rhs_code (stmt),
                TREE_TYPE (gimple_assign_lhs (stmt)),
                gimple_assign_rhs1 (stmt));
  else if (grhs_class == GIMPLE_SINGLE_RHS)
    {
      t = gimple_assign_rhs1 (stmt);
      /* Avoid modifying this tree in place below.  */
      if ((gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t)
           && gimple_location (stmt) != EXPR_LOCATION (t))
          || (gimple_block (stmt)
              && currently_expanding_to_rtl
              && EXPR_P (t)))
        t = copy_node (t);
    }
  else
    gcc_unreachable ();

  if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t))
    SET_EXPR_LOCATION (t, gimple_location (stmt));

  return t;
}


#ifndef STACK_ALIGNMENT_NEEDED
#define STACK_ALIGNMENT_NEEDED 1
#endif

#define SSAVAR(x) (TREE_CODE (x) == SSA_NAME ? SSA_NAME_VAR (x) : x)
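
/* For example, for an SSA name whose underlying user variable is 'i',
   SSAVAR yields the VAR_DECL for 'i' (or NULL_TREE for anonymous SSA
   names); for any other tree, SSAVAR is the identity.  */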

/* Choose either CUR or NEXT as the leader DECL for a partition.
   Prefer ignored decls, to simplify debug dumps and to reduce the
   ambiguity that arises when the same user variable ends up in
   multiple partitions (this is less likely for compiler-introduced
   temps).  */

static tree
leader_merge (tree cur, tree next)
{
  if (cur == NULL || cur == next)
    return next;

  if (DECL_P (cur) && DECL_IGNORED_P (cur))
    return cur;

  if (DECL_P (next) && DECL_IGNORED_P (next))
    return next;

  return cur;
}

/* Associate declaration T with storage space X.  If T is not an
   SSA name this is exactly SET_DECL_RTL; otherwise make the
   partition of T associated with X.  */
static inline void
set_rtl (tree t, rtx x)
{
  gcc_checking_assert (!x
                       || !(TREE_CODE (t) == SSA_NAME || is_gimple_reg (t))
                       || (use_register_for_decl (t)
                           ? (REG_P (x)
                              || (GET_CODE (x) == CONCAT
                                  && (REG_P (XEXP (x, 0))
                                      || SUBREG_P (XEXP (x, 0)))
                                  && (REG_P (XEXP (x, 1))
                                      || SUBREG_P (XEXP (x, 1))))
                              /* We need to accept PARALLELs for RESULT_DECLs
                                 because of vector types with BLKmode returned
                                 in multiple registers, but they are supposed
                                 to be uncoalesced.  */
                              || (GET_CODE (x) == PARALLEL
                                  && SSAVAR (t)
                                  && TREE_CODE (SSAVAR (t)) == RESULT_DECL
                                  && (GET_MODE (x) == BLKmode
                                      || !flag_tree_coalesce_vars)))
                           : (MEM_P (x) || x == pc_rtx
                              || (GET_CODE (x) == CONCAT
                                  && MEM_P (XEXP (x, 0))
                                  && MEM_P (XEXP (x, 1))))));
  /* Check that the RTL for SSA_NAMEs and gimple-reg PARM_DECLs and
     RESULT_DECLs has the expected mode.  For memory, we accept
     unpromoted modes, since that's what we're likely to get.  For
     PARM_DECLs and RESULT_DECLs, we'll have been called by
     set_parm_rtl, which will give us the default def, so we don't
     have to compute it ourselves.  For RESULT_DECLs, we accept mode
     mismatches too, as long as we have BLKmode or are not coalescing
     across variables, so that we don't reject BLKmode PARALLELs or
     unpromoted REGs.  */
  gcc_checking_assert (!x || x == pc_rtx || TREE_CODE (t) != SSA_NAME
                       || (SSAVAR (t)
                           && TREE_CODE (SSAVAR (t)) == RESULT_DECL
                           && (promote_ssa_mode (t, NULL) == BLKmode
                               || !flag_tree_coalesce_vars))
                       || !use_register_for_decl (t)
                       || GET_MODE (x) == promote_ssa_mode (t, NULL));

  if (x)
    {
      bool skip = false;
      tree cur = NULL_TREE;
      rtx xm = x;

    retry:
      if (MEM_P (xm))
        cur = MEM_EXPR (xm);
      else if (REG_P (xm))
        cur = REG_EXPR (xm);
      else if (SUBREG_P (xm))
        {
          gcc_assert (subreg_lowpart_p (xm));
          xm = SUBREG_REG (xm);
          goto retry;
        }
      else if (GET_CODE (xm) == CONCAT)
        {
          xm = XEXP (xm, 0);
          goto retry;
        }
      else if (GET_CODE (xm) == PARALLEL)
        {
          xm = XVECEXP (xm, 0, 0);
          gcc_assert (GET_CODE (xm) == EXPR_LIST);
          xm = XEXP (xm, 0);
          goto retry;
        }
      else if (xm == pc_rtx)
        skip = true;
      else
        gcc_unreachable ();

      tree next = skip ? cur : leader_merge (cur, SSAVAR (t) ? SSAVAR (t) : t);

      if (cur != next)
        {
          if (MEM_P (x))
            set_mem_attributes (x,
                                next && TREE_CODE (next) == SSA_NAME
                                ? TREE_TYPE (next)
                                : next, true);
          else
            set_reg_attrs_for_decl_rtl (next, x);
        }
    }

  if (TREE_CODE (t) == SSA_NAME)
    {
      int part = var_to_partition (SA.map, t);
      if (part != NO_PARTITION)
        {
          if (SA.partition_to_pseudo[part])
            gcc_assert (SA.partition_to_pseudo[part] == x);
          else if (x != pc_rtx)
            SA.partition_to_pseudo[part] = x;
        }
      /* For the benefit of debug information at -O0 (where
         vartracking doesn't run) record the place also in the base
         DECL.  For PARMs and RESULTs, do so only when setting the
         default def.  */
      if (x && x != pc_rtx && SSA_NAME_VAR (t)
          && (VAR_P (SSA_NAME_VAR (t))
              || SSA_NAME_IS_DEFAULT_DEF (t)))
        {
          tree var = SSA_NAME_VAR (t);
          /* If we don't yet have something recorded, just record it now.  */
          if (!DECL_RTL_SET_P (var))
            SET_DECL_RTL (var, x);
          /* If we have it set already to "multiple places" don't
             change this.  */
          else if (DECL_RTL (var) == pc_rtx)
            ;
          /* If we have something recorded and it's not the same place
             as we want to record now, we have multiple partitions for the
             same base variable, with different places.  We can't just
             randomly choose one, hence we have to say that we don't know.
             This only happens with optimization, where var-tracking
             will figure out the right thing.  */
          else if (DECL_RTL (var) != x)
            SET_DECL_RTL (var, pc_rtx);
        }
    }
  else
    SET_DECL_RTL (t, x);
}

/* This structure holds data relevant to one variable that will be
   placed in a stack slot.  */
struct stack_var
{
  /* The variable.  */
  tree decl;

  /* Initially, the size of the variable.  Later, the size of the partition,
     if this variable becomes its partition's representative.  */
  poly_uint64 size;

  /* The *byte* alignment required for this variable.  Or, as with the
     size, the alignment for this partition.  */
  unsigned int alignb;

  /* The partition representative.  */
  size_t representative;

  /* The next stack variable in the partition, or EOC.  */
  size_t next;

  /* The indices of conflicting stack variables.  */
  bitmap conflicts;
};

#define EOC  ((size_t)-1)
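
/* EOC (presumably "end of chain") is the sentinel index terminating the
   linked list of variables that forms a stack partition.  */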

/* We have an array of such objects while deciding allocation.  */
static struct stack_var *stack_vars;
static size_t stack_vars_alloc;
static size_t stack_vars_num;
static hash_map<tree, size_t> *decl_to_stack_part;

/* Conflict bitmaps go on this obstack.  This allows us to destroy
   all of them in one big sweep.  */
static bitmap_obstack stack_var_bitmap_obstack;

/* An array of indices such that stack_vars[stack_vars_sorted[i]].size
   is non-increasing (see stack_var_cmp for the full ordering).  */
static size_t *stack_vars_sorted;

/* The phase of the stack frame.  This is the known misalignment of
   virtual_stack_vars_rtx from PREFERRED_STACK_BOUNDARY.  That is,
   (frame_offset + frame_phase) % PREFERRED_STACK_BOUNDARY == 0.  */
static int frame_phase;
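
/* For instance (illustrative numbers only): with a PREFERRED_STACK_BOUNDARY
   of 128 bits (16 bytes) and frame_phase == 8, the fully aligned frame
   offsets are exactly those with (offset + 8) % 16 == 0, such as -24, -8
   and 8.  */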

/* Used during expand_used_vars to remember if we saw any decls for
   which we'd like to enable stack smashing protection.  */
static bool has_protected_decls;

/* Used during expand_used_vars.  Remember if we saw a character buffer
   smaller than our cutoff threshold.  Used for -Wstack-protector.  */
static bool has_short_buffer;

/* Compute the byte alignment to use for DECL.  Ignore the alignment
   we can't satisfy given the expected alignment of the stack boundary.  */

static unsigned int
align_local_variable (tree decl)
{
  unsigned int align;

  if (TREE_CODE (decl) == SSA_NAME)
    align = TYPE_ALIGN (TREE_TYPE (decl));
  else
    {
      align = LOCAL_DECL_ALIGNMENT (decl);
      SET_DECL_ALIGN (decl, align);
    }
  return align / BITS_PER_UNIT;
}

/* Align given offset BASE with ALIGN.  Round up if ALIGN_UP is true,
   round down otherwise.  Return the rounded BASE value.  */

static inline unsigned HOST_WIDE_INT
align_base (HOST_WIDE_INT base, unsigned HOST_WIDE_INT align, bool align_up)
{
  return align_up ? (base + align - 1) & -align : base & -align;
}
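
/* As a worked example: align_base (37, 16, true) == 48 and
   align_base (37, 16, false) == 32.  Since ALIGN is a power of two,
   -ALIGN is a mask clearing the low log2(ALIGN) bits.  */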

/* Allocate SIZE bytes at byte alignment ALIGN from the stack frame.
   Return the frame offset.  */

static poly_int64
alloc_stack_frame_space (poly_int64 size, unsigned HOST_WIDE_INT align)
{
  poly_int64 offset, new_frame_offset;

  if (FRAME_GROWS_DOWNWARD)
    {
      new_frame_offset
        = aligned_lower_bound (frame_offset - frame_phase - size,
                               align) + frame_phase;
      offset = new_frame_offset;
    }
  else
    {
      new_frame_offset
        = aligned_upper_bound (frame_offset - frame_phase,
                               align) + frame_phase;
      offset = new_frame_offset;
      new_frame_offset += size;
    }
  frame_offset = new_frame_offset;

  if (frame_offset_overflow (frame_offset, cfun->decl))
    frame_offset = offset = 0;

  return offset;
}
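
/* Illustrative example (assuming frame_phase == 0): on a target where
   FRAME_GROWS_DOWNWARD, with frame_offset == -4, requesting 8 bytes at
   alignment 8 rounds -12 down to -16, sets frame_offset to -16 and
   returns -16, so the object occupies offsets [-16, -8).  */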

/* Accumulate DECL into STACK_VARS.  */

static void
add_stack_var (tree decl)
{
  struct stack_var *v;

  if (stack_vars_num >= stack_vars_alloc)
    {
      if (stack_vars_alloc)
        stack_vars_alloc = stack_vars_alloc * 3 / 2;
      else
        stack_vars_alloc = 32;
      stack_vars
        = XRESIZEVEC (struct stack_var, stack_vars, stack_vars_alloc);
    }
  if (!decl_to_stack_part)
    decl_to_stack_part = new hash_map<tree, size_t>;

  v = &stack_vars[stack_vars_num];
  decl_to_stack_part->put (decl, stack_vars_num);

  v->decl = decl;
  tree size = TREE_CODE (decl) == SSA_NAME
    ? TYPE_SIZE_UNIT (TREE_TYPE (decl))
    : DECL_SIZE_UNIT (decl);
  v->size = tree_to_poly_uint64 (size);
  /* Ensure that all variables have size, so that &a != &b for any two
     variables that are simultaneously live.  */
  if (known_eq (v->size, 0U))
    v->size = 1;
  v->alignb = align_local_variable (decl);
  /* An alignment of zero can mightily confuse us later.  */
  gcc_assert (v->alignb != 0);

  /* All variables are initially in their own partition.  */
  v->representative = stack_vars_num;
  v->next = EOC;

  /* All variables initially conflict with no other.  */
  v->conflicts = NULL;

  /* Ensure that this decl doesn't get put onto the list twice.  */
  set_rtl (decl, pc_rtx);

  stack_vars_num++;
}

/* Make the decls associated with LUIDs X and Y conflict.  */

static void
add_stack_var_conflict (size_t x, size_t y)
{
  struct stack_var *a = &stack_vars[x];
  struct stack_var *b = &stack_vars[y];
  if (!a->conflicts)
    a->conflicts = BITMAP_ALLOC (&stack_var_bitmap_obstack);
  if (!b->conflicts)
    b->conflicts = BITMAP_ALLOC (&stack_var_bitmap_obstack);
  bitmap_set_bit (a->conflicts, y);
  bitmap_set_bit (b->conflicts, x);
}

/* Check whether the decls associated with LUIDs X and Y conflict.  */

static bool
stack_var_conflict_p (size_t x, size_t y)
{
  struct stack_var *a = &stack_vars[x];
  struct stack_var *b = &stack_vars[y];
  if (x == y)
    return false;
  /* Partitions containing an SSA name result from gimple registers
     with things like unsupported modes.  They are top-level and
     hence conflict with everything else.  */
  if (TREE_CODE (a->decl) == SSA_NAME || TREE_CODE (b->decl) == SSA_NAME)
    return true;

  if (!a->conflicts || !b->conflicts)
    return false;
  return bitmap_bit_p (a->conflicts, y);
}

/* Callback for walk_stmt_ops.  If OP is a decl touched by add_stack_var
   enter its partition number into bitmap DATA.  */

static bool
visit_op (gimple *, tree op, tree, void *data)
{
  bitmap active = (bitmap)data;
  op = get_base_address (op);
  if (op
      && DECL_P (op)
      && DECL_RTL_IF_SET (op) == pc_rtx)
    {
      size_t *v = decl_to_stack_part->get (op);
      if (v)
        bitmap_set_bit (active, *v);
    }
  return false;
}

/* Callback for walk_stmt_ops.  If OP is a decl touched by add_stack_var
   record conflicts between it and all currently active other partitions
   from bitmap DATA.  */

static bool
visit_conflict (gimple *, tree op, tree, void *data)
{
  bitmap active = (bitmap)data;
  op = get_base_address (op);
  if (op
      && DECL_P (op)
      && DECL_RTL_IF_SET (op) == pc_rtx)
    {
      size_t *v = decl_to_stack_part->get (op);
      if (v && bitmap_set_bit (active, *v))
        {
          size_t num = *v;
          bitmap_iterator bi;
          unsigned i;
          gcc_assert (num < stack_vars_num);
          EXECUTE_IF_SET_IN_BITMAP (active, 0, i, bi)
            add_stack_var_conflict (num, i);
        }
    }
  return false;
}

/* Helper routine for add_scope_conflicts, calculating the active partitions
   at the end of BB, leaving the result in WORK.  We're called to generate
   conflicts when FOR_CONFLICT is true, otherwise we're just tracking
   liveness.  */

static void
add_scope_conflicts_1 (basic_block bb, bitmap work, bool for_conflict)
{
  edge e;
  edge_iterator ei;
  gimple_stmt_iterator gsi;
  walk_stmt_load_store_addr_fn visit;

  bitmap_clear (work);
  FOR_EACH_EDGE (e, ei, bb->preds)
    bitmap_ior_into (work, (bitmap)e->src->aux);

  visit = visit_op;

  for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple *stmt = gsi_stmt (gsi);
      walk_stmt_load_store_addr_ops (stmt, work, NULL, NULL, visit);
    }
  for (gsi = gsi_after_labels (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple *stmt = gsi_stmt (gsi);

      if (gimple_clobber_p (stmt))
        {
          tree lhs = gimple_assign_lhs (stmt);
          size_t *v;
          /* Nested function lowering might introduce LHSs
             that are COMPONENT_REFs.  */
          if (!VAR_P (lhs))
            continue;
          if (DECL_RTL_IF_SET (lhs) == pc_rtx
              && (v = decl_to_stack_part->get (lhs)))
            bitmap_clear_bit (work, *v);
        }
      else if (!is_gimple_debug (stmt))
        {
          if (for_conflict
              && visit == visit_op)
            {
              /* If this is the first real instruction in this BB we need
                 to add conflicts for everything live at this point now.
                 Unlike classical liveness for named objects we can't
                 rely on seeing a def/use of the names we're interested in.
                 There might merely be indirect loads/stores.  We'd not add any
                 conflicts for such partitions.  */
              bitmap_iterator bi;
              unsigned i;
              EXECUTE_IF_SET_IN_BITMAP (work, 0, i, bi)
                {
                  struct stack_var *a = &stack_vars[i];
                  if (!a->conflicts)
                    a->conflicts = BITMAP_ALLOC (&stack_var_bitmap_obstack);
                  bitmap_ior_into (a->conflicts, work);
                }
              visit = visit_conflict;
            }
          walk_stmt_load_store_addr_ops (stmt, work, visit, visit, visit);
        }
    }
}

/* Generate stack partition conflicts between all partitions that are
   simultaneously live.  */

static void
add_scope_conflicts (void)
{
  basic_block bb;
  bool changed;
  bitmap work = BITMAP_ALLOC (NULL);
  int *rpo;
  int n_bbs;

  /* We approximate the live range of a stack variable by taking the first
     mention of its name as starting point(s), and by the end-of-scope
     death clobber added by gimplify as ending point(s) of the range.
     This overapproximates in the case where we, for instance, moved an
     address-taken operation upward without also moving a dereference of
     it upward.  But it's conservatively correct, as a variable never can
     hold values before its name is mentioned at least once.

     We then do a mostly classical bitmap liveness algorithm.  */

  FOR_ALL_BB_FN (bb, cfun)
    bb->aux = BITMAP_ALLOC (&stack_var_bitmap_obstack);

  rpo = XNEWVEC (int, last_basic_block_for_fn (cfun));
  n_bbs = pre_and_rev_post_order_compute (NULL, rpo, false);

  changed = true;
  while (changed)
    {
      int i;
      changed = false;
      for (i = 0; i < n_bbs; i++)
        {
          bitmap active;
          bb = BASIC_BLOCK_FOR_FN (cfun, rpo[i]);
          active = (bitmap)bb->aux;
          add_scope_conflicts_1 (bb, work, false);
          if (bitmap_ior_into (active, work))
            changed = true;
        }
    }

  FOR_EACH_BB_FN (bb, cfun)
    add_scope_conflicts_1 (bb, work, true);

  free (rpo);
  BITMAP_FREE (work);
  FOR_ALL_BB_FN (bb, cfun)
    BITMAP_FREE (bb->aux);
}

/* A subroutine of partition_stack_vars.  A comparison function for qsort,
   sorting an array of indices by the properties of the object.  */

static int
stack_var_cmp (const void *a, const void *b)
{
  size_t ia = *(const size_t *)a;
  size_t ib = *(const size_t *)b;
  unsigned int aligna = stack_vars[ia].alignb;
  unsigned int alignb = stack_vars[ib].alignb;
  poly_int64 sizea = stack_vars[ia].size;
  poly_int64 sizeb = stack_vars[ib].size;
  tree decla = stack_vars[ia].decl;
  tree declb = stack_vars[ib].decl;
  bool largea, largeb;
  unsigned int uida, uidb;

  /* Primary compare on "large" alignment.  Large comes first.  */
  largea = (aligna * BITS_PER_UNIT > MAX_SUPPORTED_STACK_ALIGNMENT);
  largeb = (alignb * BITS_PER_UNIT > MAX_SUPPORTED_STACK_ALIGNMENT);
  if (largea != largeb)
    return (int)largeb - (int)largea;

  /* Secondary compare on size, decreasing.  */
  int diff = compare_sizes_for_sort (sizeb, sizea);
  if (diff != 0)
    return diff;

  /* Tertiary compare on true alignment, decreasing.  */
  if (aligna < alignb)
    return -1;
  if (aligna > alignb)
    return 1;

  /* Final compare on ID for sort stability, increasing.
     Two SSA names are compared by their version, SSA names come before
     non-SSA names, and two normal decls are compared by their DECL_UID.  */
  if (TREE_CODE (decla) == SSA_NAME)
    {
      if (TREE_CODE (declb) == SSA_NAME)
        uida = SSA_NAME_VERSION (decla), uidb = SSA_NAME_VERSION (declb);
      else
        return -1;
    }
  else if (TREE_CODE (declb) == SSA_NAME)
    return 1;
  else
    uida = DECL_UID (decla), uidb = DECL_UID (declb);
  if (uida < uidb)
    return 1;
  if (uida > uidb)
    return -1;
  return 0;
}
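
/* For instance, three small-alignment decls of sizes 8, 32 and 16 sort
   as 32, 16, 8: any "large" over-aligned vars would come first, then
   size decreasing, then alignment decreasing, with IDs as a tiebreaker.  */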

struct part_traits : unbounded_int_hashmap_traits <size_t, bitmap> {};
typedef hash_map<size_t, bitmap, part_traits> part_hashmap;

/* If the points-to solution *PT points to variables that are in a partition
   together with other variables, add all partition members to the pointed-to
   variables bitmap.  */

static void
add_partitioned_vars_to_ptset (struct pt_solution *pt,
                               part_hashmap *decls_to_partitions,
                               hash_set<bitmap> *visited, bitmap temp)
{
  bitmap_iterator bi;
  unsigned i;
  bitmap *part;

  if (pt->anything
      || pt->vars == NULL
      /* The pointed-to vars bitmap is shared, it is enough to
         visit it once.  */
      || visited->add (pt->vars))
    return;

  bitmap_clear (temp);

  /* By using a temporary bitmap to store all members of the partitions
     we have to add we make sure to visit each of the partitions only
     once.  */
  EXECUTE_IF_SET_IN_BITMAP (pt->vars, 0, i, bi)
    if ((!temp
         || !bitmap_bit_p (temp, i))
        && (part = decls_to_partitions->get (i)))
      bitmap_ior_into (temp, *part);
  if (!bitmap_empty_p (temp))
    bitmap_ior_into (pt->vars, temp);
}

/* Update points-to sets based on partition info, so we can use them on RTL.
   The bitmaps representing stack partitions will be saved until expand,
   where partitioned decls used as bases in memory expressions will be
   rewritten.  */

static void
update_alias_info_with_stack_vars (void)
{
  part_hashmap *decls_to_partitions = NULL;
  size_t i, j;
  tree var = NULL_TREE;

  for (i = 0; i < stack_vars_num; i++)
    {
      bitmap part = NULL;
      tree name;
      struct ptr_info_def *pi;

      /* Not interested in partitions with a single variable.  */
      if (stack_vars[i].representative != i
          || stack_vars[i].next == EOC)
        continue;

      if (!decls_to_partitions)
        {
          decls_to_partitions = new part_hashmap;
          cfun->gimple_df->decls_to_pointers = new hash_map<tree, tree>;
        }

      /* Create an SSA_NAME that points to the partition for use
         as base during alias-oracle queries on RTL for bases that
         have been partitioned.  */
      if (var == NULL_TREE)
        var = create_tmp_var (ptr_type_node);
      name = make_ssa_name (var);

      /* Create bitmaps representing partitions.  They will be used for
         points-to sets later, so use GGC alloc.  */
      part = BITMAP_GGC_ALLOC ();
      for (j = i; j != EOC; j = stack_vars[j].next)
        {
          tree decl = stack_vars[j].decl;
          unsigned int uid = DECL_PT_UID (decl);
          bitmap_set_bit (part, uid);
          decls_to_partitions->put (uid, part);
          cfun->gimple_df->decls_to_pointers->put (decl, name);
          if (TREE_ADDRESSABLE (decl))
            TREE_ADDRESSABLE (name) = 1;
        }

      /* Make the SSA name point to all partition members.  */
      pi = get_ptr_info (name);
      pt_solution_set (&pi->pt, part, false);
    }

  /* Make all points-to sets that contain one member of a partition
     contain all members of the partition.  */
  if (decls_to_partitions)
    {
      unsigned i;
      tree name;
      hash_set<bitmap> visited;
      bitmap temp = BITMAP_ALLOC (&stack_var_bitmap_obstack);

      FOR_EACH_SSA_NAME (i, name, cfun)
        {
          struct ptr_info_def *pi;

          if (POINTER_TYPE_P (TREE_TYPE (name))
              && ((pi = SSA_NAME_PTR_INFO (name)) != NULL))
            add_partitioned_vars_to_ptset (&pi->pt, decls_to_partitions,
                                           &visited, temp);
        }

      add_partitioned_vars_to_ptset (&cfun->gimple_df->escaped,
                                     decls_to_partitions, &visited, temp);

      delete decls_to_partitions;
      BITMAP_FREE (temp);
    }
}

/* A subroutine of partition_stack_vars.  The UNION portion of a UNION/FIND
   partitioning algorithm.  Partitions A and B are known to be non-conflicting.
   Merge them into a single partition A.  */

static void
union_stack_vars (size_t a, size_t b)
{
  struct stack_var *vb = &stack_vars[b];
  bitmap_iterator bi;
  unsigned u;

  gcc_assert (stack_vars[b].next == EOC);
  /* Add B to A's partition.  */
  stack_vars[b].next = stack_vars[a].next;
  stack_vars[b].representative = a;
  stack_vars[a].next = b;

  /* Update the required alignment of partition A to account for B.  */
  if (stack_vars[a].alignb < stack_vars[b].alignb)
    stack_vars[a].alignb = stack_vars[b].alignb;

  /* Update the interference graph and merge the conflicts.  */
  if (vb->conflicts)
    {
      EXECUTE_IF_SET_IN_BITMAP (vb->conflicts, 0, u, bi)
        add_stack_var_conflict (a, stack_vars[u].representative);
      BITMAP_FREE (vb->conflicts);
    }
}

/* A subroutine of expand_used_vars.  Binpack the variables into
   partitions constrained by the interference graph.  The overall
   algorithm used is as follows:

	Sort the objects by size in descending order.
	For each object A {
	  S = size(A)
	  loop {
	    Look for the largest non-conflicting object B with size <= S.
	    UNION (A, B)
	  }
	}
*/
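
/* As a concrete (illustrative) example: given three 16-byte variables A,
   B and C where only A and B are simultaneously live, A is unioned with
   C (they don't conflict) while B keeps its own partition; A and C then
   share one stack slot, using 32 bytes of frame space instead of 48.  */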

static void
partition_stack_vars (void)
{
  size_t si, sj, n = stack_vars_num;

  stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
  for (si = 0; si < n; ++si)
    stack_vars_sorted[si] = si;

  if (n == 1)
    return;

  qsort (stack_vars_sorted, n, sizeof (size_t), stack_var_cmp);

  for (si = 0; si < n; ++si)
    {
      size_t i = stack_vars_sorted[si];
      unsigned int ialign = stack_vars[i].alignb;
      poly_int64 isize = stack_vars[i].size;

      /* Ignore objects that aren't partition representatives.  If we
         see a var that is not a partition representative, it must
         have been merged earlier.  */
      if (stack_vars[i].representative != i)
        continue;

      for (sj = si + 1; sj < n; ++sj)
        {
          size_t j = stack_vars_sorted[sj];
          unsigned int jalign = stack_vars[j].alignb;
          poly_int64 jsize = stack_vars[j].size;

          /* Ignore objects that aren't partition representatives.  */
          if (stack_vars[j].representative != j)
            continue;

          /* Do not mix objects of "small" (supported) alignment
             and "large" (unsupported) alignment.  */
          if ((ialign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
              != (jalign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT))
            break;

          /* For Address Sanitizer do not mix objects with different
             sizes, as the shorter vars wouldn't be adequately protected.
             Don't do that for "large" (unsupported) alignment objects,
             those aren't protected anyway.  */
          if (asan_sanitize_stack_p ()
              && maybe_ne (isize, jsize)
              && ialign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
            break;

          /* Ignore conflicting objects.  */
          if (stack_var_conflict_p (i, j))
            continue;

          /* UNION the objects; J joins I's partition.  */
          union_stack_vars (i, j);
        }
    }

  update_alias_info_with_stack_vars ();
}

/* A debugging aid for expand_used_vars.  Dump the generated partitions.  */

static void
dump_stack_var_partition (void)
{
  size_t si, i, j, n = stack_vars_num;

  for (si = 0; si < n; ++si)
    {
      i = stack_vars_sorted[si];

      /* Skip variables that aren't partition representatives, for now.  */
      if (stack_vars[i].representative != i)
        continue;

      fprintf (dump_file, "Partition %lu: size ", (unsigned long) i);
      print_dec (stack_vars[i].size, dump_file);
      fprintf (dump_file, " align %u\n", stack_vars[i].alignb);

      for (j = i; j != EOC; j = stack_vars[j].next)
        {
          fputc ('\t', dump_file);
          print_generic_expr (dump_file, stack_vars[j].decl, dump_flags);
        }
      fputc ('\n', dump_file);
    }
}

/* Assign rtl to DECL at BASE + OFFSET.  */

static void
expand_one_stack_var_at (tree decl, rtx base, unsigned base_align,
                         poly_int64 offset)
{
  unsigned align;
  rtx x;

  /* If this fails, we've overflowed the stack frame.  Error nicely?  */
  gcc_assert (known_eq (offset, trunc_int_for_mode (offset, Pmode)));

  x = plus_constant (Pmode, base, offset);
  x = gen_rtx_MEM (TREE_CODE (decl) == SSA_NAME
                   ? TYPE_MODE (TREE_TYPE (decl))
                   : DECL_MODE (SSAVAR (decl)), x);

  if (TREE_CODE (decl) != SSA_NAME)
    {
      /* Set alignment we actually gave this decl if it isn't an SSA name.
         If it is we generate stack slots only accidentally so it isn't as
         important, we'll simply use the alignment that is already set.  */
      if (base == virtual_stack_vars_rtx)
        offset -= frame_phase;
      align = known_alignment (offset);
      align *= BITS_PER_UNIT;
      if (align == 0 || align > base_align)
        align = base_align;

      /* One would think that we could assert that we're not decreasing
         alignment here, but (at least) the i386 port does exactly this
         via the MINIMUM_ALIGNMENT hook.  */

      SET_DECL_ALIGN (decl, align);
      DECL_USER_ALIGN (decl) = 0;
    }

  set_rtl (decl, x);
}

struct stack_vars_data
{
  /* Vector of offset pairs, always the end of some padding followed
     by the start of the padding that needs Address Sanitizer protection.
     The vector is in reverse order: highest-offset pairs come first.  */
  auto_vec<HOST_WIDE_INT> asan_vec;

  /* Vector of partition representative decls in between the paddings.  */
  auto_vec<tree> asan_decl_vec;

  /* Base pseudo register for Address Sanitizer protected automatic vars.  */
  rtx asan_base;

  /* Alignment needed for the Address Sanitizer protected automatic vars.  */
  unsigned int asan_alignb;
};

/* A subroutine of expand_used_vars.  Give each partition representative
   a unique location within the stack frame.  Update each partition member
   with that location.  */

static void
expand_stack_vars (bool (*pred) (size_t), struct stack_vars_data *data)
{
  size_t si, i, j, n = stack_vars_num;
  poly_uint64 large_size = 0, large_alloc = 0;
  rtx large_base = NULL;
  unsigned large_align = 0;
  bool large_allocation_done = false;
  tree decl;

  /* Determine if there are any variables requiring "large" alignment.
     Since these are dynamically allocated, we only process these if
     no predicate is involved.  */
  large_align = stack_vars[stack_vars_sorted[0]].alignb * BITS_PER_UNIT;
  if (pred == NULL && large_align > MAX_SUPPORTED_STACK_ALIGNMENT)
    {
      /* Find the total size of these variables.  */
      for (si = 0; si < n; ++si)
        {
          unsigned alignb;

          i = stack_vars_sorted[si];
          alignb = stack_vars[i].alignb;

          /* All "large" alignment decls come before all "small" alignment
             decls, but "large" alignment decls are not sorted based on
             their alignment.  Increase large_align to track the largest
             required alignment.  */
          if ((alignb * BITS_PER_UNIT) > large_align)
            large_align = alignb * BITS_PER_UNIT;

          /* Stop when we get to the first decl with "small" alignment.  */
          if (alignb * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
            break;

          /* Skip variables that aren't partition representatives.  */
          if (stack_vars[i].representative != i)
            continue;

          /* Skip variables that have already had rtl assigned.  See also
             add_stack_var where we perpetrate this pc_rtx hack.  */
          decl = stack_vars[i].decl;
          if (TREE_CODE (decl) == SSA_NAME
              ? SA.partition_to_pseudo[var_to_partition (SA.map, decl)] != NULL_RTX
              : DECL_RTL (decl) != pc_rtx)
            continue;

          large_size = aligned_upper_bound (large_size, alignb);
          large_size += stack_vars[i].size;
        }
    }

  for (si = 0; si < n; ++si)
    {
      rtx base;
      unsigned base_align, alignb;
      poly_int64 offset;

      i = stack_vars_sorted[si];

      /* Skip variables that aren't partition representatives, for now.  */
      if (stack_vars[i].representative != i)
        continue;

      /* Skip variables that have already had rtl assigned.  See also
         add_stack_var where we perpetrate this pc_rtx hack.  */
      decl = stack_vars[i].decl;
      if (TREE_CODE (decl) == SSA_NAME
          ? SA.partition_to_pseudo[var_to_partition (SA.map, decl)] != NULL_RTX
          : DECL_RTL (decl) != pc_rtx)
        continue;

      /* Check the predicate to see whether this variable should be
         allocated in this pass.  */
      if (pred && !pred (i))
        continue;

      alignb = stack_vars[i].alignb;
      if (alignb * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
        {
          base = virtual_stack_vars_rtx;
          /* ASAN description strings don't yet have a syntax for expressing
             polynomial offsets.  */
          HOST_WIDE_INT prev_offset;
          if (asan_sanitize_stack_p ()
              && pred
              && frame_offset.is_constant (&prev_offset)
              && stack_vars[i].size.is_constant ())
            {
              prev_offset = align_base (prev_offset,
                                        MAX (alignb, ASAN_RED_ZONE_SIZE),
                                        !FRAME_GROWS_DOWNWARD);
              tree repr_decl = NULL_TREE;
              offset
                = alloc_stack_frame_space (stack_vars[i].size
                                           + ASAN_RED_ZONE_SIZE,
                                           MAX (alignb, ASAN_RED_ZONE_SIZE));

              data->asan_vec.safe_push (prev_offset);
              /* Allocating a constant amount of space from a constant
                 starting offset must give a constant result.  */
              data->asan_vec.safe_push ((offset + stack_vars[i].size)
                                        .to_constant ());
              /* Find best representative of the partition.
                 Prefer those with DECL_NAME, even better
                 satisfying asan_protect_stack_decl predicate.  */
              for (j = i; j != EOC; j = stack_vars[j].next)
                if (asan_protect_stack_decl (stack_vars[j].decl)
                    && DECL_NAME (stack_vars[j].decl))
                  {
                    repr_decl = stack_vars[j].decl;
                    break;
                  }
                else if (repr_decl == NULL_TREE
                         && DECL_P (stack_vars[j].decl)
                         && DECL_NAME (stack_vars[j].decl))
                  repr_decl = stack_vars[j].decl;
              if (repr_decl == NULL_TREE)
                repr_decl = stack_vars[i].decl;
              data->asan_decl_vec.safe_push (repr_decl);

              /* Make sure a representative is unpoisoned if another
                 variable in the partition is handled by
                 use-after-scope sanitization.  */
              if (asan_handled_variables != NULL
                  && !asan_handled_variables->contains (repr_decl))
                {
                  for (j = i; j != EOC; j = stack_vars[j].next)
                    if (asan_handled_variables->contains (stack_vars[j].decl))
                      break;
                  if (j != EOC)
                    asan_handled_variables->add (repr_decl);
                }

              data->asan_alignb = MAX (data->asan_alignb, alignb);
              if (data->asan_base == NULL)
                data->asan_base = gen_reg_rtx (Pmode);
              base = data->asan_base;

              if (!STRICT_ALIGNMENT)
                base_align = crtl->max_used_stack_slot_alignment;
              else
                base_align = MAX (crtl->max_used_stack_slot_alignment,
                                  GET_MODE_ALIGNMENT (SImode)
                                  << ASAN_SHADOW_SHIFT);
            }
          else
            {
              offset = alloc_stack_frame_space (stack_vars[i].size, alignb);
              base_align = crtl->max_used_stack_slot_alignment;
            }
        }
      else
        {
          /* Large alignment is only processed in the last pass.  */
          if (pred)
            continue;

          /* If there were any variables requiring "large" alignment, allocate
             space.  */
          if (maybe_ne (large_size, 0U) && ! large_allocation_done)
            {
              poly_int64 loffset;
              rtx large_allocsize;

              large_allocsize = gen_int_mode (large_size, Pmode);
              get_dynamic_stack_size (&large_allocsize, 0, large_align, NULL);
              loffset = alloc_stack_frame_space
                (rtx_to_poly_int64 (large_allocsize),
                 PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT);
              large_base = get_dynamic_stack_base (loffset, large_align);
              large_allocation_done = true;
            }
          gcc_assert (large_base != NULL);

          large_alloc = aligned_upper_bound (large_alloc, alignb);
          offset = large_alloc;
          large_alloc += stack_vars[i].size;

          base = large_base;
          base_align = large_align;
        }

      /* Create rtl for each variable based on their location within the
         partition.  */
      for (j = i; j != EOC; j = stack_vars[j].next)
        {
          expand_one_stack_var_at (stack_vars[j].decl,
                                   base, base_align,
                                   offset);
        }
    }

  gcc_assert (known_eq (large_alloc, large_size));
}

/* Take into account all sizes of partitions and reset DECL_RTLs.  */
static poly_uint64
account_stack_vars (void)
{
  size_t si, j, i, n = stack_vars_num;
  poly_uint64 size = 0;

  for (si = 0; si < n; ++si)
    {
      i = stack_vars_sorted[si];

      /* Skip variables that aren't partition representatives, for now.  */
      if (stack_vars[i].representative != i)
        continue;

      size += stack_vars[i].size;
      for (j = i; j != EOC; j = stack_vars[j].next)
        set_rtl (stack_vars[j].decl, NULL);
    }
  return size;
}

/* Record the RTL assignment X for the default def of PARM.  */

extern void
set_parm_rtl (tree parm, rtx x)
{
  gcc_assert (TREE_CODE (parm) == PARM_DECL
              || TREE_CODE (parm) == RESULT_DECL);

  if (x && !MEM_P (x))
    {
      unsigned int align = MINIMUM_ALIGNMENT (TREE_TYPE (parm),
                                              TYPE_MODE (TREE_TYPE (parm)),
                                              TYPE_ALIGN (TREE_TYPE (parm)));

      /* If the variable alignment is very large we'll dynamically
         allocate it, which means that the in-frame portion is just a
         pointer.  ??? We've got a pseudo for sure here, do we
         actually dynamically allocate its spilling area if needed?
         ??? Isn't it a problem when Pmode alignment also exceeds
         MAX_SUPPORTED_STACK_ALIGNMENT, as can happen on cris and lm32?  */
127838fd1498Szrj if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
1279*58e805e6Szrj align = GET_MODE_ALIGNMENT (Pmode);
128038fd1498Szrj
128138fd1498Szrj record_alignment_for_reg_var (align);
128238fd1498Szrj }
128338fd1498Szrj
128438fd1498Szrj tree ssa = ssa_default_def (cfun, parm);
128538fd1498Szrj if (!ssa)
128638fd1498Szrj return set_rtl (parm, x);
128738fd1498Szrj
128838fd1498Szrj int part = var_to_partition (SA.map, ssa);
128938fd1498Szrj gcc_assert (part != NO_PARTITION);
129038fd1498Szrj
129138fd1498Szrj bool changed = bitmap_bit_p (SA.partitions_for_parm_default_defs, part);
129238fd1498Szrj gcc_assert (changed);
129338fd1498Szrj
129438fd1498Szrj set_rtl (ssa, x);
129538fd1498Szrj gcc_assert (DECL_RTL (parm) == x);
129638fd1498Szrj }
129738fd1498Szrj
129838fd1498Szrj /* A subroutine of expand_one_var. Called to immediately assign rtl
129938fd1498Szrj to a variable to be allocated in the stack frame. */
130038fd1498Szrj
130138fd1498Szrj static void
130238fd1498Szrj expand_one_stack_var_1 (tree var)
130338fd1498Szrj {
130438fd1498Szrj poly_uint64 size;
130538fd1498Szrj poly_int64 offset;
130638fd1498Szrj unsigned byte_align;
130738fd1498Szrj
130838fd1498Szrj if (TREE_CODE (var) == SSA_NAME)
130938fd1498Szrj {
131038fd1498Szrj tree type = TREE_TYPE (var);
131138fd1498Szrj size = tree_to_poly_uint64 (TYPE_SIZE_UNIT (type));
131238fd1498Szrj byte_align = TYPE_ALIGN_UNIT (type);
131338fd1498Szrj }
131438fd1498Szrj else
131538fd1498Szrj {
131638fd1498Szrj size = tree_to_poly_uint64 (DECL_SIZE_UNIT (var));
131738fd1498Szrj byte_align = align_local_variable (var);
131838fd1498Szrj }
131938fd1498Szrj
132038fd1498Szrj /* We handle highly aligned variables in expand_stack_vars. */
132138fd1498Szrj gcc_assert (byte_align * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT);
132238fd1498Szrj
132338fd1498Szrj offset = alloc_stack_frame_space (size, byte_align);
132438fd1498Szrj
132538fd1498Szrj expand_one_stack_var_at (var, virtual_stack_vars_rtx,
132638fd1498Szrj crtl->max_used_stack_slot_alignment, offset);
132738fd1498Szrj }
132838fd1498Szrj
132938fd1498Szrj /* Wrapper for expand_one_stack_var_1 that checks that SSA_NAMEs have
133038fd1498Szrj already been assigned some MEM. */
133138fd1498Szrj
133238fd1498Szrj static void
133338fd1498Szrj expand_one_stack_var (tree var)
133438fd1498Szrj {
133538fd1498Szrj if (TREE_CODE (var) == SSA_NAME)
133638fd1498Szrj {
133738fd1498Szrj int part = var_to_partition (SA.map, var);
133838fd1498Szrj if (part != NO_PARTITION)
133938fd1498Szrj {
134038fd1498Szrj rtx x = SA.partition_to_pseudo[part];
134138fd1498Szrj gcc_assert (x);
134238fd1498Szrj gcc_assert (MEM_P (x));
134338fd1498Szrj return;
134438fd1498Szrj }
134538fd1498Szrj }
134638fd1498Szrj
134738fd1498Szrj return expand_one_stack_var_1 (var);
134838fd1498Szrj }
134938fd1498Szrj
135038fd1498Szrj /* A subroutine of expand_one_var. Called to assign rtl to a VAR_DECL
135138fd1498Szrj that will reside in a hard register. */
135238fd1498Szrj
135338fd1498Szrj static void
135438fd1498Szrj expand_one_hard_reg_var (tree var)
135538fd1498Szrj {
135638fd1498Szrj rest_of_decl_compilation (var, 0, 0);
135738fd1498Szrj }
135838fd1498Szrj
135938fd1498Szrj /* Record the alignment requirements of some variable assigned to a
136038fd1498Szrj pseudo. */
136138fd1498Szrj
136238fd1498Szrj static void
136338fd1498Szrj record_alignment_for_reg_var (unsigned int align)
136438fd1498Szrj {
136538fd1498Szrj if (SUPPORTS_STACK_ALIGNMENT
136638fd1498Szrj && crtl->stack_alignment_estimated < align)
136738fd1498Szrj {
136838fd1498Szrj /* stack_alignment_estimated shouldn't change after the stack
136938fd1498Szrj realign decision has been made. */
137038fd1498Szrj gcc_assert (!crtl->stack_realign_processed);
137138fd1498Szrj crtl->stack_alignment_estimated = align;
137238fd1498Szrj }
137338fd1498Szrj
137438fd1498Szrj /* stack_alignment_needed > PREFERRED_STACK_BOUNDARY is permitted.
137538fd1498Szrj So here we only make sure stack_alignment_needed >= align. */
137638fd1498Szrj if (crtl->stack_alignment_needed < align)
137738fd1498Szrj crtl->stack_alignment_needed = align;
137838fd1498Szrj if (crtl->max_used_stack_slot_alignment < align)
137938fd1498Szrj crtl->max_used_stack_slot_alignment = align;
138038fd1498Szrj }
138138fd1498Szrj
138238fd1498Szrj /* Create RTL for an SSA partition. */
138338fd1498Szrj
138438fd1498Szrj static void
138538fd1498Szrj expand_one_ssa_partition (tree var)
138638fd1498Szrj {
138738fd1498Szrj int part = var_to_partition (SA.map, var);
138838fd1498Szrj gcc_assert (part != NO_PARTITION);
138938fd1498Szrj
139038fd1498Szrj if (SA.partition_to_pseudo[part])
139138fd1498Szrj return;
139238fd1498Szrj
139338fd1498Szrj unsigned int align = MINIMUM_ALIGNMENT (TREE_TYPE (var),
139438fd1498Szrj TYPE_MODE (TREE_TYPE (var)),
139538fd1498Szrj TYPE_ALIGN (TREE_TYPE (var)));
139638fd1498Szrj
139738fd1498Szrj /* If the variable alignment is very large we'll dynamically allocate
139838fd1498Szrj it, which means that the in-frame portion is just a pointer. */
139938fd1498Szrj if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
1400*58e805e6Szrj align = GET_MODE_ALIGNMENT (Pmode);
140138fd1498Szrj
140238fd1498Szrj record_alignment_for_reg_var (align);
140338fd1498Szrj
140438fd1498Szrj if (!use_register_for_decl (var))
140538fd1498Szrj {
140638fd1498Szrj if (defer_stack_allocation (var, true))
140738fd1498Szrj add_stack_var (var);
140838fd1498Szrj else
140938fd1498Szrj expand_one_stack_var_1 (var);
141038fd1498Szrj return;
141138fd1498Szrj }
141238fd1498Szrj
141338fd1498Szrj machine_mode reg_mode = promote_ssa_mode (var, NULL);
141438fd1498Szrj rtx x = gen_reg_rtx (reg_mode);
141538fd1498Szrj
141638fd1498Szrj set_rtl (var, x);
141738fd1498Szrj
141838fd1498Szrj /* For a promoted variable, X will not be used directly but wrapped in a
141938fd1498Szrj SUBREG with SUBREG_PROMOTED_VAR_P set, which means that the RTL land
142038fd1498Szrj will assume that its upper bits can be inferred from its lower bits.
142138fd1498Szrj Therefore, if X isn't initialized on every path from the entry, then
142238fd1498Szrj we must do it manually in order to fulfill the above assumption. */
142338fd1498Szrj if (reg_mode != TYPE_MODE (TREE_TYPE (var))
142438fd1498Szrj && bitmap_bit_p (SA.partitions_for_undefined_values, part))
142538fd1498Szrj emit_move_insn (x, CONST0_RTX (reg_mode));
142638fd1498Szrj }
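/* Example (illustrative): on a target where promote_ssa_mode widens a
   QImode SSA name to SImode, an SSA name that is not assigned on every
   path from the entry receives the explicit move of zero above, so the
   extension assumption encoded by SUBREG_PROMOTED_VAR_P stays valid even
   on paths that never wrote to it.  */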
142738fd1498Szrj
142838fd1498Szrj /* Record the association between the RTL generated for partition PART
142938fd1498Szrj and the underlying variable of the SSA_NAME VAR. */
143038fd1498Szrj
143138fd1498Szrj static void
143238fd1498Szrj adjust_one_expanded_partition_var (tree var)
143338fd1498Szrj {
143438fd1498Szrj if (!var)
143538fd1498Szrj return;
143638fd1498Szrj
143738fd1498Szrj tree decl = SSA_NAME_VAR (var);
143838fd1498Szrj
143938fd1498Szrj int part = var_to_partition (SA.map, var);
144038fd1498Szrj if (part == NO_PARTITION)
144138fd1498Szrj return;
144238fd1498Szrj
144338fd1498Szrj rtx x = SA.partition_to_pseudo[part];
144438fd1498Szrj
144538fd1498Szrj gcc_assert (x);
144638fd1498Szrj
144738fd1498Szrj set_rtl (var, x);
144838fd1498Szrj
144938fd1498Szrj if (!REG_P (x))
145038fd1498Szrj return;
145138fd1498Szrj
145238fd1498Szrj /* Note if the object is a user variable. */
145338fd1498Szrj if (decl && !DECL_ARTIFICIAL (decl))
145438fd1498Szrj mark_user_reg (x);
145538fd1498Szrj
145638fd1498Szrj if (POINTER_TYPE_P (decl ? TREE_TYPE (decl) : TREE_TYPE (var)))
145738fd1498Szrj mark_reg_pointer (x, get_pointer_alignment (var));
145838fd1498Szrj }
145938fd1498Szrj
146038fd1498Szrj /* A subroutine of expand_one_var. Called to assign rtl to a VAR_DECL
146138fd1498Szrj that will reside in a pseudo register. */
146238fd1498Szrj
146338fd1498Szrj static void
146438fd1498Szrj expand_one_register_var (tree var)
146538fd1498Szrj {
146638fd1498Szrj if (TREE_CODE (var) == SSA_NAME)
146738fd1498Szrj {
146838fd1498Szrj int part = var_to_partition (SA.map, var);
146938fd1498Szrj if (part != NO_PARTITION)
147038fd1498Szrj {
147138fd1498Szrj rtx x = SA.partition_to_pseudo[part];
147238fd1498Szrj gcc_assert (x);
147338fd1498Szrj gcc_assert (REG_P (x));
147438fd1498Szrj return;
147538fd1498Szrj }
147638fd1498Szrj gcc_unreachable ();
147738fd1498Szrj }
147838fd1498Szrj
147938fd1498Szrj tree decl = var;
148038fd1498Szrj tree type = TREE_TYPE (decl);
148138fd1498Szrj machine_mode reg_mode = promote_decl_mode (decl, NULL);
148238fd1498Szrj rtx x = gen_reg_rtx (reg_mode);
148338fd1498Szrj
148438fd1498Szrj set_rtl (var, x);
148538fd1498Szrj
148638fd1498Szrj /* Note if the object is a user variable. */
148738fd1498Szrj if (!DECL_ARTIFICIAL (decl))
148838fd1498Szrj mark_user_reg (x);
148938fd1498Szrj
149038fd1498Szrj if (POINTER_TYPE_P (type))
149138fd1498Szrj mark_reg_pointer (x, get_pointer_alignment (var));
149238fd1498Szrj }
149338fd1498Szrj
149438fd1498Szrj /* A subroutine of expand_one_var. Called to assign rtl to a VAR_DECL that
149538fd1498Szrj has some associated error, e.g. its type is error-mark. We just need
149638fd1498Szrj to pick something that won't crash the rest of the compiler. */
149738fd1498Szrj
149838fd1498Szrj static void
149938fd1498Szrj expand_one_error_var (tree var)
150038fd1498Szrj {
150138fd1498Szrj machine_mode mode = DECL_MODE (var);
150238fd1498Szrj rtx x;
150338fd1498Szrj
150438fd1498Szrj if (mode == BLKmode)
150538fd1498Szrj x = gen_rtx_MEM (BLKmode, const0_rtx);
150638fd1498Szrj else if (mode == VOIDmode)
150738fd1498Szrj x = const0_rtx;
150838fd1498Szrj else
150938fd1498Szrj x = gen_reg_rtx (mode);
151038fd1498Szrj
151138fd1498Szrj SET_DECL_RTL (var, x);
151238fd1498Szrj }
151338fd1498Szrj
151438fd1498Szrj /* A subroutine of expand_one_var. VAR is a variable that will be
151538fd1498Szrj allocated to the local stack frame. Return true if we wish to
151638fd1498Szrj add VAR to STACK_VARS so that it will be coalesced with other
151738fd1498Szrj variables. Return false to allocate VAR immediately.
151838fd1498Szrj
151938fd1498Szrj This function is used to reduce the number of variables considered
152038fd1498Szrj for coalescing, which reduces the size of the quadratic problem. */
152138fd1498Szrj
152238fd1498Szrj static bool
152338fd1498Szrj defer_stack_allocation (tree var, bool toplevel)
152438fd1498Szrj {
152538fd1498Szrj tree size_unit = TREE_CODE (var) == SSA_NAME
152638fd1498Szrj ? TYPE_SIZE_UNIT (TREE_TYPE (var))
152738fd1498Szrj : DECL_SIZE_UNIT (var);
152838fd1498Szrj poly_uint64 size;
152938fd1498Szrj
153038fd1498Szrj /* Whether the variable is small enough for immediate allocation not to be
153138fd1498Szrj a problem with regard to the frame size. */
153238fd1498Szrj bool smallish
153338fd1498Szrj = (poly_int_tree_p (size_unit, &size)
153438fd1498Szrj && (estimated_poly_value (size)
153538fd1498Szrj < PARAM_VALUE (PARAM_MIN_SIZE_FOR_STACK_SHARING)));
153638fd1498Szrj
153738fd1498Szrj /* If stack protection is enabled, *all* stack variables must be deferred,
153838fd1498Szrj so that we can re-order the strings to the top of the frame.
153938fd1498Szrj Similarly for Address Sanitizer. */
154038fd1498Szrj if (flag_stack_protect || asan_sanitize_stack_p ())
154138fd1498Szrj return true;
154238fd1498Szrj
154338fd1498Szrj unsigned int align = TREE_CODE (var) == SSA_NAME
154438fd1498Szrj ? TYPE_ALIGN (TREE_TYPE (var))
154538fd1498Szrj : DECL_ALIGN (var);
154638fd1498Szrj
154738fd1498Szrj /* We handle "large" alignment via dynamic allocation. We want to handle
154838fd1498Szrj this extra complication in only one place, so defer them. */
154938fd1498Szrj if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
155038fd1498Szrj return true;
155138fd1498Szrj
155238fd1498Szrj bool ignored = TREE_CODE (var) == SSA_NAME
155338fd1498Szrj ? !SSAVAR (var) || DECL_IGNORED_P (SSA_NAME_VAR (var))
155438fd1498Szrj : DECL_IGNORED_P (var);
155538fd1498Szrj
155638fd1498Szrj /* When optimization is enabled, DECL_IGNORED_P variables originally scoped
155738fd1498Szrj might be detached from their block and appear at toplevel when we reach
155838fd1498Szrj here. We want to coalesce them with variables from other blocks when
155938fd1498Szrj the immediate contribution to the frame size would be noticeable. */
156038fd1498Szrj if (toplevel && optimize > 0 && ignored && !smallish)
156138fd1498Szrj return true;
156238fd1498Szrj
156338fd1498Szrj /* Variables declared in the outermost scope automatically conflict
156438fd1498Szrj with every other variable. The only reason to want to defer them
156538fd1498Szrj at all is that, after sorting, we can more efficiently pack
156638fd1498Szrj small variables in the stack frame. Continue to defer at -O2. */
156738fd1498Szrj if (toplevel && optimize < 2)
156838fd1498Szrj return false;
156938fd1498Szrj
157038fd1498Szrj /* Without optimization, *most* variables are allocated from the
157138fd1498Szrj stack, which makes the quadratic problem large exactly when we
157238fd1498Szrj want compilation to proceed as quickly as possible. On the
157338fd1498Szrj other hand, we don't want the function's stack frame size to
157438fd1498Szrj get completely out of hand. So we avoid adding scalars and
157538fd1498Szrj "small" aggregates to the list at all. */
157638fd1498Szrj if (optimize == 0 && smallish)
157738fd1498Szrj return false;
157838fd1498Szrj
157938fd1498Szrj return true;
158038fd1498Szrj }
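/* Rough picture (illustrative): at -O0 a small scalar such as "int i;"
   is allocated a frame slot immediately, keeping the conflict graph
   small, whereas with -fstack-protector or -fsanitize=address every
   variable is deferred so the whole frame can be reordered around the
   guard and redzones.  */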
158138fd1498Szrj
158238fd1498Szrj /* A subroutine of expand_used_vars. Expand one variable according to
158338fd1498Szrj its flavor. Variables to be placed on the stack are not actually
158438fd1498Szrj expanded yet, merely recorded.
158538fd1498Szrj When REALLY_EXPAND is false, only add stack variables to be allocated.
158638fd1498Szrj Return the stack usage this variable is supposed to take.
158738fd1498Szrj */
158838fd1498Szrj
158938fd1498Szrj static poly_uint64
159038fd1498Szrj expand_one_var (tree var, bool toplevel, bool really_expand)
159138fd1498Szrj {
159238fd1498Szrj unsigned int align = BITS_PER_UNIT;
159338fd1498Szrj tree origvar = var;
159438fd1498Szrj
159538fd1498Szrj var = SSAVAR (var);
159638fd1498Szrj
159738fd1498Szrj if (TREE_TYPE (var) != error_mark_node && VAR_P (var))
159838fd1498Szrj {
159938fd1498Szrj if (is_global_var (var))
160038fd1498Szrj return 0;
160138fd1498Szrj
160238fd1498Szrj /* Because we don't know if VAR will be in a register or on the stack,
160338fd1498Szrj we conservatively assume it will be on the stack even if VAR is
160438fd1498Szrj eventually put into a register after the RA pass. For non-automatic
160538fd1498Szrj variables, which won't be on the stack, we collect the alignment of
160638fd1498Szrj the type and ignore any user-specified alignment. Similarly for
160738fd1498Szrj SSA_NAMEs for which use_register_for_decl returns true. */
160838fd1498Szrj if (TREE_STATIC (var)
160938fd1498Szrj || DECL_EXTERNAL (var)
161038fd1498Szrj || (TREE_CODE (origvar) == SSA_NAME && use_register_for_decl (var)))
161138fd1498Szrj align = MINIMUM_ALIGNMENT (TREE_TYPE (var),
161238fd1498Szrj TYPE_MODE (TREE_TYPE (var)),
161338fd1498Szrj TYPE_ALIGN (TREE_TYPE (var)));
161438fd1498Szrj else if (DECL_HAS_VALUE_EXPR_P (var)
161538fd1498Szrj || (DECL_RTL_SET_P (var) && MEM_P (DECL_RTL (var))))
161638fd1498Szrj /* Don't consider debug-only variables with DECL_HAS_VALUE_EXPR_P set
161738fd1498Szrj or variables which were already assigned a stack slot by
161838fd1498Szrj expand_one_stack_var_at - in the latter case DECL_ALIGN has been
161938fd1498Szrj adjusted to match the offset chosen for it. */
162038fd1498Szrj align = crtl->stack_alignment_estimated;
162138fd1498Szrj else
162238fd1498Szrj align = MINIMUM_ALIGNMENT (var, DECL_MODE (var), DECL_ALIGN (var));
162338fd1498Szrj
162438fd1498Szrj /* If the variable alignment is very large we'll dynamically allocate
162538fd1498Szrj it, which means that the in-frame portion is just a pointer. */
162638fd1498Szrj if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
1627*58e805e6Szrj align = GET_MODE_ALIGNMENT (Pmode);
162838fd1498Szrj }
162938fd1498Szrj
163038fd1498Szrj record_alignment_for_reg_var (align);
163138fd1498Szrj
163238fd1498Szrj poly_uint64 size;
163338fd1498Szrj if (TREE_CODE (origvar) == SSA_NAME)
163438fd1498Szrj {
163538fd1498Szrj gcc_assert (!VAR_P (var)
163638fd1498Szrj || (!DECL_EXTERNAL (var)
163738fd1498Szrj && !DECL_HAS_VALUE_EXPR_P (var)
163838fd1498Szrj && !TREE_STATIC (var)
163938fd1498Szrj && TREE_TYPE (var) != error_mark_node
164038fd1498Szrj && !DECL_HARD_REGISTER (var)
164138fd1498Szrj && really_expand));
164238fd1498Szrj }
164338fd1498Szrj if (!VAR_P (var) && TREE_CODE (origvar) != SSA_NAME)
164438fd1498Szrj ;
164538fd1498Szrj else if (DECL_EXTERNAL (var))
164638fd1498Szrj ;
164738fd1498Szrj else if (DECL_HAS_VALUE_EXPR_P (var))
164838fd1498Szrj ;
164938fd1498Szrj else if (TREE_STATIC (var))
165038fd1498Szrj ;
165138fd1498Szrj else if (TREE_CODE (origvar) != SSA_NAME && DECL_RTL_SET_P (var))
165238fd1498Szrj ;
165338fd1498Szrj else if (TREE_TYPE (var) == error_mark_node)
165438fd1498Szrj {
165538fd1498Szrj if (really_expand)
165638fd1498Szrj expand_one_error_var (var);
165738fd1498Szrj }
165838fd1498Szrj else if (VAR_P (var) && DECL_HARD_REGISTER (var))
165938fd1498Szrj {
166038fd1498Szrj if (really_expand)
166138fd1498Szrj {
166238fd1498Szrj expand_one_hard_reg_var (var);
166338fd1498Szrj if (!DECL_HARD_REGISTER (var))
166438fd1498Szrj /* Invalid register specification. */
166538fd1498Szrj expand_one_error_var (var);
166638fd1498Szrj }
166738fd1498Szrj }
166838fd1498Szrj else if (use_register_for_decl (var))
166938fd1498Szrj {
167038fd1498Szrj if (really_expand)
167138fd1498Szrj expand_one_register_var (origvar);
167238fd1498Szrj }
167338fd1498Szrj else if (!poly_int_tree_p (DECL_SIZE_UNIT (var), &size)
167438fd1498Szrj || !valid_constant_size_p (DECL_SIZE_UNIT (var)))
167538fd1498Szrj {
167638fd1498Szrj /* Reject variables which cover more than half of the address-space. */
167738fd1498Szrj if (really_expand)
167838fd1498Szrj {
167938fd1498Szrj error ("size of variable %q+D is too large", var);
168038fd1498Szrj expand_one_error_var (var);
168138fd1498Szrj }
168238fd1498Szrj }
168338fd1498Szrj else if (defer_stack_allocation (var, toplevel))
168438fd1498Szrj add_stack_var (origvar);
168538fd1498Szrj else
168638fd1498Szrj {
168738fd1498Szrj if (really_expand)
168838fd1498Szrj {
168938fd1498Szrj if (lookup_attribute ("naked",
169038fd1498Szrj DECL_ATTRIBUTES (current_function_decl)))
169138fd1498Szrj error ("cannot allocate stack for variable %q+D, naked function.",
169238fd1498Szrj var);
169338fd1498Szrj
169438fd1498Szrj expand_one_stack_var (origvar);
169538fd1498Szrj }
169638fd1498Szrj return size;
169738fd1498Szrj }
169838fd1498Szrj return 0;
169938fd1498Szrj }
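/* Dispatch summary (illustrative): "static int s;" and "extern int e;"
   need no frame space here; a variable with DECL_HARD_REGISTER goes
   through expand_one_hard_reg_var; a scalar for which
   use_register_for_decl returns true gets a pseudo; anything else is
   either deferred into the stack-var pool or given a frame slot at
   once.  */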
170038fd1498Szrj
170138fd1498Szrj /* A subroutine of expand_used_vars. Walk down through the BLOCK tree
170238fd1498Szrj expanding variables. Those variables that can be put into registers
170338fd1498Szrj are allocated pseudos; those that can't are put on the stack.
170438fd1498Szrj
170538fd1498Szrj TOPLEVEL is true if this is the outermost BLOCK. */
170638fd1498Szrj
170738fd1498Szrj static void
170838fd1498Szrj expand_used_vars_for_block (tree block, bool toplevel)
170938fd1498Szrj {
171038fd1498Szrj tree t;
171138fd1498Szrj
171238fd1498Szrj /* Expand all variables at this level. */
171338fd1498Szrj for (t = BLOCK_VARS (block); t ; t = DECL_CHAIN (t))
171438fd1498Szrj if (TREE_USED (t)
171538fd1498Szrj && ((!VAR_P (t) && TREE_CODE (t) != RESULT_DECL)
171638fd1498Szrj || !DECL_NONSHAREABLE (t)))
171738fd1498Szrj expand_one_var (t, toplevel, true);
171838fd1498Szrj
171938fd1498Szrj /* Expand all variables at containing levels. */
172038fd1498Szrj for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
172138fd1498Szrj expand_used_vars_for_block (t, false);
172238fd1498Szrj }
172338fd1498Szrj
172438fd1498Szrj /* A subroutine of expand_used_vars. Walk down through the BLOCK tree
172538fd1498Szrj and clear TREE_USED on all local variables. */
172638fd1498Szrj
172738fd1498Szrj static void
172838fd1498Szrj clear_tree_used (tree block)
172938fd1498Szrj {
173038fd1498Szrj tree t;
173138fd1498Szrj
173238fd1498Szrj for (t = BLOCK_VARS (block); t ; t = DECL_CHAIN (t))
173338fd1498Szrj /* if (!TREE_STATIC (t) && !DECL_EXTERNAL (t)) */
173438fd1498Szrj if ((!VAR_P (t) && TREE_CODE (t) != RESULT_DECL)
173538fd1498Szrj || !DECL_NONSHAREABLE (t))
173638fd1498Szrj TREE_USED (t) = 0;
173738fd1498Szrj
173838fd1498Szrj for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
173938fd1498Szrj clear_tree_used (t);
174038fd1498Szrj }
174138fd1498Szrj
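/* Values of flag_stack_protect; these correspond to the -fstack-protector,
   -fstack-protector-all, -fstack-protector-strong and
   -fstack-protector-explicit options, respectively.  */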
174238fd1498Szrj enum {
174338fd1498Szrj SPCT_FLAG_DEFAULT = 1,
174438fd1498Szrj SPCT_FLAG_ALL = 2,
174538fd1498Szrj SPCT_FLAG_STRONG = 3,
174638fd1498Szrj SPCT_FLAG_EXPLICIT = 4
174738fd1498Szrj };
174838fd1498Szrj
174938fd1498Szrj /* Examine TYPE and determine a bit mask of the following features. */
175038fd1498Szrj
175138fd1498Szrj #define SPCT_HAS_LARGE_CHAR_ARRAY 1
175238fd1498Szrj #define SPCT_HAS_SMALL_CHAR_ARRAY 2
175338fd1498Szrj #define SPCT_HAS_ARRAY 4
175438fd1498Szrj #define SPCT_HAS_AGGREGATE 8
175538fd1498Szrj
175638fd1498Szrj static unsigned int
175738fd1498Szrj stack_protect_classify_type (tree type)
175838fd1498Szrj {
175938fd1498Szrj unsigned int ret = 0;
176038fd1498Szrj tree t;
176138fd1498Szrj
176238fd1498Szrj switch (TREE_CODE (type))
176338fd1498Szrj {
176438fd1498Szrj case ARRAY_TYPE:
176538fd1498Szrj t = TYPE_MAIN_VARIANT (TREE_TYPE (type));
176638fd1498Szrj if (t == char_type_node
176738fd1498Szrj || t == signed_char_type_node
176838fd1498Szrj || t == unsigned_char_type_node)
176938fd1498Szrj {
177038fd1498Szrj unsigned HOST_WIDE_INT max = PARAM_VALUE (PARAM_SSP_BUFFER_SIZE);
177138fd1498Szrj unsigned HOST_WIDE_INT len;
177238fd1498Szrj
177338fd1498Szrj if (!TYPE_SIZE_UNIT (type)
177438fd1498Szrj || !tree_fits_uhwi_p (TYPE_SIZE_UNIT (type)))
177538fd1498Szrj len = max;
177638fd1498Szrj else
177738fd1498Szrj len = tree_to_uhwi (TYPE_SIZE_UNIT (type));
177838fd1498Szrj
177938fd1498Szrj if (len < max)
178038fd1498Szrj ret = SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_ARRAY;
178138fd1498Szrj else
178238fd1498Szrj ret = SPCT_HAS_LARGE_CHAR_ARRAY | SPCT_HAS_ARRAY;
178338fd1498Szrj }
178438fd1498Szrj else
178538fd1498Szrj ret = SPCT_HAS_ARRAY;
178638fd1498Szrj break;
178738fd1498Szrj
178838fd1498Szrj case UNION_TYPE:
178938fd1498Szrj case QUAL_UNION_TYPE:
179038fd1498Szrj case RECORD_TYPE:
179138fd1498Szrj ret = SPCT_HAS_AGGREGATE;
179238fd1498Szrj for (t = TYPE_FIELDS (type); t ; t = TREE_CHAIN (t))
179338fd1498Szrj if (TREE_CODE (t) == FIELD_DECL)
179438fd1498Szrj ret |= stack_protect_classify_type (TREE_TYPE (t));
179538fd1498Szrj break;
179638fd1498Szrj
179738fd1498Szrj default:
179838fd1498Szrj break;
179938fd1498Szrj }
180038fd1498Szrj
180138fd1498Szrj return ret;
180238fd1498Szrj }
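/* Classification examples (illustrative, assuming the default
   --param ssp-buffer-size=8):
     char small[4];  -> SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_ARRAY
     char big[64];   -> SPCT_HAS_LARGE_CHAR_ARRAY | SPCT_HAS_ARRAY
     struct { int n; char buf[64]; }
                     -> SPCT_HAS_AGGREGATE | SPCT_HAS_LARGE_CHAR_ARRAY
                        | SPCT_HAS_ARRAY  */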
180338fd1498Szrj
180438fd1498Szrj /* Return nonzero if DECL should be segregated into the "vulnerable" upper
180538fd1498Szrj part of the local stack frame. Remember if we ever return nonzero for
180638fd1498Szrj any variable in this function. The return value is the phase number in
180738fd1498Szrj which the variable should be allocated. */
180838fd1498Szrj
180938fd1498Szrj static int
181038fd1498Szrj stack_protect_decl_phase (tree decl)
181138fd1498Szrj {
181238fd1498Szrj unsigned int bits = stack_protect_classify_type (TREE_TYPE (decl));
181338fd1498Szrj int ret = 0;
181438fd1498Szrj
181538fd1498Szrj if (bits & SPCT_HAS_SMALL_CHAR_ARRAY)
181638fd1498Szrj has_short_buffer = true;
181738fd1498Szrj
181838fd1498Szrj if (flag_stack_protect == SPCT_FLAG_ALL
181938fd1498Szrj || flag_stack_protect == SPCT_FLAG_STRONG
182038fd1498Szrj || (flag_stack_protect == SPCT_FLAG_EXPLICIT
182138fd1498Szrj && lookup_attribute ("stack_protect",
182238fd1498Szrj DECL_ATTRIBUTES (current_function_decl))))
182338fd1498Szrj {
182438fd1498Szrj if ((bits & (SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_LARGE_CHAR_ARRAY))
182538fd1498Szrj && !(bits & SPCT_HAS_AGGREGATE))
182638fd1498Szrj ret = 1;
182738fd1498Szrj else if (bits & SPCT_HAS_ARRAY)
182838fd1498Szrj ret = 2;
182938fd1498Szrj }
183038fd1498Szrj else
183138fd1498Szrj ret = (bits & SPCT_HAS_LARGE_CHAR_ARRAY) != 0;
183238fd1498Szrj
183338fd1498Szrj if (ret)
183438fd1498Szrj has_protected_decls = true;
183538fd1498Szrj
183638fd1498Szrj return ret;
183738fd1498Szrj }
183838fd1498Szrj
183938fd1498Szrj /* Two helper routines that check for phase 1 and phase 2. These are used
184038fd1498Szrj as callbacks for expand_stack_vars. */
184138fd1498Szrj
184238fd1498Szrj static bool
184338fd1498Szrj stack_protect_decl_phase_1 (size_t i)
184438fd1498Szrj {
184538fd1498Szrj return stack_protect_decl_phase (stack_vars[i].decl) == 1;
184638fd1498Szrj }
184738fd1498Szrj
184838fd1498Szrj static bool
184938fd1498Szrj stack_protect_decl_phase_2 (size_t i)
185038fd1498Szrj {
185138fd1498Szrj return stack_protect_decl_phase (stack_vars[i].decl) == 2;
185238fd1498Szrj }
185338fd1498Szrj
185438fd1498Szrj /* A helper function that checks for the asan phase (with the stack
185538fd1498Szrj protector it is phase 3). This is used as a callback for expand_stack_vars.
185638fd1498Szrj Returns true if any of the vars in the partition need to be protected. */
185738fd1498Szrj
185838fd1498Szrj static bool
185938fd1498Szrj asan_decl_phase_3 (size_t i)
186038fd1498Szrj {
186138fd1498Szrj while (i != EOC)
186238fd1498Szrj {
186338fd1498Szrj if (asan_protect_stack_decl (stack_vars[i].decl))
186438fd1498Szrj return true;
186538fd1498Szrj i = stack_vars[i].next;
186638fd1498Szrj }
186738fd1498Szrj return false;
186838fd1498Szrj }
186938fd1498Szrj
187038fd1498Szrj /* Ensure that variables in different stack protection phases conflict
187138fd1498Szrj so that they are not merged and share the same stack slot. */
187238fd1498Szrj
187338fd1498Szrj static void
187438fd1498Szrj add_stack_protection_conflicts (void)
187538fd1498Szrj {
187638fd1498Szrj size_t i, j, n = stack_vars_num;
187738fd1498Szrj unsigned char *phase;
187838fd1498Szrj
187938fd1498Szrj phase = XNEWVEC (unsigned char, n);
188038fd1498Szrj for (i = 0; i < n; ++i)
188138fd1498Szrj phase[i] = stack_protect_decl_phase (stack_vars[i].decl);
188238fd1498Szrj
188338fd1498Szrj for (i = 0; i < n; ++i)
188438fd1498Szrj {
188538fd1498Szrj unsigned char ph_i = phase[i];
188638fd1498Szrj for (j = i + 1; j < n; ++j)
188738fd1498Szrj if (ph_i != phase[j])
188838fd1498Szrj add_stack_var_conflict (i, j);
188938fd1498Szrj }
189038fd1498Szrj
189138fd1498Szrj XDELETEVEC (phase);
189238fd1498Szrj }
189338fd1498Szrj
189438fd1498Szrj /* Create a decl for the guard at the top of the stack frame. */
189538fd1498Szrj
189638fd1498Szrj static void
189738fd1498Szrj create_stack_guard (void)
189838fd1498Szrj {
189938fd1498Szrj tree guard = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
190038fd1498Szrj VAR_DECL, NULL, ptr_type_node);
190138fd1498Szrj TREE_THIS_VOLATILE (guard) = 1;
190238fd1498Szrj TREE_USED (guard) = 1;
190338fd1498Szrj expand_one_stack_var (guard);
190438fd1498Szrj crtl->stack_protect_guard = guard;
190538fd1498Szrj }
190638fd1498Szrj
190738fd1498Szrj /* Prepare for expanding variables. */
190838fd1498Szrj static void
190938fd1498Szrj init_vars_expansion (void)
191038fd1498Szrj {
191138fd1498Szrj /* Conflict bitmaps, and a few related temporary bitmaps, go here. */
191238fd1498Szrj bitmap_obstack_initialize (&stack_var_bitmap_obstack);
191338fd1498Szrj
191438fd1498Szrj /* A map from decl to stack partition. */
191538fd1498Szrj decl_to_stack_part = new hash_map<tree, size_t>;
191638fd1498Szrj
191738fd1498Szrj /* Initialize local stack smashing state. */
191838fd1498Szrj has_protected_decls = false;
191938fd1498Szrj has_short_buffer = false;
192038fd1498Szrj }
192138fd1498Szrj
192238fd1498Szrj /* Free up stack variable graph data. */
192338fd1498Szrj static void
192438fd1498Szrj fini_vars_expansion (void)
192538fd1498Szrj {
192638fd1498Szrj bitmap_obstack_release (&stack_var_bitmap_obstack);
192738fd1498Szrj if (stack_vars)
192838fd1498Szrj XDELETEVEC (stack_vars);
192938fd1498Szrj if (stack_vars_sorted)
193038fd1498Szrj XDELETEVEC (stack_vars_sorted);
193138fd1498Szrj stack_vars = NULL;
193238fd1498Szrj stack_vars_sorted = NULL;
193338fd1498Szrj stack_vars_alloc = stack_vars_num = 0;
193438fd1498Szrj delete decl_to_stack_part;
193538fd1498Szrj decl_to_stack_part = NULL;
193638fd1498Szrj }
193738fd1498Szrj
193838fd1498Szrj /* Make a fair guess for the size of the stack frame of the function
193938fd1498Szrj in NODE. This doesn't have to be exact, the result is only used in
194038fd1498Szrj the inline heuristics. So we don't want to run the full stack var
194138fd1498Szrj packing algorithm (which is quadratic in the number of stack vars).
194238fd1498Szrj Instead, we calculate the total size of all stack vars. This turns
194338fd1498Szrj out to be a pretty fair estimate -- packing of stack vars doesn't
194438fd1498Szrj happen very often. */
194538fd1498Szrj
194638fd1498Szrj HOST_WIDE_INT
194738fd1498Szrj estimated_stack_frame_size (struct cgraph_node *node)
194838fd1498Szrj {
194938fd1498Szrj poly_int64 size = 0;
195038fd1498Szrj size_t i;
195138fd1498Szrj tree var;
195238fd1498Szrj struct function *fn = DECL_STRUCT_FUNCTION (node->decl);
195338fd1498Szrj
195438fd1498Szrj push_cfun (fn);
195538fd1498Szrj
195638fd1498Szrj init_vars_expansion ();
195738fd1498Szrj
195838fd1498Szrj FOR_EACH_LOCAL_DECL (fn, i, var)
195938fd1498Szrj if (auto_var_in_fn_p (var, fn->decl))
196038fd1498Szrj size += expand_one_var (var, true, false);
196138fd1498Szrj
196238fd1498Szrj if (stack_vars_num > 0)
196338fd1498Szrj {
196438fd1498Szrj /* Fake sorting the stack vars for account_stack_vars (). */
196538fd1498Szrj stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
196638fd1498Szrj for (i = 0; i < stack_vars_num; ++i)
196738fd1498Szrj stack_vars_sorted[i] = i;
196838fd1498Szrj size += account_stack_vars ();
196938fd1498Szrj }
197038fd1498Szrj
197138fd1498Szrj fini_vars_expansion ();
197238fd1498Szrj pop_cfun ();
197338fd1498Szrj return estimated_poly_value (size);
197438fd1498Szrj }
197538fd1498Szrj
197638fd1498Szrj /* Helper routine to check if a record or union contains an array field. */
197738fd1498Szrj
197838fd1498Szrj static int
197938fd1498Szrj record_or_union_type_has_array_p (const_tree tree_type)
198038fd1498Szrj {
198138fd1498Szrj tree fields = TYPE_FIELDS (tree_type);
198238fd1498Szrj tree f;
198338fd1498Szrj
198438fd1498Szrj for (f = fields; f; f = DECL_CHAIN (f))
198538fd1498Szrj if (TREE_CODE (f) == FIELD_DECL)
198638fd1498Szrj {
198738fd1498Szrj tree field_type = TREE_TYPE (f);
198838fd1498Szrj if (RECORD_OR_UNION_TYPE_P (field_type)
198938fd1498Szrj && record_or_union_type_has_array_p (field_type))
199038fd1498Szrj return 1;
199138fd1498Szrj if (TREE_CODE (field_type) == ARRAY_TYPE)
199238fd1498Szrj return 1;
199338fd1498Szrj }
199438fd1498Szrj return 0;
199538fd1498Szrj }
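/* E.g. (illustrative) "struct { struct { char c[8]; } inner; }" returns 1
   through the nested record, while "struct { int a, b; }" returns 0.  */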
199638fd1498Szrj
199738fd1498Szrj /* Check if the current function has local referenced variables that
199838fd1498Szrj have their addresses taken, contain an array, or are arrays. */
199938fd1498Szrj
200038fd1498Szrj static bool
200138fd1498Szrj stack_protect_decl_p ()
200238fd1498Szrj {
200338fd1498Szrj unsigned i;
200438fd1498Szrj tree var;
200538fd1498Szrj
200638fd1498Szrj FOR_EACH_LOCAL_DECL (cfun, i, var)
200738fd1498Szrj if (!is_global_var (var))
200838fd1498Szrj {
200938fd1498Szrj tree var_type = TREE_TYPE (var);
201038fd1498Szrj if (VAR_P (var)
201138fd1498Szrj && (TREE_CODE (var_type) == ARRAY_TYPE
201238fd1498Szrj || TREE_ADDRESSABLE (var)
201338fd1498Szrj || (RECORD_OR_UNION_TYPE_P (var_type)
201438fd1498Szrj && record_or_union_type_has_array_p (var_type))))
201538fd1498Szrj return true;
201638fd1498Szrj }
201738fd1498Szrj return false;
201838fd1498Szrj }
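/* E.g. (illustrative) a function declaring "char buf[16];", or one that
   takes the address of a local, makes this return true, which under
   -fstack-protector-strong is enough to request a stack guard.  */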
201938fd1498Szrj
202038fd1498Szrj /* Check if the current function has calls that use a return slot. */
202138fd1498Szrj
202238fd1498Szrj static bool
202338fd1498Szrj stack_protect_return_slot_p ()
202438fd1498Szrj {
202538fd1498Szrj basic_block bb;
202638fd1498Szrj
202738fd1498Szrj FOR_ALL_BB_FN (bb, cfun)
202838fd1498Szrj for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
202938fd1498Szrj !gsi_end_p (gsi); gsi_next (&gsi))
203038fd1498Szrj {
203138fd1498Szrj gimple *stmt = gsi_stmt (gsi);
203238fd1498Szrj /* This assumes that calls to internal-only functions never
203338fd1498Szrj use a return slot. */
203438fd1498Szrj if (is_gimple_call (stmt)
203538fd1498Szrj && !gimple_call_internal_p (stmt)
203638fd1498Szrj && aggregate_value_p (TREE_TYPE (gimple_call_fntype (stmt)),
203738fd1498Szrj gimple_call_fndecl (stmt)))
203838fd1498Szrj return true;
203938fd1498Szrj }
204038fd1498Szrj return false;
204138fd1498Szrj }
204238fd1498Szrj
204338fd1498Szrj /* Expand all variables used in the function. */
204438fd1498Szrj
204538fd1498Szrj static rtx_insn *
204638fd1498Szrj expand_used_vars (void)
204738fd1498Szrj {
204838fd1498Szrj tree var, outer_block = DECL_INITIAL (current_function_decl);
204938fd1498Szrj auto_vec<tree> maybe_local_decls;
205038fd1498Szrj rtx_insn *var_end_seq = NULL;
205138fd1498Szrj unsigned i;
205238fd1498Szrj unsigned len;
205338fd1498Szrj bool gen_stack_protect_signal = false;
205438fd1498Szrj
205538fd1498Szrj /* Compute the phase of the stack frame for this function. */
205638fd1498Szrj {
205738fd1498Szrj int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
205838fd1498Szrj int off = targetm.starting_frame_offset () % align;
205938fd1498Szrj frame_phase = off ? align - off : 0;
206038fd1498Szrj }
206138fd1498Szrj
206238fd1498Szrj /* Set TREE_USED on all variables in the local_decls. */
206338fd1498Szrj FOR_EACH_LOCAL_DECL (cfun, i, var)
206438fd1498Szrj TREE_USED (var) = 1;
206538fd1498Szrj /* Clear TREE_USED on all variables associated with a block scope. */
206638fd1498Szrj clear_tree_used (DECL_INITIAL (current_function_decl));
206738fd1498Szrj
206838fd1498Szrj init_vars_expansion ();
206938fd1498Szrj
207038fd1498Szrj if (targetm.use_pseudo_pic_reg ())
207138fd1498Szrj pic_offset_table_rtx = gen_reg_rtx (Pmode);
207238fd1498Szrj
207338fd1498Szrj for (i = 0; i < SA.map->num_partitions; i++)
207438fd1498Szrj {
207538fd1498Szrj if (bitmap_bit_p (SA.partitions_for_parm_default_defs, i))
207638fd1498Szrj continue;
207738fd1498Szrj
207838fd1498Szrj tree var = partition_to_var (SA.map, i);
207938fd1498Szrj
208038fd1498Szrj gcc_assert (!virtual_operand_p (var));
208138fd1498Szrj
208238fd1498Szrj expand_one_ssa_partition (var);
208338fd1498Szrj }
208438fd1498Szrj
208538fd1498Szrj if (flag_stack_protect == SPCT_FLAG_STRONG)
208638fd1498Szrj gen_stack_protect_signal
208738fd1498Szrj = stack_protect_decl_p () || stack_protect_return_slot_p ();
208838fd1498Szrj
208938fd1498Szrj /* At this point all variables on the local_decls with TREE_USED
209038fd1498Szrj set are not associated with any block scope. Lay them out. */
209138fd1498Szrj
209238fd1498Szrj len = vec_safe_length (cfun->local_decls);
209338fd1498Szrj FOR_EACH_LOCAL_DECL (cfun, i, var)
209438fd1498Szrj {
209538fd1498Szrj bool expand_now = false;
209638fd1498Szrj
209738fd1498Szrj /* Expanded above already. */
209838fd1498Szrj if (is_gimple_reg (var))
209938fd1498Szrj {
210038fd1498Szrj TREE_USED (var) = 0;
210138fd1498Szrj goto next;
210238fd1498Szrj }
210338fd1498Szrj /* We didn't set a block for static or extern because it's hard
210438fd1498Szrj to tell the difference between a global variable (re)declared
210538fd1498Szrj in a local scope, and one that's really declared there to
210638fd1498Szrj begin with. And it doesn't really matter much, since we're
210738fd1498Szrj not giving them stack space. Expand them now. */
210838fd1498Szrj else if (TREE_STATIC (var) || DECL_EXTERNAL (var))
210938fd1498Szrj expand_now = true;
211038fd1498Szrj
211138fd1498Szrj /* Expand variables not associated with any block now. Those created by
211238fd1498Szrj the optimizers could be live anywhere in the function. Those that
211338fd1498Szrj could possibly have been scoped originally and detached from their
211438fd1498Szrj block will have their allocation deferred so we coalesce them with
211538fd1498Szrj others when optimization is enabled. */
211638fd1498Szrj else if (TREE_USED (var))
211738fd1498Szrj expand_now = true;
211838fd1498Szrj
211938fd1498Szrj /* Finally, mark all variables on the list as used. We'll use
212038fd1498Szrj this in a moment when we expand those associated with scopes. */
212138fd1498Szrj TREE_USED (var) = 1;
212238fd1498Szrj
212338fd1498Szrj if (expand_now)
212438fd1498Szrj expand_one_var (var, true, true);
212538fd1498Szrj
212638fd1498Szrj next:
212738fd1498Szrj if (DECL_ARTIFICIAL (var) && !DECL_IGNORED_P (var))
212838fd1498Szrj {
212938fd1498Szrj rtx rtl = DECL_RTL_IF_SET (var);
213038fd1498Szrj
213138fd1498Szrj /* Keep artificial non-ignored vars in cfun->local_decls
213238fd1498Szrj chain until instantiate_decls. */
213338fd1498Szrj if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
213438fd1498Szrj add_local_decl (cfun, var);
213538fd1498Szrj else if (rtl == NULL_RTX)
213638fd1498Szrj /* If rtl isn't set yet, which can happen e.g. with
213738fd1498Szrj -fstack-protector, retry before returning from this
213838fd1498Szrj function. */
213938fd1498Szrj maybe_local_decls.safe_push (var);
214038fd1498Szrj }
214138fd1498Szrj }
214238fd1498Szrj
214338fd1498Szrj /* We duplicated some of the decls in CFUN->LOCAL_DECLS.
214438fd1498Szrj
214538fd1498Szrj +-----------------+-----------------+
214638fd1498Szrj | ...processed... | ...duplicates...|
214738fd1498Szrj +-----------------+-----------------+
214838fd1498Szrj ^
214938fd1498Szrj +-- LEN points here.
215038fd1498Szrj
215138fd1498Szrj We just want the duplicates, as those are the artificial
215238fd1498Szrj non-ignored vars that we want to keep until instantiate_decls.
215338fd1498Szrj Move them down and truncate the array. */
215438fd1498Szrj if (!vec_safe_is_empty (cfun->local_decls))
215538fd1498Szrj cfun->local_decls->block_remove (0, len);
215638fd1498Szrj
215738fd1498Szrj /* At this point, all variables within the block tree with TREE_USED
215838fd1498Szrj set are actually used by the optimized function. Lay them out. */
215938fd1498Szrj expand_used_vars_for_block (outer_block, true);
216038fd1498Szrj
216138fd1498Szrj if (stack_vars_num > 0)
216238fd1498Szrj {
216338fd1498Szrj add_scope_conflicts ();
216438fd1498Szrj
216538fd1498Szrj /* If stack protection is enabled, we don't share space between
216638fd1498Szrj vulnerable data and non-vulnerable data. */
216738fd1498Szrj if (flag_stack_protect != 0
216838fd1498Szrj && (flag_stack_protect != SPCT_FLAG_EXPLICIT
216938fd1498Szrj || (flag_stack_protect == SPCT_FLAG_EXPLICIT
217038fd1498Szrj && lookup_attribute ("stack_protect",
217138fd1498Szrj DECL_ATTRIBUTES (current_function_decl)))))
217238fd1498Szrj add_stack_protection_conflicts ();
217338fd1498Szrj
217438fd1498Szrj /* Now that we have collected all stack variables, and have computed a
217538fd1498Szrj minimal interference graph, attempt to save some stack space. */
217638fd1498Szrj partition_stack_vars ();
217738fd1498Szrj if (dump_file)
217838fd1498Szrj dump_stack_var_partition ();
217938fd1498Szrj }
218038fd1498Szrj
218138fd1498Szrj switch (flag_stack_protect)
218238fd1498Szrj {
218338fd1498Szrj case SPCT_FLAG_ALL:
218438fd1498Szrj create_stack_guard ();
218538fd1498Szrj break;
218638fd1498Szrj
218738fd1498Szrj case SPCT_FLAG_STRONG:
218838fd1498Szrj if (gen_stack_protect_signal
218938fd1498Szrj || cfun->calls_alloca || has_protected_decls
219038fd1498Szrj || lookup_attribute ("stack_protect",
219138fd1498Szrj DECL_ATTRIBUTES (current_function_decl)))
219238fd1498Szrj create_stack_guard ();
219338fd1498Szrj break;
219438fd1498Szrj
219538fd1498Szrj case SPCT_FLAG_DEFAULT:
219638fd1498Szrj if (cfun->calls_alloca || has_protected_decls
219738fd1498Szrj || lookup_attribute ("stack_protect",
219838fd1498Szrj DECL_ATTRIBUTES (current_function_decl)))
219938fd1498Szrj create_stack_guard ();
220038fd1498Szrj break;
220138fd1498Szrj
220238fd1498Szrj case SPCT_FLAG_EXPLICIT:
220338fd1498Szrj if (lookup_attribute ("stack_protect",
220438fd1498Szrj DECL_ATTRIBUTES (current_function_decl)))
220538fd1498Szrj create_stack_guard ();
220638fd1498Szrj break;
220738fd1498Szrj default:
220838fd1498Szrj ;
220938fd1498Szrj }
221038fd1498Szrj
221138fd1498Szrj /* Assign rtl to each variable based on these partitions. */
221238fd1498Szrj if (stack_vars_num > 0)
221338fd1498Szrj {
221438fd1498Szrj struct stack_vars_data data;
221538fd1498Szrj
221638fd1498Szrj data.asan_base = NULL_RTX;
221738fd1498Szrj data.asan_alignb = 0;
221838fd1498Szrj
221938fd1498Szrj /* Reorder decls to be protected by iterating over the variables
222038fd1498Szrj array multiple times, and allocating out of each phase in turn. */
222138fd1498Szrj /* ??? We could probably integrate this into the qsort we did
222238fd1498Szrj earlier, such that we naturally see these variables first,
222338fd1498Szrj and thus naturally allocate things in the right order. */
222438fd1498Szrj if (has_protected_decls)
222538fd1498Szrj {
222638fd1498Szrj /* Phase 1 contains only character arrays. */
222738fd1498Szrj expand_stack_vars (stack_protect_decl_phase_1, &data);
222838fd1498Szrj
222938fd1498Szrj /* Phase 2 contains other kinds of arrays. */
223038fd1498Szrj if (flag_stack_protect == SPCT_FLAG_ALL
223138fd1498Szrj || flag_stack_protect == SPCT_FLAG_STRONG
223238fd1498Szrj || (flag_stack_protect == SPCT_FLAG_EXPLICIT
223338fd1498Szrj && lookup_attribute ("stack_protect",
223438fd1498Szrj DECL_ATTRIBUTES (current_function_decl))))
223538fd1498Szrj expand_stack_vars (stack_protect_decl_phase_2, &data);
223638fd1498Szrj }
223738fd1498Szrj
223838fd1498Szrj if (asan_sanitize_stack_p ())
223938fd1498Szrj /* Phase 3, any partitions that need asan protection
224038fd1498Szrj in addition to phase 1 and 2. */
224138fd1498Szrj expand_stack_vars (asan_decl_phase_3, &data);
224238fd1498Szrj
224338fd1498Szrj /* ASAN description strings don't yet have a syntax for expressing
224438fd1498Szrj polynomial offsets. */
224538fd1498Szrj HOST_WIDE_INT prev_offset;
224638fd1498Szrj if (!data.asan_vec.is_empty ()
224738fd1498Szrj && frame_offset.is_constant (&prev_offset))
224838fd1498Szrj {
224938fd1498Szrj HOST_WIDE_INT offset, sz, redzonesz;
225038fd1498Szrj redzonesz = ASAN_RED_ZONE_SIZE;
225138fd1498Szrj sz = data.asan_vec[0] - prev_offset;
225238fd1498Szrj if (data.asan_alignb > ASAN_RED_ZONE_SIZE
225338fd1498Szrj && data.asan_alignb <= 4096
225438fd1498Szrj && sz + ASAN_RED_ZONE_SIZE >= (int) data.asan_alignb)
225538fd1498Szrj redzonesz = ((sz + ASAN_RED_ZONE_SIZE + data.asan_alignb - 1)
225638fd1498Szrj & ~(data.asan_alignb - HOST_WIDE_INT_1)) - sz;
225738fd1498Szrj /* Allocating a constant amount of space from a constant
225838fd1498Szrj starting offset must give a constant result. */
225938fd1498Szrj offset = (alloc_stack_frame_space (redzonesz, ASAN_RED_ZONE_SIZE)
226038fd1498Szrj .to_constant ());
226138fd1498Szrj data.asan_vec.safe_push (prev_offset);
226238fd1498Szrj data.asan_vec.safe_push (offset);
226338fd1498Szrj /* Leave space for alignment if STRICT_ALIGNMENT. */
226438fd1498Szrj if (STRICT_ALIGNMENT)
226538fd1498Szrj alloc_stack_frame_space ((GET_MODE_ALIGNMENT (SImode)
226638fd1498Szrj << ASAN_SHADOW_SHIFT)
226738fd1498Szrj / BITS_PER_UNIT, 1);
226838fd1498Szrj
226938fd1498Szrj var_end_seq
227038fd1498Szrj = asan_emit_stack_protection (virtual_stack_vars_rtx,
227138fd1498Szrj data.asan_base,
227238fd1498Szrj data.asan_alignb,
227338fd1498Szrj data.asan_vec.address (),
227438fd1498Szrj data.asan_decl_vec.address (),
227538fd1498Szrj data.asan_vec.length ());
227638fd1498Szrj }
227738fd1498Szrj
227838fd1498Szrj expand_stack_vars (NULL, &data);
227938fd1498Szrj }
228038fd1498Szrj
228138fd1498Szrj if (asan_sanitize_allocas_p () && cfun->calls_alloca)
228238fd1498Szrj var_end_seq = asan_emit_allocas_unpoison (virtual_stack_dynamic_rtx,
228338fd1498Szrj virtual_stack_vars_rtx,
228438fd1498Szrj var_end_seq);
228538fd1498Szrj
228638fd1498Szrj fini_vars_expansion ();
228738fd1498Szrj
228838fd1498Szrj /* If there were any artificial non-ignored vars without rtl
228938fd1498Szrj found earlier, see if deferred stack allocation has assigned
229038fd1498Szrj rtl to them in the meantime. */
229138fd1498Szrj FOR_EACH_VEC_ELT_REVERSE (maybe_local_decls, i, var)
229238fd1498Szrj {
229338fd1498Szrj rtx rtl = DECL_RTL_IF_SET (var);
229438fd1498Szrj
229538fd1498Szrj /* Keep artificial non-ignored vars in cfun->local_decls
229638fd1498Szrj chain until instantiate_decls. */
229738fd1498Szrj if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
229838fd1498Szrj add_local_decl (cfun, var);
229938fd1498Szrj }
230038fd1498Szrj
230138fd1498Szrj /* If the target requires that FRAME_OFFSET be aligned, do it. */
230238fd1498Szrj if (STACK_ALIGNMENT_NEEDED)
230338fd1498Szrj {
230438fd1498Szrj HOST_WIDE_INT align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
230538fd1498Szrj if (FRAME_GROWS_DOWNWARD)
230638fd1498Szrj frame_offset = aligned_lower_bound (frame_offset, align);
230738fd1498Szrj else
230838fd1498Szrj frame_offset = aligned_upper_bound (frame_offset, align);
230938fd1498Szrj }
231038fd1498Szrj
231138fd1498Szrj return var_end_seq;
231238fd1498Szrj }
231338fd1498Szrj
231438fd1498Szrj
231538fd1498Szrj /* If we need to produce a detailed dump, print the tree representation
231638fd1498Szrj for STMT to the dump file. SINCE is the last RTX after which the RTL
231738fd1498Szrj generated for STMT should have been appended. */
231838fd1498Szrj
231938fd1498Szrj static void
232038fd1498Szrj maybe_dump_rtl_for_gimple_stmt (gimple *stmt, rtx_insn *since)
232138fd1498Szrj {
232238fd1498Szrj if (dump_file && (dump_flags & TDF_DETAILS))
232338fd1498Szrj {
232438fd1498Szrj fprintf (dump_file, "\n;; ");
232538fd1498Szrj print_gimple_stmt (dump_file, stmt, 0,
232638fd1498Szrj TDF_SLIM | (dump_flags & TDF_LINENO));
232738fd1498Szrj fprintf (dump_file, "\n");
232838fd1498Szrj
232938fd1498Szrj print_rtl (dump_file, since ? NEXT_INSN (since) : since);
233038fd1498Szrj }
233138fd1498Szrj }
233238fd1498Szrj
233338fd1498Szrj /* Maps the blocks that do not contain tree labels to rtx labels. */
233438fd1498Szrj
233538fd1498Szrj static hash_map<basic_block, rtx_code_label *> *lab_rtx_for_bb;
233638fd1498Szrj
233738fd1498Szrj /* Returns the label_rtx expression for a label starting basic block BB. */
233838fd1498Szrj
233938fd1498Szrj static rtx_code_label *
234038fd1498Szrj label_rtx_for_bb (basic_block bb ATTRIBUTE_UNUSED)
234138fd1498Szrj {
234238fd1498Szrj gimple_stmt_iterator gsi;
234338fd1498Szrj tree lab;
234438fd1498Szrj
234538fd1498Szrj if (bb->flags & BB_RTL)
234638fd1498Szrj return block_label (bb);
234738fd1498Szrj
234838fd1498Szrj rtx_code_label **elt = lab_rtx_for_bb->get (bb);
234938fd1498Szrj if (elt)
235038fd1498Szrj return *elt;
235138fd1498Szrj
235238fd1498Szrj /* Find the tree label if it is present. */
235338fd1498Szrj
235438fd1498Szrj for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
235538fd1498Szrj {
235638fd1498Szrj glabel *lab_stmt;
235738fd1498Szrj
235838fd1498Szrj lab_stmt = dyn_cast <glabel *> (gsi_stmt (gsi));
235938fd1498Szrj if (!lab_stmt)
236038fd1498Szrj break;
236138fd1498Szrj
236238fd1498Szrj lab = gimple_label_label (lab_stmt);
236338fd1498Szrj if (DECL_NONLOCAL (lab))
236438fd1498Szrj break;
236538fd1498Szrj
236638fd1498Szrj return jump_target_rtx (lab);
236738fd1498Szrj }
236838fd1498Szrj
236938fd1498Szrj rtx_code_label *l = gen_label_rtx ();
237038fd1498Szrj lab_rtx_for_bb->put (bb, l);
237138fd1498Szrj return l;
237238fd1498Szrj }
237338fd1498Szrj
237438fd1498Szrj
237538fd1498Szrj /* A subroutine of expand_gimple_cond. Given E, a fallthrough edge
237638fd1498Szrj of a basic block where we just expanded the conditional at the end,
237738fd1498Szrj possibly clean up the CFG and instruction sequence. LAST is the
237838fd1498Szrj last instruction before the just emitted jump sequence. */
237938fd1498Szrj
238038fd1498Szrj static void
238138fd1498Szrj maybe_cleanup_end_of_block (edge e, rtx_insn *last)
238238fd1498Szrj {
238338fd1498Szrj /* Special case: when jumpif decides that the condition is
238438fd1498Szrj trivial it emits an unconditional jump (and the necessary
238538fd1498Szrj barrier). But we still have two edges, the fallthru one is
238638fd1498Szrj wrong. purge_dead_edges would clean this up later. Unfortunately
238738fd1498Szrj we have to insert insns (and split edges) before
238838fd1498Szrj find_many_sub_basic_blocks and hence before purge_dead_edges.
238938fd1498Szrj But splitting edges might create new blocks which depend on the
239038fd1498Szrj fact that if there are two edges there's no barrier. So the
239138fd1498Szrj barrier would get lost and verify_flow_info would ICE. Instead
239238fd1498Szrj of auditing all edge splitters to care for the barrier (which
239338fd1498Szrj normally isn't there in a cleaned CFG), fix it here. */
239438fd1498Szrj if (BARRIER_P (get_last_insn ()))
239538fd1498Szrj {
239638fd1498Szrj rtx_insn *insn;
239738fd1498Szrj remove_edge (e);
239838fd1498Szrj /* Now we have a single successor block; if we have insns to
239938fd1498Szrj insert on the remaining edge we will potentially insert
240038fd1498Szrj them at the end of this block (if the dest block isn't feasible)
240138fd1498Szrj in order to avoid splitting the edge. This insertion will take
240238fd1498Szrj place in front of the last jump. But we might have emitted
240338fd1498Szrj multiple jumps (conditional and one unconditional) to the
240438fd1498Szrj same destination. Inserting in front of the last one then
240538fd1498Szrj is a problem. See PR 40021. We fix this by deleting all
240638fd1498Szrj jumps except the last unconditional one. */
240738fd1498Szrj insn = PREV_INSN (get_last_insn ());
240838fd1498Szrj /* Make sure we have an unconditional jump. Otherwise we're
240938fd1498Szrj confused. */
241038fd1498Szrj gcc_assert (JUMP_P (insn) && !any_condjump_p (insn));
241138fd1498Szrj for (insn = PREV_INSN (insn); insn != last;)
241238fd1498Szrj {
241338fd1498Szrj insn = PREV_INSN (insn);
241438fd1498Szrj if (JUMP_P (NEXT_INSN (insn)))
241538fd1498Szrj {
241638fd1498Szrj if (!any_condjump_p (NEXT_INSN (insn)))
241738fd1498Szrj {
241838fd1498Szrj gcc_assert (BARRIER_P (NEXT_INSN (NEXT_INSN (insn))));
241938fd1498Szrj delete_insn (NEXT_INSN (NEXT_INSN (insn)));
242038fd1498Szrj }
242138fd1498Szrj delete_insn (NEXT_INSN (insn));
242238fd1498Szrj }
242338fd1498Szrj }
242438fd1498Szrj }
242538fd1498Szrj }
242638fd1498Szrj
242738fd1498Szrj /* A subroutine of expand_gimple_basic_block. Expand one GIMPLE_COND.
242838fd1498Szrj Returns a new basic block if we've terminated the current basic
242938fd1498Szrj block and created a new one. */
243038fd1498Szrj
243138fd1498Szrj static basic_block
243238fd1498Szrj expand_gimple_cond (basic_block bb, gcond *stmt)
243338fd1498Szrj {
243438fd1498Szrj basic_block new_bb, dest;
243538fd1498Szrj edge true_edge;
243638fd1498Szrj edge false_edge;
243738fd1498Szrj rtx_insn *last2, *last;
243838fd1498Szrj enum tree_code code;
243938fd1498Szrj tree op0, op1;
244038fd1498Szrj
244138fd1498Szrj code = gimple_cond_code (stmt);
244238fd1498Szrj op0 = gimple_cond_lhs (stmt);
244338fd1498Szrj op1 = gimple_cond_rhs (stmt);
244438fd1498Szrj /* We're sometimes presented with such code:
244538fd1498Szrj D.123_1 = x < y;
244638fd1498Szrj if (D.123_1 != 0)
244738fd1498Szrj ...
244838fd1498Szrj This would expand to two comparisons which then later might
244938fd1498Szrj be cleaned up by combine. But some pattern matchers like if-conversion
245038fd1498Szrj work better when there's only one compare, so make up for this
245138fd1498Szrj here as a special exception if TER would have made the same change. */
245238fd1498Szrj if (SA.values
245338fd1498Szrj && TREE_CODE (op0) == SSA_NAME
245438fd1498Szrj && TREE_CODE (TREE_TYPE (op0)) == BOOLEAN_TYPE
245538fd1498Szrj && TREE_CODE (op1) == INTEGER_CST
245638fd1498Szrj && ((gimple_cond_code (stmt) == NE_EXPR
245738fd1498Szrj && integer_zerop (op1))
245838fd1498Szrj || (gimple_cond_code (stmt) == EQ_EXPR
245938fd1498Szrj && integer_onep (op1)))
246038fd1498Szrj && bitmap_bit_p (SA.values, SSA_NAME_VERSION (op0)))
246138fd1498Szrj {
246238fd1498Szrj gimple *second = SSA_NAME_DEF_STMT (op0);
246338fd1498Szrj if (gimple_code (second) == GIMPLE_ASSIGN)
246438fd1498Szrj {
246538fd1498Szrj enum tree_code code2 = gimple_assign_rhs_code (second);
246638fd1498Szrj if (TREE_CODE_CLASS (code2) == tcc_comparison)
246738fd1498Szrj {
246838fd1498Szrj code = code2;
246938fd1498Szrj op0 = gimple_assign_rhs1 (second);
247038fd1498Szrj op1 = gimple_assign_rhs2 (second);
247138fd1498Szrj }
247238fd1498Szrj /* If jumps are cheap and the target does not support conditional
247338fd1498Szrj compare, turn some more codes into jumpy sequences. */
247438fd1498Szrj else if (BRANCH_COST (optimize_insn_for_speed_p (), false) < 4
247538fd1498Szrj && targetm.gen_ccmp_first == NULL)
247638fd1498Szrj {
247738fd1498Szrj if ((code2 == BIT_AND_EXPR
247838fd1498Szrj && TYPE_PRECISION (TREE_TYPE (op0)) == 1
247938fd1498Szrj && TREE_CODE (gimple_assign_rhs2 (second)) != INTEGER_CST)
248038fd1498Szrj || code2 == TRUTH_AND_EXPR)
248138fd1498Szrj {
248238fd1498Szrj code = TRUTH_ANDIF_EXPR;
248338fd1498Szrj op0 = gimple_assign_rhs1 (second);
248438fd1498Szrj op1 = gimple_assign_rhs2 (second);
248538fd1498Szrj }
248638fd1498Szrj else if (code2 == BIT_IOR_EXPR || code2 == TRUTH_OR_EXPR)
248738fd1498Szrj {
248838fd1498Szrj code = TRUTH_ORIF_EXPR;
248938fd1498Szrj op0 = gimple_assign_rhs1 (second);
249038fd1498Szrj op1 = gimple_assign_rhs2 (second);
249138fd1498Szrj }
249238fd1498Szrj }
249338fd1498Szrj }
249438fd1498Szrj }
249538fd1498Szrj
249638fd1498Szrj last2 = last = get_last_insn ();
249738fd1498Szrj
249838fd1498Szrj extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
249938fd1498Szrj set_curr_insn_location (gimple_location (stmt));
250038fd1498Szrj
250138fd1498Szrj /* These flags have no purpose in RTL land. */
250238fd1498Szrj true_edge->flags &= ~EDGE_TRUE_VALUE;
250338fd1498Szrj false_edge->flags &= ~EDGE_FALSE_VALUE;
250438fd1498Szrj
250538fd1498Szrj /* We can either have a pure conditional jump with one fallthru edge or
250638fd1498Szrj a two-way jump that needs to be decomposed into two basic blocks. */
250738fd1498Szrj if (false_edge->dest == bb->next_bb)
250838fd1498Szrj {
250938fd1498Szrj jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest),
251038fd1498Szrj true_edge->probability);
251138fd1498Szrj maybe_dump_rtl_for_gimple_stmt (stmt, last);
251238fd1498Szrj if (true_edge->goto_locus != UNKNOWN_LOCATION)
251338fd1498Szrj set_curr_insn_location (true_edge->goto_locus);
251438fd1498Szrj false_edge->flags |= EDGE_FALLTHRU;
251538fd1498Szrj maybe_cleanup_end_of_block (false_edge, last);
251638fd1498Szrj return NULL;
251738fd1498Szrj }
251838fd1498Szrj if (true_edge->dest == bb->next_bb)
251938fd1498Szrj {
252038fd1498Szrj jumpifnot_1 (code, op0, op1, label_rtx_for_bb (false_edge->dest),
252138fd1498Szrj false_edge->probability);
252238fd1498Szrj maybe_dump_rtl_for_gimple_stmt (stmt, last);
252338fd1498Szrj if (false_edge->goto_locus != UNKNOWN_LOCATION)
252438fd1498Szrj set_curr_insn_location (false_edge->goto_locus);
252538fd1498Szrj true_edge->flags |= EDGE_FALLTHRU;
252638fd1498Szrj maybe_cleanup_end_of_block (true_edge, last);
252738fd1498Szrj return NULL;
252838fd1498Szrj }
252938fd1498Szrj
253038fd1498Szrj jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest),
253138fd1498Szrj true_edge->probability);
253238fd1498Szrj last = get_last_insn ();
253338fd1498Szrj if (false_edge->goto_locus != UNKNOWN_LOCATION)
253438fd1498Szrj set_curr_insn_location (false_edge->goto_locus);
253538fd1498Szrj emit_jump (label_rtx_for_bb (false_edge->dest));
253638fd1498Szrj
253738fd1498Szrj BB_END (bb) = last;
253838fd1498Szrj if (BARRIER_P (BB_END (bb)))
253938fd1498Szrj BB_END (bb) = PREV_INSN (BB_END (bb));
254038fd1498Szrj update_bb_for_insn (bb);
254138fd1498Szrj
254238fd1498Szrj new_bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
254338fd1498Szrj dest = false_edge->dest;
254438fd1498Szrj redirect_edge_succ (false_edge, new_bb);
254538fd1498Szrj false_edge->flags |= EDGE_FALLTHRU;
254638fd1498Szrj new_bb->count = false_edge->count ();
254738fd1498Szrj loop_p loop = find_common_loop (bb->loop_father, dest->loop_father);
254838fd1498Szrj add_bb_to_loop (new_bb, loop);
254938fd1498Szrj if (loop->latch == bb
255038fd1498Szrj && loop->header == dest)
255138fd1498Szrj loop->latch = new_bb;
255238fd1498Szrj make_single_succ_edge (new_bb, dest, 0);
255338fd1498Szrj if (BARRIER_P (BB_END (new_bb)))
255438fd1498Szrj BB_END (new_bb) = PREV_INSN (BB_END (new_bb));
255538fd1498Szrj update_bb_for_insn (new_bb);
255638fd1498Szrj
255738fd1498Szrj maybe_dump_rtl_for_gimple_stmt (stmt, last2);
255838fd1498Szrj
255938fd1498Szrj if (true_edge->goto_locus != UNKNOWN_LOCATION)
256038fd1498Szrj {
256138fd1498Szrj set_curr_insn_location (true_edge->goto_locus);
256238fd1498Szrj true_edge->goto_locus = curr_insn_location ();
256338fd1498Szrj }
256438fd1498Szrj
256538fd1498Szrj return new_bb;
256638fd1498Szrj }
256738fd1498Szrj
256838fd1498Szrj /* Mark all calls that can have a transaction restart. */
256938fd1498Szrj
257038fd1498Szrj static void
257138fd1498Szrj mark_transaction_restart_calls (gimple *stmt)
257238fd1498Szrj {
257338fd1498Szrj struct tm_restart_node dummy;
257438fd1498Szrj tm_restart_node **slot;
257538fd1498Szrj
257638fd1498Szrj if (!cfun->gimple_df->tm_restart)
257738fd1498Szrj return;
257838fd1498Szrj
257938fd1498Szrj dummy.stmt = stmt;
258038fd1498Szrj slot = cfun->gimple_df->tm_restart->find_slot (&dummy, NO_INSERT);
258138fd1498Szrj if (slot)
258238fd1498Szrj {
258338fd1498Szrj struct tm_restart_node *n = *slot;
258438fd1498Szrj tree list = n->label_or_list;
258538fd1498Szrj rtx_insn *insn;
258638fd1498Szrj
258738fd1498Szrj for (insn = next_real_insn (get_last_insn ());
258838fd1498Szrj !CALL_P (insn);
258938fd1498Szrj insn = next_real_insn (insn))
259038fd1498Szrj continue;
259138fd1498Szrj
259238fd1498Szrj if (TREE_CODE (list) == LABEL_DECL)
259338fd1498Szrj add_reg_note (insn, REG_TM, label_rtx (list));
259438fd1498Szrj else
259538fd1498Szrj for (; list ; list = TREE_CHAIN (list))
259638fd1498Szrj add_reg_note (insn, REG_TM, label_rtx (TREE_VALUE (list)));
259738fd1498Szrj }
259838fd1498Szrj }
259938fd1498Szrj
260038fd1498Szrj /* A subroutine of expand_gimple_stmt_1, expanding one GIMPLE_CALL
260138fd1498Szrj statement STMT. */
260238fd1498Szrj
260338fd1498Szrj static void
260438fd1498Szrj expand_call_stmt (gcall *stmt)
260538fd1498Szrj {
260638fd1498Szrj tree exp, decl, lhs;
260738fd1498Szrj bool builtin_p;
260838fd1498Szrj size_t i;
260938fd1498Szrj
261038fd1498Szrj if (gimple_call_internal_p (stmt))
261138fd1498Szrj {
261238fd1498Szrj expand_internal_call (stmt);
261338fd1498Szrj return;
261438fd1498Szrj }
261538fd1498Szrj
261638fd1498Szrj /* If this is a call to a built-in function and it has no effect other
261738fd1498Szrj than setting the lhs, try to implement it using an internal function
261838fd1498Szrj instead. */
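  /* For example (illustrative, assuming target support): a call such as
     "x = __builtin_sqrt (y)" with no other effects may be expanded via
     the internal function IFN_SQRT when the corresponding optab exists;
     replacement_internal_fn below performs that check.  */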
261938fd1498Szrj decl = gimple_call_fndecl (stmt);
262038fd1498Szrj if (gimple_call_lhs (stmt)
262138fd1498Szrj && !gimple_has_side_effects (stmt)
262238fd1498Szrj && (optimize || (decl && called_as_built_in (decl))))
262338fd1498Szrj {
262438fd1498Szrj internal_fn ifn = replacement_internal_fn (stmt);
262538fd1498Szrj if (ifn != IFN_LAST)
262638fd1498Szrj {
262738fd1498Szrj expand_internal_call (ifn, stmt);
262838fd1498Szrj return;
262938fd1498Szrj }
263038fd1498Szrj }
263138fd1498Szrj
263238fd1498Szrj exp = build_vl_exp (CALL_EXPR, gimple_call_num_args (stmt) + 3);
263338fd1498Szrj
263438fd1498Szrj CALL_EXPR_FN (exp) = gimple_call_fn (stmt);
263538fd1498Szrj builtin_p = decl && DECL_BUILT_IN (decl);
263638fd1498Szrj
263738fd1498Szrj /* If this is not a builtin function, the function type through which the
263838fd1498Szrj call is made may be different from the type of the function. */
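  /* E.g. (hypothetical) a call through a cast,
     "((int (*) (int)) foo) (1)": the call is made through the casted
     function type, so the callee address is converted to a pointer to
     gimple_call_fntype here.  */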
263938fd1498Szrj if (!builtin_p)
264038fd1498Szrj CALL_EXPR_FN (exp)
264138fd1498Szrj = fold_convert (build_pointer_type (gimple_call_fntype (stmt)),
264238fd1498Szrj CALL_EXPR_FN (exp));
264338fd1498Szrj
264438fd1498Szrj TREE_TYPE (exp) = gimple_call_return_type (stmt);
264538fd1498Szrj CALL_EXPR_STATIC_CHAIN (exp) = gimple_call_chain (stmt);
264638fd1498Szrj
264738fd1498Szrj for (i = 0; i < gimple_call_num_args (stmt); i++)
264838fd1498Szrj {
264938fd1498Szrj tree arg = gimple_call_arg (stmt, i);
265038fd1498Szrj gimple *def;
265138fd1498Szrj /* TER substitutes ADDR_EXPRs into the arguments of builtin functions so
265238fd1498Szrj we have a chance to infer more precise alignment information.  See PR39954.  */
265338fd1498Szrj if (builtin_p
265438fd1498Szrj && TREE_CODE (arg) == SSA_NAME
265538fd1498Szrj && (def = get_gimple_for_ssa_name (arg))
265638fd1498Szrj && gimple_assign_rhs_code (def) == ADDR_EXPR)
265738fd1498Szrj arg = gimple_assign_rhs1 (def);
265838fd1498Szrj CALL_EXPR_ARG (exp, i) = arg;
265938fd1498Szrj }
266038fd1498Szrj
266138fd1498Szrj if (gimple_has_side_effects (stmt))
266238fd1498Szrj TREE_SIDE_EFFECTS (exp) = 1;
266338fd1498Szrj
266438fd1498Szrj if (gimple_call_nothrow_p (stmt))
266538fd1498Szrj TREE_NOTHROW (exp) = 1;
266638fd1498Szrj
266738fd1498Szrj if (gimple_no_warning_p (stmt))
266838fd1498Szrj TREE_NO_WARNING (exp) = 1;
266938fd1498Szrj
267038fd1498Szrj CALL_EXPR_TAILCALL (exp) = gimple_call_tail_p (stmt);
267138fd1498Szrj CALL_EXPR_MUST_TAIL_CALL (exp) = gimple_call_must_tail_p (stmt);
267238fd1498Szrj CALL_EXPR_RETURN_SLOT_OPT (exp) = gimple_call_return_slot_opt_p (stmt);
267338fd1498Szrj if (decl
267438fd1498Szrj && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
267538fd1498Szrj && ALLOCA_FUNCTION_CODE_P (DECL_FUNCTION_CODE (decl)))
267638fd1498Szrj CALL_ALLOCA_FOR_VAR_P (exp) = gimple_call_alloca_for_var_p (stmt);
267738fd1498Szrj else
267838fd1498Szrj CALL_FROM_THUNK_P (exp) = gimple_call_from_thunk_p (stmt);
267938fd1498Szrj CALL_EXPR_VA_ARG_PACK (exp) = gimple_call_va_arg_pack_p (stmt);
268038fd1498Szrj CALL_EXPR_BY_DESCRIPTOR (exp) = gimple_call_by_descriptor_p (stmt);
268138fd1498Szrj SET_EXPR_LOCATION (exp, gimple_location (stmt));
268238fd1498Szrj CALL_WITH_BOUNDS_P (exp) = gimple_call_with_bounds_p (stmt);
268338fd1498Szrj
268438fd1498Szrj /* Ensure RTL is created for debug args. */
268538fd1498Szrj if (decl && DECL_HAS_DEBUG_ARGS_P (decl))
268638fd1498Szrj {
268738fd1498Szrj vec<tree, va_gc> **debug_args = decl_debug_args_lookup (decl);
268838fd1498Szrj unsigned int ix;
268938fd1498Szrj tree dtemp;
269038fd1498Szrj
269138fd1498Szrj if (debug_args)
269238fd1498Szrj for (ix = 1; (*debug_args)->iterate (ix, &dtemp); ix += 2)
269338fd1498Szrj {
269438fd1498Szrj gcc_assert (TREE_CODE (dtemp) == DEBUG_EXPR_DECL);
269538fd1498Szrj expand_debug_expr (dtemp);
269638fd1498Szrj }
269738fd1498Szrj }
269838fd1498Szrj
269938fd1498Szrj rtx_insn *before_call = get_last_insn ();
270038fd1498Szrj lhs = gimple_call_lhs (stmt);
270138fd1498Szrj if (lhs)
270238fd1498Szrj expand_assignment (lhs, exp, false);
270338fd1498Szrj else
270438fd1498Szrj expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
270538fd1498Szrj
270638fd1498Szrj /* If the gimple call is an indirect call and has the 'nocf_check'
270738fd1498Szrj attribute, find the generated CALL insn and mark it to indicate that
270838fd1498Szrj no control-flow verification is needed.  */
270938fd1498Szrj if (gimple_call_nocf_check_p (stmt)
271038fd1498Szrj && !gimple_call_fndecl (stmt))
271138fd1498Szrj {
271238fd1498Szrj rtx_insn *last = get_last_insn ();
271338fd1498Szrj while (!CALL_P (last)
271438fd1498Szrj && last != before_call)
271538fd1498Szrj last = PREV_INSN (last);
271638fd1498Szrj
271738fd1498Szrj if (last != before_call)
271838fd1498Szrj add_reg_note (last, REG_CALL_NOCF_CHECK, const0_rtx);
271938fd1498Szrj }
272038fd1498Szrj
272138fd1498Szrj mark_transaction_restart_calls (stmt);
272238fd1498Szrj }
272338fd1498Szrj
272438fd1498Szrj
272538fd1498Szrj /* Generate RTL for an asm statement (explicit assembler code).
272638fd1498Szrj STRING is a STRING_CST node containing the assembler code text,
272738fd1498Szrj or an ADDR_EXPR containing a STRING_CST. VOL nonzero means the
272838fd1498Szrj insn is volatile; don't optimize it. */
272938fd1498Szrj
273038fd1498Szrj static void
273138fd1498Szrj expand_asm_loc (tree string, int vol, location_t locus)
273238fd1498Szrj {
273338fd1498Szrj rtx body;
273438fd1498Szrj
273538fd1498Szrj body = gen_rtx_ASM_INPUT_loc (VOIDmode,
273638fd1498Szrj ggc_strdup (TREE_STRING_POINTER (string)),
273738fd1498Szrj locus);
273838fd1498Szrj
273938fd1498Szrj MEM_VOLATILE_P (body) = vol;
274038fd1498Szrj
274138fd1498Szrj /* Non-empty basic ASM implicitly clobbers memory. */
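  /* Illustrative sketch of the insn built below (not from the original
     source):

       (parallel [(asm_input "template text")
		  (clobber (mem:BLK (scratch)))
		  ...any clobbers added by targetm.md_asm_adjust...])  */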
274238fd1498Szrj if (TREE_STRING_LENGTH (string) != 0)
274338fd1498Szrj {
274438fd1498Szrj rtx asm_op, clob;
274538fd1498Szrj unsigned i, nclobbers;
274638fd1498Szrj auto_vec<rtx> input_rvec, output_rvec;
274738fd1498Szrj auto_vec<const char *> constraints;
274838fd1498Szrj auto_vec<rtx> clobber_rvec;
274938fd1498Szrj HARD_REG_SET clobbered_regs;
275038fd1498Szrj CLEAR_HARD_REG_SET (clobbered_regs);
275138fd1498Szrj
275238fd1498Szrj clob = gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode));
275338fd1498Szrj clobber_rvec.safe_push (clob);
275438fd1498Szrj
275538fd1498Szrj if (targetm.md_asm_adjust)
275638fd1498Szrj targetm.md_asm_adjust (output_rvec, input_rvec,
275738fd1498Szrj constraints, clobber_rvec,
275838fd1498Szrj clobbered_regs);
275938fd1498Szrj
276038fd1498Szrj asm_op = body;
276138fd1498Szrj nclobbers = clobber_rvec.length ();
276238fd1498Szrj body = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (1 + nclobbers));
276338fd1498Szrj
276438fd1498Szrj XVECEXP (body, 0, 0) = asm_op;
276538fd1498Szrj for (i = 0; i < nclobbers; i++)
276638fd1498Szrj XVECEXP (body, 0, i + 1) = gen_rtx_CLOBBER (VOIDmode, clobber_rvec[i]);
276738fd1498Szrj }
276838fd1498Szrj
276938fd1498Szrj emit_insn (body);
277038fd1498Szrj }
277138fd1498Szrj
277238fd1498Szrj /* Return the number of times character C occurs in string S. */
277338fd1498Szrj static int
277438fd1498Szrj n_occurrences (int c, const char *s)
277538fd1498Szrj {
277638fd1498Szrj int n = 0;
277738fd1498Szrj while (*s)
277838fd1498Szrj n += (*s++ == c);
277938fd1498Szrj return n;
278038fd1498Szrj }
278138fd1498Szrj
278238fd1498Szrj /* A subroutine of expand_asm_operands. Check that all operands have
278338fd1498Szrj the same number of alternatives. Return true if so. */
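/* For instance (illustrative): the constraints {"=r,m", "r,m"} each
   contain one ',' and thus describe two alternatives, so they pass;
   the pair {"=r,m", "r"} would be diagnosed.  */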
278438fd1498Szrj
278538fd1498Szrj static bool
278638fd1498Szrj check_operand_nalternatives (const vec<const char *> &constraints)
278738fd1498Szrj {
278838fd1498Szrj unsigned len = constraints.length();
278938fd1498Szrj if (len > 0)
279038fd1498Szrj {
279138fd1498Szrj int nalternatives = n_occurrences (',', constraints[0]);
279238fd1498Szrj
279338fd1498Szrj if (nalternatives + 1 > MAX_RECOG_ALTERNATIVES)
279438fd1498Szrj {
279538fd1498Szrj error ("too many alternatives in %<asm%>");
279638fd1498Szrj return false;
279738fd1498Szrj }
279838fd1498Szrj
279938fd1498Szrj for (unsigned i = 1; i < len; ++i)
280038fd1498Szrj if (n_occurrences (',', constraints[i]) != nalternatives)
280138fd1498Szrj {
280238fd1498Szrj error ("operand constraints for %<asm%> differ "
280338fd1498Szrj "in number of alternatives");
280438fd1498Szrj return false;
280538fd1498Szrj }
280638fd1498Szrj }
280738fd1498Szrj return true;
280838fd1498Szrj }
280938fd1498Szrj
281038fd1498Szrj /* Check for overlap between registers marked in CLOBBERED_REGS and
281138fd1498Szrj anything inappropriate in T.  Emit an error and return true for a
281238fd1498Szrj conflict, false for ok.  */
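/* For example (hypothetical): with "register int x asm ("r1");" used as
   an asm operand while "r1" also appears in the asm's clobber list, the
   conflict is diagnosed here.  */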
281338fd1498Szrj
281438fd1498Szrj static bool
281538fd1498Szrj tree_conflicts_with_clobbers_p (tree t, HARD_REG_SET *clobbered_regs)
281638fd1498Szrj {
281738fd1498Szrj /* Conflicts between asm-declared register variables and the clobber
281838fd1498Szrj list are not allowed. */
281938fd1498Szrj tree overlap = tree_overlaps_hard_reg_set (t, clobbered_regs);
282038fd1498Szrj
282138fd1498Szrj if (overlap)
282238fd1498Szrj {
282338fd1498Szrj error ("asm-specifier for variable %qE conflicts with asm clobber list",
282438fd1498Szrj DECL_NAME (overlap));
282538fd1498Szrj
282638fd1498Szrj /* Reset registerness to avoid emitting multiple errors for a single
282738fd1498Szrj variable.  */
282838fd1498Szrj DECL_REGISTER (overlap) = 0;
282938fd1498Szrj return true;
283038fd1498Szrj }
283138fd1498Szrj
283238fd1498Szrj return false;
283338fd1498Szrj }
283438fd1498Szrj
283538fd1498Szrj /* Generate RTL for an asm statement with arguments.
283638fd1498Szrj STRING is the instruction template.
283738fd1498Szrj OUTPUTS is a list of output arguments (lvalues); INPUTS a list of inputs.
283838fd1498Szrj Each output or input has an expression in the TREE_VALUE and
283938fd1498Szrj a tree list in TREE_PURPOSE which in turn contains a constraint
284038fd1498Szrj name in TREE_VALUE (or NULL_TREE) and a constraint string
284138fd1498Szrj in TREE_PURPOSE.
284238fd1498Szrj CLOBBERS is a list of STRING_CST nodes each naming a hard register
284338fd1498Szrj that is clobbered by this insn.
284438fd1498Szrj
284538fd1498Szrj LABELS is a list of labels, and if LABELS is non-NULL, FALLTHRU_BB
284638fd1498Szrj should be the fallthru basic block of the asm goto.
284738fd1498Szrj
284838fd1498Szrj Not all kinds of lvalue that may appear in OUTPUTS can be stored directly.
284938fd1498Szrj Some elements of OUTPUTS may be replaced with trees representing temporary
285038fd1498Szrj values. The caller should copy those temporary values to the originally
285138fd1498Szrj specified lvalues.
285238fd1498Szrj
285338fd1498Szrj VOL nonzero means the insn is volatile; don't optimize it. */
285438fd1498Szrj
285538fd1498Szrj static void
285638fd1498Szrj expand_asm_stmt (gasm *stmt)
285738fd1498Szrj {
285838fd1498Szrj class save_input_location
285938fd1498Szrj {
286038fd1498Szrj location_t old;
286138fd1498Szrj
286238fd1498Szrj public:
286338fd1498Szrj explicit save_input_location(location_t where)
286438fd1498Szrj {
286538fd1498Szrj old = input_location;
286638fd1498Szrj input_location = where;
286738fd1498Szrj }
286838fd1498Szrj
286938fd1498Szrj ~save_input_location()
287038fd1498Szrj {
287138fd1498Szrj input_location = old;
287238fd1498Szrj }
287338fd1498Szrj };
287438fd1498Szrj
287538fd1498Szrj location_t locus = gimple_location (stmt);
287638fd1498Szrj
287738fd1498Szrj if (gimple_asm_input_p (stmt))
287838fd1498Szrj {
287938fd1498Szrj const char *s = gimple_asm_string (stmt);
288038fd1498Szrj tree string = build_string (strlen (s), s);
288138fd1498Szrj expand_asm_loc (string, gimple_asm_volatile_p (stmt), locus);
288238fd1498Szrj return;
288338fd1498Szrj }
288438fd1498Szrj
288538fd1498Szrj /* There are some legacy diagnostics in here, and this also avoids a
288638fd1498Szrj sixth parameter to targetm.md_asm_adjust.  */
288738fd1498Szrj save_input_location s_i_l(locus);
288838fd1498Szrj
288938fd1498Szrj unsigned noutputs = gimple_asm_noutputs (stmt);
289038fd1498Szrj unsigned ninputs = gimple_asm_ninputs (stmt);
289138fd1498Szrj unsigned nlabels = gimple_asm_nlabels (stmt);
289238fd1498Szrj unsigned i;
289338fd1498Szrj
289438fd1498Szrj /* ??? Diagnose during gimplification? */
289538fd1498Szrj if (ninputs + noutputs + nlabels > MAX_RECOG_OPERANDS)
289638fd1498Szrj {
289738fd1498Szrj error ("more than %d operands in %<asm%>", MAX_RECOG_OPERANDS);
289838fd1498Szrj return;
289938fd1498Szrj }
290038fd1498Szrj
290138fd1498Szrj auto_vec<tree, MAX_RECOG_OPERANDS> output_tvec;
290238fd1498Szrj auto_vec<tree, MAX_RECOG_OPERANDS> input_tvec;
290338fd1498Szrj auto_vec<const char *, MAX_RECOG_OPERANDS> constraints;
290438fd1498Szrj
290538fd1498Szrj /* Copy the gimple vectors into new vectors that we can manipulate. */
290638fd1498Szrj
290738fd1498Szrj output_tvec.safe_grow (noutputs);
290838fd1498Szrj input_tvec.safe_grow (ninputs);
290938fd1498Szrj constraints.safe_grow (noutputs + ninputs);
291038fd1498Szrj
291138fd1498Szrj for (i = 0; i < noutputs; ++i)
291238fd1498Szrj {
291338fd1498Szrj tree t = gimple_asm_output_op (stmt, i);
291438fd1498Szrj output_tvec[i] = TREE_VALUE (t);
291538fd1498Szrj constraints[i] = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
291638fd1498Szrj }
291738fd1498Szrj for (i = 0; i < ninputs; i++)
291838fd1498Szrj {
291938fd1498Szrj tree t = gimple_asm_input_op (stmt, i);
292038fd1498Szrj input_tvec[i] = TREE_VALUE (t);
292138fd1498Szrj constraints[i + noutputs]
292238fd1498Szrj = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
292338fd1498Szrj }
292438fd1498Szrj
292538fd1498Szrj /* ??? Diagnose during gimplification? */
292638fd1498Szrj if (! check_operand_nalternatives (constraints))
292738fd1498Szrj return;
292838fd1498Szrj
292938fd1498Szrj /* Count the number of meaningful clobbered registers, ignoring what
293038fd1498Szrj we would ignore later. */
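  /* As the handling below shows (illustrative summary): a clobber of
     "memory" decodes to -4, "cc" to -3, an unknown name to -2 and an
     empty string to -1; only nonnegative codes denote hard registers.  */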
293138fd1498Szrj auto_vec<rtx> clobber_rvec;
293238fd1498Szrj HARD_REG_SET clobbered_regs;
293338fd1498Szrj CLEAR_HARD_REG_SET (clobbered_regs);
293438fd1498Szrj
293538fd1498Szrj if (unsigned n = gimple_asm_nclobbers (stmt))
293638fd1498Szrj {
293738fd1498Szrj clobber_rvec.reserve (n);
293838fd1498Szrj for (i = 0; i < n; i++)
293938fd1498Szrj {
294038fd1498Szrj tree t = gimple_asm_clobber_op (stmt, i);
294138fd1498Szrj const char *regname = TREE_STRING_POINTER (TREE_VALUE (t));
294238fd1498Szrj int nregs, j;
294338fd1498Szrj
294438fd1498Szrj j = decode_reg_name_and_count (regname, &nregs);
294538fd1498Szrj if (j < 0)
294638fd1498Szrj {
294738fd1498Szrj if (j == -2)
294838fd1498Szrj {
294938fd1498Szrj /* ??? Diagnose during gimplification? */
295038fd1498Szrj error ("unknown register name %qs in %<asm%>", regname);
295138fd1498Szrj }
295238fd1498Szrj else if (j == -4)
295338fd1498Szrj {
295438fd1498Szrj rtx x = gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode));
295538fd1498Szrj clobber_rvec.safe_push (x);
295638fd1498Szrj }
295738fd1498Szrj else
295838fd1498Szrj {
295938fd1498Szrj /* Otherwise we should have -1 == empty string
296038fd1498Szrj or -3 == cc, which is not a register. */
296138fd1498Szrj gcc_assert (j == -1 || j == -3);
296238fd1498Szrj }
296338fd1498Szrj }
296438fd1498Szrj else
296538fd1498Szrj for (int reg = j; reg < j + nregs; reg++)
296638fd1498Szrj {
296738fd1498Szrj /* Clobbering the PIC register is an error. */
296838fd1498Szrj if (reg == (int) PIC_OFFSET_TABLE_REGNUM)
296938fd1498Szrj {
297038fd1498Szrj /* ??? Diagnose during gimplification? */
297138fd1498Szrj error ("PIC register clobbered by %qs in %<asm%>",
297238fd1498Szrj regname);
297338fd1498Szrj return;
297438fd1498Szrj }
297538fd1498Szrj
297638fd1498Szrj SET_HARD_REG_BIT (clobbered_regs, reg);
297738fd1498Szrj rtx x = gen_rtx_REG (reg_raw_mode[reg], reg);
297838fd1498Szrj clobber_rvec.safe_push (x);
297938fd1498Szrj }
298038fd1498Szrj }
298138fd1498Szrj }
298238fd1498Szrj unsigned nclobbers = clobber_rvec.length();
298338fd1498Szrj
298438fd1498Szrj /* First pass over inputs and outputs checks validity and marks
298538fd1498Szrj operands addressable if needed.  */
298638fd1498Szrj /* ??? Diagnose during gimplification? */
298738fd1498Szrj
298838fd1498Szrj for (i = 0; i < noutputs; ++i)
298938fd1498Szrj {
299038fd1498Szrj tree val = output_tvec[i];
299138fd1498Szrj tree type = TREE_TYPE (val);
299238fd1498Szrj const char *constraint;
299338fd1498Szrj bool is_inout;
299438fd1498Szrj bool allows_reg;
299538fd1498Szrj bool allows_mem;
299638fd1498Szrj
299738fd1498Szrj /* Try to parse the output constraint. If that fails, there's
299838fd1498Szrj no point in going further. */
299938fd1498Szrj constraint = constraints[i];
300038fd1498Szrj if (!parse_output_constraint (&constraint, i, ninputs, noutputs,
300138fd1498Szrj &allows_mem, &allows_reg, &is_inout))
300238fd1498Szrj return;
300338fd1498Szrj
300438fd1498Szrj if (! allows_reg
300538fd1498Szrj && (allows_mem
300638fd1498Szrj || is_inout
300738fd1498Szrj || (DECL_P (val)
300838fd1498Szrj && REG_P (DECL_RTL (val))
300938fd1498Szrj && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type))))
301038fd1498Szrj mark_addressable (val);
301138fd1498Szrj }
301238fd1498Szrj
301338fd1498Szrj for (i = 0; i < ninputs; ++i)
301438fd1498Szrj {
301538fd1498Szrj bool allows_reg, allows_mem;
301638fd1498Szrj const char *constraint;
301738fd1498Szrj
301838fd1498Szrj constraint = constraints[i + noutputs];
301938fd1498Szrj if (! parse_input_constraint (&constraint, i, ninputs, noutputs, 0,
302038fd1498Szrj constraints.address (),
302138fd1498Szrj &allows_mem, &allows_reg))
302238fd1498Szrj return;
302338fd1498Szrj
302438fd1498Szrj if (! allows_reg && allows_mem)
302538fd1498Szrj mark_addressable (input_tvec[i]);
302638fd1498Szrj }
302738fd1498Szrj
302838fd1498Szrj /* Second pass evaluates arguments. */
302938fd1498Szrj
303038fd1498Szrj /* Make sure stack is consistent for asm goto. */
303138fd1498Szrj if (nlabels > 0)
303238fd1498Szrj do_pending_stack_adjust ();
303338fd1498Szrj int old_generating_concat_p = generating_concat_p;
303438fd1498Szrj
303538fd1498Szrj /* Vector of RTX's of evaluated output operands. */
303638fd1498Szrj auto_vec<rtx, MAX_RECOG_OPERANDS> output_rvec;
303738fd1498Szrj auto_vec<int, MAX_RECOG_OPERANDS> inout_opnum;
303838fd1498Szrj rtx_insn *after_rtl_seq = NULL, *after_rtl_end = NULL;
303938fd1498Szrj
304038fd1498Szrj output_rvec.safe_grow (noutputs);
304138fd1498Szrj
304238fd1498Szrj for (i = 0; i < noutputs; ++i)
304338fd1498Szrj {
304438fd1498Szrj tree val = output_tvec[i];
304538fd1498Szrj tree type = TREE_TYPE (val);
304638fd1498Szrj bool is_inout, allows_reg, allows_mem, ok;
304738fd1498Szrj rtx op;
304838fd1498Szrj
304938fd1498Szrj ok = parse_output_constraint (&constraints[i], i, ninputs,
305038fd1498Szrj noutputs, &allows_mem, &allows_reg,
305138fd1498Szrj &is_inout);
305238fd1498Szrj gcc_assert (ok);
305338fd1498Szrj
305438fd1498Szrj /* If an output operand is not a decl or indirect ref and our constraint
305538fd1498Szrj allows a register, make a temporary to act as an intermediate.
305638fd1498Szrj Make the asm insn write into that, then we will copy it to
305738fd1498Szrj the real output operand. Likewise for promoted variables. */
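      /* E.g. (hypothetical): for "asm ("..." : "=r" (s.f))" where s.f is
	 a field living in memory, a pseudo register acts as the asm
	 output and the after-asm sequence built here copies it back
	 into s.f.  */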
305838fd1498Szrj
305938fd1498Szrj generating_concat_p = 0;
306038fd1498Szrj
3061*58e805e6Szrj if ((TREE_CODE (val) == INDIRECT_REF && allows_mem)
306238fd1498Szrj || (DECL_P (val)
306338fd1498Szrj && (allows_mem || REG_P (DECL_RTL (val)))
306438fd1498Szrj && ! (REG_P (DECL_RTL (val))
306538fd1498Szrj && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type)))
306638fd1498Szrj || ! allows_reg
3067*58e805e6Szrj || is_inout
3068*58e805e6Szrj || TREE_ADDRESSABLE (type))
306938fd1498Szrj {
307038fd1498Szrj op = expand_expr (val, NULL_RTX, VOIDmode,
307138fd1498Szrj !allows_reg ? EXPAND_MEMORY : EXPAND_WRITE);
307238fd1498Szrj if (MEM_P (op))
307338fd1498Szrj op = validize_mem (op);
307438fd1498Szrj
307538fd1498Szrj if (! allows_reg && !MEM_P (op))
307638fd1498Szrj error ("output number %d not directly addressable", i);
3077*58e805e6Szrj if ((! allows_mem && MEM_P (op) && GET_MODE (op) != BLKmode)
307838fd1498Szrj || GET_CODE (op) == CONCAT)
307938fd1498Szrj {
308038fd1498Szrj rtx old_op = op;
308138fd1498Szrj op = gen_reg_rtx (GET_MODE (op));
308238fd1498Szrj
308338fd1498Szrj generating_concat_p = old_generating_concat_p;
308438fd1498Szrj
308538fd1498Szrj if (is_inout)
308638fd1498Szrj emit_move_insn (op, old_op);
308738fd1498Szrj
308838fd1498Szrj push_to_sequence2 (after_rtl_seq, after_rtl_end);
308938fd1498Szrj emit_move_insn (old_op, op);
309038fd1498Szrj after_rtl_seq = get_insns ();
309138fd1498Szrj after_rtl_end = get_last_insn ();
309238fd1498Szrj end_sequence ();
309338fd1498Szrj }
309438fd1498Szrj }
309538fd1498Szrj else
309638fd1498Szrj {
309738fd1498Szrj op = assign_temp (type, 0, 1);
309838fd1498Szrj op = validize_mem (op);
309938fd1498Szrj if (!MEM_P (op) && TREE_CODE (val) == SSA_NAME)
310038fd1498Szrj set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (val), op);
310138fd1498Szrj
310238fd1498Szrj generating_concat_p = old_generating_concat_p;
310338fd1498Szrj
310438fd1498Szrj push_to_sequence2 (after_rtl_seq, after_rtl_end);
310538fd1498Szrj expand_assignment (val, make_tree (type, op), false);
310638fd1498Szrj after_rtl_seq = get_insns ();
310738fd1498Szrj after_rtl_end = get_last_insn ();
310838fd1498Szrj end_sequence ();
310938fd1498Szrj }
311038fd1498Szrj output_rvec[i] = op;
311138fd1498Szrj
311238fd1498Szrj if (is_inout)
311338fd1498Szrj inout_opnum.safe_push (i);
311438fd1498Szrj }
311538fd1498Szrj
311638fd1498Szrj auto_vec<rtx, MAX_RECOG_OPERANDS> input_rvec;
311738fd1498Szrj auto_vec<machine_mode, MAX_RECOG_OPERANDS> input_mode;
311838fd1498Szrj
311938fd1498Szrj input_rvec.safe_grow (ninputs);
312038fd1498Szrj input_mode.safe_grow (ninputs);
312138fd1498Szrj
312238fd1498Szrj generating_concat_p = 0;
312338fd1498Szrj
312438fd1498Szrj for (i = 0; i < ninputs; ++i)
312538fd1498Szrj {
312638fd1498Szrj tree val = input_tvec[i];
312738fd1498Szrj tree type = TREE_TYPE (val);
312838fd1498Szrj bool allows_reg, allows_mem, ok;
312938fd1498Szrj const char *constraint;
313038fd1498Szrj rtx op;
313138fd1498Szrj
313238fd1498Szrj constraint = constraints[i + noutputs];
313338fd1498Szrj ok = parse_input_constraint (&constraint, i, ninputs, noutputs, 0,
313438fd1498Szrj constraints.address (),
313538fd1498Szrj &allows_mem, &allows_reg);
313638fd1498Szrj gcc_assert (ok);
313738fd1498Szrj
313838fd1498Szrj /* EXPAND_INITIALIZER will not generate code for valid initializer
313938fd1498Szrj constants, but will still generate code for other types of operand.
314038fd1498Szrj This is the behavior we want for constant constraints. */
314138fd1498Szrj op = expand_expr (val, NULL_RTX, VOIDmode,
314238fd1498Szrj allows_reg ? EXPAND_NORMAL
314338fd1498Szrj : allows_mem ? EXPAND_MEMORY
314438fd1498Szrj : EXPAND_INITIALIZER);
314538fd1498Szrj
314638fd1498Szrj /* Never pass a CONCAT to an ASM. */
314738fd1498Szrj if (GET_CODE (op) == CONCAT)
314838fd1498Szrj op = force_reg (GET_MODE (op), op);
314938fd1498Szrj else if (MEM_P (op))
315038fd1498Szrj op = validize_mem (op);
315138fd1498Szrj
315238fd1498Szrj if (asm_operand_ok (op, constraint, NULL) <= 0)
315338fd1498Szrj {
315438fd1498Szrj if (allows_reg && TYPE_MODE (type) != BLKmode)
315538fd1498Szrj op = force_reg (TYPE_MODE (type), op);
315638fd1498Szrj else if (!allows_mem)
315738fd1498Szrj warning (0, "asm operand %d probably doesn%'t match constraints",
315838fd1498Szrj i + noutputs);
315938fd1498Szrj else if (MEM_P (op))
316038fd1498Szrj {
316138fd1498Szrj /* We won't recognize either volatile memory or memory
316238fd1498Szrj with a queued address as a valid memory_operand
316338fd1498Szrj at this point.  Ignore it: clearly this *is* a memory.  */
316438fd1498Szrj }
316538fd1498Szrj else
316638fd1498Szrj gcc_unreachable ();
316738fd1498Szrj }
316838fd1498Szrj input_rvec[i] = op;
316938fd1498Szrj input_mode[i] = TYPE_MODE (type);
317038fd1498Szrj }
317138fd1498Szrj
317238fd1498Szrj /* For in-out operands, copy output rtx to input rtx. */
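  /* E.g. (illustrative): an operand written "+r" (x) was split into
     output J with constraint "=r" plus a hidden input whose constraint
     is the matching operand number "J"; that hidden input is
     materialized here.  */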
317338fd1498Szrj unsigned ninout = inout_opnum.length();
317438fd1498Szrj for (i = 0; i < ninout; i++)
317538fd1498Szrj {
317638fd1498Szrj int j = inout_opnum[i];
317738fd1498Szrj rtx o = output_rvec[j];
317838fd1498Szrj
317938fd1498Szrj input_rvec.safe_push (o);
318038fd1498Szrj input_mode.safe_push (GET_MODE (o));
318138fd1498Szrj
318238fd1498Szrj char buffer[16];
318338fd1498Szrj sprintf (buffer, "%d", j);
318438fd1498Szrj constraints.safe_push (ggc_strdup (buffer));
318538fd1498Szrj }
318638fd1498Szrj ninputs += ninout;
318738fd1498Szrj
318838fd1498Szrj /* Sometimes we wish to automatically clobber registers across an asm.
318938fd1498Szrj Case in point is when the i386 backend moved from cc0 to a hard reg --
319038fd1498Szrj maintaining source-level compatibility means automatically clobbering
319138fd1498Szrj the flags register. */
319238fd1498Szrj rtx_insn *after_md_seq = NULL;
319338fd1498Szrj if (targetm.md_asm_adjust)
319438fd1498Szrj after_md_seq = targetm.md_asm_adjust (output_rvec, input_rvec,
319538fd1498Szrj constraints, clobber_rvec,
319638fd1498Szrj clobbered_regs);
319738fd1498Szrj
319838fd1498Szrj /* Do not allow the hook to change the output and input count,
319938fd1498Szrj lest it mess up the operand numbering. */
320038fd1498Szrj gcc_assert (output_rvec.length() == noutputs);
320138fd1498Szrj gcc_assert (input_rvec.length() == ninputs);
320238fd1498Szrj gcc_assert (constraints.length() == noutputs + ninputs);
320338fd1498Szrj
320438fd1498Szrj /* But it certainly can adjust the clobbers. */
320538fd1498Szrj nclobbers = clobber_rvec.length();
320638fd1498Szrj
320738fd1498Szrj /* Third pass checks for easy conflicts. */
320838fd1498Szrj /* ??? Why are we doing this on trees instead of rtx?  */
320938fd1498Szrj
321038fd1498Szrj bool clobber_conflict_found = 0;
321138fd1498Szrj for (i = 0; i < noutputs; ++i)
321238fd1498Szrj if (tree_conflicts_with_clobbers_p (output_tvec[i], &clobbered_regs))
321338fd1498Szrj clobber_conflict_found = 1;
321438fd1498Szrj for (i = 0; i < ninputs - ninout; ++i)
321538fd1498Szrj if (tree_conflicts_with_clobbers_p (input_tvec[i], &clobbered_regs))
321638fd1498Szrj clobber_conflict_found = 1;
321738fd1498Szrj
321838fd1498Szrj /* Make vectors for the expression-rtx, constraint strings,
321938fd1498Szrj and named operands. */
322038fd1498Szrj
322138fd1498Szrj rtvec argvec = rtvec_alloc (ninputs);
322238fd1498Szrj rtvec constraintvec = rtvec_alloc (ninputs);
322338fd1498Szrj rtvec labelvec = rtvec_alloc (nlabels);
322438fd1498Szrj
322538fd1498Szrj rtx body = gen_rtx_ASM_OPERANDS ((noutputs == 0 ? VOIDmode
322638fd1498Szrj : GET_MODE (output_rvec[0])),
322738fd1498Szrj ggc_strdup (gimple_asm_string (stmt)),
322838fd1498Szrj "", 0, argvec, constraintvec,
322938fd1498Szrj labelvec, locus);
323038fd1498Szrj MEM_VOLATILE_P (body) = gimple_asm_volatile_p (stmt);
323138fd1498Szrj
323238fd1498Szrj for (i = 0; i < ninputs; ++i)
323338fd1498Szrj {
323438fd1498Szrj ASM_OPERANDS_INPUT (body, i) = input_rvec[i];
323538fd1498Szrj ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, i)
323638fd1498Szrj = gen_rtx_ASM_INPUT_loc (input_mode[i],
323738fd1498Szrj constraints[i + noutputs],
323838fd1498Szrj locus);
323938fd1498Szrj }
324038fd1498Szrj
324138fd1498Szrj /* Copy labels to the vector. */
324238fd1498Szrj rtx_code_label *fallthru_label = NULL;
324338fd1498Szrj if (nlabels > 0)
324438fd1498Szrj {
324538fd1498Szrj basic_block fallthru_bb = NULL;
324638fd1498Szrj edge fallthru = find_fallthru_edge (gimple_bb (stmt)->succs);
324738fd1498Szrj if (fallthru)
324838fd1498Szrj fallthru_bb = fallthru->dest;
324938fd1498Szrj
325038fd1498Szrj for (i = 0; i < nlabels; ++i)
325138fd1498Szrj {
325238fd1498Szrj tree label = TREE_VALUE (gimple_asm_label_op (stmt, i));
325338fd1498Szrj rtx_insn *r;
325438fd1498Szrj /* If asm goto has any labels in the fallthru basic block, use
325538fd1498Szrj a label that we emit immediately after the asm goto. Expansion
325638fd1498Szrj may insert further instructions into the same basic block after
325738fd1498Szrj asm goto and if we don't do this, insertion of instructions on
325838fd1498Szrj the fallthru edge might misbehave. See PR58670. */
325938fd1498Szrj if (fallthru_bb && label_to_block_fn (cfun, label) == fallthru_bb)
326038fd1498Szrj {
326138fd1498Szrj if (fallthru_label == NULL_RTX)
326238fd1498Szrj fallthru_label = gen_label_rtx ();
326338fd1498Szrj r = fallthru_label;
326438fd1498Szrj }
326538fd1498Szrj else
326638fd1498Szrj r = label_rtx (label);
326738fd1498Szrj ASM_OPERANDS_LABEL (body, i) = gen_rtx_LABEL_REF (Pmode, r);
326838fd1498Szrj }
326938fd1498Szrj }
327038fd1498Szrj
327138fd1498Szrj /* Now, for each output, construct an rtx
327238fd1498Szrj (set OUTPUT (asm_operands INSN OUTPUTCONSTRAINT OUTPUTNUMBER
327338fd1498Szrj ARGVEC CONSTRAINTS OPNAMES))
327438fd1498Szrj If there is more than one, put them inside a PARALLEL. */
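/* E.g. (illustrative): "asm ("mov %1, %0" : "=r" (x) : "r" (y))" with no
   clobbers becomes a single

     (set (reg x)
	  (asm_operands "mov %1, %0" "=r" 0 [(reg y)] [(asm_input "r")] []))

   whereas extra outputs or clobbers wrap such SETs and CLOBBERs in a
   PARALLEL.  */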
327538fd1498Szrj
327638fd1498Szrj if (nlabels > 0 && nclobbers == 0)
327738fd1498Szrj {
327838fd1498Szrj gcc_assert (noutputs == 0);
327938fd1498Szrj emit_jump_insn (body);
328038fd1498Szrj }
328138fd1498Szrj else if (noutputs == 0 && nclobbers == 0)
328238fd1498Szrj {
328338fd1498Szrj /* No output operands: put in a raw ASM_OPERANDS rtx. */
328438fd1498Szrj emit_insn (body);
328538fd1498Szrj }
328638fd1498Szrj else if (noutputs == 1 && nclobbers == 0)
328738fd1498Szrj {
328838fd1498Szrj ASM_OPERANDS_OUTPUT_CONSTRAINT (body) = constraints[0];
328938fd1498Szrj emit_insn (gen_rtx_SET (output_rvec[0], body));
329038fd1498Szrj }
329138fd1498Szrj else
329238fd1498Szrj {
329338fd1498Szrj rtx obody = body;
329438fd1498Szrj int num = noutputs;
329538fd1498Szrj
329638fd1498Szrj if (num == 0)
329738fd1498Szrj num = 1;
329838fd1498Szrj
329938fd1498Szrj body = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num + nclobbers));
330038fd1498Szrj
330138fd1498Szrj /* For each output operand, store a SET. */
330238fd1498Szrj for (i = 0; i < noutputs; ++i)
330338fd1498Szrj {
330438fd1498Szrj rtx src, o = output_rvec[i];
330538fd1498Szrj if (i == 0)
330638fd1498Szrj {
330738fd1498Szrj ASM_OPERANDS_OUTPUT_CONSTRAINT (obody) = constraints[0];
330838fd1498Szrj src = obody;
330938fd1498Szrj }
331038fd1498Szrj else
331138fd1498Szrj {
331238fd1498Szrj src = gen_rtx_ASM_OPERANDS (GET_MODE (o),
331338fd1498Szrj ASM_OPERANDS_TEMPLATE (obody),
331438fd1498Szrj constraints[i], i, argvec,
331538fd1498Szrj constraintvec, labelvec, locus);
331638fd1498Szrj MEM_VOLATILE_P (src) = gimple_asm_volatile_p (stmt);
331738fd1498Szrj }
331838fd1498Szrj XVECEXP (body, 0, i) = gen_rtx_SET (o, src);
331938fd1498Szrj }
332038fd1498Szrj
332138fd1498Szrj /* If there are no outputs (but there are some clobbers)
332238fd1498Szrj store the bare ASM_OPERANDS into the PARALLEL. */
332338fd1498Szrj if (i == 0)
332438fd1498Szrj XVECEXP (body, 0, i++) = obody;
332538fd1498Szrj
332638fd1498Szrj /* Store (clobber REG) for each clobbered register specified. */
332738fd1498Szrj for (unsigned j = 0; j < nclobbers; ++j)
332838fd1498Szrj {
332938fd1498Szrj rtx clobbered_reg = clobber_rvec[j];
333038fd1498Szrj
333138fd1498Szrj /* Sanity-check for overlap between clobbers and any inputs or
333238fd1498Szrj outputs that hasn't been handled.  Such an overlap
333338fd1498Szrj should have been detected and reported above.  */
333438fd1498Szrj if (!clobber_conflict_found && REG_P (clobbered_reg))
333538fd1498Szrj {
333638fd1498Szrj /* We test the old body (obody) contents to avoid
333738fd1498Szrj tripping over the under-construction body. */
333838fd1498Szrj for (unsigned k = 0; k < noutputs; ++k)
333938fd1498Szrj if (reg_overlap_mentioned_p (clobbered_reg, output_rvec[k]))
334038fd1498Szrj internal_error ("asm clobber conflict with output operand");
334138fd1498Szrj
334238fd1498Szrj for (unsigned k = 0; k < ninputs - ninout; ++k)
334338fd1498Szrj if (reg_overlap_mentioned_p (clobbered_reg, input_rvec[k]))
334438fd1498Szrj internal_error ("asm clobber conflict with input operand");
334538fd1498Szrj }
334638fd1498Szrj
334738fd1498Szrj XVECEXP (body, 0, i++) = gen_rtx_CLOBBER (VOIDmode, clobbered_reg);
334838fd1498Szrj }
334938fd1498Szrj
335038fd1498Szrj if (nlabels > 0)
335138fd1498Szrj emit_jump_insn (body);
335238fd1498Szrj else
335338fd1498Szrj emit_insn (body);
335438fd1498Szrj }
335538fd1498Szrj
335638fd1498Szrj generating_concat_p = old_generating_concat_p;
335738fd1498Szrj
335838fd1498Szrj if (fallthru_label)
335938fd1498Szrj emit_label (fallthru_label);
336038fd1498Szrj
336138fd1498Szrj if (after_md_seq)
336238fd1498Szrj emit_insn (after_md_seq);
336338fd1498Szrj if (after_rtl_seq)
336438fd1498Szrj emit_insn (after_rtl_seq);
336538fd1498Szrj
336638fd1498Szrj free_temp_slots ();
336738fd1498Szrj crtl->has_asm_statement = 1;
336838fd1498Szrj }
336938fd1498Szrj
337038fd1498Szrj /* Emit code to jump to the address
337138fd1498Szrj specified by the pointer expression EXP. */
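/* E.g. (illustrative): this handles "goto *p;" where p holds a label
   address previously taken with the GNU "&&label" extension.  */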
337238fd1498Szrj
337338fd1498Szrj static void
337438fd1498Szrj expand_computed_goto (tree exp)
337538fd1498Szrj {
337638fd1498Szrj rtx x = expand_normal (exp);
337738fd1498Szrj
337838fd1498Szrj do_pending_stack_adjust ();
337938fd1498Szrj emit_indirect_jump (x);
338038fd1498Szrj }
338138fd1498Szrj
338238fd1498Szrj /* Generate RTL code for a `goto' statement with target label LABEL.
338338fd1498Szrj LABEL should be a LABEL_DECL tree node that was or will later be
338438fd1498Szrj defined with `expand_label'. */
338538fd1498Szrj
338638fd1498Szrj static void
338738fd1498Szrj expand_goto (tree label)
338838fd1498Szrj {
338938fd1498Szrj if (flag_checking)
339038fd1498Szrj {
339138fd1498Szrj /* Check for a nonlocal goto to a containing function. Should have
339238fd1498Szrj gotten translated to __builtin_nonlocal_goto. */
339338fd1498Szrj tree context = decl_function_context (label);
339438fd1498Szrj gcc_assert (!context || context == current_function_decl);
339538fd1498Szrj }
339638fd1498Szrj
339738fd1498Szrj emit_jump (jump_target_rtx (label));
339838fd1498Szrj }
339938fd1498Szrj
340038fd1498Szrj /* Output a return with no value. */
340138fd1498Szrj
340238fd1498Szrj static void
340338fd1498Szrj expand_null_return_1 (void)
340438fd1498Szrj {
340538fd1498Szrj clear_pending_stack_adjust ();
340638fd1498Szrj do_pending_stack_adjust ();
340738fd1498Szrj emit_jump (return_label);
340838fd1498Szrj }
340938fd1498Szrj
341038fd1498Szrj /* Generate RTL to return from the current function, with no value.
341138fd1498Szrj (That is, we do not do anything about returning any value.) */
341238fd1498Szrj
341338fd1498Szrj void
341438fd1498Szrj expand_null_return (void)
341538fd1498Szrj {
341638fd1498Szrj /* If this function was declared to return a value, but we
341738fd1498Szrj didn't, clobber the return registers so that they are not
341838fd1498Szrj propagated live to the rest of the function. */
341938fd1498Szrj clobber_return_register ();
342038fd1498Szrj
342138fd1498Szrj expand_null_return_1 ();
342238fd1498Szrj }
342338fd1498Szrj
342438fd1498Szrj /* Generate RTL to return from the current function, with value VAL. */
342538fd1498Szrj
342638fd1498Szrj static void
342738fd1498Szrj expand_value_return (rtx val)
342838fd1498Szrj {
342938fd1498Szrj /* Copy the value to the return location unless it's already there. */
343038fd1498Szrj
343138fd1498Szrj tree decl = DECL_RESULT (current_function_decl);
343238fd1498Szrj rtx return_reg = DECL_RTL (decl);
343338fd1498Szrj if (return_reg != val)
343438fd1498Szrj {
343538fd1498Szrj tree funtype = TREE_TYPE (current_function_decl);
343638fd1498Szrj tree type = TREE_TYPE (decl);
343738fd1498Szrj int unsignedp = TYPE_UNSIGNED (type);
343838fd1498Szrj machine_mode old_mode = DECL_MODE (decl);
343938fd1498Szrj machine_mode mode;
344038fd1498Szrj if (DECL_BY_REFERENCE (decl))
344138fd1498Szrj mode = promote_function_mode (type, old_mode, &unsignedp, funtype, 2);
344238fd1498Szrj else
344338fd1498Szrj mode = promote_function_mode (type, old_mode, &unsignedp, funtype, 1);
344438fd1498Szrj
344538fd1498Szrj if (mode != old_mode)
344638fd1498Szrj val = convert_modes (mode, old_mode, val, unsignedp);
344738fd1498Szrj
344838fd1498Szrj if (GET_CODE (return_reg) == PARALLEL)
344938fd1498Szrj emit_group_load (return_reg, val, type, int_size_in_bytes (type));
345038fd1498Szrj else
345138fd1498Szrj emit_move_insn (return_reg, val);
345238fd1498Szrj }
345338fd1498Szrj
345438fd1498Szrj expand_null_return_1 ();
345538fd1498Szrj }
345638fd1498Szrj
345738fd1498Szrj /* Generate RTL to evaluate the expression RETVAL and return it
345838fd1498Szrj from the current function. */
345938fd1498Szrj
346038fd1498Szrj static void
346138fd1498Szrj expand_return (tree retval, tree bounds)
346238fd1498Szrj {
346338fd1498Szrj rtx result_rtl;
346438fd1498Szrj rtx val = 0;
346538fd1498Szrj tree retval_rhs;
346638fd1498Szrj rtx bounds_rtl;
346738fd1498Szrj
346838fd1498Szrj /* If function wants no value, give it none. */
346938fd1498Szrj if (TREE_CODE (TREE_TYPE (TREE_TYPE (current_function_decl))) == VOID_TYPE)
347038fd1498Szrj {
347138fd1498Szrj expand_normal (retval);
347238fd1498Szrj expand_null_return ();
347338fd1498Szrj return;
347438fd1498Szrj }
347538fd1498Szrj
347638fd1498Szrj if (retval == error_mark_node)
347738fd1498Szrj {
347838fd1498Szrj /* Treat this like a return of no value from a function that
347938fd1498Szrj returns a value. */
348038fd1498Szrj expand_null_return ();
348138fd1498Szrj return;
348238fd1498Szrj }
348338fd1498Szrj else if ((TREE_CODE (retval) == MODIFY_EXPR
348438fd1498Szrj || TREE_CODE (retval) == INIT_EXPR)
348538fd1498Szrj && TREE_CODE (TREE_OPERAND (retval, 0)) == RESULT_DECL)
348638fd1498Szrj retval_rhs = TREE_OPERAND (retval, 1);
348738fd1498Szrj else
348838fd1498Szrj retval_rhs = retval;
348938fd1498Szrj
349038fd1498Szrj result_rtl = DECL_RTL (DECL_RESULT (current_function_decl));
349138fd1498Szrj
349238fd1498Szrj /* Put returned bounds to the right place. */
349338fd1498Szrj bounds_rtl = DECL_BOUNDS_RTL (DECL_RESULT (current_function_decl));
349438fd1498Szrj if (bounds_rtl)
349538fd1498Szrj {
349638fd1498Szrj rtx addr = NULL;
349738fd1498Szrj rtx bnd = NULL;
349838fd1498Szrj
349938fd1498Szrj if (bounds && bounds != error_mark_node)
350038fd1498Szrj {
350138fd1498Szrj bnd = expand_normal (bounds);
350238fd1498Szrj targetm.calls.store_returned_bounds (bounds_rtl, bnd);
350338fd1498Szrj }
350438fd1498Szrj else if (REG_P (bounds_rtl))
350538fd1498Szrj {
350638fd1498Szrj if (bounds)
350738fd1498Szrj bnd = chkp_expand_zero_bounds ();
350838fd1498Szrj else
350938fd1498Szrj {
351038fd1498Szrj addr = expand_normal (build_fold_addr_expr (retval_rhs));
351138fd1498Szrj addr = gen_rtx_MEM (Pmode, addr);
351238fd1498Szrj bnd = targetm.calls.load_bounds_for_arg (addr, NULL, NULL);
351338fd1498Szrj }
351438fd1498Szrj
351538fd1498Szrj targetm.calls.store_returned_bounds (bounds_rtl, bnd);
351638fd1498Szrj }
351738fd1498Szrj else
351838fd1498Szrj {
351938fd1498Szrj int n;
352038fd1498Szrj
352138fd1498Szrj gcc_assert (GET_CODE (bounds_rtl) == PARALLEL);
352238fd1498Szrj
352338fd1498Szrj if (bounds)
352438fd1498Szrj bnd = chkp_expand_zero_bounds ();
352538fd1498Szrj else
352638fd1498Szrj {
352738fd1498Szrj addr = expand_normal (build_fold_addr_expr (retval_rhs));
352838fd1498Szrj addr = gen_rtx_MEM (Pmode, addr);
352938fd1498Szrj }
353038fd1498Szrj
353138fd1498Szrj for (n = 0; n < XVECLEN (bounds_rtl, 0); n++)
353238fd1498Szrj {
353338fd1498Szrj rtx slot = XEXP (XVECEXP (bounds_rtl, 0, n), 0);
353438fd1498Szrj if (!bounds)
353538fd1498Szrj {
353638fd1498Szrj rtx offs = XEXP (XVECEXP (bounds_rtl, 0, n), 1);
353738fd1498Szrj rtx from = adjust_address (addr, Pmode, INTVAL (offs));
353838fd1498Szrj bnd = targetm.calls.load_bounds_for_arg (from, NULL, NULL);
353938fd1498Szrj }
354038fd1498Szrj targetm.calls.store_returned_bounds (slot, bnd);
354138fd1498Szrj }
354238fd1498Szrj }
354338fd1498Szrj }
354438fd1498Szrj else if (chkp_function_instrumented_p (current_function_decl)
354538fd1498Szrj && !BOUNDED_P (retval_rhs)
354638fd1498Szrj && chkp_type_has_pointer (TREE_TYPE (retval_rhs))
354738fd1498Szrj && TREE_CODE (retval_rhs) != RESULT_DECL)
354838fd1498Szrj {
354938fd1498Szrj rtx addr = expand_normal (build_fold_addr_expr (retval_rhs));
355038fd1498Szrj addr = gen_rtx_MEM (Pmode, addr);
355138fd1498Szrj
355238fd1498Szrj gcc_assert (MEM_P (result_rtl));
355338fd1498Szrj
355438fd1498Szrj chkp_copy_bounds_for_stack_parm (result_rtl, addr, TREE_TYPE (retval_rhs));
355538fd1498Szrj }
355638fd1498Szrj
355738fd1498Szrj /* If we are returning the RESULT_DECL, then the value has already
355838fd1498Szrj been stored into it, so we don't have to do anything special. */
355938fd1498Szrj if (TREE_CODE (retval_rhs) == RESULT_DECL)
356038fd1498Szrj expand_value_return (result_rtl);
356138fd1498Szrj
356238fd1498Szrj /* If the result is an aggregate that is being returned in one (or more)
356338fd1498Szrj registers, load the registers here. */
356438fd1498Szrj
356538fd1498Szrj else if (retval_rhs != 0
356638fd1498Szrj && TYPE_MODE (TREE_TYPE (retval_rhs)) == BLKmode
356738fd1498Szrj && REG_P (result_rtl))
356838fd1498Szrj {
356938fd1498Szrj val = copy_blkmode_to_reg (GET_MODE (result_rtl), retval_rhs);
357038fd1498Szrj if (val)
357138fd1498Szrj {
357238fd1498Szrj /* Use the mode of the result value on the return register. */
357338fd1498Szrj PUT_MODE (result_rtl, GET_MODE (val));
357438fd1498Szrj expand_value_return (val);
357538fd1498Szrj }
357638fd1498Szrj else
357738fd1498Szrj expand_null_return ();
357838fd1498Szrj }
357938fd1498Szrj else if (retval_rhs != 0
358038fd1498Szrj && !VOID_TYPE_P (TREE_TYPE (retval_rhs))
358138fd1498Szrj && (REG_P (result_rtl)
358238fd1498Szrj || (GET_CODE (result_rtl) == PARALLEL)))
358338fd1498Szrj {
358438fd1498Szrj /* Compute the return value into a temporary (usually a pseudo reg). */
358538fd1498Szrj val
358638fd1498Szrj = assign_temp (TREE_TYPE (DECL_RESULT (current_function_decl)), 0, 1);
358738fd1498Szrj val = expand_expr (retval_rhs, val, GET_MODE (val), EXPAND_NORMAL);
358838fd1498Szrj val = force_not_mem (val);
358938fd1498Szrj expand_value_return (val);
359038fd1498Szrj }
359138fd1498Szrj else
359238fd1498Szrj {
359338fd1498Szrj /* No hard reg used; calculate value into hard return reg. */
359438fd1498Szrj expand_expr (retval, const0_rtx, VOIDmode, EXPAND_NORMAL);
359538fd1498Szrj expand_value_return (result_rtl);
359638fd1498Szrj }
359738fd1498Szrj }
359838fd1498Szrj
359938fd1498Szrj /* A subroutine of expand_gimple_stmt, expanding one gimple statement
360038fd1498Szrj STMT that doesn't require special handling for outgoing edges. That
360138fd1498Szrj is, no tailcalls and no GIMPLE_COND.  */
360238fd1498Szrj
360338fd1498Szrj static void
360438fd1498Szrj expand_gimple_stmt_1 (gimple *stmt)
360538fd1498Szrj {
360638fd1498Szrj tree op0;
360738fd1498Szrj
360838fd1498Szrj set_curr_insn_location (gimple_location (stmt));
360938fd1498Szrj
361038fd1498Szrj switch (gimple_code (stmt))
361138fd1498Szrj {
361238fd1498Szrj case GIMPLE_GOTO:
361338fd1498Szrj op0 = gimple_goto_dest (stmt);
361438fd1498Szrj if (TREE_CODE (op0) == LABEL_DECL)
361538fd1498Szrj expand_goto (op0);
361638fd1498Szrj else
361738fd1498Szrj expand_computed_goto (op0);
361838fd1498Szrj break;
361938fd1498Szrj case GIMPLE_LABEL:
362038fd1498Szrj expand_label (gimple_label_label (as_a <glabel *> (stmt)));
362138fd1498Szrj break;
362238fd1498Szrj case GIMPLE_NOP:
362338fd1498Szrj case GIMPLE_PREDICT:
362438fd1498Szrj break;
362538fd1498Szrj case GIMPLE_SWITCH:
362638fd1498Szrj {
362738fd1498Szrj gswitch *swtch = as_a <gswitch *> (stmt);
362838fd1498Szrj if (gimple_switch_num_labels (swtch) == 1)
362938fd1498Szrj expand_goto (CASE_LABEL (gimple_switch_default_label (swtch)));
363038fd1498Szrj else
363138fd1498Szrj expand_case (swtch);
363238fd1498Szrj }
363338fd1498Szrj break;
363438fd1498Szrj case GIMPLE_ASM:
363538fd1498Szrj expand_asm_stmt (as_a <gasm *> (stmt));
363638fd1498Szrj break;
363738fd1498Szrj case GIMPLE_CALL:
363838fd1498Szrj expand_call_stmt (as_a <gcall *> (stmt));
363938fd1498Szrj break;
364038fd1498Szrj
364138fd1498Szrj case GIMPLE_RETURN:
364238fd1498Szrj {
364338fd1498Szrj tree bnd = gimple_return_retbnd (as_a <greturn *> (stmt));
364438fd1498Szrj op0 = gimple_return_retval (as_a <greturn *> (stmt));
364538fd1498Szrj
364638fd1498Szrj if (op0 && op0 != error_mark_node)
364738fd1498Szrj {
364838fd1498Szrj tree result = DECL_RESULT (current_function_decl);
364938fd1498Szrj
365038fd1498Szrj /* Mark that we have a return statement with missing bounds.  */
365138fd1498Szrj if (!bnd
365238fd1498Szrj && chkp_function_instrumented_p (cfun->decl)
365338fd1498Szrj && !DECL_P (op0))
365438fd1498Szrj bnd = error_mark_node;
365538fd1498Szrj
365638fd1498Szrj /* If we are not returning the current function's RESULT_DECL,
365738fd1498Szrj build an assignment to it. */
365838fd1498Szrj if (op0 != result)
365938fd1498Szrj {
366038fd1498Szrj /* I believe that a function's RESULT_DECL is unique. */
366138fd1498Szrj gcc_assert (TREE_CODE (op0) != RESULT_DECL);
366238fd1498Szrj
366338fd1498Szrj /* ??? We'd like to use simply expand_assignment here,
366438fd1498Szrj but this fails if the value is of BLKmode but the return
366538fd1498Szrj decl is a register. expand_return has special handling
366638fd1498Szrj for this combination, which eventually should move
366738fd1498Szrj to common code. See comments there. Until then, let's
366838fd1498Szrj build a modify expression :-/ */
366938fd1498Szrj op0 = build2 (MODIFY_EXPR, TREE_TYPE (result),
367038fd1498Szrj result, op0);
367138fd1498Szrj }
367238fd1498Szrj }
367338fd1498Szrj
367438fd1498Szrj if (!op0)
367538fd1498Szrj expand_null_return ();
367638fd1498Szrj else
367738fd1498Szrj expand_return (op0, bnd);
367838fd1498Szrj }
367938fd1498Szrj break;
368038fd1498Szrj
368138fd1498Szrj case GIMPLE_ASSIGN:
368238fd1498Szrj {
368338fd1498Szrj gassign *assign_stmt = as_a <gassign *> (stmt);
368438fd1498Szrj tree lhs = gimple_assign_lhs (assign_stmt);
368538fd1498Szrj
368638fd1498Szrj /* Tree expand used to fiddle with |= and &= of two bitfield
368738fd1498Szrj COMPONENT_REFs here.  This can't happen with gimple; the LHS
368838fd1498Szrj of binary assigns must be a gimple reg. */
368938fd1498Szrj
369038fd1498Szrj if (TREE_CODE (lhs) != SSA_NAME
369138fd1498Szrj || get_gimple_rhs_class (gimple_expr_code (stmt))
369238fd1498Szrj == GIMPLE_SINGLE_RHS)
369338fd1498Szrj {
369438fd1498Szrj tree rhs = gimple_assign_rhs1 (assign_stmt);
369538fd1498Szrj gcc_assert (get_gimple_rhs_class (gimple_expr_code (stmt))
369638fd1498Szrj == GIMPLE_SINGLE_RHS);
369738fd1498Szrj if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (rhs)
369838fd1498Szrj /* Do not put locations on possibly shared trees. */
369938fd1498Szrj && !is_gimple_min_invariant (rhs))
370038fd1498Szrj SET_EXPR_LOCATION (rhs, gimple_location (stmt));
370138fd1498Szrj if (TREE_CLOBBER_P (rhs))
370238fd1498Szrj /* This is a clobber to mark the going out of scope for
370338fd1498Szrj this LHS. */
370438fd1498Szrj ;
370538fd1498Szrj else
370638fd1498Szrj expand_assignment (lhs, rhs,
370738fd1498Szrj gimple_assign_nontemporal_move_p (
370838fd1498Szrj assign_stmt));
370938fd1498Szrj }
371038fd1498Szrj else
371138fd1498Szrj {
371238fd1498Szrj rtx target, temp;
371338fd1498Szrj bool nontemporal = gimple_assign_nontemporal_move_p (assign_stmt);
371438fd1498Szrj struct separate_ops ops;
371538fd1498Szrj bool promoted = false;
371638fd1498Szrj
371738fd1498Szrj target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
371838fd1498Szrj if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
371938fd1498Szrj promoted = true;
372038fd1498Szrj
372138fd1498Szrj ops.code = gimple_assign_rhs_code (assign_stmt);
372238fd1498Szrj ops.type = TREE_TYPE (lhs);
372338fd1498Szrj switch (get_gimple_rhs_class (ops.code))
372438fd1498Szrj {
372538fd1498Szrj case GIMPLE_TERNARY_RHS:
372638fd1498Szrj ops.op2 = gimple_assign_rhs3 (assign_stmt);
372738fd1498Szrj /* Fallthru */
372838fd1498Szrj case GIMPLE_BINARY_RHS:
372938fd1498Szrj ops.op1 = gimple_assign_rhs2 (assign_stmt);
373038fd1498Szrj /* Fallthru */
373138fd1498Szrj case GIMPLE_UNARY_RHS:
373238fd1498Szrj ops.op0 = gimple_assign_rhs1 (assign_stmt);
373338fd1498Szrj break;
373438fd1498Szrj default:
373538fd1498Szrj gcc_unreachable ();
373638fd1498Szrj }
373738fd1498Szrj ops.location = gimple_location (stmt);
373838fd1498Szrj
373938fd1498Szrj /* If we want to use a nontemporal store, force the value to
374038fd1498Szrj register first. If we store into a promoted register,
374138fd1498Szrj don't directly expand to target. */
374238fd1498Szrj temp = nontemporal || promoted ? NULL_RTX : target;
374338fd1498Szrj temp = expand_expr_real_2 (&ops, temp, GET_MODE (target),
374438fd1498Szrj EXPAND_NORMAL);
374538fd1498Szrj
374638fd1498Szrj if (temp == target)
374738fd1498Szrj ;
374838fd1498Szrj else if (promoted)
374938fd1498Szrj {
375038fd1498Szrj int unsignedp = SUBREG_PROMOTED_SIGN (target);
375138fd1498Szrj /* If TEMP is a VOIDmode constant, use convert_modes to make
375238fd1498Szrj sure that we properly convert it. */
375338fd1498Szrj if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
375438fd1498Szrj {
375538fd1498Szrj temp = convert_modes (GET_MODE (target),
375638fd1498Szrj TYPE_MODE (ops.type),
375738fd1498Szrj temp, unsignedp);
375838fd1498Szrj temp = convert_modes (GET_MODE (SUBREG_REG (target)),
375938fd1498Szrj GET_MODE (target), temp, unsignedp);
376038fd1498Szrj }
376138fd1498Szrj
376238fd1498Szrj convert_move (SUBREG_REG (target), temp, unsignedp);
376338fd1498Szrj }
376438fd1498Szrj else if (nontemporal && emit_storent_insn (target, temp))
376538fd1498Szrj ;
376638fd1498Szrj else
376738fd1498Szrj {
376838fd1498Szrj temp = force_operand (temp, target);
376938fd1498Szrj if (temp != target)
377038fd1498Szrj emit_move_insn (target, temp);
377138fd1498Szrj }
377238fd1498Szrj }
377338fd1498Szrj }
377438fd1498Szrj break;
377538fd1498Szrj
377638fd1498Szrj default:
377738fd1498Szrj gcc_unreachable ();
377838fd1498Szrj }
377938fd1498Szrj }
378038fd1498Szrj
378138fd1498Szrj /* Expand one gimple statement STMT and return the last RTL instruction
378238fd1498Szrj before any of the newly generated ones.
378338fd1498Szrj
378438fd1498Szrj In addition to generating the necessary RTL instructions this also
378538fd1498Szrj sets REG_EH_REGION notes if necessary and sets the current source
378638fd1498Szrj location for diagnostics. */
378738fd1498Szrj
378838fd1498Szrj static rtx_insn *
378938fd1498Szrj expand_gimple_stmt (gimple *stmt)
379038fd1498Szrj {
379138fd1498Szrj location_t saved_location = input_location;
379238fd1498Szrj rtx_insn *last = get_last_insn ();
379338fd1498Szrj int lp_nr;
379438fd1498Szrj
379538fd1498Szrj gcc_assert (cfun);
379638fd1498Szrj
379738fd1498Szrj /* We need to save and restore the current source location so that errors
379838fd1498Szrj discovered during expansion are emitted with the right location. But
379938fd1498Szrj it would be better if the diagnostic routines used the source location
380038fd1498Szrj embedded in the tree nodes rather than globals. */
380138fd1498Szrj if (gimple_has_location (stmt))
380238fd1498Szrj input_location = gimple_location (stmt);
380338fd1498Szrj
380438fd1498Szrj expand_gimple_stmt_1 (stmt);
380538fd1498Szrj
380638fd1498Szrj /* Free any temporaries used to evaluate this statement. */
380738fd1498Szrj free_temp_slots ();
380838fd1498Szrj
380938fd1498Szrj input_location = saved_location;
381038fd1498Szrj
381138fd1498Szrj /* Mark all insns that may trap. */
381238fd1498Szrj lp_nr = lookup_stmt_eh_lp (stmt);
381338fd1498Szrj if (lp_nr)
381438fd1498Szrj {
381538fd1498Szrj rtx_insn *insn;
381638fd1498Szrj for (insn = next_real_insn (last); insn;
381738fd1498Szrj insn = next_real_insn (insn))
381838fd1498Szrj {
381938fd1498Szrj if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
382038fd1498Szrj /* If we want exceptions for non-call insns, any
382138fd1498Szrj may_trap_p instruction may throw. */
382238fd1498Szrj && GET_CODE (PATTERN (insn)) != CLOBBER
382338fd1498Szrj && GET_CODE (PATTERN (insn)) != USE
382438fd1498Szrj && insn_could_throw_p (insn))
382538fd1498Szrj make_reg_eh_region_note (insn, 0, lp_nr);
382638fd1498Szrj }
382738fd1498Szrj }
382838fd1498Szrj
382938fd1498Szrj return last;
383038fd1498Szrj }
383138fd1498Szrj
383238fd1498Szrj /* A subroutine of expand_gimple_basic_block. Expand one GIMPLE_CALL
383338fd1498Szrj that has CALL_EXPR_TAILCALL set. Returns non-null if we actually
383438fd1498Szrj generated a tail call (something that might be denied by the ABI
383538fd1498Szrj rules governing the call; see calls.c).
383638fd1498Szrj
383738fd1498Szrj Sets CAN_FALLTHRU if we generated a *conditional* tail call, and
383838fd1498Szrj can still reach the rest of BB. The case here is __builtin_sqrt,
383938fd1498Szrj where the NaN result goes through the external function (with a
384038fd1498Szrj tailcall) and the normal result happens via a sqrt instruction. */
384138fd1498Szrj
384238fd1498Szrj static basic_block
384338fd1498Szrj expand_gimple_tailcall (basic_block bb, gcall *stmt, bool *can_fallthru)
384438fd1498Szrj {
384538fd1498Szrj rtx_insn *last2, *last;
384638fd1498Szrj edge e;
384738fd1498Szrj edge_iterator ei;
384838fd1498Szrj profile_probability probability;
384938fd1498Szrj
385038fd1498Szrj last2 = last = expand_gimple_stmt (stmt);
385138fd1498Szrj
385238fd1498Szrj for (last = NEXT_INSN (last); last; last = NEXT_INSN (last))
385338fd1498Szrj if (CALL_P (last) && SIBLING_CALL_P (last))
385438fd1498Szrj goto found;
385538fd1498Szrj
385638fd1498Szrj maybe_dump_rtl_for_gimple_stmt (stmt, last2);
385738fd1498Szrj
385838fd1498Szrj *can_fallthru = true;
385938fd1498Szrj return NULL;
386038fd1498Szrj
386138fd1498Szrj found:
386238fd1498Szrj /* ??? Wouldn't it be better to just reset any pending stack adjust?
386338fd1498Szrj Any instructions emitted here are about to be deleted. */
386438fd1498Szrj do_pending_stack_adjust ();
386538fd1498Szrj
386638fd1498Szrj /* Remove any non-eh, non-abnormal edges that don't go to exit. */
386738fd1498Szrj /* ??? I.e. the fallthrough edge. HOWEVER! If there were to be
386838fd1498Szrj EH or abnormal edges, we shouldn't have created a tail call in
386938fd1498Szrj the first place. So it seems to me we should just be removing
387038fd1498Szrj all edges here, or redirecting the existing fallthru edge to
387138fd1498Szrj the exit block. */
387238fd1498Szrj
387338fd1498Szrj probability = profile_probability::never ();
387438fd1498Szrj
387538fd1498Szrj for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
387638fd1498Szrj {
387738fd1498Szrj if (!(e->flags & (EDGE_ABNORMAL | EDGE_EH)))
387838fd1498Szrj {
387938fd1498Szrj if (e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
388038fd1498Szrj e->dest->count -= e->count ();
388138fd1498Szrj probability += e->probability;
388238fd1498Szrj remove_edge (e);
388338fd1498Szrj }
388438fd1498Szrj else
388538fd1498Szrj ei_next (&ei);
388638fd1498Szrj }
388738fd1498Szrj
388838fd1498Szrj /* This is somewhat ugly: the call_expr expander often emits instructions
388938fd1498Szrj after the sibcall (to perform the function return). These confuse the
389038fd1498Szrj    find_many_sub_basic_blocks code, so we need to get rid of them.  */
389138fd1498Szrj last = NEXT_INSN (last);
389238fd1498Szrj gcc_assert (BARRIER_P (last));
389338fd1498Szrj
389438fd1498Szrj *can_fallthru = false;
389538fd1498Szrj while (NEXT_INSN (last))
389638fd1498Szrj {
389738fd1498Szrj 	  /* For instance, the sqrt builtin expander expands an `if' with a
389838fd1498Szrj 	     sibcall in the `then' arm and a label for the `else' arm.  */
389938fd1498Szrj if (LABEL_P (NEXT_INSN (last)))
390038fd1498Szrj {
390138fd1498Szrj *can_fallthru = true;
390238fd1498Szrj break;
390338fd1498Szrj }
390438fd1498Szrj delete_insn (NEXT_INSN (last));
390538fd1498Szrj }
390638fd1498Szrj
390738fd1498Szrj e = make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_ABNORMAL
390838fd1498Szrj | EDGE_SIBCALL);
390938fd1498Szrj e->probability = probability;
391038fd1498Szrj BB_END (bb) = last;
391138fd1498Szrj update_bb_for_insn (bb);
391238fd1498Szrj
391338fd1498Szrj if (NEXT_INSN (last))
391438fd1498Szrj {
391538fd1498Szrj bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
391638fd1498Szrj
391738fd1498Szrj last = BB_END (bb);
391838fd1498Szrj if (BARRIER_P (last))
391938fd1498Szrj BB_END (bb) = PREV_INSN (last);
392038fd1498Szrj }
392138fd1498Szrj
392238fd1498Szrj maybe_dump_rtl_for_gimple_stmt (stmt, last2);
392338fd1498Szrj
392438fd1498Szrj return bb;
392538fd1498Szrj }
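
/* Note the edge surgery above: the probability of each removed fallthrough
   edge is accumulated and transferred to a new EDGE_ABNORMAL | EDGE_SIBCALL
   edge to the exit block, and the insns the expander emitted after the
   sibcall's barrier are deleted -- unless a label follows, in which case the
   block can still fall through to the code after the label.  */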
392638fd1498Szrj
392738fd1498Szrj /* Return the difference between the floor and the truncated result of
392838fd1498Szrj a signed division by OP1 with remainder MOD. */
392938fd1498Szrj static rtx
393038fd1498Szrj floor_sdiv_adjust (machine_mode mode, rtx mod, rtx op1)
393138fd1498Szrj {
393238fd1498Szrj /* (mod != 0 ? (op1 / mod < 0 ? -1 : 0) : 0) */
393338fd1498Szrj return gen_rtx_IF_THEN_ELSE
393438fd1498Szrj (mode, gen_rtx_NE (BImode, mod, const0_rtx),
393538fd1498Szrj gen_rtx_IF_THEN_ELSE
393638fd1498Szrj (mode, gen_rtx_LT (BImode,
393738fd1498Szrj gen_rtx_DIV (mode, op1, mod),
393838fd1498Szrj const0_rtx),
393938fd1498Szrj constm1_rtx, const0_rtx),
394038fd1498Szrj const0_rtx);
394138fd1498Szrj }
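
/* Worked example for floor_sdiv_adjust (rtl DIV/MOD truncate toward zero):
   for -7 / 2 the truncated quotient is -3 with MOD = -1 and OP1 = 2; MOD is
   nonzero and OP1 / MOD = -2 is negative, so the adjustment is -1, giving
   the floor result -3 + -1 = -4.  When MOD and OP1 have the same sign
   (e.g. 7 / 2), the adjustment is 0 and floor equals truncation.  */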
394238fd1498Szrj
394338fd1498Szrj /* Return the difference between the ceil and the truncated result of
394438fd1498Szrj a signed division by OP1 with remainder MOD. */
394538fd1498Szrj static rtx
394638fd1498Szrj ceil_sdiv_adjust (machine_mode mode, rtx mod, rtx op1)
394738fd1498Szrj {
394838fd1498Szrj /* (mod != 0 ? (op1 / mod > 0 ? 1 : 0) : 0) */
394938fd1498Szrj return gen_rtx_IF_THEN_ELSE
395038fd1498Szrj (mode, gen_rtx_NE (BImode, mod, const0_rtx),
395138fd1498Szrj gen_rtx_IF_THEN_ELSE
395238fd1498Szrj (mode, gen_rtx_GT (BImode,
395338fd1498Szrj gen_rtx_DIV (mode, op1, mod),
395438fd1498Szrj const0_rtx),
395538fd1498Szrj const1_rtx, const0_rtx),
395638fd1498Szrj const0_rtx);
395738fd1498Szrj }
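
/* Worked example for ceil_sdiv_adjust: for 7 / 2 the truncated quotient is
   3 with MOD = 1 and OP1 = 2; OP1 / MOD = 2 is positive, so the adjustment
   is +1 and the ceiling result is 4.  For -7 / 2, MOD = -1 makes OP1 / MOD
   negative, the adjustment is 0, and ceiling equals truncation (-3).  */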
395838fd1498Szrj
395938fd1498Szrj /* Return the difference between the ceil and the truncated result of
396038fd1498Szrj an unsigned division by OP1 with remainder MOD. */
396138fd1498Szrj static rtx
396238fd1498Szrj ceil_udiv_adjust (machine_mode mode, rtx mod, rtx op1 ATTRIBUTE_UNUSED)
396338fd1498Szrj {
396438fd1498Szrj /* (mod != 0 ? 1 : 0) */
396538fd1498Szrj return gen_rtx_IF_THEN_ELSE
396638fd1498Szrj (mode, gen_rtx_NE (BImode, mod, const0_rtx),
396738fd1498Szrj const1_rtx, const0_rtx);
396838fd1498Szrj }
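
/* Worked example for ceil_udiv_adjust: for 7u / 2u the truncated quotient
   is 3 with MOD = 1; any nonzero remainder bumps the result by +1, giving
   the ceiling 4.  An exact division (MOD = 0) is left unchanged.  */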
396938fd1498Szrj
397038fd1498Szrj /* Return the difference between the rounded and the truncated result
397138fd1498Szrj of a signed division by OP1 with remainder MOD. Halfway cases are
397238fd1498Szrj rounded away from zero, rather than to the nearest even number. */
397338fd1498Szrj static rtx
397438fd1498Szrj round_sdiv_adjust (machine_mode mode, rtx mod, rtx op1)
397538fd1498Szrj {
397638fd1498Szrj /* (abs (mod) >= abs (op1) - abs (mod)
397738fd1498Szrj ? (op1 / mod > 0 ? 1 : -1)
397838fd1498Szrj : 0) */
397938fd1498Szrj return gen_rtx_IF_THEN_ELSE
398038fd1498Szrj (mode, gen_rtx_GE (BImode, gen_rtx_ABS (mode, mod),
398138fd1498Szrj gen_rtx_MINUS (mode,
398238fd1498Szrj gen_rtx_ABS (mode, op1),
398338fd1498Szrj gen_rtx_ABS (mode, mod))),
398438fd1498Szrj gen_rtx_IF_THEN_ELSE
398538fd1498Szrj (mode, gen_rtx_GT (BImode,
398638fd1498Szrj gen_rtx_DIV (mode, op1, mod),
398738fd1498Szrj const0_rtx),
398838fd1498Szrj const1_rtx, constm1_rtx),
398938fd1498Szrj const0_rtx);
399038fd1498Szrj }
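
/* Worked examples for round_sdiv_adjust: for 7 / 4 (1.75) the truncated
   quotient is 1 with MOD = 3; |3| >= |4| - |3| and OP1 / MOD is positive,
   so the adjustment is +1 and the rounded result is 2.  The halfway case
   6 / 4 (1.5) also satisfies |2| >= |4| - |2| and so rounds away from zero
   to 2.  */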
399138fd1498Szrj
399238fd1498Szrj /* Return the difference between the rounded and the truncated result
399338fd1498Szrj    of an unsigned division by OP1 with remainder MOD.  Halfway cases
399438fd1498Szrj are rounded away from zero, rather than to the nearest even
399538fd1498Szrj number. */
399638fd1498Szrj static rtx
399738fd1498Szrj round_udiv_adjust (machine_mode mode, rtx mod, rtx op1)
399838fd1498Szrj {
399938fd1498Szrj /* (mod >= op1 - mod ? 1 : 0) */
400038fd1498Szrj return gen_rtx_IF_THEN_ELSE
400138fd1498Szrj (mode, gen_rtx_GE (BImode, mod,
400238fd1498Szrj gen_rtx_MINUS (mode, op1, mod)),
400338fd1498Szrj const1_rtx, const0_rtx);
400438fd1498Szrj }
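
/* Worked example for round_udiv_adjust: for 6u / 4u (1.5), MOD = 2 and
   OP1 - MOD = 2, so MOD >= OP1 - MOD holds and the halfway case rounds up
   to 2; for 5u / 4u (1.25), 1 < 3 and the truncated quotient 1 stands.  */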
400538fd1498Szrj
400638fd1498Szrj /* Convert X to MODE, that must be Pmode or ptr_mode, without emitting
400738fd1498Szrj any rtl. */
400838fd1498Szrj
400938fd1498Szrj static rtx
401038fd1498Szrj convert_debug_memory_address (scalar_int_mode mode, rtx x,
401138fd1498Szrj addr_space_t as)
401238fd1498Szrj {
401338fd1498Szrj #ifndef POINTERS_EXTEND_UNSIGNED
401438fd1498Szrj gcc_assert (mode == Pmode
401538fd1498Szrj || mode == targetm.addr_space.address_mode (as));
401638fd1498Szrj gcc_assert (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode);
401738fd1498Szrj #else
401838fd1498Szrj rtx temp;
401938fd1498Szrj
402038fd1498Szrj gcc_assert (targetm.addr_space.valid_pointer_mode (mode, as));
402138fd1498Szrj
402238fd1498Szrj if (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode)
402338fd1498Szrj return x;
402438fd1498Szrj
402538fd1498Szrj /* X must have some form of address mode already. */
402638fd1498Szrj scalar_int_mode xmode = as_a <scalar_int_mode> (GET_MODE (x));
402738fd1498Szrj if (GET_MODE_PRECISION (mode) < GET_MODE_PRECISION (xmode))
402838fd1498Szrj x = lowpart_subreg (mode, x, xmode);
402938fd1498Szrj else if (POINTERS_EXTEND_UNSIGNED > 0)
403038fd1498Szrj x = gen_rtx_ZERO_EXTEND (mode, x);
403138fd1498Szrj else if (!POINTERS_EXTEND_UNSIGNED)
403238fd1498Szrj x = gen_rtx_SIGN_EXTEND (mode, x);
403338fd1498Szrj else
403438fd1498Szrj {
403538fd1498Szrj switch (GET_CODE (x))
403638fd1498Szrj {
403738fd1498Szrj case SUBREG:
403838fd1498Szrj if ((SUBREG_PROMOTED_VAR_P (x)
403938fd1498Szrj || (REG_P (SUBREG_REG (x)) && REG_POINTER (SUBREG_REG (x)))
404038fd1498Szrj || (GET_CODE (SUBREG_REG (x)) == PLUS
404138fd1498Szrj && REG_P (XEXP (SUBREG_REG (x), 0))
404238fd1498Szrj && REG_POINTER (XEXP (SUBREG_REG (x), 0))
404338fd1498Szrj && CONST_INT_P (XEXP (SUBREG_REG (x), 1))))
404438fd1498Szrj && GET_MODE (SUBREG_REG (x)) == mode)
404538fd1498Szrj return SUBREG_REG (x);
404638fd1498Szrj break;
404738fd1498Szrj case LABEL_REF:
404838fd1498Szrj temp = gen_rtx_LABEL_REF (mode, label_ref_label (x));
404938fd1498Szrj LABEL_REF_NONLOCAL_P (temp) = LABEL_REF_NONLOCAL_P (x);
405038fd1498Szrj return temp;
405138fd1498Szrj case SYMBOL_REF:
405238fd1498Szrj temp = shallow_copy_rtx (x);
405338fd1498Szrj PUT_MODE (temp, mode);
405438fd1498Szrj return temp;
405538fd1498Szrj case CONST:
405638fd1498Szrj temp = convert_debug_memory_address (mode, XEXP (x, 0), as);
405738fd1498Szrj if (temp)
405838fd1498Szrj temp = gen_rtx_CONST (mode, temp);
405938fd1498Szrj return temp;
406038fd1498Szrj case PLUS:
406138fd1498Szrj case MINUS:
406238fd1498Szrj if (CONST_INT_P (XEXP (x, 1)))
406338fd1498Szrj {
406438fd1498Szrj temp = convert_debug_memory_address (mode, XEXP (x, 0), as);
406538fd1498Szrj if (temp)
406638fd1498Szrj return gen_rtx_fmt_ee (GET_CODE (x), mode, temp, XEXP (x, 1));
406738fd1498Szrj }
406838fd1498Szrj break;
406938fd1498Szrj default:
407038fd1498Szrj break;
407138fd1498Szrj }
407238fd1498Szrj       /* Don't know how to express ptr_extend as an operation in debug info.  */
407338fd1498Szrj return NULL;
407438fd1498Szrj }
407538fd1498Szrj #endif /* POINTERS_EXTEND_UNSIGNED */
407638fd1498Szrj
407738fd1498Szrj return x;
407838fd1498Szrj }
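
/* Illustration (assuming a target with 32-bit pointers, 64-bit Pmode and
   POINTERS_EXTEND_UNSIGNED > 0): converting (reg:SI 100) to DImode simply
   yields (zero_extend:DI (reg:SI 100)).  No insns are emitted anywhere in
   this function; debug expressions only need the rtl wrapper.  */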
407938fd1498Szrj
408038fd1498Szrj /* Map from SSA_NAMEs to corresponding DEBUG_EXPR_DECLs created
408138fd1498Szrj by avoid_deep_ter_for_debug. */
408238fd1498Szrj
408338fd1498Szrj static hash_map<tree, tree> *deep_ter_debug_map;
408438fd1498Szrj
408538fd1498Szrj /* Split too deep TER chains for debug stmts using debug temporaries. */
408638fd1498Szrj
408738fd1498Szrj static void
408838fd1498Szrj avoid_deep_ter_for_debug (gimple *stmt, int depth)
408938fd1498Szrj {
409038fd1498Szrj use_operand_p use_p;
409138fd1498Szrj ssa_op_iter iter;
409238fd1498Szrj FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
409338fd1498Szrj {
409438fd1498Szrj tree use = USE_FROM_PTR (use_p);
409538fd1498Szrj if (TREE_CODE (use) != SSA_NAME || SSA_NAME_IS_DEFAULT_DEF (use))
409638fd1498Szrj continue;
409738fd1498Szrj gimple *g = get_gimple_for_ssa_name (use);
409838fd1498Szrj if (g == NULL)
409938fd1498Szrj continue;
410038fd1498Szrj if (depth > 6 && !stmt_ends_bb_p (g))
410138fd1498Szrj {
410238fd1498Szrj if (deep_ter_debug_map == NULL)
410338fd1498Szrj deep_ter_debug_map = new hash_map<tree, tree>;
410438fd1498Szrj
410538fd1498Szrj tree &vexpr = deep_ter_debug_map->get_or_insert (use);
410638fd1498Szrj if (vexpr != NULL)
410738fd1498Szrj continue;
410838fd1498Szrj vexpr = make_node (DEBUG_EXPR_DECL);
410938fd1498Szrj gimple *def_temp = gimple_build_debug_bind (vexpr, use, g);
411038fd1498Szrj DECL_ARTIFICIAL (vexpr) = 1;
411138fd1498Szrj TREE_TYPE (vexpr) = TREE_TYPE (use);
411238fd1498Szrj SET_DECL_MODE (vexpr, TYPE_MODE (TREE_TYPE (use)));
411338fd1498Szrj gimple_stmt_iterator gsi = gsi_for_stmt (g);
411438fd1498Szrj gsi_insert_after (&gsi, def_temp, GSI_NEW_STMT);
411538fd1498Szrj avoid_deep_ter_for_debug (def_temp, 0);
411638fd1498Szrj }
411738fd1498Szrj else
411838fd1498Szrj avoid_deep_ter_for_debug (g, depth + 1);
411938fd1498Szrj }
412038fd1498Szrj }
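
/* For example, if TER would substitute a chain of single-use statements
   t1 = a + b; t2 = t1 * c; ...; t8 = t7 - d; into one debug expression,
   any use nested more than six statements deep is instead bound to a fresh
   DEBUG_EXPR_DECL, so each DEBUG_INSN's expression stays shallow.  */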
412138fd1498Szrj
412238fd1498Szrj /* Return an RTX equivalent to the value of the parameter DECL. */
412338fd1498Szrj
412438fd1498Szrj static rtx
412538fd1498Szrj expand_debug_parm_decl (tree decl)
412638fd1498Szrj {
412738fd1498Szrj rtx incoming = DECL_INCOMING_RTL (decl);
412838fd1498Szrj
412938fd1498Szrj if (incoming
413038fd1498Szrj && GET_MODE (incoming) != BLKmode
413138fd1498Szrj && ((REG_P (incoming) && HARD_REGISTER_P (incoming))
413238fd1498Szrj || (MEM_P (incoming)
413338fd1498Szrj && REG_P (XEXP (incoming, 0))
413438fd1498Szrj && HARD_REGISTER_P (XEXP (incoming, 0)))))
413538fd1498Szrj {
413638fd1498Szrj rtx rtl = gen_rtx_ENTRY_VALUE (GET_MODE (incoming));
413738fd1498Szrj
413838fd1498Szrj #ifdef HAVE_window_save
413938fd1498Szrj /* DECL_INCOMING_RTL uses the INCOMING_REGNO of parameter registers.
414038fd1498Szrj If the target machine has an explicit window save instruction, the
414138fd1498Szrj actual entry value is the corresponding OUTGOING_REGNO instead. */
414238fd1498Szrj if (REG_P (incoming)
414338fd1498Szrj && OUTGOING_REGNO (REGNO (incoming)) != REGNO (incoming))
414438fd1498Szrj incoming
414538fd1498Szrj = gen_rtx_REG_offset (incoming, GET_MODE (incoming),
414638fd1498Szrj OUTGOING_REGNO (REGNO (incoming)), 0);
414738fd1498Szrj else if (MEM_P (incoming))
414838fd1498Szrj {
414938fd1498Szrj rtx reg = XEXP (incoming, 0);
415038fd1498Szrj if (OUTGOING_REGNO (REGNO (reg)) != REGNO (reg))
415138fd1498Szrj {
415238fd1498Szrj reg = gen_raw_REG (GET_MODE (reg), OUTGOING_REGNO (REGNO (reg)));
415338fd1498Szrj incoming = replace_equiv_address_nv (incoming, reg);
415438fd1498Szrj }
415538fd1498Szrj else
415638fd1498Szrj incoming = copy_rtx (incoming);
415738fd1498Szrj }
415838fd1498Szrj #endif
415938fd1498Szrj
416038fd1498Szrj ENTRY_VALUE_EXP (rtl) = incoming;
416138fd1498Szrj return rtl;
416238fd1498Szrj }
416338fd1498Szrj
416438fd1498Szrj if (incoming
416538fd1498Szrj && GET_MODE (incoming) != BLKmode
416638fd1498Szrj && !TREE_ADDRESSABLE (decl)
416738fd1498Szrj && MEM_P (incoming)
416838fd1498Szrj && (XEXP (incoming, 0) == virtual_incoming_args_rtx
416938fd1498Szrj || (GET_CODE (XEXP (incoming, 0)) == PLUS
417038fd1498Szrj && XEXP (XEXP (incoming, 0), 0) == virtual_incoming_args_rtx
417138fd1498Szrj && CONST_INT_P (XEXP (XEXP (incoming, 0), 1)))))
417238fd1498Szrj return copy_rtx (incoming);
417338fd1498Szrj
417438fd1498Szrj return NULL_RTX;
417538fd1498Szrj }
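
/* For a parameter passed in a hard register, this produces
   (entry_value:MODE (reg:MODE n)); the var-tracking pass can later resolve
   that to the value the register held on entry to the function, even after
   the register itself has been clobbered.  */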
417638fd1498Szrj
417738fd1498Szrj /* Return an RTX equivalent to the value of the tree expression EXP. */
417838fd1498Szrj
417938fd1498Szrj static rtx
418038fd1498Szrj expand_debug_expr (tree exp)
418138fd1498Szrj {
418238fd1498Szrj rtx op0 = NULL_RTX, op1 = NULL_RTX, op2 = NULL_RTX;
418338fd1498Szrj machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
418438fd1498Szrj machine_mode inner_mode = VOIDmode;
418538fd1498Szrj int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
418638fd1498Szrj addr_space_t as;
418738fd1498Szrj scalar_int_mode op0_mode, op1_mode, addr_mode;
418838fd1498Szrj
418938fd1498Szrj switch (TREE_CODE_CLASS (TREE_CODE (exp)))
419038fd1498Szrj {
419138fd1498Szrj case tcc_expression:
419238fd1498Szrj switch (TREE_CODE (exp))
419338fd1498Szrj {
419438fd1498Szrj case COND_EXPR:
419538fd1498Szrj case DOT_PROD_EXPR:
419638fd1498Szrj case SAD_EXPR:
419738fd1498Szrj case WIDEN_MULT_PLUS_EXPR:
419838fd1498Szrj case WIDEN_MULT_MINUS_EXPR:
419938fd1498Szrj case FMA_EXPR:
420038fd1498Szrj goto ternary;
420138fd1498Szrj
420238fd1498Szrj case TRUTH_ANDIF_EXPR:
420338fd1498Szrj case TRUTH_ORIF_EXPR:
420438fd1498Szrj case TRUTH_AND_EXPR:
420538fd1498Szrj case TRUTH_OR_EXPR:
420638fd1498Szrj case TRUTH_XOR_EXPR:
420738fd1498Szrj goto binary;
420838fd1498Szrj
420938fd1498Szrj case TRUTH_NOT_EXPR:
421038fd1498Szrj goto unary;
421138fd1498Szrj
421238fd1498Szrj default:
421338fd1498Szrj break;
421438fd1498Szrj }
421538fd1498Szrj break;
421638fd1498Szrj
421738fd1498Szrj ternary:
421838fd1498Szrj op2 = expand_debug_expr (TREE_OPERAND (exp, 2));
421938fd1498Szrj if (!op2)
422038fd1498Szrj return NULL_RTX;
422138fd1498Szrj /* Fall through. */
422238fd1498Szrj
422338fd1498Szrj binary:
422438fd1498Szrj case tcc_binary:
422538fd1498Szrj if (mode == BLKmode)
422638fd1498Szrj return NULL_RTX;
422738fd1498Szrj op1 = expand_debug_expr (TREE_OPERAND (exp, 1));
422838fd1498Szrj if (!op1)
422938fd1498Szrj return NULL_RTX;
423038fd1498Szrj switch (TREE_CODE (exp))
423138fd1498Szrj {
423238fd1498Szrj case LSHIFT_EXPR:
423338fd1498Szrj case RSHIFT_EXPR:
423438fd1498Szrj case LROTATE_EXPR:
423538fd1498Szrj case RROTATE_EXPR:
423638fd1498Szrj case WIDEN_LSHIFT_EXPR:
423738fd1498Szrj /* Ensure second operand isn't wider than the first one. */
423838fd1498Szrj inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
423938fd1498Szrj if (is_a <scalar_int_mode> (inner_mode, &op1_mode)
424038fd1498Szrj && (GET_MODE_UNIT_PRECISION (mode)
424138fd1498Szrj < GET_MODE_PRECISION (op1_mode)))
424238fd1498Szrj op1 = lowpart_subreg (GET_MODE_INNER (mode), op1, op1_mode);
424338fd1498Szrj break;
424438fd1498Szrj default:
424538fd1498Szrj break;
424638fd1498Szrj }
424738fd1498Szrj /* Fall through. */
424838fd1498Szrj
424938fd1498Szrj unary:
425038fd1498Szrj case tcc_unary:
425138fd1498Szrj if (mode == BLKmode)
425238fd1498Szrj return NULL_RTX;
425338fd1498Szrj inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
425438fd1498Szrj op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
425538fd1498Szrj if (!op0)
425638fd1498Szrj return NULL_RTX;
425738fd1498Szrj break;
425838fd1498Szrj
425938fd1498Szrj case tcc_comparison:
426038fd1498Szrj unsignedp = TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
426138fd1498Szrj goto binary;
426238fd1498Szrj
426338fd1498Szrj case tcc_type:
426438fd1498Szrj case tcc_statement:
426538fd1498Szrj gcc_unreachable ();
426638fd1498Szrj
426738fd1498Szrj case tcc_constant:
426838fd1498Szrj case tcc_exceptional:
426938fd1498Szrj case tcc_declaration:
427038fd1498Szrj case tcc_reference:
427138fd1498Szrj case tcc_vl_exp:
427238fd1498Szrj break;
427338fd1498Szrj }
427438fd1498Szrj
427538fd1498Szrj switch (TREE_CODE (exp))
427638fd1498Szrj {
427738fd1498Szrj case STRING_CST:
427838fd1498Szrj if (!lookup_constant_def (exp))
427938fd1498Szrj {
428038fd1498Szrj if (strlen (TREE_STRING_POINTER (exp)) + 1
428138fd1498Szrj != (size_t) TREE_STRING_LENGTH (exp))
428238fd1498Szrj return NULL_RTX;
428338fd1498Szrj op0 = gen_rtx_CONST_STRING (Pmode, TREE_STRING_POINTER (exp));
428438fd1498Szrj op0 = gen_rtx_MEM (BLKmode, op0);
428538fd1498Szrj set_mem_attributes (op0, exp, 0);
428638fd1498Szrj return op0;
428738fd1498Szrj }
428838fd1498Szrj /* Fall through. */
428938fd1498Szrj
429038fd1498Szrj case INTEGER_CST:
429138fd1498Szrj case REAL_CST:
429238fd1498Szrj case FIXED_CST:
429338fd1498Szrj op0 = expand_expr (exp, NULL_RTX, mode, EXPAND_INITIALIZER);
429438fd1498Szrj return op0;
429538fd1498Szrj
429638fd1498Szrj case POLY_INT_CST:
429738fd1498Szrj return immed_wide_int_const (poly_int_cst_value (exp), mode);
429838fd1498Szrj
429938fd1498Szrj case COMPLEX_CST:
430038fd1498Szrj gcc_assert (COMPLEX_MODE_P (mode));
430138fd1498Szrj op0 = expand_debug_expr (TREE_REALPART (exp));
430238fd1498Szrj op1 = expand_debug_expr (TREE_IMAGPART (exp));
430338fd1498Szrj return gen_rtx_CONCAT (mode, op0, op1);
430438fd1498Szrj
430538fd1498Szrj case DEBUG_EXPR_DECL:
430638fd1498Szrj op0 = DECL_RTL_IF_SET (exp);
430738fd1498Szrj
430838fd1498Szrj if (op0)
430938fd1498Szrj return op0;
431038fd1498Szrj
431138fd1498Szrj op0 = gen_rtx_DEBUG_EXPR (mode);
431238fd1498Szrj DEBUG_EXPR_TREE_DECL (op0) = exp;
431338fd1498Szrj SET_DECL_RTL (exp, op0);
431438fd1498Szrj
431538fd1498Szrj return op0;
431638fd1498Szrj
431738fd1498Szrj case VAR_DECL:
431838fd1498Szrj case PARM_DECL:
431938fd1498Szrj case FUNCTION_DECL:
432038fd1498Szrj case LABEL_DECL:
432138fd1498Szrj case CONST_DECL:
432238fd1498Szrj case RESULT_DECL:
432338fd1498Szrj op0 = DECL_RTL_IF_SET (exp);
432438fd1498Szrj
432538fd1498Szrj /* This decl was probably optimized away. */
432638fd1498Szrj if (!op0)
432738fd1498Szrj {
432838fd1498Szrj if (!VAR_P (exp)
432938fd1498Szrj || DECL_EXTERNAL (exp)
433038fd1498Szrj || !TREE_STATIC (exp)
433138fd1498Szrj || !DECL_NAME (exp)
433238fd1498Szrj || DECL_HARD_REGISTER (exp)
433338fd1498Szrj || DECL_IN_CONSTANT_POOL (exp)
433438fd1498Szrj || mode == VOIDmode)
433538fd1498Szrj return NULL;
433638fd1498Szrj
433738fd1498Szrj op0 = make_decl_rtl_for_debug (exp);
433838fd1498Szrj if (!MEM_P (op0)
433938fd1498Szrj || GET_CODE (XEXP (op0, 0)) != SYMBOL_REF
434038fd1498Szrj || SYMBOL_REF_DECL (XEXP (op0, 0)) != exp)
434138fd1498Szrj return NULL;
434238fd1498Szrj }
434338fd1498Szrj else
434438fd1498Szrj op0 = copy_rtx (op0);
434538fd1498Szrj
434638fd1498Szrj if (GET_MODE (op0) == BLKmode
434738fd1498Szrj /* If op0 is not BLKmode, but mode is, adjust_mode
434838fd1498Szrj below would ICE. While it is likely a FE bug,
434938fd1498Szrj try to be robust here. See PR43166. */
435038fd1498Szrj || mode == BLKmode
435138fd1498Szrj || (mode == VOIDmode && GET_MODE (op0) != VOIDmode))
435238fd1498Szrj {
435338fd1498Szrj gcc_assert (MEM_P (op0));
435438fd1498Szrj op0 = adjust_address_nv (op0, mode, 0);
435538fd1498Szrj return op0;
435638fd1498Szrj }
435738fd1498Szrj
435838fd1498Szrj /* Fall through. */
435938fd1498Szrj
436038fd1498Szrj adjust_mode:
436138fd1498Szrj case PAREN_EXPR:
436238fd1498Szrj CASE_CONVERT:
436338fd1498Szrj {
436438fd1498Szrj inner_mode = GET_MODE (op0);
436538fd1498Szrj
436638fd1498Szrj if (mode == inner_mode)
436738fd1498Szrj return op0;
436838fd1498Szrj
436938fd1498Szrj if (inner_mode == VOIDmode)
437038fd1498Szrj {
437138fd1498Szrj if (TREE_CODE (exp) == SSA_NAME)
437238fd1498Szrj inner_mode = TYPE_MODE (TREE_TYPE (exp));
437338fd1498Szrj else
437438fd1498Szrj inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
437538fd1498Szrj if (mode == inner_mode)
437638fd1498Szrj return op0;
437738fd1498Szrj }
437838fd1498Szrj
437938fd1498Szrj if (FLOAT_MODE_P (mode) && FLOAT_MODE_P (inner_mode))
438038fd1498Szrj {
438138fd1498Szrj if (GET_MODE_UNIT_BITSIZE (mode)
438238fd1498Szrj == GET_MODE_UNIT_BITSIZE (inner_mode))
438338fd1498Szrj op0 = simplify_gen_subreg (mode, op0, inner_mode, 0);
438438fd1498Szrj else if (GET_MODE_UNIT_BITSIZE (mode)
438538fd1498Szrj < GET_MODE_UNIT_BITSIZE (inner_mode))
438638fd1498Szrj op0 = simplify_gen_unary (FLOAT_TRUNCATE, mode, op0, inner_mode);
438738fd1498Szrj else
438838fd1498Szrj op0 = simplify_gen_unary (FLOAT_EXTEND, mode, op0, inner_mode);
438938fd1498Szrj }
439038fd1498Szrj else if (FLOAT_MODE_P (mode))
439138fd1498Szrj {
439238fd1498Szrj gcc_assert (TREE_CODE (exp) != SSA_NAME);
439338fd1498Szrj if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
439438fd1498Szrj op0 = simplify_gen_unary (UNSIGNED_FLOAT, mode, op0, inner_mode);
439538fd1498Szrj else
439638fd1498Szrj op0 = simplify_gen_unary (FLOAT, mode, op0, inner_mode);
439738fd1498Szrj }
439838fd1498Szrj else if (FLOAT_MODE_P (inner_mode))
439938fd1498Szrj {
440038fd1498Szrj if (unsignedp)
440138fd1498Szrj op0 = simplify_gen_unary (UNSIGNED_FIX, mode, op0, inner_mode);
440238fd1498Szrj else
440338fd1498Szrj op0 = simplify_gen_unary (FIX, mode, op0, inner_mode);
440438fd1498Szrj }
440538fd1498Szrj else if (GET_MODE_UNIT_PRECISION (mode)
440638fd1498Szrj == GET_MODE_UNIT_PRECISION (inner_mode))
440738fd1498Szrj op0 = lowpart_subreg (mode, op0, inner_mode);
440838fd1498Szrj else if (GET_MODE_UNIT_PRECISION (mode)
440938fd1498Szrj < GET_MODE_UNIT_PRECISION (inner_mode))
441038fd1498Szrj op0 = simplify_gen_unary (TRUNCATE, mode, op0, inner_mode);
441138fd1498Szrj else if (UNARY_CLASS_P (exp)
441238fd1498Szrj ? TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)))
441338fd1498Szrj : unsignedp)
441438fd1498Szrj op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
441538fd1498Szrj else
441638fd1498Szrj op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
441738fd1498Szrj
441838fd1498Szrj return op0;
441938fd1498Szrj }
442038fd1498Szrj
442138fd1498Szrj case MEM_REF:
442238fd1498Szrj if (!is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
442338fd1498Szrj {
442438fd1498Szrj tree newexp = fold_binary (MEM_REF, TREE_TYPE (exp),
442538fd1498Szrj TREE_OPERAND (exp, 0),
442638fd1498Szrj TREE_OPERAND (exp, 1));
442738fd1498Szrj if (newexp)
442838fd1498Szrj return expand_debug_expr (newexp);
442938fd1498Szrj }
443038fd1498Szrj /* FALLTHROUGH */
443138fd1498Szrj case INDIRECT_REF:
443238fd1498Szrj inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
443338fd1498Szrj op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
443438fd1498Szrj if (!op0)
443538fd1498Szrj return NULL;
443638fd1498Szrj
443738fd1498Szrj if (TREE_CODE (exp) == MEM_REF)
443838fd1498Szrj {
443938fd1498Szrj if (GET_CODE (op0) == DEBUG_IMPLICIT_PTR
444038fd1498Szrj || (GET_CODE (op0) == PLUS
444138fd1498Szrj && GET_CODE (XEXP (op0, 0)) == DEBUG_IMPLICIT_PTR))
444238fd1498Szrj /* (mem (debug_implicit_ptr)) might confuse aliasing.
444338fd1498Szrj Instead just use get_inner_reference. */
444438fd1498Szrj goto component_ref;
444538fd1498Szrj
444638fd1498Szrj op1 = expand_debug_expr (TREE_OPERAND (exp, 1));
444738fd1498Szrj if (!op1 || !CONST_INT_P (op1))
444838fd1498Szrj return NULL;
444938fd1498Szrj
445038fd1498Szrj op0 = plus_constant (inner_mode, op0, INTVAL (op1));
445138fd1498Szrj }
445238fd1498Szrj
445338fd1498Szrj as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
445438fd1498Szrj
445538fd1498Szrj op0 = convert_debug_memory_address (targetm.addr_space.address_mode (as),
445638fd1498Szrj op0, as);
445738fd1498Szrj if (op0 == NULL_RTX)
445838fd1498Szrj return NULL;
445938fd1498Szrj
446038fd1498Szrj op0 = gen_rtx_MEM (mode, op0);
446138fd1498Szrj set_mem_attributes (op0, exp, 0);
446238fd1498Szrj if (TREE_CODE (exp) == MEM_REF
446338fd1498Szrj && !is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
446438fd1498Szrj set_mem_expr (op0, NULL_TREE);
446538fd1498Szrj set_mem_addr_space (op0, as);
446638fd1498Szrj
446738fd1498Szrj return op0;
446838fd1498Szrj
446938fd1498Szrj case TARGET_MEM_REF:
447038fd1498Szrj if (TREE_CODE (TMR_BASE (exp)) == ADDR_EXPR
447138fd1498Szrj && !DECL_RTL_SET_P (TREE_OPERAND (TMR_BASE (exp), 0)))
447238fd1498Szrj return NULL;
447338fd1498Szrj
447438fd1498Szrj op0 = expand_debug_expr
447538fd1498Szrj (tree_mem_ref_addr (build_pointer_type (TREE_TYPE (exp)), exp));
447638fd1498Szrj if (!op0)
447738fd1498Szrj return NULL;
447838fd1498Szrj
447938fd1498Szrj as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
448038fd1498Szrj op0 = convert_debug_memory_address (targetm.addr_space.address_mode (as),
448138fd1498Szrj op0, as);
448238fd1498Szrj if (op0 == NULL_RTX)
448338fd1498Szrj return NULL;
448438fd1498Szrj
448538fd1498Szrj op0 = gen_rtx_MEM (mode, op0);
448638fd1498Szrj
448738fd1498Szrj set_mem_attributes (op0, exp, 0);
448838fd1498Szrj set_mem_addr_space (op0, as);
448938fd1498Szrj
449038fd1498Szrj return op0;
449138fd1498Szrj
449238fd1498Szrj component_ref:
449338fd1498Szrj case ARRAY_REF:
449438fd1498Szrj case ARRAY_RANGE_REF:
449538fd1498Szrj case COMPONENT_REF:
449638fd1498Szrj case BIT_FIELD_REF:
449738fd1498Szrj case REALPART_EXPR:
449838fd1498Szrj case IMAGPART_EXPR:
449938fd1498Szrj case VIEW_CONVERT_EXPR:
450038fd1498Szrj {
450138fd1498Szrj machine_mode mode1;
450238fd1498Szrj poly_int64 bitsize, bitpos;
450338fd1498Szrj tree offset;
450438fd1498Szrj int reversep, volatilep = 0;
450538fd1498Szrj tree tem
450638fd1498Szrj = get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode1,
450738fd1498Szrj &unsignedp, &reversep, &volatilep);
450838fd1498Szrj rtx orig_op0;
450938fd1498Szrj
451038fd1498Szrj if (known_eq (bitsize, 0))
451138fd1498Szrj return NULL;
451238fd1498Szrj
451338fd1498Szrj orig_op0 = op0 = expand_debug_expr (tem);
451438fd1498Szrj
451538fd1498Szrj if (!op0)
451638fd1498Szrj return NULL;
451738fd1498Szrj
451838fd1498Szrj if (offset)
451938fd1498Szrj {
452038fd1498Szrj machine_mode addrmode, offmode;
452138fd1498Szrj
452238fd1498Szrj if (!MEM_P (op0))
452338fd1498Szrj return NULL;
452438fd1498Szrj
452538fd1498Szrj op0 = XEXP (op0, 0);
452638fd1498Szrj addrmode = GET_MODE (op0);
452738fd1498Szrj if (addrmode == VOIDmode)
452838fd1498Szrj addrmode = Pmode;
452938fd1498Szrj
453038fd1498Szrj op1 = expand_debug_expr (offset);
453138fd1498Szrj if (!op1)
453238fd1498Szrj return NULL;
453338fd1498Szrj
453438fd1498Szrj offmode = GET_MODE (op1);
453538fd1498Szrj if (offmode == VOIDmode)
453638fd1498Szrj offmode = TYPE_MODE (TREE_TYPE (offset));
453738fd1498Szrj
453838fd1498Szrj if (addrmode != offmode)
453938fd1498Szrj op1 = lowpart_subreg (addrmode, op1, offmode);
454038fd1498Szrj
454138fd1498Szrj /* Don't use offset_address here, we don't need a
454238fd1498Szrj recognizable address, and we don't want to generate
454338fd1498Szrj code. */
454438fd1498Szrj op0 = gen_rtx_MEM (mode, simplify_gen_binary (PLUS, addrmode,
454538fd1498Szrj op0, op1));
454638fd1498Szrj }
454738fd1498Szrj
454838fd1498Szrj if (MEM_P (op0))
454938fd1498Szrj {
455038fd1498Szrj if (mode1 == VOIDmode)
455138fd1498Szrj {
455238fd1498Szrj if (maybe_gt (bitsize, MAX_BITSIZE_MODE_ANY_INT))
455338fd1498Szrj return NULL;
455438fd1498Szrj /* Bitfield. */
455538fd1498Szrj mode1 = smallest_int_mode_for_size (bitsize);
455638fd1498Szrj }
455738fd1498Szrj poly_int64 bytepos = bits_to_bytes_round_down (bitpos);
455838fd1498Szrj if (maybe_ne (bytepos, 0))
455938fd1498Szrj {
456038fd1498Szrj op0 = adjust_address_nv (op0, mode1, bytepos);
456138fd1498Szrj bitpos = num_trailing_bits (bitpos);
456238fd1498Szrj }
456338fd1498Szrj else if (known_eq (bitpos, 0)
456438fd1498Szrj && known_eq (bitsize, GET_MODE_BITSIZE (mode)))
456538fd1498Szrj op0 = adjust_address_nv (op0, mode, 0);
456638fd1498Szrj else if (GET_MODE (op0) != mode1)
456738fd1498Szrj op0 = adjust_address_nv (op0, mode1, 0);
456838fd1498Szrj else
456938fd1498Szrj op0 = copy_rtx (op0);
457038fd1498Szrj if (op0 == orig_op0)
457138fd1498Szrj op0 = shallow_copy_rtx (op0);
457238fd1498Szrj set_mem_attributes (op0, exp, 0);
457338fd1498Szrj }
457438fd1498Szrj
457538fd1498Szrj if (known_eq (bitpos, 0) && mode == GET_MODE (op0))
457638fd1498Szrj return op0;
457738fd1498Szrj
457838fd1498Szrj if (maybe_lt (bitpos, 0))
457938fd1498Szrj return NULL;
458038fd1498Szrj
458138fd1498Szrj if (GET_MODE (op0) == BLKmode || mode == BLKmode)
458238fd1498Szrj return NULL;
458338fd1498Szrj
458438fd1498Szrj poly_int64 bytepos;
458538fd1498Szrj if (multiple_p (bitpos, BITS_PER_UNIT, &bytepos)
458638fd1498Szrj && known_eq (bitsize, GET_MODE_BITSIZE (mode1)))
458738fd1498Szrj {
458838fd1498Szrj machine_mode opmode = GET_MODE (op0);
458938fd1498Szrj
459038fd1498Szrj if (opmode == VOIDmode)
459138fd1498Szrj opmode = TYPE_MODE (TREE_TYPE (tem));
459238fd1498Szrj
459338fd1498Szrj /* This condition may hold if we're expanding the address
459438fd1498Szrj right past the end of an array that turned out not to
459538fd1498Szrj be addressable (i.e., the address was only computed in
459638fd1498Szrj debug stmts). The gen_subreg below would rightfully
459738fd1498Szrj crash, and the address doesn't really exist, so just
459838fd1498Szrj drop it. */
459938fd1498Szrj if (known_ge (bitpos, GET_MODE_BITSIZE (opmode)))
460038fd1498Szrj return NULL;
460138fd1498Szrj
460238fd1498Szrj if (multiple_p (bitpos, GET_MODE_BITSIZE (mode)))
460338fd1498Szrj return simplify_gen_subreg (mode, op0, opmode, bytepos);
460438fd1498Szrj }
460538fd1498Szrj
460638fd1498Szrj return simplify_gen_ternary (SCALAR_INT_MODE_P (GET_MODE (op0))
460738fd1498Szrj && TYPE_UNSIGNED (TREE_TYPE (exp))
460838fd1498Szrj ? SIGN_EXTRACT
460938fd1498Szrj : ZERO_EXTRACT, mode,
461038fd1498Szrj GET_MODE (op0) != VOIDmode
461138fd1498Szrj ? GET_MODE (op0)
461238fd1498Szrj : TYPE_MODE (TREE_TYPE (tem)),
461338fd1498Szrj op0, gen_int_mode (bitsize, word_mode),
461438fd1498Szrj gen_int_mode (bitpos, word_mode));
461538fd1498Szrj }
461638fd1498Szrj
461738fd1498Szrj case ABS_EXPR:
461838fd1498Szrj return simplify_gen_unary (ABS, mode, op0, mode);
461938fd1498Szrj
462038fd1498Szrj case NEGATE_EXPR:
462138fd1498Szrj return simplify_gen_unary (NEG, mode, op0, mode);
462238fd1498Szrj
462338fd1498Szrj case BIT_NOT_EXPR:
462438fd1498Szrj return simplify_gen_unary (NOT, mode, op0, mode);
462538fd1498Szrj
462638fd1498Szrj case FLOAT_EXPR:
462738fd1498Szrj return simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
462838fd1498Szrj 0)))
462938fd1498Szrj ? UNSIGNED_FLOAT : FLOAT, mode, op0,
463038fd1498Szrj inner_mode);
463138fd1498Szrj
463238fd1498Szrj case FIX_TRUNC_EXPR:
463338fd1498Szrj return simplify_gen_unary (unsignedp ? UNSIGNED_FIX : FIX, mode, op0,
463438fd1498Szrj inner_mode);
463538fd1498Szrj
463638fd1498Szrj case POINTER_PLUS_EXPR:
463738fd1498Szrj /* For the rare target where pointers are not the same size as
463838fd1498Szrj size_t, we need to check for mis-matched modes and correct
463938fd1498Szrj the addend. */
464038fd1498Szrj if (op0 && op1
464138fd1498Szrj && is_a <scalar_int_mode> (GET_MODE (op0), &op0_mode)
464238fd1498Szrj && is_a <scalar_int_mode> (GET_MODE (op1), &op1_mode)
464338fd1498Szrj && op0_mode != op1_mode)
464438fd1498Szrj {
464538fd1498Szrj if (GET_MODE_BITSIZE (op0_mode) < GET_MODE_BITSIZE (op1_mode)
464638fd1498Szrj 	      /* If OP0 is a partial mode, then we must truncate, even
464738fd1498Szrj 		 if it has the same bitsize as OP1, because GCC's
464838fd1498Szrj 		 representation of partial modes is opaque.  */
464938fd1498Szrj || (GET_MODE_CLASS (op0_mode) == MODE_PARTIAL_INT
465038fd1498Szrj && (GET_MODE_BITSIZE (op0_mode)
465138fd1498Szrj == GET_MODE_BITSIZE (op1_mode))))
465238fd1498Szrj op1 = simplify_gen_unary (TRUNCATE, op0_mode, op1, op1_mode);
465338fd1498Szrj else
465438fd1498Szrj /* We always sign-extend, regardless of the signedness of
465538fd1498Szrj the operand, because the operand is always unsigned
465638fd1498Szrj here even if the original C expression is signed. */
465738fd1498Szrj op1 = simplify_gen_unary (SIGN_EXTEND, op0_mode, op1, op1_mode);
465838fd1498Szrj }
465938fd1498Szrj /* Fall through. */
466038fd1498Szrj case PLUS_EXPR:
466138fd1498Szrj return simplify_gen_binary (PLUS, mode, op0, op1);
466238fd1498Szrj
466338fd1498Szrj case MINUS_EXPR:
466438fd1498Szrj case POINTER_DIFF_EXPR:
466538fd1498Szrj return simplify_gen_binary (MINUS, mode, op0, op1);
466638fd1498Szrj
466738fd1498Szrj case MULT_EXPR:
466838fd1498Szrj return simplify_gen_binary (MULT, mode, op0, op1);
466938fd1498Szrj
467038fd1498Szrj case RDIV_EXPR:
467138fd1498Szrj case TRUNC_DIV_EXPR:
467238fd1498Szrj case EXACT_DIV_EXPR:
467338fd1498Szrj if (unsignedp)
467438fd1498Szrj return simplify_gen_binary (UDIV, mode, op0, op1);
467538fd1498Szrj else
467638fd1498Szrj return simplify_gen_binary (DIV, mode, op0, op1);
467738fd1498Szrj
467838fd1498Szrj case TRUNC_MOD_EXPR:
467938fd1498Szrj return simplify_gen_binary (unsignedp ? UMOD : MOD, mode, op0, op1);
468038fd1498Szrj
468138fd1498Szrj case FLOOR_DIV_EXPR:
468238fd1498Szrj if (unsignedp)
468338fd1498Szrj return simplify_gen_binary (UDIV, mode, op0, op1);
468438fd1498Szrj else
468538fd1498Szrj {
468638fd1498Szrj rtx div = simplify_gen_binary (DIV, mode, op0, op1);
468738fd1498Szrj rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
468838fd1498Szrj rtx adj = floor_sdiv_adjust (mode, mod, op1);
468938fd1498Szrj return simplify_gen_binary (PLUS, mode, div, adj);
469038fd1498Szrj }
469138fd1498Szrj
469238fd1498Szrj case FLOOR_MOD_EXPR:
469338fd1498Szrj if (unsignedp)
469438fd1498Szrj return simplify_gen_binary (UMOD, mode, op0, op1);
469538fd1498Szrj else
469638fd1498Szrj {
469738fd1498Szrj rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
469838fd1498Szrj rtx adj = floor_sdiv_adjust (mode, mod, op1);
469938fd1498Szrj adj = simplify_gen_unary (NEG, mode,
470038fd1498Szrj simplify_gen_binary (MULT, mode, adj, op1),
470138fd1498Szrj mode);
470238fd1498Szrj return simplify_gen_binary (PLUS, mode, mod, adj);
470338fd1498Szrj }
470438fd1498Szrj
470538fd1498Szrj case CEIL_DIV_EXPR:
470638fd1498Szrj if (unsignedp)
470738fd1498Szrj {
470838fd1498Szrj rtx div = simplify_gen_binary (UDIV, mode, op0, op1);
470938fd1498Szrj rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
471038fd1498Szrj rtx adj = ceil_udiv_adjust (mode, mod, op1);
471138fd1498Szrj return simplify_gen_binary (PLUS, mode, div, adj);
471238fd1498Szrj }
471338fd1498Szrj else
471438fd1498Szrj {
471538fd1498Szrj rtx div = simplify_gen_binary (DIV, mode, op0, op1);
471638fd1498Szrj rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
471738fd1498Szrj rtx adj = ceil_sdiv_adjust (mode, mod, op1);
471838fd1498Szrj return simplify_gen_binary (PLUS, mode, div, adj);
471938fd1498Szrj }
472038fd1498Szrj
472138fd1498Szrj case CEIL_MOD_EXPR:
472238fd1498Szrj if (unsignedp)
472338fd1498Szrj {
472438fd1498Szrj rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
472538fd1498Szrj rtx adj = ceil_udiv_adjust (mode, mod, op1);
472638fd1498Szrj adj = simplify_gen_unary (NEG, mode,
472738fd1498Szrj simplify_gen_binary (MULT, mode, adj, op1),
472838fd1498Szrj mode);
472938fd1498Szrj return simplify_gen_binary (PLUS, mode, mod, adj);
473038fd1498Szrj }
473138fd1498Szrj else
473238fd1498Szrj {
473338fd1498Szrj rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
473438fd1498Szrj rtx adj = ceil_sdiv_adjust (mode, mod, op1);
473538fd1498Szrj adj = simplify_gen_unary (NEG, mode,
473638fd1498Szrj simplify_gen_binary (MULT, mode, adj, op1),
473738fd1498Szrj mode);
473838fd1498Szrj return simplify_gen_binary (PLUS, mode, mod, adj);
473938fd1498Szrj }
474038fd1498Szrj
474138fd1498Szrj case ROUND_DIV_EXPR:
474238fd1498Szrj if (unsignedp)
474338fd1498Szrj {
474438fd1498Szrj rtx div = simplify_gen_binary (UDIV, mode, op0, op1);
474538fd1498Szrj rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
474638fd1498Szrj rtx adj = round_udiv_adjust (mode, mod, op1);
474738fd1498Szrj return simplify_gen_binary (PLUS, mode, div, adj);
474838fd1498Szrj }
474938fd1498Szrj else
475038fd1498Szrj {
475138fd1498Szrj rtx div = simplify_gen_binary (DIV, mode, op0, op1);
475238fd1498Szrj rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
475338fd1498Szrj rtx adj = round_sdiv_adjust (mode, mod, op1);
475438fd1498Szrj return simplify_gen_binary (PLUS, mode, div, adj);
475538fd1498Szrj }
475638fd1498Szrj
475738fd1498Szrj case ROUND_MOD_EXPR:
475838fd1498Szrj if (unsignedp)
475938fd1498Szrj {
476038fd1498Szrj rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
476138fd1498Szrj rtx adj = round_udiv_adjust (mode, mod, op1);
476238fd1498Szrj adj = simplify_gen_unary (NEG, mode,
476338fd1498Szrj simplify_gen_binary (MULT, mode, adj, op1),
476438fd1498Szrj mode);
476538fd1498Szrj return simplify_gen_binary (PLUS, mode, mod, adj);
476638fd1498Szrj }
476738fd1498Szrj else
476838fd1498Szrj {
476938fd1498Szrj rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
477038fd1498Szrj rtx adj = round_sdiv_adjust (mode, mod, op1);
477138fd1498Szrj adj = simplify_gen_unary (NEG, mode,
477238fd1498Szrj simplify_gen_binary (MULT, mode, adj, op1),
477338fd1498Szrj mode);
477438fd1498Szrj return simplify_gen_binary (PLUS, mode, mod, adj);
477538fd1498Szrj }
477638fd1498Szrj
477738fd1498Szrj case LSHIFT_EXPR:
477838fd1498Szrj return simplify_gen_binary (ASHIFT, mode, op0, op1);
477938fd1498Szrj
478038fd1498Szrj case RSHIFT_EXPR:
478138fd1498Szrj if (unsignedp)
478238fd1498Szrj return simplify_gen_binary (LSHIFTRT, mode, op0, op1);
478338fd1498Szrj else
478438fd1498Szrj return simplify_gen_binary (ASHIFTRT, mode, op0, op1);
478538fd1498Szrj
478638fd1498Szrj case LROTATE_EXPR:
478738fd1498Szrj return simplify_gen_binary (ROTATE, mode, op0, op1);
478838fd1498Szrj
478938fd1498Szrj case RROTATE_EXPR:
479038fd1498Szrj return simplify_gen_binary (ROTATERT, mode, op0, op1);
479138fd1498Szrj
479238fd1498Szrj case MIN_EXPR:
479338fd1498Szrj return simplify_gen_binary (unsignedp ? UMIN : SMIN, mode, op0, op1);
479438fd1498Szrj
479538fd1498Szrj case MAX_EXPR:
479638fd1498Szrj return simplify_gen_binary (unsignedp ? UMAX : SMAX, mode, op0, op1);
479738fd1498Szrj
479838fd1498Szrj case BIT_AND_EXPR:
479938fd1498Szrj case TRUTH_AND_EXPR:
480038fd1498Szrj return simplify_gen_binary (AND, mode, op0, op1);
480138fd1498Szrj
480238fd1498Szrj case BIT_IOR_EXPR:
480338fd1498Szrj case TRUTH_OR_EXPR:
480438fd1498Szrj return simplify_gen_binary (IOR, mode, op0, op1);
480538fd1498Szrj
480638fd1498Szrj case BIT_XOR_EXPR:
480738fd1498Szrj case TRUTH_XOR_EXPR:
480838fd1498Szrj return simplify_gen_binary (XOR, mode, op0, op1);
480938fd1498Szrj
481038fd1498Szrj case TRUTH_ANDIF_EXPR:
481138fd1498Szrj return gen_rtx_IF_THEN_ELSE (mode, op0, op1, const0_rtx);
481238fd1498Szrj
481338fd1498Szrj case TRUTH_ORIF_EXPR:
481438fd1498Szrj return gen_rtx_IF_THEN_ELSE (mode, op0, const_true_rtx, op1);
481538fd1498Szrj
481638fd1498Szrj case TRUTH_NOT_EXPR:
481738fd1498Szrj return simplify_gen_relational (EQ, mode, inner_mode, op0, const0_rtx);
481838fd1498Szrj
481938fd1498Szrj case LT_EXPR:
482038fd1498Szrj return simplify_gen_relational (unsignedp ? LTU : LT, mode, inner_mode,
482138fd1498Szrj op0, op1);
482238fd1498Szrj
482338fd1498Szrj case LE_EXPR:
482438fd1498Szrj return simplify_gen_relational (unsignedp ? LEU : LE, mode, inner_mode,
482538fd1498Szrj op0, op1);
482638fd1498Szrj
482738fd1498Szrj case GT_EXPR:
482838fd1498Szrj return simplify_gen_relational (unsignedp ? GTU : GT, mode, inner_mode,
482938fd1498Szrj op0, op1);
483038fd1498Szrj
483138fd1498Szrj case GE_EXPR:
483238fd1498Szrj return simplify_gen_relational (unsignedp ? GEU : GE, mode, inner_mode,
483338fd1498Szrj op0, op1);
483438fd1498Szrj
483538fd1498Szrj case EQ_EXPR:
483638fd1498Szrj return simplify_gen_relational (EQ, mode, inner_mode, op0, op1);
483738fd1498Szrj
483838fd1498Szrj case NE_EXPR:
483938fd1498Szrj return simplify_gen_relational (NE, mode, inner_mode, op0, op1);
484038fd1498Szrj
484138fd1498Szrj case UNORDERED_EXPR:
484238fd1498Szrj return simplify_gen_relational (UNORDERED, mode, inner_mode, op0, op1);
484338fd1498Szrj
484438fd1498Szrj case ORDERED_EXPR:
484538fd1498Szrj return simplify_gen_relational (ORDERED, mode, inner_mode, op0, op1);
484638fd1498Szrj
484738fd1498Szrj case UNLT_EXPR:
484838fd1498Szrj return simplify_gen_relational (UNLT, mode, inner_mode, op0, op1);
484938fd1498Szrj
485038fd1498Szrj case UNLE_EXPR:
485138fd1498Szrj return simplify_gen_relational (UNLE, mode, inner_mode, op0, op1);
485238fd1498Szrj
485338fd1498Szrj case UNGT_EXPR:
485438fd1498Szrj return simplify_gen_relational (UNGT, mode, inner_mode, op0, op1);
485538fd1498Szrj
485638fd1498Szrj case UNGE_EXPR:
485738fd1498Szrj return simplify_gen_relational (UNGE, mode, inner_mode, op0, op1);
485838fd1498Szrj
485938fd1498Szrj case UNEQ_EXPR:
486038fd1498Szrj return simplify_gen_relational (UNEQ, mode, inner_mode, op0, op1);
486138fd1498Szrj
486238fd1498Szrj case LTGT_EXPR:
486338fd1498Szrj return simplify_gen_relational (LTGT, mode, inner_mode, op0, op1);
486438fd1498Szrj
486538fd1498Szrj case COND_EXPR:
486638fd1498Szrj return gen_rtx_IF_THEN_ELSE (mode, op0, op1, op2);
486738fd1498Szrj
486838fd1498Szrj case COMPLEX_EXPR:
486938fd1498Szrj gcc_assert (COMPLEX_MODE_P (mode));
487038fd1498Szrj if (GET_MODE (op0) == VOIDmode)
487138fd1498Szrj op0 = gen_rtx_CONST (GET_MODE_INNER (mode), op0);
487238fd1498Szrj if (GET_MODE (op1) == VOIDmode)
487338fd1498Szrj op1 = gen_rtx_CONST (GET_MODE_INNER (mode), op1);
487438fd1498Szrj return gen_rtx_CONCAT (mode, op0, op1);
487538fd1498Szrj
487638fd1498Szrj case CONJ_EXPR:
487738fd1498Szrj if (GET_CODE (op0) == CONCAT)
487838fd1498Szrj return gen_rtx_CONCAT (mode, XEXP (op0, 0),
487938fd1498Szrj simplify_gen_unary (NEG, GET_MODE_INNER (mode),
488038fd1498Szrj XEXP (op0, 1),
488138fd1498Szrj GET_MODE_INNER (mode)));
488238fd1498Szrj else
488338fd1498Szrj {
488438fd1498Szrj scalar_mode imode = GET_MODE_INNER (mode);
488538fd1498Szrj rtx re, im;
488638fd1498Szrj
488738fd1498Szrj if (MEM_P (op0))
488838fd1498Szrj {
488938fd1498Szrj re = adjust_address_nv (op0, imode, 0);
489038fd1498Szrj im = adjust_address_nv (op0, imode, GET_MODE_SIZE (imode));
489138fd1498Szrj }
489238fd1498Szrj else
489338fd1498Szrj {
489438fd1498Szrj scalar_int_mode ifmode;
489538fd1498Szrj scalar_int_mode ihmode;
489638fd1498Szrj rtx halfsize;
489738fd1498Szrj if (!int_mode_for_mode (mode).exists (&ifmode)
489838fd1498Szrj || !int_mode_for_mode (imode).exists (&ihmode))
489938fd1498Szrj return NULL;
490038fd1498Szrj halfsize = GEN_INT (GET_MODE_BITSIZE (ihmode));
490138fd1498Szrj re = op0;
490238fd1498Szrj if (mode != ifmode)
490338fd1498Szrj re = gen_rtx_SUBREG (ifmode, re, 0);
490438fd1498Szrj re = gen_rtx_ZERO_EXTRACT (ihmode, re, halfsize, const0_rtx);
490538fd1498Szrj if (imode != ihmode)
490638fd1498Szrj re = gen_rtx_SUBREG (imode, re, 0);
490738fd1498Szrj im = copy_rtx (op0);
490838fd1498Szrj if (mode != ifmode)
490938fd1498Szrj im = gen_rtx_SUBREG (ifmode, im, 0);
491038fd1498Szrj im = gen_rtx_ZERO_EXTRACT (ihmode, im, halfsize, halfsize);
491138fd1498Szrj if (imode != ihmode)
491238fd1498Szrj im = gen_rtx_SUBREG (imode, im, 0);
491338fd1498Szrj }
491438fd1498Szrj im = gen_rtx_NEG (imode, im);
491538fd1498Szrj return gen_rtx_CONCAT (mode, re, im);
491638fd1498Szrj }
491738fd1498Szrj
491838fd1498Szrj case ADDR_EXPR:
491938fd1498Szrj op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
492038fd1498Szrj if (!op0 || !MEM_P (op0))
492138fd1498Szrj {
492238fd1498Szrj if ((TREE_CODE (TREE_OPERAND (exp, 0)) == VAR_DECL
492338fd1498Szrj || TREE_CODE (TREE_OPERAND (exp, 0)) == PARM_DECL
492438fd1498Szrj || TREE_CODE (TREE_OPERAND (exp, 0)) == RESULT_DECL)
492538fd1498Szrj && (!TREE_ADDRESSABLE (TREE_OPERAND (exp, 0))
492638fd1498Szrj || target_for_debug_bind (TREE_OPERAND (exp, 0))))
492738fd1498Szrj return gen_rtx_DEBUG_IMPLICIT_PTR (mode, TREE_OPERAND (exp, 0));
492838fd1498Szrj
492938fd1498Szrj if (handled_component_p (TREE_OPERAND (exp, 0)))
493038fd1498Szrj {
493138fd1498Szrj poly_int64 bitoffset, bitsize, maxsize, byteoffset;
493238fd1498Szrj bool reverse;
493338fd1498Szrj tree decl
493438fd1498Szrj = get_ref_base_and_extent (TREE_OPERAND (exp, 0), &bitoffset,
493538fd1498Szrj &bitsize, &maxsize, &reverse);
493638fd1498Szrj if ((VAR_P (decl)
493738fd1498Szrj || TREE_CODE (decl) == PARM_DECL
493838fd1498Szrj || TREE_CODE (decl) == RESULT_DECL)
493938fd1498Szrj && (!TREE_ADDRESSABLE (decl)
494038fd1498Szrj || target_for_debug_bind (decl))
494138fd1498Szrj && multiple_p (bitoffset, BITS_PER_UNIT, &byteoffset)
494238fd1498Szrj && known_gt (bitsize, 0)
494338fd1498Szrj && known_eq (bitsize, maxsize))
494438fd1498Szrj {
494538fd1498Szrj rtx base = gen_rtx_DEBUG_IMPLICIT_PTR (mode, decl);
494638fd1498Szrj return plus_constant (mode, base, byteoffset);
494738fd1498Szrj }
494838fd1498Szrj }
494938fd1498Szrj
495038fd1498Szrj if (TREE_CODE (TREE_OPERAND (exp, 0)) == MEM_REF
495138fd1498Szrj && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
495238fd1498Szrj == ADDR_EXPR)
495338fd1498Szrj {
495438fd1498Szrj op0 = expand_debug_expr (TREE_OPERAND (TREE_OPERAND (exp, 0),
495538fd1498Szrj 0));
495638fd1498Szrj if (op0 != NULL
495738fd1498Szrj && (GET_CODE (op0) == DEBUG_IMPLICIT_PTR
495838fd1498Szrj || (GET_CODE (op0) == PLUS
495938fd1498Szrj && GET_CODE (XEXP (op0, 0)) == DEBUG_IMPLICIT_PTR
496038fd1498Szrj && CONST_INT_P (XEXP (op0, 1)))))
496138fd1498Szrj {
496238fd1498Szrj op1 = expand_debug_expr (TREE_OPERAND (TREE_OPERAND (exp, 0),
496338fd1498Szrj 1));
496438fd1498Szrj if (!op1 || !CONST_INT_P (op1))
496538fd1498Szrj return NULL;
496638fd1498Szrj
496738fd1498Szrj return plus_constant (mode, op0, INTVAL (op1));
496838fd1498Szrj }
496938fd1498Szrj }
497038fd1498Szrj
497138fd1498Szrj return NULL;
497238fd1498Szrj }
497338fd1498Szrj
497438fd1498Szrj as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
497538fd1498Szrj addr_mode = SCALAR_INT_TYPE_MODE (TREE_TYPE (exp));
497638fd1498Szrj op0 = convert_debug_memory_address (addr_mode, XEXP (op0, 0), as);
497738fd1498Szrj
497838fd1498Szrj return op0;
497938fd1498Szrj
498038fd1498Szrj case VECTOR_CST:
498138fd1498Szrj {
498238fd1498Szrj unsigned HOST_WIDE_INT i, nelts;
498338fd1498Szrj
498438fd1498Szrj if (!VECTOR_CST_NELTS (exp).is_constant (&nelts))
498538fd1498Szrj return NULL;
498638fd1498Szrj
498738fd1498Szrj op0 = gen_rtx_CONCATN (mode, rtvec_alloc (nelts));
498838fd1498Szrj
498938fd1498Szrj for (i = 0; i < nelts; ++i)
499038fd1498Szrj {
499138fd1498Szrj op1 = expand_debug_expr (VECTOR_CST_ELT (exp, i));
499238fd1498Szrj if (!op1)
499338fd1498Szrj return NULL;
499438fd1498Szrj XVECEXP (op0, 0, i) = op1;
499538fd1498Szrj }
499638fd1498Szrj
499738fd1498Szrj return op0;
499838fd1498Szrj }
499938fd1498Szrj
500038fd1498Szrj case CONSTRUCTOR:
500138fd1498Szrj if (TREE_CLOBBER_P (exp))
500238fd1498Szrj return NULL;
500338fd1498Szrj else if (TREE_CODE (TREE_TYPE (exp)) == VECTOR_TYPE)
500438fd1498Szrj {
500538fd1498Szrj unsigned i;
500638fd1498Szrj unsigned HOST_WIDE_INT nelts;
500738fd1498Szrj tree val;
500838fd1498Szrj
500938fd1498Szrj if (!TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp)).is_constant (&nelts))
501038fd1498Szrj goto flag_unsupported;
501138fd1498Szrj
501238fd1498Szrj op0 = gen_rtx_CONCATN (mode, rtvec_alloc (nelts));
501338fd1498Szrj
501438fd1498Szrj FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), i, val)
501538fd1498Szrj {
501638fd1498Szrj op1 = expand_debug_expr (val);
501738fd1498Szrj if (!op1)
501838fd1498Szrj return NULL;
501938fd1498Szrj XVECEXP (op0, 0, i) = op1;
502038fd1498Szrj }
502138fd1498Szrj
502238fd1498Szrj if (i < nelts)
502338fd1498Szrj {
502438fd1498Szrj op1 = expand_debug_expr
502538fd1498Szrj (build_zero_cst (TREE_TYPE (TREE_TYPE (exp))));
502638fd1498Szrj
502738fd1498Szrj if (!op1)
502838fd1498Szrj return NULL;
502938fd1498Szrj
503038fd1498Szrj for (; i < nelts; i++)
503138fd1498Szrj XVECEXP (op0, 0, i) = op1;
503238fd1498Szrj }
503338fd1498Szrj
503438fd1498Szrj return op0;
503538fd1498Szrj }
503638fd1498Szrj else
503738fd1498Szrj goto flag_unsupported;
503838fd1498Szrj
503938fd1498Szrj case CALL_EXPR:
504038fd1498Szrj /* ??? Maybe handle some builtins? */
504138fd1498Szrj return NULL;
504238fd1498Szrj
504338fd1498Szrj case SSA_NAME:
504438fd1498Szrj {
504538fd1498Szrj gimple *g = get_gimple_for_ssa_name (exp);
504638fd1498Szrj if (g)
504738fd1498Szrj {
504838fd1498Szrj tree t = NULL_TREE;
504938fd1498Szrj if (deep_ter_debug_map)
505038fd1498Szrj {
505138fd1498Szrj tree *slot = deep_ter_debug_map->get (exp);
505238fd1498Szrj if (slot)
505338fd1498Szrj t = *slot;
505438fd1498Szrj }
505538fd1498Szrj if (t == NULL_TREE)
505638fd1498Szrj t = gimple_assign_rhs_to_tree (g);
505738fd1498Szrj op0 = expand_debug_expr (t);
505838fd1498Szrj if (!op0)
505938fd1498Szrj return NULL;
506038fd1498Szrj }
506138fd1498Szrj else
506238fd1498Szrj {
506338fd1498Szrj 	  /* If this is a reference to the incoming value of a
506438fd1498Szrj 	     parameter that is never used in the code, or whose
506538fd1498Szrj 	     incoming value is never used, use the PARM_DECL's
506638fd1498Szrj 	     DECL_RTL if it is set.  */
506738fd1498Szrj if (SSA_NAME_IS_DEFAULT_DEF (exp)
506838fd1498Szrj && SSA_NAME_VAR (exp)
506938fd1498Szrj && TREE_CODE (SSA_NAME_VAR (exp)) == PARM_DECL
507038fd1498Szrj && has_zero_uses (exp))
507138fd1498Szrj {
507238fd1498Szrj op0 = expand_debug_parm_decl (SSA_NAME_VAR (exp));
507338fd1498Szrj if (op0)
507438fd1498Szrj goto adjust_mode;
507538fd1498Szrj op0 = expand_debug_expr (SSA_NAME_VAR (exp));
507638fd1498Szrj if (op0)
507738fd1498Szrj goto adjust_mode;
507838fd1498Szrj }
507938fd1498Szrj
508038fd1498Szrj int part = var_to_partition (SA.map, exp);
508138fd1498Szrj
508238fd1498Szrj if (part == NO_PARTITION)
508338fd1498Szrj return NULL;
508438fd1498Szrj
508538fd1498Szrj gcc_assert (part >= 0 && (unsigned)part < SA.map->num_partitions);
508638fd1498Szrj
508738fd1498Szrj op0 = copy_rtx (SA.partition_to_pseudo[part]);
508838fd1498Szrj }
508938fd1498Szrj goto adjust_mode;
509038fd1498Szrj }
509138fd1498Szrj
509238fd1498Szrj case ERROR_MARK:
509338fd1498Szrj return NULL;
509438fd1498Szrj
509538fd1498Szrj     /* Vector codes.  For most of these there are no corresponding rtl codes.  */
509638fd1498Szrj case REALIGN_LOAD_EXPR:
509738fd1498Szrj case VEC_COND_EXPR:
509838fd1498Szrj case VEC_PACK_FIX_TRUNC_EXPR:
509938fd1498Szrj case VEC_PACK_SAT_EXPR:
510038fd1498Szrj case VEC_PACK_TRUNC_EXPR:
510138fd1498Szrj case VEC_UNPACK_FLOAT_HI_EXPR:
510238fd1498Szrj case VEC_UNPACK_FLOAT_LO_EXPR:
510338fd1498Szrj case VEC_UNPACK_HI_EXPR:
510438fd1498Szrj case VEC_UNPACK_LO_EXPR:
510538fd1498Szrj case VEC_WIDEN_MULT_HI_EXPR:
510638fd1498Szrj case VEC_WIDEN_MULT_LO_EXPR:
510738fd1498Szrj case VEC_WIDEN_MULT_EVEN_EXPR:
510838fd1498Szrj case VEC_WIDEN_MULT_ODD_EXPR:
510938fd1498Szrj case VEC_WIDEN_LSHIFT_HI_EXPR:
511038fd1498Szrj case VEC_WIDEN_LSHIFT_LO_EXPR:
511138fd1498Szrj case VEC_PERM_EXPR:
511238fd1498Szrj case VEC_DUPLICATE_EXPR:
511338fd1498Szrj case VEC_SERIES_EXPR:
511438fd1498Szrj return NULL;
511538fd1498Szrj
511638fd1498Szrj /* Misc codes. */
511738fd1498Szrj case ADDR_SPACE_CONVERT_EXPR:
511838fd1498Szrj case FIXED_CONVERT_EXPR:
511938fd1498Szrj case OBJ_TYPE_REF:
512038fd1498Szrj case WITH_SIZE_EXPR:
512138fd1498Szrj case BIT_INSERT_EXPR:
512238fd1498Szrj return NULL;
512338fd1498Szrj
512438fd1498Szrj case DOT_PROD_EXPR:
512538fd1498Szrj if (SCALAR_INT_MODE_P (GET_MODE (op0))
512638fd1498Szrj && SCALAR_INT_MODE_P (mode))
512738fd1498Szrj {
512838fd1498Szrj op0
512938fd1498Szrj = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
513038fd1498Szrj 0)))
513138fd1498Szrj ? ZERO_EXTEND : SIGN_EXTEND, mode, op0,
513238fd1498Szrj inner_mode);
513338fd1498Szrj op1
513438fd1498Szrj = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
513538fd1498Szrj 1)))
513638fd1498Szrj ? ZERO_EXTEND : SIGN_EXTEND, mode, op1,
513738fd1498Szrj inner_mode);
513838fd1498Szrj op0 = simplify_gen_binary (MULT, mode, op0, op1);
513938fd1498Szrj return simplify_gen_binary (PLUS, mode, op0, op2);
514038fd1498Szrj }
514138fd1498Szrj return NULL;
514238fd1498Szrj
514338fd1498Szrj case WIDEN_MULT_EXPR:
514438fd1498Szrj case WIDEN_MULT_PLUS_EXPR:
514538fd1498Szrj case WIDEN_MULT_MINUS_EXPR:
514638fd1498Szrj if (SCALAR_INT_MODE_P (GET_MODE (op0))
514738fd1498Szrj && SCALAR_INT_MODE_P (mode))
514838fd1498Szrj {
514938fd1498Szrj inner_mode = GET_MODE (op0);
515038fd1498Szrj if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
515138fd1498Szrj op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
515238fd1498Szrj else
515338fd1498Szrj op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
515438fd1498Szrj if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1))))
515538fd1498Szrj op1 = simplify_gen_unary (ZERO_EXTEND, mode, op1, inner_mode);
515638fd1498Szrj else
515738fd1498Szrj op1 = simplify_gen_unary (SIGN_EXTEND, mode, op1, inner_mode);
515838fd1498Szrj op0 = simplify_gen_binary (MULT, mode, op0, op1);
515938fd1498Szrj if (TREE_CODE (exp) == WIDEN_MULT_EXPR)
516038fd1498Szrj return op0;
516138fd1498Szrj else if (TREE_CODE (exp) == WIDEN_MULT_PLUS_EXPR)
516238fd1498Szrj return simplify_gen_binary (PLUS, mode, op0, op2);
516338fd1498Szrj else
516438fd1498Szrj return simplify_gen_binary (MINUS, mode, op2, op0);
516538fd1498Szrj }
516638fd1498Szrj return NULL;
516738fd1498Szrj
516838fd1498Szrj case MULT_HIGHPART_EXPR:
516938fd1498Szrj /* ??? Similar to the above. */
517038fd1498Szrj return NULL;
517138fd1498Szrj
517238fd1498Szrj case WIDEN_SUM_EXPR:
517338fd1498Szrj case WIDEN_LSHIFT_EXPR:
517438fd1498Szrj if (SCALAR_INT_MODE_P (GET_MODE (op0))
517538fd1498Szrj && SCALAR_INT_MODE_P (mode))
517638fd1498Szrj {
517738fd1498Szrj op0
517838fd1498Szrj = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
517938fd1498Szrj 0)))
518038fd1498Szrj ? ZERO_EXTEND : SIGN_EXTEND, mode, op0,
518138fd1498Szrj inner_mode);
518238fd1498Szrj return simplify_gen_binary (TREE_CODE (exp) == WIDEN_LSHIFT_EXPR
518338fd1498Szrj ? ASHIFT : PLUS, mode, op0, op1);
518438fd1498Szrj }
518538fd1498Szrj return NULL;
518638fd1498Szrj
518738fd1498Szrj case FMA_EXPR:
518838fd1498Szrj return simplify_gen_ternary (FMA, mode, inner_mode, op0, op1, op2);
518938fd1498Szrj
519038fd1498Szrj default:
519138fd1498Szrj flag_unsupported:
519238fd1498Szrj if (flag_checking)
519338fd1498Szrj {
519438fd1498Szrj debug_tree (exp);
519538fd1498Szrj gcc_unreachable ();
519638fd1498Szrj }
519738fd1498Szrj return NULL;
519838fd1498Szrj }
519938fd1498Szrj }
520038fd1498Szrj
520138fd1498Szrj /* Return an RTX equivalent to the source bind value of the tree expression
520238fd1498Szrj EXP. */
520338fd1498Szrj
520438fd1498Szrj static rtx
520538fd1498Szrj expand_debug_source_expr (tree exp)
520638fd1498Szrj {
520738fd1498Szrj rtx op0 = NULL_RTX;
520838fd1498Szrj machine_mode mode = VOIDmode, inner_mode;
520938fd1498Szrj
521038fd1498Szrj switch (TREE_CODE (exp))
521138fd1498Szrj {
521238fd1498Szrj case PARM_DECL:
521338fd1498Szrj {
521438fd1498Szrj mode = DECL_MODE (exp);
521538fd1498Szrj op0 = expand_debug_parm_decl (exp);
521638fd1498Szrj if (op0)
521738fd1498Szrj break;
521838fd1498Szrj /* See if this isn't an argument that has been completely
521938fd1498Szrj optimized out. */
522038fd1498Szrj if (!DECL_RTL_SET_P (exp)
522138fd1498Szrj && !DECL_INCOMING_RTL (exp)
522238fd1498Szrj && DECL_ABSTRACT_ORIGIN (current_function_decl))
522338fd1498Szrj {
		vec<tree, va_gc> **debug_args;
		unsigned int ix;
		tree ddecl;
		debug_args = decl_debug_args_lookup (current_function_decl);
		if (debug_args != NULL)
		  {
		    for (ix = 0; vec_safe_iterate (*debug_args, ix, &ddecl);
			 ix += 2)
		      if (ddecl == aexp)
			return gen_rtx_DEBUG_PARAMETER_REF (mode, aexp);
		  }
	      }
	  }
	break;
      }
    default:
      break;
    }

  if (op0 == NULL_RTX)
    return NULL_RTX;

  inner_mode = GET_MODE (op0);
  if (mode == inner_mode)
    return op0;

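  /* Reconcile MODE with the mode OP0 was expanded in, choosing the
     conversion that fits; e.g. a DFmode incoming location used in an
     SFmode context would get a FLOAT_TRUNCATE.  */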
  if (FLOAT_MODE_P (mode) && FLOAT_MODE_P (inner_mode))
    {
      if (GET_MODE_UNIT_BITSIZE (mode)
	  == GET_MODE_UNIT_BITSIZE (inner_mode))
	op0 = simplify_gen_subreg (mode, op0, inner_mode, 0);
      else if (GET_MODE_UNIT_BITSIZE (mode)
	       < GET_MODE_UNIT_BITSIZE (inner_mode))
	op0 = simplify_gen_unary (FLOAT_TRUNCATE, mode, op0, inner_mode);
      else
	op0 = simplify_gen_unary (FLOAT_EXTEND, mode, op0, inner_mode);
    }
  else if (FLOAT_MODE_P (mode))
    gcc_unreachable ();
  else if (FLOAT_MODE_P (inner_mode))
    {
      if (TYPE_UNSIGNED (TREE_TYPE (exp)))
	op0 = simplify_gen_unary (UNSIGNED_FIX, mode, op0, inner_mode);
      else
	op0 = simplify_gen_unary (FIX, mode, op0, inner_mode);
    }
  else if (GET_MODE_UNIT_PRECISION (mode)
	   == GET_MODE_UNIT_PRECISION (inner_mode))
    op0 = lowpart_subreg (mode, op0, inner_mode);
  else if (GET_MODE_UNIT_PRECISION (mode)
	   < GET_MODE_UNIT_PRECISION (inner_mode))
    op0 = simplify_gen_unary (TRUNCATE, mode, op0, inner_mode);
  else if (TYPE_UNSIGNED (TREE_TYPE (exp)))
    op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
  else
    op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);

  return op0;
}

/* Ensure INSN_VAR_LOCATION_LOC (insn) doesn't have unbounded complexity.
   Allow 4 levels of rtl nesting for most rtl codes, and if we see anything
   deeper than that, create DEBUG_EXPRs and emit DEBUG_INSNs before INSN.  */

static void
avoid_complex_debug_insns (rtx_insn *insn, rtx *exp_p, int depth)
{
  rtx exp = *exp_p;

  if (exp == NULL_RTX)
    return;

  if ((OBJECT_P (exp) && !MEM_P (exp)) || GET_CODE (exp) == CLOBBER)
    return;

  if (depth == 4)
    {
      /* Create DEBUG_EXPR (and DEBUG_EXPR_DECL).  */
      rtx dval = make_debug_expr_from_rtl (exp);

      /* Emit a debug bind insn before INSN.  */
      rtx bind = gen_rtx_VAR_LOCATION (GET_MODE (exp),
				       DEBUG_EXPR_TREE_DECL (dval), exp,
				       VAR_INIT_STATUS_INITIALIZED);

      emit_debug_insn_before (bind, insn);
      *exp_p = dval;
      return;
    }
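  /* Below the depth limit, keep recursing into the operands; e.g. the
     location (plus (mult (reg) (reg)) (const_int 8)) is only two levels
     deep and is left intact.  */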
  const char *format_ptr = GET_RTX_FORMAT (GET_CODE (exp));
  int i, j;
  for (i = 0; i < GET_RTX_LENGTH (GET_CODE (exp)); i++)
    switch (*format_ptr++)
      {
      case 'e':
	avoid_complex_debug_insns (insn, &XEXP (exp, i), depth + 1);
	break;

      case 'E':
      case 'V':
	for (j = 0; j < XVECLEN (exp, i); j++)
	  avoid_complex_debug_insns (insn, &XVECEXP (exp, i, j), depth + 1);
	break;

      default:
	break;
      }
}

/* Expand the _LOCs in debug insns.  We run this after expanding all
   regular insns, so that any variables referenced in the function
   will have their DECL_RTLs set.  */

static void
expand_debug_locations (void)
{
  rtx_insn *insn;
  rtx_insn *last = get_last_insn ();
  int save_strict_alias = flag_strict_aliasing;

  /* New alias sets while setting up memory attributes cause
     -fcompare-debug failures, even though they don't bring about any
     codegen changes.  */
  flag_strict_aliasing = 0;

  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    if (DEBUG_BIND_INSN_P (insn))
      {
	tree value = (tree)INSN_VAR_LOCATION_LOC (insn);
	rtx val;
	rtx_insn *prev_insn, *insn2;
	machine_mode mode;

	if (value == NULL_TREE)
	  val = NULL_RTX;
	else
	  {
	    if (INSN_VAR_LOCATION_STATUS (insn)
		== VAR_INIT_STATUS_UNINITIALIZED)
	      val = expand_debug_source_expr (value);
	    /* The avoid_deep_ter_for_debug function inserts
	       debug bind stmts after SSA_NAME definition, with the
	       SSA_NAME as the whole bind location.  Disable temporarily
	       expansion of that SSA_NAME into the DEBUG_EXPR_DECL
	       being defined in this DEBUG_INSN.  */
	    else if (deep_ter_debug_map && TREE_CODE (value) == SSA_NAME)
	      {
		tree *slot = deep_ter_debug_map->get (value);
		if (slot)
		  {
		    if (*slot == INSN_VAR_LOCATION_DECL (insn))
		      *slot = NULL_TREE;
		    else
		      slot = NULL;
		  }
		val = expand_debug_expr (value);
		if (slot)
		  *slot = INSN_VAR_LOCATION_DECL (insn);
	      }
	    else
	      val = expand_debug_expr (value);
	    gcc_assert (last == get_last_insn ());
	  }

	if (!val)
	  val = gen_rtx_UNKNOWN_VAR_LOC ();
	else
	  {
	    mode = GET_MODE (INSN_VAR_LOCATION (insn));

	    gcc_assert (mode == GET_MODE (val)
			|| (GET_MODE (val) == VOIDmode
			    && (CONST_SCALAR_INT_P (val)
				|| GET_CODE (val) == CONST_FIXED
				|| GET_CODE (val) == LABEL_REF)));
	  }

	INSN_VAR_LOCATION_LOC (insn) = val;
	prev_insn = PREV_INSN (insn);
	for (insn2 = insn; insn2 != prev_insn; insn2 = PREV_INSN (insn2))
	  avoid_complex_debug_insns (insn2, &INSN_VAR_LOCATION_LOC (insn2), 0);
      }

  flag_strict_aliasing = save_strict_alias;
}

/* Swap the operands of commutative operations so that the more
   expensive one is expanded first.  */

static void
reorder_operands (basic_block bb)
{
  unsigned int *lattice;  /* Hold cost of each statement.  */
  unsigned int i = 0, n = 0;
  gimple_stmt_iterator gsi;
  gimple_seq stmts;
  gimple *stmt;
  bool swap;
  tree op0, op1;
  ssa_op_iter iter;
  use_operand_p use_p;
  gimple *def0, *def1;

  /* Compute cost of each statement using estimate_num_insns.  */
  stmts = bb_seq (bb);
  for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      stmt = gsi_stmt (gsi);
      if (!is_gimple_debug (stmt))
	gimple_set_uid (stmt, n++);
    }
  lattice = XNEWVEC (unsigned int, n);
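  /* The cost lattice is cumulative: a statement's entry also includes
     the cost of the TERed single-use statements feeding it, so e.g. for
       t1 = a * b;  t2 = t1 * c;
     the entry for t2 accounts for t1 as well.  */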
  for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      unsigned cost;
      stmt = gsi_stmt (gsi);
      if (is_gimple_debug (stmt))
	continue;
      cost = estimate_num_insns (stmt, &eni_size_weights);
      lattice[i] = cost;
      FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
	{
	  tree use = USE_FROM_PTR (use_p);
	  gimple *def_stmt;
	  if (TREE_CODE (use) != SSA_NAME)
	    continue;
	  def_stmt = get_gimple_for_ssa_name (use);
	  if (!def_stmt)
	    continue;
	  lattice[i] += lattice[gimple_uid (def_stmt)];
	}
      i++;
      if (!is_gimple_assign (stmt)
	  || !commutative_tree_code (gimple_assign_rhs_code (stmt)))
	continue;
      op0 = gimple_op (stmt, 1);
      op1 = gimple_op (stmt, 2);
      if (TREE_CODE (op0) != SSA_NAME
	  || TREE_CODE (op1) != SSA_NAME)
	continue;
      /* Swap operands if the second one is more expensive.  */
      def0 = get_gimple_for_ssa_name (op0);
      def1 = get_gimple_for_ssa_name (op1);
      if (!def1)
	continue;
      swap = false;
      if (!def0 || lattice[gimple_uid (def1)] > lattice[gimple_uid (def0)])
	swap = true;
      if (swap)
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Swap operands in stmt:\n");
	      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
	      fprintf (dump_file, "Cost left opnd=%d, right opnd=%d\n",
		       def0 ? lattice[gimple_uid (def0)] : 0,
		       lattice[gimple_uid (def1)]);
	    }
	  swap_ssa_operands (stmt, gimple_assign_rhs1_ptr (stmt),
			     gimple_assign_rhs2_ptr (stmt));
	}
    }
  XDELETE (lattice);
}

/* Expand basic block BB from GIMPLE trees to RTL.  */

static basic_block
expand_gimple_basic_block (basic_block bb, bool disable_tail_calls)
{
  gimple_stmt_iterator gsi;
  gimple_seq stmts;
  gimple *stmt = NULL;
  rtx_note *note = NULL;
  rtx_insn *last;
  edge e;
  edge_iterator ei;

  if (dump_file)
    fprintf (dump_file, "\n;; Generating RTL for gimple basic block %d\n",
	     bb->index);

  /* Note that since we are now transitioning from GIMPLE to RTL, we
     cannot use the gsi_*_bb() routines because they expect the basic
     block to be in GIMPLE, instead of RTL.  Therefore, we need to
     access the BB sequence directly.  */
  if (optimize)
    reorder_operands (bb);
  stmts = bb_seq (bb);
  bb->il.gimple.seq = NULL;
  bb->il.gimple.phi_nodes = NULL;
  rtl_profile_for_bb (bb);
  init_rtl_bb_info (bb);
  bb->flags |= BB_RTL;

  /* Remove the RETURN_EXPR if we may fall through to the exit
     instead.  */
  gsi = gsi_last (stmts);
  if (!gsi_end_p (gsi)
      && gimple_code (gsi_stmt (gsi)) == GIMPLE_RETURN)
    {
      greturn *ret_stmt = as_a <greturn *> (gsi_stmt (gsi));

      gcc_assert (single_succ_p (bb));
      gcc_assert (single_succ (bb) == EXIT_BLOCK_PTR_FOR_FN (cfun));

      if (bb->next_bb == EXIT_BLOCK_PTR_FOR_FN (cfun)
	  && !gimple_return_retval (ret_stmt))
	{
	  gsi_remove (&gsi, false);
	  single_succ_edge (bb)->flags |= EDGE_FALLTHRU;
	}
    }

  gsi = gsi_start (stmts);
  if (!gsi_end_p (gsi))
    {
      stmt = gsi_stmt (gsi);
      if (gimple_code (stmt) != GIMPLE_LABEL)
	stmt = NULL;
    }

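  /* A label may already have been created for this block as a forward
     jump target; if so, emit it below so that BB_HEAD points at it.  */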
  rtx_code_label **elt = lab_rtx_for_bb->get (bb);

  if (stmt || elt)
    {
      gcc_checking_assert (!note);
      last = get_last_insn ();

      if (stmt)
	{
	  expand_gimple_stmt (stmt);
	  gsi_next (&gsi);
	}

      if (elt)
	emit_label (*elt);

      BB_HEAD (bb) = NEXT_INSN (last);
      if (NOTE_P (BB_HEAD (bb)))
	BB_HEAD (bb) = NEXT_INSN (BB_HEAD (bb));
      gcc_assert (LABEL_P (BB_HEAD (bb)));
      note = emit_note_after (NOTE_INSN_BASIC_BLOCK, BB_HEAD (bb));

      maybe_dump_rtl_for_gimple_stmt (stmt, last);
    }
  else
    BB_HEAD (bb) = note = emit_note (NOTE_INSN_BASIC_BLOCK);

  if (note)
    NOTE_BASIC_BLOCK (note) = bb;

  for (; !gsi_end_p (gsi); gsi_next (&gsi))
    {
      basic_block new_bb;

      stmt = gsi_stmt (gsi);

      /* If this statement is a non-debug one, and we generate debug
	 insns, then this one might be the last real use of a TERed
	 SSA_NAME, but where there are still some debug uses further
	 down.  Expanding the current SSA name in such further debug
	 uses by their RHS might lead to wrong debug info, as coalescing
	 might make the operands of such RHS be placed into the same
	 pseudo as something else.  Like so:
	   a_1 = a_0 + 1;   // Assume a_1 is TERed and a_0 is dead
	   use(a_1);
	   a_2 = ...
	   #DEBUG ... => a_1
	 As a_0 and a_2 don't overlap in lifetime, assume they are coalesced.
	 If we now would expand a_1 by its RHS (a_0 + 1) in the debug use,
	 the write to a_2 would actually have clobbered the place which
	 formerly held a_0.

	 So, instead of that, we recognize the situation, and generate
	 debug temporaries at the last real use of TERed SSA names:
	   a_1 = a_0 + 1;
	   #DEBUG #D1 => a_1
	   use(a_1);
	   a_2 = ...
	   #DEBUG ... => #D1  */
      if (MAY_HAVE_DEBUG_BIND_INSNS
	  && SA.values
	  && !is_gimple_debug (stmt))
	{
	  ssa_op_iter iter;
	  tree op;
	  gimple *def;

	  location_t sloc = curr_insn_location ();

	  /* Look for SSA names that have their last use here (TERed
	     names always have only one real use).  */
	  FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
	    if ((def = get_gimple_for_ssa_name (op)))
	      {
		imm_use_iterator imm_iter;
		use_operand_p use_p;
		bool have_debug_uses = false;

		FOR_EACH_IMM_USE_FAST (use_p, imm_iter, op)
		  {
		    if (gimple_debug_bind_p (USE_STMT (use_p)))
		      {
			have_debug_uses = true;
			break;
		      }
		  }

		if (have_debug_uses)
		  {
		    /* OP is a TERed SSA name, with DEF its defining
		       statement, and where OP is used in further debug
		       instructions.  Generate a debug temporary, and
		       replace all uses of OP in debug insns with that
		       temporary.  */
		    gimple *debugstmt;
		    tree value = gimple_assign_rhs_to_tree (def);
		    tree vexpr = make_node (DEBUG_EXPR_DECL);
		    rtx val;
		    machine_mode mode;

		    set_curr_insn_location (gimple_location (def));

		    DECL_ARTIFICIAL (vexpr) = 1;
		    TREE_TYPE (vexpr) = TREE_TYPE (value);
		    if (DECL_P (value))
		      mode = DECL_MODE (value);
		    else
		      mode = TYPE_MODE (TREE_TYPE (value));
		    SET_DECL_MODE (vexpr, mode);

		    val = gen_rtx_VAR_LOCATION
		      (mode, vexpr, (rtx)value, VAR_INIT_STATUS_INITIALIZED);

		    emit_debug_insn (val);

		    FOR_EACH_IMM_USE_STMT (debugstmt, imm_iter, op)
		      {
			if (!gimple_debug_bind_p (debugstmt))
			  continue;

			FOR_EACH_IMM_USE_ON_STMT (use_p, imm_iter)
			  SET_USE (use_p, vexpr);

			update_stmt (debugstmt);
		      }
		  }
	      }
	  set_curr_insn_location (sloc);
	}

      currently_expanding_gimple_stmt = stmt;

      /* Expand this statement, then evaluate the resulting RTL and
	 fixup the CFG accordingly.  */
      if (gimple_code (stmt) == GIMPLE_COND)
	{
	  new_bb = expand_gimple_cond (bb, as_a <gcond *> (stmt));
	  if (new_bb)
	    return new_bb;
	}
      else if (is_gimple_debug (stmt))
	{
	  location_t sloc = curr_insn_location ();
	  gimple_stmt_iterator nsi = gsi;

	  for (;;)
	    {
	      tree var;
	      tree value = NULL_TREE;
	      rtx val = NULL_RTX;
	      machine_mode mode;

	      if (!gimple_debug_nonbind_marker_p (stmt))
		{
		  if (gimple_debug_bind_p (stmt))
		    {
		      var = gimple_debug_bind_get_var (stmt);

		      if (TREE_CODE (var) != DEBUG_EXPR_DECL
			  && TREE_CODE (var) != LABEL_DECL
			  && !target_for_debug_bind (var))
			goto delink_debug_stmt;

		      if (DECL_P (var))
			mode = DECL_MODE (var);
		      else
			mode = TYPE_MODE (TREE_TYPE (var));

		      if (gimple_debug_bind_has_value_p (stmt))
			value = gimple_debug_bind_get_value (stmt);

		      val = gen_rtx_VAR_LOCATION
			(mode, var, (rtx)value, VAR_INIT_STATUS_INITIALIZED);
		    }
		  else if (gimple_debug_source_bind_p (stmt))
		    {
		      var = gimple_debug_source_bind_get_var (stmt);

		      value = gimple_debug_source_bind_get_value (stmt);

		      mode = DECL_MODE (var);

		      val = gen_rtx_VAR_LOCATION (mode, var, (rtx)value,
						  VAR_INIT_STATUS_UNINITIALIZED);
		    }
		  else
		    gcc_unreachable ();
		}
	      /* If this function was first compiled with markers
		 enabled, but they're now disabled (e.g. LTO), drop
		 them on the floor.  */
	      else if (gimple_debug_nonbind_marker_p (stmt)
		       && !MAY_HAVE_DEBUG_MARKER_INSNS)
		goto delink_debug_stmt;
	      else if (gimple_debug_begin_stmt_p (stmt))
		val = GEN_RTX_DEBUG_MARKER_BEGIN_STMT_PAT ();
	      else if (gimple_debug_inline_entry_p (stmt))
		{
		  tree block = gimple_block (stmt);

		  if (block)
		    val = GEN_RTX_DEBUG_MARKER_INLINE_ENTRY_PAT ();
		  else
		    goto delink_debug_stmt;
		}
	      else
		gcc_unreachable ();

	      last = get_last_insn ();

	      set_curr_insn_location (gimple_location (stmt));

	      emit_debug_insn (val);

	      if (dump_file && (dump_flags & TDF_DETAILS))
		{
		  /* We can't dump the insn with a TREE where an RTX
		     is expected.  */
		  if (GET_CODE (val) == VAR_LOCATION)
		    {
		      gcc_checking_assert (PAT_VAR_LOCATION_LOC (val)
					   == (rtx)value);
		      PAT_VAR_LOCATION_LOC (val) = const0_rtx;
		    }
		  maybe_dump_rtl_for_gimple_stmt (stmt, last);
		  if (GET_CODE (val) == VAR_LOCATION)
		    PAT_VAR_LOCATION_LOC (val) = (rtx)value;
		}

	    delink_debug_stmt:
	      /* In order not to generate too many debug temporaries,
		 we delink all uses of debug statements we already expanded.
		 Therefore debug statements between definition and real
		 use of TERed SSA names will continue to use the SSA name,
		 and not be replaced with debug temps.  */
	      delink_stmt_imm_use (stmt);

	      gsi = nsi;
	      gsi_next (&nsi);
	      if (gsi_end_p (nsi))
		break;
	      stmt = gsi_stmt (nsi);
	      if (!is_gimple_debug (stmt))
		break;
	    }

	  set_curr_insn_location (sloc);
	}
      else
	{
	  gcall *call_stmt = dyn_cast <gcall *> (stmt);
	  if (call_stmt
	      && gimple_call_tail_p (call_stmt)
	      && disable_tail_calls)
	    gimple_call_set_tail (call_stmt, false);

	  if (call_stmt && gimple_call_tail_p (call_stmt))
	    {
	      bool can_fallthru;
	      new_bb = expand_gimple_tailcall (bb, call_stmt, &can_fallthru);
	      if (new_bb)
		{
		  if (can_fallthru)
		    bb = new_bb;
		  else
		    return new_bb;
		}
	    }
	  else
	    {
	      def_operand_p def_p;
	      def_p = SINGLE_SSA_DEF_OPERAND (stmt, SSA_OP_DEF);

	      if (def_p != NULL)
		{
		  /* Ignore this stmt if it is in the list of
		     replaceable expressions.  */
		  if (SA.values
		      && bitmap_bit_p (SA.values,
				       SSA_NAME_VERSION (DEF_FROM_PTR (def_p))))
		    continue;
		}
	      last = expand_gimple_stmt (stmt);
	      maybe_dump_rtl_for_gimple_stmt (stmt, last);
	    }
	}
    }

  currently_expanding_gimple_stmt = NULL;

  /* Expand implicit goto and convert goto_locus.  */
  FOR_EACH_EDGE (e, ei, bb->succs)
    {
      if (e->goto_locus != UNKNOWN_LOCATION)
	set_curr_insn_location (e->goto_locus);
      if ((e->flags & EDGE_FALLTHRU) && e->dest != bb->next_bb)
	{
	  emit_jump (label_rtx_for_bb (e->dest));
	  e->flags &= ~EDGE_FALLTHRU;
	}
    }

  /* Expanded RTL can create a jump in the last instruction of the block.
     This might later be assumed to be a jump to the successor and break
     edge insertion.  We need to insert a dummy move to prevent this.
     PR41440.  */
  if (single_succ_p (bb)
      && (single_succ_edge (bb)->flags & EDGE_FALLTHRU)
      && (last = get_last_insn ())
      && (JUMP_P (last)
	  || (DEBUG_INSN_P (last)
	      && JUMP_P (prev_nondebug_insn (last)))))
    {
      rtx dummy = gen_reg_rtx (SImode);
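      /* A no-op self-move: it changes nothing, but keeps the jump from
	 being the final insn of the block.  */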
      emit_insn_after_noloc (gen_move_insn (dummy, dummy), last, NULL);
    }

  do_pending_stack_adjust ();

  /* Find the block tail.  The last insn in the block is the insn
     before a barrier and/or table jump insn.  */
  last = get_last_insn ();
  if (BARRIER_P (last))
    last = PREV_INSN (last);
  if (JUMP_TABLE_DATA_P (last))
    last = PREV_INSN (PREV_INSN (last));
  BB_END (bb) = last;

  update_bb_for_insn (bb);

  return bb;
}


/* Create a basic block for initialization code.  */

static basic_block
construct_init_block (void)
{
  basic_block init_block, first_block;
  edge e = NULL;
  int flags;

  /* Multiple entry points not supported yet.  */
  gcc_assert (EDGE_COUNT (ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs) == 1);
  init_rtl_bb_info (ENTRY_BLOCK_PTR_FOR_FN (cfun));
  init_rtl_bb_info (EXIT_BLOCK_PTR_FOR_FN (cfun));
  ENTRY_BLOCK_PTR_FOR_FN (cfun)->flags |= BB_RTL;
  EXIT_BLOCK_PTR_FOR_FN (cfun)->flags |= BB_RTL;

  e = EDGE_SUCC (ENTRY_BLOCK_PTR_FOR_FN (cfun), 0);

  /* When the entry edge points to the first basic block, we don't need
     a jump; otherwise we have to jump into the proper target.  */
  if (e && e->dest != ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb)
    {
      tree label = gimple_block_label (e->dest);

      emit_jump (jump_target_rtx (label));
      flags = 0;
    }
  else
    flags = EDGE_FALLTHRU;

  init_block = create_basic_block (NEXT_INSN (get_insns ()),
				   get_last_insn (),
				   ENTRY_BLOCK_PTR_FOR_FN (cfun));
  init_block->count = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count;
  add_bb_to_loop (init_block, ENTRY_BLOCK_PTR_FOR_FN (cfun)->loop_father);
  if (e)
    {
      first_block = e->dest;
      redirect_edge_succ (e, init_block);
      e = make_single_succ_edge (init_block, first_block, flags);
    }
  else
    e = make_single_succ_edge (init_block, EXIT_BLOCK_PTR_FOR_FN (cfun),
			       EDGE_FALLTHRU);

  update_bb_for_insn (init_block);
  return init_block;
}

/* For each lexical block, set BLOCK_NUMBER to the depth at which it is
   found in the block tree.  */

static void
set_block_levels (tree block, int level)
{
  while (block)
    {
      BLOCK_NUMBER (block) = level;
      set_block_levels (BLOCK_SUBBLOCKS (block), level + 1);
      block = BLOCK_CHAIN (block);
    }
}

/* Create a block containing landing pads and similar stuff.  */

static void
construct_exit_block (void)
{
  rtx_insn *head = get_last_insn ();
  rtx_insn *end;
  basic_block exit_block;
  edge e, e2;
  unsigned ix;
  edge_iterator ei;
  basic_block prev_bb = EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb;
  rtx_insn *orig_end = BB_END (prev_bb);

  rtl_profile_for_bb (EXIT_BLOCK_PTR_FOR_FN (cfun));

  /* Make sure the locus is set to the end of the function, so that
     epilogue line numbers and warnings are set properly.  */
  if (LOCATION_LOCUS (cfun->function_end_locus) != UNKNOWN_LOCATION)
    input_location = cfun->function_end_locus;

  /* Generate rtl for function exit.  */
  expand_function_end ();

  end = get_last_insn ();
  if (head == end)
    return;
  /* While emitting the function end we could move end of the last basic
     block.  */
  BB_END (prev_bb) = orig_end;
  while (NEXT_INSN (head) && NOTE_P (NEXT_INSN (head)))
    head = NEXT_INSN (head);
  /* But make sure exit_block starts with RETURN_LABEL, otherwise the
     bb count accounting will be confused.  Any instructions before that
     label are emitted for the case where PREV_BB falls through into the
     exit block, so append those instructions to prev_bb in that case.  */
  if (NEXT_INSN (head) != return_label)
    {
      while (NEXT_INSN (head) != return_label)
	{
	  if (!NOTE_P (NEXT_INSN (head)))
	    BB_END (prev_bb) = NEXT_INSN (head);
	  head = NEXT_INSN (head);
	}
    }
  exit_block = create_basic_block (NEXT_INSN (head), end, prev_bb);
  exit_block->count = EXIT_BLOCK_PTR_FOR_FN (cfun)->count;
  add_bb_to_loop (exit_block, EXIT_BLOCK_PTR_FOR_FN (cfun)->loop_father);

  ix = 0;
  while (ix < EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds))
    {
      e = EDGE_PRED (EXIT_BLOCK_PTR_FOR_FN (cfun), ix);
      if (!(e->flags & EDGE_ABNORMAL))
	redirect_edge_succ (e, exit_block);
      else
	ix++;
    }

  e = make_single_succ_edge (exit_block, EXIT_BLOCK_PTR_FOR_FN (cfun),
			     EDGE_FALLTHRU);
  FOR_EACH_EDGE (e2, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
    if (e2 != e)
      {
	exit_block->count -= e2->count ();
      }
  update_bb_for_insn (exit_block);
}

/* Helper function for discover_nonconstant_array_refs.
   Look for ARRAY_REF nodes with non-constant indexes and mark them
   addressable.  */
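/* E.g. given "int v[2]; ... v[i] ..." with a non-constant index I, V is
   marked addressable so that it is assigned a stack slot rather than a
   register.  */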

static tree
discover_nonconstant_array_refs_r (tree *tp, int *walk_subtrees,
				   void *data ATTRIBUTE_UNUSED)
{
  tree t = *tp;

  if (IS_TYPE_OR_DECL_P (t))
    *walk_subtrees = 0;
  else if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
    {
      while (((TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
	      && is_gimple_min_invariant (TREE_OPERAND (t, 1))
	      && (!TREE_OPERAND (t, 2)
		  || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
	     || (TREE_CODE (t) == COMPONENT_REF
		 && (!TREE_OPERAND (t, 2)
		     || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
	     || TREE_CODE (t) == BIT_FIELD_REF
	     || TREE_CODE (t) == REALPART_EXPR
	     || TREE_CODE (t) == IMAGPART_EXPR
	     || TREE_CODE (t) == VIEW_CONVERT_EXPR
	     || CONVERT_EXPR_P (t))
	t = TREE_OPERAND (t, 0);

      if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
	{
	  t = get_base_address (t);
	  if (t && DECL_P (t)
	      && DECL_MODE (t) != BLKmode)
	    TREE_ADDRESSABLE (t) = 1;
	}

      *walk_subtrees = 0;
    }

  return NULL_TREE;
}

/* RTL expansion is not able to compile array references with variable
   offsets for arrays stored in a single register.  Discover such
   expressions and mark variables as addressable to avoid this
   scenario.  */

static void
discover_nonconstant_array_refs (void)
{
  basic_block bb;
  gimple_stmt_iterator gsi;

  FOR_EACH_BB_FN (bb, cfun)
    for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
      {
	gimple *stmt = gsi_stmt (gsi);
	if (!is_gimple_debug (stmt))
	  walk_gimple_op (stmt, discover_nonconstant_array_refs_r, NULL);
      }
}

/* This function sets crtl->args.internal_arg_pointer to a virtual
   register if DRAP is needed.  Local register allocator will replace
   virtual_incoming_args_rtx with the virtual register.  */

static void
expand_stack_alignment (void)
{
  rtx drap_rtx;
  unsigned int preferred_stack_boundary;

  if (! SUPPORTS_STACK_ALIGNMENT)
    return;

  if (cfun->calls_alloca
      || cfun->has_nonlocal_label
      || crtl->has_nonlocal_goto)
    crtl->need_drap = true;

  /* Call update_stack_boundary here again to update incoming stack
     boundary.  It may set incoming stack alignment to a different
     value after RTL expansion.  TARGET_FUNCTION_OK_FOR_SIBCALL may
     use the minimum incoming stack alignment to check if it is OK
     to perform sibcall optimization since sibcall optimization will
     only align the outgoing stack to incoming stack boundary.  */
  if (targetm.calls.update_stack_boundary)
    targetm.calls.update_stack_boundary ();

  /* The incoming stack frame has to be aligned at least at
     parm_stack_boundary.  */
  gcc_assert (crtl->parm_stack_boundary <= INCOMING_STACK_BOUNDARY);

  /* Update crtl->stack_alignment_estimated and use it later to align
     stack.  We check PREFERRED_STACK_BOUNDARY if there may be non-call
     exceptions since callgraph doesn't collect incoming stack alignment
     in this case.  */
  if (cfun->can_throw_non_call_exceptions
      && PREFERRED_STACK_BOUNDARY > crtl->preferred_stack_boundary)
    preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
  else
    preferred_stack_boundary = crtl->preferred_stack_boundary;
  if (preferred_stack_boundary > crtl->stack_alignment_estimated)
    crtl->stack_alignment_estimated = preferred_stack_boundary;
  if (preferred_stack_boundary > crtl->stack_alignment_needed)
    crtl->stack_alignment_needed = preferred_stack_boundary;

  gcc_assert (crtl->stack_alignment_needed
	      <= crtl->stack_alignment_estimated);

  crtl->stack_realign_needed
    = INCOMING_STACK_BOUNDARY < crtl->stack_alignment_estimated;
  crtl->stack_realign_tried = crtl->stack_realign_needed;

  crtl->stack_realign_processed = true;

  /* Target has to redefine TARGET_GET_DRAP_RTX to support stack
     alignment.  */
  gcc_assert (targetm.calls.get_drap_rtx != NULL);
  drap_rtx = targetm.calls.get_drap_rtx ();

  /* stack_realign_drap and drap_rtx must match.  */
  gcc_assert ((stack_realign_drap != 0) == (drap_rtx != NULL));

  /* Do nothing if NULL is returned, which means DRAP is not needed.  */
  if (drap_rtx != NULL)
    {
      crtl->args.internal_arg_pointer = drap_rtx;

      /* Call fixup_tail_calls to clean up REG_EQUIV note if DRAP is
	 needed.  */
      fixup_tail_calls ();
    }
}


static void
expand_main_function (void)
{
#if (defined(INVOKE__main)			\
     || (!defined(HAS_INIT_SECTION)		\
	 && !defined(INIT_SECTION_ASM_OP)	\
	 && !defined(INIT_ARRAY_SECTION_ASM_OP)))
  emit_library_call (init_one_libfunc (NAME__MAIN), LCT_NORMAL, VOIDmode);
#endif
}


/* Expand code to initialize the stack_protect_guard.  This is invoked at
   the beginning of a function to be protected.  */

static void
stack_protect_prologue (void)
{
  tree guard_decl = targetm.stack_protect_guard ();
  rtx x, y;

  x = expand_normal (crtl->stack_protect_guard);
  if (guard_decl)
    y = expand_normal (guard_decl);
  else
    y = const0_rtx;

  /* Allow the target to copy from Y to X without leaking Y into a
     register.  */
  if (targetm.have_stack_protect_set ())
    if (rtx_insn *insn = targetm.gen_stack_protect_set (x, y))
      {
	emit_insn (insn);
	return;
      }

  /* Otherwise do a straight move.  */
  emit_move_insn (x, y);
}

/* Translate the intermediate representation contained in the CFG
   from GIMPLE trees to RTL.

   We do conversion per basic block and preserve/update the tree CFG.
   This implies we have to do some magic as the CFG can simultaneously
   consist of basic blocks containing RTL and GIMPLE trees.  This can
   confuse the CFG hooks, so be careful to not manipulate CFG during
   the expansion.  */

namespace {

const pass_data pass_data_expand =
{
  RTL_PASS, /* type */
  "expand", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_EXPAND, /* tv_id */
  ( PROP_ssa | PROP_gimple_leh | PROP_cfg
    | PROP_gimple_lcx
    | PROP_gimple_lvec
    | PROP_gimple_lva), /* properties_required */
  PROP_rtl, /* properties_provided */
  ( PROP_ssa | PROP_trees ), /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_expand : public rtl_opt_pass
{
public:
  pass_expand (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_expand, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *);

}; // class pass_expand

unsigned int
pass_expand::execute (function *fun)
{
  basic_block bb, init_block;
  edge_iterator ei;
  edge e;
  rtx_insn *var_seq, *var_ret_seq;
  unsigned i;

  timevar_push (TV_OUT_OF_SSA);
  rewrite_out_of_ssa (&SA);
  timevar_pop (TV_OUT_OF_SSA);
  SA.partition_to_pseudo = XCNEWVEC (rtx, SA.map->num_partitions);

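  /* Break up deep TER chains referenced from debug binds up front; see
     avoid_deep_ter_for_debug.  */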
624538fd1498Szrj if (MAY_HAVE_DEBUG_BIND_STMTS && flag_tree_ter)
624638fd1498Szrj {
624738fd1498Szrj gimple_stmt_iterator gsi;
624838fd1498Szrj FOR_EACH_BB_FN (bb, cfun)
624938fd1498Szrj for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
625038fd1498Szrj if (gimple_debug_bind_p (gsi_stmt (gsi)))
625138fd1498Szrj avoid_deep_ter_for_debug (gsi_stmt (gsi), 0);
625238fd1498Szrj }
625338fd1498Szrj
625438fd1498Szrj /* Make sure all values used by the optimization passes have sane
625538fd1498Szrj defaults. */
625638fd1498Szrj reg_renumber = 0;
625738fd1498Szrj
625838fd1498Szrj /* Some backends want to know that we are expanding to RTL. */
625938fd1498Szrj currently_expanding_to_rtl = 1;
626038fd1498Szrj /* Dominators are not kept up-to-date as we may create new basic-blocks. */
626138fd1498Szrj free_dominance_info (CDI_DOMINATORS);
626238fd1498Szrj
626338fd1498Szrj rtl_profile_for_bb (ENTRY_BLOCK_PTR_FOR_FN (fun));
626438fd1498Szrj
626538fd1498Szrj if (chkp_function_instrumented_p (current_function_decl))
626638fd1498Szrj chkp_reset_rtl_bounds ();
626738fd1498Szrj
626838fd1498Szrj insn_locations_init ();
626938fd1498Szrj if (!DECL_IS_BUILTIN (current_function_decl))
627038fd1498Szrj {
627138fd1498Szrj /* Eventually, all FEs should explicitly set function_start_locus. */
627238fd1498Szrj if (LOCATION_LOCUS (fun->function_start_locus) == UNKNOWN_LOCATION)
627338fd1498Szrj set_curr_insn_location
627438fd1498Szrj (DECL_SOURCE_LOCATION (current_function_decl));
627538fd1498Szrj else
627638fd1498Szrj set_curr_insn_location (fun->function_start_locus);
627738fd1498Szrj }
627838fd1498Szrj else
627938fd1498Szrj set_curr_insn_location (UNKNOWN_LOCATION);
628038fd1498Szrj prologue_location = curr_insn_location ();
628138fd1498Szrj
628238fd1498Szrj #ifdef INSN_SCHEDULING
628338fd1498Szrj init_sched_attrs ();
628438fd1498Szrj #endif
628538fd1498Szrj
628638fd1498Szrj /* Make sure first insn is a note even if we don't want linenums.
628738fd1498Szrj This makes sure the first insn will never be deleted.
628838fd1498Szrj Also, final expects a note to appear there. */
628938fd1498Szrj emit_note (NOTE_INSN_DELETED);
629038fd1498Szrj
629138fd1498Szrj /* Mark arrays indexed with non-constant indices with TREE_ADDRESSABLE. */
629238fd1498Szrj discover_nonconstant_array_refs ();
629338fd1498Szrj
629438fd1498Szrj targetm.expand_to_rtl_hook ();
629538fd1498Szrj crtl->init_stack_alignment ();
629638fd1498Szrj fun->cfg->max_jumptable_ents = 0;
629738fd1498Szrj
629838fd1498Szrj /* Resovle the function section. Some targets, like ARM EABI rely on knowledge
629938fd1498Szrj of the function section at exapnsion time to predict distance of calls. */
630038fd1498Szrj resolve_unique_section (current_function_decl, 0, flag_function_sections);
630138fd1498Szrj
630238fd1498Szrj /* Expand the variables recorded during gimple lowering. */
630338fd1498Szrj timevar_push (TV_VAR_EXPAND);
630438fd1498Szrj start_sequence ();
630538fd1498Szrj
630638fd1498Szrj var_ret_seq = expand_used_vars ();
630738fd1498Szrj
630838fd1498Szrj var_seq = get_insns ();
630938fd1498Szrj end_sequence ();
631038fd1498Szrj timevar_pop (TV_VAR_EXPAND);
631138fd1498Szrj
631238fd1498Szrj /* Honor stack protection warnings. */
631338fd1498Szrj if (warn_stack_protect)
631438fd1498Szrj {
631538fd1498Szrj if (fun->calls_alloca)
631638fd1498Szrj warning (OPT_Wstack_protector,
631738fd1498Szrj "stack protector not protecting local variables: "
631838fd1498Szrj "variable length buffer");
631938fd1498Szrj if (has_short_buffer && !crtl->stack_protect_guard)
632038fd1498Szrj warning (OPT_Wstack_protector,
632138fd1498Szrj "stack protector not protecting function: "
632238fd1498Szrj "all local arrays are less than %d bytes long",
632338fd1498Szrj (int) PARAM_VALUE (PARAM_SSP_BUFFER_SIZE));
632438fd1498Szrj }
632538fd1498Szrj
632638fd1498Szrj /* Set up parameters and prepare for return, for the function. */
632738fd1498Szrj expand_function_start (current_function_decl);
632838fd1498Szrj
632938fd1498Szrj /* If we emitted any instructions for setting up the variables,
633038fd1498Szrj emit them before the FUNCTION_START note. */
633138fd1498Szrj if (var_seq)
633238fd1498Szrj {
633338fd1498Szrj emit_insn_before (var_seq, parm_birth_insn);
633438fd1498Szrj
633538fd1498Szrj /* In expand_function_end we'll insert the alloca save/restore
633638fd1498Szrj before parm_birth_insn. We've just insertted an alloca call.
633738fd1498Szrj Adjust the pointer to match. */
633838fd1498Szrj parm_birth_insn = var_seq;
633938fd1498Szrj }
634038fd1498Szrj
634138fd1498Szrj /* Now propagate the RTL assignment of each partition to the
634238fd1498Szrj underlying var of each SSA_NAME. */
634338fd1498Szrj tree name;
634438fd1498Szrj
634538fd1498Szrj FOR_EACH_SSA_NAME (i, name, cfun)
634638fd1498Szrj {
634738fd1498Szrj /* We might have generated new SSA names in
634838fd1498Szrj update_alias_info_with_stack_vars. They will have a NULL
634938fd1498Szrj defining statements, and won't be part of the partitioning,
635038fd1498Szrj so ignore those. */
635138fd1498Szrj if (!SSA_NAME_DEF_STMT (name))
635238fd1498Szrj continue;
635338fd1498Szrj
635438fd1498Szrj adjust_one_expanded_partition_var (name);
635538fd1498Szrj }
635638fd1498Szrj
635738fd1498Szrj /* Clean up RTL of variables that straddle across multiple
635838fd1498Szrj partitions, and check that the rtl of any PARM_DECLs that are not
635938fd1498Szrj cleaned up is that of their default defs. */
636038fd1498Szrj FOR_EACH_SSA_NAME (i, name, cfun)
636138fd1498Szrj {
636238fd1498Szrj int part;
636338fd1498Szrj
636438fd1498Szrj /* We might have generated new SSA names in
636538fd1498Szrj update_alias_info_with_stack_vars. They will have a NULL
636638fd1498Szrj defining statements, and won't be part of the partitioning,
636738fd1498Szrj so ignore those. */
636838fd1498Szrj if (!SSA_NAME_DEF_STMT (name))
636938fd1498Szrj continue;
637038fd1498Szrj part = var_to_partition (SA.map, name);
637138fd1498Szrj if (part == NO_PARTITION)
637238fd1498Szrj continue;
637338fd1498Szrj
637438fd1498Szrj /* If this decl was marked as living in multiple places, reset
637538fd1498Szrj this now to NULL. */
637638fd1498Szrj tree var = SSA_NAME_VAR (name);
637738fd1498Szrj if (var && DECL_RTL_IF_SET (var) == pc_rtx)
637838fd1498Szrj SET_DECL_RTL (var, NULL);
637938fd1498Szrj /* Check that the pseudos chosen by assign_parms are those of
638038fd1498Szrj the corresponding default defs. */
638138fd1498Szrj else if (SSA_NAME_IS_DEFAULT_DEF (name)
638238fd1498Szrj && (TREE_CODE (var) == PARM_DECL
638338fd1498Szrj || TREE_CODE (var) == RESULT_DECL))
638438fd1498Szrj {
638538fd1498Szrj rtx in = DECL_RTL_IF_SET (var);
638638fd1498Szrj gcc_assert (in);
638738fd1498Szrj rtx out = SA.partition_to_pseudo[part];
638838fd1498Szrj gcc_assert (in == out);
638938fd1498Szrj
639038fd1498Szrj /* Now reset VAR's RTL to IN, so that the _EXPR attrs match
639138fd1498Szrj those expected by debug backends for each parm and for
639238fd1498Szrj the result. This is particularly important for stabs,
639338fd1498Szrj whose register elimination from parm's DECL_RTL may cause
639438fd1498Szrj -fcompare-debug differences as SET_DECL_RTL changes reg's
639538fd1498Szrj attrs. So, make sure the RTL already has the parm as the
639638fd1498Szrj EXPR, so that it won't change. */
639738fd1498Szrj SET_DECL_RTL (var, NULL_RTX);
639838fd1498Szrj if (MEM_P (in))
639938fd1498Szrj set_mem_attributes (in, var, true);
640038fd1498Szrj SET_DECL_RTL (var, in);
640138fd1498Szrj }
640238fd1498Szrj }
640338fd1498Szrj
640438fd1498Szrj /* If this function is `main', emit a call to `__main'
640538fd1498Szrj to run global initializers, etc. */
640638fd1498Szrj if (DECL_NAME (current_function_decl)
640738fd1498Szrj && MAIN_NAME_P (DECL_NAME (current_function_decl))
640838fd1498Szrj && DECL_FILE_SCOPE_P (current_function_decl))
640938fd1498Szrj expand_main_function ();
641038fd1498Szrj
641138fd1498Szrj /* Initialize the stack_protect_guard field. This must happen after the
641238fd1498Szrj call to __main (if any) so that the external decl is initialized. */
641338fd1498Szrj if (crtl->stack_protect_guard && targetm.stack_protect_runtime_enabled_p ())
641438fd1498Szrj stack_protect_prologue ();
641538fd1498Szrj
641638fd1498Szrj expand_phi_nodes (&SA);
641738fd1498Szrj
641838fd1498Szrj /* Release any stale SSA redirection data. */
641938fd1498Szrj redirect_edge_var_map_empty ();
642038fd1498Szrj
642138fd1498Szrj /* Register rtl specific functions for cfg. */
642238fd1498Szrj rtl_register_cfg_hooks ();
642338fd1498Szrj
642438fd1498Szrj init_block = construct_init_block ();
642538fd1498Szrj
642638fd1498Szrj /* Clear EDGE_EXECUTABLE on the entry edge(s). It is cleaned from the
642738fd1498Szrj remaining edges later. */
642838fd1498Szrj FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR_FOR_FN (fun)->succs)
642938fd1498Szrj e->flags &= ~EDGE_EXECUTABLE;
643038fd1498Szrj
643138fd1498Szrj /* If the function has too many markers, drop them while expanding. */
643238fd1498Szrj if (cfun->debug_marker_count
643338fd1498Szrj >= PARAM_VALUE (PARAM_MAX_DEBUG_MARKER_COUNT))
643438fd1498Szrj cfun->debug_nonbind_markers = false;
643538fd1498Szrj
643638fd1498Szrj lab_rtx_for_bb = new hash_map<basic_block, rtx_code_label *>;
643738fd1498Szrj FOR_BB_BETWEEN (bb, init_block->next_bb, EXIT_BLOCK_PTR_FOR_FN (fun),
643838fd1498Szrj next_bb)
643938fd1498Szrj bb = expand_gimple_basic_block (bb, var_ret_seq != NULL_RTX);
644038fd1498Szrj
644138fd1498Szrj if (MAY_HAVE_DEBUG_BIND_INSNS)
644238fd1498Szrj expand_debug_locations ();
644338fd1498Szrj
644438fd1498Szrj if (deep_ter_debug_map)
644538fd1498Szrj {
644638fd1498Szrj delete deep_ter_debug_map;
644738fd1498Szrj deep_ter_debug_map = NULL;
644838fd1498Szrj }
644938fd1498Szrj
645038fd1498Szrj /* Free stuff we no longer need after GIMPLE optimizations. */
645138fd1498Szrj free_dominance_info (CDI_DOMINATORS);
645238fd1498Szrj free_dominance_info (CDI_POST_DOMINATORS);
645338fd1498Szrj delete_tree_cfg_annotations (fun);
645438fd1498Szrj
645538fd1498Szrj timevar_push (TV_OUT_OF_SSA);
645638fd1498Szrj finish_out_of_ssa (&SA);
645738fd1498Szrj timevar_pop (TV_OUT_OF_SSA);
645838fd1498Szrj
645938fd1498Szrj timevar_push (TV_POST_EXPAND);
646038fd1498Szrj /* We are no longer in SSA form. */
646138fd1498Szrj fun->gimple_df->in_ssa_p = false;
646238fd1498Szrj loops_state_clear (LOOP_CLOSED_SSA);
646338fd1498Szrj
646438fd1498Szrj /* Expansion is used by optimization passes too, set maybe_hot_insn_p
646538fd1498Szrj conservatively to true until they are all profile aware. */
646638fd1498Szrj delete lab_rtx_for_bb;
646738fd1498Szrj free_histograms (fun);
646838fd1498Szrj
646938fd1498Szrj construct_exit_block ();
647038fd1498Szrj insn_locations_finalize ();
647138fd1498Szrj
647238fd1498Szrj if (var_ret_seq)
647338fd1498Szrj {
647438fd1498Szrj rtx_insn *after = return_label;
647538fd1498Szrj rtx_insn *next = NEXT_INSN (after);
647638fd1498Szrj if (next && NOTE_INSN_BASIC_BLOCK_P (next))
647738fd1498Szrj after = next;
647838fd1498Szrj emit_insn_after (var_ret_seq, after);
647938fd1498Szrj }
648038fd1498Szrj
648138fd1498Szrj /* Zap the tree EH table. */
648238fd1498Szrj set_eh_throw_stmt_table (fun, NULL);
648338fd1498Szrj
648438fd1498Szrj /* We need JUMP_LABEL be set in order to redirect jumps, and hence
648538fd1498Szrj split edges which edge insertions might do. */
648638fd1498Szrj rebuild_jump_labels (get_insns ());
648738fd1498Szrj
648838fd1498Szrj FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (fun),
648938fd1498Szrj EXIT_BLOCK_PTR_FOR_FN (fun), next_bb)
649038fd1498Szrj {
649138fd1498Szrj edge e;
649238fd1498Szrj edge_iterator ei;
649338fd1498Szrj for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
649438fd1498Szrj {
649538fd1498Szrj if (e->insns.r)
649638fd1498Szrj {
649738fd1498Szrj rebuild_jump_labels_chain (e->insns.r);
649838fd1498Szrj /* Put insns after parm birth, but before
649938fd1498Szrj NOTE_INSNS_FUNCTION_BEG. */
650038fd1498Szrj if (e->src == ENTRY_BLOCK_PTR_FOR_FN (fun)
650138fd1498Szrj && single_succ_p (ENTRY_BLOCK_PTR_FOR_FN (fun)))
650238fd1498Szrj {
650338fd1498Szrj rtx_insn *insns = e->insns.r;
650438fd1498Szrj e->insns.r = NULL;
650538fd1498Szrj if (NOTE_P (parm_birth_insn)
650638fd1498Szrj && NOTE_KIND (parm_birth_insn) == NOTE_INSN_FUNCTION_BEG)
650738fd1498Szrj emit_insn_before_noloc (insns, parm_birth_insn, e->dest);
650838fd1498Szrj else
650938fd1498Szrj emit_insn_after_noloc (insns, parm_birth_insn, e->dest);
651038fd1498Szrj }
651138fd1498Szrj else
651238fd1498Szrj commit_one_edge_insertion (e);
651338fd1498Szrj }
651438fd1498Szrj else
651538fd1498Szrj ei_next (&ei);
651638fd1498Szrj }
651738fd1498Szrj }
651838fd1498Szrj
651938fd1498Szrj /* We're done expanding trees to RTL. */
652038fd1498Szrj currently_expanding_to_rtl = 0;
652138fd1498Szrj
652238fd1498Szrj flush_mark_addressable_queue ();
652338fd1498Szrj
  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (fun)->next_bb,
		  EXIT_BLOCK_PTR_FOR_FN (fun), next_bb)
    {
      edge e;
      edge_iterator ei;
      for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
	{
	  /* Clear EDGE_EXECUTABLE.  This flag is never used in the
	     backend.  */
	  e->flags &= ~EDGE_EXECUTABLE;

	  /* At the moment not all abnormal edges match the RTL
	     representation.  It is safe to remove them here as
	     find_many_sub_basic_blocks will rediscover them.
	     In the future we should get this fixed properly.  */
	  if ((e->flags & EDGE_ABNORMAL)
	      && !(e->flags & EDGE_SIBCALL))
	    remove_edge (e);
	  else
	    ei_next (&ei);
	}
    }

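  /* Expansion may have introduced new control flow inside what used to
     be single GIMPLE blocks; rescan every block for sub-basic-blocks,
     then drop any edges that turned out to be dead.  */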
  auto_sbitmap blocks (last_basic_block_for_fn (fun));
  bitmap_ones (blocks);
  find_many_sub_basic_blocks (blocks);
  purge_all_dead_edges ();

  /* After initial rtl generation, call back to finish generating
     exception support code.  We need to do this before cleaning up
     the CFG as the code does not expect dead landing pads.  */
  if (fun->eh->region_tree != NULL)
    finish_eh_generation ();

  /* Call expand_stack_alignment after finishing all
     updates to crtl->preferred_stack_boundary.  */
  expand_stack_alignment ();

  /* Fixup REG_EQUIV notes in the prologue if there are tailcalls in
     this function.  */
  if (crtl->tail_call_emit)
    fixup_tail_calls ();

  /* BB subdivision may have created basic blocks that are only reachable
     from unlikely bbs but not marked as such in the profile.  */
  if (optimize)
    propagate_unlikely_bbs_forward ();

  /* Remove unreachable blocks, otherwise we cannot compute dominators
     which are needed for loop state verification.  As a side-effect
     this also compacts blocks.
     ??? We cannot remove trivially dead insns here as for example
     the DRAP reg on i?86 is not magically live at this point.
     gcc.c-torture/execute/ipa-sra-2.c execution, -Os -m32 fails otherwise.  */
  cleanup_cfg (CLEANUP_NO_INSN_DEL);

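  /* The CFG is now in its final expanded shape; verify it when internal
     checking is enabled.  */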
  checking_verify_flow_info ();

  /* Initialize pseudos allocated for hard registers.  */
  emit_initial_value_sets ();

  /* And finally unshare all RTL.  */
  unshare_all_rtl ();

  /* There's no need to defer outputting this function any more; we
     know we want to output it.  */
  DECL_DEFER_OUTPUT (current_function_decl) = 0;

  /* Now that we're done expanding trees to RTL, we shouldn't have any
     more CONCATs anywhere.  */
  generating_concat_p = 0;

  if (dump_file)
    {
      fprintf (dump_file,
	       "\n\n;;\n;; Full RTL generated for this function:\n;;\n");
      /* And the pass manager will dump RTL for us.  */
    }

  /* If we're emitting a nested function, make sure its parent gets
     emitted as well.  Doing otherwise confuses debug info.  */
  {
    tree parent;
    for (parent = DECL_CONTEXT (current_function_decl);
	 parent != NULL_TREE;
	 parent = get_containing_scope (parent))
      if (TREE_CODE (parent) == FUNCTION_DECL)
	TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (parent)) = 1;
  }

  TREE_ASM_WRITTEN (current_function_decl) = 1;

  /* After expanding, the return labels are no longer needed.  */
  return_label = NULL;
  naked_return_label = NULL;

  /* After expanding, the tm_restart map is no longer needed.  */
  if (fun->gimple_df->tm_restart)
    fun->gimple_df->tm_restart = NULL;

  /* Tag the blocks with a depth number so that change_scope can find
     the common parent easily.  */
  set_block_levels (DECL_INITIAL (fun->decl), 0);
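  /* Expansion selected RTL profiles per basic block (rtl_profile_for_bb);
     restore the default profile for the passes that follow.  */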
  default_rtl_profile ();

  /* For -dx discard loops now, otherwise the IL verification done by
     clean_state will ICE.  */
  if (rtl_dump_and_exit)
    {
      cfun->curr_properties &= ~PROP_loops;
      loop_optimizer_finalize ();
    }

  timevar_pop (TV_POST_EXPAND);

  return 0;
}

} // anon namespace

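/* Factory function invoked by the pass manager to create an instance
   of the tree-to-RTL expansion pass.  */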
rtl_opt_pass *
make_pass_expand (gcc::context *ctxt)
{
  return new pass_expand (ctxt);
}