138fd1498Szrj /* Generic SSA value propagation engine.
238fd1498Szrj Copyright (C) 2004-2018 Free Software Foundation, Inc.
338fd1498Szrj Contributed by Diego Novillo <dnovillo@redhat.com>
438fd1498Szrj
538fd1498Szrj This file is part of GCC.
638fd1498Szrj
738fd1498Szrj GCC is free software; you can redistribute it and/or modify it
838fd1498Szrj under the terms of the GNU General Public License as published by the
938fd1498Szrj Free Software Foundation; either version 3, or (at your option) any
1038fd1498Szrj later version.
1138fd1498Szrj
1238fd1498Szrj GCC is distributed in the hope that it will be useful, but WITHOUT
1338fd1498Szrj ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
1438fd1498Szrj FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
1538fd1498Szrj for more details.
1638fd1498Szrj
1738fd1498Szrj You should have received a copy of the GNU General Public License
1838fd1498Szrj along with GCC; see the file COPYING3. If not see
1938fd1498Szrj <http://www.gnu.org/licenses/>. */
2038fd1498Szrj
2138fd1498Szrj #include "config.h"
2238fd1498Szrj #include "system.h"
2338fd1498Szrj #include "coretypes.h"
2438fd1498Szrj #include "backend.h"
2538fd1498Szrj #include "tree.h"
2638fd1498Szrj #include "gimple.h"
2738fd1498Szrj #include "ssa.h"
2838fd1498Szrj #include "gimple-pretty-print.h"
2938fd1498Szrj #include "dumpfile.h"
3038fd1498Szrj #include "gimple-fold.h"
3138fd1498Szrj #include "tree-eh.h"
3238fd1498Szrj #include "gimplify.h"
3338fd1498Szrj #include "gimple-iterator.h"
3438fd1498Szrj #include "tree-cfg.h"
3538fd1498Szrj #include "tree-ssa.h"
3638fd1498Szrj #include "tree-ssa-propagate.h"
3738fd1498Szrj #include "domwalk.h"
3838fd1498Szrj #include "cfgloop.h"
3938fd1498Szrj #include "tree-cfgcleanup.h"
4038fd1498Szrj #include "cfganal.h"
4138fd1498Szrj
4238fd1498Szrj /* This file implements a generic value propagation engine based on
4338fd1498Szrj the same propagation used by the SSA-CCP algorithm [1].
4438fd1498Szrj
4538fd1498Szrj Propagation is performed by simulating the execution of every
4638fd1498Szrj statement that produces the value being propagated. Simulation
4738fd1498Szrj proceeds as follows:
4838fd1498Szrj
4938fd1498Szrj 1- Initially, all edges of the CFG are marked not executable and
5038fd1498Szrj the CFG worklist is seeded with all the statements in the entry
5138fd1498Szrj basic block (block 0).
5238fd1498Szrj
5338fd1498Szrj 2- Every statement S is simulated with a call to the call-back
5438fd1498Szrj function SSA_PROP_VISIT_STMT. This evaluation may produce 3
5538fd1498Szrj results:
5638fd1498Szrj
5738fd1498Szrj SSA_PROP_NOT_INTERESTING: Statement S produces nothing of
5838fd1498Szrj interest and does not affect any of the work lists.
5938fd1498Szrj The statement may be simulated again if any of its input
6038fd1498Szrj operands change in future iterations of the simulator.
6138fd1498Szrj
6238fd1498Szrj SSA_PROP_VARYING: The value produced by S cannot be determined
6338fd1498Szrj at compile time. Further simulation of S is not required.
6438fd1498Szrj If S is a conditional jump, all the outgoing edges for the
6538fd1498Szrj block are considered executable and added to the work
6638fd1498Szrj list.
6738fd1498Szrj
6838fd1498Szrj SSA_PROP_INTERESTING: S produces a value that can be computed
6938fd1498Szrj at compile time. Its result can be propagated into the
7038fd1498Szrj statements that feed from S. Furthermore, if S is a
7138fd1498Szrj conditional jump, only the edge known to be taken is added
7238fd1498Szrj to the work list. Edges that are known not to execute are
7338fd1498Szrj never simulated.
7438fd1498Szrj
7538fd1498Szrj 3- PHI nodes are simulated with a call to SSA_PROP_VISIT_PHI. The
7638fd1498Szrj return value from SSA_PROP_VISIT_PHI has the same semantics as
7738fd1498Szrj described in #2.
7838fd1498Szrj
   4- Two work lists are kept.  Statements are only added to these
      lists if they produce one of SSA_PROP_INTERESTING or
      SSA_PROP_VARYING.
8238fd1498Szrj
8338fd1498Szrj CFG_BLOCKS contains the list of blocks to be simulated.
8438fd1498Szrj Blocks are added to this list if their incoming edges are
8538fd1498Szrj found executable.
8638fd1498Szrj
8738fd1498Szrj SSA_EDGE_WORKLIST contains the list of statements that we
8838fd1498Szrj need to revisit.
8938fd1498Szrj
   5- Simulation terminates when both work lists are drained.
9138fd1498Szrj
9238fd1498Szrj Before calling ssa_propagate, it is important to clear
9338fd1498Szrj prop_simulate_again_p for all the statements in the program that
9438fd1498Szrj should be simulated. This initialization allows an implementation
9538fd1498Szrj to specify which statements should never be simulated.
9638fd1498Szrj
9738fd1498Szrj It is also important to compute def-use information before calling
9838fd1498Szrj ssa_propagate.
9938fd1498Szrj
10038fd1498Szrj References:
10138fd1498Szrj
10238fd1498Szrj [1] Constant propagation with conditional branches,
10338fd1498Szrj Wegman and Zadeck, ACM TOPLAS 13(2):181-210.
10438fd1498Szrj
10538fd1498Szrj [2] Building an Optimizing Compiler,
10638fd1498Szrj Robert Morgan, Butterworth-Heinemann, 1998, Section 8.9.
10738fd1498Szrj
10838fd1498Szrj [3] Advanced Compiler Design and Implementation,
10938fd1498Szrj Steven Muchnick, Morgan Kaufmann, 1997, Section 12.6 */
11038fd1498Szrj
/* Worklists of control flow edge destinations.  This contains
   the CFG order number of the blocks so we can iterate in CFG
   order by visiting in bit-order.  We use two worklists to
   first make forward progress before iterating.  */
static bitmap cfg_blocks;
static bitmap cfg_blocks_back;
/* Mapping from basic block index to its position in the CFG
   (reverse post-) order, and the inverse mapping.  */
static int *bb_to_cfg_order;
static int *cfg_order_to_bb;

/* Worklists of SSA edges which will need reexamination as their
   definition has changed.  SSA edges are def-use edges in the SSA
   web.  For each D-U edge, we store the target statement or PHI node
   UID in a bitmap.  UIDs order stmts in execution order.  We use
   two worklists to first make forward progress before iterating.  */
static bitmap ssa_edge_worklist;
static bitmap ssa_edge_worklist_back;
/* Map from statement UID back to the statement, so members of the
   UID bitmaps above can be turned into statements again.  */
static vec<gimple *> uid_to_stmt;

/* Current RPO index in the iteration.  */
static int curr_order;
13138fd1498Szrj
13238fd1498Szrj
/* We have just defined a new value for VAR.  Add all immediate uses
   of VAR that are still marked for simulation to the appropriate SSA
   edge worklist:  uses in blocks the current RPO sweep already passed
   go to the backedge worklist, the rest to the regular one.  */

static void
add_ssa_edge (tree var)
{
  imm_use_iterator iter;
  use_operand_p use_p;

  FOR_EACH_IMM_USE_FAST (use_p, iter, var)
    {
      gimple *use_stmt = USE_STMT (use_p);
      /* Statements the propagator decided not to simulate again
	 (e.g. already varying) are never queued.  */
      if (!prop_simulate_again_p (use_stmt))
	continue;

      /* If we did not yet simulate the block wait for this to happen
         and do not add the stmt to the SSA edge worklist.  */
      basic_block use_bb = gimple_bb (use_stmt);
      if (! (use_bb->flags & BB_VISITED))
	continue;

      /* If this is a use on a not yet executable edge do not bother to
	 queue it.  */
      if (gimple_code (use_stmt) == GIMPLE_PHI
	  && !(EDGE_PRED (use_bb, PHI_ARG_INDEX_FROM_USE (use_p))->flags
	       & EDGE_EXECUTABLE))
	continue;

      /* Uses in blocks before CURR_ORDER were already passed in this
	 RPO sweep; defer them to the backedge worklist so the forward
	 sweep completes first.  */
      bitmap worklist;
      if (bb_to_cfg_order[gimple_bb (use_stmt)->index] < curr_order)
	worklist = ssa_edge_worklist_back;
      else
	worklist = ssa_edge_worklist;
      /* bitmap_set_bit returns true only if the bit was newly set,
	 so the map update and dumping happen once per queued stmt.  */
      if (bitmap_set_bit (worklist, gimple_uid (use_stmt)))
	{
	  uid_to_stmt[gimple_uid (use_stmt)] = use_stmt;
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "ssa_edge_worklist: adding SSA use in ");
	      print_gimple_stmt (dump_file, use_stmt, 0, TDF_SLIM);
	    }
	}
    }
}
17838fd1498Szrj
17938fd1498Szrj
18038fd1498Szrj /* Add edge E to the control flow worklist. */
18138fd1498Szrj
18238fd1498Szrj static void
add_control_edge(edge e)18338fd1498Szrj add_control_edge (edge e)
18438fd1498Szrj {
18538fd1498Szrj basic_block bb = e->dest;
18638fd1498Szrj if (bb == EXIT_BLOCK_PTR_FOR_FN (cfun))
18738fd1498Szrj return;
18838fd1498Szrj
18938fd1498Szrj /* If the edge had already been executed, skip it. */
19038fd1498Szrj if (e->flags & EDGE_EXECUTABLE)
19138fd1498Szrj return;
19238fd1498Szrj
19338fd1498Szrj e->flags |= EDGE_EXECUTABLE;
19438fd1498Szrj
195*58e805e6Szrj int bb_order = bb_to_cfg_order[bb->index];
196*58e805e6Szrj if (bb_order < curr_order)
197*58e805e6Szrj bitmap_set_bit (cfg_blocks_back, bb_order);
198*58e805e6Szrj else
199*58e805e6Szrj bitmap_set_bit (cfg_blocks, bb_order);
20038fd1498Szrj
20138fd1498Szrj if (dump_file && (dump_flags & TDF_DETAILS))
20238fd1498Szrj fprintf (dump_file, "Adding destination of edge (%d -> %d) to worklist\n",
20338fd1498Szrj e->src->index, e->dest->index);
20438fd1498Szrj }
20538fd1498Szrj
20638fd1498Szrj
/* Simulate the execution of STMT and update the work lists accordingly.  */

void
ssa_propagation_engine::simulate_stmt (gimple *stmt)
{
  enum ssa_prop_result val = SSA_PROP_NOT_INTERESTING;
  edge taken_edge = NULL;
  tree output_name = NULL_TREE;

  /* Pull the stmt off the SSA edge worklist.  */
  bitmap_clear_bit (ssa_edge_worklist, gimple_uid (stmt));

  /* Don't bother visiting statements that are already
     considered varying by the propagator.  */
  if (!prop_simulate_again_p (stmt))
    return;

  /* Dispatch to the PHI or statement visitor callback; both report
     their result through VAL, statements may additionally report a
     known taken edge and the SSA name they define.  */
  if (gimple_code (stmt) == GIMPLE_PHI)
    {
      val = visit_phi (as_a <gphi *> (stmt));
      output_name = gimple_phi_result (stmt);
    }
  else
    val = visit_stmt (stmt, &taken_edge, &output_name);

  if (val == SSA_PROP_VARYING)
    {
      /* A varying result is final; never simulate this stmt again.  */
      prop_set_simulate_again (stmt, false);

      /* If the statement produced a new varying value, add the SSA
	 edges coming out of OUTPUT_NAME.  */
      if (output_name)
	add_ssa_edge (output_name);

      /* If STMT transfers control out of its basic block, add
	 all outgoing edges to the work list.  */
      if (stmt_ends_bb_p (stmt))
	{
	  edge e;
	  edge_iterator ei;
	  basic_block bb = gimple_bb (stmt);
	  FOR_EACH_EDGE (e, ei, bb->succs)
	    add_control_edge (e);
	}
      return;
    }
  else if (val == SSA_PROP_INTERESTING)
    {
      /* If the statement produced new value, add the SSA edges coming
	 out of OUTPUT_NAME.  */
      if (output_name)
	add_ssa_edge (output_name);

      /* If we know which edge is going to be taken out of this block,
	 add it to the CFG work list.  */
      if (taken_edge)
	add_control_edge (taken_edge);
    }

  /* If there are no SSA uses on the stmt whose defs are simulated
     again then this stmt will be never visited again.  */
  bool has_simulate_again_uses = false;
  use_operand_p use_p;
  ssa_op_iter iter;
  if (gimple_code (stmt) == GIMPLE_PHI)
    {
      /* A PHI must be revisited while any incoming edge is still
	 unexecutable, or while any argument is defined by a stmt
	 that may still be simulated.  */
      edge_iterator ei;
      edge e;
      tree arg;
      FOR_EACH_EDGE (e, ei, gimple_bb (stmt)->preds)
	if (!(e->flags & EDGE_EXECUTABLE)
	    || ((arg = PHI_ARG_DEF_FROM_EDGE (stmt, e))
		&& TREE_CODE (arg) == SSA_NAME
		&& !SSA_NAME_IS_DEFAULT_DEF (arg)
		&& prop_simulate_again_p (SSA_NAME_DEF_STMT (arg))))
	  {
	    has_simulate_again_uses = true;
	    break;
	  }
    }
  else
    FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
      {
	gimple *def_stmt = SSA_NAME_DEF_STMT (USE_FROM_PTR (use_p));
	if (!gimple_nop_p (def_stmt)
	    && prop_simulate_again_p (def_stmt))
	  {
	    has_simulate_again_uses = true;
	    break;
	  }
      }
  if (!has_simulate_again_uses)
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, "marking stmt to be not simulated again\n");
      prop_set_simulate_again (stmt, false);
    }
}
30538fd1498Szrj
30638fd1498Szrj
/* Simulate the execution of BLOCK.  Evaluate the statement associated
   with each variable reference inside the block.  */

void
ssa_propagation_engine::simulate_block (basic_block block)
{
  gimple_stmt_iterator gsi;

  /* There is nothing to do for the exit block.  */
  if (block == EXIT_BLOCK_PTR_FOR_FN (cfun))
    return;

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "\nSimulating block %d\n", block->index);

  /* Always simulate PHI nodes, even if we have simulated this block
     before.  */
  for (gsi = gsi_start_phis (block); !gsi_end_p (gsi); gsi_next (&gsi))
    simulate_stmt (gsi_stmt (gsi));

  /* If this is the first time we've simulated this block, then we
     must simulate each of its statements.  */
  if (! (block->flags & BB_VISITED))
    {
      gimple_stmt_iterator j;
      unsigned int normal_edge_count;
      edge e, normal_edge;
      edge_iterator ei;

      for (j = gsi_start_bb (block); !gsi_end_p (j); gsi_next (&j))
	simulate_stmt (gsi_stmt (j));

      /* Note that we have simulated this block.  */
      block->flags |= BB_VISITED;

      /* We cannot predict when abnormal and EH edges will be executed, so
	 once a block is considered executable, we consider any
	 outgoing abnormal edges as executable.

	 TODO: This is not exactly true.  Simplifying statement might
	 prove it non-throwing and also computed goto can be handled
	 when destination is known.

	 At the same time, if this block has only one successor that is
	 reached by non-abnormal edges, then add that successor to the
	 worklist.  */
      normal_edge_count = 0;
      normal_edge = NULL;
      FOR_EACH_EDGE (e, ei, block->succs)
	{
	  if (e->flags & (EDGE_ABNORMAL | EDGE_EH))
	    add_control_edge (e);
	  else
	    {
	      normal_edge_count++;
	      normal_edge = e;
	    }
	}

      /* With a single normal successor, control must flow there.  */
      if (normal_edge_count == 1)
	add_control_edge (normal_edge);
    }
}
37038fd1498Szrj
37138fd1498Szrj
/* Initialize local data structures and work lists.  */

static void
ssa_prop_init (void)
{
  edge e;
  edge_iterator ei;
  basic_block bb;

  /* Worklists of SSA edges.  */
  ssa_edge_worklist = BITMAP_ALLOC (NULL);
  ssa_edge_worklist_back = BITMAP_ALLOC (NULL);

  /* Worklist of basic-blocks.  Compute an RPO ordering so worklist
     bits can be visited in approximate execution order.  */
  bb_to_cfg_order = XNEWVEC (int, last_basic_block_for_fn (cfun) + 1);
  cfg_order_to_bb = XNEWVEC (int, n_basic_blocks_for_fn (cfun));
  int n = pre_and_rev_post_order_compute_fn (cfun, NULL,
					     cfg_order_to_bb, false);
  for (int i = 0; i < n; ++i)
    bb_to_cfg_order[cfg_order_to_bb[i]] = i;
  cfg_blocks = BITMAP_ALLOC (NULL);
  cfg_blocks_back = BITMAP_ALLOC (NULL);

  /* Initially assume that every edge in the CFG is not executable.
     (including the edges coming out of the entry block).  Mark blocks
     as not visited, blocks not yet visited will have all their statements
     simulated once an incoming edge gets executable.
     Note UIDs are assigned walking blocks in RPO, so UID order is
     (approximate) execution order, which add_ssa_edge relies on.  */
  set_gimple_stmt_max_uid (cfun, 0);
  for (int i = 0; i < n; ++i)
    {
      gimple_stmt_iterator si;
      bb = BASIC_BLOCK_FOR_FN (cfun, cfg_order_to_bb[i]);

      /* PHIs first, then the block body, matching simulation order.  */
      for (si = gsi_start_phis (bb); !gsi_end_p (si); gsi_next (&si))
	{
	  gimple *stmt = gsi_stmt (si);
	  gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
	}

      for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
	{
	  gimple *stmt = gsi_stmt (si);
	  gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
	}

      bb->flags &= ~BB_VISITED;
      FOR_EACH_EDGE (e, ei, bb->succs)
	e->flags &= ~EDGE_EXECUTABLE;
    }
  uid_to_stmt.safe_grow (gimple_stmt_max_uid (cfun));

  /* Seed the algorithm by adding the successors of the entry block to the
     edge worklist.  */
  FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs)
    {
      e->flags &= ~EDGE_EXECUTABLE;
      add_control_edge (e);
    }
}
43138fd1498Szrj
43238fd1498Szrj
43338fd1498Szrj /* Free allocated storage. */
43438fd1498Szrj
43538fd1498Szrj static void
ssa_prop_fini(void)43638fd1498Szrj ssa_prop_fini (void)
43738fd1498Szrj {
43838fd1498Szrj BITMAP_FREE (cfg_blocks);
439*58e805e6Szrj BITMAP_FREE (cfg_blocks_back);
44038fd1498Szrj free (bb_to_cfg_order);
44138fd1498Szrj free (cfg_order_to_bb);
44238fd1498Szrj BITMAP_FREE (ssa_edge_worklist);
443*58e805e6Szrj BITMAP_FREE (ssa_edge_worklist_back);
44438fd1498Szrj uid_to_stmt.release ();
44538fd1498Szrj }
44638fd1498Szrj
44738fd1498Szrj
/* Return true if EXPR is an acceptable right-hand-side for a
   GIMPLE assignment.  We validate the entire tree, not just
   the root node, thus catching expressions that embed complex
   operands that are not permitted in GIMPLE.  This function
   is needed because the folding routines in fold-const.c
   may return such expressions in some cases, e.g., an array
   access with an embedded index addition.  It may make more
   sense to have folding routines that are sensitive to the
   constraints on GIMPLE operands, rather than abandoning
   any attempt to fold if the usual folding turns out to be too
   aggressive.  */

bool
valid_gimple_rhs_p (tree expr)
{
  enum tree_code code = TREE_CODE (expr);

  switch (TREE_CODE_CLASS (code))
    {
    case tcc_declaration:
      if (!is_gimple_variable (expr))
	return false;
      break;

    case tcc_constant:
      /* All constants are ok.  */
      break;

    case tcc_comparison:
      /* GENERIC allows comparisons with non-boolean types, reject
         those for GIMPLE.  Let vector-typed comparisons pass - rules
	 for GENERIC and GIMPLE are the same here.  */
      if (!(INTEGRAL_TYPE_P (TREE_TYPE (expr))
	    && (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE
		|| TYPE_PRECISION (TREE_TYPE (expr)) == 1))
	  && ! VECTOR_TYPE_P (TREE_TYPE (expr)))
	return false;

      /* Fallthru.  */
    case tcc_binary:
      /* Both operands of a binary expression must be gimple values.  */
      if (!is_gimple_val (TREE_OPERAND (expr, 0))
	  || !is_gimple_val (TREE_OPERAND (expr, 1)))
	return false;
      break;

    case tcc_unary:
      if (!is_gimple_val (TREE_OPERAND (expr, 0)))
	return false;
      break;

    case tcc_expression:
      switch (code)
	{
	case ADDR_EXPR:
	  {
	    tree t;
	    if (is_gimple_min_invariant (expr))
	      return true;
	    /* Walk down the reference chain checking that every index
	       operand is a gimple value.  */
	    t = TREE_OPERAND (expr, 0);
	    while (handled_component_p (t))
	      {
		/* ??? More checks needed, see the GIMPLE verifier.  */
		if ((TREE_CODE (t) == ARRAY_REF
		     || TREE_CODE (t) == ARRAY_RANGE_REF)
		    && !is_gimple_val (TREE_OPERAND (t, 1)))
		  return false;
		t = TREE_OPERAND (t, 0);
	      }
	    if (!is_gimple_id (t))
	      return false;
	  }
	  break;

	default:
	  if (get_gimple_rhs_class (code) == GIMPLE_TERNARY_RHS)
	    {
	      /* COND_EXPR/VEC_COND_EXPR allow a condition expression
		 as the first operand; all other operands must be
		 gimple values.  */
	      if (((code == VEC_COND_EXPR || code == COND_EXPR)
		   ? !is_gimple_condexpr (TREE_OPERAND (expr, 0))
		   : !is_gimple_val (TREE_OPERAND (expr, 0)))
		  || !is_gimple_val (TREE_OPERAND (expr, 1))
		  || !is_gimple_val (TREE_OPERAND (expr, 2)))
		return false;
	      break;
	    }
	  return false;
	}
      break;

    case tcc_vl_exp:
      return false;

    case tcc_exceptional:
      if (code == CONSTRUCTOR)
	{
	  /* All constructor elements must be gimple values.  */
	  unsigned i;
	  tree elt;
	  FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (expr), i, elt)
	    if (!is_gimple_val (elt))
	      return false;
	  return true;
	}
      if (code != SSA_NAME)
	return false;
      break;

    case tcc_reference:
      if (code == BIT_FIELD_REF)
	return is_gimple_val (TREE_OPERAND (expr, 0));
      return false;

    default:
      return false;
    }

  return true;
}
56438fd1498Szrj
56538fd1498Szrj
56638fd1498Szrj /* Return true if EXPR is a CALL_EXPR suitable for representation
56738fd1498Szrj as a single GIMPLE_CALL statement. If the arguments require
56838fd1498Szrj further gimplification, return false. */
56938fd1498Szrj
57038fd1498Szrj static bool
valid_gimple_call_p(tree expr)57138fd1498Szrj valid_gimple_call_p (tree expr)
57238fd1498Szrj {
57338fd1498Szrj unsigned i, nargs;
57438fd1498Szrj
57538fd1498Szrj if (TREE_CODE (expr) != CALL_EXPR)
57638fd1498Szrj return false;
57738fd1498Szrj
57838fd1498Szrj nargs = call_expr_nargs (expr);
57938fd1498Szrj for (i = 0; i < nargs; i++)
58038fd1498Szrj {
58138fd1498Szrj tree arg = CALL_EXPR_ARG (expr, i);
58238fd1498Szrj if (is_gimple_reg_type (TREE_TYPE (arg)))
58338fd1498Szrj {
58438fd1498Szrj if (!is_gimple_val (arg))
58538fd1498Szrj return false;
58638fd1498Szrj }
58738fd1498Szrj else
58838fd1498Szrj if (!is_gimple_lvalue (arg))
58938fd1498Szrj return false;
59038fd1498Szrj }
59138fd1498Szrj
59238fd1498Szrj return true;
59338fd1498Szrj }
59438fd1498Szrj
59538fd1498Szrj
59638fd1498Szrj /* Make SSA names defined by OLD_STMT point to NEW_STMT
59738fd1498Szrj as their defining statement. */
59838fd1498Szrj
59938fd1498Szrj void
move_ssa_defining_stmt_for_defs(gimple * new_stmt,gimple * old_stmt)60038fd1498Szrj move_ssa_defining_stmt_for_defs (gimple *new_stmt, gimple *old_stmt)
60138fd1498Szrj {
60238fd1498Szrj tree var;
60338fd1498Szrj ssa_op_iter iter;
60438fd1498Szrj
60538fd1498Szrj if (gimple_in_ssa_p (cfun))
60638fd1498Szrj {
60738fd1498Szrj /* Make defined SSA_NAMEs point to the new
60838fd1498Szrj statement as their definition. */
60938fd1498Szrj FOR_EACH_SSA_TREE_OPERAND (var, old_stmt, iter, SSA_OP_ALL_DEFS)
61038fd1498Szrj {
61138fd1498Szrj if (TREE_CODE (var) == SSA_NAME)
61238fd1498Szrj SSA_NAME_DEF_STMT (var) = new_stmt;
61338fd1498Szrj }
61438fd1498Szrj }
61538fd1498Szrj }
61638fd1498Szrj
/* Helper function for update_gimple_call and update_call_from_tree.
   A GIMPLE_CALL STMT is being replaced with GIMPLE_CALL NEW_STMT.
   Transfer the LHS, SSA definitions, virtual operands, location and
   block from STMT to NEW_STMT, then swap NEW_STMT in at *SI_P.  */

static void
finish_update_gimple_call (gimple_stmt_iterator *si_p, gimple *new_stmt,
			   gimple *stmt)
{
  gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt));
  /* Re-point SSA names defined by STMT at NEW_STMT.  */
  move_ssa_defining_stmt_for_defs (new_stmt, stmt);
  /* Preserve the virtual use/def chain of the original call.  */
  gimple_set_vuse (new_stmt, gimple_vuse (stmt));
  gimple_set_vdef (new_stmt, gimple_vdef (stmt));
  gimple_set_location (new_stmt, gimple_location (stmt));
  if (gimple_block (new_stmt) == NULL_TREE)
    gimple_set_block (new_stmt, gimple_block (stmt));
  gsi_replace (si_p, new_stmt, false);
}
63338fd1498Szrj
/* Update a GIMPLE_CALL statement at iterator *SI_P to call to FN
   with number of arguments NARGS, where the arguments in GIMPLE form
   follow NARGS argument.  Returns true (the replacement always
   succeeds).  */

bool
update_gimple_call (gimple_stmt_iterator *si_p, tree fn, int nargs, ...)
{
  va_list ap;
  gcall *new_stmt, *stmt = as_a <gcall *> (gsi_stmt (*si_p));

  gcc_assert (is_gimple_call (stmt));
  va_start (ap, nargs);
  new_stmt = gimple_build_call_valist (fn, nargs, ap);
  finish_update_gimple_call (si_p, new_stmt, stmt);
  va_end (ap);
  return true;
}
65138fd1498Szrj
/* Update a GIMPLE_CALL statement at iterator *SI_P to reflect the
   value of EXPR, which is expected to be the result of folding the
   call.  This can only be done if EXPR is a CALL_EXPR with valid
   GIMPLE operands as arguments, or if it is a suitable RHS expression
   for a GIMPLE_ASSIGN.  More complex expressions will require
   gimplification, which will introduce additional statements.  In this
   event, no update is performed, and the function returns false.
   Note that we cannot mutate a GIMPLE_CALL in-place, so we always
   replace the statement at *SI_P with an entirely new statement.
   The new statement need not be a call, e.g., if the original call
   folded to a constant.  */

bool
update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
{
  gimple *stmt = gsi_stmt (*si_p);

  if (valid_gimple_call_p (expr))
    {
      /* The call has simplified to another call.  */
      tree fn = CALL_EXPR_FN (expr);
      unsigned i;
      unsigned nargs = call_expr_nargs (expr);
      vec<tree> args = vNULL;
      gcall *new_stmt;

      if (nargs > 0)
	{
	  args.create (nargs);
	  args.safe_grow_cleared (nargs);

	  for (i = 0; i < nargs; i++)
	    args[i] = CALL_EXPR_ARG (expr, i);
	}

      new_stmt = gimple_build_call_vec (fn, args);
      finish_update_gimple_call (si_p, new_stmt, stmt);
      args.release ();

      return true;
    }
  else if (valid_gimple_rhs_p (expr))
    {
      tree lhs = gimple_call_lhs (stmt);
      gimple *new_stmt;

      /* The call has simplified to an expression
	 that cannot be represented as a GIMPLE_CALL.  */
      if (lhs)
	{
	  /* A value is expected.
	     Introduce a new GIMPLE_ASSIGN statement.  */
	  STRIP_USELESS_TYPE_CONVERSION (expr);
	  new_stmt = gimple_build_assign (lhs, expr);
	  move_ssa_defining_stmt_for_defs (new_stmt, stmt);
	  gimple_set_vuse (new_stmt, gimple_vuse (stmt));
	  gimple_set_vdef (new_stmt, gimple_vdef (stmt));
	}
      else if (!TREE_SIDE_EFFECTS (expr))
	{
	  /* No value is expected, and EXPR has no effect.
	     Replace it with an empty statement.  */
	  new_stmt = gimple_build_nop ();
	  if (gimple_in_ssa_p (cfun))
	    {
	      /* Drop the virtual definition and release the SSA
		 names the now-dead call defined.  */
	      unlink_stmt_vdef (stmt);
	      release_defs (stmt);
	    }
	}
      else
	{
	  /* No value is expected, but EXPR has an effect,
	     e.g., it could be a reference to a volatile
	     variable.  Create an assignment statement
	     with a dummy (unused) lhs variable.  */
	  STRIP_USELESS_TYPE_CONVERSION (expr);
	  if (gimple_in_ssa_p (cfun))
	    lhs = make_ssa_name (TREE_TYPE (expr));
	  else
	    lhs = create_tmp_var (TREE_TYPE (expr));
	  new_stmt = gimple_build_assign (lhs, expr);
	  gimple_set_vuse (new_stmt, gimple_vuse (stmt));
	  gimple_set_vdef (new_stmt, gimple_vdef (stmt));
	  move_ssa_defining_stmt_for_defs (new_stmt, stmt);
	}
      gimple_set_location (new_stmt, gimple_location (stmt));
      gsi_replace (si_p, new_stmt, false);
      return true;
    }
  else
    /* The call simplified to an expression that is
       not a valid GIMPLE RHS.  */
    return false;
}
74638fd1498Szrj
74738fd1498Szrj /* Entry point to the propagation engine.
74838fd1498Szrj
74938fd1498Szrj The VISIT_STMT virtual function is called for every statement
75038fd1498Szrj visited and the VISIT_PHI virtual function is called for every PHI
75138fd1498Szrj node visited. */
75238fd1498Szrj
75338fd1498Szrj void
ssa_propagate(void)75438fd1498Szrj ssa_propagation_engine::ssa_propagate (void)
75538fd1498Szrj {
75638fd1498Szrj ssa_prop_init ();
75738fd1498Szrj
758*58e805e6Szrj curr_order = 0;
759*58e805e6Szrj
760*58e805e6Szrj /* Iterate until the worklists are empty. We iterate both blocks
761*58e805e6Szrj and stmts in RPO order, using sets of two worklists to first
762*58e805e6Szrj complete the current iteration before iterating over backedges. */
763*58e805e6Szrj while (1)
76438fd1498Szrj {
765*58e805e6Szrj int next_block_order = (bitmap_empty_p (cfg_blocks)
766*58e805e6Szrj ? -1 : bitmap_first_set_bit (cfg_blocks));
767*58e805e6Szrj int next_stmt_uid = (bitmap_empty_p (ssa_edge_worklist)
768*58e805e6Szrj ? -1 : bitmap_first_set_bit (ssa_edge_worklist));
769*58e805e6Szrj if (next_block_order == -1 && next_stmt_uid == -1)
77038fd1498Szrj {
771*58e805e6Szrj if (bitmap_empty_p (cfg_blocks_back)
772*58e805e6Szrj && bitmap_empty_p (ssa_edge_worklist_back))
773*58e805e6Szrj break;
774*58e805e6Szrj
775*58e805e6Szrj if (dump_file && (dump_flags & TDF_DETAILS))
776*58e805e6Szrj fprintf (dump_file, "Regular worklists empty, now processing "
777*58e805e6Szrj "backedge destinations\n");
778*58e805e6Szrj std::swap (cfg_blocks, cfg_blocks_back);
779*58e805e6Szrj std::swap (ssa_edge_worklist, ssa_edge_worklist_back);
78038fd1498Szrj continue;
78138fd1498Szrj }
78238fd1498Szrj
783*58e805e6Szrj int next_stmt_bb_order = -1;
784*58e805e6Szrj gimple *next_stmt = NULL;
785*58e805e6Szrj if (next_stmt_uid != -1)
786*58e805e6Szrj {
787*58e805e6Szrj next_stmt = uid_to_stmt[next_stmt_uid];
788*58e805e6Szrj next_stmt_bb_order = bb_to_cfg_order[gimple_bb (next_stmt)->index];
789*58e805e6Szrj }
790*58e805e6Szrj
791*58e805e6Szrj /* Pull the next block to simulate off the worklist if it comes first. */
792*58e805e6Szrj if (next_block_order != -1
793*58e805e6Szrj && (next_stmt_bb_order == -1
794*58e805e6Szrj || next_block_order <= next_stmt_bb_order))
795*58e805e6Szrj {
796*58e805e6Szrj curr_order = next_block_order;
797*58e805e6Szrj bitmap_clear_bit (cfg_blocks, next_block_order);
798*58e805e6Szrj basic_block bb
799*58e805e6Szrj = BASIC_BLOCK_FOR_FN (cfun, cfg_order_to_bb [next_block_order]);
800*58e805e6Szrj simulate_block (bb);
801*58e805e6Szrj }
802*58e805e6Szrj /* Else simulate from the SSA edge worklist. */
803*58e805e6Szrj else
804*58e805e6Szrj {
805*58e805e6Szrj curr_order = next_stmt_bb_order;
806*58e805e6Szrj if (dump_file && (dump_flags & TDF_DETAILS))
807*58e805e6Szrj {
808*58e805e6Szrj fprintf (dump_file, "\nSimulating statement: ");
809*58e805e6Szrj print_gimple_stmt (dump_file, next_stmt, 0, dump_flags);
810*58e805e6Szrj }
811*58e805e6Szrj simulate_stmt (next_stmt);
812*58e805e6Szrj }
81338fd1498Szrj }
81438fd1498Szrj
81538fd1498Szrj ssa_prop_fini ();
81638fd1498Szrj }
81738fd1498Szrj
81838fd1498Szrj
81938fd1498Szrj /* Return true if STMT is of the form 'mem_ref = RHS', where 'mem_ref'
82038fd1498Szrj is a non-volatile pointer dereference, a structure reference or a
82138fd1498Szrj reference to a single _DECL. Ignore volatile memory references
82238fd1498Szrj because they are not interesting for the optimizers. */
82338fd1498Szrj
82438fd1498Szrj bool
stmt_makes_single_store(gimple * stmt)82538fd1498Szrj stmt_makes_single_store (gimple *stmt)
82638fd1498Szrj {
82738fd1498Szrj tree lhs;
82838fd1498Szrj
82938fd1498Szrj if (gimple_code (stmt) != GIMPLE_ASSIGN
83038fd1498Szrj && gimple_code (stmt) != GIMPLE_CALL)
83138fd1498Szrj return false;
83238fd1498Szrj
83338fd1498Szrj if (!gimple_vdef (stmt))
83438fd1498Szrj return false;
83538fd1498Szrj
83638fd1498Szrj lhs = gimple_get_lhs (stmt);
83738fd1498Szrj
83838fd1498Szrj /* A call statement may have a null LHS. */
83938fd1498Szrj if (!lhs)
84038fd1498Szrj return false;
84138fd1498Szrj
84238fd1498Szrj return (!TREE_THIS_VOLATILE (lhs)
84338fd1498Szrj && (DECL_P (lhs)
84438fd1498Szrj || REFERENCE_CLASS_P (lhs)));
84538fd1498Szrj }
84638fd1498Szrj
84738fd1498Szrj
84838fd1498Szrj /* Propagation statistics. */
84938fd1498Szrj struct prop_stats_d
85038fd1498Szrj {
85138fd1498Szrj long num_const_prop;
85238fd1498Szrj long num_copy_prop;
85338fd1498Szrj long num_stmts_folded;
85438fd1498Szrj long num_dce;
85538fd1498Szrj };
85638fd1498Szrj
85738fd1498Szrj static struct prop_stats_d prop_stats;
85838fd1498Szrj
85938fd1498Szrj /* Replace USE references in statement STMT with the values stored in
86038fd1498Szrj PROP_VALUE. Return true if at least one reference was replaced. */
86138fd1498Szrj
86238fd1498Szrj bool
replace_uses_in(gimple * stmt)86338fd1498Szrj substitute_and_fold_engine::replace_uses_in (gimple *stmt)
86438fd1498Szrj {
86538fd1498Szrj bool replaced = false;
86638fd1498Szrj use_operand_p use;
86738fd1498Szrj ssa_op_iter iter;
86838fd1498Szrj
86938fd1498Szrj FOR_EACH_SSA_USE_OPERAND (use, stmt, iter, SSA_OP_USE)
87038fd1498Szrj {
87138fd1498Szrj tree tuse = USE_FROM_PTR (use);
87238fd1498Szrj tree val = get_value (tuse);
87338fd1498Szrj
87438fd1498Szrj if (val == tuse || val == NULL_TREE)
87538fd1498Szrj continue;
87638fd1498Szrj
87738fd1498Szrj if (gimple_code (stmt) == GIMPLE_ASM
87838fd1498Szrj && !may_propagate_copy_into_asm (tuse))
87938fd1498Szrj continue;
88038fd1498Szrj
88138fd1498Szrj if (!may_propagate_copy (tuse, val))
88238fd1498Szrj continue;
88338fd1498Szrj
88438fd1498Szrj if (TREE_CODE (val) != SSA_NAME)
88538fd1498Szrj prop_stats.num_const_prop++;
88638fd1498Szrj else
88738fd1498Szrj prop_stats.num_copy_prop++;
88838fd1498Szrj
88938fd1498Szrj propagate_value (use, val);
89038fd1498Szrj
89138fd1498Szrj replaced = true;
89238fd1498Szrj }
89338fd1498Szrj
89438fd1498Szrj return replaced;
89538fd1498Szrj }
89638fd1498Szrj
89738fd1498Szrj
89838fd1498Szrj /* Replace propagated values into all the arguments for PHI using the
89938fd1498Szrj values from PROP_VALUE. */
90038fd1498Szrj
90138fd1498Szrj bool
replace_phi_args_in(gphi * phi)90238fd1498Szrj substitute_and_fold_engine::replace_phi_args_in (gphi *phi)
90338fd1498Szrj {
90438fd1498Szrj size_t i;
90538fd1498Szrj bool replaced = false;
90638fd1498Szrj
90738fd1498Szrj if (dump_file && (dump_flags & TDF_DETAILS))
90838fd1498Szrj {
90938fd1498Szrj fprintf (dump_file, "Folding PHI node: ");
91038fd1498Szrj print_gimple_stmt (dump_file, phi, 0, TDF_SLIM);
91138fd1498Szrj }
91238fd1498Szrj
91338fd1498Szrj for (i = 0; i < gimple_phi_num_args (phi); i++)
91438fd1498Szrj {
91538fd1498Szrj tree arg = gimple_phi_arg_def (phi, i);
91638fd1498Szrj
91738fd1498Szrj if (TREE_CODE (arg) == SSA_NAME)
91838fd1498Szrj {
91938fd1498Szrj tree val = get_value (arg);
92038fd1498Szrj
92138fd1498Szrj if (val && val != arg && may_propagate_copy (arg, val))
92238fd1498Szrj {
92338fd1498Szrj edge e = gimple_phi_arg_edge (phi, i);
92438fd1498Szrj
92538fd1498Szrj if (TREE_CODE (val) != SSA_NAME)
92638fd1498Szrj prop_stats.num_const_prop++;
92738fd1498Szrj else
92838fd1498Szrj prop_stats.num_copy_prop++;
92938fd1498Szrj
93038fd1498Szrj propagate_value (PHI_ARG_DEF_PTR (phi, i), val);
93138fd1498Szrj replaced = true;
93238fd1498Szrj
93338fd1498Szrj /* If we propagated a copy and this argument flows
93438fd1498Szrj through an abnormal edge, update the replacement
93538fd1498Szrj accordingly. */
93638fd1498Szrj if (TREE_CODE (val) == SSA_NAME
93738fd1498Szrj && e->flags & EDGE_ABNORMAL
93838fd1498Szrj && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val))
93938fd1498Szrj {
94038fd1498Szrj /* This can only occur for virtual operands, since
94138fd1498Szrj for the real ones SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val))
94238fd1498Szrj would prevent replacement. */
94338fd1498Szrj gcc_checking_assert (virtual_operand_p (val));
94438fd1498Szrj SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val) = 1;
94538fd1498Szrj }
94638fd1498Szrj }
94738fd1498Szrj }
94838fd1498Szrj }
94938fd1498Szrj
95038fd1498Szrj if (dump_file && (dump_flags & TDF_DETAILS))
95138fd1498Szrj {
95238fd1498Szrj if (!replaced)
95338fd1498Szrj fprintf (dump_file, "No folding possible\n");
95438fd1498Szrj else
95538fd1498Szrj {
95638fd1498Szrj fprintf (dump_file, "Folded into: ");
95738fd1498Szrj print_gimple_stmt (dump_file, phi, 0, TDF_SLIM);
95838fd1498Szrj fprintf (dump_file, "\n");
95938fd1498Szrj }
96038fd1498Szrj }
96138fd1498Szrj
96238fd1498Szrj return replaced;
96338fd1498Szrj }
96438fd1498Szrj
96538fd1498Szrj
96638fd1498Szrj class substitute_and_fold_dom_walker : public dom_walker
96738fd1498Szrj {
96838fd1498Szrj public:
substitute_and_fold_dom_walker(cdi_direction direction,class substitute_and_fold_engine * engine)96938fd1498Szrj substitute_and_fold_dom_walker (cdi_direction direction,
97038fd1498Szrj class substitute_and_fold_engine *engine)
97138fd1498Szrj : dom_walker (direction),
97238fd1498Szrj something_changed (false),
97338fd1498Szrj substitute_and_fold_engine (engine)
97438fd1498Szrj {
97538fd1498Szrj stmts_to_remove.create (0);
97638fd1498Szrj stmts_to_fixup.create (0);
97738fd1498Szrj need_eh_cleanup = BITMAP_ALLOC (NULL);
97838fd1498Szrj }
~substitute_and_fold_dom_walker()97938fd1498Szrj ~substitute_and_fold_dom_walker ()
98038fd1498Szrj {
98138fd1498Szrj stmts_to_remove.release ();
98238fd1498Szrj stmts_to_fixup.release ();
98338fd1498Szrj BITMAP_FREE (need_eh_cleanup);
98438fd1498Szrj }
98538fd1498Szrj
98638fd1498Szrj virtual edge before_dom_children (basic_block);
after_dom_children(basic_block)98738fd1498Szrj virtual void after_dom_children (basic_block) {}
98838fd1498Szrj
98938fd1498Szrj bool something_changed;
99038fd1498Szrj vec<gimple *> stmts_to_remove;
99138fd1498Szrj vec<gimple *> stmts_to_fixup;
99238fd1498Szrj bitmap need_eh_cleanup;
99338fd1498Szrj
99438fd1498Szrj class substitute_and_fold_engine *substitute_and_fold_engine;
99538fd1498Szrj };
99638fd1498Szrj
99738fd1498Szrj edge
before_dom_children(basic_block bb)99838fd1498Szrj substitute_and_fold_dom_walker::before_dom_children (basic_block bb)
99938fd1498Szrj {
100038fd1498Szrj /* Propagate known values into PHI nodes. */
100138fd1498Szrj for (gphi_iterator i = gsi_start_phis (bb);
100238fd1498Szrj !gsi_end_p (i);
100338fd1498Szrj gsi_next (&i))
100438fd1498Szrj {
100538fd1498Szrj gphi *phi = i.phi ();
100638fd1498Szrj tree res = gimple_phi_result (phi);
100738fd1498Szrj if (virtual_operand_p (res))
100838fd1498Szrj continue;
100938fd1498Szrj if (res && TREE_CODE (res) == SSA_NAME)
101038fd1498Szrj {
101138fd1498Szrj tree sprime = substitute_and_fold_engine->get_value (res);
101238fd1498Szrj if (sprime
101338fd1498Szrj && sprime != res
101438fd1498Szrj && may_propagate_copy (res, sprime))
101538fd1498Szrj {
101638fd1498Szrj stmts_to_remove.safe_push (phi);
101738fd1498Szrj continue;
101838fd1498Szrj }
101938fd1498Szrj }
102038fd1498Szrj something_changed |= substitute_and_fold_engine->replace_phi_args_in (phi);
102138fd1498Szrj }
102238fd1498Szrj
102338fd1498Szrj /* Propagate known values into stmts. In some case it exposes
102438fd1498Szrj more trivially deletable stmts to walk backward. */
102538fd1498Szrj for (gimple_stmt_iterator i = gsi_start_bb (bb);
102638fd1498Szrj !gsi_end_p (i);
102738fd1498Szrj gsi_next (&i))
102838fd1498Szrj {
102938fd1498Szrj bool did_replace;
103038fd1498Szrj gimple *stmt = gsi_stmt (i);
103138fd1498Szrj
103238fd1498Szrj /* No point propagating into a stmt we have a value for we
103338fd1498Szrj can propagate into all uses. Mark it for removal instead. */
103438fd1498Szrj tree lhs = gimple_get_lhs (stmt);
103538fd1498Szrj if (lhs && TREE_CODE (lhs) == SSA_NAME)
103638fd1498Szrj {
103738fd1498Szrj tree sprime = substitute_and_fold_engine->get_value (lhs);
103838fd1498Szrj if (sprime
103938fd1498Szrj && sprime != lhs
104038fd1498Szrj && may_propagate_copy (lhs, sprime)
104138fd1498Szrj && !stmt_could_throw_p (stmt)
104238fd1498Szrj && !gimple_has_side_effects (stmt)
104338fd1498Szrj /* We have to leave ASSERT_EXPRs around for jump-threading. */
104438fd1498Szrj && (!is_gimple_assign (stmt)
104538fd1498Szrj || gimple_assign_rhs_code (stmt) != ASSERT_EXPR))
104638fd1498Szrj {
104738fd1498Szrj stmts_to_remove.safe_push (stmt);
104838fd1498Szrj continue;
104938fd1498Szrj }
105038fd1498Szrj }
105138fd1498Szrj
105238fd1498Szrj /* Replace the statement with its folded version and mark it
105338fd1498Szrj folded. */
105438fd1498Szrj did_replace = false;
105538fd1498Szrj if (dump_file && (dump_flags & TDF_DETAILS))
105638fd1498Szrj {
105738fd1498Szrj fprintf (dump_file, "Folding statement: ");
105838fd1498Szrj print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
105938fd1498Szrj }
106038fd1498Szrj
106138fd1498Szrj gimple *old_stmt = stmt;
106238fd1498Szrj bool was_noreturn = (is_gimple_call (stmt)
106338fd1498Szrj && gimple_call_noreturn_p (stmt));
106438fd1498Szrj
106538fd1498Szrj /* Replace real uses in the statement. */
106638fd1498Szrj did_replace |= substitute_and_fold_engine->replace_uses_in (stmt);
106738fd1498Szrj
106838fd1498Szrj /* If we made a replacement, fold the statement. */
106938fd1498Szrj if (did_replace)
107038fd1498Szrj {
107138fd1498Szrj fold_stmt (&i, follow_single_use_edges);
107238fd1498Szrj stmt = gsi_stmt (i);
107338fd1498Szrj gimple_set_modified (stmt, true);
107438fd1498Szrj }
107538fd1498Szrj
107638fd1498Szrj /* Some statements may be simplified using propagator
107738fd1498Szrj specific information. Do this before propagating
107838fd1498Szrj into the stmt to not disturb pass specific information. */
107938fd1498Szrj update_stmt_if_modified (stmt);
108038fd1498Szrj if (substitute_and_fold_engine->fold_stmt(&i))
108138fd1498Szrj {
108238fd1498Szrj did_replace = true;
108338fd1498Szrj prop_stats.num_stmts_folded++;
108438fd1498Szrj stmt = gsi_stmt (i);
108538fd1498Szrj gimple_set_modified (stmt, true);
108638fd1498Szrj }
108738fd1498Szrj
108838fd1498Szrj /* If this is a control statement the propagator left edges
108938fd1498Szrj unexecuted on force the condition in a way consistent with
109038fd1498Szrj that. See PR66945 for cases where the propagator can end
109138fd1498Szrj up with a different idea of a taken edge than folding
109238fd1498Szrj (once undefined behavior is involved). */
109338fd1498Szrj if (gimple_code (stmt) == GIMPLE_COND)
109438fd1498Szrj {
109538fd1498Szrj if ((EDGE_SUCC (bb, 0)->flags & EDGE_EXECUTABLE)
109638fd1498Szrj ^ (EDGE_SUCC (bb, 1)->flags & EDGE_EXECUTABLE))
109738fd1498Szrj {
109838fd1498Szrj if (((EDGE_SUCC (bb, 0)->flags & EDGE_TRUE_VALUE) != 0)
109938fd1498Szrj == ((EDGE_SUCC (bb, 0)->flags & EDGE_EXECUTABLE) != 0))
110038fd1498Szrj gimple_cond_make_true (as_a <gcond *> (stmt));
110138fd1498Szrj else
110238fd1498Szrj gimple_cond_make_false (as_a <gcond *> (stmt));
110338fd1498Szrj gimple_set_modified (stmt, true);
110438fd1498Szrj did_replace = true;
110538fd1498Szrj }
110638fd1498Szrj }
110738fd1498Szrj
110838fd1498Szrj /* Now cleanup. */
110938fd1498Szrj if (did_replace)
111038fd1498Szrj {
111138fd1498Szrj /* If we cleaned up EH information from the statement,
111238fd1498Szrj remove EH edges. */
111338fd1498Szrj if (maybe_clean_or_replace_eh_stmt (old_stmt, stmt))
111438fd1498Szrj bitmap_set_bit (need_eh_cleanup, bb->index);
111538fd1498Szrj
111638fd1498Szrj /* If we turned a not noreturn call into a noreturn one
111738fd1498Szrj schedule it for fixup. */
111838fd1498Szrj if (!was_noreturn
111938fd1498Szrj && is_gimple_call (stmt)
112038fd1498Szrj && gimple_call_noreturn_p (stmt))
112138fd1498Szrj stmts_to_fixup.safe_push (stmt);
112238fd1498Szrj
112338fd1498Szrj if (gimple_assign_single_p (stmt))
112438fd1498Szrj {
112538fd1498Szrj tree rhs = gimple_assign_rhs1 (stmt);
112638fd1498Szrj
112738fd1498Szrj if (TREE_CODE (rhs) == ADDR_EXPR)
112838fd1498Szrj recompute_tree_invariant_for_addr_expr (rhs);
112938fd1498Szrj }
113038fd1498Szrj
113138fd1498Szrj /* Determine what needs to be done to update the SSA form. */
113238fd1498Szrj update_stmt_if_modified (stmt);
113338fd1498Szrj if (!is_gimple_debug (stmt))
113438fd1498Szrj something_changed = true;
113538fd1498Szrj }
113638fd1498Szrj
113738fd1498Szrj if (dump_file && (dump_flags & TDF_DETAILS))
113838fd1498Szrj {
113938fd1498Szrj if (did_replace)
114038fd1498Szrj {
114138fd1498Szrj fprintf (dump_file, "Folded into: ");
114238fd1498Szrj print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
114338fd1498Szrj fprintf (dump_file, "\n");
114438fd1498Szrj }
114538fd1498Szrj else
114638fd1498Szrj fprintf (dump_file, "Not folded\n");
114738fd1498Szrj }
114838fd1498Szrj }
114938fd1498Szrj return NULL;
115038fd1498Szrj }
115138fd1498Szrj
115238fd1498Szrj
115338fd1498Szrj
115438fd1498Szrj /* Perform final substitution and folding of propagated values.
115538fd1498Szrj
115638fd1498Szrj PROP_VALUE[I] contains the single value that should be substituted
115738fd1498Szrj at every use of SSA name N_I. If PROP_VALUE is NULL, no values are
115838fd1498Szrj substituted.
115938fd1498Szrj
116038fd1498Szrj If FOLD_FN is non-NULL the function will be invoked on all statements
116138fd1498Szrj before propagating values for pass specific simplification.
116238fd1498Szrj
116338fd1498Szrj DO_DCE is true if trivially dead stmts can be removed.
116438fd1498Szrj
116538fd1498Szrj If DO_DCE is true, the statements within a BB are walked from
116638fd1498Szrj last to first element. Otherwise we scan from first to last element.
116738fd1498Szrj
116838fd1498Szrj Return TRUE when something changed. */
116938fd1498Szrj
117038fd1498Szrj bool
substitute_and_fold(void)117138fd1498Szrj substitute_and_fold_engine::substitute_and_fold (void)
117238fd1498Szrj {
117338fd1498Szrj if (dump_file && (dump_flags & TDF_DETAILS))
117438fd1498Szrj fprintf (dump_file, "\nSubstituting values and folding statements\n\n");
117538fd1498Szrj
117638fd1498Szrj memset (&prop_stats, 0, sizeof (prop_stats));
117738fd1498Szrj
117838fd1498Szrj calculate_dominance_info (CDI_DOMINATORS);
117938fd1498Szrj substitute_and_fold_dom_walker walker (CDI_DOMINATORS, this);
118038fd1498Szrj walker.walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
118138fd1498Szrj
118238fd1498Szrj /* We cannot remove stmts during the BB walk, especially not release
118338fd1498Szrj SSA names there as that destroys the lattice of our callers.
118438fd1498Szrj Remove stmts in reverse order to make debug stmt creation possible. */
118538fd1498Szrj while (!walker.stmts_to_remove.is_empty ())
118638fd1498Szrj {
118738fd1498Szrj gimple *stmt = walker.stmts_to_remove.pop ();
118838fd1498Szrj if (dump_file && dump_flags & TDF_DETAILS)
118938fd1498Szrj {
119038fd1498Szrj fprintf (dump_file, "Removing dead stmt ");
119138fd1498Szrj print_gimple_stmt (dump_file, stmt, 0);
119238fd1498Szrj fprintf (dump_file, "\n");
119338fd1498Szrj }
119438fd1498Szrj prop_stats.num_dce++;
119538fd1498Szrj gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
119638fd1498Szrj if (gimple_code (stmt) == GIMPLE_PHI)
119738fd1498Szrj remove_phi_node (&gsi, true);
119838fd1498Szrj else
119938fd1498Szrj {
120038fd1498Szrj unlink_stmt_vdef (stmt);
120138fd1498Szrj gsi_remove (&gsi, true);
120238fd1498Szrj release_defs (stmt);
120338fd1498Szrj }
120438fd1498Szrj }
120538fd1498Szrj
120638fd1498Szrj if (!bitmap_empty_p (walker.need_eh_cleanup))
120738fd1498Szrj gimple_purge_all_dead_eh_edges (walker.need_eh_cleanup);
120838fd1498Szrj
120938fd1498Szrj /* Fixup stmts that became noreturn calls. This may require splitting
121038fd1498Szrj blocks and thus isn't possible during the dominator walk. Do this
121138fd1498Szrj in reverse order so we don't inadvertedly remove a stmt we want to
121238fd1498Szrj fixup by visiting a dominating now noreturn call first. */
121338fd1498Szrj while (!walker.stmts_to_fixup.is_empty ())
121438fd1498Szrj {
121538fd1498Szrj gimple *stmt = walker.stmts_to_fixup.pop ();
121638fd1498Szrj if (dump_file && dump_flags & TDF_DETAILS)
121738fd1498Szrj {
121838fd1498Szrj fprintf (dump_file, "Fixing up noreturn call ");
121938fd1498Szrj print_gimple_stmt (dump_file, stmt, 0);
122038fd1498Szrj fprintf (dump_file, "\n");
122138fd1498Szrj }
122238fd1498Szrj fixup_noreturn_call (stmt);
122338fd1498Szrj }
122438fd1498Szrj
122538fd1498Szrj statistics_counter_event (cfun, "Constants propagated",
122638fd1498Szrj prop_stats.num_const_prop);
122738fd1498Szrj statistics_counter_event (cfun, "Copies propagated",
122838fd1498Szrj prop_stats.num_copy_prop);
122938fd1498Szrj statistics_counter_event (cfun, "Statements folded",
123038fd1498Szrj prop_stats.num_stmts_folded);
123138fd1498Szrj statistics_counter_event (cfun, "Statements deleted",
123238fd1498Szrj prop_stats.num_dce);
123338fd1498Szrj
123438fd1498Szrj return walker.something_changed;
123538fd1498Szrj }
123638fd1498Szrj
123738fd1498Szrj
123838fd1498Szrj /* Return true if we may propagate ORIG into DEST, false otherwise. */
123938fd1498Szrj
124038fd1498Szrj bool
may_propagate_copy(tree dest,tree orig)124138fd1498Szrj may_propagate_copy (tree dest, tree orig)
124238fd1498Szrj {
124338fd1498Szrj tree type_d = TREE_TYPE (dest);
124438fd1498Szrj tree type_o = TREE_TYPE (orig);
124538fd1498Szrj
124638fd1498Szrj /* If ORIG is a default definition which flows in from an abnormal edge
124738fd1498Szrj then the copy can be propagated. It is important that we do so to avoid
124838fd1498Szrj uninitialized copies. */
124938fd1498Szrj if (TREE_CODE (orig) == SSA_NAME
125038fd1498Szrj && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (orig)
125138fd1498Szrj && SSA_NAME_IS_DEFAULT_DEF (orig)
125238fd1498Szrj && (SSA_NAME_VAR (orig) == NULL_TREE
125338fd1498Szrj || TREE_CODE (SSA_NAME_VAR (orig)) == VAR_DECL))
125438fd1498Szrj ;
125538fd1498Szrj /* Otherwise if ORIG just flows in from an abnormal edge then the copy cannot
125638fd1498Szrj be propagated. */
125738fd1498Szrj else if (TREE_CODE (orig) == SSA_NAME
125838fd1498Szrj && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (orig))
125938fd1498Szrj return false;
126038fd1498Szrj /* Similarly if DEST flows in from an abnormal edge then the copy cannot be
126138fd1498Szrj propagated. */
126238fd1498Szrj else if (TREE_CODE (dest) == SSA_NAME
126338fd1498Szrj && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (dest))
126438fd1498Szrj return false;
126538fd1498Szrj
126638fd1498Szrj /* Do not copy between types for which we *do* need a conversion. */
126738fd1498Szrj if (!useless_type_conversion_p (type_d, type_o))
126838fd1498Szrj return false;
126938fd1498Szrj
127038fd1498Szrj /* Generally propagating virtual operands is not ok as that may
127138fd1498Szrj create overlapping life-ranges. */
127238fd1498Szrj if (TREE_CODE (dest) == SSA_NAME && virtual_operand_p (dest))
127338fd1498Szrj return false;
127438fd1498Szrj
127538fd1498Szrj /* Anything else is OK. */
127638fd1498Szrj return true;
127738fd1498Szrj }
127838fd1498Szrj
127938fd1498Szrj /* Like may_propagate_copy, but use as the destination expression
128038fd1498Szrj the principal expression (typically, the RHS) contained in
128138fd1498Szrj statement DEST. This is more efficient when working with the
128238fd1498Szrj gimple tuples representation. */
128338fd1498Szrj
128438fd1498Szrj bool
may_propagate_copy_into_stmt(gimple * dest,tree orig)128538fd1498Szrj may_propagate_copy_into_stmt (gimple *dest, tree orig)
128638fd1498Szrj {
128738fd1498Szrj tree type_d;
128838fd1498Szrj tree type_o;
128938fd1498Szrj
129038fd1498Szrj /* If the statement is a switch or a single-rhs assignment,
129138fd1498Szrj then the expression to be replaced by the propagation may
129238fd1498Szrj be an SSA_NAME. Fortunately, there is an explicit tree
129338fd1498Szrj for the expression, so we delegate to may_propagate_copy. */
129438fd1498Szrj
129538fd1498Szrj if (gimple_assign_single_p (dest))
129638fd1498Szrj return may_propagate_copy (gimple_assign_rhs1 (dest), orig);
129738fd1498Szrj else if (gswitch *dest_swtch = dyn_cast <gswitch *> (dest))
129838fd1498Szrj return may_propagate_copy (gimple_switch_index (dest_swtch), orig);
129938fd1498Szrj
130038fd1498Szrj /* In other cases, the expression is not materialized, so there
130138fd1498Szrj is no destination to pass to may_propagate_copy. On the other
130238fd1498Szrj hand, the expression cannot be an SSA_NAME, so the analysis
130338fd1498Szrj is much simpler. */
130438fd1498Szrj
130538fd1498Szrj if (TREE_CODE (orig) == SSA_NAME
130638fd1498Szrj && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (orig))
130738fd1498Szrj return false;
130838fd1498Szrj
130938fd1498Szrj if (is_gimple_assign (dest))
131038fd1498Szrj type_d = TREE_TYPE (gimple_assign_lhs (dest));
131138fd1498Szrj else if (gimple_code (dest) == GIMPLE_COND)
131238fd1498Szrj type_d = boolean_type_node;
131338fd1498Szrj else if (is_gimple_call (dest)
131438fd1498Szrj && gimple_call_lhs (dest) != NULL_TREE)
131538fd1498Szrj type_d = TREE_TYPE (gimple_call_lhs (dest));
131638fd1498Szrj else
131738fd1498Szrj gcc_unreachable ();
131838fd1498Szrj
131938fd1498Szrj type_o = TREE_TYPE (orig);
132038fd1498Szrj
132138fd1498Szrj if (!useless_type_conversion_p (type_d, type_o))
132238fd1498Szrj return false;
132338fd1498Szrj
132438fd1498Szrj return true;
132538fd1498Szrj }
132638fd1498Szrj
132738fd1498Szrj /* Similarly, but we know that we're propagating into an ASM_EXPR. */
132838fd1498Szrj
132938fd1498Szrj bool
may_propagate_copy_into_asm(tree dest ATTRIBUTE_UNUSED)133038fd1498Szrj may_propagate_copy_into_asm (tree dest ATTRIBUTE_UNUSED)
133138fd1498Szrj {
133238fd1498Szrj return true;
133338fd1498Szrj }
133438fd1498Szrj
133538fd1498Szrj
133638fd1498Szrj /* Common code for propagate_value and replace_exp.
133738fd1498Szrj
133838fd1498Szrj Replace use operand OP_P with VAL. FOR_PROPAGATION indicates if the
133938fd1498Szrj replacement is done to propagate a value or not. */
134038fd1498Szrj
134138fd1498Szrj static void
replace_exp_1(use_operand_p op_p,tree val,bool for_propagation ATTRIBUTE_UNUSED)134238fd1498Szrj replace_exp_1 (use_operand_p op_p, tree val,
134338fd1498Szrj bool for_propagation ATTRIBUTE_UNUSED)
134438fd1498Szrj {
134538fd1498Szrj if (flag_checking)
134638fd1498Szrj {
134738fd1498Szrj tree op = USE_FROM_PTR (op_p);
134838fd1498Szrj gcc_assert (!(for_propagation
134938fd1498Szrj && TREE_CODE (op) == SSA_NAME
135038fd1498Szrj && TREE_CODE (val) == SSA_NAME
135138fd1498Szrj && !may_propagate_copy (op, val)));
135238fd1498Szrj }
135338fd1498Szrj
135438fd1498Szrj if (TREE_CODE (val) == SSA_NAME)
135538fd1498Szrj SET_USE (op_p, val);
135638fd1498Szrj else
135738fd1498Szrj SET_USE (op_p, unshare_expr (val));
135838fd1498Szrj }
135938fd1498Szrj
136038fd1498Szrj
136138fd1498Szrj /* Propagate the value VAL (assumed to be a constant or another SSA_NAME)
136238fd1498Szrj into the operand pointed to by OP_P.
136338fd1498Szrj
136438fd1498Szrj Use this version for const/copy propagation as it will perform additional
136538fd1498Szrj checks to ensure validity of the const/copy propagation. */
136638fd1498Szrj
136738fd1498Szrj void
propagate_value(use_operand_p op_p,tree val)136838fd1498Szrj propagate_value (use_operand_p op_p, tree val)
136938fd1498Szrj {
137038fd1498Szrj replace_exp_1 (op_p, val, true);
137138fd1498Szrj }
137238fd1498Szrj
137338fd1498Szrj /* Replace *OP_P with value VAL (assumed to be a constant or another SSA_NAME).
137438fd1498Szrj
137538fd1498Szrj Use this version when not const/copy propagating values. For example,
137638fd1498Szrj PRE uses this version when building expressions as they would appear
137738fd1498Szrj in specific blocks taking into account actions of PHI nodes.
137838fd1498Szrj
137938fd1498Szrj The statement in which an expression has been replaced should be
138038fd1498Szrj folded using fold_stmt_inplace. */
138138fd1498Szrj
138238fd1498Szrj void
replace_exp(use_operand_p op_p,tree val)138338fd1498Szrj replace_exp (use_operand_p op_p, tree val)
138438fd1498Szrj {
138538fd1498Szrj replace_exp_1 (op_p, val, false);
138638fd1498Szrj }
138738fd1498Szrj
138838fd1498Szrj
138938fd1498Szrj /* Propagate the value VAL (assumed to be a constant or another SSA_NAME)
139038fd1498Szrj into the tree pointed to by OP_P.
139138fd1498Szrj
139238fd1498Szrj Use this version for const/copy propagation when SSA operands are not
139338fd1498Szrj available. It will perform the additional checks to ensure validity of
139438fd1498Szrj the const/copy propagation, but will not update any operand information.
139538fd1498Szrj Be sure to mark the stmt as modified. */
139638fd1498Szrj
139738fd1498Szrj void
propagate_tree_value(tree * op_p,tree val)139838fd1498Szrj propagate_tree_value (tree *op_p, tree val)
139938fd1498Szrj {
140038fd1498Szrj if (TREE_CODE (val) == SSA_NAME)
140138fd1498Szrj *op_p = val;
140238fd1498Szrj else
140338fd1498Szrj *op_p = unshare_expr (val);
140438fd1498Szrj }
140538fd1498Szrj
140638fd1498Szrj
140738fd1498Szrj /* Like propagate_tree_value, but use as the operand to replace
140838fd1498Szrj the principal expression (typically, the RHS) contained in the
140938fd1498Szrj statement referenced by iterator GSI. Note that it is not
141038fd1498Szrj always possible to update the statement in-place, so a new
141138fd1498Szrj statement may be created to replace the original. */
141238fd1498Szrj
141338fd1498Szrj void
propagate_tree_value_into_stmt(gimple_stmt_iterator * gsi,tree val)141438fd1498Szrj propagate_tree_value_into_stmt (gimple_stmt_iterator *gsi, tree val)
141538fd1498Szrj {
141638fd1498Szrj gimple *stmt = gsi_stmt (*gsi);
141738fd1498Szrj
141838fd1498Szrj if (is_gimple_assign (stmt))
141938fd1498Szrj {
142038fd1498Szrj tree expr = NULL_TREE;
142138fd1498Szrj if (gimple_assign_single_p (stmt))
142238fd1498Szrj expr = gimple_assign_rhs1 (stmt);
142338fd1498Szrj propagate_tree_value (&expr, val);
142438fd1498Szrj gimple_assign_set_rhs_from_tree (gsi, expr);
142538fd1498Szrj }
142638fd1498Szrj else if (gcond *cond_stmt = dyn_cast <gcond *> (stmt))
142738fd1498Szrj {
142838fd1498Szrj tree lhs = NULL_TREE;
142938fd1498Szrj tree rhs = build_zero_cst (TREE_TYPE (val));
143038fd1498Szrj propagate_tree_value (&lhs, val);
143138fd1498Szrj gimple_cond_set_code (cond_stmt, NE_EXPR);
143238fd1498Szrj gimple_cond_set_lhs (cond_stmt, lhs);
143338fd1498Szrj gimple_cond_set_rhs (cond_stmt, rhs);
143438fd1498Szrj }
143538fd1498Szrj else if (is_gimple_call (stmt)
143638fd1498Szrj && gimple_call_lhs (stmt) != NULL_TREE)
143738fd1498Szrj {
143838fd1498Szrj tree expr = NULL_TREE;
143938fd1498Szrj bool res;
144038fd1498Szrj propagate_tree_value (&expr, val);
144138fd1498Szrj res = update_call_from_tree (gsi, expr);
144238fd1498Szrj gcc_assert (res);
144338fd1498Szrj }
144438fd1498Szrj else if (gswitch *swtch_stmt = dyn_cast <gswitch *> (stmt))
144538fd1498Szrj propagate_tree_value (gimple_switch_index_ptr (swtch_stmt), val);
144638fd1498Szrj else
144738fd1498Szrj gcc_unreachable ();
144838fd1498Szrj }
1449