/* Tail merging for gimple.
   Copyright (C) 2011-2020 Free Software Foundation, Inc.
   Contributed by Tom de Vries (tom@codesourcery.com)

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* Pass overview.


   MOTIVATIONAL EXAMPLE

   gimple representation of gcc/testsuite/gcc.dg/pr43864.c at

   hprofStartupp (charD.1 * outputFileNameD.2600, charD.1 * ctxD.2601)
   {
     struct FILED.1638 * fpD.2605;
     charD.1 fileNameD.2604[1000];
     intD.0 D.3915;
     const charD.1 * restrict outputFileName.0D.3914;

     # BLOCK 2 freq:10000
     # PRED: ENTRY [100.0%] (fallthru,exec)
     # PT = nonlocal { D.3926 } (restr)
     outputFileName.0D.3914_3
       = (const charD.1 * restrict) outputFileNameD.2600_2(D);
     # .MEMD.3923_13 = VDEF <.MEMD.3923_12(D)>
     # USE = nonlocal null { fileNameD.2604 D.3926 } (restr)
     # CLB = nonlocal null { fileNameD.2604 D.3926 } (restr)
     sprintfD.759 (&fileNameD.2604, outputFileName.0D.3914_3);
     # .MEMD.3923_14 = VDEF <.MEMD.3923_13>
     # USE = nonlocal null { fileNameD.2604 D.3926 } (restr)
     # CLB = nonlocal null { fileNameD.2604 D.3926 } (restr)
     D.3915_4 = accessD.2606 (&fileNameD.2604, 1);
     if (D.3915_4 == 0)
       goto <bb 3>;
     else
       goto <bb 4>;
     # SUCC: 3 [10.0%] (true,exec) 4 [90.0%] (false,exec)

     # BLOCK 3 freq:1000
     # PRED: 2 [10.0%] (true,exec)
     # .MEMD.3923_15 = VDEF <.MEMD.3923_14>
     # USE = nonlocal null { fileNameD.2604 D.3926 } (restr)
     # CLB = nonlocal null { fileNameD.2604 D.3926 } (restr)
     freeD.898 (ctxD.2601_5(D));
     goto <bb 7>;
     # SUCC: 7 [100.0%] (fallthru,exec)

     # BLOCK 4 freq:9000
     # PRED: 2 [90.0%] (false,exec)
     # .MEMD.3923_16 = VDEF <.MEMD.3923_14>
     # PT = nonlocal escaped
     # USE = nonlocal null { fileNameD.2604 D.3926 } (restr)
     # CLB = nonlocal null { fileNameD.2604 D.3926 } (restr)
     fpD.2605_8 = fopenD.1805 (&fileNameD.2604[0], 0B);
     if (fpD.2605_8 == 0B)
       goto <bb 5>;
     else
       goto <bb 6>;
     # SUCC: 5 [1.9%] (true,exec) 6 [98.1%] (false,exec)

     # BLOCK 5 freq:173
     # PRED: 4 [1.9%] (true,exec)
     # .MEMD.3923_17 = VDEF <.MEMD.3923_16>
     # USE = nonlocal null { fileNameD.2604 D.3926 } (restr)
     # CLB = nonlocal null { fileNameD.2604 D.3926 } (restr)
     freeD.898 (ctxD.2601_5(D));
     goto <bb 7>;
     # SUCC: 7 [100.0%] (fallthru,exec)

     # BLOCK 6 freq:8827
     # PRED: 4 [98.1%] (false,exec)
     # .MEMD.3923_18 = VDEF <.MEMD.3923_16>
     # USE = nonlocal null { fileNameD.2604 D.3926 } (restr)
     # CLB = nonlocal null { fileNameD.2604 D.3926 } (restr)
     fooD.2599 (outputFileNameD.2600_2(D), fpD.2605_8);
     # SUCC: 7 [100.0%] (fallthru,exec)

     # BLOCK 7 freq:10000
     # PRED: 3 [100.0%] (fallthru,exec) 5 [100.0%] (fallthru,exec)
             6 [100.0%] (fallthru,exec)
     # PT = nonlocal null

     # ctxD.2601_1 = PHI <0B(3), 0B(5), ctxD.2601_5(D)(6)>
     # .MEMD.3923_11 = PHI <.MEMD.3923_15(3), .MEMD.3923_17(5),
                            .MEMD.3923_18(6)>
     # VUSE <.MEMD.3923_11>
     return ctxD.2601_1;
     # SUCC: EXIT [100.0%]
   }

   bb 3 and bb 5 can be merged.  The blocks have different predecessors, but
   the same successors, and the same operations.


   CONTEXT

   A technique called tail merging (or cross jumping) can fix the example
   above.  For a block, we look for common code at the end (the tail) of the
   predecessor blocks, and insert jumps from one block to the other.
   The example is a special case for tail merging, in that 2 whole blocks
   can be merged, rather than just the end parts of them.
   We currently only focus on whole block merging, so in that sense
   calling this pass tail merge is a bit of a misnomer.

   We distinguish 2 kinds of situations in which blocks can be merged:
   - same operations, same predecessors.  The successor edges coming from one
     block are redirected to come from the other block.
   - same operations, same successors.  The predecessor edges entering one
     block are redirected to enter the other block.  Note that this operation
     might involve introducing phi operations (see the sketch below).
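
   As a simplified C-level sketch of the second situation (this is only an
   approximation of the example above, not the literal testcase source), the
   relevant shape is

     sprintf (fileName, outputFileName);
     if (access (fileName, 1) == 0)
       {
         free (ctx);
         return 0;
       }
     fp = fopen (fileName, 0);
     if (fp == 0)
       {
         free (ctx);
         return 0;
       }
     foo (outputFileName, fp);
     return ctx;

   Both then-blocks perform the same free call and flow to the same return
   block, so the incoming edge of one can be redirected to the other, leaving
   a single copy of the free call.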

   For efficient implementation, we would like to value number the blocks, and
   have a comparison operator that tells us whether the blocks are equal.
   Besides being runtime efficient, block value numbering should also abstract
   from irrelevant differences in order of operations, much like normal value
   numbering abstracts from irrelevant order of operations.

   For the first situation (same operations, same predecessors), normal value
   numbering fits well.  We can calculate a block value number based on the
   value numbers of the defs and vdefs.

   For the second situation (same operations, same successors), this approach
   doesn't work so well.  We can illustrate this using the example.  The calls
   to free use different vdefs: MEMD.3923_16 and MEMD.3923_14, and these will
   remain different in value numbering, since they represent different memory
   states.  So the resulting vdefs of the frees will be different in value
   numbering, so the block value numbers will be different.

   The reason why we call the blocks equal is not because they define the same
   values, but because uses in the blocks use (possibly different) defs in the
   same way.  To be able to detect this efficiently, we need to do some kind of
   reverse value numbering, meaning number the uses rather than the defs, and
   calculate a block value number based on the value number of the uses.
   Ideally, a block comparison operator will also indicate which phis are
   needed to merge the blocks.

   For the moment, we don't do block value numbering, but we do insn-by-insn
   matching, using scc value numbers to match operations with results, and
   structural comparison otherwise, while ignoring vop mismatches.

   IMPLEMENTATION

   1. The pass first determines all groups of blocks with the same successor
      blocks.
   2. Within each group, it tries to determine clusters of equal basic blocks.
   3. The clusters are applied.
   4. The same successor groups are updated.
   5. This process is repeated from 2 onwards, until no more changes occur
      (see the sketch below).
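
   In pseudocode, the driver loop looks roughly as follows.  This is only a
   sketch of the steps above, not the actual implementation; the
   angle-bracketed parts stand for work done by helpers defined further down
   in this file:

     init_worklist ();                                 (step 1)
     do
       {
         while (!worklist.is_empty ())
           {
             same_succ *group = worklist.pop ();
             group->in_worklist = false;
             <find clusters of equal bbs in group>;    (step 2)
           }
         <apply the clusters, removing duplicate bbs>; (step 3)
         update_worklist ();                           (step 4)
       }
     while (<any bbs were removed>);                   (step 5)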


   LIMITATIONS/TODO

   - whole blocks only.
   - handles only 'same operations, same successors'.
     It handles same predecessors as a special subcase though.
   - does not implement the reverse value numbering and block value numbering.
   - improve memory allocation: use garbage collected memory, obstacks,
     allocpools where appropriate.
   - no insertion of gimple_reg phis; we only introduce vop-phis.
   - handle blocks with gimple_reg phi_nodes.


   PASS PLACEMENT
   This 'pass' is not a stand-alone gimple pass, but runs as part of
   pass_pre, in order to share the value numbering.


   SWITCHES

   - ftree-tail-merge.  On at -O2.  We may have to enable it only at -Os.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "tree.h"
#include "gimple.h"
#include "cfghooks.h"
#include "tree-pass.h"
#include "ssa.h"
#include "fold-const.h"
#include "trans-mem.h"
#include "cfganal.h"
#include "cfgcleanup.h"
#include "gimple-iterator.h"
#include "tree-cfg.h"
#include "tree-into-ssa.h"
#include "tree-ssa-sccvn.h"
#include "cfgloop.h"
#include "tree-eh.h"
#include "tree-cfgcleanup.h"

const int ignore_edge_flags = EDGE_DFS_BACK | EDGE_EXECUTABLE;

/* Describes a group of bbs with the same successors.  The successor bbs are
   cached in succs, and the successor edge flags are cached in succ_flags.
   If a bb has the EDGE_TRUE/FALSE_VALUE flags swapped compared to succ_flags,
   it's marked in inverse.
   Additionally, the hash value for the struct is cached in hashval, and
   in_worklist indicates whether it's currently part of the worklist.  */

struct same_succ : pointer_hash <same_succ>
{
  /* The bbs that have the same successor bbs.  */
  bitmap bbs;
  /* The successor bbs.  */
  bitmap succs;
  /* Indicates whether the EDGE_TRUE/FALSE_VALUEs of succ_flags are swapped for
     bb.  */
  bitmap inverse;
  /* The edge flags for each of the successor bbs.  */
  vec<int> succ_flags;
  /* Indicates whether the struct is currently in the worklist.  */
  bool in_worklist;
  /* The hash value of the struct.  */
  hashval_t hashval;

  /* hash_table support.  */
  static inline hashval_t hash (const same_succ *);
  static int equal (const same_succ *, const same_succ *);
  static void remove (same_succ *);
};

/* hash routine for hash_table support, returns hashval of E.  */

inline hashval_t
same_succ::hash (const same_succ *e)
{
  return e->hashval;
}

/* A group of bbs where 1 bb from bbs can replace the other bbs.  */

struct bb_cluster
{
  /* The bbs in the cluster.  */
  bitmap bbs;
  /* The preds of the bbs in the cluster.  */
  bitmap preds;
  /* Index in all_clusters vector.  */
  int index;
  /* The bb to replace the cluster with.  */
  basic_block rep_bb;
};

/* Per bb-info.  */

struct aux_bb_info
{
  /* The number of non-debug statements in the bb.  */
  int size;
  /* The same_succ that this bb is a member of.  */
  same_succ *bb_same_succ;
  /* The cluster that this bb is a member of.  */
  bb_cluster *cluster;
  /* The vop state at the exit of a bb.  This is shortlived data, used to
     communicate data between update_block_by and update_vuses.  */
  tree vop_at_exit;
  /* The bb that either contains or is dominated by the dependencies of the
     bb.  */
  basic_block dep_bb;
};

/* Macros to access the fields of struct aux_bb_info.  */

#define BB_SIZE(bb) (((struct aux_bb_info *)bb->aux)->size)
#define BB_SAME_SUCC(bb) (((struct aux_bb_info *)bb->aux)->bb_same_succ)
#define BB_CLUSTER(bb) (((struct aux_bb_info *)bb->aux)->cluster)
#define BB_VOP_AT_EXIT(bb) (((struct aux_bb_info *)bb->aux)->vop_at_exit)
#define BB_DEP_BB(bb) (((struct aux_bb_info *)bb->aux)->dep_bb)

/* Valueization helper querying the VN lattice.  */

static tree
tail_merge_valueize (tree name)
{
  if (TREE_CODE (name) == SSA_NAME
      && has_VN_INFO (name))
    {
      tree tem = VN_INFO (name)->valnum;
      if (tem != VN_TOP)
        return tem;
    }
  return name;
}

/* Returns true if the only effect a statement STMT has, is to define locally
   used SSA_NAMEs.  */

static bool
stmt_local_def (gimple *stmt)
{
  basic_block bb, def_bb;
  imm_use_iterator iter;
  use_operand_p use_p;
  tree val;
  def_operand_p def_p;

  if (gimple_vdef (stmt) != NULL_TREE
      || gimple_has_side_effects (stmt)
      || gimple_could_trap_p_1 (stmt, false, false)
      || gimple_vuse (stmt) != NULL_TREE
      /* Copied from tree-ssa-ifcombine.c:bb_no_side_effects_p():
         const calls don't match any of the above, yet they could
         still have some side-effects - they could contain
         gimple_could_trap_p statements, like floating point
         exceptions or integer division by zero.  See PR70586.
         FIXME: perhaps gimple_has_side_effects or gimple_could_trap_p
         should handle this.  */
      || is_gimple_call (stmt))
    return false;

  def_p = SINGLE_SSA_DEF_OPERAND (stmt, SSA_OP_DEF);
  if (def_p == NULL)
    return false;

  val = DEF_FROM_PTR (def_p);
  if (val == NULL_TREE || TREE_CODE (val) != SSA_NAME)
    return false;

  def_bb = gimple_bb (stmt);

  FOR_EACH_IMM_USE_FAST (use_p, iter, val)
    {
      if (is_gimple_debug (USE_STMT (use_p)))
        continue;
      bb = gimple_bb (USE_STMT (use_p));
      if (bb == def_bb)
        continue;

      if (gimple_code (USE_STMT (use_p)) == GIMPLE_PHI
          && EDGE_PRED (bb, PHI_ARG_INDEX_FROM_USE (use_p))->src == def_bb)
        continue;

      return false;
    }

  return true;
}

/* Let GSI skip forwards over local defs.  */

static void
gsi_advance_fw_nondebug_nonlocal (gimple_stmt_iterator *gsi)
{
  gimple *stmt;

  while (true)
    {
      if (gsi_end_p (*gsi))
        return;
      stmt = gsi_stmt (*gsi);
      if (!stmt_local_def (stmt))
        return;
      gsi_next_nondebug (gsi);
    }
}

/* VAL1 and VAL2 are either:
   - uses in BB1 and BB2, or
   - phi alternatives for BB1 and BB2.
   Return true if the uses have the same gvn value.  */

static bool
gvn_uses_equal (tree val1, tree val2)
{
  gcc_checking_assert (val1 != NULL_TREE && val2 != NULL_TREE);

  if (val1 == val2)
    return true;

  if (tail_merge_valueize (val1) != tail_merge_valueize (val2))
    return false;

  return ((TREE_CODE (val1) == SSA_NAME || CONSTANT_CLASS_P (val1))
          && (TREE_CODE (val2) == SSA_NAME || CONSTANT_CLASS_P (val2)));
}

/* Prints E to FILE.  */

static void
same_succ_print (FILE *file, const same_succ *e)
{
  unsigned int i;
  bitmap_print (file, e->bbs, "bbs:", "\n");
  bitmap_print (file, e->succs, "succs:", "\n");
  bitmap_print (file, e->inverse, "inverse:", "\n");
  fprintf (file, "flags:");
  for (i = 0; i < e->succ_flags.length (); ++i)
    fprintf (file, " %x", e->succ_flags[i]);
  fprintf (file, "\n");
}

/* Prints same_succ *PE to FILE.  */

inline int
ssa_same_succ_print_traverse (same_succ **pe, FILE *file)
{
  const same_succ *e = *pe;
  same_succ_print (file, e);
  return 1;
}

/* Update BB_DEP_BB (USE_BB), given a use of VAL in USE_BB.  */

static void
update_dep_bb (basic_block use_bb, tree val)
{
  basic_block dep_bb;

  /* Not a dep.  */
  if (TREE_CODE (val) != SSA_NAME)
    return;

  /* Skip use of global def.  */
  if (SSA_NAME_IS_DEFAULT_DEF (val))
    return;

  /* Skip use of local def.  */
  dep_bb = gimple_bb (SSA_NAME_DEF_STMT (val));
  if (dep_bb == use_bb)
    return;

  if (BB_DEP_BB (use_bb) == NULL
      || dominated_by_p (CDI_DOMINATORS, dep_bb, BB_DEP_BB (use_bb)))
    BB_DEP_BB (use_bb) = dep_bb;
}

/* Update BB_DEP_BB, given the dependencies in STMT.  */

static void
stmt_update_dep_bb (gimple *stmt)
{
  ssa_op_iter iter;
  use_operand_p use;

  FOR_EACH_SSA_USE_OPERAND (use, stmt, iter, SSA_OP_USE)
    update_dep_bb (gimple_bb (stmt), USE_FROM_PTR (use));
}

/* Calculates hash value for same_succ E.  */

static hashval_t
same_succ_hash (const same_succ *e)
{
  inchash::hash hstate (bitmap_hash (e->succs));
  int flags;
  unsigned int i;
  unsigned int first = bitmap_first_set_bit (e->bbs);
  basic_block bb = BASIC_BLOCK_FOR_FN (cfun, first);
  int size = 0;
  gimple *stmt;
  tree arg;
  unsigned int s;
  bitmap_iterator bs;

  for (gimple_stmt_iterator gsi = gsi_start_nondebug_bb (bb);
       !gsi_end_p (gsi); gsi_next_nondebug (&gsi))
    {
      stmt = gsi_stmt (gsi);
      stmt_update_dep_bb (stmt);
      if (stmt_local_def (stmt))
        continue;
      size++;

      hstate.add_int (gimple_code (stmt));
      if (is_gimple_assign (stmt))
        hstate.add_int (gimple_assign_rhs_code (stmt));
      if (!is_gimple_call (stmt))
        continue;
      if (gimple_call_internal_p (stmt))
        hstate.add_int (gimple_call_internal_fn (stmt));
      else
        {
          inchash::add_expr (gimple_call_fn (stmt), hstate);
          if (gimple_call_chain (stmt))
            inchash::add_expr (gimple_call_chain (stmt), hstate);
        }
      for (i = 0; i < gimple_call_num_args (stmt); i++)
        {
          arg = gimple_call_arg (stmt, i);
          arg = tail_merge_valueize (arg);
          inchash::add_expr (arg, hstate);
        }
    }

  hstate.add_int (size);
  BB_SIZE (bb) = size;

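  /* Blocks in different loops are never considered equal (see
     same_succ::equal), so mix the containing loop into the hash as well.  */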
  hstate.add_int (bb->loop_father->num);

  for (i = 0; i < e->succ_flags.length (); ++i)
    {
      flags = e->succ_flags[i];
      flags = flags & ~(EDGE_TRUE_VALUE | EDGE_FALSE_VALUE);
      hstate.add_int (flags);
    }

  EXECUTE_IF_SET_IN_BITMAP (e->succs, 0, s, bs)
    {
      int n = find_edge (bb, BASIC_BLOCK_FOR_FN (cfun, s))->dest_idx;
      for (gphi_iterator gsi = gsi_start_phis (BASIC_BLOCK_FOR_FN (cfun, s));
           !gsi_end_p (gsi);
           gsi_next (&gsi))
        {
          gphi *phi = gsi.phi ();
          tree lhs = gimple_phi_result (phi);
          tree val = gimple_phi_arg_def (phi, n);

          if (virtual_operand_p (lhs))
            continue;
          update_dep_bb (bb, val);
        }
    }

  return hstate.end ();
}

/* Returns true if E1 and E2 have 2 successors, and if the successor flags
   are inverse for the EDGE_TRUE_VALUE and EDGE_FALSE_VALUE flags, and equal
   for the other edge flags.  */

static bool
inverse_flags (const same_succ *e1, const same_succ *e2)
{
  int f1a, f1b, f2a, f2b;
  int mask = ~(EDGE_TRUE_VALUE | EDGE_FALSE_VALUE);

  if (e1->succ_flags.length () != 2)
    return false;

  f1a = e1->succ_flags[0];
  f1b = e1->succ_flags[1];
  f2a = e2->succ_flags[0];
  f2b = e2->succ_flags[1];

  if (f1a == f2a && f1b == f2b)
    return false;

  return (f1a & mask) == (f2a & mask) && (f1b & mask) == (f2b & mask);
}

/* Compares SAME_SUCCs E1 and E2.  */

int
same_succ::equal (const same_succ *e1, const same_succ *e2)
{
  unsigned int i, first1, first2;
  gimple_stmt_iterator gsi1, gsi2;
  gimple *s1, *s2;
  basic_block bb1, bb2;

  if (e1 == e2)
    return 1;

  if (e1->hashval != e2->hashval)
    return 0;

  if (e1->succ_flags.length () != e2->succ_flags.length ())
    return 0;

  if (!bitmap_equal_p (e1->succs, e2->succs))
    return 0;

  if (!inverse_flags (e1, e2))
    {
      for (i = 0; i < e1->succ_flags.length (); ++i)
        if (e1->succ_flags[i] != e2->succ_flags[i])
          return 0;
    }

  first1 = bitmap_first_set_bit (e1->bbs);
  first2 = bitmap_first_set_bit (e2->bbs);

  bb1 = BASIC_BLOCK_FOR_FN (cfun, first1);
  bb2 = BASIC_BLOCK_FOR_FN (cfun, first2);

  if (BB_SIZE (bb1) != BB_SIZE (bb2))
    return 0;

  if (bb1->loop_father != bb2->loop_father)
    return 0;

  gsi1 = gsi_start_nondebug_bb (bb1);
  gsi2 = gsi_start_nondebug_bb (bb2);
  gsi_advance_fw_nondebug_nonlocal (&gsi1);
  gsi_advance_fw_nondebug_nonlocal (&gsi2);
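  /* Walk both blocks in lockstep over their non-debug, non-local-def
     statements and require matching gimple codes (and call targets).  */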
  while (!(gsi_end_p (gsi1) || gsi_end_p (gsi2)))
    {
      s1 = gsi_stmt (gsi1);
      s2 = gsi_stmt (gsi2);
      if (gimple_code (s1) != gimple_code (s2))
        return 0;
      if (is_gimple_call (s1) && !gimple_call_same_target_p (s1, s2))
        return 0;
      gsi_next_nondebug (&gsi1);
      gsi_next_nondebug (&gsi2);
      gsi_advance_fw_nondebug_nonlocal (&gsi1);
      gsi_advance_fw_nondebug_nonlocal (&gsi2);
    }

  return 1;
}

/* Alloc and init a new SAME_SUCC.  */

static same_succ *
same_succ_alloc (void)
{
  same_succ *same = XNEW (struct same_succ);

  same->bbs = BITMAP_ALLOC (NULL);
  same->succs = BITMAP_ALLOC (NULL);
  same->inverse = BITMAP_ALLOC (NULL);
  same->succ_flags.create (10);
  same->in_worklist = false;

  return same;
}

/* Delete same_succ E.  */

void
same_succ::remove (same_succ *e)
{
  BITMAP_FREE (e->bbs);
  BITMAP_FREE (e->succs);
  BITMAP_FREE (e->inverse);
  e->succ_flags.release ();

  XDELETE (e);
}

/* Reset same_succ SAME.  */

static void
same_succ_reset (same_succ *same)
{
  bitmap_clear (same->bbs);
  bitmap_clear (same->succs);
  bitmap_clear (same->inverse);
  same->succ_flags.truncate (0);
}

static hash_table<same_succ> *same_succ_htab;

/* Array that is used to store the edge flags for a successor.  */

static int *same_succ_edge_flags;

/* Bitmap that is used to mark bbs that are recently deleted.  */

static bitmap deleted_bbs;

/* Bitmap that is used to mark predecessors of bbs that are
   deleted.  */

static bitmap deleted_bb_preds;

/* Prints same_succ_htab to stderr.  */

extern void debug_same_succ (void);
DEBUG_FUNCTION void
debug_same_succ (void)
{
  same_succ_htab->traverse <FILE *, ssa_same_succ_print_traverse> (stderr);
}


/* Vector of bbs to process.  */

static vec<same_succ *> worklist;

/* Prints worklist to FILE.  */

static void
print_worklist (FILE *file)
{
  unsigned int i;
  for (i = 0; i < worklist.length (); ++i)
    same_succ_print (file, worklist[i]);
}

/* Adds SAME to worklist.  */

static void
add_to_worklist (same_succ *same)
{
  if (same->in_worklist)
    return;

  if (bitmap_count_bits (same->bbs) < 2)
    return;

  same->in_worklist = true;
  worklist.safe_push (same);
}

/* Add BB to same_succ_htab.  */

static void
find_same_succ_bb (basic_block bb, same_succ **same_p)
{
  unsigned int j;
  bitmap_iterator bj;
  same_succ *same = *same_p;
  same_succ **slot;
  edge_iterator ei;
  edge e;

  if (bb == NULL)
    return;
  bitmap_set_bit (same->bbs, bb->index);
  FOR_EACH_EDGE (e, ei, bb->succs)
    {
      int index = e->dest->index;
      bitmap_set_bit (same->succs, index);
      same_succ_edge_flags[index] = (e->flags & ~ignore_edge_flags);
    }
  EXECUTE_IF_SET_IN_BITMAP (same->succs, 0, j, bj)
    same->succ_flags.safe_push (same_succ_edge_flags[j]);

  same->hashval = same_succ_hash (same);

  slot = same_succ_htab->find_slot_with_hash (same, same->hashval, INSERT);
  if (*slot == NULL)
    {
      *slot = same;
      BB_SAME_SUCC (bb) = same;
      add_to_worklist (same);
      *same_p = NULL;
    }
  else
    {
      bitmap_set_bit ((*slot)->bbs, bb->index);
      BB_SAME_SUCC (bb) = *slot;
      add_to_worklist (*slot);
      if (inverse_flags (same, *slot))
        bitmap_set_bit ((*slot)->inverse, bb->index);
      same_succ_reset (same);
    }
}

/* Find bbs with same successors.  */

static void
find_same_succ (void)
{
  same_succ *same = same_succ_alloc ();
  basic_block bb;

  FOR_EACH_BB_FN (bb, cfun)
    {
      find_same_succ_bb (bb, &same);
      if (same == NULL)
        same = same_succ_alloc ();
    }

  same_succ::remove (same);
}

/* Initializes worklist administration.  */

static void
init_worklist (void)
{
  alloc_aux_for_blocks (sizeof (struct aux_bb_info));
  same_succ_htab = new hash_table<same_succ> (n_basic_blocks_for_fn (cfun));
  same_succ_edge_flags = XCNEWVEC (int, last_basic_block_for_fn (cfun));
  deleted_bbs = BITMAP_ALLOC (NULL);
  deleted_bb_preds = BITMAP_ALLOC (NULL);
  worklist.create (n_basic_blocks_for_fn (cfun));
  find_same_succ ();

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "initial worklist:\n");
      print_worklist (dump_file);
    }
}

/* Deletes worklist administration.  */

static void
delete_worklist (void)
{
  free_aux_for_blocks ();
  delete same_succ_htab;
  same_succ_htab = NULL;
  XDELETEVEC (same_succ_edge_flags);
  same_succ_edge_flags = NULL;
  BITMAP_FREE (deleted_bbs);
  BITMAP_FREE (deleted_bb_preds);
  worklist.release ();
}

/* Mark BB as deleted, and mark its predecessors.  */

static void
mark_basic_block_deleted (basic_block bb)
{
  edge e;
  edge_iterator ei;

  bitmap_set_bit (deleted_bbs, bb->index);

  FOR_EACH_EDGE (e, ei, bb->preds)
    bitmap_set_bit (deleted_bb_preds, e->src->index);
}

/* Removes BB from its corresponding same_succ.  */

static void
same_succ_flush_bb (basic_block bb)
{
  same_succ *same = BB_SAME_SUCC (bb);
  if (! same)
    return;

  BB_SAME_SUCC (bb) = NULL;
  if (bitmap_single_bit_set_p (same->bbs))
    same_succ_htab->remove_elt_with_hash (same, same->hashval);
  else
    bitmap_clear_bit (same->bbs, bb->index);
}

/* Removes all bbs in BBS from their corresponding same_succ.  */

static void
same_succ_flush_bbs (bitmap bbs)
{
  unsigned int i;
  bitmap_iterator bi;

  EXECUTE_IF_SET_IN_BITMAP (bbs, 0, i, bi)
    same_succ_flush_bb (BASIC_BLOCK_FOR_FN (cfun, i));
}

/* Release the last vdef in BB, either normal or phi result.  */

static void
release_last_vdef (basic_block bb)
{
  for (gimple_stmt_iterator i = gsi_last_bb (bb); !gsi_end_p (i);
       gsi_prev_nondebug (&i))
    {
      gimple *stmt = gsi_stmt (i);
      if (gimple_vdef (stmt) == NULL_TREE)
        continue;

      mark_virtual_operand_for_renaming (gimple_vdef (stmt));
      return;
    }

  for (gphi_iterator i = gsi_start_phis (bb); !gsi_end_p (i);
       gsi_next (&i))
    {
      gphi *phi = i.phi ();
      tree res = gimple_phi_result (phi);

      if (!virtual_operand_p (res))
        continue;

      mark_virtual_phi_result_for_renaming (phi);
      return;
    }
}

/* For deleted_bb_preds, find bbs with same successors.  */

static void
update_worklist (void)
{
  unsigned int i;
  bitmap_iterator bi;
  basic_block bb;
  same_succ *same;

  bitmap_and_compl_into (deleted_bb_preds, deleted_bbs);
  bitmap_clear (deleted_bbs);

  bitmap_clear_bit (deleted_bb_preds, ENTRY_BLOCK);
  same_succ_flush_bbs (deleted_bb_preds);

  same = same_succ_alloc ();
  EXECUTE_IF_SET_IN_BITMAP (deleted_bb_preds, 0, i, bi)
    {
      bb = BASIC_BLOCK_FOR_FN (cfun, i);
      gcc_assert (bb != NULL);
      find_same_succ_bb (bb, &same);
      if (same == NULL)
        same = same_succ_alloc ();
    }
  same_succ::remove (same);
  bitmap_clear (deleted_bb_preds);
}

/* Prints cluster C to FILE.  */

static void
print_cluster (FILE *file, bb_cluster *c)
{
  if (c == NULL)
    return;
  bitmap_print (file, c->bbs, "bbs:", "\n");
  bitmap_print (file, c->preds, "preds:", "\n");
}

/* Prints cluster C to stderr.  */

extern void debug_cluster (bb_cluster *);
DEBUG_FUNCTION void
debug_cluster (bb_cluster *c)
{
  print_cluster (stderr, c);
}

/* Update C->rep_bb, given that BB is added to the cluster.  */

static void
update_rep_bb (bb_cluster *c, basic_block bb)
{
  /* Initial.  */
  if (c->rep_bb == NULL)
    {
      c->rep_bb = bb;
      return;
    }

  /* Current needs no deps, keep it.  */
  if (BB_DEP_BB (c->rep_bb) == NULL)
    return;

  /* Bb needs no deps, change rep_bb.  */
  if (BB_DEP_BB (bb) == NULL)
    {
      c->rep_bb = bb;
      return;
    }

  /* Bb needs last deps earlier than current, change rep_bb.  A potential
     problem with this, is that the first deps might also be earlier, which
     would mean we prefer longer lifetimes for the deps.  To be able to check
     for this, we would have to trace BB_FIRST_DEP_BB as well, besides
     BB_DEP_BB, which is really BB_LAST_DEP_BB.
     The benefit of choosing the bb with last deps earlier, is that it can
     potentially be used as replacement for more bbs.  */
  if (dominated_by_p (CDI_DOMINATORS, BB_DEP_BB (c->rep_bb), BB_DEP_BB (bb)))
    c->rep_bb = bb;
}

/* Add BB to cluster C.  Sets BB in C->bbs, and preds of BB in C->preds.  */

static void
add_bb_to_cluster (bb_cluster *c, basic_block bb)
{
  edge e;
  edge_iterator ei;

  bitmap_set_bit (c->bbs, bb->index);

  FOR_EACH_EDGE (e, ei, bb->preds)
    bitmap_set_bit (c->preds, e->src->index);

  update_rep_bb (c, bb);
}

/* Allocate and init new cluster.  */

static bb_cluster *
new_cluster (void)
{
  bb_cluster *c;
  c = XCNEW (bb_cluster);
  c->bbs = BITMAP_ALLOC (NULL);
  c->preds = BITMAP_ALLOC (NULL);
  c->rep_bb = NULL;
  return c;
}

/* Delete clusters.  */

static void
delete_cluster (bb_cluster *c)
{
  if (c == NULL)
    return;
  BITMAP_FREE (c->bbs);
  BITMAP_FREE (c->preds);
  XDELETE (c);
}


/* Array that contains all clusters.  */

static vec<bb_cluster *> all_clusters;

/* Allocate all cluster vectors.  */

static void
alloc_cluster_vectors (void)
{
  all_clusters.create (n_basic_blocks_for_fn (cfun));
}

/* Reset all cluster vectors.  */

static void
reset_cluster_vectors (void)
{
  unsigned int i;
  basic_block bb;
  for (i = 0; i < all_clusters.length (); ++i)
    delete_cluster (all_clusters[i]);
  all_clusters.truncate (0);
  FOR_EACH_BB_FN (bb, cfun)
    BB_CLUSTER (bb) = NULL;
}

/* Delete all cluster vectors.  */

static void
delete_cluster_vectors (void)
{
  unsigned int i;
  for (i = 0; i < all_clusters.length (); ++i)
    delete_cluster (all_clusters[i]);
  all_clusters.release ();
}

/* Merge cluster C2 into C1.  */

static void
merge_clusters (bb_cluster *c1, bb_cluster *c2)
{
  bitmap_ior_into (c1->bbs, c2->bbs);
  bitmap_ior_into (c1->preds, c2->preds);
}

/* Register equivalence of BB1 and BB2 (members of cluster C).  Store c in
   all_clusters, or merge c with existing cluster.  */

static void
set_cluster (basic_block bb1, basic_block bb2)
{
  basic_block merge_bb, other_bb;
  bb_cluster *merge, *old, *c;

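  /* Three cases: neither bb is in a cluster yet, exactly one of them is, or
     both are but in different clusters.  */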
  if (BB_CLUSTER (bb1) == NULL && BB_CLUSTER (bb2) == NULL)
    {
      c = new_cluster ();
      add_bb_to_cluster (c, bb1);
      add_bb_to_cluster (c, bb2);
      BB_CLUSTER (bb1) = c;
      BB_CLUSTER (bb2) = c;
      c->index = all_clusters.length ();
      all_clusters.safe_push (c);
    }
  else if (BB_CLUSTER (bb1) == NULL || BB_CLUSTER (bb2) == NULL)
    {
      merge_bb = BB_CLUSTER (bb1) == NULL ? bb2 : bb1;
      other_bb = BB_CLUSTER (bb1) == NULL ? bb1 : bb2;
      merge = BB_CLUSTER (merge_bb);
      add_bb_to_cluster (merge, other_bb);
      BB_CLUSTER (other_bb) = merge;
    }
  else if (BB_CLUSTER (bb1) != BB_CLUSTER (bb2))
    {
      unsigned int i;
      bitmap_iterator bi;

      old = BB_CLUSTER (bb2);
      merge = BB_CLUSTER (bb1);
      merge_clusters (merge, old);
      EXECUTE_IF_SET_IN_BITMAP (old->bbs, 0, i, bi)
        BB_CLUSTER (BASIC_BLOCK_FOR_FN (cfun, i)) = merge;
      all_clusters[old->index] = NULL;
      update_rep_bb (merge, old->rep_bb);
      delete_cluster (old);
    }
  else
    gcc_unreachable ();
}

/* Return true if gimple operands T1 and T2 have the same value.  */

static bool
gimple_operand_equal_value_p (tree t1, tree t2)
{
  if (t1 == t2)
    return true;

  if (t1 == NULL_TREE
      || t2 == NULL_TREE)
    return false;

  if (operand_equal_p (t1, t2, OEP_MATCH_SIDE_EFFECTS))
    return true;

  return gvn_uses_equal (t1, t2);
}

/* Return true if gimple statements S1 and S2 are equal.  Gimple_bb (s1) and
   gimple_bb (s2) are members of SAME_SUCC.  */

static bool
gimple_equal_p (same_succ *same_succ, gimple *s1, gimple *s2)
{
  unsigned int i;
  tree lhs1, lhs2;
  basic_block bb1 = gimple_bb (s1), bb2 = gimple_bb (s2);
  tree t1, t2;
  bool inv_cond;
  enum tree_code code1, code2;

  if (gimple_code (s1) != gimple_code (s2))
    return false;

  switch (gimple_code (s1))
    {
    case GIMPLE_CALL:
      if (!gimple_call_same_target_p (s1, s2))
        return false;

      t1 = gimple_call_chain (s1);
      t2 = gimple_call_chain (s2);
      if (!gimple_operand_equal_value_p (t1, t2))
        return false;

      if (gimple_call_num_args (s1) != gimple_call_num_args (s2))
        return false;

      for (i = 0; i < gimple_call_num_args (s1); ++i)
        {
          t1 = gimple_call_arg (s1, i);
          t2 = gimple_call_arg (s2, i);
          if (!gimple_operand_equal_value_p (t1, t2))
            return false;
        }

      lhs1 = gimple_get_lhs (s1);
      lhs2 = gimple_get_lhs (s2);
      if (lhs1 == NULL_TREE && lhs2 == NULL_TREE)
        return true;
      if (lhs1 == NULL_TREE || lhs2 == NULL_TREE)
        return false;
      if (TREE_CODE (lhs1) == SSA_NAME && TREE_CODE (lhs2) == SSA_NAME)
        return tail_merge_valueize (lhs1) == tail_merge_valueize (lhs2);
      return operand_equal_p (lhs1, lhs2, 0);

    case GIMPLE_ASSIGN:
      lhs1 = gimple_get_lhs (s1);
      lhs2 = gimple_get_lhs (s2);
      if (TREE_CODE (lhs1) != SSA_NAME
          && TREE_CODE (lhs2) != SSA_NAME)
        return (operand_equal_p (lhs1, lhs2, 0)
                && gimple_operand_equal_value_p (gimple_assign_rhs1 (s1),
                                                 gimple_assign_rhs1 (s2)));
      else if (TREE_CODE (lhs1) == SSA_NAME
               && TREE_CODE (lhs2) == SSA_NAME)
        return operand_equal_p (gimple_assign_rhs1 (s1),
                                gimple_assign_rhs1 (s2), 0);
      return false;

    case GIMPLE_COND:
      t1 = gimple_cond_lhs (s1);
      t2 = gimple_cond_lhs (s2);
      if (!gimple_operand_equal_value_p (t1, t2))
        return false;

      t1 = gimple_cond_rhs (s1);
      t2 = gimple_cond_rhs (s2);
      if (!gimple_operand_equal_value_p (t1, t2))
        return false;

      code1 = gimple_expr_code (s1);
      code2 = gimple_expr_code (s2);
      inv_cond = (bitmap_bit_p (same_succ->inverse, bb1->index)
                  != bitmap_bit_p (same_succ->inverse, bb2->index));
      if (inv_cond)
        {
          bool honor_nans = HONOR_NANS (t1);
          code2 = invert_tree_comparison (code2, honor_nans);
        }
      return code1 == code2;

    default:
      return false;
    }
}

/* Let GSI skip backwards over local defs.  Return the earliest vuse in VUSE.
   Return true in VUSE_ESCAPED if the vuse influenced a SSA_OP_DEF of one of
   the processed statements.  */

static void
gsi_advance_bw_nondebug_nonlocal (gimple_stmt_iterator *gsi, tree *vuse,
                                  bool *vuse_escaped)
{
  gimple *stmt;
  tree lvuse;

  while (true)
    {
      if (gsi_end_p (*gsi))
        return;
      stmt = gsi_stmt (*gsi);

      lvuse = gimple_vuse (stmt);
      if (lvuse != NULL_TREE)
        {
          *vuse = lvuse;
          if (!ZERO_SSA_OPERANDS (stmt, SSA_OP_DEF))
            *vuse_escaped = true;
        }

      if (!stmt_local_def (stmt))
        return;
      gsi_prev_nondebug (gsi);
    }
}

/* Return true if equal (in the sense of gimple_equal_p) statements STMT1 and
   STMT2 are allowed to be merged.  */

static bool
merge_stmts_p (gimple *stmt1, gimple *stmt2)
{
  /* What could be better than this here is to blacklist the bb
     containing the stmt, when encountering the stmt f.i. in
     same_succ_hash.  */
  if (is_tm_ending (stmt1))
    return false;

  /* Verify EH landing pads.  */
  if (lookup_stmt_eh_lp_fn (cfun, stmt1) != lookup_stmt_eh_lp_fn (cfun, stmt2))
    return false;

  if (is_gimple_call (stmt1)
      && gimple_call_internal_p (stmt1))
    switch (gimple_call_internal_fn (stmt1))
      {
      case IFN_UBSAN_NULL:
      case IFN_UBSAN_BOUNDS:
      case IFN_UBSAN_VPTR:
      case IFN_UBSAN_CHECK_ADD:
      case IFN_UBSAN_CHECK_SUB:
      case IFN_UBSAN_CHECK_MUL:
      case IFN_UBSAN_OBJECT_SIZE:
      case IFN_UBSAN_PTR:
      case IFN_ASAN_CHECK:
        /* For these internal functions, gimple_location is an implicit
           parameter, which will be used explicitly after expansion.
           Merging these statements may cause confusing line numbers in
           sanitizer messages.  */
        return gimple_location (stmt1) == gimple_location (stmt2);
      default:
        break;
      }

  return true;
}

/* Determines whether BB1 and BB2 (members of same_succ) are duplicates.  If
   so, clusters them.  */

static void
find_duplicate (same_succ *same_succ, basic_block bb1, basic_block bb2)
{
  gimple_stmt_iterator gsi1 = gsi_last_nondebug_bb (bb1);
  gimple_stmt_iterator gsi2 = gsi_last_nondebug_bb (bb2);
  tree vuse1 = NULL_TREE, vuse2 = NULL_TREE;
  bool vuse_escaped = false;

  gsi_advance_bw_nondebug_nonlocal (&gsi1, &vuse1, &vuse_escaped);
  gsi_advance_bw_nondebug_nonlocal (&gsi2, &vuse2, &vuse_escaped);

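  /* Walk both blocks backwards in lockstep, comparing the non-debug,
     non-local-def statements pairwise with gimple_equal_p.  */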
  while (!gsi_end_p (gsi1) && !gsi_end_p (gsi2))
    {
      gimple *stmt1 = gsi_stmt (gsi1);
      gimple *stmt2 = gsi_stmt (gsi2);

      if (gimple_code (stmt1) == GIMPLE_LABEL
          && gimple_code (stmt2) == GIMPLE_LABEL)
        break;

      if (!gimple_equal_p (same_succ, stmt1, stmt2))
        return;

      if (!merge_stmts_p (stmt1, stmt2))
        return;

      gsi_prev_nondebug (&gsi1);
      gsi_prev_nondebug (&gsi2);
      gsi_advance_bw_nondebug_nonlocal (&gsi1, &vuse1, &vuse_escaped);
      gsi_advance_bw_nondebug_nonlocal (&gsi2, &vuse2, &vuse_escaped);
    }

  while (!gsi_end_p (gsi1) && gimple_code (gsi_stmt (gsi1)) == GIMPLE_LABEL)
    {
      tree label = gimple_label_label (as_a <glabel *> (gsi_stmt (gsi1)));
      if (DECL_NONLOCAL (label) || FORCED_LABEL (label))
        return;
      gsi_prev (&gsi1);
    }
  while (!gsi_end_p (gsi2) && gimple_code (gsi_stmt (gsi2)) == GIMPLE_LABEL)
    {
      tree label = gimple_label_label (as_a <glabel *> (gsi_stmt (gsi2)));
      if (DECL_NONLOCAL (label) || FORCED_LABEL (label))
        return;
      gsi_prev (&gsi2);
    }
  if (!(gsi_end_p (gsi1) && gsi_end_p (gsi2)))
    return;

  /* If the incoming vuses are not the same, and the vuse escaped into an
     SSA_OP_DEF, then merging the 2 blocks will change the value of the def,
     which potentially means the semantics of one of the blocks will be
     changed.
     TODO: make this check more precise.  */
  if (vuse_escaped && vuse1 != vuse2)
    return;

  if (dump_file)
    fprintf (dump_file, "find_duplicates: <bb %d> duplicate of <bb %d>\n",
             bb1->index, bb2->index);

  set_cluster (bb1, bb2);
}

13461debfc3dSmrg /* Returns whether for all phis in DEST the phi alternatives for E1 and
13471debfc3dSmrg E2 are equal. */
13481debfc3dSmrg
13491debfc3dSmrg static bool
same_phi_alternatives_1(basic_block dest,edge e1,edge e2)13501debfc3dSmrg same_phi_alternatives_1 (basic_block dest, edge e1, edge e2)
13511debfc3dSmrg {
13521debfc3dSmrg int n1 = e1->dest_idx, n2 = e2->dest_idx;
13531debfc3dSmrg gphi_iterator gsi;
13541debfc3dSmrg
13551debfc3dSmrg for (gsi = gsi_start_phis (dest); !gsi_end_p (gsi); gsi_next (&gsi))
13561debfc3dSmrg {
13571debfc3dSmrg gphi *phi = gsi.phi ();
13581debfc3dSmrg tree lhs = gimple_phi_result (phi);
13591debfc3dSmrg tree val1 = gimple_phi_arg_def (phi, n1);
13601debfc3dSmrg tree val2 = gimple_phi_arg_def (phi, n2);
13611debfc3dSmrg
13621debfc3dSmrg if (virtual_operand_p (lhs))
13631debfc3dSmrg continue;
13641debfc3dSmrg
13651debfc3dSmrg if (operand_equal_for_phi_arg_p (val1, val2))
13661debfc3dSmrg continue;
13671debfc3dSmrg if (gvn_uses_equal (val1, val2))
13681debfc3dSmrg continue;
13691debfc3dSmrg
13701debfc3dSmrg return false;
13711debfc3dSmrg }
13721debfc3dSmrg
13731debfc3dSmrg return true;
13741debfc3dSmrg }
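
/* For example (illustrative gimple, not from a real dump): if the common
   successor contains

     x_1 = PHI <3(A), 3(B)>

   then the alternatives for the edges from A and B compare equal and the phi
   does not block merging A and B, whereas

     x_1 = PHI <3(A), 4(B)>

   makes same_phi_alternatives_1 return false unless value numbering
   (gvn_uses_equal) can prove the two arguments equal.  */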
13751debfc3dSmrg
13761debfc3dSmrg /* Returns whether for all successors of BB1 and BB2 (members of SAME_SUCC), the
13771debfc3dSmrg phi alternatives for BB1 and BB2 are equal. */
13781debfc3dSmrg
13791debfc3dSmrg static bool
13801debfc3dSmrg same_phi_alternatives (same_succ *same_succ, basic_block bb1, basic_block bb2)
13811debfc3dSmrg {
13821debfc3dSmrg unsigned int s;
13831debfc3dSmrg bitmap_iterator bs;
13841debfc3dSmrg edge e1, e2;
13851debfc3dSmrg basic_block succ;
13861debfc3dSmrg
13871debfc3dSmrg EXECUTE_IF_SET_IN_BITMAP (same_succ->succs, 0, s, bs)
13881debfc3dSmrg {
13891debfc3dSmrg succ = BASIC_BLOCK_FOR_FN (cfun, s);
13901debfc3dSmrg e1 = find_edge (bb1, succ);
13911debfc3dSmrg e2 = find_edge (bb2, succ);
13921debfc3dSmrg if (e1->flags & EDGE_COMPLEX
13931debfc3dSmrg || e2->flags & EDGE_COMPLEX)
13941debfc3dSmrg return false;
13951debfc3dSmrg
13961debfc3dSmrg /* For all phis in bb, the phi alternatives for e1 and e2 need to have
13971debfc3dSmrg the same value. */
13981debfc3dSmrg if (!same_phi_alternatives_1 (succ, e1, e2))
13991debfc3dSmrg return false;
14001debfc3dSmrg }
14011debfc3dSmrg
14021debfc3dSmrg return true;
14031debfc3dSmrg }
14041debfc3dSmrg
14051debfc3dSmrg /* Return true if BB has non-vop phis. */
14061debfc3dSmrg
14071debfc3dSmrg static bool
14081debfc3dSmrg bb_has_non_vop_phi (basic_block bb)
14091debfc3dSmrg {
14101debfc3dSmrg gimple_seq phis = phi_nodes (bb);
14111debfc3dSmrg gimple *phi;
14121debfc3dSmrg
14131debfc3dSmrg if (phis == NULL)
14141debfc3dSmrg return false;
14151debfc3dSmrg
14161debfc3dSmrg if (!gimple_seq_singleton_p (phis))
14171debfc3dSmrg return true;
14181debfc3dSmrg
14191debfc3dSmrg phi = gimple_seq_first_stmt (phis);
14201debfc3dSmrg return !virtual_operand_p (gimple_phi_result (phi));
14211debfc3dSmrg }
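
/* E.g. (illustrative gimple) a block whose only phi is the virtual operand
   phi

     .MEM_5 = PHI <.MEM_3(A), .MEM_4(B)>

   has no non-vop phi and stays a merge candidate, while a block starting
   with a real phi such as

     x_2 = PHI <0(A), 1(B)>

   is skipped by find_clusters_1 below (see the TODO there).  */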
14221debfc3dSmrg
14231debfc3dSmrg /* Returns true if redirecting the incoming edges of FROM to TO maintains the
14241debfc3dSmrg    invariant that uses in FROM are dominated by their defs.  */
14251debfc3dSmrg
14261debfc3dSmrg static bool
14271debfc3dSmrg deps_ok_for_redirect_from_bb_to_bb (basic_block from, basic_block to)
14281debfc3dSmrg {
14291debfc3dSmrg basic_block cd, dep_bb = BB_DEP_BB (to);
14301debfc3dSmrg edge_iterator ei;
14311debfc3dSmrg edge e;
14321debfc3dSmrg
14331debfc3dSmrg if (dep_bb == NULL)
14341debfc3dSmrg return true;
14351debfc3dSmrg
14361debfc3dSmrg bitmap from_preds = BITMAP_ALLOC (NULL);
14371debfc3dSmrg FOR_EACH_EDGE (e, ei, from->preds)
14381debfc3dSmrg bitmap_set_bit (from_preds, e->src->index);
14391debfc3dSmrg cd = nearest_common_dominator_for_set (CDI_DOMINATORS, from_preds);
14401debfc3dSmrg BITMAP_FREE (from_preds);
14411debfc3dSmrg
14421debfc3dSmrg return dominated_by_p (CDI_DOMINATORS, dep_bb, cd);
14431debfc3dSmrg }
14441debfc3dSmrg
14451debfc3dSmrg /* Returns true if replacing BB1 (or its replacement bb) by BB2 (or its
14461debfc3dSmrg    replacement bb) and vice versa maintains the invariant that uses in the
14471debfc3dSmrg    replacement are dominated by their defs.  */
14481debfc3dSmrg
14491debfc3dSmrg static bool
14501debfc3dSmrg deps_ok_for_redirect (basic_block bb1, basic_block bb2)
14511debfc3dSmrg {
14521debfc3dSmrg if (BB_CLUSTER (bb1) != NULL)
14531debfc3dSmrg bb1 = BB_CLUSTER (bb1)->rep_bb;
14541debfc3dSmrg
14551debfc3dSmrg if (BB_CLUSTER (bb2) != NULL)
14561debfc3dSmrg bb2 = BB_CLUSTER (bb2)->rep_bb;
14571debfc3dSmrg
14581debfc3dSmrg return (deps_ok_for_redirect_from_bb_to_bb (bb1, bb2)
14591debfc3dSmrg && deps_ok_for_redirect_from_bb_to_bb (bb2, bb1));
14601debfc3dSmrg }
14611debfc3dSmrg
14621debfc3dSmrg /* Within SAME_SUCC->bbs, find clusters of bbs which can be merged. */
14631debfc3dSmrg
14641debfc3dSmrg static void
14651debfc3dSmrg find_clusters_1 (same_succ *same_succ)
14661debfc3dSmrg {
14671debfc3dSmrg basic_block bb1, bb2;
14681debfc3dSmrg unsigned int i, j;
14691debfc3dSmrg bitmap_iterator bi, bj;
14701debfc3dSmrg int nr_comparisons;
1471*8feb0f0bSmrg int max_comparisons = param_max_tail_merge_comparisons;
14721debfc3dSmrg
14731debfc3dSmrg EXECUTE_IF_SET_IN_BITMAP (same_succ->bbs, 0, i, bi)
14741debfc3dSmrg {
14751debfc3dSmrg bb1 = BASIC_BLOCK_FOR_FN (cfun, i);
14761debfc3dSmrg
14771debfc3dSmrg /* TODO: handle blocks with phi-nodes. We'll have to find corresponding
14781debfc3dSmrg phi-nodes in bb1 and bb2, with the same alternatives for the same
14791debfc3dSmrg preds. */
14801debfc3dSmrg if (bb_has_non_vop_phi (bb1) || bb_has_eh_pred (bb1)
14811debfc3dSmrg || bb_has_abnormal_pred (bb1))
14821debfc3dSmrg continue;
14831debfc3dSmrg
14841debfc3dSmrg nr_comparisons = 0;
14851debfc3dSmrg EXECUTE_IF_SET_IN_BITMAP (same_succ->bbs, i + 1, j, bj)
14861debfc3dSmrg {
14871debfc3dSmrg bb2 = BASIC_BLOCK_FOR_FN (cfun, j);
14881debfc3dSmrg
14891debfc3dSmrg if (bb_has_non_vop_phi (bb2) || bb_has_eh_pred (bb2)
14901debfc3dSmrg || bb_has_abnormal_pred (bb2))
14911debfc3dSmrg continue;
14921debfc3dSmrg
14931debfc3dSmrg if (BB_CLUSTER (bb1) != NULL && BB_CLUSTER (bb1) == BB_CLUSTER (bb2))
14941debfc3dSmrg continue;
14951debfc3dSmrg
14961debfc3dSmrg /* Limit quadratic behavior. */
14971debfc3dSmrg nr_comparisons++;
14981debfc3dSmrg if (nr_comparisons > max_comparisons)
14991debfc3dSmrg break;
15001debfc3dSmrg
15011debfc3dSmrg 	  /* This is a conservative dependency check.  We could test more
15021debfc3dSmrg 	     precisely for the allowed replacement direction.  */
15031debfc3dSmrg if (!deps_ok_for_redirect (bb1, bb2))
15041debfc3dSmrg continue;
15051debfc3dSmrg
15061debfc3dSmrg if (!(same_phi_alternatives (same_succ, bb1, bb2)))
15071debfc3dSmrg continue;
15081debfc3dSmrg
15091debfc3dSmrg find_duplicate (same_succ, bb1, bb2);
15101debfc3dSmrg }
15111debfc3dSmrg }
15121debfc3dSmrg }
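
/* The cut-off above is exposed as a --param knob, so the amount of work per
   worklist entry can be tuned from the command line, e.g. (illustrative
   invocation):

     gcc -O2 --param max-tail-merge-comparisons=20 foo.c

   The default value is documented in invoke.texi together with
   max-tail-merge-iterations, which bounds the outer loop in
   tail_merge_optimize below.  */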
15131debfc3dSmrg
15141debfc3dSmrg /* Find clusters of bbs which can be merged. */
15151debfc3dSmrg
15161debfc3dSmrg static void
15171debfc3dSmrg find_clusters (void)
15181debfc3dSmrg {
15191debfc3dSmrg same_succ *same;
15201debfc3dSmrg
15211debfc3dSmrg while (!worklist.is_empty ())
15221debfc3dSmrg {
15231debfc3dSmrg same = worklist.pop ();
15241debfc3dSmrg same->in_worklist = false;
15251debfc3dSmrg if (dump_file && (dump_flags & TDF_DETAILS))
15261debfc3dSmrg {
15271debfc3dSmrg fprintf (dump_file, "processing worklist entry\n");
15281debfc3dSmrg same_succ_print (dump_file, same);
15291debfc3dSmrg }
15301debfc3dSmrg find_clusters_1 (same);
15311debfc3dSmrg }
15321debfc3dSmrg }
15331debfc3dSmrg
15341debfc3dSmrg /* Returns the vop phi of BB, if any. */
15351debfc3dSmrg
15361debfc3dSmrg static gphi *
15371debfc3dSmrg vop_phi (basic_block bb)
15381debfc3dSmrg {
15391debfc3dSmrg gphi *stmt;
15401debfc3dSmrg gphi_iterator gsi;
15411debfc3dSmrg for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
15421debfc3dSmrg {
15431debfc3dSmrg stmt = gsi.phi ();
15441debfc3dSmrg if (! virtual_operand_p (gimple_phi_result (stmt)))
15451debfc3dSmrg continue;
15461debfc3dSmrg return stmt;
15471debfc3dSmrg }
15481debfc3dSmrg return NULL;
15491debfc3dSmrg }
15501debfc3dSmrg
15511debfc3dSmrg /* Redirect all edges from BB1 to BB2, remove BB1 and mark it as removed.  */
15521debfc3dSmrg
15531debfc3dSmrg static void
15541debfc3dSmrg replace_block_by (basic_block bb1, basic_block bb2)
15551debfc3dSmrg {
15561debfc3dSmrg edge pred_edge;
15571debfc3dSmrg unsigned int i;
15581debfc3dSmrg gphi *bb2_phi;
15591debfc3dSmrg
15601debfc3dSmrg bb2_phi = vop_phi (bb2);
15611debfc3dSmrg
15621debfc3dSmrg /* Mark the basic block as deleted. */
15631debfc3dSmrg mark_basic_block_deleted (bb1);
15641debfc3dSmrg
15651debfc3dSmrg /* Redirect the incoming edges of bb1 to bb2. */
15661debfc3dSmrg for (i = EDGE_COUNT (bb1->preds); i > 0 ; --i)
15671debfc3dSmrg {
15681debfc3dSmrg pred_edge = EDGE_PRED (bb1, i - 1);
15691debfc3dSmrg pred_edge = redirect_edge_and_branch (pred_edge, bb2);
15701debfc3dSmrg gcc_assert (pred_edge != NULL);
15711debfc3dSmrg
15721debfc3dSmrg if (bb2_phi == NULL)
15731debfc3dSmrg continue;
15741debfc3dSmrg
15751debfc3dSmrg /* The phi might have run out of capacity when the redirect added an
15761debfc3dSmrg argument, which means it could have been replaced. Refresh it. */
15771debfc3dSmrg bb2_phi = vop_phi (bb2);
15781debfc3dSmrg
15791debfc3dSmrg add_phi_arg (bb2_phi, SSA_NAME_VAR (gimple_phi_result (bb2_phi)),
15801debfc3dSmrg pred_edge, UNKNOWN_LOCATION);
15811debfc3dSmrg }
15821debfc3dSmrg
15831debfc3dSmrg
15841debfc3dSmrg /* Merge the outgoing edge counts from bb1 onto bb2. */
1585a2dc1f3fSmrg edge e1, e2;
1586a2dc1f3fSmrg edge_iterator ei;
1587a2dc1f3fSmrg
1588a2dc1f3fSmrg if (bb2->count.initialized_p ())
15891debfc3dSmrg FOR_EACH_EDGE (e1, ei, bb1->succs)
15901debfc3dSmrg {
15911debfc3dSmrg e2 = find_edge (bb2, e1->dest);
15921debfc3dSmrg gcc_assert (e2);
1593a2dc1f3fSmrg
1594a2dc1f3fSmrg       /* If the probabilities are the same, we are done.
1595a2dc1f3fSmrg 	 If the counts are nonzero we can distribute accordingly.  In the
1596a2dc1f3fSmrg 	 remaining cases just average the values and hope for the best.  */
1597a2dc1f3fSmrg e2->probability = e1->probability.combine_with_count
1598a2dc1f3fSmrg (bb1->count, e2->probability, bb2->count);
15991debfc3dSmrg }
1600a2dc1f3fSmrg bb2->count += bb1->count;
16011debfc3dSmrg
16021debfc3dSmrg /* Move over any user labels from bb1 after the bb2 labels. */
16031debfc3dSmrg gimple_stmt_iterator gsi1 = gsi_start_bb (bb1);
16041debfc3dSmrg if (!gsi_end_p (gsi1) && gimple_code (gsi_stmt (gsi1)) == GIMPLE_LABEL)
16051debfc3dSmrg {
16061debfc3dSmrg gimple_stmt_iterator gsi2 = gsi_after_labels (bb2);
16071debfc3dSmrg while (!gsi_end_p (gsi1)
16081debfc3dSmrg && gimple_code (gsi_stmt (gsi1)) == GIMPLE_LABEL)
16091debfc3dSmrg {
16101debfc3dSmrg tree label = gimple_label_label (as_a <glabel *> (gsi_stmt (gsi1)));
16111debfc3dSmrg gcc_assert (!DECL_NONLOCAL (label) && !FORCED_LABEL (label));
16121debfc3dSmrg if (DECL_ARTIFICIAL (label))
16131debfc3dSmrg gsi_next (&gsi1);
16141debfc3dSmrg else
16151debfc3dSmrg gsi_move_before (&gsi1, &gsi2);
16161debfc3dSmrg }
16171debfc3dSmrg }
16181debfc3dSmrg
16191debfc3dSmrg /* Clear range info from all stmts in BB2 -- this transformation
16201debfc3dSmrg could make them out of date. */
16211debfc3dSmrg reset_flow_sensitive_info_in_bb (bb2);
16221debfc3dSmrg
16231debfc3dSmrg /* Do updates that use bb1, before deleting bb1. */
16241debfc3dSmrg release_last_vdef (bb1);
16251debfc3dSmrg same_succ_flush_bb (bb1);
16261debfc3dSmrg
16271debfc3dSmrg delete_basic_block (bb1);
16281debfc3dSmrg }
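
/* Rough numeric sketch of the profile update above, assuming
   combine_with_count yields a count-weighted average of the two edge
   probabilities: if bb1 has count 100 with a 20% edge to a destination D and
   bb2 has count 300 with a 60% edge to D, the merged bb2 ends up with count
   400 and the edge to D gets roughly (20% * 100 + 60% * 300) / 400 = 50%.  */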
16291debfc3dSmrg
16301debfc3dSmrg /* Bbs for which update_debug_stmt needs to be called.  */
16311debfc3dSmrg
16321debfc3dSmrg static bitmap update_bbs;
16331debfc3dSmrg
16341debfc3dSmrg /* For each cluster in all_clusters, merge all cluster->bbs. Returns
16351debfc3dSmrg number of bbs removed. */
16361debfc3dSmrg
16371debfc3dSmrg static int
16381debfc3dSmrg apply_clusters (void)
16391debfc3dSmrg {
16401debfc3dSmrg basic_block bb1, bb2;
16411debfc3dSmrg bb_cluster *c;
16421debfc3dSmrg unsigned int i, j;
16431debfc3dSmrg bitmap_iterator bj;
16441debfc3dSmrg int nr_bbs_removed = 0;
16451debfc3dSmrg
16461debfc3dSmrg for (i = 0; i < all_clusters.length (); ++i)
16471debfc3dSmrg {
16481debfc3dSmrg c = all_clusters[i];
16491debfc3dSmrg if (c == NULL)
16501debfc3dSmrg continue;
16511debfc3dSmrg
16521debfc3dSmrg bb2 = c->rep_bb;
16531debfc3dSmrg bitmap_set_bit (update_bbs, bb2->index);
16541debfc3dSmrg
16551debfc3dSmrg bitmap_clear_bit (c->bbs, bb2->index);
16561debfc3dSmrg EXECUTE_IF_SET_IN_BITMAP (c->bbs, 0, j, bj)
16571debfc3dSmrg {
16581debfc3dSmrg bb1 = BASIC_BLOCK_FOR_FN (cfun, j);
16591debfc3dSmrg bitmap_clear_bit (update_bbs, bb1->index);
16601debfc3dSmrg
16611debfc3dSmrg replace_block_by (bb1, bb2);
16621debfc3dSmrg nr_bbs_removed++;
16631debfc3dSmrg }
16641debfc3dSmrg }
16651debfc3dSmrg
16661debfc3dSmrg return nr_bbs_removed;
16671debfc3dSmrg }
16681debfc3dSmrg
16691debfc3dSmrg /* Resets debug statement STMT if it has uses that are not dominated by their
16701debfc3dSmrg defs. */
16711debfc3dSmrg
16721debfc3dSmrg static void
16731debfc3dSmrg update_debug_stmt (gimple *stmt)
16741debfc3dSmrg {
16751debfc3dSmrg use_operand_p use_p;
16761debfc3dSmrg ssa_op_iter oi;
16771debfc3dSmrg basic_block bbuse;
16781debfc3dSmrg
16791debfc3dSmrg if (!gimple_debug_bind_p (stmt))
16801debfc3dSmrg return;
16811debfc3dSmrg
16821debfc3dSmrg bbuse = gimple_bb (stmt);
16831debfc3dSmrg FOR_EACH_PHI_OR_STMT_USE (use_p, stmt, oi, SSA_OP_USE)
16841debfc3dSmrg {
16851debfc3dSmrg tree name = USE_FROM_PTR (use_p);
16861debfc3dSmrg gimple *def_stmt = SSA_NAME_DEF_STMT (name);
16871debfc3dSmrg basic_block bbdef = gimple_bb (def_stmt);
16881debfc3dSmrg if (bbdef == NULL || bbuse == bbdef
16891debfc3dSmrg || dominated_by_p (CDI_DOMINATORS, bbuse, bbdef))
16901debfc3dSmrg continue;
16911debfc3dSmrg
16921debfc3dSmrg gimple_debug_bind_reset_value (stmt);
16931debfc3dSmrg update_stmt (stmt);
16941debfc3dSmrg break;
16951debfc3dSmrg }
16961debfc3dSmrg }
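
/* E.g. (illustrative gimple) a debug bind

     # DEBUG iD.1234 => i_7

   left in a merged block is reset to

     # DEBUG iD.1234 => NULL

   when the definition of i_7 no longer dominates it after the redirection,
   so the debugger reports the value as optimized out instead of showing a
   stale one.  */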
16971debfc3dSmrg
16981debfc3dSmrg /* Resets all debug statements that have uses that are not
16991debfc3dSmrg dominated by their defs. */
17001debfc3dSmrg
17011debfc3dSmrg static void
17021debfc3dSmrg update_debug_stmts (void)
17031debfc3dSmrg {
17041debfc3dSmrg basic_block bb;
17051debfc3dSmrg bitmap_iterator bi;
17061debfc3dSmrg unsigned int i;
17071debfc3dSmrg
17081debfc3dSmrg EXECUTE_IF_SET_IN_BITMAP (update_bbs, 0, i, bi)
17091debfc3dSmrg {
17101debfc3dSmrg gimple *stmt;
17111debfc3dSmrg gimple_stmt_iterator gsi;
17121debfc3dSmrg
17131debfc3dSmrg bb = BASIC_BLOCK_FOR_FN (cfun, i);
17141debfc3dSmrg for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
17151debfc3dSmrg {
17161debfc3dSmrg stmt = gsi_stmt (gsi);
17171debfc3dSmrg if (!is_gimple_debug (stmt))
17181debfc3dSmrg continue;
17191debfc3dSmrg update_debug_stmt (stmt);
17201debfc3dSmrg }
17211debfc3dSmrg }
17221debfc3dSmrg }
17231debfc3dSmrg
17241debfc3dSmrg /* Runs tail merge optimization. */
17251debfc3dSmrg
17261debfc3dSmrg unsigned int
17271debfc3dSmrg tail_merge_optimize (unsigned int todo)
17281debfc3dSmrg {
17291debfc3dSmrg int nr_bbs_removed_total = 0;
17301debfc3dSmrg int nr_bbs_removed;
17311debfc3dSmrg bool loop_entered = false;
17321debfc3dSmrg int iteration_nr = 0;
1733*8feb0f0bSmrg int max_iterations = param_max_tail_merge_iterations;
17341debfc3dSmrg
17351debfc3dSmrg if (!flag_tree_tail_merge
17361debfc3dSmrg || max_iterations == 0)
17371debfc3dSmrg return 0;
17381debfc3dSmrg
17391debfc3dSmrg timevar_push (TV_TREE_TAIL_MERGE);
17401debfc3dSmrg
1741a2dc1f3fSmrg   /* We enter from PRE, which has critical edges split.  Elimination
1742a2dc1f3fSmrg      does not process trivially dead code, so clean up the CFG if we
1743a2dc1f3fSmrg      are told to, and then re-split critical edges.  */
1744a2dc1f3fSmrg if (todo & TODO_cleanup_cfg)
1745a2dc1f3fSmrg {
1746a2dc1f3fSmrg cleanup_tree_cfg ();
1747a2dc1f3fSmrg todo &= ~TODO_cleanup_cfg;
1748*8feb0f0bSmrg split_edges_for_insertion ();
1749a2dc1f3fSmrg }
1750a2dc1f3fSmrg
17511debfc3dSmrg if (!dom_info_available_p (CDI_DOMINATORS))
17521debfc3dSmrg {
17531debfc3dSmrg /* PRE can leave us with unreachable blocks, remove them now. */
17541debfc3dSmrg delete_unreachable_blocks ();
17551debfc3dSmrg calculate_dominance_info (CDI_DOMINATORS);
17561debfc3dSmrg }
17571debfc3dSmrg init_worklist ();
17581debfc3dSmrg
17591debfc3dSmrg while (!worklist.is_empty ())
17601debfc3dSmrg {
17611debfc3dSmrg if (!loop_entered)
17621debfc3dSmrg {
17631debfc3dSmrg loop_entered = true;
17641debfc3dSmrg alloc_cluster_vectors ();
17651debfc3dSmrg update_bbs = BITMAP_ALLOC (NULL);
17661debfc3dSmrg }
17671debfc3dSmrg else
17681debfc3dSmrg reset_cluster_vectors ();
17691debfc3dSmrg
17701debfc3dSmrg iteration_nr++;
17711debfc3dSmrg if (dump_file && (dump_flags & TDF_DETAILS))
17721debfc3dSmrg fprintf (dump_file, "worklist iteration #%d\n", iteration_nr);
17731debfc3dSmrg
17741debfc3dSmrg find_clusters ();
17751debfc3dSmrg gcc_assert (worklist.is_empty ());
17761debfc3dSmrg if (all_clusters.is_empty ())
17771debfc3dSmrg break;
17781debfc3dSmrg
17791debfc3dSmrg nr_bbs_removed = apply_clusters ();
17801debfc3dSmrg nr_bbs_removed_total += nr_bbs_removed;
17811debfc3dSmrg if (nr_bbs_removed == 0)
17821debfc3dSmrg break;
17831debfc3dSmrg
17841debfc3dSmrg free_dominance_info (CDI_DOMINATORS);
17851debfc3dSmrg
17861debfc3dSmrg if (iteration_nr == max_iterations)
17871debfc3dSmrg break;
17881debfc3dSmrg
17891debfc3dSmrg calculate_dominance_info (CDI_DOMINATORS);
17901debfc3dSmrg update_worklist ();
17911debfc3dSmrg }
17921debfc3dSmrg
17931debfc3dSmrg if (dump_file && (dump_flags & TDF_DETAILS))
17941debfc3dSmrg fprintf (dump_file, "htab collision / search: %f\n",
17951debfc3dSmrg same_succ_htab->collisions ());
17961debfc3dSmrg
17971debfc3dSmrg if (nr_bbs_removed_total > 0)
17981debfc3dSmrg {
1799a2dc1f3fSmrg if (MAY_HAVE_DEBUG_BIND_STMTS)
18001debfc3dSmrg {
18011debfc3dSmrg calculate_dominance_info (CDI_DOMINATORS);
18021debfc3dSmrg update_debug_stmts ();
18031debfc3dSmrg }
18041debfc3dSmrg
18051debfc3dSmrg if (dump_file && (dump_flags & TDF_DETAILS))
18061debfc3dSmrg {
18071debfc3dSmrg fprintf (dump_file, "Before TODOs.\n");
18081debfc3dSmrg dump_function_to_file (current_function_decl, dump_file, dump_flags);
18091debfc3dSmrg }
18101debfc3dSmrg
18111debfc3dSmrg mark_virtual_operands_for_renaming (cfun);
18121debfc3dSmrg }
18131debfc3dSmrg
18141debfc3dSmrg delete_worklist ();
18151debfc3dSmrg if (loop_entered)
18161debfc3dSmrg {
18171debfc3dSmrg delete_cluster_vectors ();
18181debfc3dSmrg BITMAP_FREE (update_bbs);
18191debfc3dSmrg }
18201debfc3dSmrg
18211debfc3dSmrg timevar_pop (TV_TREE_TAIL_MERGE);
18221debfc3dSmrg
18231debfc3dSmrg return todo;
18241debfc3dSmrg }
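
/* Usage note: the pass is gated by -ftree-tail-merge and runs from the PRE
   pass, so its work can be inspected in the PRE dump, e.g. (illustrative
   invocation):

     gcc -O2 -fdump-tree-pre-details foo.c

   which shows the "find_duplicates" and "worklist iteration" messages
   emitted above.  */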
1825