/* Control flow graph building code for GNU compiler.
   Copyright (C) 1987-2020 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */


#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "rtl.h"
#include "cfghooks.h"
#include "memmodel.h"
#include "emit-rtl.h"
#include "cfgrtl.h"
#include "cfganal.h"
#include "cfgbuild.h"
#include "except.h"
#include "stmt.h"

static void make_edges (basic_block, basic_block, int);
static void make_label_edge (sbitmap, basic_block, rtx, int);
static void find_bb_boundaries (basic_block);
static void compute_outgoing_frequencies (basic_block);

/* Return true if INSN is something that should be contained inside a
   basic block.  */

bool
inside_basic_block_p (const rtx_insn *insn)
{
  switch (GET_CODE (insn))
    {
    case CODE_LABEL:
      /* Avoid creating a basic block for jump tables.  */
      return (NEXT_INSN (insn) == 0
	      || ! JUMP_TABLE_DATA_P (NEXT_INSN (insn)));

    case JUMP_INSN:
    case CALL_INSN:
    case INSN:
    case DEBUG_INSN:
      return true;

    case JUMP_TABLE_DATA:
    case BARRIER:
    case NOTE:
      return false;

    default:
      gcc_unreachable ();
    }
}

/* Return true if INSN may cause a control flow transfer, so it should
   be the last insn in a basic block.  */

bool
control_flow_insn_p (const rtx_insn *insn)
{
  switch (GET_CODE (insn))
    {
    case NOTE:
    case CODE_LABEL:
    case DEBUG_INSN:
      return false;

    case JUMP_INSN:
      return true;

    case CALL_INSN:
      /* Noreturn and sibling call instructions terminate basic blocks
	 (but only if they happen unconditionally).  */
      if ((SIBLING_CALL_P (insn)
	   || find_reg_note (insn, REG_NORETURN, 0))
	  && GET_CODE (PATTERN (insn)) != COND_EXEC)
	return true;

      /* A call insn may return to the nonlocal goto handler.  */
      if (can_nonlocal_goto (insn))
	return true;
      break;

    case INSN:
      /* Treat trap instructions like noreturn calls (same provision).  */
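      /* That is, an unconditional trap: PATTERN is
	 (trap_if (const_int 1) ...).  Traps with a real condition do
	 not end the block.  */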
      if (GET_CODE (PATTERN (insn)) == TRAP_IF
	  && XEXP (PATTERN (insn), 0) == const1_rtx)
	return true;
      if (!cfun->can_throw_non_call_exceptions)
	return false;
      break;

    case JUMP_TABLE_DATA:
    case BARRIER:
      /* It is nonsense to reach this when looking for the
	 end of a basic block, but before dead code is eliminated
	 this may happen.  */
      return false;

    default:
      gcc_unreachable ();
    }

  return can_throw_internal (insn);
}


/* Create an edge from basic block SRC to the basic block containing
   LABEL, with auxiliary information FLAGS accumulated between calls.  */

static void
make_label_edge (sbitmap edge_cache, basic_block src, rtx label, int flags)
{
  gcc_assert (LABEL_P (label));

  /* If the label was never emitted, this insn is junk, but avoid a
     crash trying to refer to BLOCK_FOR_INSN (label).  This can happen
     as a result of a syntax error and a diagnostic has already been
     printed.  */

  if (INSN_UID (label) == 0)
    return;

  cached_make_edge (edge_cache, src, BLOCK_FOR_INSN (label), flags);
}

/* Create the exception-handling edges generated by INSN out of basic
   block SRC.  */

void
rtl_make_eh_edge (sbitmap edge_cache, basic_block src, rtx insn)
{
  eh_landing_pad lp = get_eh_landing_pad_from_rtx (insn);

  if (lp)
    {
      rtx_insn *label = lp->landing_pad;

      /* During initial rtl generation, use the post_landing_pad.  */
      if (label == NULL)
	{
	  gcc_assert (lp->post_landing_pad);
	  label = label_rtx (lp->post_landing_pad);
	}

      make_label_edge (edge_cache, src, label,
		       EDGE_ABNORMAL | EDGE_EH
		       | (CALL_P (insn) ? EDGE_ABNORMAL_CALL : 0));
    }
}

/* States of basic block as seen by find_many_sub_basic_blocks.  */
enum state {
  /* Basic blocks created via split_block belong to this state.
     make_edges will examine these basic blocks to see if we need to
     create edges going out of them.  */
  BLOCK_NEW = 0,

  /* Basic blocks that do not need examining belong to this state.
     These blocks will be left intact.  In particular, make_edges will
     not create edges going out of these basic blocks.  */
  BLOCK_ORIGINAL,

  /* Basic blocks that may need splitting (due to a label appearing in
     the middle, etc) belong to this state.  After splitting them,
     make_edges will create edges going out of them as needed.  */
  BLOCK_TO_SPLIT
};

#define STATE(BB) (enum state) ((size_t) (BB)->aux)
#define SET_STATE(BB, STATE) ((BB)->aux = (void *) (size_t) (STATE))
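
/* The state is carried in the BB->aux field; find_many_sub_basic_blocks
   clears the field again before returning, so aux stays free for other
   passes.  */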

/* Used internally by purge_dead_tablejump_edges, ORed into state.  */
#define BLOCK_USED_BY_TABLEJUMP		32
#define FULL_STATE(BB) ((size_t) (BB)->aux)
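
/* BLOCK_USED_BY_TABLEJUMP is a power of two above every enum state
   value, so it can be ORed into the aux field without disturbing the
   state, and FULL_STATE reads back the state together with the flag.  */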

/* Identify the edges going out of basic blocks between MIN and MAX,
   inclusive, that have their states set to BLOCK_NEW or
   BLOCK_TO_SPLIT.

   UPDATE_P should be nonzero if we are updating the CFG and zero if
   we are building the CFG from scratch.  */

static void
make_edges (basic_block min, basic_block max, int update_p)
{
  basic_block bb;
  sbitmap edge_cache = NULL;

  /* Heavy use of computed goto in machine-generated code can lead to
     nearly fully-connected CFGs.  In that case we spend a significant
     amount of time searching the edge lists for duplicates.  */
  if (!vec_safe_is_empty (forced_labels)
      || cfun->cfg->max_jumptable_ents > 100)
    edge_cache = sbitmap_alloc (last_basic_block_for_fn (cfun));

  /* By nature of the way basic blocks get numbered,
     ENTRY_BLOCK_PTR->next_bb is always the first real block, i.e. the
     function's entry point, so it falls through from the entry block.  */
  if (min == ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb)
    make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun), min, EDGE_FALLTHRU);

  FOR_BB_BETWEEN (bb, min, max->next_bb, next_bb)
    {
      rtx_insn *insn;
      enum rtx_code code;
      edge e;
      edge_iterator ei;

      if (STATE (bb) == BLOCK_ORIGINAL)
	continue;

      /* If we have an edge cache, cache edges going out of BB.  */
      if (edge_cache)
	{
	  bitmap_clear (edge_cache);
	  if (update_p)
	    {
	      FOR_EACH_EDGE (e, ei, bb->succs)
		if (e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
		  bitmap_set_bit (edge_cache, e->dest->index);
	    }
	}

      if (LABEL_P (BB_HEAD (bb))
	  && LABEL_ALT_ENTRY_P (BB_HEAD (bb)))
	cached_make_edge (NULL, ENTRY_BLOCK_PTR_FOR_FN (cfun), bb, 0);

      /* Examine the last instruction of the block, and discover the
	 ways we can leave the block.  */

      insn = BB_END (bb);
      code = GET_CODE (insn);

      /* A branch.  */
      if (code == JUMP_INSN)
	{
	  rtx tmp;
	  rtx_jump_table_data *table;

	  /* Recognize a non-local goto as a branch outside the
	     current function.  */
	  if (find_reg_note (insn, REG_NON_LOCAL_GOTO, NULL_RTX))
	    ;

	  /* Recognize a tablejump and do the right thing.  */
	  else if (tablejump_p (insn, NULL, &table))
	    {
	      rtvec vec = table->get_labels ();
	      int j;

	      for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
		make_label_edge (edge_cache, bb,
				 XEXP (RTVEC_ELT (vec, j), 0), 0);

	      /* Some targets (e.g., ARM) emit a conditional jump that also
		 contains the out-of-range target.  Scan for these and
		 add an edge if necessary.  */
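	      /* A sketch of the shape being matched (operand details
		 are target-specific):
		   (set (pc) (if_then_else (bounds test)
			       (table-dispatch expression)
			       (label_ref out_of_range_target)))
		 i.e. the else-arm of the IF_THEN_ELSE holds the
		 out-of-range label.  */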
	      if ((tmp = single_set (insn)) != NULL
		  && SET_DEST (tmp) == pc_rtx
		  && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
		  && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF)
		make_label_edge (edge_cache, bb,
				 label_ref_label (XEXP (SET_SRC (tmp), 2)), 0);
	    }

	  /* If this is a computed jump, then mark it as reaching
	     everything on the forced_labels list.  */
	  else if (computed_jump_p (insn))
	    {
	      rtx_insn *insn;
	      unsigned int i;
	      FOR_EACH_VEC_SAFE_ELT (forced_labels, i, insn)
		make_label_edge (edge_cache, bb, insn, EDGE_ABNORMAL);
	    }

	  /* A return insn creates an edge to the exit block.  */
	  else if (returnjump_p (insn))
	    cached_make_edge (edge_cache, bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);

	  /* Recognize an asm goto and do the right thing.  */
	  else if ((tmp = extract_asm_operands (PATTERN (insn))) != NULL)
	    {
	      int i, n = ASM_OPERANDS_LABEL_LENGTH (tmp);
	      for (i = 0; i < n; ++i)
		make_label_edge (edge_cache, bb,
				 XEXP (ASM_OPERANDS_LABEL (tmp, i), 0), 0);
	    }

	  /* Otherwise, we have a plain conditional or unconditional jump.  */
	  else
	    {
	      gcc_assert (JUMP_LABEL (insn));
	      make_label_edge (edge_cache, bb, JUMP_LABEL (insn), 0);
	    }
	}

      /* If this is a sibling call insn, then this is in effect a combined call
	 and return, and so we need an edge to the exit block.  No need to
	 worry about EH edges, since we wouldn't have created the sibling call
	 in the first place.  */
      if (code == CALL_INSN && SIBLING_CALL_P (insn))
	cached_make_edge (edge_cache, bb, EXIT_BLOCK_PTR_FOR_FN (cfun),
			  EDGE_SIBCALL | EDGE_ABNORMAL);

      /* If this is a CALL_INSN, then mark it as reaching the active EH
	 handler for this CALL_INSN.  If we're handling non-call
	 exceptions then any insn can reach any of the active handlers.
	 Also mark the CALL_INSN as reaching any nonlocal goto handler.  */
      else if (code == CALL_INSN || cfun->can_throw_non_call_exceptions)
	{
	  /* Add any appropriate EH edges.  */
	  rtl_make_eh_edge (edge_cache, bb, insn);

	  if (code == CALL_INSN)
	    {
	      if (can_nonlocal_goto (insn))
		{
		  /* ??? This could be made smarter: in some cases it's
		     possible to tell that certain calls will not do a
		     nonlocal goto.  For example, if the nested functions
		     that do the nonlocal gotos do not have their addresses
		     taken, then only calls to those functions or to other
		     nested functions that use them could possibly do
		     nonlocal gotos.  */
		  for (rtx_insn_list *x = nonlocal_goto_handler_labels;
		       x;
		       x = x->next ())
		    make_label_edge (edge_cache, bb, x->insn (),
				     EDGE_ABNORMAL | EDGE_ABNORMAL_CALL);
		}

	      if (flag_tm)
		{
		  rtx note;
		  for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
		    if (REG_NOTE_KIND (note) == REG_TM)
		      make_label_edge (edge_cache, bb, XEXP (note, 0),
				       EDGE_ABNORMAL | EDGE_ABNORMAL_CALL);
		}
	    }
	}

      /* Find out if we can drop through to the next block.  */
      insn = NEXT_INSN (insn);
      e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
      if (e && e->flags & EDGE_FALLTHRU)
	insn = NULL;
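      /* An existing fallthru edge to the exit block means the exit
	 block itself is the drop-through target; clearing INSN makes
	 the code below refresh that edge instead of scanning for the
	 head of the next block.  */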

      while (insn
	     && NOTE_P (insn)
	     && NOTE_KIND (insn) != NOTE_INSN_BASIC_BLOCK)
	insn = NEXT_INSN (insn);

      if (!insn)
	cached_make_edge (edge_cache, bb, EXIT_BLOCK_PTR_FOR_FN (cfun),
			  EDGE_FALLTHRU);
      else if (bb->next_bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
	{
	  if (insn == BB_HEAD (bb->next_bb))
	    cached_make_edge (edge_cache, bb, bb->next_bb, EDGE_FALLTHRU);
	}
    }

  if (edge_cache)
    sbitmap_free (edge_cache);
}

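/* Mark the basic block containing LABEL as reached by a tablejump, so
   that purge_dead_tablejump_edges keeps the edge going to it.  */
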
static void
mark_tablejump_edge (rtx label)
{
  basic_block bb;

  gcc_assert (LABEL_P (label));
  /* See comment in make_label_edge.  */
  if (INSN_UID (label) == 0)
    return;
  bb = BLOCK_FOR_INSN (label);
  SET_STATE (bb, FULL_STATE (bb) | BLOCK_USED_BY_TABLEJUMP);
}

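/* Remove from BB the outgoing edges created for a tablejump reading
   TABLE whose destinations are no longer among the table's labels;
   abnormal and EH edges are left alone.  */
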
static void
purge_dead_tablejump_edges (basic_block bb, rtx_jump_table_data *table)
{
  rtx_insn *insn = BB_END (bb);
  rtx tmp;
  rtvec vec;
  int j;
  edge_iterator ei;
  edge e;

  vec = table->get_labels ();

  for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
    mark_tablejump_edge (XEXP (RTVEC_ELT (vec, j), 0));

  /* Some targets (e.g., ARM) emit a conditional jump that also
     contains the out-of-range target.  Scan for these and mark the
     target so its incoming edge is not purged.  */
  if ((tmp = single_set (insn)) != NULL
      && SET_DEST (tmp) == pc_rtx
      && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
      && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF)
    mark_tablejump_edge (label_ref_label (XEXP (SET_SRC (tmp), 2)));

  for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
    {
      if (FULL_STATE (e->dest) & BLOCK_USED_BY_TABLEJUMP)
	SET_STATE (e->dest, FULL_STATE (e->dest)
		   & ~(size_t) BLOCK_USED_BY_TABLEJUMP);
      else if (!(e->flags & (EDGE_ABNORMAL | EDGE_EH)))
	{
	  remove_edge (e);
	  continue;
	}
      ei_next (&ei);
    }
}

/* Scan basic block BB for possible BB boundaries inside the block
   and create new basic blocks in the process.  */

static void
find_bb_boundaries (basic_block bb)
{
  basic_block orig_bb = bb;
  rtx_insn *insn = BB_HEAD (bb);
  rtx_insn *end = BB_END (bb), *x;
  rtx_jump_table_data *table;
  rtx_insn *flow_transfer_insn = NULL;
  rtx_insn *debug_insn = NULL;
  edge fallthru = NULL;
  bool skip_purge;
  bool seen_note_after_debug = false;

  if (insn == end)
    return;

  if (DEBUG_INSN_P (insn) || DEBUG_INSN_P (end))
    {
      /* Check whether, without debug insns, the insn==end test above
	 would have caused us to return immediately, and behave the
	 same way even with debug insns.  If we don't do this, debug
	 insns could cause us to purge dead edges at different times,
	 which could in turn change the cfg and affect codegen
	 decisions in subtle but undesirable ways.  */
      while (insn != end && DEBUG_INSN_P (insn))
	insn = NEXT_INSN (insn);
      rtx_insn *e = end;
      while (insn != e && DEBUG_INSN_P (e))
	e = PREV_INSN (e);
      if (insn == e)
	{
	  /* If there are debug insns after a single insn that is a
	     control flow insn in the block, we'd have left right
	     away, but we should clean up the debug insns after the
	     control flow insn, because they can't remain in the same
	     block.  So, do the debug insn cleaning up, but then bail
	     out without purging dead edges as we would if the debug
	     insns hadn't been there.  */
	  if (e != end && !DEBUG_INSN_P (e) && control_flow_insn_p (e))
	    {
	      skip_purge = true;
	      flow_transfer_insn = e;
	      goto clean_up_debug_after_control_flow;
	    }
	  return;
	}
    }

  if (LABEL_P (insn))
    insn = NEXT_INSN (insn);

  /* Scan insn chain and try to find new basic block boundaries.  */
  while (1)
    {
      enum rtx_code code = GET_CODE (insn);

      if (code == DEBUG_INSN)
	{
	  if (flow_transfer_insn && !debug_insn)
	    {
	      debug_insn = insn;
	      seen_note_after_debug = false;
	    }
	}
      /* In case we've previously seen an insn that effects a control
	 flow transfer, split the block.  */
      else if ((flow_transfer_insn || code == CODE_LABEL)
	       && inside_basic_block_p (insn))
	{
	  rtx_insn *prev = PREV_INSN (insn);

	  /* If the first non-debug inside_basic_block_p insn after a control
	     flow transfer is not a label, split the block before the debug
	     insn instead of before the non-debug insn, so that the debug
	     insns are not lost.  */
	  if (debug_insn && code != CODE_LABEL && code != BARRIER)
	    {
	      prev = PREV_INSN (debug_insn);
	      if (seen_note_after_debug)
		{
		  /* Though, if there are NOTEs intermixed with DEBUG_INSNs,
		     move the NOTEs before the DEBUG_INSNs and split after
		     the last NOTE.  */
		  rtx_insn *first = NULL, *last = NULL;
		  for (x = debug_insn; x != insn; x = NEXT_INSN (x))
		    {
		      if (NOTE_P (x))
			{
			  if (first == NULL)
			    first = x;
			  last = x;
			}
		      else
			{
			  gcc_assert (DEBUG_INSN_P (x));
			  if (first)
			    {
			      reorder_insns_nobb (first, last, prev);
			      prev = last;
			      first = last = NULL;
			    }
			}
		    }
		  if (first)
		    {
		      reorder_insns_nobb (first, last, prev);
		      prev = last;
		    }
		}
	    }
	  fallthru = split_block (bb, prev);
	  if (flow_transfer_insn)
	    {
	      BB_END (bb) = flow_transfer_insn;

	      rtx_insn *next;
	      /* Clean up the bb field for the insns between the blocks.  */
	      for (x = NEXT_INSN (flow_transfer_insn);
		   x != BB_HEAD (fallthru->dest);
		   x = next)
		{
		  next = NEXT_INSN (x);
		  /* Debug insns should not be in between basic blocks,
		     drop them on the floor.  */
		  if (DEBUG_INSN_P (x))
		    delete_insn (x);
		  else if (!BARRIER_P (x))
		    set_block_for_insn (x, NULL);
		}
	    }

	  bb = fallthru->dest;
	  remove_edge (fallthru);
	  /* BB is unreachable at this point - we need to determine its
	     profile once edges are built.  */
	  bb->count = profile_count::uninitialized ();
	  flow_transfer_insn = NULL;
	  debug_insn = NULL;
	  if (code == CODE_LABEL && LABEL_ALT_ENTRY_P (insn))
	    make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun), bb, 0);
	}
      else if (code == BARRIER)
	{
	  /* __builtin_unreachable () may cause a barrier to be emitted in
	     the middle of a BB.  We need to split it in the same manner as
	     if the barrier were preceded by a control_flow_insn_p insn.  */
	  if (!flow_transfer_insn)
	    flow_transfer_insn = prev_nonnote_nondebug_insn_bb (insn);
	  debug_insn = NULL;
	}
      else if (debug_insn)
	{
	  if (code == NOTE)
	    seen_note_after_debug = true;
	  else
	    /* Jump tables.  */
	    debug_insn = NULL;
	}

      if (control_flow_insn_p (insn))
	flow_transfer_insn = insn;
      if (insn == end)
	break;
      insn = NEXT_INSN (insn);
    }

  /* In case the expander replaced a normal insn by a sequence terminating
     with a return and a barrier, or possibly some other sequence not
     behaving like an ordinary jump, we need to take care and move the
     basic block boundary.  */
  if (flow_transfer_insn && flow_transfer_insn != end)
    {
      skip_purge = false;

    clean_up_debug_after_control_flow:
      BB_END (bb) = flow_transfer_insn;

      /* Clean up the bb field for the insns that do not belong to BB.  */
      rtx_insn *next;
      for (x = NEXT_INSN (flow_transfer_insn); ; x = next)
	{
	  next = NEXT_INSN (x);
	  /* Debug insns should not be in between basic blocks,
	     drop them on the floor.  */
	  if (DEBUG_INSN_P (x))
	    delete_insn (x);
	  else if (!BARRIER_P (x))
	    set_block_for_insn (x, NULL);
	  if (x == end)
	    break;
	}

      if (skip_purge)
	return;
    }

  /* We've possibly replaced the conditional jump by a conditional jump
     followed by a cleanup at the fallthru edge, so the outgoing edges
     may be dead.  */
  purge_dead_edges (bb);

  /* purge_dead_edges doesn't handle tablejumps, but if we have split the
     basic block, we might need to kill some edges.  */
  if (bb != orig_bb && tablejump_p (BB_END (bb), NULL, &table))
    purge_dead_tablejump_edges (bb, table);
}

/* Assume that the frequency of basic block B is known.  Compute
   frequencies and probabilities of its outgoing edges.  */

static void
compute_outgoing_frequencies (basic_block b)
{
  edge e, f;
  edge_iterator ei;

  if (EDGE_COUNT (b->succs) == 2)
    {
      rtx note = find_reg_note (BB_END (b), REG_BR_PROB, NULL);
      int probability;
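      /* A REG_BR_PROB note encodes the branch-taken probability as an
	 int; the fallthru edge receives the complementary probability.  */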

      if (note)
	{
	  probability = XINT (note, 0);
	  e = BRANCH_EDGE (b);
	  e->probability
	    = profile_probability::from_reg_br_prob_note (probability);
	  f = FALLTHRU_EDGE (b);
	  f->probability = e->probability.invert ();
	  return;
	}
      else
	{
	  guess_outgoing_edge_probabilities (b);
	}
    }
  else if (single_succ_p (b))
    {
      e = single_succ_edge (b);
      e->probability = profile_probability::always ();
      return;
    }
  else
    {
      /* We rely on BBs with more than two successors to have sane
	 probabilities and do not guess them here.  For BBs terminated by
	 switch statements expanded to jump-table jumps, we have done the
	 right thing during expansion.  For EH edges, we still guess the
	 probabilities here.  */
      bool complex_edge = false;
      FOR_EACH_EDGE (e, ei, b->succs)
	if (e->flags & EDGE_COMPLEX)
	  {
	    complex_edge = true;
	    break;
	  }
      if (complex_edge)
	guess_outgoing_edge_probabilities (b);
    }
}

/* Assume that some pass has inserted labels or control flow
   instructions within a basic block.  Split basic blocks as needed
   and create edges.  */

void
find_many_sub_basic_blocks (sbitmap blocks)
{
  basic_block bb, min, max;
  bool found = false;
  auto_vec<unsigned int> n_succs;
  n_succs.safe_grow_cleared (last_basic_block_for_fn (cfun));
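  /* n_succs[i] stays zero unless find_bb_boundaries neither created new
     blocks nor changed block i's successor count; such unchanged blocks
     can skip the probability recomputation below.  */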

  FOR_EACH_BB_FN (bb, cfun)
    SET_STATE (bb,
	       bitmap_bit_p (blocks, bb->index) ? BLOCK_TO_SPLIT : BLOCK_ORIGINAL);

  FOR_EACH_BB_FN (bb, cfun)
    if (STATE (bb) == BLOCK_TO_SPLIT)
      {
	int n = last_basic_block_for_fn (cfun);
	unsigned int ns = EDGE_COUNT (bb->succs);

	find_bb_boundaries (bb);
	if (n == last_basic_block_for_fn (cfun) && ns == EDGE_COUNT (bb->succs))
	  n_succs[bb->index] = EDGE_COUNT (bb->succs);
      }

  FOR_EACH_BB_FN (bb, cfun)
    if (STATE (bb) != BLOCK_ORIGINAL)
      {
	found = true;
	break;
      }

  if (!found)
    return;

  min = max = bb;
  for (; bb != EXIT_BLOCK_PTR_FOR_FN (cfun); bb = bb->next_bb)
    if (STATE (bb) != BLOCK_ORIGINAL)
      max = bb;

  /* Now re-scan and wire in all edges.  This expects simple (conditional)
     jumps at the end of each new basic block.  */
  make_edges (min, max, 1);

  /* Update branch probabilities.  Expect only (un)conditional jumps
     to be created with only the forward edges.  */
  if (profile_status_for_fn (cfun) != PROFILE_ABSENT)
    FOR_BB_BETWEEN (bb, min, max->next_bb, next_bb)
      {
	edge e;
	edge_iterator ei;

	if (STATE (bb) == BLOCK_ORIGINAL)
	  continue;
	if (STATE (bb) == BLOCK_NEW)
	  {
	    bool initialized_src = false, uninitialized_src = false;
	    bb->count = profile_count::zero ();
	    FOR_EACH_EDGE (e, ei, bb->preds)
	      {
		if (e->count ().initialized_p ())
		  {
		    bb->count += e->count ();
		    initialized_src = true;
		  }
		else
		  uninitialized_src = true;
	      }
	    /* When some edges are missing with read profile, this is
	       most likely because RTL expansion introduced a loop.
	       When the profile is guessed, we may have a BB that is
	       reachable from an unlikely path as well as from a normal
	       path.

	       TODO: We should handle loops created during BB expansion
	       correctly here.  For now we assume all such loops cycle
	       precisely once.  */
	    if (!initialized_src
		|| (uninitialized_src
		    && profile_status_for_fn (cfun) < PROFILE_GUESSED))
	      bb->count = profile_count::uninitialized ();
	  }
	/* If nothing changed, there is no need to create new BBs.  */
	else if (EDGE_COUNT (bb->succs) == n_succs[bb->index])
	  {
	    /* On rare occasions RTL expansion might have mistakenly
	       assigned probabilities different from what is in the CFG.
	       This happens when we try to split a branch into two but
	       optimize out the second branch along the way.  See PR81030.  */
	    if (JUMP_P (BB_END (bb)) && any_condjump_p (BB_END (bb))
		&& EDGE_COUNT (bb->succs) >= 2)
	      update_br_prob_note (bb);
	    continue;
	  }

	compute_outgoing_frequencies (bb);
      }

  FOR_EACH_BB_FN (bb, cfun)
    SET_STATE (bb, 0);
}