138fd1498Szrj /* Control flow graph manipulation code for GNU compiler.
238fd1498Szrj Copyright (C) 1987-2018 Free Software Foundation, Inc.
338fd1498Szrj
438fd1498Szrj This file is part of GCC.
538fd1498Szrj
638fd1498Szrj GCC is free software; you can redistribute it and/or modify it under
738fd1498Szrj the terms of the GNU General Public License as published by the Free
838fd1498Szrj Software Foundation; either version 3, or (at your option) any later
938fd1498Szrj version.
1038fd1498Szrj
1138fd1498Szrj GCC is distributed in the hope that it will be useful, but WITHOUT ANY
1238fd1498Szrj WARRANTY; without even the implied warranty of MERCHANTABILITY or
1338fd1498Szrj FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
1438fd1498Szrj for more details.
1538fd1498Szrj
1638fd1498Szrj You should have received a copy of the GNU General Public License
1738fd1498Szrj along with GCC; see the file COPYING3. If not see
1838fd1498Szrj <http://www.gnu.org/licenses/>. */
1938fd1498Szrj
2038fd1498Szrj /* This file contains low level functions to manipulate the CFG and analyze it
2138fd1498Szrj that are aware of the RTL intermediate language.
2238fd1498Szrj
2338fd1498Szrj Available functionality:
2438fd1498Szrj - Basic CFG/RTL manipulation API documented in cfghooks.h
2538fd1498Szrj - CFG-aware instruction chain manipulation
2638fd1498Szrj delete_insn, delete_insn_chain
2738fd1498Szrj - Edge splitting and committing to edges
2838fd1498Szrj insert_insn_on_edge, commit_edge_insertions
2938fd1498Szrj - CFG updating after insn simplification
3038fd1498Szrj purge_dead_edges, purge_all_dead_edges
3138fd1498Szrj - CFG fixing after coarse manipulation
3238fd1498Szrj fixup_abnormal_edges
3338fd1498Szrj
3438fd1498Szrj Functions not supposed for generic use:
3538fd1498Szrj - Infrastructure to determine quickly basic block for insn
3638fd1498Szrj compute_bb_for_insn, update_bb_for_insn, set_block_for_insn,
3738fd1498Szrj - Edge redirection with updating and optimizing of insn chain
3838fd1498Szrj block_label, tidy_fallthru_edge, force_nonfallthru */
3938fd1498Szrj
4038fd1498Szrj #include "config.h"
4138fd1498Szrj #include "system.h"
4238fd1498Szrj #include "coretypes.h"
4338fd1498Szrj #include "backend.h"
4438fd1498Szrj #include "target.h"
4538fd1498Szrj #include "rtl.h"
4638fd1498Szrj #include "tree.h"
4738fd1498Szrj #include "cfghooks.h"
4838fd1498Szrj #include "df.h"
4938fd1498Szrj #include "insn-config.h"
5038fd1498Szrj #include "memmodel.h"
5138fd1498Szrj #include "emit-rtl.h"
5238fd1498Szrj #include "cfgrtl.h"
5338fd1498Szrj #include "cfganal.h"
5438fd1498Szrj #include "cfgbuild.h"
5538fd1498Szrj #include "cfgcleanup.h"
5638fd1498Szrj #include "bb-reorder.h"
5738fd1498Szrj #include "rtl-error.h"
5838fd1498Szrj #include "insn-attr.h"
5938fd1498Szrj #include "dojump.h"
6038fd1498Szrj #include "expr.h"
6138fd1498Szrj #include "cfgloop.h"
6238fd1498Szrj #include "tree-pass.h"
6338fd1498Szrj #include "print-rtl.h"
6438fd1498Szrj
6538fd1498Szrj /* Holds the interesting leading and trailing notes for the function.
6638fd1498Szrj Only applicable if the CFG is in cfglayout mode. */
6738fd1498Szrj static GTY(()) rtx_insn *cfg_layout_function_footer;
6838fd1498Szrj static GTY(()) rtx_insn *cfg_layout_function_header;
6938fd1498Szrj
7038fd1498Szrj static rtx_insn *skip_insns_after_block (basic_block);
7138fd1498Szrj static void record_effective_endpoints (void);
7238fd1498Szrj static void fixup_reorder_chain (void);
7338fd1498Szrj
7438fd1498Szrj void verify_insn_chain (void);
7538fd1498Szrj static void fixup_fallthru_exit_predecessor (void);
7638fd1498Szrj static int can_delete_note_p (const rtx_note *);
7738fd1498Szrj static int can_delete_label_p (const rtx_code_label *);
7838fd1498Szrj static basic_block rtl_split_edge (edge);
7938fd1498Szrj static bool rtl_move_block_after (basic_block, basic_block);
8038fd1498Szrj static int rtl_verify_flow_info (void);
8138fd1498Szrj static basic_block cfg_layout_split_block (basic_block, void *);
8238fd1498Szrj static edge cfg_layout_redirect_edge_and_branch (edge, basic_block);
8338fd1498Szrj static basic_block cfg_layout_redirect_edge_and_branch_force (edge, basic_block);
8438fd1498Szrj static void cfg_layout_delete_block (basic_block);
8538fd1498Szrj static void rtl_delete_block (basic_block);
8638fd1498Szrj static basic_block rtl_redirect_edge_and_branch_force (edge, basic_block);
8738fd1498Szrj static edge rtl_redirect_edge_and_branch (edge, basic_block);
8838fd1498Szrj static basic_block rtl_split_block (basic_block, void *);
8938fd1498Szrj static void rtl_dump_bb (FILE *, basic_block, int, dump_flags_t);
9038fd1498Szrj static int rtl_verify_flow_info_1 (void);
9138fd1498Szrj static void rtl_make_forwarder_block (edge);
9238fd1498Szrj
9338fd1498Szrj /* Return true if NOTE is not one of the ones that must be kept paired,
9438fd1498Szrj so that we may simply delete it. */
9538fd1498Szrj
9638fd1498Szrj static int
can_delete_note_p(const rtx_note * note)9738fd1498Szrj can_delete_note_p (const rtx_note *note)
9838fd1498Szrj {
9938fd1498Szrj switch (NOTE_KIND (note))
10038fd1498Szrj {
10138fd1498Szrj case NOTE_INSN_DELETED:
10238fd1498Szrj case NOTE_INSN_BASIC_BLOCK:
10338fd1498Szrj case NOTE_INSN_EPILOGUE_BEG:
10438fd1498Szrj return true;
10538fd1498Szrj
10638fd1498Szrj default:
10738fd1498Szrj return false;
10838fd1498Szrj }
10938fd1498Szrj }
11038fd1498Szrj
11138fd1498Szrj /* True if a given label can be deleted. */
11238fd1498Szrj
11338fd1498Szrj static int
can_delete_label_p(const rtx_code_label * label)11438fd1498Szrj can_delete_label_p (const rtx_code_label *label)
11538fd1498Szrj {
11638fd1498Szrj return (!LABEL_PRESERVE_P (label)
11738fd1498Szrj /* User declared labels must be preserved. */
11838fd1498Szrj && LABEL_NAME (label) == 0
11938fd1498Szrj && !vec_safe_contains<rtx_insn *> (forced_labels,
12038fd1498Szrj const_cast<rtx_code_label *> (label)));
12138fd1498Szrj }
12238fd1498Szrj
/* Delete INSN by patching it out of the insn chain.  Labels that cannot
   be removed outright are converted to NOTE_INSN_DELETED_LABEL notes,
   and the use counts of any labels INSN references are decremented.  */

void
delete_insn (rtx_insn *insn)
{
  rtx note;
  bool really_delete = true;

  if (LABEL_P (insn))
    {
      /* Some labels can't be directly removed from the INSN chain, as they
	 might be referenced via variables, constant pool etc.
	 Convert them to the special NOTE_INSN_DELETED_LABEL note.  */
      if (! can_delete_label_p (as_a <rtx_code_label *> (insn)))
	{
	  const char *name = LABEL_NAME (insn);
	  basic_block bb = BLOCK_FOR_INSN (insn);
	  rtx_insn *bb_note = NEXT_INSN (insn);

	  really_delete = false;
	  PUT_CODE (insn, NOTE);
	  NOTE_KIND (insn) = NOTE_INSN_DELETED_LABEL;
	  NOTE_DELETED_LABEL_NAME (insn) = name;

	  /* If the note following the label starts a basic block, and the
	     label is a member of the same basic block, interchange the two,
	     so the deleted-label note ends up outside the block proper.  */
	  if (bb_note != NULL_RTX
	      && NOTE_INSN_BASIC_BLOCK_P (bb_note)
	      && bb != NULL
	      && bb == BLOCK_FOR_INSN (bb_note))
	    {
	      reorder_insns_nobb (insn, insn, bb_note);
	      BB_HEAD (bb) = bb_note;
	      if (BB_END (bb) == bb_note)
		BB_END (bb) = insn;
	    }
	}

      /* The label may also be recorded as a nonlocal goto handler
	 target; drop it from that list too.  */
      remove_node_from_insn_list (insn, &nonlocal_goto_handler_labels);
    }

  if (really_delete)
    {
      /* If this insn has already been deleted, something is very wrong.  */
      gcc_assert (!insn->deleted ());
      if (INSN_P (insn))
	df_insn_delete (insn);
      remove_insn (insn);
      insn->set_deleted ();
    }

  /* If deleting a jump, decrement the use count of the label.  Deleting
     the label itself should happen in the normal course of block merging.  */
  if (JUMP_P (insn))
    {
      if (JUMP_LABEL (insn)
	  && LABEL_P (JUMP_LABEL (insn)))
	LABEL_NUSES (JUMP_LABEL (insn))--;

      /* If there are more targets, remove them too.  */
      while ((note
	      = find_reg_note (insn, REG_LABEL_TARGET, NULL_RTX)) != NULL_RTX
	     && LABEL_P (XEXP (note, 0)))
	{
	  LABEL_NUSES (XEXP (note, 0))--;
	  remove_note (insn, note);
	}
    }

  /* Also if deleting any insn that references a label as an operand.  */
  while ((note = find_reg_note (insn, REG_LABEL_OPERAND, NULL_RTX)) != NULL_RTX
	 && LABEL_P (XEXP (note, 0)))
    {
      LABEL_NUSES (XEXP (note, 0))--;
      remove_note (insn, note);
    }

  /* For a jump table, drop the use count of every label in the vector.  */
  if (rtx_jump_table_data *table = dyn_cast <rtx_jump_table_data *> (insn))
    {
      rtvec vec = table->get_labels ();
      int len = GET_NUM_ELEM (vec);
      int i;

      for (i = 0; i < len; i++)
	{
	  rtx label = XEXP (RTVEC_ELT (vec, i), 0);

	  /* When deleting code in bulk (e.g. removing many unreachable
	     blocks) we can delete a label that's a target of the vector
	     before deleting the vector itself.  */
	  if (!NOTE_P (label))
	    LABEL_NUSES (label)--;
	}
    }
}
21838fd1498Szrj
21938fd1498Szrj /* Like delete_insn but also purge dead edges from BB.
22038fd1498Szrj Return true if any edges are eliminated. */
22138fd1498Szrj
22238fd1498Szrj bool
delete_insn_and_edges(rtx_insn * insn)22338fd1498Szrj delete_insn_and_edges (rtx_insn *insn)
22438fd1498Szrj {
22538fd1498Szrj bool purge = false;
22638fd1498Szrj
22738fd1498Szrj if (INSN_P (insn)
22838fd1498Szrj && BLOCK_FOR_INSN (insn)
22938fd1498Szrj && BB_END (BLOCK_FOR_INSN (insn)) == insn)
23038fd1498Szrj purge = true;
23138fd1498Szrj delete_insn (insn);
23238fd1498Szrj if (purge)
23338fd1498Szrj return purge_dead_edges (BLOCK_FOR_INSN (insn));
23438fd1498Szrj return false;
23538fd1498Szrj }
23638fd1498Szrj
23738fd1498Szrj /* Unlink a chain of insns between START and FINISH, leaving notes
23838fd1498Szrj that must be paired. If CLEAR_BB is true, we set bb field for
23938fd1498Szrj insns that cannot be removed to NULL. */
24038fd1498Szrj
24138fd1498Szrj void
delete_insn_chain(rtx start,rtx_insn * finish,bool clear_bb)24238fd1498Szrj delete_insn_chain (rtx start, rtx_insn *finish, bool clear_bb)
24338fd1498Szrj {
24438fd1498Szrj /* Unchain the insns one by one. It would be quicker to delete all of these
24538fd1498Szrj with a single unchaining, rather than one at a time, but we need to keep
24638fd1498Szrj the NOTE's. */
24738fd1498Szrj rtx_insn *current = finish;
24838fd1498Szrj while (1)
24938fd1498Szrj {
25038fd1498Szrj rtx_insn *prev = PREV_INSN (current);
25138fd1498Szrj if (NOTE_P (current) && !can_delete_note_p (as_a <rtx_note *> (current)))
25238fd1498Szrj ;
25338fd1498Szrj else
25438fd1498Szrj delete_insn (current);
25538fd1498Szrj
25638fd1498Szrj if (clear_bb && !current->deleted ())
25738fd1498Szrj set_block_for_insn (current, NULL);
25838fd1498Szrj
25938fd1498Szrj if (current == start)
26038fd1498Szrj break;
26138fd1498Szrj current = prev;
26238fd1498Szrj }
26338fd1498Szrj }
26438fd1498Szrj
/* Create a new basic block consisting of the instructions between HEAD and END
   inclusive.  This function is designed to allow fast BB construction - reuses
   the note and basic block struct in BB_NOTE, if any and do not grow
   BASIC_BLOCK chain and should be used directly only by CFG construction code.
   END can be NULL to create new empty basic block before HEAD.  Both END
   and HEAD can be NULL to create basic block at the end of INSN chain.
   AFTER is the basic block we should be put after.  */

basic_block
create_basic_block_structure (rtx_insn *head, rtx_insn *end, rtx_note *bb_note,
			      basic_block after)
{
  basic_block bb;

  /* A reusable note exists when it carries a block struct whose aux
     field is clear (aux marks notes already claimed; see tagging at
     the bottom of this function).  */
  if (bb_note
      && (bb = NOTE_BASIC_BLOCK (bb_note)) != NULL
      && bb->aux == NULL)
    {
      /* If we found an existing note, thread it back onto the chain.  */

      rtx_insn *after;  /* NB: shadows the basic_block parameter AFTER.  */

      /* The bb note must follow the block's label, if any; otherwise it
	 becomes the new head.  */
      if (LABEL_P (head))
	after = head;
      else
	{
	  after = PREV_INSN (head);
	  head = bb_note;
	}

      /* Move the note into place unless it is already there.  */
      if (after != bb_note && NEXT_INSN (after) != bb_note)
	reorder_insns_nobb (bb_note, bb_note, after);
    }
  else
    {
      /* Otherwise we must create a note and a basic block structure.  */

      bb = alloc_block ();

      init_rtl_bb_info (bb);
      if (!head && !end)
	/* Empty block at the end of the insn chain.  */
	head = end = bb_note
	  = emit_note_after (NOTE_INSN_BASIC_BLOCK, get_last_insn ());
      else if (LABEL_P (head) && end)
	{
	  /* The note goes right after the label.  */
	  bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK, head);
	  if (head == end)
	    end = bb_note;
	}
      else
	{
	  /* No label: the note becomes the block head.  */
	  bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK, head);
	  head = bb_note;
	  if (!end)
	    end = head;
	}

      NOTE_BASIC_BLOCK (bb_note) = bb;
    }

  /* Always include the bb note in the block.  */
  if (NEXT_INSN (end) == bb_note)
    end = bb_note;

  BB_HEAD (bb) = head;
  BB_END (bb) = end;
  bb->index = last_basic_block_for_fn (cfun)++;
  bb->flags = BB_NEW | BB_RTL;
  link_block (bb, after);
  SET_BASIC_BLOCK_FOR_FN (cfun, bb->index, bb);
  df_bb_refs_record (bb->index, false);
  update_bb_for_insn (bb);
  BB_SET_PARTITION (bb, BB_UNPARTITIONED);

  /* Tag the block so that we know it has been used when considering
     other basic block notes.  */
  bb->aux = bb;

  return bb;
}
34538fd1498Szrj
34638fd1498Szrj /* Create new basic block consisting of instructions in between HEAD and END
34738fd1498Szrj and place it to the BB chain after block AFTER. END can be NULL to
34838fd1498Szrj create a new empty basic block before HEAD. Both END and HEAD can be
34938fd1498Szrj NULL to create basic block at the end of INSN chain. */
35038fd1498Szrj
35138fd1498Szrj static basic_block
rtl_create_basic_block(void * headp,void * endp,basic_block after)35238fd1498Szrj rtl_create_basic_block (void *headp, void *endp, basic_block after)
35338fd1498Szrj {
35438fd1498Szrj rtx_insn *head = (rtx_insn *) headp;
35538fd1498Szrj rtx_insn *end = (rtx_insn *) endp;
35638fd1498Szrj basic_block bb;
35738fd1498Szrj
35838fd1498Szrj /* Grow the basic block array if needed. */
35938fd1498Szrj if ((size_t) last_basic_block_for_fn (cfun)
36038fd1498Szrj >= basic_block_info_for_fn (cfun)->length ())
36138fd1498Szrj {
36238fd1498Szrj size_t new_size =
36338fd1498Szrj (last_basic_block_for_fn (cfun)
36438fd1498Szrj + (last_basic_block_for_fn (cfun) + 3) / 4);
36538fd1498Szrj vec_safe_grow_cleared (basic_block_info_for_fn (cfun), new_size);
36638fd1498Szrj }
36738fd1498Szrj
36838fd1498Szrj n_basic_blocks_for_fn (cfun)++;
36938fd1498Szrj
37038fd1498Szrj bb = create_basic_block_structure (head, end, NULL, after);
37138fd1498Szrj bb->aux = NULL;
37238fd1498Szrj return bb;
37338fd1498Szrj }
37438fd1498Szrj
37538fd1498Szrj static basic_block
cfg_layout_create_basic_block(void * head,void * end,basic_block after)37638fd1498Szrj cfg_layout_create_basic_block (void *head, void *end, basic_block after)
37738fd1498Szrj {
37838fd1498Szrj basic_block newbb = rtl_create_basic_block (head, end, after);
37938fd1498Szrj
38038fd1498Szrj return newbb;
38138fd1498Szrj }
38238fd1498Szrj
38338fd1498Szrj /* Delete the insns in a (non-live) block. We physically delete every
38438fd1498Szrj non-deleted-note insn, and update the flow graph appropriately.
38538fd1498Szrj
38638fd1498Szrj Return nonzero if we deleted an exception handler. */
38738fd1498Szrj
38838fd1498Szrj /* ??? Preserving all such notes strikes me as wrong. It would be nice
38938fd1498Szrj to post-process the stream to remove empty blocks, loops, ranges, etc. */
39038fd1498Szrj
39138fd1498Szrj static void
rtl_delete_block(basic_block b)39238fd1498Szrj rtl_delete_block (basic_block b)
39338fd1498Szrj {
39438fd1498Szrj rtx_insn *insn, *end;
39538fd1498Szrj
39638fd1498Szrj /* If the head of this block is a CODE_LABEL, then it might be the
39738fd1498Szrj label for an exception handler which can't be reached. We need
39838fd1498Szrj to remove the label from the exception_handler_label list. */
39938fd1498Szrj insn = BB_HEAD (b);
40038fd1498Szrj
40138fd1498Szrj end = get_last_bb_insn (b);
40238fd1498Szrj
40338fd1498Szrj /* Selectively delete the entire chain. */
40438fd1498Szrj BB_HEAD (b) = NULL;
40538fd1498Szrj delete_insn_chain (insn, end, true);
40638fd1498Szrj
40738fd1498Szrj
40838fd1498Szrj if (dump_file)
40938fd1498Szrj fprintf (dump_file, "deleting block %d\n", b->index);
41038fd1498Szrj df_bb_delete (b->index);
41138fd1498Szrj }
41238fd1498Szrj
41338fd1498Szrj /* Records the basic block struct in BLOCK_FOR_INSN for every insn. */
41438fd1498Szrj
41538fd1498Szrj void
compute_bb_for_insn(void)41638fd1498Szrj compute_bb_for_insn (void)
41738fd1498Szrj {
41838fd1498Szrj basic_block bb;
41938fd1498Szrj
42038fd1498Szrj FOR_EACH_BB_FN (bb, cfun)
42138fd1498Szrj {
42238fd1498Szrj rtx_insn *end = BB_END (bb);
42338fd1498Szrj rtx_insn *insn;
42438fd1498Szrj
42538fd1498Szrj for (insn = BB_HEAD (bb); ; insn = NEXT_INSN (insn))
42638fd1498Szrj {
42738fd1498Szrj BLOCK_FOR_INSN (insn) = bb;
42838fd1498Szrj if (insn == end)
42938fd1498Szrj break;
43038fd1498Szrj }
43138fd1498Szrj }
43238fd1498Szrj }
43338fd1498Szrj
43438fd1498Szrj /* Release the basic_block_for_insn array. */
43538fd1498Szrj
43638fd1498Szrj unsigned int
free_bb_for_insn(void)43738fd1498Szrj free_bb_for_insn (void)
43838fd1498Szrj {
43938fd1498Szrj rtx_insn *insn;
44038fd1498Szrj for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
44138fd1498Szrj if (!BARRIER_P (insn))
44238fd1498Szrj BLOCK_FOR_INSN (insn) = NULL;
44338fd1498Szrj return 0;
44438fd1498Szrj }
44538fd1498Szrj
namespace {

/* Descriptor for the *free_cfg pass, which tears down the CFG-related
   per-insn state once the CFG is no longer maintained.  */
const pass_data pass_data_free_cfg =
{
  RTL_PASS, /* type */
  "*free_cfg", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  PROP_cfg, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_free_cfg : public rtl_opt_pass
{
public:
  pass_free_cfg (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_free_cfg, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *);

}; // class pass_free_cfg

/* Main entry point: run DF notes if delayed branches will need them,
   record the section boundary, then drop BLOCK_FOR_INSN.  */

unsigned int
pass_free_cfg::execute (function *)
{
  /* The resource.c machinery uses DF but the CFG isn't guaranteed to be
     valid at that point so it would be too late to call df_analyze.  */
  if (DELAY_SLOTS && optimize > 0 && flag_delayed_branch)
    {
      df_note_add_problem ();
      df_analyze ();
    }

  /* Preserve the hot/cold boundary before block structure goes away.  */
  if (crtl->has_bb_partition)
    insert_section_boundary_note ();

  free_bb_for_insn ();
  return 0;
}

} // anon namespace
49238fd1498Szrj
49338fd1498Szrj rtl_opt_pass *
make_pass_free_cfg(gcc::context * ctxt)49438fd1498Szrj make_pass_free_cfg (gcc::context *ctxt)
49538fd1498Szrj {
49638fd1498Szrj return new pass_free_cfg (ctxt);
49738fd1498Szrj }
49838fd1498Szrj
49938fd1498Szrj /* Return RTX to emit after when we want to emit code on the entry of function. */
50038fd1498Szrj rtx_insn *
entry_of_function(void)50138fd1498Szrj entry_of_function (void)
50238fd1498Szrj {
50338fd1498Szrj return (n_basic_blocks_for_fn (cfun) > NUM_FIXED_BLOCKS ?
50438fd1498Szrj BB_HEAD (ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb) : get_insns ());
50538fd1498Szrj }
50638fd1498Szrj
50738fd1498Szrj /* Emit INSN at the entry point of the function, ensuring that it is only
50838fd1498Szrj executed once per function. */
50938fd1498Szrj void
emit_insn_at_entry(rtx insn)51038fd1498Szrj emit_insn_at_entry (rtx insn)
51138fd1498Szrj {
51238fd1498Szrj edge_iterator ei = ei_start (ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs);
51338fd1498Szrj edge e = ei_safe_edge (ei);
51438fd1498Szrj gcc_assert (e->flags & EDGE_FALLTHRU);
51538fd1498Szrj
51638fd1498Szrj insert_insn_on_edge (insn, e);
51738fd1498Szrj commit_edge_insertions ();
51838fd1498Szrj }
51938fd1498Szrj
52038fd1498Szrj /* Update BLOCK_FOR_INSN of insns between BEGIN and END
52138fd1498Szrj (or BARRIER if found) and notify df of the bb change.
52238fd1498Szrj The insn chain range is inclusive
52338fd1498Szrj (i.e. both BEGIN and END will be updated. */
52438fd1498Szrj
52538fd1498Szrj static void
update_bb_for_insn_chain(rtx_insn * begin,rtx_insn * end,basic_block bb)52638fd1498Szrj update_bb_for_insn_chain (rtx_insn *begin, rtx_insn *end, basic_block bb)
52738fd1498Szrj {
52838fd1498Szrj rtx_insn *insn;
52938fd1498Szrj
53038fd1498Szrj end = NEXT_INSN (end);
53138fd1498Szrj for (insn = begin; insn != end; insn = NEXT_INSN (insn))
53238fd1498Szrj if (!BARRIER_P (insn))
53338fd1498Szrj df_insn_change_bb (insn, bb);
53438fd1498Szrj }
53538fd1498Szrj
/* Update BLOCK_FOR_INSN of every insn in BB to point to BB,
   and notify df of the change.  */

void
update_bb_for_insn (basic_block bb)
{
  update_bb_for_insn_chain (BB_HEAD (bb), BB_END (bb), bb);
}
54438fd1498Szrj
54538fd1498Szrj
54638fd1498Szrj /* Like active_insn_p, except keep the return value clobber around
54738fd1498Szrj even after reload. */
54838fd1498Szrj
54938fd1498Szrj static bool
flow_active_insn_p(const rtx_insn * insn)55038fd1498Szrj flow_active_insn_p (const rtx_insn *insn)
55138fd1498Szrj {
55238fd1498Szrj if (active_insn_p (insn))
55338fd1498Szrj return true;
55438fd1498Szrj
55538fd1498Szrj /* A clobber of the function return value exists for buggy
55638fd1498Szrj programs that fail to return a value. Its effect is to
55738fd1498Szrj keep the return value from being live across the entire
55838fd1498Szrj function. If we allow it to be skipped, we introduce the
55938fd1498Szrj possibility for register lifetime confusion. */
56038fd1498Szrj if (GET_CODE (PATTERN (insn)) == CLOBBER
56138fd1498Szrj && REG_P (XEXP (PATTERN (insn), 0))
56238fd1498Szrj && REG_FUNCTION_VALUE_P (XEXP (PATTERN (insn), 0)))
56338fd1498Szrj return true;
56438fd1498Szrj
56538fd1498Szrj return false;
56638fd1498Szrj }
56738fd1498Szrj
56838fd1498Szrj /* Return true if the block has no effect and only forwards control flow to
56938fd1498Szrj its single destination. */
57038fd1498Szrj
57138fd1498Szrj bool
contains_no_active_insn_p(const_basic_block bb)57238fd1498Szrj contains_no_active_insn_p (const_basic_block bb)
57338fd1498Szrj {
57438fd1498Szrj rtx_insn *insn;
57538fd1498Szrj
57638fd1498Szrj if (bb == EXIT_BLOCK_PTR_FOR_FN (cfun)
57738fd1498Szrj || bb == ENTRY_BLOCK_PTR_FOR_FN (cfun)
57838fd1498Szrj || !single_succ_p (bb)
57938fd1498Szrj || (single_succ_edge (bb)->flags & EDGE_FAKE) != 0)
58038fd1498Szrj return false;
58138fd1498Szrj
58238fd1498Szrj for (insn = BB_HEAD (bb); insn != BB_END (bb); insn = NEXT_INSN (insn))
58338fd1498Szrj if (INSN_P (insn) && flow_active_insn_p (insn))
58438fd1498Szrj return false;
58538fd1498Szrj
58638fd1498Szrj return (!INSN_P (insn)
58738fd1498Szrj || (JUMP_P (insn) && simplejump_p (insn))
58838fd1498Szrj || !flow_active_insn_p (insn));
58938fd1498Szrj }
59038fd1498Szrj
59138fd1498Szrj /* Likewise, but protect loop latches, headers and preheaders. */
59238fd1498Szrj /* FIXME: Make this a cfg hook. */
59338fd1498Szrj
59438fd1498Szrj bool
forwarder_block_p(const_basic_block bb)59538fd1498Szrj forwarder_block_p (const_basic_block bb)
59638fd1498Szrj {
59738fd1498Szrj if (!contains_no_active_insn_p (bb))
59838fd1498Szrj return false;
59938fd1498Szrj
60038fd1498Szrj /* Protect loop latches, headers and preheaders. */
60138fd1498Szrj if (current_loops)
60238fd1498Szrj {
60338fd1498Szrj basic_block dest;
60438fd1498Szrj if (bb->loop_father->header == bb)
60538fd1498Szrj return false;
60638fd1498Szrj dest = EDGE_SUCC (bb, 0)->dest;
60738fd1498Szrj if (dest->loop_father->header == dest)
60838fd1498Szrj return false;
60938fd1498Szrj }
61038fd1498Szrj
61138fd1498Szrj return true;
61238fd1498Szrj }
61338fd1498Szrj
61438fd1498Szrj /* Return nonzero if we can reach target from src by falling through. */
61538fd1498Szrj /* FIXME: Make this a cfg hook, the result is only valid in cfgrtl mode. */
61638fd1498Szrj
61738fd1498Szrj bool
can_fallthru(basic_block src,basic_block target)61838fd1498Szrj can_fallthru (basic_block src, basic_block target)
61938fd1498Szrj {
62038fd1498Szrj rtx_insn *insn = BB_END (src);
62138fd1498Szrj rtx_insn *insn2;
62238fd1498Szrj edge e;
62338fd1498Szrj edge_iterator ei;
62438fd1498Szrj
62538fd1498Szrj if (target == EXIT_BLOCK_PTR_FOR_FN (cfun))
62638fd1498Szrj return true;
62738fd1498Szrj if (src->next_bb != target)
62838fd1498Szrj return false;
62938fd1498Szrj
63038fd1498Szrj /* ??? Later we may add code to move jump tables offline. */
63138fd1498Szrj if (tablejump_p (insn, NULL, NULL))
63238fd1498Szrj return false;
63338fd1498Szrj
63438fd1498Szrj FOR_EACH_EDGE (e, ei, src->succs)
63538fd1498Szrj if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun)
63638fd1498Szrj && e->flags & EDGE_FALLTHRU)
63738fd1498Szrj return false;
63838fd1498Szrj
63938fd1498Szrj insn2 = BB_HEAD (target);
64038fd1498Szrj if (!active_insn_p (insn2))
64138fd1498Szrj insn2 = next_active_insn (insn2);
64238fd1498Szrj
64338fd1498Szrj return next_active_insn (insn) == insn2;
64438fd1498Szrj }
64538fd1498Szrj
64638fd1498Szrj /* Return nonzero if we could reach target from src by falling through,
64738fd1498Szrj if the target was made adjacent. If we already have a fall-through
64838fd1498Szrj edge to the exit block, we can't do that. */
64938fd1498Szrj static bool
could_fall_through(basic_block src,basic_block target)65038fd1498Szrj could_fall_through (basic_block src, basic_block target)
65138fd1498Szrj {
65238fd1498Szrj edge e;
65338fd1498Szrj edge_iterator ei;
65438fd1498Szrj
65538fd1498Szrj if (target == EXIT_BLOCK_PTR_FOR_FN (cfun))
65638fd1498Szrj return true;
65738fd1498Szrj FOR_EACH_EDGE (e, ei, src->succs)
65838fd1498Szrj if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun)
65938fd1498Szrj && e->flags & EDGE_FALLTHRU)
66038fd1498Szrj return 0;
66138fd1498Szrj return true;
66238fd1498Szrj }
66338fd1498Szrj
66438fd1498Szrj /* Return the NOTE_INSN_BASIC_BLOCK of BB. */
66538fd1498Szrj rtx_note *
bb_note(basic_block bb)66638fd1498Szrj bb_note (basic_block bb)
66738fd1498Szrj {
66838fd1498Szrj rtx_insn *note;
66938fd1498Szrj
67038fd1498Szrj note = BB_HEAD (bb);
67138fd1498Szrj if (LABEL_P (note))
67238fd1498Szrj note = NEXT_INSN (note);
67338fd1498Szrj
67438fd1498Szrj gcc_assert (NOTE_INSN_BASIC_BLOCK_P (note));
67538fd1498Szrj return as_a <rtx_note *> (note);
67638fd1498Szrj }
67738fd1498Szrj
67838fd1498Szrj /* Return the INSN immediately following the NOTE_INSN_BASIC_BLOCK
67938fd1498Szrj note associated with the BLOCK. */
68038fd1498Szrj
68138fd1498Szrj static rtx_insn *
first_insn_after_basic_block_note(basic_block block)68238fd1498Szrj first_insn_after_basic_block_note (basic_block block)
68338fd1498Szrj {
68438fd1498Szrj rtx_insn *insn;
68538fd1498Szrj
68638fd1498Szrj /* Get the first instruction in the block. */
68738fd1498Szrj insn = BB_HEAD (block);
68838fd1498Szrj
68938fd1498Szrj if (insn == NULL_RTX)
69038fd1498Szrj return NULL;
69138fd1498Szrj if (LABEL_P (insn))
69238fd1498Szrj insn = NEXT_INSN (insn);
69338fd1498Szrj gcc_assert (NOTE_INSN_BASIC_BLOCK_P (insn));
69438fd1498Szrj
69538fd1498Szrj return NEXT_INSN (insn);
69638fd1498Szrj }
69738fd1498Szrj
/* Creates a new basic block just after basic block BB by splitting
   everything after specified instruction INSNP.  If INSNP is NULL, the
   split point defaults to the insn just after the bb note (i.e. an
   empty BB keeps its header, the rest moves to the new block).  */

static basic_block
rtl_split_block (basic_block bb, void *insnp)
{
  basic_block new_bb;
  rtx_insn *insn = (rtx_insn *) insnp;
  edge e;
  edge_iterator ei;

  if (!insn)
    {
      insn = first_insn_after_basic_block_note (bb);

      if (insn)
	{
	  rtx_insn *next = insn;

	  /* Split just before the first real insn.  */
	  insn = PREV_INSN (insn);

	  /* If the block contains only debug insns, insn would have
	     been NULL in a non-debug compilation, and then we'd end
	     up emitting a DELETED note.  For -fcompare-debug
	     stability, emit the note too.  */
	  if (insn != BB_END (bb)
	      && DEBUG_INSN_P (next)
	      && DEBUG_INSN_P (BB_END (bb)))
	    {
	      while (next != BB_END (bb) && DEBUG_INSN_P (next))
		next = NEXT_INSN (next);

	      if (next == BB_END (bb))
		emit_note_after (NOTE_INSN_DELETED, next);
	    }
	}
      else
	/* The block has no insns at all; split at the chain's end.  */
	insn = get_last_insn ();
    }

  /* We probably should check type of the insn so that we do not create
     inconsistent cfg.  It is checked in verify_flow_info anyway, so do not
     bother.  */
  if (insn == BB_END (bb))
    emit_note_after (NOTE_INSN_DELETED, insn);

  /* Create the new basic block.  */
  new_bb = create_basic_block (NEXT_INSN (insn), BB_END (bb), bb);
  BB_COPY_PARTITION (new_bb, bb);
  BB_END (bb) = insn;

  /* Redirect the outgoing edges: the successor vector moves wholesale
     to the new block, whose edges must be re-pointed at it.  */
  new_bb->succs = bb->succs;
  bb->succs = NULL;
  FOR_EACH_EDGE (e, ei, new_bb->succs)
    e->src = new_bb;

  /* The new block starts off being dirty.  */
  df_set_bb_dirty (bb);
  return new_bb;
}
75938fd1498Szrj
76038fd1498Szrj /* Return true if the single edge between blocks A and B is the only place
76138fd1498Szrj in RTL which holds some unique locus. */
76238fd1498Szrj
76338fd1498Szrj static bool
unique_locus_on_edge_between_p(basic_block a,basic_block b)76438fd1498Szrj unique_locus_on_edge_between_p (basic_block a, basic_block b)
76538fd1498Szrj {
76638fd1498Szrj const location_t goto_locus = EDGE_SUCC (a, 0)->goto_locus;
76738fd1498Szrj rtx_insn *insn, *end;
76838fd1498Szrj
76938fd1498Szrj if (LOCATION_LOCUS (goto_locus) == UNKNOWN_LOCATION)
77038fd1498Szrj return false;
77138fd1498Szrj
77238fd1498Szrj /* First scan block A backward. */
77338fd1498Szrj insn = BB_END (a);
77438fd1498Szrj end = PREV_INSN (BB_HEAD (a));
77538fd1498Szrj while (insn != end && (!NONDEBUG_INSN_P (insn) || !INSN_HAS_LOCATION (insn)))
77638fd1498Szrj insn = PREV_INSN (insn);
77738fd1498Szrj
77838fd1498Szrj if (insn != end && INSN_LOCATION (insn) == goto_locus)
77938fd1498Szrj return false;
78038fd1498Szrj
78138fd1498Szrj /* Then scan block B forward. */
78238fd1498Szrj insn = BB_HEAD (b);
78338fd1498Szrj if (insn)
78438fd1498Szrj {
78538fd1498Szrj end = NEXT_INSN (BB_END (b));
78638fd1498Szrj while (insn != end && !NONDEBUG_INSN_P (insn))
78738fd1498Szrj insn = NEXT_INSN (insn);
78838fd1498Szrj
78938fd1498Szrj if (insn != end && INSN_HAS_LOCATION (insn)
79038fd1498Szrj && INSN_LOCATION (insn) == goto_locus)
79138fd1498Szrj return false;
79238fd1498Szrj }
79338fd1498Szrj
79438fd1498Szrj return true;
79538fd1498Szrj }
79638fd1498Szrj
79738fd1498Szrj /* If the single edge between blocks A and B is the only place in RTL which
79838fd1498Szrj holds some unique locus, emit a nop with that locus between the blocks. */
79938fd1498Szrj
80038fd1498Szrj static void
emit_nop_for_unique_locus_between(basic_block a,basic_block b)80138fd1498Szrj emit_nop_for_unique_locus_between (basic_block a, basic_block b)
80238fd1498Szrj {
80338fd1498Szrj if (!unique_locus_on_edge_between_p (a, b))
80438fd1498Szrj return;
80538fd1498Szrj
80638fd1498Szrj BB_END (a) = emit_insn_after_noloc (gen_nop (), BB_END (a), a);
80738fd1498Szrj INSN_LOCATION (BB_END (a)) = EDGE_SUCC (a, 0)->goto_locus;
80838fd1498Szrj }
80938fd1498Szrj
/* Blocks A and B are to be merged into a single block A.  The insns
   are already contiguous.  Deletes B's label and basic-block note, any
   jump at the end of A, and reassociates B's insns (including trailing
   debug insns) with A.  */

static void
rtl_merge_blocks (basic_block a, basic_block b)
{
  rtx_insn *b_head = BB_HEAD (b), *b_end = BB_END (b), *a_end = BB_END (a);
  rtx_insn *del_first = NULL, *del_last = NULL;
  rtx_insn *b_debug_start = b_end, *b_debug_end = b_end;
  bool forwarder_p = (b->flags & BB_FORWARDER_BLOCK) != 0;
  int b_empty = 0;

  if (dump_file)
    fprintf (dump_file, "Merging block %d into block %d...\n", b->index,
	     a->index);

  /* Step back over trailing debug insns in B: afterwards b_end is the
     last nondebug insn and [b_debug_start, b_debug_end] delimits the
     debug tail (empty when b_debug_start == b_end originally).  */
  while (DEBUG_INSN_P (b_end))
    b_end = PREV_INSN (b_debug_start = b_end);

  /* If there was a CODE_LABEL beginning B, delete it.  */
  if (LABEL_P (b_head))
    {
      /* Detect basic blocks with nothing but a label.  This can happen
	 in particular at the end of a function.  */
      if (b_head == b_end)
	b_empty = 1;

      del_first = del_last = b_head;
      b_head = NEXT_INSN (b_head);
    }

  /* Delete the basic block note and handle blocks containing just that
     note.  */
  if (NOTE_INSN_BASIC_BLOCK_P (b_head))
    {
      if (b_head == b_end)
	b_empty = 1;
      if (! del_last)
	del_first = b_head;

      del_last = b_head;
      b_head = NEXT_INSN (b_head);
    }

  /* If there was a jump out of A, delete it.  */
  if (JUMP_P (a_end))
    {
      rtx_insn *prev;

      /* Find the insn just before the jump, skipping notes other than
	 the basic block note (but never walking past BB_HEAD (a)).  */
      for (prev = PREV_INSN (a_end); ; prev = PREV_INSN (prev))
	if (!NOTE_P (prev)
	    || NOTE_INSN_BASIC_BLOCK_P (prev)
	    || prev == BB_HEAD (a))
	  break;

      del_first = a_end;

      /* If this was a conditional jump, we need to also delete
	 the insn that set cc0.  */
      if (HAVE_cc0 && only_sets_cc0_p (prev))
	{
	  rtx_insn *tmp = prev;

	  prev = prev_nonnote_insn (prev);
	  if (!prev)
	    prev = BB_HEAD (a);
	  del_first = tmp;
	}

      a_end = PREV_INSN (del_first);
    }
  else if (BARRIER_P (NEXT_INSN (a_end)))
    del_first = NEXT_INSN (a_end);

  /* Delete everything marked above as well as crap that might be
     hanging out between the two blocks.  */
  BB_END (a) = a_end;
  BB_HEAD (b) = b_empty ? NULL : b_head;
  delete_insn_chain (del_first, del_last, true);

  /* When not optimizing and the edge is the only place in RTL which holds
     some unique locus, emit a nop with that locus in between.  */
  if (!optimize)
    {
      emit_nop_for_unique_locus_between (a, b);
      a_end = BB_END (a);
    }

  /* Reassociate the insns of B with A.  */
  if (!b_empty)
    {
      update_bb_for_insn_chain (a_end, b_debug_end, a);

      BB_END (a) = b_debug_end;
      BB_HEAD (b) = NULL;
    }
  else if (b_end != b_debug_end)
    {
      /* Move any deleted labels and other notes between the end of A
	 and the debug insns that make up B after the debug insns,
	 bringing the debug insns into A while keeping the notes after
	 the end of A.  */
      if (NEXT_INSN (a_end) != b_debug_start)
	reorder_insns_nobb (NEXT_INSN (a_end), PREV_INSN (b_debug_start),
			    b_debug_end);
      update_bb_for_insn_chain (b_debug_start, b_debug_end, a);
      BB_END (a) = b_debug_end;
    }

  df_bb_delete (b->index);

  /* If B was a forwarder block, propagate the locus on the edge.  */
  if (forwarder_p
      && LOCATION_LOCUS (EDGE_SUCC (b, 0)->goto_locus) == UNKNOWN_LOCATION)
    EDGE_SUCC (b, 0)->goto_locus = EDGE_SUCC (a, 0)->goto_locus;

  if (dump_file)
    fprintf (dump_file, "Merged blocks %d and %d.\n", a->index, b->index);
}
92938fd1498Szrj
93038fd1498Szrj
93138fd1498Szrj /* Return true when block A and B can be merged. */
93238fd1498Szrj
93338fd1498Szrj static bool
rtl_can_merge_blocks(basic_block a,basic_block b)93438fd1498Szrj rtl_can_merge_blocks (basic_block a, basic_block b)
93538fd1498Szrj {
93638fd1498Szrj /* If we are partitioning hot/cold basic blocks, we don't want to
93738fd1498Szrj mess up unconditional or indirect jumps that cross between hot
93838fd1498Szrj and cold sections.
93938fd1498Szrj
94038fd1498Szrj Basic block partitioning may result in some jumps that appear to
94138fd1498Szrj be optimizable (or blocks that appear to be mergeable), but which really
94238fd1498Szrj must be left untouched (they are required to make it safely across
94338fd1498Szrj partition boundaries). See the comments at the top of
94438fd1498Szrj bb-reorder.c:partition_hot_cold_basic_blocks for complete details. */
94538fd1498Szrj
94638fd1498Szrj if (BB_PARTITION (a) != BB_PARTITION (b))
94738fd1498Szrj return false;
94838fd1498Szrj
94938fd1498Szrj /* Protect the loop latches. */
95038fd1498Szrj if (current_loops && b->loop_father->latch == b)
95138fd1498Szrj return false;
95238fd1498Szrj
95338fd1498Szrj /* There must be exactly one edge in between the blocks. */
95438fd1498Szrj return (single_succ_p (a)
95538fd1498Szrj && single_succ (a) == b
95638fd1498Szrj && single_pred_p (b)
95738fd1498Szrj && a != b
95838fd1498Szrj /* Must be simple edge. */
95938fd1498Szrj && !(single_succ_edge (a)->flags & EDGE_COMPLEX)
96038fd1498Szrj && a->next_bb == b
96138fd1498Szrj && a != ENTRY_BLOCK_PTR_FOR_FN (cfun)
96238fd1498Szrj && b != EXIT_BLOCK_PTR_FOR_FN (cfun)
96338fd1498Szrj /* If the jump insn has side effects,
96438fd1498Szrj we can't kill the edge. */
96538fd1498Szrj && (!JUMP_P (BB_END (a))
96638fd1498Szrj || (reload_completed
96738fd1498Szrj ? simplejump_p (BB_END (a)) : onlyjump_p (BB_END (a)))));
96838fd1498Szrj }
96938fd1498Szrj
97038fd1498Szrj /* Return the label in the head of basic block BLOCK. Create one if it doesn't
97138fd1498Szrj exist. */
97238fd1498Szrj
97338fd1498Szrj rtx_code_label *
block_label(basic_block block)97438fd1498Szrj block_label (basic_block block)
97538fd1498Szrj {
97638fd1498Szrj if (block == EXIT_BLOCK_PTR_FOR_FN (cfun))
97738fd1498Szrj return NULL;
97838fd1498Szrj
97938fd1498Szrj if (!LABEL_P (BB_HEAD (block)))
98038fd1498Szrj {
98138fd1498Szrj BB_HEAD (block) = emit_label_before (gen_label_rtx (), BB_HEAD (block));
98238fd1498Szrj }
98338fd1498Szrj
98438fd1498Szrj return as_a <rtx_code_label *> (BB_HEAD (block));
98538fd1498Szrj }
98638fd1498Szrj
987*58e805e6Szrj /* Remove all barriers from BB_FOOTER of a BB. */
988*58e805e6Szrj
989*58e805e6Szrj static void
remove_barriers_from_footer(basic_block bb)990*58e805e6Szrj remove_barriers_from_footer (basic_block bb)
991*58e805e6Szrj {
992*58e805e6Szrj rtx_insn *insn = BB_FOOTER (bb);
993*58e805e6Szrj
994*58e805e6Szrj /* Remove barriers but keep jumptables. */
995*58e805e6Szrj while (insn)
996*58e805e6Szrj {
997*58e805e6Szrj if (BARRIER_P (insn))
998*58e805e6Szrj {
999*58e805e6Szrj if (PREV_INSN (insn))
1000*58e805e6Szrj SET_NEXT_INSN (PREV_INSN (insn)) = NEXT_INSN (insn);
1001*58e805e6Szrj else
1002*58e805e6Szrj BB_FOOTER (bb) = NEXT_INSN (insn);
1003*58e805e6Szrj if (NEXT_INSN (insn))
1004*58e805e6Szrj SET_PREV_INSN (NEXT_INSN (insn)) = PREV_INSN (insn);
1005*58e805e6Szrj }
1006*58e805e6Szrj if (LABEL_P (insn))
1007*58e805e6Szrj return;
1008*58e805e6Szrj insn = NEXT_INSN (insn);
1009*58e805e6Szrj }
1010*58e805e6Szrj }
1011*58e805e6Szrj
/* Attempt to perform edge redirection by replacing possibly complex jump
   instruction by unconditional jump or removing jump completely.  This can
   apply only if all edges now point to the same block.  The parameters and
   return values are equivalent to redirect_edge_and_branch.  */

edge
try_redirect_by_replacing_jump (edge e, basic_block target, bool in_cfglayout)
{
  basic_block src = e->src;
  rtx_insn *insn = BB_END (src), *kill_from;
  rtx set;
  int fallthru = 0;

  /* If we are partitioning hot/cold basic blocks, we don't want to
     mess up unconditional or indirect jumps that cross between hot
     and cold sections.

     Basic block partitioning may result in some jumps that appear to
     be optimizable (or blocks that appear to be mergeable), but which really
     must be left untouched (they are required to make it safely across
     partition boundaries).  See the comments at the top of
     bb-reorder.c:partition_hot_cold_basic_blocks for complete details.  */

  if (BB_PARTITION (src) != BB_PARTITION (target))
    return NULL;

  /* We can replace or remove a complex jump only when we have exactly
     two edges.  Also, if we have exactly one outgoing edge, we can
     redirect that.  */
  if (EDGE_COUNT (src->succs) >= 3
      /* Verify that all targets will be TARGET.  Specifically, the
	 edge that is not E must also go to TARGET.  */
      || (EDGE_COUNT (src->succs) == 2
	  && EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest != target))
    return NULL;

  if (!onlyjump_p (insn))
    return NULL;
  /* Tablejumps are kept intact after reload (or when not optimizing).  */
  if ((!optimize || reload_completed) && tablejump_p (insn, NULL, NULL))
    return NULL;

  /* Avoid removing branch with side effects.  */
  set = single_set (insn);
  if (!set || side_effects_p (set))
    return NULL;

  /* In case we zap a conditional jump, we'll need to kill
     the cc0 setter too.  */
  kill_from = insn;
  if (HAVE_cc0 && reg_mentioned_p (cc0_rtx, PATTERN (insn))
      && only_sets_cc0_p (PREV_INSN (insn)))
    kill_from = PREV_INSN (insn);

  /* See if we can create the fallthru edge.  */
  if (in_cfglayout || can_fallthru (src, target))
    {
      if (dump_file)
	fprintf (dump_file, "Removing jump %i.\n", INSN_UID (insn));
      fallthru = 1;

      /* Selectively unlink whole insn chain.  */
      if (in_cfglayout)
	{
	  delete_insn_chain (kill_from, BB_END (src), false);
	  remove_barriers_from_footer (src);
	}
      else
	delete_insn_chain (kill_from, PREV_INSN (BB_HEAD (target)),
			   false);
    }

  /* If this already is simplejump, redirect it.  */
  else if (simplejump_p (insn))
    {
      if (e->dest == target)
	return NULL;
      if (dump_file)
	fprintf (dump_file, "Redirecting jump %i from %i to %i.\n",
		 INSN_UID (insn), e->dest->index, target->index);
      if (!redirect_jump (as_a <rtx_jump_insn *> (insn),
			  block_label (target), 0))
	{
	  gcc_assert (target == EXIT_BLOCK_PTR_FOR_FN (cfun));
	  return NULL;
	}
    }

  /* Cannot do anything for target exit block.  */
  else if (target == EXIT_BLOCK_PTR_FOR_FN (cfun))
    return NULL;

  /* Or replace possibly complicated jump insn by simple jump insn.  */
  else
    {
      rtx_code_label *target_label = block_label (target);
      rtx_insn *barrier;
      rtx_insn *label;
      rtx_jump_table_data *table;

      /* Emit the replacement unconditional jump after the old jump, then
	 delete the old one (and any cc0 setter) below.  */
      emit_jump_insn_after_noloc (targetm.gen_jump (target_label), insn);
      JUMP_LABEL (BB_END (src)) = target_label;
      LABEL_NUSES (target_label)++;
      if (dump_file)
	fprintf (dump_file, "Replacing insn %i by jump %i\n",
		 INSN_UID (insn), INSN_UID (BB_END (src)));


      delete_insn_chain (kill_from, insn, false);

      /* Recognize a tablejump that we are converting to a
	 simple jump and remove its associated CODE_LABEL
	 and ADDR_VEC or ADDR_DIFF_VEC.  */
      if (tablejump_p (insn, &label, &table))
	delete_insn_chain (label, table, false);

      /* An unconditional jump must be followed by a barrier; reuse an
	 existing one when possible, otherwise emit a fresh one.  */
      barrier = next_nonnote_nondebug_insn (BB_END (src));
      if (!barrier || !BARRIER_P (barrier))
	emit_barrier_after (BB_END (src));
      else
	{
	  if (barrier != NEXT_INSN (BB_END (src)))
	    {
	      /* Move the jump before barrier so that the notes
		 which originally were or were created before jump table are
		 inside the basic block.  */
	      rtx_insn *new_insn = BB_END (src);

	      update_bb_for_insn_chain (NEXT_INSN (BB_END (src)),
					PREV_INSN (barrier), src);

	      /* Splice the jump out of the chain...  */
	      SET_NEXT_INSN (PREV_INSN (new_insn)) = NEXT_INSN (new_insn);
	      SET_PREV_INSN (NEXT_INSN (new_insn)) = PREV_INSN (new_insn);

	      /* ...and relink it immediately before the barrier.  */
	      SET_NEXT_INSN (new_insn) = barrier;
	      SET_NEXT_INSN (PREV_INSN (barrier)) = new_insn;

	      SET_PREV_INSN (new_insn) = PREV_INSN (barrier);
	      SET_PREV_INSN (barrier) = new_insn;
	    }
	}
    }

  /* Keep only one edge out and set proper flags.  */
  if (!single_succ_p (src))
    remove_edge (e);
  gcc_assert (single_succ_p (src));

  e = single_succ_edge (src);
  if (fallthru)
    e->flags = EDGE_FALLTHRU;
  else
    e->flags = 0;

  e->probability = profile_probability::always ();

  if (e->dest != target)
    redirect_edge_succ (e, target);
  return e;
}
117138fd1498Szrj
/* Subroutine of redirect_branch_edge that tries to patch the jump
   instruction INSN so that it reaches block NEW.  Do this
   only when it originally reached block OLD.  Return true if this
   worked or the original target wasn't OLD, return false if redirection
   doesn't work.  */

static bool
patch_jump_insn (rtx_insn *insn, rtx_insn *old_label, basic_block new_bb)
{
  rtx_jump_table_data *table;
  rtx tmp;
  /* Recognize a tablejump and adjust all matching cases.  */
  if (tablejump_p (insn, NULL, &table))
    {
      rtvec vec;
      int j;
      rtx_code_label *new_label = block_label (new_bb);

      /* block_label returns NULL for the exit block; a tablejump cannot
	 target it.  */
      if (new_bb == EXIT_BLOCK_PTR_FOR_FN (cfun))
	return false;
      vec = table->get_labels ();

      /* Retarget every table entry that referenced OLD_LABEL, keeping
	 label use counts up to date.  */
      for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
	if (XEXP (RTVEC_ELT (vec, j), 0) == old_label)
	  {
	    RTVEC_ELT (vec, j) = gen_rtx_LABEL_REF (Pmode, new_label);
	    --LABEL_NUSES (old_label);
	    ++LABEL_NUSES (new_label);
	  }

      /* Handle casesi dispatch insns.  */
      if ((tmp = single_set (insn)) != NULL
	  && SET_DEST (tmp) == pc_rtx
	  && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
	  && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF
	  && label_ref_label (XEXP (SET_SRC (tmp), 2)) == old_label)
	{
	  XEXP (SET_SRC (tmp), 2) = gen_rtx_LABEL_REF (Pmode,
						       new_label);
	  --LABEL_NUSES (old_label);
	  ++LABEL_NUSES (new_label);
	}
    }
  else if ((tmp = extract_asm_operands (PATTERN (insn))) != NULL)
    {
      /* An asm goto: rewrite each label operand that matches OLD_LABEL
	 and fix up the REG_LABEL_TARGET / REG_LABEL_OPERAND notes.  */
      int i, n = ASM_OPERANDS_LABEL_LENGTH (tmp);
      rtx note;

      if (new_bb == EXIT_BLOCK_PTR_FOR_FN (cfun))
	return false;
      rtx_code_label *new_label = block_label (new_bb);

      for (i = 0; i < n; ++i)
	{
	  rtx old_ref = ASM_OPERANDS_LABEL (tmp, i);
	  gcc_assert (GET_CODE (old_ref) == LABEL_REF);
	  if (XEXP (old_ref, 0) == old_label)
	    {
	      ASM_OPERANDS_LABEL (tmp, i)
		= gen_rtx_LABEL_REF (Pmode, new_label);
	      --LABEL_NUSES (old_label);
	      ++LABEL_NUSES (new_label);
	    }
	}

      if (JUMP_LABEL (insn) == old_label)
	{
	  JUMP_LABEL (insn) = new_label;
	  /* NEW_LABEL is now the primary target; drop any redundant
	     REG_LABEL_TARGET note for it.  */
	  note = find_reg_note (insn, REG_LABEL_TARGET, new_label);
	  if (note)
	    remove_note (insn, note);
	}
      else
	{
	  note = find_reg_note (insn, REG_LABEL_TARGET, old_label);
	  if (note)
	    remove_note (insn, note);
	  if (JUMP_LABEL (insn) != new_label
	      && !find_reg_note (insn, REG_LABEL_TARGET, new_label))
	    add_reg_note (insn, REG_LABEL_TARGET, new_label);
	}
      /* Rewrite any REG_LABEL_OPERAND notes for OLD_LABEL as well.  */
      while ((note = find_reg_note (insn, REG_LABEL_OPERAND, old_label))
	     != NULL_RTX)
	XEXP (note, 0) = new_label;
    }
  else
    {
      /* ?? We may play the games with moving the named labels from
	 one basic block to the other in case only one computed_jump is
	 available.  */
      if (computed_jump_p (insn)
	  /* A return instruction can't be redirected.  */
	  || returnjump_p (insn))
	return false;

      if (!currently_expanding_to_rtl || JUMP_LABEL (insn) == old_label)
	{
	  /* If the insn doesn't go where we think, we're confused.  */
	  gcc_assert (JUMP_LABEL (insn) == old_label);

	  /* If the substitution doesn't succeed, die.  This can happen
	     if the back end emitted unrecognizable instructions or if
	     target is exit block on some arches.  Or for crossing
	     jumps.  */
	  if (!redirect_jump (as_a <rtx_jump_insn *> (insn),
			      block_label (new_bb), 0))
	    {
	      gcc_assert (new_bb == EXIT_BLOCK_PTR_FOR_FN (cfun)
			  || CROSSING_JUMP_P (insn));
	      return false;
	    }
	}
    }
  return true;
}
128738fd1498Szrj
128838fd1498Szrj
128938fd1498Szrj /* Redirect edge representing branch of (un)conditional jump or tablejump,
129038fd1498Szrj NULL on failure */
129138fd1498Szrj static edge
redirect_branch_edge(edge e,basic_block target)129238fd1498Szrj redirect_branch_edge (edge e, basic_block target)
129338fd1498Szrj {
129438fd1498Szrj rtx_insn *old_label = BB_HEAD (e->dest);
129538fd1498Szrj basic_block src = e->src;
129638fd1498Szrj rtx_insn *insn = BB_END (src);
129738fd1498Szrj
129838fd1498Szrj /* We can only redirect non-fallthru edges of jump insn. */
129938fd1498Szrj if (e->flags & EDGE_FALLTHRU)
130038fd1498Szrj return NULL;
130138fd1498Szrj else if (!JUMP_P (insn) && !currently_expanding_to_rtl)
130238fd1498Szrj return NULL;
130338fd1498Szrj
130438fd1498Szrj if (!currently_expanding_to_rtl)
130538fd1498Szrj {
130638fd1498Szrj if (!patch_jump_insn (as_a <rtx_jump_insn *> (insn), old_label, target))
130738fd1498Szrj return NULL;
130838fd1498Szrj }
130938fd1498Szrj else
131038fd1498Szrj /* When expanding this BB might actually contain multiple
131138fd1498Szrj jumps (i.e. not yet split by find_many_sub_basic_blocks).
131238fd1498Szrj Redirect all of those that match our label. */
131338fd1498Szrj FOR_BB_INSNS (src, insn)
131438fd1498Szrj if (JUMP_P (insn) && !patch_jump_insn (as_a <rtx_jump_insn *> (insn),
131538fd1498Szrj old_label, target))
131638fd1498Szrj return NULL;
131738fd1498Szrj
131838fd1498Szrj if (dump_file)
131938fd1498Szrj fprintf (dump_file, "Edge %i->%i redirected to %i\n",
132038fd1498Szrj e->src->index, e->dest->index, target->index);
132138fd1498Szrj
132238fd1498Szrj if (e->dest != target)
132338fd1498Szrj e = redirect_edge_succ_nodup (e, target);
132438fd1498Szrj
132538fd1498Szrj return e;
132638fd1498Szrj }
132738fd1498Szrj
132838fd1498Szrj /* Called when edge E has been redirected to a new destination,
132938fd1498Szrj in order to update the region crossing flag on the edge and
133038fd1498Szrj jump. */
133138fd1498Szrj
133238fd1498Szrj static void
fixup_partition_crossing(edge e)133338fd1498Szrj fixup_partition_crossing (edge e)
133438fd1498Szrj {
133538fd1498Szrj if (e->src == ENTRY_BLOCK_PTR_FOR_FN (cfun) || e->dest
133638fd1498Szrj == EXIT_BLOCK_PTR_FOR_FN (cfun))
133738fd1498Szrj return;
133838fd1498Szrj /* If we redirected an existing edge, it may already be marked
133938fd1498Szrj crossing, even though the new src is missing a reg crossing note.
134038fd1498Szrj But make sure reg crossing note doesn't already exist before
134138fd1498Szrj inserting. */
134238fd1498Szrj if (BB_PARTITION (e->src) != BB_PARTITION (e->dest))
134338fd1498Szrj {
134438fd1498Szrj e->flags |= EDGE_CROSSING;
134538fd1498Szrj if (JUMP_P (BB_END (e->src)))
134638fd1498Szrj CROSSING_JUMP_P (BB_END (e->src)) = 1;
134738fd1498Szrj }
134838fd1498Szrj else if (BB_PARTITION (e->src) == BB_PARTITION (e->dest))
134938fd1498Szrj {
135038fd1498Szrj e->flags &= ~EDGE_CROSSING;
135138fd1498Szrj /* Remove the section crossing note from jump at end of
135238fd1498Szrj src if it exists, and if no other successors are
135338fd1498Szrj still crossing. */
135438fd1498Szrj if (JUMP_P (BB_END (e->src)) && CROSSING_JUMP_P (BB_END (e->src)))
135538fd1498Szrj {
135638fd1498Szrj bool has_crossing_succ = false;
135738fd1498Szrj edge e2;
135838fd1498Szrj edge_iterator ei;
135938fd1498Szrj FOR_EACH_EDGE (e2, ei, e->src->succs)
136038fd1498Szrj {
136138fd1498Szrj has_crossing_succ |= (e2->flags & EDGE_CROSSING);
136238fd1498Szrj if (has_crossing_succ)
136338fd1498Szrj break;
136438fd1498Szrj }
136538fd1498Szrj if (!has_crossing_succ)
136638fd1498Szrj CROSSING_JUMP_P (BB_END (e->src)) = 0;
136738fd1498Szrj }
136838fd1498Szrj }
136938fd1498Szrj }
137038fd1498Szrj
137138fd1498Szrj /* Called when block BB has been reassigned to the cold partition,
137238fd1498Szrj because it is now dominated by another cold block,
137338fd1498Szrj to ensure that the region crossing attributes are updated. */
137438fd1498Szrj
137538fd1498Szrj static void
fixup_new_cold_bb(basic_block bb)137638fd1498Szrj fixup_new_cold_bb (basic_block bb)
137738fd1498Szrj {
137838fd1498Szrj edge e;
137938fd1498Szrj edge_iterator ei;
138038fd1498Szrj
138138fd1498Szrj /* This is called when a hot bb is found to now be dominated
138238fd1498Szrj by a cold bb and therefore needs to become cold. Therefore,
138338fd1498Szrj its preds will no longer be region crossing. Any non-dominating
138438fd1498Szrj preds that were previously hot would also have become cold
138538fd1498Szrj in the caller for the same region. Any preds that were previously
138638fd1498Szrj region-crossing will be adjusted in fixup_partition_crossing. */
138738fd1498Szrj FOR_EACH_EDGE (e, ei, bb->preds)
138838fd1498Szrj {
138938fd1498Szrj fixup_partition_crossing (e);
139038fd1498Szrj }
139138fd1498Szrj
139238fd1498Szrj /* Possibly need to make bb's successor edges region crossing,
139338fd1498Szrj or remove stale region crossing. */
139438fd1498Szrj FOR_EACH_EDGE (e, ei, bb->succs)
139538fd1498Szrj {
139638fd1498Szrj /* We can't have fall-through edges across partition boundaries.
139738fd1498Szrj Note that force_nonfallthru will do any necessary partition
139838fd1498Szrj boundary fixup by calling fixup_partition_crossing itself. */
139938fd1498Szrj if ((e->flags & EDGE_FALLTHRU)
140038fd1498Szrj && BB_PARTITION (bb) != BB_PARTITION (e->dest)
140138fd1498Szrj && e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
140238fd1498Szrj force_nonfallthru (e);
140338fd1498Szrj else
140438fd1498Szrj fixup_partition_crossing (e);
140538fd1498Szrj }
140638fd1498Szrj }
140738fd1498Szrj
140838fd1498Szrj /* Attempt to change code to redirect edge E to TARGET. Don't do that on
140938fd1498Szrj expense of adding new instructions or reordering basic blocks.
141038fd1498Szrj
141138fd1498Szrj Function can be also called with edge destination equivalent to the TARGET.
141238fd1498Szrj Then it should try the simplifications and do nothing if none is possible.
141338fd1498Szrj
141438fd1498Szrj Return edge representing the branch if transformation succeeded. Return NULL
141538fd1498Szrj on failure.
141638fd1498Szrj We still return NULL in case E already destinated TARGET and we didn't
141738fd1498Szrj managed to simplify instruction stream. */
141838fd1498Szrj
141938fd1498Szrj static edge
rtl_redirect_edge_and_branch(edge e,basic_block target)142038fd1498Szrj rtl_redirect_edge_and_branch (edge e, basic_block target)
142138fd1498Szrj {
142238fd1498Szrj edge ret;
142338fd1498Szrj basic_block src = e->src;
142438fd1498Szrj basic_block dest = e->dest;
142538fd1498Szrj
142638fd1498Szrj if (e->flags & (EDGE_ABNORMAL_CALL | EDGE_EH))
142738fd1498Szrj return NULL;
142838fd1498Szrj
142938fd1498Szrj if (dest == target)
143038fd1498Szrj return e;
143138fd1498Szrj
143238fd1498Szrj if ((ret = try_redirect_by_replacing_jump (e, target, false)) != NULL)
143338fd1498Szrj {
143438fd1498Szrj df_set_bb_dirty (src);
143538fd1498Szrj fixup_partition_crossing (ret);
143638fd1498Szrj return ret;
143738fd1498Szrj }
143838fd1498Szrj
143938fd1498Szrj ret = redirect_branch_edge (e, target);
144038fd1498Szrj if (!ret)
144138fd1498Szrj return NULL;
144238fd1498Szrj
144338fd1498Szrj df_set_bb_dirty (src);
144438fd1498Szrj fixup_partition_crossing (ret);
144538fd1498Szrj return ret;
144638fd1498Szrj }
144738fd1498Szrj
144838fd1498Szrj /* Emit a barrier after BB, into the footer if we are in CFGLAYOUT mode. */
144938fd1498Szrj
145038fd1498Szrj void
emit_barrier_after_bb(basic_block bb)145138fd1498Szrj emit_barrier_after_bb (basic_block bb)
145238fd1498Szrj {
145338fd1498Szrj rtx_barrier *barrier = emit_barrier_after (BB_END (bb));
145438fd1498Szrj gcc_assert (current_ir_type () == IR_RTL_CFGRTL
145538fd1498Szrj || current_ir_type () == IR_RTL_CFGLAYOUT);
145638fd1498Szrj if (current_ir_type () == IR_RTL_CFGLAYOUT)
145738fd1498Szrj {
145838fd1498Szrj rtx_insn *insn = unlink_insn_chain (barrier, barrier);
145938fd1498Szrj
146038fd1498Szrj if (BB_FOOTER (bb))
146138fd1498Szrj {
146238fd1498Szrj rtx_insn *footer_tail = BB_FOOTER (bb);
146338fd1498Szrj
146438fd1498Szrj while (NEXT_INSN (footer_tail))
146538fd1498Szrj footer_tail = NEXT_INSN (footer_tail);
146638fd1498Szrj if (!BARRIER_P (footer_tail))
146738fd1498Szrj {
146838fd1498Szrj SET_NEXT_INSN (footer_tail) = insn;
146938fd1498Szrj SET_PREV_INSN (insn) = footer_tail;
147038fd1498Szrj }
147138fd1498Szrj }
147238fd1498Szrj else
147338fd1498Szrj BB_FOOTER (bb) = insn;
147438fd1498Szrj }
147538fd1498Szrj }
147638fd1498Szrj
/* Like force_nonfallthru below, but additionally performs redirection.
   Used by redirect_edge_and_branch_force.  JUMP_LABEL is used only
   when redirecting to the EXIT_BLOCK, it is either ret_rtx or
   simple_return_rtx, indicating which kind of returnjump to create.
   It should be NULL otherwise.

   Returns the newly created jump block, or NULL when no new block had
   to be created (E's source already ended in a suitable place).  */

basic_block
force_nonfallthru_and_redirect (edge e, basic_block target, rtx jump_label)
{
  basic_block jump_block, new_bb = NULL, src = e->src;
  rtx note;
  edge new_edge;
  int abnormal_edge_flags = 0;
  bool asm_goto_edge = false;
  int loc;

  /* In the case the last instruction is conditional jump to the next
     instruction, first redirect the jump itself and then continue
     by creating a basic block afterwards to redirect fallthru edge.  */
  if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
      && e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun)
      && any_condjump_p (BB_END (e->src))
      && JUMP_LABEL (BB_END (e->src)) == BB_HEAD (e->dest))
    {
      rtx note;
      /* New edge for the (now redirected) jump; flags filled below from
	 the REG_BR_PROB note if one exists.  */
      edge b = unchecked_make_edge (e->src, target, 0);
      bool redirected;

      redirected = redirect_jump (as_a <rtx_jump_insn *> (BB_END (e->src)),
				  block_label (target), 0);
      gcc_assert (redirected);

      note = find_reg_note (BB_END (e->src), REG_BR_PROB, NULL_RTX);
      if (note)
	{
	  int prob = XINT (note, 0);

	  b->probability = profile_probability::from_reg_br_prob_note (prob);
	  /* NOTE(review): this self-subtraction zeroes E's probability.
	     Presumably the intent is that the remaining edge is no longer
	     taken once the branch probability has moved to B -- confirm
	     against upstream GCC whether `b->probability` was meant on
	     the right-hand side.  */
	  e->probability -= e->probability;
	}
    }

  if (e->flags & EDGE_ABNORMAL)
    {
      /* Irritating special case - fallthru edge to the same block as abnormal
	 edge.
	 We can't redirect abnormal edge, but we still can split the fallthru
	 one and create separate abnormal edge to original destination.
	 This allows bb-reorder to make such edge non-fallthru.  */
      gcc_assert (e->dest == target);
      /* Remember the abnormal flags; they are re-attached to a fresh edge
	 near the end of this function.  */
      abnormal_edge_flags = e->flags & ~EDGE_FALLTHRU;
      e->flags &= EDGE_FALLTHRU;
    }
  else
    {
      gcc_assert (e->flags & EDGE_FALLTHRU);
      if (e->src == ENTRY_BLOCK_PTR_FOR_FN (cfun))
	{
	  /* We can't redirect the entry block.  Create an empty block
	     at the start of the function which we use to add the new
	     jump.  */
	  edge tmp;
	  edge_iterator ei;
	  bool found = false;

	  basic_block bb = create_basic_block (BB_HEAD (e->dest), NULL,
					       ENTRY_BLOCK_PTR_FOR_FN (cfun));
	  /* The new block inherits the whole entry count.  */
	  bb->count = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count;

	  /* Make sure new block ends up in correct hot/cold section.  */
	  BB_COPY_PARTITION (bb, e->dest);

	  /* Change the existing edge's source to be the new block, and add
	     a new edge from the entry block to the new block.  */
	  e->src = bb;
	  /* Manually detach E from the entry block's successor vector;
	     redirect_edge_pred can't be used since E's source was just
	     rewritten above.  */
	  for (ei = ei_start (ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs);
	       (tmp = ei_safe_edge (ei)); )
	    {
	      if (tmp == e)
		{
		  ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs->unordered_remove (ei.index);
		  found = true;
		  break;
		}
	      else
		ei_next (&ei);
	    }

	  gcc_assert (found);

	  vec_safe_push (bb->succs, e);
	  make_single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun), bb,
				 EDGE_FALLTHRU);
	}
    }

  /* If e->src ends with asm goto, see if any of the ASM_OPERANDS_LABELs
     don't point to the target or fallthru label.  */
  if (JUMP_P (BB_END (e->src))
      && target != EXIT_BLOCK_PTR_FOR_FN (cfun)
      && (e->flags & EDGE_FALLTHRU)
      && (note = extract_asm_operands (PATTERN (BB_END (e->src)))))
    {
      int i, n = ASM_OPERANDS_LABEL_LENGTH (note);
      bool adjust_jump_target = false;

      for (i = 0; i < n; ++i)
	{
	  /* Retarget asm labels that pointed at the old fallthru
	     destination, keeping LABEL_NUSES counts consistent.  */
	  if (XEXP (ASM_OPERANDS_LABEL (note, i), 0) == BB_HEAD (e->dest))
	    {
	      LABEL_NUSES (XEXP (ASM_OPERANDS_LABEL (note, i), 0))--;
	      XEXP (ASM_OPERANDS_LABEL (note, i), 0) = block_label (target);
	      LABEL_NUSES (XEXP (ASM_OPERANDS_LABEL (note, i), 0))++;
	      adjust_jump_target = true;
	    }
	  /* A label ref to TARGET means the asm goto needs its own edge
	     to TARGET (handled further below).  */
	  if (XEXP (ASM_OPERANDS_LABEL (note, i), 0) == BB_HEAD (target))
	    asm_goto_edge = true;
	}
      if (adjust_jump_target)
	{
	  /* Keep JUMP_LABEL and the REG_LABEL_TARGET / REG_LABEL_OPERAND
	     notes in sync with the label rewrite performed above.  */
	  rtx_insn *insn = BB_END (e->src);
	  rtx note;
	  rtx_insn *old_label = BB_HEAD (e->dest);
	  rtx_insn *new_label = BB_HEAD (target);

	  if (JUMP_LABEL (insn) == old_label)
	    {
	      JUMP_LABEL (insn) = new_label;
	      /* NEW_LABEL is now the primary target; a duplicate
		 REG_LABEL_TARGET note for it would be redundant.  */
	      note = find_reg_note (insn, REG_LABEL_TARGET, new_label);
	      if (note)
		remove_note (insn, note);
	    }
	  else
	    {
	      note = find_reg_note (insn, REG_LABEL_TARGET, old_label);
	      if (note)
		remove_note (insn, note);
	      if (JUMP_LABEL (insn) != new_label
		  && !find_reg_note (insn, REG_LABEL_TARGET, new_label))
		add_reg_note (insn, REG_LABEL_TARGET, new_label);
	    }
	  while ((note = find_reg_note (insn, REG_LABEL_OPERAND, old_label))
		 != NULL_RTX)
	    XEXP (note, 0) = new_label;
	}
    }

  /* A fresh jump block is needed whenever the source has other
     successors, carries deferred abnormal flags, or the asm goto above
     needs an extra edge.  */
  if (EDGE_COUNT (e->src->succs) >= 2 || abnormal_edge_flags || asm_goto_edge)
    {
      rtx_insn *new_head;
      profile_count count = e->count ();
      profile_probability probability = e->probability;
      /* Create the new structures.  */

      /* If the old block ended with a tablejump, skip its table
	 by searching forward from there.  Otherwise start searching
	 forward from the last instruction of the old block.  */
      rtx_jump_table_data *table;
      if (tablejump_p (BB_END (e->src), NULL, &table))
	new_head = table;
      else
	new_head = BB_END (e->src);
      new_head = NEXT_INSN (new_head);

      jump_block = create_basic_block (new_head, NULL, e->src);
      jump_block->count = count;

      /* Make sure new block ends up in correct hot/cold section.  */

      BB_COPY_PARTITION (jump_block, e->src);

      /* Wire edge in.  */
      new_edge = make_edge (e->src, jump_block, EDGE_FALLTHRU);
      new_edge->probability = probability;

      /* Redirect old edge.  */
      redirect_edge_pred (e, jump_block);
      /* The jump block has a single exit, so E is now always taken.  */
      e->probability = profile_probability::always ();

      /* If e->src was previously region crossing, it no longer is
	 and the reg crossing note should be removed.  */
      fixup_partition_crossing (new_edge);

      /* If asm goto has any label refs to target's label,
	 add also edge from asm goto bb to target.  */
      if (asm_goto_edge)
	{
	  /* Split the probability mass evenly between the fallthru and
	     the new asm-goto edge (50/50 heuristic).  */
	  new_edge->probability = new_edge->probability.apply_scale (1, 2);
	  jump_block->count = jump_block->count.apply_scale (1, 2);
	  edge new_edge2 = make_edge (new_edge->src, target,
				      e->flags & ~EDGE_FALLTHRU);
	  new_edge2->probability = probability - new_edge->probability;
	}

      new_bb = jump_block;
    }
  else
    jump_block = e->src;

  /* Preserve the source location for the jump emitted below.  */
  loc = e->goto_locus;
  e->flags &= ~EDGE_FALLTHRU;
  if (target == EXIT_BLOCK_PTR_FOR_FN (cfun))
    {
      /* Redirecting to the exit block means emitting a return jump;
	 JUMP_LABEL selects plain versus simple return.  */
      if (jump_label == ret_rtx)
	emit_jump_insn_after_setloc (targetm.gen_return (),
				     BB_END (jump_block), loc);
      else
	{
	  gcc_assert (jump_label == simple_return_rtx);
	  emit_jump_insn_after_setloc (targetm.gen_simple_return (),
				       BB_END (jump_block), loc);
	}
      set_return_jump_label (BB_END (jump_block));
    }
  else
    {
      /* Ordinary case: emit an unconditional jump to TARGET's label.  */
      rtx_code_label *label = block_label (target);
      emit_jump_insn_after_setloc (targetm.gen_jump (label),
				   BB_END (jump_block), loc);
      JUMP_LABEL (BB_END (jump_block)) = label;
      LABEL_NUSES (label)++;
    }

  /* We might be in cfg layout mode, and if so, the following routine will
     insert the barrier correctly.  */
  emit_barrier_after_bb (jump_block);
  redirect_edge_succ_nodup (e, target);

  /* Re-attach the abnormal flags saved in the EDGE_ABNORMAL case above,
     on a separate edge from the original source.  */
  if (abnormal_edge_flags)
    make_edge (src, target, abnormal_edge_flags);

  df_mark_solutions_dirty ();
  fixup_partition_crossing (e);
  return new_bb;
}
171238fd1498Szrj
171338fd1498Szrj /* Edge E is assumed to be fallthru edge. Emit needed jump instruction
171438fd1498Szrj (and possibly create new basic block) to make edge non-fallthru.
171538fd1498Szrj Return newly created BB or NULL if none. */
171638fd1498Szrj
171738fd1498Szrj static basic_block
rtl_force_nonfallthru(edge e)171838fd1498Szrj rtl_force_nonfallthru (edge e)
171938fd1498Szrj {
172038fd1498Szrj return force_nonfallthru_and_redirect (e, e->dest, NULL_RTX);
172138fd1498Szrj }
172238fd1498Szrj
172338fd1498Szrj /* Redirect edge even at the expense of creating new jump insn or
172438fd1498Szrj basic block. Return new basic block if created, NULL otherwise.
172538fd1498Szrj Conversion must be possible. */
172638fd1498Szrj
172738fd1498Szrj static basic_block
rtl_redirect_edge_and_branch_force(edge e,basic_block target)172838fd1498Szrj rtl_redirect_edge_and_branch_force (edge e, basic_block target)
172938fd1498Szrj {
173038fd1498Szrj if (redirect_edge_and_branch (e, target)
173138fd1498Szrj || e->dest == target)
173238fd1498Szrj return NULL;
173338fd1498Szrj
173438fd1498Szrj /* In case the edge redirection failed, try to force it to be non-fallthru
173538fd1498Szrj and redirect newly created simplejump. */
173638fd1498Szrj df_set_bb_dirty (e->src);
173738fd1498Szrj return force_nonfallthru_and_redirect (e, target, NULL_RTX);
173838fd1498Szrj }
173938fd1498Szrj
/* The given edge should potentially be a fallthru edge.  If that is in
   fact true, delete the jump and barriers that are in the way.  */

static void
rtl_tidy_fallthru_edge (edge e)
{
  rtx_insn *q;
  basic_block b = e->src, c = b->next_bb;

  /* ??? In a late-running flow pass, other folks may have deleted basic
     blocks by nopping out blocks, leaving multiple BARRIERs between here
     and the target label.  They ought to be chastised and fixed.

     We can also wind up with a sequence of undeletable labels between
     one block and the next.

     So search through a sequence of barriers, labels, and notes for
     the head of block C and assert that we really do fall through.  */

  for (q = NEXT_INSN (BB_END (b)); q != BB_HEAD (c); q = NEXT_INSN (q))
    if (NONDEBUG_INSN_P (q))
      /* A real insn between the blocks means this is not a fallthru
	 situation; leave everything alone.  */
      return;

  /* Remove what will soon cease being the jump insn from the source block.
     If block B consisted only of this single jump, turn it into a deleted
     note.  */
  q = BB_END (b);
  if (JUMP_P (q)
      && onlyjump_p (q)
      && (any_uncondjump_p (q)
	  || single_succ_p (b)))
    {
      rtx_insn *label;
      rtx_jump_table_data *table;

      if (tablejump_p (q, &label, &table))
	{
	  /* The label is likely mentioned in some instruction before
	     the tablejump and might not be DCEd, so turn it into
	     a note instead and move before the tablejump that is going to
	     be deleted.  */
	  const char *name = LABEL_NAME (label);
	  PUT_CODE (label, NOTE);
	  NOTE_KIND (label) = NOTE_INSN_DELETED_LABEL;
	  NOTE_DELETED_LABEL_NAME (label) = name;
	  reorder_insns (label, label, PREV_INSN (q));
	  delete_insn (table);
	}

      /* If this was a conditional jump, we need to also delete
	 the insn that set cc0.  */
      if (HAVE_cc0 && any_condjump_p (q) && only_sets_cc0_p (PREV_INSN (q)))
	q = PREV_INSN (q);

      /* Back up past the jump itself: everything after Q will be
	 deleted by the chain deletion below.  */
      q = PREV_INSN (q);
    }
  /* Unconditional jumps with side-effects (i.e. which we can't just delete
     together with the barrier) should never have a fallthru edge.  */
  else if (JUMP_P (q) && any_uncondjump_p (q))
    return;

  /* Selectively unlink the sequence.  */
  if (q != PREV_INSN (BB_HEAD (c)))
    delete_insn_chain (NEXT_INSN (q), PREV_INSN (BB_HEAD (c)), false);

  e->flags |= EDGE_FALLTHRU;
}
180738fd1498Szrj
180838fd1498Szrj /* Should move basic block BB after basic block AFTER. NIY. */
180938fd1498Szrj
181038fd1498Szrj static bool
rtl_move_block_after(basic_block bb ATTRIBUTE_UNUSED,basic_block after ATTRIBUTE_UNUSED)181138fd1498Szrj rtl_move_block_after (basic_block bb ATTRIBUTE_UNUSED,
181238fd1498Szrj basic_block after ATTRIBUTE_UNUSED)
181338fd1498Szrj {
181438fd1498Szrj return false;
181538fd1498Szrj }
181638fd1498Szrj
181738fd1498Szrj /* Locate the last bb in the same partition as START_BB. */
181838fd1498Szrj
181938fd1498Szrj static basic_block
last_bb_in_partition(basic_block start_bb)182038fd1498Szrj last_bb_in_partition (basic_block start_bb)
182138fd1498Szrj {
182238fd1498Szrj basic_block bb;
182338fd1498Szrj FOR_BB_BETWEEN (bb, start_bb, EXIT_BLOCK_PTR_FOR_FN (cfun), next_bb)
182438fd1498Szrj {
182538fd1498Szrj if (BB_PARTITION (start_bb) != BB_PARTITION (bb->next_bb))
182638fd1498Szrj return bb;
182738fd1498Szrj }
182838fd1498Szrj /* Return bb before the exit block. */
182938fd1498Szrj return bb->prev_bb;
183038fd1498Szrj }
183138fd1498Szrj
/* Split a (typically critical) edge.  Return the new block.
   The edge must not be abnormal.

   ??? The code generally expects to be called on critical edges.
   The case of a block ending in an unconditional jump to a
   block with multiple predecessors is not handled optimally.  */

static basic_block
rtl_split_edge (edge edge_in)
{
  basic_block bb, new_bb;
  rtx_insn *before;

  /* Abnormal edges cannot be split.  */
  gcc_assert (!(edge_in->flags & EDGE_ABNORMAL));

  /* We are going to place the new block in front of edge destination.
     Avoid existence of fallthru predecessors.  */
  if ((edge_in->flags & EDGE_FALLTHRU) == 0)
    {
      edge e = find_fallthru_edge (edge_in->dest->preds);

      if (e)
	force_nonfallthru (e);
    }

  /* Create the basic block note.  */
  if (edge_in->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
    before = BB_HEAD (edge_in->dest);
  else
    before = NULL;

  /* If this is a fall through edge to the exit block, the blocks might be
     not adjacent, and the right place is after the source.  */
  if ((edge_in->flags & EDGE_FALLTHRU)
      && edge_in->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
    {
      before = NEXT_INSN (BB_END (edge_in->src));
      bb = create_basic_block (before, NULL, edge_in->src);
      /* New block stays in the source's hot/cold partition.  */
      BB_COPY_PARTITION (bb, edge_in->src);
    }
  else
    {
      if (edge_in->src == ENTRY_BLOCK_PTR_FOR_FN (cfun))
	{
	  /* Splitting an edge out of the entry block: place the new
	     block right before the destination and give it the
	     destination's partition.  */
	  bb = create_basic_block (before, NULL, edge_in->dest->prev_bb);
	  BB_COPY_PARTITION (bb, edge_in->dest);
	}
      else
	{
	  basic_block after = edge_in->dest->prev_bb;
	  /* If this is post-bb reordering, and the edge crosses a partition
	     boundary, the new block needs to be inserted in the bb chain
	     at the end of the src partition (since we put the new bb into
	     that partition, see below).  Otherwise we may end up creating
	     an extra partition crossing in the chain, which is illegal.
	     It can't go after the src, because src may have a fall-through
	     to a different block.  */
	  if (crtl->bb_reorder_complete
	      && (edge_in->flags & EDGE_CROSSING))
	    {
	      after = last_bb_in_partition (edge_in->src);
	      before = get_last_bb_insn (after);
	      /* The instruction following the last bb in partition should
		 be a barrier, since it cannot end in a fall-through.  */
	      gcc_checking_assert (BARRIER_P (before));
	      before = NEXT_INSN (before);
	    }
	  bb = create_basic_block (before, NULL, after);
	  /* Put the split bb into the src partition, to avoid creating
	     a situation where a cold bb dominates a hot bb, in the case
	     where src is cold and dest is hot.  The src will dominate
	     the new bb (whereas it might not have dominated dest).  */
	  BB_COPY_PARTITION (bb, edge_in->src);
	}
    }

  make_single_succ_edge (bb, edge_in->dest, EDGE_FALLTHRU);

  /* Can't allow a region crossing edge to be fallthrough.  */
  if (BB_PARTITION (bb) != BB_PARTITION (edge_in->dest)
      && edge_in->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
    {
      /* force_nonfallthru must not have needed a brand new block here,
	 since BB has a single successor.  */
      new_bb = force_nonfallthru (single_succ_edge (bb));
      gcc_assert (!new_bb);
    }

  /* For non-fallthru edges, we must adjust the predecessor's
     jump instruction to target our new block.  */
  if ((edge_in->flags & EDGE_FALLTHRU) == 0)
    {
      edge redirected = redirect_edge_and_branch (edge_in, bb);
      gcc_assert (redirected);
    }
  else
    {
      if (edge_in->src != ENTRY_BLOCK_PTR_FOR_FN (cfun))
	{
	  /* For asm goto even splitting of fallthru edge might
	     need insn patching, as other labels might point to the
	     old label.  */
	  rtx_insn *last = BB_END (edge_in->src);
	  if (last
	      && JUMP_P (last)
	      && edge_in->dest != EXIT_BLOCK_PTR_FOR_FN (cfun)
	      && (extract_asm_operands (PATTERN (last))
		  || JUMP_LABEL (last) == before)
	      && patch_jump_insn (last, before, bb))
	    df_set_bb_dirty (edge_in->src);
	}
      redirect_edge_succ (edge_in, bb);
    }

  return bb;
}
194738fd1498Szrj
194838fd1498Szrj /* Queue instructions for insertion on an edge between two basic blocks.
194938fd1498Szrj The new instructions and basic blocks (if any) will not appear in the
195038fd1498Szrj CFG until commit_edge_insertions is called. */
195138fd1498Szrj
195238fd1498Szrj void
insert_insn_on_edge(rtx pattern,edge e)195338fd1498Szrj insert_insn_on_edge (rtx pattern, edge e)
195438fd1498Szrj {
195538fd1498Szrj /* We cannot insert instructions on an abnormal critical edge.
195638fd1498Szrj It will be easier to find the culprit if we die now. */
195738fd1498Szrj gcc_assert (!((e->flags & EDGE_ABNORMAL) && EDGE_CRITICAL_P (e)));
195838fd1498Szrj
195938fd1498Szrj if (e->insns.r == NULL_RTX)
196038fd1498Szrj start_sequence ();
196138fd1498Szrj else
196238fd1498Szrj push_to_sequence (e->insns.r);
196338fd1498Szrj
196438fd1498Szrj emit_insn (pattern);
196538fd1498Szrj
196638fd1498Szrj e->insns.r = get_insns ();
196738fd1498Szrj end_sequence ();
196838fd1498Szrj }
196938fd1498Szrj
/* Update the CFG for the instructions queued on edge E.

   The queued insns are placed either at the head of E's destination,
   at the tail of E's source, or in a brand new block created by
   splitting E, depending on which placement preserves semantics.  */

void
commit_one_edge_insertion (edge e)
{
  rtx_insn *before = NULL, *after = NULL, *insns, *tmp, *last;
  basic_block bb;

  /* Pull the insns off the edge now since the edge might go away.  */
  insns = e->insns.r;
  e->insns.r = NULL;

  /* Figure out where to put these insns.  If the destination has
     one predecessor, insert there.  Except for the exit block.  */
  if (single_pred_p (e->dest) && e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
    {
      bb = e->dest;

      /* Get the location correct wrt a code label, and "nice" wrt
	 a basic block note, and before everything else.  */
      tmp = BB_HEAD (bb);
      if (LABEL_P (tmp))
	tmp = NEXT_INSN (tmp);
      if (NOTE_INSN_BASIC_BLOCK_P (tmp))
	tmp = NEXT_INSN (tmp);
      if (tmp == BB_HEAD (bb))
	before = tmp;
      else if (tmp)
	after = PREV_INSN (tmp);
      else
	after = get_last_insn ();
    }

  /* If the source has one successor and the edge is not abnormal,
     insert there.  Except for the entry block.
     Don't do this if the predecessor ends in a jump other than
     unconditional simple jump.  E.g. for asm goto that points all
     its labels at the fallthru basic block, we can't insert instructions
     before the asm goto, as the asm goto can have various of side effects,
     and can't emit instructions after the asm goto, as it must end
     the basic block.  */
  else if ((e->flags & EDGE_ABNORMAL) == 0
	   && single_succ_p (e->src)
	   && e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
	   && (!JUMP_P (BB_END (e->src))
	       || simplejump_p (BB_END (e->src))))
    {
      bb = e->src;

      /* It is possible to have a non-simple jump here.  Consider a target
	 where some forms of unconditional jumps clobber a register.  This
	 happens on the fr30 for example.

	 We know this block has a single successor, so we can just emit
	 the queued insns before the jump.  */
      if (JUMP_P (BB_END (bb)))
	before = BB_END (bb);
      else
	{
	  /* We'd better be fallthru, or we've lost track of what's what.  */
	  gcc_assert (e->flags & EDGE_FALLTHRU);

	  after = BB_END (bb);
	}
    }

  /* Otherwise we must split the edge.  */
  else
    {
      bb = split_edge (e);

      /* If E crossed a partition boundary, we needed to make bb end in
	 a region-crossing jump, even though it was originally fallthru.  */
      if (JUMP_P (BB_END (bb)))
	before = BB_END (bb);
      else
	after = BB_END (bb);
    }

  /* Now that we've found the spot, do the insertion.  */
  if (before)
    {
      emit_insn_before_noloc (insns, before, bb);
      /* Find the last of the just-inserted insns for the return-jump
	 check below.  */
      last = prev_nonnote_insn (before);
    }
  else
    last = emit_insn_after_noloc (insns, after, bb);

  if (returnjump_p (last))
    {
      /* ??? Remove all outgoing edges from BB and add one for EXIT.
	 This is not currently a problem because this only happens
	 for the (single) epilogue, which already has a fallthru edge
	 to EXIT.  */

      e = single_succ_edge (bb);
      gcc_assert (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun)
		  && single_succ_p (bb) && (e->flags & EDGE_FALLTHRU));

      /* The inserted return jump supersedes the fallthru: mark the
	 edge non-fallthru and terminate the block with a barrier.  */
      e->flags &= ~EDGE_FALLTHRU;
      emit_barrier_after (last);

      /* The insn we inserted before (the old block end) is now dead.  */
      if (before)
	delete_insn (before);
    }
  else
    /* Inserting a non-return jump mid-block would corrupt the CFG.  */
    gcc_assert (!JUMP_P (last));
}
207838fd1498Szrj
207938fd1498Szrj /* Update the CFG for all queued instructions. */
208038fd1498Szrj
208138fd1498Szrj void
commit_edge_insertions(void)208238fd1498Szrj commit_edge_insertions (void)
208338fd1498Szrj {
208438fd1498Szrj basic_block bb;
208538fd1498Szrj
208638fd1498Szrj /* Optimization passes that invoke this routine can cause hot blocks
208738fd1498Szrj previously reached by both hot and cold blocks to become dominated only
208838fd1498Szrj by cold blocks. This will cause the verification below to fail,
208938fd1498Szrj and lead to now cold code in the hot section. In some cases this
209038fd1498Szrj may only be visible after newly unreachable blocks are deleted,
209138fd1498Szrj which will be done by fixup_partitions. */
209238fd1498Szrj fixup_partitions ();
209338fd1498Szrj
209438fd1498Szrj checking_verify_flow_info ();
209538fd1498Szrj
209638fd1498Szrj FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun),
209738fd1498Szrj EXIT_BLOCK_PTR_FOR_FN (cfun), next_bb)
209838fd1498Szrj {
209938fd1498Szrj edge e;
210038fd1498Szrj edge_iterator ei;
210138fd1498Szrj
210238fd1498Szrj FOR_EACH_EDGE (e, ei, bb->succs)
210338fd1498Szrj if (e->insns.r)
210438fd1498Szrj commit_one_edge_insertion (e);
210538fd1498Szrj }
210638fd1498Szrj }
210738fd1498Szrj
210838fd1498Szrj
210938fd1498Szrj /* Print out RTL-specific basic block information (live information
211038fd1498Szrj at start and end with TDF_DETAILS). FLAGS are the TDF_* masks
211138fd1498Szrj documented in dumpfile.h. */
211238fd1498Szrj
211338fd1498Szrj static void
rtl_dump_bb(FILE * outf,basic_block bb,int indent,dump_flags_t flags)211438fd1498Szrj rtl_dump_bb (FILE *outf, basic_block bb, int indent, dump_flags_t flags)
211538fd1498Szrj {
211638fd1498Szrj char *s_indent;
211738fd1498Szrj
211838fd1498Szrj s_indent = (char *) alloca ((size_t) indent + 1);
211938fd1498Szrj memset (s_indent, ' ', (size_t) indent);
212038fd1498Szrj s_indent[indent] = '\0';
212138fd1498Szrj
212238fd1498Szrj if (df && (flags & TDF_DETAILS))
212338fd1498Szrj {
212438fd1498Szrj df_dump_top (bb, outf);
212538fd1498Szrj putc ('\n', outf);
212638fd1498Szrj }
212738fd1498Szrj
212838fd1498Szrj if (bb->index != ENTRY_BLOCK && bb->index != EXIT_BLOCK)
212938fd1498Szrj {
213038fd1498Szrj rtx_insn *last = BB_END (bb);
213138fd1498Szrj if (last)
213238fd1498Szrj last = NEXT_INSN (last);
213338fd1498Szrj for (rtx_insn *insn = BB_HEAD (bb); insn != last; insn = NEXT_INSN (insn))
213438fd1498Szrj {
213538fd1498Szrj if (flags & TDF_DETAILS)
213638fd1498Szrj df_dump_insn_top (insn, outf);
213738fd1498Szrj if (! (flags & TDF_SLIM))
213838fd1498Szrj print_rtl_single (outf, insn);
213938fd1498Szrj else
214038fd1498Szrj dump_insn_slim (outf, insn);
214138fd1498Szrj if (flags & TDF_DETAILS)
214238fd1498Szrj df_dump_insn_bottom (insn, outf);
214338fd1498Szrj }
214438fd1498Szrj }
214538fd1498Szrj
214638fd1498Szrj if (df && (flags & TDF_DETAILS))
214738fd1498Szrj {
214838fd1498Szrj df_dump_bottom (bb, outf);
214938fd1498Szrj putc ('\n', outf);
215038fd1498Szrj }
215138fd1498Szrj
215238fd1498Szrj }
215338fd1498Szrj
215438fd1498Szrj /* Like dump_function_to_file, but for RTL. Print out dataflow information
215538fd1498Szrj for the start of each basic block. FLAGS are the TDF_* masks documented
215638fd1498Szrj in dumpfile.h. */
215738fd1498Szrj
void
print_rtl_with_bb (FILE *outf, const rtx_insn *rtx_first, dump_flags_t flags)
{
  const rtx_insn *tmp_rtx;
  if (rtx_first == 0)
    fprintf (outf, "(nil)\n");
  else
    {
      enum bb_state { NOT_IN_BB, IN_ONE_BB, IN_MULTIPLE_BB };
      int max_uid = get_max_uid ();
      /* Maps indexed by insn UID: the block whose head/end is at a given
	 insn, and whether each insn is claimed by zero, one, or several
	 basic blocks (used below to diagnose stray/shared insns).  */
      basic_block *start = XCNEWVEC (basic_block, max_uid);
      basic_block *end = XCNEWVEC (basic_block, max_uid);
      enum bb_state *in_bb_p = XCNEWVEC (enum bb_state, max_uid);
      basic_block bb;

      /* After freeing the CFG, we still have BLOCK_FOR_INSN set on most
	 insns, but the CFG is not maintained so the basic block info
	 is not reliable.  Therefore it's omitted from the dumps.  */
      if (! (cfun->curr_properties & PROP_cfg))
	flags &= ~TDF_BLOCKS;

      if (df)
	df_dump_start (outf);

      if (flags & TDF_BLOCKS)
	{
	  /* Pre-compute the start/end/membership maps by walking the
	     insns of every basic block once.  */
	  FOR_EACH_BB_REVERSE_FN (bb, cfun)
	    {
	      rtx_insn *x;

	      start[INSN_UID (BB_HEAD (bb))] = bb;
	      end[INSN_UID (BB_END (bb))] = bb;
	      for (x = BB_HEAD (bb); x != NULL_RTX; x = NEXT_INSN (x))
		{
		  enum bb_state state = IN_MULTIPLE_BB;

		  if (in_bb_p[INSN_UID (x)] == NOT_IN_BB)
		    state = IN_ONE_BB;
		  in_bb_p[INSN_UID (x)] = state;

		  if (x == BB_END (bb))
		    break;
		}
	    }
	}

      /* Walk the whole insn chain in order, printing block banners and
	 footers around the member insns as requested by FLAGS.  */
      for (tmp_rtx = rtx_first; tmp_rtx != NULL; tmp_rtx = NEXT_INSN (tmp_rtx))
	{
	  if (flags & TDF_BLOCKS)
	    {
	      bb = start[INSN_UID (tmp_rtx)];
	      if (bb != NULL)
		{
		  /* NOTE(review): passes the global dump_flags, not the
		     FLAGS parameter — looks deliberate upstream, but
		     confirm before relying on it.  */
		  dump_bb_info (outf, bb, 0, dump_flags, true, false);
		  if (df && (flags & TDF_DETAILS))
		    df_dump_top (bb, outf);
		}

	      /* Notes and barriers legitimately live between blocks;
	         anything else outside a block is worth flagging.  */
	      if (in_bb_p[INSN_UID (tmp_rtx)] == NOT_IN_BB
		  && !NOTE_P (tmp_rtx)
		  && !BARRIER_P (tmp_rtx))
		fprintf (outf, ";; Insn is not within a basic block\n");
	      else if (in_bb_p[INSN_UID (tmp_rtx)] == IN_MULTIPLE_BB)
		fprintf (outf, ";; Insn is in multiple basic blocks\n");
	    }

	  if (flags & TDF_DETAILS)
	    df_dump_insn_top (tmp_rtx, outf);
	  if (! (flags & TDF_SLIM))
	    print_rtl_single (outf, tmp_rtx);
	  else
	    dump_insn_slim (outf, tmp_rtx);
	  if (flags & TDF_DETAILS)
	    df_dump_insn_bottom (tmp_rtx, outf);

	  if (flags & TDF_BLOCKS)
	    {
	      bb = end[INSN_UID (tmp_rtx)];
	      if (bb != NULL)
		{
		  dump_bb_info (outf, bb, 0, dump_flags, false, true);
		  if (df && (flags & TDF_DETAILS))
		    df_dump_bottom (bb, outf);
		  putc ('\n', outf);
		}
	    }
	}

      free (start);
      free (end);
      free (in_bb_p);
    }
}
225138fd1498Szrj
225238fd1498Szrj /* Update the branch probability of BB if a REG_BR_PROB is present. */
225338fd1498Szrj
225438fd1498Szrj void
update_br_prob_note(basic_block bb)225538fd1498Szrj update_br_prob_note (basic_block bb)
225638fd1498Szrj {
225738fd1498Szrj rtx note;
225838fd1498Szrj note = find_reg_note (BB_END (bb), REG_BR_PROB, NULL_RTX);
225938fd1498Szrj if (!JUMP_P (BB_END (bb)) || !BRANCH_EDGE (bb)->probability.initialized_p ())
226038fd1498Szrj {
226138fd1498Szrj if (note)
226238fd1498Szrj {
226338fd1498Szrj rtx *note_link, this_rtx;
226438fd1498Szrj
226538fd1498Szrj note_link = ®_NOTES (BB_END (bb));
226638fd1498Szrj for (this_rtx = *note_link; this_rtx; this_rtx = XEXP (this_rtx, 1))
226738fd1498Szrj if (this_rtx == note)
226838fd1498Szrj {
226938fd1498Szrj *note_link = XEXP (this_rtx, 1);
227038fd1498Szrj break;
227138fd1498Szrj }
227238fd1498Szrj }
227338fd1498Szrj return;
227438fd1498Szrj }
227538fd1498Szrj if (!note
227638fd1498Szrj || XINT (note, 0) == BRANCH_EDGE (bb)->probability.to_reg_br_prob_note ())
227738fd1498Szrj return;
227838fd1498Szrj XINT (note, 0) = BRANCH_EDGE (bb)->probability.to_reg_br_prob_note ();
227938fd1498Szrj }
228038fd1498Szrj
228138fd1498Szrj /* Get the last insn associated with block BB (that includes barriers and
228238fd1498Szrj tablejumps after BB). */
228338fd1498Szrj rtx_insn *
get_last_bb_insn(basic_block bb)228438fd1498Szrj get_last_bb_insn (basic_block bb)
228538fd1498Szrj {
228638fd1498Szrj rtx_jump_table_data *table;
228738fd1498Szrj rtx_insn *tmp;
228838fd1498Szrj rtx_insn *end = BB_END (bb);
228938fd1498Szrj
229038fd1498Szrj /* Include any jump table following the basic block. */
229138fd1498Szrj if (tablejump_p (end, NULL, &table))
229238fd1498Szrj end = table;
229338fd1498Szrj
229438fd1498Szrj /* Include any barriers that may follow the basic block. */
229538fd1498Szrj tmp = next_nonnote_nondebug_insn_bb (end);
229638fd1498Szrj while (tmp && BARRIER_P (tmp))
229738fd1498Szrj {
229838fd1498Szrj end = tmp;
229938fd1498Szrj tmp = next_nonnote_nondebug_insn_bb (end);
230038fd1498Szrj }
230138fd1498Szrj
230238fd1498Szrj return end;
230338fd1498Szrj }
230438fd1498Szrj
230538fd1498Szrj /* Add all BBs reachable from entry via hot paths into the SET. */
230638fd1498Szrj
230738fd1498Szrj void
find_bbs_reachable_by_hot_paths(hash_set<basic_block> * set)230838fd1498Szrj find_bbs_reachable_by_hot_paths (hash_set<basic_block> *set)
230938fd1498Szrj {
231038fd1498Szrj auto_vec<basic_block, 64> worklist;
231138fd1498Szrj
231238fd1498Szrj set->add (ENTRY_BLOCK_PTR_FOR_FN (cfun));
231338fd1498Szrj worklist.safe_push (ENTRY_BLOCK_PTR_FOR_FN (cfun));
231438fd1498Szrj
231538fd1498Szrj while (worklist.length () > 0)
231638fd1498Szrj {
231738fd1498Szrj basic_block bb = worklist.pop ();
231838fd1498Szrj edge_iterator ei;
231938fd1498Szrj edge e;
232038fd1498Szrj
232138fd1498Szrj FOR_EACH_EDGE (e, ei, bb->succs)
232238fd1498Szrj if (BB_PARTITION (e->dest) != BB_COLD_PARTITION
232338fd1498Szrj && !set->add (e->dest))
232438fd1498Szrj worklist.safe_push (e->dest);
232538fd1498Szrj }
232638fd1498Szrj }
232738fd1498Szrj
232838fd1498Szrj /* Sanity check partition hotness to ensure that basic blocks in
232938fd1498Szrj the cold partition don't dominate basic blocks in the hot partition.
233038fd1498Szrj If FLAG_ONLY is true, report violations as errors. Otherwise
233138fd1498Szrj re-mark the dominated blocks as cold, since this is run after
233238fd1498Szrj cfg optimizations that may make hot blocks previously reached
233338fd1498Szrj by both hot and cold blocks now only reachable along cold paths. */
233438fd1498Szrj
233538fd1498Szrj static vec<basic_block>
find_partition_fixes(bool flag_only)233638fd1498Szrj find_partition_fixes (bool flag_only)
233738fd1498Szrj {
233838fd1498Szrj basic_block bb;
233938fd1498Szrj vec<basic_block> bbs_in_cold_partition = vNULL;
234038fd1498Szrj vec<basic_block> bbs_to_fix = vNULL;
234138fd1498Szrj hash_set<basic_block> set;
234238fd1498Szrj
234338fd1498Szrj /* Callers check this. */
234438fd1498Szrj gcc_checking_assert (crtl->has_bb_partition);
234538fd1498Szrj
234638fd1498Szrj find_bbs_reachable_by_hot_paths (&set);
234738fd1498Szrj
234838fd1498Szrj FOR_EACH_BB_FN (bb, cfun)
234938fd1498Szrj if (!set.contains (bb)
235038fd1498Szrj && BB_PARTITION (bb) != BB_COLD_PARTITION)
235138fd1498Szrj {
235238fd1498Szrj if (flag_only)
235338fd1498Szrj error ("non-cold basic block %d reachable only "
235438fd1498Szrj "by paths crossing the cold partition", bb->index);
235538fd1498Szrj else
235638fd1498Szrj BB_SET_PARTITION (bb, BB_COLD_PARTITION);
235738fd1498Szrj bbs_to_fix.safe_push (bb);
235838fd1498Szrj bbs_in_cold_partition.safe_push (bb);
235938fd1498Szrj }
236038fd1498Szrj
236138fd1498Szrj return bbs_to_fix;
236238fd1498Szrj }
236338fd1498Szrj
236438fd1498Szrj /* Perform cleanup on the hot/cold bb partitioning after optimization
236538fd1498Szrj passes that modify the cfg. */
236638fd1498Szrj
236738fd1498Szrj void
fixup_partitions(void)236838fd1498Szrj fixup_partitions (void)
236938fd1498Szrj {
237038fd1498Szrj basic_block bb;
237138fd1498Szrj
237238fd1498Szrj if (!crtl->has_bb_partition)
237338fd1498Szrj return;
237438fd1498Szrj
237538fd1498Szrj /* Delete any blocks that became unreachable and weren't
237638fd1498Szrj already cleaned up, for example during edge forwarding
237738fd1498Szrj and convert_jumps_to_returns. This will expose more
237838fd1498Szrj opportunities for fixing the partition boundaries here.
237938fd1498Szrj Also, the calculation of the dominance graph during verification
238038fd1498Szrj will assert if there are unreachable nodes. */
238138fd1498Szrj delete_unreachable_blocks ();
238238fd1498Szrj
238338fd1498Szrj /* If there are partitions, do a sanity check on them: A basic block in
238438fd1498Szrj a cold partition cannot dominate a basic block in a hot partition.
238538fd1498Szrj Fixup any that now violate this requirement, as a result of edge
238638fd1498Szrj forwarding and unreachable block deletion. */
238738fd1498Szrj vec<basic_block> bbs_to_fix = find_partition_fixes (false);
238838fd1498Szrj
238938fd1498Szrj /* Do the partition fixup after all necessary blocks have been converted to
239038fd1498Szrj cold, so that we only update the region crossings the minimum number of
239138fd1498Szrj places, which can require forcing edges to be non fallthru. */
239238fd1498Szrj while (! bbs_to_fix.is_empty ())
239338fd1498Szrj {
239438fd1498Szrj bb = bbs_to_fix.pop ();
239538fd1498Szrj fixup_new_cold_bb (bb);
239638fd1498Szrj }
239738fd1498Szrj }
239838fd1498Szrj
239938fd1498Szrj /* Verify, in the basic block chain, that there is at most one switch
240038fd1498Szrj between hot/cold partitions. This condition will not be true until
240138fd1498Szrj after reorder_basic_blocks is called. */
240238fd1498Szrj
240338fd1498Szrj static int
verify_hot_cold_block_grouping(void)240438fd1498Szrj verify_hot_cold_block_grouping (void)
240538fd1498Szrj {
240638fd1498Szrj basic_block bb;
240738fd1498Szrj int err = 0;
240838fd1498Szrj bool switched_sections = false;
240938fd1498Szrj int current_partition = BB_UNPARTITIONED;
241038fd1498Szrj
241138fd1498Szrj /* Even after bb reordering is complete, we go into cfglayout mode
241238fd1498Szrj again (in compgoto). Ensure we don't call this before going back
241338fd1498Szrj into linearized RTL when any layout fixes would have been committed. */
241438fd1498Szrj if (!crtl->bb_reorder_complete
241538fd1498Szrj || current_ir_type () != IR_RTL_CFGRTL)
241638fd1498Szrj return err;
241738fd1498Szrj
241838fd1498Szrj FOR_EACH_BB_FN (bb, cfun)
241938fd1498Szrj {
242038fd1498Szrj if (current_partition != BB_UNPARTITIONED
242138fd1498Szrj && BB_PARTITION (bb) != current_partition)
242238fd1498Szrj {
242338fd1498Szrj if (switched_sections)
242438fd1498Szrj {
242538fd1498Szrj error ("multiple hot/cold transitions found (bb %i)",
242638fd1498Szrj bb->index);
242738fd1498Szrj err = 1;
242838fd1498Szrj }
242938fd1498Szrj else
243038fd1498Szrj switched_sections = true;
243138fd1498Szrj
243238fd1498Szrj if (!crtl->has_bb_partition)
243338fd1498Szrj error ("partition found but function partition flag not set");
243438fd1498Szrj }
243538fd1498Szrj current_partition = BB_PARTITION (bb);
243638fd1498Szrj }
243738fd1498Szrj
243838fd1498Szrj return err;
243938fd1498Szrj }
244038fd1498Szrj
244138fd1498Szrj
244238fd1498Szrj /* Perform several checks on the edges out of each block, such as
244338fd1498Szrj the consistency of the branch probabilities, the correctness
244438fd1498Szrj of hot/cold partition crossing edges, and the number of expected
244538fd1498Szrj successor edges. Also verify that the dominance relationship
244638fd1498Szrj between hot/cold blocks is sane. */
244738fd1498Szrj
244838fd1498Szrj static int
rtl_verify_edges(void)244938fd1498Szrj rtl_verify_edges (void)
245038fd1498Szrj {
245138fd1498Szrj int err = 0;
245238fd1498Szrj basic_block bb;
245338fd1498Szrj
245438fd1498Szrj FOR_EACH_BB_REVERSE_FN (bb, cfun)
245538fd1498Szrj {
245638fd1498Szrj int n_fallthru = 0, n_branch = 0, n_abnormal_call = 0, n_sibcall = 0;
245738fd1498Szrj int n_eh = 0, n_abnormal = 0;
245838fd1498Szrj edge e, fallthru = NULL;
245938fd1498Szrj edge_iterator ei;
246038fd1498Szrj rtx note;
246138fd1498Szrj bool has_crossing_edge = false;
246238fd1498Szrj
246338fd1498Szrj if (JUMP_P (BB_END (bb))
246438fd1498Szrj && (note = find_reg_note (BB_END (bb), REG_BR_PROB, NULL_RTX))
246538fd1498Szrj && EDGE_COUNT (bb->succs) >= 2
246638fd1498Szrj && any_condjump_p (BB_END (bb)))
246738fd1498Szrj {
246838fd1498Szrj if (!BRANCH_EDGE (bb)->probability.initialized_p ())
246938fd1498Szrj {
247038fd1498Szrj if (profile_status_for_fn (cfun) != PROFILE_ABSENT)
247138fd1498Szrj {
247238fd1498Szrj error ("verify_flow_info: "
247338fd1498Szrj "REG_BR_PROB is set but cfg probability is not");
247438fd1498Szrj err = 1;
247538fd1498Szrj }
247638fd1498Szrj }
247738fd1498Szrj else if (XINT (note, 0)
247838fd1498Szrj != BRANCH_EDGE (bb)->probability.to_reg_br_prob_note ()
247938fd1498Szrj && profile_status_for_fn (cfun) != PROFILE_ABSENT)
248038fd1498Szrj {
248138fd1498Szrj error ("verify_flow_info: REG_BR_PROB does not match cfg %i %i",
248238fd1498Szrj XINT (note, 0),
248338fd1498Szrj BRANCH_EDGE (bb)->probability.to_reg_br_prob_note ());
248438fd1498Szrj err = 1;
248538fd1498Szrj }
248638fd1498Szrj }
248738fd1498Szrj
248838fd1498Szrj FOR_EACH_EDGE (e, ei, bb->succs)
248938fd1498Szrj {
249038fd1498Szrj bool is_crossing;
249138fd1498Szrj
249238fd1498Szrj if (e->flags & EDGE_FALLTHRU)
249338fd1498Szrj n_fallthru++, fallthru = e;
249438fd1498Szrj
249538fd1498Szrj is_crossing = (BB_PARTITION (e->src) != BB_PARTITION (e->dest)
249638fd1498Szrj && e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
249738fd1498Szrj && e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun));
249838fd1498Szrj has_crossing_edge |= is_crossing;
249938fd1498Szrj if (e->flags & EDGE_CROSSING)
250038fd1498Szrj {
250138fd1498Szrj if (!is_crossing)
250238fd1498Szrj {
250338fd1498Szrj error ("EDGE_CROSSING incorrectly set across same section");
250438fd1498Szrj err = 1;
250538fd1498Szrj }
250638fd1498Szrj if (e->flags & EDGE_FALLTHRU)
250738fd1498Szrj {
250838fd1498Szrj error ("fallthru edge crosses section boundary in bb %i",
250938fd1498Szrj e->src->index);
251038fd1498Szrj err = 1;
251138fd1498Szrj }
251238fd1498Szrj if (e->flags & EDGE_EH)
251338fd1498Szrj {
251438fd1498Szrj error ("EH edge crosses section boundary in bb %i",
251538fd1498Szrj e->src->index);
251638fd1498Szrj err = 1;
251738fd1498Szrj }
251838fd1498Szrj if (JUMP_P (BB_END (bb)) && !CROSSING_JUMP_P (BB_END (bb)))
251938fd1498Szrj {
252038fd1498Szrj error ("No region crossing jump at section boundary in bb %i",
252138fd1498Szrj bb->index);
252238fd1498Szrj err = 1;
252338fd1498Szrj }
252438fd1498Szrj }
252538fd1498Szrj else if (is_crossing)
252638fd1498Szrj {
252738fd1498Szrj error ("EDGE_CROSSING missing across section boundary");
252838fd1498Szrj err = 1;
252938fd1498Szrj }
253038fd1498Szrj
253138fd1498Szrj if ((e->flags & ~(EDGE_DFS_BACK
253238fd1498Szrj | EDGE_CAN_FALLTHRU
253338fd1498Szrj | EDGE_IRREDUCIBLE_LOOP
253438fd1498Szrj | EDGE_LOOP_EXIT
253538fd1498Szrj | EDGE_CROSSING
253638fd1498Szrj | EDGE_PRESERVE)) == 0)
253738fd1498Szrj n_branch++;
253838fd1498Szrj
253938fd1498Szrj if (e->flags & EDGE_ABNORMAL_CALL)
254038fd1498Szrj n_abnormal_call++;
254138fd1498Szrj
254238fd1498Szrj if (e->flags & EDGE_SIBCALL)
254338fd1498Szrj n_sibcall++;
254438fd1498Szrj
254538fd1498Szrj if (e->flags & EDGE_EH)
254638fd1498Szrj n_eh++;
254738fd1498Szrj
254838fd1498Szrj if (e->flags & EDGE_ABNORMAL)
254938fd1498Szrj n_abnormal++;
255038fd1498Szrj }
255138fd1498Szrj
255238fd1498Szrj if (!has_crossing_edge
255338fd1498Szrj && JUMP_P (BB_END (bb))
255438fd1498Szrj && CROSSING_JUMP_P (BB_END (bb)))
255538fd1498Szrj {
255638fd1498Szrj print_rtl_with_bb (stderr, get_insns (), TDF_BLOCKS | TDF_DETAILS);
255738fd1498Szrj error ("Region crossing jump across same section in bb %i",
255838fd1498Szrj bb->index);
255938fd1498Szrj err = 1;
256038fd1498Szrj }
256138fd1498Szrj
256238fd1498Szrj if (n_eh && !find_reg_note (BB_END (bb), REG_EH_REGION, NULL_RTX))
256338fd1498Szrj {
256438fd1498Szrj error ("missing REG_EH_REGION note at the end of bb %i", bb->index);
256538fd1498Szrj err = 1;
256638fd1498Szrj }
256738fd1498Szrj if (n_eh > 1)
256838fd1498Szrj {
256938fd1498Szrj error ("too many exception handling edges in bb %i", bb->index);
257038fd1498Szrj err = 1;
257138fd1498Szrj }
257238fd1498Szrj if (n_branch
257338fd1498Szrj && (!JUMP_P (BB_END (bb))
257438fd1498Szrj || (n_branch > 1 && (any_uncondjump_p (BB_END (bb))
257538fd1498Szrj || any_condjump_p (BB_END (bb))))))
257638fd1498Szrj {
257738fd1498Szrj error ("too many outgoing branch edges from bb %i", bb->index);
257838fd1498Szrj err = 1;
257938fd1498Szrj }
258038fd1498Szrj if (n_fallthru && any_uncondjump_p (BB_END (bb)))
258138fd1498Szrj {
258238fd1498Szrj error ("fallthru edge after unconditional jump in bb %i", bb->index);
258338fd1498Szrj err = 1;
258438fd1498Szrj }
258538fd1498Szrj if (n_branch != 1 && any_uncondjump_p (BB_END (bb)))
258638fd1498Szrj {
258738fd1498Szrj error ("wrong number of branch edges after unconditional jump"
258838fd1498Szrj " in bb %i", bb->index);
258938fd1498Szrj err = 1;
259038fd1498Szrj }
259138fd1498Szrj if (n_branch != 1 && any_condjump_p (BB_END (bb))
259238fd1498Szrj && JUMP_LABEL (BB_END (bb)) != BB_HEAD (fallthru->dest))
259338fd1498Szrj {
259438fd1498Szrj error ("wrong amount of branch edges after conditional jump"
259538fd1498Szrj " in bb %i", bb->index);
259638fd1498Szrj err = 1;
259738fd1498Szrj }
259838fd1498Szrj if (n_abnormal_call && !CALL_P (BB_END (bb)))
259938fd1498Szrj {
260038fd1498Szrj error ("abnormal call edges for non-call insn in bb %i", bb->index);
260138fd1498Szrj err = 1;
260238fd1498Szrj }
260338fd1498Szrj if (n_sibcall && !CALL_P (BB_END (bb)))
260438fd1498Szrj {
260538fd1498Szrj error ("sibcall edges for non-call insn in bb %i", bb->index);
260638fd1498Szrj err = 1;
260738fd1498Szrj }
260838fd1498Szrj if (n_abnormal > n_eh
260938fd1498Szrj && !(CALL_P (BB_END (bb))
261038fd1498Szrj && n_abnormal == n_abnormal_call + n_sibcall)
261138fd1498Szrj && (!JUMP_P (BB_END (bb))
261238fd1498Szrj || any_condjump_p (BB_END (bb))
261338fd1498Szrj || any_uncondjump_p (BB_END (bb))))
261438fd1498Szrj {
261538fd1498Szrj error ("abnormal edges for no purpose in bb %i", bb->index);
261638fd1498Szrj err = 1;
261738fd1498Szrj }
261838fd1498Szrj }
261938fd1498Szrj
262038fd1498Szrj /* If there are partitions, do a sanity check on them: A basic block in
262138fd1498Szrj a cold partition cannot dominate a basic block in a hot partition. */
262238fd1498Szrj if (crtl->has_bb_partition && !err
262338fd1498Szrj && current_ir_type () == IR_RTL_CFGLAYOUT)
262438fd1498Szrj {
262538fd1498Szrj vec<basic_block> bbs_to_fix = find_partition_fixes (true);
262638fd1498Szrj err = !bbs_to_fix.is_empty ();
262738fd1498Szrj }
262838fd1498Szrj
262938fd1498Szrj /* Clean up. */
263038fd1498Szrj return err;
263138fd1498Szrj }
263238fd1498Szrj
263338fd1498Szrj /* Checks on the instructions within blocks. Currently checks that each
263438fd1498Szrj block starts with a basic block note, and that basic block notes and
263538fd1498Szrj control flow jumps are not found in the middle of the block. */
263638fd1498Szrj
static int
rtl_verify_bb_insns (void)
{
  rtx_insn *x;
  int err = 0;
  basic_block bb;

  FOR_EACH_BB_REVERSE_FN (bb, cfun)
    {
      /* Now check the header of basic
	 block.  It ought to contain optional CODE_LABEL followed
	 by NOTE_BASIC_BLOCK.  */
      x = BB_HEAD (bb);
      if (LABEL_P (x))
	{
	  /* A label alone cannot be a whole block: the basic block
	     note must follow it.  */
	  if (BB_END (bb) == x)
	    {
	      error ("NOTE_INSN_BASIC_BLOCK is missing for block %d",
		     bb->index);
	      err = 1;
	    }

	  x = NEXT_INSN (x);
	}

      /* The (possibly label-skipped) head must be this block's own
	 NOTE_INSN_BASIC_BLOCK.  */
      if (!NOTE_INSN_BASIC_BLOCK_P (x) || NOTE_BASIC_BLOCK (x) != bb)
	{
	  error ("NOTE_INSN_BASIC_BLOCK is missing for block %d",
		 bb->index);
	  err = 1;
	}

      if (BB_END (bb) == x)
	/* Do checks for empty blocks here.  */
	;
      else
	/* Scan the body: no stray block notes, and no control flow insn
	   anywhere except at BB_END (checked before the end test so the
	   final insn is allowed to be a jump).  */
	for (x = NEXT_INSN (x); x; x = NEXT_INSN (x))
	  {
	    if (NOTE_INSN_BASIC_BLOCK_P (x))
	      {
		error ("NOTE_INSN_BASIC_BLOCK %d in middle of basic block %d",
		       INSN_UID (x), bb->index);
		err = 1;
	      }

	    if (x == BB_END (bb))
	      break;

	    if (control_flow_insn_p (x))
	      {
		error ("in basic block %d:", bb->index);
		fatal_insn ("flow control insn inside a basic block", x);
	      }
	  }
    }

  /* Clean up.  */
  return err;
}
269638fd1498Szrj
269738fd1498Szrj /* Verify that block pointers for instructions in basic blocks, headers and
269838fd1498Szrj footers are set appropriately. */
269938fd1498Szrj
static int
rtl_verify_bb_pointers (void)
{
  int err = 0;
  basic_block bb;

  /* Check the general integrity of the basic blocks.  */
  FOR_EACH_BB_REVERSE_FN (bb, cfun)
    {
      rtx_insn *insn;

      if (!(bb->flags & BB_RTL))
	{
	  error ("BB_RTL flag not set for block %d", bb->index);
	  err = 1;
	}

      /* Every insn inside the block must point back at it.  */
      FOR_BB_INSNS (bb, insn)
	if (BLOCK_FOR_INSN (insn) != bb)
	  {
	    error ("insn %d basic block pointer is %d, should be %d",
		   INSN_UID (insn),
		   BLOCK_FOR_INSN (insn) ? BLOCK_FOR_INSN (insn)->index : 0,
		   bb->index);
	    err = 1;
	  }

      /* Header/footer insns (cfglayout mode) live outside the block
	 proper, so their bb pointer must be NULL (barriers excepted).  */
      for (insn = BB_HEADER (bb); insn; insn = NEXT_INSN (insn))
	if (!BARRIER_P (insn)
	    && BLOCK_FOR_INSN (insn) != NULL)
	  {
	    error ("insn %d in header of bb %d has non-NULL basic block",
		   INSN_UID (insn), bb->index);
	    err = 1;
	  }
      for (insn = BB_FOOTER (bb); insn; insn = NEXT_INSN (insn))
	if (!BARRIER_P (insn)
	    && BLOCK_FOR_INSN (insn) != NULL)
	  {
	    error ("insn %d in footer of bb %d has non-NULL basic block",
		   INSN_UID (insn), bb->index);
	    err = 1;
	  }
    }

  /* Clean up.  */
  return err;
}
274838fd1498Szrj
274938fd1498Szrj /* Verify the CFG and RTL consistency common for both underlying RTL and
275038fd1498Szrj cfglayout RTL.
275138fd1498Szrj
275238fd1498Szrj Currently it does following checks:
275338fd1498Szrj
275438fd1498Szrj - overlapping of basic blocks
275538fd1498Szrj - insns with wrong BLOCK_FOR_INSN pointers
275638fd1498Szrj - headers of basic blocks (the NOTE_INSN_BASIC_BLOCK note)
275738fd1498Szrj - tails of basic blocks (ensure that boundary is necessary)
275838fd1498Szrj - scans body of the basic block for JUMP_INSN, CODE_LABEL
275938fd1498Szrj and NOTE_INSN_BASIC_BLOCK
276038fd1498Szrj - verify that no fall_thru edge crosses hot/cold partition boundaries
276138fd1498Szrj - verify that there are no pending RTL branch predictions
276238fd1498Szrj - verify that hot blocks are not dominated by cold blocks
276338fd1498Szrj
276438fd1498Szrj In future it can be extended check a lot of other stuff as well
276538fd1498Szrj (reachability of basic blocks, life information, etc. etc.). */
276638fd1498Szrj
276738fd1498Szrj static int
rtl_verify_flow_info_1(void)276838fd1498Szrj rtl_verify_flow_info_1 (void)
276938fd1498Szrj {
277038fd1498Szrj int err = 0;
277138fd1498Szrj
277238fd1498Szrj err |= rtl_verify_bb_pointers ();
277338fd1498Szrj
277438fd1498Szrj err |= rtl_verify_bb_insns ();
277538fd1498Szrj
277638fd1498Szrj err |= rtl_verify_edges ();
277738fd1498Szrj
277838fd1498Szrj return err;
277938fd1498Szrj }
278038fd1498Szrj
278138fd1498Szrj /* Walk the instruction chain and verify that bb head/end pointers
278238fd1498Szrj are correct, and that instructions are in exactly one bb and have
278338fd1498Szrj correct block pointers. */
278438fd1498Szrj
static int
rtl_verify_bb_insn_chain (void)
{
  basic_block bb;
  int err = 0;
  rtx_insn *x;
  rtx_insn *last_head = get_last_insn ();
  basic_block *bb_info;
  const int max_uid = get_max_uid ();

  /* Maps insn UID -> owning block, to detect insns claimed by more
     than one block.  */
  bb_info = XCNEWVEC (basic_block, max_uid);

  /* Blocks are visited in reverse layout order, and LAST_HEAD shrinks
     backwards past each verified block, so each chain segment is
     scanned exactly once.  */
  FOR_EACH_BB_REVERSE_FN (bb, cfun)
    {
      rtx_insn *head = BB_HEAD (bb);
      rtx_insn *end = BB_END (bb);

      for (x = last_head; x != NULL_RTX; x = PREV_INSN (x))
	{
	  /* Verify the end of the basic block is in the INSN chain.  */
	  if (x == end)
	    break;

	  /* And that the code outside of basic blocks has NULL bb field.  */
	  if (!BARRIER_P (x)
	      && BLOCK_FOR_INSN (x) != NULL)
	    {
	      error ("insn %d outside of basic blocks has non-NULL bb field",
		     INSN_UID (x));
	      err = 1;
	    }
	}

      if (!x)
	{
	  error ("end insn %d for block %d not found in the insn stream",
		 INSN_UID (end), bb->index);
	  err = 1;
	}

      /* Work backwards from the end to the head of the basic block
	 to verify the head is in the RTL chain.  */
      for (; x != NULL_RTX; x = PREV_INSN (x))
	{
	  /* While walking over the insn chain, verify insns appear
	     in only one basic block.  */
	  if (bb_info[INSN_UID (x)] != NULL)
	    {
	      error ("insn %d is in multiple basic blocks (%d and %d)",
		     INSN_UID (x), bb->index, bb_info[INSN_UID (x)]->index);
	      err = 1;
	    }

	  bb_info[INSN_UID (x)] = bb;

	  if (x == head)
	    break;
	}
      if (!x)
	{
	  error ("head insn %d for block %d not found in the insn stream",
		 INSN_UID (head), bb->index);
	  err = 1;
	}

      /* Resume the scan for the previous block just before this head.
	 NOTE(review): if X is NULL here (head not found), PREV_INSN (x)
	 would dereference NULL — upstream relies on fatal diagnostics or
	 well-formed chains preventing that; confirm before changing.  */
      last_head = PREV_INSN (x);
    }

  /* What remains before the first block's head is pre-function code.  */
  for (x = last_head; x != NULL_RTX; x = PREV_INSN (x))
    {
      /* Check that the code before the first basic block has NULL
	 bb field.  */
      if (!BARRIER_P (x)
	  && BLOCK_FOR_INSN (x) != NULL)
	{
	  error ("insn %d outside of basic blocks has non-NULL bb field",
		 INSN_UID (x));
	  err = 1;
	}
    }
  free (bb_info);

  return err;
}
286938fd1498Szrj
287038fd1498Szrj /* Verify that fallthru edges point to adjacent blocks in layout order and
287138fd1498Szrj that barriers exist after non-fallthru blocks. */
287238fd1498Szrj
static int
rtl_verify_fallthru (void)
{
  basic_block bb;
  int err = 0;

  FOR_EACH_BB_REVERSE_FN (bb, cfun)
    {
      edge e;

      e = find_fallthru_edge (bb->succs);
      if (!e)
	{
	  rtx_insn *insn;

	  /* Ensure existence of barrier in BB with no fallthru edges.  */
	  for (insn = NEXT_INSN (BB_END (bb)); ; insn = NEXT_INSN (insn))
	    {
	      /* Hitting the end of the chain or the next block's note
		 first means the required barrier is absent.  */
	      if (!insn || NOTE_INSN_BASIC_BLOCK_P (insn))
		{
		  error ("missing barrier after block %i", bb->index);
		  err = 1;
		  break;
		}
	      if (BARRIER_P (insn))
		break;
	    }
	}
      else if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
	       && e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
	{
	  rtx_insn *insn;

	  /* A fallthru edge must connect layout-adjacent blocks ...  */
	  if (e->src->next_bb != e->dest)
	    {
	      error
		("verify_flow_info: Incorrect blocks for fallthru %i->%i",
		 e->src->index, e->dest->index);
	      err = 1;
	    }
	  else
	    /* ... and only notes/debug insns may sit between them.  */
	    for (insn = NEXT_INSN (BB_END (e->src)); insn != BB_HEAD (e->dest);
		 insn = NEXT_INSN (insn))
	      if (BARRIER_P (insn) || NONDEBUG_INSN_P (insn))
		{
		  error ("verify_flow_info: Incorrect fallthru %i->%i",
			 e->src->index, e->dest->index);
		  fatal_insn ("wrong insn in the fallthru edge", insn);
		  err = 1;
		}
	}
    }

  return err;
}
292838fd1498Szrj
292938fd1498Szrj /* Verify that blocks are laid out in consecutive order. While walking the
293038fd1498Szrj instructions, verify that all expected instructions are inside the basic
293138fd1498Szrj blocks, and that all returns are followed by barriers. */
293238fd1498Szrj
293338fd1498Szrj static int
rtl_verify_bb_layout(void)293438fd1498Szrj rtl_verify_bb_layout (void)
293538fd1498Szrj {
293638fd1498Szrj basic_block bb;
293738fd1498Szrj int err = 0;
293838fd1498Szrj rtx_insn *x, *y;
293938fd1498Szrj int num_bb_notes;
294038fd1498Szrj rtx_insn * const rtx_first = get_insns ();
294138fd1498Szrj basic_block last_bb_seen = ENTRY_BLOCK_PTR_FOR_FN (cfun), curr_bb = NULL;
294238fd1498Szrj
294338fd1498Szrj num_bb_notes = 0;
294438fd1498Szrj last_bb_seen = ENTRY_BLOCK_PTR_FOR_FN (cfun);
294538fd1498Szrj
294638fd1498Szrj for (x = rtx_first; x; x = NEXT_INSN (x))
294738fd1498Szrj {
294838fd1498Szrj if (NOTE_INSN_BASIC_BLOCK_P (x))
294938fd1498Szrj {
295038fd1498Szrj bb = NOTE_BASIC_BLOCK (x);
295138fd1498Szrj
295238fd1498Szrj num_bb_notes++;
295338fd1498Szrj if (bb != last_bb_seen->next_bb)
295438fd1498Szrj internal_error ("basic blocks not laid down consecutively");
295538fd1498Szrj
295638fd1498Szrj curr_bb = last_bb_seen = bb;
295738fd1498Szrj }
295838fd1498Szrj
295938fd1498Szrj if (!curr_bb)
296038fd1498Szrj {
296138fd1498Szrj switch (GET_CODE (x))
296238fd1498Szrj {
296338fd1498Szrj case BARRIER:
296438fd1498Szrj case NOTE:
296538fd1498Szrj break;
296638fd1498Szrj
296738fd1498Szrj case CODE_LABEL:
296838fd1498Szrj /* An ADDR_VEC is placed outside any basic block. */
296938fd1498Szrj if (NEXT_INSN (x)
297038fd1498Szrj && JUMP_TABLE_DATA_P (NEXT_INSN (x)))
297138fd1498Szrj x = NEXT_INSN (x);
297238fd1498Szrj
297338fd1498Szrj /* But in any case, non-deletable labels can appear anywhere. */
297438fd1498Szrj break;
297538fd1498Szrj
297638fd1498Szrj default:
297738fd1498Szrj fatal_insn ("insn outside basic block", x);
297838fd1498Szrj }
297938fd1498Szrj }
298038fd1498Szrj
298138fd1498Szrj if (JUMP_P (x)
298238fd1498Szrj && returnjump_p (x) && ! condjump_p (x)
298338fd1498Szrj && ! ((y = next_nonnote_nondebug_insn (x))
298438fd1498Szrj && BARRIER_P (y)))
298538fd1498Szrj fatal_insn ("return not followed by barrier", x);
298638fd1498Szrj
298738fd1498Szrj if (curr_bb && x == BB_END (curr_bb))
298838fd1498Szrj curr_bb = NULL;
298938fd1498Szrj }
299038fd1498Szrj
299138fd1498Szrj if (num_bb_notes != n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS)
299238fd1498Szrj internal_error
299338fd1498Szrj ("number of bb notes in insn chain (%d) != n_basic_blocks (%d)",
299438fd1498Szrj num_bb_notes, n_basic_blocks_for_fn (cfun));
299538fd1498Szrj
299638fd1498Szrj return err;
299738fd1498Szrj }
299838fd1498Szrj
299938fd1498Szrj /* Verify the CFG and RTL consistency common for both underlying RTL and
300038fd1498Szrj cfglayout RTL, plus consistency checks specific to linearized RTL mode.
300138fd1498Szrj
   Currently it does the following checks:
300338fd1498Szrj - all checks of rtl_verify_flow_info_1
300438fd1498Szrj - test head/end pointers
300538fd1498Szrj - check that blocks are laid out in consecutive order
300638fd1498Szrj - check that all insns are in the basic blocks
300738fd1498Szrj (except the switch handling code, barriers and notes)
300838fd1498Szrj - check that all returns are followed by barriers
     - check that all fallthru edges point to the adjacent blocks
301038fd1498Szrj - verify that there is a single hot/cold partition boundary after bbro */
301138fd1498Szrj
301238fd1498Szrj static int
rtl_verify_flow_info(void)301338fd1498Szrj rtl_verify_flow_info (void)
301438fd1498Szrj {
301538fd1498Szrj int err = 0;
301638fd1498Szrj
301738fd1498Szrj err |= rtl_verify_flow_info_1 ();
301838fd1498Szrj
301938fd1498Szrj err |= rtl_verify_bb_insn_chain ();
302038fd1498Szrj
302138fd1498Szrj err |= rtl_verify_fallthru ();
302238fd1498Szrj
302338fd1498Szrj err |= rtl_verify_bb_layout ();
302438fd1498Szrj
302538fd1498Szrj err |= verify_hot_cold_block_grouping ();
302638fd1498Szrj
302738fd1498Szrj return err;
302838fd1498Szrj }
302938fd1498Szrj
303038fd1498Szrj /* Assume that the preceding pass has possibly eliminated jump instructions
303138fd1498Szrj or converted the unconditional jumps. Eliminate the edges from CFG.
303238fd1498Szrj Return true if any edges are eliminated. */
303338fd1498Szrj
bool
purge_dead_edges (basic_block bb)
{
  edge e;
  rtx_insn *insn = BB_END (bb);
  rtx note;
  bool purged = false;
  bool found;
  edge_iterator ei;

  /* If the block ends in debug insns, step back over trailing debug
     insns and notes: the outgoing edges are justified by the last
     "real" insn of the block.  */
  if (DEBUG_INSN_P (insn) && insn != BB_HEAD (bb))
    do
      insn = PREV_INSN (insn);
    while ((DEBUG_INSN_P (insn) || NOTE_P (insn)) && insn != BB_HEAD (bb));

  /* If this instruction cannot trap, remove REG_EH_REGION notes.  */
  if (NONJUMP_INSN_P (insn)
      && (note = find_reg_note (insn, REG_EH_REGION, NULL)))
    {
      rtx eqnote;

      if (! may_trap_p (PATTERN (insn))
	  || ((eqnote = find_reg_equal_equiv_note (insn))
	      && ! may_trap_p (XEXP (eqnote, 0))))
	remove_note (insn, note);
    }

  /* Cleanup abnormal edges caused by exceptions or non-local gotos.  */
  for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
    {
      bool remove = false;

      /* There are three types of edges we need to handle correctly here: EH
	 edges, abnormal call EH edges, and abnormal call non-EH edges.  The
	 latter can appear when nonlocal gotos are used.  */
      if (e->flags & EDGE_ABNORMAL_CALL)
	{
	  if (!CALL_P (insn))
	    remove = true;
	  else if (can_nonlocal_goto (insn))
	    ;
	  else if ((e->flags & EDGE_EH) && can_throw_internal (insn))
	    ;
	  else if (flag_tm && find_reg_note (insn, REG_TM, NULL))
	    ;
	  else
	    remove = true;
	}
      else if (e->flags & EDGE_EH)
	remove = !can_throw_internal (insn);

      if (remove)
	{
	  remove_edge (e);
	  df_set_bb_dirty (bb);
	  purged = true;
	}
      else
	ei_next (&ei);
    }

  if (JUMP_P (insn))
    {
      rtx note;
      edge b,f;
      edge_iterator ei;

      /* We do care only about conditional jumps and simplejumps.  */
      if (!any_condjump_p (insn)
	  && !returnjump_p (insn)
	  && !simplejump_p (insn))
	return purged;

      /* Branch probability/prediction notes are defined only for
	 condjumps.  We've possibly turned condjump into simplejump.  */
      if (simplejump_p (insn))
	{
	  note = find_reg_note (insn, REG_BR_PROB, NULL);
	  if (note)
	    remove_note (insn, note);
	  while ((note = find_reg_note (insn, REG_BR_PRED, NULL)))
	    remove_note (insn, note);
	}

      /* Walk the successors, removing every edge that the jump no
	 longer justifies.  */
      for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
	{
	  /* Avoid abnormal flags to leak from computed jumps turned
	     into simplejumps.  */

	  e->flags &= ~EDGE_ABNORMAL;

	  /* See if this edge is one we should keep.  */
	  if ((e->flags & EDGE_FALLTHRU) && any_condjump_p (insn))
	    /* A conditional jump can fall through into the next
	       block, so we should keep the edge.  */
	    {
	      ei_next (&ei);
	      continue;
	    }
	  else if (e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun)
		   && BB_HEAD (e->dest) == JUMP_LABEL (insn))
	    /* If the destination block is the target of the jump,
	       keep the edge.  */
	    {
	      ei_next (&ei);
	      continue;
	    }
	  else if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun)
		   && returnjump_p (insn))
	    /* If the destination block is the exit block, and this
	       instruction is a return, then keep the edge.  */
	    {
	      ei_next (&ei);
	      continue;
	    }
	  else if ((e->flags & EDGE_EH) && can_throw_internal (insn))
	    /* Keep the edges that correspond to exceptions thrown by
	       this instruction and rematerialize the EDGE_ABNORMAL
	       flag we just cleared above.  */
	    {
	      e->flags |= EDGE_ABNORMAL;
	      ei_next (&ei);
	      continue;
	    }

	  /* We do not need this edge.  */
	  df_set_bb_dirty (bb);
	  purged = true;
	  remove_edge (e);
	}

      if (EDGE_COUNT (bb->succs) == 0 || !purged)
	return purged;

      if (dump_file)
	fprintf (dump_file, "Purged edges from bb %i\n", bb->index);

      if (!optimize)
	return purged;

      /* Redistribute probabilities.  */
      if (single_succ_p (bb))
	{
	  single_succ_edge (bb)->probability = profile_probability::always ();
	}
      else
	{
	  /* Without a REG_BR_PROB note we have nothing to redistribute
	     from, so leave the probabilities alone.  */
	  note = find_reg_note (insn, REG_BR_PROB, NULL);
	  if (!note)
	    return purged;

	  b = BRANCH_EDGE (bb);
	  f = FALLTHRU_EDGE (bb);
	  b->probability = profile_probability::from_reg_br_prob_note
	      (XINT (note, 0));
	  f->probability = b->probability.invert ();
	}

      return purged;
    }
  else if (CALL_P (insn) && SIBLING_CALL_P (insn))
    {
      /* First, there should not be any EH or ABCALL edges resulting
	 from non-local gotos and the like.  If there were, we shouldn't
	 have created the sibcall in the first place.  Second, there
	 should of course never have been a fallthru edge.  */
      gcc_assert (single_succ_p (bb));
      gcc_assert (single_succ_edge (bb)->flags
		  == (EDGE_SIBCALL | EDGE_ABNORMAL));

      return 0;
    }

  /* If we don't see a jump insn, we don't know exactly why the block would
     have been broken at this point.  Look for a simple, non-fallthru edge,
     as these are only created by conditional branches.  If we find such an
     edge we know that there used to be a jump here and can then safely
     remove all non-fallthru edges.  */
  found = false;
  FOR_EACH_EDGE (e, ei, bb->succs)
    if (! (e->flags & (EDGE_COMPLEX | EDGE_FALLTHRU)))
      {
	found = true;
	break;
      }

  if (!found)
    return purged;

  /* Remove all but the fake and fallthru edges.  The fake edge may be
     the only successor for this block in the case of noreturn
     calls.  */
  for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
    {
      if (!(e->flags & (EDGE_FALLTHRU | EDGE_FAKE)))
	{
	  df_set_bb_dirty (bb);
	  remove_edge (e);
	  purged = true;
	}
      else
	ei_next (&ei);
    }

  gcc_assert (single_succ_p (bb));

  single_succ_edge (bb)->probability = profile_probability::always ();

  if (dump_file)
    fprintf (dump_file, "Purged non-fallthru edges from bb %i\n",
	     bb->index);
  return purged;
}
324738fd1498Szrj
324838fd1498Szrj /* Search all basic blocks for potentially dead edges and purge them. Return
324938fd1498Szrj true if some edge has been eliminated. */
325038fd1498Szrj
325138fd1498Szrj bool
purge_all_dead_edges(void)325238fd1498Szrj purge_all_dead_edges (void)
325338fd1498Szrj {
325438fd1498Szrj int purged = false;
325538fd1498Szrj basic_block bb;
325638fd1498Szrj
325738fd1498Szrj FOR_EACH_BB_FN (bb, cfun)
325838fd1498Szrj {
325938fd1498Szrj bool purged_here = purge_dead_edges (bb);
326038fd1498Szrj
326138fd1498Szrj purged |= purged_here;
326238fd1498Szrj }
326338fd1498Szrj
326438fd1498Szrj return purged;
326538fd1498Szrj }
326638fd1498Szrj
326738fd1498Szrj /* This is used by a few passes that emit some instructions after abnormal
326838fd1498Szrj calls, moving the basic block's end, while they in fact do want to emit
326938fd1498Szrj them on the fallthru edge. Look for abnormal call edges, find backward
327038fd1498Szrj the call in the block and insert the instructions on the edge instead.
327138fd1498Szrj
327238fd1498Szrj Similarly, handle instructions throwing exceptions internally.
327338fd1498Szrj
327438fd1498Szrj Return true when instructions have been found and inserted on edges. */
327538fd1498Szrj
bool
fixup_abnormal_edges (void)
{
  bool inserted = false;
  basic_block bb;

  FOR_EACH_BB_FN (bb, cfun)
    {
      edge e;
      edge_iterator ei;

      /* Look for cases we are interested in - calls or instructions causing
	 exceptions.  */
      FOR_EACH_EDGE (e, ei, bb->succs)
	if ((e->flags & EDGE_ABNORMAL_CALL)
	    || ((e->flags & (EDGE_ABNORMAL | EDGE_EH))
		== (EDGE_ABNORMAL | EDGE_EH)))
	  break;

      /* Only act when the block's last insn no longer justifies the
	 abnormal edge, i.e. insns were emitted after the call or
	 trapping insn that used to end the block.  */
      if (e && !CALL_P (BB_END (bb)) && !can_throw_internal (BB_END (bb)))
	{
	  rtx_insn *insn;

	  /* Get past the new insns generated.  Allow notes, as the insns
	     may be already deleted.  */
	  insn = BB_END (bb);
	  while ((NONJUMP_INSN_P (insn) || NOTE_P (insn))
		 && !can_throw_internal (insn)
		 && insn != BB_HEAD (bb))
	    insn = PREV_INSN (insn);

	  if (CALL_P (insn) || can_throw_internal (insn))
	    {
	      rtx_insn *stop, *next;

	      /* The moved insns belong on the fallthru edge, if any.  */
	      e = find_fallthru_edge (bb->succs);

	      /* Truncate the block at the call/trapping insn and walk
		 the insns that followed it.  */
	      stop = NEXT_INSN (BB_END (bb));
	      BB_END (bb) = insn;

	      for (insn = NEXT_INSN (insn); insn != stop; insn = next)
		{
		  next = NEXT_INSN (insn);
		  if (INSN_P (insn))
		    {
		      delete_insn (insn);

		      /* Sometimes there's still the return value USE.
			 If it's placed after a trapping call (i.e. that
			 call is the last insn anyway), we have no fallthru
			 edge.  Simply delete this use and don't try to insert
			 on the non-existent edge.
			 Similarly, sometimes a call that can throw is
			 followed in the source with __builtin_unreachable (),
			 meaning that there is UB if the call returns rather
			 than throws.  If there weren't any instructions
			 following such calls before, supposedly even the ones
			 we've deleted aren't significant and can be
			 removed.  */
		      if (e)
			{
			  /* We're not deleting it, we're moving it.  */
			  insn->set_undeleted ();
			  SET_PREV_INSN (insn) = NULL_RTX;
			  SET_NEXT_INSN (insn) = NULL_RTX;

			  insert_insn_on_edge (insn, e);
			  inserted = true;
			}
		    }
		  else if (!BARRIER_P (insn))
		    set_block_for_insn (insn, NULL);
		}
	    }

	  /* It may be that we don't find any trapping insn.  In this
	     case we discovered quite late that the insn that had been
	     marked as can_throw_internal in fact couldn't trap at all.
	     So we should in fact delete the EH edges out of the block.  */
	  else
	    purge_dead_edges (bb);
	}
    }

  return inserted;
}
336238fd1498Szrj
336338fd1498Szrj /* Cut the insns from FIRST to LAST out of the insns stream. */
336438fd1498Szrj
336538fd1498Szrj rtx_insn *
unlink_insn_chain(rtx_insn * first,rtx_insn * last)336638fd1498Szrj unlink_insn_chain (rtx_insn *first, rtx_insn *last)
336738fd1498Szrj {
336838fd1498Szrj rtx_insn *prevfirst = PREV_INSN (first);
336938fd1498Szrj rtx_insn *nextlast = NEXT_INSN (last);
337038fd1498Szrj
337138fd1498Szrj SET_PREV_INSN (first) = NULL;
337238fd1498Szrj SET_NEXT_INSN (last) = NULL;
337338fd1498Szrj if (prevfirst)
337438fd1498Szrj SET_NEXT_INSN (prevfirst) = nextlast;
337538fd1498Szrj if (nextlast)
337638fd1498Szrj SET_PREV_INSN (nextlast) = prevfirst;
337738fd1498Szrj else
337838fd1498Szrj set_last_insn (prevfirst);
337938fd1498Szrj if (!prevfirst)
338038fd1498Szrj set_first_insn (nextlast);
338138fd1498Szrj return first;
338238fd1498Szrj }
338338fd1498Szrj
338438fd1498Szrj /* Skip over inter-block insns occurring after BB which are typically
338538fd1498Szrj associated with BB (e.g., barriers). If there are any such insns,
338638fd1498Szrj we return the last one. Otherwise, we return the end of BB. */
338738fd1498Szrj
338838fd1498Szrj static rtx_insn *
skip_insns_after_block(basic_block bb)338938fd1498Szrj skip_insns_after_block (basic_block bb)
339038fd1498Szrj {
339138fd1498Szrj rtx_insn *insn, *last_insn, *next_head, *prev;
339238fd1498Szrj
339338fd1498Szrj next_head = NULL;
339438fd1498Szrj if (bb->next_bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
339538fd1498Szrj next_head = BB_HEAD (bb->next_bb);
339638fd1498Szrj
339738fd1498Szrj for (last_insn = insn = BB_END (bb); (insn = NEXT_INSN (insn)) != 0; )
339838fd1498Szrj {
339938fd1498Szrj if (insn == next_head)
340038fd1498Szrj break;
340138fd1498Szrj
340238fd1498Szrj switch (GET_CODE (insn))
340338fd1498Szrj {
340438fd1498Szrj case BARRIER:
340538fd1498Szrj last_insn = insn;
340638fd1498Szrj continue;
340738fd1498Szrj
340838fd1498Szrj case NOTE:
340938fd1498Szrj switch (NOTE_KIND (insn))
341038fd1498Szrj {
341138fd1498Szrj case NOTE_INSN_BLOCK_END:
341238fd1498Szrj gcc_unreachable ();
341338fd1498Szrj continue;
341438fd1498Szrj default:
341538fd1498Szrj continue;
341638fd1498Szrj break;
341738fd1498Szrj }
341838fd1498Szrj break;
341938fd1498Szrj
342038fd1498Szrj case CODE_LABEL:
342138fd1498Szrj if (NEXT_INSN (insn)
342238fd1498Szrj && JUMP_TABLE_DATA_P (NEXT_INSN (insn)))
342338fd1498Szrj {
342438fd1498Szrj insn = NEXT_INSN (insn);
342538fd1498Szrj last_insn = insn;
342638fd1498Szrj continue;
342738fd1498Szrj }
342838fd1498Szrj break;
342938fd1498Szrj
343038fd1498Szrj default:
343138fd1498Szrj break;
343238fd1498Szrj }
343338fd1498Szrj
343438fd1498Szrj break;
343538fd1498Szrj }
343638fd1498Szrj
343738fd1498Szrj /* It is possible to hit contradictory sequence. For instance:
343838fd1498Szrj
343938fd1498Szrj jump_insn
344038fd1498Szrj NOTE_INSN_BLOCK_BEG
344138fd1498Szrj barrier
344238fd1498Szrj
344338fd1498Szrj Where barrier belongs to jump_insn, but the note does not. This can be
344438fd1498Szrj created by removing the basic block originally following
344538fd1498Szrj NOTE_INSN_BLOCK_BEG. In such case reorder the notes. */
344638fd1498Szrj
344738fd1498Szrj for (insn = last_insn; insn != BB_END (bb); insn = prev)
344838fd1498Szrj {
344938fd1498Szrj prev = PREV_INSN (insn);
345038fd1498Szrj if (NOTE_P (insn))
345138fd1498Szrj switch (NOTE_KIND (insn))
345238fd1498Szrj {
345338fd1498Szrj case NOTE_INSN_BLOCK_END:
345438fd1498Szrj gcc_unreachable ();
345538fd1498Szrj break;
345638fd1498Szrj case NOTE_INSN_DELETED:
345738fd1498Szrj case NOTE_INSN_DELETED_LABEL:
345838fd1498Szrj case NOTE_INSN_DELETED_DEBUG_LABEL:
345938fd1498Szrj continue;
346038fd1498Szrj default:
346138fd1498Szrj reorder_insns (insn, insn, last_insn);
346238fd1498Szrj }
346338fd1498Szrj }
346438fd1498Szrj
346538fd1498Szrj return last_insn;
346638fd1498Szrj }
346738fd1498Szrj
346838fd1498Szrj /* Locate or create a label for a given basic block. */
346938fd1498Szrj
347038fd1498Szrj static rtx_insn *
label_for_bb(basic_block bb)347138fd1498Szrj label_for_bb (basic_block bb)
347238fd1498Szrj {
347338fd1498Szrj rtx_insn *label = BB_HEAD (bb);
347438fd1498Szrj
347538fd1498Szrj if (!LABEL_P (label))
347638fd1498Szrj {
347738fd1498Szrj if (dump_file)
347838fd1498Szrj fprintf (dump_file, "Emitting label for block %d\n", bb->index);
347938fd1498Szrj
348038fd1498Szrj label = block_label (bb);
348138fd1498Szrj }
348238fd1498Szrj
348338fd1498Szrj return label;
348438fd1498Szrj }
348538fd1498Szrj
348638fd1498Szrj /* Locate the effective beginning and end of the insn chain for each
348738fd1498Szrj block, as defined by skip_insns_after_block above. */
348838fd1498Szrj
static void
record_effective_endpoints (void)
{
  rtx_insn *next_insn;
  basic_block bb;
  rtx_insn *insn;

  /* Find the first basic-block note; everything before it becomes the
     cfglayout-mode function header.  */
  for (insn = get_insns ();
       insn
       && NOTE_P (insn)
       && NOTE_KIND (insn) != NOTE_INSN_BASIC_BLOCK;
       insn = NEXT_INSN (insn))
    continue;
  /* No basic blocks at all?  */
  gcc_assert (insn);

  if (PREV_INSN (insn))
    cfg_layout_function_header =
	    unlink_insn_chain (get_insns (), PREV_INSN (insn));
  else
    cfg_layout_function_header = NULL;

  /* For each block, detach the insns between the previous block's end
     and BB_HEAD as its header, and the trailing insns identified by
     skip_insns_after_block (barriers, jump tables) as its footer.  */
  next_insn = get_insns ();
  FOR_EACH_BB_FN (bb, cfun)
    {
      rtx_insn *end;

      if (PREV_INSN (BB_HEAD (bb)) && next_insn != BB_HEAD (bb))
	BB_HEADER (bb) = unlink_insn_chain (next_insn,
					    PREV_INSN (BB_HEAD (bb)));
      end = skip_insns_after_block (bb);
      if (NEXT_INSN (BB_END (bb)) && BB_END (bb) != end)
	BB_FOOTER (bb) = unlink_insn_chain (NEXT_INSN (BB_END (bb)), end);
      next_insn = NEXT_INSN (BB_END (bb));
    }

  /* Whatever follows the last block becomes the function footer.  */
  cfg_layout_function_footer = next_insn;
  if (cfg_layout_function_footer)
    cfg_layout_function_footer = unlink_insn_chain (cfg_layout_function_footer, get_last_insn ());
}
352938fd1498Szrj
namespace {

/* Pass descriptor for the "into_cfglayout" RTL pass.  */
const pass_data pass_data_into_cfg_layout_mode =
{
  RTL_PASS, /* type */
  "into_cfglayout", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_CFG, /* tv_id */
  0, /* properties_required */
  PROP_cfglayout, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

/* Pass that switches the current function into cfglayout mode.  */
class pass_into_cfg_layout_mode : public rtl_opt_pass
{
public:
  pass_into_cfg_layout_mode (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_into_cfg_layout_mode, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *)
    {
      cfg_layout_initialize (0);
      return 0;
    }

}; // class pass_into_cfg_layout_mode

} // anon namespace
356238fd1498Szrj
/* Factory: create an instance of the "into_cfglayout" pass.  */
rtl_opt_pass *
make_pass_into_cfg_layout_mode (gcc::context *ctxt)
{
  return new pass_into_cfg_layout_mode (ctxt);
}
356838fd1498Szrj
namespace {

/* Pass descriptor for the "outof_cfglayout" RTL pass.  */
const pass_data pass_data_outof_cfg_layout_mode =
{
  RTL_PASS, /* type */
  "outof_cfglayout", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_CFG, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  PROP_cfglayout, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

/* Pass that takes the current function out of cfglayout mode.  */
class pass_outof_cfg_layout_mode : public rtl_opt_pass
{
public:
  pass_outof_cfg_layout_mode (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_outof_cfg_layout_mode, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *);

}; // class pass_outof_cfg_layout_mode

/* Record the current linear block order in the bb->aux chain (which
   cfg_layout_finalize consumes), then leave cfglayout mode.  */
unsigned int
pass_outof_cfg_layout_mode::execute (function *fun)
{
  basic_block bb;

  FOR_EACH_BB_FN (bb, fun)
    if (bb->next_bb != EXIT_BLOCK_PTR_FOR_FN (fun))
      bb->aux = bb->next_bb;

  cfg_layout_finalize ();

  return 0;
}

} // anon namespace
361138fd1498Szrj
/* Factory: create an instance of the "outof_cfglayout" pass.  */
rtl_opt_pass *
make_pass_outof_cfg_layout_mode (gcc::context *ctxt)
{
  return new pass_outof_cfg_layout_mode (ctxt);
}
361738fd1498Szrj
361838fd1498Szrj
361938fd1498Szrj /* Link the basic blocks in the correct order, compacting the basic
362038fd1498Szrj block queue while at it. If STAY_IN_CFGLAYOUT_MODE is false, this
362138fd1498Szrj function also clears the basic block header and footer fields.
362238fd1498Szrj
362338fd1498Szrj This function is usually called after a pass (e.g. tracer) finishes
362438fd1498Szrj some transformations while in cfglayout mode. The required sequence
362538fd1498Szrj of the basic blocks is in a linked list along the bb->aux field.
362638fd1498Szrj This functions re-links the basic block prev_bb and next_bb pointers
362738fd1498Szrj accordingly, and it compacts and renumbers the blocks.
362838fd1498Szrj
362938fd1498Szrj FIXME: This currently works only for RTL, but the only RTL-specific
363038fd1498Szrj bits are the STAY_IN_CFGLAYOUT_MODE bits. The tracer pass was moved
363138fd1498Szrj to GIMPLE a long time ago, but it doesn't relink the basic block
363238fd1498Szrj chain. It could do that (to give better initial RTL) if this function
363338fd1498Szrj is made IR-agnostic (and moved to cfganal.c or cfg.c while at it). */
363438fd1498Szrj
void
relink_block_chain (bool stay_in_cfglayout_mode)
{
  basic_block bb, prev_bb;
  int index;

  /* Maybe dump the re-ordered sequence.  */
  if (dump_file)
    {
      fprintf (dump_file, "Reordered sequence:\n");
      for (bb = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb, index =
	   NUM_FIXED_BLOCKS;
	   bb;
	   bb = (basic_block) bb->aux, index++)
	{
	  fprintf (dump_file, " %i ", index);
	  if (get_bb_original (bb))
	    fprintf (dump_file, "duplicate of %i ",
		     get_bb_original (bb)->index);
	  else if (forwarder_block_p (bb)
		   && !LABEL_P (BB_HEAD (bb)))
	    fprintf (dump_file, "compensation ");
	  else
	    fprintf (dump_file, "bb %i ", bb->index);
	}
    }

  /* Now reorder the blocks.  The desired order is threaded through the
     bb->aux pointers; rebuild the prev_bb/next_bb links to match.  */
  prev_bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);
  bb = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb;
  for (; bb; prev_bb = bb, bb = (basic_block) bb->aux)
    {
      bb->prev_bb = prev_bb;
      prev_bb->next_bb = bb;
    }
  prev_bb->next_bb = EXIT_BLOCK_PTR_FOR_FN (cfun);
  EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb = prev_bb;

  /* Then, clean up the aux fields.  */
  FOR_ALL_BB_FN (bb, cfun)
    {
      bb->aux = NULL;
      if (!stay_in_cfglayout_mode)
	BB_HEADER (bb) = BB_FOOTER (bb) = NULL;
    }

  /* Maybe reset the original copy tables, they are not valid anymore
     when we renumber the basic blocks in compact_blocks.  If we are
     going out of cfglayout mode, don't re-allocate the tables.  */
  if (original_copy_tables_initialized_p ())
    free_original_copy_tables ();
  if (stay_in_cfglayout_mode)
    initialize_original_copy_tables ();

  /* Finally, put basic_block_info in the new order.  */
  compact_blocks ();
}
369238fd1498Szrj
369338fd1498Szrj
369438fd1498Szrj /* Given a reorder chain, rearrange the code to match. */
369538fd1498Szrj
369638fd1498Szrj static void
fixup_reorder_chain(void)369738fd1498Szrj fixup_reorder_chain (void)
369838fd1498Szrj {
369938fd1498Szrj basic_block bb;
370038fd1498Szrj rtx_insn *insn = NULL;
370138fd1498Szrj
370238fd1498Szrj if (cfg_layout_function_header)
370338fd1498Szrj {
370438fd1498Szrj set_first_insn (cfg_layout_function_header);
370538fd1498Szrj insn = cfg_layout_function_header;
370638fd1498Szrj while (NEXT_INSN (insn))
370738fd1498Szrj insn = NEXT_INSN (insn);
370838fd1498Szrj }
370938fd1498Szrj
371038fd1498Szrj /* First do the bulk reordering -- rechain the blocks without regard to
371138fd1498Szrj the needed changes to jumps and labels. */
371238fd1498Szrj
371338fd1498Szrj for (bb = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb; bb; bb = (basic_block)
371438fd1498Szrj bb->aux)
371538fd1498Szrj {
371638fd1498Szrj if (BB_HEADER (bb))
371738fd1498Szrj {
371838fd1498Szrj if (insn)
371938fd1498Szrj SET_NEXT_INSN (insn) = BB_HEADER (bb);
372038fd1498Szrj else
372138fd1498Szrj set_first_insn (BB_HEADER (bb));
372238fd1498Szrj SET_PREV_INSN (BB_HEADER (bb)) = insn;
372338fd1498Szrj insn = BB_HEADER (bb);
372438fd1498Szrj while (NEXT_INSN (insn))
372538fd1498Szrj insn = NEXT_INSN (insn);
372638fd1498Szrj }
372738fd1498Szrj if (insn)
372838fd1498Szrj SET_NEXT_INSN (insn) = BB_HEAD (bb);
372938fd1498Szrj else
373038fd1498Szrj set_first_insn (BB_HEAD (bb));
373138fd1498Szrj SET_PREV_INSN (BB_HEAD (bb)) = insn;
373238fd1498Szrj insn = BB_END (bb);
373338fd1498Szrj if (BB_FOOTER (bb))
373438fd1498Szrj {
373538fd1498Szrj SET_NEXT_INSN (insn) = BB_FOOTER (bb);
373638fd1498Szrj SET_PREV_INSN (BB_FOOTER (bb)) = insn;
373738fd1498Szrj while (NEXT_INSN (insn))
373838fd1498Szrj insn = NEXT_INSN (insn);
373938fd1498Szrj }
374038fd1498Szrj }
374138fd1498Szrj
374238fd1498Szrj SET_NEXT_INSN (insn) = cfg_layout_function_footer;
374338fd1498Szrj if (cfg_layout_function_footer)
374438fd1498Szrj SET_PREV_INSN (cfg_layout_function_footer) = insn;
374538fd1498Szrj
374638fd1498Szrj while (NEXT_INSN (insn))
374738fd1498Szrj insn = NEXT_INSN (insn);
374838fd1498Szrj
374938fd1498Szrj set_last_insn (insn);
375038fd1498Szrj if (flag_checking)
375138fd1498Szrj verify_insn_chain ();
375238fd1498Szrj
375338fd1498Szrj /* Now add jumps and labels as needed to match the blocks new
375438fd1498Szrj outgoing edges. */
375538fd1498Szrj
375638fd1498Szrj for (bb = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb; bb ; bb = (basic_block)
375738fd1498Szrj bb->aux)
375838fd1498Szrj {
375938fd1498Szrj edge e_fall, e_taken, e;
376038fd1498Szrj rtx_insn *bb_end_insn;
376138fd1498Szrj rtx ret_label = NULL_RTX;
376238fd1498Szrj basic_block nb;
376338fd1498Szrj edge_iterator ei;
376438fd1498Szrj
376538fd1498Szrj if (EDGE_COUNT (bb->succs) == 0)
376638fd1498Szrj continue;
376738fd1498Szrj
376838fd1498Szrj /* Find the old fallthru edge, and another non-EH edge for
376938fd1498Szrj a taken jump. */
377038fd1498Szrj e_taken = e_fall = NULL;
377138fd1498Szrj
377238fd1498Szrj FOR_EACH_EDGE (e, ei, bb->succs)
377338fd1498Szrj if (e->flags & EDGE_FALLTHRU)
377438fd1498Szrj e_fall = e;
377538fd1498Szrj else if (! (e->flags & EDGE_EH))
377638fd1498Szrj e_taken = e;
377738fd1498Szrj
377838fd1498Szrj bb_end_insn = BB_END (bb);
377938fd1498Szrj if (rtx_jump_insn *bb_end_jump = dyn_cast <rtx_jump_insn *> (bb_end_insn))
378038fd1498Szrj {
378138fd1498Szrj ret_label = JUMP_LABEL (bb_end_jump);
378238fd1498Szrj if (any_condjump_p (bb_end_jump))
378338fd1498Szrj {
378438fd1498Szrj /* This might happen if the conditional jump has side
378538fd1498Szrj effects and could therefore not be optimized away.
378638fd1498Szrj Make the basic block to end with a barrier in order
378738fd1498Szrj to prevent rtl_verify_flow_info from complaining. */
378838fd1498Szrj if (!e_fall)
378938fd1498Szrj {
379038fd1498Szrj gcc_assert (!onlyjump_p (bb_end_jump)
379138fd1498Szrj || returnjump_p (bb_end_jump)
379238fd1498Szrj || (e_taken->flags & EDGE_CROSSING));
379338fd1498Szrj emit_barrier_after (bb_end_jump);
379438fd1498Szrj continue;
379538fd1498Szrj }
379638fd1498Szrj
379738fd1498Szrj /* If the old fallthru is still next, nothing to do. */
379838fd1498Szrj if (bb->aux == e_fall->dest
379938fd1498Szrj || e_fall->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
380038fd1498Szrj continue;
380138fd1498Szrj
380238fd1498Szrj /* The degenerated case of conditional jump jumping to the next
380338fd1498Szrj instruction can happen for jumps with side effects. We need
380438fd1498Szrj to construct a forwarder block and this will be done just
380538fd1498Szrj fine by force_nonfallthru below. */
380638fd1498Szrj if (!e_taken)
380738fd1498Szrj ;
380838fd1498Szrj
380938fd1498Szrj /* There is another special case: if *neither* block is next,
381038fd1498Szrj such as happens at the very end of a function, then we'll
381138fd1498Szrj need to add a new unconditional jump. Choose the taken
381238fd1498Szrj edge based on known or assumed probability. */
381338fd1498Szrj else if (bb->aux != e_taken->dest)
381438fd1498Szrj {
381538fd1498Szrj rtx note = find_reg_note (bb_end_jump, REG_BR_PROB, 0);
381638fd1498Szrj
381738fd1498Szrj if (note
381838fd1498Szrj && profile_probability::from_reg_br_prob_note
381938fd1498Szrj (XINT (note, 0)) < profile_probability::even ()
382038fd1498Szrj && invert_jump (bb_end_jump,
382138fd1498Szrj (e_fall->dest
382238fd1498Szrj == EXIT_BLOCK_PTR_FOR_FN (cfun)
382338fd1498Szrj ? NULL_RTX
382438fd1498Szrj : label_for_bb (e_fall->dest)), 0))
382538fd1498Szrj {
382638fd1498Szrj e_fall->flags &= ~EDGE_FALLTHRU;
382738fd1498Szrj gcc_checking_assert (could_fall_through
382838fd1498Szrj (e_taken->src, e_taken->dest));
382938fd1498Szrj e_taken->flags |= EDGE_FALLTHRU;
383038fd1498Szrj update_br_prob_note (bb);
383138fd1498Szrj e = e_fall, e_fall = e_taken, e_taken = e;
383238fd1498Szrj }
383338fd1498Szrj }
383438fd1498Szrj
383538fd1498Szrj /* If the "jumping" edge is a crossing edge, and the fall
383638fd1498Szrj through edge is non-crossing, leave things as they are. */
383738fd1498Szrj else if ((e_taken->flags & EDGE_CROSSING)
383838fd1498Szrj && !(e_fall->flags & EDGE_CROSSING))
383938fd1498Szrj continue;
384038fd1498Szrj
384138fd1498Szrj /* Otherwise we can try to invert the jump. This will
384238fd1498Szrj basically never fail, however, keep up the pretense. */
384338fd1498Szrj else if (invert_jump (bb_end_jump,
384438fd1498Szrj (e_fall->dest
384538fd1498Szrj == EXIT_BLOCK_PTR_FOR_FN (cfun)
384638fd1498Szrj ? NULL_RTX
384738fd1498Szrj : label_for_bb (e_fall->dest)), 0))
384838fd1498Szrj {
384938fd1498Szrj e_fall->flags &= ~EDGE_FALLTHRU;
385038fd1498Szrj gcc_checking_assert (could_fall_through
385138fd1498Szrj (e_taken->src, e_taken->dest));
385238fd1498Szrj e_taken->flags |= EDGE_FALLTHRU;
385338fd1498Szrj update_br_prob_note (bb);
385438fd1498Szrj if (LABEL_NUSES (ret_label) == 0
385538fd1498Szrj && single_pred_p (e_taken->dest))
385638fd1498Szrj delete_insn (as_a<rtx_insn *> (ret_label));
385738fd1498Szrj continue;
385838fd1498Szrj }
385938fd1498Szrj }
386038fd1498Szrj else if (extract_asm_operands (PATTERN (bb_end_insn)) != NULL)
386138fd1498Szrj {
386238fd1498Szrj /* If the old fallthru is still next or if
386338fd1498Szrj asm goto doesn't have a fallthru (e.g. when followed by
386438fd1498Szrj __builtin_unreachable ()), nothing to do. */
386538fd1498Szrj if (! e_fall
386638fd1498Szrj || bb->aux == e_fall->dest
386738fd1498Szrj || e_fall->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
386838fd1498Szrj continue;
386938fd1498Szrj
387038fd1498Szrj /* Otherwise we'll have to use the fallthru fixup below. */
387138fd1498Szrj }
387238fd1498Szrj else
387338fd1498Szrj {
387438fd1498Szrj /* Otherwise we have some return, switch or computed
387538fd1498Szrj jump. In the 99% case, there should not have been a
387638fd1498Szrj fallthru edge. */
387738fd1498Szrj gcc_assert (returnjump_p (bb_end_insn) || !e_fall);
387838fd1498Szrj continue;
387938fd1498Szrj }
388038fd1498Szrj }
388138fd1498Szrj else
388238fd1498Szrj {
388338fd1498Szrj /* No fallthru implies a noreturn function with EH edges, or
388438fd1498Szrj something similarly bizarre. In any case, we don't need to
388538fd1498Szrj do anything. */
388638fd1498Szrj if (! e_fall)
388738fd1498Szrj continue;
388838fd1498Szrj
388938fd1498Szrj /* If the fallthru block is still next, nothing to do. */
389038fd1498Szrj if (bb->aux == e_fall->dest)
389138fd1498Szrj continue;
389238fd1498Szrj
389338fd1498Szrj /* A fallthru to exit block. */
389438fd1498Szrj if (e_fall->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
389538fd1498Szrj continue;
389638fd1498Szrj }
389738fd1498Szrj
389838fd1498Szrj /* We got here if we need to add a new jump insn.
389938fd1498Szrj Note force_nonfallthru can delete E_FALL and thus we have to
390038fd1498Szrj save E_FALL->src prior to the call to force_nonfallthru. */
390138fd1498Szrj nb = force_nonfallthru_and_redirect (e_fall, e_fall->dest, ret_label);
390238fd1498Szrj if (nb)
390338fd1498Szrj {
390438fd1498Szrj nb->aux = bb->aux;
390538fd1498Szrj bb->aux = nb;
390638fd1498Szrj /* Don't process this new block. */
390738fd1498Szrj bb = nb;
390838fd1498Szrj }
390938fd1498Szrj }
391038fd1498Szrj
391138fd1498Szrj relink_block_chain (/*stay_in_cfglayout_mode=*/false);
391238fd1498Szrj
391338fd1498Szrj /* Annoying special case - jump around dead jumptables left in the code. */
391438fd1498Szrj FOR_EACH_BB_FN (bb, cfun)
391538fd1498Szrj {
391638fd1498Szrj edge e = find_fallthru_edge (bb->succs);
391738fd1498Szrj
391838fd1498Szrj if (e && !can_fallthru (e->src, e->dest))
391938fd1498Szrj force_nonfallthru (e);
392038fd1498Szrj }
392138fd1498Szrj
392238fd1498Szrj /* Ensure goto_locus from edges has some instructions with that locus
392338fd1498Szrj in RTL. */
392438fd1498Szrj if (!optimize)
392538fd1498Szrj FOR_EACH_BB_FN (bb, cfun)
392638fd1498Szrj {
392738fd1498Szrj edge e;
392838fd1498Szrj edge_iterator ei;
392938fd1498Szrj
393038fd1498Szrj FOR_EACH_EDGE (e, ei, bb->succs)
393138fd1498Szrj if (LOCATION_LOCUS (e->goto_locus) != UNKNOWN_LOCATION
393238fd1498Szrj && !(e->flags & EDGE_ABNORMAL))
393338fd1498Szrj {
393438fd1498Szrj edge e2;
393538fd1498Szrj edge_iterator ei2;
393638fd1498Szrj basic_block dest, nb;
393738fd1498Szrj rtx_insn *end;
393838fd1498Szrj
393938fd1498Szrj insn = BB_END (e->src);
394038fd1498Szrj end = PREV_INSN (BB_HEAD (e->src));
394138fd1498Szrj while (insn != end
394238fd1498Szrj && (!NONDEBUG_INSN_P (insn) || !INSN_HAS_LOCATION (insn)))
394338fd1498Szrj insn = PREV_INSN (insn);
394438fd1498Szrj if (insn != end
394538fd1498Szrj && INSN_LOCATION (insn) == e->goto_locus)
394638fd1498Szrj continue;
394738fd1498Szrj if (simplejump_p (BB_END (e->src))
394838fd1498Szrj && !INSN_HAS_LOCATION (BB_END (e->src)))
394938fd1498Szrj {
395038fd1498Szrj INSN_LOCATION (BB_END (e->src)) = e->goto_locus;
395138fd1498Szrj continue;
395238fd1498Szrj }
395338fd1498Szrj dest = e->dest;
395438fd1498Szrj if (dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
395538fd1498Szrj {
395638fd1498Szrj /* Non-fallthru edges to the exit block cannot be split. */
395738fd1498Szrj if (!(e->flags & EDGE_FALLTHRU))
395838fd1498Szrj continue;
395938fd1498Szrj }
396038fd1498Szrj else
396138fd1498Szrj {
396238fd1498Szrj insn = BB_HEAD (dest);
396338fd1498Szrj end = NEXT_INSN (BB_END (dest));
396438fd1498Szrj while (insn != end && !NONDEBUG_INSN_P (insn))
396538fd1498Szrj insn = NEXT_INSN (insn);
396638fd1498Szrj if (insn != end && INSN_HAS_LOCATION (insn)
396738fd1498Szrj && INSN_LOCATION (insn) == e->goto_locus)
396838fd1498Szrj continue;
396938fd1498Szrj }
397038fd1498Szrj nb = split_edge (e);
397138fd1498Szrj if (!INSN_P (BB_END (nb)))
397238fd1498Szrj BB_END (nb) = emit_insn_after_noloc (gen_nop (), BB_END (nb),
397338fd1498Szrj nb);
397438fd1498Szrj INSN_LOCATION (BB_END (nb)) = e->goto_locus;
397538fd1498Szrj
397638fd1498Szrj /* If there are other incoming edges to the destination block
397738fd1498Szrj with the same goto locus, redirect them to the new block as
397838fd1498Szrj well, this can prevent other such blocks from being created
397938fd1498Szrj in subsequent iterations of the loop. */
398038fd1498Szrj for (ei2 = ei_start (dest->preds); (e2 = ei_safe_edge (ei2)); )
398138fd1498Szrj if (LOCATION_LOCUS (e2->goto_locus) != UNKNOWN_LOCATION
398238fd1498Szrj && !(e2->flags & (EDGE_ABNORMAL | EDGE_FALLTHRU))
398338fd1498Szrj && e->goto_locus == e2->goto_locus)
398438fd1498Szrj redirect_edge_and_branch (e2, nb);
398538fd1498Szrj else
398638fd1498Szrj ei_next (&ei2);
398738fd1498Szrj }
398838fd1498Szrj }
398938fd1498Szrj }
399038fd1498Szrj
399138fd1498Szrj /* Perform sanity checks on the insn chain.
399238fd1498Szrj 1. Check that next/prev pointers are consistent in both the forward and
399338fd1498Szrj reverse direction.
399438fd1498Szrj 2. Count insns in chain, going both directions, and check if equal.
399538fd1498Szrj 3. Check that get_last_insn () returns the actual end of chain. */
399638fd1498Szrj
399738fd1498Szrj DEBUG_FUNCTION void
verify_insn_chain(void)399838fd1498Szrj verify_insn_chain (void)
399938fd1498Szrj {
400038fd1498Szrj rtx_insn *x, *prevx, *nextx;
400138fd1498Szrj int insn_cnt1, insn_cnt2;
400238fd1498Szrj
400338fd1498Szrj for (prevx = NULL, insn_cnt1 = 1, x = get_insns ();
400438fd1498Szrj x != 0;
400538fd1498Szrj prevx = x, insn_cnt1++, x = NEXT_INSN (x))
400638fd1498Szrj gcc_assert (PREV_INSN (x) == prevx);
400738fd1498Szrj
400838fd1498Szrj gcc_assert (prevx == get_last_insn ());
400938fd1498Szrj
401038fd1498Szrj for (nextx = NULL, insn_cnt2 = 1, x = get_last_insn ();
401138fd1498Szrj x != 0;
401238fd1498Szrj nextx = x, insn_cnt2++, x = PREV_INSN (x))
401338fd1498Szrj gcc_assert (NEXT_INSN (x) == nextx);
401438fd1498Szrj
401538fd1498Szrj gcc_assert (insn_cnt1 == insn_cnt2);
401638fd1498Szrj }
401738fd1498Szrj
/* If we have assembler epilogues, the block falling through to exit must
   be the last one in the reordered chain when we reach final.  Ensure
   that this condition is met.  The block chain is given by the ->aux
   pointers set up in cfglayout mode; this function only reorders that
   chain, it does not move any insns.  */
static void
fixup_fallthru_exit_predecessor (void)
{
  edge e;
  basic_block bb = NULL;

  /* This transformation is not valid before reload, because we might
     separate a call from the instruction that copies the return
     value.  */
  gcc_assert (reload_completed);

  /* BB becomes the source of the fallthru edge into the exit block,
     if there is one.  */
  e = find_fallthru_edge (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds);
  if (e)
    bb = e->src;

  /* bb->aux != NULL means BB is not already last in the chain.  */
  if (bb && bb->aux)
    {
      basic_block c = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb;

      /* If the very first block is the one with the fall-through exit
	 edge, we have to split that block.  */
      if (c == bb)
	{
	  /* The new (second) half of the split becomes the block to be
	     moved; it inherits C's footer and chain position.  */
	  bb = split_block_after_labels (bb)->dest;
	  bb->aux = c->aux;
	  c->aux = bb;
	  BB_FOOTER (bb) = BB_FOOTER (c);
	  BB_FOOTER (c) = NULL;
	}

      /* Find BB's predecessor in the chain ...  */
      while (c->aux != bb)
	c = (basic_block) c->aux;

      /* ... unlink BB from the chain ...  */
      c->aux = bb->aux;
      /* ... walk to the current end of the chain ...  */
      while (c->aux)
	c = (basic_block) c->aux;

      /* ... and append BB there, so it is emitted last.  */
      c->aux = bb;
      bb->aux = NULL;
    }
}
406238fd1498Szrj
406338fd1498Szrj /* In case there are more than one fallthru predecessors of exit, force that
406438fd1498Szrj there is only one. */
406538fd1498Szrj
406638fd1498Szrj static void
force_one_exit_fallthru(void)406738fd1498Szrj force_one_exit_fallthru (void)
406838fd1498Szrj {
406938fd1498Szrj edge e, predecessor = NULL;
407038fd1498Szrj bool more = false;
407138fd1498Szrj edge_iterator ei;
407238fd1498Szrj basic_block forwarder, bb;
407338fd1498Szrj
407438fd1498Szrj FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
407538fd1498Szrj if (e->flags & EDGE_FALLTHRU)
407638fd1498Szrj {
407738fd1498Szrj if (predecessor == NULL)
407838fd1498Szrj predecessor = e;
407938fd1498Szrj else
408038fd1498Szrj {
408138fd1498Szrj more = true;
408238fd1498Szrj break;
408338fd1498Szrj }
408438fd1498Szrj }
408538fd1498Szrj
408638fd1498Szrj if (!more)
408738fd1498Szrj return;
408838fd1498Szrj
408938fd1498Szrj /* Exit has several fallthru predecessors. Create a forwarder block for
409038fd1498Szrj them. */
409138fd1498Szrj forwarder = split_edge (predecessor);
409238fd1498Szrj for (ei = ei_start (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds);
409338fd1498Szrj (e = ei_safe_edge (ei)); )
409438fd1498Szrj {
409538fd1498Szrj if (e->src == forwarder
409638fd1498Szrj || !(e->flags & EDGE_FALLTHRU))
409738fd1498Szrj ei_next (&ei);
409838fd1498Szrj else
409938fd1498Szrj redirect_edge_and_branch_force (e, forwarder);
410038fd1498Szrj }
410138fd1498Szrj
410238fd1498Szrj /* Fix up the chain of blocks -- make FORWARDER immediately precede the
410338fd1498Szrj exit block. */
410438fd1498Szrj FOR_EACH_BB_FN (bb, cfun)
410538fd1498Szrj {
410638fd1498Szrj if (bb->aux == NULL && bb != forwarder)
410738fd1498Szrj {
410838fd1498Szrj bb->aux = forwarder;
410938fd1498Szrj break;
411038fd1498Szrj }
411138fd1498Szrj }
411238fd1498Szrj }
411338fd1498Szrj
411438fd1498Szrj /* Return true in case it is possible to duplicate the basic block BB. */
411538fd1498Szrj
411638fd1498Szrj static bool
cfg_layout_can_duplicate_bb_p(const_basic_block bb)411738fd1498Szrj cfg_layout_can_duplicate_bb_p (const_basic_block bb)
411838fd1498Szrj {
411938fd1498Szrj /* Do not attempt to duplicate tablejumps, as we need to unshare
412038fd1498Szrj the dispatch table. This is difficult to do, as the instructions
412138fd1498Szrj computing jump destination may be hoisted outside the basic block. */
412238fd1498Szrj if (tablejump_p (BB_END (bb), NULL, NULL))
412338fd1498Szrj return false;
412438fd1498Szrj
412538fd1498Szrj /* Do not duplicate blocks containing insns that can't be copied. */
412638fd1498Szrj if (targetm.cannot_copy_insn_p)
412738fd1498Szrj {
412838fd1498Szrj rtx_insn *insn = BB_HEAD (bb);
412938fd1498Szrj while (1)
413038fd1498Szrj {
413138fd1498Szrj if (INSN_P (insn) && targetm.cannot_copy_insn_p (insn))
413238fd1498Szrj return false;
413338fd1498Szrj if (insn == BB_END (bb))
413438fd1498Szrj break;
413538fd1498Szrj insn = NEXT_INSN (insn);
413638fd1498Szrj }
413738fd1498Szrj }
413838fd1498Szrj
413938fd1498Szrj return true;
414038fd1498Szrj }
414138fd1498Szrj
/* Duplicate the insns from FROM to TO (both inclusive) and return the
   first insn of the copy.  The copy is emitted at the end of the current
   insn chain; the caller is expected to unlink and reposition it.
   Dispatch tables (JUMP_TABLE_DATA) and bare CODE_LABELs are skipped
   rather than copied; notes are stripped or copied depending on their
   kind.  */
rtx_insn *
duplicate_insn_chain (rtx_insn *from, rtx_insn *to)
{
  rtx_insn *insn, *next, *copy;
  rtx_note *last;

  /* Avoid updating of boundaries of previous basic block.  The
     note will get removed from insn stream in fixup.  */
  last = emit_note (NOTE_INSN_DELETED);

  /* Create copy at the end of INSN chain.  The chain will
     be reordered later.  */
  for (insn = from; insn != NEXT_INSN (to); insn = NEXT_INSN (insn))
    {
      switch (GET_CODE (insn))
	{
	case DEBUG_INSN:
	  /* Don't duplicate label debug insns.  */
	  if (DEBUG_BIND_INSN_P (insn)
	      && TREE_CODE (INSN_VAR_LOCATION_DECL (insn)) == LABEL_DECL)
	    break;
	  /* FALLTHRU */
	case INSN:
	case CALL_INSN:
	case JUMP_INSN:
	  copy = emit_copy_of_insn_after (insn, get_last_insn ());
	  /* For a return jump, JUMP_LABEL is a (simple_)return rtx, not
	     a CODE_LABEL insn, so carry it over to the copy by hand.  */
	  if (JUMP_P (insn) && JUMP_LABEL (insn) != NULL_RTX
	      && ANY_RETURN_P (JUMP_LABEL (insn)))
	    JUMP_LABEL (copy) = JUMP_LABEL (insn);
	  maybe_copy_prologue_epilogue_insn (insn, copy);
	  break;

	case JUMP_TABLE_DATA:
	  /* Avoid copying of dispatch tables.  We never duplicate
	     tablejumps, so this can hit only in case the table got
	     moved far from original jump.
	     Avoid copying following barrier as well if any
	     (and debug insns in between).  */
	  for (next = NEXT_INSN (insn);
	       next != NEXT_INSN (to);
	       next = NEXT_INSN (next))
	    if (!DEBUG_INSN_P (next))
	      break;
	  /* If the first non-debug insn after the table is a barrier,
	     skip past it too (advancing INSN skips the range).  */
	  if (next != NEXT_INSN (to) && BARRIER_P (next))
	    insn = next;
	  break;

	case CODE_LABEL:
	  break;

	case BARRIER:
	  emit_barrier ();
	  break;

	case NOTE:
	  switch (NOTE_KIND (insn))
	    {
	      /* In case prologue is empty and function contain label
		 in first BB, we may want to copy the block.  */
	    case NOTE_INSN_PROLOGUE_END:

	    case NOTE_INSN_DELETED:
	    case NOTE_INSN_DELETED_LABEL:
	    case NOTE_INSN_DELETED_DEBUG_LABEL:
	      /* No problem to strip these.  */
	    case NOTE_INSN_FUNCTION_BEG:
	      /* There is always just single entry to function.  */
	    case NOTE_INSN_BASIC_BLOCK:
	      /* We should only switch text sections once.  */
	    case NOTE_INSN_SWITCH_TEXT_SECTIONS:
	      break;

	    case NOTE_INSN_EPILOGUE_BEG:
	    case NOTE_INSN_UPDATE_SJLJ_CONTEXT:
	      emit_note_copy (as_a <rtx_note *> (insn));
	      break;

	    default:
	      /* All other notes should have already been eliminated.  */
	      gcc_unreachable ();
	    }
	  break;
	default:
	  gcc_unreachable ();
	}
    }
  /* The placeholder note is no longer needed; the copy begins right
     after it.  */
  insn = NEXT_INSN (last);
  delete_insn (last);
  return insn;
}
423238fd1498Szrj
423338fd1498Szrj /* Create a duplicate of the basic block BB. */
423438fd1498Szrj
423538fd1498Szrj static basic_block
cfg_layout_duplicate_bb(basic_block bb)423638fd1498Szrj cfg_layout_duplicate_bb (basic_block bb)
423738fd1498Szrj {
423838fd1498Szrj rtx_insn *insn;
423938fd1498Szrj basic_block new_bb;
424038fd1498Szrj
424138fd1498Szrj insn = duplicate_insn_chain (BB_HEAD (bb), BB_END (bb));
424238fd1498Szrj new_bb = create_basic_block (insn,
424338fd1498Szrj insn ? get_last_insn () : NULL,
424438fd1498Szrj EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb);
424538fd1498Szrj
424638fd1498Szrj BB_COPY_PARTITION (new_bb, bb);
424738fd1498Szrj if (BB_HEADER (bb))
424838fd1498Szrj {
424938fd1498Szrj insn = BB_HEADER (bb);
425038fd1498Szrj while (NEXT_INSN (insn))
425138fd1498Szrj insn = NEXT_INSN (insn);
425238fd1498Szrj insn = duplicate_insn_chain (BB_HEADER (bb), insn);
425338fd1498Szrj if (insn)
425438fd1498Szrj BB_HEADER (new_bb) = unlink_insn_chain (insn, get_last_insn ());
425538fd1498Szrj }
425638fd1498Szrj
425738fd1498Szrj if (BB_FOOTER (bb))
425838fd1498Szrj {
425938fd1498Szrj insn = BB_FOOTER (bb);
426038fd1498Szrj while (NEXT_INSN (insn))
426138fd1498Szrj insn = NEXT_INSN (insn);
426238fd1498Szrj insn = duplicate_insn_chain (BB_FOOTER (bb), insn);
426338fd1498Szrj if (insn)
426438fd1498Szrj BB_FOOTER (new_bb) = unlink_insn_chain (insn, get_last_insn ());
426538fd1498Szrj }
426638fd1498Szrj
426738fd1498Szrj return new_bb;
426838fd1498Szrj }
426938fd1498Szrj
427038fd1498Szrj
427138fd1498Szrj /* Main entry point to this module - initialize the datastructures for
427238fd1498Szrj CFG layout changes. It keeps LOOPS up-to-date if not null.
427338fd1498Szrj
427438fd1498Szrj FLAGS is a set of additional flags to pass to cleanup_cfg(). */
427538fd1498Szrj
427638fd1498Szrj void
cfg_layout_initialize(int flags)427738fd1498Szrj cfg_layout_initialize (int flags)
427838fd1498Szrj {
427938fd1498Szrj rtx_insn_list *x;
428038fd1498Szrj basic_block bb;
428138fd1498Szrj
428238fd1498Szrj /* Once bb partitioning is complete, cfg layout mode should not be
428338fd1498Szrj re-entered. Entering cfg layout mode may require fixups. As an
428438fd1498Szrj example, if edge forwarding performed when optimizing the cfg
428538fd1498Szrj layout required moving a block from the hot to the cold
428638fd1498Szrj section. This would create an illegal partitioning unless some
428738fd1498Szrj manual fixup was performed. */
428838fd1498Szrj gcc_assert (!crtl->bb_reorder_complete || !crtl->has_bb_partition);
428938fd1498Szrj
429038fd1498Szrj initialize_original_copy_tables ();
429138fd1498Szrj
429238fd1498Szrj cfg_layout_rtl_register_cfg_hooks ();
429338fd1498Szrj
429438fd1498Szrj record_effective_endpoints ();
429538fd1498Szrj
429638fd1498Szrj /* Make sure that the targets of non local gotos are marked. */
429738fd1498Szrj for (x = nonlocal_goto_handler_labels; x; x = x->next ())
429838fd1498Szrj {
429938fd1498Szrj bb = BLOCK_FOR_INSN (x->insn ());
430038fd1498Szrj bb->flags |= BB_NON_LOCAL_GOTO_TARGET;
430138fd1498Szrj }
430238fd1498Szrj
430338fd1498Szrj cleanup_cfg (CLEANUP_CFGLAYOUT | flags);
430438fd1498Szrj }
430538fd1498Szrj
430638fd1498Szrj /* Splits superblocks. */
430738fd1498Szrj void
break_superblocks(void)430838fd1498Szrj break_superblocks (void)
430938fd1498Szrj {
431038fd1498Szrj bool need = false;
431138fd1498Szrj basic_block bb;
431238fd1498Szrj
431338fd1498Szrj auto_sbitmap superblocks (last_basic_block_for_fn (cfun));
431438fd1498Szrj bitmap_clear (superblocks);
431538fd1498Szrj
431638fd1498Szrj FOR_EACH_BB_FN (bb, cfun)
431738fd1498Szrj if (bb->flags & BB_SUPERBLOCK)
431838fd1498Szrj {
431938fd1498Szrj bb->flags &= ~BB_SUPERBLOCK;
432038fd1498Szrj bitmap_set_bit (superblocks, bb->index);
432138fd1498Szrj need = true;
432238fd1498Szrj }
432338fd1498Szrj
432438fd1498Szrj if (need)
432538fd1498Szrj {
432638fd1498Szrj rebuild_jump_labels (get_insns ());
432738fd1498Szrj find_many_sub_basic_blocks (superblocks);
432838fd1498Szrj }
432938fd1498Szrj }
433038fd1498Szrj
/* Finalize the changes: reorder insn list according to the sequence specified
   by aux pointers, enter compensation code, rebuild scope forest.  */

void
cfg_layout_finalize (void)
{
  /* Dominators are invalidated by the reordering below.  */
  free_dominance_info (CDI_DOMINATORS);
  /* Make sure the exit block has at most one fallthru predecessor.  */
  force_one_exit_fallthru ();
  /* Leave cfglayout mode: switch back to the plain RTL cfg hooks.  */
  rtl_register_cfg_hooks ();
  if (reload_completed && !targetm.have_epilogue ())
    fixup_fallthru_exit_predecessor ();
  /* Reorder the insn chain to match the ->aux block sequence and add
     any jumps needed by the new block order.  */
  fixup_reorder_chain ();

  rebuild_jump_labels (get_insns ());
  delete_dead_jumptables ();

  if (flag_checking)
    verify_insn_chain ();
  checking_verify_flow_info ();
}
435138fd1498Szrj
435238fd1498Szrj
435338fd1498Szrj /* Same as split_block but update cfg_layout structures. */
435438fd1498Szrj
435538fd1498Szrj static basic_block
cfg_layout_split_block(basic_block bb,void * insnp)435638fd1498Szrj cfg_layout_split_block (basic_block bb, void *insnp)
435738fd1498Szrj {
435838fd1498Szrj rtx insn = (rtx) insnp;
435938fd1498Szrj basic_block new_bb = rtl_split_block (bb, insn);
436038fd1498Szrj
436138fd1498Szrj BB_FOOTER (new_bb) = BB_FOOTER (bb);
436238fd1498Szrj BB_FOOTER (bb) = NULL;
436338fd1498Szrj
436438fd1498Szrj return new_bb;
436538fd1498Szrj }
436638fd1498Szrj
/* Redirect edge E to DEST, the cfglayout-mode variant.  Returns the
   redirected edge, or NULL when the redirection failed or is not
   allowed (abnormal-call and EH edges cannot be redirected).  */
static edge
cfg_layout_redirect_edge_and_branch (edge e, basic_block dest)
{
  basic_block src = e->src;
  edge ret;

  if (e->flags & (EDGE_ABNORMAL_CALL | EDGE_EH))
    return NULL;

  /* Already going there: nothing to change.  */
  if (e->dest == dest)
    return e;

  /* A crossing simplejump whose redirected destination lands in the
     same partition no longer needs an explicit jump: delete it and let
     the edge fall through.  */
  if (e->flags & EDGE_CROSSING
      && BB_PARTITION (e->src) == BB_PARTITION (dest)
      && simplejump_p (BB_END (src)))
    {
      if (dump_file)
	fprintf (dump_file,
		 "Removing crossing jump while redirecting edge form %i to %i\n",
		 e->src->index, dest->index);
      delete_insn (BB_END (src));
      remove_barriers_from_footer (src);
      e->flags |= EDGE_FALLTHRU;
    }

  if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
      && (ret = try_redirect_by_replacing_jump (e, dest, true)))
    {
      df_set_bb_dirty (src);
      return ret;
    }

  /* The fallthru edge out of the entry block can simply be re-pointed.  */
  if (e->src == ENTRY_BLOCK_PTR_FOR_FN (cfun)
      && (e->flags & EDGE_FALLTHRU) && !(e->flags & EDGE_COMPLEX))
    {
      if (dump_file)
	fprintf (dump_file, "Redirecting entry edge from bb %i to %i\n",
		 e->src->index, dest->index);

      df_set_bb_dirty (e->src);
      redirect_edge_succ (e, dest);
      return e;
    }

  /* Redirect_edge_and_branch may decide to turn branch into fallthru edge
     in the case the basic block appears to be in sequence.  Avoid this
     transformation.  */

  if (e->flags & EDGE_FALLTHRU)
    {
      /* Redirect any branch edges unified with the fallthru one.  */
      if (JUMP_P (BB_END (src))
	  && label_is_jump_target_p (BB_HEAD (e->dest),
				     BB_END (src)))
	{
	  edge redirected;

	  if (dump_file)
	    fprintf (dump_file, "Fallthru edge unified with branch "
		     "%i->%i redirected to %i\n",
		     e->src->index, e->dest->index, dest->index);
	  e->flags &= ~EDGE_FALLTHRU;
	  redirected = redirect_branch_edge (e, dest);
	  gcc_assert (redirected);
	  redirected->flags |= EDGE_FALLTHRU;
	  df_set_bb_dirty (redirected->src);
	  return redirected;
	}
      /* In case we are redirecting fallthru edge to the branch edge
	 of conditional jump, remove it.  */
      if (EDGE_COUNT (src->succs) == 2)
	{
	  /* Find the edge that is different from E.  */
	  edge s = EDGE_SUCC (src, EDGE_SUCC (src, 0) == e);

	  if (s->dest == dest
	      && any_condjump_p (BB_END (src))
	      && onlyjump_p (BB_END (src)))
	    delete_insn (BB_END (src));
	}
      if (dump_file)
	fprintf (dump_file, "Redirecting fallthru edge %i->%i to %i\n",
		 e->src->index, e->dest->index, dest->index);
      ret = redirect_edge_succ_nodup (e, dest);
    }
  else
    ret = redirect_branch_edge (e, dest);

  /* redirect_branch_edge can fail; propagate the failure.  */
  if (!ret)
    return NULL;

  fixup_partition_crossing (ret);
  /* We don't want simplejumps in the insn stream during cfglayout.  */
  gcc_assert (!simplejump_p (BB_END (src)) || CROSSING_JUMP_P (BB_END (src)));

  df_set_bb_dirty (src);
  return ret;
}
446638fd1498Szrj
446738fd1498Szrj /* Simple wrapper as we always can redirect fallthru edges. */
446838fd1498Szrj static basic_block
cfg_layout_redirect_edge_and_branch_force(edge e,basic_block dest)446938fd1498Szrj cfg_layout_redirect_edge_and_branch_force (edge e, basic_block dest)
447038fd1498Szrj {
447138fd1498Szrj edge redirected = cfg_layout_redirect_edge_and_branch (e, dest);
447238fd1498Szrj
447338fd1498Szrj gcc_assert (redirected);
447438fd1498Szrj return NULL;
447538fd1498Szrj }
447638fd1498Szrj
/* Same as delete_basic_block but update cfg_layout structures.
   The header and footer insns of BB are first spliced back into the
   main insn chain so that rtl_delete_block removes them; any insns
   that survive between BB's old neighbours are reattached as the
   header of the next block (or the function footer).  */

static void
cfg_layout_delete_block (basic_block bb)
{
  rtx_insn *insn, *next, *prev = PREV_INSN (BB_HEAD (bb)), *remaints;
  rtx_insn **to;

  /* Splice BB's header chain into the insn stream just before
     BB_HEAD.  */
  if (BB_HEADER (bb))
    {
      next = BB_HEAD (bb);
      if (prev)
	SET_NEXT_INSN (prev) = BB_HEADER (bb);
      else
	set_first_insn (BB_HEADER (bb));
      SET_PREV_INSN (BB_HEADER (bb)) = prev;
      insn = BB_HEADER (bb);
      /* Find the last insn of the header chain.  */
      while (NEXT_INSN (insn))
	insn = NEXT_INSN (insn);
      SET_NEXT_INSN (insn) = next;
      SET_PREV_INSN (next) = insn;
    }
  next = NEXT_INSN (BB_END (bb));
  if (BB_FOOTER (bb))
    {
      insn = BB_FOOTER (bb);
      /* Drop barriers from the footer; stop at the first label, which
	 (with anything after it) is left out of the splice.  */
      while (insn)
	{
	  if (BARRIER_P (insn))
	    {
	      if (PREV_INSN (insn))
		SET_NEXT_INSN (PREV_INSN (insn)) = NEXT_INSN (insn);
	      else
		BB_FOOTER (bb) = NEXT_INSN (insn);
	      if (NEXT_INSN (insn))
		SET_PREV_INSN (NEXT_INSN (insn)) = PREV_INSN (insn);
	    }
	  if (LABEL_P (insn))
	    break;
	  insn = NEXT_INSN (insn);
	}
      /* Splice what remains of the footer after BB_END.  */
      if (BB_FOOTER (bb))
	{
	  insn = BB_END (bb);
	  SET_NEXT_INSN (insn) = BB_FOOTER (bb);
	  SET_PREV_INSN (BB_FOOTER (bb)) = insn;
	  while (NEXT_INSN (insn))
	    insn = NEXT_INSN (insn);
	  SET_NEXT_INSN (insn) = next;
	  if (next)
	    SET_PREV_INSN (next) = insn;
	  else
	    set_last_insn (insn);
	}
    }
  /* Decide where leftover insns will be reattached: the next block's
     header, or the function footer if BB was the last block.  */
  if (bb->next_bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
    to = &BB_HEADER (bb->next_bb);
  else
    to = &cfg_layout_function_footer;

  rtl_delete_block (bb);

  /* PREV/NEXT now bracket whatever insns survived the deletion.  */
  if (prev)
    prev = NEXT_INSN (prev);
  else
    prev = get_insns ();
  if (next)
    next = PREV_INSN (next);
  else
    next = get_last_insn ();

  /* If anything survived between PREV and NEXT, unlink it and prepend
     it to *TO.  */
  if (next && NEXT_INSN (next) != prev)
    {
      remaints = unlink_insn_chain (prev, next);
      insn = remaints;
      while (NEXT_INSN (insn))
	insn = NEXT_INSN (insn);
      SET_NEXT_INSN (insn) = *to;
      if (*to)
	SET_PREV_INSN (*to) = insn;
      *to = remaints;
    }
}
456038fd1498Szrj
456138fd1498Szrj /* Return true when blocks A and B can be safely merged. */
456238fd1498Szrj
456338fd1498Szrj static bool
cfg_layout_can_merge_blocks_p(basic_block a,basic_block b)456438fd1498Szrj cfg_layout_can_merge_blocks_p (basic_block a, basic_block b)
456538fd1498Szrj {
456638fd1498Szrj /* If we are partitioning hot/cold basic blocks, we don't want to
456738fd1498Szrj mess up unconditional or indirect jumps that cross between hot
456838fd1498Szrj and cold sections.
456938fd1498Szrj
457038fd1498Szrj Basic block partitioning may result in some jumps that appear to
457138fd1498Szrj be optimizable (or blocks that appear to be mergeable), but which really
457238fd1498Szrj must be left untouched (they are required to make it safely across
457338fd1498Szrj partition boundaries). See the comments at the top of
457438fd1498Szrj bb-reorder.c:partition_hot_cold_basic_blocks for complete details. */
457538fd1498Szrj
457638fd1498Szrj if (BB_PARTITION (a) != BB_PARTITION (b))
457738fd1498Szrj return false;
457838fd1498Szrj
457938fd1498Szrj /* Protect the loop latches. */
458038fd1498Szrj if (current_loops && b->loop_father->latch == b)
458138fd1498Szrj return false;
458238fd1498Szrj
458338fd1498Szrj /* If we would end up moving B's instructions, make sure it doesn't fall
458438fd1498Szrj through into the exit block, since we cannot recover from a fallthrough
458538fd1498Szrj edge into the exit block occurring in the middle of a function. */
458638fd1498Szrj if (NEXT_INSN (BB_END (a)) != BB_HEAD (b))
458738fd1498Szrj {
458838fd1498Szrj edge e = find_fallthru_edge (b->succs);
458938fd1498Szrj if (e && e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
459038fd1498Szrj return false;
459138fd1498Szrj }
459238fd1498Szrj
459338fd1498Szrj /* There must be exactly one edge in between the blocks. */
459438fd1498Szrj return (single_succ_p (a)
459538fd1498Szrj && single_succ (a) == b
459638fd1498Szrj && single_pred_p (b) == 1
459738fd1498Szrj && a != b
459838fd1498Szrj /* Must be simple edge. */
459938fd1498Szrj && !(single_succ_edge (a)->flags & EDGE_COMPLEX)
460038fd1498Szrj && a != ENTRY_BLOCK_PTR_FOR_FN (cfun)
460138fd1498Szrj && b != EXIT_BLOCK_PTR_FOR_FN (cfun)
460238fd1498Szrj /* If the jump insn has side effects, we can't kill the edge.
460338fd1498Szrj When not optimizing, try_redirect_by_replacing_jump will
460438fd1498Szrj not allow us to redirect an edge by replacing a table jump. */
460538fd1498Szrj && (!JUMP_P (BB_END (a))
460638fd1498Szrj || ((!optimize || reload_completed)
460738fd1498Szrj ? simplejump_p (BB_END (a)) : onlyjump_p (BB_END (a)))));
460838fd1498Szrj }
460938fd1498Szrj
/* Merge block A and B.  The blocks must be mergeable (see
   cfg_layout_can_merge_blocks_p).  In cfglayout mode the blocks need not
   be adjacent in the insn chain; B's insns, header and footer are spliced
   into A and B itself is deleted.  */

static void
cfg_layout_merge_blocks (basic_block a, basic_block b)
{
  /* Remember whether B was a forwarder before we start tearing it apart;
     used at the end to propagate the goto locus.  */
  bool forwarder_p = (b->flags & BB_FORWARDER_BLOCK) != 0;
  rtx_insn *insn;

  gcc_checking_assert (cfg_layout_can_merge_blocks_p (a, b));

  if (dump_file)
    fprintf (dump_file, "Merging block %d into block %d...\n", b->index,
	     a->index);

  /* If there was a CODE_LABEL beginning B, delete it.  */
  if (LABEL_P (BB_HEAD (b)))
    {
      delete_insn (BB_HEAD (b));
    }

  /* We should have fallthru edge in a, or we can do dummy redirection to get
     it cleaned up.  */
  if (JUMP_P (BB_END (a)))
    try_redirect_by_replacing_jump (EDGE_SUCC (a, 0), b, true);
  gcc_assert (!JUMP_P (BB_END (a)));

  /* When not optimizing and the edge is the only place in RTL which holds
     some unique locus, emit a nop with that locus in between.  */
  if (!optimize)
    emit_nop_for_unique_locus_between (a, b);

  /* Move things from b->footer after a->footer.  */
  if (BB_FOOTER (b))
    {
      if (!BB_FOOTER (a))
	BB_FOOTER (a) = BB_FOOTER (b);
      else
	{
	  /* Append B's footer chain after the last insn of A's footer.  */
	  rtx_insn *last = BB_FOOTER (a);

	  while (NEXT_INSN (last))
	    last = NEXT_INSN (last);
	  SET_NEXT_INSN (last) = BB_FOOTER (b);
	  SET_PREV_INSN (BB_FOOTER (b)) = last;
	}
      BB_FOOTER (b) = NULL;
    }

  /* Move things from b->header before a->footer.
     Note that this may include dead tablejump data, but we don't clean
     those up until we go out of cfglayout mode.  */
   if (BB_HEADER (b))
     {
      if (! BB_FOOTER (a))
	BB_FOOTER (a) = BB_HEADER (b);
      else
	{
	  /* Chain B's header in front of A's existing footer.  */
	  rtx_insn *last = BB_HEADER (b);

	  while (NEXT_INSN (last))
	    last = NEXT_INSN (last);
	  SET_NEXT_INSN (last) = BB_FOOTER (a);
	  SET_PREV_INSN (BB_FOOTER (a)) = last;
	  BB_FOOTER (a) = BB_HEADER (b);
	}
      BB_HEADER (b) = NULL;
     }

  /* In the case basic blocks are not adjacent, move them around.  */
  if (NEXT_INSN (BB_END (a)) != BB_HEAD (b))
    {
      insn = unlink_insn_chain (BB_HEAD (b), BB_END (b));

      emit_insn_after_noloc (insn, BB_END (a), a);
    }
  /* Otherwise just re-associate the instructions.  */
  else
    {
      insn = BB_HEAD (b);
      BB_END (a) = BB_END (b);
    }

  /* emit_insn_after_noloc doesn't call df_insn_change_bb.
     We need to explicitly call. */
  update_bb_for_insn_chain (insn, BB_END (b), a);

  /* Skip possible DELETED_LABEL insn.  */
  if (!NOTE_INSN_BASIC_BLOCK_P (insn))
    insn = NEXT_INSN (insn);
  gcc_assert (NOTE_INSN_BASIC_BLOCK_P (insn));
  BB_HEAD (b) = BB_END (b) = NULL;
  delete_insn (insn);

  df_bb_delete (b->index);

  /* If B was a forwarder block, propagate the locus on the edge.  */
  if (forwarder_p
      && LOCATION_LOCUS (EDGE_SUCC (b, 0)->goto_locus) == UNKNOWN_LOCATION)
    EDGE_SUCC (b, 0)->goto_locus = EDGE_SUCC (a, 0)->goto_locus;

  if (dump_file)
    fprintf (dump_file, "Merged blocks %d and %d.\n", a->index, b->index);
}
471338fd1498Szrj
471438fd1498Szrj /* Split edge E. */
471538fd1498Szrj
471638fd1498Szrj static basic_block
cfg_layout_split_edge(edge e)471738fd1498Szrj cfg_layout_split_edge (edge e)
471838fd1498Szrj {
471938fd1498Szrj basic_block new_bb =
472038fd1498Szrj create_basic_block (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
472138fd1498Szrj ? NEXT_INSN (BB_END (e->src)) : get_insns (),
472238fd1498Szrj NULL_RTX, e->src);
472338fd1498Szrj
472438fd1498Szrj if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
472538fd1498Szrj BB_COPY_PARTITION (new_bb, e->src);
472638fd1498Szrj else
472738fd1498Szrj BB_COPY_PARTITION (new_bb, e->dest);
472838fd1498Szrj make_edge (new_bb, e->dest, EDGE_FALLTHRU);
472938fd1498Szrj redirect_edge_and_branch_force (e, new_bb);
473038fd1498Szrj
473138fd1498Szrj return new_bb;
473238fd1498Szrj }
473338fd1498Szrj
/* Do postprocessing after making a forwarder block joined by edge FALLTHRU.
   No extra work is needed for the RTL representation, so this hook is
   intentionally a no-op.  */

static void
rtl_make_forwarder_block (edge fallthru ATTRIBUTE_UNUSED)
{
}
474038fd1498Szrj
474138fd1498Szrj /* Return true if BB contains only labels or non-executable
474238fd1498Szrj instructions. */
474338fd1498Szrj
474438fd1498Szrj static bool
rtl_block_empty_p(basic_block bb)474538fd1498Szrj rtl_block_empty_p (basic_block bb)
474638fd1498Szrj {
474738fd1498Szrj rtx_insn *insn;
474838fd1498Szrj
474938fd1498Szrj if (bb == ENTRY_BLOCK_PTR_FOR_FN (cfun)
475038fd1498Szrj || bb == EXIT_BLOCK_PTR_FOR_FN (cfun))
475138fd1498Szrj return true;
475238fd1498Szrj
475338fd1498Szrj FOR_BB_INSNS (bb, insn)
475438fd1498Szrj if (NONDEBUG_INSN_P (insn) && !any_uncondjump_p (insn))
475538fd1498Szrj return false;
475638fd1498Szrj
475738fd1498Szrj return true;
475838fd1498Szrj }
475938fd1498Szrj
476038fd1498Szrj /* Split a basic block if it ends with a conditional branch and if
476138fd1498Szrj the other part of the block is not empty. */
476238fd1498Szrj
476338fd1498Szrj static basic_block
rtl_split_block_before_cond_jump(basic_block bb)476438fd1498Szrj rtl_split_block_before_cond_jump (basic_block bb)
476538fd1498Szrj {
476638fd1498Szrj rtx_insn *insn;
476738fd1498Szrj rtx_insn *split_point = NULL;
476838fd1498Szrj rtx_insn *last = NULL;
476938fd1498Szrj bool found_code = false;
477038fd1498Szrj
477138fd1498Szrj FOR_BB_INSNS (bb, insn)
477238fd1498Szrj {
477338fd1498Szrj if (any_condjump_p (insn))
477438fd1498Szrj split_point = last;
477538fd1498Szrj else if (NONDEBUG_INSN_P (insn))
477638fd1498Szrj found_code = true;
477738fd1498Szrj last = insn;
477838fd1498Szrj }
477938fd1498Szrj
478038fd1498Szrj /* Did not find everything. */
478138fd1498Szrj if (found_code && split_point)
478238fd1498Szrj return split_block (bb, split_point)->dest;
478338fd1498Szrj else
478438fd1498Szrj return NULL;
478538fd1498Szrj }
478638fd1498Szrj
478738fd1498Szrj /* Return 1 if BB ends with a call, possibly followed by some
478838fd1498Szrj instructions that must stay with the call, 0 otherwise. */
478938fd1498Szrj
479038fd1498Szrj static bool
rtl_block_ends_with_call_p(basic_block bb)479138fd1498Szrj rtl_block_ends_with_call_p (basic_block bb)
479238fd1498Szrj {
479338fd1498Szrj rtx_insn *insn = BB_END (bb);
479438fd1498Szrj
479538fd1498Szrj while (!CALL_P (insn)
479638fd1498Szrj && insn != BB_HEAD (bb)
479738fd1498Szrj && (keep_with_call_p (insn)
479838fd1498Szrj || NOTE_P (insn)
479938fd1498Szrj || DEBUG_INSN_P (insn)))
480038fd1498Szrj insn = PREV_INSN (insn);
480138fd1498Szrj return (CALL_P (insn));
480238fd1498Szrj }
480338fd1498Szrj
/* Return 1 if BB ends with a conditional branch, 0 otherwise.
   Only the final insn of the block is inspected.  */

static bool
rtl_block_ends_with_condjump_p (const_basic_block bb)
{
  return any_condjump_p (BB_END (bb));
}
481138fd1498Szrj
481238fd1498Szrj /* Return true if we need to add fake edge to exit.
481338fd1498Szrj Helper function for rtl_flow_call_edges_add. */
481438fd1498Szrj
481538fd1498Szrj static bool
need_fake_edge_p(const rtx_insn * insn)481638fd1498Szrj need_fake_edge_p (const rtx_insn *insn)
481738fd1498Szrj {
481838fd1498Szrj if (!INSN_P (insn))
481938fd1498Szrj return false;
482038fd1498Szrj
482138fd1498Szrj if ((CALL_P (insn)
482238fd1498Szrj && !SIBLING_CALL_P (insn)
482338fd1498Szrj && !find_reg_note (insn, REG_NORETURN, NULL)
482438fd1498Szrj && !(RTL_CONST_OR_PURE_CALL_P (insn))))
482538fd1498Szrj return true;
482638fd1498Szrj
482738fd1498Szrj return ((GET_CODE (PATTERN (insn)) == ASM_OPERANDS
482838fd1498Szrj && MEM_VOLATILE_P (PATTERN (insn)))
482938fd1498Szrj || (GET_CODE (PATTERN (insn)) == PARALLEL
483038fd1498Szrj && asm_noperands (insn) != -1
483138fd1498Szrj && MEM_VOLATILE_P (XVECEXP (PATTERN (insn), 0, 0)))
483238fd1498Szrj || GET_CODE (PATTERN (insn)) == ASM_INPUT);
483338fd1498Szrj }
483438fd1498Szrj
/* Add fake edges to the function exit for any non constant and non noreturn
   calls, volatile inline assembly in the bitmap of blocks specified by
   BLOCKS or to the whole CFG if BLOCKS is zero.  Return the number of blocks
   that were split.

   The goal is to expose cases in which entering a basic block does not imply
   that all subsequent instructions must be executed.  */

static int
rtl_flow_call_edges_add (sbitmap blocks)
{
  int i;
  int blocks_split = 0;
  int last_bb = last_basic_block_for_fn (cfun);
  bool check_last_block = false;

  /* Nothing to do for a function with only the fixed entry/exit blocks.  */
  if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS)
    return 0;

  if (! blocks)
    check_last_block = true;
  else
    check_last_block = bitmap_bit_p (blocks,
				     EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb->index);

  /* In the last basic block, before epilogue generation, there will be
     a fallthru edge to EXIT.  Special care is required if the last insn
     of the last basic block is a call because make_edge folds duplicate
     edges, which would result in the fallthru edge also being marked
     fake, which would result in the fallthru edge being removed by
     remove_fake_edges, which would result in an invalid CFG.

     Moreover, we can't elide the outgoing fake edge, since the block
     profiler needs to take this into account in order to solve the minimal
     spanning tree in the case that the call doesn't return.

     Handle this by adding a dummy instruction in a new last basic block.  */
  if (check_last_block)
    {
      basic_block bb = EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb;
      rtx_insn *insn = BB_END (bb);

      /* Back up past insns that must be kept in the same block as a call.  */
      while (insn != BB_HEAD (bb)
	     && keep_with_call_p (insn))
	insn = PREV_INSN (insn);

      if (need_fake_edge_p (insn))
	{
	  edge e;

	  e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
	  if (e)
	    {
	      /* The USE of const0_rtx is the dummy insn; committing it
		 splits the edge and creates the new last block.  */
	      insert_insn_on_edge (gen_use (const0_rtx), e);
	      commit_edge_insertions ();
	    }
	}
    }

  /* Now add fake edges to the function exit for any non constant
     calls since there is no way that we can determine if they will
     return or not...  */

  for (i = NUM_FIXED_BLOCKS; i < last_bb; i++)
    {
      basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
      rtx_insn *insn;
      rtx_insn *prev_insn;

      if (!bb)
	continue;

      if (blocks && !bitmap_bit_p (blocks, i))
	continue;

      /* Walk the block's insns backwards; PREV_INSN is captured before a
	 potential split so the iteration is unaffected by it.  */
      for (insn = BB_END (bb); ; insn = prev_insn)
	{
	  prev_insn = PREV_INSN (insn);
	  if (need_fake_edge_p (insn))
	    {
	      edge e;
	      rtx_insn *split_at_insn = insn;

	      /* Don't split the block between a call and an insn that should
		 remain in the same block as the call.  */
	      if (CALL_P (insn))
		while (split_at_insn != BB_END (bb)
		       && keep_with_call_p (NEXT_INSN (split_at_insn)))
		  split_at_insn = NEXT_INSN (split_at_insn);

	      /* The handling above of the final block before the epilogue
		 should be enough to verify that there is no edge to the exit
		 block in CFG already.  Calling make_edge in such case would
		 cause us to mark that edge as fake and remove it later.  */

	      if (flag_checking && split_at_insn == BB_END (bb))
		{
		  e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
		  gcc_assert (e == NULL);
		}

	      /* Note that the following may create a new basic block
		 and renumber the existing basic blocks.  */
	      if (split_at_insn != BB_END (bb))
		{
		  e = split_block (bb, split_at_insn);
		  if (e)
		    blocks_split++;
		}

	      edge ne = make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_FAKE);
	      ne->probability = profile_probability::guessed_never ();
	    }

	  if (insn == BB_HEAD (bb))
	    break;
	}
    }

  if (blocks_split)
    verify_flow_info ();

  return blocks_split;
}
496038fd1498Szrj
/* Add COMP_RTX as a condition at the end of COND_BB.  FIRST_HEAD is
   the conditional branch target; SECOND_HEAD would be the fall-thru,
   but there is no need to handle the fall-thru here — the loop
   versioning code handles it.  SECOND_HEAD is kept as a parameter
   because the corresponding tree-level hook needs it, and the hook
   signatures must match.  COMP_RTX is passed as void * for the same
   reason and is really a comparison rtx.  */
static void
rtl_lv_add_condition_to_bb (basic_block first_head ,
			    basic_block second_head ATTRIBUTE_UNUSED,
			    basic_block cond_bb, void *comp_rtx)
{
  rtx_code_label *label;
  rtx_insn *seq, *jump;
  rtx op0 = XEXP ((rtx)comp_rtx, 0);
  rtx op1 = XEXP ((rtx)comp_rtx, 1);
  enum rtx_code comp = GET_CODE ((rtx)comp_rtx);
  machine_mode mode;


  label = block_label (first_head);
  /* If OP0 has VOIDmode (e.g. a constant), take the mode from OP1.  */
  mode = GET_MODE (op0);
  if (mode == VOIDmode)
    mode = GET_MODE (op1);

  /* Build the compare-and-jump as a detached sequence first.  */
  start_sequence ();
  op0 = force_operand (op0, NULL_RTX);
  op1 = force_operand (op1, NULL_RTX);
  do_compare_rtx_and_jump (op0, op1, comp, 0, mode, NULL_RTX, NULL, label,
			   profile_probability::uninitialized ());
  jump = get_last_insn ();
  JUMP_LABEL (jump) = label;
  LABEL_NUSES (label)++;
  seq = get_insns ();
  end_sequence ();

  /* Add the new cond, in the new head.  */
  emit_insn_after (seq, BB_END (cond_bb));
}
499838fd1498Szrj
499938fd1498Szrj
500038fd1498Szrj /* Given a block B with unconditional branch at its end, get the
500138fd1498Szrj store the return the branch edge and the fall-thru edge in
500238fd1498Szrj BRANCH_EDGE and FALLTHRU_EDGE respectively. */
500338fd1498Szrj static void
rtl_extract_cond_bb_edges(basic_block b,edge * branch_edge,edge * fallthru_edge)500438fd1498Szrj rtl_extract_cond_bb_edges (basic_block b, edge *branch_edge,
500538fd1498Szrj edge *fallthru_edge)
500638fd1498Szrj {
500738fd1498Szrj edge e = EDGE_SUCC (b, 0);
500838fd1498Szrj
500938fd1498Szrj if (e->flags & EDGE_FALLTHRU)
501038fd1498Szrj {
501138fd1498Szrj *fallthru_edge = e;
501238fd1498Szrj *branch_edge = EDGE_SUCC (b, 1);
501338fd1498Szrj }
501438fd1498Szrj else
501538fd1498Szrj {
501638fd1498Szrj *branch_edge = e;
501738fd1498Szrj *fallthru_edge = EDGE_SUCC (b, 1);
501838fd1498Szrj }
501938fd1498Szrj }
502038fd1498Szrj
/* Allocate and zero-initialize the RTL-specific info of basic block BB.
   BB must not already have RTL info attached.  */

void
init_rtl_bb_info (basic_block bb)
{
  gcc_assert (!bb->il.x.rtl);
  bb->il.x.head_ = NULL;
  /* GC-allocated and cleared, so all rtl_bb_info fields start as NULL/0.  */
  bb->il.x.rtl = ggc_cleared_alloc<rtl_bb_info> ();
}
502838fd1498Szrj
502938fd1498Szrj /* Returns true if it is possible to remove edge E by redirecting
503038fd1498Szrj it to the destination of the other edge from E->src. */
503138fd1498Szrj
503238fd1498Szrj static bool
rtl_can_remove_branch_p(const_edge e)503338fd1498Szrj rtl_can_remove_branch_p (const_edge e)
503438fd1498Szrj {
503538fd1498Szrj const_basic_block src = e->src;
503638fd1498Szrj const_basic_block target = EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest;
503738fd1498Szrj const rtx_insn *insn = BB_END (src);
503838fd1498Szrj rtx set;
503938fd1498Szrj
504038fd1498Szrj /* The conditions are taken from try_redirect_by_replacing_jump. */
504138fd1498Szrj if (target == EXIT_BLOCK_PTR_FOR_FN (cfun))
504238fd1498Szrj return false;
504338fd1498Szrj
504438fd1498Szrj if (e->flags & (EDGE_ABNORMAL_CALL | EDGE_EH))
504538fd1498Szrj return false;
504638fd1498Szrj
504738fd1498Szrj if (BB_PARTITION (src) != BB_PARTITION (target))
504838fd1498Szrj return false;
504938fd1498Szrj
505038fd1498Szrj if (!onlyjump_p (insn)
505138fd1498Szrj || tablejump_p (insn, NULL, NULL))
505238fd1498Szrj return false;
505338fd1498Szrj
505438fd1498Szrj set = single_set (insn);
505538fd1498Szrj if (!set || side_effects_p (set))
505638fd1498Szrj return false;
505738fd1498Szrj
505838fd1498Szrj return true;
505938fd1498Szrj }
506038fd1498Szrj
/* Duplicate basic block BB for the plain "rtl" hooks: delegate to the
   cfglayout implementation, then clear the aux field of the copy
   (presumably set during cfglayout duplication and meaningless here —
   NOTE(review): confirm).  Returns the new block.  */
static basic_block
rtl_duplicate_bb (basic_block bb)
{
  bb = cfg_layout_duplicate_bb (bb);
  bb->aux = NULL;
  return bb;
}
506838fd1498Szrj
506938fd1498Szrj /* Do book-keeping of basic block BB for the profile consistency checker.
507038fd1498Szrj If AFTER_PASS is 0, do pre-pass accounting, or if AFTER_PASS is 1
507138fd1498Szrj then do post-pass accounting. Store the counting in RECORD. */
507238fd1498Szrj static void
rtl_account_profile_record(basic_block bb,int after_pass,struct profile_record * record)507338fd1498Szrj rtl_account_profile_record (basic_block bb, int after_pass,
507438fd1498Szrj struct profile_record *record)
507538fd1498Szrj {
507638fd1498Szrj rtx_insn *insn;
507738fd1498Szrj FOR_BB_INSNS (bb, insn)
507838fd1498Szrj if (INSN_P (insn))
507938fd1498Szrj {
508038fd1498Szrj record->size[after_pass] += insn_cost (insn, false);
508138fd1498Szrj if (bb->count.initialized_p ())
508238fd1498Szrj record->time[after_pass]
508338fd1498Szrj += insn_cost (insn, true) * bb->count.to_gcov_type ();
508438fd1498Szrj else if (profile_status_for_fn (cfun) == PROFILE_GUESSED)
508538fd1498Szrj record->time[after_pass]
508638fd1498Szrj += insn_cost (insn, true) * bb->count.to_frequency (cfun);
508738fd1498Szrj }
508838fd1498Szrj }
508938fd1498Szrj
/* Implementation of CFG manipulation for linearized RTL.  */
struct cfg_hooks rtl_cfg_hooks = {
  "rtl",
  rtl_verify_flow_info,			/* verify_flow_info */
  rtl_dump_bb,				/* dump_bb */
  rtl_dump_bb_for_graph,		/* dump_bb_for_graph */
  rtl_create_basic_block,		/* create_basic_block */
  rtl_redirect_edge_and_branch,		/* redirect_edge_and_branch */
  rtl_redirect_edge_and_branch_force,	/* redirect_edge_and_branch_force */
  rtl_can_remove_branch_p,		/* can_remove_branch_p */
  rtl_delete_block,			/* delete_basic_block */
  rtl_split_block,			/* split_block */
  rtl_move_block_after,			/* move_block_after */
  rtl_can_merge_blocks, /* can_merge_blocks_p */
  rtl_merge_blocks,			/* merge_blocks */
  rtl_predict_edge,			/* predict_edge */
  rtl_predicted_by_p,			/* predicted_by_p */
  cfg_layout_can_duplicate_bb_p,	/* can_duplicate_block_p */
  rtl_duplicate_bb,			/* duplicate_block */
  rtl_split_edge,			/* split_edge */
  rtl_make_forwarder_block,		/* make_forwarder_block */
  rtl_tidy_fallthru_edge,		/* tidy_fallthru_edge */
  rtl_force_nonfallthru,		/* force_nonfallthru */
  rtl_block_ends_with_call_p,		/* block_ends_with_call_p */
  rtl_block_ends_with_condjump_p,	/* block_ends_with_condjump_p */
  rtl_flow_call_edges_add,		/* flow_call_edges_add */
  NULL, /* execute_on_growing_pred */
  NULL, /* execute_on_shrinking_pred */
  NULL, /* duplicate loop for trees */
  NULL, /* lv_add_condition_to_bb */
  NULL, /* lv_adjust_loop_header_phi*/
  NULL, /* extract_cond_bb_edges */
  NULL, /* flush_pending_stmts */
  rtl_block_empty_p, /* block_empty_p */
  rtl_split_block_before_cond_jump, /* split_block_before_cond_jump */
  rtl_account_profile_record,		/* account_profile_record */
};
512738fd1498Szrj
/* Implementation of CFG manipulation for cfg layout RTL, where
   basic block connected via fallthru edges does not have to be adjacent.
   This representation will hopefully become the default one in future
   version of the compiler.  */

struct cfg_hooks cfg_layout_rtl_cfg_hooks = {
  "cfglayout mode",
  rtl_verify_flow_info_1,		/* verify_flow_info */
  rtl_dump_bb,				/* dump_bb */
  rtl_dump_bb_for_graph,		/* dump_bb_for_graph */
  cfg_layout_create_basic_block,	/* create_basic_block */
  cfg_layout_redirect_edge_and_branch,	/* redirect_edge_and_branch */
  cfg_layout_redirect_edge_and_branch_force, /* redirect_edge_and_branch_force */
  rtl_can_remove_branch_p,		/* can_remove_branch_p */
  cfg_layout_delete_block,		/* delete_basic_block */
  cfg_layout_split_block,		/* split_block */
  rtl_move_block_after,			/* move_block_after */
  cfg_layout_can_merge_blocks_p,	/* can_merge_blocks_p */
  cfg_layout_merge_blocks,		/* merge_blocks */
  rtl_predict_edge,			/* predict_edge */
  rtl_predicted_by_p,			/* predicted_by_p */
  cfg_layout_can_duplicate_bb_p,	/* can_duplicate_block_p */
  cfg_layout_duplicate_bb,		/* duplicate_block */
  cfg_layout_split_edge,		/* split_edge */
  rtl_make_forwarder_block,		/* make_forwarder_block */
  NULL, /* tidy_fallthru_edge */
  rtl_force_nonfallthru,		/* force_nonfallthru */
  rtl_block_ends_with_call_p,		/* block_ends_with_call_p */
  rtl_block_ends_with_condjump_p,	/* block_ends_with_condjump_p */
  rtl_flow_call_edges_add,		/* flow_call_edges_add */
  NULL, /* execute_on_growing_pred */
  NULL, /* execute_on_shrinking_pred */
  duplicate_loop_to_header_edge, /* duplicate loop for trees */
  rtl_lv_add_condition_to_bb, /* lv_add_condition_to_bb */
  NULL, /* lv_adjust_loop_header_phi*/
  rtl_extract_cond_bb_edges, /* extract_cond_bb_edges */
  NULL, /* flush_pending_stmts */
  rtl_block_empty_p, /* block_empty_p */
  rtl_split_block_before_cond_jump, /* split_block_before_cond_jump */
  rtl_account_profile_record,		/* account_profile_record */
};
516938fd1498Szrj
517038fd1498Szrj #include "gt-cfgrtl.h"
5171