/* Control flow graph manipulation code for GNU compiler.
   Copyright (C) 1987-2022 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* This file contains low-level functions that manipulate and analyze the
   CFG and that are aware of the RTL intermediate language.

   Available functionality:
     - Basic CFG/RTL manipulation API documented in cfghooks.h
     - CFG-aware instruction chain manipulation
	 delete_insn, delete_insn_chain
     - Edge splitting and committing to edges
	 insert_insn_on_edge, prepend_insn_to_edge, commit_edge_insertions
     - CFG updating after insn simplification
	 purge_dead_edges, purge_all_dead_edges
     - CFG fixing after coarse manipulation
	 fixup_abnormal_edges

   Functions not intended for generic use:
     - Infrastructure to quickly determine the basic block for an insn
	 compute_bb_for_insn, update_bb_for_insn, set_block_for_insn
     - Edge redirection with updating and optimizing of the insn chain
	 block_label, tidy_fallthru_edge, force_nonfallthru  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "cfghooks.h"
#include "df.h"
#include "insn-config.h"
#include "memmodel.h"
#include "emit-rtl.h"
#include "cfgrtl.h"
#include "cfganal.h"
#include "cfgbuild.h"
#include "cfgcleanup.h"
#include "bb-reorder.h"
#include "rtl-error.h"
#include "insn-attr.h"
#include "dojump.h"
#include "expr.h"
#include "cfgloop.h"
#include "tree-pass.h"
#include "print-rtl.h"
#include "rtl-iter.h"
#include "gimplify.h"
#include "profile.h"
#include "sreal.h"

/* Disable warnings about missing quoting in GCC diagnostics.  */
#if __GNUC__ >= 10
#  pragma GCC diagnostic push
#  pragma GCC diagnostic ignored "-Wformat-diag"
#endif

/* Holds the interesting leading and trailing notes for the function.
   Only applicable if the CFG is in cfglayout mode.  */
static GTY(()) rtx_insn *cfg_layout_function_footer;
static GTY(()) rtx_insn *cfg_layout_function_header;

static rtx_insn *skip_insns_after_block (basic_block);
static void record_effective_endpoints (void);
static void fixup_reorder_chain (void);

void verify_insn_chain (void);
static void fixup_fallthru_exit_predecessor (void);
static int can_delete_note_p (const rtx_note *);
static int can_delete_label_p (const rtx_code_label *);
static basic_block rtl_split_edge (edge);
static bool rtl_move_block_after (basic_block, basic_block);
static int rtl_verify_flow_info (void);
static basic_block cfg_layout_split_block (basic_block, void *);
static edge cfg_layout_redirect_edge_and_branch (edge, basic_block);
static basic_block cfg_layout_redirect_edge_and_branch_force (edge, basic_block);
static void cfg_layout_delete_block (basic_block);
static void rtl_delete_block (basic_block);
static basic_block rtl_redirect_edge_and_branch_force (edge, basic_block);
static edge rtl_redirect_edge_and_branch (edge, basic_block);
static basic_block rtl_split_block (basic_block, void *);
static void rtl_dump_bb (FILE *, basic_block, int, dump_flags_t);
static int rtl_verify_flow_info_1 (void);
static void rtl_make_forwarder_block (edge);
static bool rtl_bb_info_initialized_p (basic_block bb);

/* Return true if NOTE is not one of the ones that must be kept paired,
   so that we may simply delete it.  */

static int
can_delete_note_p (const rtx_note *note)
{
  switch (NOTE_KIND (note))
    {
    case NOTE_INSN_DELETED:
    case NOTE_INSN_BASIC_BLOCK:
    case NOTE_INSN_EPILOGUE_BEG:
      return true;

    default:
      return false;
    }
}

/* True if a given label can be deleted.  */

static int
can_delete_label_p (const rtx_code_label *label)
{
  return (!LABEL_PRESERVE_P (label)
	  /* User declared labels must be preserved.  */
	  && LABEL_NAME (label) == 0
	  && !vec_safe_contains<rtx_insn *> (forced_labels,
					     const_cast<rtx_code_label *> (label)));
}

/* Delete INSN by patching it out.  */

void
delete_insn (rtx_insn *insn)
{
  rtx note;
  bool really_delete = true;

  if (LABEL_P (insn))
    {
      /* Some labels can't be directly removed from the INSN chain, as they
	 might be referenced via variables, the constant pool etc.
	 Convert them to the special NOTE_INSN_DELETED_LABEL note.  */
      if (! can_delete_label_p (as_a <rtx_code_label *> (insn)))
	{
	  const char *name = LABEL_NAME (insn);
	  basic_block bb = BLOCK_FOR_INSN (insn);
	  rtx_insn *bb_note = NEXT_INSN (insn);

	  really_delete = false;
	  PUT_CODE (insn, NOTE);
	  NOTE_KIND (insn) = NOTE_INSN_DELETED_LABEL;
	  NOTE_DELETED_LABEL_NAME (insn) = name;

	  /* If the note following the label starts a basic block, and the
	     label is a member of the same basic block, interchange the two.  */
	  if (bb_note != NULL_RTX
	      && NOTE_INSN_BASIC_BLOCK_P (bb_note)
	      && bb != NULL
	      && bb == BLOCK_FOR_INSN (bb_note))
	    {
	      reorder_insns_nobb (insn, insn, bb_note);
	      BB_HEAD (bb) = bb_note;
	      if (BB_END (bb) == bb_note)
		BB_END (bb) = insn;
	    }
	}

      remove_node_from_insn_list (insn, &nonlocal_goto_handler_labels);
    }

  if (really_delete)
    {
      /* If this insn has already been deleted, something is very wrong.  */
      gcc_assert (!insn->deleted ());
      if (INSN_P (insn))
	df_insn_delete (insn);
      remove_insn (insn);
      insn->set_deleted ();
    }

  /* If deleting a jump, decrement the use count of the label.  Deleting
     the label itself should happen in the normal course of block merging.  */
  if (JUMP_P (insn))
    {
      if (JUMP_LABEL (insn)
	  && LABEL_P (JUMP_LABEL (insn)))
	LABEL_NUSES (JUMP_LABEL (insn))--;

      /* If there are more targets, remove them too.  */
      while ((note
	      = find_reg_note (insn, REG_LABEL_TARGET, NULL_RTX)) != NULL_RTX
	     && LABEL_P (XEXP (note, 0)))
	{
	  LABEL_NUSES (XEXP (note, 0))--;
	  remove_note (insn, note);
	}
    }

  /* Likewise if deleting any insn that references a label as an operand.  */
  while ((note = find_reg_note (insn, REG_LABEL_OPERAND, NULL_RTX)) != NULL_RTX
	 && LABEL_P (XEXP (note, 0)))
    {
      LABEL_NUSES (XEXP (note, 0))--;
      remove_note (insn, note);
    }

  if (rtx_jump_table_data *table = dyn_cast <rtx_jump_table_data *> (insn))
    {
      rtvec vec = table->get_labels ();
      int len = GET_NUM_ELEM (vec);
      int i;

      for (i = 0; i < len; i++)
	{
	  rtx label = XEXP (RTVEC_ELT (vec, i), 0);

	  /* When deleting code in bulk (e.g. removing many unreachable
	     blocks) we can delete a label that's a target of the vector
	     before deleting the vector itself.  */
	  if (!NOTE_P (label))
	    LABEL_NUSES (label)--;
	}
    }
}

/* Like delete_insn but also purge dead edges from BB.
   Return true if any edges are eliminated.  */

bool
delete_insn_and_edges (rtx_insn *insn)
{
  bool purge = false;

  if (NONDEBUG_INSN_P (insn) && BLOCK_FOR_INSN (insn))
    {
      basic_block bb = BLOCK_FOR_INSN (insn);
      if (BB_END (bb) == insn)
	purge = true;
      else if (DEBUG_INSN_P (BB_END (bb)))
	for (rtx_insn *dinsn = NEXT_INSN (insn);
	     DEBUG_INSN_P (dinsn); dinsn = NEXT_INSN (dinsn))
	  if (BB_END (bb) == dinsn)
	    {
	      purge = true;
	      break;
	    }
    }
  delete_insn (insn);
  if (purge)
    return purge_dead_edges (BLOCK_FOR_INSN (insn));
  return false;
}

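/* Example usage (an illustrative sketch; BB and the deadness of its
   final jump are assumed to have been established by the caller):

     rtx_insn *jump = BB_END (bb);
     if (JUMP_P (jump) && onlyjump_p (jump))
       delete_insn_and_edges (jump);

   Plain delete_insn would leave BB's outgoing edges stale; the
   _and_edges variant calls purge_dead_edges when the deleted insn
   ended its block (ignoring trailing debug insns).  */
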
/* Unlink a chain of insns between START and FINISH, leaving notes
   that must be paired.  If CLEAR_BB is true, set the bb field to NULL
   for the insns that cannot be removed.  */

void
delete_insn_chain (rtx start, rtx_insn *finish, bool clear_bb)
{
  /* Unchain the insns one by one.  It would be quicker to delete all of these
     with a single unchaining, rather than one at a time, but we need to keep
     the NOTEs.  */
  rtx_insn *current = finish;
  while (1)
    {
      rtx_insn *prev = PREV_INSN (current);
      if (NOTE_P (current) && !can_delete_note_p (as_a <rtx_note *> (current)))
	;
      else
	delete_insn (current);

      if (clear_bb && !current->deleted ())
	set_block_for_insn (current, NULL);

      if (current == start)
	break;
      current = prev;
    }
}

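/* Example usage (illustrative sketch): this is essentially what
   rtl_delete_block below does to drop the body of a dead block B while
   keeping the notes that must survive:

     rtx_insn *first = BB_HEAD (b);
     rtx_insn *last = get_last_bb_insn (b);
     delete_insn_chain (first, last, true);

   Passing CLEAR_BB = true detaches the surviving notes from B so the
   block itself can be freed.  */
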
/* Create a new basic block consisting of the instructions between HEAD and
   END inclusive.  This function is designed to allow fast BB construction:
   it reuses the note and basic block struct in BB_NOTE, if any, does not
   grow the BASIC_BLOCK chain, and should be used directly only by CFG
   construction code.  END can be NULL to create a new empty basic block
   before HEAD.  Both END and HEAD can be NULL to create a basic block at
   the end of the INSN chain.  AFTER is the basic block the new one should
   be placed after.  */

basic_block
create_basic_block_structure (rtx_insn *head, rtx_insn *end, rtx_note *bb_note,
			      basic_block after)
{
  basic_block bb;

  if (bb_note
      && (bb = NOTE_BASIC_BLOCK (bb_note)) != NULL
      && bb->aux == NULL)
    {
      /* If we found an existing note, thread it back onto the chain.  */

      rtx_insn *after;

      if (LABEL_P (head))
	after = head;
      else
	{
	  after = PREV_INSN (head);
	  head = bb_note;
	}

      if (after != bb_note && NEXT_INSN (after) != bb_note)
	reorder_insns_nobb (bb_note, bb_note, after);
    }
  else
    {
      /* Otherwise we must create a note and a basic block structure.  */

      bb = alloc_block ();

      init_rtl_bb_info (bb);
      if (!head && !end)
	head = end = bb_note
	  = emit_note_after (NOTE_INSN_BASIC_BLOCK, get_last_insn ());
      else if (LABEL_P (head) && end)
	{
	  bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK, head);
	  if (head == end)
	    end = bb_note;
	}
      else
	{
	  bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK, head);
	  head = bb_note;
	  if (!end)
	    end = head;
	}

      NOTE_BASIC_BLOCK (bb_note) = bb;
    }

  /* Always include the bb note in the block.  */
  if (NEXT_INSN (end) == bb_note)
    end = bb_note;

  BB_HEAD (bb) = head;
  BB_END (bb) = end;
  bb->index = last_basic_block_for_fn (cfun)++;
  bb->flags = BB_NEW | BB_RTL;
  link_block (bb, after);
  SET_BASIC_BLOCK_FOR_FN (cfun, bb->index, bb);
  df_bb_refs_record (bb->index, false);
  update_bb_for_insn (bb);
  BB_SET_PARTITION (bb, BB_UNPARTITIONED);

  /* Tag the block so that we know it has been used when considering
     other basic block notes.  */
  bb->aux = bb;

  return bb;
}

/* Create a new basic block consisting of the instructions between HEAD and
   END and place it in the BB chain after block AFTER.  END can be NULL to
   create a new empty basic block before HEAD.  Both END and HEAD can be
   NULL to create a basic block at the end of the INSN chain.  */

static basic_block
rtl_create_basic_block (void *headp, void *endp, basic_block after)
{
  rtx_insn *head = (rtx_insn *) headp;
  rtx_insn *end = (rtx_insn *) endp;
  basic_block bb;

  /* Grow the basic block array if needed.  */
  if ((size_t) last_basic_block_for_fn (cfun)
      >= basic_block_info_for_fn (cfun)->length ())
    vec_safe_grow_cleared (basic_block_info_for_fn (cfun),
			   last_basic_block_for_fn (cfun) + 1);

  n_basic_blocks_for_fn (cfun)++;

  bb = create_basic_block_structure (head, end, NULL, after);
  bb->aux = NULL;
  return bb;
}

static basic_block
cfg_layout_create_basic_block (void *head, void *end, basic_block after)
{
  basic_block newbb = rtl_create_basic_block (head, end, after);

  return newbb;
}

/* Delete the insns in a (non-live) block.  We physically delete every
   non-deleted-note insn, and update the flow graph appropriately.  */

/* ??? Preserving all such notes strikes me as wrong.  It would be nice
   to post-process the stream to remove empty blocks, loops, ranges, etc.  */

static void
rtl_delete_block (basic_block b)
{
  rtx_insn *insn, *end;

  /* If the head of this block is a CODE_LABEL, then it might be the
     label for an exception handler which can't be reached.  We need
     to remove the label from the exception_handler_label list.  */
  insn = BB_HEAD (b);

  end = get_last_bb_insn (b);

  /* Selectively delete the entire chain.  */
  BB_HEAD (b) = NULL;
  delete_insn_chain (insn, end, true);


  if (dump_file)
    fprintf (dump_file, "deleting block %d\n", b->index);
  df_bb_delete (b->index);
}

/* Records the basic block struct in BLOCK_FOR_INSN for every insn.  */

void
compute_bb_for_insn (void)
{
  basic_block bb;

  FOR_EACH_BB_FN (bb, cfun)
    {
      rtx_insn *end = BB_END (bb);
      rtx_insn *insn;

      for (insn = BB_HEAD (bb); ; insn = NEXT_INSN (insn))
	{
	  BLOCK_FOR_INSN (insn) = bb;
	  if (insn == end)
	    break;
	}
    }
}

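/* Example usage (illustrative sketch): a pass that temporarily needs
   the insn -> block mapping can bracket its work with

     compute_bb_for_insn ();
     ... queries via BLOCK_FOR_INSN (insn) ...
     free_bb_for_insn ();

   Note that free_bb_for_insn below only clears the mapping (barriers
   never belong to a block and are skipped); it does not free memory.  */
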
/* Release the basic_block_for_insn array.  */

unsigned int
free_bb_for_insn (void)
{
  rtx_insn *insn;
  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    if (!BARRIER_P (insn))
      BLOCK_FOR_INSN (insn) = NULL;
  return 0;
}

namespace {

const pass_data pass_data_free_cfg =
{
  RTL_PASS, /* type */
  "*free_cfg", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  PROP_cfg, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_free_cfg : public rtl_opt_pass
{
public:
  pass_free_cfg (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_free_cfg, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *);

}; // class pass_free_cfg

unsigned int
pass_free_cfg::execute (function *)
{
  /* The resource.cc machinery uses DF but the CFG isn't guaranteed to be
     valid at that point so it would be too late to call df_analyze.  */
  if (DELAY_SLOTS && optimize > 0 && flag_delayed_branch)
    {
      df_note_add_problem ();
      df_analyze ();
    }

  if (crtl->has_bb_partition)
    insert_section_boundary_note ();

  free_bb_for_insn ();
  return 0;
}

} // anon namespace

rtl_opt_pass *
make_pass_free_cfg (gcc::context *ctxt)
{
  return new pass_free_cfg (ctxt);
}

/* Return the insn after which we want to emit code at the entry of the
   function.  */
rtx_insn *
entry_of_function (void)
{
  return (n_basic_blocks_for_fn (cfun) > NUM_FIXED_BLOCKS ?
	  BB_HEAD (ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb) : get_insns ());
}

/* Emit INSN at the entry point of the function, ensuring that it is only
   executed once per function.  */
void
emit_insn_at_entry (rtx insn)
{
  edge_iterator ei = ei_start (ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs);
  edge e = ei_safe_edge (ei);
  gcc_assert (e->flags & EDGE_FALLTHRU);

  insert_insn_on_edge (insn, e);
  commit_edge_insertions ();
}

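/* Example usage (illustrative sketch): the same insert-then-commit
   pattern works for any edge, not just the entry edge.  To run SEQ
   (here assumed to have been built with start_sequence/end_sequence)
   whenever control passes from E->src to E->dest:

     insert_insn_on_edge (seq, e);
     ...
     commit_edge_insertions ();

   commit_edge_insertions splits edges as needed and may therefore
   create new basic blocks.  */
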
/* Update BLOCK_FOR_INSN of the insns between BEGIN and END
   (or BARRIER if found) and notify df of the bb change.
   The insn chain range is inclusive
   (i.e. both BEGIN and END will be updated).  */

static void
update_bb_for_insn_chain (rtx_insn *begin, rtx_insn *end, basic_block bb)
{
  rtx_insn *insn;

  end = NEXT_INSN (end);
  for (insn = begin; insn != end; insn = NEXT_INSN (insn))
    if (!BARRIER_P (insn))
      df_insn_change_bb (insn, bb);
}

/* Update BLOCK_FOR_INSN of the insns in BB to BB,
   and notify df of the change.  */

void
update_bb_for_insn (basic_block bb)
{
  update_bb_for_insn_chain (BB_HEAD (bb), BB_END (bb), bb);
}


/* Like active_insn_p, except keep the return value use or clobber around
   even after reload.  */

static bool
flow_active_insn_p (const rtx_insn *insn)
{
  if (active_insn_p (insn))
    return true;

  /* A clobber of the function return value exists for buggy
     programs that fail to return a value.  Its effect is to
     keep the return value from being live across the entire
     function.  If we allow it to be skipped, we introduce the
     possibility for register lifetime confusion.
     Similarly, keep a USE of the function return value, otherwise
     the USE is dropped and we could fail to thread a jump if the USE
     appears on some paths and not on others, see PR90257.  */
  if ((GET_CODE (PATTERN (insn)) == CLOBBER
       || GET_CODE (PATTERN (insn)) == USE)
      && REG_P (XEXP (PATTERN (insn), 0))
      && REG_FUNCTION_VALUE_P (XEXP (PATTERN (insn), 0)))
    return true;

  return false;
}

/* Return true if the block has no effect and only forwards control flow to
   its single destination.  */

bool
contains_no_active_insn_p (const_basic_block bb)
{
  rtx_insn *insn;

  if (bb == EXIT_BLOCK_PTR_FOR_FN (cfun)
      || bb == ENTRY_BLOCK_PTR_FOR_FN (cfun)
      || !single_succ_p (bb)
      || (single_succ_edge (bb)->flags & EDGE_FAKE) != 0)
    return false;

  for (insn = BB_HEAD (bb); insn != BB_END (bb); insn = NEXT_INSN (insn))
    if (INSN_P (insn) && flow_active_insn_p (insn))
      return false;

  return (!INSN_P (insn)
	  || (JUMP_P (insn) && simplejump_p (insn))
	  || !flow_active_insn_p (insn));
}

/* Likewise, but protect loop latches, headers and preheaders.  */
/* FIXME: Make this a cfg hook.  */

bool
forwarder_block_p (const_basic_block bb)
{
  if (!contains_no_active_insn_p (bb))
    return false;

  /* Protect loop latches, headers and preheaders.  */
  if (current_loops)
    {
      basic_block dest;
      if (bb->loop_father->header == bb)
	return false;
      dest = EDGE_SUCC (bb, 0)->dest;
      if (dest->loop_father->header == dest)
	return false;
    }

  return true;
}

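/* Example usage (illustrative sketch): code that wants to look through
   pure forwarders, e.g. to find the effective destination of an edge,
   can follow single successors while the predicate holds:

     basic_block dest = e->dest;
     while (forwarder_block_p (dest)
	    && single_succ (dest) != dest)
       dest = single_succ (dest);

   The self-loop check only guards the degenerate one-block cycle; the
   real cleanup code in cfgcleanup.cc bounds such walks more carefully.  */
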
/* Return nonzero if we can reach TARGET from SRC by falling through.  */
/* FIXME: Make this a cfg hook, the result is only valid in cfgrtl mode.  */

bool
can_fallthru (basic_block src, basic_block target)
{
  rtx_insn *insn = BB_END (src);
  rtx_insn *insn2;
  edge e;
  edge_iterator ei;

  if (target == EXIT_BLOCK_PTR_FOR_FN (cfun))
    return true;
  if (src->next_bb != target)
    return false;

  /* ??? Later we may add code to move jump tables offline.  */
  if (tablejump_p (insn, NULL, NULL))
    return false;

  FOR_EACH_EDGE (e, ei, src->succs)
    if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun)
	&& e->flags & EDGE_FALLTHRU)
      return false;

  insn2 = BB_HEAD (target);
  if (!active_insn_p (insn2))
    insn2 = next_active_insn (insn2);

  return next_active_insn (insn) == insn2;
}

/* Return nonzero if we could reach TARGET from SRC by falling through,
   if TARGET were made adjacent.  If we already have a fall-through
   edge to the exit block, we can't do that.  */
static bool
could_fall_through (basic_block src, basic_block target)
{
  edge e;
  edge_iterator ei;

  if (target == EXIT_BLOCK_PTR_FOR_FN (cfun))
    return true;
  FOR_EACH_EDGE (e, ei, src->succs)
    if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun)
	&& e->flags & EDGE_FALLTHRU)
      return false;
  return true;
}

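/* Example usage (illustrative sketch): when deciding whether a jump at
   the end of SRC to TARGET is redundant, the typical test is

     if (can_fallthru (src, target))
       ...the jump can go away and the edge become EDGE_FALLTHRU...

   whereas could_fall_through asks the weaker question of whether this
   would hold if the blocks were first made adjacent, which is what
   block reordering wants to know.  */
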
/* Return the NOTE_INSN_BASIC_BLOCK of BB.  */
rtx_note *
bb_note (basic_block bb)
{
  rtx_insn *note;

  note = BB_HEAD (bb);
  if (LABEL_P (note))
    note = NEXT_INSN (note);

  gcc_assert (NOTE_INSN_BASIC_BLOCK_P (note));
  return as_a <rtx_note *> (note);
}

/* Return the INSN immediately following the NOTE_INSN_BASIC_BLOCK
   note associated with the BLOCK.  */

static rtx_insn *
first_insn_after_basic_block_note (basic_block block)
{
  rtx_insn *insn;

  /* Get the first instruction in the block.  */
  insn = BB_HEAD (block);

  if (insn == NULL_RTX)
    return NULL;
  if (LABEL_P (insn))
    insn = NEXT_INSN (insn);
  gcc_assert (NOTE_INSN_BASIC_BLOCK_P (insn));

  return NEXT_INSN (insn);
}

/* Create a new basic block just after basic block BB by splitting off
   everything after the specified instruction INSNP.  */

static basic_block
rtl_split_block (basic_block bb, void *insnp)
{
  basic_block new_bb;
  rtx_insn *insn = (rtx_insn *) insnp;
  edge e;
  edge_iterator ei;

  if (!insn)
    {
      insn = first_insn_after_basic_block_note (bb);

      if (insn)
	{
	  rtx_insn *next = insn;

	  insn = PREV_INSN (insn);

	  /* If the block contains only debug insns, insn would have
	     been NULL in a non-debug compilation, and then we'd end
	     up emitting a DELETED note.  For -fcompare-debug
	     stability, emit the note too.  */
	  if (insn != BB_END (bb)
	      && DEBUG_INSN_P (next)
	      && DEBUG_INSN_P (BB_END (bb)))
	    {
	      while (next != BB_END (bb) && DEBUG_INSN_P (next))
		next = NEXT_INSN (next);

	      if (next == BB_END (bb))
		emit_note_after (NOTE_INSN_DELETED, next);
	    }
	}
      else
	insn = get_last_insn ();
    }

  /* We should probably check the type of the insn so that we do not create
     an inconsistent cfg.  It is checked in verify_flow_info anyway, so do
     not bother.  */
  if (insn == BB_END (bb))
    emit_note_after (NOTE_INSN_DELETED, insn);

  /* Create the new basic block.  */
  new_bb = create_basic_block (NEXT_INSN (insn), BB_END (bb), bb);
  BB_COPY_PARTITION (new_bb, bb);
  BB_END (bb) = insn;

  /* Redirect the outgoing edges.  */
  new_bb->succs = bb->succs;
  bb->succs = NULL;
  FOR_EACH_EDGE (e, ei, new_bb->succs)
    e->src = new_bb;

  /* The new block starts off being dirty.  */
  df_set_bb_dirty (bb);
  return new_bb;
}

/* Return true if LOC1 and LOC2 are equivalent for
   unique_locus_on_edge_between_p purposes.  */

static bool
loc_equal (location_t loc1, location_t loc2)
{
  if (loc1 == loc2)
    return true;

  expanded_location loce1 = expand_location (loc1);
  expanded_location loce2 = expand_location (loc2);

  if (loce1.line != loce2.line
      || loce1.column != loce2.column
      || loce1.data != loce2.data)
    return false;
  if (loce1.file == loce2.file)
    return true;
  return (loce1.file != NULL
	  && loce2.file != NULL
	  && filename_cmp (loce1.file, loce2.file) == 0);
}

/* Return true if the single edge between blocks A and B is the only place
   in RTL which holds some unique locus.  */

static bool
unique_locus_on_edge_between_p (basic_block a, basic_block b)
{
  const location_t goto_locus = EDGE_SUCC (a, 0)->goto_locus;
  rtx_insn *insn, *end;

  if (LOCATION_LOCUS (goto_locus) == UNKNOWN_LOCATION)
    return false;

  /* First scan block A backward.  */
  insn = BB_END (a);
  end = PREV_INSN (BB_HEAD (a));
  while (insn != end && (!NONDEBUG_INSN_P (insn) || !INSN_HAS_LOCATION (insn)))
    insn = PREV_INSN (insn);

  if (insn != end && loc_equal (INSN_LOCATION (insn), goto_locus))
    return false;

  /* Then scan block B forward.  */
  insn = BB_HEAD (b);
  if (insn)
    {
      end = NEXT_INSN (BB_END (b));
      while (insn != end && !NONDEBUG_INSN_P (insn))
	insn = NEXT_INSN (insn);

      if (insn != end && INSN_HAS_LOCATION (insn)
	  && loc_equal (INSN_LOCATION (insn), goto_locus))
	return false;
    }

  return true;
}

/* If the single edge between blocks A and B is the only place in RTL which
   holds some unique locus, emit a nop with that locus between the blocks.  */

static void
emit_nop_for_unique_locus_between (basic_block a, basic_block b)
{
  if (!unique_locus_on_edge_between_p (a, b))
    return;

  BB_END (a) = emit_insn_after_noloc (gen_nop (), BB_END (a), a);
  INSN_LOCATION (BB_END (a)) = EDGE_SUCC (a, 0)->goto_locus;
}

/* Blocks A and B are to be merged into a single block A.  The insns
   are already contiguous.  */

static void
rtl_merge_blocks (basic_block a, basic_block b)
{
  /* If B is a forwarder block whose outgoing edge has no location, we'll
     propagate the locus of the edge between A and B onto it.  */
  const bool forward_edge_locus
    = (b->flags & BB_FORWARDER_BLOCK) != 0
      && LOCATION_LOCUS (EDGE_SUCC (b, 0)->goto_locus) == UNKNOWN_LOCATION;
  rtx_insn *b_head = BB_HEAD (b), *b_end = BB_END (b), *a_end = BB_END (a);
  rtx_insn *del_first = NULL, *del_last = NULL;
  rtx_insn *b_debug_start = b_end, *b_debug_end = b_end;
  int b_empty = 0;

  if (dump_file)
    fprintf (dump_file, "Merging block %d into block %d...\n", b->index,
	     a->index);

  while (DEBUG_INSN_P (b_end))
    b_end = PREV_INSN (b_debug_start = b_end);

  /* If there was a CODE_LABEL beginning B, delete it.  */
  if (LABEL_P (b_head))
    {
      /* Detect basic blocks with nothing but a label.  This can happen
	 in particular at the end of a function.  */
      if (b_head == b_end)
	b_empty = 1;

      del_first = del_last = b_head;
      b_head = NEXT_INSN (b_head);
    }

  /* Delete the basic block note and handle blocks containing just that
     note.  */
  if (NOTE_INSN_BASIC_BLOCK_P (b_head))
    {
      if (b_head == b_end)
	b_empty = 1;
      if (! del_last)
	del_first = b_head;

      del_last = b_head;
      b_head = NEXT_INSN (b_head);
    }

  /* If there was a jump out of A, delete it.  */
  if (JUMP_P (a_end))
    {
      rtx_insn *prev;

      for (prev = PREV_INSN (a_end); ; prev = PREV_INSN (prev))
	if (!NOTE_P (prev)
	    || NOTE_INSN_BASIC_BLOCK_P (prev)
	    || prev == BB_HEAD (a))
	  break;

      del_first = a_end;

      a_end = PREV_INSN (del_first);
    }
  else if (BARRIER_P (NEXT_INSN (a_end)))
    del_first = NEXT_INSN (a_end);

  /* Delete everything marked above as well as crap that might be
     hanging out between the two blocks.  */
  BB_END (a) = a_end;
  BB_HEAD (b) = b_empty ? NULL : b_head;
  delete_insn_chain (del_first, del_last, true);

  /* If not optimizing, preserve the locus of the single edge between
     blocks A and B if necessary by emitting a nop.  */
  if (!optimize
      && !forward_edge_locus
      && !DECL_IGNORED_P (current_function_decl))
    {
      emit_nop_for_unique_locus_between (a, b);
      a_end = BB_END (a);
    }

  /* Reassociate the insns of B with A.  */
  if (!b_empty)
    {
      update_bb_for_insn_chain (a_end, b_debug_end, a);

      BB_END (a) = b_debug_end;
      BB_HEAD (b) = NULL;
    }
  else if (b_end != b_debug_end)
    {
      /* Move any deleted labels and other notes between the end of A
	 and the debug insns that make up B after the debug insns,
	 bringing the debug insns into A while keeping the notes after
	 the end of A.  */
      if (NEXT_INSN (a_end) != b_debug_start)
	reorder_insns_nobb (NEXT_INSN (a_end), PREV_INSN (b_debug_start),
			    b_debug_end);
      update_bb_for_insn_chain (b_debug_start, b_debug_end, a);
      BB_END (a) = b_debug_end;
    }

  df_bb_delete (b->index);

  if (forward_edge_locus)
    EDGE_SUCC (b, 0)->goto_locus = EDGE_SUCC (a, 0)->goto_locus;

  if (dump_file)
    fprintf (dump_file, "Merged blocks %d and %d.\n", a->index, b->index);
}


/* Return true when blocks A and B can be merged.  */

static bool
rtl_can_merge_blocks (basic_block a, basic_block b)
{
  /* If we are partitioning hot/cold basic blocks, we don't want to
     mess up unconditional or indirect jumps that cross between hot
     and cold sections.

     Basic block partitioning may result in some jumps that appear to
     be optimizable (or blocks that appear to be mergeable), but which really
     must be left untouched (they are required to make it safely across
     partition boundaries).  See the comments at the top of
     bb-reorder.cc:partition_hot_cold_basic_blocks for complete details.  */

  if (BB_PARTITION (a) != BB_PARTITION (b))
    return false;

  /* Protect the loop latches.  */
  if (current_loops && b->loop_father->latch == b)
    return false;

  /* There must be exactly one edge in between the blocks.  */
  return (single_succ_p (a)
	  && single_succ (a) == b
	  && single_pred_p (b)
	  && a != b
	  /* Must be a simple edge.  */
	  && !(single_succ_edge (a)->flags & EDGE_COMPLEX)
	  && a->next_bb == b
	  && a != ENTRY_BLOCK_PTR_FOR_FN (cfun)
	  && b != EXIT_BLOCK_PTR_FOR_FN (cfun)
	  /* If the jump insn has side effects,
	     we can't kill the edge.  */
	  && (!JUMP_P (BB_END (a))
	      || (reload_completed
		  ? simplejump_p (BB_END (a)) : onlyjump_p (BB_END (a)))));
}

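/* Example usage (illustrative sketch): merging is always guarded by the
   predicate above, typically via the cfghooks wrappers:

     if (can_merge_blocks_p (a, b))
       merge_blocks (a, b);

   can_merge_blocks_p and merge_blocks (cfghooks.cc) dispatch to
   rtl_can_merge_blocks and rtl_merge_blocks when the CFG is in
   cfgrtl mode.  */
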
/* Return the label at the head of basic block BLOCK.  Create one if it
   doesn't exist.  */

rtx_code_label *
block_label (basic_block block)
{
  if (block == EXIT_BLOCK_PTR_FOR_FN (cfun))
    return NULL;

  if (!LABEL_P (BB_HEAD (block)))
    BB_HEAD (block) = emit_label_before (gen_label_rtx (), BB_HEAD (block));

  return as_a <rtx_code_label *> (BB_HEAD (block));
}

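/* Example usage (illustrative sketch): redirecting a jump to a new
   target block needs a label to aim at, which is exactly how the
   redirection code below uses this function:

     rtx_code_label *label = block_label (target);
     if (label
	 && redirect_jump (as_a <rtx_jump_insn *> (insn), label, 0))
       ...update the CFG edge to match...

   block_label returns NULL only for the exit block, whose "label" is
   the return point and cannot be materialized.  */
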
/* Remove all barriers from BB_FOOTER of a BB.  */

static void
remove_barriers_from_footer (basic_block bb)
{
  rtx_insn *insn = BB_FOOTER (bb);

  /* Remove barriers but keep jumptables.  */
  while (insn)
    {
      if (BARRIER_P (insn))
	{
	  if (PREV_INSN (insn))
	    SET_NEXT_INSN (PREV_INSN (insn)) = NEXT_INSN (insn);
	  else
	    BB_FOOTER (bb) = NEXT_INSN (insn);
	  if (NEXT_INSN (insn))
	    SET_PREV_INSN (NEXT_INSN (insn)) = PREV_INSN (insn);
	}
      if (LABEL_P (insn))
	return;
      insn = NEXT_INSN (insn);
    }
}

/* Attempt to perform edge redirection by replacing a possibly complex jump
   instruction with an unconditional jump, or by removing the jump
   completely.  This can apply only if all edges now point to the same
   block.  The parameters and return values are equivalent to
   redirect_edge_and_branch.  */

edge
try_redirect_by_replacing_jump (edge e, basic_block target, bool in_cfglayout)
{
  basic_block src = e->src;
  rtx_insn *insn = BB_END (src);
  rtx set;
  int fallthru = 0;

  /* If we are partitioning hot/cold basic blocks, we don't want to
     mess up unconditional or indirect jumps that cross between hot
     and cold sections.

     Basic block partitioning may result in some jumps that appear to
     be optimizable (or blocks that appear to be mergeable), but which really
     must be left untouched (they are required to make it safely across
     partition boundaries).  See the comments at the top of
     bb-reorder.cc:partition_hot_cold_basic_blocks for complete details.  */

  if (BB_PARTITION (src) != BB_PARTITION (target))
    return NULL;

  /* We can replace or remove a complex jump only when we have exactly
     two edges.  Also, if we have exactly one outgoing edge, we can
     redirect that.  */
  if (EDGE_COUNT (src->succs) >= 3
      /* Verify that all targets will be TARGET.  Specifically, the
	 edge that is not E must also go to TARGET.  */
      || (EDGE_COUNT (src->succs) == 2
	  && EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest != target))
    return NULL;

  if (!onlyjump_p (insn))
    return NULL;
  if ((!optimize || reload_completed) && tablejump_p (insn, NULL, NULL))
    return NULL;

  /* Avoid removing a branch with side effects.  */
  set = single_set (insn);
  if (!set || side_effects_p (set))
    return NULL;

  /* See if we can create the fallthru edge.  */
  if (in_cfglayout || can_fallthru (src, target))
    {
      if (dump_file)
	fprintf (dump_file, "Removing jump %i.\n", INSN_UID (insn));
      fallthru = 1;

      /* Selectively unlink the whole insn chain.  */
      if (in_cfglayout)
	{
	  delete_insn_chain (insn, BB_END (src), false);
	  remove_barriers_from_footer (src);
	}
      else
	delete_insn_chain (insn, PREV_INSN (BB_HEAD (target)), false);
    }

  /* If this is already a simplejump, redirect it.  */
  else if (simplejump_p (insn))
    {
      if (e->dest == target)
	return NULL;
      if (dump_file)
	fprintf (dump_file, "Redirecting jump %i from %i to %i.\n",
		 INSN_UID (insn), e->dest->index, target->index);
      if (!redirect_jump (as_a <rtx_jump_insn *> (insn),
			  block_label (target), 0))
	{
	  gcc_assert (target == EXIT_BLOCK_PTR_FOR_FN (cfun));
	  return NULL;
	}
    }

  /* Cannot do anything for the target exit block.  */
  else if (target == EXIT_BLOCK_PTR_FOR_FN (cfun))
    return NULL;

  /* Or replace a possibly complicated jump insn by a simple jump insn.  */
  else
    {
      rtx_code_label *target_label = block_label (target);
      rtx_insn *barrier;
      rtx_insn *label;
      rtx_jump_table_data *table;

      emit_jump_insn_after_noloc (targetm.gen_jump (target_label), insn);
      JUMP_LABEL (BB_END (src)) = target_label;
      LABEL_NUSES (target_label)++;
      if (dump_file)
	fprintf (dump_file, "Replacing insn %i by jump %i\n",
		 INSN_UID (insn), INSN_UID (BB_END (src)));


      delete_insn_chain (insn, insn, false);

      /* Recognize a tablejump that we are converting to a
	 simple jump and remove its associated CODE_LABEL
	 and ADDR_VEC or ADDR_DIFF_VEC.  */
      if (tablejump_p (insn, &label, &table))
	delete_insn_chain (label, table, false);

      barrier = next_nonnote_nondebug_insn (BB_END (src));
      if (!barrier || !BARRIER_P (barrier))
	emit_barrier_after (BB_END (src));
      else
	{
	  if (barrier != NEXT_INSN (BB_END (src)))
	    {
	      /* Move the jump before the barrier so that the notes
		 which originally were or were created before the jump
		 table are inside the basic block.  */
	      rtx_insn *new_insn = BB_END (src);

	      update_bb_for_insn_chain (NEXT_INSN (BB_END (src)),
				        PREV_INSN (barrier), src);

	      SET_NEXT_INSN (PREV_INSN (new_insn)) = NEXT_INSN (new_insn);
	      SET_PREV_INSN (NEXT_INSN (new_insn)) = PREV_INSN (new_insn);

	      SET_NEXT_INSN (new_insn) = barrier;
	      SET_NEXT_INSN (PREV_INSN (barrier)) = new_insn;

	      SET_PREV_INSN (new_insn) = PREV_INSN (barrier);
	      SET_PREV_INSN (barrier) = new_insn;
	    }
	}
    }

  /* Keep only one edge out and set proper flags.  */
  if (!single_succ_p (src))
    remove_edge (e);
  gcc_assert (single_succ_p (src));

  e = single_succ_edge (src);
  if (fallthru)
    e->flags = EDGE_FALLTHRU;
  else
    e->flags = 0;

  e->probability = profile_probability::always ();

  if (e->dest != target)
    redirect_edge_succ (e, target);
  return e;
}

/* Subroutine of redirect_branch_edge that tries to patch the jump
   instruction INSN so that it reaches block NEW_BB.  Do this only when
   it originally reached the block holding OLD_LABEL.  Return true if
   this worked or the original target wasn't that block; return false
   if redirection doesn't work.  */

static bool
patch_jump_insn (rtx_insn *insn, rtx_insn *old_label, basic_block new_bb)
{
  rtx_jump_table_data *table;
  rtx tmp;
  /* Recognize a tablejump and adjust all matching cases.  */
  if (tablejump_p (insn, NULL, &table))
    {
      rtvec vec;
      int j;
      rtx_code_label *new_label = block_label (new_bb);

      if (new_bb == EXIT_BLOCK_PTR_FOR_FN (cfun))
	return false;
      vec = table->get_labels ();

      for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
	if (XEXP (RTVEC_ELT (vec, j), 0) == old_label)
	  {
	    RTVEC_ELT (vec, j) = gen_rtx_LABEL_REF (Pmode, new_label);
	    --LABEL_NUSES (old_label);
	    ++LABEL_NUSES (new_label);
	  }

      /* Handle casesi dispatch insns.  */
      if ((tmp = tablejump_casesi_pattern (insn)) != NULL_RTX
	  && label_ref_label (XEXP (SET_SRC (tmp), 2)) == old_label)
	{
	  XEXP (SET_SRC (tmp), 2) = gen_rtx_LABEL_REF (Pmode,
						       new_label);
	  --LABEL_NUSES (old_label);
	  ++LABEL_NUSES (new_label);
	}
    }
  else if ((tmp = extract_asm_operands (PATTERN (insn))) != NULL)
    {
      int i, n = ASM_OPERANDS_LABEL_LENGTH (tmp);
      rtx note;

      if (new_bb == EXIT_BLOCK_PTR_FOR_FN (cfun))
	return false;
      rtx_code_label *new_label = block_label (new_bb);

      for (i = 0; i < n; ++i)
	{
	  rtx old_ref = ASM_OPERANDS_LABEL (tmp, i);
	  gcc_assert (GET_CODE (old_ref) == LABEL_REF);
	  if (XEXP (old_ref, 0) == old_label)
	    {
	      ASM_OPERANDS_LABEL (tmp, i)
		= gen_rtx_LABEL_REF (Pmode, new_label);
	      --LABEL_NUSES (old_label);
	      ++LABEL_NUSES (new_label);
	    }
	}

      if (JUMP_LABEL (insn) == old_label)
	{
	  JUMP_LABEL (insn) = new_label;
	  note = find_reg_note (insn, REG_LABEL_TARGET, new_label);
	  if (note)
	    remove_note (insn, note);
	}
      else
	{
	  note = find_reg_note (insn, REG_LABEL_TARGET, old_label);
	  if (note)
	    remove_note (insn, note);
	  if (JUMP_LABEL (insn) != new_label
	      && !find_reg_note (insn, REG_LABEL_TARGET, new_label))
	    add_reg_note (insn, REG_LABEL_TARGET, new_label);
	}
      while ((note = find_reg_note (insn, REG_LABEL_OPERAND, old_label))
	     != NULL_RTX)
	XEXP (note, 0) = new_label;
    }
  else
    {
      /* ?? We may play games with moving the named labels from
	 one basic block to the other in case only one computed_jump is
	 available.  */
      if (computed_jump_p (insn)
	  /* A return instruction can't be redirected.  */
	  || returnjump_p (insn))
	return false;

      if (!currently_expanding_to_rtl || JUMP_LABEL (insn) == old_label)
	{
	  /* If the insn doesn't go where we think, we're confused.  */
	  gcc_assert (JUMP_LABEL (insn) == old_label);

	  /* If the substitution doesn't succeed, die.  This can happen
	     if the back end emitted unrecognizable instructions or if
	     the target is the exit block on some arches.  Or for crossing
	     jumps.  */
	  if (!redirect_jump (as_a <rtx_jump_insn *> (insn),
			      block_label (new_bb), 0))
	    {
	      gcc_assert (new_bb == EXIT_BLOCK_PTR_FOR_FN (cfun)
			  || CROSSING_JUMP_P (insn));
	      return false;
	    }
	}
    }
  return true;
}


/* Redirect edge E, representing the branch of an (un)conditional jump or
   tablejump, to TARGET.  Return NULL on failure.  */
static edge
redirect_branch_edge (edge e, basic_block target)
{
  rtx_insn *old_label = BB_HEAD (e->dest);
  basic_block src = e->src;
  rtx_insn *insn = BB_END (src);

  /* We can only redirect non-fallthru edges of a jump insn.  */
  if (e->flags & EDGE_FALLTHRU)
    return NULL;
  else if (!JUMP_P (insn) && !currently_expanding_to_rtl)
    return NULL;

  if (!currently_expanding_to_rtl)
    {
      if (!patch_jump_insn (as_a <rtx_jump_insn *> (insn), old_label, target))
	return NULL;
    }
  else
    /* When expanding, this BB might actually contain multiple
       jumps (i.e. not yet split by find_many_sub_basic_blocks).
       Redirect all of those that match our label.  */
    FOR_BB_INSNS (src, insn)
      if (JUMP_P (insn) && !patch_jump_insn (as_a <rtx_jump_insn *> (insn),
					     old_label, target))
	return NULL;

  if (dump_file)
    fprintf (dump_file, "Edge %i->%i redirected to %i\n",
	     e->src->index, e->dest->index, target->index);

  if (e->dest != target)
    e = redirect_edge_succ_nodup (e, target);

  return e;
}

/* Called when edge E has been redirected to a new destination,
   in order to update the region crossing flag on the edge and
   jump.  */

static void
fixup_partition_crossing (edge e)
{
  if (e->src == ENTRY_BLOCK_PTR_FOR_FN (cfun) || e->dest
      == EXIT_BLOCK_PTR_FOR_FN (cfun))
    return;
  /* If we redirected an existing edge, it may already be marked
     crossing, even though the new src is missing a reg crossing note.
     But make sure the reg crossing note doesn't already exist before
     inserting.  */
  if (BB_PARTITION (e->src) != BB_PARTITION (e->dest))
    {
      e->flags |= EDGE_CROSSING;
      if (JUMP_P (BB_END (e->src)))
	CROSSING_JUMP_P (BB_END (e->src)) = 1;
    }
  else if (BB_PARTITION (e->src) == BB_PARTITION (e->dest))
    {
      e->flags &= ~EDGE_CROSSING;
      /* Remove the section crossing note from the jump at the end of
         src if it exists, and if no other successors are
         still crossing.  */
      if (JUMP_P (BB_END (e->src)) && CROSSING_JUMP_P (BB_END (e->src)))
        {
          bool has_crossing_succ = false;
          edge e2;
          edge_iterator ei;
          FOR_EACH_EDGE (e2, ei, e->src->succs)
            {
              has_crossing_succ |= (e2->flags & EDGE_CROSSING);
              if (has_crossing_succ)
                break;
            }
          if (!has_crossing_succ)
	    CROSSING_JUMP_P (BB_END (e->src)) = 0;
        }
    }
}

/* Called when block BB has been reassigned to the cold partition,
   because it is now dominated by another cold block,
   to ensure that the region crossing attributes are updated.  */

static void
fixup_new_cold_bb (basic_block bb)
{
  edge e;
  edge_iterator ei;

  /* This is called when a hot bb is found to now be dominated
     by a cold bb and therefore needs to become cold.  Therefore,
     its preds will no longer be region crossing.  Any non-dominating
     preds that were previously hot would also have become cold
     in the caller for the same region.  Any preds that were previously
     region-crossing will be adjusted in fixup_partition_crossing.  */
  FOR_EACH_EDGE (e, ei, bb->preds)
    {
      fixup_partition_crossing (e);
    }

  /* Possibly need to make bb's successor edges region crossing,
     or remove a stale region crossing.  */
  FOR_EACH_EDGE (e, ei, bb->succs)
    {
      /* We can't have fall-through edges across partition boundaries.
         Note that force_nonfallthru will do any necessary partition
         boundary fixup by calling fixup_partition_crossing itself.  */
      if ((e->flags & EDGE_FALLTHRU)
          && BB_PARTITION (bb) != BB_PARTITION (e->dest)
	  && e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
        force_nonfallthru (e);
      else
        fixup_partition_crossing (e);
    }
}

/* Attempt to change code to redirect edge E to TARGET.  Don't do so at the
   expense of adding new instructions or reordering basic blocks.

   The function can also be called with the edge destination already equal
   to TARGET.  It then tries the simplifications and does nothing if none
   is possible.

   Return the edge representing the branch if the transformation succeeded.
   Return NULL on failure.
   We still return NULL if E already pointed to TARGET and we didn't
   manage to simplify the instruction stream.  */

static edge
rtl_redirect_edge_and_branch (edge e, basic_block target)
{
  edge ret;
  basic_block src = e->src;
  basic_block dest = e->dest;

  if (e->flags & (EDGE_ABNORMAL_CALL | EDGE_EH))
    return NULL;

  if (dest == target)
    return e;

  if ((ret = try_redirect_by_replacing_jump (e, target, false)) != NULL)
    {
      df_set_bb_dirty (src);
      fixup_partition_crossing (ret);
      return ret;
    }

  ret = redirect_branch_edge (e, target);
  if (!ret)
    return NULL;

  df_set_bb_dirty (src);
  fixup_partition_crossing (ret);
  return ret;
}

/* Emit a barrier after BB, into the footer if we are in CFGLAYOUT mode.  */

void
emit_barrier_after_bb (basic_block bb)
{
  rtx_barrier *barrier = emit_barrier_after (BB_END (bb));
  gcc_assert (current_ir_type () == IR_RTL_CFGRTL
              || current_ir_type () == IR_RTL_CFGLAYOUT);
  if (current_ir_type () == IR_RTL_CFGLAYOUT)
    {
      rtx_insn *insn = unlink_insn_chain (barrier, barrier);

      if (BB_FOOTER (bb))
	{
          rtx_insn *footer_tail = BB_FOOTER (bb);

          while (NEXT_INSN (footer_tail))
            footer_tail = NEXT_INSN (footer_tail);
          if (!BARRIER_P (footer_tail))
            {
              SET_NEXT_INSN (footer_tail) = insn;
              SET_PREV_INSN (insn) = footer_tail;
            }
	}
      else
        BB_FOOTER (bb) = insn;
    }
}

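/* Example usage (illustrative sketch): after emitting an unconditional
   jump at the end of a block, the insn stream must be terminated by a
   barrier in both IR modes, as force_nonfallthru_and_redirect below
   does:

     emit_jump_insn_after_setloc (targetm.gen_jump (label),
				  BB_END (jump_block), loc);
     JUMP_LABEL (BB_END (jump_block)) = label;
     LABEL_NUSES (label)++;
     emit_barrier_after_bb (jump_block);

   In cfglayout mode the barrier lands in the block's footer rather
   than in the main insn chain.  */
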
1502 /* Like force_nonfallthru below, but additionally performs redirection
1503    Used by redirect_edge_and_branch_force.  JUMP_LABEL is used only
1504    when redirecting to the EXIT_BLOCK, it is either ret_rtx or
1505    simple_return_rtx, indicating which kind of returnjump to create.
1506    It should be NULL otherwise.  */
1507 
1508 basic_block
force_nonfallthru_and_redirect(edge e,basic_block target,rtx jump_label)1509 force_nonfallthru_and_redirect (edge e, basic_block target, rtx jump_label)
1510 {
1511   basic_block jump_block, new_bb = NULL, src = e->src;
1512   rtx note;
1513   edge new_edge;
1514   int abnormal_edge_flags = 0;
1515   bool asm_goto_edge = false;
1516   int loc;
1517 
1518   /* In the case the last instruction is conditional jump to the next
1519      instruction, first redirect the jump itself and then continue
1520      by creating a basic block afterwards to redirect fallthru edge.  */
1521   if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
1522       && e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun)
1523       && any_condjump_p (BB_END (e->src))
1524       && JUMP_LABEL (BB_END (e->src)) == BB_HEAD (e->dest))
1525     {
1526       rtx note;
1527       edge b = unchecked_make_edge (e->src, target, 0);
1528       bool redirected;
1529 
1530       redirected = redirect_jump (as_a <rtx_jump_insn *> (BB_END (e->src)),
1531 				  block_label (target), 0);
1532       gcc_assert (redirected);
1533 
1534       note = find_reg_note (BB_END (e->src), REG_BR_PROB, NULL_RTX);
1535       if (note)
1536 	{
1537 	  int prob = XINT (note, 0);
1538 
1539 	  b->probability = profile_probability::from_reg_br_prob_note (prob);
1540 	  e->probability -= e->probability;
1541 	}
1542     }
1543 
1544   if (e->flags & EDGE_ABNORMAL)
1545     {
1546       /* Irritating special case - fallthru edge to the same block as abnormal
1547 	 edge.
1548 	 We can't redirect abnormal edge, but we still can split the fallthru
1549 	 one and create separate abnormal edge to original destination.
1550 	 This allows bb-reorder to make such edge non-fallthru.  */
1551       gcc_assert (e->dest == target);
1552       abnormal_edge_flags = e->flags & ~EDGE_FALLTHRU;
1553       e->flags &= EDGE_FALLTHRU;
1554     }
1555   else
1556     {
1557       gcc_assert (e->flags & EDGE_FALLTHRU);
1558       if (e->src == ENTRY_BLOCK_PTR_FOR_FN (cfun))
1559 	{
1560 	  /* We can't redirect the entry block.  Create an empty block
1561 	     at the start of the function which we use to add the new
1562 	     jump.  */
1563 	  edge tmp;
1564 	  edge_iterator ei;
1565 	  bool found = false;
1566 
1567 	  basic_block bb = create_basic_block (BB_HEAD (e->dest), NULL,
1568 					       ENTRY_BLOCK_PTR_FOR_FN (cfun));
1569 	  bb->count = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count;
1570 
1571 	  /* Make sure new block ends up in correct hot/cold section.  */
1572 	  BB_COPY_PARTITION (bb, e->dest);
1573 
1574 	  /* Change the existing edge's source to be the new block, and add
1575 	     a new edge from the entry block to the new block.  */
1576 	  e->src = bb;
1577 	  for (ei = ei_start (ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs);
1578 	       (tmp = ei_safe_edge (ei)); )
1579 	    {
1580 	      if (tmp == e)
1581 		{
1582 		  ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs->unordered_remove (ei.index);
1583 		  found = true;
1584 		  break;
1585 		}
1586 	      else
1587 		ei_next (&ei);
1588 	    }
1589 
1590 	  gcc_assert (found);
1591 
1592 	  vec_safe_push (bb->succs, e);
1593 	  make_single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun), bb,
1594 				 EDGE_FALLTHRU);
1595 	}
1596     }
1597 
1598   /* If e->src ends with asm goto, see if any of the ASM_OPERANDS_LABELs
1599      don't point to the target or fallthru label.  */
1600   if (JUMP_P (BB_END (e->src))
1601       && target != EXIT_BLOCK_PTR_FOR_FN (cfun)
1602       && (e->flags & EDGE_FALLTHRU)
1603       && (note = extract_asm_operands (PATTERN (BB_END (e->src)))))
1604     {
1605       int i, n = ASM_OPERANDS_LABEL_LENGTH (note);
1606       bool adjust_jump_target = false;
1607 
1608       for (i = 0; i < n; ++i)
1609 	{
1610 	  if (XEXP (ASM_OPERANDS_LABEL (note, i), 0) == BB_HEAD (e->dest))
1611 	    {
1612 	      LABEL_NUSES (XEXP (ASM_OPERANDS_LABEL (note, i), 0))--;
1613 	      XEXP (ASM_OPERANDS_LABEL (note, i), 0) = block_label (target);
1614 	      LABEL_NUSES (XEXP (ASM_OPERANDS_LABEL (note, i), 0))++;
1615 	      adjust_jump_target = true;
1616 	    }
1617 	  if (XEXP (ASM_OPERANDS_LABEL (note, i), 0) == BB_HEAD (target))
1618 	    asm_goto_edge = true;
1619 	}
1620       if (adjust_jump_target)
1621 	{
1622 	  rtx_insn *insn = BB_END (e->src);
1623 	  rtx note;
1624 	  rtx_insn *old_label = BB_HEAD (e->dest);
1625 	  rtx_insn *new_label = BB_HEAD (target);
1626 
1627 	  if (JUMP_LABEL (insn) == old_label)
1628 	    {
1629 	      JUMP_LABEL (insn) = new_label;
1630 	      note = find_reg_note (insn, REG_LABEL_TARGET, new_label);
1631 	      if (note)
1632 		remove_note (insn, note);
1633 	    }
1634 	  else
1635 	    {
1636 	      note = find_reg_note (insn, REG_LABEL_TARGET, old_label);
1637 	      if (note)
1638 		remove_note (insn, note);
1639 	      if (JUMP_LABEL (insn) != new_label
1640 		  && !find_reg_note (insn, REG_LABEL_TARGET, new_label))
1641 		add_reg_note (insn, REG_LABEL_TARGET, new_label);
1642 	    }
1643 	  while ((note = find_reg_note (insn, REG_LABEL_OPERAND, old_label))
1644 		 != NULL_RTX)
1645 	    XEXP (note, 0) = new_label;
1646 	}
1647     }
1648 
1649   if (EDGE_COUNT (e->src->succs) >= 2 || abnormal_edge_flags || asm_goto_edge)
1650     {
1651       rtx_insn *new_head;
1652       profile_count count = e->count ();
1653       profile_probability probability = e->probability;
1654       /* Create the new structures.  */
1655 
1656       /* If the old block ended with a tablejump, skip its table
1657 	 by searching forward from there.  Otherwise start searching
1658 	 forward from the last instruction of the old block.  */
1659       rtx_jump_table_data *table;
1660       if (tablejump_p (BB_END (e->src), NULL, &table))
1661 	new_head = table;
1662       else
1663 	new_head = BB_END (e->src);
1664       new_head = NEXT_INSN (new_head);
1665 
1666       jump_block = create_basic_block (new_head, NULL, e->src);
1667       jump_block->count = count;
1668 
1669       /* Make sure new block ends up in correct hot/cold section.  */
1670 
1671       BB_COPY_PARTITION (jump_block, e->src);
1672 
1673       /* Wire edge in.  */
1674       new_edge = make_edge (e->src, jump_block, EDGE_FALLTHRU);
1675       new_edge->probability = probability;
1676 
1677       /* Redirect old edge.  */
1678       redirect_edge_pred (e, jump_block);
1679       e->probability = profile_probability::always ();
1680 
1681       /* If e->src was previously region crossing, it no longer is
1682          and the region crossing note should be removed.  */
1683       fixup_partition_crossing (new_edge);
1684 
1685       /* If asm goto has any label refs to target's label,
1686 	 also add an edge from the asm goto bb to target.  */

1687       if (asm_goto_edge)
1688 	{
1689 	  new_edge->probability = new_edge->probability.apply_scale (1, 2);
1690 	  jump_block->count = jump_block->count.apply_scale (1, 2);
1691 	  edge new_edge2 = make_edge (new_edge->src, target,
1692 				      e->flags & ~EDGE_FALLTHRU);
1693 	  new_edge2->probability = probability - new_edge->probability;
1694 	}
1695 
1696       new_bb = jump_block;
1697     }
1698   else
1699     jump_block = e->src;
1700 
1701   loc = e->goto_locus;
1702   e->flags &= ~EDGE_FALLTHRU;
1703   if (target == EXIT_BLOCK_PTR_FOR_FN (cfun))
1704     {
1705       if (jump_label == ret_rtx)
1706 	emit_jump_insn_after_setloc (targetm.gen_return (),
1707 				     BB_END (jump_block), loc);
1708       else
1709 	{
1710 	  gcc_assert (jump_label == simple_return_rtx);
1711 	  emit_jump_insn_after_setloc (targetm.gen_simple_return (),
1712 				       BB_END (jump_block), loc);
1713 	}
1714       set_return_jump_label (BB_END (jump_block));
1715     }
1716   else
1717     {
1718       rtx_code_label *label = block_label (target);
1719       emit_jump_insn_after_setloc (targetm.gen_jump (label),
1720 				   BB_END (jump_block), loc);
1721       JUMP_LABEL (BB_END (jump_block)) = label;
1722       LABEL_NUSES (label)++;
1723     }
1724 
1725   /* We might be in cfg layout mode, and if so, the following routine will
1726      insert the barrier correctly.  */
1727   emit_barrier_after_bb (jump_block);
1728   redirect_edge_succ_nodup (e, target);
1729 
1730   if (abnormal_edge_flags)
1731     make_edge (src, target, abnormal_edge_flags);
1732 
1733   df_mark_solutions_dirty ();
1734   fixup_partition_crossing (e);
1735   return new_bb;
1736 }
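
/* Editorial illustration (not part of the original file): when the
   source block has other successors, forcing the fallthru edge A->B
   inserts an explicit jump block J:

	A --(fallthru)--> B    becomes    A --(fallthru)--> J --(jump)--> B

   after which B may be moved, or a different target substituted.  */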
1737 
1738 /* Edge E is assumed to be a fallthru edge.  Emit the jump instruction
1739    needed (possibly creating a new basic block) to make the edge
1740    non-fallthru.  Return the newly created BB, or NULL if none.  */
1741 
1742 static basic_block
1743 rtl_force_nonfallthru (edge e)
1744 {
1745   return force_nonfallthru_and_redirect (e, e->dest, NULL_RTX);
1746 }
1747 
1748 /* Redirect edge E even at the expense of creating a new jump insn or
1749    basic block.  Return the new basic block if created, NULL otherwise.
1750    The conversion must be possible.  */
1751 
1752 static basic_block
1753 rtl_redirect_edge_and_branch_force (edge e, basic_block target)
1754 {
1755   if (redirect_edge_and_branch (e, target)
1756       || e->dest == target)
1757     return NULL;
1758 
1759   /* In case the edge redirection failed, try to force it to be non-fallthru
1760      and redirect newly created simplejump.  */
1761   df_set_bb_dirty (e->src);
1762   return force_nonfallthru_and_redirect (e, target, NULL_RTX);
1763 }
1764 
1765 /* The given edge should potentially be a fallthru edge.  If that is in
1766    fact true, delete the jump and barriers that are in the way.  */
1767 
1768 static void
1769 rtl_tidy_fallthru_edge (edge e)
1770 {
1771   rtx_insn *q;
1772   basic_block b = e->src, c = b->next_bb;
1773 
1774   /* ??? In a late-running flow pass, other folks may have deleted basic
1775      blocks by nopping out blocks, leaving multiple BARRIERs between here
1776      and the target label. They ought to be chastised and fixed.
1777 
1778      We can also wind up with a sequence of undeletable labels between
1779      one block and the next.
1780 
1781      So search through a sequence of barriers, labels, and notes for
1782      the head of block C and assert that we really do fall through.  */
1783 
1784   for (q = NEXT_INSN (BB_END (b)); q != BB_HEAD (c); q = NEXT_INSN (q))
1785     if (NONDEBUG_INSN_P (q))
1786       return;
1787 
1788   /* Remove what will soon cease being the jump insn from the source block.
1789      If block B consisted only of this single jump, turn it into a deleted
1790      note.  */
1791   q = BB_END (b);
1792   if (JUMP_P (q)
1793       && onlyjump_p (q)
1794       && (any_uncondjump_p (q)
1795 	  || single_succ_p (b)))
1796     {
1797       rtx_insn *label;
1798       rtx_jump_table_data *table;
1799 
1800       if (tablejump_p (q, &label, &table))
1801 	{
1802 	  /* The label is likely mentioned in some instruction before
1803 	     the tablejump and might not be DCEd, so turn it into
1804 	     a note instead and move it before the tablejump that is going to
1805 	     be deleted.  */
1806 	  const char *name = LABEL_NAME (label);
1807 	  PUT_CODE (label, NOTE);
1808 	  NOTE_KIND (label) = NOTE_INSN_DELETED_LABEL;
1809 	  NOTE_DELETED_LABEL_NAME (label) = name;
1810 	  reorder_insns (label, label, PREV_INSN (q));
1811 	  delete_insn (table);
1812 	}
1813 
1814       q = PREV_INSN (q);
1815     }
1816   /* Unconditional jumps with side-effects (i.e. which we can't just delete
1817      together with the barrier) should never have a fallthru edge.  */
1818   else if (JUMP_P (q) && any_uncondjump_p (q))
1819     return;
1820 
1821   /* Selectively unlink the sequence.  */
1822   if (q != PREV_INSN (BB_HEAD (c)))
1823     delete_insn_chain (NEXT_INSN (q), PREV_INSN (BB_HEAD (c)), false);
1824 
1825   e->flags |= EDGE_FALLTHRU;
1826 }
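
/* Editorial illustration (not part of the original file): the insn
   sequence tidied above, where B ends in a removable jump to C,

	(jump_insn ... -> C)  (barrier)  (note)  (code_label C)

   collapses to a plain fallthru once the jump and barrier are
   deleted.  */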
1827 
1828 /* Should move basic block BB after basic block AFTER.  Not implemented yet.  */
1829 
1830 static bool
1831 rtl_move_block_after (basic_block bb ATTRIBUTE_UNUSED,
1832 		      basic_block after ATTRIBUTE_UNUSED)
1833 {
1834   return false;
1835 }
1836 
1837 /* Locate the last bb in the same partition as START_BB.  */
1838 
1839 static basic_block
1840 last_bb_in_partition (basic_block start_bb)
1841 {
1842   basic_block bb;
1843   FOR_BB_BETWEEN (bb, start_bb, EXIT_BLOCK_PTR_FOR_FN (cfun), next_bb)
1844     {
1845       if (BB_PARTITION (start_bb) != BB_PARTITION (bb->next_bb))
1846         return bb;
1847     }
1848   /* Return bb before the exit block.  */
1849   return bb->prev_bb;
1850 }
1851 
1852 /* Split a (typically critical) edge.  Return the new block.
1853    The edge must not be abnormal.
1854 
1855    ??? The code generally expects to be called on critical edges.
1856    The case of a block ending in an unconditional jump to a
1857    block with multiple predecessors is not handled optimally.  */
1858 
1859 static basic_block
1860 rtl_split_edge (edge edge_in)
1861 {
1862   basic_block bb, new_bb;
1863   rtx_insn *before;
1864 
1865   /* Abnormal edges cannot be split.  */
1866   gcc_assert (!(edge_in->flags & EDGE_ABNORMAL));
1867 
1868   /* We are going to place the new block in front of the edge's
1869      destination, so make sure it has no other fallthru predecessor.  */
1870   if ((edge_in->flags & EDGE_FALLTHRU) == 0)
1871     {
1872       edge e = find_fallthru_edge (edge_in->dest->preds);
1873 
1874       if (e)
1875 	force_nonfallthru (e);
1876     }
1877 
1878   /* Create the basic block note.  */
1879   if (edge_in->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
1880     before = BB_HEAD (edge_in->dest);
1881   else
1882     before = NULL;
1883 
1884   /* If this is a fall through edge to the exit block, the blocks might
1885      not be adjacent, and the right place is after the source.  */
1886   if ((edge_in->flags & EDGE_FALLTHRU)
1887       && edge_in->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
1888     {
1889       before = NEXT_INSN (BB_END (edge_in->src));
1890       bb = create_basic_block (before, NULL, edge_in->src);
1891       BB_COPY_PARTITION (bb, edge_in->src);
1892     }
1893   else
1894     {
1895       if (edge_in->src == ENTRY_BLOCK_PTR_FOR_FN (cfun))
1896         {
1897           bb = create_basic_block (before, NULL, edge_in->dest->prev_bb);
1898           BB_COPY_PARTITION (bb, edge_in->dest);
1899         }
1900       else
1901         {
1902           basic_block after = edge_in->dest->prev_bb;
1903           /* If this is post-bb reordering, and the edge crosses a partition
1904              boundary, the new block needs to be inserted in the bb chain
1905              at the end of the src partition (since we put the new bb into
1906              that partition, see below). Otherwise we may end up creating
1907              an extra partition crossing in the chain, which is illegal.
1908              It can't go after the src, because src may have a fall-through
1909              to a different block.  */
1910           if (crtl->bb_reorder_complete
1911               && (edge_in->flags & EDGE_CROSSING))
1912             {
1913               after = last_bb_in_partition (edge_in->src);
1914               before = get_last_bb_insn (after);
1915               /* The instruction following the last bb in partition should
1916                  be a barrier, since it cannot end in a fall-through.  */
1917               gcc_checking_assert (BARRIER_P (before));
1918               before = NEXT_INSN (before);
1919             }
1920           bb = create_basic_block (before, NULL, after);
1921           /* Put the split bb into the src partition, to avoid creating
1922              a situation where a cold bb dominates a hot bb, in the case
1923              where src is cold and dest is hot. The src will dominate
1924              the new bb (whereas it might not have dominated dest).  */
1925           BB_COPY_PARTITION (bb, edge_in->src);
1926         }
1927     }
1928 
1929   make_single_succ_edge (bb, edge_in->dest, EDGE_FALLTHRU);
1930 
1931   /* Can't allow a region crossing edge to be fallthrough.  */
1932   if (BB_PARTITION (bb) != BB_PARTITION (edge_in->dest)
1933       && edge_in->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
1934     {
1935       new_bb = force_nonfallthru (single_succ_edge (bb));
1936       gcc_assert (!new_bb);
1937     }
1938 
1939   /* For non-fallthru edges, we must adjust the predecessor's
1940      jump instruction to target our new block.  */
1941   if ((edge_in->flags & EDGE_FALLTHRU) == 0)
1942     {
1943       edge redirected = redirect_edge_and_branch (edge_in, bb);
1944       gcc_assert (redirected);
1945     }
1946   else
1947     {
1948       if (edge_in->src != ENTRY_BLOCK_PTR_FOR_FN (cfun))
1949 	{
1950 	  /* For asm goto even splitting of fallthru edge might
1951 	     need insn patching, as other labels might point to the
1952 	     old label.  */
1953 	  rtx_insn *last = BB_END (edge_in->src);
1954 	  if (last
1955 	      && JUMP_P (last)
1956 	      && edge_in->dest != EXIT_BLOCK_PTR_FOR_FN (cfun)
1957 	      && (extract_asm_operands (PATTERN (last))
1958 		  || JUMP_LABEL (last) == before)
1959 	      && patch_jump_insn (last, before, bb))
1960 	    df_set_bb_dirty (edge_in->src);
1961 	}
1962       redirect_edge_succ (edge_in, bb);
1963     }
1964 
1965   return bb;
1966 }
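
/* Editorial illustration (not part of the original file): splitting a
   critical edge A->B, where A has other successors and B has other
   predecessors, yields a new block N with a single fallthru edge:

	A ---> B    becomes    A ---> N ---> B

   so insns can later be placed in N without affecting other paths
   into B or out of A.  */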
1967 
1968 /* Queue instructions for insertion on an edge between two basic blocks.
1969    The new instructions and basic blocks (if any) will not appear in the
1970    CFG until commit_edge_insertions is called.  If there are already
1971    queued instructions on the edge, PATTERN is appended to them.  */
1972 
1973 void
1974 insert_insn_on_edge (rtx pattern, edge e)
1975 {
1976   /* We cannot insert instructions on an abnormal critical edge.
1977      It will be easier to find the culprit if we die now.  */
1978   gcc_assert (!((e->flags & EDGE_ABNORMAL) && EDGE_CRITICAL_P (e)));
1979 
1980   if (e->insns.r == NULL_RTX)
1981     start_sequence ();
1982   else
1983     push_to_sequence (e->insns.r);
1984 
1985   emit_insn (pattern);
1986 
1987   e->insns.r = get_insns ();
1988   end_sequence ();
1989 }
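
/* A minimal usage sketch (editorial example; the function name is
   hypothetical and not part of this file): queue a register-to-register
   move on edge E, then materialize it.  DEST and SRC are assumed to be
   pseudo registers of the same mode.  */

static void
example_insert_move_on_edge (edge e, rtx dest, rtx src)
{
  /* The move is only queued on E here; it enters the insn stream once
     commit_edge_insertions splits edges as needed.  */
  insert_insn_on_edge (gen_rtx_SET (dest, src), e);
  commit_edge_insertions ();
}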
1990 
1991 /* Like insert_insn_on_edge, but if there are already queued instructions
1992    on the edge, PATTERN is prepended to them.  */
1993 
1994 void
1995 prepend_insn_to_edge (rtx pattern, edge e)
1996 {
1997   /* We cannot insert instructions on an abnormal critical edge.
1998      It will be easier to find the culprit if we die now.  */
1999   gcc_assert (!((e->flags & EDGE_ABNORMAL) && EDGE_CRITICAL_P (e)));
2000 
2001   start_sequence ();
2002 
2003   emit_insn (pattern);
2004   emit_insn (e->insns.r);
2005 
2006   e->insns.r = get_insns ();
2007   end_sequence ();
2008 }
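
/* Editorial note: if P1 is queued on an edge with insert_insn_on_edge
   and P0 is then queued with prepend_insn_to_edge, the sequence
   committed on that edge is P0 followed by P1.  */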
2009 
2010 /* Update the CFG for the instructions queued on edge E.  */
2011 
2012 void
2013 commit_one_edge_insertion (edge e)
2014 {
2015   rtx_insn *before = NULL, *after = NULL, *insns, *tmp, *last;
2016   basic_block bb;
2017 
2018   /* Pull the insns off the edge now since the edge might go away.  */
2019   insns = e->insns.r;
2020   e->insns.r = NULL;
2021 
2022   /* Figure out where to put these insns.  If the destination has
2023      one predecessor, insert there.  Except for the exit block.  */
2024   if (single_pred_p (e->dest) && e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
2025     {
2026       bb = e->dest;
2027 
2028       /* Get a location after any code label and basic block note,
2029 	 but before everything else in the block.  */
2030       tmp = BB_HEAD (bb);
2031       if (LABEL_P (tmp))
2032 	tmp = NEXT_INSN (tmp);
2033       if (NOTE_INSN_BASIC_BLOCK_P (tmp))
2034 	tmp = NEXT_INSN (tmp);
2035       if (tmp == BB_HEAD (bb))
2036 	before = tmp;
2037       else if (tmp)
2038 	after = PREV_INSN (tmp);
2039       else
2040 	after = get_last_insn ();
2041     }
2042 
2043   /* If the source has one successor and the edge is not abnormal,
2044      insert there.  Except for the entry block.
2045      Don't do this if the predecessor ends in a jump other than
2046      unconditional simple jump.  E.g. for asm goto that points all
2047      its labels at the fallthru basic block, we can't insert instructions
2048      before the asm goto, as the asm goto can have various side effects,
2049      and can't emit instructions after the asm goto, as it must end
2050      the basic block.  */
2051   else if ((e->flags & EDGE_ABNORMAL) == 0
2052 	   && single_succ_p (e->src)
2053 	   && e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
2054 	   && (!JUMP_P (BB_END (e->src))
2055 	       || simplejump_p (BB_END (e->src))))
2056     {
2057       bb = e->src;
2058 
2059       /* It is possible to have a non-simple jump here.  Consider a target
2060 	 where some forms of unconditional jumps clobber a register.  This
2061 	 happens on the fr30 for example.
2062 
2063 	 We know this block has a single successor, so we can just emit
2064 	 the queued insns before the jump.  */
2065       if (JUMP_P (BB_END (bb)))
2066 	before = BB_END (bb);
2067       else
2068 	{
2069 	  /* We'd better be fallthru, or we've lost track of what's what.  */
2070 	  gcc_assert (e->flags & EDGE_FALLTHRU);
2071 
2072 	  after = BB_END (bb);
2073 	}
2074     }
2075 
2076   /* Otherwise we must split the edge.  */
2077   else
2078     {
2079       bb = split_edge (e);
2080 
2081       /* If E crossed a partition boundary, we needed to make bb end in
2082          a region-crossing jump, even though it was originally fallthru.  */
2083       if (JUMP_P (BB_END (bb)))
2084 	before = BB_END (bb);
2085       else
2086         after = BB_END (bb);
2087     }
2088 
2089   /* Now that we've found the spot, do the insertion.  */
2090   if (before)
2091     {
2092       emit_insn_before_noloc (insns, before, bb);
2093       last = prev_nonnote_insn (before);
2094     }
2095   else
2096     last = emit_insn_after_noloc (insns, after, bb);
2097 
2098   if (returnjump_p (last))
2099     {
2100       /* ??? Remove all outgoing edges from BB and add one for EXIT.
2101 	 This is not currently a problem because this only happens
2102 	 for the (single) epilogue, which already has a fallthru edge
2103 	 to EXIT.  */
2104 
2105       e = single_succ_edge (bb);
2106       gcc_assert (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun)
2107 		  && single_succ_p (bb) && (e->flags & EDGE_FALLTHRU));
2108 
2109       e->flags &= ~EDGE_FALLTHRU;
2110       emit_barrier_after (last);
2111 
2112       if (before)
2113 	delete_insn (before);
2114     }
2115   else
2116     gcc_assert (!JUMP_P (last));
2117 }
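
/* Editorial summary of the placement logic above:

     E->dest has a single predecessor (and is not EXIT)
       -> insert at the head of E->dest;
     E->src has a single successor, the edge is not abnormal (and E->src
     is not ENTRY and does not end in a non-simple jump)
       -> insert at the tail of E->src;
     otherwise
       -> split E and insert into the new block.

   Callers therefore need not know where the insns finally land.  */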
2118 
2119 /* Update the CFG for all queued instructions.  */
2120 
2121 void
2122 commit_edge_insertions (void)
2123 {
2124   basic_block bb;
2125 
2126   /* Optimization passes that invoke this routine can cause hot blocks
2127      previously reached by both hot and cold blocks to become dominated only
2128      by cold blocks. This will cause the verification below to fail,
2129      and leave now-cold code in the hot section. In some cases this
2130      may only be visible after newly unreachable blocks are deleted,
2131      which will be done by fixup_partitions.  */
2132   fixup_partitions ();
2133 
2134   if (!currently_expanding_to_rtl)
2135     checking_verify_flow_info ();
2136 
2137   FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun),
2138 		  EXIT_BLOCK_PTR_FOR_FN (cfun), next_bb)
2139     {
2140       edge e;
2141       edge_iterator ei;
2142 
2143       FOR_EACH_EDGE (e, ei, bb->succs)
2144 	if (e->insns.r)
2145 	  {
2146 	    if (currently_expanding_to_rtl)
2147 	      rebuild_jump_labels_chain (e->insns.r);
2148 	    commit_one_edge_insertion (e);
2149 	  }
2150     }
2151 }
2152 
2153 
2154 /* Print out RTL-specific basic block information (live information
2155    at start and end with TDF_DETAILS).  FLAGS are the TDF_* masks
2156    documented in dumpfile.h.  */
2157 
2158 static void
2159 rtl_dump_bb (FILE *outf, basic_block bb, int indent, dump_flags_t flags)
2160 {
2161   char *s_indent;
2162 
2163   s_indent = (char *) alloca ((size_t) indent + 1);
2164   memset (s_indent, ' ', (size_t) indent);
2165   s_indent[indent] = '\0';
2166 
2167   if (df && (flags & TDF_DETAILS))
2168     {
2169       df_dump_top (bb, outf);
2170       putc ('\n', outf);
2171     }
2172 
2173   if (bb->index != ENTRY_BLOCK && bb->index != EXIT_BLOCK
2174       && rtl_bb_info_initialized_p (bb))
2175     {
2176       rtx_insn *last = BB_END (bb);
2177       if (last)
2178 	last = NEXT_INSN (last);
2179       for (rtx_insn *insn = BB_HEAD (bb); insn != last; insn = NEXT_INSN (insn))
2180 	{
2181 	  if (flags & TDF_DETAILS)
2182 	    df_dump_insn_top (insn, outf);
2183 	  if (! (flags & TDF_SLIM))
2184 	    print_rtl_single (outf, insn);
2185 	  else
2186 	    dump_insn_slim (outf, insn);
2187 	  if (flags & TDF_DETAILS)
2188 	    df_dump_insn_bottom (insn, outf);
2189 	}
2190     }
2191 
2192   if (df && (flags & TDF_DETAILS))
2193     {
2194       df_dump_bottom (bb, outf);
2195       putc ('\n', outf);
2196     }
2197 
2198 }
2199 
2200 /* Like dump_function_to_file, but for RTL.  Print out dataflow information
2201    for the start of each basic block.  FLAGS are the TDF_* masks documented
2202    in dumpfile.h.  */
2203 
2204 void
2205 print_rtl_with_bb (FILE *outf, const rtx_insn *rtx_first, dump_flags_t flags)
2206 {
2207   const rtx_insn *tmp_rtx;
2208   if (rtx_first == 0)
2209     fprintf (outf, "(nil)\n");
2210   else
2211     {
2212       enum bb_state { NOT_IN_BB, IN_ONE_BB, IN_MULTIPLE_BB };
2213       int max_uid = get_max_uid ();
2214       basic_block *start = XCNEWVEC (basic_block, max_uid);
2215       basic_block *end = XCNEWVEC (basic_block, max_uid);
2216       enum bb_state *in_bb_p = XCNEWVEC (enum bb_state, max_uid);
2217       basic_block bb;
2218 
2219       /* After freeing the CFG, we still have BLOCK_FOR_INSN set on most
2220 	 insns, but the CFG is not maintained so the basic block info
2221 	 is not reliable.  Therefore it's omitted from the dumps.  */
2222       if (! (cfun->curr_properties & PROP_cfg))
2223         flags &= ~TDF_BLOCKS;
2224 
2225       if (df)
2226 	df_dump_start (outf);
2227 
2228       if (cfun->curr_properties & PROP_cfg)
2229 	{
2230 	  FOR_EACH_BB_REVERSE_FN (bb, cfun)
2231 	    {
2232 	      rtx_insn *x;
2233 
2234 	      start[INSN_UID (BB_HEAD (bb))] = bb;
2235 	      end[INSN_UID (BB_END (bb))] = bb;
2236 	      if (flags & TDF_BLOCKS)
2237 		{
2238 		  for (x = BB_HEAD (bb); x != NULL_RTX; x = NEXT_INSN (x))
2239 		    {
2240 		      enum bb_state state = IN_MULTIPLE_BB;
2241 
2242 		      if (in_bb_p[INSN_UID (x)] == NOT_IN_BB)
2243 			state = IN_ONE_BB;
2244 		      in_bb_p[INSN_UID (x)] = state;
2245 
2246 		      if (x == BB_END (bb))
2247 			break;
2248 		    }
2249 		}
2250 	    }
2251 	}
2252 
2253       for (tmp_rtx = rtx_first; tmp_rtx != NULL; tmp_rtx = NEXT_INSN (tmp_rtx))
2254 	{
2255 	  if (flags & TDF_BLOCKS)
2256 	    {
2257 	      bb = start[INSN_UID (tmp_rtx)];
2258 	      if (bb != NULL)
2259 		{
2260 		  dump_bb_info (outf, bb, 0, dump_flags, true, false);
2261 		  if (df && (flags & TDF_DETAILS))
2262 		    df_dump_top (bb, outf);
2263 		}
2264 
2265 	      if (in_bb_p[INSN_UID (tmp_rtx)] == NOT_IN_BB
2266 		  && !NOTE_P (tmp_rtx)
2267 		  && !BARRIER_P (tmp_rtx))
2268 		fprintf (outf, ";; Insn is not within a basic block\n");
2269 	      else if (in_bb_p[INSN_UID (tmp_rtx)] == IN_MULTIPLE_BB)
2270 		fprintf (outf, ";; Insn is in multiple basic blocks\n");
2271 	    }
2272 
2273 	  if (flags & TDF_DETAILS)
2274 	    df_dump_insn_top (tmp_rtx, outf);
2275 	  if (! (flags & TDF_SLIM))
2276 	    print_rtl_single (outf, tmp_rtx);
2277 	  else
2278 	    dump_insn_slim (outf, tmp_rtx);
2279 	  if (flags & TDF_DETAILS)
2280 	    df_dump_insn_bottom (tmp_rtx, outf);
2281 
2282 	  bb = end[INSN_UID (tmp_rtx)];
2283 	  if (bb != NULL)
2284 	    {
2285 	      if (flags & TDF_BLOCKS)
2286 		{
2287 		  dump_bb_info (outf, bb, 0, dump_flags, false, true);
2288 		  if (df && (flags & TDF_DETAILS))
2289 		    df_dump_bottom (bb, outf);
2290 		  putc ('\n', outf);
2291 		}
2292 	      /* Emit a hint if the fallthrough target of the current basic
2293 	         block isn't the one placed immediately after it.  */
2294 	      else if (EDGE_COUNT (bb->succs) > 0)
2295 		{
2296 		  gcc_assert (BB_END (bb) == tmp_rtx);
2297 		  const rtx_insn *ninsn = NEXT_INSN (tmp_rtx);
2298 		  /* Bypass intervening deleted-insn notes and debug insns.  */
2299 		  while (ninsn
2300 			 && !NONDEBUG_INSN_P (ninsn)
2301 			 && !start[INSN_UID (ninsn)])
2302 		    ninsn = NEXT_INSN (ninsn);
2303 		  edge e = find_fallthru_edge (bb->succs);
2304 		  if (e && ninsn)
2305 		    {
2306 		      basic_block dest = e->dest;
2307 		      if (start[INSN_UID (ninsn)] != dest)
2308 			fprintf (outf, "%s      ; pc falls through to BB %d\n",
2309 				 print_rtx_head, dest->index);
2310 		    }
2311 		}
2312 	    }
2313 	}
2314 
2315       free (start);
2316       free (end);
2317       free (in_bb_p);
2318     }
2319 }
2320 
2321 /* Update the REG_BR_PROB note of BB's jump, if present, from the cfg.  */
2322 
2323 void
2324 update_br_prob_note (basic_block bb)
2325 {
2326   rtx note;
2327   note = find_reg_note (BB_END (bb), REG_BR_PROB, NULL_RTX);
2328   if (!JUMP_P (BB_END (bb)) || !BRANCH_EDGE (bb)->probability.initialized_p ())
2329     {
2330       if (note)
2331 	{
2332 	  rtx *note_link, this_rtx;
2333 
2334 	  note_link = &REG_NOTES (BB_END (bb));
2335 	  for (this_rtx = *note_link; this_rtx; this_rtx = XEXP (this_rtx, 1))
2336 	    if (this_rtx == note)
2337 	      {
2338 		*note_link = XEXP (this_rtx, 1);
2339 		break;
2340 	      }
2341 	}
2342       return;
2343     }
2344   if (!note
2345       || XINT (note, 0) == BRANCH_EDGE (bb)->probability.to_reg_br_prob_note ())
2346     return;
2347   XINT (note, 0) = BRANCH_EDGE (bb)->probability.to_reg_br_prob_note ();
2348 }
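
/* A hedged sketch (editorial example; the function name is hypothetical):
   keeping the REG_BR_PROB note in sync after rescaling the outgoing
   probabilities of a block assumed to end in a conditional jump.  */

static void
example_rescale_branch (basic_block bb, profile_probability p)
{
  BRANCH_EDGE (bb)->probability = p;
  FALLTHRU_EDGE (bb)->probability = p.invert ();
  /* Rewrites the note's value, or drops the note if the probability
     is no longer initialized.  */
  update_br_prob_note (bb);
}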
2349 
2350 /* Get the last insn associated with block BB (that includes barriers and
2351    tablejumps after BB).  */
2352 rtx_insn *
2353 get_last_bb_insn (basic_block bb)
2354 {
2355   rtx_jump_table_data *table;
2356   rtx_insn *tmp;
2357   rtx_insn *end = BB_END (bb);
2358 
2359   /* Include any jump table following the basic block.  */
2360   if (tablejump_p (end, NULL, &table))
2361     end = table;
2362 
2363   /* Include any barriers that may follow the basic block.  */
2364   tmp = next_nonnote_nondebug_insn_bb (end);
2365   while (tmp && BARRIER_P (tmp))
2366     {
2367       end = tmp;
2368       tmp = next_nonnote_nondebug_insn_bb (end);
2369     }
2370 
2371   return end;
2372 }
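
/* Editorial illustration (not part of the original file): for a block
   ending in a tablejump, the walk above returns the final barrier:

	BB_END (bb) -> jump_table_data -> barrier
					  ^-- get_last_bb_insn (bb)  */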
2373 
2374 /* Add all BBs reachable from entry via hot paths into the SET.  */
2375 
2376 void
2377 find_bbs_reachable_by_hot_paths (hash_set<basic_block> *set)
2378 {
2379   auto_vec<basic_block, 64> worklist;
2380 
2381   set->add (ENTRY_BLOCK_PTR_FOR_FN (cfun));
2382   worklist.safe_push (ENTRY_BLOCK_PTR_FOR_FN (cfun));
2383 
2384   while (worklist.length () > 0)
2385     {
2386       basic_block bb = worklist.pop ();
2387       edge_iterator ei;
2388       edge e;
2389 
2390       FOR_EACH_EDGE (e, ei, bb->succs)
2391 	if (BB_PARTITION (e->dest) != BB_COLD_PARTITION
2392 	    && !set->add (e->dest))
2393 	  worklist.safe_push (e->dest);
2394     }
2395 }
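
/* A hedged usage sketch (editorial example; the function name is
   hypothetical): test whether BB is reachable from the entry block
   without passing through the cold partition.  */

static bool
example_reached_by_hot_path_p (basic_block bb)
{
  hash_set<basic_block> hot_bbs;
  /* Worklist walk above: hash_set::add returns true for already-seen
     blocks, so each block is pushed at most once.  */
  find_bbs_reachable_by_hot_paths (&hot_bbs);
  return hot_bbs.contains (bb);
}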
2396 
2397 /* Sanity check partition hotness to ensure that basic blocks in
2398    the cold partition don't dominate basic blocks in the hot partition.
2399    If FLAG_ONLY is true, report violations as errors. Otherwise
2400    re-mark the dominated blocks as cold, since this is run after
2401    cfg optimizations that may make hot blocks previously reached
2402    by both hot and cold blocks now only reachable along cold paths.  */
2403 
2404 static auto_vec<basic_block>
2405 find_partition_fixes (bool flag_only)
2406 {
2407   basic_block bb;
2408   auto_vec<basic_block> bbs_to_fix;
2409   hash_set<basic_block> set;
2410 
2411   /* Callers check this.  */
2412   gcc_checking_assert (crtl->has_bb_partition);
2413 
2414   find_bbs_reachable_by_hot_paths (&set);
2415 
2416   FOR_EACH_BB_FN (bb, cfun)
2417     if (!set.contains (bb)
2418 	&& BB_PARTITION (bb) != BB_COLD_PARTITION)
2419       {
2420 	if (flag_only)
2421 	  error ("non-cold basic block %d reachable only "
2422 		 "by paths crossing the cold partition", bb->index);
2423 	else
2424 	  BB_SET_PARTITION (bb, BB_COLD_PARTITION);
2425 	bbs_to_fix.safe_push (bb);
2426       }
2427 
2428   return bbs_to_fix;
2429 }
2430 
2431 /* Perform cleanup on the hot/cold bb partitioning after optimization
2432    passes that modify the cfg.  */
2433 
2434 void
2435 fixup_partitions (void)
2436 {
2437   if (!crtl->has_bb_partition)
2438     return;
2439 
2440   /* Delete any blocks that became unreachable and weren't
2441      already cleaned up, for example during edge forwarding
2442      and convert_jumps_to_returns. This will expose more
2443      opportunities for fixing the partition boundaries here.
2444      Also, the calculation of the dominance graph during verification
2445      will assert if there are unreachable nodes.  */
2446   delete_unreachable_blocks ();
2447 
2448   /* If there are partitions, do a sanity check on them: A basic block in
2449      a cold partition cannot dominate a basic block in a hot partition.
2450      Fixup any that now violate this requirement, as a result of edge
2451      forwarding and unreachable block deletion.  */
2452   auto_vec<basic_block> bbs_to_fix = find_partition_fixes (false);
2453 
2454   /* Do the partition fixup after all necessary blocks have been converted
2455      to cold, so that we update the region crossings in the minimum number
2456      of places, which can require forcing edges to be non fallthru.  */
2457   if (! bbs_to_fix.is_empty ())
2458     {
2459       do
2460 	{
2461 	  basic_block bb = bbs_to_fix.pop ();
2462 	  fixup_new_cold_bb (bb);
2463 	}
2464       while (! bbs_to_fix.is_empty ());
2465 
2466       /* Fix up hot cold block grouping if needed.  */
2467       if (crtl->bb_reorder_complete && current_ir_type () == IR_RTL_CFGRTL)
2468 	{
2469 	  basic_block bb, first = NULL, second = NULL;
2470 	  int current_partition = BB_UNPARTITIONED;
2471 
2472 	  FOR_EACH_BB_FN (bb, cfun)
2473 	    {
2474 	      if (current_partition != BB_UNPARTITIONED
2475 		  && BB_PARTITION (bb) != current_partition)
2476 		{
2477 		  if (first == NULL)
2478 		    first = bb;
2479 		  else if (second == NULL)
2480 		    second = bb;
2481 		  else
2482 		    {
2483 		      /* If we switch partitions for the 3rd, 5th etc. time,
2484 			 move bbs first (inclusive) .. second (exclusive) right
2485 			 before bb.  */
2486 		      basic_block prev_first = first->prev_bb;
2487 		      basic_block prev_second = second->prev_bb;
2488 		      basic_block prev_bb = bb->prev_bb;
2489 		      prev_first->next_bb = second;
2490 		      second->prev_bb = prev_first;
2491 		      prev_second->next_bb = bb;
2492 		      bb->prev_bb = prev_second;
2493 		      prev_bb->next_bb = first;
2494 		      first->prev_bb = prev_bb;
2495 		      rtx_insn *prev_first_insn = PREV_INSN (BB_HEAD (first));
2496 		      rtx_insn *prev_second_insn
2497 			= PREV_INSN (BB_HEAD (second));
2498 		      rtx_insn *prev_bb_insn = PREV_INSN (BB_HEAD (bb));
2499 		      SET_NEXT_INSN (prev_first_insn) = BB_HEAD (second);
2500 		      SET_PREV_INSN (BB_HEAD (second)) = prev_first_insn;
2501 		      SET_NEXT_INSN (prev_second_insn) = BB_HEAD (bb);
2502 		      SET_PREV_INSN (BB_HEAD (bb)) = prev_second_insn;
2503 		      SET_NEXT_INSN (prev_bb_insn) = BB_HEAD (first);
2504 		      SET_PREV_INSN (BB_HEAD (first)) = prev_bb_insn;
2505 		      second = NULL;
2506 		    }
2507 		}
2508 	      current_partition = BB_PARTITION (bb);
2509 	    }
2510 	  gcc_assert (!second);
2511 	}
2512     }
2513 }
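
/* Editorial illustration of the regrouping loop above: with the block
   chain H1 C1 H2 C2, the third partition switch (at C2) moves the run
   FIRST .. SECOND-1 (here just C1) right before C2, giving
   H1 H2 C1 C2 - a single hot/cold transition.  */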
2514 
2515 /* Verify, in the basic block chain, that there is at most one switch
2516    between hot/cold partitions. This condition will not be true until
2517    after reorder_basic_blocks is called.  */
2518 
2519 static int
2520 verify_hot_cold_block_grouping (void)
2521 {
2522   basic_block bb;
2523   int err = 0;
2524   bool switched_sections = false;
2525   int current_partition = BB_UNPARTITIONED;
2526 
2527   /* Even after bb reordering is complete, we go into cfglayout mode
2528      again (in compgoto). Ensure we don't call this before going back
2529      into linearized RTL when any layout fixes would have been committed.  */
2530   if (!crtl->bb_reorder_complete
2531       || current_ir_type () != IR_RTL_CFGRTL)
2532     return err;
2533 
2534   FOR_EACH_BB_FN (bb, cfun)
2535     {
2536       if (current_partition != BB_UNPARTITIONED
2537           && BB_PARTITION (bb) != current_partition)
2538 	{
2539 	  if (switched_sections)
2540 	    {
2541 	      error ("multiple hot/cold transitions found (bb %i)",
2542 		     bb->index);
2543 	      err = 1;
2544 	    }
2545 	  else
2546             switched_sections = true;
2547 
2548           if (!crtl->has_bb_partition)
2549             error ("partition found but function partition flag not set");
2550 	}
2551       current_partition = BB_PARTITION (bb);
2552     }
2553 
2554   return err;
2555 }
2556 
2557 
2558 /* Perform several checks on the edges out of each block, such as
2559    the consistency of the branch probabilities, the correctness
2560    of hot/cold partition crossing edges, and the number of expected
2561    successor edges.  Also verify that the dominance relationship
2562    between hot/cold blocks is sane.  */
2563 
2564 static int
2565 rtl_verify_edges (void)
2566 {
2567   int err = 0;
2568   basic_block bb;
2569 
2570   FOR_EACH_BB_REVERSE_FN (bb, cfun)
2571     {
2572       int n_fallthru = 0, n_branch = 0, n_abnormal_call = 0, n_sibcall = 0;
2573       int n_eh = 0, n_abnormal = 0;
2574       edge e, fallthru = NULL;
2575       edge_iterator ei;
2576       rtx note;
2577       bool has_crossing_edge = false;
2578 
2579       if (JUMP_P (BB_END (bb))
2580 	  && (note = find_reg_note (BB_END (bb), REG_BR_PROB, NULL_RTX))
2581 	  && EDGE_COUNT (bb->succs) >= 2
2582 	  && any_condjump_p (BB_END (bb)))
2583 	{
2584 	  if (!BRANCH_EDGE (bb)->probability.initialized_p ())
2585 	    {
2586 	      if (profile_status_for_fn (cfun) != PROFILE_ABSENT)
2587 		{
2588 		  error ("verify_flow_info: "
2589 			 "REG_BR_PROB is set but cfg probability is not");
2590 		  err = 1;
2591 		}
2592 	    }
2593 	  else if (XINT (note, 0)
2594 	           != BRANCH_EDGE (bb)->probability.to_reg_br_prob_note ()
2595 	           && profile_status_for_fn (cfun) != PROFILE_ABSENT)
2596 	    {
2597 	      error ("verify_flow_info: REG_BR_PROB does not match cfg %i %i",
2598 		     XINT (note, 0),
2599 		     BRANCH_EDGE (bb)->probability.to_reg_br_prob_note ());
2600 	      err = 1;
2601 	    }
2602 	}
2603 
2604       FOR_EACH_EDGE (e, ei, bb->succs)
2605 	{
2606 	  bool is_crossing;
2607 
2608 	  if (e->flags & EDGE_FALLTHRU)
2609 	    n_fallthru++, fallthru = e;
2610 
2611 	  is_crossing = (BB_PARTITION (e->src) != BB_PARTITION (e->dest)
2612 			 && e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
2613 			 && e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun));
2614           has_crossing_edge |= is_crossing;
2615 	  if (e->flags & EDGE_CROSSING)
2616 	    {
2617 	      if (!is_crossing)
2618 		{
2619 		  error ("EDGE_CROSSING incorrectly set across same section");
2620 		  err = 1;
2621 		}
2622 	      if (e->flags & EDGE_FALLTHRU)
2623 		{
2624 		  error ("fallthru edge crosses section boundary in bb %i",
2625 			 e->src->index);
2626 		  err = 1;
2627 		}
2628 	      if (e->flags & EDGE_EH)
2629 		{
2630 		  error ("EH edge crosses section boundary in bb %i",
2631 			 e->src->index);
2632 		  err = 1;
2633 		}
2634               if (JUMP_P (BB_END (bb)) && !CROSSING_JUMP_P (BB_END (bb)))
2635 		{
2636 		  error ("No region crossing jump at section boundary in bb %i",
2637 			 bb->index);
2638 		  err = 1;
2639 		}
2640 	    }
2641 	  else if (is_crossing)
2642 	    {
2643 	      error ("EDGE_CROSSING missing across section boundary");
2644 	      err = 1;
2645 	    }
2646 
2647 	  if ((e->flags & ~(EDGE_DFS_BACK
2648 			    | EDGE_CAN_FALLTHRU
2649 			    | EDGE_IRREDUCIBLE_LOOP
2650 			    | EDGE_LOOP_EXIT
2651 			    | EDGE_CROSSING
2652 			    | EDGE_PRESERVE)) == 0)
2653 	    n_branch++;
2654 
2655 	  if (e->flags & EDGE_ABNORMAL_CALL)
2656 	    n_abnormal_call++;
2657 
2658 	  if (e->flags & EDGE_SIBCALL)
2659 	    n_sibcall++;
2660 
2661 	  if (e->flags & EDGE_EH)
2662 	    n_eh++;
2663 
2664 	  if (e->flags & EDGE_ABNORMAL)
2665 	    n_abnormal++;
2666 	}
2667 
2668       if (!has_crossing_edge
2669 	  && JUMP_P (BB_END (bb))
2670 	  && CROSSING_JUMP_P (BB_END (bb)))
2671 	{
2672 	  print_rtl_with_bb (stderr, get_insns (), TDF_BLOCKS | TDF_DETAILS);
2673 	  error ("Region crossing jump across same section in bb %i",
2674 		 bb->index);
2675 	  err = 1;
2676 	}
2677 
2678       if (n_eh && !find_reg_note (BB_END (bb), REG_EH_REGION, NULL_RTX))
2679 	{
2680 	  error ("missing REG_EH_REGION note at the end of bb %i", bb->index);
2681 	  err = 1;
2682 	}
2683       if (n_eh > 1)
2684 	{
2685 	  error ("too many exception handling edges in bb %i", bb->index);
2686 	  err = 1;
2687 	}
2688       if (n_branch
2689 	  && (!JUMP_P (BB_END (bb))
2690 	      || (n_branch > 1 && (any_uncondjump_p (BB_END (bb))
2691 				   || any_condjump_p (BB_END (bb))))))
2692 	{
2693 	  error ("too many outgoing branch edges from bb %i", bb->index);
2694 	  err = 1;
2695 	}
2696       if (n_fallthru && any_uncondjump_p (BB_END (bb)))
2697 	{
2698 	  error ("fallthru edge after unconditional jump in bb %i", bb->index);
2699 	  err = 1;
2700 	}
2701       if (n_branch != 1 && any_uncondjump_p (BB_END (bb)))
2702 	{
2703 	  error ("wrong number of branch edges after unconditional jump"
2704 		 " in bb %i", bb->index);
2705 	  err = 1;
2706 	}
2707       if (n_branch != 1 && any_condjump_p (BB_END (bb))
2708 	  && JUMP_LABEL (BB_END (bb)) != BB_HEAD (fallthru->dest))
2709 	{
2710 	  error ("wrong number of branch edges after conditional jump"
2711 		 " in bb %i", bb->index);
2712 	  err = 1;
2713 	}
2714       if (n_abnormal_call && !CALL_P (BB_END (bb)))
2715 	{
2716 	  error ("abnormal call edges for non-call insn in bb %i", bb->index);
2717 	  err = 1;
2718 	}
2719       if (n_sibcall && !CALL_P (BB_END (bb)))
2720 	{
2721 	  error ("sibcall edges for non-call insn in bb %i", bb->index);
2722 	  err = 1;
2723 	}
2724       if (n_abnormal > n_eh
2725 	  && !(CALL_P (BB_END (bb))
2726 	       && n_abnormal == n_abnormal_call + n_sibcall)
2727 	  && (!JUMP_P (BB_END (bb))
2728 	      || any_condjump_p (BB_END (bb))
2729 	      || any_uncondjump_p (BB_END (bb))))
2730 	{
2731 	  error ("abnormal edges for no purpose in bb %i", bb->index);
2732 	  err = 1;
2733 	}
2734 
2735       int has_eh = -1;
2736       FOR_EACH_EDGE (e, ei, bb->preds)
2737 	{
2738 	  if (has_eh == -1)
2739 	    has_eh = (e->flags & EDGE_EH);
2740 	  if ((e->flags & EDGE_EH) == has_eh)
2741 	    continue;
2742 	  error ("EH incoming edge mixed with non-EH incoming edges "
2743 		 "in bb %i", bb->index);
2744 	  err = 1;
2745 	  break;
2746 	}
2747     }
2748 
2749   /* If there are partitions, do a sanity check on them: A basic block in
2750      a cold partition cannot dominate a basic block in a hot partition.  */
2751   if (crtl->has_bb_partition && !err
2752       && current_ir_type () == IR_RTL_CFGLAYOUT)
2753     {
2754       auto_vec<basic_block> bbs_to_fix = find_partition_fixes (true);
2755       err = !bbs_to_fix.is_empty ();
2756     }
2757 
2758   /* Clean up.  */
2759   return err;
2760 }
2761 
2762 /* Checks on the instructions within blocks. Currently checks that each
2763    block starts with a basic block note, and that basic block notes and
2764    control flow jumps are not found in the middle of the block.  */
2765 
2766 static int
2767 rtl_verify_bb_insns (void)
2768 {
2769   rtx_insn *x;
2770   int err = 0;
2771   basic_block bb;
2772 
2773   FOR_EACH_BB_REVERSE_FN (bb, cfun)
2774     {
2775       /* Now check the header of the basic
2776 	 block.  It ought to contain an optional CODE_LABEL followed
2777 	 by a NOTE_BASIC_BLOCK.  */
2778       x = BB_HEAD (bb);
2779       if (LABEL_P (x))
2780 	{
2781 	  if (BB_END (bb) == x)
2782 	    {
2783 	      error ("NOTE_INSN_BASIC_BLOCK is missing for block %d",
2784 		     bb->index);
2785 	      err = 1;
2786 	    }
2787 
2788 	  x = NEXT_INSN (x);
2789 	}
2790 
2791       if (!NOTE_INSN_BASIC_BLOCK_P (x) || NOTE_BASIC_BLOCK (x) != bb)
2792 	{
2793 	  error ("NOTE_INSN_BASIC_BLOCK is missing for block %d",
2794 		 bb->index);
2795 	  err = 1;
2796 	}
2797 
2798       if (BB_END (bb) == x)
2799 	/* Do checks for empty blocks here.  */
2800 	;
2801       else
2802 	for (x = NEXT_INSN (x); x; x = NEXT_INSN (x))
2803 	  {
2804 	    if (NOTE_INSN_BASIC_BLOCK_P (x))
2805 	      {
2806 		error ("NOTE_INSN_BASIC_BLOCK %d in middle of basic block %d",
2807 		       INSN_UID (x), bb->index);
2808 		err = 1;
2809 	      }
2810 
2811 	    if (x == BB_END (bb))
2812 	      break;
2813 
2814 	    if (control_flow_insn_p (x))
2815 	      {
2816 		error ("in basic block %d:", bb->index);
2817 		fatal_insn ("flow control insn inside a basic block", x);
2818 	      }
2819 	  }
2820     }
2821 
2822   /* Clean up.  */
2823   return err;
2824 }
2825 
2826 /* Verify that block pointers for instructions in basic blocks, headers and
2827    footers are set appropriately.  */
2828 
2829 static int
2830 rtl_verify_bb_pointers (void)
2831 {
2832   int err = 0;
2833   basic_block bb;
2834 
2835   /* Check the general integrity of the basic blocks.  */
2836   FOR_EACH_BB_REVERSE_FN (bb, cfun)
2837     {
2838       rtx_insn *insn;
2839 
2840       if (!(bb->flags & BB_RTL))
2841 	{
2842 	  error ("BB_RTL flag not set for block %d", bb->index);
2843 	  err = 1;
2844 	}
2845 
2846       FOR_BB_INSNS (bb, insn)
2847 	if (BLOCK_FOR_INSN (insn) != bb)
2848 	  {
2849 	    error ("insn %d basic block pointer is %d, should be %d",
2850 		   INSN_UID (insn),
2851 		   BLOCK_FOR_INSN (insn) ? BLOCK_FOR_INSN (insn)->index : 0,
2852 		   bb->index);
2853 	    err = 1;
2854 	  }
2855 
2856       for (insn = BB_HEADER (bb); insn; insn = NEXT_INSN (insn))
2857 	if (!BARRIER_P (insn)
2858 	    && BLOCK_FOR_INSN (insn) != NULL)
2859 	  {
2860 	    error ("insn %d in header of bb %d has non-NULL basic block",
2861 		   INSN_UID (insn), bb->index);
2862 	    err = 1;
2863 	  }
2864       for (insn = BB_FOOTER (bb); insn; insn = NEXT_INSN (insn))
2865 	if (!BARRIER_P (insn)
2866 	    && BLOCK_FOR_INSN (insn) != NULL)
2867 	  {
2868 	    error ("insn %d in footer of bb %d has non-NULL basic block",
2869 		   INSN_UID (insn), bb->index);
2870 	    err = 1;
2871 	  }
2872     }
2873 
2874   /* Clean up.  */
2875   return err;
2876 }
2877 
2878 /* Verify the CFG and RTL consistency common for both underlying RTL and
2879    cfglayout RTL.
2880 
2881    Currently it does following checks:
2882    Currently it does the following checks:
2883    - overlapping of basic blocks
2884    - insns with wrong BLOCK_FOR_INSN pointers
2885    - headers of basic blocks (the NOTE_INSN_BASIC_BLOCK note)
2886    - tails of basic blocks (ensure that boundary is necessary)
2887    - scans body of the basic block for JUMP_INSN, CODE_LABEL
2888      and NOTE_INSN_BASIC_BLOCK
2889    - verify that no fall_thru edge crosses hot/cold partition boundaries
2890    - verify that there are no pending RTL branch predictions
2891    - verify that hot blocks are not dominated by cold blocks
2892 
2893    In the future it can be extended to check a lot of other things as well
2894    (reachability of basic blocks, liveness information, etc.).  */
2895 
2896 static int
2897 rtl_verify_flow_info_1 (void)
2898 {
2899   int err = 0;
2900 
2901   err |= rtl_verify_bb_pointers ();
2902 
2903   err |= rtl_verify_bb_insns ();
2904 
2905   err |= rtl_verify_edges ();
2906 
2907   return err;
2908 }
2909 
2910 /* Walk the instruction chain and verify that bb head/end pointers
2911   are correct, and that instructions are in exactly one bb and have
2912   correct block pointers.  */
2913 
2914 static int
2915 rtl_verify_bb_insn_chain (void)
2916 {
2917   basic_block bb;
2918   int err = 0;
2919   rtx_insn *x;
2920   rtx_insn *last_head = get_last_insn ();
2921   basic_block *bb_info;
2922   const int max_uid = get_max_uid ();
2923 
2924   bb_info = XCNEWVEC (basic_block, max_uid);
2925 
2926   FOR_EACH_BB_REVERSE_FN (bb, cfun)
2927     {
2928       rtx_insn *head = BB_HEAD (bb);
2929       rtx_insn *end = BB_END (bb);
2930 
2931       for (x = last_head; x != NULL_RTX; x = PREV_INSN (x))
2932 	{
2933 	  /* Verify the end of the basic block is in the INSN chain.  */
2934 	  if (x == end)
2935 	    break;
2936 
2937 	  /* And that the code outside of basic blocks has NULL bb field.  */
2938 	  if (!BARRIER_P (x)
2939 	      && BLOCK_FOR_INSN (x) != NULL)
2940 	    {
2941 	      error ("insn %d outside of basic blocks has non-NULL bb field",
2942 		     INSN_UID (x));
2943 	      err = 1;
2944 	    }
2945 	}
2946 
2947       if (!x)
2948 	{
2949 	  error ("end insn %d for block %d not found in the insn stream",
2950 		 INSN_UID (end), bb->index);
2951 	  err = 1;
2952 	}
2953 
2954       /* Work backwards from the end to the head of the basic block
2955 	 to verify the head is in the RTL chain.  */
2956       for (; x != NULL_RTX; x = PREV_INSN (x))
2957 	{
2958 	  /* While walking over the insn chain, verify insns appear
2959 	     in only one basic block.  */
2960 	  if (bb_info[INSN_UID (x)] != NULL)
2961 	    {
2962 	      error ("insn %d is in multiple basic blocks (%d and %d)",
2963 		     INSN_UID (x), bb->index, bb_info[INSN_UID (x)]->index);
2964 	      err = 1;
2965 	    }
2966 
2967 	  bb_info[INSN_UID (x)] = bb;
2968 
2969 	  if (x == head)
2970 	    break;
2971 	}
2972       if (!x)
2973 	{
2974 	  error ("head insn %d for block %d not found in the insn stream",
2975 		 INSN_UID (head), bb->index);
2976 	  err = 1;
2977 	}
2978 
2979       last_head = PREV_INSN (x);
2980     }
2981 
2982   for (x = last_head; x != NULL_RTX; x = PREV_INSN (x))
2983     {
2984       /* Check that the code before the first basic block has NULL
2985 	 bb field.  */
2986       if (!BARRIER_P (x)
2987 	  && BLOCK_FOR_INSN (x) != NULL)
2988 	{
2989 	  error ("insn %d outside of basic blocks has non-NULL bb field",
2990 		 INSN_UID (x));
2991 	  err = 1;
2992 	}
2993     }
2994   free (bb_info);
2995 
2996   return err;
2997 }
2998 
2999 /* Verify that fallthru edges point to adjacent blocks in layout order and
3000    that barriers exist after non-fallthru blocks.  */
3001 
3002 static int
3003 rtl_verify_fallthru (void)
3004 {
3005   basic_block bb;
3006   int err = 0;
3007 
3008   FOR_EACH_BB_REVERSE_FN (bb, cfun)
3009     {
3010       edge e;
3011 
3012       e = find_fallthru_edge (bb->succs);
3013       if (!e)
3014 	{
3015 	  rtx_insn *insn;
3016 
3017 	  /* Ensure existence of barrier in BB with no fallthru edges.  */
3018 	  for (insn = NEXT_INSN (BB_END (bb)); ; insn = NEXT_INSN (insn))
3019 	    {
3020 	      if (!insn || NOTE_INSN_BASIC_BLOCK_P (insn))
3021 		{
3022 		  error ("missing barrier after block %i", bb->index);
3023 		  err = 1;
3024 		  break;
3025 		}
3026 	      if (BARRIER_P (insn))
3027 		break;
3028 	    }
3029 	}
3030       else if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
3031 	       && e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
3032 	{
3033 	  rtx_insn *insn;
3034 
3035 	  if (e->src->next_bb != e->dest)
3036 	    {
3037 	      error
3038 		("verify_flow_info: Incorrect blocks for fallthru %i->%i",
3039 		 e->src->index, e->dest->index);
3040 	      err = 1;
3041 	    }
3042 	  else
3043 	    for (insn = NEXT_INSN (BB_END (e->src)); insn != BB_HEAD (e->dest);
3044 		 insn = NEXT_INSN (insn))
3045 	      if (BARRIER_P (insn) || NONDEBUG_INSN_P (insn))
3046 		{
3047 		  error ("verify_flow_info: Incorrect fallthru %i->%i",
3048 			 e->src->index, e->dest->index);
3049 		  error ("wrong insn in the fallthru edge");
3050 		  debug_rtx (insn);
3051 		  err = 1;
3052 		}
3053 	}
3054     }
3055 
3056    return err;
3057 }
3058 
3059 /* Verify that blocks are laid out in consecutive order. While walking the
3060    instructions, verify that all expected instructions are inside the basic
3061    blocks, and that all returns are followed by barriers.  */
3062 
3063 static int
3064 rtl_verify_bb_layout (void)
3065 {
3066   basic_block bb;
3067   int err = 0;
3068   rtx_insn *x, *y;
3069   int num_bb_notes;
3070   rtx_insn * const rtx_first = get_insns ();
3071   basic_block last_bb_seen = ENTRY_BLOCK_PTR_FOR_FN (cfun), curr_bb = NULL;
3072 
3073   num_bb_notes = 0;
3074 
3075   for (x = rtx_first; x; x = NEXT_INSN (x))
3076     {
3077       if (NOTE_INSN_BASIC_BLOCK_P (x))
3078 	{
3079 	  bb = NOTE_BASIC_BLOCK (x);
3080 
3081 	  num_bb_notes++;
3082 	  if (bb != last_bb_seen->next_bb)
3083 	    internal_error ("basic blocks not laid down consecutively");
3084 
3085 	  curr_bb = last_bb_seen = bb;
3086 	}
3087 
3088       if (!curr_bb)
3089 	{
3090 	  switch (GET_CODE (x))
3091 	    {
3092 	    case BARRIER:
3093 	    case NOTE:
3094 	      break;
3095 
3096 	    case CODE_LABEL:
3097 	      /* An ADDR_VEC is placed outside any basic block.  */
3098 	      if (NEXT_INSN (x)
3099 		  && JUMP_TABLE_DATA_P (NEXT_INSN (x)))
3100 		x = NEXT_INSN (x);
3101 
3102 	      /* But in any case, non-deletable labels can appear anywhere.  */
3103 	      break;
3104 
3105 	    default:
3106 	      fatal_insn ("insn outside basic block", x);
3107 	    }
3108 	}
3109 
3110       if (JUMP_P (x)
3111 	  && returnjump_p (x) && ! condjump_p (x)
3112 	  && ! ((y = next_nonnote_nondebug_insn (x))
3113 		&& BARRIER_P (y)))
3114 	    fatal_insn ("return not followed by barrier", x);
3115 
3116       if (curr_bb && x == BB_END (curr_bb))
3117 	curr_bb = NULL;
3118     }
3119 
3120   if (num_bb_notes != n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS)
3121     internal_error
3122       ("number of bb notes in insn chain (%d) != n_basic_blocks (%d)",
3123        num_bb_notes, n_basic_blocks_for_fn (cfun));
3124 
3125    return err;
3126 }
3127 
3128 /* Verify the CFG and RTL consistency common for both underlying RTL and
3129    cfglayout RTL, plus consistency checks specific to linearized RTL mode.
3130 
3131    Currently it does following checks:
3132    Currently it does the following checks:
3133    - test head/end pointers
3134    - check that blocks are laid out in consecutive order
3135    - check that all insns are in the basic blocks
3136      (except the switch handling code, barriers and notes)
3137    - check that all returns are followed by barriers
3138    - check that all fallthru edges point to the adjacent blocks
3139    - verify that there is a single hot/cold partition boundary after bbro  */
3140 
3141 static int
3142 rtl_verify_flow_info (void)
3143 {
3144   int err = 0;
3145 
3146   err |= rtl_verify_flow_info_1 ();
3147 
3148   err |= rtl_verify_bb_insn_chain ();
3149 
3150   err |= rtl_verify_fallthru ();
3151 
3152   err |= rtl_verify_bb_layout ();
3153 
3154   err |= verify_hot_cold_block_grouping ();
3155 
3156   return err;
3157 }
3158 
3159 /* Assume that the preceding pass has possibly eliminated jump instructions
3160    or converted the unconditional jumps.  Eliminate the dead edges from the
3161    CFG.  Return true if any edges were eliminated.  */
3162 
3163 bool
3164 purge_dead_edges (basic_block bb)
3165 {
3166   edge e;
3167   rtx_insn *insn = BB_END (bb);
3168   rtx note;
3169   bool purged = false;
3170   bool found;
3171   edge_iterator ei;
3172 
3173   if ((DEBUG_INSN_P (insn) || NOTE_P (insn)) && insn != BB_HEAD (bb))
3174     do
3175       insn = PREV_INSN (insn);
3176     while ((DEBUG_INSN_P (insn) || NOTE_P (insn)) && insn != BB_HEAD (bb));
3177 
3178   /* If this instruction cannot trap, remove REG_EH_REGION notes.  */
3179   if (NONJUMP_INSN_P (insn)
3180       && (note = find_reg_note (insn, REG_EH_REGION, NULL)))
3181     {
3182       rtx eqnote;
3183 
3184       if (! may_trap_p (PATTERN (insn))
3185 	  || ((eqnote = find_reg_equal_equiv_note (insn))
3186 	      && ! may_trap_p (XEXP (eqnote, 0))))
3187 	remove_note (insn, note);
3188     }
3189 
3190   /* Cleanup abnormal edges caused by exceptions or non-local gotos.  */
3191   for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
3192     {
3193       bool remove = false;
3194 
3195       /* There are three types of edges we need to handle correctly here: EH
3196 	 edges, abnormal call EH edges, and abnormal call non-EH edges.  The
3197 	 latter can appear when nonlocal gotos are used.  */
3198       if (e->flags & EDGE_ABNORMAL_CALL)
3199 	{
3200 	  if (!CALL_P (insn))
3201 	    remove = true;
3202 	  else if (can_nonlocal_goto (insn))
3203 	    ;
3204 	  else if ((e->flags & EDGE_EH) && can_throw_internal (insn))
3205 	    ;
3206 	  else if (flag_tm && find_reg_note (insn, REG_TM, NULL))
3207 	    ;
3208 	  else
3209 	    remove = true;
3210 	}
3211       else if (e->flags & EDGE_EH)
3212 	remove = !can_throw_internal (insn);
3213 
3214       if (remove)
3215 	{
3216 	  remove_edge (e);
3217 	  df_set_bb_dirty (bb);
3218 	  purged = true;
3219 	}
3220       else
3221 	ei_next (&ei);
3222     }
3223 
3224   if (JUMP_P (insn))
3225     {
3226       rtx note;
3227       edge b,f;
3228       edge_iterator ei;
3229 
3230       /* We only care about conditional jumps and simplejumps.  */
3231       if (!any_condjump_p (insn)
3232 	  && !returnjump_p (insn)
3233 	  && !simplejump_p (insn))
3234 	return purged;
3235 
3236       /* Branch probability/prediction notes are defined only for
3237 	 condjumps.  We've possibly turned the condjump into a simplejump.  */
3238       if (simplejump_p (insn))
3239 	{
3240 	  note = find_reg_note (insn, REG_BR_PROB, NULL);
3241 	  if (note)
3242 	    remove_note (insn, note);
3243 	  while ((note = find_reg_note (insn, REG_BR_PRED, NULL)))
3244 	    remove_note (insn, note);
3245 	}
3246 
3247       for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
3248 	{
3249 	  /* Don't let abnormal flags leak from computed jumps turned
3250 	     into simplejumps.  */
3251 
3252 	  e->flags &= ~EDGE_ABNORMAL;
3253 
3254 	  /* See if this edge is one we should keep.  */
3255 	  if ((e->flags & EDGE_FALLTHRU) && any_condjump_p (insn))
3256 	    /* A conditional jump can fall through into the next
3257 	       block, so we should keep the edge.  */
3258 	    {
3259 	      ei_next (&ei);
3260 	      continue;
3261 	    }
3262 	  else if (e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun)
3263 		   && BB_HEAD (e->dest) == JUMP_LABEL (insn))
3264 	    /* If the destination block is the target of the jump,
3265 	       keep the edge.  */
3266 	    {
3267 	      ei_next (&ei);
3268 	      continue;
3269 	    }
3270 	  else if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun)
3271 		   && returnjump_p (insn))
3272 	    /* If the destination block is the exit block, and this
3273 	       instruction is a return, then keep the edge.  */
3274 	    {
3275 	      ei_next (&ei);
3276 	      continue;
3277 	    }
3278 	  else if ((e->flags & EDGE_EH) && can_throw_internal (insn))
3279 	    /* Keep the edges that correspond to exceptions thrown by
3280 	       this instruction and rematerialize the EDGE_ABNORMAL
3281 	       flag we just cleared above.  */
3282 	    {
3283 	      e->flags |= EDGE_ABNORMAL;
3284 	      ei_next (&ei);
3285 	      continue;
3286 	    }
3287 
3288 	  /* We do not need this edge.  */
3289 	  df_set_bb_dirty (bb);
3290 	  purged = true;
3291 	  remove_edge (e);
3292 	}
3293 
3294       if (EDGE_COUNT (bb->succs) == 0 || !purged)
3295 	return purged;
3296 
3297       if (dump_file)
3298 	fprintf (dump_file, "Purged edges from bb %i\n", bb->index);
3299 
3300       if (!optimize)
3301 	return purged;
3302 
3303       /* Redistribute probabilities.  */
3304       if (single_succ_p (bb))
3305 	{
3306 	  single_succ_edge (bb)->probability = profile_probability::always ();
3307 	}
3308       else
3309 	{
3310 	  note = find_reg_note (insn, REG_BR_PROB, NULL);
3311 	  if (!note)
3312 	    return purged;
3313 
3314 	  b = BRANCH_EDGE (bb);
3315 	  f = FALLTHRU_EDGE (bb);
3316 	  b->probability = profile_probability::from_reg_br_prob_note
3317 					 (XINT (note, 0));
3318 	  f->probability = b->probability.invert ();
3319 	}
3320 
3321       return purged;
3322     }
3323   else if (CALL_P (insn) && SIBLING_CALL_P (insn))
3324     {
3325       /* First, there should not be any EH or ABCALL edges resulting
3326 	 from non-local gotos and the like.  If there were, we shouldn't
3327 	 have created the sibcall in the first place.  Second, there
3328 	 should of course never have been a fallthru edge.  */
3329       gcc_assert (single_succ_p (bb));
3330       gcc_assert (single_succ_edge (bb)->flags
3331 		  == (EDGE_SIBCALL | EDGE_ABNORMAL));
3332 
3333       return false;
3334     }
3335 
3336   /* If we don't see a jump insn, we don't know exactly why the block would
3337      have been broken at this point.  Look for a simple, non-fallthru edge,
3338      as these are only created by conditional branches.  If we find such an
3339      edge we know that there used to be a jump here and can then safely
3340      remove all non-fallthru edges.  */
3341   found = false;
3342   FOR_EACH_EDGE (e, ei, bb->succs)
3343     if (! (e->flags & (EDGE_COMPLEX | EDGE_FALLTHRU)))
3344       {
3345 	found = true;
3346 	break;
3347       }
3348 
3349   if (!found)
3350     return purged;
3351 
3352   /* Remove all but the fake and fallthru edges.  The fake edge may be
3353      the only successor for this block in the case of noreturn
3354      calls.  */
3355   for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
3356     {
3357       if (!(e->flags & (EDGE_FALLTHRU | EDGE_FAKE)))
3358 	{
3359 	  df_set_bb_dirty (bb);
3360 	  remove_edge (e);
3361 	  purged = true;
3362 	}
3363       else
3364 	ei_next (&ei);
3365     }
3366 
3367   gcc_assert (single_succ_p (bb));
3368 
3369   single_succ_edge (bb)->probability = profile_probability::always ();
3370 
3371   if (dump_file)
3372     fprintf (dump_file, "Purged non-fallthru edges from bb %i\n",
3373 	     bb->index);
3374   return purged;
3375 }
3376 
3377 /* Search all basic blocks for potentially dead edges and purge them.  Return
3378    true if some edge has been eliminated.  */
3379 
3380 bool
3381 purge_all_dead_edges (void)
3382 {
3383   bool purged = false;
3384   basic_block bb;
3385 
3386   FOR_EACH_BB_FN (bb, cfun)
3387     {
3388       bool purged_here = purge_dead_edges (bb);
3389 
3390       purged |= purged_here;
3391     }
3392 
3393   return purged;
3394 }
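
/* Illustrative sketch (editorial addition, not part of GCC): a pass
   that has just simplified insns would typically follow up with the
   purge, so that edges no longer justified by the insn stream go away
   before the next verification.  "changed" is a hypothetical flag set
   by the simplification loop:

     if (changed)
       {
	 purge_all_dead_edges ();
	 checking_verify_flow_info ();
       }
*/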
3395 
3396 /* This is used by a few passes that emit some instructions after abnormal
3397    calls, moving the basic block's end, while they in fact want to emit
3398    them on the fallthru edge.  Look for abnormal call edges, search backward
3399    for the call in the block, and insert the instructions on the edge instead.
3400 
3401    Similarly, handle instructions throwing exceptions internally.
3402 
3403    Return true when instructions have been found and inserted on edges.  */
3404 
3405 bool
3406 fixup_abnormal_edges (void)
3407 {
3408   bool inserted = false;
3409   basic_block bb;
3410 
3411   FOR_EACH_BB_FN (bb, cfun)
3412     {
3413       edge e;
3414       edge_iterator ei;
3415 
3416       /* Look for cases we are interested in - calls or instructions causing
3417          exceptions.  */
3418       FOR_EACH_EDGE (e, ei, bb->succs)
3419 	if ((e->flags & EDGE_ABNORMAL_CALL)
3420 	    || ((e->flags & (EDGE_ABNORMAL | EDGE_EH))
3421 		== (EDGE_ABNORMAL | EDGE_EH)))
3422 	  break;
3423 
3424       if (e && !CALL_P (BB_END (bb)) && !can_throw_internal (BB_END (bb)))
3425 	{
3426 	  rtx_insn *insn;
3427 
3428 	  /* Get past the new insns generated.  Allow notes, as the insns
3429 	     may already be deleted.  */
3430 	  insn = BB_END (bb);
3431 	  while ((NONJUMP_INSN_P (insn) || NOTE_P (insn))
3432 		 && !can_throw_internal (insn)
3433 		 && insn != BB_HEAD (bb))
3434 	    insn = PREV_INSN (insn);
3435 
3436 	  if (CALL_P (insn) || can_throw_internal (insn))
3437 	    {
3438 	      rtx_insn *stop, *next;
3439 
3440 	      e = find_fallthru_edge (bb->succs);
3441 
3442 	      stop = NEXT_INSN (BB_END (bb));
3443 	      BB_END (bb) = insn;
3444 
3445 	      for (insn = NEXT_INSN (insn); insn != stop; insn = next)
3446 		{
3447 		  next = NEXT_INSN (insn);
3448 		  if (INSN_P (insn))
3449 		    {
3450 		      delete_insn (insn);
3451 
3452 		      /* Sometimes there's still the return value USE.
3453 			 If it's placed after a trapping call (i.e. that
3454 			 call is the last insn anyway), we have no fallthru
3455 			 edge.  Simply delete this use and don't try to insert
3456 			 on the non-existent edge.
3457 			 Similarly, sometimes a call that can throw is
3458 			 followed in the source with __builtin_unreachable (),
3459 			 meaning that there is UB if the call returns rather
3460 			 than throws.  If there weren't any instructions
3461 			 following such calls before, supposedly even the ones
3462 			 we've deleted aren't significant and can be
3463 			 removed.  */
3464 		      if (e)
3465 			{
3466 			  /* We're not deleting it, we're moving it.  */
3467 			  insn->set_undeleted ();
3468 			  SET_PREV_INSN (insn) = NULL_RTX;
3469 			  SET_NEXT_INSN (insn) = NULL_RTX;
3470 
3471 			  insert_insn_on_edge (insn, e);
3472 			  inserted = true;
3473 			}
3474 		    }
3475 		  else if (!BARRIER_P (insn))
3476 		    set_block_for_insn (insn, NULL);
3477 		}
3478 	    }
3479 
3480 	  /* It may be that we don't find any trapping insn.  In this
3481 	     case we discovered quite late that the insn that had been
3482 	     marked as can_throw_internal in fact couldn't trap at all.
3483 	     So we should delete the EH edges out of the block.  */
3484 	  else
3485 	    purge_dead_edges (bb);
3486 	}
3487     }
3488 
3489   return inserted;
3490 }
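
/* Illustrative sketch (editorial addition, not part of GCC) of what
   fixup_abnormal_edges repairs.  A pass emitted I1 and I2 after a call
   whose abnormal/EH edges leave at the call itself, extending the
   block past the call:

     BB:  ... call_insn I1 I2        (abnormal edges leave at call_insn)

   Afterwards the block again ends at the call, and I1/I2 sit on the
   fallthru edge, to be materialized by commit_edge_insertions:

     BB:  ... call_insn              (fallthru edge now carries I1 I2)
*/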
3491 
3492 /* Delete the unconditional jump INSN and adjust the CFG correspondingly.
3493    Note that the INSN should be deleted *after* removing dead edges, so
3494    that the kept edge is the fallthrough edge for a (set (pc) (pc))
3495    but not for a (set (pc) (label_ref FOO)).  */
3496 
3497 void
3498 update_cfg_for_uncondjump (rtx_insn *insn)
3499 {
3500   basic_block bb = BLOCK_FOR_INSN (insn);
3501   gcc_assert (BB_END (bb) == insn);
3502 
3503   purge_dead_edges (bb);
3504 
3505   if (current_ir_type () != IR_RTL_CFGLAYOUT)
3506     {
3507       if (!find_fallthru_edge (bb->succs))
3508 	{
3509 	  auto barrier = next_nonnote_nondebug_insn (insn);
3510 	  if (!barrier || !BARRIER_P (barrier))
3511 	    emit_barrier_after (insn);
3512 	}
3513       return;
3514     }
3515 
3516   delete_insn (insn);
3517   if (EDGE_COUNT (bb->succs) == 1)
3518     {
3519       rtx_insn *insn;
3520 
3521       single_succ_edge (bb)->flags |= EDGE_FALLTHRU;
3522 
3523       /* Remove barriers from the footer if there are any.  */
3524       for (insn = BB_FOOTER (bb); insn; insn = NEXT_INSN (insn))
3525 	if (BARRIER_P (insn))
3526 	  {
3527 	    if (PREV_INSN (insn))
3528 	      SET_NEXT_INSN (PREV_INSN (insn)) = NEXT_INSN (insn);
3529 	    else
3530 	      BB_FOOTER (bb) = NEXT_INSN (insn);
3531 	    if (NEXT_INSN (insn))
3532 	      SET_PREV_INSN (NEXT_INSN (insn)) = PREV_INSN (insn);
3533 	  }
3534 	else if (LABEL_P (insn))
3535 	  break;
3536     }
3537 }
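
/* Illustrative sketch (editorial addition, not part of GCC): a typical
   caller has just simplified the jump ending a block, e.g. turned a
   conditional jump into an unconditional one, and repairs the CFG
   with

     update_cfg_for_uncondjump (jump);

   where "jump" stands for the (possibly no-op) unconditional jump
   that now ends its block.  */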
3538 
3539 /* Cut the insns from FIRST to LAST out of the insns stream.  */
3540 
3541 rtx_insn *
3542 unlink_insn_chain (rtx_insn *first, rtx_insn *last)
3543 {
3544   rtx_insn *prevfirst = PREV_INSN (first);
3545   rtx_insn *nextlast = NEXT_INSN (last);
3546 
3547   SET_PREV_INSN (first) = NULL;
3548   SET_NEXT_INSN (last) = NULL;
3549   if (prevfirst)
3550     SET_NEXT_INSN (prevfirst) = nextlast;
3551   if (nextlast)
3552     SET_PREV_INSN (nextlast) = prevfirst;
3553   else
3554     set_last_insn (prevfirst);
3555   if (!prevfirst)
3556     set_first_insn (nextlast);
3557   return first;
3558 }
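
/* Illustrative sketch (editorial addition, not part of GCC):
   unlink_insn_chain is a doubly-linked-list splice.  Given the insn
   stream A-F-G-B,

     rtx_insn *seg = unlink_insn_chain (F, G);

   leaves A-B in the stream and returns F, with the outer links of F
   and G cleared; the first/last insn pointers are updated when F or G
   sat at either end of the stream.  */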
3559 
3560 /* Skip over inter-block insns occurring after BB which are typically
3561    associated with BB (e.g., barriers). If there are any such insns,
3562    we return the last one. Otherwise, we return the end of BB.  */
3563 
3564 static rtx_insn *
3565 skip_insns_after_block (basic_block bb)
3566 {
3567   rtx_insn *insn, *last_insn, *next_head, *prev;
3568 
3569   next_head = NULL;
3570   if (bb->next_bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
3571     next_head = BB_HEAD (bb->next_bb);
3572 
3573   for (last_insn = insn = BB_END (bb); (insn = NEXT_INSN (insn)) != 0; )
3574     {
3575       if (insn == next_head)
3576 	break;
3577 
3578       switch (GET_CODE (insn))
3579 	{
3580 	case BARRIER:
3581 	  last_insn = insn;
3582 	  continue;
3583 
3584 	case NOTE:
3585 	  gcc_assert (NOTE_KIND (insn) != NOTE_INSN_BLOCK_END);
3586 	  continue;
3587 
3588 	case CODE_LABEL:
3589 	  if (NEXT_INSN (insn)
3590 	      && JUMP_TABLE_DATA_P (NEXT_INSN (insn)))
3591 	    {
3592 	      insn = NEXT_INSN (insn);
3593 	      last_insn = insn;
3594 	      continue;
3595 	    }
3596 	  break;
3597 
3598 	default:
3599 	  break;
3600 	}
3601 
3602       break;
3603     }
3604 
3605   /* It is possible to hit a contradictory sequence.  For instance:
3606 
3607      jump_insn
3608      NOTE_INSN_BLOCK_BEG
3609      barrier
3610 
3611      Where the barrier belongs to jump_insn, but the note does not.  This
3612      can be created by removing the basic block originally following
3613      NOTE_INSN_BLOCK_BEG.  In such a case, reorder the notes.  */
3614 
3615   for (insn = last_insn; insn != BB_END (bb); insn = prev)
3616     {
3617       prev = PREV_INSN (insn);
3618       if (NOTE_P (insn))
3619 	switch (NOTE_KIND (insn))
3620 	  {
3621 	  case NOTE_INSN_BLOCK_END:
3622 	    gcc_unreachable ();
3623 	    break;
3624 	  case NOTE_INSN_DELETED:
3625 	  case NOTE_INSN_DELETED_LABEL:
3626 	  case NOTE_INSN_DELETED_DEBUG_LABEL:
3627 	    continue;
3628 	  default:
3629 	    reorder_insns (insn, insn, last_insn);
3630 	  }
3631     }
3632 
3633   return last_insn;
3634 }
3635 
3636 /* Locate or create a label for a given basic block.  */
3637 
3638 static rtx_insn *
3639 label_for_bb (basic_block bb)
3640 {
3641   rtx_insn *label = BB_HEAD (bb);
3642 
3643   if (!LABEL_P (label))
3644     {
3645       if (dump_file)
3646 	fprintf (dump_file, "Emitting label for block %d\n", bb->index);
3647 
3648       label = block_label (bb);
3649     }
3650 
3651   return label;
3652 }
3653 
3654 /* Locate the effective beginning and end of the insn chain for each
3655    block, as defined by skip_insns_after_block above.  */
3656 
3657 static void
3658 record_effective_endpoints (void)
3659 {
3660   rtx_insn *next_insn;
3661   basic_block bb;
3662   rtx_insn *insn;
3663 
3664   for (insn = get_insns ();
3665        insn
3666        && NOTE_P (insn)
3667        && NOTE_KIND (insn) != NOTE_INSN_BASIC_BLOCK;
3668        insn = NEXT_INSN (insn))
3669     continue;
3670   /* No basic blocks at all?  */
3671   gcc_assert (insn);
3672 
3673   if (PREV_INSN (insn))
3674     cfg_layout_function_header =
3675 	    unlink_insn_chain (get_insns (), PREV_INSN (insn));
3676   else
3677     cfg_layout_function_header = NULL;
3678 
3679   next_insn = get_insns ();
3680   FOR_EACH_BB_FN (bb, cfun)
3681     {
3682       rtx_insn *end;
3683 
3684       if (PREV_INSN (BB_HEAD (bb)) && next_insn != BB_HEAD (bb))
3685 	BB_HEADER (bb) = unlink_insn_chain (next_insn,
3686 						PREV_INSN (BB_HEAD (bb)));
3687       end = skip_insns_after_block (bb);
3688       if (NEXT_INSN (BB_END (bb)) && BB_END (bb) != end)
3689 	BB_FOOTER (bb) = unlink_insn_chain (NEXT_INSN (BB_END (bb)), end);
3690       next_insn = NEXT_INSN (BB_END (bb));
3691     }
3692 
3693   cfg_layout_function_footer = next_insn;
3694   if (cfg_layout_function_footer)
3695     cfg_layout_function_footer = unlink_insn_chain (cfg_layout_function_footer, get_last_insn ());
3696 }
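
/* Illustrative sketch (editorial addition, not part of GCC): after
   record_effective_endpoints, insns sitting between blocks are parked
   off to the side.  E.g. for the stream

     note1  LABEL1 insn1 jump1 barrier  LABEL2 insn2 ...

   note1 becomes the function header, the barrier after jump1 becomes
   the footer of LABEL1's block, and anything trailing the last block
   becomes the function footer, each chain removed from the stream via
   unlink_insn_chain.  */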
3697 
3698 namespace {
3699 
3700 const pass_data pass_data_into_cfg_layout_mode =
3701 {
3702   RTL_PASS, /* type */
3703   "into_cfglayout", /* name */
3704   OPTGROUP_NONE, /* optinfo_flags */
3705   TV_CFG, /* tv_id */
3706   0, /* properties_required */
3707   PROP_cfglayout, /* properties_provided */
3708   0, /* properties_destroyed */
3709   0, /* todo_flags_start */
3710   0, /* todo_flags_finish */
3711 };
3712 
3713 class pass_into_cfg_layout_mode : public rtl_opt_pass
3714 {
3715 public:
3716   pass_into_cfg_layout_mode (gcc::context *ctxt)
3717     : rtl_opt_pass (pass_data_into_cfg_layout_mode, ctxt)
3718   {}
3719 
3720   /* opt_pass methods: */
3721   virtual unsigned int execute (function *)
3722     {
3723       cfg_layout_initialize (0);
3724       return 0;
3725     }
3726 
3727 }; // class pass_into_cfg_layout_mode
3728 
3729 } // anon namespace
3730 
3731 rtl_opt_pass *
3732 make_pass_into_cfg_layout_mode (gcc::context *ctxt)
3733 {
3734   return new pass_into_cfg_layout_mode (ctxt);
3735 }
3736 
3737 namespace {
3738 
3739 const pass_data pass_data_outof_cfg_layout_mode =
3740 {
3741   RTL_PASS, /* type */
3742   "outof_cfglayout", /* name */
3743   OPTGROUP_NONE, /* optinfo_flags */
3744   TV_CFG, /* tv_id */
3745   0, /* properties_required */
3746   0, /* properties_provided */
3747   PROP_cfglayout, /* properties_destroyed */
3748   0, /* todo_flags_start */
3749   0, /* todo_flags_finish */
3750 };
3751 
3752 class pass_outof_cfg_layout_mode : public rtl_opt_pass
3753 {
3754 public:
3755   pass_outof_cfg_layout_mode (gcc::context *ctxt)
3756     : rtl_opt_pass (pass_data_outof_cfg_layout_mode, ctxt)
3757   {}
3758 
3759   /* opt_pass methods: */
3760   virtual unsigned int execute (function *);
3761 
3762 }; // class pass_outof_cfg_layout_mode
3763 
3764 unsigned int
3765 pass_outof_cfg_layout_mode::execute (function *fun)
3766 {
3767   basic_block bb;
3768 
3769   FOR_EACH_BB_FN (bb, fun)
3770     if (bb->next_bb != EXIT_BLOCK_PTR_FOR_FN (fun))
3771       bb->aux = bb->next_bb;
3772 
3773   cfg_layout_finalize ();
3774 
3775   return 0;
3776 }
3777 
3778 } // anon namespace
3779 
3780 rtl_opt_pass *
3781 make_pass_outof_cfg_layout_mode (gcc::context *ctxt)
3782 {
3783   return new pass_outof_cfg_layout_mode (ctxt);
3784 }
3785 
3786 
3787 /* Link the basic blocks in the correct order, compacting the basic
3788    block queue while at it.  If STAY_IN_CFGLAYOUT_MODE is false, this
3789    function also clears the basic block header and footer fields.
3790 
3791    This function is usually called after a pass (e.g. tracer) finishes
3792    some transformations while in cfglayout mode.  The required sequence
3793    of the basic blocks is in a linked list along the bb->aux field.
3794    This function re-links the basic block prev_bb and next_bb pointers
3795    accordingly, and it compacts and renumbers the blocks.
3796 
3797    FIXME: This currently works only for RTL, but the only RTL-specific
3798    bits are the STAY_IN_CFGLAYOUT_MODE bits.  The tracer pass was moved
3799    to GIMPLE a long time ago, but it doesn't relink the basic block
3800    chain.  It could do that (to give better initial RTL) if this function
3801    is made IR-agnostic (and moved to cfganal.cc or cfg.cc while at it).  */
3802 
3803 void
3804 relink_block_chain (bool stay_in_cfglayout_mode)
3805 {
3806   basic_block bb, prev_bb;
3807   int index;
3808 
3809   /* Maybe dump the re-ordered sequence.  */
3810   if (dump_file)
3811     {
3812       fprintf (dump_file, "Reordered sequence:\n");
3813       for (bb = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb, index =
3814 	   NUM_FIXED_BLOCKS;
3815 	   bb;
3816 	   bb = (basic_block) bb->aux, index++)
3817 	{
3818 	  fprintf (dump_file, " %i ", index);
3819 	  if (get_bb_original (bb))
3820 	    fprintf (dump_file, "duplicate of %i\n",
3821 		     get_bb_original (bb)->index);
3822 	  else if (forwarder_block_p (bb)
3823 		   && !LABEL_P (BB_HEAD (bb)))
3824 	    fprintf (dump_file, "compensation\n");
3825 	  else
3826 	    fprintf (dump_file, "bb %i\n", bb->index);
3827 	}
3828     }
3829 
3830   /* Now reorder the blocks.  */
3831   prev_bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);
3832   bb = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb;
3833   for (; bb; prev_bb = bb, bb = (basic_block) bb->aux)
3834     {
3835       bb->prev_bb = prev_bb;
3836       prev_bb->next_bb = bb;
3837     }
3838   prev_bb->next_bb = EXIT_BLOCK_PTR_FOR_FN (cfun);
3839   EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb = prev_bb;
3840 
3841   /* Then, clean up the aux fields.  */
3842   FOR_ALL_BB_FN (bb, cfun)
3843     {
3844       bb->aux = NULL;
3845       if (!stay_in_cfglayout_mode)
3846 	BB_HEADER (bb) = BB_FOOTER (bb) = NULL;
3847     }
3848 
3849   /* Maybe reset the original copy tables; they are not valid anymore
3850      when we renumber the basic blocks in compact_blocks.  If we are
3851      going out of cfglayout mode, don't re-allocate the tables.  */
3852   if (original_copy_tables_initialized_p ())
3853     free_original_copy_tables ();
3854   if (stay_in_cfglayout_mode)
3855     initialize_original_copy_tables ();
3856 
3857   /* Finally, put basic_block_info in the new order.  */
3858   compact_blocks ();
3859 }
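
/* Illustrative sketch (editorial addition, not part of GCC): a
   reordering pass communicates the desired sequence through bb->aux
   before calling relink_block_chain, e.g. for the order bb1, bb3,
   bb2:

     bb1->aux = bb3;
     bb3->aux = bb2;
     bb2->aux = NULL;    (last block before the exit block)
     relink_block_chain (true);
*/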
3860 
3861 
3862 /* Given a reorder chain, rearrange the code to match.  */
3863 
3864 static void
3865 fixup_reorder_chain (void)
3866 {
3867   basic_block bb;
3868   rtx_insn *insn = NULL;
3869 
3870   if (cfg_layout_function_header)
3871     {
3872       set_first_insn (cfg_layout_function_header);
3873       insn = cfg_layout_function_header;
3874       while (NEXT_INSN (insn))
3875 	insn = NEXT_INSN (insn);
3876     }
3877 
3878   /* First do the bulk reordering -- rechain the blocks without regard to
3879      the needed changes to jumps and labels.  */
3880 
3881   for (bb = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb; bb; bb = (basic_block)
3882        bb->aux)
3883     {
3884       if (BB_HEADER (bb))
3885 	{
3886 	  if (insn)
3887 	    SET_NEXT_INSN (insn) = BB_HEADER (bb);
3888 	  else
3889 	    set_first_insn (BB_HEADER (bb));
3890 	  SET_PREV_INSN (BB_HEADER (bb)) = insn;
3891 	  insn = BB_HEADER (bb);
3892 	  while (NEXT_INSN (insn))
3893 	    insn = NEXT_INSN (insn);
3894 	}
3895       if (insn)
3896 	SET_NEXT_INSN (insn) = BB_HEAD (bb);
3897       else
3898 	set_first_insn (BB_HEAD (bb));
3899       SET_PREV_INSN (BB_HEAD (bb)) = insn;
3900       insn = BB_END (bb);
3901       if (BB_FOOTER (bb))
3902 	{
3903 	  SET_NEXT_INSN (insn) = BB_FOOTER (bb);
3904 	  SET_PREV_INSN (BB_FOOTER (bb)) = insn;
3905 	  while (NEXT_INSN (insn))
3906 	    insn = NEXT_INSN (insn);
3907 	}
3908     }
3909 
3910   SET_NEXT_INSN (insn) = cfg_layout_function_footer;
3911   if (cfg_layout_function_footer)
3912     SET_PREV_INSN (cfg_layout_function_footer) = insn;
3913 
3914   while (NEXT_INSN (insn))
3915     insn = NEXT_INSN (insn);
3916 
3917   set_last_insn (insn);
3918   if (flag_checking)
3919     verify_insn_chain ();
3920 
3921   /* Now add jumps and labels as needed to match the blocks' new
3922      outgoing edges.  */
3923 
3924   for (bb = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb; bb ; bb = (basic_block)
3925        bb->aux)
3926     {
3927       edge e_fall, e_taken, e;
3928       rtx_insn *bb_end_insn;
3929       rtx ret_label = NULL_RTX;
3930       basic_block nb;
3931       edge_iterator ei;
3932 
3933       if (EDGE_COUNT (bb->succs) == 0)
3934 	continue;
3935 
3936       /* Find the old fallthru edge, and another non-EH edge for
3937 	 a taken jump.  */
3938       e_taken = e_fall = NULL;
3939 
3940       FOR_EACH_EDGE (e, ei, bb->succs)
3941 	if (e->flags & EDGE_FALLTHRU)
3942 	  e_fall = e;
3943 	else if (! (e->flags & EDGE_EH))
3944 	  e_taken = e;
3945 
3946       bb_end_insn = BB_END (bb);
3947       if (rtx_jump_insn *bb_end_jump = dyn_cast <rtx_jump_insn *> (bb_end_insn))
3948 	{
3949 	  ret_label = JUMP_LABEL (bb_end_jump);
3950 	  if (any_condjump_p (bb_end_jump))
3951 	    {
3952 	      /* This might happen if the conditional jump has side
3953 		 effects and could therefore not be optimized away.
3954 		 Make the basic block end with a barrier in order
3955 		 to prevent rtl_verify_flow_info from complaining.  */
3956 	      if (!e_fall)
3957 		{
3958 		  gcc_assert (!onlyjump_p (bb_end_jump)
3959 			      || returnjump_p (bb_end_jump)
3960                               || (e_taken->flags & EDGE_CROSSING));
3961 		  emit_barrier_after (bb_end_jump);
3962 		  continue;
3963 		}
3964 
3965 	      /* If the old fallthru is still next, nothing to do.  */
3966 	      if (bb->aux == e_fall->dest
3967 		  || e_fall->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
3968 		continue;
3969 
3970 	      /* The degenerate case of a conditional jump jumping to the next
3971 		 instruction can happen for jumps with side effects.  We need
3972 		 to construct a forwarder block and this will be done just
3973 		 fine by force_nonfallthru below.  */
3974 	      if (!e_taken)
3975 		;
3976 
3977 	      /* There is another special case: if *neither* block is next,
3978 		 such as happens at the very end of a function, then we'll
3979 		 need to add a new unconditional jump.  Choose the taken
3980 		 edge based on known or assumed probability.  */
3981 	      else if (bb->aux != e_taken->dest)
3982 		{
3983 		  rtx note = find_reg_note (bb_end_jump, REG_BR_PROB, 0);
3984 
3985 		  if (note
3986 		      && profile_probability::from_reg_br_prob_note
3987 				 (XINT (note, 0)) < profile_probability::even ()
3988 		      && invert_jump (bb_end_jump,
3989 				      (e_fall->dest
3990 				       == EXIT_BLOCK_PTR_FOR_FN (cfun)
3991 				       ? NULL_RTX
3992 				       : label_for_bb (e_fall->dest)), 0))
3993 		    {
3994 		      e_fall->flags &= ~EDGE_FALLTHRU;
3995 		      gcc_checking_assert (could_fall_through
3996 					   (e_taken->src, e_taken->dest));
3997 		      e_taken->flags |= EDGE_FALLTHRU;
3998 		      update_br_prob_note (bb);
3999 		      e = e_fall, e_fall = e_taken, e_taken = e;
4000 		    }
4001 		}
4002 
4003 	      /* If the "jumping" edge is a crossing edge, and the fall
4004 		 through edge is non-crossing, leave things as they are.  */
4005 	      else if ((e_taken->flags & EDGE_CROSSING)
4006 		       && !(e_fall->flags & EDGE_CROSSING))
4007 		continue;
4008 
4009 	      /* Otherwise we can try to invert the jump.  This will
4010 		 basically never fail; however, keep up the pretense.  */
4011 	      else if (invert_jump (bb_end_jump,
4012 				    (e_fall->dest
4013 				     == EXIT_BLOCK_PTR_FOR_FN (cfun)
4014 				     ? NULL_RTX
4015 				     : label_for_bb (e_fall->dest)), 0))
4016 		{
4017 		  e_fall->flags &= ~EDGE_FALLTHRU;
4018 		  gcc_checking_assert (could_fall_through
4019 				       (e_taken->src, e_taken->dest));
4020 		  e_taken->flags |= EDGE_FALLTHRU;
4021 		  update_br_prob_note (bb);
4022 		  if (LABEL_NUSES (ret_label) == 0
4023 		      && single_pred_p (e_taken->dest))
4024 		    delete_insn (as_a<rtx_insn *> (ret_label));
4025 		  continue;
4026 		}
4027 	    }
4028 	  else if (extract_asm_operands (PATTERN (bb_end_insn)) != NULL)
4029 	    {
4030 	      /* If the old fallthru is still next or if
4031 		 asm goto doesn't have a fallthru (e.g. when followed by
4032 		 __builtin_unreachable ()), nothing to do.  */
4033 	      if (! e_fall
4034 		  || bb->aux == e_fall->dest
4035 		  || e_fall->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
4036 		continue;
4037 
4038 	      /* Otherwise we'll have to use the fallthru fixup below.  */
4039 	    }
4040 	  else
4041 	    {
4042 	      /* Otherwise we have some return, switch or computed
4043 		 jump.  In the 99% case, there should not have been a
4044 		 fallthru edge.  */
4045 	      gcc_assert (returnjump_p (bb_end_insn) || !e_fall);
4046 	      continue;
4047 	    }
4048 	}
4049       else
4050 	{
4051 	  /* No fallthru implies a noreturn function with EH edges, or
4052 	     something similarly bizarre.  In any case, we don't need to
4053 	     do anything.  */
4054 	  if (! e_fall)
4055 	    continue;
4056 
4057 	  /* If the fallthru block is still next, nothing to do.  */
4058 	  if (bb->aux == e_fall->dest)
4059 	    continue;
4060 
4061 	  /* A fallthru to exit block.  */
4062 	  if (e_fall->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
4063 	    continue;
4064 	}
4065 
4066       /* We got here if we need to add a new jump insn.
4067 	 Note force_nonfallthru can delete E_FALL and thus we have to
4068 	 save E_FALL->src prior to the call to force_nonfallthru.  */
4069       nb = force_nonfallthru_and_redirect (e_fall, e_fall->dest, ret_label);
4070       if (nb)
4071 	{
4072 	  nb->aux = bb->aux;
4073 	  bb->aux = nb;
4074 	  /* Don't process this new block.  */
4075 	  bb = nb;
4076 	}
4077     }
4078 
4079   relink_block_chain (/*stay_in_cfglayout_mode=*/false);
4080 
4081   /* Annoying special case - jump around dead jumptables left in the code.  */
4082   FOR_EACH_BB_FN (bb, cfun)
4083     {
4084       edge e = find_fallthru_edge (bb->succs);
4085 
4086       if (e && !can_fallthru (e->src, e->dest))
4087 	force_nonfallthru (e);
4088     }
4089 
4090   /* Ensure that the goto_locus on edges has some instruction with that
4091      locus in the RTL when not optimizing.  */
4092   if (!optimize && !DECL_IGNORED_P (current_function_decl))
4093     FOR_EACH_BB_FN (bb, cfun)
4094       {
4095         edge e;
4096         edge_iterator ei;
4097 
4098         FOR_EACH_EDGE (e, ei, bb->succs)
4099 	  if (LOCATION_LOCUS (e->goto_locus) != UNKNOWN_LOCATION
4100 	      && !(e->flags & EDGE_ABNORMAL))
4101 	    {
4102 	      edge e2;
4103 	      edge_iterator ei2;
4104 	      basic_block dest, nb;
4105 	      rtx_insn *end;
4106 
4107 	      insn = BB_END (e->src);
4108 	      end = PREV_INSN (BB_HEAD (e->src));
4109 	      while (insn != end
4110 		     && (!NONDEBUG_INSN_P (insn) || !INSN_HAS_LOCATION (insn)))
4111 		insn = PREV_INSN (insn);
4112 	      if (insn != end
4113 		  && loc_equal (INSN_LOCATION (insn), e->goto_locus))
4114 		continue;
4115 	      if (simplejump_p (BB_END (e->src))
4116 		  && !INSN_HAS_LOCATION (BB_END (e->src)))
4117 		{
4118 		  INSN_LOCATION (BB_END (e->src)) = e->goto_locus;
4119 		  continue;
4120 		}
4121 	      dest = e->dest;
4122 	      if (dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
4123 		{
4124 		  /* Non-fallthru edges to the exit block cannot be split.  */
4125 		  if (!(e->flags & EDGE_FALLTHRU))
4126 		    continue;
4127 		}
4128 	      else
4129 		{
4130 		  insn = BB_HEAD (dest);
4131 		  end = NEXT_INSN (BB_END (dest));
4132 		  while (insn != end && !NONDEBUG_INSN_P (insn))
4133 		    insn = NEXT_INSN (insn);
4134 		  if (insn != end && INSN_HAS_LOCATION (insn)
4135 		      && loc_equal (INSN_LOCATION (insn), e->goto_locus))
4136 		    continue;
4137 		}
4138 	      nb = split_edge (e);
4139 	      if (!INSN_P (BB_END (nb)))
4140 		BB_END (nb) = emit_insn_after_noloc (gen_nop (), BB_END (nb),
4141 							 nb);
4142 	      INSN_LOCATION (BB_END (nb)) = e->goto_locus;
4143 
4144 	      /* If there are other incoming edges to the destination block
4145 		 with the same goto locus, redirect them to the new block as
4146 		 well, this can prevent other such blocks from being created
4147 		 in subsequent iterations of the loop.  */
4148 	      for (ei2 = ei_start (dest->preds); (e2 = ei_safe_edge (ei2)); )
4149 		if (LOCATION_LOCUS (e2->goto_locus) != UNKNOWN_LOCATION
4150 		    && !(e2->flags & (EDGE_ABNORMAL | EDGE_FALLTHRU))
4151 		    && e->goto_locus == e2->goto_locus)
4152 		  redirect_edge_and_branch (e2, nb);
4153 		else
4154 		  ei_next (&ei2);
4155 	    }
4156       }
4157 }
4158 
4159 /* Perform sanity checks on the insn chain.
4160    1. Check that next/prev pointers are consistent in both the forward and
4161       reverse direction.
4162    2. Count insns in chain, going both directions, and check if equal.
4163    3. Check that get_last_insn () returns the actual end of chain.  */
4164 
4165 DEBUG_FUNCTION void
4166 verify_insn_chain (void)
4167 {
4168   rtx_insn *x, *prevx, *nextx;
4169   int insn_cnt1, insn_cnt2;
4170 
4171   for (prevx = NULL, insn_cnt1 = 1, x = get_insns ();
4172        x != 0;
4173        prevx = x, insn_cnt1++, x = NEXT_INSN (x))
4174     gcc_assert (PREV_INSN (x) == prevx);
4175 
4176   gcc_assert (prevx == get_last_insn ());
4177 
4178   for (nextx = NULL, insn_cnt2 = 1, x = get_last_insn ();
4179        x != 0;
4180        nextx = x, insn_cnt2++, x = PREV_INSN (x))
4181     gcc_assert (NEXT_INSN (x) == nextx);
4182 
4183   gcc_assert (insn_cnt1 == insn_cnt2);
4184 }
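
/* Editorial restatement (not part of GCC) of the invariant checked
   above: for every insn X in the chain,

     NEXT_INSN (X) == NULL || PREV_INSN (NEXT_INSN (X)) == X

   and symmetrically in the reverse direction, with get_last_insn ()
   naming the unique insn whose NEXT_INSN is NULL.  */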
4185 
4186 /* If we have assembler epilogues, the block falling through to exit must
4187    be the last one in the reordered chain when we reach final.  Ensure
4188    that this condition is met.  */
4189 static void
4190 fixup_fallthru_exit_predecessor (void)
4191 {
4192   edge e;
4193   basic_block bb = NULL;
4194 
4195   /* This transformation is not valid before reload, because we might
4196      separate a call from the instruction that copies the return
4197      value.  */
4198   gcc_assert (reload_completed);
4199 
4200   e = find_fallthru_edge (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds);
4201   if (e)
4202     bb = e->src;
4203 
4204   if (bb && bb->aux)
4205     {
4206       basic_block c = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb;
4207 
4208       /* If the very first block is the one with the fall-through exit
4209 	 edge, we have to split that block.  */
4210       if (c == bb)
4211 	{
4212 	  bb = split_block_after_labels (bb)->dest;
4213 	  bb->aux = c->aux;
4214 	  c->aux = bb;
4215 	  BB_FOOTER (bb) = BB_FOOTER (c);
4216 	  BB_FOOTER (c) = NULL;
4217 	}
4218 
4219       while (c->aux != bb)
4220 	c = (basic_block) c->aux;
4221 
4222       c->aux = bb->aux;
4223       while (c->aux)
4224 	c = (basic_block) c->aux;
4225 
4226       c->aux = bb;
4227       bb->aux = NULL;
4228     }
4229 }
4230 
4231 /* If the exit block has more than one fallthru predecessor, force it
4232    to have only one.  */
4233 
4234 static void
4235 force_one_exit_fallthru (void)
4236 {
4237   edge e, predecessor = NULL;
4238   bool more = false;
4239   edge_iterator ei;
4240   basic_block forwarder, bb;
4241 
4242   FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
4243     if (e->flags & EDGE_FALLTHRU)
4244       {
4245 	if (predecessor == NULL)
4246 	  predecessor = e;
4247 	else
4248 	  {
4249 	    more = true;
4250 	    break;
4251 	  }
4252       }
4253 
4254   if (!more)
4255     return;
4256 
4257   /* Exit has several fallthru predecessors.  Create a forwarder block for
4258      them.  */
4259   forwarder = split_edge (predecessor);
4260   for (ei = ei_start (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds);
4261        (e = ei_safe_edge (ei)); )
4262     {
4263       if (e->src == forwarder
4264 	  || !(e->flags & EDGE_FALLTHRU))
4265 	ei_next (&ei);
4266       else
4267 	redirect_edge_and_branch_force (e, forwarder);
4268     }
4269 
4270   /* Fix up the chain of blocks -- make FORWARDER immediately precede the
4271      exit block.  */
4272   FOR_EACH_BB_FN (bb, cfun)
4273     {
4274       if (bb->aux == NULL && bb != forwarder)
4275 	{
4276 	  bb->aux = forwarder;
4277 	  break;
4278 	}
4279     }
4280 }
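
/* Illustrative sketch (editorial addition, not part of GCC) of the
   transform:

     before:   A --fallthru--> EXIT      after:   A --> FWD
               B --fallthru--> EXIT               B --> FWD --fallthru--> EXIT

   All former fallthru predecessors are redirected (gaining jumps
   where needed) to the single forwarder block FWD, which becomes the
   one fallthru predecessor of the exit block.  */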
4281 
4282 /* Return true if it is possible to duplicate the basic block BB.  */
4283 
4284 static bool
4285 cfg_layout_can_duplicate_bb_p (const_basic_block bb)
4286 {
4287   /* Do not attempt to duplicate tablejumps, as we need to unshare
4288      the dispatch table.  This is difficult to do, as the instructions
4289      computing the jump destination may be hoisted outside the basic block.  */
4290   if (tablejump_p (BB_END (bb), NULL, NULL))
4291     return false;
4292 
4293   /* Do not duplicate blocks containing insns that can't be copied.  */
4294   if (targetm.cannot_copy_insn_p)
4295     {
4296       rtx_insn *insn = BB_HEAD (bb);
4297       while (1)
4298 	{
4299 	  if (INSN_P (insn) && targetm.cannot_copy_insn_p (insn))
4300 	    return false;
4301 	  if (insn == BB_END (bb))
4302 	    break;
4303 	  insn = NEXT_INSN (insn);
4304 	}
4305     }
4306 
4307   return true;
4308 }
4309 
4310 rtx_insn *
4311 duplicate_insn_chain (rtx_insn *from, rtx_insn *to,
4312 		      class loop *loop, copy_bb_data *id)
4313 {
4314   rtx_insn *insn, *next, *copy;
4315   rtx_note *last;
4316 
4317   /* Avoid updating the boundaries of the previous basic block.  The
4318      note will get removed from the insn stream in fixup.  */
4319   last = emit_note (NOTE_INSN_DELETED);
4320 
4321   /* Create copy at the end of INSN chain.  The chain will
4322      be reordered later.  */
4323   for (insn = from; insn != NEXT_INSN (to); insn = NEXT_INSN (insn))
4324     {
4325       switch (GET_CODE (insn))
4326 	{
4327 	case DEBUG_INSN:
4328 	  /* Don't duplicate label debug insns.  */
4329 	  if (DEBUG_BIND_INSN_P (insn)
4330 	      && TREE_CODE (INSN_VAR_LOCATION_DECL (insn)) == LABEL_DECL)
4331 	    break;
4332 	  /* FALLTHRU */
4333 	case INSN:
4334 	case CALL_INSN:
4335 	case JUMP_INSN:
4336 	  copy = emit_copy_of_insn_after (insn, get_last_insn ());
4337 	  if (JUMP_P (insn) && JUMP_LABEL (insn) != NULL_RTX
4338 	      && ANY_RETURN_P (JUMP_LABEL (insn)))
4339 	    JUMP_LABEL (copy) = JUMP_LABEL (insn);
4340           maybe_copy_prologue_epilogue_insn (insn, copy);
4341 	  /* If requested, remap dependence info of cliques brought in
4342 	     via inlining.  */
4343 	  if (id)
4344 	    {
4345 	      subrtx_iterator::array_type array;
4346 	      FOR_EACH_SUBRTX (iter, array, PATTERN (insn), ALL)
4347 		if (MEM_P (*iter) && MEM_EXPR (*iter))
4348 		  {
4349 		    tree op = MEM_EXPR (*iter);
4350 		    if (TREE_CODE (op) == WITH_SIZE_EXPR)
4351 		      op = TREE_OPERAND (op, 0);
4352 		    while (handled_component_p (op))
4353 		      op = TREE_OPERAND (op, 0);
4354 		    if ((TREE_CODE (op) == MEM_REF
4355 			 || TREE_CODE (op) == TARGET_MEM_REF)
4356 			&& MR_DEPENDENCE_CLIQUE (op) > 1
4357 			&& (!loop
4358 			    || (MR_DEPENDENCE_CLIQUE (op)
4359 				!= loop->owned_clique)))
4360 		      {
4361 			if (!id->dependence_map)
4362 			  id->dependence_map = new hash_map<dependence_hash,
4363 			      unsigned short>;
4364 			bool existed;
4365 			unsigned short &newc = id->dependence_map->get_or_insert
4366 					 (MR_DEPENDENCE_CLIQUE (op), &existed);
4367 			if (!existed)
4368 			  {
4369 			    gcc_assert
4370 			      (MR_DEPENDENCE_CLIQUE (op) <= cfun->last_clique);
4371 			    newc = ++cfun->last_clique;
4372 			  }
4373 			/* We cannot adjust MR_DEPENDENCE_CLIQUE in-place
4374 			   since MEM_EXPR is shared so make a copy and
4375 			   walk to the subtree again.  */
4376 			tree new_expr = unshare_expr (MEM_EXPR (*iter));
4377 			tree orig_new_expr = new_expr;
4378 			if (TREE_CODE (new_expr) == WITH_SIZE_EXPR)
4379 			  new_expr = TREE_OPERAND (new_expr, 0);
4380 			while (handled_component_p (new_expr))
4381 			  new_expr = TREE_OPERAND (new_expr, 0);
4382 			MR_DEPENDENCE_CLIQUE (new_expr) = newc;
4383 			set_mem_expr (const_cast <rtx> (*iter), orig_new_expr);
4384 		      }
4385 		  }
4386 	    }
4387 	  break;
4388 
4389 	case JUMP_TABLE_DATA:
4390 	  /* Avoid copying dispatch tables.  We never duplicate
4391 	     tablejumps, so this can only be hit if the table got
4392 	     moved far from the original jump.
4393 	     Also avoid copying the following barrier, if any
4394 	     (and any debug insns in between).  */
4395 	  for (next = NEXT_INSN (insn);
4396 	       next != NEXT_INSN (to);
4397 	       next = NEXT_INSN (next))
4398 	    if (!DEBUG_INSN_P (next))
4399 	      break;
4400 	  if (next != NEXT_INSN (to) && BARRIER_P (next))
4401 	    insn = next;
4402 	  break;
4403 
4404 	case CODE_LABEL:
4405 	  break;
4406 
4407 	case BARRIER:
4408 	  emit_barrier ();
4409 	  break;
4410 
4411 	case NOTE:
4412 	  switch (NOTE_KIND (insn))
4413 	    {
4414 	      /* In case the prologue is empty and the function contains
4415 		 a label in its first BB, we may want to copy the block.  */
4416 	    case NOTE_INSN_PROLOGUE_END:
4417 
4418 	    case NOTE_INSN_DELETED:
4419 	    case NOTE_INSN_DELETED_LABEL:
4420 	    case NOTE_INSN_DELETED_DEBUG_LABEL:
4421 	      /* No problem to strip these.  */
4422 	    case NOTE_INSN_FUNCTION_BEG:
4423 	      /* There is always just a single entry to the function.  */
4424 	    case NOTE_INSN_BASIC_BLOCK:
4425               /* We should only switch text sections once.  */
4426 	    case NOTE_INSN_SWITCH_TEXT_SECTIONS:
4427 	      break;
4428 
4429 	    case NOTE_INSN_EPILOGUE_BEG:
4430 	    case NOTE_INSN_UPDATE_SJLJ_CONTEXT:
4431 	      emit_note_copy (as_a <rtx_note *> (insn));
4432 	      break;
4433 
4434 	    default:
4435 	      /* All other notes should have already been eliminated.  */
4436 	      gcc_unreachable ();
4437 	    }
4438 	  break;
4439 	default:
4440 	  gcc_unreachable ();
4441 	}
4442     }
4443   insn = NEXT_INSN (last);
4444   delete_insn (last);
4445   return insn;
4446 }
4447 
4448 /* Create a duplicate of the basic block BB.  */
4449 
4450 static basic_block
4451 cfg_layout_duplicate_bb (basic_block bb, copy_bb_data *id)
4452 {
4453   rtx_insn *insn;
4454   basic_block new_bb;
4455 
4456   class loop *loop = (id && current_loops) ? bb->loop_father : NULL;
4457 
4458   insn = duplicate_insn_chain (BB_HEAD (bb), BB_END (bb), loop, id);
4459   new_bb = create_basic_block (insn,
4460 			       insn ? get_last_insn () : NULL,
4461 			       EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb);
4462 
4463   BB_COPY_PARTITION (new_bb, bb);
4464   if (BB_HEADER (bb))
4465     {
4466       insn = BB_HEADER (bb);
4467       while (NEXT_INSN (insn))
4468 	insn = NEXT_INSN (insn);
4469       insn = duplicate_insn_chain (BB_HEADER (bb), insn, loop, id);
4470       if (insn)
4471 	BB_HEADER (new_bb) = unlink_insn_chain (insn, get_last_insn ());
4472     }
4473 
4474   if (BB_FOOTER (bb))
4475     {
4476       insn = BB_FOOTER (bb);
4477       while (NEXT_INSN (insn))
4478 	insn = NEXT_INSN (insn);
4479       insn = duplicate_insn_chain (BB_FOOTER (bb), insn, loop, id);
4480       if (insn)
4481 	BB_FOOTER (new_bb) = unlink_insn_chain (insn, get_last_insn ());
4482     }
4483 
4484   return new_bb;
4485 }
4486 
4487 
4488 /* Main entry point to this module - initialize the data structures for
4489    CFG layout changes.
4490 
4491    FLAGS is a set of additional flags to pass to cleanup_cfg().  */
4492 
4493 void
4494 cfg_layout_initialize (int flags)
4495 {
4496   rtx_insn_list *x;
4497   basic_block bb;
4498 
4499   /* Once bb partitioning is complete, cfg layout mode should not be
4500      re-entered.  Entering cfg layout mode may require fixups.  For
4501      example, edge forwarding performed when optimizing the cfg layout
4502      may require moving a block from the hot to the cold section; this
4503      would create an illegal partitioning unless some manual fixup were
4504      performed.  */
4505   gcc_assert (!crtl->bb_reorder_complete || !crtl->has_bb_partition);
4506 
4507   initialize_original_copy_tables ();
4508 
4509   cfg_layout_rtl_register_cfg_hooks ();
4510 
4511   record_effective_endpoints ();
4512 
4513   /* Make sure that the targets of non-local gotos are marked.  */
4514   for (x = nonlocal_goto_handler_labels; x; x = x->next ())
4515     {
4516       bb = BLOCK_FOR_INSN (x->insn ());
4517       bb->flags |= BB_NON_LOCAL_GOTO_TARGET;
4518     }
4519 
4520   cleanup_cfg (CLEANUP_CFGLAYOUT | flags);
4521 }
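
/* Illustrative sketch (editorial addition, not part of GCC): a pass
   brackets its cfglayout-mode work between this initializer and
   cfg_layout_finalize,

     cfg_layout_initialize (0);
     ... transform, recording the desired order in bb->aux ...
     cfg_layout_finalize ();

   as the into_cfglayout/outof_cfglayout passes above do on behalf of
   the passes scheduled between them.  */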
4522 
4523 /* Splits superblocks.  */
4524 void
4525 break_superblocks (void)
4526 {
4527   bool need = false;
4528   basic_block bb;
4529 
4530   auto_sbitmap superblocks (last_basic_block_for_fn (cfun));
4531   bitmap_clear (superblocks);
4532 
4533   FOR_EACH_BB_FN (bb, cfun)
4534     if (bb->flags & BB_SUPERBLOCK)
4535       {
4536 	bb->flags &= ~BB_SUPERBLOCK;
4537 	bitmap_set_bit (superblocks, bb->index);
4538 	need = true;
4539       }
4540 
4541   if (need)
4542     {
4543       rebuild_jump_labels (get_insns ());
4544       find_many_sub_basic_blocks (superblocks);
4545     }
4546 }
4547 
4548 /* Finalize the changes: reorder insn list according to the sequence specified
4549    by aux pointers, enter compensation code, rebuild scope forest.  */
4550 
4551 void
4552 cfg_layout_finalize (void)
4553 {
4554   free_dominance_info (CDI_DOMINATORS);
4555   force_one_exit_fallthru ();
4556   rtl_register_cfg_hooks ();
4557   if (reload_completed && !targetm.have_epilogue ())
4558     fixup_fallthru_exit_predecessor ();
4559   fixup_reorder_chain ();
4560 
4561   rebuild_jump_labels (get_insns ());
4562   delete_dead_jumptables ();
4563 
4564   if (flag_checking)
4565     verify_insn_chain ();
4566   checking_verify_flow_info ();
4567 }
4568 
4569 
4570 /* Same as split_block but update cfg_layout structures.  */
4571 
4572 static basic_block
4573 cfg_layout_split_block (basic_block bb, void *insnp)
4574 {
4575   rtx insn = (rtx) insnp;
4576   basic_block new_bb = rtl_split_block (bb, insn);
4577 
4578   BB_FOOTER (new_bb) = BB_FOOTER (bb);
4579   BB_FOOTER (bb) = NULL;
4580 
4581   return new_bb;
4582 }
4583 
4584 /* Redirect edge E to DEST.  */
4585 static edge
4586 cfg_layout_redirect_edge_and_branch (edge e, basic_block dest)
4587 {
4588   basic_block src = e->src;
4589   edge ret;
4590 
4591   if (e->flags & (EDGE_ABNORMAL_CALL | EDGE_EH))
4592     return NULL;
4593 
4594   if (e->dest == dest)
4595     return e;
4596 
4597   if (e->flags & EDGE_CROSSING
4598       && BB_PARTITION (e->src) == BB_PARTITION (dest)
4599       && simplejump_p (BB_END (src)))
4600     {
4601       if (dump_file)
4602 	fprintf (dump_file,
4603 	  	 "Removing crossing jump while redirecting edge from %i to %i\n",
4604 		 e->src->index, dest->index);
4605       delete_insn (BB_END (src));
4606       remove_barriers_from_footer (src);
4607       e->flags |= EDGE_FALLTHRU;
4608     }
4609 
4610   if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
4611       && (ret = try_redirect_by_replacing_jump (e, dest, true)))
4612     {
4613       df_set_bb_dirty (src);
4614       return ret;
4615     }
4616 
4617   if (e->src == ENTRY_BLOCK_PTR_FOR_FN (cfun)
4618       && (e->flags & EDGE_FALLTHRU) && !(e->flags & EDGE_COMPLEX))
4619     {
4620       if (dump_file)
4621 	fprintf (dump_file, "Redirecting entry edge from bb %i to %i\n",
4622 		 e->src->index, dest->index);
4623 
4624       df_set_bb_dirty (e->src);
4625       redirect_edge_succ (e, dest);
4626       return e;
4627     }
4628 
4629   /* Redirect_edge_and_branch may decide to turn the branch into a fallthru
4630      edge in the case the basic block appears to be in sequence.  Avoid this
4631      transformation.  */
4632 
4633   if (e->flags & EDGE_FALLTHRU)
4634     {
4635       /* Redirect any branch edges unified with the fallthru one.  */
4636       if (JUMP_P (BB_END (src))
4637 	  && label_is_jump_target_p (BB_HEAD (e->dest),
4638 				     BB_END (src)))
4639 	{
4640 	  edge redirected;
4641 
4642 	  if (dump_file)
4643 	    fprintf (dump_file, "Fallthru edge unified with branch "
4644 		     "%i->%i redirected to %i\n",
4645 		     e->src->index, e->dest->index, dest->index);
4646 	  e->flags &= ~EDGE_FALLTHRU;
4647 	  redirected = redirect_branch_edge (e, dest);
4648 	  gcc_assert (redirected);
4649 	  redirected->flags |= EDGE_FALLTHRU;
4650 	  df_set_bb_dirty (redirected->src);
4651 	  return redirected;
4652 	}
4653       /* In case we are redirecting the fallthru edge to the branch edge
4654 	 of a conditional jump, remove it.  */
4655       if (EDGE_COUNT (src->succs) == 2)
4656 	{
4657 	  /* Find the edge that is different from E.  */
4658 	  edge s = EDGE_SUCC (src, EDGE_SUCC (src, 0) == e);
4659 
4660 	  if (s->dest == dest
4661 	      && any_condjump_p (BB_END (src))
4662 	      && onlyjump_p (BB_END (src)))
4663 	    delete_insn (BB_END (src));
4664 	}
4665       if (dump_file)
4666 	fprintf (dump_file, "Redirecting fallthru edge %i->%i to %i\n",
4667 		 e->src->index, e->dest->index, dest->index);
4668       ret = redirect_edge_succ_nodup (e, dest);
4669     }
4670   else
4671     ret = redirect_branch_edge (e, dest);
4672 
4673   if (!ret)
4674     return NULL;
4675 
4676   fixup_partition_crossing (ret);
4677   /* We don't want simplejumps in the insn stream during cfglayout.  */
4678   gcc_assert (!simplejump_p (BB_END (src)) || CROSSING_JUMP_P (BB_END (src)));
4679 
4680   df_set_bb_dirty (src);
4681   return ret;
4682 }
4683 
4684 /* Simple wrapper, as we can always redirect fallthru edges.  */
4685 static basic_block
4686 cfg_layout_redirect_edge_and_branch_force (edge e, basic_block dest)
4687 {
4688   edge redirected = cfg_layout_redirect_edge_and_branch (e, dest);
4689 
4690   gcc_assert (redirected);
4691   return NULL;
4692 }
4693 
4694 /* Same as delete_basic_block but update cfg_layout structures.  */
4695 
4696 static void
4697 cfg_layout_delete_block (basic_block bb)
4698 {
4699   rtx_insn *insn, *next, *prev = PREV_INSN (BB_HEAD (bb)), *remaints;
4700   rtx_insn **to;
4701 
4702   if (BB_HEADER (bb))
4703     {
4704       next = BB_HEAD (bb);
4705       if (prev)
4706 	SET_NEXT_INSN (prev) = BB_HEADER (bb);
4707       else
4708 	set_first_insn (BB_HEADER (bb));
4709       SET_PREV_INSN (BB_HEADER (bb)) = prev;
4710       insn = BB_HEADER (bb);
4711       while (NEXT_INSN (insn))
4712 	insn = NEXT_INSN (insn);
4713       SET_NEXT_INSN (insn) = next;
4714       SET_PREV_INSN (next) = insn;
4715     }
4716   next = NEXT_INSN (BB_END (bb));
4717   if (BB_FOOTER (bb))
4718     {
4719       insn = BB_FOOTER (bb);
4720       while (insn)
4721 	{
4722 	  if (BARRIER_P (insn))
4723 	    {
4724 	      if (PREV_INSN (insn))
4725 		SET_NEXT_INSN (PREV_INSN (insn)) = NEXT_INSN (insn);
4726 	      else
4727 		BB_FOOTER (bb) = NEXT_INSN (insn);
4728 	      if (NEXT_INSN (insn))
4729 		SET_PREV_INSN (NEXT_INSN (insn)) = PREV_INSN (insn);
4730 	    }
4731 	  if (LABEL_P (insn))
4732 	    break;
4733 	  insn = NEXT_INSN (insn);
4734 	}
4735       if (BB_FOOTER (bb))
4736 	{
4737 	  insn = BB_END (bb);
4738 	  SET_NEXT_INSN (insn) = BB_FOOTER (bb);
4739 	  SET_PREV_INSN (BB_FOOTER (bb)) = insn;
4740 	  while (NEXT_INSN (insn))
4741 	    insn = NEXT_INSN (insn);
4742 	  SET_NEXT_INSN (insn) = next;
4743 	  if (next)
4744 	    SET_PREV_INSN (next) = insn;
4745 	  else
4746 	    set_last_insn (insn);
4747 	}
4748     }
4749   if (bb->next_bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
4750     to = &BB_HEADER (bb->next_bb);
4751   else
4752     to = &cfg_layout_function_footer;
4753 
4754   rtl_delete_block (bb);
4755 
4756   if (prev)
4757     prev = NEXT_INSN (prev);
4758   else
4759     prev = get_insns ();
4760   if (next)
4761     next = PREV_INSN (next);
4762   else
4763     next = get_last_insn ();
4764 
4765   if (next && NEXT_INSN (next) != prev)
4766     {
4767       remaints = unlink_insn_chain (prev, next);
4768       insn = remaints;
4769       while (NEXT_INSN (insn))
4770 	insn = NEXT_INSN (insn);
4771       SET_NEXT_INSN (insn) = *to;
4772       if (*to)
4773 	SET_PREV_INSN (*to) = insn;
4774       *to = remaints;
4775     }
4776 }
4777 
4778 /* Return true when blocks A and B can be safely merged.  */
4779 
4780 static bool
4781 cfg_layout_can_merge_blocks_p (basic_block a, basic_block b)
4782 {
4783   /* If we are partitioning hot/cold basic blocks, we don't want to
4784      mess up unconditional or indirect jumps that cross between hot
4785      and cold sections.
4786 
4787      Basic block partitioning may result in some jumps that appear to
4788      be optimizable (or blocks that appear to be mergeable), but which really
4789      must be left untouched (they are required to make it safely across
4790      partition boundaries).  See  the comments at the top of
4791      bb-reorder.cc:partition_hot_cold_basic_blocks for complete details.  */
4792 
4793   if (BB_PARTITION (a) != BB_PARTITION (b))
4794     return false;
4795 
4796   /* Protect the loop latches.  */
4797   if (current_loops && b->loop_father->latch == b)
4798     return false;
4799 
4800   /* If we would end up moving B's instructions, make sure it doesn't fall
4801      through into the exit block, since we cannot recover from a fallthrough
4802      edge into the exit block occurring in the middle of a function.  */
4803   if (NEXT_INSN (BB_END (a)) != BB_HEAD (b))
4804     {
4805       edge e = find_fallthru_edge (b->succs);
4806       if (e && e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
4807 	return false;
4808     }
4809 
4810   /* There must be exactly one edge in between the blocks.  */
4811   return (single_succ_p (a)
4812 	  && single_succ (a) == b
4813 	  && single_pred_p (b) == 1
4814 	  && a != b
4815 	  /* Must be simple edge.  */
4816 	  && !(single_succ_edge (a)->flags & EDGE_COMPLEX)
4817 	  && a != ENTRY_BLOCK_PTR_FOR_FN (cfun)
4818 	  && b != EXIT_BLOCK_PTR_FOR_FN (cfun)
4819 	  /* If the jump insn has side effects, we can't kill the edge.
4820 	     When not optimizing, try_redirect_by_replacing_jump will
4821 	     not allow us to redirect an edge by replacing a table jump.  */
4822 	  && (!JUMP_P (BB_END (a))
4823 	      || ((!optimize || reload_completed)
4824 		  ? simplejump_p (BB_END (a)) : onlyjump_p (BB_END (a)))));
4825 }
4826 
4827 /* Merge block A and B.  The blocks must be mergeable.  */
4828 
4829 static void
4830 cfg_layout_merge_blocks (basic_block a, basic_block b)
4831 {
4832   /* If B is a forwarder block whose outgoing edge has no location, we'll
4833      propagate the locus of the edge between A and B onto it.  */
4834   const bool forward_edge_locus
4835     = (b->flags & BB_FORWARDER_BLOCK) != 0
4836       && LOCATION_LOCUS (EDGE_SUCC (b, 0)->goto_locus) == UNKNOWN_LOCATION;
4837   rtx_insn *insn;
4838 
4839   gcc_checking_assert (cfg_layout_can_merge_blocks_p (a, b));
4840 
4841   if (dump_file)
4842     fprintf (dump_file, "Merging block %d into block %d...\n", b->index,
4843 			 a->index);
4844 
4845   /* If there was a CODE_LABEL beginning B, delete it.  */
4846   if (LABEL_P (BB_HEAD (b)))
4847     {
4848       delete_insn (BB_HEAD (b));
4849     }
4850 
4851   /* We should have a fallthru edge in A, or we can do a dummy redirection
4852      to get it cleaned up.  */
4853   if (JUMP_P (BB_END (a)))
4854     try_redirect_by_replacing_jump (EDGE_SUCC (a, 0), b, true);
4855   gcc_assert (!JUMP_P (BB_END (a)));
4856 
4857   /* If not optimizing, preserve the locus of the single edge between
4858      blocks A and B if necessary by emitting a nop.  */
4859   if (!optimize
4860       && !forward_edge_locus
4861       && !DECL_IGNORED_P (current_function_decl))
4862     emit_nop_for_unique_locus_between (a, b);
4863 
4864   /* Move things from b->footer after a->footer.  */
4865   if (BB_FOOTER (b))
4866     {
4867       if (!BB_FOOTER (a))
4868 	BB_FOOTER (a) = BB_FOOTER (b);
4869       else
4870 	{
4871 	  rtx_insn *last = BB_FOOTER (a);
4872 
4873 	  while (NEXT_INSN (last))
4874 	    last = NEXT_INSN (last);
4875 	  SET_NEXT_INSN (last) = BB_FOOTER (b);
4876 	  SET_PREV_INSN (BB_FOOTER (b)) = last;
4877 	}
4878       BB_FOOTER (b) = NULL;
4879     }
4880 
4881   /* Move things from b->header before a->footer.
4882      Note that this may include dead tablejump data, but we don't clean
4883      those up until we go out of cfglayout mode.  */
4884   if (BB_HEADER (b))
4885     {
4886       if (! BB_FOOTER (a))
4887 	BB_FOOTER (a) = BB_HEADER (b);
4888       else
4889 	{
4890 	  rtx_insn *last = BB_HEADER (b);
4891 
4892 	  while (NEXT_INSN (last))
4893 	    last = NEXT_INSN (last);
4894 	  SET_NEXT_INSN (last) = BB_FOOTER (a);
4895 	  SET_PREV_INSN (BB_FOOTER (a)) = last;
4896 	  BB_FOOTER (a) = BB_HEADER (b);
4897 	}
4898       BB_HEADER (b) = NULL;
4899     }
4900 
4901   /* In the case basic blocks are not adjacent, move them around.  */
4902   if (NEXT_INSN (BB_END (a)) != BB_HEAD (b))
4903     {
4904       insn = unlink_insn_chain (BB_HEAD (b), BB_END (b));
4905 
4906       emit_insn_after_noloc (insn, BB_END (a), a);
4907     }
4908   /* Otherwise just re-associate the instructions.  */
4909   else
4910     {
4911       insn = BB_HEAD (b);
4912       BB_END (a) = BB_END (b);
4913     }
4914 
4915   /* emit_insn_after_noloc doesn't call df_insn_change_bb.
4916      We need to call it explicitly.  */
4917   update_bb_for_insn_chain (insn, BB_END (b), a);
4918 
4919   /* Skip possible DELETED_LABEL insn.  */
4920   if (!NOTE_INSN_BASIC_BLOCK_P (insn))
4921     insn = NEXT_INSN (insn);
4922   gcc_assert (NOTE_INSN_BASIC_BLOCK_P (insn));
4923   BB_HEAD (b) = BB_END (b) = NULL;
4924   delete_insn (insn);
4925 
4926   df_bb_delete (b->index);
4927 
4928   if (forward_edge_locus)
4929     EDGE_SUCC (b, 0)->goto_locus = EDGE_SUCC (a, 0)->goto_locus;
4930 
4931   if (dump_file)
4932     fprintf (dump_file, "Merged blocks %d and %d.\n", a->index, b->index);
4933 }
4934 
4935 /* Split edge E.  */
4936 
4937 static basic_block
4938 cfg_layout_split_edge (edge e)
4939 {
4940   basic_block new_bb =
4941     create_basic_block (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
4942 			? NEXT_INSN (BB_END (e->src)) : get_insns (),
4943 			NULL_RTX, e->src);
4944 
4945   if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
4946     BB_COPY_PARTITION (new_bb, e->src);
4947   else
4948     BB_COPY_PARTITION (new_bb, e->dest);
4949   make_edge (new_bb, e->dest, EDGE_FALLTHRU);
4950   redirect_edge_and_branch_force (e, new_bb);
4951 
4952   return new_bb;
4953 }
4954 
4955 /* Do postprocessing after making a forwarder block joined by edge FALLTHRU.  */
4956 
4957 static void
4958 rtl_make_forwarder_block (edge fallthru ATTRIBUTE_UNUSED)
4959 {
4960 }
4961 
4962 /* Return true if BB contains only labels or non-executable
4963    instructions.  */
4964 
4965 static bool
4966 rtl_block_empty_p (basic_block bb)
4967 {
4968   rtx_insn *insn;
4969 
4970   if (bb == ENTRY_BLOCK_PTR_FOR_FN (cfun)
4971       || bb == EXIT_BLOCK_PTR_FOR_FN (cfun))
4972     return true;
4973 
4974   FOR_BB_INSNS (bb, insn)
4975     if (NONDEBUG_INSN_P (insn)
4976 	&& (!any_uncondjump_p (insn) || !onlyjump_p (insn)))
4977       return false;
4978 
4979   return true;
4980 }
4981 
4982 /* Split a basic block if it ends with a conditional branch and if
4983    the other part of the block is not empty.  */
4984 
4985 static basic_block
4986 rtl_split_block_before_cond_jump (basic_block bb)
4987 {
4988   rtx_insn *insn;
4989   rtx_insn *split_point = NULL;
4990   rtx_insn *last = NULL;
4991   bool found_code = false;
4992 
4993   FOR_BB_INSNS (bb, insn)
4994     {
4995       if (any_condjump_p (insn))
4996 	split_point = last;
4997       else if (NONDEBUG_INSN_P (insn))
4998 	found_code = true;
4999       last = insn;
5000     }
5001 
5002   /* Split only if we found both a conditional jump and other real code.  */
5003   if (found_code && split_point)
5004     return split_block (bb, split_point)->dest;
5005   else
5006     return NULL;
5007 }
5008 
5009 /* Return true if BB ends with a call, possibly followed by some
5010    instructions that must stay with the call, false otherwise.  */
5011 
5012 static bool
5013 rtl_block_ends_with_call_p (basic_block bb)
5014 {
5015   rtx_insn *insn = BB_END (bb);
5016 
5017   while (!CALL_P (insn)
5018 	 && insn != BB_HEAD (bb)
5019 	 && (keep_with_call_p (insn)
5020 	     || NOTE_P (insn)
5021 	     || DEBUG_INSN_P (insn)))
5022     insn = PREV_INSN (insn);
5023   return (CALL_P (insn));
5024 }
5025 
5026 /* Return true if BB ends with a conditional branch, false otherwise.  */
5027 
5028 static bool
5029 rtl_block_ends_with_condjump_p (const_basic_block bb)
5030 {
5031   return any_condjump_p (BB_END (bb));
5032 }
5033 
5034 /* Return true if we need to add a fake edge to exit.
5035    Helper function for rtl_flow_call_edges_add.  */
5036 
5037 static bool
5038 need_fake_edge_p (const rtx_insn *insn)
5039 {
5040   if (!INSN_P (insn))
5041     return false;
5042 
5043   if ((CALL_P (insn)
5044        && !SIBLING_CALL_P (insn)
5045        && !find_reg_note (insn, REG_NORETURN, NULL)
5046        && !(RTL_CONST_OR_PURE_CALL_P (insn))))
5047     return true;
5048 
5049   return ((GET_CODE (PATTERN (insn)) == ASM_OPERANDS
5050 	   && MEM_VOLATILE_P (PATTERN (insn)))
5051 	  || (GET_CODE (PATTERN (insn)) == PARALLEL
5052 	      && asm_noperands (insn) != -1
5053 	      && MEM_VOLATILE_P (XVECEXP (PATTERN (insn), 0, 0)))
5054 	  || GET_CODE (PATTERN (insn)) == ASM_INPUT);
5055 }
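
/* Illustrative examples (editorial addition, not part of GCC): a call
   to an unknown external function needs a fake edge, since it might
   not return control here at all; a call carrying REG_NORETURN or a
   const/pure call does not; a volatile asm does, because it can leave
   the function in ways the CFG cannot see.  */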
5056 
5057 /* Add fake edges to the function exit for any non-constant and non-noreturn
5058    calls, volatile inline assembly in the bitmap of blocks specified by
5059    BLOCKS or to the whole CFG if BLOCKS is zero.  Return the number of blocks
5060    that were split.
5061 
5062    The goal is to expose cases in which entering a basic block does not imply
5063    that all subsequent instructions must be executed.  */
5064 
5065 static int
5066 rtl_flow_call_edges_add (sbitmap blocks)
5067 {
5068   int i;
5069   int blocks_split = 0;
5070   int last_bb = last_basic_block_for_fn (cfun);
5071   bool check_last_block = false;
5072 
5073   if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS)
5074     return 0;
5075 
5076   if (! blocks)
5077     check_last_block = true;
5078   else
5079     check_last_block = bitmap_bit_p (blocks,
5080 				     EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb->index);
5081 
5082   /* In the last basic block, before epilogue generation, there will be
5083      a fallthru edge to EXIT.  Special care is required if the last insn
5084      of the last basic block is a call because make_edge folds duplicate
5085      edges, which would result in the fallthru edge also being marked
5086      fake, which would result in the fallthru edge being removed by
5087      remove_fake_edges, which would result in an invalid CFG.
5088 
5089      Moreover, we can't elide the outgoing fake edge, since the block
5090      profiler needs to take this into account in order to solve the minimal
5091      spanning tree in the case that the call doesn't return.
5092 
5093      Handle this by adding a dummy instruction in a new last basic block.  */
5094   if (check_last_block)
5095     {
5096       basic_block bb = EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb;
5097       rtx_insn *insn = BB_END (bb);
5098 
5099       /* Back up past insns that must be kept in the same block as a call.  */
5100       while (insn != BB_HEAD (bb)
5101 	     && keep_with_call_p (insn))
5102 	insn = PREV_INSN (insn);
5103 
5104       if (need_fake_edge_p (insn))
5105 	{
5106 	  edge e;
5107 
5108 	  e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
5109 	  if (e)
5110 	    {
5111 	      insert_insn_on_edge (gen_use (const0_rtx), e);
5112 	      commit_edge_insertions ();
5113 	    }
5114 	}
5115     }
5116 
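  /* Clarifying note (not in the original source): gen_use (const0_rtx) is
     the dummy instruction promised above; committing it on the fallthru
     edge to EXIT splits that edge, so the call no longer ends the last
     basic block and its fake edge cannot be merged with the fallthru
     edge by make_edge.  */
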
5117   /* Now add fake edges to the function exit for any non-constant
5118      calls, since there is no way to determine whether they will
5119      return or not...  */
5120 
5121   for (i = NUM_FIXED_BLOCKS; i < last_bb; i++)
5122     {
5123       basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
5124       rtx_insn *insn;
5125       rtx_insn *prev_insn;
5126 
5127       if (!bb)
5128 	continue;
5129 
5130       if (blocks && !bitmap_bit_p (blocks, i))
5131 	continue;
5132 
5133       for (insn = BB_END (bb); ; insn = prev_insn)
5134 	{
5135 	  prev_insn = PREV_INSN (insn);
5136 	  if (need_fake_edge_p (insn))
5137 	    {
5138 	      edge e;
5139 	      rtx_insn *split_at_insn = insn;
5140 
5141 	      /* Don't split the block between a call and an insn that should
5142 		 remain in the same block as the call.  */
5143 	      if (CALL_P (insn))
5144 		while (split_at_insn != BB_END (bb)
5145 		       && keep_with_call_p (NEXT_INSN (split_at_insn)))
5146 		  split_at_insn = NEXT_INSN (split_at_insn);
5147 
5148 	      /* The handling above of the final block before the epilogue
5149 		 should be enough to verify that there is no edge to the exit
5150 		 block in the CFG already.  Calling make_edge in that case would
5151 		 cause us to mark that edge as fake and remove it later.  */
5152 
5153 	      if (flag_checking && split_at_insn == BB_END (bb))
5154 		{
5155 		  e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
5156 		  gcc_assert (e == NULL);
5157 		}
5158 
5159 	      /* Note that the following may create a new basic block
5160 		 and renumber the existing basic blocks.  */
5161 	      if (split_at_insn != BB_END (bb))
5162 		{
5163 		  e = split_block (bb, split_at_insn);
5164 		  if (e)
5165 		    blocks_split++;
5166 		}
5167 
5168 	      edge ne = make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_FAKE);
5169 	      ne->probability = profile_probability::guessed_never ();
5170 	    }
5171 
5172 	  if (insn == BB_HEAD (bb))
5173 	    break;
5174 	}
5175     }
5176 
5177   if (blocks_split)
5178     verify_flow_info ();
5179 
5180   return blocks_split;
5181 }
5182 
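/* Usage sketch (not in the original source; assumes the generic
   flow_call_edges_add wrapper from cfghooks.h dispatches here when the
   rtl hooks are installed):

	int nsplit = flow_call_edges_add (NULL);   // NULL = whole CFG

   Passing a non-NULL sbitmap instead restricts the walk to the blocks
   whose bits are set.  */
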
5183 /* Add COMP_RTX as a condition at the end of COND_BB.  FIRST_HEAD is
5184    the conditional branch target, SECOND_HEAD the fall-thru; the
5185    fall-thru needs no handling here, since the loop versioning code
5186    takes care of it.  SECOND_HEAD is kept because the tree variant of
5187    this hook needs it, and all variants must share one type.  */
5188 static void
5189 rtl_lv_add_condition_to_bb (basic_block first_head,
5190 			    basic_block second_head ATTRIBUTE_UNUSED,
5191 			    basic_block cond_bb, void *comp_rtx)
5192 {
5193   rtx_code_label *label;
5194   rtx_insn *seq, *jump;
5195   rtx op0 = XEXP ((rtx)comp_rtx, 0);
5196   rtx op1 = XEXP ((rtx)comp_rtx, 1);
5197   enum rtx_code comp = GET_CODE ((rtx)comp_rtx);
5198   machine_mode mode;
5199 
5200 
5201   label = block_label (first_head);
5202   mode = GET_MODE (op0);
5203   if (mode == VOIDmode)
5204     mode = GET_MODE (op1);
5205 
5206   start_sequence ();
5207   op0 = force_operand (op0, NULL_RTX);
5208   op1 = force_operand (op1, NULL_RTX);
5209   do_compare_rtx_and_jump (op0, op1, comp, 0, mode, NULL_RTX, NULL, label,
5210 			   profile_probability::uninitialized ());
5211   jump = get_last_insn ();
5212   JUMP_LABEL (jump) = label;
5213   LABEL_NUSES (label)++;
5214   seq = get_insns ();
5215   end_sequence ();
5216 
5217   /* Add the new cond, in the new head.  */
5218   emit_insn_after (seq, BB_END (cond_bb));
5219 }
5220 
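/* Illustrative sketch (not in the original source): for a COMP_RTX of

	(lt (reg a) (reg b))

   the emitted sequence is roughly

	a' = force_operand (a);  b' = force_operand (b);
	if (a' < b') goto <label of FIRST_HEAD>;

   appended after BB_END (COND_BB); the jump's JUMP_LABEL points at
   FIRST_HEAD's label and falling through reaches SECOND_HEAD.  */
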
5221 
5222 /* Given a block B that ends with a conditional branch, store the
5223    branch edge and the fall-thru edge in BRANCH_EDGE and FALLTHRU_EDGE
5224    respectively.  */
5225 static void
5226 rtl_extract_cond_bb_edges (basic_block b, edge *branch_edge,
5227 			   edge *fallthru_edge)
5228 {
5229   edge e = EDGE_SUCC (b, 0);
5230 
5231   if (e->flags & EDGE_FALLTHRU)
5232     {
5233       *fallthru_edge = e;
5234       *branch_edge = EDGE_SUCC (b, 1);
5235     }
5236   else
5237     {
5238       *branch_edge = e;
5239       *fallthru_edge = EDGE_SUCC (b, 1);
5240     }
5241 }
5242 
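/* Usage sketch (not in the original source): outside this file the
   function is reached through the extract_cond_bb_edges hook, e.g.

	edge branch_e, fallthru_e;
	extract_cond_bb_edges (b, &branch_e, &fallthru_e);

   B must have exactly two successors, one carrying EDGE_FALLTHRU;
   otherwise the EDGE_SUCC (b, 1) accesses above are out of range.  */
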
5243 void
5244 init_rtl_bb_info (basic_block bb)
5245 {
5246   gcc_assert (!bb->il.x.rtl);
5247   bb->il.x.head_ = NULL;
5248   bb->il.x.rtl = ggc_cleared_alloc<rtl_bb_info> ();
5249 }
5250 
5251 static bool
5252 rtl_bb_info_initialized_p (basic_block bb)
5253 {
5254   return bb->il.x.rtl;
5255 }
5256 
5257 /* Returns true if it is possible to remove edge E by redirecting
5258    it to the destination of the other edge from E->src.  */
5259 
5260 static bool
5261 rtl_can_remove_branch_p (const_edge e)
5262 {
5263   const_basic_block src = e->src;
5264   const_basic_block target = EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest;
5265   const rtx_insn *insn = BB_END (src);
5266   rtx set;
5267 
5268   /* The conditions are taken from try_redirect_by_replacing_jump.  */
5269   if (target == EXIT_BLOCK_PTR_FOR_FN (cfun))
5270     return false;
5271 
5272   if (e->flags & (EDGE_ABNORMAL_CALL | EDGE_EH))
5273     return false;
5274 
5275   if (BB_PARTITION (src) != BB_PARTITION (target))
5276     return false;
5277 
5278   if (!onlyjump_p (insn)
5279       || tablejump_p (insn, NULL, NULL))
5280     return false;
5281 
5282   set = single_set (insn);
5283   if (!set || side_effects_p (set))
5284     return false;
5285 
5286   return true;
5287 }
5288 
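/* Illustrative note (not in the original source): the predicate mirrors
   try_redirect_by_replacing_jump.  For a plain two-way block ending in
   "if (cc) goto L1;" with a fallthru to L2, either edge may be removed
   by rewriting the jump, provided the surviving destination is not the
   exit block, the edge is not abnormal/EH, source and destination share
   a hot/cold partition, and the jump is a side-effect-free single_set
   that is not a tablejump.  */
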
5289 static basic_block
5290 rtl_duplicate_bb (basic_block bb, copy_bb_data *id)
5291 {
5292   bb = cfg_layout_duplicate_bb (bb, id);
5293   bb->aux = NULL;
5294   return bb;
5295 }
5296 
5297 /* Do book-keeping of basic block BB for the profile consistency checker.
5298    Store the counts in RECORD.  */
5299 static void
5300 rtl_account_profile_record (basic_block bb, struct profile_record *record)
5301 {
5302   rtx_insn *insn;
5303   FOR_BB_INSNS (bb, insn)
5304     if (INSN_P (insn))
5305       {
5306 	record->size += insn_cost (insn, false);
5307 	if (profile_info)
5308 	  {
5309 	    if (ENTRY_BLOCK_PTR_FOR_FN (cfun)->count.ipa ().initialized_p ()
5310 		&& ENTRY_BLOCK_PTR_FOR_FN (cfun)->count.ipa ().nonzero_p ()
5311 		&& bb->count.ipa ().initialized_p ())
5312 	      record->time
5313 		+= insn_cost (insn, true) * bb->count.ipa ().to_gcov_type ();
5314 	  }
5315 	else if (bb->count.initialized_p ()
5316 		 && ENTRY_BLOCK_PTR_FOR_FN (cfun)->count.initialized_p ())
5317 	  record->time
5318 	    += insn_cost (insn, true)
5319 	       * bb->count.to_sreal_scale
5320 		      (ENTRY_BLOCK_PTR_FOR_FN (cfun)->count).to_double ();
5321 	else
5322 	  record->time += insn_cost (insn, true);
5323       }
5324 }
5325 
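/* Worked example (not in the original source): with an IPA entry count
   of 1000 and bb->count.ipa () == 250, an insn of cost 4 adds 4 to
   record->size and 4 * 250 = 1000 to record->time; without IPA profile
   data the same insn is scaled by the local count ratio instead, here
   250/1000 = 0.25, contributing 4 * 0.25 = 1.  */
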
5326 /* Implementation of CFG manipulation for linearized RTL.  */
5327 struct cfg_hooks rtl_cfg_hooks = {
5328   "rtl",
5329   rtl_verify_flow_info,
5330   rtl_dump_bb,
5331   rtl_dump_bb_for_graph,
5332   rtl_create_basic_block,
5333   rtl_redirect_edge_and_branch,
5334   rtl_redirect_edge_and_branch_force,
5335   rtl_can_remove_branch_p,
5336   rtl_delete_block,
5337   rtl_split_block,
5338   rtl_move_block_after,
5339   rtl_can_merge_blocks,  /* can_merge_blocks_p */
5340   rtl_merge_blocks,
5341   rtl_predict_edge,
5342   rtl_predicted_by_p,
5343   cfg_layout_can_duplicate_bb_p,
5344   rtl_duplicate_bb,
5345   rtl_split_edge,
5346   rtl_make_forwarder_block,
5347   rtl_tidy_fallthru_edge,
5348   rtl_force_nonfallthru,
5349   rtl_block_ends_with_call_p,
5350   rtl_block_ends_with_condjump_p,
5351   rtl_flow_call_edges_add,
5352   NULL, /* execute_on_growing_pred */
5353   NULL, /* execute_on_shrinking_pred */
5354   NULL, /* duplicate loop for trees */
5355   NULL, /* lv_add_condition_to_bb */
5356   NULL, /* lv_adjust_loop_header_phi*/
5357   NULL, /* extract_cond_bb_edges */
5358   NULL, /* flush_pending_stmts */
5359   rtl_block_empty_p, /* block_empty_p */
5360   rtl_split_block_before_cond_jump, /* split_block_before_cond_jump */
5361   rtl_account_profile_record,
5362 };
5363 
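/* Usage sketch (not in the original source): passes do not call the
   table above directly; they install it and then go through the generic
   wrappers from cfghooks.h, e.g.

	rtl_register_cfg_hooks ();	// select rtl_cfg_hooks
	basic_block nb = split_block (bb, insn)->dest;

   cfg_layout_rtl_register_cfg_hooks () selects the cfglayout table that
   follows.  */
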
5364 /* Implementation of CFG manipulation for cfg layout RTL, where
5365    basic blocks connected via fallthru edges do not have to be adjacent.
5366    This representation will hopefully become the default one in a future
5367    version of the compiler.  */
5368 
5369 struct cfg_hooks cfg_layout_rtl_cfg_hooks = {
5370   "cfglayout mode",
5371   rtl_verify_flow_info_1,
5372   rtl_dump_bb,
5373   rtl_dump_bb_for_graph,
5374   cfg_layout_create_basic_block,
5375   cfg_layout_redirect_edge_and_branch,
5376   cfg_layout_redirect_edge_and_branch_force,
5377   rtl_can_remove_branch_p,
5378   cfg_layout_delete_block,
5379   cfg_layout_split_block,
5380   rtl_move_block_after,
5381   cfg_layout_can_merge_blocks_p,
5382   cfg_layout_merge_blocks,
5383   rtl_predict_edge,
5384   rtl_predicted_by_p,
5385   cfg_layout_can_duplicate_bb_p,
5386   cfg_layout_duplicate_bb,
5387   cfg_layout_split_edge,
5388   rtl_make_forwarder_block,
5389   NULL, /* tidy_fallthru_edge */
5390   rtl_force_nonfallthru,
5391   rtl_block_ends_with_call_p,
5392   rtl_block_ends_with_condjump_p,
5393   rtl_flow_call_edges_add,
5394   NULL, /* execute_on_growing_pred */
5395   NULL, /* execute_on_shrinking_pred */
5396   duplicate_loop_body_to_header_edge, /* duplicate loop for trees */
5397   rtl_lv_add_condition_to_bb, /* lv_add_condition_to_bb */
5398   NULL, /* lv_adjust_loop_header_phi*/
5399   rtl_extract_cond_bb_edges, /* extract_cond_bb_edges */
5400   NULL, /* flush_pending_stmts */
5401   rtl_block_empty_p, /* block_empty_p */
5402   rtl_split_block_before_cond_jump, /* split_block_before_cond_jump */
5403   rtl_account_profile_record,
5404 };
5405 
5406 #include "gt-cfgrtl.h"
5407 
5408 #if __GNUC__ >= 10
5409 #  pragma GCC diagnostic pop
5410 #endif
5411