/* Control flow graph manipulation code for GNU compiler.
   Copyright (C) 1987-2015 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* This file contains low-level functions to manipulate and analyze the CFG
   that are aware of the RTL intermediate language.

   Available functionality:
     - Basic CFG/RTL manipulation API documented in cfghooks.h
     - CFG-aware instruction chain manipulation
	 delete_insn, delete_insn_chain
     - Edge splitting and committing to edges
	 insert_insn_on_edge, commit_edge_insertions
     - CFG updating after insn simplification
	 purge_dead_edges, purge_all_dead_edges
     - CFG fixing after coarse manipulation
	 fixup_abnormal_edges

   Functions not intended for generic use:
     - Infrastructure to quickly determine the basic block containing an insn
	 compute_bb_for_insn, update_bb_for_insn, set_block_for_insn
     - Edge redirection with updating and optimizing of the insn chain
	 block_label, tidy_fallthru_edge, force_nonfallthru  */
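
/* A minimal usage sketch of the API above (the pass context and the INSN
   variable are hypothetical; the functions themselves are the entry points
   documented in this file and in cfghooks.h):

	rtx_insn *insn = ...;		// some insn known to be dead
	delete_insn_and_edges (insn);	// CFG-aware removal
	purge_all_dead_edges ();	// drop edges no longer realized

   A pass that only simplified a few jumps would instead call
   purge_dead_edges on each affected block.  */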

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "hash-set.h"
#include "machmode.h"
#include "vec.h"
#include "double-int.h"
#include "input.h"
#include "alias.h"
#include "symtab.h"
#include "wide-int.h"
#include "inchash.h"
#include "tree.h"
#include "hard-reg-set.h"
#include "predict.h"
#include "hashtab.h"
#include "function.h"
#include "dominance.h"
#include "cfg.h"
#include "cfgrtl.h"
#include "cfganal.h"
#include "cfgbuild.h"
#include "cfgcleanup.h"
#include "basic-block.h"
#include "bb-reorder.h"
#include "regs.h"
#include "flags.h"
#include "except.h"
#include "rtl-error.h"
#include "tm_p.h"
#include "obstack.h"
#include "insn-attr.h"
#include "insn-config.h"
#include "rtl.h"
#include "statistics.h"
#include "real.h"
#include "fixed-value.h"
#include "expmed.h"
#include "dojump.h"
#include "explow.h"
#include "calls.h"
#include "emit-rtl.h"
#include "varasm.h"
#include "stmt.h"
#include "expr.h"
#include "target.h"
#include "common/common-target.h"
#include "cfgloop.h"
#include "ggc.h"
#include "tree-pass.h"
#include "df.h"

/* Holds the interesting leading and trailing notes for the function.
   Only applicable if the CFG is in cfglayout mode.  */
static GTY(()) rtx_insn *cfg_layout_function_footer;
static GTY(()) rtx_insn *cfg_layout_function_header;

static rtx_insn *skip_insns_after_block (basic_block);
static void record_effective_endpoints (void);
static rtx label_for_bb (basic_block);
static void fixup_reorder_chain (void);

void verify_insn_chain (void);
static void fixup_fallthru_exit_predecessor (void);
static int can_delete_note_p (const rtx_note *);
static int can_delete_label_p (const rtx_code_label *);
static basic_block rtl_split_edge (edge);
static bool rtl_move_block_after (basic_block, basic_block);
static int rtl_verify_flow_info (void);
static basic_block cfg_layout_split_block (basic_block, void *);
static edge cfg_layout_redirect_edge_and_branch (edge, basic_block);
static basic_block cfg_layout_redirect_edge_and_branch_force (edge, basic_block);
static void cfg_layout_delete_block (basic_block);
static void rtl_delete_block (basic_block);
static basic_block rtl_redirect_edge_and_branch_force (edge, basic_block);
static edge rtl_redirect_edge_and_branch (edge, basic_block);
static basic_block rtl_split_block (basic_block, void *);
static void rtl_dump_bb (FILE *, basic_block, int, int);
static int rtl_verify_flow_info_1 (void);
static void rtl_make_forwarder_block (edge);

/* Return true if NOTE is not one of the ones that must be kept paired,
   so that we may simply delete it.  */

static int
can_delete_note_p (const rtx_note *note)
{
  switch (NOTE_KIND (note))
    {
    case NOTE_INSN_DELETED:
    case NOTE_INSN_BASIC_BLOCK:
    case NOTE_INSN_EPILOGUE_BEG:
      return true;

    default:
      return false;
    }
}

/* True if a given label can be deleted.  */

static int
can_delete_label_p (const rtx_code_label *label)
{
  return (!LABEL_PRESERVE_P (label)
	  /* User declared labels must be preserved.  */
	  && LABEL_NAME (label) == 0
	  && !in_expr_list_p (forced_labels, label));
}

/* Delete INSN by patching it out.  */

void
delete_insn (rtx uncast_insn)
{
  rtx_insn *insn = as_a <rtx_insn *> (uncast_insn);
  rtx note;
  bool really_delete = true;

  if (LABEL_P (insn))
    {
      /* Some labels can't be directly removed from the INSN chain, as they
	 might be referenced via variables, the constant pool etc.
	 Convert them to the special NOTE_INSN_DELETED_LABEL note.  */
      if (! can_delete_label_p (as_a <rtx_code_label *> (insn)))
	{
	  const char *name = LABEL_NAME (insn);
	  basic_block bb = BLOCK_FOR_INSN (insn);
	  rtx_insn *bb_note = NEXT_INSN (insn);

	  really_delete = false;
	  PUT_CODE (insn, NOTE);
	  NOTE_KIND (insn) = NOTE_INSN_DELETED_LABEL;
	  NOTE_DELETED_LABEL_NAME (insn) = name;

	  /* If the note following the label starts a basic block, and the
	     label is a member of the same basic block, interchange the two.  */
	  if (bb_note != NULL_RTX
	      && NOTE_INSN_BASIC_BLOCK_P (bb_note)
	      && bb != NULL
	      && bb == BLOCK_FOR_INSN (bb_note))
	    {
	      reorder_insns_nobb (insn, insn, bb_note);
	      BB_HEAD (bb) = bb_note;
	      if (BB_END (bb) == bb_note)
		BB_END (bb) = insn;
	    }
	}

      remove_node_from_insn_list (insn, &nonlocal_goto_handler_labels);
    }

  if (really_delete)
    {
      /* If this insn has already been deleted, something is very wrong.  */
      gcc_assert (!insn->deleted ());
      if (INSN_P (insn))
	df_insn_delete (insn);
      remove_insn (insn);
      insn->set_deleted ();
    }

  /* If deleting a jump, decrement the use count of the label.  Deleting
     the label itself should happen in the normal course of block merging.  */
  if (JUMP_P (insn))
    {
      if (JUMP_LABEL (insn)
	  && LABEL_P (JUMP_LABEL (insn)))
	LABEL_NUSES (JUMP_LABEL (insn))--;

      /* If there are more targets, remove them too.  */
      while ((note
	      = find_reg_note (insn, REG_LABEL_TARGET, NULL_RTX)) != NULL_RTX
	     && LABEL_P (XEXP (note, 0)))
	{
	  LABEL_NUSES (XEXP (note, 0))--;
	  remove_note (insn, note);
	}
    }

  /* Also if deleting any insn that references a label as an operand.  */
  while ((note = find_reg_note (insn, REG_LABEL_OPERAND, NULL_RTX)) != NULL_RTX
	 && LABEL_P (XEXP (note, 0)))
    {
      LABEL_NUSES (XEXP (note, 0))--;
      remove_note (insn, note);
    }

  if (rtx_jump_table_data *table = dyn_cast <rtx_jump_table_data *> (insn))
    {
      rtvec vec = table->get_labels ();
      int len = GET_NUM_ELEM (vec);
      int i;

      for (i = 0; i < len; i++)
	{
	  rtx label = XEXP (RTVEC_ELT (vec, i), 0);

	  /* When deleting code in bulk (e.g. removing many unreachable
	     blocks) we can delete a label that's a target of the vector
	     before deleting the vector itself.  */
	  if (!NOTE_P (label))
	    LABEL_NUSES (label)--;
	}
    }
}

/* Like delete_insn but also purge dead edges from BB.  */

void
delete_insn_and_edges (rtx_insn *insn)
{
  bool purge = false;

  if (INSN_P (insn)
      && BLOCK_FOR_INSN (insn)
      && BB_END (BLOCK_FOR_INSN (insn)) == insn)
    purge = true;
  delete_insn (insn);
  if (purge)
    purge_dead_edges (BLOCK_FOR_INSN (insn));
}
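
/* Usage sketch: when removing an insn that may end its block, prefer
   delete_insn_and_edges over plain delete_insn so that edges made dead by
   the removal are purged as well (INSN here is hypothetical):

	if (INSN_P (insn))
	  delete_insn_and_edges (insn);  */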

/* Unlink a chain of insns between START and FINISH, leaving notes
   that must be paired.  If CLEAR_BB is true, we set the bb field to
   NULL for insns that cannot be removed.  */

void
delete_insn_chain (rtx start, rtx finish, bool clear_bb)
{
  rtx_insn *prev, *current;

  /* Unchain the insns one by one.  It would be quicker to delete all of these
     with a single unchaining, rather than one at a time, but we need to keep
     the NOTEs.  */
  current = safe_as_a <rtx_insn *> (finish);
  while (1)
    {
      prev = PREV_INSN (current);
      if (NOTE_P (current) && !can_delete_note_p (as_a <rtx_note *> (current)))
	;
      else
	delete_insn (current);

      if (clear_bb && !current->deleted ())
	set_block_for_insn (current, NULL);

      if (current == start)
	break;
      current = prev;
    }
}

/* Create a new basic block consisting of the instructions between HEAD and END
   inclusive.  This function is designed to allow fast BB construction - it
   reuses the note and basic block struct in BB_NOTE, if any, does not grow
   the BASIC_BLOCK chain, and should be used directly only by CFG construction
   code.  END can be NULL to create a new empty basic block before HEAD.  Both
   END and HEAD can be NULL to create a basic block at the end of the INSN
   chain.  AFTER is the basic block we should put the new block after.  */

basic_block
create_basic_block_structure (rtx_insn *head, rtx_insn *end, rtx_note *bb_note,
			      basic_block after)
{
  basic_block bb;

  if (bb_note
      && (bb = NOTE_BASIC_BLOCK (bb_note)) != NULL
      && bb->aux == NULL)
    {
      /* If we found an existing note, thread it back onto the chain.  */

      rtx_insn *after;

      if (LABEL_P (head))
	after = head;
      else
	{
	  after = PREV_INSN (head);
	  head = bb_note;
	}

      if (after != bb_note && NEXT_INSN (after) != bb_note)
	reorder_insns_nobb (bb_note, bb_note, after);
    }
  else
    {
      /* Otherwise we must create a note and a basic block structure.  */

      bb = alloc_block ();

      init_rtl_bb_info (bb);
      if (!head && !end)
	head = end = bb_note
	  = emit_note_after (NOTE_INSN_BASIC_BLOCK, get_last_insn ());
      else if (LABEL_P (head) && end)
	{
	  bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK, head);
	  if (head == end)
	    end = bb_note;
	}
      else
	{
	  bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK, head);
	  head = bb_note;
	  if (!end)
	    end = head;
	}

      NOTE_BASIC_BLOCK (bb_note) = bb;
    }

  /* Always include the bb note in the block.  */
  if (NEXT_INSN (end) == bb_note)
    end = bb_note;

  BB_HEAD (bb) = head;
  BB_END (bb) = end;
  bb->index = last_basic_block_for_fn (cfun)++;
  bb->flags = BB_NEW | BB_RTL;
  link_block (bb, after);
  SET_BASIC_BLOCK_FOR_FN (cfun, bb->index, bb);
  df_bb_refs_record (bb->index, false);
  update_bb_for_insn (bb);
  BB_SET_PARTITION (bb, BB_UNPARTITIONED);

  /* Tag the block so that we know it has been used when considering
     other basic block notes.  */
  bb->aux = bb;

  return bb;
}
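
/* For example (a sketch; AFTER is assumed to be some existing block):
   per the comment above, passing NULL for HEAD, END and BB_NOTE appends a
   new empty basic block at the end of the insn chain:

	basic_block new_bb
	  = create_basic_block_structure (NULL, NULL, NULL, after);

   Generic code should instead go through the create_basic_block hook
   (see cfghooks.h), which also grows the basic block array.  */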

/* Create a new basic block consisting of the instructions between HEAD and END
   and place it in the BB chain after block AFTER.  END can be NULL to
   create a new empty basic block before HEAD.  Both END and HEAD can be
   NULL to create a basic block at the end of the INSN chain.  */

static basic_block
rtl_create_basic_block (void *headp, void *endp, basic_block after)
{
  rtx_insn *head = (rtx_insn *) headp;
  rtx_insn *end = (rtx_insn *) endp;
  basic_block bb;

  /* Grow the basic block array if needed.  */
  if ((size_t) last_basic_block_for_fn (cfun)
      >= basic_block_info_for_fn (cfun)->length ())
    {
      size_t new_size =
	(last_basic_block_for_fn (cfun)
	 + (last_basic_block_for_fn (cfun) + 3) / 4);
      vec_safe_grow_cleared (basic_block_info_for_fn (cfun), new_size);
    }

  n_basic_blocks_for_fn (cfun)++;

  bb = create_basic_block_structure (head, end, NULL, after);
  bb->aux = NULL;
  return bb;
}

static basic_block
cfg_layout_create_basic_block (void *head, void *end, basic_block after)
{
  basic_block newbb = rtl_create_basic_block (head, end, after);

  return newbb;
}

/* Delete the insns in a (non-live) block.  We physically delete every
   non-deleted-note insn, and update the flow graph appropriately.  */

/* ??? Preserving all such notes strikes me as wrong.  It would be nice
   to post-process the stream to remove empty blocks, loops, ranges, etc.  */

static void
rtl_delete_block (basic_block b)
{
  rtx_insn *insn, *end;

  /* If the head of this block is a CODE_LABEL, then it might be the
     label for an exception handler which can't be reached.  We need
     to remove the label from the exception_handler_label list.  */
  insn = BB_HEAD (b);

  end = get_last_bb_insn (b);

  /* Selectively delete the entire chain.  */
  BB_HEAD (b) = NULL;
  delete_insn_chain (insn, end, true);

  if (dump_file)
    fprintf (dump_file, "deleting block %d\n", b->index);
  df_bb_delete (b->index);
}

/* Records the basic block struct in BLOCK_FOR_INSN for every insn.  */

void
compute_bb_for_insn (void)
{
  basic_block bb;

  FOR_EACH_BB_FN (bb, cfun)
    {
      rtx_insn *end = BB_END (bb);
      rtx_insn *insn;

      for (insn = BB_HEAD (bb); ; insn = NEXT_INSN (insn))
	{
	  BLOCK_FOR_INSN (insn) = bb;
	  if (insn == end)
	    break;
	}
    }
}

/* Release the basic_block_for_insn array.  */

unsigned int
free_bb_for_insn (void)
{
  rtx_insn *insn;
  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    if (!BARRIER_P (insn))
      BLOCK_FOR_INSN (insn) = NULL;
  return 0;
}

namespace {

const pass_data pass_data_free_cfg =
{
  RTL_PASS, /* type */
  "*free_cfg", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  PROP_cfg, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_free_cfg : public rtl_opt_pass
{
public:
  pass_free_cfg (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_free_cfg, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *);

}; // class pass_free_cfg

unsigned int
pass_free_cfg::execute (function *)
{
#ifdef DELAY_SLOTS
  /* The resource.c machinery uses DF but the CFG isn't guaranteed to be
     valid at that point so it would be too late to call df_analyze.  */
  if (optimize > 0 && flag_delayed_branch)
    {
      df_note_add_problem ();
      df_analyze ();
    }
#endif

  if (crtl->has_bb_partition)
    insert_section_boundary_note ();

  free_bb_for_insn ();
  return 0;
}

} // anon namespace

rtl_opt_pass *
make_pass_free_cfg (gcc::context *ctxt)
{
  return new pass_free_cfg (ctxt);
}

/* Return the insn to emit after when we want to emit code at the entry of the
   function.  */
rtx_insn *
entry_of_function (void)
{
  return (n_basic_blocks_for_fn (cfun) > NUM_FIXED_BLOCKS ?
	  BB_HEAD (ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb) : get_insns ());
}

/* Emit INSN at the entry point of the function, ensuring that it is only
   executed once per function.  */
void
emit_insn_at_entry (rtx insn)
{
  edge_iterator ei = ei_start (ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs);
  edge e = ei_safe_edge (ei);
  gcc_assert (e->flags & EDGE_FALLTHRU);

  insert_insn_on_edge (insn, e);
  commit_edge_insertions ();
}
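
/* emit_insn_at_entry is itself an example of the edge insertion API
   advertised at the top of this file.  The general pattern is (E and PAT
   are assumed to exist):

	insert_insn_on_edge (pat, e);	// queue PAT on edge E
	...				// possibly queue more insns
	commit_edge_insertions ();	// split edges and emit everything

   Nothing queued on an edge is emitted until commit_edge_insertions
   is called.  */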

/* Update BLOCK_FOR_INSN of insns between BEGIN and END
   (or the BARRIER if found) and notify df of the bb change.
   The insn chain range is inclusive
   (i.e. both BEGIN and END will be updated).  */

static void
update_bb_for_insn_chain (rtx_insn *begin, rtx_insn *end, basic_block bb)
{
  rtx_insn *insn;

  end = NEXT_INSN (end);
  for (insn = begin; insn != end; insn = NEXT_INSN (insn))
    if (!BARRIER_P (insn))
      df_insn_change_bb (insn, bb);
}

/* Update BLOCK_FOR_INSN of insns in BB to BB,
   and notify df of the change.  */

void
update_bb_for_insn (basic_block bb)
{
  update_bb_for_insn_chain (BB_HEAD (bb), BB_END (bb), bb);
}

/* Like active_insn_p, except keep the return value clobber around
   even after reload.  */

static bool
flow_active_insn_p (const rtx_insn *insn)
{
  if (active_insn_p (insn))
    return true;

  /* A clobber of the function return value exists for buggy
     programs that fail to return a value.  Its effect is to
     keep the return value from being live across the entire
     function.  If we allow it to be skipped, we introduce the
     possibility for register lifetime confusion.  */
  if (GET_CODE (PATTERN (insn)) == CLOBBER
      && REG_P (XEXP (PATTERN (insn), 0))
      && REG_FUNCTION_VALUE_P (XEXP (PATTERN (insn), 0)))
    return true;

  return false;
}

/* Return true if the block has no effect and only forwards control flow to
   its single destination.  */

bool
contains_no_active_insn_p (const_basic_block bb)
{
  rtx_insn *insn;

  if (bb == EXIT_BLOCK_PTR_FOR_FN (cfun) || bb == ENTRY_BLOCK_PTR_FOR_FN (cfun)
      || !single_succ_p (bb))
    return false;

  for (insn = BB_HEAD (bb); insn != BB_END (bb); insn = NEXT_INSN (insn))
    if (INSN_P (insn) && flow_active_insn_p (insn))
      return false;

  return (!INSN_P (insn)
	  || (JUMP_P (insn) && simplejump_p (insn))
	  || !flow_active_insn_p (insn));
}

/* Likewise, but protect loop latches, headers and preheaders.  */
/* FIXME: Make this a cfg hook.  */

bool
forwarder_block_p (const_basic_block bb)
{
  if (!contains_no_active_insn_p (bb))
    return false;

  /* Protect loop latches, headers and preheaders.  */
  if (current_loops)
    {
      basic_block dest;
      if (bb->loop_father->header == bb)
	return false;
      dest = EDGE_SUCC (bb, 0)->dest;
      if (dest->loop_father->header == dest)
	return false;
    }

  return true;
}
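
/* A sketch of a typical use (BB and E are hypothetical): cleanup passes
   redirect predecessor edges around trivial forwarder blocks, e.g.

	if (forwarder_block_p (bb))
	  redirect_edge_and_branch (e, single_succ (bb));

   where E is some predecessor edge of BB.  */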

/* Return true if we can reach TARGET from SRC by falling through.  */
/* FIXME: Make this a cfg hook, the result is only valid in cfgrtl mode.  */

bool
can_fallthru (basic_block src, basic_block target)
{
  rtx_insn *insn = BB_END (src);
  rtx_insn *insn2;
  edge e;
  edge_iterator ei;

  if (target == EXIT_BLOCK_PTR_FOR_FN (cfun))
    return true;
  if (src->next_bb != target)
    return false;

  /* ??? Later we may add code to move jump tables offline.  */
  if (tablejump_p (insn, NULL, NULL))
    return false;

  FOR_EACH_EDGE (e, ei, src->succs)
    if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun)
	&& e->flags & EDGE_FALLTHRU)
      return false;

  insn2 = BB_HEAD (target);
  if (!active_insn_p (insn2))
    insn2 = next_active_insn (insn2);

  return next_active_insn (insn) == insn2;
}

/* Return true if we could reach TARGET from SRC by falling through,
   if the target was made adjacent.  If we already have a fall-through
   edge to the exit block, we can't do that.  */
static bool
could_fall_through (basic_block src, basic_block target)
{
  edge e;
  edge_iterator ei;

  if (target == EXIT_BLOCK_PTR_FOR_FN (cfun))
    return true;
  FOR_EACH_EDGE (e, ei, src->succs)
    if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun)
	&& e->flags & EDGE_FALLTHRU)
      return false;
  return true;
}

/* Return the NOTE_INSN_BASIC_BLOCK of BB.  */
rtx_note *
bb_note (basic_block bb)
{
  rtx_insn *note;

  note = BB_HEAD (bb);
  if (LABEL_P (note))
    note = NEXT_INSN (note);

  gcc_assert (NOTE_INSN_BASIC_BLOCK_P (note));
  return as_a <rtx_note *> (note);
}

/* Return the INSN immediately following the NOTE_INSN_BASIC_BLOCK
   note associated with the BLOCK.  */

static rtx_insn *
first_insn_after_basic_block_note (basic_block block)
{
  rtx_insn *insn;

  /* Get the first instruction in the block.  */
  insn = BB_HEAD (block);

  if (insn == NULL_RTX)
    return NULL;
  if (LABEL_P (insn))
    insn = NEXT_INSN (insn);
  gcc_assert (NOTE_INSN_BASIC_BLOCK_P (insn));

  return NEXT_INSN (insn);
}

/* Create a new basic block just after basic block BB by splitting
   everything after the specified instruction INSNP.  */

static basic_block
rtl_split_block (basic_block bb, void *insnp)
{
  basic_block new_bb;
  rtx_insn *insn = (rtx_insn *) insnp;
  edge e;
  edge_iterator ei;

  if (!insn)
    {
      insn = first_insn_after_basic_block_note (bb);

      if (insn)
	{
	  rtx_insn *next = insn;

	  insn = PREV_INSN (insn);

	  /* If the block contains only debug insns, insn would have
	     been NULL in a non-debug compilation, and then we'd end
	     up emitting a DELETED note.  For -fcompare-debug
	     stability, emit the note too.  */
	  if (insn != BB_END (bb)
	      && DEBUG_INSN_P (next)
	      && DEBUG_INSN_P (BB_END (bb)))
	    {
	      while (next != BB_END (bb) && DEBUG_INSN_P (next))
		next = NEXT_INSN (next);

	      if (next == BB_END (bb))
		emit_note_after (NOTE_INSN_DELETED, next);
	    }
	}
      else
	insn = get_last_insn ();
    }

  /* We probably should check the type of the insn so that we do not create
     an inconsistent cfg.  It is checked in verify_flow_info anyway, so do not
     bother.  */
  if (insn == BB_END (bb))
    emit_note_after (NOTE_INSN_DELETED, insn);

  /* Create the new basic block.  */
  new_bb = create_basic_block (NEXT_INSN (insn), BB_END (bb), bb);
  BB_COPY_PARTITION (new_bb, bb);
  BB_END (bb) = insn;

  /* Redirect the outgoing edges.  */
  new_bb->succs = bb->succs;
  bb->succs = NULL;
  FOR_EACH_EDGE (e, ei, new_bb->succs)
    e->src = new_bb;

  /* The new block starts off being dirty.  */
  df_set_bb_dirty (bb);
  return new_bb;
}
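
/* Generic code reaches rtl_split_block through the split_block hook
   declared in cfghooks.h.  A sketch (BB and INSN are assumed to exist,
   with INSN inside BB):

	edge e = split_block (bb, insn);	// BB now ends at INSN
	basic_block rest = e->dest;		// new block with the rest

   The returned fallthru edge connects the shortened BB to the new
   block.  */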

/* Return true if the single edge between blocks A and B is the only place
   in RTL which holds some unique locus.  */

static bool
unique_locus_on_edge_between_p (basic_block a, basic_block b)
{
  const location_t goto_locus = EDGE_SUCC (a, 0)->goto_locus;
  rtx_insn *insn, *end;

  if (LOCATION_LOCUS (goto_locus) == UNKNOWN_LOCATION)
    return false;

  /* First scan block A backward.  */
  insn = BB_END (a);
  end = PREV_INSN (BB_HEAD (a));
  while (insn != end && (!NONDEBUG_INSN_P (insn) || !INSN_HAS_LOCATION (insn)))
    insn = PREV_INSN (insn);

  if (insn != end && INSN_LOCATION (insn) == goto_locus)
    return false;

  /* Then scan block B forward.  */
  insn = BB_HEAD (b);
  if (insn)
    {
      end = NEXT_INSN (BB_END (b));
      while (insn != end && !NONDEBUG_INSN_P (insn))
	insn = NEXT_INSN (insn);

      if (insn != end && INSN_HAS_LOCATION (insn)
	  && INSN_LOCATION (insn) == goto_locus)
	return false;
    }

  return true;
}

/* If the single edge between blocks A and B is the only place in RTL which
   holds some unique locus, emit a nop with that locus between the blocks.  */

static void
emit_nop_for_unique_locus_between (basic_block a, basic_block b)
{
  if (!unique_locus_on_edge_between_p (a, b))
    return;

  BB_END (a) = emit_insn_after_noloc (gen_nop (), BB_END (a), a);
  INSN_LOCATION (BB_END (a)) = EDGE_SUCC (a, 0)->goto_locus;
}

/* Blocks A and B are to be merged into a single block A.  The insns
   are already contiguous.  */

static void
rtl_merge_blocks (basic_block a, basic_block b)
{
  rtx_insn *b_head = BB_HEAD (b), *b_end = BB_END (b), *a_end = BB_END (a);
  rtx_insn *del_first = NULL, *del_last = NULL;
  rtx_insn *b_debug_start = b_end, *b_debug_end = b_end;
  bool forwarder_p = (b->flags & BB_FORWARDER_BLOCK) != 0;
  int b_empty = 0;

  if (dump_file)
    fprintf (dump_file, "Merging block %d into block %d...\n", b->index,
	     a->index);

  while (DEBUG_INSN_P (b_end))
    b_end = PREV_INSN (b_debug_start = b_end);

  /* If there was a CODE_LABEL beginning B, delete it.  */
  if (LABEL_P (b_head))
    {
      /* Detect basic blocks with nothing but a label.  This can happen
	 in particular at the end of a function.  */
      if (b_head == b_end)
	b_empty = 1;

      del_first = del_last = b_head;
      b_head = NEXT_INSN (b_head);
    }

  /* Delete the basic block note and handle blocks containing just that
     note.  */
  if (NOTE_INSN_BASIC_BLOCK_P (b_head))
    {
      if (b_head == b_end)
	b_empty = 1;
      if (! del_last)
	del_first = b_head;

      del_last = b_head;
      b_head = NEXT_INSN (b_head);
    }

  /* If there was a jump out of A, delete it.  */
  if (JUMP_P (a_end))
    {
      rtx_insn *prev;

      for (prev = PREV_INSN (a_end); ; prev = PREV_INSN (prev))
	if (!NOTE_P (prev)
	    || NOTE_INSN_BASIC_BLOCK_P (prev)
	    || prev == BB_HEAD (a))
	  break;

      del_first = a_end;

#ifdef HAVE_cc0
      /* If this was a conditional jump, we need to also delete
	 the insn that set cc0.  */
      if (only_sets_cc0_p (prev))
	{
	  rtx_insn *tmp = prev;

	  prev = prev_nonnote_insn (prev);
	  if (!prev)
	    prev = BB_HEAD (a);
	  del_first = tmp;
	}
#endif

      a_end = PREV_INSN (del_first);
    }
  else if (BARRIER_P (NEXT_INSN (a_end)))
    del_first = NEXT_INSN (a_end);

  /* Delete everything marked above as well as crap that might be
     hanging out between the two blocks.  */
  BB_END (a) = a_end;
  BB_HEAD (b) = b_empty ? NULL : b_head;
  delete_insn_chain (del_first, del_last, true);

  /* When not optimizing and the edge is the only place in RTL which holds
     some unique locus, emit a nop with that locus in between.  */
  if (!optimize)
    {
      emit_nop_for_unique_locus_between (a, b);
      a_end = BB_END (a);
    }

  /* Reassociate the insns of B with A.  */
  if (!b_empty)
    {
      update_bb_for_insn_chain (a_end, b_debug_end, a);

      BB_END (a) = b_debug_end;
      BB_HEAD (b) = NULL;
    }
  else if (b_end != b_debug_end)
    {
      /* Move any deleted labels and other notes between the end of A
	 and the debug insns that make up B after the debug insns,
	 bringing the debug insns into A while keeping the notes after
	 the end of A.  */
      if (NEXT_INSN (a_end) != b_debug_start)
	reorder_insns_nobb (NEXT_INSN (a_end), PREV_INSN (b_debug_start),
			    b_debug_end);
      update_bb_for_insn_chain (b_debug_start, b_debug_end, a);
      BB_END (a) = b_debug_end;
    }

  df_bb_delete (b->index);

  /* If B was a forwarder block, propagate the locus on the edge.  */
  if (forwarder_p
      && LOCATION_LOCUS (EDGE_SUCC (b, 0)->goto_locus) == UNKNOWN_LOCATION)
    EDGE_SUCC (b, 0)->goto_locus = EDGE_SUCC (a, 0)->goto_locus;

  if (dump_file)
    fprintf (dump_file, "Merged blocks %d and %d.\n", a->index, b->index);
}
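
/* Generic code uses the cfghooks wrappers rather than calling
   rtl_merge_blocks directly.  A sketch (A and B are assumed to be
   candidate blocks):

	if (can_merge_blocks_p (a, b))
	  merge_blocks (a, b);		// B's insns are absorbed into A

   rtl_can_merge_blocks below implements the RTL-specific legality check
   behind can_merge_blocks_p.  */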

/* Return true when blocks A and B can be merged.  */

static bool
rtl_can_merge_blocks (basic_block a, basic_block b)
{
  /* If we are partitioning hot/cold basic blocks, we don't want to
     mess up unconditional or indirect jumps that cross between hot
     and cold sections.

     Basic block partitioning may result in some jumps that appear to
     be optimizable (or blocks that appear to be mergeable), but which really
     must be left untouched (they are required to make it safely across
     partition boundaries).  See the comments at the top of
     bb-reorder.c:partition_hot_cold_basic_blocks for complete details.  */

  if (BB_PARTITION (a) != BB_PARTITION (b))
    return false;

  /* Protect the loop latches.  */
  if (current_loops && b->loop_father->latch == b)
    return false;

  /* There must be exactly one edge in between the blocks.  */
  return (single_succ_p (a)
	  && single_succ (a) == b
	  && single_pred_p (b)
	  && a != b
	  /* Must be a simple edge.  */
	  && !(single_succ_edge (a)->flags & EDGE_COMPLEX)
	  && a->next_bb == b
	  && a != ENTRY_BLOCK_PTR_FOR_FN (cfun)
	  && b != EXIT_BLOCK_PTR_FOR_FN (cfun)
	  /* If the jump insn has side effects,
	     we can't kill the edge.  */
	  && (!JUMP_P (BB_END (a))
	      || (reload_completed
		  ? simplejump_p (BB_END (a)) : onlyjump_p (BB_END (a)))));
}

/* Return the label in the head of basic block BLOCK.  Create one if it doesn't
   exist.  */

rtx
block_label (basic_block block)
{
  if (block == EXIT_BLOCK_PTR_FOR_FN (cfun))
    return NULL_RTX;

  if (!LABEL_P (BB_HEAD (block)))
    {
      BB_HEAD (block) = emit_label_before (gen_label_rtx (), BB_HEAD (block));
    }

  return BB_HEAD (block);
}
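
/* A sketch of the usual pattern (JUMP and TARGET are hypothetical):
   obtain, or create, the target's label before redirecting a jump:

	rtx label = block_label (target);
	if (label && redirect_jump (jump, label, 0))
	  ...				// JUMP now targets TARGET

   block_label returns NULL_RTX only for the exit block.  */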

/* Attempt to perform edge redirection by replacing a possibly complex jump
   instruction with an unconditional jump, or by removing the jump completely.
   This can apply only if all edges now point to the same block.  The
   parameters and return values are equivalent to
   redirect_edge_and_branch.  */

edge
try_redirect_by_replacing_jump (edge e, basic_block target, bool in_cfglayout)
{
  basic_block src = e->src;
  rtx_insn *insn = BB_END (src), *kill_from;
  rtx set;
  int fallthru = 0;

  /* If we are partitioning hot/cold basic blocks, we don't want to
     mess up unconditional or indirect jumps that cross between hot
     and cold sections.

     Basic block partitioning may result in some jumps that appear to
     be optimizable (or blocks that appear to be mergeable), but which really
     must be left untouched (they are required to make it safely across
     partition boundaries).  See the comments at the top of
     bb-reorder.c:partition_hot_cold_basic_blocks for complete details.  */

  if (BB_PARTITION (src) != BB_PARTITION (target))
    return NULL;

  /* We can replace or remove a complex jump only when we have exactly
     two edges.  Also, if we have exactly one outgoing edge, we can
     redirect that.  */
  if (EDGE_COUNT (src->succs) >= 3
      /* Verify that all targets will be TARGET.  Specifically, the
	 edge that is not E must also go to TARGET.  */
      || (EDGE_COUNT (src->succs) == 2
	  && EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest != target))
    return NULL;

  if (!onlyjump_p (insn))
    return NULL;
  if ((!optimize || reload_completed) && tablejump_p (insn, NULL, NULL))
    return NULL;

  /* Avoid removing a branch with side effects.  */
  set = single_set (insn);
  if (!set || side_effects_p (set))
    return NULL;

  /* In case we zap a conditional jump, we'll need to kill
     the cc0 setter too.  */
  kill_from = insn;
#ifdef HAVE_cc0
  if (reg_mentioned_p (cc0_rtx, PATTERN (insn))
      && only_sets_cc0_p (PREV_INSN (insn)))
    kill_from = PREV_INSN (insn);
#endif

  /* See if we can create the fallthru edge.  */
  if (in_cfglayout || can_fallthru (src, target))
    {
      if (dump_file)
	fprintf (dump_file, "Removing jump %i.\n", INSN_UID (insn));
      fallthru = 1;

      /* Selectively unlink the whole insn chain.  */
      if (in_cfglayout)
	{
	  rtx_insn *insn = BB_FOOTER (src);

	  delete_insn_chain (kill_from, BB_END (src), false);

	  /* Remove barriers but keep jumptables.  */
	  while (insn)
	    {
	      if (BARRIER_P (insn))
		{
		  if (PREV_INSN (insn))
		    SET_NEXT_INSN (PREV_INSN (insn)) = NEXT_INSN (insn);
		  else
		    BB_FOOTER (src) = NEXT_INSN (insn);
		  if (NEXT_INSN (insn))
		    SET_PREV_INSN (NEXT_INSN (insn)) = PREV_INSN (insn);
		}
	      if (LABEL_P (insn))
		break;
	      insn = NEXT_INSN (insn);
	    }
	}
      else
	delete_insn_chain (kill_from, PREV_INSN (BB_HEAD (target)),
			   false);
    }

  /* If this already is a simplejump, redirect it.  */
  else if (simplejump_p (insn))
    {
      if (e->dest == target)
	return NULL;
      if (dump_file)
	fprintf (dump_file, "Redirecting jump %i from %i to %i.\n",
		 INSN_UID (insn), e->dest->index, target->index);
      if (!redirect_jump (insn, block_label (target), 0))
	{
	  gcc_assert (target == EXIT_BLOCK_PTR_FOR_FN (cfun));
	  return NULL;
	}
    }

  /* Cannot do anything for the target exit block.  */
  else if (target == EXIT_BLOCK_PTR_FOR_FN (cfun))
    return NULL;

  /* Or replace a possibly complicated jump insn by a simple jump insn.  */
  else
    {
      rtx target_label = block_label (target);
      rtx_insn *barrier;
      rtx label;
      rtx_jump_table_data *table;

      emit_jump_insn_after_noloc (gen_jump (target_label), insn);
      JUMP_LABEL (BB_END (src)) = target_label;
      LABEL_NUSES (target_label)++;
      if (dump_file)
	fprintf (dump_file, "Replacing insn %i by jump %i\n",
		 INSN_UID (insn), INSN_UID (BB_END (src)));

      delete_insn_chain (kill_from, insn, false);

      /* Recognize a tablejump that we are converting to a
	 simple jump and remove its associated CODE_LABEL
	 and ADDR_VEC or ADDR_DIFF_VEC.  */
      if (tablejump_p (insn, &label, &table))
	delete_insn_chain (label, table, false);

      barrier = next_nonnote_insn (BB_END (src));
      if (!barrier || !BARRIER_P (barrier))
	emit_barrier_after (BB_END (src));
      else
	{
	  if (barrier != NEXT_INSN (BB_END (src)))
	    {
	      /* Move the jump before the barrier so that the notes
		 which originally were or were created before the jump table
		 are inside the basic block.  */
	      rtx_insn *new_insn = BB_END (src);

	      update_bb_for_insn_chain (NEXT_INSN (BB_END (src)),
				        PREV_INSN (barrier), src);

	      SET_NEXT_INSN (PREV_INSN (new_insn)) = NEXT_INSN (new_insn);
	      SET_PREV_INSN (NEXT_INSN (new_insn)) = PREV_INSN (new_insn);

	      SET_NEXT_INSN (new_insn) = barrier;
	      SET_NEXT_INSN (PREV_INSN (barrier)) = new_insn;

	      SET_PREV_INSN (new_insn) = PREV_INSN (barrier);
	      SET_PREV_INSN (barrier) = new_insn;
	    }
	}
    }

  /* Keep only one edge out and set proper flags.  */
  if (!single_succ_p (src))
    remove_edge (e);
  gcc_assert (single_succ_p (src));

  e = single_succ_edge (src);
  if (fallthru)
    e->flags = EDGE_FALLTHRU;
  else
    e->flags = 0;

  e->probability = REG_BR_PROB_BASE;
  e->count = src->count;

  if (e->dest != target)
    redirect_edge_succ (e, target);
  return e;
}

/* Subroutine of redirect_branch_edge that tries to patch the jump
   instruction INSN so that it reaches block NEW_BB.  Do this
   only when it originally reached OLD_LABEL.  Return true if this
   worked or the original target wasn't OLD_LABEL; return false if
   redirection doesn't work.  */

static bool
patch_jump_insn (rtx_insn *insn, rtx_insn *old_label, basic_block new_bb)
{
  rtx_jump_table_data *table;
  rtx tmp;
  /* Recognize a tablejump and adjust all matching cases.  */
  if (tablejump_p (insn, NULL, &table))
    {
      rtvec vec;
      int j;
      rtx new_label = block_label (new_bb);

      if (new_bb == EXIT_BLOCK_PTR_FOR_FN (cfun))
	return false;
      vec = table->get_labels ();

      for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
	if (XEXP (RTVEC_ELT (vec, j), 0) == old_label)
	  {
	    RTVEC_ELT (vec, j) = gen_rtx_LABEL_REF (Pmode, new_label);
	    --LABEL_NUSES (old_label);
	    ++LABEL_NUSES (new_label);
	  }

      /* Handle casesi dispatch insns.  */
      if ((tmp = single_set (insn)) != NULL
	  && SET_DEST (tmp) == pc_rtx
	  && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
	  && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF
	  && LABEL_REF_LABEL (XEXP (SET_SRC (tmp), 2)) == old_label)
	{
	  XEXP (SET_SRC (tmp), 2) = gen_rtx_LABEL_REF (Pmode,
						       new_label);
	  --LABEL_NUSES (old_label);
	  ++LABEL_NUSES (new_label);
	}
    }
  else if ((tmp = extract_asm_operands (PATTERN (insn))) != NULL)
    {
      int i, n = ASM_OPERANDS_LABEL_LENGTH (tmp);
      rtx new_label, note;

      if (new_bb == EXIT_BLOCK_PTR_FOR_FN (cfun))
	return false;
      new_label = block_label (new_bb);

      for (i = 0; i < n; ++i)
	{
	  rtx old_ref = ASM_OPERANDS_LABEL (tmp, i);
	  gcc_assert (GET_CODE (old_ref) == LABEL_REF);
	  if (XEXP (old_ref, 0) == old_label)
	    {
	      ASM_OPERANDS_LABEL (tmp, i)
		= gen_rtx_LABEL_REF (Pmode, new_label);
	      --LABEL_NUSES (old_label);
	      ++LABEL_NUSES (new_label);
	    }
	}

      if (JUMP_LABEL (insn) == old_label)
	{
	  JUMP_LABEL (insn) = new_label;
	  note = find_reg_note (insn, REG_LABEL_TARGET, new_label);
	  if (note)
	    remove_note (insn, note);
	}
      else
	{
	  note = find_reg_note (insn, REG_LABEL_TARGET, old_label);
	  if (note)
	    remove_note (insn, note);
	  if (JUMP_LABEL (insn) != new_label
	      && !find_reg_note (insn, REG_LABEL_TARGET, new_label))
	    add_reg_note (insn, REG_LABEL_TARGET, new_label);
	}
      while ((note = find_reg_note (insn, REG_LABEL_OPERAND, old_label))
	     != NULL_RTX)
	XEXP (note, 0) = new_label;
    }
  else
    {
      /* ??? We may play games with moving the named labels from
	 one basic block to the other in case only one computed_jump is
	 available.  */
      if (computed_jump_p (insn)
	  /* A return instruction can't be redirected.  */
	  || returnjump_p (insn))
	return false;

      if (!currently_expanding_to_rtl || JUMP_LABEL (insn) == old_label)
	{
	  /* If the insn doesn't go where we think, we're confused.  */
	  gcc_assert (JUMP_LABEL (insn) == old_label);

	  /* If the substitution doesn't succeed, die.  This can happen
	     if the back end emitted unrecognizable instructions or if
	     the target is the exit block on some arches.  */
	  if (!redirect_jump (insn, block_label (new_bb), 0))
	    {
	      gcc_assert (new_bb == EXIT_BLOCK_PTR_FOR_FN (cfun));
	      return false;
	    }
	}
    }
  return true;
}

/* Redirect the edge representing the branch of an (un)conditional jump or
   tablejump; return NULL on failure.  */
static edge
redirect_branch_edge (edge e, basic_block target)
{
  rtx_insn *old_label = BB_HEAD (e->dest);
  basic_block src = e->src;
  rtx_insn *insn = BB_END (src);

  /* We can only redirect non-fallthru edges of a jump insn.  */
  if (e->flags & EDGE_FALLTHRU)
    return NULL;
  else if (!JUMP_P (insn) && !currently_expanding_to_rtl)
    return NULL;

  if (!currently_expanding_to_rtl)
    {
      if (!patch_jump_insn (insn, old_label, target))
	return NULL;
    }
  else
    /* When expanding, this BB might actually contain multiple
       jumps (i.e. not yet split by find_many_sub_basic_blocks).
       Redirect all of those that match our label.  */
    FOR_BB_INSNS (src, insn)
      if (JUMP_P (insn) && !patch_jump_insn (insn, old_label, target))
	return NULL;

  if (dump_file)
    fprintf (dump_file, "Edge %i->%i redirected to %i\n",
	     e->src->index, e->dest->index, target->index);

  if (e->dest != target)
    e = redirect_edge_succ_nodup (e, target);

  return e;
}

/* Called when edge E has been redirected to a new destination,
   in order to update the region crossing flag on the edge and
   jump.  */

static void
fixup_partition_crossing (edge e)
{
  if (e->src == ENTRY_BLOCK_PTR_FOR_FN (cfun) || e->dest
      == EXIT_BLOCK_PTR_FOR_FN (cfun))
    return;
  /* If we redirected an existing edge, it may already be marked
     crossing, even though the new src is missing a reg crossing note.
     But make sure reg crossing note doesn't already exist before
     inserting.  */
  if (BB_PARTITION (e->src) != BB_PARTITION (e->dest))
    {
      e->flags |= EDGE_CROSSING;
      if (JUMP_P (BB_END (e->src))
	  && !CROSSING_JUMP_P (BB_END (e->src)))
	CROSSING_JUMP_P (BB_END (e->src)) = 1;
    }
  else if (BB_PARTITION (e->src) == BB_PARTITION (e->dest))
    {
      e->flags &= ~EDGE_CROSSING;
      /* Remove the section crossing note from jump at end of
         src if it exists, and if no other successors are
         still crossing.  */
      if (JUMP_P (BB_END (e->src)) && CROSSING_JUMP_P (BB_END (e->src)))
        {
          bool has_crossing_succ = false;
          edge e2;
          edge_iterator ei;
          FOR_EACH_EDGE (e2, ei, e->src->succs)
            {
              has_crossing_succ |= (e2->flags & EDGE_CROSSING);
              if (has_crossing_succ)
                break;
            }
          if (!has_crossing_succ)
	    CROSSING_JUMP_P (BB_END (e->src)) = 0;
        }
    }
}

/* Called when block BB has been reassigned to the cold partition,
   because it is now dominated by another cold block,
   to ensure that the region crossing attributes are updated.  */

static void
fixup_new_cold_bb (basic_block bb)
{
  edge e;
  edge_iterator ei;

  /* This is called when a hot bb is found to now be dominated
     by a cold bb and therefore needs to become cold.  As a result,
     its preds will no longer be region crossing.  Any non-dominating
     preds that were previously hot would also have become cold
     in the caller for the same region.  Any preds that were previously
     region-crossing will be adjusted in fixup_partition_crossing.  */
  FOR_EACH_EDGE (e, ei, bb->preds)
    {
      fixup_partition_crossing (e);
    }

  /* Possibly need to make bb's successor edges region crossing,
     or remove stale region crossing.  */
  FOR_EACH_EDGE (e, ei, bb->succs)
    {
      /* We can't have fall-through edges across partition boundaries.
         Note that force_nonfallthru will do any necessary partition
         boundary fixup by calling fixup_partition_crossing itself.  */
      if ((e->flags & EDGE_FALLTHRU)
          && BB_PARTITION (bb) != BB_PARTITION (e->dest)
	  && e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
        force_nonfallthru (e);
      else
        fixup_partition_crossing (e);
    }
}

/* Attempt to change code to redirect edge E to TARGET.  Don't do that at the
   expense of adding new instructions or reordering basic blocks.

   The function can also be called with the edge destination equal to TARGET.
   Then it should try the simplifications and do nothing if none is possible.

   Return the edge representing the branch if the transformation succeeded.
   Return NULL on failure.
   We still return NULL in case E already pointed to TARGET and we didn't
   manage to simplify the instruction stream.  */

static edge
rtl_redirect_edge_and_branch (edge e, basic_block target)
{
  edge ret;
  basic_block src = e->src;
  basic_block dest = e->dest;

  if (e->flags & (EDGE_ABNORMAL_CALL | EDGE_EH))
    return NULL;

  if (dest == target)
    return e;

  if ((ret = try_redirect_by_replacing_jump (e, target, false)) != NULL)
    {
      df_set_bb_dirty (src);
      fixup_partition_crossing (ret);
      return ret;
    }

  ret = redirect_branch_edge (e, target);
  if (!ret)
    return NULL;

  df_set_bb_dirty (src);
  fixup_partition_crossing (ret);
  return ret;
}
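
/* Generic code reaches this function through the redirect_edge_and_branch
   hook.  A sketch (E and TARGET are assumed valid, with E not abnormal):

	edge ret = redirect_edge_and_branch (e, target);
	if (!ret)			// couldn't be done cheaply
	  redirect_edge_and_branch_force (e, target);

   The force variant (see rtl_redirect_edge_and_branch_force below) may
   create a new jump block.  */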

/* Emit a barrier after BB, into the footer if we are in CFGLAYOUT mode.  */

void
emit_barrier_after_bb (basic_block bb)
{
  rtx_barrier *barrier = emit_barrier_after (BB_END (bb));
  gcc_assert (current_ir_type () == IR_RTL_CFGRTL
              || current_ir_type () == IR_RTL_CFGLAYOUT);
  if (current_ir_type () == IR_RTL_CFGLAYOUT)
    {
      rtx_insn *insn = unlink_insn_chain (barrier, barrier);

      if (BB_FOOTER (bb))
	{
          rtx_insn *footer_tail = BB_FOOTER (bb);

          while (NEXT_INSN (footer_tail))
            footer_tail = NEXT_INSN (footer_tail);
          if (!BARRIER_P (footer_tail))
            {
              SET_NEXT_INSN (footer_tail) = insn;
              SET_PREV_INSN (insn) = footer_tail;
            }
	}
      else
        BB_FOOTER (bb) = insn;
    }
}

/* Like force_nonfallthru below, but additionally performs redirection.
   Used by redirect_edge_and_branch_force.  JUMP_LABEL is used only
   when redirecting to the EXIT_BLOCK, it is either ret_rtx or
   simple_return_rtx, indicating which kind of returnjump to create.
   It should be NULL otherwise.  */

basic_block
force_nonfallthru_and_redirect (edge e, basic_block target, rtx jump_label)
{
  basic_block jump_block, new_bb = NULL, src = e->src;
  rtx note;
  edge new_edge;
  int abnormal_edge_flags = 0;
  bool asm_goto_edge = false;
  int loc;

  /* In the case the last instruction is a conditional jump to the next
     instruction, first redirect the jump itself and then continue
     by creating a basic block afterwards to redirect the fallthru edge.  */
  if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
      && e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun)
      && any_condjump_p (BB_END (e->src))
      && JUMP_LABEL (BB_END (e->src)) == BB_HEAD (e->dest))
    {
      rtx note;
      edge b = unchecked_make_edge (e->src, target, 0);
      bool redirected;

      redirected = redirect_jump (BB_END (e->src), block_label (target), 0);
      gcc_assert (redirected);

      note = find_reg_note (BB_END (e->src), REG_BR_PROB, NULL_RTX);
      if (note)
	{
	  int prob = XINT (note, 0);

	  b->probability = prob;
          /* Update this to use GCOV_COMPUTE_SCALE.  */
	  b->count = e->count * prob / REG_BR_PROB_BASE;
	  e->probability -= e->probability;
	  e->count -= b->count;
	  if (e->probability < 0)
	    e->probability = 0;
	  if (e->count < 0)
	    e->count = 0;
	}
    }

  if (e->flags & EDGE_ABNORMAL)
    {
      /* Irritating special case - fallthru edge to the same block as an
	 abnormal edge.
	 We can't redirect the abnormal edge, but we still can split the
	 fallthru one and create a separate abnormal edge to the original
	 destination.  This allows bb-reorder to make such an edge
	 non-fallthru.  */
      gcc_assert (e->dest == target);
      abnormal_edge_flags = e->flags & ~EDGE_FALLTHRU;
      e->flags &= EDGE_FALLTHRU;
    }
  else
    {
      gcc_assert (e->flags & EDGE_FALLTHRU);
      if (e->src == ENTRY_BLOCK_PTR_FOR_FN (cfun))
	{
	  /* We can't redirect the entry block.  Create an empty block
	     at the start of the function which we use to add the new
	     jump.  */
	  edge tmp;
	  edge_iterator ei;
	  bool found = false;

	  basic_block bb = create_basic_block (BB_HEAD (e->dest), NULL,
					       ENTRY_BLOCK_PTR_FOR_FN (cfun));

	  /* Change the existing edge's source to be the new block, and add
	     a new edge from the entry block to the new block.  */
	  e->src = bb;
	  for (ei = ei_start (ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs);
	       (tmp = ei_safe_edge (ei)); )
	    {
	      if (tmp == e)
		{
		  ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs->unordered_remove (ei.index);
		  found = true;
		  break;
		}
	      else
		ei_next (&ei);
	    }

	  gcc_assert (found);

	  vec_safe_push (bb->succs, e);
	  make_single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun), bb,
				 EDGE_FALLTHRU);
	}
    }

  /* If e->src ends with asm goto, see if any of the ASM_OPERANDS_LABELs
     don't point to the target or fallthru label.  */
  if (JUMP_P (BB_END (e->src))
      && target != EXIT_BLOCK_PTR_FOR_FN (cfun)
      && (e->flags & EDGE_FALLTHRU)
      && (note = extract_asm_operands (PATTERN (BB_END (e->src)))))
    {
      int i, n = ASM_OPERANDS_LABEL_LENGTH (note);
      bool adjust_jump_target = false;

      for (i = 0; i < n; ++i)
	{
	  if (XEXP (ASM_OPERANDS_LABEL (note, i), 0) == BB_HEAD (e->dest))
	    {
	      LABEL_NUSES (XEXP (ASM_OPERANDS_LABEL (note, i), 0))--;
	      XEXP (ASM_OPERANDS_LABEL (note, i), 0) = block_label (target);
	      LABEL_NUSES (XEXP (ASM_OPERANDS_LABEL (note, i), 0))++;
	      adjust_jump_target = true;
	    }
	  if (XEXP (ASM_OPERANDS_LABEL (note, i), 0) == BB_HEAD (target))
	    asm_goto_edge = true;
	}
      if (adjust_jump_target)
	{
	  rtx_insn *insn = BB_END (e->src);
	  rtx note;
	  rtx_insn *old_label = BB_HEAD (e->dest);
	  rtx_insn *new_label = BB_HEAD (target);

	  if (JUMP_LABEL (insn) == old_label)
	    {
	      JUMP_LABEL (insn) = new_label;
	      note = find_reg_note (insn, REG_LABEL_TARGET, new_label);
	      if (note)
		remove_note (insn, note);
	    }
	  else
	    {
	      note = find_reg_note (insn, REG_LABEL_TARGET, old_label);
	      if (note)
		remove_note (insn, note);
	      if (JUMP_LABEL (insn) != new_label
		  && !find_reg_note (insn, REG_LABEL_TARGET, new_label))
		add_reg_note (insn, REG_LABEL_TARGET, new_label);
	    }
	  while ((note = find_reg_note (insn, REG_LABEL_OPERAND, old_label))
		 != NULL_RTX)
	    XEXP (note, 0) = new_label;
	}
    }

  if (EDGE_COUNT (e->src->succs) >= 2 || abnormal_edge_flags || asm_goto_edge)
    {
      rtx_insn *new_head;
      gcov_type count = e->count;
      int probability = e->probability;
      /* Create the new structures.  */

      /* If the old block ended with a tablejump, skip its table
	 by searching forward from there.  Otherwise start searching
	 forward from the last instruction of the old block.  */
      rtx_jump_table_data *table;
      if (tablejump_p (BB_END (e->src), NULL, &table))
	new_head = table;
      else
	new_head = BB_END (e->src);
      new_head = NEXT_INSN (new_head);

      jump_block = create_basic_block (new_head, NULL, e->src);
      jump_block->count = count;
      jump_block->frequency = EDGE_FREQUENCY (e);

      /* Make sure new block ends up in correct hot/cold section.  */

      BB_COPY_PARTITION (jump_block, e->src);

      /* Wire edge in.  */
      new_edge = make_edge (e->src, jump_block, EDGE_FALLTHRU);
      new_edge->probability = probability;
      new_edge->count = count;

      /* Redirect old edge.  */
      redirect_edge_pred (e, jump_block);
      e->probability = REG_BR_PROB_BASE;

      /* If e->src was previously region crossing, it no longer is
         and the reg crossing note should be removed.  */
      fixup_partition_crossing (new_edge);

      /* If asm goto has any label refs to target's label,
	 add also edge from asm goto bb to target.  */
      if (asm_goto_edge)
	{
	  new_edge->probability /= 2;
	  new_edge->count /= 2;
	  jump_block->count /= 2;
	  jump_block->frequency /= 2;
	  new_edge = make_edge (new_edge->src, target,
				e->flags & ~EDGE_FALLTHRU);
	  new_edge->probability = probability - probability / 2;
	  new_edge->count = count - count / 2;
	}

      new_bb = jump_block;
    }
  else
    jump_block = e->src;

  loc = e->goto_locus;
  e->flags &= ~EDGE_FALLTHRU;
  if (target == EXIT_BLOCK_PTR_FOR_FN (cfun))
    {
      if (jump_label == ret_rtx)
	{
#ifdef HAVE_return
	  emit_jump_insn_after_setloc (gen_return (), BB_END (jump_block), loc);
#else
	  gcc_unreachable ();
#endif
	}
      else
	{
	  gcc_assert (jump_label == simple_return_rtx);
#ifdef HAVE_simple_return
	  emit_jump_insn_after_setloc (gen_simple_return (),
				       BB_END (jump_block), loc);
#else
	  gcc_unreachable ();
#endif
	}
      set_return_jump_label (BB_END (jump_block));
    }
  else
    {
      rtx label = block_label (target);
      emit_jump_insn_after_setloc (gen_jump (label), BB_END (jump_block), loc);
      JUMP_LABEL (BB_END (jump_block)) = label;
      LABEL_NUSES (label)++;
    }

  /* We might be in cfg layout mode, and if so, the following routine will
     insert the barrier correctly.  */
  emit_barrier_after_bb (jump_block);
  redirect_edge_succ_nodup (e, target);

  if (abnormal_edge_flags)
    make_edge (src, target, abnormal_edge_flags);

  df_mark_solutions_dirty ();
  fixup_partition_crossing (e);
  return new_bb;
}
1750 
1751 /* Edge E is assumed to be a fallthru edge.  Emit the jump instruction
1752    needed (and possibly create a new basic block) to make the edge
1753    non-fallthru.  Return the newly created BB, or NULL if none.  */
1754 
1755 static basic_block
1756 rtl_force_nonfallthru (edge e)
1757 {
1758   return force_nonfallthru_and_redirect (e, e->dest, NULL_RTX);
1759 }
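
/* An illustrative sketch (not part of GCC itself): a pass holding a
   fallthru edge E that must become explicit can call the public wrapper

     basic_block jump_bb = force_nonfallthru (e);

   which reaches the hook above; a non-NULL result is the newly created
   jump block.  */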
1760 
1761 /* Redirect edge even at the expense of creating new jump insn or
1762    basic block.  Return new basic block if created, NULL otherwise.
1763    Conversion must be possible.  */
1764 
1765 static basic_block
1766 rtl_redirect_edge_and_branch_force (edge e, basic_block target)
1767 {
1768   if (redirect_edge_and_branch (e, target)
1769       || e->dest == target)
1770     return NULL;
1771 
1772   /* In case the edge redirection failed, try to force it to be non-fallthru
1773      and redirect newly created simplejump.  */
1774   df_set_bb_dirty (e->src);
1775   return force_nonfallthru_and_redirect (e, target, NULL_RTX);
1776 }
1777 
1778 /* The given edge should potentially be a fallthru edge.  If that is in
1779    fact true, delete the jump and barriers that are in the way.  */
1780 
1781 static void
1782 rtl_tidy_fallthru_edge (edge e)
1783 {
1784   rtx_insn *q;
1785   basic_block b = e->src, c = b->next_bb;
1786 
1787   /* ??? In a late-running flow pass, other folks may have deleted basic
1788      blocks by nopping out blocks, leaving multiple BARRIERs between here
1789      and the target label. They ought to be chastised and fixed.
1790 
1791      We can also wind up with a sequence of undeletable labels between
1792      one block and the next.
1793 
1794      So search through a sequence of barriers, labels, and notes for
1795      the head of block C and assert that we really do fall through.  */
1796 
1797   for (q = NEXT_INSN (BB_END (b)); q != BB_HEAD (c); q = NEXT_INSN (q))
1798     if (INSN_P (q))
1799       return;
1800 
1801   /* Remove what will soon cease being the jump insn from the source block.
1802      If block B consisted only of this single jump, turn it into a deleted
1803      note.  */
1804   q = BB_END (b);
1805   if (JUMP_P (q)
1806       && onlyjump_p (q)
1807       && (any_uncondjump_p (q)
1808 	  || single_succ_p (b)))
1809     {
1810       rtx label;
1811       rtx_jump_table_data *table;
1812 
1813       if (tablejump_p (q, &label, &table))
1814 	{
1815 	  /* The label is likely mentioned in some instruction before
1816 	     the tablejump and might not be DCEd, so turn it into
1817 	     a note instead and move it before the tablejump that is
1818 	     going to be deleted.  */
1819 	  const char *name = LABEL_NAME (label);
1820 	  PUT_CODE (label, NOTE);
1821 	  NOTE_KIND (label) = NOTE_INSN_DELETED_LABEL;
1822 	  NOTE_DELETED_LABEL_NAME (label) = name;
1823 	  rtx_insn *lab = safe_as_a <rtx_insn *> (label);
1824 	  reorder_insns (lab, lab, PREV_INSN (q));
1825 	  delete_insn (table);
1826 	}
1827 
1828 #ifdef HAVE_cc0
1829       /* If this was a conditional jump, we need to also delete
1830 	 the insn that set cc0.  */
1831       if (any_condjump_p (q) && only_sets_cc0_p (PREV_INSN (q)))
1832 	q = PREV_INSN (q);
1833 #endif
1834 
1835       q = PREV_INSN (q);
1836     }
1837 
1838   /* Selectively unlink the sequence.  */
1839   if (q != PREV_INSN (BB_HEAD (c)))
1840     delete_insn_chain (NEXT_INSN (q), PREV_INSN (BB_HEAD (c)), false);
1841 
1842   e->flags |= EDGE_FALLTHRU;
1843 }
1844 
1845 /* Should move basic block BB after basic block AFTER.  Not implemented yet.  */
1846 
1847 static bool
1848 rtl_move_block_after (basic_block bb ATTRIBUTE_UNUSED,
1849 		      basic_block after ATTRIBUTE_UNUSED)
1850 {
1851   return false;
1852 }
1853 
1854 /* Locate the last bb in the same partition as START_BB.  */
1855 
1856 static basic_block
1857 last_bb_in_partition (basic_block start_bb)
1858 {
1859   basic_block bb;
1860   FOR_BB_BETWEEN (bb, start_bb, EXIT_BLOCK_PTR_FOR_FN (cfun), next_bb)
1861     {
1862       if (BB_PARTITION (start_bb) != BB_PARTITION (bb->next_bb))
1863         return bb;
1864     }
1865   /* Return bb before the exit block.  */
1866   return bb->prev_bb;
1867 }
1868 
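
/* last_bb_in_partition is used by rtl_split_edge below to find where the
   new block has to be placed when a crossing edge is split after bb
   reordering has completed.  */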
1869 /* Split a (typically critical) edge.  Return the new block.
1870    The edge must not be abnormal.
1871 
1872    ??? The code generally expects to be called on critical edges.
1873    The case of a block ending in an unconditional jump to a
1874    block with multiple predecessors is not handled optimally.  */
1875 
1876 static basic_block
1877 rtl_split_edge (edge edge_in)
1878 {
1879   basic_block bb, new_bb;
1880   rtx_insn *before;
1881 
1882   /* Abnormal edges cannot be split.  */
1883   gcc_assert (!(edge_in->flags & EDGE_ABNORMAL));
1884 
1885   /* We are going to place the new block in front of the edge destination.
1886      Avoid the existence of fallthru predecessors.  */
1887   if ((edge_in->flags & EDGE_FALLTHRU) == 0)
1888     {
1889       edge e = find_fallthru_edge (edge_in->dest->preds);
1890 
1891       if (e)
1892 	force_nonfallthru (e);
1893     }
1894 
1895   /* Create the basic block note.  */
1896   if (edge_in->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
1897     before = BB_HEAD (edge_in->dest);
1898   else
1899     before = NULL;
1900 
1901   /* If this is a fall through edge to the exit block, the blocks might
1902      not be adjacent, and the right place is after the source.  */
1903   if ((edge_in->flags & EDGE_FALLTHRU)
1904       && edge_in->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
1905     {
1906       before = NEXT_INSN (BB_END (edge_in->src));
1907       bb = create_basic_block (before, NULL, edge_in->src);
1908       BB_COPY_PARTITION (bb, edge_in->src);
1909     }
1910   else
1911     {
1912       if (edge_in->src == ENTRY_BLOCK_PTR_FOR_FN (cfun))
1913         {
1914           bb = create_basic_block (before, NULL, edge_in->dest->prev_bb);
1915           BB_COPY_PARTITION (bb, edge_in->dest);
1916         }
1917       else
1918         {
1919           basic_block after = edge_in->dest->prev_bb;
1920           /* If this is post-bb reordering, and the edge crosses a partition
1921              boundary, the new block needs to be inserted in the bb chain
1922              at the end of the src partition (since we put the new bb into
1923              that partition, see below). Otherwise we may end up creating
1924              an extra partition crossing in the chain, which is illegal.
1925              It can't go after the src, because src may have a fall-through
1926              to a different block.  */
1927           if (crtl->bb_reorder_complete
1928               && (edge_in->flags & EDGE_CROSSING))
1929             {
1930               after = last_bb_in_partition (edge_in->src);
1931               before = get_last_bb_insn (after);
1932               /* The instruction following the last bb in partition should
1933                  be a barrier, since it cannot end in a fall-through.  */
1934               gcc_checking_assert (BARRIER_P (before));
1935               before = NEXT_INSN (before);
1936             }
1937           bb = create_basic_block (before, NULL, after);
1938           /* Put the split bb into the src partition, to avoid creating
1939              a situation where a cold bb dominates a hot bb, in the case
1940              where src is cold and dest is hot. The src will dominate
1941              the new bb (whereas it might not have dominated dest).  */
1942           BB_COPY_PARTITION (bb, edge_in->src);
1943         }
1944     }
1945 
1946   make_single_succ_edge (bb, edge_in->dest, EDGE_FALLTHRU);
1947 
1948   /* Can't allow a region crossing edge to be fallthrough.  */
1949   if (BB_PARTITION (bb) != BB_PARTITION (edge_in->dest)
1950       && edge_in->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
1951     {
1952       new_bb = force_nonfallthru (single_succ_edge (bb));
1953       gcc_assert (!new_bb);
1954     }
1955 
1956   /* For non-fallthru edges, we must adjust the predecessor's
1957      jump instruction to target our new block.  */
1958   if ((edge_in->flags & EDGE_FALLTHRU) == 0)
1959     {
1960       edge redirected = redirect_edge_and_branch (edge_in, bb);
1961       gcc_assert (redirected);
1962     }
1963   else
1964     {
1965       if (edge_in->src != ENTRY_BLOCK_PTR_FOR_FN (cfun))
1966 	{
1967 	  /* For an asm goto, even splitting a fallthru edge might
1968 	     need insn patching, as other labels might point to the
1969 	     old label.  */
1970 	  rtx_insn *last = BB_END (edge_in->src);
1971 	  if (last
1972 	      && JUMP_P (last)
1973 	      && edge_in->dest != EXIT_BLOCK_PTR_FOR_FN (cfun)
1974 	      && extract_asm_operands (PATTERN (last)) != NULL_RTX
1975 	      && patch_jump_insn (last, before, bb))
1976 	    df_set_bb_dirty (edge_in->src);
1977 	}
1978       redirect_edge_succ (edge_in, bb);
1979     }
1980 
1981   return bb;
1982 }
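
/* An illustrative sketch (not part of GCC itself): callers normally reach
   the hook above through the generic split_edge from cfghooks.h, e.g. to
   give a critical edge its own block before emitting compensation code:

     basic_block bb = split_edge (e);
     ... emit the compensation insns into BB here ...

   Nothing beyond split_edge itself is assumed in this sketch.  */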
1983 
1984 /* Queue instructions for insertion on an edge between two basic blocks.
1985    The new instructions and basic blocks (if any) will not appear in the
1986    CFG until commit_edge_insertions is called.  */
1987 
1988 void
1989 insert_insn_on_edge (rtx pattern, edge e)
1990 {
1991   /* We cannot insert instructions on an abnormal critical edge.
1992      It will be easier to find the culprit if we die now.  */
1993   gcc_assert (!((e->flags & EDGE_ABNORMAL) && EDGE_CRITICAL_P (e)));
1994 
1995   if (e->insns.r == NULL_RTX)
1996     start_sequence ();
1997   else
1998     push_to_sequence (e->insns.r);
1999 
2000   emit_insn (pattern);
2001 
2002   e->insns.r = get_insns ();
2003   end_sequence ();
2004 }
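
/* An illustrative sketch (not part of GCC itself): a pass that wants a
   move executed only when control passes along edge E can queue it here
   and flush all queued edges at once later:

     insert_insn_on_edge (gen_move_insn (dest_reg, src_reg), e);
     ...
     commit_edge_insertions ();

   dest_reg and src_reg are hypothetical registers; the pattern becomes
   part of the insn stream only when commit_edge_insertions runs.  */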
2005 
2006 /* Update the CFG for the instructions queued on edge E.  */
2007 
2008 void
2009 commit_one_edge_insertion (edge e)
2010 {
2011   rtx_insn *before = NULL, *after = NULL, *insns, *tmp, *last;
2012   basic_block bb;
2013 
2014   /* Pull the insns off the edge now since the edge might go away.  */
2015   insns = e->insns.r;
2016   e->insns.r = NULL;
2017 
2018   /* Figure out where to put these insns.  If the destination has
2019      one predecessor, insert there.  Except for the exit block.  */
2020   if (single_pred_p (e->dest) && e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
2021     {
2022       bb = e->dest;
2023 
2024       /* Get the location correct wrt a code label, and "nice" wrt
2025 	 a basic block note, and before everything else.  */
2026       tmp = BB_HEAD (bb);
2027       if (LABEL_P (tmp))
2028 	tmp = NEXT_INSN (tmp);
2029       if (NOTE_INSN_BASIC_BLOCK_P (tmp))
2030 	tmp = NEXT_INSN (tmp);
2031       if (tmp == BB_HEAD (bb))
2032 	before = tmp;
2033       else if (tmp)
2034 	after = PREV_INSN (tmp);
2035       else
2036 	after = get_last_insn ();
2037     }
2038 
2039   /* If the source has one successor and the edge is not abnormal,
2040      insert there.  Except for the entry block.
2041      Don't do this if the predecessor ends in a jump other than an
2042      unconditional simple jump.  E.g. for an asm goto that points all
2043      its labels at the fallthru basic block, we can't insert instructions
2044      before the asm goto, as the asm goto can have various side effects,
2045      and can't emit instructions after the asm goto, as it must end
2046      the basic block.  */
2047   else if ((e->flags & EDGE_ABNORMAL) == 0
2048 	   && single_succ_p (e->src)
2049 	   && e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
2050 	   && (!JUMP_P (BB_END (e->src))
2051 	       || simplejump_p (BB_END (e->src))))
2052     {
2053       bb = e->src;
2054 
2055       /* It is possible to have a non-simple jump here.  Consider a target
2056 	 where some forms of unconditional jumps clobber a register.  This
2057 	 happens on the fr30 for example.
2058 
2059 	 We know this block has a single successor, so we can just emit
2060 	 the queued insns before the jump.  */
2061       if (JUMP_P (BB_END (bb)))
2062 	before = BB_END (bb);
2063       else
2064 	{
2065 	  /* We'd better be fallthru, or we've lost track of what's what.  */
2066 	  gcc_assert (e->flags & EDGE_FALLTHRU);
2067 
2068 	  after = BB_END (bb);
2069 	}
2070     }
2071 
2072   /* Otherwise we must split the edge.  */
2073   else
2074     {
2075       bb = split_edge (e);
2076 
2077       /* If E crossed a partition boundary, we needed to make bb end in
2078          a region-crossing jump, even though it was originally fallthru.  */
2079       if (JUMP_P (BB_END (bb)))
2080 	before = BB_END (bb);
2081       else
2082         after = BB_END (bb);
2083     }
2084 
2085   /* Now that we've found the spot, do the insertion.  */
2086   if (before)
2087     {
2088       emit_insn_before_noloc (insns, before, bb);
2089       last = prev_nonnote_insn (before);
2090     }
2091   else
2092     last = emit_insn_after_noloc (insns, after, bb);
2093 
2094   if (returnjump_p (last))
2095     {
2096       /* ??? Remove all outgoing edges from BB and add one for EXIT.
2097 	 This is not currently a problem because this only happens
2098 	 for the (single) epilogue, which already has a fallthru edge
2099 	 to EXIT.  */
2100 
2101       e = single_succ_edge (bb);
2102       gcc_assert (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun)
2103 		  && single_succ_p (bb) && (e->flags & EDGE_FALLTHRU));
2104 
2105       e->flags &= ~EDGE_FALLTHRU;
2106       emit_barrier_after (last);
2107 
2108       if (before)
2109 	delete_insn (before);
2110     }
2111   else
2112     gcc_assert (!JUMP_P (last));
2113 }
2114 
2115 /* Update the CFG for all queued instructions.  */
2116 
2117 void
2118 commit_edge_insertions (void)
2119 {
2120   basic_block bb;
2121 
2122   /* Optimization passes that invoke this routine can cause hot blocks
2123      previously reached by both hot and cold blocks to become dominated
2124      only by cold blocks.  This will cause the verification below to fail,
2125      and leave now-cold code in the hot section.  In some cases this
2126      may only be visible after newly unreachable blocks are deleted,
2127      which will be done by fixup_partitions.  */
2128   fixup_partitions ();
2129 
2130 #ifdef ENABLE_CHECKING
2131   verify_flow_info ();
2132 #endif
2133 
2134   FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun),
2135 		  EXIT_BLOCK_PTR_FOR_FN (cfun), next_bb)
2136     {
2137       edge e;
2138       edge_iterator ei;
2139 
2140       FOR_EACH_EDGE (e, ei, bb->succs)
2141 	if (e->insns.r)
2142 	  commit_one_edge_insertion (e);
2143     }
2144 }
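
/* commit_edge_insertions is the usual companion of insert_insn_on_edge
   above: passes queue insns on any number of edges and then make a single
   call here, which splits an edge only when commit_one_edge_insertion
   cannot find an existing block to hold the insns.  */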
2145 
2146 
2147 /* Print out RTL-specific basic block information (live information
2148    at start and end with TDF_DETAILS).  FLAGS are the TDF_* masks
2149    documented in dumpfile.h.  */
2150 
2151 static void
2152 rtl_dump_bb (FILE *outf, basic_block bb, int indent, int flags)
2153 {
2154   rtx_insn *insn;
2155   rtx_insn *last;
2156   char *s_indent;
2157 
2158   s_indent = (char *) alloca ((size_t) indent + 1);
2159   memset (s_indent, ' ', (size_t) indent);
2160   s_indent[indent] = '\0';
2161 
2162   if (df && (flags & TDF_DETAILS))
2163     {
2164       df_dump_top (bb, outf);
2165       putc ('\n', outf);
2166     }
2167 
2168   if (bb->index != ENTRY_BLOCK && bb->index != EXIT_BLOCK)
2169     for (insn = BB_HEAD (bb), last = NEXT_INSN (BB_END (bb)); insn != last;
2170 	 insn = NEXT_INSN (insn))
2171       {
2172 	if (flags & TDF_DETAILS)
2173 	  df_dump_insn_top (insn, outf);
2174 	if (! (flags & TDF_SLIM))
2175 	  print_rtl_single (outf, insn);
2176 	else
2177 	  dump_insn_slim (outf, insn);
2178 	if (flags & TDF_DETAILS)
2179 	  df_dump_insn_bottom (insn, outf);
2180       }
2181 
2182   if (df && (flags & TDF_DETAILS))
2183     {
2184       df_dump_bottom (bb, outf);
2185       putc ('\n', outf);
2186     }
2187 
2188 }
2189 
2190 /* Like dump_function_to_file, but for RTL.  Print out dataflow information
2191    for the start of each basic block.  FLAGS are the TDF_* masks documented
2192    in dumpfile.h.  */
2193 
2194 void
2195 print_rtl_with_bb (FILE *outf, const rtx_insn *rtx_first, int flags)
2196 {
2197   const rtx_insn *tmp_rtx;
2198   if (rtx_first == 0)
2199     fprintf (outf, "(nil)\n");
2200   else
2201     {
2202       enum bb_state { NOT_IN_BB, IN_ONE_BB, IN_MULTIPLE_BB };
2203       int max_uid = get_max_uid ();
2204       basic_block *start = XCNEWVEC (basic_block, max_uid);
2205       basic_block *end = XCNEWVEC (basic_block, max_uid);
2206       enum bb_state *in_bb_p = XCNEWVEC (enum bb_state, max_uid);
2207       basic_block bb;
2208 
2209       /* After freeing the CFG, we still have BLOCK_FOR_INSN set on most
2210 	 insns, but the CFG is not maintained so the basic block info
2211 	 is not reliable.  Therefore it's omitted from the dumps.  */
2212       if (! (cfun->curr_properties & PROP_cfg))
2213         flags &= ~TDF_BLOCKS;
2214 
2215       if (df)
2216 	df_dump_start (outf);
2217 
2218       if (flags & TDF_BLOCKS)
2219 	{
2220 	  FOR_EACH_BB_REVERSE_FN (bb, cfun)
2221 	    {
2222 	      rtx_insn *x;
2223 
2224 	      start[INSN_UID (BB_HEAD (bb))] = bb;
2225 	      end[INSN_UID (BB_END (bb))] = bb;
2226 	      for (x = BB_HEAD (bb); x != NULL_RTX; x = NEXT_INSN (x))
2227 		{
2228 		  enum bb_state state = IN_MULTIPLE_BB;
2229 
2230 		  if (in_bb_p[INSN_UID (x)] == NOT_IN_BB)
2231 		    state = IN_ONE_BB;
2232 		  in_bb_p[INSN_UID (x)] = state;
2233 
2234 		  if (x == BB_END (bb))
2235 		    break;
2236 		}
2237 	    }
2238 	}
2239 
2240       for (tmp_rtx = rtx_first; NULL != tmp_rtx; tmp_rtx = NEXT_INSN (tmp_rtx))
2241 	{
2242 	  if (flags & TDF_BLOCKS)
2243 	    {
2244 	      bb = start[INSN_UID (tmp_rtx)];
2245 	      if (bb != NULL)
2246 		{
2247 		  dump_bb_info (outf, bb, 0, dump_flags | TDF_COMMENT, true, false);
2248 		  if (df && (flags & TDF_DETAILS))
2249 		    df_dump_top (bb, outf);
2250 		}
2251 
2252 	      if (in_bb_p[INSN_UID (tmp_rtx)] == NOT_IN_BB
2253 		  && !NOTE_P (tmp_rtx)
2254 		  && !BARRIER_P (tmp_rtx))
2255 		fprintf (outf, ";; Insn is not within a basic block\n");
2256 	      else if (in_bb_p[INSN_UID (tmp_rtx)] == IN_MULTIPLE_BB)
2257 		fprintf (outf, ";; Insn is in multiple basic blocks\n");
2258 	    }
2259 
2260 	  if (flags & TDF_DETAILS)
2261 	    df_dump_insn_top (tmp_rtx, outf);
2262 	  if (! (flags & TDF_SLIM))
2263 	    print_rtl_single (outf, tmp_rtx);
2264 	  else
2265 	    dump_insn_slim (outf, tmp_rtx);
2266 	  if (flags & TDF_DETAILS)
2267 	    df_dump_insn_bottom (tmp_rtx, outf);
2268 
2269 	  if (flags & TDF_BLOCKS)
2270 	    {
2271 	      bb = end[INSN_UID (tmp_rtx)];
2272 	      if (bb != NULL)
2273 		{
2274 		  dump_bb_info (outf, bb, 0, dump_flags | TDF_COMMENT, false, true);
2275 		  if (df && (flags & TDF_DETAILS))
2276 		    df_dump_bottom (bb, outf);
2277 		  putc ('\n', outf);
2278 		}
2279 	    }
2280 	}
2281 
2282       free (start);
2283       free (end);
2284       free (in_bb_p);
2285     }
2286 }
2287 
2288 /* Update the branch probability of BB if a REG_BR_PROB is present.  */
2289 
2290 void
2291 update_br_prob_note (basic_block bb)
2292 {
2293   rtx note;
2294   if (!JUMP_P (BB_END (bb)))
2295     return;
2296   note = find_reg_note (BB_END (bb), REG_BR_PROB, NULL_RTX);
2297   if (!note || XINT (note, 0) == BRANCH_EDGE (bb)->probability)
2298     return;
2299   XINT (note, 0) = BRANCH_EDGE (bb)->probability;
2300 }
2301 
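
/* rtl_verify_edges below flags a mismatch between a REG_BR_PROB note and
   BRANCH_EDGE (bb)->probability, so a pass that changes edge probabilities
   is expected to call update_br_prob_note afterwards.  */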
2302 /* Get the last insn associated with block BB (that includes barriers and
2303    tablejumps after BB).  */
2304 rtx_insn *
2305 get_last_bb_insn (basic_block bb)
2306 {
2307   rtx_jump_table_data *table;
2308   rtx_insn *tmp;
2309   rtx_insn *end = BB_END (bb);
2310 
2311   /* Include any jump table following the basic block.  */
2312   if (tablejump_p (end, NULL, &table))
2313     end = table;
2314 
2315   /* Include any barriers that may follow the basic block.  */
2316   tmp = next_nonnote_insn_bb (end);
2317   while (tmp && BARRIER_P (tmp))
2318     {
2319       end = tmp;
2320       tmp = next_nonnote_insn_bb (end);
2321     }
2322 
2323   return end;
2324 }
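
/* For example, rtl_split_edge above uses get_last_bb_insn to find the
   barrier that follows the last block of a partition before placing a
   new block after it.  */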
2325 
2326 /* Sanity check partition hotness to ensure that basic blocks in
2327    the cold partition don't dominate basic blocks in the hot partition.
2328    If FLAG_ONLY is true, report violations as errors. Otherwise
2329    re-mark the dominated blocks as cold, since this is run after
2330    cfg optimizations that may leave hot blocks previously reached
2331    by both hot and cold blocks reachable only along cold paths.  */
2332 
2333 static vec<basic_block>
2334 find_partition_fixes (bool flag_only)
2335 {
2336   basic_block bb;
2337   vec<basic_block> bbs_in_cold_partition = vNULL;
2338   vec<basic_block> bbs_to_fix = vNULL;
2339 
2340   /* Callers check this.  */
2341   gcc_checking_assert (crtl->has_bb_partition);
2342 
2343   FOR_EACH_BB_FN (bb, cfun)
2344     if (BB_PARTITION (bb) == BB_COLD_PARTITION)
2345       bbs_in_cold_partition.safe_push (bb);
2346 
2347   if (bbs_in_cold_partition.is_empty ())
2348     return vNULL;
2349 
2350   bool dom_calculated_here = !dom_info_available_p (CDI_DOMINATORS);
2351 
2352   if (dom_calculated_here)
2353     calculate_dominance_info (CDI_DOMINATORS);
2354 
2355   while (! bbs_in_cold_partition.is_empty ())
2356     {
2357       bb = bbs_in_cold_partition.pop ();
2358       /* Any blocks dominated by a block in the cold section
2359          must also be cold.  */
2360       basic_block son;
2361       for (son = first_dom_son (CDI_DOMINATORS, bb);
2362            son;
2363            son = next_dom_son (CDI_DOMINATORS, son))
2364         {
2365           /* If son is not yet cold, then mark it cold here and
2366              enqueue it for further processing.  */
2367           if ((BB_PARTITION (son) != BB_COLD_PARTITION))
2368           if (BB_PARTITION (son) != BB_COLD_PARTITION)
2369               if (flag_only)
2370                 error ("non-cold basic block %d dominated "
2371                        "by a block in the cold partition (%d)", son->index, bb->index);
2372               else
2373                 BB_SET_PARTITION (son, BB_COLD_PARTITION);
2374               bbs_to_fix.safe_push (son);
2375               bbs_in_cold_partition.safe_push (son);
2376             }
2377         }
2378     }
2379 
2380   if (dom_calculated_here)
2381     free_dominance_info (CDI_DOMINATORS);
2382 
2383   return bbs_to_fix;
2384 }
2385 
2386 /* Perform cleanup on the hot/cold bb partitioning after optimization
2387    passes that modify the cfg.  */
2388 
2389 void
2390 fixup_partitions (void)
2391 {
2392   basic_block bb;
2393 
2394   if (!crtl->has_bb_partition)
2395     return;
2396 
2397   /* Delete any blocks that became unreachable and weren't
2398      already cleaned up, for example during edge forwarding
2399      and convert_jumps_to_returns. This will expose more
2400      opportunities for fixing the partition boundaries here.
2401      Also, the calculation of the dominance graph during verification
2402      will assert if there are unreachable nodes.  */
2403   delete_unreachable_blocks ();
2404 
2405   /* If there are partitions, do a sanity check on them: A basic block in
2406      a cold partition cannot dominate a basic block in a hot partition.
2407      Fix up any that now violate this requirement, as a result of edge
2408      forwarding and unreachable block deletion.  */
2409   vec<basic_block> bbs_to_fix = find_partition_fixes (false);
2410 
2411   /* Do the partition fixup after all necessary blocks have been converted
2412      to cold, so that we update the region crossings in the minimum number
2413      of places, which can require forcing edges to be non-fallthru.  */
2414   while (! bbs_to_fix.is_empty ())
2415     {
2416       bb = bbs_to_fix.pop ();
2417       fixup_new_cold_bb (bb);
2418     }
2419 }
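
/* commit_edge_insertions above calls fixup_partitions right before
   verify_flow_info, precisely so that the dominance check in
   find_partition_fixes sees a consistent hot/cold partitioning.  */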
2420 
2421 /* Verify, in the basic block chain, that there is at most one switch
2422    between hot/cold partitions. This condition will not be true until
2423    after reorder_basic_blocks is called.  */
2424 
2425 static int
2426 verify_hot_cold_block_grouping (void)
2427 {
2428   basic_block bb;
2429   int err = 0;
2430   bool switched_sections = false;
2431   int current_partition = BB_UNPARTITIONED;
2432 
2433   /* Even after bb reordering is complete, we go into cfglayout mode
2434      again (in compgoto).  Ensure we don't call this before going back
2435      into linearized RTL, when any layout fixes will have been committed.  */
2436   if (!crtl->bb_reorder_complete
2437       || current_ir_type () != IR_RTL_CFGRTL)
2438     return err;
2439 
2440   FOR_EACH_BB_FN (bb, cfun)
2441     {
2442       if (current_partition != BB_UNPARTITIONED
2443           && BB_PARTITION (bb) != current_partition)
2444 	{
2445 	  if (switched_sections)
2446 	    {
2447 	      error ("multiple hot/cold transitions found (bb %i)",
2448 		     bb->index);
2449 	      err = 1;
2450 	    }
2451 	  else
2452             switched_sections = true;
2453 
2454           if (!crtl->has_bb_partition)
2455             error ("partition found but function partition flag not set");
2456 	}
2457       current_partition = BB_PARTITION (bb);
2458     }
2459 
2460   return err;
2461 }
2462 
2463 
2464 /* Perform several checks on the edges out of each block, such as
2465    the consistency of the branch probabilities, the correctness
2466    of hot/cold partition crossing edges, and the number of expected
2467    successor edges.  Also verify that the dominance relationship
2468    between hot/cold blocks is sane.  */
2469 
2470 static int
2471 rtl_verify_edges (void)
2472 {
2473   int err = 0;
2474   basic_block bb;
2475 
2476   FOR_EACH_BB_REVERSE_FN (bb, cfun)
2477     {
2478       int n_fallthru = 0, n_branch = 0, n_abnormal_call = 0, n_sibcall = 0;
2479       int n_eh = 0, n_abnormal = 0;
2480       edge e, fallthru = NULL;
2481       edge_iterator ei;
2482       rtx note;
2483       bool has_crossing_edge = false;
2484 
2485       if (JUMP_P (BB_END (bb))
2486 	  && (note = find_reg_note (BB_END (bb), REG_BR_PROB, NULL_RTX))
2487 	  && EDGE_COUNT (bb->succs) >= 2
2488 	  && any_condjump_p (BB_END (bb)))
2489 	{
2490 	  if (XINT (note, 0) != BRANCH_EDGE (bb)->probability
2491 	      && profile_status_for_fn (cfun) != PROFILE_ABSENT)
2492 	    {
2493 	      error ("verify_flow_info: REG_BR_PROB does not match cfg %i %i",
2494 		     XINT (note, 0), BRANCH_EDGE (bb)->probability);
2495 	      err = 1;
2496 	    }
2497 	}
2498 
2499       FOR_EACH_EDGE (e, ei, bb->succs)
2500 	{
2501 	  bool is_crossing;
2502 
2503 	  if (e->flags & EDGE_FALLTHRU)
2504 	    n_fallthru++, fallthru = e;
2505 
2506 	  is_crossing = (BB_PARTITION (e->src) != BB_PARTITION (e->dest)
2507 			 && e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
2508 			 && e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun));
2509           has_crossing_edge |= is_crossing;
2510 	  if (e->flags & EDGE_CROSSING)
2511 	    {
2512 	      if (!is_crossing)
2513 		{
2514 		  error ("EDGE_CROSSING incorrectly set across same section");
2515 		  err = 1;
2516 		}
2517 	      if (e->flags & EDGE_FALLTHRU)
2518 		{
2519 		  error ("fallthru edge crosses section boundary in bb %i",
2520 			 e->src->index);
2521 		  err = 1;
2522 		}
2523 	      if (e->flags & EDGE_EH)
2524 		{
2525 		  error ("EH edge crosses section boundary in bb %i",
2526 			 e->src->index);
2527 		  err = 1;
2528 		}
2529               if (JUMP_P (BB_END (bb)) && !CROSSING_JUMP_P (BB_END (bb)))
2530 		{
2531 		  error ("no region crossing jump at section boundary in bb %i",
2532 			 bb->index);
2533 		  err = 1;
2534 		}
2535 	    }
2536 	  else if (is_crossing)
2537 	    {
2538 	      error ("EDGE_CROSSING missing across section boundary");
2539 	      err = 1;
2540 	    }
2541 
2542 	  if ((e->flags & ~(EDGE_DFS_BACK
2543 			    | EDGE_CAN_FALLTHRU
2544 			    | EDGE_IRREDUCIBLE_LOOP
2545 			    | EDGE_LOOP_EXIT
2546 			    | EDGE_CROSSING
2547 			    | EDGE_PRESERVE)) == 0)
2548 	    n_branch++;
2549 
2550 	  if (e->flags & EDGE_ABNORMAL_CALL)
2551 	    n_abnormal_call++;
2552 
2553 	  if (e->flags & EDGE_SIBCALL)
2554 	    n_sibcall++;
2555 
2556 	  if (e->flags & EDGE_EH)
2557 	    n_eh++;
2558 
2559 	  if (e->flags & EDGE_ABNORMAL)
2560 	    n_abnormal++;
2561 	}
2562 
2563       if (!has_crossing_edge
2564 	  && JUMP_P (BB_END (bb))
2565 	  && CROSSING_JUMP_P (BB_END (bb)))
2566 	{
2567 	  print_rtl_with_bb (stderr, get_insns (), TDF_RTL | TDF_BLOCKS | TDF_DETAILS);
2568 	  error ("region crossing jump across same section in bb %i",
2569 		 bb->index);
2570 	  err = 1;
2571 	}
2572 
2573       if (n_eh && !find_reg_note (BB_END (bb), REG_EH_REGION, NULL_RTX))
2574 	{
2575 	  error ("missing REG_EH_REGION note at the end of bb %i", bb->index);
2576 	  err = 1;
2577 	}
2578       if (n_eh > 1)
2579 	{
2580 	  error ("too many exception handling edges in bb %i", bb->index);
2581 	  err = 1;
2582 	}
2583       if (n_branch
2584 	  && (!JUMP_P (BB_END (bb))
2585 	      || (n_branch > 1 && (any_uncondjump_p (BB_END (bb))
2586 				   || any_condjump_p (BB_END (bb))))))
2587 	{
2588 	  error ("too many outgoing branch edges from bb %i", bb->index);
2589 	  err = 1;
2590 	}
2591       if (n_fallthru && any_uncondjump_p (BB_END (bb)))
2592 	{
2593 	  error ("fallthru edge after unconditional jump in bb %i", bb->index);
2594 	  err = 1;
2595 	}
2596       if (n_branch != 1 && any_uncondjump_p (BB_END (bb)))
2597 	{
2598 	  error ("wrong number of branch edges after unconditional jump"
2599 		 " in bb %i", bb->index);
2600 	  err = 1;
2601 	}
2602       if (n_branch != 1 && any_condjump_p (BB_END (bb))
2603 	  && JUMP_LABEL (BB_END (bb)) != BB_HEAD (fallthru->dest))
2604 	{
2605 	  error ("wrong number of branch edges after conditional jump"
2606 		 " in bb %i", bb->index);
2607 	  err = 1;
2608 	}
2609       if (n_abnormal_call && !CALL_P (BB_END (bb)))
2610 	{
2611 	  error ("abnormal call edges for non-call insn in bb %i", bb->index);
2612 	  err = 1;
2613 	}
2614       if (n_sibcall && !CALL_P (BB_END (bb)))
2615 	{
2616 	  error ("sibcall edges for non-call insn in bb %i", bb->index);
2617 	  err = 1;
2618 	}
2619       if (n_abnormal > n_eh
2620 	  && !(CALL_P (BB_END (bb))
2621 	       && n_abnormal == n_abnormal_call + n_sibcall)
2622 	  && (!JUMP_P (BB_END (bb))
2623 	      || any_condjump_p (BB_END (bb))
2624 	      || any_uncondjump_p (BB_END (bb))))
2625 	{
2626 	  error ("abnormal edges for no purpose in bb %i", bb->index);
2627 	  err = 1;
2628 	}
2629     }
2630 
2631   /* If there are partitions, do a sanity check on them: A basic block in
2632      a cold partition cannot dominate a basic block in a hot partition.  */
2633   if (crtl->has_bb_partition && !err)
2634     {
2635       vec<basic_block> bbs_to_fix = find_partition_fixes (true);
2636       err = !bbs_to_fix.is_empty ();
2637     }
2638 
2639   /* Clean up.  */
2640   return err;
2641 }
2642 
2643 /* Checks on the instructions within blocks. Currently checks that each
2644    block starts with a basic block note, and that basic block notes and
2645    control flow jumps are not found in the middle of the block.  */
2646 
2647 static int
2648 rtl_verify_bb_insns (void)
2649 {
2650   rtx_insn *x;
2651   int err = 0;
2652   basic_block bb;
2653 
2654   FOR_EACH_BB_REVERSE_FN (bb, cfun)
2655     {
2656       /* Now check the header of the basic block.  It ought to
2657 	 contain an optional CODE_LABEL followed by
2658 	 NOTE_BASIC_BLOCK.  */
2659       x = BB_HEAD (bb);
2660       if (LABEL_P (x))
2661 	{
2662 	  if (BB_END (bb) == x)
2663 	    {
2664 	      error ("NOTE_INSN_BASIC_BLOCK is missing for block %d",
2665 		     bb->index);
2666 	      err = 1;
2667 	    }
2668 
2669 	  x = NEXT_INSN (x);
2670 	}
2671 
2672       if (!NOTE_INSN_BASIC_BLOCK_P (x) || NOTE_BASIC_BLOCK (x) != bb)
2673 	{
2674 	  error ("NOTE_INSN_BASIC_BLOCK is missing for block %d",
2675 		 bb->index);
2676 	  err = 1;
2677 	}
2678 
2679       if (BB_END (bb) == x)
2680 	/* Do checks for empty blocks here.  */
2681 	;
2682       else
2683 	for (x = NEXT_INSN (x); x; x = NEXT_INSN (x))
2684 	  {
2685 	    if (NOTE_INSN_BASIC_BLOCK_P (x))
2686 	      {
2687 		error ("NOTE_INSN_BASIC_BLOCK %d in middle of basic block %d",
2688 		       INSN_UID (x), bb->index);
2689 		err = 1;
2690 	      }
2691 
2692 	    if (x == BB_END (bb))
2693 	      break;
2694 
2695 	    if (control_flow_insn_p (x))
2696 	      {
2697 		error ("in basic block %d:", bb->index);
2698 		fatal_insn ("flow control insn inside a basic block", x);
2699 	      }
2700 	  }
2701     }
2702 
2703   /* Clean up.  */
2704   return err;
2705 }
2706 
2707 /* Verify that block pointers for instructions in basic blocks, headers and
2708    footers are set appropriately.  */
2709 
2710 static int
2711 rtl_verify_bb_pointers (void)
2712 {
2713   int err = 0;
2714   basic_block bb;
2715 
2716   /* Check the general integrity of the basic blocks.  */
2717   FOR_EACH_BB_REVERSE_FN (bb, cfun)
2718     {
2719       rtx_insn *insn;
2720 
2721       if (!(bb->flags & BB_RTL))
2722 	{
2723 	  error ("BB_RTL flag not set for block %d", bb->index);
2724 	  err = 1;
2725 	}
2726 
2727       FOR_BB_INSNS (bb, insn)
2728 	if (BLOCK_FOR_INSN (insn) != bb)
2729 	  {
2730 	    error ("insn %d basic block pointer is %d, should be %d",
2731 		   INSN_UID (insn),
2732 		   BLOCK_FOR_INSN (insn) ? BLOCK_FOR_INSN (insn)->index : 0,
2733 		   bb->index);
2734 	    err = 1;
2735 	  }
2736 
2737       for (insn = BB_HEADER (bb); insn; insn = NEXT_INSN (insn))
2738 	if (!BARRIER_P (insn)
2739 	    && BLOCK_FOR_INSN (insn) != NULL)
2740 	  {
2741 	    error ("insn %d in header of bb %d has non-NULL basic block",
2742 		   INSN_UID (insn), bb->index);
2743 	    err = 1;
2744 	  }
2745       for (insn = BB_FOOTER (bb); insn; insn = NEXT_INSN (insn))
2746 	if (!BARRIER_P (insn)
2747 	    && BLOCK_FOR_INSN (insn) != NULL)
2748 	  {
2749 	    error ("insn %d in footer of bb %d has non-NULL basic block",
2750 		   INSN_UID (insn), bb->index);
2751 	    err = 1;
2752 	  }
2753     }
2754 
2755   /* Clean up.  */
2756   return err;
2757 }
2758 
2759 /* Verify the CFG and RTL consistency common for both underlying RTL and
2760    cfglayout RTL.
2761 
2762    Currently it does the following checks:
2763 
2764    - overlapping of basic blocks
2765    - insns with wrong BLOCK_FOR_INSN pointers
2766    - headers of basic blocks (the NOTE_INSN_BASIC_BLOCK note)
2767    - tails of basic blocks (ensure that boundary is necessary)
2768    - scans body of the basic block for JUMP_INSN, CODE_LABEL
2769      and NOTE_INSN_BASIC_BLOCK
2770    - verify that no fall_thru edge crosses hot/cold partition boundaries
2771    - verify that there are no pending RTL branch predictions
2772    - verify that hot blocks are not dominated by cold blocks
2773 
2774    In the future it can be extended to check a lot of other stuff as well
2775    (reachability of basic blocks, life information, etc.).  */
2776 
2777 static int
2778 rtl_verify_flow_info_1 (void)
2779 {
2780   int err = 0;
2781 
2782   err |= rtl_verify_bb_pointers ();
2783 
2784   err |= rtl_verify_bb_insns ();
2785 
2786   err |= rtl_verify_edges ();
2787 
2788   return err;
2789 }
2790 
2791 /* Walk the instruction chain and verify that bb head/end pointers
2792    are correct, and that instructions are in exactly one bb and have
2793    correct block pointers.  */
2794 
2795 static int
2796 rtl_verify_bb_insn_chain (void)
2797 {
2798   basic_block bb;
2799   int err = 0;
2800   rtx_insn *x;
2801   rtx_insn *last_head = get_last_insn ();
2802   basic_block *bb_info;
2803   const int max_uid = get_max_uid ();
2804 
2805   bb_info = XCNEWVEC (basic_block, max_uid);
2806 
2807   FOR_EACH_BB_REVERSE_FN (bb, cfun)
2808     {
2809       rtx_insn *head = BB_HEAD (bb);
2810       rtx_insn *end = BB_END (bb);
2811 
2812       for (x = last_head; x != NULL_RTX; x = PREV_INSN (x))
2813 	{
2814 	  /* Verify the end of the basic block is in the INSN chain.  */
2815 	  if (x == end)
2816 	    break;
2817 
2818 	  /* And that the code outside of basic blocks has NULL bb field.  */
2819 	  if (!BARRIER_P (x)
2820 	      && BLOCK_FOR_INSN (x) != NULL)
2821 	    {
2822 	      error ("insn %d outside of basic blocks has non-NULL bb field",
2823 		     INSN_UID (x));
2824 	      err = 1;
2825 	    }
2826 	}
2827 
2828       if (!x)
2829 	{
2830 	  error ("end insn %d for block %d not found in the insn stream",
2831 		 INSN_UID (end), bb->index);
2832 	  err = 1;
2833 	}
2834 
2835       /* Work backwards from the end to the head of the basic block
2836 	 to verify the head is in the RTL chain.  */
2837       for (; x != NULL_RTX; x = PREV_INSN (x))
2838 	{
2839 	  /* While walking over the insn chain, verify insns appear
2840 	     in only one basic block.  */
2841 	  if (bb_info[INSN_UID (x)] != NULL)
2842 	    {
2843 	      error ("insn %d is in multiple basic blocks (%d and %d)",
2844 		     INSN_UID (x), bb->index, bb_info[INSN_UID (x)]->index);
2845 	      err = 1;
2846 	    }
2847 
2848 	  bb_info[INSN_UID (x)] = bb;
2849 
2850 	  if (x == head)
2851 	    break;
2852 	}
2853       if (!x)
2854 	{
2855 	  error ("head insn %d for block %d not found in the insn stream",
2856 		 INSN_UID (head), bb->index);
2857 	  err = 1;
2858 	}
2859 
2860       last_head = PREV_INSN (x);
2861     }
2862 
2863   for (x = last_head; x != NULL_RTX; x = PREV_INSN (x))
2864     {
2865       /* Check that the code before the first basic block has NULL
2866 	 bb field.  */
2867       if (!BARRIER_P (x)
2868 	  && BLOCK_FOR_INSN (x) != NULL)
2869 	{
2870 	  error ("insn %d outside of basic blocks has non-NULL bb field",
2871 		 INSN_UID (x));
2872 	  err = 1;
2873 	}
2874     }
2875   free (bb_info);
2876 
2877   return err;
2878 }
2879 
2880 /* Verify that fallthru edges point to adjacent blocks in layout order and
2881    that barriers exist after non-fallthru blocks.  */
2882 
2883 static int
2884 rtl_verify_fallthru (void)
2885 {
2886   basic_block bb;
2887   int err = 0;
2888 
2889   FOR_EACH_BB_REVERSE_FN (bb, cfun)
2890     {
2891       edge e;
2892 
2893       e = find_fallthru_edge (bb->succs);
2894       if (!e)
2895 	{
2896 	  rtx_insn *insn;
2897 
2898 	  /* Ensure existence of barrier in BB with no fallthru edges.  */
2899 	  for (insn = NEXT_INSN (BB_END (bb)); ; insn = NEXT_INSN (insn))
2900 	    {
2901 	      if (!insn || NOTE_INSN_BASIC_BLOCK_P (insn))
2902 		{
2903 		  error ("missing barrier after block %i", bb->index);
2904 		  err = 1;
2905 		  break;
2906 		}
2907 	      if (BARRIER_P (insn))
2908 		break;
2909 	    }
2910 	}
2911       else if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
2912 	       && e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
2913 	{
2914 	  rtx_insn *insn;
2915 
2916 	  if (e->src->next_bb != e->dest)
2917 	    {
2918 	      error
2919 		("verify_flow_info: Incorrect blocks for fallthru %i->%i",
2920 		 e->src->index, e->dest->index);
2921 	      err = 1;
2922 	    }
2923 	  else
2924 	    for (insn = NEXT_INSN (BB_END (e->src)); insn != BB_HEAD (e->dest);
2925 		 insn = NEXT_INSN (insn))
2926 	      if (BARRIER_P (insn) || INSN_P (insn))
2927 		{
2928 		  error ("verify_flow_info: Incorrect fallthru %i->%i",
2929 			 e->src->index, e->dest->index);
2930 		  fatal_insn ("wrong insn in the fallthru edge", insn);
2931 		  err = 1;
2932 		}
2933 	}
2934     }
2935 
2936    return err;
2937   return err;
2938 
2939 /* Verify that blocks are laid out in consecutive order. While walking the
2940    instructions, verify that all expected instructions are inside the basic
2941    blocks, and that all returns are followed by barriers.  */
2942 
2943 static int
2944 rtl_verify_bb_layout (void)
2945 {
2946   basic_block bb;
2947   int err = 0;
2948   rtx_insn *x;
2949   int num_bb_notes;
2950   rtx_insn * const rtx_first = get_insns ();
2951   basic_block last_bb_seen = ENTRY_BLOCK_PTR_FOR_FN (cfun), curr_bb = NULL;
2952 
2953   num_bb_notes = 0;
2954   last_bb_seen = ENTRY_BLOCK_PTR_FOR_FN (cfun);
2955 
2956   for (x = rtx_first; x; x = NEXT_INSN (x))
2957     {
2958       if (NOTE_INSN_BASIC_BLOCK_P (x))
2959 	{
2960 	  bb = NOTE_BASIC_BLOCK (x);
2961 
2962 	  num_bb_notes++;
2963 	  if (bb != last_bb_seen->next_bb)
2964 	    internal_error ("basic blocks not laid down consecutively");
2965 
2966 	  curr_bb = last_bb_seen = bb;
2967 	}
2968 
2969       if (!curr_bb)
2970 	{
2971 	  switch (GET_CODE (x))
2972 	    {
2973 	    case BARRIER:
2974 	    case NOTE:
2975 	      break;
2976 
2977 	    case CODE_LABEL:
2978 	      /* An ADDR_VEC is placed outside any basic block.  */
2979 	      if (NEXT_INSN (x)
2980 		  && JUMP_TABLE_DATA_P (NEXT_INSN (x)))
2981 		x = NEXT_INSN (x);
2982 
2983 	      /* But in any case, non-deletable labels can appear anywhere.  */
2984 	      break;
2985 
2986 	    default:
2987 	      fatal_insn ("insn outside basic block", x);
2988 	    }
2989 	}
2990 
2991       if (JUMP_P (x)
2992 	  && returnjump_p (x) && ! condjump_p (x)
2993 	  && ! (next_nonnote_insn (x) && BARRIER_P (next_nonnote_insn (x))))
2994 	    fatal_insn ("return not followed by barrier", x);
2995 
2996       if (curr_bb && x == BB_END (curr_bb))
2997 	curr_bb = NULL;
2998     }
2999 
3000   if (num_bb_notes != n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS)
3001     internal_error
3002       ("number of bb notes in insn chain (%d) != n_basic_blocks (%d)",
3003        num_bb_notes, n_basic_blocks_for_fn (cfun));
3004 
3005    return err;
3006   return err;
3007 
3008 /* Verify the CFG and RTL consistency common for both underlying RTL and
3009    cfglayout RTL, plus consistency checks specific to linearized RTL mode.
3010 
3011    Currently it does the following checks:
3012    - all checks of rtl_verify_flow_info_1
3013    - test head/end pointers
3014    - check that blocks are laid out in consecutive order
3015    - check that all insns are in the basic blocks
3016      (except the switch handling code, barriers and notes)
3017    - check that all returns are followed by barriers
3018    - check that all fallthru edges point to the adjacent blocks
3019    - verify that there is a single hot/cold partition boundary after bbro  */
3020 
3021 static int
3022 rtl_verify_flow_info (void)
3023 {
3024   int err = 0;
3025 
3026   err |= rtl_verify_flow_info_1 ();
3027 
3028   err |= rtl_verify_bb_insn_chain ();
3029 
3030   err |= rtl_verify_fallthru ();
3031 
3032   err |= rtl_verify_bb_layout ();
3033 
3034   err |= verify_hot_cold_block_grouping ();
3035 
3036   return err;
3037 }
3038 
3039 /* Assume that the preceding pass has possibly eliminated jump instructions
3040    or converted the unconditional jumps.  Eliminate the corresponding edges
3041    from the CFG.  Return true if any edges are eliminated.  */
3042 
3043 bool
3044 purge_dead_edges (basic_block bb)
3045 {
3046   edge e;
3047   rtx_insn *insn = BB_END (bb);
3048   rtx note;
3049   bool purged = false;
3050   bool found;
3051   edge_iterator ei;
3052 
3053   if (DEBUG_INSN_P (insn) && insn != BB_HEAD (bb))
3054     do
3055       insn = PREV_INSN (insn);
3056     while ((DEBUG_INSN_P (insn) || NOTE_P (insn)) && insn != BB_HEAD (bb));
3057 
3058   /* If this instruction cannot trap, remove REG_EH_REGION notes.  */
3059   if (NONJUMP_INSN_P (insn)
3060       && (note = find_reg_note (insn, REG_EH_REGION, NULL)))
3061     {
3062       rtx eqnote;
3063 
3064       if (! may_trap_p (PATTERN (insn))
3065 	  || ((eqnote = find_reg_equal_equiv_note (insn))
3066 	      && ! may_trap_p (XEXP (eqnote, 0))))
3067 	remove_note (insn, note);
3068     }
3069 
3070   /* Cleanup abnormal edges caused by exceptions or non-local gotos.  */
3071   for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
3072     {
3073       bool remove = false;
3074 
3075       /* There are three types of edges we need to handle correctly here: EH
3076 	 edges, abnormal call EH edges, and abnormal call non-EH edges.  The
3077 	 latter can appear when nonlocal gotos are used.  */
3078       if (e->flags & EDGE_ABNORMAL_CALL)
3079 	{
3080 	  if (!CALL_P (insn))
3081 	    remove = true;
3082 	  else if (can_nonlocal_goto (insn))
3083 	    ;
3084 	  else if ((e->flags & EDGE_EH) && can_throw_internal (insn))
3085 	    ;
3086 	  else if (flag_tm && find_reg_note (insn, REG_TM, NULL))
3087 	    ;
3088 	  else
3089 	    remove = true;
3090 	}
3091       else if (e->flags & EDGE_EH)
3092 	remove = !can_throw_internal (insn);
3093 
3094       if (remove)
3095 	{
3096 	  remove_edge (e);
3097 	  df_set_bb_dirty (bb);
3098 	  purged = true;
3099 	}
3100       else
3101 	ei_next (&ei);
3102     }
3103 
3104   if (JUMP_P (insn))
3105     {
3106       rtx note;
3107       edge b,f;
3108       edge_iterator ei;
3109 
3110       /* We only care about conditional jumps and simplejumps.  */
3111       if (!any_condjump_p (insn)
3112 	  && !returnjump_p (insn)
3113 	  && !simplejump_p (insn))
3114 	return purged;
3115 
3116       /* Branch probability/prediction notes are defined only for
3117 	 condjumps.  We've possibly turned the condjump into a simplejump.  */
3118       if (simplejump_p (insn))
3119 	{
3120 	  note = find_reg_note (insn, REG_BR_PROB, NULL);
3121 	  if (note)
3122 	    remove_note (insn, note);
3123 	  while ((note = find_reg_note (insn, REG_BR_PRED, NULL)))
3124 	    remove_note (insn, note);
3125 	}
3126 
3127       for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
3128 	{
3129 	  /* Avoid letting abnormal flags leak from computed jumps turned
3130 	     into simplejumps.  */
3131 
3132 	  e->flags &= ~EDGE_ABNORMAL;
3133 
3134 	  /* See if this edge is one we should keep.  */
3135 	  if ((e->flags & EDGE_FALLTHRU) && any_condjump_p (insn))
3136 	    /* A conditional jump can fall through into the next
3137 	       block, so we should keep the edge.  */
3138 	    {
3139 	      ei_next (&ei);
3140 	      continue;
3141 	    }
3142 	  else if (e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun)
3143 		   && BB_HEAD (e->dest) == JUMP_LABEL (insn))
3144 	    /* If the destination block is the target of the jump,
3145 	       keep the edge.  */
3146 	    {
3147 	      ei_next (&ei);
3148 	      continue;
3149 	    }
3150 	  else if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun)
3151 		   && returnjump_p (insn))
3152 	    /* If the destination block is the exit block, and this
3153 	       instruction is a return, then keep the edge.  */
3154 	    {
3155 	      ei_next (&ei);
3156 	      continue;
3157 	    }
3158 	  else if ((e->flags & EDGE_EH) && can_throw_internal (insn))
3159 	    /* Keep the edges that correspond to exceptions thrown by
3160 	       this instruction and rematerialize the EDGE_ABNORMAL
3161 	       flag we just cleared above.  */
3162 	    {
3163 	      e->flags |= EDGE_ABNORMAL;
3164 	      ei_next (&ei);
3165 	      continue;
3166 	    }
3167 
3168 	  /* We do not need this edge.  */
3169 	  df_set_bb_dirty (bb);
3170 	  purged = true;
3171 	  remove_edge (e);
3172 	}
3173 
3174       if (EDGE_COUNT (bb->succs) == 0 || !purged)
3175 	return purged;
3176 
3177       if (dump_file)
3178 	fprintf (dump_file, "Purged edges from bb %i\n", bb->index);
3179 
3180       if (!optimize)
3181 	return purged;
3182 
3183       /* Redistribute probabilities.  */
3184       if (single_succ_p (bb))
3185 	{
3186 	  single_succ_edge (bb)->probability = REG_BR_PROB_BASE;
3187 	  single_succ_edge (bb)->count = bb->count;
3188 	}
3189       else
3190 	{
3191 	  note = find_reg_note (insn, REG_BR_PROB, NULL);
3192 	  if (!note)
3193 	    return purged;
3194 
3195 	  b = BRANCH_EDGE (bb);
3196 	  f = FALLTHRU_EDGE (bb);
3197 	  b->probability = XINT (note, 0);
3198 	  f->probability = REG_BR_PROB_BASE - b->probability;
3199 	  /* Update these to use GCOV_COMPUTE_SCALE.  */
3200 	  b->count = bb->count * b->probability / REG_BR_PROB_BASE;
3201 	  f->count = bb->count * f->probability / REG_BR_PROB_BASE;
3202 	}
3203 
3204       return purged;
3205     }
3206   else if (CALL_P (insn) && SIBLING_CALL_P (insn))
3207     {
3208       /* First, there should not be any EH or ABCALL edges resulting
3209 	 from non-local gotos and the like.  If there were, we shouldn't
3210 	 have created the sibcall in the first place.  Second, there
3211 	 should of course never have been a fallthru edge.  */
3212       gcc_assert (single_succ_p (bb));
3213       gcc_assert (single_succ_edge (bb)->flags
3214 		  == (EDGE_SIBCALL | EDGE_ABNORMAL));
3215 
3216       return false;
3217     }
3218 
3219   /* If we don't see a jump insn, we don't know exactly why the block would
3220      have been broken at this point.  Look for a simple, non-fallthru edge,
3221      as these are only created by conditional branches.  If we find such an
3222      edge we know that there used to be a jump here and can then safely
3223      remove all non-fallthru edges.  */
3224   found = false;
3225   FOR_EACH_EDGE (e, ei, bb->succs)
3226     if (! (e->flags & (EDGE_COMPLEX | EDGE_FALLTHRU)))
3227       {
3228 	found = true;
3229 	break;
3230       }
3231 
3232   if (!found)
3233     return purged;
3234 
3235   /* Remove all but the fake and fallthru edges.  The fake edge may be
3236      the only successor for this block in the case of noreturn
3237      calls.  */
3238   for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
3239     {
3240       if (!(e->flags & (EDGE_FALLTHRU | EDGE_FAKE)))
3241 	{
3242 	  df_set_bb_dirty (bb);
3243 	  remove_edge (e);
3244 	  purged = true;
3245 	}
3246       else
3247 	ei_next (&ei);
3248     }
3249 
3250   gcc_assert (single_succ_p (bb));
3251 
3252   single_succ_edge (bb)->probability = REG_BR_PROB_BASE;
3253   single_succ_edge (bb)->count = bb->count;
3254 
3255   if (dump_file)
3256     fprintf (dump_file, "Purged non-fallthru edges from bb %i\n",
3257 	     bb->index);
3258   return purged;
3259 }
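
/* An illustrative sketch (not part of GCC itself): after simplifying the
   jump that ends BB, e.g. folding a conditional branch to an unconditional
   one, a pass keeps the CFG consistent with

     if (purge_dead_edges (bb))
       ... possibly run further cleanups ...

   purge_dead_edges itself only removes the edges that the remaining
   instructions can no longer take.  */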
3260 
3261 /* Search all basic blocks for potentially dead edges and purge them.  Return
3262    true if some edge has been eliminated.  */
3263 
3264 bool
3265 purge_all_dead_edges (void)
3266 {
3267   bool purged = false;
3268   basic_block bb;
3269 
3270   FOR_EACH_BB_FN (bb, cfun)
3271     {
3272       bool purged_here = purge_dead_edges (bb);
3273 
3274       purged |= purged_here;
3275     }
3276 
3277   return purged;
3278 }
3279 
3280 /* This is used by a few passes that emit some instructions after abnormal
3281    calls, moving the basic block's end, while they in fact do want to emit
3282    them on the fallthru edge.  Look for abnormal call edges, search back
3283    for the call in the block, and insert the instructions on the edge instead.
3284 
3285    Similarly, handle instructions throwing exceptions internally.
3286 
3287    Return true when instructions have been found and inserted on edges.  */
3288 
3289 bool
3290 fixup_abnormal_edges (void)
3291 {
3292   bool inserted = false;
3293   basic_block bb;
3294 
3295   FOR_EACH_BB_FN (bb, cfun)
3296     {
3297       edge e;
3298       edge_iterator ei;
3299 
3300       /* Look for cases we are interested in - calls or instructions causing
3301          exceptions.  */
3302       FOR_EACH_EDGE (e, ei, bb->succs)
3303 	if ((e->flags & EDGE_ABNORMAL_CALL)
3304 	    || ((e->flags & (EDGE_ABNORMAL | EDGE_EH))
3305 		== (EDGE_ABNORMAL | EDGE_EH)))
3306 	  break;
3307 
3308       if (e && !CALL_P (BB_END (bb)) && !can_throw_internal (BB_END (bb)))
3309 	{
3310 	  rtx_insn *insn;
3311 
3312 	  /* Get past the new insns generated.  Allow notes, as the insns
3313 	     may already be deleted.  */
3314 	  insn = BB_END (bb);
3315 	  while ((NONJUMP_INSN_P (insn) || NOTE_P (insn))
3316 		 && !can_throw_internal (insn)
3317 		 && insn != BB_HEAD (bb))
3318 	    insn = PREV_INSN (insn);
3319 
3320 	  if (CALL_P (insn) || can_throw_internal (insn))
3321 	    {
3322 	      rtx_insn *stop, *next;
3323 
3324 	      e = find_fallthru_edge (bb->succs);
3325 
3326 	      stop = NEXT_INSN (BB_END (bb));
3327 	      BB_END (bb) = insn;
3328 
3329 	      for (insn = NEXT_INSN (insn); insn != stop; insn = next)
3330 		{
3331 		  next = NEXT_INSN (insn);
3332 		  if (INSN_P (insn))
3333 		    {
3334 		      delete_insn (insn);
3335 
3336 		      /* Sometimes there's still the return value USE.
3337 			 If it's placed after a trapping call (i.e. that
3338 			 call is the last insn anyway), we have no fallthru
3339 			 edge.  Simply delete this use and don't try to insert
3340 			 on the non-existent edge.  */
3341 		      if (GET_CODE (PATTERN (insn)) != USE)
3342 			{
3343 			  /* We're not deleting it, we're moving it.  */
3344 			  insn->set_undeleted ();
3345 			  SET_PREV_INSN (insn) = NULL_RTX;
3346 			  SET_NEXT_INSN (insn) = NULL_RTX;
3347 
3348 			  insert_insn_on_edge (insn, e);
3349 			  inserted = true;
3350 			}
3351 		    }
3352 		  else if (!BARRIER_P (insn))
3353 		    set_block_for_insn (insn, NULL);
3354 		}
3355 	    }
3356 
3357 	  /* It may be that we don't find any trapping insn.  In this
3358 	     case we discovered quite late that the insn that had been
3359 	     marked as can_throw_internal in fact couldn't trap at all.
3360 	     So we should in fact delete the EH edges out of the block.  */
3361 	  else
3362 	    purge_dead_edges (bb);
3363 	}
3364     }
3365 
3366   return inserted;
3367 }
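
/* As an illustration only: when a pass has emitted, say, a register
   restore after an abnormal call, the restore initially sits inside the
   call's block; the routine above detaches such insns and requeues them
   with insert_insn_on_edge so that they execute on the fallthru path
   alone.  */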
3368 
3369 /* Cut the insns from FIRST to LAST out of the insn stream.  */
3370 
3371 rtx_insn *
3372 unlink_insn_chain (rtx_insn *first, rtx_insn *last)
3373 {
3374   rtx_insn *prevfirst = PREV_INSN (first);
3375   rtx_insn *nextlast = NEXT_INSN (last);
3376 
3377   SET_PREV_INSN (first) = NULL;
3378   SET_NEXT_INSN (last) = NULL;
3379   if (prevfirst)
3380     SET_NEXT_INSN (prevfirst) = nextlast;
3381   if (nextlast)
3382     SET_PREV_INSN (nextlast) = prevfirst;
3383   else
3384     set_last_insn (prevfirst);
3385   if (!prevfirst)
3386     set_first_insn (nextlast);
3387   return first;
3388 }
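
/* A picture of the surgery above: starting from

     prevfirst <-> FIRST <-> ... <-> LAST <-> nextlast

   the chain becomes

     prevfirst <-> nextlast

   while FIRST ... LAST survive as a detached chain with NULL ends, ready
   to be stored away, e.g. as a basic block header or footer.  */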
3389 
3390 /* Skip over inter-block insns occurring after BB which are typically
3391    associated with BB (e.g., barriers). If there are any such insns,
3392    we return the last one. Otherwise, we return the end of BB.  */
3393 
3394 static rtx_insn *
3395 skip_insns_after_block (basic_block bb)
3396 {
3397   rtx_insn *insn, *last_insn, *next_head, *prev;
3398 
3399   next_head = NULL;
3400   if (bb->next_bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
3401     next_head = BB_HEAD (bb->next_bb);
3402 
3403   for (last_insn = insn = BB_END (bb); (insn = NEXT_INSN (insn)) != 0; )
3404     {
3405       if (insn == next_head)
3406 	break;
3407 
3408       switch (GET_CODE (insn))
3409 	{
3410 	case BARRIER:
3411 	  last_insn = insn;
3412 	  continue;
3413 
3414 	case NOTE:
3415 	  switch (NOTE_KIND (insn))
3416 	    {
3417 	    case NOTE_INSN_BLOCK_END:
3418 	      gcc_unreachable ();
3419 	      continue;
3420 	    default:
3421 	      continue;
3423 	    }
3424 	  break;
3425 
3426 	case CODE_LABEL:
3427 	  if (NEXT_INSN (insn)
3428 	      && JUMP_TABLE_DATA_P (NEXT_INSN (insn)))
3429 	    {
3430 	      insn = NEXT_INSN (insn);
3431 	      last_insn = insn;
3432 	      continue;
3433 	    }
3434 	  break;
3435 
3436 	default:
3437 	  break;
3438 	}
3439 
3440       break;
3441     }
3442 
3443   /* It is possible to hit a contradictory sequence.  For instance:
3444 
3445      jump_insn
3446      NOTE_INSN_BLOCK_BEG
3447      barrier
3448 
3449      where the barrier belongs to the jump_insn, but the note does not.
3450      This can be created by removing the basic block originally following
3451      NOTE_INSN_BLOCK_BEG.  In such a case, reorder the notes.  */
3452 
3453   for (insn = last_insn; insn != BB_END (bb); insn = prev)
3454     {
3455       prev = PREV_INSN (insn);
3456       if (NOTE_P (insn))
3457 	switch (NOTE_KIND (insn))
3458 	  {
3459 	  case NOTE_INSN_BLOCK_END:
3460 	    gcc_unreachable ();
3461 	    break;
3462 	  case NOTE_INSN_DELETED:
3463 	  case NOTE_INSN_DELETED_LABEL:
3464 	  case NOTE_INSN_DELETED_DEBUG_LABEL:
3465 	    continue;
3466 	  default:
3467 	    reorder_insns (insn, insn, last_insn);
3468 	  }
3469     }
3470 
3471   return last_insn;
3472 }
3473 
3474 /* Locate or create a label for a given basic block.  */
3475 
3476 static rtx
3477 label_for_bb (basic_block bb)
3478 {
3479   rtx label = BB_HEAD (bb);
3480 
3481   if (!LABEL_P (label))
3482     {
3483       if (dump_file)
3484 	fprintf (dump_file, "Emitting label for block %d\n", bb->index);
3485 
3486       label = block_label (bb);
3487     }
3488 
3489   return label;
3490 }
3491 
3492 /* Locate the effective beginning and end of the insn chain for each
3493    block, as defined by skip_insns_after_block above.  */
3494 
3495 static void
3496 record_effective_endpoints (void)
3497 {
3498   rtx_insn *next_insn;
3499   basic_block bb;
3500   rtx_insn *insn;
3501 
3502   for (insn = get_insns ();
3503        insn
3504        && NOTE_P (insn)
3505        && NOTE_KIND (insn) != NOTE_INSN_BASIC_BLOCK;
3506        insn = NEXT_INSN (insn))
3507     continue;
3508   /* No basic blocks at all?  */
3509   gcc_assert (insn);
3510 
3511   if (PREV_INSN (insn))
3512     cfg_layout_function_header =
3513 	    unlink_insn_chain (get_insns (), PREV_INSN (insn));
3514   else
3515     cfg_layout_function_header = NULL;
3516 
3517   next_insn = get_insns ();
3518   FOR_EACH_BB_FN (bb, cfun)
3519     {
3520       rtx_insn *end;
3521 
3522       if (PREV_INSN (BB_HEAD (bb)) && next_insn != BB_HEAD (bb))
3523 	BB_HEADER (bb) = unlink_insn_chain (next_insn,
3524 						PREV_INSN (BB_HEAD (bb)));
3525       end = skip_insns_after_block (bb);
3526       if (NEXT_INSN (BB_END (bb)) && BB_END (bb) != end)
3527 	BB_FOOTER (bb) = unlink_insn_chain (NEXT_INSN (BB_END (bb)), end);
3528       next_insn = NEXT_INSN (BB_END (bb));
3529     }
3530 
3531   cfg_layout_function_footer = next_insn;
3532   if (cfg_layout_function_footer)
3533     cfg_layout_function_footer = unlink_insn_chain (cfg_layout_function_footer, get_last_insn ());
3534 }
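
/* A sketch of the partition record_effective_endpoints produces.  For
   an insn stream laid out as

     [leading notes] [hdr1] BB1 insns [ftr1] [hdr2] BB2 insns [ftr2] [tail]

   the leading notes become cfg_layout_function_header, each [hdrN] and
   [ftrN] is unlinked into the BB_HEADER/BB_FOOTER of its block, the
   trailing insns become cfg_layout_function_footer, and only the blocks'
   own insns remain on the main chain.  */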
3535 
3536 namespace {
3537 
3538 const pass_data pass_data_into_cfg_layout_mode =
3539 {
3540   RTL_PASS, /* type */
3541   "into_cfglayout", /* name */
3542   OPTGROUP_NONE, /* optinfo_flags */
3543   TV_CFG, /* tv_id */
3544   0, /* properties_required */
3545   PROP_cfglayout, /* properties_provided */
3546   0, /* properties_destroyed */
3547   0, /* todo_flags_start */
3548   0, /* todo_flags_finish */
3549 };
3550 
3551 class pass_into_cfg_layout_mode : public rtl_opt_pass
3552 {
3553 public:
3554   pass_into_cfg_layout_mode (gcc::context *ctxt)
3555     : rtl_opt_pass (pass_data_into_cfg_layout_mode, ctxt)
3556   {}
3557 
3558   /* opt_pass methods: */
3559   virtual unsigned int execute (function *)
3560     {
3561       cfg_layout_initialize (0);
3562       return 0;
3563     }
3564 
3565 }; // class pass_into_cfg_layout_mode
3566 
3567 } // anon namespace
3568 
3569 rtl_opt_pass *
3570 make_pass_into_cfg_layout_mode (gcc::context *ctxt)
3571 {
3572   return new pass_into_cfg_layout_mode (ctxt);
3573 }
3574 
3575 namespace {
3576 
3577 const pass_data pass_data_outof_cfg_layout_mode =
3578 {
3579   RTL_PASS, /* type */
3580   "outof_cfglayout", /* name */
3581   OPTGROUP_NONE, /* optinfo_flags */
3582   TV_CFG, /* tv_id */
3583   0, /* properties_required */
3584   0, /* properties_provided */
3585   PROP_cfglayout, /* properties_destroyed */
3586   0, /* todo_flags_start */
3587   0, /* todo_flags_finish */
3588 };
3589 
3590 class pass_outof_cfg_layout_mode : public rtl_opt_pass
3591 {
3592 public:
3593   pass_outof_cfg_layout_mode (gcc::context *ctxt)
3594     : rtl_opt_pass (pass_data_outof_cfg_layout_mode, ctxt)
3595   {}
3596 
3597   /* opt_pass methods: */
3598   virtual unsigned int execute (function *);
3599 
3600 }; // class pass_outof_cfg_layout_mode
3601 
3602 unsigned int
3603 pass_outof_cfg_layout_mode::execute (function *fun)
3604 {
3605   basic_block bb;
3606 
3607   FOR_EACH_BB_FN (bb, fun)
3608     if (bb->next_bb != EXIT_BLOCK_PTR_FOR_FN (fun))
3609       bb->aux = bb->next_bb;
3610 
3611   cfg_layout_finalize ();
3612 
3613   return 0;
3614 }
3615 
3616 } // anon namespace
3617 
3618 rtl_opt_pass *
3619 make_pass_outof_cfg_layout_mode (gcc::context *ctxt)
3620 {
3621   return new pass_outof_cfg_layout_mode (ctxt);
3622 }
3623 
3624 
3625 /* Link the basic blocks in the correct order, compacting the basic
3626    block queue while at it.  If STAY_IN_CFGLAYOUT_MODE is false, this
3627    function also clears the basic block header and footer fields.
3628 
3629    This function is usually called after a pass (e.g. tracer) finishes
3630    some transformations while in cfglayout mode.  The required sequence
3631    of the basic blocks is in a linked list along the bb->aux field.
3632    This function re-links the basic block prev_bb and next_bb pointers
3633    accordingly, and it compacts and renumbers the blocks.
3634 
3635    FIXME: This currently works only for RTL, but the only RTL-specific
3636    bits are the STAY_IN_CFGLAYOUT_MODE bits.  The tracer pass was moved
3637    to GIMPLE a long time ago, but it doesn't relink the basic block
3638    chain.  It could do that (to give better initial RTL) if this function
3639    is made IR-agnostic (and moved to cfganal.c or cfg.c while at it).  */
3640 
3641 void
3642 relink_block_chain (bool stay_in_cfglayout_mode)
3643 {
3644   basic_block bb, prev_bb;
3645   int index;
3646 
3647   /* Maybe dump the re-ordered sequence.  */
3648   if (dump_file)
3649     {
3650       fprintf (dump_file, "Reordered sequence:\n");
3651       for (bb = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb, index =
3652 	   NUM_FIXED_BLOCKS;
3653 	   bb;
3654 	   bb = (basic_block) bb->aux, index++)
3655 	{
3656 	  fprintf (dump_file, " %i ", index);
3657 	  if (get_bb_original (bb))
3658 	    fprintf (dump_file, "duplicate of %i ",
3659 		     get_bb_original (bb)->index);
3660 	  else if (forwarder_block_p (bb)
3661 		   && !LABEL_P (BB_HEAD (bb)))
3662 	    fprintf (dump_file, "compensation ");
3663 	  else
3664 	    fprintf (dump_file, "bb %i ", bb->index);
3665 	  fprintf (dump_file, " [%i]\n", bb->frequency);
3666 	}
3667     }
3668 
3669   /* Now reorder the blocks.  */
3670   prev_bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);
3671   bb = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb;
3672   for (; bb; prev_bb = bb, bb = (basic_block) bb->aux)
3673     {
3674       bb->prev_bb = prev_bb;
3675       prev_bb->next_bb = bb;
3676     }
3677   prev_bb->next_bb = EXIT_BLOCK_PTR_FOR_FN (cfun);
3678   EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb = prev_bb;
3679 
3680   /* Then, clean up the aux fields.  */
3681   FOR_ALL_BB_FN (bb, cfun)
3682     {
3683       bb->aux = NULL;
3684       if (!stay_in_cfglayout_mode)
3685 	BB_HEADER (bb) = BB_FOOTER (bb) = NULL;
3686     }
3687 
3688   /* Maybe reset the original copy tables; they are not valid anymore
3689      when we renumber the basic blocks in compact_blocks.  If we are
3690      going out of cfglayout mode, don't re-allocate the tables.  */
3691   free_original_copy_tables ();
3692   if (stay_in_cfglayout_mode)
3693     initialize_original_copy_tables ();
3694 
3695   /* Finally, put basic_block_info in the new order.  */
3696   compact_blocks ();
3697 }
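
/* A minimal sketch of the relinking loop above, on a hypothetical block
   type (fenced off, not compiled): the new order is given by the aux
   chain starting at the entry block's successor.  */
#if 0
struct blk { struct blk *prev, *next, *aux; };

static void
relink (struct blk *entry, struct blk *exit)
{
  struct blk *prev = entry;
  struct blk *b;

  /* Walk the aux chain, rebuilding the prev/next links as we go.  */
  for (b = entry->next; b; prev = b, b = b->aux)
    {
      b->prev = prev;
      prev->next = b;
    }

  /* Terminate the chain at the exit block.  */
  prev->next = exit;
  exit->prev = prev;
}
#endif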
3698 
3699 
3700 /* Given a reorder chain, rearrange the code to match.  */
3701 
3702 static void
3703 fixup_reorder_chain (void)
3704 {
3705   basic_block bb;
3706   rtx_insn *insn = NULL;
3707 
3708   if (cfg_layout_function_header)
3709     {
3710       set_first_insn (cfg_layout_function_header);
3711       insn = cfg_layout_function_header;
3712       while (NEXT_INSN (insn))
3713 	insn = NEXT_INSN (insn);
3714     }
3715 
3716   /* First do the bulk reordering -- rechain the blocks without regard to
3717      the needed changes to jumps and labels.  */
3718 
3719   for (bb = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb; bb; bb = (basic_block)
3720        bb->aux)
3721     {
3722       if (BB_HEADER (bb))
3723 	{
3724 	  if (insn)
3725 	    SET_NEXT_INSN (insn) = BB_HEADER (bb);
3726 	  else
3727 	    set_first_insn (BB_HEADER (bb));
3728 	  SET_PREV_INSN (BB_HEADER (bb)) = insn;
3729 	  insn = BB_HEADER (bb);
3730 	  while (NEXT_INSN (insn))
3731 	    insn = NEXT_INSN (insn);
3732 	}
3733       if (insn)
3734 	SET_NEXT_INSN (insn) = BB_HEAD (bb);
3735       else
3736 	set_first_insn (BB_HEAD (bb));
3737       SET_PREV_INSN (BB_HEAD (bb)) = insn;
3738       insn = BB_END (bb);
3739       if (BB_FOOTER (bb))
3740 	{
3741 	  SET_NEXT_INSN (insn) = BB_FOOTER (bb);
3742 	  SET_PREV_INSN (BB_FOOTER (bb)) = insn;
3743 	  while (NEXT_INSN (insn))
3744 	    insn = NEXT_INSN (insn);
3745 	}
3746     }
3747 
3748   SET_NEXT_INSN (insn) = cfg_layout_function_footer;
3749   if (cfg_layout_function_footer)
3750     SET_PREV_INSN (cfg_layout_function_footer) = insn;
3751 
3752   while (NEXT_INSN (insn))
3753     insn = NEXT_INSN (insn);
3754 
3755   set_last_insn (insn);
3756 #ifdef ENABLE_CHECKING
3757   verify_insn_chain ();
3758 #endif
3759 
3760   /* Now add jumps and labels as needed to match the blocks' new
3761      outgoing edges.  */
3762 
3763   for (bb = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb; bb ; bb = (basic_block)
3764        bb->aux)
3765     {
3766       edge e_fall, e_taken, e;
3767       rtx_insn *bb_end_insn;
3768       rtx ret_label = NULL_RTX;
3769       basic_block nb;
3770       edge_iterator ei;
3771 
3772       if (EDGE_COUNT (bb->succs) == 0)
3773 	continue;
3774 
3775       /* Find the old fallthru edge, and another non-EH edge for
3776 	 a taken jump.  */
3777       e_taken = e_fall = NULL;
3778 
3779       FOR_EACH_EDGE (e, ei, bb->succs)
3780 	if (e->flags & EDGE_FALLTHRU)
3781 	  e_fall = e;
3782 	else if (! (e->flags & EDGE_EH))
3783 	  e_taken = e;
3784 
3785       bb_end_insn = BB_END (bb);
3786       if (JUMP_P (bb_end_insn))
3787 	{
3788 	  ret_label = JUMP_LABEL (bb_end_insn);
3789 	  if (any_condjump_p (bb_end_insn))
3790 	    {
3791 	      /* This might happen if the conditional jump has side
3792 		 effects and could therefore not be optimized away.
3793 		 Make the basic block end with a barrier in order
3794 		 to prevent rtl_verify_flow_info from complaining.  */
3795 	      if (!e_fall)
3796 		{
3797 		  gcc_assert (!onlyjump_p (bb_end_insn)
3798 			      || returnjump_p (bb_end_insn)
3799                               || (e_taken->flags & EDGE_CROSSING));
3800 		  emit_barrier_after (bb_end_insn);
3801 		  continue;
3802 		}
3803 
3804 	      /* If the old fallthru is still next, nothing to do.  */
3805 	      if (bb->aux == e_fall->dest
3806 		  || e_fall->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
3807 		continue;
3808 
3809 	      /* The degenerate case of a conditional jump jumping to the next
3810 		 instruction can happen for jumps with side effects.  We need
3811 		 to construct a forwarder block and this will be done just
3812 		 fine by force_nonfallthru below.  */
3813 	      if (!e_taken)
3814 		;
3815 
3816 	      /* There is another special case: if *neither* block is next,
3817 		 as happens at the very end of a function, then we'll
3818 		 need to add a new unconditional jump.  Choose the taken
3819 		 edge based on known or assumed probability.  */
3820 	      else if (bb->aux != e_taken->dest)
3821 		{
3822 		  rtx note = find_reg_note (bb_end_insn, REG_BR_PROB, 0);
3823 
3824 		  if (note
3825 		      && XINT (note, 0) < REG_BR_PROB_BASE / 2
3826 		      && invert_jump (bb_end_insn,
3827 				      (e_fall->dest
3828 				       == EXIT_BLOCK_PTR_FOR_FN (cfun)
3829 				       ? NULL_RTX
3830 				       : label_for_bb (e_fall->dest)), 0))
3831 		    {
3832 		      e_fall->flags &= ~EDGE_FALLTHRU;
3833 		      gcc_checking_assert (could_fall_through
3834 					   (e_taken->src, e_taken->dest));
3835 		      e_taken->flags |= EDGE_FALLTHRU;
3836 		      update_br_prob_note (bb);
3837 		      e = e_fall, e_fall = e_taken, e_taken = e;
3838 		    }
3839 		}
3840 
3841 	      /* If the "jumping" edge is a crossing edge, and the fall
3842 		 through edge is non-crossing, leave things as they are.  */
3843 	      else if ((e_taken->flags & EDGE_CROSSING)
3844 		       && !(e_fall->flags & EDGE_CROSSING))
3845 		continue;
3846 
3847 	      /* Otherwise we can try to invert the jump.  This basically
3848 		 never fails; however, keep up the pretense.  */
3849 	      else if (invert_jump (bb_end_insn,
3850 				    (e_fall->dest
3851 				     == EXIT_BLOCK_PTR_FOR_FN (cfun)
3852 				     ? NULL_RTX
3853 				     : label_for_bb (e_fall->dest)), 0))
3854 		{
3855 		  e_fall->flags &= ~EDGE_FALLTHRU;
3856 		  gcc_checking_assert (could_fall_through
3857 				       (e_taken->src, e_taken->dest));
3858 		  e_taken->flags |= EDGE_FALLTHRU;
3859 		  update_br_prob_note (bb);
3860 		  if (LABEL_NUSES (ret_label) == 0
3861 		      && single_pred_p (e_taken->dest))
3862 		    delete_insn (ret_label);
3863 		  continue;
3864 		}
3865 	    }
3866 	  else if (extract_asm_operands (PATTERN (bb_end_insn)) != NULL)
3867 	    {
3868 	      /* If the old fallthru is still next, or if the asm goto
3869 		 doesn't have a fallthru (e.g. when followed by
3870 		 __builtin_unreachable ()), there is nothing to do.  */
3871 	      if (! e_fall
3872 		  || bb->aux == e_fall->dest
3873 		  || e_fall->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
3874 		continue;
3875 
3876 	      /* Otherwise we'll have to use the fallthru fixup below.  */
3877 	    }
3878 	  else
3879 	    {
3880 	      /* Otherwise we have some return, switch or computed
3881 		 jump.  In the 99% case, there should not have been a
3882 		 fallthru edge.  */
3883 	      gcc_assert (returnjump_p (bb_end_insn) || !e_fall);
3884 	      continue;
3885 	    }
3886 	}
3887       else
3888 	{
3889 	  /* No fallthru implies a noreturn function with EH edges, or
3890 	     something similarly bizarre.  In any case, we don't need to
3891 	     do anything.  */
3892 	  if (! e_fall)
3893 	    continue;
3894 
3895 	  /* If the fallthru block is still next, nothing to do.  */
3896 	  if (bb->aux == e_fall->dest)
3897 	    continue;
3898 
3899 	  /* A fallthru to exit block.  */
3900 	  if (e_fall->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
3901 	    continue;
3902 	}
3903 
3904       /* We got here if we need to add a new jump insn.
3905 	 Note force_nonfallthru can delete E_FALL and thus we have to
3906 	 save E_FALL->src prior to the call to force_nonfallthru.  */
3907       nb = force_nonfallthru_and_redirect (e_fall, e_fall->dest, ret_label);
3908       if (nb)
3909 	{
3910 	  nb->aux = bb->aux;
3911 	  bb->aux = nb;
3912 	  /* Don't process this new block.  */
3913 	  bb = nb;
3914 	}
3915     }
3916 
3917   relink_block_chain (/*stay_in_cfglayout_mode=*/false);
3918 
3919   /* Annoying special case - jump around dead jumptables left in the code.  */
3920   FOR_EACH_BB_FN (bb, cfun)
3921     {
3922       edge e = find_fallthru_edge (bb->succs);
3923 
3924       if (e && !can_fallthru (e->src, e->dest))
3925 	force_nonfallthru (e);
3926     }
3927 
3928   /* Ensure that each edge's goto_locus is represented by at least one
3929      instruction with that locus in the RTL.  */
3930   if (!optimize)
3931     FOR_EACH_BB_FN (bb, cfun)
3932       {
3933         edge e;
3934         edge_iterator ei;
3935 
3936         FOR_EACH_EDGE (e, ei, bb->succs)
3937 	  if (LOCATION_LOCUS (e->goto_locus) != UNKNOWN_LOCATION
3938 	      && !(e->flags & EDGE_ABNORMAL))
3939 	    {
3940 	      edge e2;
3941 	      edge_iterator ei2;
3942 	      basic_block dest, nb;
3943 	      rtx_insn *end;
3944 
3945 	      insn = BB_END (e->src);
3946 	      end = PREV_INSN (BB_HEAD (e->src));
3947 	      while (insn != end
3948 		     && (!NONDEBUG_INSN_P (insn) || !INSN_HAS_LOCATION (insn)))
3949 		insn = PREV_INSN (insn);
3950 	      if (insn != end
3951 		  && INSN_LOCATION (insn) == e->goto_locus)
3952 		continue;
3953 	      if (simplejump_p (BB_END (e->src))
3954 		  && !INSN_HAS_LOCATION (BB_END (e->src)))
3955 		{
3956 		  INSN_LOCATION (BB_END (e->src)) = e->goto_locus;
3957 		  continue;
3958 		}
3959 	      dest = e->dest;
3960 	      if (dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
3961 		{
3962 		  /* Non-fallthru edges to the exit block cannot be split.  */
3963 		  if (!(e->flags & EDGE_FALLTHRU))
3964 		    continue;
3965 		}
3966 	      else
3967 		{
3968 		  insn = BB_HEAD (dest);
3969 		  end = NEXT_INSN (BB_END (dest));
3970 		  while (insn != end && !NONDEBUG_INSN_P (insn))
3971 		    insn = NEXT_INSN (insn);
3972 		  if (insn != end && INSN_HAS_LOCATION (insn)
3973 		      && INSN_LOCATION (insn) == e->goto_locus)
3974 		    continue;
3975 		}
3976 	      nb = split_edge (e);
3977 	      if (!INSN_P (BB_END (nb)))
3978 		BB_END (nb) = emit_insn_after_noloc (gen_nop (), BB_END (nb),
3979 							 nb);
3980 	      INSN_LOCATION (BB_END (nb)) = e->goto_locus;
3981 
3982 	      /* If there are other incoming edges to the destination block
3983 		 with the same goto locus, redirect them to the new block as
3984 		 well, this can prevent other such blocks from being created
3985 		 in subsequent iterations of the loop.  */
3986 	      for (ei2 = ei_start (dest->preds); (e2 = ei_safe_edge (ei2)); )
3987 		if (LOCATION_LOCUS (e2->goto_locus) != UNKNOWN_LOCATION
3988 		    && !(e2->flags & (EDGE_ABNORMAL | EDGE_FALLTHRU))
3989 		    && e->goto_locus == e2->goto_locus)
3990 		  redirect_edge_and_branch (e2, nb);
3991 		else
3992 		  ei_next (&ei2);
3993 	    }
3994       }
3995 }
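
/* In summary, for a block ending in "if (cond) goto TAKEN;" with
   fallthru edge FALL, the cases handled above are:

     - FALL's destination is still the next block (or the exit block):
       nothing to do;
     - TAKEN's destination is next: try to invert the condition so the
       old taken edge becomes the fallthru, unless the taken edge is a
       section-crossing edge and the fallthru edge is not;
     - neither destination is next: invert only if the REG_BR_PROB note
       says the taken edge is the less likely one, then let
       force_nonfallthru_and_redirect emit an explicit jump (and, if
       necessary, a forwarder block) for the edge that can no longer
       fall through.  */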
3996 
3997 /* Perform sanity checks on the insn chain.
3998    1. Check that next/prev pointers are consistent in both the forward and
3999       reverse direction.
4000    2. Count insns in chain, going both directions, and check if equal.
4001    3. Check that get_last_insn () returns the actual end of chain.  */
4002 
4003 DEBUG_FUNCTION void
4004 verify_insn_chain (void)
4005 {
4006   rtx_insn *x, *prevx, *nextx;
4007   int insn_cnt1, insn_cnt2;
4008 
4009   for (prevx = NULL, insn_cnt1 = 1, x = get_insns ();
4010        x != 0;
4011        prevx = x, insn_cnt1++, x = NEXT_INSN (x))
4012     gcc_assert (PREV_INSN (x) == prevx);
4013 
4014   gcc_assert (prevx == get_last_insn ());
4015 
4016   for (nextx = NULL, insn_cnt2 = 1, x = get_last_insn ();
4017        x != 0;
4018        nextx = x, insn_cnt2++, x = PREV_INSN (x))
4019     gcc_assert (NEXT_INSN (x) == nextx);
4020 
4021   gcc_assert (insn_cnt1 == insn_cnt2);
4022 }
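
/* The same two-pass consistency check, sketched on a generic doubly
   linked list with hypothetical types (fenced off, not compiled):  */
#if 0
#include <assert.h>

struct node { struct node *prev, *next; };

static void
verify_chain (struct node *head, struct node *tail)
{
  struct node *x, *p;
  int cnt1 = 0, cnt2 = 0;

  /* Forward pass: each node's prev must point at its predecessor,
     and the walk must end at the recorded tail.  */
  for (p = NULL, x = head; x; p = x, x = x->next, cnt1++)
    assert (x->prev == p);
  assert (p == tail);

  /* Backward pass: each node's next must point at its successor.  */
  for (p = NULL, x = tail; x; p = x, x = x->prev, cnt2++)
    assert (x->next == p);

  /* Both directions must see the same number of nodes.  */
  assert (cnt1 == cnt2);
}
#endif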
4023 
4024 /* If we have assembler epilogues, the block falling through to exit must
4025    be the last one in the reordered chain when we reach final.  Ensure
4026    that this condition is met.  */
4027 static void
4028 fixup_fallthru_exit_predecessor (void)
4029 {
4030   edge e;
4031   basic_block bb = NULL;
4032 
4033   /* This transformation is not valid before reload, because we might
4034      separate a call from the instruction that copies the return
4035      value.  */
4036   gcc_assert (reload_completed);
4037 
4038   e = find_fallthru_edge (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds);
4039   if (e)
4040     bb = e->src;
4041 
4042   if (bb && bb->aux)
4043     {
4044       basic_block c = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb;
4045 
4046       /* If the very first block is the one with the fall-through exit
4047 	 edge, we have to split that block.  */
4048       if (c == bb)
4049 	{
4050 	  bb = split_block (bb, NULL)->dest;
4051 	  bb->aux = c->aux;
4052 	  c->aux = bb;
4053 	  BB_FOOTER (bb) = BB_FOOTER (c);
4054 	  BB_FOOTER (c) = NULL;
4055 	}
4056 
4057       while (c->aux != bb)
4058 	c = (basic_block) c->aux;
4059 
4060       c->aux = bb->aux;
4061       while (c->aux)
4062 	c = (basic_block) c->aux;
4063 
4064       c->aux = bb;
4065       bb->aux = NULL;
4066     }
4067 }
4068 
4069 /* In case the exit block has more than one fallthru predecessor, force
4070    it to have only one.  */
4071 
4072 static void
4073 force_one_exit_fallthru (void)
4074 {
4075   edge e, predecessor = NULL;
4076   bool more = false;
4077   edge_iterator ei;
4078   basic_block forwarder, bb;
4079 
4080   FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
4081     if (e->flags & EDGE_FALLTHRU)
4082       {
4083 	if (predecessor == NULL)
4084 	  predecessor = e;
4085 	else
4086 	  {
4087 	    more = true;
4088 	    break;
4089 	  }
4090       }
4091 
4092   if (!more)
4093     return;
4094 
4095   /* Exit has several fallthru predecessors.  Create a forwarder block for
4096      them.  */
4097   forwarder = split_edge (predecessor);
4098   for (ei = ei_start (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds);
4099        (e = ei_safe_edge (ei)); )
4100     {
4101       if (e->src == forwarder
4102 	  || !(e->flags & EDGE_FALLTHRU))
4103 	ei_next (&ei);
4104       else
4105 	redirect_edge_and_branch_force (e, forwarder);
4106     }
4107 
4108   /* Fix up the chain of blocks -- make FORWARDER immediately precede the
4109      exit block.  */
4110   FOR_EACH_BB_FN (bb, cfun)
4111     {
4112       if (bb->aux == NULL && bb != forwarder)
4113 	{
4114 	  bb->aux = forwarder;
4115 	  break;
4116 	}
4117     }
4118 }
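
/* A sketch of the transformation above:

     BB1 --fallthru--> EXIT              BB1 --fallthru--> FWD
     BB2 --fallthru--> EXIT   becomes    BB2 ----jump----> FWD
     BB3 --fallthru--> EXIT              BB3 ----jump----> FWD --fallthru--> EXIT

   FWD is created by splitting the first fallthru edge; the remaining
   fallthru predecessors are redirected to it, and FWD is chained (via
   the aux field) as the last block before the exit block.  */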
4119 
4120 /* Return true in case it is possible to duplicate the basic block BB.  */
4121 
4122 static bool
4123 cfg_layout_can_duplicate_bb_p (const_basic_block bb)
4124 {
4125   /* Do not attempt to duplicate tablejumps, as we need to unshare
4126      the dispatch table.  This is difficult to do, as the instructions
4127      computing the jump destination may be hoisted outside the basic block.  */
4128   if (tablejump_p (BB_END (bb), NULL, NULL))
4129     return false;
4130 
4131   /* Do not duplicate blocks containing insns that can't be copied.  */
4132   if (targetm.cannot_copy_insn_p)
4133     {
4134       rtx_insn *insn = BB_HEAD (bb);
4135       while (1)
4136 	{
4137 	  if (INSN_P (insn) && targetm.cannot_copy_insn_p (insn))
4138 	    return false;
4139 	  if (insn == BB_END (bb))
4140 	    break;
4141 	  insn = NEXT_INSN (insn);
4142 	}
4143     }
4144 
4145   return true;
4146 }
4147 
4148 rtx_insn *
4149 duplicate_insn_chain (rtx_insn *from, rtx_insn *to)
4150 {
4151   rtx_insn *insn, *next, *copy;
4152   rtx_note *last;
4153 
4154   /* Avoid updating the boundaries of the previous basic block.  The
4155      note will get removed from the insn stream in fixup.  */
4156   last = emit_note (NOTE_INSN_DELETED);
4157 
4158   /* Create the copy at the end of the insn chain.  The chain will
4159      be reordered later.  */
4160   for (insn = from; insn != NEXT_INSN (to); insn = NEXT_INSN (insn))
4161     {
4162       switch (GET_CODE (insn))
4163 	{
4164 	case DEBUG_INSN:
4165 	  /* Don't duplicate label debug insns.  */
4166 	  if (TREE_CODE (INSN_VAR_LOCATION_DECL (insn)) == LABEL_DECL)
4167 	    break;
4168 	  /* FALLTHRU */
4169 	case INSN:
4170 	case CALL_INSN:
4171 	case JUMP_INSN:
4172 	  copy = emit_copy_of_insn_after (insn, get_last_insn ());
4173 	  if (JUMP_P (insn) && JUMP_LABEL (insn) != NULL_RTX
4174 	      && ANY_RETURN_P (JUMP_LABEL (insn)))
4175 	    JUMP_LABEL (copy) = JUMP_LABEL (insn);
4176           maybe_copy_prologue_epilogue_insn (insn, copy);
4177 	  break;
4178 
4179 	case JUMP_TABLE_DATA:
4180 	  /* Avoid copying dispatch tables.  We never duplicate
4181 	     tablejumps, so this can only be hit if the table got
4182 	     moved far away from the original jump.
4183 	     Avoid copying the following barrier as well, if any
4184 	     (and any debug insns in between).  */
4185 	  for (next = NEXT_INSN (insn);
4186 	       next != NEXT_INSN (to);
4187 	       next = NEXT_INSN (next))
4188 	    if (!DEBUG_INSN_P (next))
4189 	      break;
4190 	  if (next != NEXT_INSN (to) && BARRIER_P (next))
4191 	    insn = next;
4192 	  break;
4193 
4194 	case CODE_LABEL:
4195 	  break;
4196 
4197 	case BARRIER:
4198 	  emit_barrier ();
4199 	  break;
4200 
4201 	case NOTE:
4202 	  switch (NOTE_KIND (insn))
4203 	    {
4204 	      /* In case the prologue is empty and the function contains a
4205 		 label in its first BB, we may want to copy the block.  */
4206 	    case NOTE_INSN_PROLOGUE_END:
4207 
4208 	    case NOTE_INSN_DELETED:
4209 	    case NOTE_INSN_DELETED_LABEL:
4210 	    case NOTE_INSN_DELETED_DEBUG_LABEL:
4211 	      /* No problem to strip these.  */
4212 	    case NOTE_INSN_FUNCTION_BEG:
4213 	      /* There is always just a single entry to the function.  */
4214 	    case NOTE_INSN_BASIC_BLOCK:
4215               /* We should only switch text sections once.  */
4216 	    case NOTE_INSN_SWITCH_TEXT_SECTIONS:
4217 	      break;
4218 
4219 	    case NOTE_INSN_EPILOGUE_BEG:
4220 	      emit_note_copy (as_a <rtx_note *> (insn));
4221 	      break;
4222 
4223 	    default:
4224 	      /* All other notes should have already been eliminated.  */
4225 	      gcc_unreachable ();
4226 	    }
4227 	  break;
4228 	default:
4229 	  gcc_unreachable ();
4230 	}
4231     }
4232   insn = NEXT_INSN (last);
4233   delete_insn (last);
4234   return insn;
4235 }
4236 
4237 /* Create a duplicate of the basic block BB.  */
4238 
4239 static basic_block
4240 cfg_layout_duplicate_bb (basic_block bb)
4241 {
4242   rtx_insn *insn;
4243   basic_block new_bb;
4244 
4245   insn = duplicate_insn_chain (BB_HEAD (bb), BB_END (bb));
4246   new_bb = create_basic_block (insn,
4247 			       insn ? get_last_insn () : NULL,
4248 			       EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb);
4249 
4250   BB_COPY_PARTITION (new_bb, bb);
4251   if (BB_HEADER (bb))
4252     {
4253       insn = BB_HEADER (bb);
4254       while (NEXT_INSN (insn))
4255 	insn = NEXT_INSN (insn);
4256       insn = duplicate_insn_chain (BB_HEADER (bb), insn);
4257       if (insn)
4258 	BB_HEADER (new_bb) = unlink_insn_chain (insn, get_last_insn ());
4259     }
4260 
4261   if (BB_FOOTER (bb))
4262     {
4263       insn = BB_FOOTER (bb);
4264       while (NEXT_INSN (insn))
4265 	insn = NEXT_INSN (insn);
4266       insn = duplicate_insn_chain (BB_FOOTER (bb), insn);
4267       if (insn)
4268 	BB_FOOTER (new_bb) = unlink_insn_chain (insn, get_last_insn ());
4269     }
4270 
4271   return new_bb;
4272 }
4273 
4274 
4275 /* Main entry point to this module - initialize the data structures for
4276    CFG layout changes.  Loop structures are kept up-to-date if available.
4277 
4278    FLAGS is a set of additional flags to pass to cleanup_cfg().  */
4279 
4280 void
4281 cfg_layout_initialize (unsigned int flags)
4282 {
4283   rtx_insn_list *x;
4284   basic_block bb;
4285 
4286   /* Once bb partitioning is complete, cfg layout mode should not be
4287      re-entered.  Entering cfg layout mode may require fixups; for
4288      example, edge forwarding performed when optimizing the cfg layout
4289      might require moving a block from the hot to the cold section,
4290      which would create an illegal partitioning unless some manual
4291      fixup were performed.  */
4292   gcc_assert (!(crtl->bb_reorder_complete
4293 		&& flag_reorder_blocks_and_partition));
4294 
4295   initialize_original_copy_tables ();
4296 
4297   cfg_layout_rtl_register_cfg_hooks ();
4298 
4299   record_effective_endpoints ();
4300 
4301   /* Make sure that the targets of non-local gotos are marked.  */
4302   for (x = nonlocal_goto_handler_labels; x; x = x->next ())
4303     {
4304       bb = BLOCK_FOR_INSN (x->insn ());
4305       bb->flags |= BB_NON_LOCAL_GOTO_TARGET;
4306     }
4307 
4308   cleanup_cfg (CLEANUP_CFGLAYOUT | flags);
4309 }
4310 
4311 /* Splits superblocks.  */
4312 void
4313 break_superblocks (void)
4314 {
4315   sbitmap superblocks;
4316   bool need = false;
4317   basic_block bb;
4318 
4319   superblocks = sbitmap_alloc (last_basic_block_for_fn (cfun));
4320   bitmap_clear (superblocks);
4321 
4322   FOR_EACH_BB_FN (bb, cfun)
4323     if (bb->flags & BB_SUPERBLOCK)
4324       {
4325 	bb->flags &= ~BB_SUPERBLOCK;
4326 	bitmap_set_bit (superblocks, bb->index);
4327 	need = true;
4328       }
4329 
4330   if (need)
4331     {
4332       rebuild_jump_labels (get_insns ());
4333       find_many_sub_basic_blocks (superblocks);
4334     }
4335 
4336   free (superblocks);
4337 }
4338 
4339 /* Finalize the changes: reorder the insn list according to the aux-pointer
4340    sequence, insert compensation code, and rebuild the scope forest.  */
4341 
4342 void
4343 cfg_layout_finalize (void)
4344 {
4345 #ifdef ENABLE_CHECKING
4346   verify_flow_info ();
4347 #endif
4348   force_one_exit_fallthru ();
4349   rtl_register_cfg_hooks ();
4350   if (reload_completed
4351 #ifdef HAVE_epilogue
4352       && !HAVE_epilogue
4353 #endif
4354       )
4355     fixup_fallthru_exit_predecessor ();
4356   fixup_reorder_chain ();
4357 
4358   rebuild_jump_labels (get_insns ());
4359   delete_dead_jumptables ();
4360 
4361 #ifdef ENABLE_CHECKING
4362   verify_insn_chain ();
4363   verify_flow_info ();
4364 #endif
4365 }
4366 
4367 
4368 /* Same as split_block but update cfg_layout structures.  */
4369 
4370 static basic_block
4371 cfg_layout_split_block (basic_block bb, void *insnp)
4372 {
4373   rtx insn = (rtx) insnp;
4374   basic_block new_bb = rtl_split_block (bb, insn);
4375 
4376   BB_FOOTER (new_bb) = BB_FOOTER (bb);
4377   BB_FOOTER (bb) = NULL;
4378 
4379   return new_bb;
4380 }
4381 
4382 /* Redirect edge E to DEST.  */
4383 static edge
4384 cfg_layout_redirect_edge_and_branch (edge e, basic_block dest)
4385 {
4386   basic_block src = e->src;
4387   edge ret;
4388 
4389   if (e->flags & (EDGE_ABNORMAL_CALL | EDGE_EH))
4390     return NULL;
4391 
4392   if (e->dest == dest)
4393     return e;
4394 
4395   if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
4396       && (ret = try_redirect_by_replacing_jump (e, dest, true)))
4397     {
4398       df_set_bb_dirty (src);
4399       return ret;
4400     }
4401 
4402   if (e->src == ENTRY_BLOCK_PTR_FOR_FN (cfun)
4403       && (e->flags & EDGE_FALLTHRU) && !(e->flags & EDGE_COMPLEX))
4404     {
4405       if (dump_file)
4406 	fprintf (dump_file, "Redirecting entry edge from bb %i to %i\n",
4407 		 e->src->index, dest->index);
4408 
4409       df_set_bb_dirty (e->src);
4410       redirect_edge_succ (e, dest);
4411       return e;
4412     }
4413 
4414   /* redirect_edge_and_branch may decide to turn a branch into a fallthru
4415      edge in case the basic blocks appear to be in sequence.  Avoid this
4416      transformation.  */
4417 
4418   if (e->flags & EDGE_FALLTHRU)
4419     {
4420       /* Redirect any branch edges unified with the fallthru one.  */
4421       if (JUMP_P (BB_END (src))
4422 	  && label_is_jump_target_p (BB_HEAD (e->dest),
4423 				     BB_END (src)))
4424 	{
4425 	  edge redirected;
4426 
4427 	  if (dump_file)
4428 	    fprintf (dump_file, "Fallthru edge unified with branch "
4429 		     "%i->%i redirected to %i\n",
4430 		     e->src->index, e->dest->index, dest->index);
4431 	  e->flags &= ~EDGE_FALLTHRU;
4432 	  redirected = redirect_branch_edge (e, dest);
4433 	  gcc_assert (redirected);
4434 	  redirected->flags |= EDGE_FALLTHRU;
4435 	  df_set_bb_dirty (redirected->src);
4436 	  return redirected;
4437 	}
4438       /* In case we are redirecting the fallthru edge to the branch edge
4439 	 of a conditional jump, remove the jump.  */
4440       if (EDGE_COUNT (src->succs) == 2)
4441 	{
4442 	  /* Find the edge that is different from E.  */
4443 	  edge s = EDGE_SUCC (src, EDGE_SUCC (src, 0) == e);
4444 
4445 	  if (s->dest == dest
4446 	      && any_condjump_p (BB_END (src))
4447 	      && onlyjump_p (BB_END (src)))
4448 	    delete_insn (BB_END (src));
4449 	}
4450       if (dump_file)
4451 	fprintf (dump_file, "Redirecting fallthru edge %i->%i to %i\n",
4452 		 e->src->index, e->dest->index, dest->index);
4453       ret = redirect_edge_succ_nodup (e, dest);
4454     }
4455   else
4456     ret = redirect_branch_edge (e, dest);
4457 
4458   /* We don't want simplejumps in the insn stream during cfglayout.  */
4459   gcc_assert (!simplejump_p (BB_END (src)));
4460 
4461   df_set_bb_dirty (src);
4462   return ret;
4463 }
4464 
4465 /* Simple wrapper, as we can always redirect fallthru edges.  */
4466 static basic_block
4467 cfg_layout_redirect_edge_and_branch_force (edge e, basic_block dest)
4468 {
4469   edge redirected = cfg_layout_redirect_edge_and_branch (e, dest);
4470 
4471   gcc_assert (redirected);
4472   return NULL;
4473 }
4474 
4475 /* Same as delete_basic_block but update cfg_layout structures.  */
4476 
4477 static void
4478 cfg_layout_delete_block (basic_block bb)
4479 {
4480   rtx_insn *insn, *next, *prev = PREV_INSN (BB_HEAD (bb)), *remnants;
4481   rtx_insn **to;
4482 
4483   if (BB_HEADER (bb))
4484     {
4485       next = BB_HEAD (bb);
4486       if (prev)
4487 	SET_NEXT_INSN (prev) = BB_HEADER (bb);
4488       else
4489 	set_first_insn (BB_HEADER (bb));
4490       SET_PREV_INSN (BB_HEADER (bb)) = prev;
4491       insn = BB_HEADER (bb);
4492       while (NEXT_INSN (insn))
4493 	insn = NEXT_INSN (insn);
4494       SET_NEXT_INSN (insn) = next;
4495       SET_PREV_INSN (next) = insn;
4496     }
4497   next = NEXT_INSN (BB_END (bb));
4498   if (BB_FOOTER (bb))
4499     {
4500       insn = BB_FOOTER (bb);
4501       while (insn)
4502 	{
4503 	  if (BARRIER_P (insn))
4504 	    {
4505 	      if (PREV_INSN (insn))
4506 		SET_NEXT_INSN (PREV_INSN (insn)) = NEXT_INSN (insn);
4507 	      else
4508 		BB_FOOTER (bb) = NEXT_INSN (insn);
4509 	      if (NEXT_INSN (insn))
4510 		SET_PREV_INSN (NEXT_INSN (insn)) = PREV_INSN (insn);
4511 	    }
4512 	  if (LABEL_P (insn))
4513 	    break;
4514 	  insn = NEXT_INSN (insn);
4515 	}
4516       if (BB_FOOTER (bb))
4517 	{
4518 	  insn = BB_END (bb);
4519 	  SET_NEXT_INSN (insn) = BB_FOOTER (bb);
4520 	  SET_PREV_INSN (BB_FOOTER (bb)) = insn;
4521 	  while (NEXT_INSN (insn))
4522 	    insn = NEXT_INSN (insn);
4523 	  SET_NEXT_INSN (insn) = next;
4524 	  if (next)
4525 	    SET_PREV_INSN (next) = insn;
4526 	  else
4527 	    set_last_insn (insn);
4528 	}
4529     }
4530   if (bb->next_bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
4531     to = &BB_HEADER (bb->next_bb);
4532   else
4533     to = &cfg_layout_function_footer;
4534 
4535   rtl_delete_block (bb);
4536 
4537   if (prev)
4538     prev = NEXT_INSN (prev);
4539   else
4540     prev = get_insns ();
4541   if (next)
4542     next = PREV_INSN (next);
4543   else
4544     next = get_last_insn ();
4545 
4546   if (next && NEXT_INSN (next) != prev)
4547     {
4548       remnants = unlink_insn_chain (prev, next);
4549       insn = remnants;
4550       while (NEXT_INSN (insn))
4551 	insn = NEXT_INSN (insn);
4552       SET_NEXT_INSN (insn) = *to;
4553       if (*to)
4554 	SET_PREV_INSN (*to) = insn;
4555       *to = remnants;
4556     }
4557 }
4558 
4559 /* Return true when blocks A and B can be safely merged.  */
4560 
4561 static bool
4562 cfg_layout_can_merge_blocks_p (basic_block a, basic_block b)
4563 {
4564   /* If we are partitioning hot/cold basic blocks, we don't want to
4565      mess up unconditional or indirect jumps that cross between hot
4566      and cold sections.
4567 
4568      Basic block partitioning may result in some jumps that appear to
4569      be optimizable (or blocks that appear to be mergeable), but which really
4570      must be left untouched (they are required to make it safely across
4571      partition boundaries).  See  the comments at the top of
4572      bb-reorder.c:partition_hot_cold_basic_blocks for complete details.  */
4573 
4574   if (BB_PARTITION (a) != BB_PARTITION (b))
4575     return false;
4576 
4577   /* Protect the loop latches.  */
4578   if (current_loops && b->loop_father->latch == b)
4579     return false;
4580 
4581   /* If we would end up moving B's instructions, make sure it doesn't fall
4582      through into the exit block, since we cannot recover from a fallthrough
4583      edge into the exit block occurring in the middle of a function.  */
4584   if (NEXT_INSN (BB_END (a)) != BB_HEAD (b))
4585     {
4586       edge e = find_fallthru_edge (b->succs);
4587       if (e && e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
4588 	return false;
4589     }
4590 
4591   /* There must be exactly one edge in between the blocks.  */
4592   return (single_succ_p (a)
4593 	  && single_succ (a) == b
4594 	  && single_pred_p (b)
4595 	  && a != b
4596 	  /* Must be simple edge.  */
4597 	  && !(single_succ_edge (a)->flags & EDGE_COMPLEX)
4598 	  && a != ENTRY_BLOCK_PTR_FOR_FN (cfun)
4599 	  && b != EXIT_BLOCK_PTR_FOR_FN (cfun)
4600 	  /* If the jump insn has side effects, we can't kill the edge.
4601 	     When not optimizing, try_redirect_by_replacing_jump will
4602 	     not allow us to redirect an edge by replacing a table jump.  */
4603 	  && (!JUMP_P (BB_END (a))
4604 	      || ((!optimize || reload_completed)
4605 		  ? simplejump_p (BB_END (a)) : onlyjump_p (BB_END (a)))));
4606 }
4607 
4608 /* Merge block A and B.  The blocks must be mergeable.  */
4609 
4610 static void
4611 cfg_layout_merge_blocks (basic_block a, basic_block b)
4612 {
4613   bool forwarder_p = (b->flags & BB_FORWARDER_BLOCK) != 0;
4614   rtx_insn *insn;
4615 
4616   gcc_checking_assert (cfg_layout_can_merge_blocks_p (a, b));
4617 
4618   if (dump_file)
4619     fprintf (dump_file, "Merging block %d into block %d...\n", b->index,
4620 			 a->index);
4621 
4622   /* If there was a CODE_LABEL beginning B, delete it.  */
4623   if (LABEL_P (BB_HEAD (b)))
4624     {
4625       delete_insn (BB_HEAD (b));
4626     }
4627 
4628   /* We should have a fallthru edge in A, or we can do a dummy redirection
4629      to get it cleaned up.  */
4630   if (JUMP_P (BB_END (a)))
4631     try_redirect_by_replacing_jump (EDGE_SUCC (a, 0), b, true);
4632   gcc_assert (!JUMP_P (BB_END (a)));
4633 
4634   /* When not optimizing and the edge is the only place in RTL which holds
4635      some unique locus, emit a nop with that locus in between.  */
4636   if (!optimize)
4637     emit_nop_for_unique_locus_between (a, b);
4638 
4639   /* Move things from b->footer after a->footer.  */
4640   if (BB_FOOTER (b))
4641     {
4642       if (!BB_FOOTER (a))
4643 	BB_FOOTER (a) = BB_FOOTER (b);
4644       else
4645 	{
4646 	  rtx_insn *last = BB_FOOTER (a);
4647 
4648 	  while (NEXT_INSN (last))
4649 	    last = NEXT_INSN (last);
4650 	  SET_NEXT_INSN (last) = BB_FOOTER (b);
4651 	  SET_PREV_INSN (BB_FOOTER (b)) = last;
4652 	}
4653       BB_FOOTER (b) = NULL;
4654     }
4655 
4656   /* Move things from b->header before a->footer.
4657      Note that this may include dead tablejump data, but we don't clean
4658      those up until we go out of cfglayout mode.  */
4659   if (BB_HEADER (b))
4660     {
4661       if (! BB_FOOTER (a))
4662 	BB_FOOTER (a) = BB_HEADER (b);
4663       else
4664 	{
4665 	  rtx_insn *last = BB_HEADER (b);
4666 
4667 	  while (NEXT_INSN (last))
4668 	    last = NEXT_INSN (last);
4669 	  SET_NEXT_INSN (last) = BB_FOOTER (a);
4670 	  SET_PREV_INSN (BB_FOOTER (a)) = last;
4671 	  BB_FOOTER (a) = BB_HEADER (b);
4672 	}
4673       BB_HEADER (b) = NULL;
4674     }
4675 
4676   /* If the basic blocks are not adjacent, move them around.  */
4677   if (NEXT_INSN (BB_END (a)) != BB_HEAD (b))
4678     {
4679       insn = unlink_insn_chain (BB_HEAD (b), BB_END (b));
4680 
4681       emit_insn_after_noloc (insn, BB_END (a), a);
4682     }
4683   /* Otherwise just re-associate the instructions.  */
4684   else
4685     {
4686       insn = BB_HEAD (b);
4687       BB_END (a) = BB_END (b);
4688     }
4689 
4690   /* emit_insn_after_noloc doesn't call df_insn_change_bb.
4691      We need to call it explicitly.  */
4692   update_bb_for_insn_chain (insn, BB_END (b), a);
4693 
4694   /* Skip possible DELETED_LABEL insn.  */
4695   if (!NOTE_INSN_BASIC_BLOCK_P (insn))
4696     insn = NEXT_INSN (insn);
4697   gcc_assert (NOTE_INSN_BASIC_BLOCK_P (insn));
4698   BB_HEAD (b) = BB_END (b) = NULL;
4699   delete_insn (insn);
4700 
4701   df_bb_delete (b->index);
4702 
4703   /* If B was a forwarder block, propagate the locus on the edge.  */
4704   if (forwarder_p
4705       && LOCATION_LOCUS (EDGE_SUCC (b, 0)->goto_locus) == UNKNOWN_LOCATION)
4706     EDGE_SUCC (b, 0)->goto_locus = EDGE_SUCC (a, 0)->goto_locus;
4707 
4708   if (dump_file)
4709     fprintf (dump_file, "Merged blocks %d and %d.\n", a->index, b->index);
4710 }
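
/* A sketch of how the layout-mode fields end up combined by
   cfg_layout_merge_blocks (A, B):

     insns of A:   [A's insns] [B's insns]
     footer of A:  [B's old header] [A's old footer] [B's old footer]

   B's header lands in front of A's footer, so it may carry dead
   tablejump data that is only cleaned up on leaving cfglayout mode.  */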
4711 
4712 /* Split edge E.  */
4713 
4714 static basic_block
4715 cfg_layout_split_edge (edge e)
4716 {
4717   basic_block new_bb =
4718     create_basic_block (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
4719 			? NEXT_INSN (BB_END (e->src)) : get_insns (),
4720 			NULL_RTX, e->src);
4721 
4722   if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
4723     BB_COPY_PARTITION (new_bb, e->src);
4724   else
4725     BB_COPY_PARTITION (new_bb, e->dest);
4726   make_edge (new_bb, e->dest, EDGE_FALLTHRU);
4727   redirect_edge_and_branch_force (e, new_bb);
4728 
4729   return new_bb;
4730 }
4731 
4732 /* Do postprocessing after making a forwarder block joined by edge FALLTHRU.  */
4733 
4734 static void
4735 rtl_make_forwarder_block (edge fallthru ATTRIBUTE_UNUSED)
4736 {
4737 }
4738 
4739 /* Return true if BB contains only labels or non-executable
4740    instructions.  */
4741 
4742 static bool
4743 rtl_block_empty_p (basic_block bb)
4744 {
4745   rtx_insn *insn;
4746 
4747   if (bb == ENTRY_BLOCK_PTR_FOR_FN (cfun)
4748       || bb == EXIT_BLOCK_PTR_FOR_FN (cfun))
4749     return true;
4750 
4751   FOR_BB_INSNS (bb, insn)
4752     if (NONDEBUG_INSN_P (insn) && !any_uncondjump_p (insn))
4753       return false;
4754 
4755   return true;
4756 }
4757 
4758 /* Split a basic block if it ends with a conditional branch and if
4759    the other part of the block is not empty.  */
4760 
4761 static basic_block
4762 rtl_split_block_before_cond_jump (basic_block bb)
4763 {
4764   rtx_insn *insn;
4765   rtx_insn *split_point = NULL;
4766   rtx_insn *last = NULL;
4767   bool found_code = false;
4768 
4769   FOR_BB_INSNS (bb, insn)
4770     {
4771       if (any_condjump_p (insn))
4772 	split_point = last;
4773       else if (NONDEBUG_INSN_P (insn))
4774 	found_code = true;
4775       last = insn;
4776     }
4777 
4778   /* Split only if we found both a conditional jump and some other real
4779      code before it.  */
4779   if (found_code && split_point)
4780     return split_block (bb, split_point)->dest;
4781   else
4782     return NULL;
4783 }
4784 
4785 /* Return 1 if BB ends with a call, possibly followed by some
4786    instructions that must stay with the call, 0 otherwise.  */
4787 
4788 static bool
4789 rtl_block_ends_with_call_p (basic_block bb)
4790 {
4791   rtx_insn *insn = BB_END (bb);
4792 
4793   while (!CALL_P (insn)
4794 	 && insn != BB_HEAD (bb)
4795 	 && (keep_with_call_p (insn)
4796 	     || NOTE_P (insn)
4797 	     || DEBUG_INSN_P (insn)))
4798     insn = PREV_INSN (insn);
4799   return (CALL_P (insn));
4800 }
4801 
4802 /* Return 1 if BB ends with a conditional branch, 0 otherwise.  */
4803 
4804 static bool
4805 rtl_block_ends_with_condjump_p (const_basic_block bb)
4806 {
4807   return any_condjump_p (BB_END (bb));
4808 }
4809 
4810 /* Return true if we need to add a fake edge to the exit block.
4811    Helper function for rtl_flow_call_edges_add.  */
4812 
4813 static bool
4814 need_fake_edge_p (const rtx_insn *insn)
4815 {
4816   if (!INSN_P (insn))
4817     return false;
4818 
4819   if ((CALL_P (insn)
4820        && !SIBLING_CALL_P (insn)
4821        && !find_reg_note (insn, REG_NORETURN, NULL)
4822        && !(RTL_CONST_OR_PURE_CALL_P (insn))))
4823     return true;
4824 
4825   return ((GET_CODE (PATTERN (insn)) == ASM_OPERANDS
4826 	   && MEM_VOLATILE_P (PATTERN (insn)))
4827 	  || (GET_CODE (PATTERN (insn)) == PARALLEL
4828 	      && asm_noperands (insn) != -1
4829 	      && MEM_VOLATILE_P (XVECEXP (PATTERN (insn), 0, 0)))
4830 	  || GET_CODE (PATTERN (insn)) == ASM_INPUT);
4831 }
4832 
4833 /* Add fake edges to the function exit for any non-constant and non-noreturn
4834    calls, and for volatile inline assembly, in the bitmap of blocks specified
4835    by BLOCKS, or in the whole CFG if BLOCKS is zero.  Return the number of blocks
4836    that were split.
4837 
4838    The goal is to expose cases in which entering a basic block does not imply
4839    that all subsequent instructions must be executed.  */
4840 
4841 static int
4842 rtl_flow_call_edges_add (sbitmap blocks)
4843 {
4844   int i;
4845   int blocks_split = 0;
4846   int last_bb = last_basic_block_for_fn (cfun);
4847   bool check_last_block = false;
4848 
4849   if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS)
4850     return 0;
4851 
4852   if (! blocks)
4853     check_last_block = true;
4854   else
4855     check_last_block = bitmap_bit_p (blocks,
4856 				     EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb->index);
4857 
4858   /* In the last basic block, before epilogue generation, there will be
4859      a fallthru edge to EXIT.  Special care is required if the last insn
4860      of the last basic block is a call because make_edge folds duplicate
4861      edges, which would result in the fallthru edge also being marked
4862      fake, which would result in the fallthru edge being removed by
4863      remove_fake_edges, which would result in an invalid CFG.
4864 
4865      Moreover, we can't elide the outgoing fake edge, since the block
4866      profiler needs to take this into account in order to solve the minimal
4867      spanning tree in the case that the call doesn't return.
4868 
4869      Handle this by adding a dummy instruction in a new last basic block.  */
4870   if (check_last_block)
4871     {
4872       basic_block bb = EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb;
4873       rtx_insn *insn = BB_END (bb);
4874 
4875       /* Back up past insns that must be kept in the same block as a call.  */
4876       while (insn != BB_HEAD (bb)
4877 	     && keep_with_call_p (insn))
4878 	insn = PREV_INSN (insn);
4879 
4880       if (need_fake_edge_p (insn))
4881 	{
4882 	  edge e;
4883 
4884 	  e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
4885 	  if (e)
4886 	    {
4887 	      insert_insn_on_edge (gen_use (const0_rtx), e);
4888 	      commit_edge_insertions ();
4889 	    }
4890 	}
4891     }
4892 
4893   /* Now add fake edges to the function exit for any non-constant
4894      calls, since there is no way that we can determine if they will
4895      return or not...  */
4896 
4897   for (i = NUM_FIXED_BLOCKS; i < last_bb; i++)
4898     {
4899       basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
4900       rtx_insn *insn;
4901       rtx_insn *prev_insn;
4902 
4903       if (!bb)
4904 	continue;
4905 
4906       if (blocks && !bitmap_bit_p (blocks, i))
4907 	continue;
4908 
4909       for (insn = BB_END (bb); ; insn = prev_insn)
4910 	{
4911 	  prev_insn = PREV_INSN (insn);
4912 	  if (need_fake_edge_p (insn))
4913 	    {
4914 	      edge e;
4915 	      rtx_insn *split_at_insn = insn;
4916 
4917 	      /* Don't split the block between a call and an insn that should
4918 		 remain in the same block as the call.  */
4919 	      if (CALL_P (insn))
4920 		while (split_at_insn != BB_END (bb)
4921 		       && keep_with_call_p (NEXT_INSN (split_at_insn)))
4922 		  split_at_insn = NEXT_INSN (split_at_insn);
4923 
4924 	      /* The handling above of the final block before the epilogue
4925 		 should be enough to verify that there is no edge to the exit
4926 		 block in CFG already.  Calling make_edge in such case would
4927 		 cause us to mark that edge as fake and remove it later.  */
4928 
4929 #ifdef ENABLE_CHECKING
4930 	      if (split_at_insn == BB_END (bb))
4931 		{
4932 		  e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
4933 		  gcc_assert (e == NULL);
4934 		}
4935 #endif
4936 
4937 	      /* Note that the following may create a new basic block
4938 		 and renumber the existing basic blocks.  */
4939 	      if (split_at_insn != BB_END (bb))
4940 		{
4941 		  e = split_block (bb, split_at_insn);
4942 		  if (e)
4943 		    blocks_split++;
4944 		}
4945 
4946 	      make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_FAKE);
4947 	    }
4948 
4949 	  if (insn == BB_HEAD (bb))
4950 	    break;
4951 	}
4952     }
4953 
4954   if (blocks_split)
4955     verify_flow_info ();
4956 
4957   return blocks_split;
4958 }
4959 
4960 /* Add COMP_RTX as a condition at the end of COND_BB.  FIRST_HEAD is
4961    the conditional branch target and SECOND_HEAD the fall-thru; there
4962    is no need to handle the fall-thru here, as the loop versioning
4963    code handles it.  SECOND_HEAD exists because the tree-level hook
4964    needs it, and the two hooks must have the same signature.  */
4965 static void
4966 rtl_lv_add_condition_to_bb (basic_block first_head,
4967 			    basic_block second_head ATTRIBUTE_UNUSED,
4968 			    basic_block cond_bb, void *comp_rtx)
4969 {
4970   rtx label;
4971   rtx_insn *seq, *jump;
4972   rtx op0 = XEXP ((rtx)comp_rtx, 0);
4973   rtx op1 = XEXP ((rtx)comp_rtx, 1);
4974   enum rtx_code comp = GET_CODE ((rtx)comp_rtx);
4975   machine_mode mode;
4976 
4977 
4978   label = block_label (first_head);
4979   mode = GET_MODE (op0);
4980   if (mode == VOIDmode)
4981     mode = GET_MODE (op1);
4982 
4983   start_sequence ();
4984   op0 = force_operand (op0, NULL_RTX);
4985   op1 = force_operand (op1, NULL_RTX);
4986   do_compare_rtx_and_jump (op0, op1, comp, 0,
4987 			   mode, NULL_RTX, NULL_RTX, label, -1);
4988   jump = get_last_insn ();
4989   JUMP_LABEL (jump) = label;
4990   LABEL_NUSES (label)++;
4991   seq = get_insns ();
4992   end_sequence ();
4993 
4994   /* Add the new cond, in the new head.  */
4995   emit_insn_after (seq, BB_END (cond_bb));
4996 }
4997 
4998 
4999 /* Given a block B with a conditional branch at its end, store the
5000    branch edge in *BRANCH_EDGE and the fall-thru edge in
5001    *FALLTHRU_EDGE.  */
5002 static void
5003 rtl_extract_cond_bb_edges (basic_block b, edge *branch_edge,
5004 			   edge *fallthru_edge)
5005 {
5006   edge e = EDGE_SUCC (b, 0);
5007 
5008   if (e->flags & EDGE_FALLTHRU)
5009     {
5010       *fallthru_edge = e;
5011       *branch_edge = EDGE_SUCC (b, 1);
5012     }
5013   else
5014     {
5015       *branch_edge = e;
5016       *fallthru_edge = EDGE_SUCC (b, 1);
5017     }
5018 }
5019 
5020 void
5021 init_rtl_bb_info (basic_block bb)
5022 {
5023   gcc_assert (!bb->il.x.rtl);
5024   bb->il.x.head_ = NULL;
5025   bb->il.x.rtl = ggc_cleared_alloc<rtl_bb_info> ();
5026 }
5027 
5028 /* Returns true if it is possible to remove edge E by redirecting
5029    it to the destination of the other edge from E->src.  */
5030 
5031 static bool
5032 rtl_can_remove_branch_p (const_edge e)
5033 {
5034   const_basic_block src = e->src;
5035   const_basic_block target = EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest;
5036   const rtx_insn *insn = BB_END (src);
5037   rtx set;
5038 
5039   /* The conditions are taken from try_redirect_by_replacing_jump.  */
5040   if (target == EXIT_BLOCK_PTR_FOR_FN (cfun))
5041     return false;
5042 
5043   if (e->flags & (EDGE_ABNORMAL_CALL | EDGE_EH))
5044     return false;
5045 
5046   if (BB_PARTITION (src) != BB_PARTITION (target))
5047     return false;
5048 
5049   if (!onlyjump_p (insn)
5050       || tablejump_p (insn, NULL, NULL))
5051     return false;
5052 
5053   set = single_set (insn);
5054   if (!set || side_effects_p (set))
5055     return false;
5056 
5057   return true;
5058 }
5059 
5060 static basic_block
5061 rtl_duplicate_bb (basic_block bb)
5062 {
5063   bb = cfg_layout_duplicate_bb (bb);
5064   bb->aux = NULL;
5065   return bb;
5066 }
5067 
5068 /* Do book-keeping of basic block BB for the profile consistency checker.
5069    If AFTER_PASS is 0, do pre-pass accounting, or if AFTER_PASS is 1
5070    then do post-pass accounting.  Store the counts in RECORD.  */
5071 static void
5072 rtl_account_profile_record (basic_block bb, int after_pass,
5073 			    struct profile_record *record)
5074 {
5075   rtx_insn *insn;
5076   FOR_BB_INSNS (bb, insn)
5077     if (INSN_P (insn))
5078       {
5079 	record->size[after_pass]
5080 	  += insn_rtx_cost (PATTERN (insn), false);
5081 	if (profile_status_for_fn (cfun) == PROFILE_READ)
5082 	  record->time[after_pass]
5083 	    += insn_rtx_cost (PATTERN (insn), true) * bb->count;
5084 	else if (profile_status_for_fn (cfun) == PROFILE_GUESSED)
5085 	  record->time[after_pass]
5086 	    += insn_rtx_cost (PATTERN (insn), true) * bb->frequency;
5087       }
5088 }
5089 
5090 /* Implementation of CFG manipulation for linearized RTL.  */
5091 struct cfg_hooks rtl_cfg_hooks = {
5092   "rtl",
5093   rtl_verify_flow_info,
5094   rtl_dump_bb,
5095   rtl_dump_bb_for_graph,
5096   rtl_create_basic_block,
5097   rtl_redirect_edge_and_branch,
5098   rtl_redirect_edge_and_branch_force,
5099   rtl_can_remove_branch_p,
5100   rtl_delete_block,
5101   rtl_split_block,
5102   rtl_move_block_after,
5103   rtl_can_merge_blocks,  /* can_merge_blocks_p */
5104   rtl_merge_blocks,
5105   rtl_predict_edge,
5106   rtl_predicted_by_p,
5107   cfg_layout_can_duplicate_bb_p,
5108   rtl_duplicate_bb,
5109   rtl_split_edge,
5110   rtl_make_forwarder_block,
5111   rtl_tidy_fallthru_edge,
5112   rtl_force_nonfallthru,
5113   rtl_block_ends_with_call_p,
5114   rtl_block_ends_with_condjump_p,
5115   rtl_flow_call_edges_add,
5116   NULL, /* execute_on_growing_pred */
5117   NULL, /* execute_on_shrinking_pred */
5118   NULL, /* duplicate loop for trees */
5119   NULL, /* lv_add_condition_to_bb */
5120   NULL, /* lv_adjust_loop_header_phi*/
5121   NULL, /* extract_cond_bb_edges */
5122   NULL, /* flush_pending_stmts */
5123   rtl_block_empty_p, /* block_empty_p */
5124   rtl_split_block_before_cond_jump, /* split_block_before_cond_jump */
5125   rtl_account_profile_record,
5126 };
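
/* Generic CFG code dispatches through whichever hook table is active:
   the table above for plain RTL, or the cfglayout variant below.  A
   minimal sketch of the mechanism, with the table passed explicitly
   (hypothetical wrapper; the real dispatchers live in cfghooks.c):  */
#if 0
static basic_block
split_block_via_hooks (struct cfg_hooks *active, basic_block bb, void *i)
{
  /* Indirect call through the active table; for the tables in this
     file this resolves to rtl_split_block or cfg_layout_split_block.  */
  return active->split_block (bb, i);
}
#endif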
5127 
5128 /* Implementation of CFG manipulation for cfg layout RTL, where
5129    basic blocks connected via fallthru edges do not have to be adjacent.
5130    This representation will hopefully become the default one in a
5131    future version of the compiler.  */
5132 
5133 struct cfg_hooks cfg_layout_rtl_cfg_hooks = {
5134   "cfglayout mode",
5135   rtl_verify_flow_info_1,
5136   rtl_dump_bb,
5137   rtl_dump_bb_for_graph,
5138   cfg_layout_create_basic_block,
5139   cfg_layout_redirect_edge_and_branch,
5140   cfg_layout_redirect_edge_and_branch_force,
5141   rtl_can_remove_branch_p,
5142   cfg_layout_delete_block,
5143   cfg_layout_split_block,
5144   rtl_move_block_after,
5145   cfg_layout_can_merge_blocks_p,
5146   cfg_layout_merge_blocks,
5147   rtl_predict_edge,
5148   rtl_predicted_by_p,
5149   cfg_layout_can_duplicate_bb_p,
5150   cfg_layout_duplicate_bb,
5151   cfg_layout_split_edge,
5152   rtl_make_forwarder_block,
5153   NULL, /* tidy_fallthru_edge */
5154   rtl_force_nonfallthru,
5155   rtl_block_ends_with_call_p,
5156   rtl_block_ends_with_condjump_p,
5157   rtl_flow_call_edges_add,
5158   NULL, /* execute_on_growing_pred */
5159   NULL, /* execute_on_shrinking_pred */
5160   duplicate_loop_to_header_edge, /* duplicate loop for trees */
5161   rtl_lv_add_condition_to_bb, /* lv_add_condition_to_bb */
5162   NULL, /* lv_adjust_loop_header_phi*/
5163   rtl_extract_cond_bb_edges, /* extract_cond_bb_edges */
5164   NULL, /* flush_pending_stmts */
5165   rtl_block_empty_p, /* block_empty_p */
5166   rtl_split_block_before_cond_jump, /* split_block_before_cond_jump */
5167   rtl_account_profile_record,
5168 };
5169 
5170 #include "gt-cfgrtl.h"
5171