xref: /netbsd-src/external/gpl3/gcc.old/dist/gcc/cfgrtl.c (revision b7b7574d3bf8eeb51a1fa3977b59142ec6434a55)
1 /* Control flow graph manipulation code for GNU compiler.
2    Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3    1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
4    Free Software Foundation, Inc.
5 
6 This file is part of GCC.
7 
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
12 
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
16 for more details.
17 
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3.  If not see
20 <http://www.gnu.org/licenses/>.  */
21 
22 /* This file contains low level functions to manipulate the CFG and analyze it
23    that are aware of the RTL intermediate language.
24 
25    Available functionality:
26      - Basic CFG/RTL manipulation API documented in cfghooks.h
27      - CFG-aware instruction chain manipulation
28 	 delete_insn, delete_insn_chain
29      - Edge splitting and committing to edges
30 	 insert_insn_on_edge, commit_edge_insertions
31      - CFG updating after insn simplification
32 	 purge_dead_edges, purge_all_dead_edges
33 
34    Functions not supposed for generic use:
35      - Infrastructure to determine quickly basic block for insn
36 	 compute_bb_for_insn, update_bb_for_insn, set_block_for_insn,
37      - Edge redirection with updating and optimizing of insn chain
38 	 block_label, tidy_fallthru_edge, force_nonfallthru  */
39 
40 #include "config.h"
41 #include "system.h"
42 #include "coretypes.h"
43 #include "tm.h"
44 #include "tree.h"
45 #include "rtl.h"
46 #include "hard-reg-set.h"
47 #include "basic-block.h"
48 #include "regs.h"
49 #include "flags.h"
50 #include "output.h"
51 #include "function.h"
52 #include "except.h"
53 #include "toplev.h"
54 #include "tm_p.h"
55 #include "obstack.h"
56 #include "insn-attr.h"
57 #include "insn-config.h"
58 #include "cfglayout.h"
59 #include "expr.h"
60 #include "target.h"
61 #include "cfgloop.h"
62 #include "ggc.h"
63 #include "tree-pass.h"
64 #include "df.h"
65 
/* Forward declarations of the static helpers defined later in this file;
   most of them implement the cfg hooks for the plain-RTL and cfglayout
   representations.  */
static int can_delete_note_p (const_rtx);
static int can_delete_label_p (const_rtx);
static basic_block rtl_split_edge (edge);
static bool rtl_move_block_after (basic_block, basic_block);
static int rtl_verify_flow_info (void);
static basic_block cfg_layout_split_block (basic_block, void *);
static edge cfg_layout_redirect_edge_and_branch (edge, basic_block);
static basic_block cfg_layout_redirect_edge_and_branch_force (edge, basic_block);
static void cfg_layout_delete_block (basic_block);
static void rtl_delete_block (basic_block);
static basic_block rtl_redirect_edge_and_branch_force (edge, basic_block);
static edge rtl_redirect_edge_and_branch (edge, basic_block);
static basic_block rtl_split_block (basic_block, void *);
static void rtl_dump_bb (basic_block, FILE *, int, int);
static int rtl_verify_flow_info_1 (void);
static void rtl_make_forwarder_block (edge);
82 
83 /* Return true if NOTE is not one of the ones that must be kept paired,
84    so that we may simply delete it.  */
85 
86 static int
87 can_delete_note_p (const_rtx note)
88 {
89   switch (NOTE_KIND (note))
90     {
91     case NOTE_INSN_DELETED:
92     case NOTE_INSN_BASIC_BLOCK:
93     case NOTE_INSN_EPILOGUE_BEG:
94       return true;
95 
96     default:
97       return false;
98     }
99 }
100 
101 /* True if a given label can be deleted.  */
102 
103 static int
104 can_delete_label_p (const_rtx label)
105 {
106   return (!LABEL_PRESERVE_P (label)
107 	  /* User declared labels must be preserved.  */
108 	  && LABEL_NAME (label) == 0
109 	  && !in_expr_list_p (forced_labels, label));
110 }
111 
/* Delete INSN by patching it out.  Return the next insn.

   A label that cannot be deleted (see can_delete_label_p) is not
   unlinked; it is demoted in place to a NOTE_INSN_DELETED_LABEL note
   instead, so references from the constant pool etc. stay valid.  */

rtx
delete_insn (rtx insn)
{
  rtx next = NEXT_INSN (insn);
  rtx note;
  bool really_delete = true;

  if (LABEL_P (insn))
    {
      /* Some labels can't be directly removed from the INSN chain, as they
	 might be references via variables, constant pool etc.
	 Convert them to the special NOTE_INSN_DELETED_LABEL note.  */
      if (! can_delete_label_p (insn))
	{
	  const char *name = LABEL_NAME (insn);

	  really_delete = false;
	  PUT_CODE (insn, NOTE);
	  NOTE_KIND (insn) = NOTE_INSN_DELETED_LABEL;
	  NOTE_DELETED_LABEL_NAME (insn) = name;
	}

      remove_node_from_expr_list (insn, &nonlocal_goto_handler_labels);
    }

  if (really_delete)
    {
      /* If this insn has already been deleted, something is very wrong.  */
      gcc_assert (!INSN_DELETED_P (insn));
      /* Unlink INSN from the chain and mark it so a double deletion
	 trips the assert above.  */
      remove_insn (insn);
      INSN_DELETED_P (insn) = 1;
    }

  /* If deleting a jump, decrement the use count of the label.  Deleting
     the label itself should happen in the normal course of block merging.  */
  if (JUMP_P (insn))
    {
      if (JUMP_LABEL (insn)
	  && LABEL_P (JUMP_LABEL (insn)))
	LABEL_NUSES (JUMP_LABEL (insn))--;

      /* If there are more targets, remove them too.  */
      while ((note
	      = find_reg_note (insn, REG_LABEL_TARGET, NULL_RTX)) != NULL_RTX
	     && LABEL_P (XEXP (note, 0)))
	{
	  LABEL_NUSES (XEXP (note, 0))--;
	  remove_note (insn, note);
	}
    }

  /* Also if deleting any insn that references a label as an operand.  */
  while ((note = find_reg_note (insn, REG_LABEL_OPERAND, NULL_RTX)) != NULL_RTX
	 && LABEL_P (XEXP (note, 0)))
    {
      LABEL_NUSES (XEXP (note, 0))--;
      remove_note (insn, note);
    }

  /* For a jump table, drop the use count of every label the vector
     still references.  */
  if (JUMP_TABLE_DATA_P (insn))
    {
      rtx pat = PATTERN (insn);
      int diff_vec_p = GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC;
      int len = XVECLEN (pat, diff_vec_p);
      int i;

      for (i = 0; i < len; i++)
	{
	  rtx label = XEXP (XVECEXP (pat, diff_vec_p, i), 0);

	  /* When deleting code in bulk (e.g. removing many unreachable
	     blocks) we can delete a label that's a target of the vector
	     before deleting the vector itself.  */
	  if (!NOTE_P (label))
	    LABEL_NUSES (label)--;
	}
    }

  return next;
}
194 
195 /* Like delete_insn but also purge dead edges from BB.  */
196 
197 rtx
198 delete_insn_and_edges (rtx insn)
199 {
200   rtx x;
201   bool purge = false;
202 
203   if (INSN_P (insn)
204       && BLOCK_FOR_INSN (insn)
205       && BB_END (BLOCK_FOR_INSN (insn)) == insn)
206     purge = true;
207   x = delete_insn (insn);
208   if (purge)
209     purge_dead_edges (BLOCK_FOR_INSN (insn));
210   return x;
211 }
212 
/* Unlink a chain of insns between START and FINISH, leaving notes
   that must be paired.  If CLEAR_BB is true, we set bb field for
   insns that cannot be removed to NULL.  */

void
delete_insn_chain (rtx start, rtx finish, bool clear_bb)
{
  rtx next;

  /* Unchain the insns one by one.  It would be quicker to delete all of these
     with a single unchaining, rather than one at a time, but we need to keep
     the NOTE's.  */
  while (1)
    {
      next = NEXT_INSN (start);
      if (NOTE_P (start) && !can_delete_note_p (start))
	;
      else
	next = delete_insn (start);

      /* START may survive deletion: either it was a must-keep note, or
	 delete_insn demoted a preserved label to a NOTE_INSN_DELETED_LABEL.
	 In that case detach it from its block if the caller asked.  */
      if (clear_bb && !INSN_DELETED_P (start))
	set_block_for_insn (start, NULL);

      if (start == finish)
	break;
      start = next;
    }
}
241 
/* Create a new basic block consisting of the instructions between HEAD and END
   inclusive.  This function is designed to allow fast BB construction - reuses
   the note and basic block struct in BB_NOTE, if any and do not grow
   BASIC_BLOCK chain and should be used directly only by CFG construction code.
   END can be NULL in to create new empty basic block before HEAD.  Both END
   and HEAD can be NULL to create basic block at the end of INSN chain.
   AFTER is the basic block we should be put after.  */

basic_block
create_basic_block_structure (rtx head, rtx end, rtx bb_note, basic_block after)
{
  basic_block bb;

  if (bb_note
      && (bb = NOTE_BASIC_BLOCK (bb_note)) != NULL
      && bb->aux == NULL)
    {
      /* If we found an existing note, thread it back onto the chain.  */

      rtx after;

      /* The note goes right after the (optional) leading label.  */
      if (LABEL_P (head))
	after = head;
      else
	{
	  after = PREV_INSN (head);
	  head = bb_note;
	}

      if (after != bb_note && NEXT_INSN (after) != bb_note)
	reorder_insns_nobb (bb_note, bb_note, after);
    }
  else
    {
      /* Otherwise we must create a note and a basic block structure.  */

      bb = alloc_block ();

      init_rtl_bb_info (bb);
      if (!head && !end)
	/* Empty block at the end of the insn chain.  */
	head = end = bb_note
	  = emit_note_after (NOTE_INSN_BASIC_BLOCK, get_last_insn ());
      else if (LABEL_P (head) && end)
	{
	  /* Block starts with a label: the note goes right after it.  */
	  bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK, head);
	  if (head == end)
	    end = bb_note;
	}
      else
	{
	  /* No label: the note becomes the new block head.  */
	  bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK, head);
	  head = bb_note;
	  if (!end)
	    end = head;
	}

      NOTE_BASIC_BLOCK (bb_note) = bb;
    }

  /* Always include the bb note in the block.  */
  if (NEXT_INSN (end) == bb_note)
    end = bb_note;

  BB_HEAD (bb) = head;
  BB_END (bb) = end;
  /* Assign the next free index, link the block into the chain after
     AFTER, and register it with the dataflow machinery.  */
  bb->index = last_basic_block++;
  bb->flags = BB_NEW | BB_RTL;
  link_block (bb, after);
  SET_BASIC_BLOCK (bb->index, bb);
  df_bb_refs_record (bb->index, false);
  update_bb_for_insn (bb);
  BB_SET_PARTITION (bb, BB_UNPARTITIONED);

  /* Tag the block so that we know it has been used when considering
     other basic block notes.  */
  bb->aux = bb;

  return bb;
}
321 
322 /* Create new basic block consisting of instructions in between HEAD and END
323    and place it to the BB chain after block AFTER.  END can be NULL in to
324    create new empty basic block before HEAD.  Both END and HEAD can be NULL to
325    create basic block at the end of INSN chain.  */
326 
327 static basic_block
328 rtl_create_basic_block (void *headp, void *endp, basic_block after)
329 {
330   rtx head = (rtx) headp, end = (rtx) endp;
331   basic_block bb;
332 
333   /* Grow the basic block array if needed.  */
334   if ((size_t) last_basic_block >= VEC_length (basic_block, basic_block_info))
335     {
336       size_t new_size = last_basic_block + (last_basic_block + 3) / 4;
337       VEC_safe_grow_cleared (basic_block, gc, basic_block_info, new_size);
338     }
339 
340   n_basic_blocks++;
341 
342   bb = create_basic_block_structure (head, end, NULL, after);
343   bb->aux = NULL;
344   return bb;
345 }
346 
347 static basic_block
348 cfg_layout_create_basic_block (void *head, void *end, basic_block after)
349 {
350   basic_block newbb = rtl_create_basic_block (head, end, after);
351 
352   return newbb;
353 }
354 
355 /* Delete the insns in a (non-live) block.  We physically delete every
356    non-deleted-note insn, and update the flow graph appropriately.
357 
358    Return nonzero if we deleted an exception handler.  */
359 
360 /* ??? Preserving all such notes strikes me as wrong.  It would be nice
361    to post-process the stream to remove empty blocks, loops, ranges, etc.  */
362 
363 static void
364 rtl_delete_block (basic_block b)
365 {
366   rtx insn, end;
367 
368   /* If the head of this block is a CODE_LABEL, then it might be the
369      label for an exception handler which can't be reached.  We need
370      to remove the label from the exception_handler_label list.  */
371   insn = BB_HEAD (b);
372 
373   end = get_last_bb_insn (b);
374 
375   /* Selectively delete the entire chain.  */
376   BB_HEAD (b) = NULL;
377   delete_insn_chain (insn, end, true);
378 
379 
380   if (dump_file)
381     fprintf (dump_file, "deleting block %d\n", b->index);
382   df_bb_delete (b->index);
383 }
384 
385 /* Records the basic block struct in BLOCK_FOR_INSN for every insn.  */
386 
387 void
388 compute_bb_for_insn (void)
389 {
390   basic_block bb;
391 
392   FOR_EACH_BB (bb)
393     {
394       rtx end = BB_END (bb);
395       rtx insn;
396 
397       for (insn = BB_HEAD (bb); ; insn = NEXT_INSN (insn))
398 	{
399 	  BLOCK_FOR_INSN (insn) = bb;
400 	  if (insn == end)
401 	    break;
402 	}
403     }
404 }
405 
406 /* Release the basic_block_for_insn array.  */
407 
408 unsigned int
409 free_bb_for_insn (void)
410 {
411   rtx insn;
412   for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
413     if (!BARRIER_P (insn))
414       BLOCK_FOR_INSN (insn) = NULL;
415   return 0;
416 }
417 
/* Execute function of the *free_cfg pass: drop per-insn basic block
   information once the CFG is no longer going to be maintained.  */

static unsigned int
rest_of_pass_free_cfg (void)
{
#ifdef DELAY_SLOTS
  /* The resource.c machinery uses DF but the CFG isn't guaranteed to be
     valid at that point so it would be too late to call df_analyze.  */
  if (optimize > 0 && flag_delayed_branch)
    {
      df_note_add_problem ();
      df_analyze ();
    }
#endif

  free_bb_for_insn ();
  return 0;
}
434 
/* Pass descriptor for the internal "*free_cfg" pass; its execute
   function (rest_of_pass_free_cfg) destroys the PROP_cfg property.  */
struct rtl_opt_pass pass_free_cfg =
{
 {
  RTL_PASS,
  "*free_cfg",                          /* name */
  NULL,                                 /* gate */
  rest_of_pass_free_cfg,                /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_NONE,                              /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  PROP_cfg,                             /* properties_destroyed */
  0,                                    /* todo_flags_start */
  0,                                    /* todo_flags_finish */
 }
};
453 
454 /* Return RTX to emit after when we want to emit code on the entry of function.  */
455 rtx
456 entry_of_function (void)
457 {
458   return (n_basic_blocks > NUM_FIXED_BLOCKS ?
459 	  BB_HEAD (ENTRY_BLOCK_PTR->next_bb) : get_insns ());
460 }
461 
462 /* Emit INSN at the entry point of the function, ensuring that it is only
463    executed once per function.  */
464 void
465 emit_insn_at_entry (rtx insn)
466 {
467   edge_iterator ei = ei_start (ENTRY_BLOCK_PTR->succs);
468   edge e = ei_safe_edge (ei);
469   gcc_assert (e->flags & EDGE_FALLTHRU);
470 
471   insert_insn_on_edge (insn, e);
472   commit_edge_insertions ();
473 }
474 
475 /* Update BLOCK_FOR_INSN of insns between BEGIN and END
476    (or BARRIER if found) and notify df of the bb change.
477    The insn chain range is inclusive
478    (i.e. both BEGIN and END will be updated. */
479 
480 static void
481 update_bb_for_insn_chain (rtx begin, rtx end, basic_block bb)
482 {
483   rtx insn;
484 
485   end = NEXT_INSN (end);
486   for (insn = begin; insn != end; insn = NEXT_INSN (insn))
487     if (!BARRIER_P (insn))
488       df_insn_change_bb (insn, bb);
489 }
490 
/* Update BLOCK_FOR_INSN of insns in BB to BB,
   and notify df of the change.  */

void
update_bb_for_insn (basic_block bb)
{
  /* Covers the inclusive range [BB_HEAD, BB_END] of BB.  */
  update_bb_for_insn_chain (BB_HEAD (bb), BB_END (bb), bb);
}
499 
500 
501 /* Return the INSN immediately following the NOTE_INSN_BASIC_BLOCK
502    note associated with the BLOCK.  */
503 
504 static rtx
505 first_insn_after_basic_block_note (basic_block block)
506 {
507   rtx insn;
508 
509   /* Get the first instruction in the block.  */
510   insn = BB_HEAD (block);
511 
512   if (insn == NULL_RTX)
513     return NULL_RTX;
514   if (LABEL_P (insn))
515     insn = NEXT_INSN (insn);
516   gcc_assert (NOTE_INSN_BASIC_BLOCK_P (insn));
517 
518   return NEXT_INSN (insn);
519 }
520 
/* Creates a new basic block just after basic block B by splitting
   everything after specified instruction I.  */

static basic_block
rtl_split_block (basic_block bb, void *insnp)
{
  basic_block new_bb;
  rtx insn = (rtx) insnp;
  edge e;
  edge_iterator ei;

  if (!insn)
    {
      /* No split point given: split right after the block's
	 NOTE_INSN_BASIC_BLOCK, i.e. move the whole body to NEW_BB.  */
      insn = first_insn_after_basic_block_note (bb);

      if (insn)
	{
	  rtx next = insn;

	  insn = PREV_INSN (insn);

	  /* If the block contains only debug insns, insn would have
	     been NULL in a non-debug compilation, and then we'd end
	     up emitting a DELETED note.  For -fcompare-debug
	     stability, emit the note too.  */
	  if (insn != BB_END (bb)
	      && DEBUG_INSN_P (next)
	      && DEBUG_INSN_P (BB_END (bb)))
	    {
	      while (next != BB_END (bb) && DEBUG_INSN_P (next))
		next = NEXT_INSN (next);

	      if (next == BB_END (bb))
		emit_note_after (NOTE_INSN_DELETED, next);
	    }
	}
      else
	insn = get_last_insn ();
    }

  /* We probably should check type of the insn so that we do not create
     inconsistent cfg.  It is checked in verify_flow_info anyway, so do not
     bother.  */
  if (insn == BB_END (bb))
    emit_note_after (NOTE_INSN_DELETED, insn);

  /* Create the new basic block.  NEW_BB takes over everything after
     INSN, and BB is truncated to end at INSN.  */
  new_bb = create_basic_block (NEXT_INSN (insn), BB_END (bb), bb);
  BB_COPY_PARTITION (new_bb, bb);
  BB_END (bb) = insn;

  /* Redirect the outgoing edges.  */
  new_bb->succs = bb->succs;
  bb->succs = NULL;
  FOR_EACH_EDGE (e, ei, new_bb->succs)
    e->src = new_bb;

  /* The new block starts off being dirty.  */
  df_set_bb_dirty (bb);
  return new_bb;
}
582 
/* Blocks A and B are to be merged into a single block A.  The insns
   are already contiguous.  */

static void
rtl_merge_blocks (basic_block a, basic_block b)
{
  rtx b_head = BB_HEAD (b), b_end = BB_END (b), a_end = BB_END (a);
  rtx del_first = NULL_RTX, del_last = NULL_RTX;
  rtx b_debug_start = b_end, b_debug_end = b_end;
  int b_empty = 0;

  if (dump_file)
    fprintf (dump_file, "merging block %d into block %d\n", b->index, a->index);

  /* Back b_end up over any trailing debug insns; [b_debug_start,
     b_debug_end] then covers those debug insns so they can be handled
     separately below.  */
  while (DEBUG_INSN_P (b_end))
    b_end = PREV_INSN (b_debug_start = b_end);

  /* If there was a CODE_LABEL beginning B, delete it.  */
  if (LABEL_P (b_head))
    {
      /* Detect basic blocks with nothing but a label.  This can happen
	 in particular at the end of a function.  */
      if (b_head == b_end)
	b_empty = 1;

      del_first = del_last = b_head;
      b_head = NEXT_INSN (b_head);
    }

  /* Delete the basic block note and handle blocks containing just that
     note.  */
  if (NOTE_INSN_BASIC_BLOCK_P (b_head))
    {
      if (b_head == b_end)
	b_empty = 1;
      if (! del_last)
	del_first = b_head;

      del_last = b_head;
      b_head = NEXT_INSN (b_head);
    }

  /* If there was a jump out of A, delete it.  */
  if (JUMP_P (a_end))
    {
      rtx prev;

      /* Find the insn just before the jump, skipping over plain notes.  */
      for (prev = PREV_INSN (a_end); ; prev = PREV_INSN (prev))
	if (!NOTE_P (prev)
	    || NOTE_INSN_BASIC_BLOCK_P (prev)
	    || prev == BB_HEAD (a))
	  break;

      del_first = a_end;

#ifdef HAVE_cc0
      /* If this was a conditional jump, we need to also delete
	 the insn that set cc0.  */
      if (only_sets_cc0_p (prev))
	{
	  rtx tmp = prev;

	  prev = prev_nonnote_insn (prev);
	  if (!prev)
	    prev = BB_HEAD (a);
	  del_first = tmp;
	}
#endif

      a_end = PREV_INSN (del_first);
    }
  else if (BARRIER_P (NEXT_INSN (a_end)))
    del_first = NEXT_INSN (a_end);

  /* Delete everything marked above as well as crap that might be
     hanging out between the two blocks.  */
  BB_HEAD (b) = NULL;
  delete_insn_chain (del_first, del_last, true);

  /* Reassociate the insns of B with A.  */
  if (!b_empty)
    {
      update_bb_for_insn_chain (a_end, b_debug_end, a);

      a_end = b_debug_end;
    }
  else if (b_end != b_debug_end)
    {
      /* Move any deleted labels and other notes between the end of A
	 and the debug insns that make up B after the debug insns,
	 bringing the debug insns into A while keeping the notes after
	 the end of A.  */
      if (NEXT_INSN (a_end) != b_debug_start)
	reorder_insns_nobb (NEXT_INSN (a_end), PREV_INSN (b_debug_start),
			    b_debug_end);
      update_bb_for_insn_chain (b_debug_start, b_debug_end, a);
      a_end = b_debug_end;
    }

  df_bb_delete (b->index);
  BB_END (a) = a_end;
}
685 
686 
687 /* Return true when block A and B can be merged.  */
688 
689 static bool
690 rtl_can_merge_blocks (basic_block a, basic_block b)
691 {
692   /* If we are partitioning hot/cold basic blocks, we don't want to
693      mess up unconditional or indirect jumps that cross between hot
694      and cold sections.
695 
696      Basic block partitioning may result in some jumps that appear to
697      be optimizable (or blocks that appear to be mergeable), but which really
698      must be left untouched (they are required to make it safely across
699      partition boundaries).  See  the comments at the top of
700      bb-reorder.c:partition_hot_cold_basic_blocks for complete details.  */
701 
702   if (BB_PARTITION (a) != BB_PARTITION (b))
703     return false;
704 
705   /* There must be exactly one edge in between the blocks.  */
706   return (single_succ_p (a)
707 	  && single_succ (a) == b
708 	  && single_pred_p (b)
709 	  && a != b
710 	  /* Must be simple edge.  */
711 	  && !(single_succ_edge (a)->flags & EDGE_COMPLEX)
712 	  && a->next_bb == b
713 	  && a != ENTRY_BLOCK_PTR && b != EXIT_BLOCK_PTR
714 	  /* If the jump insn has side effects,
715 	     we can't kill the edge.  */
716 	  && (!JUMP_P (BB_END (a))
717 	      || (reload_completed
718 		  ? simplejump_p (BB_END (a)) : onlyjump_p (BB_END (a)))));
719 }
720 
721 /* Return the label in the head of basic block BLOCK.  Create one if it doesn't
722    exist.  */
723 
724 rtx
725 block_label (basic_block block)
726 {
727   if (block == EXIT_BLOCK_PTR)
728     return NULL_RTX;
729 
730   if (!LABEL_P (BB_HEAD (block)))
731     {
732       BB_HEAD (block) = emit_label_before (gen_label_rtx (), BB_HEAD (block));
733     }
734 
735   return BB_HEAD (block);
736 }
737 
/* Attempt to perform edge redirection by replacing possibly complex jump
   instruction by unconditional jump or removing jump completely.  This can
   apply only if all edges now point to the same block.  The parameters and
   return values are equivalent to redirect_edge_and_branch.  */

edge
try_redirect_by_replacing_jump (edge e, basic_block target, bool in_cfglayout)
{
  basic_block src = e->src;
  rtx insn = BB_END (src), kill_from;
  rtx set;
  int fallthru = 0;

  /* If we are partitioning hot/cold basic blocks, we don't want to
     mess up unconditional or indirect jumps that cross between hot
     and cold sections.

     Basic block partitioning may result in some jumps that appear to
     be optimizable (or blocks that appear to be mergeable), but which really
     must be left untouched (they are required to make it safely across
     partition boundaries).  See  the comments at the top of
     bb-reorder.c:partition_hot_cold_basic_blocks for complete details.  */

  if (find_reg_note (insn, REG_CROSSING_JUMP, NULL_RTX)
      || BB_PARTITION (src) != BB_PARTITION (target))
    return NULL;

  /* We can replace or remove a complex jump only when we have exactly
     two edges.  Also, if we have exactly one outgoing edge, we can
     redirect that.  */
  if (EDGE_COUNT (src->succs) >= 3
      /* Verify that all targets will be TARGET.  Specifically, the
	 edge that is not E must also go to TARGET.  */
      || (EDGE_COUNT (src->succs) == 2
	  && EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest != target))
    return NULL;

  /* The jump must do nothing besides jumping, or removing it would
     change semantics.  */
  if (!onlyjump_p (insn))
    return NULL;
  if ((!optimize || reload_completed) && tablejump_p (insn, NULL, NULL))
    return NULL;

  /* Avoid removing branch with side effects.  */
  set = single_set (insn);
  if (!set || side_effects_p (set))
    return NULL;

  /* In case we zap a conditional jump, we'll need to kill
     the cc0 setter too.  */
  kill_from = insn;
#ifdef HAVE_cc0
  if (reg_mentioned_p (cc0_rtx, PATTERN (insn))
      && only_sets_cc0_p (PREV_INSN (insn)))
    kill_from = PREV_INSN (insn);
#endif

  /* See if we can create the fallthru edge.  */
  if (in_cfglayout || can_fallthru (src, target))
    {
      if (dump_file)
	fprintf (dump_file, "Removing jump %i.\n", INSN_UID (insn));
      fallthru = 1;

      /* Selectively unlink whole insn chain.  */
      if (in_cfglayout)
	{
	  rtx insn = src->il.rtl->footer;

	  delete_insn_chain (kill_from, BB_END (src), false);

	  /* Remove barriers but keep jumptables.  */
	  while (insn)
	    {
	      if (BARRIER_P (insn))
		{
		  /* Unlink the barrier from the footer chain.  */
		  if (PREV_INSN (insn))
		    NEXT_INSN (PREV_INSN (insn)) = NEXT_INSN (insn);
		  else
		    src->il.rtl->footer = NEXT_INSN (insn);
		  if (NEXT_INSN (insn))
		    PREV_INSN (NEXT_INSN (insn)) = PREV_INSN (insn);
		}
	      if (LABEL_P (insn))
		break;
	      insn = NEXT_INSN (insn);
	    }
	}
      else
	delete_insn_chain (kill_from, PREV_INSN (BB_HEAD (target)),
			   false);
    }

  /* If this already is simplejump, redirect it.  */
  else if (simplejump_p (insn))
    {
      if (e->dest == target)
	return NULL;
      if (dump_file)
	fprintf (dump_file, "Redirecting jump %i from %i to %i.\n",
		 INSN_UID (insn), e->dest->index, target->index);
      if (!redirect_jump (insn, block_label (target), 0))
	{
	  gcc_assert (target == EXIT_BLOCK_PTR);
	  return NULL;
	}
    }

  /* Cannot do anything for target exit block.  */
  else if (target == EXIT_BLOCK_PTR)
    return NULL;

  /* Or replace possibly complicated jump insn by simple jump insn.  */
  else
    {
      rtx target_label = block_label (target);
      rtx barrier, label, table;

      emit_jump_insn_after_noloc (gen_jump (target_label), insn);
      JUMP_LABEL (BB_END (src)) = target_label;
      LABEL_NUSES (target_label)++;
      if (dump_file)
	fprintf (dump_file, "Replacing insn %i by jump %i\n",
		 INSN_UID (insn), INSN_UID (BB_END (src)));


      delete_insn_chain (kill_from, insn, false);

      /* Recognize a tablejump that we are converting to a
	 simple jump and remove its associated CODE_LABEL
	 and ADDR_VEC or ADDR_DIFF_VEC.  */
      if (tablejump_p (insn, &label, &table))
	delete_insn_chain (label, table, false);

      /* Make sure the new jump is followed by a barrier.  */
      barrier = next_nonnote_insn (BB_END (src));
      if (!barrier || !BARRIER_P (barrier))
	emit_barrier_after (BB_END (src));
      else
	{
	  if (barrier != NEXT_INSN (BB_END (src)))
	    {
	      /* Move the jump before barrier so that the notes
		 which originally were or were created before jump table are
		 inside the basic block.  */
	      rtx new_insn = BB_END (src);

	      update_bb_for_insn_chain (NEXT_INSN (BB_END (src)),
				        PREV_INSN (barrier), src);

	      NEXT_INSN (PREV_INSN (new_insn)) = NEXT_INSN (new_insn);
	      PREV_INSN (NEXT_INSN (new_insn)) = PREV_INSN (new_insn);

	      NEXT_INSN (new_insn) = barrier;
	      NEXT_INSN (PREV_INSN (barrier)) = new_insn;

	      PREV_INSN (new_insn) = PREV_INSN (barrier);
	      PREV_INSN (barrier) = new_insn;
	    }
	}
    }

  /* Keep only one edge out and set proper flags.  */
  if (!single_succ_p (src))
    remove_edge (e);
  gcc_assert (single_succ_p (src));

  e = single_succ_edge (src);
  if (fallthru)
    e->flags = EDGE_FALLTHRU;
  else
    e->flags = 0;

  e->probability = REG_BR_PROB_BASE;
  e->count = src->count;

  if (e->dest != target)
    redirect_edge_succ (e, target);
  return e;
}
916 
/* Subroutine of redirect_branch_edge that tries to patch the jump
   instruction INSN so that it reaches block NEW.  Do this
   only when it originally reached block OLD.  Return true if this
   worked or the original target wasn't OLD, return false if redirection
   doesn't work.  */

static bool
patch_jump_insn (rtx insn, rtx old_label, basic_block new_bb)
{
  rtx tmp;
  /* Recognize a tablejump and adjust all matching cases.  */
  if (tablejump_p (insn, NULL, &tmp))
    {
      rtvec vec;
      int j;
      rtx new_label = block_label (new_bb);

      /* block_label returns NULL_RTX for the exit block, so we cannot
	 retarget a tablejump there.  */
      if (new_bb == EXIT_BLOCK_PTR)
	return false;
      if (GET_CODE (PATTERN (tmp)) == ADDR_VEC)
	vec = XVEC (PATTERN (tmp), 0);
      else
	vec = XVEC (PATTERN (tmp), 1);

      /* Replace every vector entry that referenced OLD_LABEL and keep
	 the label use counts in sync.  */
      for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
	if (XEXP (RTVEC_ELT (vec, j), 0) == old_label)
	  {
	    RTVEC_ELT (vec, j) = gen_rtx_LABEL_REF (Pmode, new_label);
	    --LABEL_NUSES (old_label);
	    ++LABEL_NUSES (new_label);
	  }

      /* Handle casesi dispatch insns.  */
      if ((tmp = single_set (insn)) != NULL
	  && SET_DEST (tmp) == pc_rtx
	  && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
	  && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF
	  && XEXP (XEXP (SET_SRC (tmp), 2), 0) == old_label)
	{
	  XEXP (SET_SRC (tmp), 2) = gen_rtx_LABEL_REF (Pmode,
						       new_label);
	  --LABEL_NUSES (old_label);
	  ++LABEL_NUSES (new_label);
	}
    }
  else if ((tmp = extract_asm_operands (PATTERN (insn))) != NULL)
    {
      int i, n = ASM_OPERANDS_LABEL_LENGTH (tmp);
      rtx new_label, note;

      if (new_bb == EXIT_BLOCK_PTR)
	return false;
      new_label = block_label (new_bb);

      /* An asm goto: rewrite every label operand that matched OLD_LABEL.  */
      for (i = 0; i < n; ++i)
	{
	  rtx old_ref = ASM_OPERANDS_LABEL (tmp, i);
	  gcc_assert (GET_CODE (old_ref) == LABEL_REF);
	  if (XEXP (old_ref, 0) == old_label)
	    {
	      ASM_OPERANDS_LABEL (tmp, i)
		= gen_rtx_LABEL_REF (Pmode, new_label);
	      --LABEL_NUSES (old_label);
	      ++LABEL_NUSES (new_label);
	    }
	}

      /* Keep JUMP_LABEL and the REG_LABEL_TARGET notes consistent:
	 JUMP_LABEL holds one target, the notes hold any others.  */
      if (JUMP_LABEL (insn) == old_label)
	{
	  JUMP_LABEL (insn) = new_label;
	  note = find_reg_note (insn, REG_LABEL_TARGET, new_label);
	  if (note)
	    remove_note (insn, note);
	}
      else
	{
	  note = find_reg_note (insn, REG_LABEL_TARGET, old_label);
	  if (note)
	    remove_note (insn, note);
	  if (JUMP_LABEL (insn) != new_label
	      && !find_reg_note (insn, REG_LABEL_TARGET, new_label))
	    add_reg_note (insn, REG_LABEL_TARGET, new_label);
	}
      while ((note = find_reg_note (insn, REG_LABEL_OPERAND, old_label))
	     != NULL_RTX)
	XEXP (note, 0) = new_label;
    }
  else
    {
      /* ?? We may play the games with moving the named labels from
	 one basic block to the other in case only one computed_jump is
	 available.  */
      if (computed_jump_p (insn)
	  /* A return instruction can't be redirected.  */
	  || returnjump_p (insn))
	return false;

      if (!currently_expanding_to_rtl || JUMP_LABEL (insn) == old_label)
	{
	  /* If the insn doesn't go where we think, we're confused.  */
	  gcc_assert (JUMP_LABEL (insn) == old_label);

	  /* If the substitution doesn't succeed, die.  This can happen
	     if the back end emitted unrecognizable instructions or if
	     target is exit block on some arches.  */
	  if (!redirect_jump (insn, block_label (new_bb), 0))
	    {
	      gcc_assert (new_bb == EXIT_BLOCK_PTR);
	      return false;
	    }
	}
    }
  return true;
}
1031 
1032 
1033 /* Redirect edge representing branch of (un)conditional jump or tablejump,
1034    NULL on failure  */
1035 static edge
1036 redirect_branch_edge (edge e, basic_block target)
1037 {
1038   rtx old_label = BB_HEAD (e->dest);
1039   basic_block src = e->src;
1040   rtx insn = BB_END (src);
1041 
1042   /* We can only redirect non-fallthru edges of jump insn.  */
1043   if (e->flags & EDGE_FALLTHRU)
1044     return NULL;
1045   else if (!JUMP_P (insn) && !currently_expanding_to_rtl)
1046     return NULL;
1047 
1048   if (!currently_expanding_to_rtl)
1049     {
1050       if (!patch_jump_insn (insn, old_label, target))
1051 	return NULL;
1052     }
1053   else
1054     /* When expanding this BB might actually contain multiple
1055        jumps (i.e. not yet split by find_many_sub_basic_blocks).
1056        Redirect all of those that match our label.  */
1057     for (insn = BB_HEAD (src); insn != NEXT_INSN (BB_END (src));
1058 	 insn = NEXT_INSN (insn))
1059       if (JUMP_P (insn) && !patch_jump_insn (insn, old_label, target))
1060 	return NULL;
1061 
1062   if (dump_file)
1063     fprintf (dump_file, "Edge %i->%i redirected to %i\n",
1064 	     e->src->index, e->dest->index, target->index);
1065 
1066   if (e->dest != target)
1067     e = redirect_edge_succ_nodup (e, target);
1068 
1069   return e;
1070 }
1071 
1072 /* Attempt to change code to redirect edge E to TARGET.  Don't do that on
1073    expense of adding new instructions or reordering basic blocks.
1074 
1075    Function can be also called with edge destination equivalent to the TARGET.
1076    Then it should try the simplifications and do nothing if none is possible.
1077 
1078    Return edge representing the branch if transformation succeeded.  Return NULL
1079    on failure.
1080    We still return NULL in case E already destinated TARGET and we didn't
1081    managed to simplify instruction stream.  */
1082 
1083 static edge
1084 rtl_redirect_edge_and_branch (edge e, basic_block target)
1085 {
1086   edge ret;
1087   basic_block src = e->src;
1088 
1089   if (e->flags & (EDGE_ABNORMAL_CALL | EDGE_EH))
1090     return NULL;
1091 
1092   if (e->dest == target)
1093     return e;
1094 
1095   if ((ret = try_redirect_by_replacing_jump (e, target, false)) != NULL)
1096     {
1097       df_set_bb_dirty (src);
1098       return ret;
1099     }
1100 
1101   ret = redirect_branch_edge (e, target);
1102   if (!ret)
1103     return NULL;
1104 
1105   df_set_bb_dirty (src);
1106   return ret;
1107 }
1108 
/* Like force_nonfallthru below, but additionally performs redirection
   Used by redirect_edge_and_branch_force.  Returns the newly created
   basic block, or NULL when no new block was needed.  */

static basic_block
force_nonfallthru_and_redirect (edge e, basic_block target)
{
  basic_block jump_block, new_bb = NULL, src = e->src;
  rtx note;
  edge new_edge;
  int abnormal_edge_flags = 0;
  bool asm_goto_edge = false;
  int loc;

  /* In the case the last instruction is conditional jump to the next
     instruction, first redirect the jump itself and then continue
     by creating a basic block afterwards to redirect fallthru edge.  */
  if (e->src != ENTRY_BLOCK_PTR && e->dest != EXIT_BLOCK_PTR
      && any_condjump_p (BB_END (e->src))
      && JUMP_LABEL (BB_END (e->src)) == BB_HEAD (e->dest))
    {
      /* Deliberately shadows the outer NOTE; both hold REG_BR_PROB notes.  */
      rtx note;
      edge b = unchecked_make_edge (e->src, target, 0);
      bool redirected;

      redirected = redirect_jump (BB_END (e->src), block_label (target), 0);
      gcc_assert (redirected);

      /* Transfer the branch probability/count from E to the new edge B.  */
      note = find_reg_note (BB_END (e->src), REG_BR_PROB, NULL_RTX);
      if (note)
	{
	  int prob = INTVAL (XEXP (note, 0));

	  b->probability = prob;
	  b->count = e->count * prob / REG_BR_PROB_BASE;
	  /* NOTE(review): this zeroes e->probability unconditionally;
	     subtracting b->probability (== prob) would seem the natural
	     counterpart of the count update below — confirm against
	     upstream before changing.  */
	  e->probability -= e->probability;
	  e->count -= b->count;
	  if (e->probability < 0)
	    e->probability = 0;
	  if (e->count < 0)
	    e->count = 0;
	}
    }

  if (e->flags & EDGE_ABNORMAL)
    {
      /* Irritating special case - fallthru edge to the same block as abnormal
	 edge.
	 We can't redirect abnormal edge, but we still can split the fallthru
	 one and create separate abnormal edge to original destination.
	 This allows bb-reorder to make such edge non-fallthru.  */
      gcc_assert (e->dest == target);
      abnormal_edge_flags = e->flags & ~(EDGE_FALLTHRU | EDGE_CAN_FALLTHRU);
      e->flags &= EDGE_FALLTHRU | EDGE_CAN_FALLTHRU;
    }
  else
    {
      gcc_assert (e->flags & EDGE_FALLTHRU);
      if (e->src == ENTRY_BLOCK_PTR)
	{
	  /* We can't redirect the entry block.  Create an empty block
	     at the start of the function which we use to add the new
	     jump.  */
	  edge tmp;
	  edge_iterator ei;
	  bool found = false;

	  basic_block bb = create_basic_block (BB_HEAD (e->dest), NULL, ENTRY_BLOCK_PTR);

	  /* Change the existing edge's source to be the new block, and add
	     a new edge from the entry block to the new block.  */
	  e->src = bb;
	  for (ei = ei_start (ENTRY_BLOCK_PTR->succs); (tmp = ei_safe_edge (ei)); )
	    {
	      if (tmp == e)
		{
		  /* Unlink E from the entry block's successor vector.  */
		  VEC_unordered_remove (edge, ENTRY_BLOCK_PTR->succs, ei.index);
		  found = true;
		  break;
		}
	      else
		ei_next (&ei);
	    }

	  gcc_assert (found);

	  VEC_safe_push (edge, gc, bb->succs, e);
	  make_single_succ_edge (ENTRY_BLOCK_PTR, bb, EDGE_FALLTHRU);
	}
    }

  /* If e->src ends with asm goto, see if any of the ASM_OPERANDS_LABELs
     don't point to target label.  */
  if (JUMP_P (BB_END (e->src))
      && target != EXIT_BLOCK_PTR
      && e->dest == target
      && (e->flags & EDGE_FALLTHRU)
      && (note = extract_asm_operands (PATTERN (BB_END (e->src)))))
    {
      int i, n = ASM_OPERANDS_LABEL_LENGTH (note);

      for (i = 0; i < n; ++i)
	if (XEXP (ASM_OPERANDS_LABEL (note, i), 0) == BB_HEAD (target))
	  {
	    asm_goto_edge = true;
	    break;
	  }
    }

  /* A new block is needed whenever E's source has other successors,
     or the abnormal/asm-goto special cases above apply.  */
  if (EDGE_COUNT (e->src->succs) >= 2 || abnormal_edge_flags || asm_goto_edge)
    {
      gcov_type count = e->count;
      int probability = e->probability;
      /* Create the new structures.  */

      /* If the old block ended with a tablejump, skip its table
	 by searching forward from there.  Otherwise start searching
	 forward from the last instruction of the old block.  */
      if (!tablejump_p (BB_END (e->src), NULL, &note))
	note = BB_END (e->src);
      note = NEXT_INSN (note);

      jump_block = create_basic_block (note, NULL, e->src);
      jump_block->count = count;
      jump_block->frequency = EDGE_FREQUENCY (e);
      jump_block->loop_depth = target->loop_depth;

      /* Make sure new block ends up in correct hot/cold section.  */

      BB_COPY_PARTITION (jump_block, e->src);
      if (flag_reorder_blocks_and_partition
	  && targetm.have_named_sections
	  && JUMP_P (BB_END (jump_block))
	  && !any_condjump_p (BB_END (jump_block))
	  && (EDGE_SUCC (jump_block, 0)->flags & EDGE_CROSSING))
	add_reg_note (BB_END (jump_block), REG_CROSSING_JUMP, NULL_RTX);

      /* Wire edge in.  */
      new_edge = make_edge (e->src, jump_block, EDGE_FALLTHRU);
      new_edge->probability = probability;
      new_edge->count = count;

      /* Redirect old edge.  */
      redirect_edge_pred (e, jump_block);
      e->probability = REG_BR_PROB_BASE;

      /* If asm goto has any label refs to target's label,
	 add also edge from asm goto bb to target.  */
      if (asm_goto_edge)
	{
	  /* Split the weight evenly between the fallthru edge and the
	     extra asm-goto edge added below.  */
	  new_edge->probability /= 2;
	  new_edge->count /= 2;
	  jump_block->count /= 2;
	  jump_block->frequency /= 2;
	  new_edge = make_edge (new_edge->src, target,
				e->flags & ~EDGE_FALLTHRU);
	  new_edge->probability = probability - probability / 2;
	  new_edge->count = count - count / 2;
	}

      new_bb = jump_block;
    }
  else
    jump_block = e->src;

  /* Give the new jump the edge's goto_locus only when the edge has no
     associated block; otherwise emit it without a location.  */
  if (e->goto_locus && e->goto_block == NULL)
    loc = e->goto_locus;
  else
    loc = 0;
  e->flags &= ~EDGE_FALLTHRU;
  if (target == EXIT_BLOCK_PTR)
    {
#ifdef HAVE_return
	emit_jump_insn_after_setloc (gen_return (), BB_END (jump_block), loc);
#else
	gcc_unreachable ();
#endif
    }
  else
    {
      rtx label = block_label (target);
      emit_jump_insn_after_setloc (gen_jump (label), BB_END (jump_block), loc);
      JUMP_LABEL (BB_END (jump_block)) = label;
      LABEL_NUSES (label)++;
    }

  emit_barrier_after (BB_END (jump_block));
  redirect_edge_succ_nodup (e, target);

  /* Re-create the abnormal edge stripped off E at the top.  */
  if (abnormal_edge_flags)
    make_edge (src, target, abnormal_edge_flags);

  df_mark_solutions_dirty ();
  return new_bb;
}
1303 
/* Edge E is assumed to be fallthru edge.  Emit needed jump instruction
   (and possibly create new basic block) to make edge non-fallthru.
   Return newly created BB or NULL if none.  */

basic_block
force_nonfallthru (edge e)
{
  /* "Redirecting" E to its existing destination just materializes the
     explicit jump without changing where the edge goes.  */
  return force_nonfallthru_and_redirect (e, e->dest);
}
1313 
1314 /* Redirect edge even at the expense of creating new jump insn or
1315    basic block.  Return new basic block if created, NULL otherwise.
1316    Conversion must be possible.  */
1317 
1318 static basic_block
1319 rtl_redirect_edge_and_branch_force (edge e, basic_block target)
1320 {
1321   if (redirect_edge_and_branch (e, target)
1322       || e->dest == target)
1323     return NULL;
1324 
1325   /* In case the edge redirection failed, try to force it to be non-fallthru
1326      and redirect newly created simplejump.  */
1327   df_set_bb_dirty (e->src);
1328   return force_nonfallthru_and_redirect (e, target);
1329 }
1330 
/* The given edge should potentially be a fallthru edge.  If that is in
   fact true, delete the jump and barriers that are in the way.  */

static void
rtl_tidy_fallthru_edge (edge e)
{
  rtx q;
  basic_block b = e->src, c = b->next_bb;

  /* ??? In a late-running flow pass, other folks may have deleted basic
     blocks by nopping out blocks, leaving multiple BARRIERs between here
     and the target label. They ought to be chastised and fixed.

     We can also wind up with a sequence of undeletable labels between
     one block and the next.

     So search through a sequence of barriers, labels, and notes for
     the head of block C and assert that we really do fall through.  */

  for (q = NEXT_INSN (BB_END (b)); q != BB_HEAD (c); q = NEXT_INSN (q))
    if (INSN_P (q))
      return;

  /* Remove what will soon cease being the jump insn from the source block.
     If block B consisted only of this single jump, turn it into a deleted
     note.  */
  q = BB_END (b);
  if (JUMP_P (q)
      && onlyjump_p (q)
      && (any_uncondjump_p (q)
	  || single_succ_p (b)))
    {
#ifdef HAVE_cc0
      /* If this was a conditional jump, we need to also delete
	 the insn that set cc0.  */
      if (any_condjump_p (q) && only_sets_cc0_p (PREV_INSN (q)))
	q = PREV_INSN (q);
#endif

      /* Step back before the jump (and the cc0 setter, if any) so the
	 deletion below removes it/them.  */
      q = PREV_INSN (q);
    }

  /* Selectively unlink the sequence.  */
  if (q != PREV_INSN (BB_HEAD (c)))
    delete_insn_chain (NEXT_INSN (q), PREV_INSN (BB_HEAD (c)), false);

  e->flags |= EDGE_FALLTHRU;
}
1379 
/* Should move basic block BB after basic block AFTER.  NIY.  */

static bool
rtl_move_block_after (basic_block bb ATTRIBUTE_UNUSED,
		      basic_block after ATTRIBUTE_UNUSED)
{
  /* Not implemented for plain (non-cfglayout) RTL mode; returning false
     tells the caller the block was not moved.  */
  return false;
}
1388 
/* Split a (typically critical) edge.  Return the new block.
   The edge must not be abnormal.

   ??? The code generally expects to be called on critical edges.
   The case of a block ending in an unconditional jump to a
   block with multiple predecessors is not handled optimally.  */

static basic_block
rtl_split_edge (edge edge_in)
{
  basic_block bb;
  rtx before;

  /* Abnormal edges cannot be split.  */
  gcc_assert (!(edge_in->flags & EDGE_ABNORMAL));

  /* We are going to place the new block in front of edge destination.
     Avoid existence of fallthru predecessors.  */
  if ((edge_in->flags & EDGE_FALLTHRU) == 0)
    {
      edge e;
      edge_iterator ei;

      FOR_EACH_EDGE (e, ei, edge_in->dest->preds)
	if (e->flags & EDGE_FALLTHRU)
	  break;

      /* E is NULL here when the loop above finished without a break.  */
      if (e)
	force_nonfallthru (e);
    }

  /* Create the basic block note.  */
  if (edge_in->dest != EXIT_BLOCK_PTR)
    before = BB_HEAD (edge_in->dest);
  else
    before = NULL_RTX;

  /* If this is a fall through edge to the exit block, the blocks might be
     not adjacent, and the right place is the after the source.  */
  if (edge_in->flags & EDGE_FALLTHRU && edge_in->dest == EXIT_BLOCK_PTR)
    {
      before = NEXT_INSN (BB_END (edge_in->src));
      bb = create_basic_block (before, NULL, edge_in->src);
      BB_COPY_PARTITION (bb, edge_in->src);
    }
  else
    {
      bb = create_basic_block (before, NULL, edge_in->dest->prev_bb);
      /* ??? Why not edge_in->dest->prev_bb here?  */
      BB_COPY_PARTITION (bb, edge_in->dest);
    }

  make_single_succ_edge (bb, edge_in->dest, EDGE_FALLTHRU);

  /* For non-fallthru edges, we must adjust the predecessor's
     jump instruction to target our new block.  */
  if ((edge_in->flags & EDGE_FALLTHRU) == 0)
    {
      edge redirected = redirect_edge_and_branch (edge_in, bb);
      gcc_assert (redirected);
    }
  else
    {
      if (edge_in->src != ENTRY_BLOCK_PTR)
	{
	  /* For asm goto even splitting of fallthru edge might
	     need insn patching, as other labels might point to the
	     old label.  */
	  rtx last = BB_END (edge_in->src);
	  if (last
	      && JUMP_P (last)
	      && edge_in->dest != EXIT_BLOCK_PTR
	      && extract_asm_operands (PATTERN (last)) != NULL_RTX
	      && patch_jump_insn (last, before, bb))
	    df_set_bb_dirty (edge_in->src);
	}
      redirect_edge_succ (edge_in, bb);
    }

  return bb;
}
1470 
1471 /* Queue instructions for insertion on an edge between two basic blocks.
1472    The new instructions and basic blocks (if any) will not appear in the
1473    CFG until commit_edge_insertions is called.  */
1474 
1475 void
1476 insert_insn_on_edge (rtx pattern, edge e)
1477 {
1478   /* We cannot insert instructions on an abnormal critical edge.
1479      It will be easier to find the culprit if we die now.  */
1480   gcc_assert (!((e->flags & EDGE_ABNORMAL) && EDGE_CRITICAL_P (e)));
1481 
1482   if (e->insns.r == NULL_RTX)
1483     start_sequence ();
1484   else
1485     push_to_sequence (e->insns.r);
1486 
1487   emit_insn (pattern);
1488 
1489   e->insns.r = get_insns ();
1490   end_sequence ();
1491 }
1492 
/* Update the CFG for the instructions queued on edge E.  */

void
commit_one_edge_insertion (edge e)
{
  rtx before = NULL_RTX, after = NULL_RTX, insns, tmp, last;
  basic_block bb = NULL;

  /* Pull the insns off the edge now since the edge might go away.  */
  insns = e->insns.r;
  e->insns.r = NULL_RTX;

  /* NOTE(review): BEFORE and AFTER are both initialized to NULL above and
     nothing assigns them earlier, so this condition is always true here;
     apparently a leftover from an older scheme that could pre-seat the
     insertion point.  */
  if (!before && !after)
    {
      /* Figure out where to put these things.  If the destination has
	 one predecessor, insert there.  Except for the exit block.  */
      if (single_pred_p (e->dest) && e->dest != EXIT_BLOCK_PTR)
	{
	  bb = e->dest;

	  /* Get the location correct wrt a code label, and "nice" wrt
	     a basic block note, and before everything else.  */
	  tmp = BB_HEAD (bb);
	  if (LABEL_P (tmp))
	    tmp = NEXT_INSN (tmp);
	  if (NOTE_INSN_BASIC_BLOCK_P (tmp))
	    tmp = NEXT_INSN (tmp);
	  if (tmp == BB_HEAD (bb))
	    before = tmp;
	  else if (tmp)
	    after = PREV_INSN (tmp);
	  else
	    after = get_last_insn ();
	}

      /* If the source has one successor and the edge is not abnormal,
	 insert there.  Except for the entry block.  */
      else if ((e->flags & EDGE_ABNORMAL) == 0
	       && single_succ_p (e->src)
	       && e->src != ENTRY_BLOCK_PTR)
	{
	  bb = e->src;

	  /* It is possible to have a non-simple jump here.  Consider a target
	     where some forms of unconditional jumps clobber a register.  This
	     happens on the fr30 for example.

	     We know this block has a single successor, so we can just emit
	     the queued insns before the jump.  */
	  if (JUMP_P (BB_END (bb)))
	    before = BB_END (bb);
	  else
	    {
	      /* We'd better be fallthru, or we've lost track of
		 what's what.  */
	      gcc_assert (e->flags & EDGE_FALLTHRU);

	      after = BB_END (bb);
	    }
	}
      /* Otherwise we must split the edge.  */
      else
	{
	  bb = split_edge (e);
	  after = BB_END (bb);

	  if (flag_reorder_blocks_and_partition
	      && targetm.have_named_sections
	      && e->src != ENTRY_BLOCK_PTR
	      && BB_PARTITION (e->src) == BB_COLD_PARTITION
	      && !(e->flags & EDGE_CROSSING)
	      && JUMP_P (after)
	      && !any_condjump_p (after)
	      && (single_succ_edge (bb)->flags & EDGE_CROSSING))
	    add_reg_note (after, REG_CROSSING_JUMP, NULL_RTX);
	}
    }

  /* Now that we've found the spot, do the insertion.  */

  if (before)
    {
      emit_insn_before_noloc (insns, before, bb);
      last = prev_nonnote_insn (before);
    }
  else
    last = emit_insn_after_noloc (insns, after, bb);

  if (returnjump_p (last))
    {
      /* ??? Remove all outgoing edges from BB and add one for EXIT.
	 This is not currently a problem because this only happens
	 for the (single) epilogue, which already has a fallthru edge
	 to EXIT.  */

      e = single_succ_edge (bb);
      gcc_assert (e->dest == EXIT_BLOCK_PTR
		  && single_succ_p (bb) && (e->flags & EDGE_FALLTHRU));

      e->flags &= ~EDGE_FALLTHRU;
      emit_barrier_after (last);

      /* The return jump replaces the jump we inserted before; drop the
	 now-redundant original.  */
      if (before)
	delete_insn (before);
    }
  else
    gcc_assert (!JUMP_P (last));

  /* Mark the basic block for find_many_sub_basic_blocks.  */
  if (current_ir_type () != IR_RTL_CFGLAYOUT)
    bb->aux = &bb->aux;
}
1605 
1606 /* Update the CFG for all queued instructions.  */
1607 
1608 void
1609 commit_edge_insertions (void)
1610 {
1611   basic_block bb;
1612   sbitmap blocks;
1613   bool changed = false;
1614 
1615 #ifdef ENABLE_CHECKING
1616   verify_flow_info ();
1617 #endif
1618 
1619   FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
1620     {
1621       edge e;
1622       edge_iterator ei;
1623 
1624       FOR_EACH_EDGE (e, ei, bb->succs)
1625 	if (e->insns.r)
1626 	  {
1627 	    changed = true;
1628 	    commit_one_edge_insertion (e);
1629 	  }
1630     }
1631 
1632   if (!changed)
1633     return;
1634 
1635   /* In the old rtl CFG API, it was OK to insert control flow on an
1636      edge, apparently?  In cfglayout mode, this will *not* work, and
1637      the caller is responsible for making sure that control flow is
1638      valid at all times.  */
1639   if (current_ir_type () == IR_RTL_CFGLAYOUT)
1640     return;
1641 
1642   blocks = sbitmap_alloc (last_basic_block);
1643   sbitmap_zero (blocks);
1644   FOR_EACH_BB (bb)
1645     if (bb->aux)
1646       {
1647 	SET_BIT (blocks, bb->index);
1648 	/* Check for forgotten bb->aux values before commit_edge_insertions
1649 	   call.  */
1650 	gcc_assert (bb->aux == &bb->aux);
1651 	bb->aux = NULL;
1652       }
1653   find_many_sub_basic_blocks (blocks);
1654   sbitmap_free (blocks);
1655 }
1656 
1657 
1658 /* Print out RTL-specific basic block information (live information
1659    at start and end).  */
1660 
1661 static void
1662 rtl_dump_bb (basic_block bb, FILE *outf, int indent, int flags ATTRIBUTE_UNUSED)
1663 {
1664   rtx insn;
1665   rtx last;
1666   char *s_indent;
1667 
1668   s_indent = (char *) alloca ((size_t) indent + 1);
1669   memset (s_indent, ' ', (size_t) indent);
1670   s_indent[indent] = '\0';
1671 
1672   if (df)
1673     {
1674       df_dump_top (bb, outf);
1675       putc ('\n', outf);
1676     }
1677 
1678   if (bb->index != ENTRY_BLOCK && bb->index != EXIT_BLOCK)
1679     for (insn = BB_HEAD (bb), last = NEXT_INSN (BB_END (bb)); insn != last;
1680 	 insn = NEXT_INSN (insn))
1681       print_rtl_single (outf, insn);
1682 
1683   if (df)
1684     {
1685       df_dump_bottom (bb, outf);
1686       putc ('\n', outf);
1687     }
1688 
1689 }
1690 
/* Like print_rtl, but also print out live information for the start of each
   basic block.  */

void
print_rtl_with_bb (FILE *outf, const_rtx rtx_first)
{
  const_rtx tmp_rtx;
  if (rtx_first == 0)
    fprintf (outf, "(nil)\n");
  else
    {
      enum bb_state { NOT_IN_BB, IN_ONE_BB, IN_MULTIPLE_BB };
      int max_uid = get_max_uid ();
      /* Indexed by insn UID: the block starting/ending at that insn.  */
      basic_block *start = XCNEWVEC (basic_block, max_uid);
      basic_block *end = XCNEWVEC (basic_block, max_uid);
      /* Indexed by insn UID: how many blocks the insn belongs to.  */
      enum bb_state *in_bb_p = XCNEWVEC (enum bb_state, max_uid);

      basic_block bb;

      if (df)
	df_dump_start (outf);

      /* First pass: record for every insn UID which block (if any)
	 starts or ends there, and whether the insn is a member of one
	 block or several.  */
      FOR_EACH_BB_REVERSE (bb)
	{
	  rtx x;

	  start[INSN_UID (BB_HEAD (bb))] = bb;
	  end[INSN_UID (BB_END (bb))] = bb;
	  for (x = BB_HEAD (bb); x != NULL_RTX; x = NEXT_INSN (x))
	    {
	      enum bb_state state = IN_MULTIPLE_BB;

	      if (in_bb_p[INSN_UID (x)] == NOT_IN_BB)
		state = IN_ONE_BB;
	      in_bb_p[INSN_UID (x)] = state;

	      if (x == BB_END (bb))
		break;
	    }
	}

      /* Second pass: walk the full insn chain, printing basic block
	 banners (predecessor/successor edge lists and df info) around
	 the insns themselves.  */
      for (tmp_rtx = rtx_first; NULL != tmp_rtx; tmp_rtx = NEXT_INSN (tmp_rtx))
	{
	  int did_output;
	  if ((bb = start[INSN_UID (tmp_rtx)]) != NULL)
	    {
	      edge e;
	      edge_iterator ei;

	      fprintf (outf, ";; Start of basic block (");
	      FOR_EACH_EDGE (e, ei, bb->preds)
		fprintf (outf, " %d", e->src->index);
	      fprintf (outf, ") -> %d\n", bb->index);

	      if (df)
		{
		  df_dump_top (bb, outf);
		  putc ('\n', outf);
		}
	      FOR_EACH_EDGE (e, ei, bb->preds)
		{
		  fputs (";; Pred edge ", outf);
		  dump_edge_info (outf, e, 0);
		  fputc ('\n', outf);
		}
	    }

	  if (in_bb_p[INSN_UID (tmp_rtx)] == NOT_IN_BB
	      && !NOTE_P (tmp_rtx)
	      && !BARRIER_P (tmp_rtx))
	    fprintf (outf, ";; Insn is not within a basic block\n");
	  else if (in_bb_p[INSN_UID (tmp_rtx)] == IN_MULTIPLE_BB)
	    fprintf (outf, ";; Insn is in multiple basic blocks\n");

	  did_output = print_rtl_single (outf, tmp_rtx);

	  if ((bb = end[INSN_UID (tmp_rtx)]) != NULL)
	    {
	      edge e;
	      edge_iterator ei;

	      fprintf (outf, ";; End of basic block %d -> (", bb->index);
	      FOR_EACH_EDGE (e, ei, bb->succs)
		fprintf (outf, " %d", e->dest->index);
	      fprintf (outf, ")\n");

	      if (df)
		{
		  df_dump_bottom (bb, outf);
		  putc ('\n', outf);
		}
	      putc ('\n', outf);
	      FOR_EACH_EDGE (e, ei, bb->succs)
		{
		  fputs (";; Succ edge ", outf);
		  dump_edge_info (outf, e, 1);
		  fputc ('\n', outf);
		}
	    }
	  if (did_output)
	    putc ('\n', outf);
	}

      free (start);
      free (end);
      free (in_bb_p);
    }

  /* Dump any insns queued on the epilogue delay list as well.  */
  if (crtl->epilogue_delay_list != 0)
    {
      fprintf (outf, "\n;; Insns in epilogue delay list:\n\n");
      for (tmp_rtx = crtl->epilogue_delay_list; tmp_rtx != 0;
	   tmp_rtx = XEXP (tmp_rtx, 1))
	print_rtl_single (outf, XEXP (tmp_rtx, 0));
    }
}
1807 
1808 void
1809 update_br_prob_note (basic_block bb)
1810 {
1811   rtx note;
1812   if (!JUMP_P (BB_END (bb)))
1813     return;
1814   note = find_reg_note (BB_END (bb), REG_BR_PROB, NULL_RTX);
1815   if (!note || INTVAL (XEXP (note, 0)) == BRANCH_EDGE (bb)->probability)
1816     return;
1817   XEXP (note, 0) = GEN_INT (BRANCH_EDGE (bb)->probability);
1818 }
1819 
1820 /* Get the last insn associated with block BB (that includes barriers and
1821    tablejumps after BB).  */
1822 rtx
1823 get_last_bb_insn (basic_block bb)
1824 {
1825   rtx tmp;
1826   rtx end = BB_END (bb);
1827 
1828   /* Include any jump table following the basic block.  */
1829   if (tablejump_p (end, NULL, &tmp))
1830     end = tmp;
1831 
1832   /* Include any barriers that may follow the basic block.  */
1833   tmp = next_nonnote_insn_bb (end);
1834   while (tmp && BARRIER_P (tmp))
1835     {
1836       end = tmp;
1837       tmp = next_nonnote_insn_bb (end);
1838     }
1839 
1840   return end;
1841 }
1842 
1843 /* Verify the CFG and RTL consistency common for both underlying RTL and
1844    cfglayout RTL.
1845 
1846    Currently it does following checks:
1847 
1848    - overlapping of basic blocks
1849    - insns with wrong BLOCK_FOR_INSN pointers
1850    - headers of basic blocks (the NOTE_INSN_BASIC_BLOCK note)
1851    - tails of basic blocks (ensure that boundary is necessary)
1852    - scans body of the basic block for JUMP_INSN, CODE_LABEL
1853      and NOTE_INSN_BASIC_BLOCK
1854    - verify that no fall_thru edge crosses hot/cold partition boundaries
1855    - verify that there are no pending RTL branch predictions
1856 
1857    In future it can be extended to check a lot of other stuff as well
1858    (reachability of basic blocks, life information, etc. etc.).  */
1859 
1860 static int
1861 rtl_verify_flow_info_1 (void)
1862 {
1863   rtx x;
1864   int err = 0;
1865   basic_block bb;
1866 
1867   /* Check the general integrity of the basic blocks.  */
1868   FOR_EACH_BB_REVERSE (bb)
1869     {
1870       rtx insn;
1871 
1872       if (!(bb->flags & BB_RTL))
1873 	{
1874 	  error ("BB_RTL flag not set for block %d", bb->index);
1875 	  err = 1;
1876 	}
1877 
1878       FOR_BB_INSNS (bb, insn)
1879 	if (BLOCK_FOR_INSN (insn) != bb)
1880 	  {
1881 	    error ("insn %d basic block pointer is %d, should be %d",
1882 		   INSN_UID (insn),
1883 		   BLOCK_FOR_INSN (insn) ? BLOCK_FOR_INSN (insn)->index : 0,
1884 		   bb->index);
1885 	    err = 1;
1886 	  }
1887 
1888       for (insn = bb->il.rtl->header; insn; insn = NEXT_INSN (insn))
1889 	if (!BARRIER_P (insn)
1890 	    && BLOCK_FOR_INSN (insn) != NULL)
1891 	  {
1892 	    error ("insn %d in header of bb %d has non-NULL basic block",
1893 		   INSN_UID (insn), bb->index);
1894 	    err = 1;
1895 	  }
1896       for (insn = bb->il.rtl->footer; insn; insn = NEXT_INSN (insn))
1897 	if (!BARRIER_P (insn)
1898 	    && BLOCK_FOR_INSN (insn) != NULL)
1899 	  {
1900 	    error ("insn %d in footer of bb %d has non-NULL basic block",
1901 		   INSN_UID (insn), bb->index);
1902 	    err = 1;
1903 	  }
1904     }
1905 
1906   /* Now check the basic blocks (boundaries etc.) */
1907   FOR_EACH_BB_REVERSE (bb)
1908     {
1909       int n_fallthru = 0, n_eh = 0, n_call = 0, n_abnormal = 0, n_branch = 0;
1910       edge e, fallthru = NULL;
1911       rtx note;
1912       edge_iterator ei;
1913 
1914       if (JUMP_P (BB_END (bb))
1915 	  && (note = find_reg_note (BB_END (bb), REG_BR_PROB, NULL_RTX))
1916 	  && EDGE_COUNT (bb->succs) >= 2
1917 	  && any_condjump_p (BB_END (bb)))
1918 	{
1919 	  if (INTVAL (XEXP (note, 0)) != BRANCH_EDGE (bb)->probability
1920 	      && profile_status != PROFILE_ABSENT)
1921 	    {
1922 	      error ("verify_flow_info: REG_BR_PROB does not match cfg %wi %i",
1923 		     INTVAL (XEXP (note, 0)), BRANCH_EDGE (bb)->probability);
1924 	      err = 1;
1925 	    }
1926 	}
1927       FOR_EACH_EDGE (e, ei, bb->succs)
1928 	{
1929 	  if (e->flags & EDGE_FALLTHRU)
1930 	    {
1931 	      n_fallthru++, fallthru = e;
1932 	      if ((e->flags & EDGE_CROSSING)
1933 		  || (BB_PARTITION (e->src) != BB_PARTITION (e->dest)
1934 		      && e->src != ENTRY_BLOCK_PTR
1935 		      && e->dest != EXIT_BLOCK_PTR))
1936 	    {
1937 		  error ("fallthru edge crosses section boundary (bb %i)",
1938 			 e->src->index);
1939 		  err = 1;
1940 		}
1941 	    }
1942 
1943 	  if ((e->flags & ~(EDGE_DFS_BACK
1944 			    | EDGE_CAN_FALLTHRU
1945 			    | EDGE_IRREDUCIBLE_LOOP
1946 			    | EDGE_LOOP_EXIT
1947 			    | EDGE_CROSSING)) == 0)
1948 	    n_branch++;
1949 
1950 	  if (e->flags & EDGE_ABNORMAL_CALL)
1951 	    n_call++;
1952 
1953 	  if (e->flags & EDGE_EH)
1954 	    n_eh++;
1955 	  else if (e->flags & EDGE_ABNORMAL)
1956 	    n_abnormal++;
1957 	}
1958 
1959       if (n_eh && !find_reg_note (BB_END (bb), REG_EH_REGION, NULL_RTX))
1960 	{
1961 	  error ("missing REG_EH_REGION note in the end of bb %i", bb->index);
1962 	  err = 1;
1963 	}
1964       if (n_eh > 1)
1965 	{
1966 	  error ("too many eh edges %i", bb->index);
1967 	  err = 1;
1968 	}
1969       if (n_branch
1970 	  && (!JUMP_P (BB_END (bb))
1971 	      || (n_branch > 1 && (any_uncondjump_p (BB_END (bb))
1972 				   || any_condjump_p (BB_END (bb))))))
1973 	{
1974 	  error ("too many outgoing branch edges from bb %i", bb->index);
1975 	  err = 1;
1976 	}
1977       if (n_fallthru && any_uncondjump_p (BB_END (bb)))
1978 	{
1979 	  error ("fallthru edge after unconditional jump %i", bb->index);
1980 	  err = 1;
1981 	}
1982       if (n_branch != 1 && any_uncondjump_p (BB_END (bb)))
1983 	{
1984 	  error ("wrong number of branch edges after unconditional jump %i",
1985 		 bb->index);
1986 	  err = 1;
1987 	}
1988       if (n_branch != 1 && any_condjump_p (BB_END (bb))
1989 	  && JUMP_LABEL (BB_END (bb)) != BB_HEAD (fallthru->dest))
1990 	{
1991 	  error ("wrong amount of branch edges after conditional jump %i",
1992 		 bb->index);
1993 	  err = 1;
1994 	}
1995       if (n_call && !CALL_P (BB_END (bb)))
1996 	{
1997 	  error ("call edges for non-call insn in bb %i", bb->index);
1998 	  err = 1;
1999 	}
2000       if (n_abnormal
2001 	  && (!CALL_P (BB_END (bb)) && n_call != n_abnormal)
2002 	  && (!JUMP_P (BB_END (bb))
2003 	      || any_condjump_p (BB_END (bb))
2004 	      || any_uncondjump_p (BB_END (bb))))
2005 	{
2006 	  error ("abnormal edges for no purpose in bb %i", bb->index);
2007 	  err = 1;
2008 	}
2009 
2010       for (x = BB_HEAD (bb); x != NEXT_INSN (BB_END (bb)); x = NEXT_INSN (x))
2011 	/* We may have a barrier inside a basic block before dead code
2012 	   elimination.  There is no BLOCK_FOR_INSN field in a barrier.  */
2013 	if (!BARRIER_P (x) && BLOCK_FOR_INSN (x) != bb)
2014 	  {
2015 	    debug_rtx (x);
2016 	    if (! BLOCK_FOR_INSN (x))
2017 	      error
2018 		("insn %d inside basic block %d but block_for_insn is NULL",
2019 		 INSN_UID (x), bb->index);
2020 	    else
2021 	      error
2022 		("insn %d inside basic block %d but block_for_insn is %i",
2023 		 INSN_UID (x), bb->index, BLOCK_FOR_INSN (x)->index);
2024 
2025 	    err = 1;
2026 	  }
2027 
2028       /* OK pointers are correct.  Now check the header of basic
2029 	 block.  It ought to contain optional CODE_LABEL followed
2030 	 by NOTE_BASIC_BLOCK.  */
2031       x = BB_HEAD (bb);
2032       if (LABEL_P (x))
2033 	{
2034 	  if (BB_END (bb) == x)
2035 	    {
2036 	      error ("NOTE_INSN_BASIC_BLOCK is missing for block %d",
2037 		     bb->index);
2038 	      err = 1;
2039 	    }
2040 
2041 	  x = NEXT_INSN (x);
2042 	}
2043 
2044       if (!NOTE_INSN_BASIC_BLOCK_P (x) || NOTE_BASIC_BLOCK (x) != bb)
2045 	{
2046 	  error ("NOTE_INSN_BASIC_BLOCK is missing for block %d",
2047 		 bb->index);
2048 	  err = 1;
2049 	}
2050 
2051       if (BB_END (bb) == x)
2052 	/* Do checks for empty blocks here.  */
2053 	;
2054       else
2055 	for (x = NEXT_INSN (x); x; x = NEXT_INSN (x))
2056 	  {
2057 	    if (NOTE_INSN_BASIC_BLOCK_P (x))
2058 	      {
2059 		error ("NOTE_INSN_BASIC_BLOCK %d in middle of basic block %d",
2060 		       INSN_UID (x), bb->index);
2061 		err = 1;
2062 	      }
2063 
2064 	    if (x == BB_END (bb))
2065 	      break;
2066 
2067 	    if (control_flow_insn_p (x))
2068 	      {
2069 		error ("in basic block %d:", bb->index);
2070 		fatal_insn ("flow control insn inside a basic block", x);
2071 	      }
2072 	  }
2073     }
2074 
2075   /* Clean up.  */
2076   return err;
2077 }
2078 
2079 /* Verify the CFG and RTL consistency common for both underlying RTL and
2080    cfglayout RTL.
2081 
2082    Currently it does following checks:
2083    - all checks of rtl_verify_flow_info_1
2084    - test head/end pointers
2085    - check that all insns are in the basic blocks
2086      (except the switch handling code, barriers and notes)
2087    - check that all returns are followed by barriers
2088    - check that all fallthru edge points to the adjacent blocks.  */
2089 
static int
rtl_verify_flow_info (void)
{
  basic_block bb;
  int err = rtl_verify_flow_info_1 ();
  rtx x;
  /* Blocks are visited in reverse order, so everything between
     LAST_HEAD (initially the last insn) and the current block's end
     must lie outside any basic block.  */
  rtx last_head = get_last_insn ();
  basic_block *bb_info;
  int num_bb_notes;
  const rtx rtx_first = get_insns ();
  basic_block last_bb_seen = ENTRY_BLOCK_PTR, curr_bb = NULL;
  const int max_uid = get_max_uid ();

  /* Map from INSN_UID to the block the insn was seen in; used to catch
     insns that appear in more than one basic block.  */
  bb_info = XCNEWVEC (basic_block, max_uid);

  FOR_EACH_BB_REVERSE (bb)
    {
      edge e;
      edge_iterator ei;
      rtx head = BB_HEAD (bb);
      rtx end = BB_END (bb);

      /* Walk backwards from LAST_HEAD looking for this block's end.  */
      for (x = last_head; x != NULL_RTX; x = PREV_INSN (x))
	{
	  /* Verify the end of the basic block is in the INSN chain.  */
	  if (x == end)
	    break;

	  /* And that the code outside of basic blocks has NULL bb field.  */
	if (!BARRIER_P (x)
	    && BLOCK_FOR_INSN (x) != NULL)
	  {
	    error ("insn %d outside of basic blocks has non-NULL bb field",
		   INSN_UID (x));
	    err = 1;
	  }
	}

      if (!x)
	{
	  error ("end insn %d for block %d not found in the insn stream",
		 INSN_UID (end), bb->index);
	  err = 1;
	}

      /* Work backwards from the end to the head of the basic block
	 to verify the head is in the RTL chain.  */
      for (; x != NULL_RTX; x = PREV_INSN (x))
	{
	  /* While walking over the insn chain, verify insns appear
	     in only one basic block.  */
	  if (bb_info[INSN_UID (x)] != NULL)
	    {
	      error ("insn %d is in multiple basic blocks (%d and %d)",
		     INSN_UID (x), bb->index, bb_info[INSN_UID (x)]->index);
	      err = 1;
	    }

	  bb_info[INSN_UID (x)] = bb;

	  if (x == head)
	    break;
	}
      if (!x)
	{
	  error ("head insn %d for block %d not found in the insn stream",
		 INSN_UID (head), bb->index);
	  err = 1;
	}

      /* The next (earlier) block must end strictly before this head.  */
      last_head = PREV_INSN (x);

      FOR_EACH_EDGE (e, ei, bb->succs)
	if (e->flags & EDGE_FALLTHRU)
	  break;
      if (!e)
	{
	  rtx insn;

	  /* Ensure existence of barrier in BB with no fallthru edges.  */
	  for (insn = NEXT_INSN (BB_END (bb)); ; insn = NEXT_INSN (insn))
	    {
	      if (!insn || NOTE_INSN_BASIC_BLOCK_P (insn))
		{
		  error ("missing barrier after block %i", bb->index);
		  err = 1;
		  break;
		}
	      if (BARRIER_P (insn))
		break;
	    }
	}
      else if (e->src != ENTRY_BLOCK_PTR
	       && e->dest != EXIT_BLOCK_PTR)
	{
	  rtx insn;

	  /* A fallthru edge must go to the immediately following block,
	     and only notes/labels may appear in between.  */
	  if (e->src->next_bb != e->dest)
	    {
	      error
		("verify_flow_info: Incorrect blocks for fallthru %i->%i",
		 e->src->index, e->dest->index);
	      err = 1;
	    }
	  else
	    for (insn = NEXT_INSN (BB_END (e->src)); insn != BB_HEAD (e->dest);
		 insn = NEXT_INSN (insn))
	      if (BARRIER_P (insn) || INSN_P (insn))
		{
		  error ("verify_flow_info: Incorrect fallthru %i->%i",
			 e->src->index, e->dest->index);
		  fatal_insn ("wrong insn in the fallthru edge", insn);
		  err = 1;
		}
	}
    }

  /* LAST_HEAD now points just before the first block's head; anything
     remaining toward the start of the insn chain is outside all blocks.  */
  for (x = last_head; x != NULL_RTX; x = PREV_INSN (x))
    {
      /* Check that the code before the first basic block has NULL
	 bb field.  */
      if (!BARRIER_P (x)
	  && BLOCK_FOR_INSN (x) != NULL)
	{
	  error ("insn %d outside of basic blocks has non-NULL bb field",
		 INSN_UID (x));
	  err = 1;
	}
    }
  free (bb_info);

  num_bb_notes = 0;
  last_bb_seen = ENTRY_BLOCK_PTR;

  /* Forward scan of the whole insn stream: verify basic block notes
     appear in block order, classify insns found outside any block, and
     check return/barrier pairing.  */
  for (x = rtx_first; x; x = NEXT_INSN (x))
    {
      if (NOTE_INSN_BASIC_BLOCK_P (x))
	{
	  bb = NOTE_BASIC_BLOCK (x);

	  num_bb_notes++;
	  if (bb != last_bb_seen->next_bb)
	    internal_error ("basic blocks not laid down consecutively");

	  curr_bb = last_bb_seen = bb;
	}

      if (!curr_bb)
	{
	  switch (GET_CODE (x))
	    {
	    case BARRIER:
	    case NOTE:
	      break;

	    case CODE_LABEL:
	      /* An addr_vec is placed outside any basic block.  */
	      if (NEXT_INSN (x)
		  && JUMP_TABLE_DATA_P (NEXT_INSN (x)))
		x = NEXT_INSN (x);

	      /* But in any case, non-deletable labels can appear anywhere.  */
	      break;

	    default:
	      fatal_insn ("insn outside basic block", x);
	    }
	}

      if (JUMP_P (x)
	  && returnjump_p (x) && ! condjump_p (x)
	  && ! (next_nonnote_insn (x) && BARRIER_P (next_nonnote_insn (x))))
	    fatal_insn ("return not followed by barrier", x);
      if (curr_bb && x == BB_END (curr_bb))
	curr_bb = NULL;
    }

  if (num_bb_notes != n_basic_blocks - NUM_FIXED_BLOCKS)
    internal_error
      ("number of bb notes in insn chain (%d) != n_basic_blocks (%d)",
       num_bb_notes, n_basic_blocks);

   return err;
}
2274 
2275 /* Assume that the preceding pass has possibly eliminated jump instructions
2276    or converted the unconditional jumps.  Eliminate the edges from CFG.
2277    Return true if any edges are eliminated.  */
2278 
bool
purge_dead_edges (basic_block bb)
{
  edge e;
  rtx insn = BB_END (bb), note;
  bool purged = false;
  bool found;
  edge_iterator ei;

  /* Skip trailing debug insns and notes so INSN is the last real insn
     of the block for the checks below.  */
  if (DEBUG_INSN_P (insn) && insn != BB_HEAD (bb))
    do
      insn = PREV_INSN (insn);
    while ((DEBUG_INSN_P (insn) || NOTE_P (insn)) && insn != BB_HEAD (bb));

  /* If this instruction cannot trap, remove REG_EH_REGION notes.  */
  if (NONJUMP_INSN_P (insn)
      && (note = find_reg_note (insn, REG_EH_REGION, NULL)))
    {
      rtx eqnote;

      if (! may_trap_p (PATTERN (insn))
	  || ((eqnote = find_reg_equal_equiv_note (insn))
	      && ! may_trap_p (XEXP (eqnote, 0))))
	remove_note (insn, note);
    }

  /* Cleanup abnormal edges caused by exceptions or non-local gotos.  */
  for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
    {
      bool remove = false;

      /* There are three types of edges we need to handle correctly here: EH
	 edges, abnormal call EH edges, and abnormal call non-EH edges.  The
	 latter can appear when nonlocal gotos are used.  */
      if (e->flags & EDGE_ABNORMAL_CALL)
	{
	  if (!CALL_P (insn))
	    remove = true;
	  else if (can_nonlocal_goto (insn))
	    ;
	  else if ((e->flags & EDGE_EH) && can_throw_internal (insn))
	    ;
	  else
	    remove = true;
	}
      else if (e->flags & EDGE_EH)
	remove = !can_throw_internal (insn);

      if (remove)
	{
	  /* Note: ei is only advanced when no edge was removed, since
	     remove_edge compacts the successor vector in place.  */
	  remove_edge (e);
	  df_set_bb_dirty (bb);
	  purged = true;
	}
      else
	ei_next (&ei);
    }

  if (JUMP_P (insn))
    {
      rtx note;
      edge b,f;
      edge_iterator ei;

      /* We do care only about conditional jumps and simplejumps.  */
      if (!any_condjump_p (insn)
	  && !returnjump_p (insn)
	  && !simplejump_p (insn))
	return purged;

      /* Branch probability/prediction notes are defined only for
	 condjumps.  We've possibly turned condjump into simplejump.  */
      if (simplejump_p (insn))
	{
	  note = find_reg_note (insn, REG_BR_PROB, NULL);
	  if (note)
	    remove_note (insn, note);
	  while ((note = find_reg_note (insn, REG_BR_PRED, NULL)))
	    remove_note (insn, note);
	}

      /* Remove every successor edge that the jump can no longer take.  */
      for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
	{
	  /* Avoid abnormal flags to leak from computed jumps turned
	     into simplejumps.  */

	  e->flags &= ~EDGE_ABNORMAL;

	  /* See if this edge is one we should keep.  */
	  if ((e->flags & EDGE_FALLTHRU) && any_condjump_p (insn))
	    /* A conditional jump can fall through into the next
	       block, so we should keep the edge.  */
	    {
	      ei_next (&ei);
	      continue;
	    }
	  else if (e->dest != EXIT_BLOCK_PTR
		   && BB_HEAD (e->dest) == JUMP_LABEL (insn))
	    /* If the destination block is the target of the jump,
	       keep the edge.  */
	    {
	      ei_next (&ei);
	      continue;
	    }
	  else if (e->dest == EXIT_BLOCK_PTR && returnjump_p (insn))
	    /* If the destination block is the exit block, and this
	       instruction is a return, then keep the edge.  */
	    {
	      ei_next (&ei);
	      continue;
	    }
	  else if ((e->flags & EDGE_EH) && can_throw_internal (insn))
	    /* Keep the edges that correspond to exceptions thrown by
	       this instruction and rematerialize the EDGE_ABNORMAL
	       flag we just cleared above.  */
	    {
	      e->flags |= EDGE_ABNORMAL;
	      ei_next (&ei);
	      continue;
	    }

	  /* We do not need this edge.  */
	  df_set_bb_dirty (bb);
	  purged = true;
	  remove_edge (e);
	}

      if (EDGE_COUNT (bb->succs) == 0 || !purged)
	return purged;

      if (dump_file)
	fprintf (dump_file, "Purged edges from bb %i\n", bb->index);

      if (!optimize)
	return purged;

      /* Redistribute probabilities.  */
      if (single_succ_p (bb))
	{
	  single_succ_edge (bb)->probability = REG_BR_PROB_BASE;
	  single_succ_edge (bb)->count = bb->count;
	}
      else
	{
	  /* Two successors remain: split the probability between the
	     branch and fallthru edges per the REG_BR_PROB note.  */
	  note = find_reg_note (insn, REG_BR_PROB, NULL);
	  if (!note)
	    return purged;

	  b = BRANCH_EDGE (bb);
	  f = FALLTHRU_EDGE (bb);
	  b->probability = INTVAL (XEXP (note, 0));
	  f->probability = REG_BR_PROB_BASE - b->probability;
	  b->count = bb->count * b->probability / REG_BR_PROB_BASE;
	  f->count = bb->count * f->probability / REG_BR_PROB_BASE;
	}

      return purged;
    }
  else if (CALL_P (insn) && SIBLING_CALL_P (insn))
    {
      /* First, there should not be any EH or ABCALL edges resulting
	 from non-local gotos and the like.  If there were, we shouldn't
	 have created the sibcall in the first place.  Second, there
	 should of course never have been a fallthru edge.  */
      gcc_assert (single_succ_p (bb));
      gcc_assert (single_succ_edge (bb)->flags
		  == (EDGE_SIBCALL | EDGE_ABNORMAL));

      return 0;
    }

  /* If we don't see a jump insn, we don't know exactly why the block would
     have been broken at this point.  Look for a simple, non-fallthru edge,
     as these are only created by conditional branches.  If we find such an
     edge we know that there used to be a jump here and can then safely
     remove all non-fallthru edges.  */
  found = false;
  FOR_EACH_EDGE (e, ei, bb->succs)
    if (! (e->flags & (EDGE_COMPLEX | EDGE_FALLTHRU)))
      {
	found = true;
	break;
      }

  if (!found)
    return purged;

  /* Remove all but the fake and fallthru edges.  The fake edge may be
     the only successor for this block in the case of noreturn
     calls.  */
  for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
    {
      if (!(e->flags & (EDGE_FALLTHRU | EDGE_FAKE)))
	{
	  df_set_bb_dirty (bb);
	  remove_edge (e);
	  purged = true;
	}
      else
	ei_next (&ei);
    }

  gcc_assert (single_succ_p (bb));

  single_succ_edge (bb)->probability = REG_BR_PROB_BASE;
  single_succ_edge (bb)->count = bb->count;

  if (dump_file)
    fprintf (dump_file, "Purged non-fallthru edges from bb %i\n",
	     bb->index);
  return purged;
}
2491 
2492 /* Search all basic blocks for potentially dead edges and purge them.  Return
2493    true if some edge has been eliminated.  */
2494 
2495 bool
2496 purge_all_dead_edges (void)
2497 {
2498   int purged = false;
2499   basic_block bb;
2500 
2501   FOR_EACH_BB (bb)
2502     {
2503       bool purged_here = purge_dead_edges (bb);
2504 
2505       purged |= purged_here;
2506     }
2507 
2508   return purged;
2509 }
2510 
2511 /* Same as split_block but update cfg_layout structures.  */
2512 
2513 static basic_block
2514 cfg_layout_split_block (basic_block bb, void *insnp)
2515 {
2516   rtx insn = (rtx) insnp;
2517   basic_block new_bb = rtl_split_block (bb, insn);
2518 
2519   new_bb->il.rtl->footer = bb->il.rtl->footer;
2520   bb->il.rtl->footer = NULL;
2521 
2522   return new_bb;
2523 }
2524 
/* Redirect edge E to DEST.  Return the redirected edge on success,
   NULL if the redirection cannot be done (EH/abnormal-call edges).  */
/* Redirect edge E to DEST in cfglayout mode.  Return the redirected
   edge on success, NULL if the redirection cannot be performed (EH or
   abnormal-call edges are never redirected here).  */
static edge
cfg_layout_redirect_edge_and_branch (edge e, basic_block dest)
{
  basic_block src = e->src;
  edge ret;

  if (e->flags & (EDGE_ABNORMAL_CALL | EDGE_EH))
    return NULL;

  /* Already going to DEST: nothing to do.  */
  if (e->dest == dest)
    return e;

  /* Try the cheap route first: replace the jump at the end of SRC.  */
  if (e->src != ENTRY_BLOCK_PTR
      && (ret = try_redirect_by_replacing_jump (e, dest, true)))
    {
      df_set_bb_dirty (src);
      return ret;
    }

  /* A plain fallthru edge out of the entry block can simply be
     repointed; no insns need to change.  */
  if (e->src == ENTRY_BLOCK_PTR
      && (e->flags & EDGE_FALLTHRU) && !(e->flags & EDGE_COMPLEX))
    {
      if (dump_file)
	fprintf (dump_file, "Redirecting entry edge from bb %i to %i\n",
		 e->src->index, dest->index);

      df_set_bb_dirty (e->src);
      redirect_edge_succ (e, dest);
      return e;
    }

  /* Redirect_edge_and_branch may decide to turn branch into fallthru edge
     in the case the basic block appears to be in sequence.  Avoid this
     transformation.  */

  if (e->flags & EDGE_FALLTHRU)
    {
      /* Redirect any branch edges unified with the fallthru one.  */
      if (JUMP_P (BB_END (src))
	  && label_is_jump_target_p (BB_HEAD (e->dest),
				     BB_END (src)))
	{
	  edge redirected;

	  if (dump_file)
	    fprintf (dump_file, "Fallthru edge unified with branch "
		     "%i->%i redirected to %i\n",
		     e->src->index, e->dest->index, dest->index);
	  /* Temporarily drop the fallthru flag so redirect_branch_edge
	     treats E as a branch edge, then restore it.  */
	  e->flags &= ~EDGE_FALLTHRU;
	  redirected = redirect_branch_edge (e, dest);
	  gcc_assert (redirected);
	  e->flags |= EDGE_FALLTHRU;
	  df_set_bb_dirty (e->src);
	  return e;
	}
      /* In case we are redirecting fallthru edge to the branch edge
	 of conditional jump, remove it.  */
      if (EDGE_COUNT (src->succs) == 2)
	{
	  /* Find the edge that is different from E.  */
	  edge s = EDGE_SUCC (src, EDGE_SUCC (src, 0) == e);

	  if (s->dest == dest
	      && any_condjump_p (BB_END (src))
	      && onlyjump_p (BB_END (src)))
	    delete_insn (BB_END (src));
	}
      ret = redirect_edge_succ_nodup (e, dest);
      if (dump_file)
	fprintf (dump_file, "Fallthru edge %i->%i redirected to %i\n",
		 e->src->index, e->dest->index, dest->index);
    }
  else
    ret = redirect_branch_edge (e, dest);

  /* We don't want simplejumps in the insn stream during cfglayout.  */
  gcc_assert (!simplejump_p (BB_END (src)));

  df_set_bb_dirty (src);
  return ret;
}
2607 
2608 /* Simple wrapper as we always can redirect fallthru edges.  */
2609 static basic_block
2610 cfg_layout_redirect_edge_and_branch_force (edge e, basic_block dest)
2611 {
2612   edge redirected = cfg_layout_redirect_edge_and_branch (e, dest);
2613 
2614   gcc_assert (redirected);
2615   return NULL;
2616 }
2617 
2618 /* Same as delete_basic_block but update cfg_layout structures.  */
2619 
/* Same as delete_basic_block but update cfg_layout structures: splice
   BB's out-of-stream header and footer insns back into the insn chain,
   delete the block, then move any surviving insns into the header of
   the following block (or the function footer for the last block).  */
static void
cfg_layout_delete_block (basic_block bb)
{
  /* remaints: the chain of insns left over after the block is deleted
     (sic; historical spelling kept).  TO: where that chain is stored.  */
  rtx insn, next, prev = PREV_INSN (BB_HEAD (bb)), *to, remaints;

  if (bb->il.rtl->header)
    {
      /* Link the header chain into the insn stream just before
	 BB_HEAD (bb).  */
      next = BB_HEAD (bb);
      if (prev)
	NEXT_INSN (prev) = bb->il.rtl->header;
      else
	set_first_insn (bb->il.rtl->header);
      PREV_INSN (bb->il.rtl->header) = prev;
      insn = bb->il.rtl->header;
      while (NEXT_INSN (insn))
	insn = NEXT_INSN (insn);
      NEXT_INSN (insn) = next;
      PREV_INSN (next) = insn;
    }
  next = NEXT_INSN (BB_END (bb));
  if (bb->il.rtl->footer)
    {
      /* Drop barriers from the footer; stop at the first label, which
	 (with whatever follows) stays in the footer.  */
      insn = bb->il.rtl->footer;
      while (insn)
	{
	  if (BARRIER_P (insn))
	    {
	      if (PREV_INSN (insn))
		NEXT_INSN (PREV_INSN (insn)) = NEXT_INSN (insn);
	      else
		bb->il.rtl->footer = NEXT_INSN (insn);
	      if (NEXT_INSN (insn))
		PREV_INSN (NEXT_INSN (insn)) = PREV_INSN (insn);
	    }
	  if (LABEL_P (insn))
	    break;
	  insn = NEXT_INSN (insn);
	}
      if (bb->il.rtl->footer)
	{
	  /* Link the remaining footer insns into the stream after
	     BB_END (bb).  */
	  insn = BB_END (bb);
	  NEXT_INSN (insn) = bb->il.rtl->footer;
	  PREV_INSN (bb->il.rtl->footer) = insn;
	  while (NEXT_INSN (insn))
	    insn = NEXT_INSN (insn);
	  NEXT_INSN (insn) = next;
	  if (next)
	    PREV_INSN (next) = insn;
	  else
	    set_last_insn (insn);
	}
    }
  if (bb->next_bb != EXIT_BLOCK_PTR)
    to = &bb->next_bb->il.rtl->header;
  else
    to = &cfg_layout_function_footer;

  rtl_delete_block (bb);

  /* PREV/NEXT bracketed the block before deletion; recompute the first
     and last insns that survived in between.  */
  if (prev)
    prev = NEXT_INSN (prev);
  else
    prev = get_insns ();
  if (next)
    next = PREV_INSN (next);
  else
    next = get_last_insn ();

  if (next && NEXT_INSN (next) != prev)
    {
      /* Some insns survived (notes etc.); unlink them and prepend the
	 chain to *TO.  */
      remaints = unlink_insn_chain (prev, next);
      insn = remaints;
      while (NEXT_INSN (insn))
	insn = NEXT_INSN (insn);
      NEXT_INSN (insn) = *to;
      if (*to)
	PREV_INSN (*to) = insn;
      *to = remaints;
    }
}
2700 
2701 /* Return true when blocks A and B can be safely merged.  */
2702 
2703 static bool
2704 cfg_layout_can_merge_blocks_p (basic_block a, basic_block b)
2705 {
2706   /* If we are partitioning hot/cold basic blocks, we don't want to
2707      mess up unconditional or indirect jumps that cross between hot
2708      and cold sections.
2709 
2710      Basic block partitioning may result in some jumps that appear to
2711      be optimizable (or blocks that appear to be mergeable), but which really
2712      must be left untouched (they are required to make it safely across
2713      partition boundaries).  See  the comments at the top of
2714      bb-reorder.c:partition_hot_cold_basic_blocks for complete details.  */
2715 
2716   if (BB_PARTITION (a) != BB_PARTITION (b))
2717     return false;
2718 
2719   /* There must be exactly one edge in between the blocks.  */
2720   return (single_succ_p (a)
2721 	  && single_succ (a) == b
2722 	  && single_pred_p (b) == 1
2723 	  && a != b
2724 	  /* Must be simple edge.  */
2725 	  && !(single_succ_edge (a)->flags & EDGE_COMPLEX)
2726 	  && a != ENTRY_BLOCK_PTR && b != EXIT_BLOCK_PTR
2727 	  /* If the jump insn has side effects, we can't kill the edge.
2728 	     When not optimizing, try_redirect_by_replacing_jump will
2729 	     not allow us to redirect an edge by replacing a table jump.  */
2730 	  && (!JUMP_P (BB_END (a))
2731 	      || ((!optimize || reload_completed)
2732 		  ? simplejump_p (BB_END (a)) : onlyjump_p (BB_END (a)))));
2733 }
2734 
2735 /* Merge block A and B.  The blocks must be mergeable.  */
2736 
/* Merge block A and B.  The blocks must be mergeable (see
   cfg_layout_can_merge_blocks_p).  B's insns, header and footer are
   folded into A; B itself is deleted from the dataflow info.  */
static void
cfg_layout_merge_blocks (basic_block a, basic_block b)
{
#ifdef ENABLE_CHECKING
  gcc_assert (cfg_layout_can_merge_blocks_p (a, b));
#endif

  if (dump_file)
    fprintf (dump_file, "merging block %d into block %d\n", b->index, a->index);

  /* If there was a CODE_LABEL beginning B, delete it.  */
  if (LABEL_P (BB_HEAD (b)))
    {
      delete_insn (BB_HEAD (b));
    }

  /* We should have fallthru edge in a, or we can do dummy redirection to get
     it cleaned up.  */
  if (JUMP_P (BB_END (a)))
    try_redirect_by_replacing_jump (EDGE_SUCC (a, 0), b, true);
  gcc_assert (!JUMP_P (BB_END (a)));

  /* When not optimizing and the edge is the only place in RTL which holds
     some unique locus, emit a nop with that locus in between.  */
  if (!optimize && EDGE_SUCC (a, 0)->goto_locus)
    {
      rtx insn = BB_END (a), end = PREV_INSN (BB_HEAD (a));
      int goto_locus = EDGE_SUCC (a, 0)->goto_locus;

      /* Look backwards through A for an insn already carrying the
	 locus; if found, no nop is needed.  */
      while (insn != end && (!INSN_P (insn) || INSN_LOCATOR (insn) == 0))
	insn = PREV_INSN (insn);
      if (insn != end && locator_eq (INSN_LOCATOR (insn), goto_locus))
	goto_locus = 0;
      else
	{
	  /* Otherwise check the first real insn of B for the same
	     locus.  */
	  insn = BB_HEAD (b);
	  end = NEXT_INSN (BB_END (b));
	  while (insn != end && !INSN_P (insn))
	    insn = NEXT_INSN (insn);
	  if (insn != end && INSN_LOCATOR (insn) != 0
	      && locator_eq (INSN_LOCATOR (insn), goto_locus))
	    goto_locus = 0;
	}
      if (goto_locus)
	{
	  BB_END (a) = emit_insn_after_noloc (gen_nop (), BB_END (a), a);
	  INSN_LOCATOR (BB_END (a)) = goto_locus;
	}
    }

  /* Possible line number notes should appear in between.  */
  if (b->il.rtl->header)
    {
      rtx first = BB_END (a), last;

      last = emit_insn_after_noloc (b->il.rtl->header, BB_END (a), a);
      /* The above might add a BARRIER as BB_END, but as barriers
	 aren't valid parts of a bb, remove_insn doesn't update
	 BB_END if it is a barrier.  So adjust BB_END here.  */
      while (BB_END (a) != first && BARRIER_P (BB_END (a)))
	BB_END (a) = PREV_INSN (BB_END (a));
      delete_insn_chain (NEXT_INSN (first), last, false);
      b->il.rtl->header = NULL;
    }

  /* In the case basic blocks are not adjacent, move them around.  */
  if (NEXT_INSN (BB_END (a)) != BB_HEAD (b))
    {
      rtx first = unlink_insn_chain (BB_HEAD (b), BB_END (b));

      emit_insn_after_noloc (first, BB_END (a), a);
      /* Skip possible DELETED_LABEL insn.  */
      if (!NOTE_INSN_BASIC_BLOCK_P (first))
	first = NEXT_INSN (first);
      gcc_assert (NOTE_INSN_BASIC_BLOCK_P (first));
      BB_HEAD (b) = NULL;

      /* emit_insn_after_noloc doesn't call df_insn_change_bb.
         We need to explicitly call. */
      update_bb_for_insn_chain (NEXT_INSN (first),
				BB_END (b),
				a);

      delete_insn (first);
    }
  /* Otherwise just re-associate the instructions.  */
  else
    {
      rtx insn;

      update_bb_for_insn_chain (BB_HEAD (b), BB_END (b), a);

      insn = BB_HEAD (b);
      /* Skip possible DELETED_LABEL insn.  */
      if (!NOTE_INSN_BASIC_BLOCK_P (insn))
	insn = NEXT_INSN (insn);
      gcc_assert (NOTE_INSN_BASIC_BLOCK_P (insn));
      BB_HEAD (b) = NULL;
      BB_END (a) = BB_END (b);
      delete_insn (insn);
    }

  df_bb_delete (b->index);

  /* Possible tablejumps and barriers should appear after the block.  */
  if (b->il.rtl->footer)
    {
      if (!a->il.rtl->footer)
	a->il.rtl->footer = b->il.rtl->footer;
      else
	{
	  /* Append B's footer chain to the end of A's.  */
	  rtx last = a->il.rtl->footer;

	  while (NEXT_INSN (last))
	    last = NEXT_INSN (last);
	  NEXT_INSN (last) = b->il.rtl->footer;
	  PREV_INSN (b->il.rtl->footer) = last;
	}
      b->il.rtl->footer = NULL;
    }

  if (dump_file)
    fprintf (dump_file, "Merged blocks %d and %d.\n",
	     a->index, b->index);
}
2862 
2863 /* Split edge E.  */
2864 
2865 static basic_block
2866 cfg_layout_split_edge (edge e)
2867 {
2868   basic_block new_bb =
2869     create_basic_block (e->src != ENTRY_BLOCK_PTR
2870 			? NEXT_INSN (BB_END (e->src)) : get_insns (),
2871 			NULL_RTX, e->src);
2872 
2873   if (e->dest == EXIT_BLOCK_PTR)
2874     BB_COPY_PARTITION (new_bb, e->src);
2875   else
2876     BB_COPY_PARTITION (new_bb, e->dest);
2877   make_edge (new_bb, e->dest, EDGE_FALLTHRU);
2878   redirect_edge_and_branch_force (e, new_bb);
2879 
2880   return new_bb;
2881 }
2882 
2883 /* Do postprocessing after making a forwarder block joined by edge FALLTHRU.  */
2884 
/* Hook called after making a forwarder block joined by edge FALLTHRU.
   Nothing needs to be done for plain RTL mode, so this is a no-op.  */
static void
rtl_make_forwarder_block (edge fallthru ATTRIBUTE_UNUSED)
{
}
2889 
2890 /* Return 1 if BB ends with a call, possibly followed by some
2891    instructions that must stay with the call, 0 otherwise.  */
2892 
2893 static bool
2894 rtl_block_ends_with_call_p (basic_block bb)
2895 {
2896   rtx insn = BB_END (bb);
2897 
2898   while (!CALL_P (insn)
2899 	 && insn != BB_HEAD (bb)
2900 	 && (keep_with_call_p (insn)
2901 	     || NOTE_P (insn)
2902 	     || DEBUG_INSN_P (insn)))
2903     insn = PREV_INSN (insn);
2904   return (CALL_P (insn));
2905 }
2906 
2907 /* Return 1 if BB ends with a conditional branch, 0 otherwise.  */
2908 
2909 static bool
2910 rtl_block_ends_with_condjump_p (const_basic_block bb)
2911 {
2912   return any_condjump_p (BB_END (bb));
2913 }
2914 
2915 /* Return true if we need to add fake edge to exit.
2916    Helper function for rtl_flow_call_edges_add.  */
2917 
2918 static bool
2919 need_fake_edge_p (const_rtx insn)
2920 {
2921   if (!INSN_P (insn))
2922     return false;
2923 
2924   if ((CALL_P (insn)
2925        && !SIBLING_CALL_P (insn)
2926        && !find_reg_note (insn, REG_NORETURN, NULL)
2927        && !(RTL_CONST_OR_PURE_CALL_P (insn))))
2928     return true;
2929 
2930   return ((GET_CODE (PATTERN (insn)) == ASM_OPERANDS
2931 	   && MEM_VOLATILE_P (PATTERN (insn)))
2932 	  || (GET_CODE (PATTERN (insn)) == PARALLEL
2933 	      && asm_noperands (insn) != -1
2934 	      && MEM_VOLATILE_P (XVECEXP (PATTERN (insn), 0, 0)))
2935 	  || GET_CODE (PATTERN (insn)) == ASM_INPUT);
2936 }
2937 
2938 /* Add fake edges to the function exit for any non constant and non noreturn
2939    calls, volatile inline assembly in the bitmap of blocks specified by
2940    BLOCKS or to the whole CFG if BLOCKS is zero.  Return the number of blocks
2941    that were split.
2942 
2943    The goal is to expose cases in which entering a basic block does not imply
2944    that all subsequent instructions must be executed.  */
2945 
static int
rtl_flow_call_edges_add (sbitmap blocks)
{
  int i;
  int blocks_split = 0;
  /* Number of blocks before any splitting; newly created blocks get
     higher indices and need not be rescanned.  */
  int last_bb = last_basic_block;
  bool check_last_block = false;

  if (n_basic_blocks == NUM_FIXED_BLOCKS)
    return 0;

  if (! blocks)
    check_last_block = true;
  else
    check_last_block = TEST_BIT (blocks, EXIT_BLOCK_PTR->prev_bb->index);

  /* In the last basic block, before epilogue generation, there will be
     a fallthru edge to EXIT.  Special care is required if the last insn
     of the last basic block is a call because make_edge folds duplicate
     edges, which would result in the fallthru edge also being marked
     fake, which would result in the fallthru edge being removed by
     remove_fake_edges, which would result in an invalid CFG.

     Moreover, we can't elide the outgoing fake edge, since the block
     profiler needs to take this into account in order to solve the minimal
     spanning tree in the case that the call doesn't return.

     Handle this by adding a dummy instruction in a new last basic block.  */
  if (check_last_block)
    {
      basic_block bb = EXIT_BLOCK_PTR->prev_bb;
      rtx insn = BB_END (bb);

      /* Back up past insns that must be kept in the same block as a call.  */
      while (insn != BB_HEAD (bb)
	     && keep_with_call_p (insn))
	insn = PREV_INSN (insn);

      if (need_fake_edge_p (insn))
	{
	  edge e;

	  e = find_edge (bb, EXIT_BLOCK_PTR);
	  if (e)
	    {
	      /* The inserted USE forces a split of the fallthru edge,
		 separating the call from the edge to EXIT.  */
	      insert_insn_on_edge (gen_use (const0_rtx), e);
	      commit_edge_insertions ();
	    }
	}
    }

  /* Now add fake edges to the function exit for any non constant
     calls since there is no way that we can determine if they will
     return or not...  */

  for (i = NUM_FIXED_BLOCKS; i < last_bb; i++)
    {
      basic_block bb = BASIC_BLOCK (i);
      rtx insn;
      rtx prev_insn;

      if (!bb)
	continue;

      if (blocks && !TEST_BIT (blocks, i))
	continue;

      /* Walk the block's insns from last to first; PREV_INSN is saved
	 before any split so the walk is unaffected by it.  */
      for (insn = BB_END (bb); ; insn = prev_insn)
	{
	  prev_insn = PREV_INSN (insn);
	  if (need_fake_edge_p (insn))
	    {
	      edge e;
	      rtx split_at_insn = insn;

	      /* Don't split the block between a call and an insn that should
		 remain in the same block as the call.  */
	      if (CALL_P (insn))
		while (split_at_insn != BB_END (bb)
		       && keep_with_call_p (NEXT_INSN (split_at_insn)))
		  split_at_insn = NEXT_INSN (split_at_insn);

	      /* The handling above of the final block before the epilogue
		 should be enough to verify that there is no edge to the exit
		 block in CFG already.  Calling make_edge in such case would
		 cause us to mark that edge as fake and remove it later.  */

#ifdef ENABLE_CHECKING
	      if (split_at_insn == BB_END (bb))
		{
		  e = find_edge (bb, EXIT_BLOCK_PTR);
		  gcc_assert (e == NULL);
		}
#endif

	      /* Note that the following may create a new basic block
		 and renumber the existing basic blocks.  */
	      if (split_at_insn != BB_END (bb))
		{
		  e = split_block (bb, split_at_insn);
		  if (e)
		    blocks_split++;
		}

	      make_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE);
	    }

	  if (insn == BB_HEAD (bb))
	    break;
	}
    }

  if (blocks_split)
    verify_flow_info ();

  return blocks_split;
}
3063 
3064 /* Add COMP_RTX as a condition at end of COND_BB.  FIRST_HEAD is
3065    the conditional branch target, SECOND_HEAD should be the fall-thru
3066    there is no need to handle this here the loop versioning code handles
3067    this.  the reason for SECON_HEAD is that it is needed for condition
3068    in trees, and this should be of the same type since it is a hook.  */
3069 static void
3070 rtl_lv_add_condition_to_bb (basic_block first_head ,
3071 			    basic_block second_head ATTRIBUTE_UNUSED,
3072 			    basic_block cond_bb, void *comp_rtx)
3073 {
3074   rtx label, seq, jump;
3075   rtx op0 = XEXP ((rtx)comp_rtx, 0);
3076   rtx op1 = XEXP ((rtx)comp_rtx, 1);
3077   enum rtx_code comp = GET_CODE ((rtx)comp_rtx);
3078   enum machine_mode mode;
3079 
3080 
3081   label = block_label (first_head);
3082   mode = GET_MODE (op0);
3083   if (mode == VOIDmode)
3084     mode = GET_MODE (op1);
3085 
3086   start_sequence ();
3087   op0 = force_operand (op0, NULL_RTX);
3088   op1 = force_operand (op1, NULL_RTX);
3089   do_compare_rtx_and_jump (op0, op1, comp, 0,
3090 			   mode, NULL_RTX, NULL_RTX, label, -1);
3091   jump = get_last_insn ();
3092   JUMP_LABEL (jump) = label;
3093   LABEL_NUSES (label)++;
3094   seq = get_insns ();
3095   end_sequence ();
3096 
3097   /* Add the new cond , in the new head.  */
3098   emit_insn_after(seq, BB_END(cond_bb));
3099 }
3100 
3101 
3102 /* Given a block B with unconditional branch at its end, get the
3103    store the return the branch edge and the fall-thru edge in
3104    BRANCH_EDGE and FALLTHRU_EDGE respectively.  */
3105 static void
3106 rtl_extract_cond_bb_edges (basic_block b, edge *branch_edge,
3107 			   edge *fallthru_edge)
3108 {
3109   edge e = EDGE_SUCC (b, 0);
3110 
3111   if (e->flags & EDGE_FALLTHRU)
3112     {
3113       *fallthru_edge = e;
3114       *branch_edge = EDGE_SUCC (b, 1);
3115     }
3116   else
3117     {
3118       *branch_edge = e;
3119       *fallthru_edge = EDGE_SUCC (b, 1);
3120     }
3121 }
3122 
/* Attach freshly allocated, zero-initialized RTL-specific info to BB.
   BB must not already have RTL info; the memory is GC-managed.  */
void
init_rtl_bb_info (basic_block bb)
{
  gcc_assert (!bb->il.rtl);
  bb->il.rtl = GGC_CNEW (struct rtl_bb_info);
}
3129 
3130 
3131 /* Add EXPR to the end of basic block BB.  */
3132 
3133 rtx
3134 insert_insn_end_bb_new (rtx pat, basic_block bb)
3135 {
3136   rtx insn = BB_END (bb);
3137   rtx new_insn;
3138   rtx pat_end = pat;
3139 
3140   while (NEXT_INSN (pat_end) != NULL_RTX)
3141     pat_end = NEXT_INSN (pat_end);
3142 
3143   /* If the last insn is a jump, insert EXPR in front [taking care to
3144      handle cc0, etc. properly].  Similarly we need to care trapping
3145      instructions in presence of non-call exceptions.  */
3146 
3147   if (JUMP_P (insn)
3148       || (NONJUMP_INSN_P (insn)
3149           && (!single_succ_p (bb)
3150               || single_succ_edge (bb)->flags & EDGE_ABNORMAL)))
3151     {
3152 #ifdef HAVE_cc0
3153       rtx note;
3154 #endif
3155       /* If this is a jump table, then we can't insert stuff here.  Since
3156          we know the previous real insn must be the tablejump, we insert
3157          the new instruction just before the tablejump.  */
3158       if (GET_CODE (PATTERN (insn)) == ADDR_VEC
3159           || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
3160         insn = prev_real_insn (insn);
3161 
3162 #ifdef HAVE_cc0
3163       /* FIXME: 'twould be nice to call prev_cc0_setter here but it aborts
3164          if cc0 isn't set.  */
3165       note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
3166       if (note)
3167         insn = XEXP (note, 0);
3168       else
3169         {
3170           rtx maybe_cc0_setter = prev_nonnote_insn (insn);
3171           if (maybe_cc0_setter
3172               && INSN_P (maybe_cc0_setter)
3173               && sets_cc0_p (PATTERN (maybe_cc0_setter)))
3174             insn = maybe_cc0_setter;
3175         }
3176 #endif
3177       /* FIXME: What if something in cc0/jump uses value set in new
3178          insn?  */
3179       new_insn = emit_insn_before_noloc (pat, insn, bb);
3180     }
3181 
3182   /* Likewise if the last insn is a call, as will happen in the presence
3183      of exception handling.  */
3184   else if (CALL_P (insn)
3185            && (!single_succ_p (bb)
3186                || single_succ_edge (bb)->flags & EDGE_ABNORMAL))
3187     {
3188       /* Keeping in mind SMALL_REGISTER_CLASSES and parameters in registers,
3189          we search backward and place the instructions before the first
3190          parameter is loaded.  Do this for everyone for consistency and a
3191          presumption that we'll get better code elsewhere as well.  */
3192 
3193       /* Since different machines initialize their parameter registers
3194          in different orders, assume nothing.  Collect the set of all
3195          parameter registers.  */
3196       insn = find_first_parameter_load (insn, BB_HEAD (bb));
3197 
3198       /* If we found all the parameter loads, then we want to insert
3199          before the first parameter load.
3200 
3201          If we did not find all the parameter loads, then we might have
3202          stopped on the head of the block, which could be a CODE_LABEL.
3203          If we inserted before the CODE_LABEL, then we would be putting
3204          the insn in the wrong basic block.  In that case, put the insn
3205          after the CODE_LABEL.  Also, respect NOTE_INSN_BASIC_BLOCK.  */
3206       while (LABEL_P (insn)
3207              || NOTE_INSN_BASIC_BLOCK_P (insn))
3208         insn = NEXT_INSN (insn);
3209 
3210       new_insn = emit_insn_before_noloc (pat, insn, bb);
3211     }
3212   else
3213     new_insn = emit_insn_after_noloc (pat, insn, bb);
3214 
3215   return new_insn;
3216 }
3217 
3218 /* Returns true if it is possible to remove edge E by redirecting
3219    it to the destination of the other edge from E->src.  */
3220 
3221 static bool
3222 rtl_can_remove_branch_p (const_edge e)
3223 {
3224   const_basic_block src = e->src;
3225   const_basic_block target = EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest;
3226   const_rtx insn = BB_END (src), set;
3227 
3228   /* The conditions are taken from try_redirect_by_replacing_jump.  */
3229   if (target == EXIT_BLOCK_PTR)
3230     return false;
3231 
3232   if (e->flags & (EDGE_ABNORMAL_CALL | EDGE_EH))
3233     return false;
3234 
3235   if (find_reg_note (insn, REG_CROSSING_JUMP, NULL_RTX)
3236       || BB_PARTITION (src) != BB_PARTITION (target))
3237     return false;
3238 
3239   if (!onlyjump_p (insn)
3240       || tablejump_p (insn, NULL, NULL))
3241     return false;
3242 
3243   set = single_set (insn);
3244   if (!set || side_effects_p (set))
3245     return false;
3246 
3247   return true;
3248 }
3249 
/* Implementation of CFG manipulation for linearized RTL.  */
struct cfg_hooks rtl_cfg_hooks = {
  "rtl",
  rtl_verify_flow_info, /* verify_flow_info */
  rtl_dump_bb, /* dump_bb */
  rtl_create_basic_block, /* create_basic_block */
  rtl_redirect_edge_and_branch, /* redirect_edge_and_branch */
  rtl_redirect_edge_and_branch_force, /* redirect_edge_and_branch_force */
  rtl_can_remove_branch_p, /* can_remove_branch_p */
  rtl_delete_block, /* delete_basic_block */
  rtl_split_block, /* split_block */
  rtl_move_block_after, /* move_block_after */
  rtl_can_merge_blocks,  /* can_merge_blocks_p */
  rtl_merge_blocks, /* merge_blocks */
  rtl_predict_edge, /* predict_edge */
  rtl_predicted_by_p, /* predicted_by_p */
  NULL, /* can_duplicate_block_p */
  NULL, /* duplicate_block */
  rtl_split_edge, /* split_edge */
  rtl_make_forwarder_block, /* make_forwarder_block */
  rtl_tidy_fallthru_edge, /* tidy_fallthru_edge */
  rtl_block_ends_with_call_p, /* block_ends_with_call_p */
  rtl_block_ends_with_condjump_p, /* block_ends_with_condjump_p */
  rtl_flow_call_edges_add, /* flow_call_edges_add */
  NULL, /* execute_on_growing_pred */
  NULL, /* execute_on_shrinking_pred */
  NULL, /* duplicate loop for trees */
  NULL, /* lv_add_condition_to_bb */
  NULL, /* lv_adjust_loop_header_phi*/
  NULL, /* extract_cond_bb_edges */
  NULL		/* flush_pending_stmts */
};
3282 
3283 /* Implementation of CFG manipulation for cfg layout RTL, where
3284    basic block connected via fallthru edges does not have to be adjacent.
3285    This representation will hopefully become the default one in future
3286    version of the compiler.  */
3287 
3288 /* We do not want to declare these functions in a header file, since they
3289    should only be used through the cfghooks interface, and we do not want to
3290    move them here since it would require also moving quite a lot of related
3291    code.  They are in cfglayout.c.  */
3292 extern bool cfg_layout_can_duplicate_bb_p (const_basic_block);
3293 extern basic_block cfg_layout_duplicate_bb (basic_block);
3294 
struct cfg_hooks cfg_layout_rtl_cfg_hooks = {
  "cfglayout mode",
  rtl_verify_flow_info_1, /* verify_flow_info */
  rtl_dump_bb, /* dump_bb */
  cfg_layout_create_basic_block, /* create_basic_block */
  cfg_layout_redirect_edge_and_branch, /* redirect_edge_and_branch */
  cfg_layout_redirect_edge_and_branch_force, /* redirect_edge_and_branch_force */
  rtl_can_remove_branch_p, /* can_remove_branch_p */
  cfg_layout_delete_block, /* delete_basic_block */
  cfg_layout_split_block, /* split_block */
  rtl_move_block_after, /* move_block_after */
  cfg_layout_can_merge_blocks_p, /* can_merge_blocks_p */
  cfg_layout_merge_blocks, /* merge_blocks */
  rtl_predict_edge, /* predict_edge */
  rtl_predicted_by_p, /* predicted_by_p */
  cfg_layout_can_duplicate_bb_p, /* can_duplicate_block_p */
  cfg_layout_duplicate_bb, /* duplicate_block */
  cfg_layout_split_edge, /* split_edge */
  rtl_make_forwarder_block, /* make_forwarder_block */
  NULL, /* tidy_fallthru_edge: fallthru blocks need not be adjacent here */
  rtl_block_ends_with_call_p, /* block_ends_with_call_p */
  rtl_block_ends_with_condjump_p, /* block_ends_with_condjump_p */
  rtl_flow_call_edges_add, /* flow_call_edges_add */
  NULL, /* execute_on_growing_pred */
  NULL, /* execute_on_shrinking_pred */
  duplicate_loop_to_header_edge, /* duplicate loop for trees */
  rtl_lv_add_condition_to_bb, /* lv_add_condition_to_bb */
  NULL, /* lv_adjust_loop_header_phi*/
  rtl_extract_cond_bb_edges, /* extract_cond_bb_edges */
  NULL		/* flush_pending_stmts */
};
3326