1 /* Control flow graph manipulation code for GNU compiler.
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001, 2002 Free Software Foundation, Inc.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
21
22 /* This file contains low level functions to manipulate the CFG and analyze it
23 that are aware of the RTL intermediate language.
24
25 Available functionality:
26 - CFG-aware instruction chain manipulation
27 delete_insn, delete_insn_chain
28 - Basic block manipulation
29 create_basic_block, flow_delete_block, split_block,
30 merge_blocks_nomove
31 - Infrastructure to quickly determine the basic block for an insn
32 compute_bb_for_insn, update_bb_for_insn, set_block_for_insn
33 - Edge redirection with updating and optimizing of insn chain
34 block_label, redirect_edge_and_branch,
35 redirect_edge_and_branch_force, tidy_fallthru_edge, force_nonfallthru
36 - Edge splitting and committing to edges
37 split_edge, insert_insn_on_edge, commit_edge_insertions
38 - Dumping and debugging
39 print_rtl_with_bb, dump_bb, debug_bb, debug_bb_n
40 - Consistency checking
41 verify_flow_info
42 - CFG updating after constant propagation
43 purge_dead_edges, purge_all_dead_edges */
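/* A minimal usage sketch (hypothetical caller, not part of this file):
   a pass that wants to place an insn pattern PAT on edge E without
   worrying about critical edges can queue it and commit everything
   at once:

       insert_insn_on_edge (pat, e);
       ...
       commit_edge_insertions ();

   commit_edge_insertions splits edges where necessary and rescans the
   affected blocks via find_many_sub_basic_blocks.  */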
44
45 #include "config.h"
46 #include "system.h"
47 #include "tree.h"
48 #include "rtl.h"
49 #include "hard-reg-set.h"
50 #include "basic-block.h"
51 #include "regs.h"
52 #include "flags.h"
53 #include "output.h"
54 #include "function.h"
55 #include "except.h"
56 #include "toplev.h"
57 #include "tm_p.h"
58 #include "obstack.h"
59 #include "insn-config.h"
60
61 /* Stubs in case we don't have a return insn. */
62 #ifndef HAVE_return
63 #define HAVE_return 0
64 #define gen_return() NULL_RTX
65 #endif
66
67 /* The labels mentioned in non-jump rtl. Valid during find_basic_blocks. */
68 /* ??? Should probably be using LABEL_NUSES instead. It would take a
69 bit of surgery to be able to use or co-opt the routines in jump. */
70 rtx label_value_list;
71 rtx tail_recursion_label_list;
72
73 static int can_delete_note_p PARAMS ((rtx));
74 static int can_delete_label_p PARAMS ((rtx));
75 static void commit_one_edge_insertion PARAMS ((edge, int));
76 static bool try_redirect_by_replacing_jump PARAMS ((edge, basic_block));
77 static rtx last_loop_beg_note PARAMS ((rtx));
78 static bool back_edge_of_syntactic_loop_p PARAMS ((basic_block, basic_block));
79 static basic_block force_nonfallthru_and_redirect PARAMS ((edge, basic_block));
80
81 /* Return true if NOTE is not one of the ones that must be kept paired,
82 so that we may simply delete it. */
83
84 static int
85 can_delete_note_p (note)
86 rtx note;
87 {
88 return (NOTE_LINE_NUMBER (note) == NOTE_INSN_DELETED
89 || NOTE_LINE_NUMBER (note) == NOTE_INSN_BASIC_BLOCK
90 || NOTE_LINE_NUMBER (note) == NOTE_INSN_PREDICTION);
91 }
92
93 /* True if a given label can be deleted. */
94
95 static int
96 can_delete_label_p (label)
97 rtx label;
98 {
99 return (!LABEL_PRESERVE_P (label)
100 /* User declared labels must be preserved. */
101 && LABEL_NAME (label) == 0
102 && !in_expr_list_p (forced_labels, label)
103 && !in_expr_list_p (label_value_list, label));
104 }
105
106 /* Delete INSN by patching it out. Return the next insn. */
107
108 rtx
109 delete_insn (insn)
110 rtx insn;
111 {
112 rtx next = NEXT_INSN (insn);
113 rtx note;
114 bool really_delete = true;
115
116 if (GET_CODE (insn) == CODE_LABEL)
117 {
118 /* Some labels can't be directly removed from the INSN chain, as they
119 might be referenced via variables, the constant pool, etc.
120 Convert them to the special NOTE_INSN_DELETED_LABEL note. */
121 if (! can_delete_label_p (insn))
122 {
123 const char *name = LABEL_NAME (insn);
124
125 really_delete = false;
126 PUT_CODE (insn, NOTE);
127 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED_LABEL;
128 NOTE_SOURCE_FILE (insn) = name;
129 }
130
131 remove_node_from_expr_list (insn, &nonlocal_goto_handler_labels);
132 }
133
134 if (really_delete)
135 {
136 /* If this insn has already been deleted, something is very wrong. */
137 if (INSN_DELETED_P (insn))
138 abort ();
139 remove_insn (insn);
140 INSN_DELETED_P (insn) = 1;
141 }
142
143 /* If deleting a jump, decrement the use count of the label. Deleting
144 the label itself should happen in the normal course of block merging. */
145 if (GET_CODE (insn) == JUMP_INSN
146 && JUMP_LABEL (insn)
147 && GET_CODE (JUMP_LABEL (insn)) == CODE_LABEL)
148 LABEL_NUSES (JUMP_LABEL (insn))--;
149
150 /* Also if deleting an insn that references a label. */
151 else
152 {
153 while ((note = find_reg_note (insn, REG_LABEL, NULL_RTX)) != NULL_RTX
154 && GET_CODE (XEXP (note, 0)) == CODE_LABEL)
155 {
156 LABEL_NUSES (XEXP (note, 0))--;
157 remove_note (insn, note);
158 }
159 }
160
161 if (GET_CODE (insn) == JUMP_INSN
162 && (GET_CODE (PATTERN (insn)) == ADDR_VEC
163 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC))
164 {
165 rtx pat = PATTERN (insn);
166 int diff_vec_p = GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC;
167 int len = XVECLEN (pat, diff_vec_p);
168 int i;
169
170 for (i = 0; i < len; i++)
171 {
172 rtx label = XEXP (XVECEXP (pat, diff_vec_p, i), 0);
173
174 /* When deleting code in bulk (e.g. removing many unreachable
175 blocks) we can delete a label that's a target of the vector
176 before deleting the vector itself. */
177 if (GET_CODE (label) != NOTE)
178 LABEL_NUSES (label)--;
179 }
180 }
181
182 return next;
183 }
184
185 /* Like delete_insn but also purge dead edges from BB. */
186 rtx
187 delete_insn_and_edges (insn)
188 rtx insn;
189 {
190 rtx x;
191 bool purge = false;
192
193 if (INSN_P (insn)
194 && BLOCK_FOR_INSN (insn)
195 && BLOCK_FOR_INSN (insn)->end == insn)
196 purge = true;
197 x = delete_insn (insn);
198 if (purge)
199 purge_dead_edges (BLOCK_FOR_INSN (insn));
200 return x;
201 }
202
203 /* Unlink a chain of insns between START and FINISH, leaving notes
204 that must be paired. */
205
206 void
207 delete_insn_chain (start, finish)
208 rtx start, finish;
209 {
210 rtx next;
211
212 /* Unchain the insns one by one. It would be quicker to delete all of these
213 with a single unchaining, rather than one at a time, but we need to keep
214 the NOTE's. */
215 while (1)
216 {
217 next = NEXT_INSN (start);
218 if (GET_CODE (start) == NOTE && !can_delete_note_p (start))
219 ;
220 else
221 next = delete_insn (start);
222
223 if (start == finish)
224 break;
225 start = next;
226 }
227 }
228
229 /* Like delete_insn_chain but also purge dead edges from BB. */
230 void
231 delete_insn_chain_and_edges (first, last)
232 rtx first, last;
233 {
234 bool purge = false;
235
236 if (INSN_P (last)
237 && BLOCK_FOR_INSN (last)
238 && BLOCK_FOR_INSN (last)->end == last)
239 purge = true;
240 delete_insn_chain (first, last);
241 if (purge)
242 purge_dead_edges (BLOCK_FOR_INSN (last));
243 }
244
245 /* Create a new basic block consisting of the instructions between HEAD and END
246 inclusive. This function is designed to allow fast BB construction - it
247 reuses the note and basic block struct in BB_NOTE, if any, does not grow
248 the BASIC_BLOCK chain, and should be used directly only by CFG construction
249 code. END can be NULL to create a new empty basic block before HEAD. Both
250 END and HEAD can be NULL to create a basic block at the end of the INSN chain.
251 AFTER is the basic block we should be put after. */
252
253 basic_block
254 create_basic_block_structure (head, end, bb_note, after)
255 rtx head, end, bb_note;
256 basic_block after;
257 {
258 basic_block bb;
259
260 if (bb_note
261 && ! RTX_INTEGRATED_P (bb_note)
262 && (bb = NOTE_BASIC_BLOCK (bb_note)) != NULL
263 && bb->aux == NULL)
264 {
265 /* If we found an existing note, thread it back onto the chain. */
266
267 rtx after;
268
269 if (GET_CODE (head) == CODE_LABEL)
270 after = head;
271 else
272 {
273 after = PREV_INSN (head);
274 head = bb_note;
275 }
276
277 if (after != bb_note && NEXT_INSN (after) != bb_note)
278 reorder_insns_nobb (bb_note, bb_note, after);
279 }
280 else
281 {
282 /* Otherwise we must create a note and a basic block structure. */
283
284 bb = alloc_block ();
285
286 if (!head && !end)
287 head = end = bb_note
288 = emit_note_after (NOTE_INSN_BASIC_BLOCK, get_last_insn ());
289 else if (GET_CODE (head) == CODE_LABEL && end)
290 {
291 bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK, head);
292 if (head == end)
293 end = bb_note;
294 }
295 else
296 {
297 bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK, head);
298 head = bb_note;
299 if (!end)
300 end = head;
301 }
302
303 NOTE_BASIC_BLOCK (bb_note) = bb;
304 }
305
306 /* Always include the bb note in the block. */
307 if (NEXT_INSN (end) == bb_note)
308 end = bb_note;
309
310 bb->head = head;
311 bb->end = end;
312 bb->index = last_basic_block++;
313 bb->flags = BB_NEW;
314 link_block (bb, after);
315 BASIC_BLOCK (bb->index) = bb;
316 update_bb_for_insn (bb);
317
318 /* Tag the block so that we know it has been used when considering
319 other basic block notes. */
320 bb->aux = bb;
321
322 return bb;
323 }
324
325 /* Create a new basic block consisting of the instructions between HEAD and END
326 and place it in the BB chain after block AFTER. END can be NULL to
327 create a new empty basic block before HEAD. Both END and HEAD can be NULL to
328 create a basic block at the end of the INSN chain. */
329
330 basic_block
331 create_basic_block (head, end, after)
332 rtx head, end;
333 basic_block after;
334 {
335 basic_block bb;
336
337 /* Place the new block just after the end. */
338 VARRAY_GROW (basic_block_info, last_basic_block+1);
339
340 n_basic_blocks++;
341
342 bb = create_basic_block_structure (head, end, NULL, after);
343 bb->aux = NULL;
344 return bb;
345 }
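/* For illustration (hypothetical snippet, not called from this file):

       bb = create_basic_block (NULL, NULL, EXIT_BLOCK_PTR->prev_bb);

   creates a new empty block at the end of the insn chain, while

       bb = create_basic_block (label, NULL, after);

   creates a new empty block just before LABEL, where LABEL and AFTER
   are assumed to be an existing code label and basic block.  */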
346
347 /* Delete the insns in a (non-live) block. We physically delete every
348 non-deleted-note insn, and update the flow graph appropriately.
349
350 Return nonzero if we deleted an exception handler. */
351
352 /* ??? Preserving all such notes strikes me as wrong. It would be nice
353 to post-process the stream to remove empty blocks, loops, ranges, etc. */
354
355 int
356 flow_delete_block_noexpunge (b)
357 basic_block b;
358 {
359 int deleted_handler = 0;
360 rtx insn, end, tmp;
361
362 /* If the head of this block is a CODE_LABEL, then it might be the
363 label for an exception handler which can't be reached.
364
365 We need to remove the label from the exception_handler_label list
366 and remove the associated NOTE_INSN_EH_REGION_BEG and
367 NOTE_INSN_EH_REGION_END notes. */
368
369 /* Get rid of all NOTE_INSN_PREDICTIONs and NOTE_INSN_LOOP_CONTs
370 hanging before the block. */
371
372 for (insn = PREV_INSN (b->head); insn; insn = PREV_INSN (insn))
373 {
374 if (GET_CODE (insn) != NOTE)
375 break;
376 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_PREDICTION
377 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_CONT)
378 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
379 }
380
381 insn = b->head;
382
383 never_reached_warning (insn, b->end);
384
385 if (GET_CODE (insn) == CODE_LABEL)
386 maybe_remove_eh_handler (insn);
387
388 /* Include any jump table following the basic block. */
389 end = b->end;
390 if (GET_CODE (end) == JUMP_INSN
391 && (tmp = JUMP_LABEL (end)) != NULL_RTX
392 && (tmp = NEXT_INSN (tmp)) != NULL_RTX
393 && GET_CODE (tmp) == JUMP_INSN
394 && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
395 || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
396 end = tmp;
397
398 /* Include any barrier that may follow the basic block. */
399 tmp = next_nonnote_insn (end);
400 if (tmp && GET_CODE (tmp) == BARRIER)
401 end = tmp;
402
403 /* Selectively delete the entire chain. */
404 b->head = NULL;
405 delete_insn_chain (insn, end);
406
407 /* Remove the edges into and out of this block. Note that there may
408 indeed be edges in, if we are removing an unreachable loop. */
409 while (b->pred != NULL)
410 remove_edge (b->pred);
411 while (b->succ != NULL)
412 remove_edge (b->succ);
413
414 b->pred = NULL;
415 b->succ = NULL;
416
417 return deleted_handler;
418 }
419
420 int
421 flow_delete_block (b)
422 basic_block b;
423 {
424 int deleted_handler = flow_delete_block_noexpunge (b);
425
426 /* Remove the basic block from the array. */
427 expunge_block (b);
428
429 return deleted_handler;
430 }
431
432 /* Records the basic block struct in BLOCK_FOR_INSN for every insn. */
433
434 void
435 compute_bb_for_insn ()
436 {
437 basic_block bb;
438
439 FOR_EACH_BB (bb)
440 {
441 rtx end = bb->end;
442 rtx insn;
443
444 for (insn = bb->head; ; insn = NEXT_INSN (insn))
445 {
446 BLOCK_FOR_INSN (insn) = bb;
447 if (insn == end)
448 break;
449 }
450 }
451 }
452
453 /* Release the basic_block_for_insn array. */
454
455 void
456 free_bb_for_insn ()
457 {
458 rtx insn;
459 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
460 if (GET_CODE (insn) != BARRIER)
461 BLOCK_FOR_INSN (insn) = NULL;
462 }
463
464 /* Update BLOCK_FOR_INSN for all the insns within BB. */
465
466 void
467 update_bb_for_insn (bb)
468 basic_block bb;
469 {
470 rtx insn;
471
472 for (insn = bb->head; ; insn = NEXT_INSN (insn))
473 {
474 set_block_for_insn (insn, bb);
475 if (insn == bb->end)
476 break;
477 }
478 }
479
480 /* Split a block BB after insn INSN creating a new fallthru edge.
481 Return the new edge. Note that to keep other parts of the compiler happy,
482 this function renumbers all the basic blocks so that the new
483 one has a number one greater than the block split. */
484
485 edge
486 split_block (bb, insn)
487 basic_block bb;
488 rtx insn;
489 {
490 basic_block new_bb;
491 edge new_edge;
492 edge e;
493
494 /* There is no point splitting the block after its end. */
495 if (bb->end == insn)
496 return 0;
497
498 /* Create the new basic block. */
499 new_bb = create_basic_block (NEXT_INSN (insn), bb->end, bb);
500 new_bb->count = bb->count;
501 new_bb->frequency = bb->frequency;
502 new_bb->loop_depth = bb->loop_depth;
503 bb->end = insn;
504
505 /* Redirect the outgoing edges. */
506 new_bb->succ = bb->succ;
507 bb->succ = NULL;
508 for (e = new_bb->succ; e; e = e->succ_next)
509 e->src = new_bb;
510
511 new_edge = make_single_succ_edge (bb, new_bb, EDGE_FALLTHRU);
512
513 if (bb->global_live_at_start)
514 {
515 new_bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (&flow_obstack);
516 new_bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (&flow_obstack);
517 COPY_REG_SET (new_bb->global_live_at_end, bb->global_live_at_end);
518
519 /* We now have to calculate which registers are live at the end
520 of the split basic block and at the start of the new basic
521 block. Start with those registers that are known to be live
522 at the end of the original basic block and get
523 propagate_block to determine which registers are live. */
524 COPY_REG_SET (new_bb->global_live_at_start, bb->global_live_at_end);
525 propagate_block (new_bb, new_bb->global_live_at_start, NULL, NULL, 0);
526 COPY_REG_SET (bb->global_live_at_end,
527 new_bb->global_live_at_start);
528 #ifdef HAVE_conditional_execution
529 /* In the presence of conditional execution we are not able to update
530 liveness precisely. */
531 if (reload_completed)
532 {
533 bb->flags |= BB_DIRTY;
534 new_bb->flags |= BB_DIRTY;
535 }
536 #endif
537 }
538
539 return new_edge;
540 }
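/* For instance (hypothetical caller sketch): to break block BB right
   after insn INSN and work with both halves,

       edge fallthru = split_block (bb, insn);
       basic_block rest = fallthru->dest;

   BB keeps the insns up to and including INSN; REST receives the
   remaining insns and all of BB's original outgoing edges.  */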
541
542 /* Blocks A and B are to be merged into a single block A. The insns
543 are already contiguous, hence `nomove'. */
544
545 void
546 merge_blocks_nomove (a, b)
547 basic_block a, b;
548 {
549 rtx b_head = b->head, b_end = b->end, a_end = a->end;
550 rtx del_first = NULL_RTX, del_last = NULL_RTX;
551 int b_empty = 0;
552 edge e;
553
554 /* If there was a CODE_LABEL beginning B, delete it. */
555 if (GET_CODE (b_head) == CODE_LABEL)
556 {
557 /* Detect basic blocks with nothing but a label. This can happen
558 in particular at the end of a function. */
559 if (b_head == b_end)
560 b_empty = 1;
561
562 del_first = del_last = b_head;
563 b_head = NEXT_INSN (b_head);
564 }
565
566 /* Delete the basic block note and handle blocks containing just that
567 note. */
568 if (NOTE_INSN_BASIC_BLOCK_P (b_head))
569 {
570 if (b_head == b_end)
571 b_empty = 1;
572 if (! del_last)
573 del_first = b_head;
574
575 del_last = b_head;
576 b_head = NEXT_INSN (b_head);
577 }
578
579 /* If there was a jump out of A, delete it. */
580 if (GET_CODE (a_end) == JUMP_INSN)
581 {
582 rtx prev;
583
584 for (prev = PREV_INSN (a_end); ; prev = PREV_INSN (prev))
585 if (GET_CODE (prev) != NOTE
586 || NOTE_LINE_NUMBER (prev) == NOTE_INSN_BASIC_BLOCK
587 || prev == a->head)
588 break;
589
590 del_first = a_end;
591
592 #ifdef HAVE_cc0
593 /* If this was a conditional jump, we need to also delete
594 the insn that set cc0. */
595 if (only_sets_cc0_p (prev))
596 {
597 rtx tmp = prev;
598
599 prev = prev_nonnote_insn (prev);
600 if (!prev)
601 prev = a->head;
602 del_first = tmp;
603 }
604 #endif
605
606 a_end = PREV_INSN (del_first);
607 }
608 else if (GET_CODE (NEXT_INSN (a_end)) == BARRIER)
609 del_first = NEXT_INSN (a_end);
610
611 /* Normally there should only be one successor of A and that is B, but
612 partway through the merge of blocks for conditional_execution we'll
613 be merging a TEST block with THEN and ELSE successors. Free the
614 whole lot of them and hope the caller knows what they're doing. */
615 while (a->succ)
616 remove_edge (a->succ);
617
618 /* Adjust the edges out of B for the new owner. */
619 for (e = b->succ; e; e = e->succ_next)
620 e->src = a;
621 a->succ = b->succ;
622 a->flags |= b->flags;
623
624 /* B hasn't quite yet ceased to exist. Attempt to prevent mishap. */
625 b->pred = b->succ = NULL;
626 a->global_live_at_end = b->global_live_at_end;
627
628 expunge_block (b);
629
630 /* Delete everything marked above as well as crap that might be
631 hanging out between the two blocks. */
632 delete_insn_chain (del_first, del_last);
633
634 /* Reassociate the insns of B with A. */
635 if (!b_empty)
636 {
637 rtx x;
638
639 for (x = a_end; x != b_end; x = NEXT_INSN (x))
640 set_block_for_insn (x, a);
641
642 set_block_for_insn (b_end, a);
643
644 a_end = b_end;
645 }
646
647 a->end = a_end;
648 }
649
650 /* Return the label in the head of basic block BLOCK. Create one if it doesn't
651 exist. */
652
653 rtx
654 block_label (block)
655 basic_block block;
656 {
657 if (block == EXIT_BLOCK_PTR)
658 return NULL_RTX;
659
660 if (GET_CODE (block->head) != CODE_LABEL)
661 {
662 block->head = emit_label_before (gen_label_rtx (), block->head);
663 }
664
665 return block->head;
666 }
667
668 /* Attempt to perform edge redirection by replacing a possibly complex jump
669 instruction with an unconditional jump, or by removing the jump completely.
670 This can apply only if all edges now point to the same block. The
671 parameters and return values are equivalent to redirect_edge_and_branch. */
672
673 static bool
674 try_redirect_by_replacing_jump (e, target)
675 edge e;
676 basic_block target;
677 {
678 basic_block src = e->src;
679 rtx insn = src->end, kill_from;
680 edge tmp;
681 rtx set, table;
682 int fallthru = 0;
683
684 /* Verify that all targets will be TARGET. */
685 for (tmp = src->succ; tmp; tmp = tmp->succ_next)
686 if (tmp->dest != target && tmp != e)
687 break;
688
689 if (tmp || !onlyjump_p (insn))
690 return false;
691 if (reload_completed && JUMP_LABEL (insn)
692 && (table = NEXT_INSN (JUMP_LABEL (insn))) != NULL_RTX
693 && GET_CODE (table) == JUMP_INSN
694 && (GET_CODE (PATTERN (table)) == ADDR_VEC
695 || GET_CODE (PATTERN (table)) == ADDR_DIFF_VEC))
696 return false;
697
698 /* Avoid removing branch with side effects. */
699 set = single_set (insn);
700 if (!set || side_effects_p (set))
701 return false;
702
703 /* In case we zap a conditional jump, we'll need to kill
704 the cc0 setter too. */
705 kill_from = insn;
706 #ifdef HAVE_cc0
707 if (reg_mentioned_p (cc0_rtx, PATTERN (insn)))
708 kill_from = PREV_INSN (insn);
709 #endif
710
711 /* See if we can create the fallthru edge. */
712 if (can_fallthru (src, target))
713 {
714 if (rtl_dump_file)
715 fprintf (rtl_dump_file, "Removing jump %i.\n", INSN_UID (insn));
716 fallthru = 1;
717
718 /* Selectively unlink whole insn chain. */
719 delete_insn_chain (kill_from, PREV_INSN (target->head));
720 }
721
722 /* If this is already a simplejump, redirect it. */
723 else if (simplejump_p (insn))
724 {
725 if (e->dest == target)
726 return false;
727 if (rtl_dump_file)
728 fprintf (rtl_dump_file, "Redirecting jump %i from %i to %i.\n",
729 INSN_UID (insn), e->dest->index, target->index);
730 if (!redirect_jump (insn, block_label (target), 0))
731 {
732 if (target == EXIT_BLOCK_PTR)
733 return false;
734 abort ();
735 }
736 }
737
738 /* Cannot do anything for target exit block. */
739 else if (target == EXIT_BLOCK_PTR)
740 return false;
741
742 /* Or replace the possibly complicated jump insn with a simple jump insn. */
743 else
744 {
745 rtx target_label = block_label (target);
746 rtx barrier, tmp;
747
748 emit_jump_insn_after (gen_jump (target_label), insn);
749 JUMP_LABEL (src->end) = target_label;
750 LABEL_NUSES (target_label)++;
751 if (rtl_dump_file)
752 fprintf (rtl_dump_file, "Replacing insn %i by jump %i\n",
753 INSN_UID (insn), INSN_UID (src->end));
754
755
756 delete_insn_chain (kill_from, insn);
757
758 /* Recognize a tablejump that we are converting to a
759 simple jump and remove its associated CODE_LABEL
760 and ADDR_VEC or ADDR_DIFF_VEC. */
761 if ((tmp = JUMP_LABEL (insn)) != NULL_RTX
762 && (tmp = NEXT_INSN (tmp)) != NULL_RTX
763 && GET_CODE (tmp) == JUMP_INSN
764 && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
765 || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
766 {
767 delete_insn_chain (JUMP_LABEL (insn), tmp);
768 }
769
770 barrier = next_nonnote_insn (src->end);
771 if (!barrier || GET_CODE (barrier) != BARRIER)
772 emit_barrier_after (src->end);
773 else
774 {
775 if (barrier != NEXT_INSN (src->end))
776 {
777 /* Move the jump before the barrier so that the notes which
778 originally were there, or were created before the jump table,
779 end up inside the basic block. */
780 rtx new_insn = src->end;
781 rtx tmp;
782
783 for (tmp = NEXT_INSN (src->end); tmp != barrier;
784 tmp = NEXT_INSN (tmp))
785 set_block_for_insn (tmp, src);
786
787 NEXT_INSN (PREV_INSN (new_insn)) = NEXT_INSN (new_insn);
788 PREV_INSN (NEXT_INSN (new_insn)) = PREV_INSN (new_insn);
789
790 NEXT_INSN (new_insn) = barrier;
791 NEXT_INSN (PREV_INSN (barrier)) = new_insn;
792
793 PREV_INSN (new_insn) = PREV_INSN (barrier);
794 PREV_INSN (barrier) = new_insn;
795 }
796 }
797 }
798
799 /* Keep only one edge out and set proper flags. */
800 while (src->succ->succ_next)
801 remove_edge (src->succ);
802 e = src->succ;
803 if (fallthru)
804 e->flags = EDGE_FALLTHRU;
805 else
806 e->flags = 0;
807
808 e->probability = REG_BR_PROB_BASE;
809 e->count = src->count;
810
811 /* We don't want a block to end on a line-number note since that has
812 the potential of changing the code between -g and not -g. */
813 while (GET_CODE (e->src->end) == NOTE
814 && NOTE_LINE_NUMBER (e->src->end) >= 0)
815 delete_insn (e->src->end);
816
817 if (e->dest != target)
818 redirect_edge_succ (e, target);
819
820 return true;
821 }
822
823 /* Return the last LOOP_BEG note appearing after INSN, before the start of
824 the next basic block. Return INSN if there are no such notes.
825
826 When emitting a jump to redirect a fallthru edge, the jump should always
827 appear after the LOOP_BEG notes, as the loop optimizer expects a loop to
828 start either with a fallthru edge or with a jump, following the LOOP_BEG
829 note, to the loop exit test. */
830
831 static rtx
832 last_loop_beg_note (insn)
833 rtx insn;
834 {
835 rtx last = insn;
836
837 for (insn = NEXT_INSN (insn); insn && GET_CODE (insn) == NOTE
838 && NOTE_LINE_NUMBER (insn) != NOTE_INSN_BASIC_BLOCK;
839 insn = NEXT_INSN (insn))
840 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG)
841 last = insn;
842
843 return last;
844 }
845
846 /* Attempt to change code to redirect edge E to TARGET. Do not do so at the
847 expense of adding new instructions or reordering basic blocks.
848
849 The function can also be called with the edge destination equal to TARGET.
850 Then it should try the simplifications and do nothing if none is possible.
851
852 Return true if the transformation succeeded. We still return false in case
853 E already pointed to TARGET and we did not manage to simplify the
854 instruction stream. */
855
856 bool
857 redirect_edge_and_branch (e, target)
858 edge e;
859 basic_block target;
860 {
861 rtx tmp;
862 rtx old_label = e->dest->head;
863 basic_block src = e->src;
864 rtx insn = src->end;
865
866 if (e->flags & (EDGE_ABNORMAL_CALL | EDGE_EH))
867 return false;
868
869 if (try_redirect_by_replacing_jump (e, target))
870 return true;
871
872 /* Do this fast path late, as we want the above code to simplify for cases
873 where we are called on the single edge leaving a basic block that contains
874 a nontrivial jump insn. */
875 else if (e->dest == target)
876 return false;
877
878 /* We can only redirect non-fallthru edges of jump insn. */
879 if (e->flags & EDGE_FALLTHRU)
880 return false;
881 else if (GET_CODE (insn) != JUMP_INSN)
882 return false;
883
884 /* Recognize a tablejump and adjust all matching cases. */
885 if ((tmp = JUMP_LABEL (insn)) != NULL_RTX
886 && (tmp = NEXT_INSN (tmp)) != NULL_RTX
887 && GET_CODE (tmp) == JUMP_INSN
888 && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
889 || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
890 {
891 rtvec vec;
892 int j;
893 rtx new_label = block_label (target);
894
895 if (target == EXIT_BLOCK_PTR)
896 return false;
897 if (GET_CODE (PATTERN (tmp)) == ADDR_VEC)
898 vec = XVEC (PATTERN (tmp), 0);
899 else
900 vec = XVEC (PATTERN (tmp), 1);
901
902 for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
903 if (XEXP (RTVEC_ELT (vec, j), 0) == old_label)
904 {
905 RTVEC_ELT (vec, j) = gen_rtx_LABEL_REF (Pmode, new_label);
906 --LABEL_NUSES (old_label);
907 ++LABEL_NUSES (new_label);
908 }
909
910 /* Handle casesi dispatch insns */
911 if ((tmp = single_set (insn)) != NULL
912 && SET_DEST (tmp) == pc_rtx
913 && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
914 && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF
915 && XEXP (XEXP (SET_SRC (tmp), 2), 0) == old_label)
916 {
917 XEXP (SET_SRC (tmp), 2) = gen_rtx_LABEL_REF (VOIDmode,
918 new_label);
919 --LABEL_NUSES (old_label);
920 ++LABEL_NUSES (new_label);
921 }
922 }
923 else
924 {
925 /* ?? We may play the games with moving the named labels from
926 one basic block to the other in case only one computed_jump is
927 available. */
928 if (computed_jump_p (insn)
929 /* A return instruction can't be redirected. */
930 || returnjump_p (insn))
931 return false;
932
933 /* If the insn doesn't go where we think, we're confused. */
934 if (JUMP_LABEL (insn) != old_label)
935 abort ();
936
937 /* If the substitution doesn't succeed, die. This can happen
938 if the back end emitted unrecognizable instructions or if
939 target is exit block on some arches. */
940 if (!redirect_jump (insn, block_label (target), 0))
941 {
942 if (target == EXIT_BLOCK_PTR)
943 return false;
944 abort ();
945 }
946 }
947
948 if (rtl_dump_file)
949 fprintf (rtl_dump_file, "Edge %i->%i redirected to %i\n",
950 e->src->index, e->dest->index, target->index);
951
952 if (e->dest != target)
953 redirect_edge_succ_nodup (e, target);
954
955 return true;
956 }
957
958 /* Like force_nonfallthru below, but additionally performs redirection.
959 Used by redirect_edge_and_branch_force. */
960
961 static basic_block
962 force_nonfallthru_and_redirect (e, target)
963 edge e;
964 basic_block target;
965 {
966 basic_block jump_block, new_bb = NULL, src = e->src;
967 rtx note;
968 edge new_edge;
969 int abnormal_edge_flags = 0;
970
971 /* In case the last instruction is a conditional jump to the next
972 instruction, first redirect the jump itself and then continue
973 by creating a basic block afterwards to redirect the fallthru edge. */
974 if (e->src != ENTRY_BLOCK_PTR && e->dest != EXIT_BLOCK_PTR
975 && any_condjump_p (e->src->end)
976 /* When called from cfglayout, fallthru edges do not
977 necessarily go to the next block. */
978 && e->src->next_bb == e->dest
979 && JUMP_LABEL (e->src->end) == e->dest->head)
980 {
981 rtx note;
982 edge b = unchecked_make_edge (e->src, target, 0);
983
984 if (!redirect_jump (e->src->end, block_label (target), 0))
985 abort ();
986 note = find_reg_note (e->src->end, REG_BR_PROB, NULL_RTX);
987 if (note)
988 {
989 int prob = INTVAL (XEXP (note, 0));
990
991 b->probability = prob;
992 b->count = e->count * prob / REG_BR_PROB_BASE;
993 e->probability -= e->probability;
994 e->count -= b->count;
995 if (e->probability < 0)
996 e->probability = 0;
997 if (e->count < 0)
998 e->count = 0;
999 }
1000 }
1001
1002 if (e->flags & EDGE_ABNORMAL)
1003 {
1004 /* Irritating special case - fallthru edge to the same block as an abnormal
1005 edge.
1006 We can't redirect the abnormal edge, but we can still split the fallthru
1007 edge and create a separate abnormal edge to the original destination.
1008 This allows bb-reorder to make such an edge non-fallthru. */
1009 if (e->dest != target)
1010 abort ();
1011 abnormal_edge_flags = e->flags & ~(EDGE_FALLTHRU | EDGE_CAN_FALLTHRU);
1012 e->flags &= EDGE_FALLTHRU | EDGE_CAN_FALLTHRU;
1013 }
1014 else if (!(e->flags & EDGE_FALLTHRU))
1015 abort ();
1016 else if (e->src == ENTRY_BLOCK_PTR)
1017 {
1018 /* We can't redirect the entry block. Create an empty block at the
1019 start of the function which we use to add the new jump. */
1020 edge *pe1;
1021 basic_block bb = create_basic_block (e->dest->head, NULL, ENTRY_BLOCK_PTR);
1022
1023 /* Change the existing edge's source to be the new block, and add
1024 a new edge from the entry block to the new block. */
1025 e->src = bb;
1026 for (pe1 = &ENTRY_BLOCK_PTR->succ; *pe1; pe1 = &(*pe1)->succ_next)
1027 if (*pe1 == e)
1028 {
1029 *pe1 = e->succ_next;
1030 break;
1031 }
1032 e->succ_next = 0;
1033 bb->succ = e;
1034 make_single_succ_edge (ENTRY_BLOCK_PTR, bb, EDGE_FALLTHRU);
1035 }
1036
1037 if (e->src->succ->succ_next || abnormal_edge_flags)
1038 {
1039 /* Create the new structures. */
1040
1041 /* Position the new block correctly relative to loop notes. */
1042 note = last_loop_beg_note (e->src->end);
1043 note = NEXT_INSN (note);
1044
1045 /* ... and ADDR_VECs. */
1046 if (note != NULL
1047 && GET_CODE (note) == CODE_LABEL
1048 && NEXT_INSN (note)
1049 && GET_CODE (NEXT_INSN (note)) == JUMP_INSN
1050 && (GET_CODE (PATTERN (NEXT_INSN (note))) == ADDR_DIFF_VEC
1051 || GET_CODE (PATTERN (NEXT_INSN (note))) == ADDR_VEC))
1052 note = NEXT_INSN (NEXT_INSN (note));
1053
1054 jump_block = create_basic_block (note, NULL, e->src);
1055 jump_block->count = e->count;
1056 jump_block->frequency = EDGE_FREQUENCY (e);
1057 jump_block->loop_depth = target->loop_depth;
1058
1059 if (target->global_live_at_start)
1060 {
1061 jump_block->global_live_at_start
1062 = OBSTACK_ALLOC_REG_SET (&flow_obstack);
1063 jump_block->global_live_at_end
1064 = OBSTACK_ALLOC_REG_SET (&flow_obstack);
1065 COPY_REG_SET (jump_block->global_live_at_start,
1066 target->global_live_at_start);
1067 COPY_REG_SET (jump_block->global_live_at_end,
1068 target->global_live_at_start);
1069 }
1070
1071 /* Wire edge in. */
1072 new_edge = make_edge (e->src, jump_block, EDGE_FALLTHRU);
1073 new_edge->probability = e->probability;
1074 new_edge->count = e->count;
1075
1076 /* Redirect old edge. */
1077 redirect_edge_pred (e, jump_block);
1078 e->probability = REG_BR_PROB_BASE;
1079
1080 new_bb = jump_block;
1081 }
1082 else
1083 jump_block = e->src;
1084
1085 e->flags &= ~EDGE_FALLTHRU;
1086 if (target == EXIT_BLOCK_PTR)
1087 {
1088 if (HAVE_return)
1089 emit_jump_insn_after (gen_return (), jump_block->end);
1090 else
1091 abort ();
1092 }
1093 else
1094 {
1095 rtx label = block_label (target);
1096 emit_jump_insn_after (gen_jump (label), jump_block->end);
1097 JUMP_LABEL (jump_block->end) = label;
1098 LABEL_NUSES (label)++;
1099 }
1100
1101 emit_barrier_after (jump_block->end);
1102 redirect_edge_succ_nodup (e, target);
1103
1104 if (abnormal_edge_flags)
1105 make_edge (src, target, abnormal_edge_flags);
1106
1107 return new_bb;
1108 }
1109
1110 /* Edge E is assumed to be a fallthru edge. Emit the needed jump instruction
1111 (and possibly create a new basic block) to make the edge non-fallthru.
1112 Return the newly created BB or NULL if none was created. */
1113
1114 basic_block
1115 force_nonfallthru (e)
1116 edge e;
1117 {
1118 return force_nonfallthru_and_redirect (e, e->dest);
1119 }
1120
1121 /* Redirect edge even at the expense of creating new jump insn or
1122 basic block. Return new basic block if created, NULL otherwise.
1123 Abort if conversion is impossible. */
1124
1125 basic_block
1126 redirect_edge_and_branch_force (e, target)
1127 edge e;
1128 basic_block target;
1129 {
1130 if (redirect_edge_and_branch (e, target)
1131 || e->dest == target)
1132 return NULL;
1133
1134 /* In case the edge redirection failed, try to force it to be non-fallthru
1135 and redirect newly created simplejump. */
1136 return force_nonfallthru_and_redirect (e, target);
1137 }
1138
1139 /* The given edge should potentially be a fallthru edge. If that is in
1140 fact true, delete the jump and barriers that are in the way. */
1141
1142 void
1143 tidy_fallthru_edge (e, b, c)
1144 edge e;
1145 basic_block b, c;
1146 {
1147 rtx q;
1148
1149 /* ??? In a late-running flow pass, other folks may have deleted basic
1150 blocks by nopping out blocks, leaving multiple BARRIERs between here
1151 and the target label. They ought to be chastised and fixed.
1152
1153 We can also wind up with a sequence of undeletable labels between
1154 one block and the next.
1155
1156 So search through a sequence of barriers, labels, and notes for
1157 the head of block C and assert that we really do fall through. */
1158
1159 for (q = NEXT_INSN (b->end); q != c->head; q = NEXT_INSN (q))
1160 if (INSN_P (q))
1161 return;
1162
1163 /* Remove what will soon cease being the jump insn from the source block.
1164 If block B consisted only of this single jump, turn it into a deleted
1165 note. */
1166 q = b->end;
1167 if (GET_CODE (q) == JUMP_INSN
1168 && onlyjump_p (q)
1169 && (any_uncondjump_p (q)
1170 || (b->succ == e && e->succ_next == NULL)))
1171 {
1172 #ifdef HAVE_cc0
1173 /* If this was a conditional jump, we need to also delete
1174 the insn that set cc0. */
1175 if (any_condjump_p (q) && only_sets_cc0_p (PREV_INSN (q)))
1176 q = PREV_INSN (q);
1177 #endif
1178
1179 q = PREV_INSN (q);
1180
1181 /* We don't want a block to end on a line-number note since that has
1182 the potential of changing the code between -g and not -g. */
1183 while (GET_CODE (q) == NOTE && NOTE_LINE_NUMBER (q) >= 0)
1184 q = PREV_INSN (q);
1185 }
1186
1187 /* Selectively unlink the sequence. */
1188 if (q != PREV_INSN (c->head))
1189 delete_insn_chain (NEXT_INSN (q), PREV_INSN (c->head));
1190
1191 e->flags |= EDGE_FALLTHRU;
1192 }
1193
1194 /* Fix up edges that now fall through, or rather should now fall through
1195 but previously required a jump around now deleted blocks. Simplify
1196 the search by only examining blocks numerically adjacent, since this
1197 is how find_basic_blocks created them. */
1198
1199 void
1200 tidy_fallthru_edges ()
1201 {
1202 basic_block b, c;
1203
1204 if (ENTRY_BLOCK_PTR->next_bb == EXIT_BLOCK_PTR)
1205 return;
1206
1207 FOR_BB_BETWEEN (b, ENTRY_BLOCK_PTR->next_bb, EXIT_BLOCK_PTR->prev_bb, next_bb)
1208 {
1209 edge s;
1210
1211 c = b->next_bb;
1212
1213 /* We care about simple conditional or unconditional jumps with
1214 a single successor.
1215
1216 If we had a conditional branch to the next instruction when
1217 find_basic_blocks was called, then there will only be one
1218 out edge for the block which ended with the conditional
1219 branch (since we do not create duplicate edges).
1220
1221 Furthermore, the edge will be marked as a fallthru because we
1222 merge the flags for the duplicate edges. So we do not want to
1223 check that the edge is not a FALLTHRU edge. */
1224
1225 if ((s = b->succ) != NULL
1226 && ! (s->flags & EDGE_COMPLEX)
1227 && s->succ_next == NULL
1228 && s->dest == c
1229 /* If the jump insn has side effects, we can't tidy the edge. */
1230 && (GET_CODE (b->end) != JUMP_INSN
1231 || onlyjump_p (b->end)))
1232 tidy_fallthru_edge (s, b, c);
1233 }
1234 }
1235
1236 /* Helper function for split_edge. Return true in case the edge from BB2
1237 to BB1 is a back edge of a syntactic loop. */
1238
1239 static bool
1240 back_edge_of_syntactic_loop_p (bb1, bb2)
1241 basic_block bb1, bb2;
1242 {
1243 rtx insn;
1244 int count = 0;
1245 basic_block bb;
1246
1247 if (bb1 == bb2)
1248 return true;
1249
1250 /* ??? Could we guarantee that bb indices are monotone, so that we could
1251 just compare them? */
1252 for (bb = bb1; bb && bb != bb2; bb = bb->next_bb)
1253 continue;
1254
1255 if (!bb)
1256 return false;
1257
1258 for (insn = bb1->end; insn != bb2->head && count >= 0;
1259 insn = NEXT_INSN (insn))
1260 if (GET_CODE (insn) == NOTE)
1261 {
1262 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG)
1263 count++;
1264 else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END)
1265 count--;
1266 }
1267
1268 return count >= 0;
1269 }
1270
1271 /* Split a (typically critical) edge. Return the new block.
1272 Abort on abnormal edges.
1273
1274 ??? The code generally expects to be called on critical edges.
1275 The case of a block ending in an unconditional jump to a
1276 block with multiple predecessors is not handled optimally. */
1277
1278 basic_block
1279 split_edge (edge_in)
1280 edge edge_in;
1281 {
1282 basic_block bb;
1283 edge edge_out;
1284 rtx before;
1285
1286 /* Abnormal edges cannot be split. */
1287 if ((edge_in->flags & EDGE_ABNORMAL) != 0)
1288 abort ();
1289
1290 /* We are going to place the new block in front of the edge destination.
1291 Make sure the destination has no fallthru predecessors. */
1292 if ((edge_in->flags & EDGE_FALLTHRU) == 0)
1293 {
1294 edge e;
1295
1296 for (e = edge_in->dest->pred; e; e = e->pred_next)
1297 if (e->flags & EDGE_FALLTHRU)
1298 break;
1299
1300 if (e)
1301 force_nonfallthru (e);
1302 }
1303
1304 /* Create the basic block note.
1305
1306 Where we place the note can have a noticeable impact on the generated
1307 code. Consider this cfg:
1308
1309 E
1310 |
1311 0
1312 / \
1313 +->1-->2--->E
1314 | |
1315 +--+
1316
1317 If we need to insert an insn on the edge from block 0 to block 1,
1318 we want to ensure the instructions we insert are outside of any
1319 loop notes that physically sit between block 0 and block 1. Otherwise
1320 we confuse the loop optimizer into thinking the loop is a phony. */
1321
1322 if (edge_in->dest != EXIT_BLOCK_PTR
1323 && PREV_INSN (edge_in->dest->head)
1324 && GET_CODE (PREV_INSN (edge_in->dest->head)) == NOTE
1325 && (NOTE_LINE_NUMBER (PREV_INSN (edge_in->dest->head))
1326 == NOTE_INSN_LOOP_BEG)
1327 && !back_edge_of_syntactic_loop_p (edge_in->dest, edge_in->src))
1328 before = PREV_INSN (edge_in->dest->head);
1329 else if (edge_in->dest != EXIT_BLOCK_PTR)
1330 before = edge_in->dest->head;
1331 else
1332 before = NULL_RTX;
1333
1334 bb = create_basic_block (before, NULL, edge_in->dest->prev_bb);
1335 bb->count = edge_in->count;
1336 bb->frequency = EDGE_FREQUENCY (edge_in);
1337
1338 /* ??? This info is likely going to be out of date very soon. */
1339 if (edge_in->dest->global_live_at_start)
1340 {
1341 bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (&flow_obstack);
1342 bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (&flow_obstack);
1343 COPY_REG_SET (bb->global_live_at_start,
1344 edge_in->dest->global_live_at_start);
1345 COPY_REG_SET (bb->global_live_at_end,
1346 edge_in->dest->global_live_at_start);
1347 }
1348
1349 edge_out = make_single_succ_edge (bb, edge_in->dest, EDGE_FALLTHRU);
1350
1351 /* For non-fallthru edges, we must adjust the predecessor's
1352 jump instruction to target our new block. */
1353 if ((edge_in->flags & EDGE_FALLTHRU) == 0)
1354 {
1355 if (!redirect_edge_and_branch (edge_in, bb))
1356 abort ();
1357 }
1358 else
1359 redirect_edge_succ (edge_in, bb);
1360
1361 return bb;
1362 }
1363
1364 /* Queue instructions for insertion on an edge between two basic blocks.
1365 The new instructions and basic blocks (if any) will not appear in the
1366 CFG until commit_edge_insertions is called. */
1367
1368 void
1369 insert_insn_on_edge (pattern, e)
1370 rtx pattern;
1371 edge e;
1372 {
1373 /* We cannot insert instructions on an abnormal critical edge.
1374 It will be easier to find the culprit if we die now. */
1375 if ((e->flags & EDGE_ABNORMAL) && EDGE_CRITICAL_P (e))
1376 abort ();
1377
1378 if (e->insns == NULL_RTX)
1379 start_sequence ();
1380 else
1381 push_to_sequence (e->insns);
1382
1383 emit_insn (pattern);
1384
1385 e->insns = get_insns ();
1386 end_sequence ();
1387 }
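/* A typical caller (hypothetical sketch; DEST and SRC are assumed to be
   register rtxes) builds the insns in a sequence and queues them:

       start_sequence ();
       emit_move_insn (dest, src);
       seq = get_insns ();
       end_sequence ();
       insert_insn_on_edge (seq, e);

   Nothing is added to the insn stream until commit_edge_insertions
   or commit_edge_insertions_watch_calls is called.  */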
1388
1389 /* Update the CFG for the instructions queued on edge E. */
1390
1391 static void
1392 commit_one_edge_insertion (e, watch_calls)
1393 edge e;
1394 int watch_calls;
1395 {
1396 rtx before = NULL_RTX, after = NULL_RTX, insns, tmp, last;
1397 basic_block bb = NULL;
1398
1399 /* Pull the insns off the edge now since the edge might go away. */
1400 insns = e->insns;
1401 e->insns = NULL_RTX;
1402
1403 /* Special case -- avoid inserting code between a call and the insn
1404 that stores its return value. */
1405 if (watch_calls && (e->flags & EDGE_FALLTHRU) && !e->dest->pred->pred_next
1406 && e->src != ENTRY_BLOCK_PTR
1407 && GET_CODE (e->src->end) == CALL_INSN)
1408 {
1409 rtx next = next_nonnote_insn (e->src->end);
1410
1411 after = e->dest->head;
1412 /* The first insn after the call may be a stack pop, skip it. */
1413 while (next
1414 && keep_with_call_p (next))
1415 {
1416 after = next;
1417 next = next_nonnote_insn (next);
1418 }
1419 bb = e->dest;
1420 }
1421 if (!before && !after)
1422 {
1423 /* Figure out where to put these things. If the destination has
1424 one predecessor, insert there. Except for the exit block. */
1425 if (e->dest->pred->pred_next == NULL && e->dest != EXIT_BLOCK_PTR)
1426 {
1427 bb = e->dest;
1428
1429 /* Get the location correct wrt a code label, and "nice" wrt
1430 a basic block note, and before everything else. */
1431 tmp = bb->head;
1432 if (GET_CODE (tmp) == CODE_LABEL)
1433 tmp = NEXT_INSN (tmp);
1434 if (NOTE_INSN_BASIC_BLOCK_P (tmp))
1435 tmp = NEXT_INSN (tmp);
1436 if (tmp == bb->head)
1437 before = tmp;
1438 else if (tmp)
1439 after = PREV_INSN (tmp);
1440 else
1441 after = get_last_insn ();
1442 }
1443
1444 /* If the source has one successor and the edge is not abnormal,
1445 insert there. Except for the entry block. */
1446 else if ((e->flags & EDGE_ABNORMAL) == 0
1447 && e->src->succ->succ_next == NULL
1448 && e->src != ENTRY_BLOCK_PTR)
1449 {
1450 bb = e->src;
1451
1452 /* It is possible to have a non-simple jump here. Consider a target
1453 where some forms of unconditional jumps clobber a register. This
1454 happens on the fr30 for example.
1455
1456 We know this block has a single successor, so we can just emit
1457 the queued insns before the jump. */
1458 if (GET_CODE (bb->end) == JUMP_INSN)
1459 for (before = bb->end;
1460 GET_CODE (PREV_INSN (before)) == NOTE
1461 && NOTE_LINE_NUMBER (PREV_INSN (before)) ==
1462 NOTE_INSN_LOOP_BEG; before = PREV_INSN (before))
1463 ;
1464 else
1465 {
1466 /* We'd better be fallthru, or we've lost track of what's what. */
1467 if ((e->flags & EDGE_FALLTHRU) == 0)
1468 abort ();
1469
1470 after = bb->end;
1471 }
1472 }
1473 /* Otherwise we must split the edge. */
1474 else
1475 {
1476 bb = split_edge (e);
1477 after = bb->end;
1478 }
1479 }
1480
1481 /* Now that we've found the spot, do the insertion. */
1482
1483 if (before)
1484 {
1485 emit_insn_before (insns, before);
1486 last = prev_nonnote_insn (before);
1487 }
1488 else
1489 last = emit_insn_after (insns, after);
1490
1491 if (returnjump_p (last))
1492 {
1493 /* ??? Remove all outgoing edges from BB and add one for EXIT.
1494 This is not currently a problem because this only happens
1495 for the (single) epilogue, which already has a fallthru edge
1496 to EXIT. */
1497
1498 e = bb->succ;
1499 if (e->dest != EXIT_BLOCK_PTR
1500 || e->succ_next != NULL || (e->flags & EDGE_FALLTHRU) == 0)
1501 abort ();
1502
1503 e->flags &= ~EDGE_FALLTHRU;
1504 emit_barrier_after (last);
1505
1506 if (before)
1507 delete_insn (before);
1508 }
1509 else if (GET_CODE (last) == JUMP_INSN)
1510 abort ();
1511
1512 /* Mark the basic block for find_sub_basic_blocks. */
1513 bb->aux = &bb->aux;
1514 }
1515
1516 /* Update the CFG for all queued instructions. */
1517
1518 void
1519 commit_edge_insertions ()
1520 {
1521 basic_block bb;
1522 sbitmap blocks;
1523 bool changed = false;
1524
1525 #ifdef ENABLE_CHECKING
1526 verify_flow_info ();
1527 #endif
1528
1529 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
1530 {
1531 edge e, next;
1532
1533 for (e = bb->succ; e; e = next)
1534 {
1535 next = e->succ_next;
1536 if (e->insns)
1537 {
1538 changed = true;
1539 commit_one_edge_insertion (e, false);
1540 }
1541 }
1542 }
1543
1544 if (!changed)
1545 return;
1546
1547 blocks = sbitmap_alloc (last_basic_block);
1548 sbitmap_zero (blocks);
1549 FOR_EACH_BB (bb)
1550 if (bb->aux)
1551 {
1552 SET_BIT (blocks, bb->index);
1553 bb->aux = NULL;
1554 }
1555 find_many_sub_basic_blocks (blocks);
1556 sbitmap_free (blocks);
1557 }
1558
1559 /* Update the CFG for all queued instructions, taking special care when
1560 inserting code on edges between a call and the insn storing its return value. */
1561
1562 void
1563 commit_edge_insertions_watch_calls ()
1564 {
1565 basic_block bb;
1566 sbitmap blocks;
1567 bool changed = false;
1568
1569 #ifdef ENABLE_CHECKING
1570 verify_flow_info ();
1571 #endif
1572
1573 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
1574 {
1575 edge e, next;
1576
1577 for (e = bb->succ; e; e = next)
1578 {
1579 next = e->succ_next;
1580 if (e->insns)
1581 {
1582 changed = true;
1583 commit_one_edge_insertion (e, true);
1584 }
1585 }
1586 }
1587
1588 if (!changed)
1589 return;
1590
1591 blocks = sbitmap_alloc (last_basic_block);
1592 sbitmap_zero (blocks);
1593 FOR_EACH_BB (bb)
1594 if (bb->aux)
1595 {
1596 SET_BIT (blocks, bb->index);
1597 bb->aux = NULL;
1598 }
1599 find_many_sub_basic_blocks (blocks);
1600 sbitmap_free (blocks);
1601 }
1602
1603 /* Print out one basic block with live information at start and end. */
1604
1605 void
1606 dump_bb (bb, outf)
1607 basic_block bb;
1608 FILE *outf;
1609 {
1610 rtx insn;
1611 rtx last;
1612 edge e;
1613
1614 fprintf (outf, ";; Basic block %d, loop depth %d, count ",
1615 bb->index, bb->loop_depth);
1616 fprintf (outf, HOST_WIDEST_INT_PRINT_DEC, (HOST_WIDEST_INT) bb->count);
1617 putc ('\n', outf);
1618
1619 fputs (";; Predecessors: ", outf);
1620 for (e = bb->pred; e; e = e->pred_next)
1621 dump_edge_info (outf, e, 0);
1622 putc ('\n', outf);
1623
1624 fputs (";; Registers live at start:", outf);
1625 dump_regset (bb->global_live_at_start, outf);
1626 putc ('\n', outf);
1627
1628 for (insn = bb->head, last = NEXT_INSN (bb->end); insn != last;
1629 insn = NEXT_INSN (insn))
1630 print_rtl_single (outf, insn);
1631
1632 fputs (";; Registers live at end:", outf);
1633 dump_regset (bb->global_live_at_end, outf);
1634 putc ('\n', outf);
1635
1636 fputs (";; Successors: ", outf);
1637 for (e = bb->succ; e; e = e->succ_next)
1638 dump_edge_info (outf, e, 1);
1639 putc ('\n', outf);
1640 }
1641
1642 void
1643 debug_bb (bb)
1644 basic_block bb;
1645 {
1646 dump_bb (bb, stderr);
1647 }
1648
1649 void
1650 debug_bb_n (n)
1651 int n;
1652 {
1653 dump_bb (BASIC_BLOCK (n), stderr);
1654 }
1655
1656 /* Like print_rtl, but also print out live information for the start of each
1657 basic block. */
1658
1659 void
1660 print_rtl_with_bb (outf, rtx_first)
1661 FILE *outf;
1662 rtx rtx_first;
1663 {
1664 rtx tmp_rtx;
1665
1666 if (rtx_first == 0)
1667 fprintf (outf, "(nil)\n");
1668 else
1669 {
1670 enum bb_state { NOT_IN_BB, IN_ONE_BB, IN_MULTIPLE_BB };
1671 int max_uid = get_max_uid ();
1672 basic_block *start
1673 = (basic_block *) xcalloc (max_uid, sizeof (basic_block));
1674 basic_block *end
1675 = (basic_block *) xcalloc (max_uid, sizeof (basic_block));
1676 enum bb_state *in_bb_p
1677 = (enum bb_state *) xcalloc (max_uid, sizeof (enum bb_state));
1678
1679 basic_block bb;
1680
1681 FOR_EACH_BB_REVERSE (bb)
1682 {
1683 rtx x;
1684
1685 start[INSN_UID (bb->head)] = bb;
1686 end[INSN_UID (bb->end)] = bb;
1687 for (x = bb->head; x != NULL_RTX; x = NEXT_INSN (x))
1688 {
1689 enum bb_state state = IN_MULTIPLE_BB;
1690
1691 if (in_bb_p[INSN_UID (x)] == NOT_IN_BB)
1692 state = IN_ONE_BB;
1693 in_bb_p[INSN_UID (x)] = state;
1694
1695 if (x == bb->end)
1696 break;
1697 }
1698 }
1699
1700 for (tmp_rtx = rtx_first; NULL != tmp_rtx; tmp_rtx = NEXT_INSN (tmp_rtx))
1701 {
1702 int did_output;
1703
1704 if ((bb = start[INSN_UID (tmp_rtx)]) != NULL)
1705 {
1706 fprintf (outf, ";; Start of basic block %d, registers live:",
1707 bb->index);
1708 dump_regset (bb->global_live_at_start, outf);
1709 putc ('\n', outf);
1710 }
1711
1712 if (in_bb_p[INSN_UID (tmp_rtx)] == NOT_IN_BB
1713 && GET_CODE (tmp_rtx) != NOTE
1714 && GET_CODE (tmp_rtx) != BARRIER)
1715 fprintf (outf, ";; Insn is not within a basic block\n");
1716 else if (in_bb_p[INSN_UID (tmp_rtx)] == IN_MULTIPLE_BB)
1717 fprintf (outf, ";; Insn is in multiple basic blocks\n");
1718
1719 did_output = print_rtl_single (outf, tmp_rtx);
1720
1721 if ((bb = end[INSN_UID (tmp_rtx)]) != NULL)
1722 {
1723 fprintf (outf, ";; End of basic block %d, registers live:\n",
1724 bb->index);
1725 dump_regset (bb->global_live_at_end, outf);
1726 putc ('\n', outf);
1727 }
1728
1729 if (did_output)
1730 putc ('\n', outf);
1731 }
1732
1733 free (start);
1734 free (end);
1735 free (in_bb_p);
1736 }
1737
1738 if (current_function_epilogue_delay_list != 0)
1739 {
1740 fprintf (outf, "\n;; Insns in epilogue delay list:\n\n");
1741 for (tmp_rtx = current_function_epilogue_delay_list; tmp_rtx != 0;
1742 tmp_rtx = XEXP (tmp_rtx, 1))
1743 print_rtl_single (outf, XEXP (tmp_rtx, 0));
1744 }
1745 }
1746
1747 void
1748 update_br_prob_note (bb)
1749 basic_block bb;
1750 {
1751 rtx note;
1752 if (GET_CODE (bb->end) != JUMP_INSN)
1753 return;
1754 note = find_reg_note (bb->end, REG_BR_PROB, NULL_RTX);
1755 if (!note || INTVAL (XEXP (note, 0)) == BRANCH_EDGE (bb)->probability)
1756 return;
1757 XEXP (note, 0) = GEN_INT (BRANCH_EDGE (bb)->probability);
1758 }
1759
1760 /* Verify the CFG consistency. This function checks some CFG invariants and
1761 aborts when something is wrong. Hopefully this function will help to
1762 convert many optimization passes to keep the CFG consistent.
1763
1764 Currently it does the following checks:
1765
1766 - test head/end pointers
1767 - overlapping of basic blocks
1768 - edge list correctness
1769 - headers of basic blocks (the NOTE_INSN_BASIC_BLOCK note)
1770 - tails of basic blocks (ensure that boundary is necessary)
1771 - scans body of the basic block for JUMP_INSN, CODE_LABEL
1772 and NOTE_INSN_BASIC_BLOCK
1773 - check that all insns are in the basic blocks
1774 (except the switch handling code, barriers and notes)
1775 - check that all returns are followed by barriers
1776
1777 In the future it can be extended to check a lot of other stuff as well
1778 (reachability of basic blocks, life information, etc. etc.). */
1779
1780 void
1781 verify_flow_info ()
1782 {
1783 const int max_uid = get_max_uid ();
1784 const rtx rtx_first = get_insns ();
1785 rtx last_head = get_last_insn ();
1786 basic_block *bb_info, *last_visited;
1787 size_t *edge_checksum;
1788 rtx x;
1789 int num_bb_notes, err = 0;
1790 basic_block bb, last_bb_seen;
1791
1792 bb_info = (basic_block *) xcalloc (max_uid, sizeof (basic_block));
1793 last_visited = (basic_block *) xcalloc (last_basic_block + 2,
1794 sizeof (basic_block));
1795 edge_checksum = (size_t *) xcalloc (last_basic_block + 2, sizeof (size_t));
1796
1797 /* Check bb chain & numbers. */
1798 last_bb_seen = ENTRY_BLOCK_PTR;
1799 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR->next_bb, NULL, next_bb)
1800 {
1801 if (bb != EXIT_BLOCK_PTR
1802 && bb != BASIC_BLOCK (bb->index))
1803 {
1804 error ("bb %d on wrong place", bb->index);
1805 err = 1;
1806 }
1807
1808 if (bb->prev_bb != last_bb_seen)
1809 {
1810 error ("prev_bb of %d should be %d, not %d",
1811 bb->index, last_bb_seen->index, bb->prev_bb->index);
1812 err = 1;
1813 }
1814
1815 last_bb_seen = bb;
1816 }
1817
1818 FOR_EACH_BB_REVERSE (bb)
1819 {
1820 rtx head = bb->head;
1821 rtx end = bb->end;
1822
1823 /* Verify the end of the basic block is in the INSN chain. */
1824 for (x = last_head; x != NULL_RTX; x = PREV_INSN (x))
1825 if (x == end)
1826 break;
1827
1828 if (!x)
1829 {
1830 error ("end insn %d for block %d not found in the insn stream",
1831 INSN_UID (end), bb->index);
1832 err = 1;
1833 }
1834
1835 /* Work backwards from the end to the head of the basic block
1836 to verify the head is in the RTL chain. */
1837 for (; x != NULL_RTX; x = PREV_INSN (x))
1838 {
1839 /* While walking over the insn chain, verify insns appear
1840 in only one basic block and initialize the BB_INFO array
1841 used by other passes. */
1842 if (bb_info[INSN_UID (x)] != NULL)
1843 {
1844 error ("insn %d is in multiple basic blocks (%d and %d)",
1845 INSN_UID (x), bb->index, bb_info[INSN_UID (x)]->index);
1846 err = 1;
1847 }
1848
1849 bb_info[INSN_UID (x)] = bb;
1850
1851 if (x == head)
1852 break;
1853 }
1854 if (!x)
1855 {
1856 error ("head insn %d for block %d not found in the insn stream",
1857 INSN_UID (head), bb->index);
1858 err = 1;
1859 }
1860
1861 last_head = x;
1862 }
1863
1864 /* Now check the basic blocks (boundaries etc.) */
1865 FOR_EACH_BB_REVERSE (bb)
1866 {
1867 int n_fallthru = 0, n_eh = 0, n_call = 0, n_abnormal = 0, n_branch = 0;
1868 edge e;
1869 rtx note;
1870
1871 if (INSN_P (bb->end)
1872 && (note = find_reg_note (bb->end, REG_BR_PROB, NULL_RTX))
1873 && bb->succ && bb->succ->succ_next
1874 && any_condjump_p (bb->end))
1875 {
1876 if (INTVAL (XEXP (note, 0)) != BRANCH_EDGE (bb)->probability)
1877 {
1878 error ("verify_flow_info: REG_BR_PROB does not match cfg %i %i",
1879 INTVAL (XEXP (note, 0)), BRANCH_EDGE (bb)->probability);
1880 err = 1;
1881 }
1882 }
1883 if (bb->count < 0)
1884 {
1885 error ("verify_flow_info: Wrong count of block %i %i",
1886 bb->index, (int)bb->count);
1887 err = 1;
1888 }
1889 if (bb->frequency < 0)
1890 {
1891 error ("verify_flow_info: Wrong frequency of block %i %i",
1892 bb->index, bb->frequency);
1893 err = 1;
1894 }
1895 for (e = bb->succ; e; e = e->succ_next)
1896 {
1897 if (last_visited [e->dest->index + 2] == bb)
1898 {
1899 error ("verify_flow_info: Duplicate edge %i->%i",
1900 e->src->index, e->dest->index);
1901 err = 1;
1902 }
1903 if (e->probability < 0 || e->probability > REG_BR_PROB_BASE)
1904 {
1905 error ("verify_flow_info: Wrong probability of edge %i->%i %i",
1906 e->src->index, e->dest->index, e->probability);
1907 err = 1;
1908 }
1909 if (e->count < 0)
1910 {
1911 error ("verify_flow_info: Wrong count of edge %i->%i %i",
1912 e->src->index, e->dest->index, (int)e->count);
1913 err = 1;
1914 }
1915
1916 last_visited [e->dest->index + 2] = bb;
1917
1918 if (e->flags & EDGE_FALLTHRU)
1919 n_fallthru++;
1920
          if ((e->flags & ~EDGE_DFS_BACK) == 0)
            n_branch++;

          if (e->flags & EDGE_ABNORMAL_CALL)
            n_call++;

          if (e->flags & EDGE_EH)
            n_eh++;
          else if (e->flags & EDGE_ABNORMAL)
            n_abnormal++;

          if ((e->flags & EDGE_FALLTHRU)
              && e->src != ENTRY_BLOCK_PTR
              && e->dest != EXIT_BLOCK_PTR)
            {
              rtx insn;

              if (e->src->next_bb != e->dest)
                {
                  error
                    ("verify_flow_info: Incorrect blocks for fallthru %i->%i",
                     e->src->index, e->dest->index);
                  err = 1;
                }
              else
                for (insn = NEXT_INSN (e->src->end); insn != e->dest->head;
                     insn = NEXT_INSN (insn))
                  if (GET_CODE (insn) == BARRIER
#ifndef CASE_DROPS_THROUGH
                      || INSN_P (insn)
#else
                      || (INSN_P (insn) && ! JUMP_TABLE_DATA_P (insn))
#endif
                      )
                    {
                      error ("verify_flow_info: Incorrect fallthru %i->%i",
                             e->src->index, e->dest->index);
                      fatal_insn ("wrong insn in the fallthru edge", insn);
                      err = 1;
                    }
            }

          if (e->src != bb)
            {
              error ("verify_flow_info: Basic block %d succ edge is corrupted",
                     bb->index);
              fprintf (stderr, "Predecessor: ");
              dump_edge_info (stderr, e, 0);
              fprintf (stderr, "\nSuccessor: ");
              dump_edge_info (stderr, e, 1);
              fprintf (stderr, "\n");
              err = 1;
            }

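          /* Every edge is added to its destination's checksum slot here and
             subtracted again when it is found on the destination's
             predecessor list below; a nonzero residue at the end means the
             successor and predecessor lists disagree.  */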
          edge_checksum[e->dest->index + 2] += (size_t) e;
        }

      if (n_eh && GET_CODE (PATTERN (bb->end)) != RESX
          && !find_reg_note (bb->end, REG_EH_REGION, NULL_RTX))
        {
          error ("Missing REG_EH_REGION note at the end of bb %i", bb->index);
          err = 1;
        }
      if (n_branch
          && (GET_CODE (bb->end) != JUMP_INSN
              || (n_branch > 1 && (any_uncondjump_p (bb->end)
                                   || any_condjump_p (bb->end)))))
        {
          error ("Too many outgoing branch edges from bb %i", bb->index);
          err = 1;
        }
      if (n_fallthru && any_uncondjump_p (bb->end))
        {
          error ("Fallthru edge after unconditional jump %i", bb->index);
          err = 1;
        }
      if (n_branch != 1 && any_uncondjump_p (bb->end))
        {
          error ("Wrong number of branch edges after unconditional jump %i",
                 bb->index);
          err = 1;
        }
      if (n_branch != 1 && any_condjump_p (bb->end)
          && JUMP_LABEL (bb->end) != bb->next_bb->head)
        {
          error ("Wrong number of branch edges after conditional jump %i",
                 bb->index);
          err = 1;
        }
      if (n_call && GET_CODE (bb->end) != CALL_INSN)
        {
          error ("Call edges for non-call insn in bb %i", bb->index);
          err = 1;
        }
      if (n_abnormal
          && (GET_CODE (bb->end) != CALL_INSN && n_call != n_abnormal)
          && (GET_CODE (bb->end) != JUMP_INSN
              || any_condjump_p (bb->end)
              || any_uncondjump_p (bb->end)))
        {
          error ("Abnormal edges with no purpose in bb %i", bb->index);
          err = 1;
        }

      if (!n_fallthru)
        {
          rtx insn;

          /* Ensure the existence of a barrier after a block with no
             fallthru edges.  */
          for (insn = bb->end; !insn || GET_CODE (insn) != BARRIER;
               insn = NEXT_INSN (insn))
            if (!insn
                || (GET_CODE (insn) == NOTE
                    && NOTE_LINE_NUMBER (insn) == NOTE_INSN_BASIC_BLOCK))
              {
                error ("missing barrier after block %i", bb->index);
                err = 1;
                break;
              }
        }

      for (e = bb->pred; e; e = e->pred_next)
        {
          if (e->dest != bb)
            {
              error ("basic block %d pred edge is corrupted", bb->index);
              fputs ("Predecessor: ", stderr);
              dump_edge_info (stderr, e, 0);
              fputs ("\nSuccessor: ", stderr);
              dump_edge_info (stderr, e, 1);
              fputc ('\n', stderr);
              err = 1;
            }
          edge_checksum[e->dest->index + 2] -= (size_t) e;
        }

      for (x = bb->head; x != NEXT_INSN (bb->end); x = NEXT_INSN (x))
        if (BLOCK_FOR_INSN (x) != bb)
          {
            debug_rtx (x);
            if (! BLOCK_FOR_INSN (x))
              error
                ("insn %d inside basic block %d but block_for_insn is NULL",
                 INSN_UID (x), bb->index);
            else
              error
                ("insn %d inside basic block %d but block_for_insn is %i",
                 INSN_UID (x), bb->index, BLOCK_FOR_INSN (x)->index);

            err = 1;
          }

      /* OK, pointers are correct.  Now check the header of the basic
         block.  It ought to contain an optional CODE_LABEL followed
         by the NOTE_INSN_BASIC_BLOCK note.  */
      x = bb->head;
      if (GET_CODE (x) == CODE_LABEL)
        {
          if (bb->end == x)
            {
              error ("NOTE_INSN_BASIC_BLOCK is missing for block %d",
                     bb->index);
              err = 1;
            }

          x = NEXT_INSN (x);
        }

      if (!NOTE_INSN_BASIC_BLOCK_P (x) || NOTE_BASIC_BLOCK (x) != bb)
        {
          error ("NOTE_INSN_BASIC_BLOCK is missing for block %d",
                 bb->index);
          err = 1;
        }

      if (bb->end == x)
        /* Do checks for empty blocks here.  */
        ;
      else
        for (x = NEXT_INSN (x); x; x = NEXT_INSN (x))
          {
            if (NOTE_INSN_BASIC_BLOCK_P (x))
              {
                error ("NOTE_INSN_BASIC_BLOCK %d in middle of basic block %d",
                       INSN_UID (x), bb->index);
                err = 1;
              }

            if (x == bb->end)
              break;

            if (GET_CODE (x) == JUMP_INSN
                || GET_CODE (x) == CODE_LABEL
                || GET_CODE (x) == BARRIER)
              {
                error ("in basic block %d:", bb->index);
                fatal_insn ("flow control insn inside a basic block", x);
              }
          }
    }

  /* Complete edge checksumming for ENTRY and EXIT.  */
  {
    edge e;

    for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
      edge_checksum[e->dest->index + 2] += (size_t) e;

    for (e = EXIT_BLOCK_PTR->pred; e; e = e->pred_next)
      edge_checksum[e->dest->index + 2] -= (size_t) e;
  }

  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
    if (edge_checksum[bb->index + 2])
      {
        error ("basic block %i edge lists are corrupted", bb->index);
        err = 1;
      }

  num_bb_notes = 0;
  last_bb_seen = ENTRY_BLOCK_PTR;

  for (x = rtx_first; x; x = NEXT_INSN (x))
    {
      if (NOTE_INSN_BASIC_BLOCK_P (x))
        {
          bb = NOTE_BASIC_BLOCK (x);

          num_bb_notes++;
          if (bb != last_bb_seen->next_bb)
            internal_error ("basic blocks not numbered consecutively");

          last_bb_seen = bb;
        }

      if (!bb_info[INSN_UID (x)])
        {
          switch (GET_CODE (x))
            {
            case BARRIER:
            case NOTE:
              break;

            case CODE_LABEL:
              /* An addr_vec is placed outside any basic block.  */
              if (NEXT_INSN (x)
                  && GET_CODE (NEXT_INSN (x)) == JUMP_INSN
                  && (GET_CODE (PATTERN (NEXT_INSN (x))) == ADDR_DIFF_VEC
                      || GET_CODE (PATTERN (NEXT_INSN (x))) == ADDR_VEC))
                x = NEXT_INSN (x);

              /* But in any case, non-deletable labels can appear anywhere.  */
              break;

            default:
              fatal_insn ("insn outside basic block", x);
            }
        }

      if (INSN_P (x)
          && GET_CODE (x) == JUMP_INSN
          && returnjump_p (x) && ! condjump_p (x)
          && ! (NEXT_INSN (x) && GET_CODE (NEXT_INSN (x)) == BARRIER))
        fatal_insn ("return not followed by barrier", x);
    }

  if (num_bb_notes != n_basic_blocks)
    internal_error
      ("number of bb notes in insn chain (%d) != n_basic_blocks (%d)",
       num_bb_notes, n_basic_blocks);

  if (err)
    internal_error ("verify_flow_info failed");

  /* Clean up.  */
  free (bb_info);
  free (last_visited);
  free (edge_checksum);
}
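
/* Usage sketch (illustrative only, not part of this file): a pass that has
   just rearranged the insn chain or the edge lists would typically re-run
   this checker only in checking-enabled builds, since it aborts via
   internal_error on failure:

       ... manipulate the CFG ...
   #ifdef ENABLE_CHECKING
       verify_flow_info ();
   #endif

   The ENABLE_CHECKING guard mirrors how other RTL passes invoke the
   verifier; treat the snippet as an illustration rather than a
   requirement.  */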

/* Assume that the preceding pass has possibly eliminated jump instructions
   or converted the unconditional jumps.  Eliminate from the CFG the edges
   that can no longer be taken.  Return true if any edges are eliminated.  */

bool
purge_dead_edges (bb)
     basic_block bb;
{
  edge e, next;
  rtx insn = bb->end, note;
  bool purged = false;

  /* If this instruction cannot trap, remove REG_EH_REGION notes.  */
  if (GET_CODE (insn) == INSN
      && (note = find_reg_note (insn, REG_EH_REGION, NULL)))
    {
      rtx eqnote;

      if (! may_trap_p (PATTERN (insn))
          || ((eqnote = find_reg_equal_equiv_note (insn))
              && ! may_trap_p (XEXP (eqnote, 0))))
        remove_note (insn, note);
    }

  /* Cleanup abnormal edges caused by exceptions or non-local gotos.  */
  for (e = bb->succ; e; e = next)
    {
      next = e->succ_next;
      if (e->flags & EDGE_EH)
        {
          if (can_throw_internal (bb->end))
            continue;
        }
      else if (e->flags & EDGE_ABNORMAL_CALL)
        {
          if (GET_CODE (bb->end) == CALL_INSN
              && (! (note = find_reg_note (insn, REG_EH_REGION, NULL))
                  || INTVAL (XEXP (note, 0)) >= 0))
            continue;
        }
      else
        continue;

      remove_edge (e);
      bb->flags |= BB_DIRTY;
      purged = true;
    }

  if (GET_CODE (insn) == JUMP_INSN)
    {
      rtx note;
      edge b, f;

      /* We care only about conditional jumps and simplejumps.  */
      if (!any_condjump_p (insn)
          && !returnjump_p (insn)
          && !simplejump_p (insn))
        return purged;

      /* Branch probability/prediction notes are defined only for
         condjumps.  We may have turned a condjump into a simplejump.  */
      if (simplejump_p (insn))
        {
          note = find_reg_note (insn, REG_BR_PROB, NULL);
          if (note)
            remove_note (insn, note);
          while ((note = find_reg_note (insn, REG_BR_PRED, NULL)))
            remove_note (insn, note);
        }

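      /* Walk the successor edges and remove those that the (possibly
         simplified) jump at the end of the block can no longer take.  */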
      for (e = bb->succ; e; e = next)
        {
          next = e->succ_next;

          /* Do not let abnormal flags leak from computed jumps that
             have been turned into simplejumps.  */

          e->flags &= ~EDGE_ABNORMAL;

          /* See if this edge is one we should keep.  */
          if ((e->flags & EDGE_FALLTHRU) && any_condjump_p (insn))
            /* A conditional jump can fall through into the next
               block, so we should keep the edge.  */
            continue;
          else if (e->dest != EXIT_BLOCK_PTR
                   && e->dest->head == JUMP_LABEL (insn))
            /* If the destination block is the target of the jump,
               keep the edge.  */
            continue;
          else if (e->dest == EXIT_BLOCK_PTR && returnjump_p (insn))
            /* If the destination block is the exit block, and this
               instruction is a return, then keep the edge.  */
            continue;
          else if ((e->flags & EDGE_EH) && can_throw_internal (insn))
            /* Keep the edges that correspond to exceptions thrown by
               this instruction and rematerialize the EDGE_ABNORMAL flag
               we just cleared above.  */
            {
              e->flags |= EDGE_ABNORMAL;
              continue;
            }

          /* We do not need this edge.  */
          bb->flags |= BB_DIRTY;
          purged = true;
          remove_edge (e);
        }

      if (!bb->succ || !purged)
        return purged;

      if (rtl_dump_file)
        fprintf (rtl_dump_file, "Purged edges from bb %i\n", bb->index);

      if (!optimize)
        return purged;

      /* Redistribute probabilities.  */
      if (!bb->succ->succ_next)
        {
          bb->succ->probability = REG_BR_PROB_BASE;
          bb->succ->count = bb->count;
        }
      else
        {
          note = find_reg_note (insn, REG_BR_PROB, NULL);
          if (!note)
            return purged;

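          /* Edge probabilities are expressed as fractions of
             REG_BR_PROB_BASE.  The REG_BR_PROB note gives the branch edge's
             probability; the fallthru edge receives the remainder, and the
             execution counts are scaled in the same proportions.  */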
          b = BRANCH_EDGE (bb);
          f = FALLTHRU_EDGE (bb);
          b->probability = INTVAL (XEXP (note, 0));
          f->probability = REG_BR_PROB_BASE - b->probability;
          b->count = bb->count * b->probability / REG_BR_PROB_BASE;
          f->count = bb->count * f->probability / REG_BR_PROB_BASE;
        }

      return purged;
    }

  /* If we don't see a jump insn, we don't know exactly why the block would
     have been broken at this point.  Look for a simple, non-fallthru edge,
     as these are only created by conditional branches.  If we find such an
     edge we know that there used to be a jump here and can then safely
     remove all non-fallthru edges.  */
  for (e = bb->succ; e && (e->flags & (EDGE_COMPLEX | EDGE_FALLTHRU));
       e = e->succ_next)
    ;

  if (!e)
    return purged;

  for (e = bb->succ; e; e = next)
    {
      next = e->succ_next;
      if (!(e->flags & EDGE_FALLTHRU))
        {
          bb->flags |= BB_DIRTY;
          remove_edge (e);
          purged = true;
        }
    }

  if (!bb->succ || bb->succ->succ_next)
    abort ();

  bb->succ->probability = REG_BR_PROB_BASE;
  bb->succ->count = bb->count;

  if (rtl_dump_file)
    fprintf (rtl_dump_file, "Purged non-fallthru edges from bb %i\n",
             bb->index);
  return purged;
}
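
/* Usage sketch (illustrative only): after a pass rewrites or deletes the
   jump ending a block, the stale successor edges can be dropped with a
   call such as

       if (purge_dead_edges (bb))
         ... the CFG changed; cached liveness or dominance data
             may now be stale ...

   purge_all_dead_edges below applies the same cleanup to every block.  */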

/* Search all basic blocks for potentially dead edges and purge them.  Return
   true if some edge has been eliminated.  */

bool
purge_all_dead_edges (update_life_p)
     int update_life_p;
{
  bool purged = false;
  sbitmap blocks = 0;
  basic_block bb;

  if (update_life_p)
    {
      blocks = sbitmap_alloc (last_basic_block);
      sbitmap_zero (blocks);
    }

  FOR_EACH_BB (bb)
    {
      bool purged_here = purge_dead_edges (bb);

      purged |= purged_here;
      if (purged_here && update_life_p)
        SET_BIT (blocks, bb->index);
    }

  if (update_life_p && purged)
    update_life_info (blocks, UPDATE_LIFE_GLOBAL,
                      PROP_DEATH_NOTES | PROP_SCAN_DEAD_CODE
                      | PROP_KILL_DEAD_CODE);

  if (update_life_p)
    sbitmap_free (blocks);
  return purged;
}

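/* Usage sketch (illustrative only): a pass that may have removed or
   simplified jumps throughout the function would typically finish with

       purge_all_dead_edges (0);

   Passing a nonzero argument instead asks for global life information to
   be recomputed for just the blocks whose edges were purged.  */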