1 /* Control flow graph manipulation code for GNU compiler.
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001, 2002 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
22 /* This file contains low level functions to manipulate the CFG and analyze it
23 that are aware of the RTL intermediate language.
25 Available functionality:
26 - CFG-aware instruction chain manipulation
27 delete_insn, delete_insn_chain
28 - Basic block manipulation
29 create_basic_block, flow_delete_block, split_block,
31 - Infrastructure to determine quickly basic block for insn
32 compute_bb_for_insn, update_bb_for_insn, set_block_for_insn,
33 - Edge redirection with updating and optimizing of insn chain
34 block_label, redirect_edge_and_branch,
35 redirect_edge_and_branch_force, tidy_fallthru_edge, force_nonfallthru
36 - Edge splitting and committing to edges
37 split_edge, insert_insn_on_edge, commit_edge_insertions
38 - Dumping and debugging
39 print_rtl_with_bb, dump_bb, debug_bb, debug_bb_n
40 - Consistency checking
42 - CFG updating after constant propagation
43 purge_dead_edges, purge_all_dead_edges */
49 #include "hard-reg-set.h"
50 #include "basic-block.h"
59 #include "insn-config.h"
61 /* Stubs in case we don't have a return insn. */
64 #define gen_return() NULL_RTX
67 /* The basic block structure for every insn, indexed by uid. */
68 varray_type basic_block_for_insn;
70 /* The labels mentioned in non-jump rtl. Valid during find_basic_blocks. */
71 /* ??? Should probably be using LABEL_NUSES instead. It would take a
72 bit of surgery to be able to use or co-opt the routines in jump. */
74 rtx tail_recursion_label_list;
/* Forward declarations for the static helpers defined later in this file.  */
76 static int can_delete_note_p PARAMS ((rtx));
77 static int can_delete_label_p PARAMS ((rtx));
78 static void commit_one_edge_insertion PARAMS ((edge, int));
79 static bool try_redirect_by_replacing_jump PARAMS ((edge, basic_block));
80 static rtx last_loop_beg_note PARAMS ((rtx));
81 static bool back_edge_of_syntactic_loop_p PARAMS ((basic_block, basic_block));
82 static basic_block force_nonfallthru_and_redirect PARAMS ((edge, basic_block));
84 /* Return true if NOTE is not one of the ones that must be kept paired,
85 so that we may simply delete it. */
88 can_delete_note_p (note)
/* Deleted, basic-block, and prediction notes have no BEG/END partner,
   so they are safe to remove individually.  */
91 return (NOTE_LINE_NUMBER (note) == NOTE_INSN_DELETED
92 || NOTE_LINE_NUMBER (note) == NOTE_INSN_BASIC_BLOCK
93 || NOTE_LINE_NUMBER (note) == NOTE_INSN_PREDICTION);
96 /* True if a given label can be deleted. */
99 can_delete_label_p (label)
/* LABEL is deletable only if nothing outside the insn chain can still
   refer to it: it is not marked preserved, it has no user-visible name,
   and it does not appear in the forced_labels or label_value_list
   expr lists.  */
102 return (!LABEL_PRESERVE_P (label)
103 /* User declared labels must be preserved. */
104 && LABEL_NAME (label) == 0
105 && !in_expr_list_p (forced_labels, label)
106 && !in_expr_list_p (label_value_list, label));
109 /* Delete INSN by patching it out. Return the next insn.
   An undeletable CODE_LABEL is not removed but demoted to a
   NOTE_INSN_DELETED_LABEL note so external references stay valid.  */
115 rtx next = NEXT_INSN (insn);
117 bool really_delete = true;
119 if (GET_CODE (insn) == CODE_LABEL)
121 /* Some labels can't be directly removed from the INSN chain, as they
122 might be references via variables, constant pool etc.
123 Convert them to the special NOTE_INSN_DELETED_LABEL note. */
124 if (! can_delete_label_p (insn))
126 const char *name = LABEL_NAME (insn);
128 really_delete = false;
129 PUT_CODE (insn, NOTE);
130 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED_LABEL;
131 NOTE_SOURCE_FILE (insn) = name;
134 remove_node_from_expr_list (insn, &nonlocal_goto_handler_labels);
139 /* If this insn has already been deleted, something is very wrong. */
140 if (INSN_DELETED_P (insn))
143 INSN_DELETED_P (insn) = 1;
146 /* If deleting a jump, decrement the use count of the label. Deleting
147 the label itself should happen in the normal course of block merging. */
148 if (GET_CODE (insn) == JUMP_INSN
150 && GET_CODE (JUMP_LABEL (insn)) == CODE_LABEL)
151 LABEL_NUSES (JUMP_LABEL (insn))--;
153 /* Also if deleting an insn that references a label. */
154 else if ((note = find_reg_note (insn, REG_LABEL, NULL_RTX)) != NULL_RTX
155 && GET_CODE (XEXP (note, 0)) == CODE_LABEL)
156 LABEL_NUSES (XEXP (note, 0))--;
/* For a jump table (ADDR_VEC / ADDR_DIFF_VEC) also release the use
   counts of every label the vector still references.  */
158 if (GET_CODE (insn) == JUMP_INSN
159 && (GET_CODE (PATTERN (insn)) == ADDR_VEC
160 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC))
162 rtx pat = PATTERN (insn);
163 int diff_vec_p = GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC;
164 int len = XVECLEN (pat, diff_vec_p);
167 for (i = 0; i < len; i++)
169 rtx label = XEXP (XVECEXP (pat, diff_vec_p, i), 0);
171 /* When deleting code in bulk (e.g. removing many unreachable
172 blocks) we can delete a label that's a target of the vector
173 before deleting the vector itself. */
174 if (GET_CODE (label) != NOTE)
175 LABEL_NUSES (label)--;
182 /* Like delete_insn but also purge dead edges from BB. */
184 delete_insn_and_edges (insn)
/* The purge happens only when INSN is recorded as the last insn of its
   block - NOTE(review): presumably because only deleting a block-ending
   insn can invalidate outgoing edges.  */
190 if (basic_block_for_insn
192 && (unsigned int)INSN_UID (insn) < basic_block_for_insn->num_elements
193 && BLOCK_FOR_INSN (insn)
194 && BLOCK_FOR_INSN (insn)->end == insn)
196 x = delete_insn (insn);
198 purge_dead_edges (BLOCK_FOR_INSN (insn));
202 /* Unlink a chain of insns between START and FINISH, leaving notes
203 that must be paired. */
206 delete_insn_chain (start, finish)
211 /* Unchain the insns one by one. It would be quicker to delete all of these
212 with a single unchaining, rather than one at a time, but we need to keep
/* Paired notes (those can_delete_note_p rejects) are skipped rather
   than deleted.  */
216 next = NEXT_INSN (start);
217 if (GET_CODE (start) == NOTE && !can_delete_note_p (start))
220 next = delete_insn (start);
228 /* Like delete_insn but also purge dead edges from BB.
   FIRST..LAST is the inclusive chain to remove; edges are purged only
   when LAST ends its basic block.  */
230 delete_insn_chain_and_edges (first, last)
235 if (basic_block_for_insn
237 && (unsigned int)INSN_UID (last) < basic_block_for_insn->num_elements
238 && BLOCK_FOR_INSN (last)
239 && BLOCK_FOR_INSN (last)->end == last)
241 delete_insn_chain (first, last);
243 purge_dead_edges (BLOCK_FOR_INSN (last));
246 /* Create a new basic block consisting of the instructions between HEAD and END
247 inclusive. This function is designed to allow fast BB construction - reuses
248 the note and basic block struct in BB_NOTE, if any, and does not grow the
249 BASIC_BLOCK chain and should be used directly only by CFG construction code.
250 END can be NULL to create a new empty basic block before HEAD. Both END
251 and HEAD can be NULL to create a basic block at the end of the INSN chain.
252 AFTER is the basic block we should be put after. */
255 create_basic_block_structure (index, head, end, bb_note, after)
257 rtx head, end, bb_note;
/* Reuse BB_NOTE only if it carries a live basic-block structure and is
   not from an integrated (inlined) function body.  */
263 && ! RTX_INTEGRATED_P (bb_note)
264 && (bb = NOTE_BASIC_BLOCK (bb_note)) != NULL
267 /* If we found an existing note, thread it back onto the chain. */
271 if (GET_CODE (head) == CODE_LABEL)
275 after = PREV_INSN (head);
279 if (after != bb_note && NEXT_INSN (after) != bb_note)
280 reorder_insns (bb_note, bb_note, after);
284 /* Otherwise we must create a note and a basic block structure. */
290 = emit_note_after (NOTE_INSN_BASIC_BLOCK, get_last_insn ());
291 else if (GET_CODE (head) == CODE_LABEL && end)
293 bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK, head);
299 bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK, head);
305 NOTE_BASIC_BLOCK (bb_note) = bb;
308 /* Always include the bb note in the block. */
309 if (NEXT_INSN (end) == bb_note)
316 link_block (bb, after);
317 BASIC_BLOCK (index) = bb;
318 if (basic_block_for_insn)
319 update_bb_for_insn (bb);
321 /* Tag the block so that we know it has been used when considering
322 other basic block notes. */
328 /* Create new basic block consisting of instructions in between HEAD and END
329 and place it to the BB chain after block AFTER. END can be NULL to
330 create a new empty basic block before HEAD. Both END and HEAD can be NULL to
331 create basic block at the end of INSN chain. */
334 create_basic_block (head, end, after)
/* The new block always takes the next free index and basic_block_info
   is grown to hold it.  */
339 int index = last_basic_block++;
341 /* Place the new block to the end. */
342 VARRAY_GROW (basic_block_info, last_basic_block);
345 bb = create_basic_block_structure (index, head, end, NULL, after);
350 /* Delete the insns in a (non-live) block. We physically delete every
351 non-deleted-note insn, and update the flow graph appropriately.
353 Return nonzero if we deleted an exception handler. */
355 /* ??? Preserving all such notes strikes me as wrong. It would be nice
356 to post-process the stream to remove empty blocks, loops, ranges, etc. */
359 flow_delete_block_noexpunge (b)
362 int deleted_handler = 0;
365 /* If the head of this block is a CODE_LABEL, then it might be the
366 label for an exception handler which can't be reached.
368 We need to remove the label from the exception_handler_label list
369 and remove the associated NOTE_INSN_EH_REGION_BEG and
370 NOTE_INSN_EH_REGION_END notes. */
372 /* Get rid of all NOTE_INSN_PREDICTIONs hanging before the block. */
374 for (insn = PREV_INSN (b->head); insn; insn = PREV_INSN (insn))
376 if (GET_CODE (insn) != NOTE)
378 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_PREDICTION)
379 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
384 never_reached_warning (insn, b->end);
386 if (GET_CODE (insn) == CODE_LABEL)
387 maybe_remove_eh_handler (insn);
389 /* Include any jump table following the basic block. */
391 if (GET_CODE (end) == JUMP_INSN
392 && (tmp = JUMP_LABEL (end)) != NULL_RTX
393 && (tmp = NEXT_INSN (tmp)) != NULL_RTX
394 && GET_CODE (tmp) == JUMP_INSN
395 && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
396 || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
399 /* Include any barrier that may follow the basic block. */
400 tmp = next_nonnote_insn (end);
401 if (tmp && GET_CODE (tmp) == BARRIER)
404 /* Selectively delete the entire chain. */
406 delete_insn_chain (insn, end);
408 /* Remove the edges into and out of this block. Note that there may
409 indeed be edges in, if we are removing an unreachable loop. */
410 while (b->pred != NULL)
411 remove_edge (b->pred);
412 while (b->succ != NULL)
413 remove_edge (b->succ);
418 return deleted_handler;
/* Like flow_delete_block_noexpunge, but additionally remove B from the
   basic block array.  Returns that function's deleted-handler result.  */
422 flow_delete_block (b)
425 int deleted_handler = flow_delete_block_noexpunge (b);
427 /* Remove the basic block from the array. */
430 return deleted_handler;
433 /* Records the basic block struct in BB_FOR_INSN, for every instruction
434 indexed by INSN_UID. MAX is the size of the array. */
437 compute_bb_for_insn (max)
/* Any previous mapping is discarded and rebuilt from scratch.  */
442 if (basic_block_for_insn)
443 VARRAY_FREE (basic_block_for_insn);
445 VARRAY_BB_INIT (basic_block_for_insn, max, "basic_block_for_insn");
/* Insns with uid >= MAX are silently left unmapped.  */
452 for (insn = bb->head; ; insn = NEXT_INSN (insn))
454 if (INSN_UID (insn) < max)
455 VARRAY_BB (basic_block_for_insn, INSN_UID (insn)) = bb;
463 /* Release the basic_block_for_insn array.  Safe to call when the
   array was never allocated.  */
468 if (basic_block_for_insn)
469 VARRAY_FREE (basic_block_for_insn);
471 basic_block_for_insn = 0;
474 /* Update insns block within BB: record BB as the block of every insn
   in it.  No-op when the basic_block_for_insn map does not exist. */
477 update_bb_for_insn (bb)
482 if (! basic_block_for_insn)
485 for (insn = bb->head; ; insn = NEXT_INSN (insn))
487 set_block_for_insn (insn, bb);
493 /* Record INSN's block as BB, growing the uid-indexed map on demand. */
496 set_block_for_insn (insn, bb)
500 size_t uid = INSN_UID (insn);
502 if (uid >= basic_block_for_insn->num_elements)
504 /* Add one-eighth the size so we don't keep calling xrealloc. */
505 size_t new_size = uid + (uid + 7) / 8;
507 VARRAY_GROW (basic_block_for_insn, new_size);
510 VARRAY_BB (basic_block_for_insn, uid) = bb;
513 /* Split a block BB after insn INSN creating a new fallthru edge.
514 Return the new edge. Note that to keep other parts of the compiler happy,
515 this function renumbers all the basic blocks so that the new
516 one has a number one greater than the block split. */
519 split_block (bb, insn)
527 /* There is no point splitting the block after its end. */
531 /* Create the new basic block. */
532 new_bb = create_basic_block (NEXT_INSN (insn), bb->end, bb);
533 new_bb->count = bb->count;
534 new_bb->frequency = bb->frequency;
535 new_bb->loop_depth = bb->loop_depth;
/* The new block inherits all of BB's outgoing edges; BB keeps only a
   single fallthru edge into NEW_BB.  */
538 /* Redirect the outgoing edges. */
539 new_bb->succ = bb->succ;
541 for (e = new_bb->succ; e; e = e->succ_next)
544 new_edge = make_single_succ_edge (bb, new_bb, EDGE_FALLTHRU);
546 if (bb->global_live_at_start)
548 new_bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (&flow_obstack);
549 new_bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (&flow_obstack);
550 COPY_REG_SET (new_bb->global_live_at_end, bb->global_live_at_end);
552 /* We now have to calculate which registers are live at the end
553 of the split basic block and at the start of the new basic
554 block. Start with those registers that are known to be live
555 at the end of the original basic block and get
556 propagate_block to determine which registers are live. */
557 COPY_REG_SET (new_bb->global_live_at_start, bb->global_live_at_end);
558 propagate_block (new_bb, new_bb->global_live_at_start, NULL, NULL, 0);
559 COPY_REG_SET (bb->global_live_at_end,
560 new_bb->global_live_at_start);
561 #ifdef HAVE_conditional_execution
562 /* In the presence of conditional execution we are not able to update
563 liveness precisely. */
564 if (reload_completed)
566 bb->flags |= BB_DIRTY;
567 new_bb->flags |= BB_DIRTY;
575 /* Blocks A and B are to be merged into a single block A. The insns
576 are already contiguous, hence `nomove'. */
579 merge_blocks_nomove (a, b)
582 rtx b_head = b->head, b_end = b->end, a_end = a->end;
583 rtx del_first = NULL_RTX, del_last = NULL_RTX;
587 /* If there was a CODE_LABEL beginning B, delete it. */
588 if (GET_CODE (b_head) == CODE_LABEL)
590 /* Detect basic blocks with nothing but a label. This can happen
591 in particular at the end of a function. */
595 del_first = del_last = b_head;
596 b_head = NEXT_INSN (b_head);
599 /* Delete the basic block note and handle blocks containing just that
601 if (NOTE_INSN_BASIC_BLOCK_P (b_head))
609 b_head = NEXT_INSN (b_head);
612 /* If there was a jump out of A, delete it. */
613 if (GET_CODE (a_end) == JUMP_INSN)
/* Walk back over notes to find the insns to delete along with the jump
   (del_first..del_last mark the doomed range).  */
617 for (prev = PREV_INSN (a_end); ; prev = PREV_INSN (prev))
618 if (GET_CODE (prev) != NOTE
619 || NOTE_LINE_NUMBER (prev) == NOTE_INSN_BASIC_BLOCK
626 /* If this was a conditional jump, we need to also delete
627 the insn that set cc0. */
628 if (only_sets_cc0_p (prev))
632 prev = prev_nonnote_insn (prev);
639 a_end = PREV_INSN (del_first);
641 else if (GET_CODE (NEXT_INSN (a_end)) == BARRIER)
642 del_first = NEXT_INSN (a_end);
644 /* Normally there should only be one successor of A and that is B, but
645 partway through the merge of blocks for conditional_execution we'll
646 be merging a TEST block with THEN and ELSE successors. Free the
647 whole lot of them and hope the caller knows what they're doing. */
649 remove_edge (a->succ);
651 /* Adjust the edges out of B for the new owner. */
652 for (e = b->succ; e; e = e->succ_next)
655 a->flags |= b->flags;
657 /* B hasn't quite yet ceased to exist. Attempt to prevent mishap. */
658 b->pred = b->succ = NULL;
659 a->global_live_at_end = b->global_live_at_end;
663 /* Delete everything marked above as well as crap that might be
664 hanging out between the two blocks. */
665 delete_insn_chain (del_first, del_last);
667 /* Reassociate the insns of B with A. */
670 if (basic_block_for_insn
674 for (x = a_end; x != b_end; x = NEXT_INSN (x))
675 set_block_for_insn (x, a);
677 set_block_for_insn (b_end, a);
686 /* Return the label in the head of basic block BLOCK. Create one if it doesn't
   exist; the exit block gets the return label instead.  */
693 if (block == EXIT_BLOCK_PTR)
696 if (GET_CODE (block->head) != CODE_LABEL)
698 block->head = emit_label_before (gen_label_rtx (), block->head);
699 if (basic_block_for_insn)
700 set_block_for_insn (block->head, block);
706 /* Attempt to perform edge redirection by replacing possibly complex jump
707 instruction by unconditional jump or removing jump completely. This can
708 apply only if all edges now point to the same block. The parameters and
709 return values are equivalent to redirect_edge_and_branch. */
712 try_redirect_by_replacing_jump (e, target)
716 basic_block src = e->src;
717 rtx insn = src->end, kill_from;
722 /* Verify that all targets will be TARGET. */
723 for (tmp = src->succ; tmp; tmp = tmp->succ_next)
724 if (tmp->dest != target && tmp != e)
727 if (tmp || !onlyjump_p (insn))
/* After reload a tablejump (label followed by ADDR_VEC/ADDR_DIFF_VEC)
   disqualifies the transformation here.  */
729 if (reload_completed && JUMP_LABEL (insn)
730 && (table = NEXT_INSN (JUMP_LABEL (insn))) != NULL_RTX
731 && GET_CODE (table) == JUMP_INSN
732 && (GET_CODE (PATTERN (table)) == ADDR_VEC
733 || GET_CODE (PATTERN (table)) == ADDR_DIFF_VEC))
736 /* Avoid removing branch with side effects. */
737 set = single_set (insn);
738 if (!set || side_effects_p (set))
741 /* In case we zap a conditional jump, we'll need to kill
742 the cc0 setter too. */
745 if (reg_mentioned_p (cc0_rtx, PATTERN (insn)))
746 kill_from = PREV_INSN (insn);
749 /* See if we can create the fallthru edge. */
750 if (can_fallthru (src, target))
753 fprintf (rtl_dump_file, "Removing jump %i.\n", INSN_UID (insn));
756 /* Selectively unlink whole insn chain. */
757 delete_insn_chain (kill_from, PREV_INSN (target->head));
760 /* If this already is simplejump, redirect it. */
761 else if (simplejump_p (insn))
763 if (e->dest == target)
766 fprintf (rtl_dump_file, "Redirecting jump %i from %i to %i.\n",
767 INSN_UID (insn), e->dest->sindex, target->sindex);
768 if (!redirect_jump (insn, block_label (target), 0))
770 if (target == EXIT_BLOCK_PTR)
776 /* Cannot do anything for target exit block. */
777 else if (target == EXIT_BLOCK_PTR)
780 /* Or replace possibly complicated jump insn by simple jump insn. */
783 rtx target_label = block_label (target);
786 emit_jump_insn_after (gen_jump (target_label), insn);
787 JUMP_LABEL (src->end) = target_label;
788 LABEL_NUSES (target_label)++;
790 fprintf (rtl_dump_file, "Replacing insn %i by jump %i\n",
791 INSN_UID (insn), INSN_UID (src->end));
794 delete_insn_chain (kill_from, insn);
796 /* Recognize a tablejump that we are converting to a
797 simple jump and remove its associated CODE_LABEL
798 and ADDR_VEC or ADDR_DIFF_VEC. */
799 if ((tmp = JUMP_LABEL (insn)) != NULL_RTX
800 && (tmp = NEXT_INSN (tmp)) != NULL_RTX
801 && GET_CODE (tmp) == JUMP_INSN
802 && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
803 || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
805 delete_insn_chain (JUMP_LABEL (insn), tmp);
/* Ensure a barrier follows the (possibly new) block end.  */
808 barrier = next_nonnote_insn (src->end);
809 if (!barrier || GET_CODE (barrier) != BARRIER)
810 emit_barrier_after (src->end);
813 /* Keep only one edge out and set proper flags. */
814 while (src->succ->succ_next)
815 remove_edge (src->succ);
818 e->flags = EDGE_FALLTHRU;
822 e->probability = REG_BR_PROB_BASE;
823 e->count = src->count;
825 /* We don't want a block to end on a line-number note since that has
826 the potential of changing the code between -g and not -g. */
827 while (GET_CODE (e->src->end) == NOTE
828 && NOTE_LINE_NUMBER (e->src->end) >= 0)
829 delete_insn (e->src->end);
831 if (e->dest != target)
832 redirect_edge_succ (e, target);
837 /* Return last loop_beg note appearing after INSN, before start of next
838 basic block. Return INSN if there are no such notes.
840 When emitting a jump to redirect a fallthru edge, it should always appear
841 after the LOOP_BEG notes, as the loop optimizer expects a loop to either start
842 by a fallthru edge or a jump following the LOOP_BEG note jumping to the loop
exit. */
846 last_loop_beg_note (insn)
/* Scan forward over notes up to the next basic-block note, remembering
   the last NOTE_INSN_LOOP_BEG seen.  */
851 for (insn = NEXT_INSN (insn); insn && GET_CODE (insn) == NOTE
852 && NOTE_LINE_NUMBER (insn) != NOTE_INSN_BASIC_BLOCK;
853 insn = NEXT_INSN (insn))
854 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG)
860 /* Attempt to change code to redirect edge E to TARGET. Don't do that at the
861 expense of adding new instructions or reordering basic blocks.
863 Function can be also called with edge destination equivalent to the TARGET.
864 Then it should try the simplifications and do nothing if none is possible.
866 Return true if transformation succeeded. We still return false in case E
867 already pointed to TARGET and we didn't manage to simplify the instruction
stream. */
871 redirect_edge_and_branch (e, target)
876 rtx old_label = e->dest->head;
877 basic_block src = e->src;
/* Abnormal (call/EH) edges cannot be retargeted by editing a jump.  */
880 if (e->flags & (EDGE_ABNORMAL_CALL | EDGE_EH))
883 if (try_redirect_by_replacing_jump (e, target))
886 /* Do this fast path late, as we want above code to simplify for cases
887 where called on single edge leaving basic block containing nontrivial
889 else if (e->dest == target)
892 /* We can only redirect non-fallthru edges of jump insn. */
893 if (e->flags & EDGE_FALLTHRU)
895 else if (GET_CODE (insn) != JUMP_INSN)
898 /* Recognize a tablejump and adjust all matching cases. */
899 if ((tmp = JUMP_LABEL (insn)) != NULL_RTX
900 && (tmp = NEXT_INSN (tmp)) != NULL_RTX
901 && GET_CODE (tmp) == JUMP_INSN
902 && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
903 || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
907 rtx new_label = block_label (target);
909 if (target == EXIT_BLOCK_PTR)
911 if (GET_CODE (PATTERN (tmp)) == ADDR_VEC)
912 vec = XVEC (PATTERN (tmp), 0);
914 vec = XVEC (PATTERN (tmp), 1);
/* Rewrite every vector entry that still points at the old label,
   keeping the label use counts balanced.  */
916 for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
917 if (XEXP (RTVEC_ELT (vec, j), 0) == old_label)
919 RTVEC_ELT (vec, j) = gen_rtx_LABEL_REF (Pmode, new_label);
920 --LABEL_NUSES (old_label);
921 ++LABEL_NUSES (new_label);
924 /* Handle casesi dispatch insns */
925 if ((tmp = single_set (insn)) != NULL
926 && SET_DEST (tmp) == pc_rtx
927 && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
928 && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF
929 && XEXP (XEXP (SET_SRC (tmp), 2), 0) == old_label)
931 XEXP (SET_SRC (tmp), 2) = gen_rtx_LABEL_REF (VOIDmode,
933 --LABEL_NUSES (old_label);
934 ++LABEL_NUSES (new_label);
939 /* ?? We may play the games with moving the named labels from
940 one basic block to the other in case only one computed_jump is
942 if (computed_jump_p (insn)
943 /* A return instruction can't be redirected. */
944 || returnjump_p (insn))
947 /* If the insn doesn't go where we think, we're confused. */
948 if (JUMP_LABEL (insn) != old_label)
951 /* If the substitution doesn't succeed, die. This can happen
952 if the back end emitted unrecognizable instructions or if
953 target is exit block on some arches. */
954 if (!redirect_jump (insn, block_label (target), 0))
956 if (target == EXIT_BLOCK_PTR)
963 fprintf (rtl_dump_file, "Edge %i->%i redirected to %i\n",
964 e->src->sindex, e->dest->sindex, target->sindex);
966 if (e->dest != target)
967 redirect_edge_succ_nodup (e, target);
972 /* Like force_nonfallthru below, but additionally performs redirection.
973 Used by redirect_edge_and_branch_force.  Returns the newly created
basic block, if any.  */
976 force_nonfallthru_and_redirect (e, target)
980 basic_block jump_block, new_bb = NULL;
984 if (e->flags & EDGE_ABNORMAL)
986 else if (!(e->flags & EDGE_FALLTHRU))
988 else if (e->src == ENTRY_BLOCK_PTR)
990 /* We can't redirect the entry block. Create an empty block at the
991 start of the function which we use to add the new jump. */
993 basic_block bb = create_basic_block (e->dest->head, NULL, ENTRY_BLOCK_PTR);
995 /* Change the existing edge's source to be the new block, and add
996 a new edge from the entry block to the new block. */
998 for (pe1 = &ENTRY_BLOCK_PTR->succ; *pe1; pe1 = &(*pe1)->succ_next)
1001 *pe1 = e->succ_next;
1006 make_single_succ_edge (ENTRY_BLOCK_PTR, bb, EDGE_FALLTHRU);
/* If E's source has other successors, split E off into a fresh
   jump block so only E is affected.  */
1009 if (e->src->succ->succ_next)
1011 /* Create the new structures. */
1012 note = last_loop_beg_note (e->src->end);
1013 jump_block = create_basic_block (NEXT_INSN (note), NULL, e->src);
1014 jump_block->count = e->count;
1015 jump_block->frequency = EDGE_FREQUENCY (e);
1016 jump_block->loop_depth = target->loop_depth;
1018 if (target->global_live_at_start)
1020 jump_block->global_live_at_start
1021 = OBSTACK_ALLOC_REG_SET (&flow_obstack);
1022 jump_block->global_live_at_end
1023 = OBSTACK_ALLOC_REG_SET (&flow_obstack);
1024 COPY_REG_SET (jump_block->global_live_at_start,
1025 target->global_live_at_start);
1026 COPY_REG_SET (jump_block->global_live_at_end,
1027 target->global_live_at_start);
1031 new_edge = make_edge (e->src, jump_block, EDGE_FALLTHRU);
1032 new_edge->probability = e->probability;
1033 new_edge->count = e->count;
1035 /* Redirect old edge. */
1036 redirect_edge_pred (e, jump_block);
1037 e->probability = REG_BR_PROB_BASE;
1039 new_bb = jump_block;
1042 jump_block = e->src;
1044 e->flags &= ~EDGE_FALLTHRU;
/* Emit the explicit jump (a return insn when targeting the exit
   block) and the barrier that must follow it.  */
1045 if (target == EXIT_BLOCK_PTR)
1048 emit_jump_insn_after (gen_return (), jump_block->end);
1054 rtx label = block_label (target);
1055 emit_jump_insn_after (gen_jump (label), jump_block->end);
1056 JUMP_LABEL (jump_block->end) = label;
1057 LABEL_NUSES (label)++;
1060 emit_barrier_after (jump_block->end);
1061 redirect_edge_succ_nodup (e, target);
1066 /* Edge E is assumed to be fallthru edge. Emit needed jump instruction
1067 (and possibly create new basic block) to make edge non-fallthru.
1068 Return newly created BB or NULL if none. */
1071 force_nonfallthru (e)
/* Same destination, just a materialized jump.  */
1074 return force_nonfallthru_and_redirect (e, e->dest);
1077 /* Redirect edge even at the expense of creating new jump insn or
1078 basic block. Return new basic block if created, NULL otherwise.
1079 Abort if conversion is impossible. */
1082 redirect_edge_and_branch_force (e, target)
/* Cheap path first: try editing the existing jump in place.  */
1086 if (redirect_edge_and_branch (e, target)
1087 || e->dest == target)
1090 /* In case the edge redirection failed, try to force it to be non-fallthru
1091 and redirect newly created simplejump. */
1092 return force_nonfallthru_and_redirect (e, target);
1095 /* The given edge should potentially be a fallthru edge. If that is in
1096 fact true, delete the jump and barriers that are in the way. */
1099 tidy_fallthru_edge (e, b, c)
1105 /* ??? In a late-running flow pass, other folks may have deleted basic
1106 blocks by nopping out blocks, leaving multiple BARRIERs between here
1107 and the target label. They ought to be chastised and fixed.
1109 We can also wind up with a sequence of undeletable labels between
1110 one block and the next.
1112 So search through a sequence of barriers, labels, and notes for
1113 the head of block C and assert that we really do fall through. */
1115 for (q = NEXT_INSN (b->end); q != c->head; q = NEXT_INSN (q))
1119 /* Remove what will soon cease being the jump insn from the source block.
1120 If block B consisted only of this single jump, turn it into a deleted
note. */
1123 if (GET_CODE (q) == JUMP_INSN
1125 && (any_uncondjump_p (q)
1126 || (b->succ == e && e->succ_next == NULL)))
1129 /* If this was a conditional jump, we need to also delete
1130 the insn that set cc0. */
1131 if (any_condjump_p (q) && only_sets_cc0_p (PREV_INSN (q)))
1137 /* We don't want a block to end on a line-number note since that has
1138 the potential of changing the code between -g and not -g. */
1139 while (GET_CODE (q) == NOTE && NOTE_LINE_NUMBER (q) >= 0)
1143 /* Selectively unlink the sequence. */
1144 if (q != PREV_INSN (c->head))
1145 delete_insn_chain (NEXT_INSN (q), PREV_INSN (c->head))
;
1147 e->flags |= EDGE_FALLTHRU;
1150 /* Fix up edges that now fall through, or rather should now fall through
1151 but previously required a jump around now deleted blocks. Simplify
1152 the search by only examining blocks numerically adjacent, since this
1153 is how find_basic_blocks created them. */
1156 tidy_fallthru_edges ()
/* Walk adjacent block pairs (B, C); tidy when B's sole, simple edge
   goes to C.  */
1160 for (b = ENTRY_BLOCK_PTR->next_bb, c = b->next_bb;
1161 c && c != EXIT_BLOCK_PTR; b = c, c = c->next_bb)
1165 /* We care about simple conditional or unconditional jumps with
1168 If we had a conditional branch to the next instruction when
1169 find_basic_blocks was called, then there will only be one
1170 out edge for the block which ended with the conditional
1171 branch (since we do not create duplicate edges).
1173 Furthermore, the edge will be marked as a fallthru because we
1174 merge the flags for the duplicate edges. So we do not want to
1175 check that the edge is not a FALLTHRU edge. */
1177 if ((s = b->succ) != NULL
1178 && ! (s->flags & EDGE_COMPLEX)
1179 && s->succ_next == NULL
1181 /* If the jump insn has side effects, we can't tidy the edge. */
1182 && (GET_CODE (b->end) != JUMP_INSN
1183 || onlyjump_p (b->end)))
1184 tidy_fallthru_edge (s, b, c);
1188 /* Helper function for split_edge. Return true in case edge BB2 to BB1
1189 is back edge of syntactic loop. */
1192 back_edge_of_syntactic_loop_p (bb1, bb2)
1193 basic_block bb1, bb2;
/* First make sure BB2 actually lies after BB1 in the block chain.  */
1202 for (bb = bb1; bb && bb != bb2; bb = bb->next_bb)
/* Then count LOOP_BEG/LOOP_END note nesting between the blocks.  */
1209 for (insn = bb1->end; insn != bb2->head && count >= 0;
1210 insn = NEXT_INSN (insn))
1211 if (GET_CODE (insn) == NOTE)
1213 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG)
1215 else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END)
1222 /* Split a (typically critical) edge. Return the new block.
1223 Abort on abnormal edges.
1225 ??? The code generally expects to be called on critical edges.
1226 The case of a block ending in an unconditional jump to a
1227 block with multiple predecessors is not handled optimally. */
1230 split_edge (edge_in)
1237 /* Abnormal edges cannot be split. */
1238 if ((edge_in->flags & EDGE_ABNORMAL) != 0)
1241 /* We are going to place the new block in front of edge destination.
1242 Avoid existence of fallthru predecessors. */
1243 if ((edge_in->flags & EDGE_FALLTHRU) == 0)
1247 for (e = edge_in->dest->pred; e; e = e->pred_next)
1248 if (e->flags & EDGE_FALLTHRU)
1252 force_nonfallthru (e);
1255 /* Create the basic block note.
1257 Where we place the note can have a noticeable impact on the generated
1258 code. Consider this cfg:
1268 If we need to insert an insn on the edge from block 0 to block 1,
1269 we want to ensure the instructions we insert are outside of any
1270 loop notes that physically sit between block 0 and block 1. Otherwise
1271 we confuse the loop optimizer into thinking the loop is a phony. */
1273 if (edge_in->dest != EXIT_BLOCK_PTR
1274 && PREV_INSN (edge_in->dest->head)
1275 && GET_CODE (PREV_INSN (edge_in->dest->head)) == NOTE
1276 && (NOTE_LINE_NUMBER (PREV_INSN (edge_in->dest->head))
1277 == NOTE_INSN_LOOP_BEG)
1278 && !back_edge_of_syntactic_loop_p (edge_in->dest, edge_in->src)
)
1279 before = PREV_INSN (edge_in->dest->head);
1280 else if (edge_in->dest != EXIT_BLOCK_PTR)
1281 before = edge_in->dest->head;
1285 bb = create_basic_block (before, NULL, edge_in->dest->prev_bb);
1286 bb->count = edge_in->count;
1287 bb->frequency = EDGE_FREQUENCY (edge_in);
1289 /* ??? This info is likely going to be out of date very soon. */
1290 if (edge_in->dest->global_live_at_start)
1292 bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (&flow_obstack);
1293 bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (&flow_obstack);
1294 COPY_REG_SET (bb->global_live_at_start,
1295 edge_in->dest->global_live_at_start);
1296 COPY_REG_SET (bb->global_live_at_end,
1297 edge_in->dest->global_live_at_start);
1300 edge_out = make_single_succ_edge (bb, edge_in->dest, EDGE_FALLTHRU);
1302 /* For non-fallthru edges, we must adjust the predecessor's
1303 jump instruction to target our new block. */
1304 if ((edge_in->flags & EDGE_FALLTHRU) == 0)
1306 if (!redirect_edge_and_branch (edge_in, bb))
1310 redirect_edge_succ (edge_in, bb);
1315 /* Queue instructions for insertion on an edge between two basic blocks.
1316 The new instructions and basic blocks (if any) will not appear in the
1317 CFG until commit_edge_insertions is called. */
1320 insert_insn_on_edge (pattern, e)
1324 /* We cannot insert instructions on an abnormal critical edge.
1325 It will be easier to find the culprit if we die now. */
1326 if ((e->flags & EDGE_ABNORMAL) && EDGE_CRITICAL_P (e))
/* Append PATTERN to any insns already queued on E.  */
1329 if (e->insns == NULL_RTX)
1332 push_to_sequence (e->insns);
1334 emit_insn (pattern);
1336 e->insns = get_insns ();
1340 /* Update the CFG for the instructions queued on edge E. */
/* WATCH_CALLS, when true, avoids separating a CALL_INSN from the insns
   that store its return value (see the special case below).  Chooses an
   insertion point (BEFORE or AFTER an existing insn), splitting the edge
   if neither endpoint is suitable, then emits the queued insns there.  */
1343 commit_one_edge_insertion (e, watch_calls)
1347 rtx before = NULL_RTX, after = NULL_RTX, insns, tmp, last;
1350 /* Pull the insns off the edge now since the edge might go away. */
1352 e->insns = NULL_RTX;
1354 /* Special case -- avoid inserting code between call and storing
1355 its return value. */
1356 if (watch_calls && (e->flags & EDGE_FALLTHRU) && !e->dest->pred->pred_next
1357 && e->src != ENTRY_BLOCK_PTR
1358 && GET_CODE (e->src->end) == CALL_INSN)
1360 rtx next = next_nonnote_insn (e->src->end);
1362 after = e->dest->head;
1363 /* The first insn after the call may be a stack pop, skip it. */
1365 && keep_with_call_p (next))
1368 next = next_nonnote_insn (next);
1372 if (!before && !after)
1374 /* Figure out where to put these things. If the destination has
1375 one predecessor, insert there. Except for the exit block. */
1376 if (e->dest->pred->pred_next == NULL && e->dest != EXIT_BLOCK_PTR)
1380 /* Get the location correct wrt a code label, and "nice" wrt
1381 a basic block note, and before everything else. */
1383 if (GET_CODE (tmp) == CODE_LABEL)
1384 tmp = NEXT_INSN (tmp);
1385 if (NOTE_INSN_BASIC_BLOCK_P (tmp))
1386 tmp = NEXT_INSN (tmp);
1387 if (tmp == bb->head)
1390 after = PREV_INSN (tmp);
1392 after = get_last_insn ();
1395 /* If the source has one successor and the edge is not abnormal,
1396 insert there. Except for the entry block. */
1397 else if ((e->flags & EDGE_ABNORMAL) == 0
1398 && e->src->succ->succ_next == NULL
1399 && e->src != ENTRY_BLOCK_PTR)
1403 /* It is possible to have a non-simple jump here. Consider a target
1404 where some forms of unconditional jumps clobber a register. This
1405 happens on the fr30 for example.
1407 We know this block has a single successor, so we can just emit
1408 the queued insns before the jump. */
1409 if (GET_CODE (bb->end) == JUMP_INSN)
1410 for (before = bb->end;
1411 GET_CODE (PREV_INSN (before)) == NOTE
1412 && NOTE_LINE_NUMBER (PREV_INSN (before)) ==
1413 NOTE_INSN_LOOP_BEG; before = PREV_INSN (before))
1417 /* We'd better be fallthru, or we've lost track of what's what. */
1418 if ((e->flags & EDGE_FALLTHRU) == 0)
1424 /* Otherwise we must split the edge. */
1427 bb = split_edge (e);
1432 /* Now that we've found the spot, do the insertion. */
1436 emit_insns_before (insns, before);
1437 last = prev_nonnote_insn (before);
1440 last = emit_insns_after (insns, after);
/* If the inserted sequence ends in a return, the edge now leads to EXIT
   rather than its old destination; fix up the edge flags and terminate
   the block with a barrier.  */
1442 if (returnjump_p (last))
1444 /* ??? Remove all outgoing edges from BB and add one for EXIT.
1445 This is not currently a problem because this only happens
1446 for the (single) epilogue, which already has a fallthru edge
1450 if (e->dest != EXIT_BLOCK_PTR
1451 || e->succ_next != NULL || (e->flags & EDGE_FALLTHRU) == 0)
1454 e->flags &= ~EDGE_FALLTHRU;
1455 emit_barrier_after (last);
1458 delete_insn (before);
/* A jump in the middle of the inserted code means the block must be
   re-scanned to recover sub-basic-block structure.  */
1460 else if (GET_CODE (last) == JUMP_INSN)
1463 find_sub_basic_blocks (bb);
1466 /* Update the CFG for all queued instructions. */
/* Walks every block between ENTRY and EXIT and commits each successor
   edge's pending insns, without the call/return-value special casing
   (WATCH_CALLS is false).  */
1469 commit_edge_insertions ()
1474 #ifdef ENABLE_CHECKING
1475 verify_flow_info ();
1480 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
/* Fetch NEXT first: committing may remove or replace the edge E.  */
1484 for (e = bb->succ; e; e = next)
1486 next = e->succ_next;
1488 commit_one_edge_insertion (e, false);
1493 /* Update the CFG for all queued instructions, taking special care of inserting
1494 code on edges between call and storing its return value. */
/* Identical to commit_edge_insertions except WATCH_CALLS is true, so
   queued code is never inserted between a call and its return-value
   copy.  */
1497 commit_edge_insertions_watch_calls ()
1502 #ifdef ENABLE_CHECKING
1503 verify_flow_info ();
1507 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
/* Fetch NEXT first: committing may remove or replace the edge E.  */
1511 for (e = bb->succ; e; e = next)
1513 next = e->succ_next;
1515 commit_one_edge_insertion (e, true);
1520 /* Print out one basic block with live information at start and end. */
/* Dumps, to OUTF: the block header (index, loop depth, count), the
   predecessor edges, registers live at start, every insn in the block,
   registers live at end, and the successor edges.  */
1531 fprintf (outf, ";; Basic block %d, loop depth %d, count ",
1532 bb->sindex, bb->loop_depth);
1533 fprintf (outf, HOST_WIDEST_INT_PRINT_DEC, (HOST_WIDEST_INT) bb->count);
1536 fputs (";; Predecessors: ", outf);
1537 for (e = bb->pred; e; e = e->pred_next)
1538 dump_edge_info (outf, e, 0);
1541 fputs (";; Registers live at start:", outf);
1542 dump_regset (bb->global_live_at_start, outf);
/* Print all insns from head through end inclusive.  */
1545 for (insn = bb->head, last = NEXT_INSN (bb->end); insn != last;
1546 insn = NEXT_INSN (insn))
1547 print_rtl_single (outf, insn);
1549 fputs (";; Registers live at end:", outf);
1550 dump_regset (bb->global_live_at_end, outf);
1553 fputs (";; Successors: ", outf);
1554 for (e = bb->succ; e; e = e->succ_next)
1555 dump_edge_info (outf, e, 1);
/* Debugger convenience: dump basic block BB to stderr.  */
1563 dump_bb (bb, stderr);
/* Debugger convenience: dump basic block number N to stderr.  */
1570 dump_bb (BASIC_BLOCK (n), stderr);
1573 /* Like print_rtl, but also print out live information for the start of each
/* Prints the insn chain starting at RTX_FIRST to OUTF, annotating each
   block's start/end with live-register info and flagging insns that are
   outside any block or shared between blocks.  */
1577 print_rtl_with_bb (outf, rtx_first)
1584 fprintf (outf, "(nil)\n");
1587 enum bb_state { NOT_IN_BB, IN_ONE_BB, IN_MULTIPLE_BB };
1588 int max_uid = get_max_uid ();
/* START/END map an insn uid to the block that starts/ends at that insn;
   IN_BB_P records whether a uid belongs to zero, one, or several blocks.  */
1590 = (basic_block *) xcalloc (max_uid, sizeof (basic_block));
1592 = (basic_block *) xcalloc (max_uid, sizeof (basic_block));
1593 enum bb_state *in_bb_p
1594 = (enum bb_state *) xcalloc (max_uid, sizeof (enum bb_state));
/* First pass: record block boundaries and membership for every insn.  */
1597 FOR_ALL_BB_REVERSE (bb)
1601 start[INSN_UID (bb->head)] = bb;
1602 end[INSN_UID (bb->end)] = bb;
1603 for (x = bb->head; x != NULL_RTX; x = NEXT_INSN (x))
1605 enum bb_state state = IN_MULTIPLE_BB;
1607 if (in_bb_p[INSN_UID (x)] == NOT_IN_BB)
1609 in_bb_p[INSN_UID (x)] = state;
/* Second pass: walk the whole insn chain, emitting annotations.  */
1616 for (tmp_rtx = rtx_first; NULL != tmp_rtx; tmp_rtx = NEXT_INSN (tmp_rtx))
1620 if ((bb = start[INSN_UID (tmp_rtx)]) != NULL)
1622 fprintf (outf, ";; Start of basic block %d, registers live:",
1624 dump_regset (bb->global_live_at_start, outf);
1628 if (in_bb_p[INSN_UID (tmp_rtx)] == NOT_IN_BB
1629 && GET_CODE (tmp_rtx) != NOTE
1630 && GET_CODE (tmp_rtx) != BARRIER)
1631 fprintf (outf, ";; Insn is not within a basic block\n");
1632 else if (in_bb_p[INSN_UID (tmp_rtx)] == IN_MULTIPLE_BB)
1633 fprintf (outf, ";; Insn is in multiple basic blocks\n");
1635 did_output = print_rtl_single (outf, tmp_rtx);
1637 if ((bb = end[INSN_UID (tmp_rtx)]) != NULL)
1639 fprintf (outf, ";; End of basic block %d, registers live:\n",
1641 dump_regset (bb->global_live_at_end, outf);
/* Epilogue delay-slot insns live outside the normal chain; dump them too.  */
1654 if (current_function_epilogue_delay_list != 0)
1656 fprintf (outf, "\n;; Insns in epilogue delay list:\n\n");
1657 for (tmp_rtx = current_function_epilogue_delay_list; tmp_rtx != 0;
1658 tmp_rtx = XEXP (tmp_rtx, 1))
1659 print_rtl_single (outf, XEXP (tmp_rtx, 0));
/* Synchronize the REG_BR_PROB note on BB's final jump with the
   probability recorded on the branch edge; no-op when BB does not end
   in a jump, has no note, or the note already matches.  */
1664 update_br_prob_note (bb)
1668 if (GET_CODE (bb->end) != JUMP_INSN)
1670 note = find_reg_note (bb->end, REG_BR_PROB, NULL_RTX);
1671 if (!note || INTVAL (XEXP (note, 0)) == BRANCH_EDGE (bb)->probability)
1673 XEXP (note, 0) = GEN_INT (BRANCH_EDGE (bb)->probability);
1676 /* Verify the CFG consistency. This function check some CFG invariants and
1677 aborts when something is wrong. Hope that this function will help to
1678 convert many optimization passes to preserve CFG consistent.
1680 Currently it does following checks:
1682 - test head/end pointers
1683 - overlapping of basic blocks
1684 - edge list correctness
1685 - headers of basic blocks (the NOTE_INSN_BASIC_BLOCK note)
1686 - tails of basic blocks (ensure that boundary is necessary)
1687 - scans body of the basic block for JUMP_INSN, CODE_LABEL
1688 and NOTE_INSN_BASIC_BLOCK
1689 - check that all insns are in the basic blocks
1690 (except the switch handling code, barriers and notes)
1691 - check that all returns are followed by barriers
1693 In future it can be extended check a lot of other stuff as well
1694 (reachability of basic blocks, life information, etc. etc.). */
1699 const int max_uid = get_max_uid ();
1700 const rtx rtx_first = get_insns ();
1701 rtx last_head = get_last_insn ();
1702 basic_block *bb_info, *last_visited;
1703 size_t *edge_checksum;
1705 int num_bb_notes, err = 0;
1706 basic_block bb, last_bb_seen;
/* BB_INFO maps insn uid -> containing block; LAST_VISITED and
   EDGE_CHECKSUM are indexed by block sindex + 2 so ENTRY (-2) and
   EXIT (-1) get slots 0 and 1.  */
1708 bb_info = (basic_block *) xcalloc (max_uid, sizeof (basic_block));
1709 last_visited = (basic_block *) xcalloc (last_basic_block + 2,
1710 sizeof (basic_block));
1711 edge_checksum = (size_t *) xcalloc (last_basic_block + 2, sizeof (size_t));
1713 /* Check bb chain & numbers. */
1714 last_bb_seen = ENTRY_BLOCK_PTR;
1715 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR->next_bb, NULL, next_bb)
1717 if (bb != EXIT_BLOCK_PTR
1718 && bb != BASIC_BLOCK (bb->sindex))
1720 error ("bb %d on wrong place", bb->sindex);
1724 if (bb->prev_bb != last_bb_seen)
1726 error ("prev_bb of %d should be %d, not %d",
1727 bb->sindex, last_bb_seen->sindex, bb->prev_bb->sindex);
/* Verify each block's head and end really appear, in order, in the
   insn stream, and that no insn belongs to two blocks.  */
1734 FOR_ALL_BB_REVERSE (bb)
1736 rtx head = bb->head;
1739 /* Verify the end of the basic block is in the INSN chain. */
1740 for (x = last_head; x != NULL_RTX; x = PREV_INSN (x))
1746 error ("end insn %d for block %d not found in the insn stream",
1747 INSN_UID (end), bb->sindex);
1751 /* Work backwards from the end to the head of the basic block
1752 to verify the head is in the RTL chain. */
1753 for (; x != NULL_RTX; x = PREV_INSN (x))
1755 /* While walking over the insn chain, verify insns appear
1756 in only one basic block and initialize the BB_INFO array
1757 used by other passes. */
1758 if (bb_info[INSN_UID (x)] != NULL)
1760 error ("insn %d is in multiple basic blocks (%d and %d)",
1761 INSN_UID (x), bb->sindex, bb_info[INSN_UID (x)]->sindex);
1765 bb_info[INSN_UID (x)] = bb;
1772 error ("head insn %d for block %d not found in the insn stream",
1773 INSN_UID (head), bb->sindex);
1780 /* Now check the basic blocks (boundaries etc.) */
1781 FOR_ALL_BB_REVERSE (bb)
1783 int n_fallthru = 0, n_eh = 0, n_call = 0, n_abnormal = 0, n_branch = 0;
/* REG_BR_PROB note on a conditional jump must agree with the edge
   probability recorded in the CFG.  */
1787 if (INSN_P (bb->end)
1788 && (note = find_reg_note (bb->end, REG_BR_PROB, NULL_RTX))
1789 && bb->succ && bb->succ->succ_next
1790 && any_condjump_p (bb->end))
1792 if (INTVAL (XEXP (note, 0)) != BRANCH_EDGE (bb)->probability)
1794 error ("verify_flow_info: REG_BR_PROB does not match cfg %i %i",
1795 INTVAL (XEXP (note, 0)), BRANCH_EDGE (bb)->probability);
1801 error ("verify_flow_info: Wrong count of block %i %i",
1802 bb->sindex, (int)bb->count);
1805 if (bb->frequency < 0)
1807 error ("verify_flow_info: Wrong frequency of block %i %i",
1808 bb->sindex, bb->frequency);
/* Check every successor edge: no duplicates, sane probability and
   count, and classify it (fallthru/eh/call/abnormal/branch).  */
1811 for (e = bb->succ; e; e = e->succ_next)
1813 if (last_visited [e->dest->sindex + 2] == bb)
1815 error ("verify_flow_info: Duplicate edge %i->%i",
1816 e->src->sindex, e->dest->sindex);
1819 if (e->probability < 0 || e->probability > REG_BR_PROB_BASE)
1821 error ("verify_flow_info: Wrong probability of edge %i->%i %i",
1822 e->src->sindex, e->dest->sindex, e->probability);
1827 error ("verify_flow_info: Wrong count of edge %i->%i %i",
1828 e->src->sindex, e->dest->sindex, (int)e->count);
1832 last_visited [e->dest->sindex + 2] = bb;
1834 if (e->flags & EDGE_FALLTHRU)
1837 if ((e->flags & ~EDGE_DFS_BACK) == 0)
1840 if (e->flags & EDGE_ABNORMAL_CALL)
1843 if (e->flags & EDGE_EH)
1845 else if (e->flags & EDGE_ABNORMAL)
/* A fallthru edge between real blocks must connect adjacent blocks
   and have no barrier or real insn between them.  */
1848 if ((e->flags & EDGE_FALLTHRU)
1849 && e->src != ENTRY_BLOCK_PTR
1850 && e->dest != EXIT_BLOCK_PTR)
1854 if (e->src->next_bb != e->dest)
1857 ("verify_flow_info: Incorrect blocks for fallthru %i->%i",
1858 e->src->sindex, e->dest->sindex);
1862 for (insn = NEXT_INSN (e->src->end); insn != e->dest->head;
1863 insn = NEXT_INSN (insn))
1864 if (GET_CODE (insn) == BARRIER
1865 #ifndef CASE_DROPS_THROUGH
1868 || (INSN_P (insn) && ! JUMP_TABLE_DATA_P (insn))
1872 error ("verify_flow_info: Incorrect fallthru %i->%i",
1873 e->src->sindex, e->dest->sindex);
1874 fatal_insn ("wrong insn in the fallthru edge", insn);
1881 error ("verify_flow_info: Basic block %d succ edge is corrupted",
1883 fprintf (stderr, "Predecessor: ");
1884 dump_edge_info (stderr, e, 0);
1885 fprintf (stderr, "\nSuccessor: ");
1886 dump_edge_info (stderr, e, 1);
1887 fprintf (stderr, "\n");
/* Add each successor edge's address into the destination's
   checksum slot; the matching pred walk subtracts it below, so any
   nonzero residue means the succ/pred lists disagree.  */
1891 edge_checksum[e->dest->sindex + 2] += (size_t) e;
1894 if (n_eh && GET_CODE (PATTERN (bb->end)) != RESX
1895 && !find_reg_note (bb->end, REG_EH_REGION, NULL_RTX))
1897 error ("Missing REG_EH_REGION note in the end of bb %i", bb->sindex);
1901 && (GET_CODE (bb->end) != JUMP_INSN
1902 || (n_branch > 1 && (any_uncondjump_p (bb->end)
1903 || any_condjump_p (bb->end)))))
1905 error ("Too many outgoing branch edges from bb %i", bb->sindex);
1908 if (n_fallthru && any_uncondjump_p (bb->end))
1910 error ("Fallthru edge after unconditional jump %i", bb->sindex);
1913 if (n_branch != 1 && any_uncondjump_p (bb->end))
1915 error ("Wrong amount of branch edges after unconditional jump %i", bb->sindex);
1918 if (n_branch != 1 && any_condjump_p (bb->end)
1919 && JUMP_LABEL (bb->end) != bb->next_bb->head)
1921 error ("Wrong amount of branch edges after conditional jump %i", bb->sindex);
1924 if (n_call && GET_CODE (bb->end) != CALL_INSN)
1926 error ("Call edges for non-call insn in bb %i", bb->sindex);
1930 && (GET_CODE (bb->end) != CALL_INSN && n_call != n_abnormal)
1931 && (GET_CODE (bb->end) != JUMP_INSN
1932 || any_condjump_p (bb->end)
1933 || any_uncondjump_p (bb->end)))
1935 error ("Abnormal edges for no purpose in bb %i", bb->sindex);
1943 /* Ensure existence of barrier in BB with no fallthru edges. */
1944 for (insn = bb->end; !insn || GET_CODE (insn) != BARRIER;
1945 insn = NEXT_INSN (insn))
1947 || (GET_CODE (insn) == NOTE
1948 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_BASIC_BLOCK))
1950 error ("missing barrier after block %i", bb->sindex);
/* Check the predecessor edge list and subtract each edge from the
   checksum accumulated during the successor walk above.  */
1956 for (e = bb->pred; e; e = e->pred_next)
1960 error ("basic block %d pred edge is corrupted", bb->sindex);
1961 fputs ("Predecessor: ", stderr);
1962 dump_edge_info (stderr, e, 0);
1963 fputs ("\nSuccessor: ", stderr);
1964 dump_edge_info (stderr, e, 1);
1965 fputc ('\n', stderr);
1968 edge_checksum[e->dest->sindex + 2] -= (size_t) e;
/* Every insn inside the block must have BLOCK_FOR_INSN == BB.  */
1971 for (x = bb->head; x != NEXT_INSN (bb->end); x = NEXT_INSN (x))
1972 if (basic_block_for_insn && BLOCK_FOR_INSN (x) != bb)
1975 if (! BLOCK_FOR_INSN (x))
1977 ("insn %d inside basic block %d but block_for_insn is NULL",
1978 INSN_UID (x), bb->sindex);
1981 ("insn %d inside basic block %d but block_for_insn is %i",
1982 INSN_UID (x), bb->sindex, BLOCK_FOR_INSN (x)->sindex);
1987 /* OK pointers are correct. Now check the header of basic
1988 block. It ought to contain optional CODE_LABEL followed
1989 by NOTE_BASIC_BLOCK. */
1991 if (GET_CODE (x) == CODE_LABEL)
1995 error ("NOTE_INSN_BASIC_BLOCK is missing for block %d",
2003 if (!NOTE_INSN_BASIC_BLOCK_P (x) || NOTE_BASIC_BLOCK (x) != bb)
2005 error ("NOTE_INSN_BASIC_BLOCK is missing for block %d",
2011 /* Do checks for empty blocks here. */
/* The block body must contain no stray block notes or flow-control
   insns (JUMP_INSN, CODE_LABEL, BARRIER) before the block's end.  */
2014 for (x = NEXT_INSN (x); x; x = NEXT_INSN (x))
2016 if (NOTE_INSN_BASIC_BLOCK_P (x))
2018 error ("NOTE_INSN_BASIC_BLOCK %d in middle of basic block %d",
2019 INSN_UID (x), bb->sindex);
2026 if (GET_CODE (x) == JUMP_INSN
2027 || GET_CODE (x) == CODE_LABEL
2028 || GET_CODE (x) == BARRIER)
2030 error ("in basic block %d:", bb->sindex);
2031 fatal_insn ("flow control insn inside a basic block", x);
2036 /* Complete edge checksumming for ENTRY and EXIT. */
2040 for (e = ENTRY_BLOCK_PTR->succ; e ; e = e->succ_next)
2041 edge_checksum[e->dest->sindex + 2] += (size_t) e;
2043 for (e = EXIT_BLOCK_PTR->pred; e ; e = e->pred_next)
2044 edge_checksum[e->dest->sindex + 2] -= (size_t) e;
2047 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
2048 if (edge_checksum[bb->sindex + 2])
2050 error ("basic block %i edge lists are corrupted", bb->sindex);
/* Final pass over the raw insn chain: block notes must be numbered
   consecutively, and insns outside any block must be one of the few
   permitted kinds (jump-table data, non-deletable labels, etc.).  */
2055 last_bb_seen = ENTRY_BLOCK_PTR;
2057 for (x = rtx_first; x; x = NEXT_INSN (x))
2059 if (NOTE_INSN_BASIC_BLOCK_P (x))
2061 bb = NOTE_BASIC_BLOCK (x);
2064 if (bb != last_bb_seen->next_bb)
2065 internal_error ("basic blocks not numbered consecutively");
2070 if (!bb_info[INSN_UID (x)])
2072 switch (GET_CODE (x))
2079 /* An addr_vec is placed outside any basic block. */
2081 && GET_CODE (NEXT_INSN (x)) == JUMP_INSN
2082 && (GET_CODE (PATTERN (NEXT_INSN (x))) == ADDR_DIFF_VEC
2083 || GET_CODE (PATTERN (NEXT_INSN (x))) == ADDR_VEC)
2086 /* But in any case, non-deletable labels can appear anywhere. */
2090 fatal_insn ("insn outside basic block", x);
2095 && GET_CODE (x) == JUMP_INSN
2096 && returnjump_p (x) && ! condjump_p (x)
2097 && ! (NEXT_INSN (x) && GET_CODE (NEXT_INSN (x)) == BARRIER))
2098 fatal_insn ("return not followed by barrier", x);
2101 if (num_bb_notes != num_basic_blocks)
2103 ("number of bb notes in insn chain (%d) != num_basic_blocks (%d)",
2104 num_bb_notes, num_basic_blocks);
2107 internal_error ("verify_flow_info failed");
2111 free (last_visited);
2112 free (edge_checksum);
2115 /* Assume that the preceding pass has possibly eliminated jump instructions
2116 or converted the unconditional jumps. Eliminate the edges from CFG.
2117 Return true if any edges are eliminated. */
/* BB is the block to clean up.  Examines BB's last insn and removes
   successor edges that can no longer be taken (stale EH, abnormal-call,
   or branch edges), then renormalizes the remaining edges'
   probabilities and counts.  */
2120 purge_dead_edges (bb)
2124 rtx insn = bb->end, note;
2125 bool purged = false;
2127 /* If this instruction cannot trap, remove REG_EH_REGION notes. */
2128 if (GET_CODE (insn) == INSN
2129 && (note = find_reg_note (insn, REG_EH_REGION, NULL)))
2133 if (! may_trap_p (PATTERN (insn))
2134 || ((eqnote = find_reg_equal_equiv_note (insn))
2135 && ! may_trap_p (XEXP (eqnote, 0))))
2136 remove_note (insn, note);
2139 /* Cleanup abnormal edges caused by exceptions or non-local gotos. */
2140 for (e = bb->succ; e; e = next)
2142 next = e->succ_next;
2143 if (e->flags & EDGE_EH)
2145 if (can_throw_internal (bb->end))
2148 else if (e->flags & EDGE_ABNORMAL_CALL)
2150 if (GET_CODE (bb->end) == CALL_INSN
2151 && (! (note = find_reg_note (insn, REG_EH_REGION, NULL))
2152 || INTVAL (XEXP (note, 0)) >= 0))
2159 bb->flags |= BB_DIRTY;
/* Block ends in a jump: keep only the edges the jump can still take.  */
2163 if (GET_CODE (insn) == JUMP_INSN)
2168 /* We do care only about conditional jumps and simplejumps. */
2169 if (!any_condjump_p (insn)
2170 && !returnjump_p (insn)
2171 && !simplejump_p (insn))
2174 /* Branch probability/prediction notes are defined only for
2175 condjumps. We've possibly turned condjump into simplejump. */
2176 if (simplejump_p (insn))
2178 note = find_reg_note (insn, REG_BR_PROB, NULL);
2180 remove_note (insn, note);
2181 while ((note = find_reg_note (insn, REG_BR_PRED, NULL)))
2182 remove_note (insn, note);
2185 for (e = bb->succ; e; e = next)
2187 next = e->succ_next;
2189 /* Avoid abnormal flags to leak from computed jumps turned
2190 into simplejumps. */
2192 e->flags &= ~EDGE_ABNORMAL;
2194 /* See if this edge is one we should keep. */
2195 if ((e->flags & EDGE_FALLTHRU) && any_condjump_p (insn))
2196 /* A conditional jump can fall through into the next
2197 block, so we should keep the edge. */
2199 else if (e->dest != EXIT_BLOCK_PTR
2200 && e->dest->head == JUMP_LABEL (insn))
2201 /* If the destination block is the target of the jump,
2204 else if (e->dest == EXIT_BLOCK_PTR && returnjump_p (insn))
2205 /* If the destination block is the exit block, and this
2206 instruction is a return, then keep the edge. */
2208 else if ((e->flags & EDGE_EH) && can_throw_internal (insn))
2209 /* Keep the edges that correspond to exceptions thrown by
2210 this instruction. */
2213 /* We do not need this edge. */
2214 bb->flags |= BB_DIRTY;
2219 if (!bb->succ || !purged)
2223 fprintf (rtl_dump_file, "Purged edges from bb %i\n", bb->sindex);
2228 /* Redistribute probabilities. */
2229 if (!bb->succ->succ_next)
2231 bb->succ->probability = REG_BR_PROB_BASE;
2232 bb->succ->count = bb->count;
/* Two edges remain (branch + fallthru): split the block count
   between them according to the REG_BR_PROB note.  */
2236 note = find_reg_note (insn, REG_BR_PROB, NULL);
2240 b = BRANCH_EDGE (bb);
2241 f = FALLTHRU_EDGE (bb);
2242 b->probability = INTVAL (XEXP (note, 0));
2243 f->probability = REG_BR_PROB_BASE - b->probability;
2244 b->count = bb->count * b->probability / REG_BR_PROB_BASE;
2245 f->count = bb->count * f->probability / REG_BR_PROB_BASE;
2251 /* If we don't see a jump insn, we don't know exactly why the block would
2252 have been broken at this point. Look for a simple, non-fallthru edge,
2253 as these are only created by conditional branches. If we find such an
2254 edge we know that there used to be a jump here and can then safely
2255 remove all non-fallthru edges. */
2256 for (e = bb->succ; e && (e->flags & (EDGE_COMPLEX | EDGE_FALLTHRU));
2263 for (e = bb->succ; e; e = next)
2265 next = e->succ_next;
2266 if (!(e->flags & EDGE_FALLTHRU))
2268 bb->flags |= BB_DIRTY;
2274 if (!bb->succ || bb->succ->succ_next)
2277 bb->succ->probability = REG_BR_PROB_BASE;
2278 bb->succ->count = bb->count;
2281 fprintf (rtl_dump_file, "Purged non-fallthru edges from bb %i\n",
2286 /* Search all basic blocks for potentially dead edges and purge them. Return
2287 true if some edge has been eliminated. */
2290 purge_all_dead_edges (update_life_p)
2299 blocks = sbitmap_alloc (last_basic_block);
2300 sbitmap_zero (blocks);
2305 bool purged_here = purge_dead_edges (bb);
2307 purged |= purged_here;
2308 if (purged_here && update_life_p)
2309 SET_BIT (blocks, bb->sindex);
2312 if (update_life_p && purged)
2313 update_life_info (blocks, UPDATE_LIFE_GLOBAL,
2314 PROP_DEATH_NOTES | PROP_SCAN_DEAD_CODE
2315 | PROP_KILL_DEAD_CODE);
2318 sbitmap_free (blocks);