1 /* Basic block reordering routines for the GNU compiler.
2 Copyright (C) 2000 Free Software Foundation, Inc.
4 This file is part of GNU CC.
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 59 Temple Place - Suite 330,
19 Boston, MA 02111-1307, USA. */
23 "Profile Guided Code Positioning"
24 Pettis and Hanson; PLDI '90.
30 if (p) goto A; // predict taken
33 if (q) goto B; // predict taken
39 We'll currently reorder this as
68 This requires that we be able to duplicate the jump at A, and
69 adjust the graph traversal such that greedy placement doesn't
70 fix D before C is considered.
72 (2) Coordinate with shorten_branches to minimize the number of
75 (3) Invent a method by which sufficiently non-predicted code can
76 be moved to either the end of the section or another section
77 entirely. Some sort of NOTE_INSN note would work fine.
79 This completely scroggs all debugging formats, so the user
80 would have to explicitly ask for it.
88 #include "hard-reg-set.h"
89 #include "basic-block.h"
90 #include "insn-config.h"
102 #ifndef HAVE_epilogue
103 #define HAVE_epilogue 0
107 /* The contents of the current function definition are allocated
108 in this obstack, and all are freed at the end of the function.
109 For top-level functions, this is temporary_obstack.
110 Separate obstacks are made for nested functions. */
112 extern struct obstack flow_obstack;
115 /* Structure to hold information about lexical scopes. */
/* NOTE(review): this listing is incomplete — the embedded line numbers jump
   (116 -> 120, 163 -> 171, ...), so several field declarations (note_beg,
   note_end, bb_beg, bb_end, bbs, num_bbs, level, the typedef names `scope`
   and `scope_forest_info`, and the reorder_block_def fields) are missing
   from view.  Do not treat these declarations as complete.  */
116 typedef struct scope_def
120 /* The NOTE_INSN_BLOCK_BEG that started this scope. */
123 /* The NOTE_INSN_BLOCK_END that ended this scope. */
126 /* The bb containing note_beg (if any). */
129 /* The bb containing note_end (if any). */
132 /* List of basic blocks contained within this scope. */
135 /* Number of blocks contained within this scope. */
138 /* The outer scope or NULL if outermost scope. */
139 struct scope_def *outer;
141 /* The first inner scope or NULL if innermost scope. */
142 struct scope_def *inner;
144 /* The last inner scope or NULL if innermost scope. */
145 struct scope_def *inner_last;
147 /* Link to the next (sibling) scope. */
148 struct scope_def *next;
152 /* Structure to hold information about the scope forest. */
155 /* Number of trees in forest. */
158 /* List of tree roots. */
162 /* Structure to hold information about the blocks during reordering. */
163 typedef struct reorder_block_def
171 } *reorder_block_def;
176 /* Local function prototypes. */
/* NOTE(review): old K&R-era PARAMS macro prototypes.  dump_scope_forest and
   verify_insn_chain are deliberately non-static (debug entry points).  */
177 static rtx skip_insns_after_block PARAMS ((basic_block));
178 static void record_effective_endpoints PARAMS ((void));
179 static void make_reorder_chain PARAMS ((void));
180 static basic_block make_reorder_chain_1 PARAMS ((basic_block, basic_block));
181 static rtx label_for_bb PARAMS ((basic_block));
182 static rtx emit_jump_to_block_after PARAMS ((basic_block, rtx));
183 static void fixup_reorder_chain PARAMS ((void));
184 static void relate_bbs_with_scopes PARAMS ((scope));
185 static scope make_new_scope PARAMS ((int, rtx));
186 static void build_scope_forest PARAMS ((scope_forest_info *));
187 static void remove_scope_notes PARAMS ((void));
188 static void insert_intra_1 PARAMS ((scope, rtx *));
189 static void insert_intra_bb_scope_notes PARAMS ((basic_block));
190 static void insert_inter_bb_scope_notes PARAMS ((basic_block, basic_block));
191 static void rebuild_scope_notes PARAMS ((scope_forest_info *));
192 static void free_scope_forest_1 PARAMS ((scope));
193 static void free_scope_forest PARAMS ((scope_forest_info *));
194 void dump_scope_forest PARAMS ((scope_forest_info *));
195 static void dump_scope_forest_1 PARAMS ((scope, int));
196 static rtx get_next_bb_note PARAMS ((rtx));
197 static rtx get_prev_bb_note PARAMS ((rtx));
199 void verify_insn_chain PARAMS ((void));
201 /* Skip over inter-block insns occurring after BB which are typically
202 associated with BB (e.g., barriers). If there are any such insns,
203 we return the last one. Otherwise, we return the end of BB. */
/* NOTE(review): fragmentary listing — the return type, opening brace,
   several switch cases and the final return are missing (line numbers jump
   203 -> 206, 220 -> 226, 245 -> 261).  The visible logic walks forward from
   bb->end until the head of the next basic block, classifying each insn by
   GET_CODE / NOTE_LINE_NUMBER; the ADDR_VEC/ADDR_DIFF_VEC test presumably
   keeps a jump table attached to its CODE_LABEL — TODO confirm against the
   full source.  */
206 skip_insns_after_block (bb)
209 rtx insn, last_insn, next_head;
211 next_head = NULL_RTX;
212 if (bb->index + 1 != n_basic_blocks)
213 next_head = BASIC_BLOCK (bb->index + 1)->head;
/* Scan forward from the end of BB; stop before the next block's head.  */
215 for (last_insn = bb->end; (insn = NEXT_INSN (last_insn)); last_insn = insn)
217 if (insn == next_head)
220 switch (GET_CODE (insn))
226 switch (NOTE_LINE_NUMBER (insn))
228 case NOTE_INSN_LOOP_END:
229 case NOTE_INSN_BLOCK_END:
230 case NOTE_INSN_DELETED:
231 case NOTE_INSN_DELETED_LABEL:
241 && GET_CODE (NEXT_INSN (insn)) == JUMP_INSN
242 && (GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_VEC
243 || GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_DIFF_VEC))
245 insn = NEXT_INSN (insn);
261 /* Locate the effective beginning and end of the insn chain for each
262 block, as defined by skip_insns_after_block above. */
/* NOTE(review): fragment — return type, loop variable declaration and
   closing braces are not visible.  Each block's eff_head starts where the
   previous block's eff_end left off, so the eff_head..eff_end ranges tile
   the whole insn chain with no gaps.  */
265 record_effective_endpoints ()
267 rtx next_insn = get_insns ();
270 for (i = 0; i < n_basic_blocks; ++i)
272 basic_block bb = BASIC_BLOCK (i);
275 RBI (bb)->eff_head = next_insn;
276 end = skip_insns_after_block (bb);
277 RBI (bb)->eff_end = end;
278 next_insn = NEXT_INSN (end);
283 /* Compute an ordering for a subgraph beginning with block BB. Record the
284 ordering in RBI()->index and chained through RBI()->next. */
/* NOTE(review): fragment — the HAVE_epilogue condition guarding the
   last-block pinning, the do/while opener, and the `if (last_block)` around
   the chain termination are among the missing lines (296 -> 299, 300 -> 304,
   337 -> 340).  */
287 make_reorder_chain ()
289 basic_block last_block = NULL;
290 basic_block prev = NULL;
291 int nbb_m1 = n_basic_blocks - 1;
293 /* If we've not got epilogue in RTL, we must fallthru to the exit.
294 Force the last block to be at the end. */
295 /* ??? Some ABIs (e.g. MIPS) require the return insn to be at the
296 end of the function for stack unwinding purposes. */
299 last_block = BASIC_BLOCK (nbb_m1);
300 RBI (last_block)->visited = 1;
304 /* Loop until we've placed every block. */
308 basic_block next = NULL;
310 /* Find the next unplaced block. */
311 /* ??? Get rid of this loop, and track which blocks are not yet
312 placed more directly, so as to avoid the O(N^2) worst case.
313 Perhaps keep a doubly-linked list of all to-be-placed blocks;
314 remove from the list as we place. The head of that list is
315 what we're looking for here. */
317 for (i = 0; i <= nbb_m1; ++i)
319 basic_block bb = BASIC_BLOCK (i);
320 if (! RBI (bb)->visited)
/* Greedily extend the chain from the first unvisited block.  */
329 prev = make_reorder_chain_1 (next, prev);
331 while (RBI (prev)->index < nbb_m1);
333 /* Terminate the chain. */
/* If the last block was pinned above, splice it in at the very end;
   otherwise simply NULL-terminate — TODO confirm guard from full source.  */
336 RBI (prev)->next = last_block;
337 RBI (last_block)->index = RBI (prev)->index + 1;
340 RBI (prev)->next = NULL;
343 /* A helper function for make_reorder_chain.
345 We do not follow EH edges, or non-fallthru edges to noreturn blocks.
346 These are assumed to be the error condition and we wish to cluster
347 all of them at the very end of the function for the benefit of cache
348 locality for the rest of the function.
350 ??? We could do slightly better by noticing earlier that some subgraph
351 has all paths leading to noreturn functions, but for there to be more
352 than one block in such a subgraph is rare. */
/* NOTE(review): fragment — the return type, local declarations, the
   `restart` label / tail-recursion loop, and several edge-classification
   lines are missing (355 -> 363, 415 -> 419, 432 -> 438, 448 -> 452).
   Visible shape: append BB to the chain after PREV, then choose the most
   probable successor as `next` and recurse on the remaining successors.  */
355 make_reorder_chain_1 (bb, prev)
363 /* Mark this block visited. */
369 RBI (prev)->next = bb;
370 new_index = RBI (prev)->index + 1;
371 RBI (bb)->index = new_index;
373 if (rtl_dump_file && prev->index + 1 != bb->index)
374 fprintf (rtl_dump_file, "Reordering block %d (%d) after %d (%d)\n",
375 bb->index, RBI (bb)->index, prev->index, RBI (prev)->index);
379 RBI (bb)->visited = 1;
382 if (bb->succ == NULL)
385 /* Find the most probable block. */
388 if (any_condjump_p (bb->end)
389 && (note = find_reg_note (bb->end, REG_BR_PROB, 0)) != NULL)
391 int taken, probability;
392 edge e_taken, e_fall;
394 probability = INTVAL (XEXP (note, 0));
/* Predicted taken iff recorded probability exceeds 50%.  */
395 taken = probability > REG_BR_PROB_BASE / 2;
397 /* Find the normal taken edge and the normal fallthru edge.
398 Note that there may in fact be other edges due to
399 flag_non_call_exceptions.
401 Note, conditional jumps with other side effects may not
402 be fully optimized. In this case it is possible for
403 the conditional jump to branch to the same location as
406 We should probably work to improve optimization of that
407 case; however, it seems silly not to also deal with such
408 problems here if they happen to occur. */
410 e_taken = e_fall = NULL;
411 for (e = bb->succ; e ; e = e->succ_next)
413 if (e->flags & EDGE_FALLTHRU)
415 if (! (e->flags & EDGE_EH))
419 next = (taken ? e_taken : e_fall)->dest;
422 /* In the absence of a prediction, disturb things as little as possible
423 by selecting the old "next" block from the list of successors. If
424 there had been a fallthru edge, that will be the one. */
427 for (e = bb->succ; e ; e = e->succ_next)
428 if (e->dest->index == bb->index + 1)
430 if ((e->flags & EDGE_FALLTHRU)
432 && ! (e->flags & (EDGE_ABNORMAL_CALL | EDGE_EH))))
438 /* Make sure we didn't select a silly next block. */
439 if (! next || next == EXIT_BLOCK_PTR || RBI (next)->visited)
442 /* Recurse on the successors. Unroll the last call, as the normal
443 case is exactly one or two edges, and we can tail recurse. */
444 for (e = bb->succ; e; e = e->succ_next)
445 if (e->dest != EXIT_BLOCK_PTR
446 && ! RBI (e->dest)->visited
448 && ! (e->flags & (EDGE_ABNORMAL_CALL | EDGE_EH)))
/* Place the preferred `next` first, then pick up this successor if it
   is still unvisited afterwards.  */
452 prev = make_reorder_chain_1 (next, prev);
453 next = RBI (e->dest)->visited ? NULL : e->dest;
468 /* Locate or create a label for a given basic block. */
/* NOTE(review): fragment — the `static rtx label_for_bb (bb)` signature,
   argument declaration, and `return label;` are missing (468 -> 474,
   484 -> 492).  If BB does not start with a CODE_LABEL, one is created and
   eff_head is updated when the label becomes the new effective head.  */
474 rtx label = bb->head;
476 if (GET_CODE (label) != CODE_LABEL)
479 fprintf (rtl_dump_file, "Emitting label for block %d (%d)\n",
480 bb->index, RBI (bb)->index);
482 label = emit_label_before (gen_label_rtx (), label);
483 if (bb->head == RBI (bb)->eff_head)
484 RBI (bb)->eff_head = label;
492 /* Emit a jump to BB after insn AFTER. */
/* NOTE(review): fragment — return type, declarations, the HAVE_return
   guard around gen_return, and `return jump;` are missing (495 -> 501,
   510 -> 517).  A jump to the exit block becomes a return insn; any other
   target gets an unconditional jump with JUMP_LABEL/LABEL_NUSES kept
   consistent.  */
495 emit_jump_to_block_after (bb, after)
501 if (bb != EXIT_BLOCK_PTR)
503 rtx label = label_for_bb (bb);
504 jump = emit_jump_insn_after (gen_jump (label), after);
505 JUMP_LABEL (jump) = label;
506 LABEL_NUSES (label) += 1;
509 fprintf (rtl_dump_file, "Emitting jump to block %d (%d)\n",
510 bb->index, RBI (bb)->index);
517 jump = emit_jump_insn_after (gen_return (), after);
520 fprintf (rtl_dump_file, "Emitting return\n");
530 /* Given a reorder chain, rearrange the code to match. */
/* NOTE(review): heavily fragmented — the function signature's return type,
   many `continue;`/brace lines and several conditions are missing (e.g.
   548 -> 553, 610 -> 612, 643 -> 645, 707 -> 709).  Visible phases:
   1) splice the insn chain into chain order via eff_head/eff_end,
   2) per block, remove/invert/add jumps so fallthrus match the new order,
   creating a new jump block when an edge needs one,
   3) renumber basic_block_info to the new order.  */
533 fixup_reorder_chain ()
535 basic_block bb, last_bb;
537 /* First do the bulk reordering -- rechain the blocks without regard to
538 the needed changes to jumps and labels. */
540 last_bb = BASIC_BLOCK (0);
541 bb = RBI (last_bb)->next;
544 rtx last_e = RBI (last_bb)->eff_end;
545 rtx curr_h = RBI (bb)->eff_head;
547 NEXT_INSN (last_e) = curr_h;
548 PREV_INSN (curr_h) = last_e;
553 NEXT_INSN (RBI (last_bb)->eff_end) = NULL_RTX;
554 set_last_insn (RBI (last_bb)->eff_end);
556 /* Now add jumps and labels as needed to match the blocks new
559 for (bb = BASIC_BLOCK (0); bb ; bb = RBI (bb)->next)
561 edge e_fall, e_taken, e;
562 rtx jump_insn, barrier_insn, bb_end_insn;
565 if (bb->succ == NULL)
568 /* Find the old fallthru edge, and another non-EH edge for
570 e_taken = e_fall = NULL;
571 for (e = bb->succ; e ; e = e->succ_next)
572 if (e->flags & EDGE_FALLTHRU)
574 else if (! (e->flags & EDGE_EH))
577 bb_end_insn = bb->end;
578 if (GET_CODE (bb_end_insn) == JUMP_INSN)
580 if (any_uncondjump_p (bb_end_insn))
582 /* If the destination is still not next, nothing to do. */
583 if (RBI (bb)->index + 1 != RBI (e_taken->dest)->index)
586 /* Otherwise, we can remove the jump and cleanup the edge. */
587 tidy_fallthru_edge (e_taken, bb, e_taken->dest);
588 RBI (bb)->eff_end = skip_insns_after_block (bb);
589 RBI (e_taken->dest)->eff_head = NEXT_INSN (RBI (bb)->eff_end);
592 fprintf (rtl_dump_file, "Removing jump in block %d (%d)\n",
593 bb->index, RBI (bb)->index);
596 else if (any_condjump_p (bb_end_insn))
598 /* If the old fallthru is still next, nothing to do. */
599 if (RBI (bb)->index + 1 == RBI (e_fall->dest)->index
600 || (RBI (bb)->index == n_basic_blocks - 1
601 && e_fall->dest == EXIT_BLOCK_PTR))
604 /* There is one special case: if *neither* block is next,
605 such as happens at the very end of a function, then we'll
606 need to add a new unconditional jump. Choose the taken
607 edge based on known or assumed probability. */
608 if (RBI (bb)->index + 1 != RBI (e_taken->dest)->index)
610 rtx note = find_reg_note (bb_end_insn, REG_BR_PROB, 0);
612 && INTVAL (XEXP (note, 0)) < REG_BR_PROB_BASE / 2
613 && invert_jump (bb_end_insn,
614 label_for_bb (e_fall->dest), 0))
616 e_fall->flags &= ~EDGE_FALLTHRU;
617 e_taken->flags |= EDGE_FALLTHRU;
/* Swap the roles of the taken and fallthru edges after inverting.  */
618 e = e_fall, e_fall = e_taken, e_taken = e;
622 /* Otherwise we can try to invert the jump. This will
623 basically never fail, however, keep up the pretense. */
624 else if (invert_jump (bb_end_insn,
625 label_for_bb (e_fall->dest), 0))
627 e_fall->flags &= ~EDGE_FALLTHRU;
628 e_taken->flags |= EDGE_FALLTHRU;
632 else if (returnjump_p (bb_end_insn))
636 /* Otherwise we have some switch or computed jump. In the
637 99% case, there should not have been a fallthru edge. */
640 #ifdef CASE_DROPS_THROUGH
641 /* Except for VAX. Since we didn't have predication for the
642 tablejump, the fallthru block should not have moved. */
643 if (RBI (bb)->index + 1 == RBI (e_fall->dest)->index)
645 bb_end_insn = skip_insns_after_block (bb);
653 /* No fallthru implies a noreturn function with EH edges, or
654 something similarly bizarre. In any case, we don't need to
659 /* If the fallthru block is still next, nothing to do. */
660 if (RBI (bb)->index + 1 == RBI (e_fall->dest)->index
661 || (RBI (bb)->index == n_basic_blocks - 1
662 && e_fall->dest == EXIT_BLOCK_PTR))
665 /* We need a new jump insn. If the block has only one outgoing
666 edge, then we can stuff the new jump insn in directly. */
667 if (bb->succ->succ_next == NULL)
669 e_fall->flags &= ~EDGE_FALLTHRU;
671 jump_insn = emit_jump_to_block_after (e_fall->dest, bb_end_insn);
673 barrier_insn = emit_barrier_after (jump_insn);
674 RBI (bb)->eff_end = barrier_insn;
679 /* We got here if we need to add a new jump insn in a new block
680 across the edge e_fall. */
682 jump_insn = emit_jump_to_block_after (e_fall->dest, bb_end_insn);
683 barrier_insn = emit_barrier_after (jump_insn);
685 VARRAY_GROW (basic_block_info, ++n_basic_blocks);
686 create_basic_block (n_basic_blocks - 1, jump_insn, jump_insn, NULL);
688 nb = BASIC_BLOCK (n_basic_blocks - 1);
689 nb->global_live_at_start = OBSTACK_ALLOC_REG_SET (&flow_obstack);
690 nb->global_live_at_end = OBSTACK_ALLOC_REG_SET (&flow_obstack);
/* NOTE(review): both live sets are copied from bb's live-at-start —
   presumably correct because the new block contains only a jump and
   changes no registers; TODO confirm against full source.  */
693 COPY_REG_SET (nb->global_live_at_start, bb->global_live_at_start);
694 COPY_REG_SET (nb->global_live_at_end, bb->global_live_at_start);
696 nb->aux = xmalloc (sizeof (struct reorder_block_def));
697 RBI (nb)->eff_head = nb->head;
698 RBI (nb)->eff_end = barrier_insn;
699 RBI (nb)->scope = RBI (bb)->scope;
700 RBI (nb)->index = RBI (bb)->index + 1;
701 RBI (nb)->visited = 1;
702 RBI (nb)->next = RBI (bb)->next;
705 /* Link to new block. */
706 make_edge (NULL, nb, e_fall->dest, 0);
707 redirect_edge_succ (e_fall, nb);
709 /* Don't process this new block. */
712 /* Fix subsequent reorder block indices to reflect new block. */
713 while ((nb = RBI (nb)->next) != NULL)
714 RBI (nb)->index += 1;
717 /* Put basic_block_info in the new order. */
718 for (bb = BASIC_BLOCK (0); bb ; bb = RBI (bb)->next)
720 bb->index = RBI (bb)->index;
721 BASIC_BLOCK (bb->index) = bb;
726 /* Perform sanity checks on the insn chain.
727 1. Check that next/prev pointers are consistent in both the forward and
729 2. Count insns in chain, going both directions, and check if equal.
730 3. Check that get_last_insn () returns the actual end of chain. */
/* NOTE(review): fragment — the signature, local declarations, counter
   increments, abort() calls, and the trailing get_next_bb_note /
   get_prev_bb_note helpers are missing (730 -> 743, 784 -> 795).  The two
   stray NOTE_INSN_BASIC_BLOCK_P tests at the end appear to belong to those
   helper functions, not to verify_insn_chain itself — TODO confirm.  */
743 for (x = get_insns (); x; x = NEXT_INSN (x))
745 if (PREV_INSN (x) != prevx)
747 fprintf (stderr, "Forward traversal: insn chain corrupt.\n");
748 fprintf (stderr, "previous insn:\n");
750 fprintf (stderr, "current insn:\n");
758 if (prevx != get_last_insn ())
760 fprintf (stderr, "last_insn corrupt.\n");
766 for (x = get_last_insn (); x; x = PREV_INSN (x))
768 if (NEXT_INSN (x) != nextx)
770 fprintf (stderr, "Reverse traversal: insn chain corrupt.\n");
771 fprintf (stderr, "current insn:\n");
773 fprintf (stderr, "next insn:\n");
781 if (insn_cnt1 != insn_cnt2)
783 fprintf (stderr, "insn_cnt1 (%d) not equal to insn_cnt2 (%d).\n",
784 insn_cnt1, insn_cnt2);
795 if (NOTE_INSN_BASIC_BLOCK_P (x))
809 if (NOTE_INSN_BASIC_BLOCK_P (x))
817 /* Determine and record the relationships between basic blocks and
818 scopes in scope tree S. */
/* NOTE(review): fragment — signature return type, `bbs_spanned` assignments,
   several braces and the level = -1 empty-scope marking are among the
   missing lines (829 -> 834, 920 -> 926).  The function classifies each
   scope as either spanning whole basic blocks (recording them in s->bbs)
   or being contained within a single block (bb_beg == bb_end).  */
821 relate_bbs_with_scopes (s)
825 int i, bbi1, bbi2, bbs_spanned;
/* Process children first so inner scopes claim their blocks before the
   outer scope scans the same index range below.  */
828 for (p = s->inner; p; p = p->next)
829 relate_bbs_with_scopes (p);
834 /* If the begin and end notes are both inside the same basic block,
835 or if they are both outside of basic blocks, then we know immediately
836 how they are related. Otherwise, we need to poke around to make the
838 if (s->bb_beg != s->bb_end)
840 if (s->bb_beg && s->bb_end)
842 /* Both notes are in different bbs. This implies that all the
843 basic blocks spanned by the pair of notes are contained in
845 bbi1 = s->bb_beg->index;
846 bbi2 = s->bb_end->index;
849 else if (! s->bb_beg)
851 /* First note is outside of a bb. If the scope spans more than
852 one basic block, then they all are contained within this
853 scope. Otherwise, this scope is contained within the basic
855 bbnote = get_next_bb_note (s->note_beg);
858 if (NOTE_BASIC_BLOCK (bbnote) == s->bb_end)
861 s->bb_beg = NOTE_BASIC_BLOCK (bbnote);
865 bbi1 = NOTE_BASIC_BLOCK (bbnote)->index;
866 bbi2 = s->bb_end->index;
871 else /* ! s->bb_end */
873 /* Second note is outside of a bb. If the scope spans more than
874 one basic block, then they all are contained within this
875 scope. Otherwise, this scope is contained within the basic
877 bbnote = get_prev_bb_note (s->note_end);
880 if (NOTE_BASIC_BLOCK (bbnote) == s->bb_beg)
883 s->bb_end = NOTE_BASIC_BLOCK (bbnote);
887 bbi1 = s->bb_beg->index;
888 bbi2 = NOTE_BASIC_BLOCK (bbnote)->index;
897 /* Both notes are in the same bb, which implies the block
898 contains this scope. */
903 /* Both notes are outside of any bbs. This implies that all the
904 basic blocks spanned by the pair of notes are contained in
906 There is a degenerate case to consider. If the notes do not
907 span any basic blocks, then it is an empty scope that can
908 safely be deleted or ignored. Mark these with level = -1. */
910 x1 = get_next_bb_note (s->note_beg);
911 x2 = get_prev_bb_note (s->note_end);
919 bbi1 = NOTE_BASIC_BLOCK (x1)->index;
920 bbi2 = NOTE_BASIC_BLOCK (x2)->index;
926 /* If the scope spans one or more basic blocks, we record them. We
927 only record the bbs that are immediately contained within this
928 scope. Note that if a scope is contained within a bb, we can tell
929 by checking that bb_beg = bb_end and that they are non-null. */
/* First pass counts unclaimed blocks in [bbi1, bbi2]; second pass
   records them and marks each block's owning scope.  */
935 for (i = bbi1; i <= bbi2; i++)
936 if (! RBI (BASIC_BLOCK (i))->scope)
939 s->bbs = xmalloc (s->num_bbs * sizeof (basic_block));
940 for (i = bbi1; i <= bbi2; i++)
942 basic_block curr_bb = BASIC_BLOCK (i);
943 if (! RBI (curr_bb)->scope)
945 s->bbs[j++] = curr_bb;
946 RBI (curr_bb)->scope = s;
955 /* Allocate and initialize a new scope structure with scope level LEVEL,
956 and record the NOTE beginning the scope. */
/* NOTE(review): fragment — return type, argument declarations and
   `return new_scope;` are missing (959 -> 963, ends at 965).  xcalloc
   zero-initializes every other field (inner, next, bbs, ...).  */
959 make_new_scope (level, note)
963 scope new_scope = xcalloc (1, sizeof (struct scope_def));
964 new_scope->level = level;
965 new_scope->note_beg = note;
970 /* Build a forest representing the scope structure of the function.
971 Return a pointer to a structure describing the forest. */
/* NOTE(review): fragment — the level++/level-- bookkeeping, `root`
   assignment, bbi increment and several braces are missing (983 -> 988,
   1016 -> 1020, 1035 -> 1041).  One linear walk of the insn chain: each
   BLOCK_BEG note opens a child (or a new tree root at top level), each
   BLOCK_END closes the current scope and pops to its outer scope.  */
974 build_scope_forest (forest)
975 scope_forest_info *forest;
980 scope root, curr_scope = 0;
982 forest->num_trees = 0;
983 forest->trees = NULL;
988 for (x = get_insns (); x; x = NEXT_INSN (x))
/* Track which basic block the current insn belongs to.  */
990 if (bbi < n_basic_blocks && x == BASIC_BLOCK (bbi)->head)
991 curr_bb = BASIC_BLOCK (bbi);
993 if (GET_CODE (x) == NOTE)
995 if (NOTE_LINE_NUMBER (x) == NOTE_INSN_BLOCK_BEG)
1003 new_scope = make_new_scope (level, x);
1004 new_scope->outer = curr_scope;
1005 new_scope->next = NULL;
1006 if (! curr_scope->inner)
1008 curr_scope->inner = new_scope;
1009 curr_scope->inner_last = new_scope;
1013 curr_scope->inner_last->next = new_scope;
1014 curr_scope->inner_last = new_scope;
1016 curr_scope = curr_scope->inner_last;
1020 int ntrees = forest->num_trees;
1022 curr_scope = make_new_scope (level, x);
1024 forest->trees = xrealloc (forest->trees,
1025 sizeof (scope) * (ntrees + 1));
1026 forest->trees[forest->num_trees++] = root;
1028 curr_scope->bb_beg = curr_bb;
1030 else if (NOTE_LINE_NUMBER (x) == NOTE_INSN_BLOCK_END)
1032 curr_scope->bb_end = curr_bb;
1033 curr_scope->note_end = x;
1035 curr_scope = curr_scope->outer;
1041 if (curr_bb && curr_bb->end == x)
1049 for (i = 0; i < forest->num_trees; i++)
1050 relate_bbs_with_scopes (forest->trees[i]);
1054 /* Remove all the NOTE_INSN_BLOCK_BEG and NOTE_INSN_BLOCK_END notes from
/* NOTE(review): fragment — signature return type, declarations, and the
   `currbb->head = next` branch implied by the `x == currbb->head` test are
   missing (1076 -> 1081).  Notes are unlinked from the chain and their
   next/prev pointers cleared; block head/end pointers are patched when a
   removed note was the block boundary.  */
1058 remove_scope_notes ()
1061 basic_block currbb = NULL;
1063 for (x = get_insns (); x; x = next)
1065 next = NEXT_INSN (x);
1066 if (NOTE_INSN_BASIC_BLOCK_P (x))
1067 currbb = NOTE_BASIC_BLOCK (x);
1069 if (GET_CODE (x) == NOTE
1070 && (NOTE_LINE_NUMBER (x) == NOTE_INSN_BLOCK_BEG
1071 || NOTE_LINE_NUMBER (x) == NOTE_INSN_BLOCK_END))
1073 /* Check if the scope note happens to be the end of a bb. */
1074 if (currbb && x == currbb->end)
1075 currbb->end = PREV_INSN (x);
1076 if (currbb && x == currbb->head)
1081 NEXT_INSN (PREV_INSN (x)) = next;
1082 PREV_INSN (next) = PREV_INSN (x);
1084 NEXT_INSN (x) = NULL;
1085 PREV_INSN (x) = NULL;
1094 /* Insert scope note pairs for a contained scope tree S after insn IP. */
/* NOTE(review): fragment — signature, argument declarations and braces are
   missing.  NOTE(review): line 1112 guards the BLOCK_END emission with
   NOTE_BLOCK (s->note_beg) but copies NOTE_BLOCK (s->note_end) — presumably
   the two notes share the same BLOCK so the guard is equivalent; TODO
   confirm before "fixing".  */
1097 insert_intra_1 (s, ip)
1103 if (NOTE_BLOCK (s->note_beg))
1105 *ip = emit_note_after (NOTE_INSN_BLOCK_BEG, *ip);
1106 NOTE_BLOCK (*ip) = NOTE_BLOCK (s->note_beg);
/* Recurse so inner scopes are nested between this pair of notes.  */
1109 for (p = s->inner; p; p = p->next)
1110 insert_intra_1 (p, ip);
1112 if (NOTE_BLOCK (s->note_beg))
1114 *ip = emit_note_after (NOTE_INSN_BLOCK_END, *ip);
1115 NOTE_BLOCK (*ip) = NOTE_BLOCK (s->note_end);
1120 /* Insert NOTE_INSN_BLOCK_END notes and NOTE_INSN_BLOCK_BEG notes for
1121 scopes that are contained within BB. */
/* NOTE(review): fragment — signature, the early-return for a scope-less
   block and the initial assignment of `ip` are missing (1127 -> 1135).
   Only children wholly contained in BB (bb_beg == bb_end == bb) get their
   note pairs re-emitted here; scopes crossing block boundaries are handled
   by insert_inter_bb_scope_notes.  */
1124 insert_intra_bb_scope_notes (bb)
1127 scope s = RBI (bb)->scope;
/* Skip past a leading label so notes land inside the block body.  */
1135 if (GET_CODE (ip) == CODE_LABEL)
1136 ip = NEXT_INSN (ip);
1138 for (p = s->inner; p; p = p->next)
1140 if (p->bb_beg != NULL && p->bb_beg == p->bb_end && p->bb_beg == bb)
1141 insert_intra_1 (p, &ip);
1146 /* Given two consecutive basic blocks BB1 and BB2 with different scopes,
1147 insert NOTE_INSN_BLOCK_END notes after BB1 and NOTE_INSN_BLOCK_BEG
1148 notes before BB2 such that the notes are correctly balanced. If BB1 or
1149 BB2 is NULL, we are inserting scope notes for the first and last basic
1150 blocks, respectively. */
/* NOTE(review): heavily fragmented — signature, the common-ancestor walk
   body (the s1 = s1->outer / s2 = s2->outer steps implied by the level
   comparisons), the loop bounds for closing/opening scopes, and the
   `ip = RBI (bb2)->eff_head`-style setup are missing (1165 -> 1174,
   1185 -> 1201, 1208 -> 1217).  Shape: find the deepest scope common to
   both blocks, emit BLOCK_END notes after BB1 up to it, then BLOCK_BEG
   notes before BB2 back down.  */
1153 insert_inter_bb_scope_notes (bb1, bb2)
1160 /* It is possible that a basic block is not contained in any scope.
1161 In that case, we either open or close a scope but not both. */
1164 scope s1 = RBI (bb1)->scope;
1165 scope s2 = RBI (bb2)->scope;
1174 /* Find common ancestor scope. */
1177 scope s1 = RBI (bb1)->scope;
1178 scope s2 = RBI (bb2)->scope;
1183 if (s1->level > s2->level)
1185 else if (s2->level > s1->level)
1201 scope s = RBI (bb1)->scope;
1202 ip = RBI (bb1)->eff_end;
1205 if (NOTE_BLOCK (s->note_beg))
1207 ip = emit_note_after (NOTE_INSN_BLOCK_END, ip);
1208 NOTE_BLOCK (ip) = NOTE_BLOCK (s->note_end);
1217 scope s = RBI (bb2)->scope;
1221 if (NOTE_BLOCK (s->note_beg))
1223 ip = emit_note_before (NOTE_INSN_BLOCK_BEG, ip);
1224 NOTE_BLOCK (ip) = NOTE_BLOCK (s->note_beg);
1232 /* Rebuild all the NOTE_INSN_BLOCK_BEG and NOTE_INSN_BLOCK_END notes based
1233 on the scope forest and the newly reordered basic blocks. */
/* NOTE(review): fragment — return type, the `int i;` declaration and the
   `return;` after the empty-forest check are missing.  Three phases:
   open scopes before block 0, balance notes between each adjacent pair,
   close scopes after the last block.  */
1236 rebuild_scope_notes (forest)
1237 scope_forest_info *forest;
1241 if (forest->num_trees == 0)
1244 /* Start by opening the scopes before the first basic block. */
1245 insert_inter_bb_scope_notes (NULL, BASIC_BLOCK (0));
1247 /* Then, open and close scopes as needed between blocks. */
1248 for (i = 0; i < n_basic_blocks - 1; i++)
1250 basic_block bb1 = BASIC_BLOCK (i);
1251 basic_block bb2 = BASIC_BLOCK (i + 1);
1252 if (RBI (bb1)->scope != RBI (bb2)->scope)
1253 insert_inter_bb_scope_notes (bb1, bb2);
1254 insert_intra_bb_scope_notes (bb1);
1257 /* Finally, close the scopes after the last basic block. */
1258 insert_inter_bb_scope_notes (BASIC_BLOCK (n_basic_blocks - 1), NULL);
1259 insert_intra_bb_scope_notes (BASIC_BLOCK (n_basic_blocks - 1));
1263 /* Free the storage associated with the scope tree at S. */
/* NOTE(review): fragment — signature, the `next = p->next;` capture
   (implied by the loop using `next`) and the free of s->bbs / s itself
   are missing (1274 -> 1283).  */
1266 free_scope_forest_1 (s)
1271 for (p = s->inner; p; p = next)
1274 free_scope_forest_1 (p);
1283 /* Free the storage associated with the scope forest. */
/* NOTE(review): fragment — return type, `int i;` and the free of
   forest->trees itself are not visible.  Frees each tree recursively.  */
1286 free_scope_forest (forest)
1287 scope_forest_info *forest;
1290 for (i = 0; i < forest->num_trees; i++)
1291 free_scope_forest_1 (forest->trees[i]);
1295 /* Visualize the scope forest. */
/* NOTE(review): debug entry point (non-static, see prototype list).
   Fragment — return type, `int i;` and the else-branch brace around the
   non-empty case are missing (1302 -> 1306).  Dumps to stderr.  */
1298 dump_scope_forest (forest)
1299 scope_forest_info *forest;
1301 if (forest->num_trees == 0)
1302 fprintf (stderr, "\n< Empty scope forest >\n");
1306 fprintf (stderr, "\n< Scope forest >\n");
1307 for (i = 0; i < forest->num_trees; i++)
1308 dump_scope_forest_1 (forest->trees[i], 0);
1313 /* Recursive portion of dump_scope_forest. */
/* NOTE(review): fragment — signature types, declarations and a `return;`
   for the contained-in-one-bb shortcut are missing (1328 -> 1331).
   Prints this scope at INDENT columns, its block list, then children at
   INDENT + 2.  */
1316 dump_scope_forest_1 (s, indent)
/* A scope nested one level inside the scope that owns its single block
   is printed as a bare "BB%d:" line instead of a full { ... } body.  */
1323 if (s->bb_beg != NULL && s->bb_beg == s->bb_end
1324 && RBI (s->bb_beg)->scope
1325 && RBI (s->bb_beg)->scope->level + 1 == s->level)
1327 fprintf (stderr, "%*s", indent, "");
1328 fprintf (stderr, "BB%d:\n", s->bb_beg->index);
1331 fprintf (stderr, "%*s", indent, "");
1332 fprintf (stderr, "{ level %d (block %p)\n", s->level,
1333 (PTR) NOTE_BLOCK (s->note_beg));
1335 fprintf (stderr, "%*s%s", indent, "", "bbs:");
1336 for (i = 0; i < s->num_bbs; i++)
1337 fprintf (stderr, " %d", s->bbs[i]->index);
1338 fprintf (stderr, "\n");
1340 for (p = s->inner; p; p = p->next)
1341 dump_scope_forest_1 (p, indent + 2);
1343 fprintf (stderr, "%*s", indent, "");
1344 fprintf (stderr, "}\n");
1348 /* Reorder basic blocks. The main entry point to this file. */
/* NOTE(review): fragment — return type, declarations, the `return;` after
   finding an EH edge, and the closing #endif lines are missing (1366 ->
   1370, 1383 -> 1386).  Pipeline: allocate per-block aux records, snapshot
   the scope forest, strip scope notes, compute and apply the new order,
   then rebuild scope notes and free everything.  */
1351 reorder_basic_blocks ()
1353 scope_forest_info forest;
/* Nothing to reorder for 0 or 1 blocks.  */
1356 if (n_basic_blocks <= 1)
1359 /* We do not currently handle correct re-placement of EH notes.
1360 But that does not matter unless we intend to use them. */
1361 if (flag_exceptions)
1362 for (i = 0; i < n_basic_blocks; i++)
1365 for (e = BASIC_BLOCK (i)->succ; e ; e = e->succ_next)
1366 if (e->flags & EDGE_EH)
1370 for (i = 0; i < n_basic_blocks; i++)
1371 BASIC_BLOCK (i)->aux = xcalloc (1, sizeof (struct reorder_block_def));
1373 EXIT_BLOCK_PTR->aux = xcalloc (1, sizeof (struct reorder_block_def));
1375 build_scope_forest (&forest);
1376 remove_scope_notes ();
1378 record_effective_endpoints ();
1379 make_reorder_chain ();
1380 fixup_reorder_chain ();
1382 #ifdef ENABLE_CHECKING
1383 verify_insn_chain ();
1386 rebuild_scope_notes (&forest);
1387 free_scope_forest (&forest);
1390 for (i = 0; i < n_basic_blocks; i++)
1391 free (BASIC_BLOCK (i)->aux);
1393 free (EXIT_BLOCK_PTR->aux);
1395 #ifdef ENABLE_CHECKING
1396 verify_flow_info ();