1 /* Basic block reordering routines for the GNU compiler.
2 Copyright (C) 2000, 2001, 2003 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 2, or (at your option) any later
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING. If not, write to the Free
18 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
23 #include "coretypes.h"
27 #include "hard-reg-set.h"
28 #include "basic-block.h"
29 #include "insn-config.h"
33 #include "cfglayout.h"
37 #include "alloc-pool.h"
39 /* The contents of the current function definition are allocated
40 in this obstack, and all are freed at the end of the function. */
41 extern struct obstack flow_obstack;
/* Allocation pool for the per-basic-block reorder_block_def (rbi)
   structures used while the CFG is in layout mode.  */
43 alloc_pool cfg_layout_pool;
45 /* Holds the interesting trailing notes for the function. */
46 rtx cfg_layout_function_footer, cfg_layout_function_header;
/* Forward declarations for the static helpers defined below.
   NOTE(review): this extraction appears to have dropped some lines
   (the embedded original line numbers are not contiguous).  */
48 static rtx skip_insns_after_block (basic_block);
49 static void record_effective_endpoints (void);
50 static rtx label_for_bb (basic_block);
51 static void fixup_reorder_chain (void);
53 static void set_block_levels (tree, int);
54 static void change_scope (rtx, tree, tree);
56 void verify_insn_chain (void);
57 static void fixup_fallthru_exit_predecessor (void);
58 static rtx duplicate_insn_chain (rtx, rtx);
59 static void break_superblocks (void);
60 static tree insn_scope (rtx);
/* Detach the insn sequence FIRST..LAST from the doubly linked insn
   chain, splice its neighbors together, and leave the detached run
   with NULL prev/next at its ends.
   NOTE(review): lines are missing from this extraction (return type,
   braces, and the conditions guarding the prevfirst/nextlast fixups
   and the set_first_insn/set_last_insn calls) -- confirm against the
   unmangled source before editing.  */
63 unlink_insn_chain (rtx first, rtx last)
65 rtx prevfirst = PREV_INSN (first);
66 rtx nextlast = NEXT_INSN (last);
/* Terminate the detached run at both ends.  */
68 PREV_INSN (first) = NULL;
69 NEXT_INSN (last) = NULL;
/* Re-link the surrounding insns; the function-wide first/last insn
   pointers are refreshed when the run sat at either chain end.  */
71 NEXT_INSN (prevfirst) = nextlast;
73 PREV_INSN (nextlast) = prevfirst;
75 set_last_insn (prevfirst);
77 set_first_insn (nextlast);
81 /* Skip over inter-block insns occurring after BB which are typically
82 associated with BB (e.g., barriers). If there are any such insns,
83 we return the last one. Otherwise, we return the end of BB. */
/* NOTE(review): this extraction has dropped lines throughout the body
   (case labels for BARRIER/JUMP_INSN/CODE_LABEL, break statements,
   braces) -- the switch structure below is incomplete as shown.  */
86 skip_insns_after_block (basic_block bb)
88 rtx insn, last_insn, next_head, prev;
/* next_head bounds the walk: never skip into the following block.  */
91 if (bb->next_bb != EXIT_BLOCK_PTR)
92 next_head = bb->next_bb->head;
94 for (last_insn = insn = bb->end; (insn = NEXT_INSN (insn)) != 0; )
96 if (insn == next_head)
99 switch (GET_CODE (insn))
106 switch (NOTE_LINE_NUMBER (insn))
108 case NOTE_INSN_LOOP_END:
109 case NOTE_INSN_BLOCK_END:
112 case NOTE_INSN_DELETED:
113 case NOTE_INSN_DELETED_LABEL:
/* A CODE_LABEL immediately followed by a jump table (ADDR_VEC or
   ADDR_DIFF_VEC) belongs with the preceding block; include both.  */
124 && GET_CODE (NEXT_INSN (insn)) == JUMP_INSN
125 && (GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_VEC
126 || GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_DIFF_VEC))
128 insn = NEXT_INSN (insn);
141 /* It is possible to hit contradictory sequence. For instance:
147 Where barrier belongs to jump_insn, but the note does not. This can be
148 created by removing the basic block originally following
149 NOTE_INSN_LOOP_BEG. In such case reorder the notes. */
/* Second pass: walk backwards and move the stray notes after the
   insns they were interleaved with.  */
151 for (insn = last_insn; insn != bb->end; insn = prev)
153 prev = PREV_INSN (insn);
154 if (GET_CODE (insn) == NOTE)
155 switch (NOTE_LINE_NUMBER (insn))
157 case NOTE_INSN_LOOP_END:
158 case NOTE_INSN_BLOCK_END:
159 case NOTE_INSN_DELETED:
160 case NOTE_INSN_DELETED_LABEL:
163 reorder_insns (insn, insn, last_insn);
170 /* Locate or create a label for a given basic block. */
/* NOTE(review): return type, braces, and the returning of LABEL are
   missing from this extraction; block_label presumably emits a fresh
   CODE_LABEL when the block head is not one -- confirm.  */
173 label_for_bb (basic_block bb)
175 rtx label = bb->head;
/* Block does not start with a label; report and create one.  */
177 if (GET_CODE (label) != CODE_LABEL)
180 fprintf (rtl_dump_file, "Emitting label for block %d\n", bb->index);
182 label = block_label (bb);
188 /* Locate the effective beginning and end of the insn chain for each
189 block, as defined by skip_insns_after_block above. */
/* Detaches the pre-first-block insns into cfg_layout_function_header,
   per-block surrounding insns into bb->rbi->header/footer, and the
   trailing insns into cfg_layout_function_footer.
   NOTE(review): local declarations, the FOR_EACH_BB loop head, and
   several braces are missing from this extraction.  */
192 record_effective_endpoints (void)
/* Scan leading notes up to the first NOTE_INSN_BASIC_BLOCK.  */
198 for (insn = get_insns ();
200 && GET_CODE (insn) == NOTE
201 && NOTE_LINE_NUMBER (insn) != NOTE_INSN_BASIC_BLOCK;
202 insn = NEXT_INSN (insn))
205 abort (); /* No basic blocks at all? */
206 if (PREV_INSN (insn))
207 cfg_layout_function_header =
208 unlink_insn_chain (get_insns (), PREV_INSN (insn));
210 cfg_layout_function_header = NULL_RTX;
212 next_insn = get_insns ();
/* Per-block: anything between the previous block's effective end and
   this block's head becomes the block's header; anything after the
   block's end (barriers etc.) becomes its footer.  */
217 if (PREV_INSN (bb->head) && next_insn != bb->head)
218 bb->rbi->header = unlink_insn_chain (next_insn,
219 PREV_INSN (bb->head))
220 end = skip_insns_after_block (bb);
221 if (NEXT_INSN (bb->end) && bb->end != end)
222 bb->rbi->footer = unlink_insn_chain (NEXT_INSN (bb->end), end);
223 next_insn = NEXT_INSN (bb->end);
226 cfg_layout_function_footer = next_insn;
227 if (cfg_layout_function_footer)
228 cfg_layout_function_footer = unlink_insn_chain (cfg_layout_function_footer, get_last_insn ());
231 /* Data structures representing mapping of INSN_LOCATOR into scope blocks, line
232 numbers and files. In order to be GGC friendly we need to use separate
233 varrays. This also slightly improves the memory locality in binary search.
234 The _locs array contains locators where the given property changes. The
235 block_locators_blocks contains the scope block that is used for all insn
236 locator greater than corresponding block_locators_locs value and smaller
237 than the following one. Similarly for the other properties. */
238 static GTY(()) varray_type block_locators_locs;
239 static GTY(()) varray_type block_locators_blocks;
240 static GTY(()) varray_type line_locators_locs;
241 static GTY(()) varray_type line_locators_lines;
242 static GTY(()) varray_type file_locators_locs;
243 static GTY(()) varray_type file_locators_files;
/* Locators assigned to the function's prologue and epilogue insns;
   0 until insn_locators_initialize runs.  */
244 int prologue_locator;
245 int epilogue_locator;
247 /* During the RTL expansion the lexical blocks and line numbers are
248 represented via INSN_NOTEs. Replace them by representation using
/* ... INSN_LOCATORs (comment truncated in this extraction).
   NOTE(review): the locals (insn, next, block, loc), the loc increment,
   several braces and break statements are missing here -- the visible
   lines are an incomplete rendering of the original function.  */
252 insn_locators_initialize (void)
255 tree last_block = NULL;
258 int line_number = 0, last_line_number = 0;
259 char *file_name = NULL, *last_file_name = NULL;
261 prologue_locator = epilogue_locator = 0;
263 VARRAY_INT_INIT (block_locators_locs, 32, "block_locators_locs");
264 VARRAY_TREE_INIT (block_locators_blocks, 32, "block_locators_blocks");
265 VARRAY_INT_INIT (line_locators_locs, 32, "line_locators_locs");
266 VARRAY_INT_INIT (line_locators_lines, 32, "line_locators_lines");
267 VARRAY_INT_INIT (file_locators_locs, 32, "file_locators_locs");
268 VARRAY_CHAR_PTR_INIT (file_locators_files, 32, "file_locators_files");
270 for (insn = get_insns (); insn; insn = next)
272 next = NEXT_INSN (insn);
/* Real insns (but not jump tables) get a locator; record a new
   locator entry whenever block, line, or file changed since the
   last one.  */
274 if ((active_insn_p (insn)
275 && GET_CODE (PATTERN (insn)) != ADDR_VEC
276 && GET_CODE (PATTERN (insn)) != ADDR_DIFF_VEC)
278 || (!prologue_locator && file_name))
280 if (last_block != block)
283 VARRAY_PUSH_INT (block_locators_locs, loc);
284 VARRAY_PUSH_TREE (block_locators_blocks, block);
287 if (last_line_number != line_number)
290 VARRAY_PUSH_INT (line_locators_locs, loc);
291 VARRAY_PUSH_INT (line_locators_lines, line_number);
292 last_line_number = line_number;
294 if (last_file_name != file_name)
297 VARRAY_PUSH_INT (file_locators_locs, loc);
298 VARRAY_PUSH_CHAR_PTR (file_locators_files, file_name);
299 last_file_name = file_name;
302 if (!prologue_locator && file_name)
303 prologue_locator = loc;
304 if (!NEXT_INSN (insn))
305 epilogue_locator = loc;
306 if (active_insn_p (insn))
307 INSN_LOCATOR (insn) = loc;
308 else if (GET_CODE (insn) == NOTE)
/* Track scope and source position from the notes being replaced.  */
310 switch (NOTE_LINE_NUMBER (insn))
312 case NOTE_INSN_BLOCK_BEG:
313 block = NOTE_BLOCK (insn);
316 case NOTE_INSN_BLOCK_END:
317 block = BLOCK_SUPERCONTEXT (block);
318 if (block && TREE_CODE (block) == FUNCTION_DECL)
/* Positive note numbers carry a source line/file.  */
323 if (NOTE_LINE_NUMBER (insn) > 0)
325 line_number = NOTE_LINE_NUMBER (insn);
326 file_name = (char *)NOTE_SOURCE_FILE (insn);
333 /* Tag the blocks with a depth number so that change_scope can find
334 the common parent easily. */
335 set_block_levels (DECL_INITIAL (cfun->decl), 0);
338 /* For each lexical block, set BLOCK_NUMBER to the depth at which it is
339 found in the block tree. */
/* Recurses into subblocks at LEVEL + 1 and iterates siblings via
   BLOCK_CHAIN.  NOTE(review): the enclosing while-loop/braces are
   missing from this extraction.  */
342 set_block_levels (tree block, int level)
346 BLOCK_NUMBER (block) = level;
347 set_block_levels (BLOCK_SUBBLOCKS (block), level + 1);
348 block = BLOCK_CHAIN (block);
352 /* Return scope resulting from combination of S1 and S2. */
/* NOTE(review): the NULL checks and return statements are missing
   from this extraction; presumably the deeper of the two scopes
   (larger BLOCK_NUMBER) is returned -- confirm.  */
354 choose_inner_scope (tree s1, tree s2)
360 if (BLOCK_NUMBER (s1) > BLOCK_NUMBER (s2))
365 /* Emit lexical block notes needed to change scope from S1 to S2. */
/* Walks both scopes up to their common ancestor COM (using the depth
   numbers assigned by set_block_levels), then emits BLOCK_END notes
   for the path from S1 up to COM and BLOCK_BEG notes for the path
   down to S2, all before ORIG_INSN.
   NOTE(review): the loop heads, the assignment of COM, and the local
   "tree s" declarations are missing from this extraction.  */
368 change_scope (rtx orig_insn, tree s1, tree s2)
370 rtx insn = orig_insn;
371 tree com = NULL_TREE;
372 tree ts1 = s1, ts2 = s2;
377 if (ts1 == NULL || ts2 == NULL)
/* Bring both walkers to the same depth, then ascend in lockstep.  */
379 if (BLOCK_NUMBER (ts1) > BLOCK_NUMBER (ts2))
380 ts1 = BLOCK_SUPERCONTEXT (ts1);
381 else if (BLOCK_NUMBER (ts1) < BLOCK_NUMBER (ts2))
382 ts2 = BLOCK_SUPERCONTEXT (ts2);
385 ts1 = BLOCK_SUPERCONTEXT (ts1);
386 ts2 = BLOCK_SUPERCONTEXT (ts2);
/* Close scopes from S1 up to the common ancestor.  */
395 rtx note = emit_note_before (NOTE_INSN_BLOCK_END, insn);
396 NOTE_BLOCK (note) = s;
397 s = BLOCK_SUPERCONTEXT (s);
/* Open scopes from the common ancestor down to S2.  */
404 insn = emit_note_before (NOTE_INSN_BLOCK_BEG, insn);
405 NOTE_BLOCK (insn) = s;
406 s = BLOCK_SUPERCONTEXT (s);
410 /* Return lexical scope block insn belongs to. */
/* Binary search over block_locators_locs for the greatest entry not
   exceeding the insn's locator.  NOTE(review): return type, the "min"
   declaration, loop head, and the pos-assignment branches are missing
   from this extraction.  */
412 insn_scope (rtx insn)
414 int max = VARRAY_ACTIVE_SIZE (block_locators_locs);
416 int loc = INSN_LOCATOR (insn);
422 int pos = (min + max) / 2;
423 int tmp = VARRAY_INT (block_locators_locs, pos);
425 if (tmp <= loc && min != pos)
427 else if (tmp > loc && max != pos)
435 return VARRAY_TREE (block_locators_blocks, min);
438 /* Return line number of the statement that produced this insn. */
/* NOTE(review): the function signature line itself (presumably
   insn_line (rtx insn)) is missing from this extraction, along with
   the loop head and branch bodies.  Same binary-search shape as
   insn_scope above, over the line locator varrays.  */
442 int max = VARRAY_ACTIVE_SIZE (line_locators_locs);
444 int loc = INSN_LOCATOR (insn);
450 int pos = (min + max) / 2;
451 int tmp = VARRAY_INT (line_locators_locs, pos);
453 if (tmp <= loc && min != pos)
455 else if (tmp > loc && max != pos)
463 return VARRAY_INT (line_locators_lines, min);
466 /* Return source file of the statement that produced this insn. */
/* NOTE(review): the function signature line itself (presumably
   insn_file (rtx insn)) is missing from this extraction, along with
   the loop head and branch bodies.  Same binary-search shape as
   insn_scope above, over the file locator varrays.  */
470 int max = VARRAY_ACTIVE_SIZE (file_locators_locs);
472 int loc = INSN_LOCATOR (insn);
478 int pos = (min + max) / 2;
479 int tmp = VARRAY_INT (file_locators_locs, pos);
481 if (tmp <= loc && min != pos)
483 else if (tmp > loc && max != pos)
491 return VARRAY_CHAR_PTR (file_locators_files, min);
494 /* Rebuild all the NOTE_INSN_BLOCK_BEG and NOTE_INSN_BLOCK_END notes based
495 on the scope tree and the newly reordered instructions. */
/* NOTE(review): locals (insn, note, this_block, i), braces, and some
   statements are missing from this extraction.  */
498 reemit_insn_block_notes (void)
500 tree cur_block = DECL_INITIAL (cfun->decl);
504 if (!active_insn_p (insn))
505 insn = next_active_insn (insn);
506 for (; insn; insn = next_active_insn (insn))
510 this_block = insn_scope (insn);
511 /* For sequences compute scope resulting from merging all scopes
512 of instructions nested inside. */
513 if (GET_CODE (PATTERN (insn)) == SEQUENCE)
516 rtx body = PATTERN (insn);
519 for (i = 0; i < XVECLEN (body, 0); i++)
520 this_block = choose_inner_scope (this_block,
521 insn_scope (XVECEXP (body, 0, i)));
/* Emit notes only at points where the scope actually changes.  */
526 if (this_block != cur_block)
528 change_scope (insn, cur_block, this_block);
529 cur_block = this_block;
533 /* change_scope emits before the insn, not after. */
534 note = emit_note (NOTE_INSN_DELETED);
535 change_scope (note, cur_block, DECL_INITIAL (cfun->decl));
541 /* Given a reorder chain, rearrange the code to match. */
/* Walks the bb->rbi->next chain laid out by block reordering, splices
   the insn stream into that order, then patches jumps/fallthrus and
   finally rebuilds the bb prev/next links and BASIC_BLOCK array.
   NOTE(review): this extraction drops many lines throughout (locals
   such as insn/index/nb, braces, else-branches, abort calls,
   FOR_EACH_BB heads); treat the visible structure as incomplete.  */
544 fixup_reorder_chain (void)
546 basic_block bb, prev_bb;
/* Start the new insn chain with the detached function header.  */
550 if (cfg_layout_function_header)
552 set_first_insn (cfg_layout_function_header);
553 insn = cfg_layout_function_header;
554 while (NEXT_INSN (insn))
555 insn = NEXT_INSN (insn);
558 /* First do the bulk reordering -- rechain the blocks without regard to
559 the needed changes to jumps and labels. */
561 for (bb = ENTRY_BLOCK_PTR->next_bb, index = 0;
563 bb = bb->rbi->next, index++)
/* Append the block's header insns, then its body, then its footer.  */
568 NEXT_INSN (insn) = bb->rbi->header;
570 set_first_insn (bb->rbi->header);
571 PREV_INSN (bb->rbi->header) = insn;
572 insn = bb->rbi->header;
573 while (NEXT_INSN (insn))
574 insn = NEXT_INSN (insn);
577 NEXT_INSN (insn) = bb->head;
579 set_first_insn (bb->head);
580 PREV_INSN (bb->head) = insn;
584 NEXT_INSN (insn) = bb->rbi->footer;
585 PREV_INSN (bb->rbi->footer) = insn;
586 while (NEXT_INSN (insn))
587 insn = NEXT_INSN (insn);
/* Sanity: every block must have been visited exactly once.  */
591 if (index != n_basic_blocks)
594 NEXT_INSN (insn) = cfg_layout_function_footer;
595 if (cfg_layout_function_footer)
596 PREV_INSN (cfg_layout_function_footer) = insn;
598 while (NEXT_INSN (insn))
599 insn = NEXT_INSN (insn);
601 set_last_insn (insn);
602 #ifdef ENABLE_CHECKING
603 verify_insn_chain ();
606 /* Now add jumps and labels as needed to match the blocks new
609 for (bb = ENTRY_BLOCK_PTR->next_bb; bb ; bb = bb->rbi->next)
611 edge e_fall, e_taken, e;
615 if (bb->succ == NULL)
618 /* Find the old fallthru edge, and another non-EH edge for
620 e_taken = e_fall = NULL;
621 for (e = bb->succ; e ; e = e->succ_next)
622 if (e->flags & EDGE_FALLTHRU)
624 else if (! (e->flags & EDGE_EH))
627 bb_end_insn = bb->end;
628 if (GET_CODE (bb_end_insn) == JUMP_INSN)
630 if (any_condjump_p (bb_end_insn))
632 /* If the old fallthru is still next, nothing to do. */
633 if (bb->rbi->next == e_fall->dest
635 && e_fall->dest == EXIT_BLOCK_PTR))
638 /* The degenerated case of conditional jump jumping to the next
639 instruction can happen on target having jumps with side
642 Create temporarily the duplicated edge representing branch.
643 It will get unidentified by force_nonfallthru_and_redirect
644 that would otherwise get confused by fallthru edge not pointing
645 to the next basic block. */
651 e_fake = unchecked_make_edge (bb, e_fall->dest, 0);
653 if (!redirect_jump (bb->end, block_label (bb), 0))
655 note = find_reg_note (bb->end, REG_BR_PROB, NULL_RTX);
658 int prob = INTVAL (XEXP (note, 0));
/* Split the profile between the fake branch edge and the
   (now-zeroed) fallthru edge, clamping at zero.  */
660 e_fake->probability = prob;
661 e_fake->count = e_fall->count * prob / REG_BR_PROB_BASE;
662 e_fall->probability -= e_fall->probability;
663 e_fall->count -= e_fake->count;
664 if (e_fall->probability < 0)
665 e_fall->probability = 0;
666 if (e_fall->count < 0)
670 /* There is one special case: if *neither* block is next,
671 such as happens at the very end of a function, then we'll
672 need to add a new unconditional jump. Choose the taken
673 edge based on known or assumed probability. */
674 else if (bb->rbi->next != e_taken->dest)
676 rtx note = find_reg_note (bb_end_insn, REG_BR_PROB, 0);
679 && INTVAL (XEXP (note, 0)) < REG_BR_PROB_BASE / 2
680 && invert_jump (bb_end_insn,
681 label_for_bb (e_fall->dest), 0))
683 e_fall->flags &= ~EDGE_FALLTHRU;
684 e_taken->flags |= EDGE_FALLTHRU;
685 update_br_prob_note (bb);
686 e = e_fall, e_fall = e_taken, e_taken = e;
690 /* Otherwise we can try to invert the jump. This will
691 basically never fail, however, keep up the pretense. */
692 else if (invert_jump (bb_end_insn,
693 label_for_bb (e_fall->dest), 0))
695 e_fall->flags &= ~EDGE_FALLTHRU;
696 e_taken->flags |= EDGE_FALLTHRU;
697 update_br_prob_note (bb);
701 else if (returnjump_p (bb_end_insn))
705 /* Otherwise we have some switch or computed jump. In the
706 99% case, there should not have been a fallthru edge. */
710 #ifdef CASE_DROPS_THROUGH
711 /* Except for VAX. Since we didn't have predication for the
712 tablejump, the fallthru block should not have moved. */
713 if (bb->rbi->next == e_fall->dest)
715 bb_end_insn = skip_insns_after_block (bb);
723 /* No fallthru implies a noreturn function with EH edges, or
724 something similarly bizarre. In any case, we don't need to
729 /* If the fallthru block is still next, nothing to do. */
730 if (bb->rbi->next == e_fall->dest)
733 /* A fallthru to exit block. */
734 if (!bb->rbi->next && e_fall->dest == EXIT_BLOCK_PTR)
738 /* We got here if we need to add a new jump insn. */
739 nb = force_nonfallthru (e_fall);
/* The forced-jump block NB inherits bb's place in the layout chain.  */
742 cfg_layout_initialize_rbi (nb);
743 nb->rbi->visited = 1;
744 nb->rbi->next = bb->rbi->next;
746 /* Don't process this new block. */
751 /* Put basic_block_info in the new order. */
755 fprintf (rtl_dump_file, "Reordered sequence:\n");
756 for (bb = ENTRY_BLOCK_PTR->next_bb, index = 0; bb; bb = bb->rbi->next, index ++)
758 fprintf (rtl_dump_file, " %i ", index);
759 if (bb->rbi->original)
760 fprintf (rtl_dump_file, "duplicate of %i ",
761 bb->rbi->original->index);
762 else if (forwarder_block_p (bb) && GET_CODE (bb->head) != CODE_LABEL)
763 fprintf (rtl_dump_file, "compensation ");
765 fprintf (rtl_dump_file, "bb %i ", bb->index);
766 fprintf (rtl_dump_file, " [%i]\n", bb->frequency);
/* Rebuild the prev_bb/next_bb links and BASIC_BLOCK array to match
   the layout-chain order.  */
770 prev_bb = ENTRY_BLOCK_PTR;
771 bb = ENTRY_BLOCK_PTR->next_bb;
774 for (; bb; prev_bb = bb, bb = bb->rbi->next, index ++)
777 BASIC_BLOCK (index) = bb;
779 bb->prev_bb = prev_bb;
780 prev_bb->next_bb = bb;
782 prev_bb->next_bb = EXIT_BLOCK_PTR;
783 EXIT_BLOCK_PTR->prev_bb = prev_bb;
785 /* Annoying special case - jump around dead jumptables left in the code. */
789 for (e = bb->succ; e && !(e->flags & EDGE_FALLTHRU); e = e->succ_next)
791 if (e && !can_fallthru (e->src, e->dest))
792 force_nonfallthru (e);
796 /* Perform sanity checks on the insn chain.
797 1. Check that next/prev pointers are consistent in both the forward and
799 2. Count insns in chain, going both directions, and check if equal.
800 3. Check that get_last_insn () returns the actual end of chain. */
/* NOTE(review): the rtx locals (x, prevx, nextx), the loop exit
   conditions, and the abort calls are missing from this extraction.  */
803 verify_insn_chain (void)
806 int insn_cnt1, insn_cnt2;
/* Forward walk: each insn's PREV_INSN must point at its predecessor.  */
808 for (prevx = NULL, insn_cnt1 = 1, x = get_insns ();
810 prevx = x, insn_cnt1++, x = NEXT_INSN (x))
811 if (PREV_INSN (x) != prevx)
814 if (prevx != get_last_insn ())
/* Backward walk: symmetric check via NEXT_INSN.  */
817 for (nextx = NULL, insn_cnt2 = 1, x = get_last_insn ();
819 nextx = x, insn_cnt2++, x = PREV_INSN (x))
820 if (NEXT_INSN (x) != nextx)
823 if (insn_cnt1 != insn_cnt2)
827 /* The block falling through to exit must be the last one in the
828 reordered chain. Ensure that this condition is met. */
/* NOTE(review): the "edge e" local, the assignment of BB from the
   fallthru edge, and the loop that appends BB to the end of the chain
   are missing from this extraction.  */
830 fixup_fallthru_exit_predecessor (void)
833 basic_block bb = NULL;
/* Find the (at most one) fallthru predecessor of the exit block.  */
835 for (e = EXIT_BLOCK_PTR->pred; e; e = e->pred_next)
836 if (e->flags & EDGE_FALLTHRU)
/* If BB is not already last in the layout chain, unlink it...  */
839 if (bb && bb->rbi->next)
841 basic_block c = ENTRY_BLOCK_PTR->next_bb;
843 while (c->rbi->next != bb)
846 c->rbi->next = bb->rbi->next;
/* ...and terminate the chain with it.  */
851 bb->rbi->next = NULL;
855 /* Return true in case it is possible to duplicate the basic block BB. */
/* NOTE(review): the return type, "edge s" / "rtx insn" locals,
   return statements, and loop braces are missing from this
   extraction.  */
858 cfg_layout_can_duplicate_bb_p (basic_block bb)
862 if (bb == EXIT_BLOCK_PTR || bb == ENTRY_BLOCK_PTR)
865 /* Duplicating fallthru block to exit would require adding a jump
866 and splitting the real last BB. */
867 for (s = bb->succ; s; s = s->succ_next)
868 if (s->dest == EXIT_BLOCK_PTR && s->flags & EDGE_FALLTHRU)
871 /* Do not attempt to duplicate tablejumps, as we need to unshare
872 the dispatch table. This is difficult to do, as the instructions
873 computing jump destination may be hoisted outside the basic block. */
874 if (tablejump_p (bb->end, NULL, NULL))
877 /* Do not duplicate blocks containing insns that can't be copied. */
878 if (targetm.cannot_copy_insn_p)
883 if (INSN_P (insn) && (*targetm.cannot_copy_insn_p) (insn))
887 insn = NEXT_INSN (insn);
/* Copy the insn run FROM..TO to the end of the current insn chain and
   return the first insn of the copy.  Jump tables are skipped and most
   notes are stripped rather than duplicated.
   NOTE(review): the return type, locals (insn, last), case labels for
   the insn codes (INSN/CALL_INSN/JUMP_INSN, BARRIER, CODE_LABEL),
   break statements, and the trailing delete of the anchor note are
   missing from this extraction.  */
895 duplicate_insn_chain (rtx from, rtx to)
899 /* Avoid updating of boundaries of previous basic block. The
900 note will get removed from insn stream in fixup. */
901 last = emit_note (NOTE_INSN_DELETED);
903 /* Create copy at the end of INSN chain. The chain will
904 be reordered later. */
905 for (insn = from; insn != NEXT_INSN (to); insn = NEXT_INSN (insn))
907 switch (GET_CODE (insn))
912 /* Avoid copying of dispatch tables. We never duplicate
913 tablejumps, so this can hit only in case the table got
914 moved far from original jump. */
915 if (GET_CODE (PATTERN (insn)) == ADDR_VEC
916 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
918 emit_copy_of_insn_after (insn, get_last_insn ());
929 switch (NOTE_LINE_NUMBER (insn))
931 /* In case prologue is empty and function contain label
932 in first BB, we may want to copy the block. */
933 case NOTE_INSN_PROLOGUE_END:
935 case NOTE_INSN_LOOP_VTOP:
936 case NOTE_INSN_LOOP_CONT:
937 case NOTE_INSN_LOOP_BEG:
938 case NOTE_INSN_LOOP_END:
939 /* Strip down the loop notes - we don't really want to keep
940 them consistent in loop copies. */
941 case NOTE_INSN_DELETED:
942 case NOTE_INSN_DELETED_LABEL:
943 /* No problem to strip these. */
944 case NOTE_INSN_EPILOGUE_BEG:
945 case NOTE_INSN_FUNCTION_END:
946 /* Debug code expect these notes to exist just once.
947 Keep them in the master copy.
948 ??? It probably makes more sense to duplicate them for each
950 case NOTE_INSN_FUNCTION_BEG:
951 /* There is always just single entry to function. */
952 case NOTE_INSN_BASIC_BLOCK:
955 /* There is no purpose to duplicate prologue. */
956 case NOTE_INSN_BLOCK_BEG:
957 case NOTE_INSN_BLOCK_END:
958 /* The BLOCK_BEG/BLOCK_END notes should be eliminated when BB
959 reordering is in the progress. */
960 case NOTE_INSN_EH_REGION_BEG:
961 case NOTE_INSN_EH_REGION_END:
962 /* Should never exist at BB duplication time. */
965 case NOTE_INSN_REPEATED_LINE_NUMBER:
966 emit_note_copy (insn);
/* Negative note numbers not handled above: line-number notes etc.  */
970 if (NOTE_LINE_NUMBER (insn) < 0)
972 /* It is possible that no_line_number is set and the note
974 emit_note_copy (insn);
981 insn = NEXT_INSN (last);
985 /* Create a duplicate of the basic block BB and redirect edge E into it.
986 If E is not specified, BB is just copied, but updating the frequencies
987 etc. is left to the caller. */
/* NOTE(review): locals (insn, new_bb, s, n), the abort under
   ENABLE_CHECKING, clamping of negative counts/frequencies, and the
   return of NEW_BB are missing from this extraction.  */
990 cfg_layout_duplicate_bb (basic_block bb, edge e)
/* New block receives at most BB's own execution count.  */
995 gcov_type new_count = e ? e->count : 0;
997 if (bb->count < new_count)
998 new_count = bb->count;
1001 #ifdef ENABLE_CHECKING
1002 if (!cfg_layout_can_duplicate_bb_p (bb))
/* Copy the body, then the layout header/footer insn runs.  */
1006 insn = duplicate_insn_chain (bb->head, bb->end);
1007 new_bb = create_basic_block (insn,
1008 insn ? get_last_insn () : NULL,
1009 EXIT_BLOCK_PTR->prev_bb);
1011 if (bb->rbi->header)
1013 insn = bb->rbi->header;
1014 while (NEXT_INSN (insn))
1015 insn = NEXT_INSN (insn);
1016 insn = duplicate_insn_chain (bb->rbi->header, insn);
1018 new_bb->rbi->header = unlink_insn_chain (insn, get_last_insn ());
1021 if (bb->rbi->footer)
1023 insn = bb->rbi->footer;
1024 while (NEXT_INSN (insn))
1025 insn = NEXT_INSN (insn);
1026 insn = duplicate_insn_chain (bb->rbi->footer, insn);
1028 new_bb->rbi->footer = unlink_insn_chain (insn, get_last_insn ());
/* Clone the liveness sets when the dataflow info is present.  */
1031 if (bb->global_live_at_start)
1033 new_bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (&flow_obstack);
1034 new_bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (&flow_obstack);
1035 COPY_REG_SET (new_bb->global_live_at_start, bb->global_live_at_start);
1036 COPY_REG_SET (new_bb->global_live_at_end, bb->global_live_at_end);
1039 new_bb->loop_depth = bb->loop_depth;
1040 new_bb->flags = bb->flags;
1041 for (s = bb->succ; s; s = s->succ_next)
1043 /* Since we are creating edges from a new block to successors
1044 of another block (which therefore are known to be disjoint), there
1045 is no need to actually check for duplicated edges. */
1046 n = unchecked_make_edge (new_bb, s->dest, s->flags);
1047 n->probability = s->probability;
1050 /* Take care for overflows! */
1051 n->count = s->count * (new_count * 10000 / bb->count) / 10000;
1052 s->count -= n->count;
1055 n->count = s->count;
/* Move the redirected share of profile from BB to the copy.  */
1061 new_bb->count = new_count;
1062 bb->count -= new_count;
1064 new_bb->frequency = EDGE_FREQUENCY (e);
1065 bb->frequency -= EDGE_FREQUENCY (e);
1067 redirect_edge_and_branch_force (e, new_bb);
1071 if (bb->frequency < 0)
1076 new_bb->count = bb->count;
1077 new_bb->frequency = bb->frequency;
/* Cross-link original and copy for later passes (copy_bbs etc.).  */
1080 new_bb->rbi->original = bb;
1081 bb->rbi->copy = new_bb;
/* Allocate and zero the reorder_block_def (rbi) structure for BB from
   cfg_layout_pool.  NOTE(review): old-style (K&R) parameter list; the
   return type, parameter declaration, braces, and the guard against
   an already-present rbi are missing from this extraction.  */
1087 cfg_layout_initialize_rbi (bb)
1092 bb->rbi = pool_alloc (cfg_layout_pool);
1093 memset (bb->rbi, 0, sizeof (struct reorder_block_def));
1096 /* Main entry point to this module - initialize the datastructures for
1097 CFG layout changes. It keeps LOOPS up-to-date if not null. */
/* NOTE(review): the "basic_block bb" local and the assignment of the
   create_alloc_pool result to cfg_layout_pool are missing from this
   extraction.  */
1100 cfg_layout_initialize ()
1104 /* Our algorithm depends on fact that there are now dead jumptables
/* Pool sized for every block plus ENTRY/EXIT.  */
1107 create_alloc_pool ("cfg layout pool", sizeof (struct reorder_block_def),
1108 n_basic_blocks + 2);
1109 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
1110 cfg_layout_initialize_rbi (bb);
/* Switch the CFG hooks to the layout-mode implementations before
   snapshotting block boundaries.  */
1112 cfg_layout_rtl_register_cfg_hooks ();
1114 record_effective_endpoints ();
1116 cleanup_cfg (CLEANUP_CFGLAYOUT);
1119 /* Splits superblocks. */
/* Collects the indices of blocks flagged BB_SUPERBLOCK into a bitmap,
   clears the flag, and re-discovers the sub-basic-blocks inside them.
   NOTE(review): the return type, the "int i" local, a "need" flag, and
   sbitmap_free are missing from this extraction.  */
1121 break_superblocks (void)
1123 sbitmap superblocks;
1126 superblocks = sbitmap_alloc (n_basic_blocks);
1127 sbitmap_zero (superblocks);
1131 for (i = 0; i < n_basic_blocks; i++)
1132 if (BASIC_BLOCK(i)->flags & BB_SUPERBLOCK)
1134 BASIC_BLOCK(i)->flags &= ~BB_SUPERBLOCK;
1135 SET_BIT (superblocks, i);
1141 rebuild_jump_labels (get_insns ());
1142 find_many_sub_basic_blocks (superblocks);
1148 /* Finalize the changes: reorder insn list according to the sequence, enter
1149 compensation code, rebuild scope forest. */
/* Leaves CFG-layout mode: restores the plain RTL CFG hooks, commits
   the reordering to the insn stream, frees the rbi pool, and splits
   any superblocks created along the way.
   NOTE(review): the "basic_block bb" local, #endif lines for the
   ENABLE_CHECKING regions, and the bb->rbi = NULL reset inside the
   FOR_BB_BETWEEN loop are missing from this extraction.  */
1152 cfg_layout_finalize (void)
1156 #ifdef ENABLE_CHECKING
1157 verify_flow_info ();
1159 rtl_register_cfg_hooks ();
/* The exit fallthru block must be layout-last before committing.  */
1160 fixup_fallthru_exit_predecessor ();
1161 fixup_reorder_chain ();
1163 #ifdef ENABLE_CHECKING
1164 verify_insn_chain ();
1167 free_alloc_pool (cfg_layout_pool);
1168 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
1171 break_superblocks ();
1173 #ifdef ENABLE_CHECKING
1174 verify_flow_info ();
1178 /* Checks whether all N blocks in BBS array can be copied. */
/* Temporarily marks the blocks as duplicated so intra-set abnormal
   edges can be detected, then unconditionally clears the marks.
   NOTE(review): the return type, locals (i, e, ret), and the return
   statements / goto-style early exit are missing from this
   extraction.  */
1180 can_copy_bbs_p (basic_block *bbs, unsigned n)
1186 for (i = 0; i < n; i++)
1187 bbs[i]->rbi->duplicated = 1;
1189 for (i = 0; i < n; i++)
1191 /* In case we should redirect abnormal edge during duplication, fail. */
1192 for (e = bbs[i]->succ; e; e = e->succ_next)
1193 if ((e->flags & EDGE_ABNORMAL)
1194 && e->dest->rbi->duplicated)
1200 if (!cfg_layout_can_duplicate_bb_p (bbs[i]))
/* Cleanup: clear the temporary marks regardless of the answer.  */
1208 for (i = 0; i < n; i++)
1209 bbs[i]->rbi->duplicated = 0;
1214 /* Duplicates N basic blocks stored in array BBS. Newly created basic blocks
1215 are placed into array NEW_BBS in the same order. Edges from basic blocks
1216 in BBS are also duplicated and copies of those of them
1217 that lead into BBS are redirected to appropriate newly created block. The
1218 function assigns bbs into loops (copy of basic block bb is assigned to
1219 bb->loop_father->copy loop, so this must be set up correctly in advance)
1220 and updates dominators locally (LOOPS structure that contains the information
1221 about dominators is passed to enable this).
1223 BASE is the superloop to that basic block belongs; if its header or latch
1224 is copied, we do not set the new blocks as header or latch.
1226 Created copies of N_EDGES edges in array EDGES are stored in array NEW_EDGES,
1227 also in the same order. */
/* NOTE(review): locals (i, j, e), loop braces, the bb = bbs[i]
   assignments, and the new_edges[j] = e store are missing from this
   extraction.  */
1230 copy_bbs (basic_block *bbs, unsigned n, basic_block *new_bbs,
1231 edge *edges, unsigned n_edges, edge *new_edges,
1232 struct loop *base, struct loops *loops)
1235 basic_block bb, new_bb, dom_bb;
1238 /* Duplicate bbs, update dominators, assign bbs to loops. */
1239 for (i = 0; i < n; i++)
1243 new_bb = new_bbs[i] = cfg_layout_duplicate_bb (bb, NULL);
/* Mark the original so later passes can recognize set members.  */
1244 bb->rbi->duplicated = 1;
1246 add_bb_to_loop (new_bb, bb->loop_father->copy);
1247 add_to_dominance_info (loops->cfg.dom, new_bb);
1248 /* Possibly set header. */
1249 if (bb->loop_father->header == bb && bb->loop_father != base)
1250 new_bb->loop_father->header = new_bb;
1252 if (bb->loop_father->latch == bb && bb->loop_father != base)
1253 new_bb->loop_father->latch = new_bb;
1256 /* Set dominators. */
1257 for (i = 0; i < n; i++)
1260 new_bb = new_bbs[i];
1262 dom_bb = get_immediate_dominator (loops->cfg.dom, bb);
/* If the dominator was itself copied, dominate from its copy.  */
1263 if (dom_bb->rbi->duplicated)
1265 dom_bb = dom_bb->rbi->copy;
1266 set_immediate_dominator (loops->cfg.dom, new_bb, dom_bb);
1270 /* Redirect edges. */
1271 for (j = 0; j < n_edges; j++)
1272 new_edges[j] = NULL;
1273 for (i = 0; i < n; i++)
1275 new_bb = new_bbs[i];
1278 for (e = new_bb->succ; e; e = e->succ_next)
/* Record the copy of each requested edge in NEW_EDGES.  */
1280 for (j = 0; j < n_edges; j++)
1281 if (edges[j] && edges[j]->src == bb && edges[j]->dest == e->dest)
/* Edges leading back into the copied set are retargeted to the
   corresponding copies.  */
1284 if (!e->dest->rbi->duplicated)
1286 redirect_edge_and_branch_force (e, e->dest->rbi->copy);
1290 /* Clear information about duplicates. */
1291 for (i = 0; i < n; i++)
1292 bbs[i]->rbi->duplicated = 0;
1295 #include "gt-cfglayout.h"