1 /* Data flow analysis for GNU compiler.
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001 Free Software Foundation, Inc.
5 This file is part of GNU CC.
7 GNU CC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
12 GNU CC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GNU CC; see the file COPYING. If not, write to
19 the Free Software Foundation, 59 Temple Place - Suite 330,
20 Boston, MA 02111-1307, USA. */
22 /* This file contains the data flow analysis pass of the compiler. It
23 computes data flow information which tells combine_instructions
24 which insns to consider combining and controls register allocation.
26 Additional data flow information that is too bulky to record is
27 generated during the analysis, and is used at that time to create
28 autoincrement and autodecrement addressing.
30 The first step is dividing the function into basic blocks.
31 find_basic_blocks does this. Then life_analysis determines
32 where each register is live and where it is dead.
34 ** find_basic_blocks **
36 find_basic_blocks divides the current function's rtl into basic
37 blocks and constructs the CFG. The blocks are recorded in the
38 basic_block_info array; the CFG exists in the edge structures
39 referenced by the blocks.
41 find_basic_blocks also finds any unreachable loops and deletes them.
45 life_analysis is called immediately after find_basic_blocks.
46 It uses the basic block information to determine where each
47 hard or pseudo register is live.
49 ** live-register info **
51 The information about where each register is live is in two parts:
52 the REG_NOTES of insns, and the vector basic_block->global_live_at_start.
54 basic_block->global_live_at_start has an element for each basic
55 block, and the element is a bit-vector with a bit for each hard or
56 pseudo register. The bit is 1 if the register is live at the
57 beginning of the basic block.
59 Two types of elements can be added to an insn's REG_NOTES.
60 A REG_DEAD note is added to an insn's REG_NOTES for any register
61 that meets both of two conditions: The value in the register is not
62 needed in subsequent insns and the insn does not replace the value in
63 the register (in the case of multi-word hard registers, the value in
64 each register must be replaced by the insn to avoid a REG_DEAD note).
66 In the vast majority of cases, an object in a REG_DEAD note will be
67 used somewhere in the insn. The (rare) exception to this is if an
68 insn uses a multi-word hard register and only some of the registers are
69 needed in subsequent insns. In that case, REG_DEAD notes will be
70 provided for those hard registers that are not subsequently needed.
71 Partial REG_DEAD notes of this type do not occur when an insn sets
72 only some of the hard registers used in such a multi-word operand;
73 omitting REG_DEAD notes for objects stored in an insn is optional and
74 the desire to do so does not justify the complexity of the partial
77 REG_UNUSED notes are added for each register that is set by the insn
78 but is unused subsequently (if every register set by the insn is unused
79 and the insn does not reference memory or have some other side-effect,
80 the insn is deleted instead). If only part of a multi-word hard
81 register is used in a subsequent insn, REG_UNUSED notes are made for
82 the parts that will not be used.
84 To determine which registers are live after any insn, one can
85 start from the beginning of the basic block and scan insns, noting
86 which registers are set by each insn and which die there.
88 ** Other actions of life_analysis **
90 life_analysis sets up the LOG_LINKS fields of insns because the
91 information needed to do so is readily available.
93 life_analysis deletes insns whose only effect is to store a value
96 life_analysis notices cases where a reference to a register as
97 a memory address can be combined with a preceding or following
98 incrementation or decrementation of the register. The separate
99 instruction to increment or decrement is deleted and the address
100 is changed to a POST_INC or similar rtx.
102 Each time an incrementing or decrementing address is created,
103 a REG_INC element is added to the insn's REG_NOTES list.
105 life_analysis fills in certain vectors containing information about
106 register usage: REG_N_REFS, REG_N_DEATHS, REG_N_SETS, REG_LIVE_LENGTH,
107 REG_N_CALLS_CROSSED and REG_BASIC_BLOCK.
109 life_analysis sets current_function_sp_is_unchanging if the function
110 doesn't modify the stack pointer. */
114 Split out from life_analysis:
115 - local property discovery (bb->local_live, bb->local_set)
116 - global property computation
118 - pre/post modify transformation
126 #include "hard-reg-set.h"
127 #include "basic-block.h"
128 #include "insn-config.h"
132 #include "function.h"
141 #include "splay-tree.h"
143 #define obstack_chunk_alloc xmalloc
144 #define obstack_chunk_free free
146 /* EXIT_IGNORE_STACK should be nonzero if, when returning from a function,
147 the stack pointer does not matter. The value is tested only in
148 functions that have frame pointers.
149 No definition is equivalent to always zero. */
150 #ifndef EXIT_IGNORE_STACK
151 #define EXIT_IGNORE_STACK 0
154 #ifndef HAVE_epilogue
155 #define HAVE_epilogue 0
157 #ifndef HAVE_prologue
158 #define HAVE_prologue 0
160 #ifndef HAVE_sibcall_epilogue
161 #define HAVE_sibcall_epilogue 0
165 #define LOCAL_REGNO(REGNO) 0
167 #ifndef EPILOGUE_USES
168 #define EPILOGUE_USES(REGNO) 0
171 #ifdef HAVE_conditional_execution
172 #ifndef REVERSE_CONDEXEC_PREDICATES_P
173 #define REVERSE_CONDEXEC_PREDICATES_P(x, y) ((x) == reverse_condition (y))
177 /* The obstack on which the flow graph components are allocated. */
179 struct obstack flow_obstack;
180 static char *flow_firstobj;
182 /* Number of basic blocks in the current function. */
186 /* Number of edges in the current function. */
190 /* The basic block array. */
192 varray_type basic_block_info;
194 /* The special entry and exit blocks. */
196 struct basic_block_def entry_exit_blocks[2]
199 NULL, /* head_tree */
203 NULL, /* local_set */
204 NULL, /* cond_local_set */
205 NULL, /* global_live_at_start */
206 NULL, /* global_live_at_end */
208 ENTRY_BLOCK, /* index */
216 NULL, /* head_tree */
220 NULL, /* local_set */
221 NULL, /* cond_local_set */
222 NULL, /* global_live_at_start */
223 NULL, /* global_live_at_end */
225 EXIT_BLOCK, /* index */
232 /* Nonzero if the second flow pass has completed. */
235 /* Maximum register number used in this function, plus one. */
239 /* Indexed by n, giving various register information */
241 varray_type reg_n_info;
243 /* Size of a regset for the current function,
244 in (1) bytes and (2) elements. */
249 /* Regset of regs live when calls to `setjmp'-like functions happen. */
250 /* ??? Does this exist only for the setjmp-clobbered warning message? */
252 regset regs_live_at_setjmp;
254 /* List made of EXPR_LIST rtx's which gives pairs of pseudo registers
255 that have to go in the same hard reg.
256 The first two regs in the list are a pair, and the next two
257 are another pair, etc. */
260 /* Callback that determines if it's ok for a function to have no
261 noreturn attribute. */
262 int (*lang_missing_noreturn_ok_p) PARAMS ((tree));
264 /* Set of registers that may be eliminable. These are handled specially
265 in updating regs_ever_live. */
267 static HARD_REG_SET elim_reg_set;
269 /* The basic block structure for every insn, indexed by uid. */
271 varray_type basic_block_for_insn;
273 /* The labels mentioned in non-jump rtl. Valid during find_basic_blocks. */
274 /* ??? Should probably be using LABEL_NUSES instead. It would take a
275 bit of surgery to be able to use or co-opt the routines in jump. */
277 static rtx label_value_list;
278 static rtx tail_recursion_label_list;
280 /* Holds information for tracking conditional register life information. */
281 struct reg_cond_life_info
283 /* A boolean expression of conditions under which a register is dead. */
285 /* Conditions under which a register is dead at the basic block end. */
288 /* A boolean expression of conditions under which a register has been
292 /* ??? Could store mask of bytes that are dead, so that we could finally
293 track lifetimes of multi-word registers accessed via subregs. */
296 /* For use in communicating between propagate_block and its subroutines.
297 Holds all information needed to compute life and def-use information. */
299 struct propagate_block_info
301 /* The basic block we're considering. */
304 /* Bit N is set if register N is conditionally or unconditionally live. */
307 /* Bit N is set if register N is set this insn. */
310 /* Element N is the next insn that uses (hard or pseudo) register N
311 within the current basic block; or zero, if there is no such insn. */
314 /* Contains a list of all the MEMs we are tracking for dead store
318 /* If non-null, record the set of registers set unconditionally in the
322 /* If non-null, record the set of registers set conditionally in the
324 regset cond_local_set;
326 #ifdef HAVE_conditional_execution
327 /* Indexed by register number, holds a reg_cond_life_info for each
328 register that is not unconditionally live or dead. */
329 splay_tree reg_cond_dead;
331 /* Bit N is set if register N is in an expression in reg_cond_dead. */
335 /* The length of mem_set_list. */
336 int mem_set_list_len;
338 /* Non-zero if the value of CC0 is live. */
341 /* Flags controling the set of information propagate_block collects. */
345 /* Maximum length of pbi->mem_set_list before we start dropping
346 new elements on the floor. */
347 #define MAX_MEM_SET_LIST_LEN 100
349 /* Store the data structures necessary for depth-first search. */
350 struct depth_first_search_dsS {
351 /* stack for backtracking during the algorithm */
354 /* number of edges in the stack. That is, positions 0, ..., sp-1
358 /* record of basic blocks already seen by depth-first search */
359 sbitmap visited_blocks;
361 typedef struct depth_first_search_dsS *depth_first_search_ds;
363 /* Have print_rtl_and_abort give the same information that fancy_abort
365 #define print_rtl_and_abort() \
366 print_rtl_and_abort_fcn (__FILE__, __LINE__, __FUNCTION__)
368 /* Forward declarations */
369 static bool try_crossjump_to_edge PARAMS ((int, edge, edge));
370 static bool try_crossjump_bb PARAMS ((int, basic_block));
371 static bool outgoing_edges_match PARAMS ((basic_block, basic_block));
372 static int flow_find_cross_jump PARAMS ((int, basic_block, basic_block,
374 static int count_basic_blocks PARAMS ((rtx));
375 static void find_basic_blocks_1 PARAMS ((rtx));
376 static rtx find_label_refs PARAMS ((rtx, rtx));
377 static void make_edges PARAMS ((rtx, int, int, int));
378 static void make_label_edge PARAMS ((sbitmap *, basic_block,
380 static void make_eh_edge PARAMS ((sbitmap *, basic_block, rtx));
382 static void commit_one_edge_insertion PARAMS ((edge));
384 static void delete_unreachable_blocks PARAMS ((void));
385 static int can_delete_note_p PARAMS ((rtx));
386 static void expunge_block PARAMS ((basic_block));
387 static int can_delete_label_p PARAMS ((rtx));
388 static int tail_recursion_label_p PARAMS ((rtx));
389 static int merge_blocks_move_predecessor_nojumps PARAMS ((basic_block,
391 static int merge_blocks_move_successor_nojumps PARAMS ((basic_block,
393 static int merge_blocks PARAMS ((edge,basic_block,basic_block,
395 static bool try_optimize_cfg PARAMS ((int));
396 static bool can_fallthru PARAMS ((basic_block, basic_block));
397 static bool try_redirect_by_replacing_jump PARAMS ((edge, basic_block));
398 static bool try_simplify_condjump PARAMS ((basic_block));
399 static bool try_forward_edges PARAMS ((int, basic_block));
400 static void tidy_fallthru_edges PARAMS ((void));
401 static int verify_wide_reg_1 PARAMS ((rtx *, void *));
402 static void verify_wide_reg PARAMS ((int, rtx, rtx));
403 static void verify_local_live_at_start PARAMS ((regset, basic_block));
404 static void notice_stack_pointer_modification_1 PARAMS ((rtx, rtx, void *));
405 static void notice_stack_pointer_modification PARAMS ((rtx));
406 static void mark_reg PARAMS ((rtx, void *));
407 static void mark_regs_live_at_end PARAMS ((regset));
408 static int set_phi_alternative_reg PARAMS ((rtx, int, int, void *));
409 static void calculate_global_regs_live PARAMS ((sbitmap, sbitmap, int));
410 static void propagate_block_delete_insn PARAMS ((basic_block, rtx));
411 static rtx propagate_block_delete_libcall PARAMS ((basic_block, rtx, rtx));
412 static int insn_dead_p PARAMS ((struct propagate_block_info *,
414 static int libcall_dead_p PARAMS ((struct propagate_block_info *,
416 static void mark_set_regs PARAMS ((struct propagate_block_info *,
418 static void mark_set_1 PARAMS ((struct propagate_block_info *,
419 enum rtx_code, rtx, rtx,
421 #ifdef HAVE_conditional_execution
422 static int mark_regno_cond_dead PARAMS ((struct propagate_block_info *,
424 static void free_reg_cond_life_info PARAMS ((splay_tree_value));
425 static int flush_reg_cond_reg_1 PARAMS ((splay_tree_node, void *));
426 static void flush_reg_cond_reg PARAMS ((struct propagate_block_info *,
428 static rtx elim_reg_cond PARAMS ((rtx, unsigned int));
429 static rtx ior_reg_cond PARAMS ((rtx, rtx, int));
430 static rtx not_reg_cond PARAMS ((rtx));
431 static rtx and_reg_cond PARAMS ((rtx, rtx, int));
434 static void attempt_auto_inc PARAMS ((struct propagate_block_info *,
435 rtx, rtx, rtx, rtx, rtx));
436 static void find_auto_inc PARAMS ((struct propagate_block_info *,
438 static int try_pre_increment_1 PARAMS ((struct propagate_block_info *,
440 static int try_pre_increment PARAMS ((rtx, rtx, HOST_WIDE_INT));
442 static void mark_used_reg PARAMS ((struct propagate_block_info *,
444 static void mark_used_regs PARAMS ((struct propagate_block_info *,
446 void dump_flow_info PARAMS ((FILE *));
447 void debug_flow_info PARAMS ((void));
448 static void print_rtl_and_abort_fcn PARAMS ((const char *, int,
452 static void add_to_mem_set_list PARAMS ((struct propagate_block_info *,
454 static void invalidate_mems_from_autoinc PARAMS ((struct propagate_block_info *,
456 static void invalidate_mems_from_set PARAMS ((struct propagate_block_info *,
458 static void remove_fake_successors PARAMS ((basic_block));
459 static void flow_nodes_print PARAMS ((const char *, const sbitmap,
461 static void flow_edge_list_print PARAMS ((const char *, const edge *,
463 static void flow_loops_cfg_dump PARAMS ((const struct loops *,
465 static int flow_loop_nested_p PARAMS ((struct loop *,
467 static int flow_loop_entry_edges_find PARAMS ((basic_block, const sbitmap,
469 static int flow_loop_exit_edges_find PARAMS ((const sbitmap, edge **));
470 static int flow_loop_nodes_find PARAMS ((basic_block, basic_block, sbitmap));
471 static void flow_dfs_compute_reverse_init
472 PARAMS ((depth_first_search_ds));
473 static void flow_dfs_compute_reverse_add_bb
474 PARAMS ((depth_first_search_ds, basic_block));
475 static basic_block flow_dfs_compute_reverse_execute
476 PARAMS ((depth_first_search_ds));
477 static void flow_dfs_compute_reverse_finish
478 PARAMS ((depth_first_search_ds));
479 static void flow_loop_pre_header_scan PARAMS ((struct loop *));
480 static basic_block flow_loop_pre_header_find PARAMS ((basic_block,
482 static void flow_loop_tree_node_add PARAMS ((struct loop *, struct loop *));
483 static void flow_loops_tree_build PARAMS ((struct loops *));
484 static int flow_loop_level_compute PARAMS ((struct loop *, int));
485 static int flow_loops_level_compute PARAMS ((struct loops *));
486 static void delete_dead_jumptables PARAMS ((void));
487 static bool back_edge_of_syntactic_loop_p PARAMS ((basic_block, basic_block));
488 static bool need_fake_edge_p PARAMS ((rtx));
490 /* Find basic blocks of the current function.
491 F is the first insn of the function and NREGS the number of register
495 find_basic_blocks (f, nregs, file)
497 int nregs ATTRIBUTE_UNUSED;
498 FILE *file ATTRIBUTE_UNUSED;
501 timevar_push (TV_CFG);
503 /* Flush out existing data. */
504 if (basic_block_info != NULL)
510 /* Clear bb->aux on all extant basic blocks. We'll use this as a
511 tag for reuse during create_basic_block, just in case some pass
512 copies around basic block notes improperly. */
513 for (i = 0; i < n_basic_blocks; ++i)
514 BASIC_BLOCK (i)->aux = NULL;
516 VARRAY_FREE (basic_block_info);
519 n_basic_blocks = count_basic_blocks (f);
521 /* Size the basic block table. The actual structures will be allocated
522 by find_basic_blocks_1, since we want to keep the structure pointers
523 stable across calls to find_basic_blocks. */
524 /* ??? This whole issue would be much simpler if we called find_basic_blocks
525 exactly once, and thereafter we don't have a single long chain of
526 instructions at all until close to the end of compilation when we
527 actually lay them out. */
529 VARRAY_BB_INIT (basic_block_info, n_basic_blocks, "basic_block_info");
531 find_basic_blocks_1 (f);
533 /* Record the block to which an insn belongs. */
534 /* ??? This should be done another way, by which (perhaps) a label is
535 tagged directly with the basic block that it starts. It is used for
536 more than that currently, but IMO that is the only valid use. */
538 max_uid = get_max_uid ();
540 /* Leave space for insns life_analysis makes in some cases for auto-inc.
541 These cases are rare, so we don't need too much space. */
542 max_uid += max_uid / 10;
545 compute_bb_for_insn (max_uid);
547 /* Discover the edges of our cfg. */
548 make_edges (label_value_list, 0, n_basic_blocks - 1, 0);
550 /* Do very simple cleanup now, for the benefit of code that runs between
551 here and cleanup_cfg, e.g. thread_prologue_and_epilogue_insns. */
552 tidy_fallthru_edges ();
554 mark_critical_edges ();
556 #ifdef ENABLE_CHECKING
559 timevar_pop (TV_CFG);
563 check_function_return_warnings ()
565 if (warn_missing_noreturn
566 && !TREE_THIS_VOLATILE (cfun->decl)
567 && EXIT_BLOCK_PTR->pred == NULL
568 && (lang_missing_noreturn_ok_p
569 && !lang_missing_noreturn_ok_p (cfun->decl)))
570 warning ("function might be possible candidate for attribute `noreturn'");
572 /* If we have a path to EXIT, then we do return. */
573 if (TREE_THIS_VOLATILE (cfun->decl)
574 && EXIT_BLOCK_PTR->pred != NULL)
575 warning ("`noreturn' function does return");
577 /* If the clobber_return_insn appears in some basic block, then we
578 do reach the end without returning a value. */
579 else if (warn_return_type
580 && cfun->x_clobber_return_insn != NULL
581 && EXIT_BLOCK_PTR->pred != NULL)
583 int max_uid = get_max_uid ();
585 /* If clobber_return_insn was excised by jump1, then renumber_insns
586 can make max_uid smaller than the number still recorded in our rtx.
587 That's fine, since this is a quick way of verifying that the insn
588 is no longer in the chain. */
589 if (INSN_UID (cfun->x_clobber_return_insn) < max_uid)
591 /* Recompute insn->block mapping, since the initial mapping is
592 set before we delete unreachable blocks. */
593 compute_bb_for_insn (max_uid);
595 if (BLOCK_FOR_INSN (cfun->x_clobber_return_insn) != NULL)
596 warning ("control reaches end of non-void function");
601 /* Count the basic blocks of the function. */
604 count_basic_blocks (f)
608 register RTX_CODE prev_code;
609 register int count = 0;
610 int saw_abnormal_edge = 0;
612 prev_code = JUMP_INSN;
613 for (insn = f; insn; insn = NEXT_INSN (insn))
615 enum rtx_code code = GET_CODE (insn);
617 if (code == CODE_LABEL
618 || (GET_RTX_CLASS (code) == 'i'
619 && (prev_code == JUMP_INSN
620 || prev_code == BARRIER
621 || saw_abnormal_edge)))
623 saw_abnormal_edge = 0;
627 /* Record whether this insn created an edge. */
628 if (code == CALL_INSN)
632 /* If there is a nonlocal goto label and the specified
633 region number isn't -1, we have an edge. */
634 if (nonlocal_goto_handler_labels
635 && ((note = find_reg_note (insn, REG_EH_REGION, NULL_RTX)) == 0
636 || INTVAL (XEXP (note, 0)) >= 0))
637 saw_abnormal_edge = 1;
639 else if (can_throw_internal (insn))
640 saw_abnormal_edge = 1;
642 else if (flag_non_call_exceptions
644 && can_throw_internal (insn))
645 saw_abnormal_edge = 1;
651 /* The rest of the compiler works a bit smoother when we don't have to
652 check for the edge case of do-nothing functions with no basic blocks. */
655 emit_insn (gen_rtx_USE (VOIDmode, const0_rtx));
662 /* Scan a list of insns for labels referred to other than by jumps.
663 This is used to scan the alternatives of a call placeholder. */
665 find_label_refs (f, lvl)
671 for (insn = f; insn; insn = NEXT_INSN (insn))
672 if (INSN_P (insn) && GET_CODE (insn) != JUMP_INSN)
676 /* Make a list of all labels referred to other than by jumps
677 (which just don't have the REG_LABEL notes).
679 Make a special exception for labels followed by an ADDR*VEC,
680 as this would be a part of the tablejump setup code.
682 Make a special exception to registers loaded with label
683 values just before jump insns that use them. */
685 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
686 if (REG_NOTE_KIND (note) == REG_LABEL)
688 rtx lab = XEXP (note, 0), next;
690 if ((next = next_nonnote_insn (lab)) != NULL
691 && GET_CODE (next) == JUMP_INSN
692 && (GET_CODE (PATTERN (next)) == ADDR_VEC
693 || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
695 else if (GET_CODE (lab) == NOTE)
697 else if (GET_CODE (NEXT_INSN (insn)) == JUMP_INSN
698 && find_reg_note (NEXT_INSN (insn), REG_LABEL, lab))
701 lvl = alloc_EXPR_LIST (0, XEXP (note, 0), lvl);
708 /* Assume that someone emitted code with control flow instructions to the
709 basic block. Update the data structure. */
711 find_sub_basic_blocks (bb)
716 rtx jump_insn = NULL_RTX;
718 basic_block first_bb = bb;
724 if (GET_CODE (insn) == CODE_LABEL)
725 insn = NEXT_INSN (insn);
727 /* Scan insn chain and try to find new basic block boundaries. */
730 enum rtx_code code = GET_CODE (insn);
737 /* On code label, split current basic block. */
739 falltru = split_block (bb, PREV_INSN (insn));
743 remove_edge (falltru);
745 if (LABEL_ALTERNATE_NAME (insn))
746 make_edge (NULL, ENTRY_BLOCK_PTR, bb, 0);
750 /* In case we've previously split insn on the JUMP_INSN, move the
751 block header to proper place. */
754 falltru = split_block (bb, PREV_INSN (insn));
757 remove_edge (falltru);
760 /* We need some special care for those expressions. */
761 if (GET_CODE (insn) == JUMP_INSN)
763 if (GET_CODE (PATTERN (insn)) == ADDR_VEC
764 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
774 insn = NEXT_INSN (insn);
777 /* In case expander replaced normal insn by sequence terminating by
778 return and barrier, or possibly other sequence not behaving like
779 ordinary jump, we need to take care and move basic block boundary. */
780 if (jump_insn && GET_CODE (bb->end) != JUMP_INSN)
783 /* We've possibly replaced the conditional jump by conditional jump
784 followed by cleanup at fallthru edge, so the outgoing edges may
786 purge_dead_edges (bb);
788 /* Now re-scan and wire in all edges. This expect simple (conditional)
789 jumps at the end of each new basic blocks. */
790 make_edges (NULL, first_bb->index, bb->index, 1);
792 /* Update branch probabilities. Expect only (un)conditional jumps
793 to be created with only the forward edges. */
794 for (i = first_bb->index; i <= bb->index; i++)
797 basic_block b = BASIC_BLOCK (i);
802 for (e = b->pred; e; e=e->pred_next)
804 b->count += e->count;
805 b->frequency += EDGE_FREQUENCY (e);
808 if (b->succ && b->succ->succ_next && !b->succ->succ_next->succ_next)
810 rtx note = find_reg_note (b->end, REG_BR_PROB, NULL);
815 probability = INTVAL (XEXP (find_reg_note (b->end,
819 e->probability = probability;
820 e->count = ((b->count * probability + REG_BR_PROB_BASE / 2)
822 f = FALLTHRU_EDGE (b);
823 f->probability = REG_BR_PROB_BASE - probability;
824 f->count = b->count - e->count;
826 if (b->succ && !b->succ->succ_next)
829 e->probability = REG_BR_PROB_BASE;
835 /* Find all basic blocks of the function whose first insn is F.
837 Collect and return a list of labels whose addresses are taken. This
838 will be used in make_edges for use with computed gotos. */
841 find_basic_blocks_1 (f)
844 register rtx insn, next;
846 rtx bb_note = NULL_RTX;
852 /* We process the instructions in a slightly different way than we did
853 previously. This is so that we see a NOTE_BASIC_BLOCK after we have
854 closed out the previous block, so that it gets attached at the proper
855 place. Since this form should be equivalent to the previous,
856 count_basic_blocks continues to use the old form as a check. */
858 for (insn = f; insn; insn = next)
860 enum rtx_code code = GET_CODE (insn);
862 next = NEXT_INSN (insn);
868 int kind = NOTE_LINE_NUMBER (insn);
870 /* Look for basic block notes with which to keep the
871 basic_block_info pointers stable. Unthread the note now;
872 we'll put it back at the right place in create_basic_block.
873 Or not at all if we've already found a note in this block. */
874 if (kind == NOTE_INSN_BASIC_BLOCK)
876 if (bb_note == NULL_RTX)
879 next = flow_delete_insn (insn);
885 /* A basic block starts at a label. If we've closed one off due
886 to a barrier or some such, no need to do it again. */
887 if (head != NULL_RTX)
889 create_basic_block (i++, head, end, bb_note);
897 /* A basic block ends at a jump. */
898 if (head == NULL_RTX)
902 /* ??? Make a special check for table jumps. The way this
903 happens is truly and amazingly gross. We are about to
904 create a basic block that contains just a code label and
905 an addr*vec jump insn. Worse, an addr_diff_vec creates
906 its own natural loop.
908 Prevent this bit of brain damage, pasting things together
909 correctly in make_edges.
911 The correct solution involves emitting the table directly
912 on the tablejump instruction as a note, or JUMP_LABEL. */
914 if (GET_CODE (PATTERN (insn)) == ADDR_VEC
915 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
923 goto new_bb_inclusive;
926 /* A basic block ends at a barrier. It may be that an unconditional
927 jump already closed the basic block -- no need to do it again. */
928 if (head == NULL_RTX)
930 goto new_bb_exclusive;
934 /* Record whether this call created an edge. */
935 rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
936 int region = (note ? INTVAL (XEXP (note, 0)) : 0);
938 if (GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
940 /* Scan each of the alternatives for label refs. */
941 lvl = find_label_refs (XEXP (PATTERN (insn), 0), lvl);
942 lvl = find_label_refs (XEXP (PATTERN (insn), 1), lvl);
943 lvl = find_label_refs (XEXP (PATTERN (insn), 2), lvl);
944 /* Record its tail recursion label, if any. */
945 if (XEXP (PATTERN (insn), 3) != NULL_RTX)
946 trll = alloc_EXPR_LIST (0, XEXP (PATTERN (insn), 3), trll);
949 /* A basic block ends at a call that can either throw or
950 do a non-local goto. */
951 if ((nonlocal_goto_handler_labels && region >= 0)
952 || can_throw_internal (insn))
955 if (head == NULL_RTX)
960 create_basic_block (i++, head, end, bb_note);
961 head = end = NULL_RTX;
969 /* Non-call exceptions generate new blocks just like calls. */
970 if (flag_non_call_exceptions && can_throw_internal (insn))
971 goto new_bb_inclusive;
973 if (head == NULL_RTX)
982 if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN)
986 /* Make a list of all labels referred to other than by jumps.
988 Make a special exception for labels followed by an ADDR*VEC,
989 as this would be a part of the tablejump setup code.
991 Make a special exception to registers loaded with label
992 values just before jump insns that use them. */
994 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
995 if (REG_NOTE_KIND (note) == REG_LABEL)
997 rtx lab = XEXP (note, 0), next;
999 if ((next = next_nonnote_insn (lab)) != NULL
1000 && GET_CODE (next) == JUMP_INSN
1001 && (GET_CODE (PATTERN (next)) == ADDR_VEC
1002 || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
1004 else if (GET_CODE (lab) == NOTE)
1006 else if (GET_CODE (NEXT_INSN (insn)) == JUMP_INSN
1007 && find_reg_note (NEXT_INSN (insn), REG_LABEL, lab))
1010 lvl = alloc_EXPR_LIST (0, XEXP (note, 0), lvl);
1015 if (head != NULL_RTX)
1016 create_basic_block (i++, head, end, bb_note);
1018 flow_delete_insn (bb_note);
1020 if (i != n_basic_blocks)
1023 label_value_list = lvl;
1024 tail_recursion_label_list = trll;
1027 /* Tidy the CFG by deleting unreachable code and whatnot. */
1033 timevar_push (TV_CLEANUP_CFG);
1034 delete_unreachable_blocks ();
1035 if (try_optimize_cfg (mode))
1036 delete_unreachable_blocks ();
1037 mark_critical_edges ();
1039 /* Kill the data we won't maintain. */
1040 free_EXPR_LIST_list (&label_value_list);
1041 free_EXPR_LIST_list (&tail_recursion_label_list);
1042 timevar_pop (TV_CLEANUP_CFG);
1045 /* Create a new basic block consisting of the instructions between
1046 HEAD and END inclusive. Reuses the note and basic block struct
1047 in BB_NOTE, if any. */
1050 create_basic_block (index, head, end, bb_note)
1052 rtx head, end, bb_note;
1057 && ! RTX_INTEGRATED_P (bb_note)
1058 && (bb = NOTE_BASIC_BLOCK (bb_note)) != NULL
1061 /* If we found an existing note, thread it back onto the chain. */
1065 if (GET_CODE (head) == CODE_LABEL)
1069 after = PREV_INSN (head);
1073 if (after != bb_note && NEXT_INSN (after) != bb_note)
1074 reorder_insns (bb_note, bb_note, after);
1078 /* Otherwise we must create a note and a basic block structure.
1079 Since we allow basic block structs in rtl, give the struct
1080 the same lifetime by allocating it off the function obstack
1081 rather than using malloc. */
1083 bb = (basic_block) obstack_alloc (&flow_obstack, sizeof (*bb));
1084 memset (bb, 0, sizeof (*bb));
1086 if (GET_CODE (head) == CODE_LABEL)
1087 bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK, head);
1090 bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK, head);
1093 NOTE_BASIC_BLOCK (bb_note) = bb;
1096 /* Always include the bb note in the block. */
1097 if (NEXT_INSN (end) == bb_note)
1103 BASIC_BLOCK (index) = bb;
1105 /* Tag the block so that we know it has been used when considering
1106 other basic block notes. */
1110 /* Return the INSN immediately following the NOTE_INSN_BASIC_BLOCK
1111 note associated with the BLOCK. */
1114 first_insn_after_basic_block_note (block)
1119 /* Get the first instruction in the block. */
1122 if (insn == NULL_RTX)
1124 if (GET_CODE (insn) == CODE_LABEL)
1125 insn = NEXT_INSN (insn);
1126 if (!NOTE_INSN_BASIC_BLOCK_P (insn))
1129 return NEXT_INSN (insn);
1132 /* Records the basic block struct in BB_FOR_INSN, for every instruction
1133 indexed by INSN_UID. MAX is the size of the array. */
1136 compute_bb_for_insn (max)
1141 if (basic_block_for_insn)
1142 VARRAY_FREE (basic_block_for_insn);
1143 VARRAY_BB_INIT (basic_block_for_insn, max, "basic_block_for_insn");
1145 for (i = 0; i < n_basic_blocks; ++i)
1147 basic_block bb = BASIC_BLOCK (i);
1154 int uid = INSN_UID (insn);
1156 VARRAY_BB (basic_block_for_insn, uid) = bb;
1159 insn = NEXT_INSN (insn);
1164 /* Free the memory associated with the edge structures. */
1172 for (i = 0; i < n_basic_blocks; ++i)
1174 basic_block bb = BASIC_BLOCK (i);
1176 for (e = bb->succ; e; e = n)
1186 for (e = ENTRY_BLOCK_PTR->succ; e; e = n)
1192 ENTRY_BLOCK_PTR->succ = 0;
1193 EXIT_BLOCK_PTR->pred = 0;
1198 /* Identify the edges between basic blocks MIN to MAX.
1200 NONLOCAL_LABEL_LIST is a list of non-local labels in the function. Blocks
1201 that are otherwise unreachable may be reachable with a non-local goto.
1203 BB_EH_END is an array indexed by basic block number in which we record
1204 the list of exception regions active at the end of the basic block. */
1207 make_edges (label_value_list, min, max, update_p)
1208 rtx label_value_list;
1209 int min, max, update_p;
1212 sbitmap *edge_cache = NULL;
1214 /* Assume no computed jump; revise as we create edges. */
1215 current_function_has_computed_jump = 0;
1217 /* Heavy use of computed goto in machine-generated code can lead to
1218 nearly fully-connected CFGs. In that case we spend a significant
1219 amount of time searching the edge lists for duplicates. */
1220 if (forced_labels || label_value_list)
1222 edge_cache = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
1223 sbitmap_vector_zero (edge_cache, n_basic_blocks);
1226 for (i = min; i <= max; ++i)
1229 for (e = BASIC_BLOCK (i)->succ; e ; e = e->succ_next)
1230 if (e->dest != EXIT_BLOCK_PTR)
1231 SET_BIT (edge_cache[i], e->dest->index);
1235 /* By nature of the way these get numbered, block 0 is always the entry. */
1236 make_edge (edge_cache, ENTRY_BLOCK_PTR, BASIC_BLOCK (0), EDGE_FALLTHRU);
1238 for (i = min; i <= max; ++i)
1240 basic_block bb = BASIC_BLOCK (i);
1243 int force_fallthru = 0;
1245 if (GET_CODE (bb->head) == CODE_LABEL
1246 && LABEL_ALTERNATE_NAME (bb->head))
1247 make_edge (NULL, ENTRY_BLOCK_PTR, bb, 0);
1249 /* Examine the last instruction of the block, and discover the
1250 ways we can leave the block. */
1253 code = GET_CODE (insn);
1256 if (code == JUMP_INSN)
1260 /* Recognize exception handling placeholders. */
1261 if (GET_CODE (PATTERN (insn)) == RESX)
1262 make_eh_edge (edge_cache, bb, insn);
1264 /* Recognize a non-local goto as a branch outside the
1265 current function. */
1266 else if (find_reg_note (insn, REG_NON_LOCAL_GOTO, NULL_RTX))
1269 /* ??? Recognize a tablejump and do the right thing. */
1270 else if ((tmp = JUMP_LABEL (insn)) != NULL_RTX
1271 && (tmp = NEXT_INSN (tmp)) != NULL_RTX
1272 && GET_CODE (tmp) == JUMP_INSN
1273 && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
1274 || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
1279 if (GET_CODE (PATTERN (tmp)) == ADDR_VEC)
1280 vec = XVEC (PATTERN (tmp), 0);
1282 vec = XVEC (PATTERN (tmp), 1);
1284 for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
1285 make_label_edge (edge_cache, bb,
1286 XEXP (RTVEC_ELT (vec, j), 0), 0);
1288 /* Some targets (eg, ARM) emit a conditional jump that also
1289 contains the out-of-range target. Scan for these and
1290 add an edge if necessary. */
1291 if ((tmp = single_set (insn)) != NULL
1292 && SET_DEST (tmp) == pc_rtx
1293 && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
1294 && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF)
1295 make_label_edge (edge_cache, bb,
1296 XEXP (XEXP (SET_SRC (tmp), 2), 0), 0);
1298 #ifdef CASE_DROPS_THROUGH
1299 /* Silly VAXen. The ADDR_VEC is going to be in the way of
1300 us naturally detecting fallthru into the next block. */
1305 /* If this is a computed jump, then mark it as reaching
1306 everything on the label_value_list and forced_labels list. */
1307 else if (computed_jump_p (insn))
1309 current_function_has_computed_jump = 1;
1311 for (x = label_value_list; x; x = XEXP (x, 1))
1312 make_label_edge (edge_cache, bb, XEXP (x, 0), EDGE_ABNORMAL);
1314 for (x = forced_labels; x; x = XEXP (x, 1))
1315 make_label_edge (edge_cache, bb, XEXP (x, 0), EDGE_ABNORMAL);
1318 /* Returns create an exit out. */
1319 else if (returnjump_p (insn))
1320 make_edge (edge_cache, bb, EXIT_BLOCK_PTR, 0);
1322 /* Otherwise, we have a plain conditional or unconditional jump. */
1325 if (! JUMP_LABEL (insn))
1327 make_label_edge (edge_cache, bb, JUMP_LABEL (insn), 0);
1331 /* If this is a sibling call insn, then this is in effect a
1332 combined call and return, and so we need an edge to the
1333 exit block. No need to worry about EH edges, since we
1334 wouldn't have created the sibling call in the first place. */
1336 if (code == CALL_INSN && SIBLING_CALL_P (insn))
1337 make_edge (edge_cache, bb, EXIT_BLOCK_PTR,
1338 EDGE_ABNORMAL | EDGE_ABNORMAL_CALL);
1340 /* If this is a CALL_INSN, then mark it as reaching the active EH
1341 handler for this CALL_INSN. If we're handling non-call
1342 exceptions then any insn can reach any of the active handlers.
1344 Also mark the CALL_INSN as reaching any nonlocal goto handler. */
1346 else if (code == CALL_INSN || flag_non_call_exceptions)
1348 /* Add any appropriate EH edges. */
1349 make_eh_edge (edge_cache, bb, insn);
1351 if (code == CALL_INSN && nonlocal_goto_handler_labels)
1353 /* ??? This could be made smarter: in some cases it's possible
1354 to tell that certain calls will not do a nonlocal goto.
1356 For example, if the nested functions that do the nonlocal
1357 gotos do not have their addresses taken, then only calls to
1358 those functions or to other nested functions that use them
1359 could possibly do nonlocal gotos. */
1360 /* We do know that a REG_EH_REGION note with a value less
1361 than 0 is guaranteed not to perform a non-local goto. */
1362 rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
1363 if (!note || INTVAL (XEXP (note, 0)) >= 0)
1364 for (x = nonlocal_goto_handler_labels; x; x = XEXP (x, 1))
1365 make_label_edge (edge_cache, bb, XEXP (x, 0),
1366 EDGE_ABNORMAL | EDGE_ABNORMAL_CALL);
1370 /* Find out if we can drop through to the next block. */
1371 insn = next_nonnote_insn (insn);
1372 if (!insn || (i + 1 == n_basic_blocks && force_fallthru))
1373 make_edge (edge_cache, bb, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
1374 else if (i + 1 < n_basic_blocks)
1376 rtx tmp = BLOCK_HEAD (i + 1);
1377 if (GET_CODE (tmp) == NOTE)
1378 tmp = next_nonnote_insn (tmp);
1379 if (force_fallthru || insn == tmp)
1380 make_edge (edge_cache, bb, BASIC_BLOCK (i + 1), EDGE_FALLTHRU);
1385 sbitmap_vector_free (edge_cache);
1388 /* Create an edge between two basic blocks. FLAGS are auxiliary information
1389 about the edge that is accumulated between calls. */
1392 make_edge (edge_cache, src, dst, flags)
1393 sbitmap *edge_cache;
1394 basic_block src, dst;
1400 /* Don't bother with edge cache for ENTRY or EXIT; there aren't that
1401 many edges to them, and we didn't allocate memory for it. */
1402 use_edge_cache = (edge_cache
1403 && src != ENTRY_BLOCK_PTR
1404 && dst != EXIT_BLOCK_PTR);
1406 /* Make sure we don't add duplicate edges. */
1407 switch (use_edge_cache)
1410 /* Quick test for non-existance of the edge. */
1411 if (! TEST_BIT (edge_cache[src->index], dst->index))
1414 /* The edge exists; early exit if no work to do. */
1420 for (e = src->succ; e; e = e->succ_next)
1429 e = (edge) xcalloc (1, sizeof (*e));
1432 e->succ_next = src->succ;
1433 e->pred_next = dst->pred;
1442 SET_BIT (edge_cache[src->index], dst->index);
1445 /* Create an edge from a basic block to a label. */
1448 make_label_edge (edge_cache, src, label, flags)
1449 sbitmap *edge_cache;
1454 if (GET_CODE (label) != CODE_LABEL)
1457 /* If the label was never emitted, this insn is junk, but avoid a
1458 crash trying to refer to BLOCK_FOR_INSN (label). This can happen
1459 as a result of a syntax error and a diagnostic has already been
1462 if (INSN_UID (label) == 0)
1465 make_edge (edge_cache, src, BLOCK_FOR_INSN (label), flags);
1468 /* Create the edges generated by INSN in REGION. */
1471 make_eh_edge (edge_cache, src, insn)
1472 sbitmap *edge_cache;
1476 int is_call = (GET_CODE (insn) == CALL_INSN ? EDGE_ABNORMAL_CALL : 0);
1479 handlers = reachable_handlers (insn);
1481 for (i = handlers; i; i = XEXP (i, 1))
1482 make_label_edge (edge_cache, src, XEXP (i, 0),
1483 EDGE_ABNORMAL | EDGE_EH | is_call);
1485 free_INSN_LIST_list (&handlers);
1488 /* Identify critical edges and set the bits appropriately. */
1491 mark_critical_edges ()
1493 int i, n = n_basic_blocks;
1496 /* We begin with the entry block. This is not terribly important now,
1497 but could be if a front end (Fortran) implemented alternate entry
1499 bb = ENTRY_BLOCK_PTR;
1506 /* (1) Critical edges must have a source with multiple successors. */
1507 if (bb->succ && bb->succ->succ_next)
1509 for (e = bb->succ; e; e = e->succ_next)
1511 /* (2) Critical edges must have a destination with multiple
1512 predecessors. Note that we know there is at least one
1513 predecessor -- the edge we followed to get here. */
1514 if (e->dest->pred->pred_next)
1515 e->flags |= EDGE_CRITICAL;
1517 e->flags &= ~EDGE_CRITICAL;
1522 for (e = bb->succ; e; e = e->succ_next)
1523 e->flags &= ~EDGE_CRITICAL;
1528 bb = BASIC_BLOCK (i);
1532 /* Mark the back edges in DFS traversal.
1533 Return non-zero if a loop (natural or otherwise) is present.
1534 Inspired by Depth_First_Search_PP described in:
1536 Advanced Compiler Design and Implementation
1538 Morgan Kaufmann, 1997
1540 and heavily borrowed from flow_depth_first_order_compute. */
1543 mark_dfs_back_edges ()
1554 /* Allocate the preorder and postorder number arrays. */
1555 pre = (int *) xcalloc (n_basic_blocks, sizeof (int));
1556 post = (int *) xcalloc (n_basic_blocks, sizeof (int));
1558 /* Allocate stack for back-tracking up CFG. */
1559 stack = (edge *) xmalloc ((n_basic_blocks + 1) * sizeof (edge));
1562 /* Allocate bitmap to track nodes that have been visited. */
1563 visited = sbitmap_alloc (n_basic_blocks);
1565 /* None of the nodes in the CFG have been visited yet. */
1566 sbitmap_zero (visited);
1568 /* Push the first edge on to the stack. */
1569 stack[sp++] = ENTRY_BLOCK_PTR->succ;
1577 /* Look at the edge on the top of the stack. */
1581 e->flags &= ~EDGE_DFS_BACK;
1583 /* Check if the edge destination has been visited yet. */
1584 if (dest != EXIT_BLOCK_PTR && ! TEST_BIT (visited, dest->index))
1586 /* Mark that we have visited the destination. */
1587 SET_BIT (visited, dest->index);
1589 pre[dest->index] = prenum++;
1593 /* Since the DEST node has been visited for the first
1594 time, check its successors. */
1595 stack[sp++] = dest->succ;
1598 post[dest->index] = postnum++;
1602 if (dest != EXIT_BLOCK_PTR && src != ENTRY_BLOCK_PTR
1603 && pre[src->index] >= pre[dest->index]
1604 && post[dest->index] == 0)
1605 e->flags |= EDGE_DFS_BACK, found = true;
1607 if (! e->succ_next && src != ENTRY_BLOCK_PTR)
1608 post[src->index] = postnum++;
1611 stack[sp - 1] = e->succ_next;
1620 sbitmap_free (visited);
1625 /* Split a block BB after insn INSN creating a new fallthru edge.
1626 Return the new edge. Note that to keep other parts of the compiler happy,
1627 this function renumbers all the basic blocks so that the new
1628 one has a number one greater than the block split. */
1631 split_block (bb, insn)
1641 /* There is no point splitting the block after its end. */
1642 if (bb->end == insn)
1645 /* Create the new structures. */
1646 new_bb = (basic_block) obstack_alloc (&flow_obstack, sizeof (*new_bb));
1647 new_edge = (edge) xcalloc (1, sizeof (*new_edge));
1650 memset (new_bb, 0, sizeof (*new_bb));
1652 new_bb->head = NEXT_INSN (insn);
1653 new_bb->end = bb->end;
1656 new_bb->succ = bb->succ;
1657 bb->succ = new_edge;
1658 new_bb->pred = new_edge;
1659 new_bb->count = bb->count;
1660 new_bb->frequency = bb->frequency;
1661 new_bb->loop_depth = bb->loop_depth;
1664 new_edge->dest = new_bb;
1665 new_edge->flags = EDGE_FALLTHRU;
1666 new_edge->probability = REG_BR_PROB_BASE;
1667 new_edge->count = bb->count;
1669 /* Redirect the src of the successor edges of bb to point to new_bb. */
1670 for (e = new_bb->succ; e; e = e->succ_next)
1673 /* Place the new block just after the block being split. */
1674 VARRAY_GROW (basic_block_info, ++n_basic_blocks);
1676 /* Some parts of the compiler expect blocks to be number in
1677 sequential order so insert the new block immediately after the
1678 block being split.. */
1680 for (i = n_basic_blocks - 1; i > j + 1; --i)
1682 basic_block tmp = BASIC_BLOCK (i - 1);
1683 BASIC_BLOCK (i) = tmp;
1687 BASIC_BLOCK (i) = new_bb;
1690 if (GET_CODE (new_bb->head) == CODE_LABEL)
1692 /* Create the basic block note. */
1693 bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK,
1695 NOTE_BASIC_BLOCK (bb_note) = new_bb;
1697 /* If the only thing in this new block was the label, make sure
1698 the block note gets included. */
1699 if (new_bb->head == new_bb->end)
1700 new_bb->end = bb_note;
1704 /* Create the basic block note. */
1705 bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK,
1707 NOTE_BASIC_BLOCK (bb_note) = new_bb;
1708 new_bb->head = bb_note;
1711 update_bb_for_insn (new_bb);
1713 if (bb->global_live_at_start)
1715 new_bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (&flow_obstack);
1716 new_bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (&flow_obstack);
1717 COPY_REG_SET (new_bb->global_live_at_end, bb->global_live_at_end);
1719 /* We now have to calculate which registers are live at the end
1720 of the split basic block and at the start of the new basic
1721 block. Start with those registers that are known to be live
1722 at the end of the original basic block and get
1723 propagate_block to determine which registers are live. */
1724 COPY_REG_SET (new_bb->global_live_at_start, bb->global_live_at_end);
1725 propagate_block (new_bb, new_bb->global_live_at_start, NULL, NULL, 0);
1726 COPY_REG_SET (bb->global_live_at_end,
1727 new_bb->global_live_at_start);
1733 /* Return label in the head of basic block. Create one if it doesn't exist. */
1738 if (block == EXIT_BLOCK_PTR)
1740 if (GET_CODE (block->head) != CODE_LABEL)
1742 block->head = emit_label_before (gen_label_rtx (), block->head);
1743 if (basic_block_for_insn)
1744 set_block_for_insn (block->head, block);
1749 /* Return true if the block has no effect and only forwards control flow to
1750 its single destination. */
1752 forwarder_block_p (bb)
1755 rtx insn = bb->head;
1756 if (bb == EXIT_BLOCK_PTR || bb == ENTRY_BLOCK_PTR
1757 || !bb->succ || bb->succ->succ_next)
1760 while (insn != bb->end)
1762 if (active_insn_p (insn))
1764 insn = NEXT_INSN (insn);
1766 return (!active_insn_p (insn)
1767 || (GET_CODE (insn) == JUMP_INSN && onlyjump_p (insn)));
1770 /* Return nonzero if we can reach target from src by falling trought. */
1772 can_fallthru (src, target)
1773 basic_block src, target;
1775 rtx insn = src->end;
1776 rtx insn2 = target->head;
1778 if (src->index + 1 == target->index && !active_insn_p (insn2))
1779 insn2 = next_active_insn (insn2);
1780 /* ??? Later we may add code to move jump tables offline. */
1781 return next_active_insn (insn) == insn2;
1784 /* Attempt to perform edge redirection by replacing possibly complex jump
1785 instruction by unconditional jump or removing jump completely.
1786 This can apply only if all edges now point to the same block.
1788 The parameters and return values are equivalent to redirect_edge_and_branch.
1791 try_redirect_by_replacing_jump (e, target)
1795 basic_block src = e->src;
1796 rtx insn = src->end, kill_from;
1801 /* Verify that all targets will be TARGET. */
1802 for (tmp = src->succ; tmp; tmp = tmp->succ_next)
1803 if (tmp->dest != target && tmp != e)
1805 if (tmp || !onlyjump_p (insn))
1808 /* Avoid removing branch with side effects. */
1809 set = single_set (insn);
1810 if (!set || side_effects_p (set))
1813 /* In case we zap a conditional jump, we'll need to kill
1814 the cc0 setter too. */
1817 if (reg_mentioned_p (cc0_rtx, PATTERN (insn)))
1818 kill_from = PREV_INSN (insn);
1821 /* See if we can create the fallthru edge. */
1822 if (can_fallthru (src, target))
1824 src->end = PREV_INSN (kill_from);
1826 fprintf (rtl_dump_file, "Removing jump %i.\n", INSN_UID (insn));
1829 /* Selectivly unlink whole insn chain. */
1830 flow_delete_insn_chain (kill_from, PREV_INSN (target->head));
1832 /* If this already is simplejump, redirect it. */
1833 else if (simplejump_p (insn))
1835 if (e->dest == target)
1838 fprintf (rtl_dump_file, "Redirecting jump %i from %i to %i.\n",
1839 INSN_UID (insn), e->dest->index, target->index);
1840 redirect_jump (insn, block_label (target), 0);
1842 /* Or replace possibly complicated jump insn by simple jump insn. */
1845 rtx target_label = block_label (target);
1848 src->end = emit_jump_insn_before (gen_jump (target_label), kill_from);
1849 JUMP_LABEL (src->end) = target_label;
1850 LABEL_NUSES (target_label)++;
1851 if (basic_block_for_insn)
1852 set_block_for_new_insns (src->end, src);
1854 fprintf (rtl_dump_file, "Replacing insn %i by jump %i\n",
1855 INSN_UID (insn), INSN_UID (src->end));
1857 flow_delete_insn_chain (kill_from, insn);
1859 barrier = next_nonnote_insn (src->end);
1860 if (!barrier || GET_CODE (barrier) != BARRIER)
1861 emit_barrier_after (src->end);
1864 /* Keep only one edge out and set proper flags. */
1865 while (src->succ->succ_next)
1866 remove_edge (src->succ);
1869 e->flags = EDGE_FALLTHRU;
1872 e->probability = REG_BR_PROB_BASE;
1873 e->count = src->count;
1875 /* We don't want a block to end on a line-number note since that has
1876 the potential of changing the code between -g and not -g. */
1877 while (GET_CODE (e->src->end) == NOTE
1878 && NOTE_LINE_NUMBER (e->src->end) >= 0)
1880 rtx prev = PREV_INSN (e->src->end);
1881 flow_delete_insn (e->src->end);
1885 if (e->dest != target)
1886 redirect_edge_succ (e, target);
1890 /* Return last loop_beg note appearing after INSN, before start of next
1891 basic block. Return INSN if there are no such notes.
1893 When emmiting jump to redirect an fallthru edge, it should always
1894 appear after the LOOP_BEG notes, as loop optimizer expect loop to
1895 eighter start by fallthru edge or jump following the LOOP_BEG note
1896 jumping to the loop exit test. */
1898 last_loop_beg_note (insn)
1902 insn = NEXT_INSN (insn);
1903 while (GET_CODE (insn) == NOTE
1904 && NOTE_LINE_NUMBER (insn) != NOTE_INSN_BASIC_BLOCK)
1906 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG)
1908 insn = NEXT_INSN (insn);
1913 /* Attempt to change code to redirect edge E to TARGET.
1914 Don't do that on expense of adding new instructions or reordering
1917 Function can be also called with edge destionation equivalent to the
1918 TARGET. Then it should try the simplifications and do nothing if
1921 Return true if transformation suceeded. We still return flase in case
1922 E already destinated TARGET and we didn't managed to simplify instruction
1925 redirect_edge_and_branch (e, target)
1930 rtx old_label = e->dest->head;
1931 basic_block src = e->src;
1932 rtx insn = src->end;
1934 if (e->flags & EDGE_COMPLEX)
1937 if (try_redirect_by_replacing_jump (e, target))
1939 /* Do this fast path late, as we want above code to simplify for cases
1940 where called on single edge leaving basic block containing nontrivial
1942 else if (e->dest == target)
1945 /* We can only redirect non-fallthru edges of jump insn. */
1946 if (e->flags & EDGE_FALLTHRU)
1948 if (GET_CODE (insn) != JUMP_INSN)
1951 /* Recognize a tablejump and adjust all matching cases. */
1952 if ((tmp = JUMP_LABEL (insn)) != NULL_RTX
1953 && (tmp = NEXT_INSN (tmp)) != NULL_RTX
1954 && GET_CODE (tmp) == JUMP_INSN
1955 && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
1956 || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
1960 rtx new_label = block_label (target);
1962 if (GET_CODE (PATTERN (tmp)) == ADDR_VEC)
1963 vec = XVEC (PATTERN (tmp), 0);
1965 vec = XVEC (PATTERN (tmp), 1);
1967 for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
1968 if (XEXP (RTVEC_ELT (vec, j), 0) == old_label)
1970 RTVEC_ELT (vec, j) = gen_rtx_LABEL_REF (Pmode, new_label);
1971 --LABEL_NUSES (old_label);
1972 ++LABEL_NUSES (new_label);
1975 /* Handle casesi dispatch insns */
1976 if ((tmp = single_set (insn)) != NULL
1977 && SET_DEST (tmp) == pc_rtx
1978 && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
1979 && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF
1980 && XEXP (XEXP (SET_SRC (tmp), 2), 0) == old_label)
1982 XEXP (SET_SRC (tmp), 2) = gen_rtx_LABEL_REF (VOIDmode,
1984 --LABEL_NUSES (old_label);
1985 ++LABEL_NUSES (new_label);
1990 /* ?? We may play the games with moving the named labels from
1991 one basic block to the other in case only one computed_jump is
1993 if (computed_jump_p (insn))
1996 /* A return instruction can't be redirected. */
1997 if (returnjump_p (insn))
2000 /* If the insn doesn't go where we think, we're confused. */
2001 if (JUMP_LABEL (insn) != old_label)
2003 redirect_jump (insn, block_label (target), 0);
2007 fprintf (rtl_dump_file, "Edge %i->%i redirected to %i\n",
2008 e->src->index, e->dest->index, target->index);
2009 if (e->dest != target)
2010 redirect_edge_succ_nodup (e, target);
2014 /* Redirect edge even at the expense of creating new jump insn or
2015 basic block. Return new basic block if created, NULL otherwise.
2016 Abort if converison is impossible. */
2018 redirect_edge_and_branch_force (e, target)
2028 if (redirect_edge_and_branch (e, target))
2030 if (e->dest == target)
2032 if (e->flags & EDGE_ABNORMAL)
2034 if (!(e->flags & EDGE_FALLTHRU))
2037 e->flags &= ~EDGE_FALLTHRU;
2038 label = block_label (target);
2039 /* Case of the fallthru block. */
2040 if (!e->src->succ->succ_next)
2042 e->src->end = emit_jump_insn_after (gen_jump (label),
2043 last_loop_beg_note (e->src->end));
2044 JUMP_LABEL (e->src->end) = label;
2045 LABEL_NUSES (label)++;
2046 if (basic_block_for_insn)
2047 set_block_for_new_insns (e->src->end, e->src);
2048 emit_barrier_after (e->src->end);
2050 fprintf (rtl_dump_file,
2051 "Emitting jump insn %i to redirect edge %i->%i to %i\n",
2052 INSN_UID (e->src->end), e->src->index, e->dest->index,
2054 redirect_edge_succ (e, target);
2057 /* Redirecting fallthru edge of the conditional needs extra work. */
2060 fprintf (rtl_dump_file,
2061 "Emitting jump insn %i in new BB to redirect edge %i->%i to %i\n",
2062 INSN_UID (e->src->end), e->src->index, e->dest->index,
2065 /* Create the new structures. */
2066 new_bb = (basic_block) obstack_alloc (&flow_obstack, sizeof (*new_bb));
2067 new_edge = (edge) xcalloc (1, sizeof (*new_edge));
2070 memset (new_bb, 0, sizeof (*new_bb));
2072 new_bb->end = new_bb->head = last_loop_beg_note (e->src->end);
2073 new_bb->succ = NULL;
2074 new_bb->pred = new_edge;
2075 new_bb->count = e->count;
2076 new_bb->frequency = EDGE_FREQUENCY (e);
2077 new_bb->loop_depth = e->dest->loop_depth;
2079 new_edge->flags = EDGE_FALLTHRU;
2080 new_edge->probability = e->probability;
2081 new_edge->count = e->count;
2083 if (target->global_live_at_start)
2085 new_bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (&flow_obstack);
2086 new_bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (&flow_obstack);
2087 COPY_REG_SET (new_bb->global_live_at_start,
2088 target->global_live_at_start);
2089 COPY_REG_SET (new_bb->global_live_at_end, new_bb->global_live_at_start);
2093 new_edge->src = e->src;
2094 new_edge->dest = new_bb;
2095 new_edge->succ_next = e->src->succ;
2096 e->src->succ = new_edge;
2097 new_edge->pred_next = NULL;
2099 /* Redirect old edge. */
2100 redirect_edge_succ (e, target);
2101 redirect_edge_pred (e, new_bb);
2102 e->probability = REG_BR_PROB_BASE;
2104 /* Place the new block just after the block being split. */
2105 VARRAY_GROW (basic_block_info, ++n_basic_blocks);
2107 /* Some parts of the compiler expect blocks to be number in
2108 sequential order so insert the new block immediately after the
2109 block being split.. */
2110 j = new_edge->src->index;
2111 for (i = n_basic_blocks - 1; i > j + 1; --i)
2113 basic_block tmp = BASIC_BLOCK (i - 1);
2114 BASIC_BLOCK (i) = tmp;
2118 BASIC_BLOCK (i) = new_bb;
2121 /* Create the basic block note. */
2122 bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK, new_bb->head);
2123 NOTE_BASIC_BLOCK (bb_note) = new_bb;
2124 new_bb->head = bb_note;
2126 new_bb->end = emit_jump_insn_after (gen_jump (label), new_bb->head);
2127 JUMP_LABEL (new_bb->end) = label;
2128 LABEL_NUSES (label)++;
2129 if (basic_block_for_insn)
2130 set_block_for_new_insns (new_bb->end, new_bb);
2131 emit_barrier_after (new_bb->end);
2135 /* Helper function for split_edge. Return true in case edge BB2 to BB1
2136 is back edge of syntactic loop. */
2138 back_edge_of_syntactic_loop_p (bb1, bb2)
2139 basic_block bb1, bb2;
2143 if (bb1->index > bb2->index)
2145 if (bb1->index == bb2->index)
2147 for (insn = bb1->end; insn != bb2->head && count >= 0;
2148 insn = NEXT_INSN (insn))
2149 if (GET_CODE (insn) == NOTE)
2151 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG)
2153 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END)
2159 /* Split a (typically critical) edge. Return the new block.
2160 Abort on abnormal edges.
2162 ??? The code generally expects to be called on critical edges.
2163 The case of a block ending in an unconditional jump to a
2164 block with multiple predecessors is not handled optimally. */
2167 split_edge (edge_in)
2170 basic_block old_pred, bb, old_succ;
2175 /* Abnormal edges cannot be split. */
2176 if ((edge_in->flags & EDGE_ABNORMAL) != 0)
2179 old_pred = edge_in->src;
2180 old_succ = edge_in->dest;
2182 /* Create the new structures. */
2183 bb = (basic_block) obstack_alloc (&flow_obstack, sizeof (*bb));
2184 edge_out = (edge) xcalloc (1, sizeof (*edge_out));
2187 memset (bb, 0, sizeof (*bb));
2189 /* ??? This info is likely going to be out of date very soon. */
2190 if (old_succ->global_live_at_start)
2192 bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (&flow_obstack);
2193 bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (&flow_obstack);
2194 COPY_REG_SET (bb->global_live_at_start, old_succ->global_live_at_start);
2195 COPY_REG_SET (bb->global_live_at_end, old_succ->global_live_at_start);
2199 bb->succ = edge_out;
2200 bb->count = edge_in->count;
2201 bb->frequency = EDGE_FREQUENCY (edge_in);
2203 edge_in->flags &= ~EDGE_CRITICAL;
2205 edge_out->pred_next = old_succ->pred;
2206 edge_out->succ_next = NULL;
2208 edge_out->dest = old_succ;
2209 edge_out->flags = EDGE_FALLTHRU;
2210 edge_out->probability = REG_BR_PROB_BASE;
2211 edge_out->count = edge_in->count;
2213 old_succ->pred = edge_out;
2215 /* Tricky case -- if there existed a fallthru into the successor
2216 (and we're not it) we must add a new unconditional jump around
2217 the new block we're actually interested in.
2219 Further, if that edge is critical, this means a second new basic
2220 block must be created to hold it. In order to simplify correct
2221 insn placement, do this before we touch the existing basic block
2222 ordering for the block we were really wanting. */
2223 if ((edge_in->flags & EDGE_FALLTHRU) == 0)
2226 for (e = edge_out->pred_next; e; e = e->pred_next)
2227 if (e->flags & EDGE_FALLTHRU)
2232 basic_block jump_block;
2235 if ((e->flags & EDGE_CRITICAL) == 0
2236 && e->src != ENTRY_BLOCK_PTR)
2238 /* Non critical -- we can simply add a jump to the end
2239 of the existing predecessor. */
2240 jump_block = e->src;
2244 /* We need a new block to hold the jump. The simplest
2245 way to do the bulk of the work here is to recursively
2247 jump_block = split_edge (e);
2248 e = jump_block->succ;
2251 /* Now add the jump insn ... */
2252 pos = emit_jump_insn_after (gen_jump (old_succ->head),
2253 last_loop_beg_note (jump_block->end));
2254 jump_block->end = pos;
2255 if (basic_block_for_insn)
2256 set_block_for_new_insns (pos, jump_block);
2257 emit_barrier_after (pos);
2259 /* ... let jump know that label is in use, ... */
2260 JUMP_LABEL (pos) = old_succ->head;
2261 ++LABEL_NUSES (old_succ->head);
2263 /* ... and clear fallthru on the outgoing edge. */
2264 e->flags &= ~EDGE_FALLTHRU;
2266 /* Continue splitting the interesting edge. */
2270 /* Place the new block just in front of the successor. */
2271 VARRAY_GROW (basic_block_info, ++n_basic_blocks);
2272 if (old_succ == EXIT_BLOCK_PTR)
2273 j = n_basic_blocks - 1;
2275 j = old_succ->index;
2276 for (i = n_basic_blocks - 1; i > j; --i)
2278 basic_block tmp = BASIC_BLOCK (i - 1);
2279 BASIC_BLOCK (i) = tmp;
2282 BASIC_BLOCK (i) = bb;
2285 /* Create the basic block note.
2287 Where we place the note can have a noticable impact on the generated
2288 code. Consider this cfg:
2298 If we need to insert an insn on the edge from block 0 to block 1,
2299 we want to ensure the instructions we insert are outside of any
2300 loop notes that physically sit between block 0 and block 1. Otherwise
2301 we confuse the loop optimizer into thinking the loop is a phony. */
2302 if (old_succ != EXIT_BLOCK_PTR
2303 && PREV_INSN (old_succ->head)
2304 && GET_CODE (PREV_INSN (old_succ->head)) == NOTE
2305 && NOTE_LINE_NUMBER (PREV_INSN (old_succ->head)) == NOTE_INSN_LOOP_BEG
2306 && !back_edge_of_syntactic_loop_p (old_succ, old_pred))
2307 bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK,
2308 PREV_INSN (old_succ->head));
2309 else if (old_succ != EXIT_BLOCK_PTR)
2310 bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK, old_succ->head);
2312 bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK, get_last_insn ());
2313 NOTE_BASIC_BLOCK (bb_note) = bb;
2314 bb->head = bb->end = bb_note;
2316 /* For non-fallthry edges, we must adjust the predecessor's
2317 jump instruction to target our new block. */
2318 if ((edge_in->flags & EDGE_FALLTHRU) == 0)
2320 if (!redirect_edge_and_branch (edge_in, bb))
2324 redirect_edge_succ (edge_in, bb);
2329 /* Queue instructions for insertion on an edge between two basic blocks.
2330 The new instructions and basic blocks (if any) will not appear in the
2331 CFG until commit_edge_insertions is called. */
2334 insert_insn_on_edge (pattern, e)
2338 /* We cannot insert instructions on an abnormal critical edge.
2339 It will be easier to find the culprit if we die now. */
2340 if ((e->flags & (EDGE_ABNORMAL|EDGE_CRITICAL))
2341 == (EDGE_ABNORMAL|EDGE_CRITICAL))
2344 if (e->insns == NULL_RTX)
2347 push_to_sequence (e->insns);
2349 emit_insn (pattern);
2351 e->insns = get_insns ();
2355 /* Update the CFG for the instructions queued on edge E. */
2358 commit_one_edge_insertion (e)
2361 rtx before = NULL_RTX, after = NULL_RTX, insns, tmp, last;
2364 /* Pull the insns off the edge now since the edge might go away. */
2366 e->insns = NULL_RTX;
2368 /* Figure out where to put these things. If the destination has
2369 one predecessor, insert there. Except for the exit block. */
2370 if (e->dest->pred->pred_next == NULL
2371 && e->dest != EXIT_BLOCK_PTR)
2375 /* Get the location correct wrt a code label, and "nice" wrt
2376 a basic block note, and before everything else. */
2378 if (GET_CODE (tmp) == CODE_LABEL)
2379 tmp = NEXT_INSN (tmp);
2380 if (NOTE_INSN_BASIC_BLOCK_P (tmp))
2381 tmp = NEXT_INSN (tmp);
2382 if (tmp == bb->head)
2385 after = PREV_INSN (tmp);
2388 /* If the source has one successor and the edge is not abnormal,
2389 insert there. Except for the entry block. */
2390 else if ((e->flags & EDGE_ABNORMAL) == 0
2391 && e->src->succ->succ_next == NULL
2392 && e->src != ENTRY_BLOCK_PTR)
2395 /* It is possible to have a non-simple jump here. Consider a target
2396 where some forms of unconditional jumps clobber a register. This
2397 happens on the fr30 for example.
2399 We know this block has a single successor, so we can just emit
2400 the queued insns before the jump. */
2401 if (GET_CODE (bb->end) == JUMP_INSN)
2407 /* We'd better be fallthru, or we've lost track of what's what. */
2408 if ((e->flags & EDGE_FALLTHRU) == 0)
2415 /* Otherwise we must split the edge. */
2418 bb = split_edge (e);
2422 /* Now that we've found the spot, do the insertion. */
2424 /* Set the new block number for these insns, if structure is allocated. */
2425 if (basic_block_for_insn)
2428 for (i = insns; i != NULL_RTX; i = NEXT_INSN (i))
2429 set_block_for_insn (i, bb);
2434 emit_insns_before (insns, before);
2435 if (before == bb->head)
2438 last = prev_nonnote_insn (before);
2442 last = emit_insns_after (insns, after);
2443 if (after == bb->end)
2447 if (returnjump_p (last))
2449 /* ??? Remove all outgoing edges from BB and add one for EXIT.
2450 This is not currently a problem because this only happens
2451 for the (single) epilogue, which already has a fallthru edge
2455 if (e->dest != EXIT_BLOCK_PTR
2456 || e->succ_next != NULL
2457 || (e->flags & EDGE_FALLTHRU) == 0)
2459 e->flags &= ~EDGE_FALLTHRU;
2461 emit_barrier_after (last);
2465 flow_delete_insn (before);
2467 else if (GET_CODE (last) == JUMP_INSN)
2469 find_sub_basic_blocks (bb);
2472 /* Update the CFG for all queued instructions. */
2475 commit_edge_insertions ()
2479 compute_bb_for_insn (get_max_uid ());
2481 #ifdef ENABLE_CHECKING
2482 verify_flow_info ();
2486 bb = ENTRY_BLOCK_PTR;
2491 for (e = bb->succ; e; e = next)
2493 next = e->succ_next;
2495 commit_one_edge_insertion (e);
2498 if (++i >= n_basic_blocks)
2500 bb = BASIC_BLOCK (i);
2504 /* Return true if we need to add fake edge to exit.
2505 Helper function for the flow_call_edges_add. */
2507 need_fake_edge_p (insn)
2513 if ((GET_CODE (insn) == CALL_INSN
2514 && !SIBLING_CALL_P (insn)
2515 && !find_reg_note (insn, REG_NORETURN, NULL) && !CONST_CALL_P (insn)))
2518 return ((GET_CODE (PATTERN (insn)) == ASM_OPERANDS
2519 && MEM_VOLATILE_P (PATTERN (insn)))
2520 || (GET_CODE (PATTERN (insn)) == PARALLEL
2521 && asm_noperands (insn) != -1
2522 && MEM_VOLATILE_P (XVECEXP (PATTERN (insn), 0, 0)))
2523 || GET_CODE (PATTERN (insn)) == ASM_INPUT);
2526 /* Add fake edges to the function exit for any non constant and non noreturn
2527 calls, volatile inline assembly in the bitmap of blocks specified by
2528 BLOCKS or to the whole CFG if BLOCKS is zero. Return the nuber of blocks
2531 The goal is to expose cases in which entering a basic block does not imply
2532 that all subsequent instructions must be executed. */
2535 flow_call_edges_add (blocks)
2539 int blocks_split = 0;
2542 bool check_last_block = false;
2544 /* Map bb indicies into basic block pointers since split_block
2545 will renumber the basic blocks. */
2547 bbs = xmalloc (n_basic_blocks * sizeof (*bbs));
2551 for (i = 0; i < n_basic_blocks; i++)
2552 bbs[bb_num++] = BASIC_BLOCK (i);
2553 check_last_block = true;
2557 EXECUTE_IF_SET_IN_SBITMAP (blocks, 0, i,
2559 bbs[bb_num++] = BASIC_BLOCK (i);
2560 if (i == n_basic_blocks - 1)
2561 check_last_block = true;
2565 /* In the last basic block, before epilogue generation, there will be
2566 a fallthru edge to EXIT. Special care is required if the last insn
2567 of the last basic block is a call because make_edge folds duplicate
2568 edges, which would result in the fallthru edge also being marked
2569 fake, which would result in the fallthru edge being removed by
2570 remove_fake_edges, which would result in an invalid CFG.
2572 Moreover, we can't elide the outgoing fake edge, since the block
2573 profiler needs to take this into account in order to solve the minimal
2574 spanning tree in the case that the call doesn't return.
2576 Handle this by adding a dummy instruction in a new last basic block. */
2577 if (check_last_block
2578 && need_fake_edge_p (BASIC_BLOCK (n_basic_blocks - 1)->end))
2581 for (e = BASIC_BLOCK (n_basic_blocks - 1)->succ; e; e = e->succ_next)
2582 if (e->dest == EXIT_BLOCK_PTR)
2584 insert_insn_on_edge (gen_rtx_USE (VOIDmode, const0_rtx), e);
2585 commit_edge_insertions ();
2589 /* Now add fake edges to the function exit for any non constant
2590 calls since there is no way that we can determine if they will
2593 for (i = 0; i < bb_num; i++)
2595 basic_block bb = bbs[i];
2599 for (insn = bb->end; ; insn = prev_insn)
2601 prev_insn = PREV_INSN (insn);
2602 if (need_fake_edge_p (insn))
2606 /* The above condition should be enought to verify that there is
2607 no edge to the exit block in CFG already. Calling make_edge in
2608 such case would make us to mark that edge as fake and remove it
2610 #ifdef ENABLE_CHECKING
2611 if (insn == bb->end)
2612 for (e = bb->succ; e; e = e->succ_next)
2613 if (e->dest == EXIT_BLOCK_PTR)
2617 /* Note that the following may create a new basic block
2618 and renumber the existing basic blocks. */
2619 e = split_block (bb, insn);
2623 make_edge (NULL, bb, EXIT_BLOCK_PTR, EDGE_FAKE);
2625 if (insn == bb->head)
2631 verify_flow_info ();
2634 return blocks_split;
2637 /* Find unreachable blocks. An unreachable block will have NULL in
2638 block->aux, a non-NULL value indicates the block is reachable. */
2641 find_unreachable_blocks ()
2645 basic_block *tos, *worklist;
2648 tos = worklist = (basic_block *) xmalloc (sizeof (basic_block) * n);
2650 /* Use basic_block->aux as a marker. Clear them all. */
2652 for (i = 0; i < n; ++i)
2653 BASIC_BLOCK (i)->aux = NULL;
2655 /* Add our starting points to the worklist. Almost always there will
2656 be only one. It isn't inconcievable that we might one day directly
2657 support Fortran alternate entry points. */
2659 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
2663 /* Mark the block with a handy non-null value. */
2667 /* Iterate: find everything reachable from what we've already seen. */
2669 while (tos != worklist)
2671 basic_block b = *--tos;
2673 for (e = b->succ; e; e = e->succ_next)
2684 /* Delete all unreachable basic blocks. */
2686 delete_unreachable_blocks ()
2690 find_unreachable_blocks ();
2692 /* Delete all unreachable basic blocks. Count down so that we
2693 don't interfere with the block renumbering that happens in
2694 flow_delete_block. */
2696 for (i = n_basic_blocks - 1; i >= 0; --i)
2698 basic_block b = BASIC_BLOCK (i);
2701 /* This block was found. Tidy up the mark. */
2704 flow_delete_block (b);
2707 tidy_fallthru_edges ();
2710 /* Return true if NOTE is not one of the ones that must be kept paired,
2711 so that we may simply delete them. */
2714 can_delete_note_p (note)
2717 return (NOTE_LINE_NUMBER (note) == NOTE_INSN_DELETED
2718 || NOTE_LINE_NUMBER (note) == NOTE_INSN_BASIC_BLOCK);
2721 /* Unlink a chain of insns between START and FINISH, leaving notes
2722 that must be paired. */
2725 flow_delete_insn_chain (start, finish)
2728 /* Unchain the insns one by one. It would be quicker to delete all
2729 of these with a single unchaining, rather than one at a time, but
2730 we need to keep the NOTE's. */
2736 next = NEXT_INSN (start);
2737 if (GET_CODE (start) == NOTE && !can_delete_note_p (start))
2739 else if (GET_CODE (start) == CODE_LABEL
2740 && ! can_delete_label_p (start))
2742 const char *name = LABEL_NAME (start);
2743 PUT_CODE (start, NOTE);
2744 NOTE_LINE_NUMBER (start) = NOTE_INSN_DELETED_LABEL;
2745 NOTE_SOURCE_FILE (start) = name;
2748 next = flow_delete_insn (start);
2750 if (start == finish)
2756 /* Delete the insns in a (non-live) block. We physically delete every
2757 non-deleted-note insn, and update the flow graph appropriately.
2759 Return nonzero if we deleted an exception handler. */
2761 /* ??? Preserving all such notes strikes me as wrong. It would be nice
2762 to post-process the stream to remove empty blocks, loops, ranges, etc. */
2765 flow_delete_block (b)
2768 int deleted_handler = 0;
2771 /* If the head of this block is a CODE_LABEL, then it might be the
2772 label for an exception handler which can't be reached.
2774 We need to remove the label from the exception_handler_label list
2775 and remove the associated NOTE_INSN_EH_REGION_BEG and
2776 NOTE_INSN_EH_REGION_END notes. */
2780 never_reached_warning (insn);
2782 if (GET_CODE (insn) == CODE_LABEL)
2783 maybe_remove_eh_handler (insn);
2785 /* Include any jump table following the basic block. */
2787 if (GET_CODE (end) == JUMP_INSN
2788 && (tmp = JUMP_LABEL (end)) != NULL_RTX
2789 && (tmp = NEXT_INSN (tmp)) != NULL_RTX
2790 && GET_CODE (tmp) == JUMP_INSN
2791 && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
2792 || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
2795 /* Include any barrier that may follow the basic block. */
2796 tmp = next_nonnote_insn (end);
2797 if (tmp && GET_CODE (tmp) == BARRIER)
2800 /* Selectively delete the entire chain. */
2801 flow_delete_insn_chain (insn, end);
2803 /* Remove the edges into and out of this block. Note that there may
2804 indeed be edges in, if we are removing an unreachable loop. */
2808 for (e = b->pred; e; e = next)
2810 for (q = &e->src->succ; *q != e; q = &(*q)->succ_next)
2813 next = e->pred_next;
2817 for (e = b->succ; e; e = next)
2819 for (q = &e->dest->pred; *q != e; q = &(*q)->pred_next)
2822 next = e->succ_next;
2831 /* Remove the basic block from the array, and compact behind it. */
2834 return deleted_handler;
2837 /* Remove block B from the basic block array and compact behind it. */
2843 int i, n = n_basic_blocks;
2845 for (i = b->index; i + 1 < n; ++i)
2847 basic_block x = BASIC_BLOCK (i + 1);
2848 BASIC_BLOCK (i) = x;
2852 basic_block_info->num_elements--;
2856 /* Delete INSN by patching it out. Return the next insn. */
2859 flow_delete_insn (insn)
2862 rtx prev = PREV_INSN (insn);
2863 rtx next = NEXT_INSN (insn);
2866 PREV_INSN (insn) = NULL_RTX;
2867 NEXT_INSN (insn) = NULL_RTX;
2868 INSN_DELETED_P (insn) = 1;
2871 NEXT_INSN (prev) = next;
2873 PREV_INSN (next) = prev;
2875 set_last_insn (prev);
2877 if (GET_CODE (insn) == CODE_LABEL)
2878 remove_node_from_expr_list (insn, &nonlocal_goto_handler_labels);
2880 /* If deleting a jump, decrement the use count of the label. Deleting
2881 the label itself should happen in the normal course of block merging. */
2882 if (GET_CODE (insn) == JUMP_INSN
2883 && JUMP_LABEL (insn)
2884 && GET_CODE (JUMP_LABEL (insn)) == CODE_LABEL)
2885 LABEL_NUSES (JUMP_LABEL (insn))--;
2887 /* Also if deleting an insn that references a label. */
2888 else if ((note = find_reg_note (insn, REG_LABEL, NULL_RTX)) != NULL_RTX
2889 && GET_CODE (XEXP (note, 0)) == CODE_LABEL)
2890 LABEL_NUSES (XEXP (note, 0))--;
2892 if (GET_CODE (insn) == JUMP_INSN
2893 && (GET_CODE (PATTERN (insn)) == ADDR_VEC
2894 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC))
2896 rtx pat = PATTERN (insn);
2897 int diff_vec_p = GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC;
2898 int len = XVECLEN (pat, diff_vec_p);
2901 for (i = 0; i < len; i++)
2902 LABEL_NUSES (XEXP (XVECEXP (pat, diff_vec_p, i), 0))--;
2908 /* True if a given label can be deleted. */
2911 can_delete_label_p (label)
2916 if (LABEL_PRESERVE_P (label))
2919 for (x = forced_labels; x; x = XEXP (x, 1))
2920 if (label == XEXP (x, 0))
2922 for (x = label_value_list; x; x = XEXP (x, 1))
2923 if (label == XEXP (x, 0))
2925 for (x = exception_handler_labels; x; x = XEXP (x, 1))
2926 if (label == XEXP (x, 0))
2929 /* User declared labels must be preserved. */
2930 if (LABEL_NAME (label) != 0)
2937 tail_recursion_label_p (label)
2942 for (x = tail_recursion_label_list; x; x = XEXP (x, 1))
2943 if (label == XEXP (x, 0))
2949 /* Blocks A and B are to be merged into a single block A. The insns
2950 are already contiguous, hence `nomove'. */
2953 merge_blocks_nomove (a, b)
2957 rtx b_head, b_end, a_end;
2958 rtx del_first = NULL_RTX, del_last = NULL_RTX;
2961 /* If there was a CODE_LABEL beginning B, delete it. */
2964 if (GET_CODE (b_head) == CODE_LABEL)
2966 /* Detect basic blocks with nothing but a label. This can happen
2967 in particular at the end of a function. */
2968 if (b_head == b_end)
2970 del_first = del_last = b_head;
2971 b_head = NEXT_INSN (b_head);
2974 /* Delete the basic block note. */
2975 if (NOTE_INSN_BASIC_BLOCK_P (b_head))
2977 if (b_head == b_end)
2982 b_head = NEXT_INSN (b_head);
2985 /* If there was a jump out of A, delete it. */
2987 if (GET_CODE (a_end) == JUMP_INSN)
2991 for (prev = PREV_INSN (a_end); ; prev = PREV_INSN (prev))
2992 if (GET_CODE (prev) != NOTE
2993 || NOTE_LINE_NUMBER (prev) == NOTE_INSN_BASIC_BLOCK
3000 /* If this was a conditional jump, we need to also delete
3001 the insn that set cc0. */
3002 if (prev && sets_cc0_p (prev))
3005 prev = prev_nonnote_insn (prev);
3014 else if (GET_CODE (NEXT_INSN (a_end)) == BARRIER)
3015 del_first = NEXT_INSN (a_end);
3017 /* Delete everything marked above as well as crap that might be
3018 hanging out between the two blocks. */
3019 flow_delete_insn_chain (del_first, del_last);
3021 /* Normally there should only be one successor of A and that is B, but
3022 partway though the merge of blocks for conditional_execution we'll
3023 be merging a TEST block with THEN and ELSE successors. Free the
3024 whole lot of them and hope the caller knows what they're doing. */
3026 remove_edge (a->succ);
3028 /* Adjust the edges out of B for the new owner. */
3029 for (e = b->succ; e; e = e->succ_next)
3033 /* B hasn't quite yet ceased to exist. Attempt to prevent mishap. */
3034 b->pred = b->succ = NULL;
3036 /* Reassociate the insns of B with A. */
3039 if (basic_block_for_insn)
3041 BLOCK_FOR_INSN (b_head) = a;
3042 while (b_head != b_end)
3044 b_head = NEXT_INSN (b_head);
3045 BLOCK_FOR_INSN (b_head) = a;
3055 /* Blocks A and B are to be merged into a single block. A has no incoming
3056 fallthru edge, so it can be moved before B without adding or modifying
3057 any jumps (aside from the jump from A to B). */
3060 merge_blocks_move_predecessor_nojumps (a, b)
3063 rtx start, end, barrier;
3069 barrier = next_nonnote_insn (end);
3070 if (GET_CODE (barrier) != BARRIER)
3072 flow_delete_insn (barrier);
3074 /* Move block and loop notes out of the chain so that we do not
3075 disturb their order.
3077 ??? A better solution would be to squeeze out all the non-nested notes
3078 and adjust the block trees appropriately. Even better would be to have
3079 a tighter connection between block trees and rtl so that this is not
3081 start = squeeze_notes (start, end);
3083 /* Scramble the insn chain. */
3084 if (end != PREV_INSN (b->head))
3085 reorder_insns (start, end, PREV_INSN (b->head));
3089 fprintf (rtl_dump_file, "Moved block %d before %d and merged.\n",
3090 a->index, b->index);
3093 /* Swap the records for the two blocks around. Although we are deleting B,
3094 A is now where B was and we want to compact the BB array from where
3096 BASIC_BLOCK (a->index) = b;
3097 BASIC_BLOCK (b->index) = a;
3099 a->index = b->index;
3102 /* Now blocks A and B are contiguous. Merge them. */
3103 merge_blocks_nomove (a, b);
3108 /* Blocks A and B are to be merged into a single block. B has no outgoing
3109 fallthru edge, so it can be moved after A without adding or modifying
3110 any jumps (aside from the jump from A to B). */
3113 merge_blocks_move_successor_nojumps (a, b)
3116 rtx start, end, barrier;
3120 barrier = NEXT_INSN (end);
3122 /* Recognize a jump table following block B. */
3124 && GET_CODE (barrier) == CODE_LABEL
3125 && NEXT_INSN (barrier)
3126 && GET_CODE (NEXT_INSN (barrier)) == JUMP_INSN
3127 && (GET_CODE (PATTERN (NEXT_INSN (barrier))) == ADDR_VEC
3128 || GET_CODE (PATTERN (NEXT_INSN (barrier))) == ADDR_DIFF_VEC))
3130 end = NEXT_INSN (barrier);
3131 barrier = NEXT_INSN (end);
3134 /* There had better have been a barrier there. Delete it. */
3135 if (barrier && GET_CODE (barrier) == BARRIER)
3136 flow_delete_insn (barrier);
3138 /* Move block and loop notes out of the chain so that we do not
3139 disturb their order.
3141 ??? A better solution would be to squeeze out all the non-nested notes
3142 and adjust the block trees appropriately. Even better would be to have
3143 a tighter connection between block trees and rtl so that this is not
3145 start = squeeze_notes (start, end);
3147 /* Scramble the insn chain. */
3148 reorder_insns (start, end, a->end);
3150 /* Now blocks A and B are contiguous. Merge them. */
3151 merge_blocks_nomove (a, b);
3155 fprintf (rtl_dump_file, "Moved block %d after %d and merged.\n",
3156 b->index, a->index);
3162 /* Attempt to merge basic blocks that are potentially non-adjacent.
3163 Return true iff the attempt succeeded. */
3166 merge_blocks (e, b, c, mode)
3171 /* If C has a tail recursion label, do not merge. There is no
3172 edge recorded from the call_placeholder back to this label, as
3173 that would make optimize_sibling_and_tail_recursive_calls more
3174 complex for no gain. */
3175 if (GET_CODE (c->head) == CODE_LABEL
3176 && tail_recursion_label_p (c->head))
3179 /* If B has a fallthru edge to C, no need to move anything. */
3180 if (e->flags & EDGE_FALLTHRU)
3182 merge_blocks_nomove (b, c);
3186 fprintf (rtl_dump_file, "Merged %d and %d without moving.\n",
3187 b->index, c->index);
3192 /* Otherwise we will need to move code around. Do that only if expensive
3193 transformations are allowed. */
3194 else if (mode & CLEANUP_EXPENSIVE)
3196 edge tmp_edge, c_fallthru_edge;
3197 int c_has_outgoing_fallthru;
3198 int b_has_incoming_fallthru;
3200 /* Avoid overactive code motion, as the forwarder blocks should be
3201 eliminated by edge redirection instead. One exception might have
3202 been if B is a forwarder block and C has no fallthru edge, but
3203 that should be cleaned up by bb-reorder instead. */
3204 if (forwarder_block_p (b) || forwarder_block_p (c))
3207 /* We must make sure to not munge nesting of lexical blocks,
3208 and loop notes. This is done by squeezing out all the notes
3209 and leaving them there to lie. Not ideal, but functional. */
3211 for (tmp_edge = c->succ; tmp_edge; tmp_edge = tmp_edge->succ_next)
3212 if (tmp_edge->flags & EDGE_FALLTHRU)
3214 c_has_outgoing_fallthru = (tmp_edge != NULL);
3215 c_fallthru_edge = tmp_edge;
3217 for (tmp_edge = b->pred; tmp_edge; tmp_edge = tmp_edge->pred_next)
3218 if (tmp_edge->flags & EDGE_FALLTHRU)
3220 b_has_incoming_fallthru = (tmp_edge != NULL);
3222 /* If B does not have an incoming fallthru, then it can be moved
3223 immediately before C without introducing or modifying jumps.
3224 C cannot be the first block, so we do not have to worry about
3225 accessing a non-existent block. */
3226 if (! b_has_incoming_fallthru)
3227 return merge_blocks_move_predecessor_nojumps (b, c);
3229 /* Otherwise, we're going to try to move C after B. If C does
3230 not have an outgoing fallthru, then it can be moved
3231 immediately after B without introducing or modifying jumps. */
3232 if (! c_has_outgoing_fallthru)
3233 return merge_blocks_move_successor_nojumps (b, c);
3235 /* Otherwise, we'll need to insert an extra jump, and possibly
3236 a new block to contain it. We can't redirect to EXIT_BLOCK_PTR,
3237 as we don't have explicit return instructions before epilogues
3238 are generated, so give up on that case. */
3240 if (c_fallthru_edge->dest != EXIT_BLOCK_PTR
3241 && merge_blocks_move_successor_nojumps (b, c))
3243 basic_block target = c_fallthru_edge->dest;
3247 /* This is a dirty hack to avoid code duplication.
3249 Set edge to point to wrong basic block, so
3250 redirect_edge_and_branch_force will do the trick
3251 and rewire edge back to the original location. */
3252 redirect_edge_succ (c_fallthru_edge, ENTRY_BLOCK_PTR);
3253 new = redirect_edge_and_branch_force (c_fallthru_edge, target);
3255 /* We've just created barrier, but another barrier is
3256 already present in the stream. Avoid the duplicate. */
3257 barrier = next_nonnote_insn (new ? new->end : b->end);
3258 if (GET_CODE (barrier) != BARRIER)
3260 flow_delete_insn (barrier);
3270 /* Simplify a conditional jump around an unconditional jump.
3271 Return true if something changed. */
3274 try_simplify_condjump (cbranch_block)
3275 basic_block cbranch_block;
3277 basic_block jump_block, jump_dest_block, cbranch_dest_block;
3278 edge cbranch_jump_edge, cbranch_fallthru_edge;
3281 /* Verify that there are exactly two successors. */
3282 if (!cbranch_block->succ
3283 || !cbranch_block->succ->succ_next
3284 || cbranch_block->succ->succ_next->succ_next)
3287 /* Verify that we've got a normal conditional branch at the end
3289 cbranch_insn = cbranch_block->end;
3290 if (!any_condjump_p (cbranch_insn))
3293 cbranch_fallthru_edge = FALLTHRU_EDGE (cbranch_block);
3294 cbranch_jump_edge = BRANCH_EDGE (cbranch_block);
3296 /* The next block must not have multiple predecessors, must not
3297 be the last block in the function, and must contain just the
3298 unconditional jump. */
3299 jump_block = cbranch_fallthru_edge->dest;
3300 if (jump_block->pred->pred_next
3301 || jump_block->index == n_basic_blocks - 1
3302 || !forwarder_block_p (jump_block))
3304 jump_dest_block = jump_block->succ->dest;
3306 /* The conditional branch must target the block after the
3307 unconditional branch. */
3308 cbranch_dest_block = cbranch_jump_edge->dest;
3310 if (!can_fallthru (jump_block, cbranch_dest_block))
3313 /* Invert the conditional branch. Prevent jump.c from deleting
3314 "unreachable" instructions. */
3315 LABEL_NUSES (JUMP_LABEL (cbranch_insn))++;
3316 if (!invert_jump (cbranch_insn, block_label (jump_dest_block), 1))
3318 LABEL_NUSES (JUMP_LABEL (cbranch_insn))--;
3323 fprintf (rtl_dump_file, "Simplifying condjump %i around jump %i\n",
3324 INSN_UID (cbranch_insn), INSN_UID (jump_block->end));
3326 /* Success. Update the CFG to match. Note that after this point
3327 the edge variable names appear backwards; the redirection is done
3328 this way to preserve edge profile data. */
3329 redirect_edge_succ_nodup (cbranch_jump_edge, cbranch_dest_block);
3330 redirect_edge_succ_nodup (cbranch_fallthru_edge, jump_dest_block);
3331 cbranch_jump_edge->flags |= EDGE_FALLTHRU;
3332 cbranch_fallthru_edge->flags &= ~EDGE_FALLTHRU;
3334 /* Delete the block with the unconditional jump, and clean up the mess. */
3335 flow_delete_block (jump_block);
3336 tidy_fallthru_edge (cbranch_jump_edge, cbranch_block, cbranch_dest_block);
3341 /* Attempt to forward edges leaving basic block B.
3342 Return true if sucessful. */
3345 try_forward_edges (mode, b)
3349 bool changed = false;
3352 for (e = b->succ; e ; e = next)
3354 basic_block target, first;
3357 next = e->succ_next;
3359 /* Skip complex edges because we don't know how to update them.
3361 Still handle fallthru edges, as we can suceed to forward fallthru
3362 edge to the same place as the branch edge of conditional branch
3363 and turn conditional branch to an unconditonal branch. */
3364 if (e->flags & EDGE_COMPLEX)
3367 target = first = e->dest;
3370 /* Look for the real destination of the jump.
3371 Avoid inifinite loop in the infinite empty loop by counting
3372 up to n_basic_blocks. */
3373 while (forwarder_block_p (target)
3374 && target->succ->dest != EXIT_BLOCK_PTR
3375 && counter < n_basic_blocks)
3377 /* Bypass trivial infinite loops. */
3378 if (target == target->succ->dest)
3379 counter = n_basic_blocks;
3381 /* Avoid killing of loop pre-headers, as it is the place loop
3382 optimizer wants to hoist code to.
3384 For fallthru forwarders, the LOOP_BEG note must appear between
3385 the header of block and CODE_LABEL of the loop, for non forwarders
3386 it must appear before the JUMP_INSN. */
3387 if (mode & CLEANUP_PRE_LOOP)
3389 rtx insn = (target->succ->flags & EDGE_FALLTHRU
3390 ? target->head : prev_nonnote_insn (target->end));
3392 if (GET_CODE (insn) != NOTE)
3393 insn = NEXT_INSN (insn);
3395 for (;insn && GET_CODE (insn) != CODE_LABEL && !INSN_P (insn);
3396 insn = NEXT_INSN (insn))
3397 if (GET_CODE (insn) == NOTE
3398 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG)
3401 if (GET_CODE (insn) == NOTE)
3404 target = target->succ->dest, counter++;
3407 if (counter >= n_basic_blocks)
3410 fprintf (rtl_dump_file, "Infinite loop in BB %i.\n",
3413 else if (target == first)
3414 ; /* We didn't do anything. */
3417 /* Save the values now, as the edge may get removed. */
3418 gcov_type edge_count = e->count;
3419 int edge_probability = e->probability;
3421 if (redirect_edge_and_branch (e, target))
3423 /* We successfully forwarded the edge. Now update profile
3424 data: for each edge we traversed in the chain, remove
3425 the original edge's execution count. */
3426 int edge_frequency = ((edge_probability * b->frequency
3427 + REG_BR_PROB_BASE / 2)
3428 / REG_BR_PROB_BASE);
3432 first->count -= edge_count;
3433 first->succ->count -= edge_count;
3434 first->frequency -= edge_frequency;
3435 first = first->succ->dest;
3437 while (first != target);
3444 fprintf (rtl_dump_file, "Forwarding edge %i->%i to %i failed.\n",
3445 b->index, e->dest->index, target->index);
3453 /* Look through the insns at the end of BB1 and BB2 and find the longest
3454 sequence that are equivalent. Store the first insns for that sequence
3455 in *F1 and *F2 and return the sequence length.
3457 To simplify callers of this function, if the blocks match exactly,
3458 store the head of the blocks in *F1 and *F2. */
3461 flow_find_cross_jump (mode, bb1, bb2, f1, f2)
3462 int mode ATTRIBUTE_UNUSED;
3463 basic_block bb1, bb2;
3466 rtx i1, i2, p1, p2, last1, last2, afterlast1, afterlast2;
3469 /* Skip simple jumps at the end of the blocks. Complex jumps still
3470 need to be compared for equivalence, which we'll do below. */
3473 if (onlyjump_p (i1))
3474 i1 = PREV_INSN (i1);
3476 if (onlyjump_p (i2))
3477 i2 = PREV_INSN (i2);
3479 last1 = afterlast1 = last2 = afterlast2 = NULL_RTX;
3483 while ((GET_CODE (i1) == NOTE && i1 != bb1->head))
3484 i1 = PREV_INSN (i1);
3485 while ((GET_CODE (i2) == NOTE && i2 != bb2->head))
3486 i2 = PREV_INSN (i2);
3488 if (i1 == bb1->head || i2 == bb2->head)
3491 /* Verify that I1 and I2 are equivalent. */
3493 if (GET_CODE (i1) != GET_CODE (i2))
3499 /* If this is a CALL_INSN, compare register usage information.
3500 If we don't check this on stack register machines, the two
3501 CALL_INSNs might be merged leaving reg-stack.c with mismatching
3502 numbers of stack registers in the same basic block.
3503 If we don't check this on machines with delay slots, a delay slot may
3504 be filled that clobbers a parameter expected by the subroutine.
3506 ??? We take the simple route for now and assume that if they're
3507 equal, they were constructed identically. */
3509 if (GET_CODE (i1) == CALL_INSN
3510 && ! rtx_equal_p (CALL_INSN_FUNCTION_USAGE (i1),
3511 CALL_INSN_FUNCTION_USAGE (i2)))
3515 /* If cross_jump_death_matters is not 0, the insn's mode
3516 indicates whether or not the insn contains any stack-like
3519 if ((mode & CLEANUP_POST_REGSTACK) && stack_regs_mentioned (i1))
3521 /* If register stack conversion has already been done, then
3522 death notes must also be compared before it is certain that
3523 the two instruction streams match. */
3526 HARD_REG_SET i1_regset, i2_regset;
3528 CLEAR_HARD_REG_SET (i1_regset);
3529 CLEAR_HARD_REG_SET (i2_regset);
3531 for (note = REG_NOTES (i1); note; note = XEXP (note, 1))
3532 if (REG_NOTE_KIND (note) == REG_DEAD
3533 && STACK_REG_P (XEXP (note, 0)))
3534 SET_HARD_REG_BIT (i1_regset, REGNO (XEXP (note, 0)));
3536 for (note = REG_NOTES (i2); note; note = XEXP (note, 1))
3537 if (REG_NOTE_KIND (note) == REG_DEAD
3538 && STACK_REG_P (XEXP (note, 0)))
3539 SET_HARD_REG_BIT (i2_regset, REGNO (XEXP (note, 0)));
3541 GO_IF_HARD_REG_EQUAL (i1_regset, i2_regset, done);
3550 if (GET_CODE (p1) != GET_CODE (p2))
3553 if (! rtx_renumbered_equal_p (p1, p2))
3555 /* The following code helps take care of G++ cleanups. */
3556 rtx equiv1 = find_reg_equal_equiv_note (i1);
3557 rtx equiv2 = find_reg_equal_equiv_note (i2);
3559 if (equiv1 && equiv2
3560 /* If the equivalences are not to a constant, they may
3561 reference pseudos that no longer exist, so we can't
3563 && CONSTANT_P (XEXP (equiv1, 0))
3564 && rtx_equal_p (XEXP (equiv1, 0), XEXP (equiv2, 0)))
3566 rtx s1 = single_set (i1);
3567 rtx s2 = single_set (i2);
3568 if (s1 != 0 && s2 != 0
3569 && rtx_renumbered_equal_p (SET_DEST (s1), SET_DEST (s2)))
3571 validate_change (i1, &SET_SRC (s1), XEXP (equiv1, 0), 1);
3572 validate_change (i2, &SET_SRC (s2), XEXP (equiv2, 0), 1);
3573 if (! rtx_renumbered_equal_p (p1, p2))
3575 else if (apply_change_group ())
3583 /* Don't begin a cross-jump with a USE or CLOBBER insn. */
3584 if (GET_CODE (p1) != USE && GET_CODE (p1) != CLOBBER)
3586 afterlast1 = last1, afterlast2 = last2;
3587 last1 = i1, last2 = i2;
3590 i1 = PREV_INSN (i1);
3591 i2 = PREV_INSN (i2);
3597 /* Don't allow the insn after a compare to be shared by
3598 cross-jumping unless the compare is also shared. */
3599 if (reg_mentioned_p (cc0_rtx, last1) && ! sets_cc0_p (last1))
3600 last1 = afterlast1, last2 = afterlast2, ninsns--;
3604 /* Include preceeding notes and labels in the cross-jump. One,
3605 this may bring us to the head of the blocks as requested above.
3606 Two, it keeps line number notes as matched as may be. */
3609 while (last1 != bb1->head && GET_CODE (PREV_INSN (last1)) == NOTE)
3610 last1 = PREV_INSN (last1);
3611 if (last1 != bb1->head && GET_CODE (PREV_INSN (last1)) == CODE_LABEL)
3612 last1 = PREV_INSN (last1);
3613 while (last2 != bb2->head && GET_CODE (PREV_INSN (last2)) == NOTE)
3614 last2 = PREV_INSN (last2);
3615 if (last2 != bb2->head && GET_CODE (PREV_INSN (last2)) == CODE_LABEL)
3616 last2 = PREV_INSN (last2);
3625 /* Return true iff outgoing edges of BB1 and BB2 match, together with
3626 the branch instruction. This means that if we commonize the control
3627 flow before end of the basic block, the semantic remains unchanged.
3629 We may assume that there exists one edge with a common destination. */
3632 outgoing_edges_match (bb1, bb2)
3636 /* If BB1 has only one successor, we must be looking at an unconditional
3637 jump. Which, by the assumption above, means that we only need to check
3638 that BB2 has one successor. */
3639 if (bb1->succ && !bb1->succ->succ_next)
3640 return (bb2->succ && !bb2->succ->succ_next);
3642 /* Match conditional jumps - this may get tricky when fallthru and branch
3643 edges are crossed. */
3645 && bb1->succ->succ_next
3646 && !bb1->succ->succ_next->succ_next
3647 && any_condjump_p (bb1->end))
3649 edge b1, f1, b2, f2;
3650 bool reverse, match;
3651 rtx set1, set2, cond1, cond2;
3652 enum rtx_code code1, code2;
3655 || !bb2->succ->succ_next
3656 || bb1->succ->succ_next->succ_next
3657 || !any_condjump_p (bb2->end))
3660 b1 = BRANCH_EDGE (bb1);
3661 b2 = BRANCH_EDGE (bb2);
3662 f1 = FALLTHRU_EDGE (bb1);
3663 f2 = FALLTHRU_EDGE (bb2);
3665 /* Get around possible forwarders on fallthru edges. Other cases
3666 should be optimized out already. */
3667 if (forwarder_block_p (f1->dest))
3668 f1 = f1->dest->succ;
3669 if (forwarder_block_p (f2->dest))
3670 f2 = f2->dest->succ;
3672 /* To simplify use of this function, return false if there are
3673 unneeded forwarder blocks. These will get eliminated later
3674 during cleanup_cfg. */
3675 if (forwarder_block_p (f1->dest)
3676 || forwarder_block_p (f2->dest)
3677 || forwarder_block_p (b1->dest)
3678 || forwarder_block_p (b2->dest))
3681 if (f1->dest == f2->dest && b1->dest == b2->dest)
3683 else if (f1->dest == b2->dest && b1->dest == f2->dest)
3688 set1 = pc_set (bb1->end);
3689 set2 = pc_set (bb2->end);
3690 if ((XEXP (SET_SRC (set1), 1) == pc_rtx)
3691 != (XEXP (SET_SRC (set2), 1) == pc_rtx))
3694 cond1 = XEXP (SET_SRC (set1), 0);
3695 cond2 = XEXP (SET_SRC (set2), 0);
3696 code1 = GET_CODE (cond1);
3698 code2 = reversed_comparison_code (cond2, bb2->end);
3700 code2 = GET_CODE (cond2);
3701 if (code2 == UNKNOWN)
3704 /* Verify codes and operands match. */
3705 match = ((code1 == code2
3706 && rtx_renumbered_equal_p (XEXP (cond1, 0), XEXP (cond2, 0))
3707 && rtx_renumbered_equal_p (XEXP (cond1, 1), XEXP (cond2, 1)))
3708 || (code1 == swap_condition (code2)
3709 && rtx_renumbered_equal_p (XEXP (cond1, 1),
3711 && rtx_renumbered_equal_p (XEXP (cond1, 0),
3714 /* If we return true, we will join the blocks. Which means that
3715 we will only have one branch prediction bit to work with. Thus
3716 we require the existing branches to have probabilities that are
3718 /* ??? We should use bb->frequency to allow merging in infrequently
3719 executed blocks, but at the moment it is not available when
3720 cleanup_cfg is run. */
3721 if (match && !optimize_size)
3725 note1 = find_reg_note (bb1->end, REG_BR_PROB, 0);
3726 note2 = find_reg_note (bb2->end, REG_BR_PROB, 0);
3730 prob1 = INTVAL (XEXP (note1, 0));
3731 prob2 = INTVAL (XEXP (note2, 0));
3733 prob2 = REG_BR_PROB_BASE - prob2;
3735 /* Fail if the difference in probabilities is
3737 if (abs (prob1 - prob2) > REG_BR_PROB_BASE / 20)
3740 else if (note1 || note2)
3744 if (rtl_dump_file && match)
3745 fprintf (rtl_dump_file, "Conditionals in bb %i and %i match.\n",
3746 bb1->index, bb2->index);
3751 /* ??? We can handle computed jumps too. This may be important for
3752 inlined functions containing switch statements. Also jumps w/o
3753 fallthru edges can be handled by simply matching whole insn. */
3757 /* E1 and E2 are edges with the same destination block. Search their
3758 predecessors for common code. If found, redirect control flow from
3759 (maybe the middle of) E1->SRC to (maybe the middle of) E2->SRC. */
3762 try_crossjump_to_edge (mode, e1, e2)
3767 basic_block src1 = e1->src, src2 = e2->src;
3768 basic_block redirect_to;
3769 rtx newpos1, newpos2;
3775 /* Search backward through forwarder blocks. We don't need to worry
3776 about multiple entry or chained forwarders, as they will be optimized
3777 away. We do this to look past the unconditional jump following a
3778 conditional jump that is required due to the current CFG shape. */
3780 && !src1->pred->pred_next
3781 && forwarder_block_p (src1))
3787 && !src2->pred->pred_next
3788 && forwarder_block_p (src2))
3794 /* Nothing to do if we reach ENTRY, or a common source block. */
3795 if (src1 == ENTRY_BLOCK_PTR || src2 == ENTRY_BLOCK_PTR)
3800 /* Seeing more than 1 forwarder blocks would confuse us later... */
3801 if (forwarder_block_p (e1->dest)
3802 && forwarder_block_p (e1->dest->succ->dest))
3804 if (forwarder_block_p (e2->dest)
3805 && forwarder_block_p (e2->dest->succ->dest))
3808 /* Likewise with dead code (possibly newly created by the other optimizations
3810 if (!src1->pred || !src2->pred)
3813 /* Likewise with complex edges.
3814 ??? We should be able to handle most complex edges later with some
3816 if (e1->flags & EDGE_COMPLEX)
3819 /* Look for the common insn sequence, part the first ... */
3820 if (!outgoing_edges_match (src1, src2))
3823 /* ... and part the second. */
3824 nmatch = flow_find_cross_jump (mode, src1, src2, &newpos1, &newpos2);
3828 /* Avoid splitting if possible. */
3829 if (newpos2 == src2->head)
3834 fprintf (rtl_dump_file, "Splitting bb %i before %i insns\n",
3835 src2->index, nmatch);
3836 redirect_to = split_block (src2, PREV_INSN (newpos2))->dest;
3840 fprintf (rtl_dump_file,
3841 "Cross jumping from bb %i to bb %i; %i common insns\n",
3842 src1->index, src2->index, nmatch);
3844 redirect_to->count += src1->count;
3845 redirect_to->frequency += src1->frequency;
3847 /* Recompute the frequencies and counts of outgoing edges. */
3848 for (s = redirect_to->succ; s; s = s->succ_next)
3851 basic_block d = s->dest;
3853 if (forwarder_block_p (d))
3855 for (s2 = src1->succ; ; s2 = s2->succ_next)
3857 basic_block d2 = s2->dest;
3858 if (forwarder_block_p (d2))
3859 d2 = d2->succ->dest;
3863 s->count += s2->count;
3865 /* Take care to update possible forwarder blocks. We verified
3866 that there is no more than one in the chain, so we can't run
3867 into infinite loop. */
3868 if (forwarder_block_p (s->dest))
3870 s->dest->succ->count += s2->count;
3871 s->dest->count += s2->count;
3872 s->dest->frequency += EDGE_FREQUENCY (s);
3874 if (forwarder_block_p (s2->dest))
3876 s2->dest->succ->count -= s2->count;
3877 s2->dest->count -= s2->count;
3878 s2->dest->frequency -= EDGE_FREQUENCY (s);
3880 if (!redirect_to->frequency && !src1->frequency)
3881 s->probability = (s->probability + s2->probability) / 2;
3884 ((s->probability * redirect_to->frequency +
3885 s2->probability * src1->frequency)
3886 / (redirect_to->frequency + src1->frequency));
3889 note = find_reg_note (redirect_to->end, REG_BR_PROB, 0);
3891 XEXP (note, 0) = GEN_INT (BRANCH_EDGE (redirect_to)->probability);
3893 /* Edit SRC1 to go to REDIRECT_TO at NEWPOS1. */
3895 /* Skip possible basic block header. */
3896 if (GET_CODE (newpos1) == CODE_LABEL)
3897 newpos1 = NEXT_INSN (newpos1);
3898 if (GET_CODE (newpos1) == NOTE)
3899 newpos1 = NEXT_INSN (newpos1);
3902 /* Emit the jump insn. */
3903 label = block_label (redirect_to);
3904 src1->end = emit_jump_insn_before (gen_jump (label), newpos1);
3905 JUMP_LABEL (src1->end) = label;
3906 LABEL_NUSES (label)++;
3907 if (basic_block_for_insn)
3908 set_block_for_new_insns (src1->end, src1);
3910 /* Delete the now unreachable instructions. */
3911 flow_delete_insn_chain (newpos1, last);
3913 /* Make sure there is a barrier after the new jump. */
3914 last = next_nonnote_insn (src1->end);
3915 if (!last || GET_CODE (last) != BARRIER)
3916 emit_barrier_after (src1->end);
3920 remove_edge (src1->succ);
3921 make_edge (NULL, src1, redirect_to, 0);
3922 src1->succ->probability = REG_BR_PROB_BASE;
3923 src1->succ->count = src1->count;
3928 /* Search the predecessors of BB for common insn sequences. When found,
3929 share code between them by redirecting control flow. Return true if
3930 any changes made. */
3933 try_crossjump_bb (mode, bb)
3937 edge e, e2, nexte2, nexte, fallthru;
3940 /* Nothing to do if there is not at least two incomming edges. */
3941 if (!bb->pred || !bb->pred->pred_next)
3944 /* It is always cheapest to redirect a block that ends in a branch to
3945 a block that falls through into BB, as that adds no branches to the
3946 program. We'll try that combination first. */
3947 for (fallthru = bb->pred; fallthru; fallthru = fallthru->pred_next)
3948 if (fallthru->flags & EDGE_FALLTHRU)
3952 for (e = bb->pred; e; e = nexte)
3954 nexte = e->pred_next;
3956 /* Elide complex edges now, as neither try_crossjump_to_edge
3957 nor outgoing_edges_match can handle them. */
3958 if (e->flags & EDGE_COMPLEX)
3961 /* As noted above, first try with the fallthru predecessor. */
3964 /* Don't combine the fallthru edge into anything else.
3965 If there is a match, we'll do it the other way around. */
3969 if (try_crossjump_to_edge (mode, e, fallthru))
3977 /* Non-obvious work limiting check: Recognize that we're going
3978 to call try_crossjump_bb on every basic block. So if we have
3979 two blocks with lots of outgoing edges (a switch) and they
3980 share lots of common destinations, then we would do the
3981 cross-jump check once for each common destination.
3983 Now, if the blocks actually are cross-jump candidates, then
3984 all of their destinations will be shared. Which means that
3985 we only need check them for cross-jump candidacy once. We
3986 can eliminate redundant checks of crossjump(A,B) by arbitrarily
3987 choosing to do the check from the block for which the edge
3988 in question is the first successor of A. */
3989 if (e->src->succ != e)
3992 for (e2 = bb->pred; e2; e2 = nexte2)
3994 nexte2 = e2->pred_next;
3999 /* We've already checked the fallthru edge above. */
4003 /* Again, neither try_crossjump_to_edge nor outgoing_edges_match
4004 can handle complex edges. */
4005 if (e2->flags & EDGE_COMPLEX)
4008 /* The "first successor" check above only prevents multiple
4009 checks of crossjump(A,B). In order to prevent redundant
4010 checks of crossjump(B,A), require that A be the block
4011 with the lowest index. */
4012 if (e->src->index > e2->src->index)
4015 if (try_crossjump_to_edge (mode, e, e2))
4027 /* Do simple CFG optimizations - basic block merging, simplifying of jump
4028 instructions etc. Return nonzero if changes were made. */
4031 try_optimize_cfg (mode)
4035 bool changed_overall = false;
4039 /* Attempt to merge blocks as made possible by edge removal. If a block
4040 has only one successor, and the successor has only one predecessor,
4041 they may be combined. */
4049 fprintf (rtl_dump_file, "\n\ntry_optimize_cfg iteration %i\n\n",
4052 for (i = 0; i < n_basic_blocks;)
4054 basic_block c, b = BASIC_BLOCK (i);
4056 bool changed_here = false;
4058 /* Delete trivially dead basic blocks. */
4059 while (b->pred == NULL)
4061 c = BASIC_BLOCK (b->index - 1);
4063 fprintf (rtl_dump_file, "Deleting block %i.\n", b->index);
4064 flow_delete_block (b);
4069 /* Remove code labels no longer used. Don't do this before
4070 CALL_PLACEHOLDER is removed, as some branches may be hidden
4072 if (b->pred->pred_next == NULL
4073 && (b->pred->flags & EDGE_FALLTHRU)
4074 && !(b->pred->flags & EDGE_COMPLEX)
4075 && GET_CODE (b->head) == CODE_LABEL
4076 && (!(mode & CLEANUP_PRE_SIBCALL)
4077 || !tail_recursion_label_p (b->head))
4078 /* If previous block ends with condjump jumping to next BB,
4079 we can't delete the label. */
4080 && (b->pred->src == ENTRY_BLOCK_PTR
4081 || !reg_mentioned_p (b->head, b->pred->src->end)))
4083 rtx label = b->head;
4084 b->head = NEXT_INSN (b->head);
4085 flow_delete_insn_chain (label, label);
4087 fprintf (rtl_dump_file, "Deleted label in block %i.\n",
4091 /* If we fall through an empty block, we can remove it. */
4092 if (b->pred->pred_next == NULL
4093 && (b->pred->flags & EDGE_FALLTHRU)
4094 && GET_CODE (b->head) != CODE_LABEL
4095 && forwarder_block_p (b)
4096 /* Note that forwarder_block_p true ensures that there
4097 is a successor for this block. */
4098 && (b->succ->flags & EDGE_FALLTHRU)
4099 && n_basic_blocks > 1)
4102 fprintf (rtl_dump_file, "Deleting fallthru block %i.\n",
4104 c = BASIC_BLOCK (b->index ? b->index - 1 : 1);
4105 redirect_edge_succ_nodup (b->pred, b->succ->dest);
4106 flow_delete_block (b);
4111 /* Merge blocks. Loop because chains of blocks might be
4113 while ((s = b->succ) != NULL
4114 && s->succ_next == NULL
4115 && !(s->flags & EDGE_COMPLEX)
4116 && (c = s->dest) != EXIT_BLOCK_PTR
4117 && c->pred->pred_next == NULL
4118 /* If the jump insn has side effects,
4119 we can't kill the edge. */
4120 && (GET_CODE (b->end) != JUMP_INSN
4121 || onlyjump_p (b->end))
4122 && merge_blocks (s, b, c, mode))
4123 changed_here = true;
4125 /* Simplify branch over branch. */
4126 if ((mode & CLEANUP_EXPENSIVE) && try_simplify_condjump (b))
4127 changed_here = true;
4129 /* If B has a single outgoing edge, but uses a non-trivial jump
4130 instruction without side-effects, we can either delete the
4131 jump entirely, or replace it with a simple unconditional jump.
4132 Use redirect_edge_and_branch to do the dirty work. */
4134 && ! b->succ->succ_next
4135 && b->succ->dest != EXIT_BLOCK_PTR
4136 && onlyjump_p (b->end)
4137 && redirect_edge_and_branch (b->succ, b->succ->dest))
4138 changed_here = true;
4140 /* Simplify branch to branch. */
4141 if (try_forward_edges (mode, b))
4142 changed_here = true;
4144 /* Look for shared code between blocks. */
4145 if ((mode & CLEANUP_CROSSJUMP)
4146 && try_crossjump_bb (mode, b))
4147 changed_here = true;
4149 /* Don't get confused by the index shift caused by deleting
4157 if ((mode & CLEANUP_CROSSJUMP)
4158 && try_crossjump_bb (mode, EXIT_BLOCK_PTR))
4161 #ifdef ENABLE_CHECKING
4163 verify_flow_info ();
4166 changed_overall |= changed;
4169 return changed_overall;
4172 /* The given edge should potentially be a fallthru edge. If that is in
4173 fact true, delete the jump and barriers that are in the way. */
4176 tidy_fallthru_edge (e, b, c)
4182 /* ??? In a late-running flow pass, other folks may have deleted basic
4183 blocks by nopping out blocks, leaving multiple BARRIERs between here
4184 and the target label. They ought to be chastized and fixed.
4186 We can also wind up with a sequence of undeletable labels between
4187 one block and the next.
4189 So search through a sequence of barriers, labels, and notes for
4190 the head of block C and assert that we really do fall through. */
4192 if (next_real_insn (b->end) != next_real_insn (PREV_INSN (c->head)))
4195 /* Remove what will soon cease being the jump insn from the source block.
4196 If block B consisted only of this single jump, turn it into a deleted
4199 if (GET_CODE (q) == JUMP_INSN
4201 && (any_uncondjump_p (q)
4202 || (b->succ == e && e->succ_next == NULL)))
4205 /* If this was a conditional jump, we need to also delete
4206 the insn that set cc0. */
4207 if (any_condjump_p (q) && sets_cc0_p (PREV_INSN (q)))
4214 NOTE_LINE_NUMBER (q) = NOTE_INSN_DELETED;
4215 NOTE_SOURCE_FILE (q) = 0;
4221 /* We don't want a block to end on a line-number note since that has
4222 the potential of changing the code between -g and not -g. */
4223 while (GET_CODE (q) == NOTE && NOTE_LINE_NUMBER (q) >= 0)
4230 /* Selectively unlink the sequence. */
4231 if (q != PREV_INSN (c->head))
4232 flow_delete_insn_chain (NEXT_INSN (q), PREV_INSN (c->head));
4234 e->flags |= EDGE_FALLTHRU;
4237 /* Fix up edges that now fall through, or rather should now fall through
4238 but previously required a jump around now deleted blocks. Simplify
4239 the search by only examining blocks numerically adjacent, since this
4240 is how find_basic_blocks created them. */
4243 tidy_fallthru_edges ()
4247 for (i = 1; i < n_basic_blocks; ++i)
4249 basic_block b = BASIC_BLOCK (i - 1);
4250 basic_block c = BASIC_BLOCK (i);
4253 /* We care about simple conditional or unconditional jumps with
4256 If we had a conditional branch to the next instruction when
4257 find_basic_blocks was called, then there will only be one
4258 out edge for the block which ended with the conditional
4259 branch (since we do not create duplicate edges).
4261 Furthermore, the edge will be marked as a fallthru because we
4262 merge the flags for the duplicate edges. So we do not want to
4263 check that the edge is not a FALLTHRU edge. */
4264 if ((s = b->succ) != NULL
4265 && ! (s->flags & EDGE_COMPLEX)
4266 && s->succ_next == NULL
4268 /* If the jump insn has side effects, we can't tidy the edge. */
4269 && (GET_CODE (b->end) != JUMP_INSN
4270 || onlyjump_p (b->end)))
4271 tidy_fallthru_edge (s, b, c);
4275 /* Perform data flow analysis.
4276 F is the first insn of the function; FLAGS is a set of PROP_* flags
4277 to be used in accumulating flow info. */
4280 life_analysis (f, file, flags)
4285 #ifdef ELIMINABLE_REGS
4287 static struct {int from, to; } eliminables[] = ELIMINABLE_REGS;
4290 /* Record which registers will be eliminated. We use this in
4293 CLEAR_HARD_REG_SET (elim_reg_set);
4295 #ifdef ELIMINABLE_REGS
4296 for (i = 0; i < (int) ARRAY_SIZE (eliminables); i++)
4297 SET_HARD_REG_BIT (elim_reg_set, eliminables[i].from);
4299 SET_HARD_REG_BIT (elim_reg_set, FRAME_POINTER_REGNUM);
4303 flags &= ~(PROP_LOG_LINKS | PROP_AUTOINC | PROP_ALLOW_CFG_CHANGES);
4305 /* The post-reload life analysis have (on a global basis) the same
4306 registers live as was computed by reload itself. elimination
4307 Otherwise offsets and such may be incorrect.
4309 Reload will make some registers as live even though they do not
4312 We don't want to create new auto-incs after reload, since they
4313 are unlikely to be useful and can cause problems with shared
4315 if (reload_completed)
4316 flags &= ~(PROP_REG_INFO | PROP_AUTOINC);
4318 /* We want alias analysis information for local dead store elimination. */
4319 if (optimize && (flags & PROP_SCAN_DEAD_CODE))
4320 init_alias_analysis ();
4322 /* Always remove no-op moves. Do this before other processing so
4323 that we don't have to keep re-scanning them. */
4324 delete_noop_moves (f);
4326 /* Some targets can emit simpler epilogues if they know that sp was
4327 not ever modified during the function. After reload, of course,
4328 we've already emitted the epilogue so there's no sense searching. */
4329 if (! reload_completed)
4330 notice_stack_pointer_modification (f);
4332 /* Allocate and zero out data structures that will record the
4333 data from lifetime analysis. */
4334 allocate_reg_life_data ();
4335 allocate_bb_life_data ();
4337 /* Find the set of registers live on function exit. */
4338 mark_regs_live_at_end (EXIT_BLOCK_PTR->global_live_at_start);
4340 /* "Update" life info from zero. It'd be nice to begin the
4341 relaxation with just the exit and noreturn blocks, but that set
4342 is not immediately handy. */
4344 if (flags & PROP_REG_INFO)
4345 memset (regs_ever_live, 0, sizeof (regs_ever_live));
4346 update_life_info (NULL, UPDATE_LIFE_GLOBAL, flags);
4349 if (optimize && (flags & PROP_SCAN_DEAD_CODE))
4350 end_alias_analysis ();
4353 dump_flow_info (file);
4355 free_basic_block_vars (1);
4357 #ifdef ENABLE_CHECKING
4361 /* Search for any REG_LABEL notes which reference deleted labels. */
4362 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4364 rtx inote = find_reg_note (insn, REG_LABEL, NULL_RTX);
4366 if (inote && GET_CODE (inote) == NOTE_INSN_DELETED_LABEL)
4371 /* Removing dead insns should've made jumptables really dead. */
4372 delete_dead_jumptables ();
4375 /* A subroutine of verify_wide_reg, called through for_each_rtx.
4376 Search for REGNO. If found, abort if it is not wider than word_mode. */
4379 verify_wide_reg_1 (px, pregno)
4384 unsigned int regno = *(int *) pregno;
4386 if (GET_CODE (x) == REG && REGNO (x) == regno)
4388 if (GET_MODE_BITSIZE (GET_MODE (x)) <= BITS_PER_WORD)
4395 /* A subroutine of verify_local_live_at_start. Search through insns
4396 between HEAD and END looking for register REGNO. */
4399 verify_wide_reg (regno, head, end)
4406 && for_each_rtx (&PATTERN (head), verify_wide_reg_1, ®no))
4410 head = NEXT_INSN (head);
4413 /* We didn't find the register at all. Something's way screwy. */
4415 fprintf (rtl_dump_file, "Aborting in verify_wide_reg; reg %d\n", regno);
4416 print_rtl_and_abort ();
4419 /* A subroutine of update_life_info. Verify that there are no untoward
4420 changes in live_at_start during a local update. */
4423 verify_local_live_at_start (new_live_at_start, bb)
4424 regset new_live_at_start;
4427 if (reload_completed)
4429 /* After reload, there are no pseudos, nor subregs of multi-word
4430 registers. The regsets should exactly match. */
4431 if (! REG_SET_EQUAL_P (new_live_at_start, bb->global_live_at_start))
4435 fprintf (rtl_dump_file,
4436 "live_at_start mismatch in bb %d, aborting\n",
4438 debug_bitmap_file (rtl_dump_file, bb->global_live_at_start);
4439 debug_bitmap_file (rtl_dump_file, new_live_at_start);
4441 print_rtl_and_abort ();
4448 /* Find the set of changed registers. */
4449 XOR_REG_SET (new_live_at_start, bb->global_live_at_start);
4451 EXECUTE_IF_SET_IN_REG_SET (new_live_at_start, 0, i,
4453 /* No registers should die. */
4454 if (REGNO_REG_SET_P (bb->global_live_at_start, i))
4457 fprintf (rtl_dump_file,
4458 "Register %d died unexpectedly in block %d\n", i,
4460 print_rtl_and_abort ();
4463 /* Verify that the now-live register is wider than word_mode. */
4464 verify_wide_reg (i, bb->head, bb->end);
4469 /* Updates life information starting with the basic blocks set in BLOCKS.
4470 If BLOCKS is null, consider it to be the universal set.
4472 If EXTENT is UPDATE_LIFE_LOCAL, such as after splitting or peepholeing,
4473 we are only expecting local modifications to basic blocks. If we find
4474 extra registers live at the beginning of a block, then we either killed
4475 useful data, or we have a broken split that wants data not provided.
4476 If we find registers removed from live_at_start, that means we have
4477 a broken peephole that is killing a register it shouldn't.
4479 ??? This is not true in one situation -- when a pre-reload splitter
4480 generates subregs of a multi-word pseudo, current life analysis will
4481 lose the kill. So we _can_ have a pseudo go live. How irritating.
4483 Including PROP_REG_INFO does not properly refresh regs_ever_live
4484 unless the caller resets it to zero. */
4487 update_life_info (blocks, extent, prop_flags)
4489 enum update_life_extent extent;
4493 regset_head tmp_head;
4496 tmp = INITIALIZE_REG_SET (tmp_head);
4498 /* Changes to the CFG are only allowed when
4499 doing a global update for the entire CFG. */
4500 if ((prop_flags & PROP_ALLOW_CFG_CHANGES)
4501 && (extent == UPDATE_LIFE_LOCAL || blocks))
4504 /* For a global update, we go through the relaxation process again. */
4505 if (extent != UPDATE_LIFE_LOCAL)
4511 calculate_global_regs_live (blocks, blocks,
4512 prop_flags & (PROP_SCAN_DEAD_CODE
4513 | PROP_ALLOW_CFG_CHANGES));
4515 if ((prop_flags & (PROP_KILL_DEAD_CODE | PROP_ALLOW_CFG_CHANGES))
4516 != (PROP_KILL_DEAD_CODE | PROP_ALLOW_CFG_CHANGES))
4519 /* Removing dead code may allow the CFG to be simplified which
4520 in turn may allow for further dead code detection / removal. */
4521 for (i = n_basic_blocks - 1; i >= 0; --i)
4523 basic_block bb = BASIC_BLOCK (i);
4525 COPY_REG_SET (tmp, bb->global_live_at_end);
4526 changed |= propagate_block (bb, tmp, NULL, NULL,
4527 prop_flags & (PROP_SCAN_DEAD_CODE
4528 | PROP_KILL_DEAD_CODE));
4531 if (! changed || ! try_optimize_cfg (CLEANUP_EXPENSIVE))
4534 delete_unreachable_blocks ();
4535 mark_critical_edges ();
4538 /* If asked, remove notes from the blocks we'll update. */
4539 if (extent == UPDATE_LIFE_GLOBAL_RM_NOTES)
4540 count_or_remove_death_notes (blocks, 1);
4545 EXECUTE_IF_SET_IN_SBITMAP (blocks, 0, i,
4547 basic_block bb = BASIC_BLOCK (i);
4549 COPY_REG_SET (tmp, bb->global_live_at_end);
4550 propagate_block (bb, tmp, NULL, NULL, prop_flags);
4552 if (extent == UPDATE_LIFE_LOCAL)
4553 verify_local_live_at_start (tmp, bb);
4558 for (i = n_basic_blocks - 1; i >= 0; --i)
4560 basic_block bb = BASIC_BLOCK (i);
4562 COPY_REG_SET (tmp, bb->global_live_at_end);
4563 propagate_block (bb, tmp, NULL, NULL, prop_flags);
4565 if (extent == UPDATE_LIFE_LOCAL)
4566 verify_local_live_at_start (tmp, bb);
4572 if (prop_flags & PROP_REG_INFO)
4574 /* The only pseudos that are live at the beginning of the function
4575 are those that were not set anywhere in the function. local-alloc
4576 doesn't know how to handle these correctly, so mark them as not
4577 local to any one basic block. */
4578 EXECUTE_IF_SET_IN_REG_SET (ENTRY_BLOCK_PTR->global_live_at_end,
4579 FIRST_PSEUDO_REGISTER, i,
4580 { REG_BASIC_BLOCK (i) = REG_BLOCK_GLOBAL; });
4582 /* We have a problem with any pseudoreg that lives across the setjmp.
4583 ANSI says that if a user variable does not change in value between
4584 the setjmp and the longjmp, then the longjmp preserves it. This
4585 includes longjmp from a place where the pseudo appears dead.
4586 (In principle, the value still exists if it is in scope.)
4587 If the pseudo goes in a hard reg, some other value may occupy
4588 that hard reg where this pseudo is dead, thus clobbering the pseudo.
4589 Conclusion: such a pseudo must not go in a hard reg. */
4590 EXECUTE_IF_SET_IN_REG_SET (regs_live_at_setjmp,
4591 FIRST_PSEUDO_REGISTER, i,
4593 if (regno_reg_rtx[i] != 0)
4595 REG_LIVE_LENGTH (i) = -1;
4596 REG_BASIC_BLOCK (i) = REG_BLOCK_UNKNOWN;
4602 /* Free the variables allocated by find_basic_blocks.
4604 KEEP_HEAD_END_P is non-zero if basic_block_info is not to be freed. */
4607 free_basic_block_vars (keep_head_end_p)
4608 int keep_head_end_p;
4610 if (basic_block_for_insn)
4612 VARRAY_FREE (basic_block_for_insn);
4613 basic_block_for_insn = NULL;
4616 if (! keep_head_end_p)
4618 if (basic_block_info)
4621 VARRAY_FREE (basic_block_info);
4625 ENTRY_BLOCK_PTR->aux = NULL;
4626 ENTRY_BLOCK_PTR->global_live_at_end = NULL;
4627 EXIT_BLOCK_PTR->aux = NULL;
4628 EXIT_BLOCK_PTR->global_live_at_start = NULL;
4632 /* Delete any insns that copy a register to itself. */
4635 delete_noop_moves (f)
4636 rtx f ATTRIBUTE_UNUSED;
4642 for (i = 0; i < n_basic_blocks; i++)
4644 bb = BASIC_BLOCK (i);
4645 for (insn = bb->head; insn != NEXT_INSN (bb->end); insn = next)
4647 next = NEXT_INSN (insn);
4648 if (INSN_P (insn) && noop_move_p (insn))
4650 /* Do not call flow_delete_insn here to not confuse backward
4651 pointers of LIBCALL block. */
4652 PUT_CODE (insn, NOTE);
4653 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
4654 NOTE_SOURCE_FILE (insn) = 0;
4660 /* Delete any jump tables never referenced. We can't delete them at the
4661 time of removing tablejump insn as they are referenced by the preceeding
4662 insns computing the destination, so we delay deleting and garbagecollect
4663 them once life information is computed. */
4665 delete_dead_jumptables ()
4668 for (insn = get_insns (); insn; insn = next)
4670 next = NEXT_INSN (insn);
4671 if (GET_CODE (insn) == CODE_LABEL
4672 && LABEL_NUSES (insn) == 0
4673 && GET_CODE (next) == JUMP_INSN
4674 && (GET_CODE (PATTERN (next)) == ADDR_VEC
4675 || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
4678 fprintf (rtl_dump_file, "Dead jumptable %i removed\n", INSN_UID (insn));
4679 flow_delete_insn (NEXT_INSN (insn));
4680 flow_delete_insn (insn);
4681 next = NEXT_INSN (next);
4686 /* Determine if the stack pointer is constant over the life of the function.
4687 Only useful before prologues have been emitted. */
4690 notice_stack_pointer_modification_1 (x, pat, data)
4692 rtx pat ATTRIBUTE_UNUSED;
4693 void *data ATTRIBUTE_UNUSED;
4695 if (x == stack_pointer_rtx
4696 /* The stack pointer is only modified indirectly as the result
4697 of a push until later in flow. See the comments in rtl.texi
4698 regarding Embedded Side-Effects on Addresses. */
4699 || (GET_CODE (x) == MEM
4700 && GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == 'a'
4701 && XEXP (XEXP (x, 0), 0) == stack_pointer_rtx))
4702 current_function_sp_is_unchanging = 0;
4706 notice_stack_pointer_modification (f)
4711 /* Assume that the stack pointer is unchanging if alloca hasn't
4713 current_function_sp_is_unchanging = !current_function_calls_alloca;
4714 if (! current_function_sp_is_unchanging)
4717 for (insn = f; insn; insn = NEXT_INSN (insn))
4721 /* Check if insn modifies the stack pointer. */
4722 note_stores (PATTERN (insn), notice_stack_pointer_modification_1,
4724 if (! current_function_sp_is_unchanging)
4730 /* Mark a register in SET. Hard registers in large modes get all
4731 of their component registers set as well. */
4734 mark_reg (reg, xset)
4738 regset set = (regset) xset;
4739 int regno = REGNO (reg);
4741 if (GET_MODE (reg) == BLKmode)
4744 SET_REGNO_REG_SET (set, regno);
4745 if (regno < FIRST_PSEUDO_REGISTER)
4747 int n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
4749 SET_REGNO_REG_SET (set, regno + n);
4753 /* Mark those regs which are needed at the end of the function as live
4754 at the end of the last basic block. */
4757 mark_regs_live_at_end (set)
4762 /* If exiting needs the right stack value, consider the stack pointer
4763 live at the end of the function. */
4764 if ((HAVE_epilogue && reload_completed)
4765 || ! EXIT_IGNORE_STACK
4766 || (! FRAME_POINTER_REQUIRED
4767 && ! current_function_calls_alloca
4768 && flag_omit_frame_pointer)
4769 || current_function_sp_is_unchanging)
4771 SET_REGNO_REG_SET (set, STACK_POINTER_REGNUM);
4774 /* Mark the frame pointer if needed at the end of the function. If
4775 we end up eliminating it, it will be removed from the live list
4776 of each basic block by reload. */
4778 if (! reload_completed || frame_pointer_needed)
4780 SET_REGNO_REG_SET (set, FRAME_POINTER_REGNUM);
4781 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
4782 /* If they are different, also mark the hard frame pointer as live. */
4783 if (! LOCAL_REGNO (HARD_FRAME_POINTER_REGNUM))
4784 SET_REGNO_REG_SET (set, HARD_FRAME_POINTER_REGNUM);
4788 #ifndef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
4789 /* Many architectures have a GP register even without flag_pic.
4790 Assume the pic register is not in use, or will be handled by
4791 other means, if it is not fixed. */
4792 if (PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
4793 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
4794 SET_REGNO_REG_SET (set, PIC_OFFSET_TABLE_REGNUM);
4797 /* Mark all global registers, and all registers used by the epilogue
4798 as being live at the end of the function since they may be
4799 referenced by our caller. */
4800 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4801 if (global_regs[i] || EPILOGUE_USES (i))
4802 SET_REGNO_REG_SET (set, i);
4804 if (HAVE_epilogue && reload_completed)
4806 /* Mark all call-saved registers that we actually used. */
4807 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4808 if (regs_ever_live[i] && ! call_used_regs[i] && ! LOCAL_REGNO (i))
4809 SET_REGNO_REG_SET (set, i);
4812 #ifdef EH_RETURN_DATA_REGNO
4813 /* Mark the registers that will contain data for the handler. */
4814 if (reload_completed && current_function_calls_eh_return)
4817 unsigned regno = EH_RETURN_DATA_REGNO(i);
4818 if (regno == INVALID_REGNUM)
4820 SET_REGNO_REG_SET (set, regno);
4823 #ifdef EH_RETURN_STACKADJ_RTX
4824 if ((! HAVE_epilogue || ! reload_completed)
4825 && current_function_calls_eh_return)
4827 rtx tmp = EH_RETURN_STACKADJ_RTX;
4828 if (tmp && REG_P (tmp))
4829 mark_reg (tmp, set);
4832 #ifdef EH_RETURN_HANDLER_RTX
4833 if ((! HAVE_epilogue || ! reload_completed)
4834 && current_function_calls_eh_return)
4836 rtx tmp = EH_RETURN_HANDLER_RTX;
4837 if (tmp && REG_P (tmp))
4838 mark_reg (tmp, set);
4842 /* Mark function return value. */
4843 diddle_return_value (mark_reg, set);
4846 /* Callback function for for_each_successor_phi. DATA is a regset.
4847 Sets the SRC_REGNO, the regno of the phi alternative for phi node
4848 INSN, in the regset. */
4851 set_phi_alternative_reg (insn, dest_regno, src_regno, data)
4852 rtx insn ATTRIBUTE_UNUSED;
4853 int dest_regno ATTRIBUTE_UNUSED;
4857 regset live = (regset) data;
4858 SET_REGNO_REG_SET (live, src_regno);
4862 /* Propagate global life info around the graph of basic blocks. Begin
4863 considering blocks with their corresponding bit set in BLOCKS_IN.
4864 If BLOCKS_IN is null, consider it the universal set.
4866 BLOCKS_OUT is set for every block that was changed. */
4869 calculate_global_regs_live (blocks_in, blocks_out, flags)
4870 sbitmap blocks_in, blocks_out;
4873 basic_block *queue, *qhead, *qtail, *qend;
4874 regset tmp, new_live_at_end, call_used;
4875 regset_head tmp_head, call_used_head;
4876 regset_head new_live_at_end_head;
4879 tmp = INITIALIZE_REG_SET (tmp_head);
4880 new_live_at_end = INITIALIZE_REG_SET (new_live_at_end_head);
4881 call_used = INITIALIZE_REG_SET (call_used_head);
4883 /* Inconveniently, this is only redily available in hard reg set form. */
4884 for (i = 0; i < FIRST_PSEUDO_REGISTER; ++i)
4885 if (call_used_regs[i])
4886 SET_REGNO_REG_SET (call_used, i);
4888 /* Create a worklist. Allocate an extra slot for ENTRY_BLOCK, and one
4889 because the `head == tail' style test for an empty queue doesn't
4890 work with a full queue. */
4891 queue = (basic_block *) xmalloc ((n_basic_blocks + 2) * sizeof (*queue));
4893 qhead = qend = queue + n_basic_blocks + 2;
4895 /* Queue the blocks set in the initial mask. Do this in reverse block
4896 number order so that we are more likely for the first round to do
4897 useful work. We use AUX non-null to flag that the block is queued. */
4900 /* Clear out the garbage that might be hanging out in bb->aux. */
4901 for (i = n_basic_blocks - 1; i >= 0; --i)
4902 BASIC_BLOCK (i)->aux = NULL;
4904 EXECUTE_IF_SET_IN_SBITMAP (blocks_in, 0, i,
4906 basic_block bb = BASIC_BLOCK (i);
4913 for (i = 0; i < n_basic_blocks; ++i)
4915 basic_block bb = BASIC_BLOCK (i);
4922 sbitmap_zero (blocks_out);
4924 /* We work through the queue until there are no more blocks. What
4925 is live at the end of this block is precisely the union of what
4926 is live at the beginning of all its successors. So, we set its
4927 GLOBAL_LIVE_AT_END field based on the GLOBAL_LIVE_AT_START field
4928 for its successors. Then, we compute GLOBAL_LIVE_AT_START for
4929 this block by walking through the instructions in this block in
4930 reverse order and updating as we go. If that changed
4931 GLOBAL_LIVE_AT_START, we add the predecessors of the block to the
4932 queue; they will now need to recalculate GLOBAL_LIVE_AT_END.
4934 We are guaranteed to terminate, because GLOBAL_LIVE_AT_START
4935 never shrinks. If a register appears in GLOBAL_LIVE_AT_START, it
4936 must either be live at the end of the block, or used within the
4937 block. In the latter case, it will certainly never disappear
4938 from GLOBAL_LIVE_AT_START. In the former case, the register
4939 could go away only if it disappeared from GLOBAL_LIVE_AT_START
4940 for one of the successor blocks. By induction, that cannot
4942 while (qhead != qtail)
4944 int rescan, changed;
4953 /* Begin by propagating live_at_start from the successor blocks. */
4954 CLEAR_REG_SET (new_live_at_end);
4955 for (e = bb->succ; e; e = e->succ_next)
4957 basic_block sb = e->dest;
4959 /* Call-clobbered registers die across exception and call edges. */
4960 /* ??? Abnormal call edges ignored for the moment, as this gets
4961 confused by sibling call edges, which crashes reg-stack. */
4962 if (e->flags & EDGE_EH)
4964 bitmap_operation (tmp, sb->global_live_at_start,
4965 call_used, BITMAP_AND_COMPL);
4966 IOR_REG_SET (new_live_at_end, tmp);
4969 IOR_REG_SET (new_live_at_end, sb->global_live_at_start);
4972 /* The all-important stack pointer must always be live. */
4973 SET_REGNO_REG_SET (new_live_at_end, STACK_POINTER_REGNUM);
4975 /* Before reload, there are a few registers that must be forced
4976 live everywhere -- which might not already be the case for
4977 blocks within infinite loops. */
4978 if (! reload_completed)
4980 /* Any reference to any pseudo before reload is a potential
4981 reference of the frame pointer. */
4982 SET_REGNO_REG_SET (new_live_at_end, FRAME_POINTER_REGNUM);
4984 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
4985 /* Pseudos with argument area equivalences may require
4986 reloading via the argument pointer. */
4987 if (fixed_regs[ARG_POINTER_REGNUM])
4988 SET_REGNO_REG_SET (new_live_at_end, ARG_POINTER_REGNUM);
4991 /* Any constant, or pseudo with constant equivalences, may
4992 require reloading from memory using the pic register. */
4993 if (PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
4994 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
4995 SET_REGNO_REG_SET (new_live_at_end, PIC_OFFSET_TABLE_REGNUM);
4998 /* Regs used in phi nodes are not included in
4999 global_live_at_start, since they are live only along a
5000 particular edge. Set those regs that are live because of a
5001 phi node alternative corresponding to this particular block. */
5003 for_each_successor_phi (bb, &set_phi_alternative_reg,
5006 if (bb == ENTRY_BLOCK_PTR)
5008 COPY_REG_SET (bb->global_live_at_end, new_live_at_end);
5012 /* On our first pass through this block, we'll go ahead and continue.
5013 Recognize first pass by local_set NULL. On subsequent passes, we
5014 get to skip out early if live_at_end wouldn't have changed. */
5016 if (bb->local_set == NULL)
5018 bb->local_set = OBSTACK_ALLOC_REG_SET (&flow_obstack);
5019 bb->cond_local_set = OBSTACK_ALLOC_REG_SET (&flow_obstack);
5024 /* If any bits were removed from live_at_end, we'll have to
5025 rescan the block. This wouldn't be necessary if we had
5026 precalculated local_live, however with PROP_SCAN_DEAD_CODE
5027 local_live is really dependent on live_at_end. */
5028 CLEAR_REG_SET (tmp);
5029 rescan = bitmap_operation (tmp, bb->global_live_at_end,
5030 new_live_at_end, BITMAP_AND_COMPL);
5034 /* If any of the registers in the new live_at_end set are
5035 conditionally set in this basic block, we must rescan.
5036 This is because conditional lifetimes at the end of the
5037 block do not just take the live_at_end set into account,
5038 but also the liveness at the start of each successor
5039 block. We can miss changes in those sets if we only
5040 compare the new live_at_end against the previous one. */
5041 CLEAR_REG_SET (tmp);
5042 rescan = bitmap_operation (tmp, new_live_at_end,
5043 bb->cond_local_set, BITMAP_AND);
5048 /* Find the set of changed bits. Take this opportunity
5049 to notice that this set is empty and early out. */
5050 CLEAR_REG_SET (tmp);
5051 changed = bitmap_operation (tmp, bb->global_live_at_end,
5052 new_live_at_end, BITMAP_XOR);
5056 /* If any of the changed bits overlap with local_set,
5057 we'll have to rescan the block. Detect overlap by
5058 the AND with ~local_set turning off bits. */
5059 rescan = bitmap_operation (tmp, tmp, bb->local_set,
5064 /* Let our caller know that BB changed enough to require its
5065 death notes updated. */
5067 SET_BIT (blocks_out, bb->index);
5071 /* Add to live_at_start the set of all registers in
5072 new_live_at_end that aren't in the old live_at_end. */
5074 bitmap_operation (tmp, new_live_at_end, bb->global_live_at_end,
5076 COPY_REG_SET (bb->global_live_at_end, new_live_at_end);
5078 changed = bitmap_operation (bb->global_live_at_start,
5079 bb->global_live_at_start,
5086 COPY_REG_SET (bb->global_live_at_end, new_live_at_end);
5088 /* Rescan the block insn by insn to turn (a copy of) live_at_end
5089 into live_at_start. */
5090 propagate_block (bb, new_live_at_end, bb->local_set,
5091 bb->cond_local_set, flags);
5093 /* If live_at start didn't change, no need to go farther. */
5094 if (REG_SET_EQUAL_P (bb->global_live_at_start, new_live_at_end))
5097 COPY_REG_SET (bb->global_live_at_start, new_live_at_end);
5100 /* Queue all predecessors of BB so that we may re-examine
5101 their live_at_end. */
5102 for (e = bb->pred; e; e = e->pred_next)
5104 basic_block pb = e->src;
5105 if (pb->aux == NULL)
5116 FREE_REG_SET (new_live_at_end);
5117 FREE_REG_SET (call_used);
5121 EXECUTE_IF_SET_IN_SBITMAP (blocks_out, 0, i,
5123 basic_block bb = BASIC_BLOCK (i);
5124 FREE_REG_SET (bb->local_set);
5125 FREE_REG_SET (bb->cond_local_set);
5130 for (i = n_basic_blocks - 1; i >= 0; --i)
5132 basic_block bb = BASIC_BLOCK (i);
5133 FREE_REG_SET (bb->local_set);
5134 FREE_REG_SET (bb->cond_local_set);
5141 /* Subroutines of life analysis. */
5143 /* Allocate the permanent data structures that represent the results
5144 of life analysis. Not static since used also for stupid life analysis. */
5147 allocate_bb_life_data ()
5151 for (i = 0; i < n_basic_blocks; i++)
5153 basic_block bb = BASIC_BLOCK (i);
5155 bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (&flow_obstack);
5156 bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (&flow_obstack);
5159 ENTRY_BLOCK_PTR->global_live_at_end
5160 = OBSTACK_ALLOC_REG_SET (&flow_obstack);
5161 EXIT_BLOCK_PTR->global_live_at_start
5162 = OBSTACK_ALLOC_REG_SET (&flow_obstack);
5164 regs_live_at_setjmp = OBSTACK_ALLOC_REG_SET (&flow_obstack);
5168 allocate_reg_life_data ()
5172 max_regno = max_reg_num ();
5174 /* Recalculate the register space, in case it has grown. Old style
5175 vector oriented regsets would set regset_{size,bytes} here also. */
5176 allocate_reg_info (max_regno, FALSE, FALSE);
5178 /* Reset all the data we'll collect in propagate_block and its
5180 for (i = 0; i < max_regno; i++)
5184 REG_N_DEATHS (i) = 0;
5185 REG_N_CALLS_CROSSED (i) = 0;
5186 REG_LIVE_LENGTH (i) = 0;
5187 REG_BASIC_BLOCK (i) = REG_BLOCK_UNKNOWN;
5191 /* Delete dead instructions for propagate_block. */
5194 propagate_block_delete_insn (bb, insn)
5198 rtx inote = find_reg_note (insn, REG_LABEL, NULL_RTX);
5200 /* If the insn referred to a label, and that label was attached to
5201 an ADDR_VEC, it's safe to delete the ADDR_VEC. In fact, it's
5202 pretty much mandatory to delete it, because the ADDR_VEC may be
5203 referencing labels that no longer exist.
5205 INSN may reference a deleted label, particularly when a jump
5206 table has been optimized into a direct jump. There's no
5207 real good way to fix up the reference to the deleted label
5208 when the label is deleted, so we just allow it here.
5210 After dead code elimination is complete, we do search for
5211 any REG_LABEL notes which reference deleted labels as a
5214 if (inote && GET_CODE (inote) == CODE_LABEL)
5216 rtx label = XEXP (inote, 0);
5219 /* The label may be forced if it has been put in the constant
5220 pool. If that is the only use we must discard the table
5221 jump following it, but not the label itself. */
5222 if (LABEL_NUSES (label) == 1 + LABEL_PRESERVE_P (label)
5223 && (next = next_nonnote_insn (label)) != NULL
5224 && GET_CODE (next) == JUMP_INSN
5225 && (GET_CODE (PATTERN (next)) == ADDR_VEC
5226 || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
5228 rtx pat = PATTERN (next);
5229 int diff_vec_p = GET_CODE (pat) == ADDR_DIFF_VEC;
5230 int len = XVECLEN (pat, diff_vec_p);
5233 for (i = 0; i < len; i++)
5234 LABEL_NUSES (XEXP (XVECEXP (pat, diff_vec_p, i), 0))--;
5236 flow_delete_insn (next);
5240 if (bb->end == insn)
5241 bb->end = PREV_INSN (insn);
5242 flow_delete_insn (insn);
5245 /* Delete dead libcalls for propagate_block. Return the insn
5246 before the libcall. */
5249 propagate_block_delete_libcall (bb, insn, note)
5253 rtx first = XEXP (note, 0);
5254 rtx before = PREV_INSN (first);
5256 if (insn == bb->end)
5259 flow_delete_insn_chain (first, insn);
5263 /* Update the life-status of regs for one insn. Return the previous insn. */
5266 propagate_one_insn (pbi, insn)
5267 struct propagate_block_info *pbi;
5270 rtx prev = PREV_INSN (insn);
5271 int flags = pbi->flags;
5272 int insn_is_dead = 0;
5273 int libcall_is_dead = 0;
5277 if (! INSN_P (insn))
5280 note = find_reg_note (insn, REG_RETVAL, NULL_RTX);
5281 if (flags & PROP_SCAN_DEAD_CODE)
5283 insn_is_dead = insn_dead_p (pbi, PATTERN (insn), 0, REG_NOTES (insn));
5284 libcall_is_dead = (insn_is_dead && note != 0
5285 && libcall_dead_p (pbi, note, insn));
5288 /* If an instruction consists of just dead store(s) on final pass,
5290 if ((flags & PROP_KILL_DEAD_CODE) && insn_is_dead)
5292 /* If we're trying to delete a prologue or epilogue instruction
5293 that isn't flagged as possibly being dead, something is wrong.
5294 But if we are keeping the stack pointer depressed, we might well
5295 be deleting insns that are used to compute the amount to update
5296 it by, so they are fine. */
5297 if (reload_completed
5298 && !(TREE_CODE (TREE_TYPE (current_function_decl)) == FUNCTION_TYPE
5299 && (TYPE_RETURNS_STACK_DEPRESSED
5300 (TREE_TYPE (current_function_decl))))
5301 && (((HAVE_epilogue || HAVE_prologue)
5302 && prologue_epilogue_contains (insn))
5303 || (HAVE_sibcall_epilogue
5304 && sibcall_epilogue_contains (insn)))
5305 && find_reg_note (insn, REG_MAYBE_DEAD, NULL_RTX) == 0)
5308 /* Record sets. Do this even for dead instructions, since they
5309 would have killed the values if they hadn't been deleted. */
5310 mark_set_regs (pbi, PATTERN (insn), insn);
5312 /* CC0 is now known to be dead. Either this insn used it,
5313 in which case it doesn't anymore, or clobbered it,
5314 so the next insn can't use it. */
5317 if (libcall_is_dead)
5318 prev = propagate_block_delete_libcall (pbi->bb, insn, note);
5320 propagate_block_delete_insn (pbi->bb, insn);
5325 /* See if this is an increment or decrement that can be merged into
5326 a following memory address. */
5329 register rtx x = single_set (insn);
5331 /* Does this instruction increment or decrement a register? */
5332 if ((flags & PROP_AUTOINC)
5334 && GET_CODE (SET_DEST (x)) == REG
5335 && (GET_CODE (SET_SRC (x)) == PLUS
5336 || GET_CODE (SET_SRC (x)) == MINUS)
5337 && XEXP (SET_SRC (x), 0) == SET_DEST (x)
5338 && GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT
5339 /* Ok, look for a following memory ref we can combine with.
5340 If one is found, change the memory ref to a PRE_INC
5341 or PRE_DEC, cancel this insn, and return 1.
5342 Return 0 if nothing has been done. */
5343 && try_pre_increment_1 (pbi, insn))
5346 #endif /* AUTO_INC_DEC */
5348 CLEAR_REG_SET (pbi->new_set);
5350 /* If this is not the final pass, and this insn is copying the value of
5351 a library call and it's dead, don't scan the insns that perform the
5352 library call, so that the call's arguments are not marked live. */
5353 if (libcall_is_dead)
5355 /* Record the death of the dest reg. */
5356 mark_set_regs (pbi, PATTERN (insn), insn);
5358 insn = XEXP (note, 0);
5359 return PREV_INSN (insn);
5361 else if (GET_CODE (PATTERN (insn)) == SET
5362 && SET_DEST (PATTERN (insn)) == stack_pointer_rtx
5363 && GET_CODE (SET_SRC (PATTERN (insn))) == PLUS
5364 && XEXP (SET_SRC (PATTERN (insn)), 0) == stack_pointer_rtx
5365 && GET_CODE (XEXP (SET_SRC (PATTERN (insn)), 1)) == CONST_INT)
5366 /* We have an insn to pop a constant amount off the stack.
5367 (Such insns use PLUS regardless of the direction of the stack,
5368 and any insn to adjust the stack by a constant is always a pop.)
5369 These insns, if not dead stores, have no effect on life. */
5373 /* Any regs live at the time of a call instruction must not go
5374 in a register clobbered by calls. Find all regs now live and
5375 record this for them. */
5377 if (GET_CODE (insn) == CALL_INSN && (flags & PROP_REG_INFO))
5378 EXECUTE_IF_SET_IN_REG_SET (pbi->reg_live, 0, i,
5379 { REG_N_CALLS_CROSSED (i)++; });
5381 /* Record sets. Do this even for dead instructions, since they
5382 would have killed the values if they hadn't been deleted. */
5383 mark_set_regs (pbi, PATTERN (insn), insn);
5385 if (GET_CODE (insn) == CALL_INSN)
5391 if (GET_CODE (PATTERN (insn)) == COND_EXEC)
5392 cond = COND_EXEC_TEST (PATTERN (insn));
5394 /* Non-constant calls clobber memory. */
5395 if (! CONST_CALL_P (insn))
5397 free_EXPR_LIST_list (&pbi->mem_set_list);
5398 pbi->mem_set_list_len = 0;
5401 /* There may be extra registers to be clobbered. */
5402 for (note = CALL_INSN_FUNCTION_USAGE (insn);
5404 note = XEXP (note, 1))
5405 if (GET_CODE (XEXP (note, 0)) == CLOBBER)
5406 mark_set_1 (pbi, CLOBBER, XEXP (XEXP (note, 0), 0),
5407 cond, insn, pbi->flags);
5409 /* Calls change all call-used and global registers. */
5410 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
5411 if (TEST_HARD_REG_BIT (regs_invalidated_by_call, i))
5413 /* We do not want REG_UNUSED notes for these registers. */
5414 mark_set_1 (pbi, CLOBBER, gen_rtx_REG (reg_raw_mode[i], i),
5416 pbi->flags & ~(PROP_DEATH_NOTES | PROP_REG_INFO));
5420 /* If an insn doesn't use CC0, it becomes dead since we assume
5421 that every insn clobbers it. So show it dead here;
5422 mark_used_regs will set it live if it is referenced. */
5427 mark_used_regs (pbi, PATTERN (insn), NULL_RTX, insn);
5429 /* Sometimes we may have inserted something before INSN (such as a move)
5430 when we make an auto-inc. So ensure we will scan those insns. */
5432 prev = PREV_INSN (insn);
5435 if (! insn_is_dead && GET_CODE (insn) == CALL_INSN)
5441 if (GET_CODE (PATTERN (insn)) == COND_EXEC)
5442 cond = COND_EXEC_TEST (PATTERN (insn));
5444 /* Calls use their arguments. */
5445 for (note = CALL_INSN_FUNCTION_USAGE (insn);
5447 note = XEXP (note, 1))
5448 if (GET_CODE (XEXP (note, 0)) == USE)
5449 mark_used_regs (pbi, XEXP (XEXP (note, 0), 0),
5452 /* The stack ptr is used (honorarily) by a CALL insn. */
5453 SET_REGNO_REG_SET (pbi->reg_live, STACK_POINTER_REGNUM);
5455 /* Calls may also reference any of the global registers,
5456 so they are made live. */
5457 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
5459 mark_used_reg (pbi, gen_rtx_REG (reg_raw_mode[i], i),
5464 /* On final pass, update counts of how many insns in which each reg
5466 if (flags & PROP_REG_INFO)
5467 EXECUTE_IF_SET_IN_REG_SET (pbi->reg_live, 0, i,
5468 { REG_LIVE_LENGTH (i)++; });
5473 /* Initialize a propagate_block_info struct for public consumption.
5474 Note that the structure itself is opaque to this file, but that
5475 the user can use the regsets provided here. */
5477 struct propagate_block_info *
5478 init_propagate_block_info (bb, live, local_set, cond_local_set, flags)
5480 regset live, local_set, cond_local_set;
5483 struct propagate_block_info *pbi = xmalloc (sizeof (*pbi));
5486 pbi->reg_live = live;
5487 pbi->mem_set_list = NULL_RTX;
5488 pbi->mem_set_list_len = 0;
5489 pbi->local_set = local_set;
5490 pbi->cond_local_set = cond_local_set;
5494 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
5495 pbi->reg_next_use = (rtx *) xcalloc (max_reg_num (), sizeof (rtx));
5497 pbi->reg_next_use = NULL;
5499 pbi->new_set = BITMAP_XMALLOC ();
5501 #ifdef HAVE_conditional_execution
5502 pbi->reg_cond_dead = splay_tree_new (splay_tree_compare_ints, NULL,
5503 free_reg_cond_life_info);
5504 pbi->reg_cond_reg = BITMAP_XMALLOC ();
5506 /* If this block ends in a conditional branch, for each register live
5507 from one side of the branch and not the other, record the register
5508 as conditionally dead. */
5509 if (GET_CODE (bb->end) == JUMP_INSN
5510 && any_condjump_p (bb->end))
5512 regset_head diff_head;
5513 regset diff = INITIALIZE_REG_SET (diff_head);
5514 basic_block bb_true, bb_false;
5515 rtx cond_true, cond_false, set_src;
5518 /* Identify the successor blocks. */
5519 bb_true = bb->succ->dest;
5520 if (bb->succ->succ_next != NULL)
5522 bb_false = bb->succ->succ_next->dest;
5524 if (bb->succ->flags & EDGE_FALLTHRU)
5526 basic_block t = bb_false;
5530 else if (! (bb->succ->succ_next->flags & EDGE_FALLTHRU))
5535 /* This can happen with a conditional jump to the next insn. */
5536 if (JUMP_LABEL (bb->end) != bb_true->head)
5539 /* Simplest way to do nothing. */
5543 /* Extract the condition from the branch. */
5544 set_src = SET_SRC (pc_set (bb->end));
5545 cond_true = XEXP (set_src, 0);
5546 cond_false = gen_rtx_fmt_ee (reverse_condition (GET_CODE (cond_true)),
5547 GET_MODE (cond_true), XEXP (cond_true, 0),
5548 XEXP (cond_true, 1));
5549 if (GET_CODE (XEXP (set_src, 1)) == PC)
5552 cond_false = cond_true;
5556 /* Compute which register lead different lives in the successors. */
5557 if (bitmap_operation (diff, bb_true->global_live_at_start,
5558 bb_false->global_live_at_start, BITMAP_XOR))
5560 rtx reg = XEXP (cond_true, 0);
5562 if (GET_CODE (reg) == SUBREG)
5563 reg = SUBREG_REG (reg);
5565 if (GET_CODE (reg) != REG)
5568 SET_REGNO_REG_SET (pbi->reg_cond_reg, REGNO (reg));
5570 /* For each such register, mark it conditionally dead. */
5571 EXECUTE_IF_SET_IN_REG_SET
5574 struct reg_cond_life_info *rcli;
5577 rcli = (struct reg_cond_life_info *) xmalloc (sizeof (*rcli));
5579 if (REGNO_REG_SET_P (bb_true->global_live_at_start, i))
5583 rcli->condition = cond;
5584 rcli->stores = const0_rtx;
5585 rcli->orig_condition = cond;
5587 splay_tree_insert (pbi->reg_cond_dead, i,
5588 (splay_tree_value) rcli);
5592 FREE_REG_SET (diff);
5596 /* If this block has no successors, any stores to the frame that aren't
5597 used later in the block are dead. So make a pass over the block
5598 recording any such that are made and show them dead at the end. We do
5599 a very conservative and simple job here. */
5601 && ! (TREE_CODE (TREE_TYPE (current_function_decl)) == FUNCTION_TYPE
5602 && (TYPE_RETURNS_STACK_DEPRESSED
5603 (TREE_TYPE (current_function_decl))))
5604 && (flags & PROP_SCAN_DEAD_CODE)
5605 && (bb->succ == NULL
5606 || (bb->succ->succ_next == NULL
5607 && bb->succ->dest == EXIT_BLOCK_PTR
5608 && ! current_function_calls_eh_return)))
5611 for (insn = bb->end; insn != bb->head; insn = PREV_INSN (insn))
5612 if (GET_CODE (insn) == INSN
5613 && (set = single_set (insn))
5614 && GET_CODE (SET_DEST (set)) == MEM)
5616 rtx mem = SET_DEST (set);
5617 rtx canon_mem = canon_rtx (mem);
5619 /* This optimization is performed by faking a store to the
5620 memory at the end of the block. This doesn't work for
5621 unchanging memories because multiple stores to unchanging
5622 memory is illegal and alias analysis doesn't consider it. */
5623 if (RTX_UNCHANGING_P (canon_mem))
5626 if (XEXP (canon_mem, 0) == frame_pointer_rtx
5627 || (GET_CODE (XEXP (canon_mem, 0)) == PLUS
5628 && XEXP (XEXP (canon_mem, 0), 0) == frame_pointer_rtx
5629 && GET_CODE (XEXP (XEXP (canon_mem, 0), 1)) == CONST_INT))
5630 add_to_mem_set_list (pbi, canon_mem);
5637 /* Release a propagate_block_info struct. */
5640 free_propagate_block_info (pbi)
5641 struct propagate_block_info *pbi;
5643 free_EXPR_LIST_list (&pbi->mem_set_list);
5645 BITMAP_XFREE (pbi->new_set);
5647 #ifdef HAVE_conditional_execution
5648 splay_tree_delete (pbi->reg_cond_dead);
5649 BITMAP_XFREE (pbi->reg_cond_reg);
5652 if (pbi->reg_next_use)
5653 free (pbi->reg_next_use);
5658 /* Compute the registers live at the beginning of a basic block BB from
5659 those live at the end.
5661 When called, REG_LIVE contains those live at the end. On return, it
5662 contains those live at the beginning.
5664 LOCAL_SET, if non-null, will be set with all registers killed
5665 unconditionally by this basic block.
5666 Likewise, COND_LOCAL_SET, if non-null, will be set with all registers
5667 killed conditionally by this basic block. If there is any unconditional
5668 set of a register, then the corresponding bit will be set in LOCAL_SET
5669 and cleared in COND_LOCAL_SET.
5670 It is valid for LOCAL_SET and COND_LOCAL_SET to be the same set. In this
5671 case, the resulting set will be equal to the union of the two sets that
5672 would otherwise be computed.
5674 Return non-zero if an INSN is deleted (i.e. by dead code removal). */
5677 propagate_block (bb, live, local_set, cond_local_set, flags)
5681 regset cond_local_set;
5684 struct propagate_block_info *pbi;
5688 pbi = init_propagate_block_info (bb, live, local_set, cond_local_set, flags);
5690 if (flags & PROP_REG_INFO)
5694 /* Process the regs live at the end of the block.
5695 Mark them as not local to any one basic block. */
5696 EXECUTE_IF_SET_IN_REG_SET (live, 0, i,
5697 { REG_BASIC_BLOCK (i) = REG_BLOCK_GLOBAL; });
5700 /* Scan the block an insn at a time from end to beginning. */
5703 for (insn = bb->end;; insn = prev)
5705 /* If this is a call to `setjmp' et al, warn if any
5706 non-volatile datum is live. */
5707 if ((flags & PROP_REG_INFO)
5708 && GET_CODE (insn) == NOTE
5709 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_SETJMP)
5710 IOR_REG_SET (regs_live_at_setjmp, pbi->reg_live);
5712 prev = propagate_one_insn (pbi, insn);
5713 changed |= NEXT_INSN (prev) != insn;
5715 if (insn == bb->head)
5719 free_propagate_block_info (pbi);
5724 /* Return 1 if X (the body of an insn, or part of it) is just dead stores
5725 (SET expressions whose destinations are registers dead after the insn).
5726 NEEDED is the regset that says which regs are alive after the insn.
5728 Unless CALL_OK is non-zero, an insn is needed if it contains a CALL.
5730 If X is the entire body of an insn, NOTES contains the reg notes
5731 pertaining to the insn. */
5734 insn_dead_p (pbi, x, call_ok, notes)
5735 struct propagate_block_info *pbi;
5738 rtx notes ATTRIBUTE_UNUSED;
5740 enum rtx_code code = GET_CODE (x);
5743 /* If flow is invoked after reload, we must take existing AUTO_INC
5744 expresions into account. */
5745 if (reload_completed)
5747 for (; notes; notes = XEXP (notes, 1))
5749 if (REG_NOTE_KIND (notes) == REG_INC)
5751 int regno = REGNO (XEXP (notes, 0));
5753 /* Don't delete insns to set global regs. */
5754 if ((regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
5755 || REGNO_REG_SET_P (pbi->reg_live, regno))
5762 /* If setting something that's a reg or part of one,
5763 see if that register's altered value will be live. */
5767 rtx r = SET_DEST (x);
5770 if (GET_CODE (r) == CC0)
5771 return ! pbi->cc0_live;
5774 /* A SET that is a subroutine call cannot be dead. */
5775 if (GET_CODE (SET_SRC (x)) == CALL)
5781 /* Don't eliminate loads from volatile memory or volatile asms. */
5782 else if (volatile_refs_p (SET_SRC (x)))
5785 if (GET_CODE (r) == MEM)
5789 if (MEM_VOLATILE_P (r) || GET_MODE (r) == BLKmode)
5792 canon_r = canon_rtx (r);
5794 /* Walk the set of memory locations we are currently tracking
5795 and see if one is an identical match to this memory location.
5796 If so, this memory write is dead (remember, we're walking
5797 backwards from the end of the block to the start). Since
5798 rtx_equal_p does not check the alias set or flags, we also
5799 must have the potential for them to conflict (anti_dependence). */
5800 for (temp = pbi->mem_set_list; temp != 0; temp = XEXP (temp, 1))
5801 if (anti_dependence (r, XEXP (temp, 0)))
5803 rtx mem = XEXP (temp, 0);
5805 if (rtx_equal_p (XEXP (canon_r, 0), XEXP (mem, 0))
5806 && (GET_MODE_SIZE (GET_MODE (canon_r))
5807 <= GET_MODE_SIZE (GET_MODE (mem))))
5811 /* Check if memory reference matches an auto increment. Only
5812 post increment/decrement or modify are valid. */
5813 if (GET_MODE (mem) == GET_MODE (r)
5814 && (GET_CODE (XEXP (mem, 0)) == POST_DEC
5815 || GET_CODE (XEXP (mem, 0)) == POST_INC
5816 || GET_CODE (XEXP (mem, 0)) == POST_MODIFY)
5817 && GET_MODE (XEXP (mem, 0)) == GET_MODE (r)
5818 && rtx_equal_p (XEXP (XEXP (mem, 0), 0), XEXP (r, 0)))
5825 while (GET_CODE (r) == SUBREG
5826 || GET_CODE (r) == STRICT_LOW_PART
5827 || GET_CODE (r) == ZERO_EXTRACT)
5830 if (GET_CODE (r) == REG)
5832 int regno = REGNO (r);
5835 if (REGNO_REG_SET_P (pbi->reg_live, regno))
5838 /* If this is a hard register, verify that subsequent
5839 words are not needed. */
5840 if (regno < FIRST_PSEUDO_REGISTER)
5842 int n = HARD_REGNO_NREGS (regno, GET_MODE (r));
5845 if (REGNO_REG_SET_P (pbi->reg_live, regno+n))
5849 /* Don't delete insns to set global regs. */
5850 if (regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
5853 /* Make sure insns to set the stack pointer aren't deleted. */
5854 if (regno == STACK_POINTER_REGNUM)
5857 /* ??? These bits might be redundant with the force live bits
5858 in calculate_global_regs_live. We would delete from
5859 sequential sets; whether this actually affects real code
5860 for anything but the stack pointer I don't know. */
5861 /* Make sure insns to set the frame pointer aren't deleted. */
5862 if (regno == FRAME_POINTER_REGNUM
5863 && (! reload_completed || frame_pointer_needed))
5865 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
5866 if (regno == HARD_FRAME_POINTER_REGNUM
5867 && (! reload_completed || frame_pointer_needed))
5871 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
5872 /* Make sure insns to set arg pointer are never deleted
5873 (if the arg pointer isn't fixed, there will be a USE
5874 for it, so we can treat it normally). */
5875 if (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
5879 /* Otherwise, the set is dead. */
5885 /* If performing several activities, insn is dead if each activity
5886 is individually dead. Also, CLOBBERs and USEs can be ignored; a
5887 CLOBBER or USE that's inside a PARALLEL doesn't make the insn
5889 else if (code == PARALLEL)
5891 int i = XVECLEN (x, 0);
5893 for (i--; i >= 0; i--)
5894 if (GET_CODE (XVECEXP (x, 0, i)) != CLOBBER
5895 && GET_CODE (XVECEXP (x, 0, i)) != USE
5896 && ! insn_dead_p (pbi, XVECEXP (x, 0, i), call_ok, NULL_RTX))
5902 /* A CLOBBER of a pseudo-register that is dead serves no purpose. That
5903 is not necessarily true for hard registers. */
5904 else if (code == CLOBBER && GET_CODE (XEXP (x, 0)) == REG
5905 && REGNO (XEXP (x, 0)) >= FIRST_PSEUDO_REGISTER
5906 && ! REGNO_REG_SET_P (pbi->reg_live, REGNO (XEXP (x, 0))))
5909 /* We do not check other CLOBBER or USE here. An insn consisting of just
5910 a CLOBBER or just a USE should not be deleted. */
5914 /* If INSN is the last insn in a libcall, and assuming INSN is dead,
5915 return 1 if the entire library call is dead.
5916 This is true if INSN copies a register (hard or pseudo)
5917 and if the hard return reg of the call insn is dead.
5918 (The caller should have tested the destination of the SET inside
5919 INSN already for death.)
5921 If this insn doesn't just copy a register, then we don't
5922 have an ordinary libcall. In that case, cse could not have
5923 managed to substitute the source for the dest later on,
5924 so we can assume the libcall is dead.
5926 PBI is the block info giving pseudoregs live before this insn.
5927 NOTE is the REG_RETVAL note of the insn. */
5930 libcall_dead_p (pbi, note, insn)
5931 struct propagate_block_info *pbi;
5935 rtx x = single_set (insn);
5939 register rtx r = SET_SRC (x);
5940 if (GET_CODE (r) == REG)
5942 rtx call = XEXP (note, 0);
5946 /* Find the call insn. */
5947 while (call != insn && GET_CODE (call) != CALL_INSN)
5948 call = NEXT_INSN (call);
5950 /* If there is none, do nothing special,
5951 since ordinary death handling can understand these insns. */
5955 /* See if the hard reg holding the value is dead.
5956 If this is a PARALLEL, find the call within it. */
5957 call_pat = PATTERN (call);
5958 if (GET_CODE (call_pat) == PARALLEL)
5960 for (i = XVECLEN (call_pat, 0) - 1; i >= 0; i--)
5961 if (GET_CODE (XVECEXP (call_pat, 0, i)) == SET
5962 && GET_CODE (SET_SRC (XVECEXP (call_pat, 0, i))) == CALL)
5965 /* This may be a library call that is returning a value
5966 via invisible pointer. Do nothing special, since
5967 ordinary death handling can understand these insns. */
5971 call_pat = XVECEXP (call_pat, 0, i);
5974 return insn_dead_p (pbi, call_pat, 1, REG_NOTES (call));
5980 /* Return 1 if register REGNO was used before it was set, i.e. if it is
5981 live at function entry. Don't count global register variables, variables
5982 in registers that can be used for function arg passing, or variables in
5983 fixed hard registers. */
5986 regno_uninitialized (regno)
5989 if (n_basic_blocks == 0
5990 || (regno < FIRST_PSEUDO_REGISTER
5991 && (global_regs[regno]
5992 || fixed_regs[regno]
5993 || FUNCTION_ARG_REGNO_P (regno))))
5996 return REGNO_REG_SET_P (BASIC_BLOCK (0)->global_live_at_start, regno);
5999 /* 1 if register REGNO was alive at a place where `setjmp' was called
6000 and was set more than once or is an argument.
6001 Such regs may be clobbered by `longjmp'. */
6004 regno_clobbered_at_setjmp (regno)
6007 if (n_basic_blocks == 0)
6010 return ((REG_N_SETS (regno) > 1
6011 || REGNO_REG_SET_P (BASIC_BLOCK (0)->global_live_at_start, regno))
6012 && REGNO_REG_SET_P (regs_live_at_setjmp, regno));
6015 /* Add MEM to PBI->MEM_SET_LIST. MEM should be canonical. Respect the
6016 maximal list size; look for overlaps in mode and select the largest. */
6018 add_to_mem_set_list (pbi, mem)
6019 struct propagate_block_info *pbi;
6024 /* We don't know how large a BLKmode store is, so we must not
6025 take them into consideration. */
6026 if (GET_MODE (mem) == BLKmode)
6029 for (i = pbi->mem_set_list; i ; i = XEXP (i, 1))
6031 rtx e = XEXP (i, 0);
6032 if (rtx_equal_p (XEXP (mem, 0), XEXP (e, 0)))
6034 if (GET_MODE_SIZE (GET_MODE (mem)) > GET_MODE_SIZE (GET_MODE (e)))
6037 /* If we must store a copy of the mem, we can just modify
6038 the mode of the stored copy. */
6039 if (pbi->flags & PROP_AUTOINC)
6040 PUT_MODE (e, GET_MODE (mem));
6049 if (pbi->mem_set_list_len < MAX_MEM_SET_LIST_LEN)
6052 /* Store a copy of mem, otherwise the address may be
6053 scrogged by find_auto_inc. */
6054 if (pbi->flags & PROP_AUTOINC)
6055 mem = shallow_copy_rtx (mem);
6057 pbi->mem_set_list = alloc_EXPR_LIST (0, mem, pbi->mem_set_list);
6058 pbi->mem_set_list_len++;
6062 /* INSN references memory, possibly using autoincrement addressing modes.
6063 Find any entries on the mem_set_list that need to be invalidated due
6064 to an address change. */
6067 invalidate_mems_from_autoinc (pbi, insn)
6068 struct propagate_block_info *pbi;
6071 rtx note = REG_NOTES (insn);
6072 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
6073 if (REG_NOTE_KIND (note) == REG_INC)
6074 invalidate_mems_from_set (pbi, XEXP (note, 0));
6077 /* EXP is a REG. Remove any dependant entries from pbi->mem_set_list. */
6080 invalidate_mems_from_set (pbi, exp)
6081 struct propagate_block_info *pbi;
6084 rtx temp = pbi->mem_set_list;
6085 rtx prev = NULL_RTX;
6090 next = XEXP (temp, 1);
6091 if (reg_overlap_mentioned_p (exp, XEXP (temp, 0)))
6093 /* Splice this entry out of the list. */
6095 XEXP (prev, 1) = next;
6097 pbi->mem_set_list = next;
6098 free_EXPR_LIST_node (temp);
6099 pbi->mem_set_list_len--;
6107 /* Process the registers that are set within X. Their bits are set to
6108 1 in the regset DEAD, because they are dead prior to this insn.
6110 If INSN is nonzero, it is the insn being processed.
6112 FLAGS is the set of operations to perform. */
6115 mark_set_regs (pbi, x, insn)
6116 struct propagate_block_info *pbi;
6119 rtx cond = NULL_RTX;
6124 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
6126 if (REG_NOTE_KIND (link) == REG_INC)
6127 mark_set_1 (pbi, SET, XEXP (link, 0),
6128 (GET_CODE (x) == COND_EXEC
6129 ? COND_EXEC_TEST (x) : NULL_RTX),
6133 switch (code = GET_CODE (x))
6137 mark_set_1 (pbi, code, SET_DEST (x), cond, insn, pbi->flags);
6141 cond = COND_EXEC_TEST (x);
6142 x = COND_EXEC_CODE (x);
6148 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
6150 rtx sub = XVECEXP (x, 0, i);
6151 switch (code = GET_CODE (sub))
6154 if (cond != NULL_RTX)
6157 cond = COND_EXEC_TEST (sub);
6158 sub = COND_EXEC_CODE (sub);
6159 if (GET_CODE (sub) != SET && GET_CODE (sub) != CLOBBER)
6165 mark_set_1 (pbi, code, SET_DEST (sub), cond, insn, pbi->flags);
6180 /* Process a single set, which appears in INSN. REG (which may not
6181 actually be a REG, it may also be a SUBREG, PARALLEL, etc.) is
6182 being set using the CODE (which may be SET, CLOBBER, or COND_EXEC).
6183 If the set is conditional (because it appear in a COND_EXEC), COND
6184 will be the condition. */
6187 mark_set_1 (pbi, code, reg, cond, insn, flags)
6188 struct propagate_block_info *pbi;
6190 rtx reg, cond, insn;
6193 int regno_first = -1, regno_last = -1;
6194 unsigned long not_dead = 0;
6197 /* Modifying just one hardware register of a multi-reg value or just a
6198 byte field of a register does not mean the value from before this insn
6199 is now dead. Of course, if it was dead after it's unused now. */
6201 switch (GET_CODE (reg))
6204 /* Some targets place small structures in registers for return values of
6205 functions. We have to detect this case specially here to get correct
6206 flow information. */
6207 for (i = XVECLEN (reg, 0) - 1; i >= 0; i--)
6208 if (XEXP (XVECEXP (reg, 0, i), 0) != 0)
6209 mark_set_1 (pbi, code, XEXP (XVECEXP (reg, 0, i), 0), cond, insn,
6215 case STRICT_LOW_PART:
6216 /* ??? Assumes STRICT_LOW_PART not used on multi-word registers. */
6218 reg = XEXP (reg, 0);
6219 while (GET_CODE (reg) == SUBREG
6220 || GET_CODE (reg) == ZERO_EXTRACT
6221 || GET_CODE (reg) == SIGN_EXTRACT
6222 || GET_CODE (reg) == STRICT_LOW_PART);
6223 if (GET_CODE (reg) == MEM)
6225 not_dead = (unsigned long) REGNO_REG_SET_P (pbi->reg_live, REGNO (reg));
6229 regno_last = regno_first = REGNO (reg);
6230 if (regno_first < FIRST_PSEUDO_REGISTER)
6231 regno_last += HARD_REGNO_NREGS (regno_first, GET_MODE (reg)) - 1;
6235 if (GET_CODE (SUBREG_REG (reg)) == REG)
6237 enum machine_mode outer_mode = GET_MODE (reg);
6238 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (reg));
6240 /* Identify the range of registers affected. This is moderately
6241 tricky for hard registers. See alter_subreg. */
6243 regno_last = regno_first = REGNO (SUBREG_REG (reg));
6244 if (regno_first < FIRST_PSEUDO_REGISTER)
6246 regno_first += subreg_regno_offset (regno_first, inner_mode,
6249 regno_last = (regno_first
6250 + HARD_REGNO_NREGS (regno_first, outer_mode) - 1);
6252 /* Since we've just adjusted the register number ranges, make
6253 sure REG matches. Otherwise some_was_live will be clear
6254 when it shouldn't have been, and we'll create incorrect
6255 REG_UNUSED notes. */
6256 reg = gen_rtx_REG (outer_mode, regno_first);
6260 /* If the number of words in the subreg is less than the number
6261 of words in the full register, we have a well-defined partial
6262 set. Otherwise the high bits are undefined.
6264 This is only really applicable to pseudos, since we just took
6265 care of multi-word hard registers. */
6266 if (((GET_MODE_SIZE (outer_mode)
6267 + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
6268 < ((GET_MODE_SIZE (inner_mode)
6269 + UNITS_PER_WORD - 1) / UNITS_PER_WORD))
6270 not_dead = (unsigned long) REGNO_REG_SET_P (pbi->reg_live,
6273 reg = SUBREG_REG (reg);
6277 reg = SUBREG_REG (reg);
6284 /* If this set is a MEM, then it kills any aliased writes.
6285 If this set is a REG, then it kills any MEMs which use the reg. */
6286 if (optimize && (flags & PROP_SCAN_DEAD_CODE))
6288 if (GET_CODE (reg) == REG)
6289 invalidate_mems_from_set (pbi, reg);
6291 /* If the memory reference had embedded side effects (autoincrement
6292 address modes. Then we may need to kill some entries on the
6294 if (insn && GET_CODE (reg) == MEM)
6295 invalidate_mems_from_autoinc (pbi, insn);
6297 if (GET_CODE (reg) == MEM && ! side_effects_p (reg)
6298 /* ??? With more effort we could track conditional memory life. */
6300 /* There are no REG_INC notes for SP, so we can't assume we'll see
6301 everything that invalidates it. To be safe, don't eliminate any
6302 stores though SP; none of them should be redundant anyway. */
6303 && ! reg_mentioned_p (stack_pointer_rtx, reg))
6304 add_to_mem_set_list (pbi, canon_rtx (reg));
6307 if (GET_CODE (reg) == REG
6308 && ! (regno_first == FRAME_POINTER_REGNUM
6309 && (! reload_completed || frame_pointer_needed))
6310 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
6311 && ! (regno_first == HARD_FRAME_POINTER_REGNUM
6312 && (! reload_completed || frame_pointer_needed))
6314 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
6315 && ! (regno_first == ARG_POINTER_REGNUM && fixed_regs[regno_first])
6319 int some_was_live = 0, some_was_dead = 0;
6321 for (i = regno_first; i <= regno_last; ++i)
6323 int needed_regno = REGNO_REG_SET_P (pbi->reg_live, i);
6326 /* Order of the set operation matters here since both
6327 sets may be the same. */
6328 CLEAR_REGNO_REG_SET (pbi->cond_local_set, i);
6329 if (cond != NULL_RTX
6330 && ! REGNO_REG_SET_P (pbi->local_set, i))
6331 SET_REGNO_REG_SET (pbi->cond_local_set, i);
6333 SET_REGNO_REG_SET (pbi->local_set, i);
6335 if (code != CLOBBER)
6336 SET_REGNO_REG_SET (pbi->new_set, i);
6338 some_was_live |= needed_regno;
6339 some_was_dead |= ! needed_regno;
6342 #ifdef HAVE_conditional_execution
6343 /* Consider conditional death in deciding that the register needs
6345 if (some_was_live && ! not_dead
6346 /* The stack pointer is never dead. Well, not strictly true,
6347 but it's very difficult to tell from here. Hopefully
6348 combine_stack_adjustments will fix up the most egregious
6350 && regno_first != STACK_POINTER_REGNUM)
6352 for (i = regno_first; i <= regno_last; ++i)
6353 if (! mark_regno_cond_dead (pbi, i, cond))
6354 not_dead |= ((unsigned long) 1) << (i - regno_first);
6358 /* Additional data to record if this is the final pass. */
6359 if (flags & (PROP_LOG_LINKS | PROP_REG_INFO
6360 | PROP_DEATH_NOTES | PROP_AUTOINC))
6363 register int blocknum = pbi->bb->index;
6366 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
6368 y = pbi->reg_next_use[regno_first];
6370 /* The next use is no longer next, since a store intervenes. */
6371 for (i = regno_first; i <= regno_last; ++i)
6372 pbi->reg_next_use[i] = 0;
6375 if (flags & PROP_REG_INFO)
6377 for (i = regno_first; i <= regno_last; ++i)
6379 /* Count (weighted) references, stores, etc. This counts a
6380 register twice if it is modified, but that is correct. */
6381 REG_N_SETS (i) += 1;
6382 REG_N_REFS (i) += 1;
6383 REG_FREQ (i) += REG_FREQ_FROM_BB (pbi->bb);
6385 /* The insns where a reg is live are normally counted
6386 elsewhere, but we want the count to include the insn
6387 where the reg is set, and the normal counting mechanism
6388 would not count it. */
6389 REG_LIVE_LENGTH (i) += 1;
6392 /* If this is a hard reg, record this function uses the reg. */
6393 if (regno_first < FIRST_PSEUDO_REGISTER)
6395 for (i = regno_first; i <= regno_last; i++)
6396 regs_ever_live[i] = 1;
6400 /* Keep track of which basic blocks each reg appears in. */
6401 if (REG_BASIC_BLOCK (regno_first) == REG_BLOCK_UNKNOWN)
6402 REG_BASIC_BLOCK (regno_first) = blocknum;
6403 else if (REG_BASIC_BLOCK (regno_first) != blocknum)
6404 REG_BASIC_BLOCK (regno_first) = REG_BLOCK_GLOBAL;
6408 if (! some_was_dead)
6410 if (flags & PROP_LOG_LINKS)
6412 /* Make a logical link from the next following insn
6413 that uses this register, back to this insn.
6414 The following insns have already been processed.
6416 We don't build a LOG_LINK for hard registers containing
6417 in ASM_OPERANDs. If these registers get replaced,
6418 we might wind up changing the semantics of the insn,
6419 even if reload can make what appear to be valid
6420 assignments later. */
6421 if (y && (BLOCK_NUM (y) == blocknum)
6422 && (regno_first >= FIRST_PSEUDO_REGISTER
6423 || asm_noperands (PATTERN (y)) < 0))
6424 LOG_LINKS (y) = alloc_INSN_LIST (insn, LOG_LINKS (y));
6429 else if (! some_was_live)
6431 if (flags & PROP_REG_INFO)
6432 REG_N_DEATHS (regno_first) += 1;
6434 if (flags & PROP_DEATH_NOTES)
6436 /* Note that dead stores have already been deleted
6437 when possible. If we get here, we have found a
6438 dead store that cannot be eliminated (because the
6439 same insn does something useful). Indicate this
6440 by marking the reg being set as dying here. */
6442 = alloc_EXPR_LIST (REG_UNUSED, reg, REG_NOTES (insn));
6447 if (flags & PROP_DEATH_NOTES)
6449 /* This is a case where we have a multi-word hard register
6450 and some, but not all, of the words of the register are
6451 needed in subsequent insns. Write REG_UNUSED notes
6452 for those parts that were not needed. This case should
6455 for (i = regno_first; i <= regno_last; ++i)
6456 if (! REGNO_REG_SET_P (pbi->reg_live, i))
6458 = alloc_EXPR_LIST (REG_UNUSED,
6459 gen_rtx_REG (reg_raw_mode[i], i),
6465 /* Mark the register as being dead. */
6467 /* The stack pointer is never dead. Well, not strictly true,
6468 but it's very difficult to tell from here. Hopefully
6469 combine_stack_adjustments will fix up the most egregious
6471 && regno_first != STACK_POINTER_REGNUM)
6473 for (i = regno_first; i <= regno_last; ++i)
6474 if (!(not_dead & (((unsigned long) 1) << (i - regno_first))))
6475 CLEAR_REGNO_REG_SET (pbi->reg_live, i);
6478 else if (GET_CODE (reg) == REG)
6480 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
6481 pbi->reg_next_use[regno_first] = 0;
6484 /* If this is the last pass and this is a SCRATCH, show it will be dying
6485 here and count it. */
6486 else if (GET_CODE (reg) == SCRATCH)
6488 if (flags & PROP_DEATH_NOTES)
6490 = alloc_EXPR_LIST (REG_UNUSED, reg, REG_NOTES (insn));
6494 #ifdef HAVE_conditional_execution
6495 /* Mark REGNO conditionally dead.
6496 Return true if the register is now unconditionally dead. */
6499 mark_regno_cond_dead (pbi, regno, cond)
6500 struct propagate_block_info *pbi;
6504 /* If this is a store to a predicate register, the value of the
6505 predicate is changing, we don't know that the predicate as seen
6506 before is the same as that seen after. Flush all dependent
6507 conditions from reg_cond_dead. This will make all such
6508 conditionally live registers unconditionally live. */
6509 if (REGNO_REG_SET_P (pbi->reg_cond_reg, regno))
6510 flush_reg_cond_reg (pbi, regno);
6512 /* If this is an unconditional store, remove any conditional
6513 life that may have existed. */
6514 if (cond == NULL_RTX)
6515 splay_tree_remove (pbi->reg_cond_dead, regno);
6518 splay_tree_node node;
6519 struct reg_cond_life_info *rcli;
6522 /* Otherwise this is a conditional set. Record that fact.
6523 It may have been conditionally used, or there may be a
6524 subsequent set with a complimentary condition. */
6526 node = splay_tree_lookup (pbi->reg_cond_dead, regno);
6529 /* The register was unconditionally live previously.
6530 Record the current condition as the condition under
6531 which it is dead. */
6532 rcli = (struct reg_cond_life_info *) xmalloc (sizeof (*rcli));
6533 rcli->condition = cond;
6534 rcli->stores = cond;
6535 rcli->orig_condition = const0_rtx;
6536 splay_tree_insert (pbi->reg_cond_dead, regno,
6537 (splay_tree_value) rcli);
6539 SET_REGNO_REG_SET (pbi->reg_cond_reg, REGNO (XEXP (cond, 0)));
6541 /* Not unconditionaly dead. */
6546 /* The register was conditionally live previously.
6547 Add the new condition to the old. */
6548 rcli = (struct reg_cond_life_info *) node->value;
6549 ncond = rcli->condition;
6550 ncond = ior_reg_cond (ncond, cond, 1);
6551 if (rcli->stores == const0_rtx)
6552 rcli->stores = cond;
6553 else if (rcli->stores != const1_rtx)
6554 rcli->stores = ior_reg_cond (rcli->stores, cond, 1);
6556 /* If the register is now unconditionally dead, remove the entry
6557 in the splay_tree. A register is unconditionally dead if the
6558 dead condition ncond is true. A register is also unconditionally
6559 dead if the sum of all conditional stores is an unconditional
6560 store (stores is true), and the dead condition is identically the
6561 same as the original dead condition initialized at the end of
6562 the block. This is a pointer compare, not an rtx_equal_p
6564 if (ncond == const1_rtx
6565 || (ncond == rcli->orig_condition && rcli->stores == const1_rtx))
6566 splay_tree_remove (pbi->reg_cond_dead, regno);
6569 rcli->condition = ncond;
6571 SET_REGNO_REG_SET (pbi->reg_cond_reg, REGNO (XEXP (cond, 0)));
6573 /* Not unconditionaly dead. */
6582 /* Called from splay_tree_delete for pbi->reg_cond_life. */
6585 free_reg_cond_life_info (value)
6586 splay_tree_value value;
6588 struct reg_cond_life_info *rcli = (struct reg_cond_life_info *) value;
6592 /* Helper function for flush_reg_cond_reg. */
6595 flush_reg_cond_reg_1 (node, data)
6596 splay_tree_node node;
6599 struct reg_cond_life_info *rcli;
6600 int *xdata = (int *) data;
6601 unsigned int regno = xdata[0];
6603 /* Don't need to search if last flushed value was farther on in
6604 the in-order traversal. */
6605 if (xdata[1] >= (int) node->key)
6608 /* Splice out portions of the expression that refer to regno. */
6609 rcli = (struct reg_cond_life_info *) node->value;
6610 rcli->condition = elim_reg_cond (rcli->condition, regno);
6611 if (rcli->stores != const0_rtx && rcli->stores != const1_rtx)
6612 rcli->stores = elim_reg_cond (rcli->stores, regno);
6614 /* If the entire condition is now false, signal the node to be removed. */
6615 if (rcli->condition == const0_rtx)
6617 xdata[1] = node->key;
6620 else if (rcli->condition == const1_rtx)
6626 /* Flush all (sub) expressions referring to REGNO from REG_COND_LIVE. */
6629 flush_reg_cond_reg (pbi, regno)
6630 struct propagate_block_info *pbi;
6637 while (splay_tree_foreach (pbi->reg_cond_dead,
6638 flush_reg_cond_reg_1, pair) == -1)
6639 splay_tree_remove (pbi->reg_cond_dead, pair[1]);
6641 CLEAR_REGNO_REG_SET (pbi->reg_cond_reg, regno);
6644 /* Logical arithmetic on predicate conditions. IOR, NOT and AND.
6645 For ior/and, the ADD flag determines whether we want to add the new
6646 condition X to the old one unconditionally. If it is zero, we will
6647 only return a new expression if X allows us to simplify part of
6648 OLD, otherwise we return OLD unchanged to the caller.
6649 If ADD is nonzero, we will return a new condition in all cases. The
6650 toplevel caller of one of these functions should always pass 1 for
6654 ior_reg_cond (old, x, add)
6660 if (GET_RTX_CLASS (GET_CODE (old)) == '<')
6662 if (GET_RTX_CLASS (GET_CODE (x)) == '<'
6663 && REVERSE_CONDEXEC_PREDICATES_P (GET_CODE (x), GET_CODE (old))
6664 && REGNO (XEXP (x, 0)) == REGNO (XEXP (old, 0)))
6666 if (GET_CODE (x) == GET_CODE (old)
6667 && REGNO (XEXP (x, 0)) == REGNO (XEXP (old, 0)))
6671 return gen_rtx_IOR (0, old, x);
6674 switch (GET_CODE (old))
6677 op0 = ior_reg_cond (XEXP (old, 0), x, 0);
6678 op1 = ior_reg_cond (XEXP (old, 1), x, 0);
6679 if (op0 != XEXP (old, 0) || op1 != XEXP (old, 1))
6681 if (op0 == const0_rtx)
6683 if (op1 == const0_rtx)
6685 if (op0 == const1_rtx || op1 == const1_rtx)
6687 if (op0 == XEXP (old, 0))
6688 op0 = gen_rtx_IOR (0, op0, x);
6690 op1 = gen_rtx_IOR (0, op1, x);
6691 return gen_rtx_IOR (0, op0, op1);
6695 return gen_rtx_IOR (0, old, x);
6698 op0 = ior_reg_cond (XEXP (old, 0), x, 0);
6699 op1 = ior_reg_cond (XEXP (old, 1), x, 0);
6700 if (op0 != XEXP (old, 0) || op1 != XEXP (old, 1))
6702 if (op0 == const1_rtx)
6704 if (op1 == const1_rtx)
6706 if (op0 == const0_rtx || op1 == const0_rtx)
6708 if (op0 == XEXP (old, 0))
6709 op0 = gen_rtx_IOR (0, op0, x);
6711 op1 = gen_rtx_IOR (0, op1, x);
6712 return gen_rtx_AND (0, op0, op1);
6716 return gen_rtx_IOR (0, old, x);
6719 op0 = and_reg_cond (XEXP (old, 0), not_reg_cond (x), 0);
6720 if (op0 != XEXP (old, 0))
6721 return not_reg_cond (op0);
6724 return gen_rtx_IOR (0, old, x);
6735 enum rtx_code x_code;
6737 if (x == const0_rtx)
6739 else if (x == const1_rtx)
6741 x_code = GET_CODE (x);
6744 if (GET_RTX_CLASS (x_code) == '<'
6745 && GET_CODE (XEXP (x, 0)) == REG)
6747 if (XEXP (x, 1) != const0_rtx)
6750 return gen_rtx_fmt_ee (reverse_condition (x_code),
6751 VOIDmode, XEXP (x, 0), const0_rtx);
6753 return gen_rtx_NOT (0, x);
6757 and_reg_cond (old, x, add)
6763 if (GET_RTX_CLASS (GET_CODE (old)) == '<')
6765 if (GET_RTX_CLASS (GET_CODE (x)) == '<'
6766 && GET_CODE (x) == reverse_condition (GET_CODE (old))
6767 && REGNO (XEXP (x, 0)) == REGNO (XEXP (old, 0)))
6769 if (GET_CODE (x) == GET_CODE (old)
6770 && REGNO (XEXP (x, 0)) == REGNO (XEXP (old, 0)))
6774 return gen_rtx_AND (0, old, x);
6777 switch (GET_CODE (old))
6780 op0 = and_reg_cond (XEXP (old, 0), x, 0);
6781 op1 = and_reg_cond (XEXP (old, 1), x, 0);
6782 if (op0 != XEXP (old, 0) || op1 != XEXP (old, 1))
6784 if (op0 == const0_rtx)
6786 if (op1 == const0_rtx)
6788 if (op0 == const1_rtx || op1 == const1_rtx)
6790 if (op0 == XEXP (old, 0))
6791 op0 = gen_rtx_AND (0, op0, x);
6793 op1 = gen_rtx_AND (0, op1, x);
6794 return gen_rtx_IOR (0, op0, op1);
6798 return gen_rtx_AND (0, old, x);
6801 op0 = and_reg_cond (XEXP (old, 0), x, 0);
6802 op1 = and_reg_cond (XEXP (old, 1), x, 0);
6803 if (op0 != XEXP (old, 0) || op1 != XEXP (old, 1))
6805 if (op0 == const1_rtx)
6807 if (op1 == const1_rtx)
6809 if (op0 == const0_rtx || op1 == const0_rtx)
6811 if (op0 == XEXP (old, 0))
6812 op0 = gen_rtx_AND (0, op0, x);
6814 op1 = gen_rtx_AND (0, op1, x);
6815 return gen_rtx_AND (0, op0, op1);
6820 /* If X is identical to one of the existing terms of the AND,
6821 then just return what we already have. */
6822 /* ??? There really should be some sort of recursive check here in
6823 case there are nested ANDs. */
6824 if ((GET_CODE (XEXP (old, 0)) == GET_CODE (x)
6825 && REGNO (XEXP (XEXP (old, 0), 0)) == REGNO (XEXP (x, 0)))
6826 || (GET_CODE (XEXP (old, 1)) == GET_CODE (x)
6827 && REGNO (XEXP (XEXP (old, 1), 0)) == REGNO (XEXP (x, 0))))
6830 return gen_rtx_AND (0, old, x);
6833 op0 = ior_reg_cond (XEXP (old, 0), not_reg_cond (x), 0);
6834 if (op0 != XEXP (old, 0))
6835 return not_reg_cond (op0);
6838 return gen_rtx_AND (0, old, x);
6845 /* Given a condition X, remove references to reg REGNO and return the
6846 new condition. The removal will be done so that all conditions
6847 involving REGNO are considered to evaluate to false. This function
6848 is used when the value of REGNO changes. */
6851 elim_reg_cond (x, regno)
6857 if (GET_RTX_CLASS (GET_CODE (x)) == '<')
6859 if (REGNO (XEXP (x, 0)) == regno)
6864 switch (GET_CODE (x))
6867 op0 = elim_reg_cond (XEXP (x, 0), regno);
6868 op1 = elim_reg_cond (XEXP (x, 1), regno);
6869 if (op0 == const0_rtx || op1 == const0_rtx)
6871 if (op0 == const1_rtx)
6873 if (op1 == const1_rtx)
6875 if (op0 == XEXP (x, 0) && op1 == XEXP (x, 1))
6877 return gen_rtx_AND (0, op0, op1);
6880 op0 = elim_reg_cond (XEXP (x, 0), regno);
6881 op1 = elim_reg_cond (XEXP (x, 1), regno);
6882 if (op0 == const1_rtx || op1 == const1_rtx)
6884 if (op0 == const0_rtx)
6886 if (op1 == const0_rtx)
6888 if (op0 == XEXP (x, 0) && op1 == XEXP (x, 1))
6890 return gen_rtx_IOR (0, op0, op1);
6893 op0 = elim_reg_cond (XEXP (x, 0), regno);
6894 if (op0 == const0_rtx)
6896 if (op0 == const1_rtx)
6898 if (op0 != XEXP (x, 0))
6899 return not_reg_cond (op0);
6906 #endif /* HAVE_conditional_execution */
6910 /* Try to substitute the auto-inc expression INC as the address inside
6911 MEM which occurs in INSN. Currently, the address of MEM is an expression
6912 involving INCR_REG, and INCR is the next use of INCR_REG; it is an insn
6913 that has a single set whose source is a PLUS of INCR_REG and something
6917 attempt_auto_inc (pbi, inc, insn, mem, incr, incr_reg)
6918 struct propagate_block_info *pbi;
6919 rtx inc, insn, mem, incr, incr_reg;
6921 int regno = REGNO (incr_reg);
6922 rtx set = single_set (incr);
6923 rtx q = SET_DEST (set);
6924 rtx y = SET_SRC (set);
6925 int opnum = XEXP (y, 0) == incr_reg ? 0 : 1;
6927 /* Make sure this reg appears only once in this insn. */
6928 if (count_occurrences (PATTERN (insn), incr_reg, 1) != 1)
6931 if (dead_or_set_p (incr, incr_reg)
6932 /* Mustn't autoinc an eliminable register. */
6933 && (regno >= FIRST_PSEUDO_REGISTER
6934 || ! TEST_HARD_REG_BIT (elim_reg_set, regno)))
6936 /* This is the simple case. Try to make the auto-inc. If
6937 we can't, we are done. Otherwise, we will do any
6938 needed updates below. */
6939 if (! validate_change (insn, &XEXP (mem, 0), inc, 0))
6942 else if (GET_CODE (q) == REG
6943 /* PREV_INSN used here to check the semi-open interval
6945 && ! reg_used_between_p (q, PREV_INSN (insn), incr)
6946 /* We must also check for sets of q as q may be
6947 a call clobbered hard register and there may
6948 be a call between PREV_INSN (insn) and incr. */
6949 && ! reg_set_between_p (q, PREV_INSN (insn), incr))
6951 /* We have *p followed sometime later by q = p+size.
6952 Both p and q must be live afterward,
6953 and q is not used between INSN and its assignment.
6954 Change it to q = p, ...*q..., q = q+size.
6955 Then fall into the usual case. */
6959 emit_move_insn (q, incr_reg);
6960 insns = get_insns ();
6963 if (basic_block_for_insn)
6964 for (temp = insns; temp; temp = NEXT_INSN (temp))
6965 set_block_for_insn (temp, pbi->bb);
6967 /* If we can't make the auto-inc, or can't make the
6968 replacement into Y, exit. There's no point in making
6969 the change below if we can't do the auto-inc and doing
6970 so is not correct in the pre-inc case. */
6973 validate_change (insn, &XEXP (mem, 0), inc, 1);
6974 validate_change (incr, &XEXP (y, opnum), q, 1);
6975 if (! apply_change_group ())
6978 /* We now know we'll be doing this change, so emit the
6979 new insn(s) and do the updates. */
6980 emit_insns_before (insns, insn);
6982 if (pbi->bb->head == insn)
6983 pbi->bb->head = insns;
6985 /* INCR will become a NOTE and INSN won't contain a
6986 use of INCR_REG. If a use of INCR_REG was just placed in
6987 the insn before INSN, make that the next use.
6988 Otherwise, invalidate it. */
6989 if (GET_CODE (PREV_INSN (insn)) == INSN
6990 && GET_CODE (PATTERN (PREV_INSN (insn))) == SET
6991 && SET_SRC (PATTERN (PREV_INSN (insn))) == incr_reg)
6992 pbi->reg_next_use[regno] = PREV_INSN (insn);
6994 pbi->reg_next_use[regno] = 0;
6999 /* REGNO is now used in INCR which is below INSN, but
7000 it previously wasn't live here. If we don't mark
7001 it as live, we'll put a REG_DEAD note for it
7002 on this insn, which is incorrect. */
7003 SET_REGNO_REG_SET (pbi->reg_live, regno);
7005 /* If there are any calls between INSN and INCR, show
7006 that REGNO now crosses them. */
7007 for (temp = insn; temp != incr; temp = NEXT_INSN (temp))
7008 if (GET_CODE (temp) == CALL_INSN)
7009 REG_N_CALLS_CROSSED (regno)++;
7014 /* If we haven't returned, it means we were able to make the
7015 auto-inc, so update the status. First, record that this insn
7016 has an implicit side effect. */
7018 REG_NOTES (insn) = alloc_EXPR_LIST (REG_INC, incr_reg, REG_NOTES (insn));
7020 /* Modify the old increment-insn to simply copy
7021 the already-incremented value of our register. */
7022 if (! validate_change (incr, &SET_SRC (set), incr_reg, 0))
7025 /* If that makes it a no-op (copying the register into itself) delete
7026 it so it won't appear to be a "use" and a "set" of this
7028 if (REGNO (SET_DEST (set)) == REGNO (incr_reg))
7030 /* If the original source was dead, it's dead now. */
7033 while ((note = find_reg_note (incr, REG_DEAD, NULL_RTX)) != NULL_RTX)
7035 remove_note (incr, note);
7036 if (XEXP (note, 0) != incr_reg)
7037 CLEAR_REGNO_REG_SET (pbi->reg_live, REGNO (XEXP (note, 0)));
7040 PUT_CODE (incr, NOTE);
7041 NOTE_LINE_NUMBER (incr) = NOTE_INSN_DELETED;
7042 NOTE_SOURCE_FILE (incr) = 0;
7045 if (regno >= FIRST_PSEUDO_REGISTER)
7047 /* Count an extra reference to the reg. When a reg is
7048 incremented, spilling it is worse, so we want to make
7049 that less likely. */
7050 REG_FREQ (regno) += REG_FREQ_FROM_BB (pbi->bb);
7052 /* Count the increment as a setting of the register,
7053 even though it isn't a SET in rtl. */
7054 REG_N_SETS (regno)++;
7058 /* X is a MEM found in INSN. See if we can convert it into an auto-increment
7062 find_auto_inc (pbi, x, insn)
7063 struct propagate_block_info *pbi;
7067 rtx addr = XEXP (x, 0);
7068 HOST_WIDE_INT offset = 0;
7069 rtx set, y, incr, inc_val;
7071 int size = GET_MODE_SIZE (GET_MODE (x));
7073 if (GET_CODE (insn) == JUMP_INSN)
7076 /* Here we detect use of an index register which might be good for
7077 postincrement, postdecrement, preincrement, or predecrement. */
7079 if (GET_CODE (addr) == PLUS && GET_CODE (XEXP (addr, 1)) == CONST_INT)
7080 offset = INTVAL (XEXP (addr, 1)), addr = XEXP (addr, 0);
7082 if (GET_CODE (addr) != REG)
7085 regno = REGNO (addr);
7087 /* Is the next use an increment that might make auto-increment? */
7088 incr = pbi->reg_next_use[regno];
7089 if (incr == 0 || BLOCK_NUM (incr) != BLOCK_NUM (insn))
7091 set = single_set (incr);
7092 if (set == 0 || GET_CODE (set) != SET)
7096 if (GET_CODE (y) != PLUS)
7099 if (REG_P (XEXP (y, 0)) && REGNO (XEXP (y, 0)) == REGNO (addr))
7100 inc_val = XEXP (y, 1);
7101 else if (REG_P (XEXP (y, 1)) && REGNO (XEXP (y, 1)) == REGNO (addr))
7102 inc_val = XEXP (y, 0);
7106 if (GET_CODE (inc_val) == CONST_INT)
7108 if (HAVE_POST_INCREMENT
7109 && (INTVAL (inc_val) == size && offset == 0))
7110 attempt_auto_inc (pbi, gen_rtx_POST_INC (Pmode, addr), insn, x,
7112 else if (HAVE_POST_DECREMENT
7113 && (INTVAL (inc_val) == -size && offset == 0))
7114 attempt_auto_inc (pbi, gen_rtx_POST_DEC (Pmode, addr), insn, x,
7116 else if (HAVE_PRE_INCREMENT
7117 && (INTVAL (inc_val) == size && offset == size))
7118 attempt_auto_inc (pbi, gen_rtx_PRE_INC (Pmode, addr), insn, x,
7120 else if (HAVE_PRE_DECREMENT
7121 && (INTVAL (inc_val) == -size && offset == -size))
7122 attempt_auto_inc (pbi, gen_rtx_PRE_DEC (Pmode, addr), insn, x,
7124 else if (HAVE_POST_MODIFY_DISP && offset == 0)
7125 attempt_auto_inc (pbi, gen_rtx_POST_MODIFY (Pmode, addr,
7126 gen_rtx_PLUS (Pmode,
7129 insn, x, incr, addr);
7131 else if (GET_CODE (inc_val) == REG
7132 && ! reg_set_between_p (inc_val, PREV_INSN (insn),
7136 if (HAVE_POST_MODIFY_REG && offset == 0)
7137 attempt_auto_inc (pbi, gen_rtx_POST_MODIFY (Pmode, addr,
7138 gen_rtx_PLUS (Pmode,
7141 insn, x, incr, addr);
7145 #endif /* AUTO_INC_DEC */
7148 mark_used_reg (pbi, reg, cond, insn)
7149 struct propagate_block_info *pbi;
7151 rtx cond ATTRIBUTE_UNUSED;
7154 unsigned int regno_first, regno_last, i;
7155 int some_was_live, some_was_dead, some_not_set;
7157 regno_last = regno_first = REGNO (reg);
7158 if (regno_first < FIRST_PSEUDO_REGISTER)
7159 regno_last += HARD_REGNO_NREGS (regno_first, GET_MODE (reg)) - 1;
7161 /* Find out if any of this register is live after this instruction. */
7162 some_was_live = some_was_dead = 0;
7163 for (i = regno_first; i <= regno_last; ++i)
7165 int needed_regno = REGNO_REG_SET_P (pbi->reg_live, i);
7166 some_was_live |= needed_regno;
7167 some_was_dead |= ! needed_regno;
7170 /* Find out if any of the register was set this insn. */
7172 for (i = regno_first; i <= regno_last; ++i)
7173 some_not_set |= ! REGNO_REG_SET_P (pbi->new_set, i);
7175 if (pbi->flags & (PROP_LOG_LINKS | PROP_AUTOINC))
7177 /* Record where each reg is used, so when the reg is set we know
7178 the next insn that uses it. */
7179 pbi->reg_next_use[regno_first] = insn;
7182 if (pbi->flags & PROP_REG_INFO)
7184 if (regno_first < FIRST_PSEUDO_REGISTER)
7186 /* If this is a register we are going to try to eliminate,
7187 don't mark it live here. If we are successful in
7188 eliminating it, it need not be live unless it is used for
7189 pseudos, in which case it will have been set live when it
7190 was allocated to the pseudos. If the register will not
7191 be eliminated, reload will set it live at that point.
7193 Otherwise, record that this function uses this register. */
7194 /* ??? The PPC backend tries to "eliminate" on the pic
7195 register to itself. This should be fixed. In the mean
7196 time, hack around it. */
7198 if (! (TEST_HARD_REG_BIT (elim_reg_set, regno_first)
7199 && (regno_first == FRAME_POINTER_REGNUM
7200 || regno_first == ARG_POINTER_REGNUM)))
7201 for (i = regno_first; i <= regno_last; ++i)
7202 regs_ever_live[i] = 1;
7206 /* Keep track of which basic block each reg appears in. */
7208 register int blocknum = pbi->bb->index;
7209 if (REG_BASIC_BLOCK (regno_first) == REG_BLOCK_UNKNOWN)
7210 REG_BASIC_BLOCK (regno_first) = blocknum;
7211 else if (REG_BASIC_BLOCK (regno_first) != blocknum)
7212 REG_BASIC_BLOCK (regno_first) = REG_BLOCK_GLOBAL;
7214 /* Count (weighted) number of uses of each reg. */
7215 REG_FREQ (regno_first) += REG_FREQ_FROM_BB (pbi->bb);
7216 REG_N_REFS (regno_first)++;
7220 /* Record and count the insns in which a reg dies. If it is used in
7221 this insn and was dead below the insn then it dies in this insn.
7222 If it was set in this insn, we do not make a REG_DEAD note;
7223 likewise if we already made such a note. */
7224 if ((pbi->flags & (PROP_DEATH_NOTES | PROP_REG_INFO))
7228 /* Check for the case where the register dying partially
7229 overlaps the register set by this insn. */
7230 if (regno_first != regno_last)
7231 for (i = regno_first; i <= regno_last; ++i)
7232 some_was_live |= REGNO_REG_SET_P (pbi->new_set, i);
7234 /* If none of the words in X is needed, make a REG_DEAD note.
7235 Otherwise, we must make partial REG_DEAD notes. */
7236 if (! some_was_live)
7238 if ((pbi->flags & PROP_DEATH_NOTES)
7239 && ! find_regno_note (insn, REG_DEAD, regno_first))
7241 = alloc_EXPR_LIST (REG_DEAD, reg, REG_NOTES (insn));
7243 if (pbi->flags & PROP_REG_INFO)
7244 REG_N_DEATHS (regno_first)++;
7248 /* Don't make a REG_DEAD note for a part of a register
7249 that is set in the insn. */
7250 for (i = regno_first; i <= regno_last; ++i)
7251 if (! REGNO_REG_SET_P (pbi->reg_live, i)
7252 && ! dead_or_set_regno_p (insn, i))
7254 = alloc_EXPR_LIST (REG_DEAD,
7255 gen_rtx_REG (reg_raw_mode[i], i),
7260 /* Mark the register as being live. */
7261 for (i = regno_first; i <= regno_last; ++i)
7263 SET_REGNO_REG_SET (pbi->reg_live, i);
7265 #ifdef HAVE_conditional_execution
7266 /* If this is a conditional use, record that fact. If it is later
7267 conditionally set, we'll know to kill the register. */
7268 if (cond != NULL_RTX)
7270 splay_tree_node node;
7271 struct reg_cond_life_info *rcli;
7276 node = splay_tree_lookup (pbi->reg_cond_dead, i);
7279 /* The register was unconditionally live previously.
7280 No need to do anything. */
7284 /* The register was conditionally live previously.
7285 Subtract the new life cond from the old death cond. */
7286 rcli = (struct reg_cond_life_info *) node->value;
7287 ncond = rcli->condition;
7288 ncond = and_reg_cond (ncond, not_reg_cond (cond), 1);
7290 /* If the register is now unconditionally live,
7291 remove the entry in the splay_tree. */
7292 if (ncond == const0_rtx)
7293 splay_tree_remove (pbi->reg_cond_dead, i);
7296 rcli->condition = ncond;
7297 SET_REGNO_REG_SET (pbi->reg_cond_reg,
7298 REGNO (XEXP (cond, 0)));
7304 /* The register was not previously live at all. Record
7305 the condition under which it is still dead. */
7306 rcli = (struct reg_cond_life_info *) xmalloc (sizeof (*rcli));
7307 rcli->condition = not_reg_cond (cond);
7308 rcli->stores = const0_rtx;
7309 rcli->orig_condition = const0_rtx;
7310 splay_tree_insert (pbi->reg_cond_dead, i,
7311 (splay_tree_value) rcli);
7313 SET_REGNO_REG_SET (pbi->reg_cond_reg, REGNO (XEXP (cond, 0)));
7316 else if (some_was_live)
7318 /* The register may have been conditionally live previously, but
7319 is now unconditionally live. Remove it from the conditionally
7320 dead list, so that a conditional set won't cause us to think
7322 splay_tree_remove (pbi->reg_cond_dead, i);
7328 /* Scan expression X and store a 1-bit in NEW_LIVE for each reg it uses.
7329 This is done assuming the registers needed from X are those that
7330 have 1-bits in PBI->REG_LIVE.
7332 INSN is the containing instruction. If INSN is dead, this function
7336 mark_used_regs (pbi, x, cond, insn)
7337 struct propagate_block_info *pbi;
7340 register RTX_CODE code;
7342 int flags = pbi->flags;
7345 code = GET_CODE (x);
7365 /* If we are clobbering a MEM, mark any registers inside the address
7367 if (GET_CODE (XEXP (x, 0)) == MEM)
7368 mark_used_regs (pbi, XEXP (XEXP (x, 0), 0), cond, insn);
7372 /* Don't bother watching stores to mems if this is not the
7373 final pass. We'll not be deleting dead stores this round. */
7374 if (optimize && (flags & PROP_SCAN_DEAD_CODE))
7376 /* Invalidate the data for the last MEM stored, but only if MEM is
7377 something that can be stored into. */
7378 if (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
7379 && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)))
7380 /* Needn't clear the memory set list. */
7384 rtx temp = pbi->mem_set_list;
7385 rtx prev = NULL_RTX;
7390 next = XEXP (temp, 1);
7391 if (anti_dependence (XEXP (temp, 0), x))
7393 /* Splice temp out of the list. */
7395 XEXP (prev, 1) = next;
7397 pbi->mem_set_list = next;
7398 free_EXPR_LIST_node (temp);
7399 pbi->mem_set_list_len--;
7407 /* If the memory reference had embedded side effects (autoincrement
7408 address modes. Then we may need to kill some entries on the
7411 invalidate_mems_from_autoinc (pbi, insn);
7415 if (flags & PROP_AUTOINC)
7416 find_auto_inc (pbi, x, insn);
7421 #ifdef CLASS_CANNOT_CHANGE_MODE
7422 if (GET_CODE (SUBREG_REG (x)) == REG
7423 && REGNO (SUBREG_REG (x)) >= FIRST_PSEUDO_REGISTER
7424 && CLASS_CANNOT_CHANGE_MODE_P (GET_MODE (x),
7425 GET_MODE (SUBREG_REG (x))))
7426 REG_CHANGES_MODE (REGNO (SUBREG_REG (x))) = 1;
7429 /* While we're here, optimize this case. */
7431 if (GET_CODE (x) != REG)
7436 /* See a register other than being set => mark it as needed. */
7437 mark_used_reg (pbi, x, cond, insn);
7442 register rtx testreg = SET_DEST (x);
7445 /* If storing into MEM, don't show it as being used. But do
7446 show the address as being used. */
7447 if (GET_CODE (testreg) == MEM)
7450 if (flags & PROP_AUTOINC)
7451 find_auto_inc (pbi, testreg, insn);
7453 mark_used_regs (pbi, XEXP (testreg, 0), cond, insn);
7454 mark_used_regs (pbi, SET_SRC (x), cond, insn);
7458 /* Storing in STRICT_LOW_PART is like storing in a reg
7459 in that this SET might be dead, so ignore it in TESTREG.
7460 but in some other ways it is like using the reg.
7462 Storing in a SUBREG or a bit field is like storing the entire
7463 register in that if the register's value is not used
7464 then this SET is not needed. */
7465 while (GET_CODE (testreg) == STRICT_LOW_PART
7466 || GET_CODE (testreg) == ZERO_EXTRACT
7467 || GET_CODE (testreg) == SIGN_EXTRACT
7468 || GET_CODE (testreg) == SUBREG)
7470 #ifdef CLASS_CANNOT_CHANGE_MODE
7471 if (GET_CODE (testreg) == SUBREG
7472 && GET_CODE (SUBREG_REG (testreg)) == REG
7473 && REGNO (SUBREG_REG (testreg)) >= FIRST_PSEUDO_REGISTER
7474 && CLASS_CANNOT_CHANGE_MODE_P (GET_MODE (SUBREG_REG (testreg)),
7475 GET_MODE (testreg)))
7476 REG_CHANGES_MODE (REGNO (SUBREG_REG (testreg))) = 1;
7479 /* Modifying a single register in an alternate mode
7480 does not use any of the old value. But these other
7481 ways of storing in a register do use the old value. */
7482 if (GET_CODE (testreg) == SUBREG
7483 && !(REG_SIZE (SUBREG_REG (testreg)) > REG_SIZE (testreg)))
7488 testreg = XEXP (testreg, 0);
7491 /* If this is a store into a register or group of registers,
7492 recursively scan the value being stored. */
7494 if ((GET_CODE (testreg) == PARALLEL
7495 && GET_MODE (testreg) == BLKmode)
7496 || (GET_CODE (testreg) == REG
7497 && (regno = REGNO (testreg),
7498 ! (regno == FRAME_POINTER_REGNUM
7499 && (! reload_completed || frame_pointer_needed)))
7500 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
7501 && ! (regno == HARD_FRAME_POINTER_REGNUM
7502 && (! reload_completed || frame_pointer_needed))
7504 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
7505 && ! (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
7510 mark_used_regs (pbi, SET_DEST (x), cond, insn);
7511 mark_used_regs (pbi, SET_SRC (x), cond, insn);
7518 case UNSPEC_VOLATILE:
7522 /* Traditional and volatile asm instructions must be considered to use
7523 and clobber all hard registers, all pseudo-registers and all of
7524 memory. So must TRAP_IF and UNSPEC_VOLATILE operations.
7526 Consider for instance a volatile asm that changes the fpu rounding
7527 mode. An insn should not be moved across this even if it only uses
7528 pseudo-regs because it might give an incorrectly rounded result.
7530 ?!? Unfortunately, marking all hard registers as live causes massive
7531 problems for the register allocator and marking all pseudos as live
7532 creates mountains of uninitialized variable warnings.
7534 So for now, just clear the memory set list and mark any regs
7535 we can find in ASM_OPERANDS as used. */
7536 if (code != ASM_OPERANDS || MEM_VOLATILE_P (x))
7538 free_EXPR_LIST_list (&pbi->mem_set_list);
7539 pbi->mem_set_list_len = 0;
7542 /* For all ASM_OPERANDS, we must traverse the vector of input operands.
7543 We can not just fall through here since then we would be confused
7544 by the ASM_INPUT rtx inside ASM_OPERANDS, which do not indicate
7545 traditional asms unlike their normal usage. */
7546 if (code == ASM_OPERANDS)
7550 for (j = 0; j < ASM_OPERANDS_INPUT_LENGTH (x); j++)
7551 mark_used_regs (pbi, ASM_OPERANDS_INPUT (x, j), cond, insn);
7557 if (cond != NULL_RTX)
7560 mark_used_regs (pbi, COND_EXEC_TEST (x), NULL_RTX, insn);
7562 cond = COND_EXEC_TEST (x);
7563 x = COND_EXEC_CODE (x);
7567 /* We _do_not_ want to scan operands of phi nodes. Operands of
7568 a phi function are evaluated only when control reaches this
7569 block along a particular edge. Therefore, regs that appear
7570 as arguments to phi should not be added to the global live at
7578 /* Recursively scan the operands of this expression. */
7581 register const char *fmt = GET_RTX_FORMAT (code);
7584 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
7588 /* Tail recursive case: save a function call level. */
7594 mark_used_regs (pbi, XEXP (x, i), cond, insn);
7596 else if (fmt[i] == 'E')
7599 for (j = 0; j < XVECLEN (x, i); j++)
7600 mark_used_regs (pbi, XVECEXP (x, i, j), cond, insn);
7609 try_pre_increment_1 (pbi, insn)
7610 struct propagate_block_info *pbi;
7613 /* Find the next use of this reg. If in same basic block,
7614 make it do pre-increment or pre-decrement if appropriate. */
7615 rtx x = single_set (insn);
7616 HOST_WIDE_INT amount = ((GET_CODE (SET_SRC (x)) == PLUS ? 1 : -1)
7617 * INTVAL (XEXP (SET_SRC (x), 1)));
7618 int regno = REGNO (SET_DEST (x));
7619 rtx y = pbi->reg_next_use[regno];
7621 && SET_DEST (x) != stack_pointer_rtx
7622 && BLOCK_NUM (y) == BLOCK_NUM (insn)
7623 /* Don't do this if the reg dies, or gets set in y; a standard addressing
7624 mode would be better. */
7625 && ! dead_or_set_p (y, SET_DEST (x))
7626 && try_pre_increment (y, SET_DEST (x), amount))
7628 /* We have found a suitable auto-increment and already changed
7629 insn Y to do it. So flush this increment instruction. */
7630 propagate_block_delete_insn (pbi->bb, insn);
7632 /* Count a reference to this reg for the increment insn we are
7633 deleting. When a reg is incremented, spilling it is worse,
7634 so we want to make that less likely. */
7635 if (regno >= FIRST_PSEUDO_REGISTER)
7637 REG_FREQ (regno) += REG_FREQ_FROM_BB (pbi->bb);
7638 REG_N_SETS (regno)++;
7641 /* Flush any remembered memories depending on the value of
7642 the incremented register. */
7643 invalidate_mems_from_set (pbi, SET_DEST (x));
7650 /* Try to change INSN so that it does pre-increment or pre-decrement
7651 addressing on register REG in order to add AMOUNT to REG.
7652 AMOUNT is negative for pre-decrement.
7653 Returns 1 if the change could be made.
7654 This checks all about the validity of the result of modifying INSN. */
7657 try_pre_increment (insn, reg, amount)
7659 HOST_WIDE_INT amount;
7663 /* Nonzero if we can try to make a pre-increment or pre-decrement.
7664 For example, addl $4,r1; movl (r1),... can become movl +(r1),... */
7666 /* Nonzero if we can try to make a post-increment or post-decrement.
7667 For example, addl $4,r1; movl -4(r1),... can become movl (r1)+,...
7668 It is possible for both PRE_OK and POST_OK to be nonzero if the machine
7669 supports both pre-inc and post-inc, or both pre-dec and post-dec. */
7672 /* Nonzero if the opportunity actually requires post-inc or post-dec. */
7675 /* From the sign of increment, see which possibilities are conceivable
7676 on this target machine. */
7677 if (HAVE_PRE_INCREMENT && amount > 0)
7679 if (HAVE_POST_INCREMENT && amount > 0)
7682 if (HAVE_PRE_DECREMENT && amount < 0)
7684 if (HAVE_POST_DECREMENT && amount < 0)
7687 if (! (pre_ok || post_ok))
7690 /* It is not safe to add a side effect to a jump insn
7691 because if the incremented register is spilled and must be reloaded
7692 there would be no way to store the incremented value back in memory. */
7694 if (GET_CODE (insn) == JUMP_INSN)
7699 use = find_use_as_address (PATTERN (insn), reg, 0);
7700 if (post_ok && (use == 0 || use == (rtx) 1))
7702 use = find_use_as_address (PATTERN (insn), reg, -amount);
7706 if (use == 0 || use == (rtx) 1)
7709 if (GET_MODE_SIZE (GET_MODE (use)) != (amount > 0 ? amount : - amount))
7712 /* See if this combination of instruction and addressing mode exists. */
7713 if (! validate_change (insn, &XEXP (use, 0),
7714 gen_rtx_fmt_e (amount > 0
7715 ? (do_post ? POST_INC : PRE_INC)
7716 : (do_post ? POST_DEC : PRE_DEC),
7720 /* Record that this insn now has an implicit side effect on X. */
7721 REG_NOTES (insn) = alloc_EXPR_LIST (REG_INC, reg, REG_NOTES (insn));
7725 #endif /* AUTO_INC_DEC */
7727 /* Find the place in the rtx X where REG is used as a memory address.
7728 Return the MEM rtx that so uses it.
7729 If PLUSCONST is nonzero, search instead for a memory address equivalent to
7730 (plus REG (const_int PLUSCONST)).
7732 If such an address does not appear, return 0.
7733 If REG appears more than once, or is used other than in such an address,
7737 find_use_as_address (x, reg, plusconst)
7740 HOST_WIDE_INT plusconst;
7742 enum rtx_code code = GET_CODE (x);
7743 const char *fmt = GET_RTX_FORMAT (code);
7745 register rtx value = 0;
7748 if (code == MEM && XEXP (x, 0) == reg && plusconst == 0)
7751 if (code == MEM && GET_CODE (XEXP (x, 0)) == PLUS
7752 && XEXP (XEXP (x, 0), 0) == reg
7753 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
7754 && INTVAL (XEXP (XEXP (x, 0), 1)) == plusconst)
7757 if (code == SIGN_EXTRACT || code == ZERO_EXTRACT)
7759 /* If REG occurs inside a MEM used in a bit-field reference,
7760 that is unacceptable. */
7761 if (find_use_as_address (XEXP (x, 0), reg, 0) != 0)
7762 return (rtx) (HOST_WIDE_INT) 1;
7766 return (rtx) (HOST_WIDE_INT) 1;
7768 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
7772 tem = find_use_as_address (XEXP (x, i), reg, plusconst);
7776 return (rtx) (HOST_WIDE_INT) 1;
7778 else if (fmt[i] == 'E')
7781 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
7783 tem = find_use_as_address (XVECEXP (x, i, j), reg, plusconst);
7787 return (rtx) (HOST_WIDE_INT) 1;
7795 /* Write information about registers and basic blocks into FILE.
7796 This is part of making a debugging dump. */
7799 dump_regset (r, outf)
7806 fputs (" (nil)", outf);
7810 EXECUTE_IF_SET_IN_REG_SET (r, 0, i,
7812 fprintf (outf, " %d", i);
7813 if (i < FIRST_PSEUDO_REGISTER)
7814 fprintf (outf, " [%s]",
7819 /* Print a human-reaable representation of R on the standard error
7820 stream. This function is designed to be used from within the
7827 dump_regset (r, stderr);
7828 putc ('\n', stderr);
7832 dump_flow_info (file)
7836 static const char * const reg_class_names[] = REG_CLASS_NAMES;
7838 fprintf (file, "%d registers.\n", max_regno);
7839 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
7842 enum reg_class class, altclass;
7843 fprintf (file, "\nRegister %d used %d times across %d insns",
7844 i, REG_N_REFS (i), REG_LIVE_LENGTH (i));
7845 if (REG_BASIC_BLOCK (i) >= 0)
7846 fprintf (file, " in block %d", REG_BASIC_BLOCK (i));
7848 fprintf (file, "; set %d time%s", REG_N_SETS (i),
7849 (REG_N_SETS (i) == 1) ? "" : "s");
7850 if (REG_USERVAR_P (regno_reg_rtx[i]))
7851 fprintf (file, "; user var");
7852 if (REG_N_DEATHS (i) != 1)
7853 fprintf (file, "; dies in %d places", REG_N_DEATHS (i));
7854 if (REG_N_CALLS_CROSSED (i) == 1)
7855 fprintf (file, "; crosses 1 call");
7856 else if (REG_N_CALLS_CROSSED (i))
7857 fprintf (file, "; crosses %d calls", REG_N_CALLS_CROSSED (i));
7858 if (PSEUDO_REGNO_BYTES (i) != UNITS_PER_WORD)
7859 fprintf (file, "; %d bytes", PSEUDO_REGNO_BYTES (i));
7860 class = reg_preferred_class (i);
7861 altclass = reg_alternate_class (i);
7862 if (class != GENERAL_REGS || altclass != ALL_REGS)
7864 if (altclass == ALL_REGS || class == ALL_REGS)
7865 fprintf (file, "; pref %s", reg_class_names[(int) class]);
7866 else if (altclass == NO_REGS)
7867 fprintf (file, "; %s or none", reg_class_names[(int) class]);
7869 fprintf (file, "; pref %s, else %s",
7870 reg_class_names[(int) class],
7871 reg_class_names[(int) altclass]);
7873 if (REG_POINTER (regno_reg_rtx[i]))
7874 fprintf (file, "; pointer");
7875 fprintf (file, ".\n");
7878 fprintf (file, "\n%d basic blocks, %d edges.\n", n_basic_blocks, n_edges);
7879 for (i = 0; i < n_basic_blocks; i++)
7881 register basic_block bb = BASIC_BLOCK (i);
7884 fprintf (file, "\nBasic block %d: first insn %d, last %d, loop_depth %d, count ",
7885 i, INSN_UID (bb->head), INSN_UID (bb->end), bb->loop_depth);
7886 fprintf (file, HOST_WIDEST_INT_PRINT_DEC, (HOST_WIDEST_INT) bb->count);
7887 fprintf (file, ", freq %i.\n", bb->frequency);
7889 fprintf (file, "Predecessors: ");
7890 for (e = bb->pred; e; e = e->pred_next)
7891 dump_edge_info (file, e, 0);
7893 fprintf (file, "\nSuccessors: ");
7894 for (e = bb->succ; e; e = e->succ_next)
7895 dump_edge_info (file, e, 1);
7897 fprintf (file, "\nRegisters live at start:");
7898 dump_regset (bb->global_live_at_start, file);
7900 fprintf (file, "\nRegisters live at end:");
7901 dump_regset (bb->global_live_at_end, file);
7912 dump_flow_info (stderr);
7916 dump_edge_info (file, e, do_succ)
7921 basic_block side = (do_succ ? e->dest : e->src);
7923 if (side == ENTRY_BLOCK_PTR)
7924 fputs (" ENTRY", file);
7925 else if (side == EXIT_BLOCK_PTR)
7926 fputs (" EXIT", file);
7928 fprintf (file, " %d", side->index);
7931 fprintf (file, " [%.1f%%] ", e->probability * 100.0 / REG_BR_PROB_BASE);
7935 fprintf (file, " count:");
7936 fprintf (file, HOST_WIDEST_INT_PRINT_DEC, (HOST_WIDEST_INT) e->count);
7941 static const char * const bitnames[] = {
7942 "fallthru", "crit", "ab", "abcall", "eh", "fake", "dfs_back"
7945 int i, flags = e->flags;
7949 for (i = 0; flags; i++)
7950 if (flags & (1 << i))
7956 if (i < (int) ARRAY_SIZE (bitnames))
7957 fputs (bitnames[i], file);
7959 fprintf (file, "%d", i);
7966 /* Print out one basic block with live information at start and end. */
7977 fprintf (outf, ";; Basic block %d, loop depth %d, count ",
7978 bb->index, bb->loop_depth);
7979 fprintf (outf, HOST_WIDEST_INT_PRINT_DEC, (HOST_WIDEST_INT) bb->count);
7982 fputs (";; Predecessors: ", outf);
7983 for (e = bb->pred; e; e = e->pred_next)
7984 dump_edge_info (outf, e, 0);
7987 fputs (";; Registers live at start:", outf);
7988 dump_regset (bb->global_live_at_start, outf);
7991 for (insn = bb->head, last = NEXT_INSN (bb->end);
7993 insn = NEXT_INSN (insn))
7994 print_rtl_single (outf, insn);
7996 fputs (";; Registers live at end:", outf);
7997 dump_regset (bb->global_live_at_end, outf);
8000 fputs (";; Successors: ", outf);
8001 for (e = bb->succ; e; e = e->succ_next)
8002 dump_edge_info (outf, e, 1);
8010 dump_bb (bb, stderr);
8017 dump_bb (BASIC_BLOCK (n), stderr);
8020 /* Like print_rtl, but also print out live information for the start of each
8024 print_rtl_with_bb (outf, rtx_first)
8028 register rtx tmp_rtx;
8031 fprintf (outf, "(nil)\n");
8035 enum bb_state { NOT_IN_BB, IN_ONE_BB, IN_MULTIPLE_BB };
8036 int max_uid = get_max_uid ();
8037 basic_block *start = (basic_block *)
8038 xcalloc (max_uid, sizeof (basic_block));
8039 basic_block *end = (basic_block *)
8040 xcalloc (max_uid, sizeof (basic_block));
8041 enum bb_state *in_bb_p = (enum bb_state *)
8042 xcalloc (max_uid, sizeof (enum bb_state));
8044 for (i = n_basic_blocks - 1; i >= 0; i--)
8046 basic_block bb = BASIC_BLOCK (i);
8049 start[INSN_UID (bb->head)] = bb;
8050 end[INSN_UID (bb->end)] = bb;
8051 for (x = bb->head; x != NULL_RTX; x = NEXT_INSN (x))
8053 enum bb_state state = IN_MULTIPLE_BB;
8054 if (in_bb_p[INSN_UID (x)] == NOT_IN_BB)
8056 in_bb_p[INSN_UID (x)] = state;
8063 for (tmp_rtx = rtx_first; NULL != tmp_rtx; tmp_rtx = NEXT_INSN (tmp_rtx))
8068 if ((bb = start[INSN_UID (tmp_rtx)]) != NULL)
8070 fprintf (outf, ";; Start of basic block %d, registers live:",
8072 dump_regset (bb->global_live_at_start, outf);
8076 if (in_bb_p[INSN_UID (tmp_rtx)] == NOT_IN_BB
8077 && GET_CODE (tmp_rtx) != NOTE
8078 && GET_CODE (tmp_rtx) != BARRIER)
8079 fprintf (outf, ";; Insn is not within a basic block\n");
8080 else if (in_bb_p[INSN_UID (tmp_rtx)] == IN_MULTIPLE_BB)
8081 fprintf (outf, ";; Insn is in multiple basic blocks\n");
8083 did_output = print_rtl_single (outf, tmp_rtx);
8085 if ((bb = end[INSN_UID (tmp_rtx)]) != NULL)
8087 fprintf (outf, ";; End of basic block %d, registers live:\n",
8089 dump_regset (bb->global_live_at_end, outf);
8102 if (current_function_epilogue_delay_list != 0)
8104 fprintf (outf, "\n;; Insns in epilogue delay list:\n\n");
8105 for (tmp_rtx = current_function_epilogue_delay_list; tmp_rtx != 0;
8106 tmp_rtx = XEXP (tmp_rtx, 1))
8107 print_rtl_single (outf, XEXP (tmp_rtx, 0));
8111 /* Dump the rtl into the current debugging dump file, then abort. */
8114 print_rtl_and_abort_fcn (file, line, function)
8117 const char *function;
8121 print_rtl_with_bb (rtl_dump_file, get_insns ());
8122 fclose (rtl_dump_file);
8125 fancy_abort (file, line, function);
8128 /* Recompute register set/reference counts immediately prior to register
8131 This avoids problems with set/reference counts changing to/from values
8132 which have special meanings to the register allocators.
8134 Additionally, the reference counts are the primary component used by the
8135 register allocators to prioritize pseudos for allocation to hard regs.
8136 More accurate reference counts generally lead to better register allocation.
8138 F is the first insn to be scanned.
8140 LOOP_STEP denotes how much loop_depth should be incremented per
8141 loop nesting level in order to increase the ref count more for
8142 references in a loop.
8144 It might be worthwhile to update REG_LIVE_LENGTH, REG_BASIC_BLOCK and
8145 possibly other information which is used by the register allocators. */
8148 recompute_reg_usage (f, loop_step)
8149 rtx f ATTRIBUTE_UNUSED;
8150 int loop_step ATTRIBUTE_UNUSED;
8152 allocate_reg_life_data ();
8153 update_life_info (NULL, UPDATE_LIFE_LOCAL, PROP_REG_INFO);
8156 /* Optionally removes all the REG_DEAD and REG_UNUSED notes from a set of
8157 blocks. If BLOCKS is NULL, assume the universal set. Returns a count
8158 of the number of registers that died. */
8161 count_or_remove_death_notes (blocks, kill)
8167 for (i = n_basic_blocks - 1; i >= 0; --i)
8172 if (blocks && ! TEST_BIT (blocks, i))
8175 bb = BASIC_BLOCK (i);
8177 for (insn = bb->head;; insn = NEXT_INSN (insn))
8181 rtx *pprev = ®_NOTES (insn);
8186 switch (REG_NOTE_KIND (link))
8189 if (GET_CODE (XEXP (link, 0)) == REG)
8191 rtx reg = XEXP (link, 0);
8194 if (REGNO (reg) >= FIRST_PSEUDO_REGISTER)
8197 n = HARD_REGNO_NREGS (REGNO (reg), GET_MODE (reg));
8205 rtx next = XEXP (link, 1);
8206 free_EXPR_LIST_node (link);
8207 *pprev = link = next;
8213 pprev = &XEXP (link, 1);
8220 if (insn == bb->end)
8229 /* Update insns block within BB. */
8232 update_bb_for_insn (bb)
8237 if (! basic_block_for_insn)
8240 for (insn = bb->head; ; insn = NEXT_INSN (insn))
8242 set_block_for_insn (insn, bb);
8244 if (insn == bb->end)
8250 /* Record INSN's block as BB. */
8253 set_block_for_insn (insn, bb)
8257 size_t uid = INSN_UID (insn);
8258 if (uid >= basic_block_for_insn->num_elements)
8262 /* Add one-eighth the size so we don't keep calling xrealloc. */
8263 new_size = uid + (uid + 7) / 8;
8265 VARRAY_GROW (basic_block_for_insn, new_size);
8267 VARRAY_BB (basic_block_for_insn, uid) = bb;
8270 /* When a new insn has been inserted into an existing block, it will
8271 sometimes emit more than a single insn. This routine will set the
8272 block number for the specified insn, and look backwards in the insn
8273 chain to see if there are any other uninitialized insns immediately
8274 previous to this one, and set the block number for them too. */
8277 set_block_for_new_insns (insn, bb)
8281 set_block_for_insn (insn, bb);
8283 /* Scan the previous instructions setting the block number until we find
8284 an instruction that has the block number set, or we find a note
8286 for (insn = PREV_INSN (insn); insn != NULL_RTX; insn = PREV_INSN (insn))
8288 if (GET_CODE (insn) == NOTE)
8290 if (INSN_UID (insn) >= basic_block_for_insn->num_elements
8291 || BLOCK_FOR_INSN (insn) == 0)
8292 set_block_for_insn (insn, bb);
8298 /* Verify the CFG consistency. This function check some CFG invariants and
8299 aborts when something is wrong. Hope that this function will help to
8300 convert many optimization passes to preserve CFG consistent.
8302 Currently it does following checks:
8304 - test head/end pointers
8305 - overlapping of basic blocks
8306 - edge list correctness
8307 - headers of basic blocks (the NOTE_INSN_BASIC_BLOCK note)
8308 - tails of basic blocks (ensure that boundary is necesary)
8309 - scans body of the basic block for JUMP_INSN, CODE_LABEL
8310 and NOTE_INSN_BASIC_BLOCK
8311 - check that all insns are in the basic blocks
8312 (except the switch handling code, barriers and notes)
8313 - check that all returns are followed by barriers
8315 In future it can be extended check a lot of other stuff as well
8316 (reachability of basic blocks, life information, etc. etc.). */
8321 const int max_uid = get_max_uid ();
8322 const rtx rtx_first = get_insns ();
8323 rtx last_head = get_last_insn ();
8324 basic_block *bb_info, *last_visited;
8326 int i, last_bb_num_seen, num_bb_notes, err = 0;
8328 bb_info = (basic_block *) xcalloc (max_uid, sizeof (basic_block));
8329 last_visited = (basic_block *) xcalloc (n_basic_blocks + 2,
8330 sizeof (basic_block));
8332 for (i = n_basic_blocks - 1; i >= 0; i--)
8334 basic_block bb = BASIC_BLOCK (i);
8335 rtx head = bb->head;
8338 /* Verify the end of the basic block is in the INSN chain. */
8339 for (x = last_head; x != NULL_RTX; x = PREV_INSN (x))
8344 error ("End insn %d for block %d not found in the insn stream.",
8345 INSN_UID (end), bb->index);
8349 /* Work backwards from the end to the head of the basic block
8350 to verify the head is in the RTL chain. */
8351 for (; x != NULL_RTX; x = PREV_INSN (x))
8353 /* While walking over the insn chain, verify insns appear
8354 in only one basic block and initialize the BB_INFO array
8355 used by other passes. */
8356 if (bb_info[INSN_UID (x)] != NULL)
8358 error ("Insn %d is in multiple basic blocks (%d and %d)",
8359 INSN_UID (x), bb->index, bb_info[INSN_UID (x)]->index);
8362 bb_info[INSN_UID (x)] = bb;
8369 error ("Head insn %d for block %d not found in the insn stream.",
8370 INSN_UID (head), bb->index);
8377 /* Now check the basic blocks (boundaries etc.) */
8378 for (i = n_basic_blocks - 1; i >= 0; i--)
8380 basic_block bb = BASIC_BLOCK (i);
8381 /* Check correctness of edge lists. */
8383 int has_fallthru = 0;
8388 if (last_visited [e->dest->index + 2] == bb)
8390 error ("verify_flow_info: Duplicate edge %i->%i",
8391 e->src->index, e->dest->index);
8394 last_visited [e->dest->index + 2] = bb;
8396 if (e->flags & EDGE_FALLTHRU)
8399 if ((e->flags & EDGE_FALLTHRU)
8400 && e->src != ENTRY_BLOCK_PTR
8401 && e->dest != EXIT_BLOCK_PTR)
8404 if (e->src->index + 1 != e->dest->index)
8406 error ("verify_flow_info: Incorrect blocks for fallthru %i->%i",
8407 e->src->index, e->dest->index);
8411 for (insn = NEXT_INSN (e->src->end); insn != e->dest->head;
8412 insn = NEXT_INSN (insn))
8413 if (GET_CODE (insn) == BARRIER || INSN_P (insn))
8415 error ("verify_flow_info: Incorrect fallthru %i->%i",
8416 e->src->index, e->dest->index);
8417 fatal_insn ("Wrong insn in the fallthru edge", insn);
8423 error ("verify_flow_info: Basic block %d succ edge is corrupted",
8425 fprintf (stderr, "Predecessor: ");
8426 dump_edge_info (stderr, e, 0);
8427 fprintf (stderr, "\nSuccessor: ");
8428 dump_edge_info (stderr, e, 1);
8429 fprintf (stderr, "\n");
8432 if (e->dest != EXIT_BLOCK_PTR)
8434 edge e2 = e->dest->pred;
8435 while (e2 && e2 != e)
8439 error ("Basic block %i edge lists are corrupted", bb->index);
8449 /* Ensure existence of barrier in BB with no fallthru edges. */
8450 for (insn = bb->end; GET_CODE (insn) != BARRIER;
8451 insn = NEXT_INSN (insn))
8453 || (GET_CODE (insn) == NOTE
8454 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_BASIC_BLOCK))
8456 error ("Missing barrier after block %i", bb->index);
8466 error ("Basic block %d pred edge is corrupted", bb->index);
8467 fputs ("Predecessor: ", stderr);
8468 dump_edge_info (stderr, e, 0);
8469 fputs ("\nSuccessor: ", stderr);
8470 dump_edge_info (stderr, e, 1);
8471 fputc ('\n', stderr);
8474 if (e->src != ENTRY_BLOCK_PTR)
8476 edge e2 = e->src->succ;
8477 while (e2 && e2 != e)
8481 error ("Basic block %i edge lists are corrupted", bb->index);
8488 /* OK pointers are correct. Now check the header of basic
8489 block. It ought to contain optional CODE_LABEL followed
8490 by NOTE_BASIC_BLOCK. */
8492 if (GET_CODE (x) == CODE_LABEL)
8496 error ("NOTE_INSN_BASIC_BLOCK is missing for block %d",
8502 if (!NOTE_INSN_BASIC_BLOCK_P (x) || NOTE_BASIC_BLOCK (x) != bb)
8504 error ("NOTE_INSN_BASIC_BLOCK is missing for block %d\n",
8511 /* Do checks for empty blocks here */
8518 if (NOTE_INSN_BASIC_BLOCK_P (x))
8520 error ("NOTE_INSN_BASIC_BLOCK %d in the middle of basic block %d",
8521 INSN_UID (x), bb->index);
8528 if (GET_CODE (x) == JUMP_INSN
8529 || GET_CODE (x) == CODE_LABEL
8530 || GET_CODE (x) == BARRIER)
8532 error ("In basic block %d:", bb->index);
8533 fatal_insn ("Flow control insn inside a basic block", x);
8541 last_bb_num_seen = -1;
8546 if (NOTE_INSN_BASIC_BLOCK_P (x))
8548 basic_block bb = NOTE_BASIC_BLOCK (x);
8550 if (bb->index != last_bb_num_seen + 1)
8551 internal_error ("Basic blocks not numbered consecutively.");
8553 last_bb_num_seen = bb->index;
8556 if (!bb_info[INSN_UID (x)])
8558 switch (GET_CODE (x))
8565 /* An addr_vec is placed outside any block block. */
8567 && GET_CODE (NEXT_INSN (x)) == JUMP_INSN
8568 && (GET_CODE (PATTERN (NEXT_INSN (x))) == ADDR_DIFF_VEC
8569 || GET_CODE (PATTERN (NEXT_INSN (x))) == ADDR_VEC))
8574 /* But in any case, non-deletable labels can appear anywhere. */
8578 fatal_insn ("Insn outside basic block", x);
8583 && GET_CODE (x) == JUMP_INSN
8584 && returnjump_p (x) && ! condjump_p (x)
8585 && ! (NEXT_INSN (x) && GET_CODE (NEXT_INSN (x)) == BARRIER))
8586 fatal_insn ("Return not followed by barrier", x);
8591 if (num_bb_notes != n_basic_blocks)
8593 ("number of bb notes in insn chain (%d) != n_basic_blocks (%d)",
8594 num_bb_notes, n_basic_blocks);
8597 internal_error ("verify_flow_info failed.");
8601 free (last_visited);
8604 /* Functions to access an edge list with a vector representation.
8605 Enough data is kept such that given an index number, the
8606 pred and succ that edge represents can be determined, or
8607 given a pred and a succ, its index number can be returned.
8608 This allows algorithms which consume a lot of memory to
8609 represent the normally full matrix of edge (pred,succ) with a
8610 single indexed vector, edge (EDGE_INDEX (pred, succ)), with no
8611 wasted space in the client code due to sparse flow graphs. */
8613 /* This functions initializes the edge list. Basically the entire
8614 flowgraph is processed, and all edges are assigned a number,
8615 and the data structure is filled in. */
8620 struct edge_list *elist;
8626 block_count = n_basic_blocks + 2; /* Include the entry and exit blocks. */
8630 /* Determine the number of edges in the flow graph by counting successor
8631 edges on each basic block. */
8632 for (x = 0; x < n_basic_blocks; x++)
8634 basic_block bb = BASIC_BLOCK (x);
8636 for (e = bb->succ; e; e = e->succ_next)
8639 /* Don't forget successors of the entry block. */
8640 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
8643 elist = (struct edge_list *) xmalloc (sizeof (struct edge_list));
8644 elist->num_blocks = block_count;
8645 elist->num_edges = num_edges;
8646 elist->index_to_edge = (edge *) xmalloc (sizeof (edge) * num_edges);
8650 /* Follow successors of the entry block, and register these edges. */
8651 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
8653 elist->index_to_edge[num_edges] = e;
8657 for (x = 0; x < n_basic_blocks; x++)
8659 basic_block bb = BASIC_BLOCK (x);
8661 /* Follow all successors of blocks, and register these edges. */
8662 for (e = bb->succ; e; e = e->succ_next)
8664 elist->index_to_edge[num_edges] = e;
8671 /* This function free's memory associated with an edge list. */
8674 free_edge_list (elist)
8675 struct edge_list *elist;
8679 free (elist->index_to_edge);
8684 /* This function provides debug output showing an edge list. */
8687 print_edge_list (f, elist)
8689 struct edge_list *elist;
8692 fprintf (f, "Compressed edge list, %d BBs + entry & exit, and %d edges\n",
8693 elist->num_blocks - 2, elist->num_edges);
8695 for (x = 0; x < elist->num_edges; x++)
8697 fprintf (f, " %-4d - edge(", x);
8698 if (INDEX_EDGE_PRED_BB (elist, x) == ENTRY_BLOCK_PTR)
8699 fprintf (f, "entry,");
8701 fprintf (f, "%d,", INDEX_EDGE_PRED_BB (elist, x)->index);
8703 if (INDEX_EDGE_SUCC_BB (elist, x) == EXIT_BLOCK_PTR)
8704 fprintf (f, "exit)\n");
8706 fprintf (f, "%d)\n", INDEX_EDGE_SUCC_BB (elist, x)->index);
8710 /* This function provides an internal consistency check of an edge list,
8711 verifying that all edges are present, and that there are no
8715 verify_edge_list (f, elist)
8717 struct edge_list *elist;
8719 int x, pred, succ, index;
8722 for (x = 0; x < n_basic_blocks; x++)
8724 basic_block bb = BASIC_BLOCK (x);
8726 for (e = bb->succ; e; e = e->succ_next)
8728 pred = e->src->index;
8729 succ = e->dest->index;
8730 index = EDGE_INDEX (elist, e->src, e->dest);
8731 if (index == EDGE_INDEX_NO_EDGE)
8733 fprintf (f, "*p* No index for edge from %d to %d\n", pred, succ);
8736 if (INDEX_EDGE_PRED_BB (elist, index)->index != pred)
8737 fprintf (f, "*p* Pred for index %d should be %d not %d\n",
8738 index, pred, INDEX_EDGE_PRED_BB (elist, index)->index);
8739 if (INDEX_EDGE_SUCC_BB (elist, index)->index != succ)
8740 fprintf (f, "*p* Succ for index %d should be %d not %d\n",
8741 index, succ, INDEX_EDGE_SUCC_BB (elist, index)->index);
8744 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
8746 pred = e->src->index;
8747 succ = e->dest->index;
8748 index = EDGE_INDEX (elist, e->src, e->dest);
8749 if (index == EDGE_INDEX_NO_EDGE)
8751 fprintf (f, "*p* No index for edge from %d to %d\n", pred, succ);
8754 if (INDEX_EDGE_PRED_BB (elist, index)->index != pred)
8755 fprintf (f, "*p* Pred for index %d should be %d not %d\n",
8756 index, pred, INDEX_EDGE_PRED_BB (elist, index)->index);
8757 if (INDEX_EDGE_SUCC_BB (elist, index)->index != succ)
8758 fprintf (f, "*p* Succ for index %d should be %d not %d\n",
8759 index, succ, INDEX_EDGE_SUCC_BB (elist, index)->index);
8761 /* We've verified that all the edges are in the list, no lets make sure
8762 there are no spurious edges in the list. */
8764 for (pred = 0; pred < n_basic_blocks; pred++)
8765 for (succ = 0; succ < n_basic_blocks; succ++)
8767 basic_block p = BASIC_BLOCK (pred);
8768 basic_block s = BASIC_BLOCK (succ);
8772 for (e = p->succ; e; e = e->succ_next)
8778 for (e = s->pred; e; e = e->pred_next)
8784 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), BASIC_BLOCK (succ))
8785 == EDGE_INDEX_NO_EDGE && found_edge != 0)
8786 fprintf (f, "*** Edge (%d, %d) appears to not have an index\n",
8788 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), BASIC_BLOCK (succ))
8789 != EDGE_INDEX_NO_EDGE && found_edge == 0)
8790 fprintf (f, "*** Edge (%d, %d) has index %d, but there is no edge\n",
8791 pred, succ, EDGE_INDEX (elist, BASIC_BLOCK (pred),
8792 BASIC_BLOCK (succ)));
8794 for (succ = 0; succ < n_basic_blocks; succ++)
8796 basic_block p = ENTRY_BLOCK_PTR;
8797 basic_block s = BASIC_BLOCK (succ);
8801 for (e = p->succ; e; e = e->succ_next)
8807 for (e = s->pred; e; e = e->pred_next)
8813 if (EDGE_INDEX (elist, ENTRY_BLOCK_PTR, BASIC_BLOCK (succ))
8814 == EDGE_INDEX_NO_EDGE && found_edge != 0)
8815 fprintf (f, "*** Edge (entry, %d) appears to not have an index\n",
8817 if (EDGE_INDEX (elist, ENTRY_BLOCK_PTR, BASIC_BLOCK (succ))
8818 != EDGE_INDEX_NO_EDGE && found_edge == 0)
8819 fprintf (f, "*** Edge (entry, %d) has index %d, but no edge exists\n",
8820 succ, EDGE_INDEX (elist, ENTRY_BLOCK_PTR,
8821 BASIC_BLOCK (succ)));
8823 for (pred = 0; pred < n_basic_blocks; pred++)
8825 basic_block p = BASIC_BLOCK (pred);
8826 basic_block s = EXIT_BLOCK_PTR;
8830 for (e = p->succ; e; e = e->succ_next)
8836 for (e = s->pred; e; e = e->pred_next)
8842 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), EXIT_BLOCK_PTR)
8843 == EDGE_INDEX_NO_EDGE && found_edge != 0)
8844 fprintf (f, "*** Edge (%d, exit) appears to not have an index\n",
8846 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), EXIT_BLOCK_PTR)
8847 != EDGE_INDEX_NO_EDGE && found_edge == 0)
8848 fprintf (f, "*** Edge (%d, exit) has index %d, but no edge exists\n",
8849 pred, EDGE_INDEX (elist, BASIC_BLOCK (pred),
8854 /* This routine will determine what, if any, edge there is between
8855 a specified predecessor and successor. */
8858 find_edge_index (edge_list, pred, succ)
8859 struct edge_list *edge_list;
8860 basic_block pred, succ;
8863 for (x = 0; x < NUM_EDGES (edge_list); x++)
8865 if (INDEX_EDGE_PRED_BB (edge_list, x) == pred
8866 && INDEX_EDGE_SUCC_BB (edge_list, x) == succ)
8869 return (EDGE_INDEX_NO_EDGE);
8872 /* This function will remove an edge from the flow graph. */
8878 edge last_pred = NULL;
8879 edge last_succ = NULL;
8881 basic_block src, dest;
8884 for (tmp = src->succ; tmp && tmp != e; tmp = tmp->succ_next)
8890 last_succ->succ_next = e->succ_next;
8892 src->succ = e->succ_next;
8894 for (tmp = dest->pred; tmp && tmp != e; tmp = tmp->pred_next)
8900 last_pred->pred_next = e->pred_next;
8902 dest->pred = e->pred_next;
8908 /* This routine will remove any fake successor edges for a basic block.
8909 When the edge is removed, it is also removed from whatever predecessor
8913 remove_fake_successors (bb)
8917 for (e = bb->succ; e;)
8921 if ((tmp->flags & EDGE_FAKE) == EDGE_FAKE)
8926 /* This routine will remove all fake edges from the flow graph. If
8927 we remove all fake successors, it will automatically remove all
8928 fake predecessors. */
8931 remove_fake_edges ()
8935 for (x = 0; x < n_basic_blocks; x++)
8936 remove_fake_successors (BASIC_BLOCK (x));
8938 /* We've handled all successors except the entry block's. */
8939 remove_fake_successors (ENTRY_BLOCK_PTR);
8942 /* This function will add a fake edge between any block which has no
8943 successors, and the exit block. Some data flow equations require these
8947 add_noreturn_fake_exit_edges ()
8951 for (x = 0; x < n_basic_blocks; x++)
8952 if (BASIC_BLOCK (x)->succ == NULL)
8953 make_edge (NULL, BASIC_BLOCK (x), EXIT_BLOCK_PTR, EDGE_FAKE);
8956 /* This function adds a fake edge between any infinite loops to the
8957 exit block. Some optimizations require a path from each node to
8960 See also Morgan, Figure 3.10, pp. 82-83.
8962 The current implementation is ugly, not attempting to minimize the
8963 number of inserted fake edges. To reduce the number of fake edges
8964 to insert, add fake edges from _innermost_ loops containing only
8965 nodes not reachable from the exit block. */
8968 connect_infinite_loops_to_exit ()
8970 basic_block unvisited_block;
8972 /* Perform depth-first search in the reverse graph to find nodes
8973 reachable from the exit block. */
8974 struct depth_first_search_dsS dfs_ds;
8976 flow_dfs_compute_reverse_init (&dfs_ds);
8977 flow_dfs_compute_reverse_add_bb (&dfs_ds, EXIT_BLOCK_PTR);
8979 /* Repeatedly add fake edges, updating the unreachable nodes. */
8982 unvisited_block = flow_dfs_compute_reverse_execute (&dfs_ds);
8983 if (!unvisited_block)
8985 make_edge (NULL, unvisited_block, EXIT_BLOCK_PTR, EDGE_FAKE);
8986 flow_dfs_compute_reverse_add_bb (&dfs_ds, unvisited_block);
8989 flow_dfs_compute_reverse_finish (&dfs_ds);
8994 /* Redirect an edge's successor from one block to another. */
8997 redirect_edge_succ (e, new_succ)
8999 basic_block new_succ;
9003 /* Disconnect the edge from the old successor block. */
9004 for (pe = &e->dest->pred; *pe != e; pe = &(*pe)->pred_next)
9006 *pe = (*pe)->pred_next;
9008 /* Reconnect the edge to the new successor block. */
9009 e->pred_next = new_succ->pred;
9014 /* Like previous but avoid possible dupplicate edge. */
9017 redirect_edge_succ_nodup (e, new_succ)
9019 basic_block new_succ;
9022 /* Check whether the edge is already present. */
9023 for (s = e->src->succ; s; s = s->succ_next)
9024 if (s->dest == new_succ && s != e)
9028 s->flags |= e->flags;
9029 s->probability += e->probability;
9030 s->count += e->count;
9034 redirect_edge_succ (e, new_succ);
9037 /* Redirect an edge's predecessor from one block to another. */
9040 redirect_edge_pred (e, new_pred)
9042 basic_block new_pred;
9046 /* Disconnect the edge from the old predecessor block. */
9047 for (pe = &e->src->succ; *pe != e; pe = &(*pe)->succ_next)
9049 *pe = (*pe)->succ_next;
9051 /* Reconnect the edge to the new predecessor block. */
9052 e->succ_next = new_pred->succ;
9057 /* Dump the list of basic blocks in the bitmap NODES. */
9060 flow_nodes_print (str, nodes, file)
9062 const sbitmap nodes;
9070 fprintf (file, "%s { ", str);
9071 EXECUTE_IF_SET_IN_SBITMAP (nodes, 0, node, {fprintf (file, "%d ", node);});
9072 fputs ("}\n", file);
9076 /* Dump the list of edges in the array EDGE_LIST. */
9079 flow_edge_list_print (str, edge_list, num_edges, file)
9081 const edge *edge_list;
9090 fprintf (file, "%s { ", str);
9091 for (i = 0; i < num_edges; i++)
9092 fprintf (file, "%d->%d ", edge_list[i]->src->index,
9093 edge_list[i]->dest->index);
9094 fputs ("}\n", file);
9098 /* Dump loop related CFG information. */
9101 flow_loops_cfg_dump (loops, file)
9102 const struct loops *loops;
9107 if (! loops->num || ! file || ! loops->cfg.dom)
9110 for (i = 0; i < n_basic_blocks; i++)
9114 fprintf (file, ";; %d succs { ", i);
9115 for (succ = BASIC_BLOCK (i)->succ; succ; succ = succ->succ_next)
9116 fprintf (file, "%d ", succ->dest->index);
9117 flow_nodes_print ("} dom", loops->cfg.dom[i], file);
9120 /* Dump the DFS node order. */
9121 if (loops->cfg.dfs_order)
9123 fputs (";; DFS order: ", file);
9124 for (i = 0; i < n_basic_blocks; i++)
9125 fprintf (file, "%d ", loops->cfg.dfs_order[i]);
9128 /* Dump the reverse completion node order. */
9129 if (loops->cfg.rc_order)
9131 fputs (";; RC order: ", file);
9132 for (i = 0; i < n_basic_blocks; i++)
9133 fprintf (file, "%d ", loops->cfg.rc_order[i]);
9138 /* Return non-zero if the nodes of LOOP are a subset of OUTER. */
9141 flow_loop_nested_p (outer, loop)
9145 return sbitmap_a_subset_b_p (loop->nodes, outer->nodes);
9149 /* Dump the loop information specified by LOOP to the stream FILE
9150 using auxiliary dump callback function LOOP_DUMP_AUX if non null. */
9152 flow_loop_dump (loop, file, loop_dump_aux, verbose)
9153 const struct loop *loop;
9155 void (*loop_dump_aux) PARAMS((const struct loop *, FILE *, int));
9158 if (! loop || ! loop->header)
9161 fprintf (file, ";;\n;; Loop %d (%d to %d):%s%s\n",
9162 loop->num, INSN_UID (loop->first->head),
9163 INSN_UID (loop->last->end),
9164 loop->shared ? " shared" : "",
9165 loop->invalid ? " invalid" : "");
9166 fprintf (file, ";; header %d, latch %d, pre-header %d, first %d, last %d\n",
9167 loop->header->index, loop->latch->index,
9168 loop->pre_header ? loop->pre_header->index : -1,
9169 loop->first->index, loop->last->index);
9170 fprintf (file, ";; depth %d, level %d, outer %ld\n",
9171 loop->depth, loop->level,
9172 (long) (loop->outer ? loop->outer->num : -1));
9174 if (loop->pre_header_edges)
9175 flow_edge_list_print (";; pre-header edges", loop->pre_header_edges,
9176 loop->num_pre_header_edges, file);
9177 flow_edge_list_print (";; entry edges", loop->entry_edges,
9178 loop->num_entries, file);
9179 fprintf (file, ";; %d", loop->num_nodes);
9180 flow_nodes_print (" nodes", loop->nodes, file);
9181 flow_edge_list_print (";; exit edges", loop->exit_edges,
9182 loop->num_exits, file);
9183 if (loop->exits_doms)
9184 flow_nodes_print (";; exit doms", loop->exits_doms, file);
9186 loop_dump_aux (loop, file, verbose);
9190 /* Dump the loop information specified by LOOPS to the stream FILE,
9191 using auxiliary dump callback function LOOP_DUMP_AUX if non null. */
9193 flow_loops_dump (loops, file, loop_dump_aux, verbose)
9194 const struct loops *loops;
9196 void (*loop_dump_aux) PARAMS((const struct loop *, FILE *, int));
9202 num_loops = loops->num;
9203 if (! num_loops || ! file)
9206 fprintf (file, ";; %d loops found, %d levels\n",
9207 num_loops, loops->levels);
9209 for (i = 0; i < num_loops; i++)
9211 struct loop *loop = &loops->array[i];
9213 flow_loop_dump (loop, file, loop_dump_aux, verbose);
9219 for (j = 0; j < i; j++)
9221 struct loop *oloop = &loops->array[j];
9223 if (loop->header == oloop->header)
9228 smaller = loop->num_nodes < oloop->num_nodes;
9230 /* If the union of LOOP and OLOOP is different than
9231 the larger of LOOP and OLOOP then LOOP and OLOOP
9232 must be disjoint. */
9233 disjoint = ! flow_loop_nested_p (smaller ? loop : oloop,
9234 smaller ? oloop : loop);
9236 ";; loop header %d shared by loops %d, %d %s\n",
9237 loop->header->index, i, j,
9238 disjoint ? "disjoint" : "nested");
9245 flow_loops_cfg_dump (loops, file);
9249 /* Free all the memory allocated for LOOPS. */
9252 flow_loops_free (loops)
9253 struct loops *loops;
9262 /* Free the loop descriptors. */
9263 for (i = 0; i < loops->num; i++)
9265 struct loop *loop = &loops->array[i];
9267 if (loop->pre_header_edges)
9268 free (loop->pre_header_edges);
9270 sbitmap_free (loop->nodes);
9271 if (loop->entry_edges)
9272 free (loop->entry_edges);
9273 if (loop->exit_edges)
9274 free (loop->exit_edges);
9275 if (loop->exits_doms)
9276 sbitmap_free (loop->exits_doms);
9278 free (loops->array);
9279 loops->array = NULL;
9282 sbitmap_vector_free (loops->cfg.dom);
9283 if (loops->cfg.dfs_order)
9284 free (loops->cfg.dfs_order);
9286 if (loops->shared_headers)
9287 sbitmap_free (loops->shared_headers);
9292 /* Find the entry edges into the loop with header HEADER and nodes
9293 NODES and store in ENTRY_EDGES array. Return the number of entry
9294 edges from the loop. */
9297 flow_loop_entry_edges_find (header, nodes, entry_edges)
9299 const sbitmap nodes;
9305 *entry_edges = NULL;
9308 for (e = header->pred; e; e = e->pred_next)
9310 basic_block src = e->src;
9312 if (src == ENTRY_BLOCK_PTR || ! TEST_BIT (nodes, src->index))
9319 *entry_edges = (edge *) xmalloc (num_entries * sizeof (edge *));
9322 for (e = header->pred; e; e = e->pred_next)
9324 basic_block src = e->src;
9326 if (src == ENTRY_BLOCK_PTR || ! TEST_BIT (nodes, src->index))
9327 (*entry_edges)[num_entries++] = e;
9334 /* Find the exit edges from the loop using the bitmap of loop nodes
9335 NODES and store in EXIT_EDGES array. Return the number of
9336 exit edges from the loop. */
9339 flow_loop_exit_edges_find (nodes, exit_edges)
9340 const sbitmap nodes;
9349 /* Check all nodes within the loop to see if there are any
9350 successors not in the loop. Note that a node may have multiple
9351 exiting edges ????? A node can have one jumping edge and one fallthru
9352 edge so only one of these can exit the loop. */
9354 EXECUTE_IF_SET_IN_SBITMAP (nodes, 0, node, {
9355 for (e = BASIC_BLOCK (node)->succ; e; e = e->succ_next)
9357 basic_block dest = e->dest;
9359 if (dest == EXIT_BLOCK_PTR || ! TEST_BIT (nodes, dest->index))
9367 *exit_edges = (edge *) xmalloc (num_exits * sizeof (edge *));
9369 /* Store all exiting edges into an array. */
9371 EXECUTE_IF_SET_IN_SBITMAP (nodes, 0, node, {
9372 for (e = BASIC_BLOCK (node)->succ; e; e = e->succ_next)
9374 basic_block dest = e->dest;
9376 if (dest == EXIT_BLOCK_PTR || ! TEST_BIT (nodes, dest->index))
9377 (*exit_edges)[num_exits++] = e;
9385 /* Find the nodes contained within the loop with header HEADER and
9386 latch LATCH and store in NODES. Return the number of nodes within
9390 flow_loop_nodes_find (header, latch, nodes)
9399 stack = (basic_block *) xmalloc (n_basic_blocks * sizeof (basic_block));
9402 /* Start with only the loop header in the set of loop nodes. */
9403 sbitmap_zero (nodes);
9404 SET_BIT (nodes, header->index);
9406 header->loop_depth++;
9408 /* Push the loop latch on to the stack. */
9409 if (! TEST_BIT (nodes, latch->index))
9411 SET_BIT (nodes, latch->index);
9412 latch->loop_depth++;
9414 stack[sp++] = latch;
9423 for (e = node->pred; e; e = e->pred_next)
9425 basic_block ancestor = e->src;
9427 /* If each ancestor not marked as part of loop, add to set of
9428 loop nodes and push on to stack. */
9429 if (ancestor != ENTRY_BLOCK_PTR
9430 && ! TEST_BIT (nodes, ancestor->index))
9432 SET_BIT (nodes, ancestor->index);
9433 ancestor->loop_depth++;
9435 stack[sp++] = ancestor;
9443 /* Compute the depth first search order and store in the array
9444 DFS_ORDER if non-zero, marking the nodes visited in VISITED. If
9445 RC_ORDER is non-zero, return the reverse completion number for each
9446 node. Returns the number of nodes visited. A depth first search
9447 tries to get as far away from the starting point as quickly as
9451 flow_depth_first_order_compute (dfs_order, rc_order)
9458 int rcnum = n_basic_blocks - 1;
9461 /* Allocate stack for back-tracking up CFG. */
9462 stack = (edge *) xmalloc ((n_basic_blocks + 1) * sizeof (edge));
9465 /* Allocate bitmap to track nodes that have been visited. */
9466 visited = sbitmap_alloc (n_basic_blocks);
9468 /* None of the nodes in the CFG have been visited yet. */
9469 sbitmap_zero (visited);
9471 /* Push the first edge on to the stack. */
9472 stack[sp++] = ENTRY_BLOCK_PTR->succ;
9480 /* Look at the edge on the top of the stack. */
9485 /* Check if the edge destination has been visited yet. */
9486 if (dest != EXIT_BLOCK_PTR && ! TEST_BIT (visited, dest->index))
9488 /* Mark that we have visited the destination. */
9489 SET_BIT (visited, dest->index);
9492 dfs_order[dfsnum++] = dest->index;
9496 /* Since the DEST node has been visited for the first
9497 time, check its successors. */
9498 stack[sp++] = dest->succ;
9502 /* There are no successors for the DEST node so assign
9503 its reverse completion number. */
9505 rc_order[rcnum--] = dest->index;
9510 if (! e->succ_next && src != ENTRY_BLOCK_PTR)
9512 /* There are no more successors for the SRC node
9513 so assign its reverse completion number. */
9515 rc_order[rcnum--] = src->index;
9519 stack[sp - 1] = e->succ_next;
9526 sbitmap_free (visited);
9528 /* The number of nodes visited should not be greater than
9530 if (dfsnum > n_basic_blocks)
9533 /* There are some nodes left in the CFG that are unreachable. */
9534 if (dfsnum < n_basic_blocks)
9539 /* Compute the depth first search order on the _reverse_ graph and
9540 store in the array DFS_ORDER, marking the nodes visited in VISITED.
9541 Returns the number of nodes visited.
9543 The computation is split into three pieces:
9545 flow_dfs_compute_reverse_init () creates the necessary data
9548 flow_dfs_compute_reverse_add_bb () adds a basic block to the data
9549 structures. The block will start the search.
9551 flow_dfs_compute_reverse_execute () continues (or starts) the
9552 search using the block on the top of the stack, stopping when the
9555 flow_dfs_compute_reverse_finish () destroys the necessary data
9558 Thus, the user will probably call ..._init(), call ..._add_bb() to
9559 add a beginning basic block to the stack, call ..._execute(),
9560 possibly add another bb to the stack and again call ..._execute(),
9561 ..., and finally call _finish(). */
9563 /* Initialize the data structures used for depth-first search on the
9564 reverse graph. If INITIALIZE_STACK is nonzero, the exit block is
9565 added to the basic block stack. DATA is the current depth-first
9566 search context. If INITIALIZE_STACK is non-zero, there is an
9567 element on the stack. */
9570 flow_dfs_compute_reverse_init (data)
9571 depth_first_search_ds data;
9573 /* Allocate stack for back-tracking up CFG. */
9575 (basic_block *) xmalloc ((n_basic_blocks - (INVALID_BLOCK + 1))
9576 * sizeof (basic_block));
9579 /* Allocate bitmap to track nodes that have been visited. */
9580 data->visited_blocks = sbitmap_alloc (n_basic_blocks - (INVALID_BLOCK + 1));
9582 /* None of the nodes in the CFG have been visited yet. */
9583 sbitmap_zero (data->visited_blocks);
9588 /* Add the specified basic block to the top of the dfs data
9589 structures. When the search continues, it will start at the
9593 flow_dfs_compute_reverse_add_bb (data, bb)
9594 depth_first_search_ds data;
9597 data->stack[data->sp++] = bb;
9601 /* Continue the depth-first search through the reverse graph starting
9602 with the block at the stack's top and ending when the stack is
9603 empty. Visited nodes are marked. Returns an unvisited basic
9604 block, or NULL if there is none available. */
9607 flow_dfs_compute_reverse_execute (data)
9608 depth_first_search_ds data;
9614 while (data->sp > 0)
9616 bb = data->stack[--data->sp];
9618 /* Mark that we have visited this node. */
9619 if (!TEST_BIT (data->visited_blocks, bb->index - (INVALID_BLOCK + 1)))
9621 SET_BIT (data->visited_blocks, bb->index - (INVALID_BLOCK + 1));
9623 /* Perform depth-first search on adjacent vertices. */
9624 for (e = bb->pred; e; e = e->pred_next)
9625 flow_dfs_compute_reverse_add_bb (data, e->src);
9629 /* Determine if there are unvisited basic blocks. */
9630 for (i = n_basic_blocks - (INVALID_BLOCK + 1); --i >= 0;)
9631 if (!TEST_BIT (data->visited_blocks, i))
9632 return BASIC_BLOCK (i + (INVALID_BLOCK + 1));
9636 /* Destroy the data structures needed for depth-first search on the
9640 flow_dfs_compute_reverse_finish (data)
9641 depth_first_search_ds data;
9644 sbitmap_free (data->visited_blocks);
9649 /* Find the root node of the loop pre-header extended basic block and
9650 the edges along the trace from the root node to the loop header. */
9653 flow_loop_pre_header_scan (loop)
9659 loop->num_pre_header_edges = 0;
9661 if (loop->num_entries != 1)
9664 ebb = loop->entry_edges[0]->src;
9666 if (ebb != ENTRY_BLOCK_PTR)
9670 /* Count number of edges along trace from loop header to
9671 root of pre-header extended basic block. Usually this is
9672 only one or two edges. */
9674 while (ebb->pred->src != ENTRY_BLOCK_PTR && ! ebb->pred->pred_next)
9676 ebb = ebb->pred->src;
9680 loop->pre_header_edges = (edge *) xmalloc (num * sizeof (edge *));
9681 loop->num_pre_header_edges = num;
9683 /* Store edges in order that they are followed. The source
9684 of the first edge is the root node of the pre-header extended
9685 basic block and the destination of the last last edge is
9687 for (e = loop->entry_edges[0]; num; e = e->src->pred)
9689 loop->pre_header_edges[--num] = e;
9695 /* Return the block for the pre-header of the loop with header
9696 HEADER where DOM specifies the dominator information. Return NULL if
9697 there is no pre-header. */
9700 flow_loop_pre_header_find (header, dom)
9704 basic_block pre_header;
9707 /* If block p is a predecessor of the header and is the only block
9708 that the header does not dominate, then it is the pre-header. */
9710 for (e = header->pred; e; e = e->pred_next)
9712 basic_block node = e->src;
9714 if (node != ENTRY_BLOCK_PTR
9715 && ! TEST_BIT (dom[node->index], header->index))
9717 if (pre_header == NULL)
9721 /* There are multiple edges into the header from outside
9722 the loop so there is no pre-header block. */
9731 /* Add LOOP to the loop hierarchy tree where PREVLOOP was the loop
9732 previously added. The insertion algorithm assumes that the loops
9733 are added in the order found by a depth first search of the CFG. */
9736 flow_loop_tree_node_add (prevloop, loop)
9737 struct loop *prevloop;
9741 if (flow_loop_nested_p (prevloop, loop))
9743 prevloop->inner = loop;
9744 loop->outer = prevloop;
9748 while (prevloop->outer)
9750 if (flow_loop_nested_p (prevloop->outer, loop))
9752 prevloop->next = loop;
9753 loop->outer = prevloop->outer;
9756 prevloop = prevloop->outer;
9759 prevloop->next = loop;
9763 /* Build the loop hierarchy tree for LOOPS. */
9766 flow_loops_tree_build (loops)
9767 struct loops *loops;
9772 num_loops = loops->num;
9776 /* Root the loop hierarchy tree with the first loop found.
9777 Since we used a depth first search this should be the
9779 loops->tree_root = &loops->array[0];
9780 loops->tree_root->outer = loops->tree_root->inner = loops->tree_root->next = NULL;
9782 /* Add the remaining loops to the tree. */
9783 for (i = 1; i < num_loops; i++)
9784 flow_loop_tree_node_add (&loops->array[i - 1], &loops->array[i]);
9787 /* Helper function to compute loop nesting depth and enclosed loop level
9788 for the natural loop specified by LOOP at the loop depth DEPTH.
9789 Returns the loop level. */
9792 flow_loop_level_compute (loop, depth)
9802 /* Traverse loop tree assigning depth and computing level as the
9803 maximum level of all the inner loops of this loop. The loop
9804 level is equivalent to the height of the loop in the loop tree
9805 and corresponds to the number of enclosed loop levels (including
9807 for (inner = loop->inner; inner; inner = inner->next)
9811 ilevel = flow_loop_level_compute (inner, depth + 1) + 1;
9816 loop->level = level;
9817 loop->depth = depth;
9821 /* Compute the loop nesting depth and enclosed loop level for the loop
9822 hierarchy tree specfied by LOOPS. Return the maximum enclosed loop
9826 flow_loops_level_compute (loops)
9827 struct loops *loops;
9833 /* Traverse all the outer level loops. */
9834 for (loop = loops->tree_root; loop; loop = loop->next)
9836 level = flow_loop_level_compute (loop, 1);
9844 /* Scan a single natural loop specified by LOOP collecting information
9845 about it specified by FLAGS. */
9848 flow_loop_scan (loops, loop, flags)
9849 struct loops *loops;
9853 /* Determine prerequisites. */
9854 if ((flags & LOOP_EXITS_DOMS) && ! loop->exit_edges)
9855 flags |= LOOP_EXIT_EDGES;
9857 if (flags & LOOP_ENTRY_EDGES)
9859 /* Find edges which enter the loop header.
9860 Note that the entry edges should only
9861 enter the header of a natural loop. */
9863 = flow_loop_entry_edges_find (loop->header,
9865 &loop->entry_edges);
9868 if (flags & LOOP_EXIT_EDGES)
9870 /* Find edges which exit the loop. */
9872 = flow_loop_exit_edges_find (loop->nodes,
9876 if (flags & LOOP_EXITS_DOMS)
9880 /* Determine which loop nodes dominate all the exits
9882 loop->exits_doms = sbitmap_alloc (n_basic_blocks);
9883 sbitmap_copy (loop->exits_doms, loop->nodes);
9884 for (j = 0; j < loop->num_exits; j++)
9885 sbitmap_a_and_b (loop->exits_doms, loop->exits_doms,
9886 loops->cfg.dom[loop->exit_edges[j]->src->index]);
9888 /* The header of a natural loop must dominate
9890 if (! TEST_BIT (loop->exits_doms, loop->header->index))
9894 if (flags & LOOP_PRE_HEADER)
9896 /* Look to see if the loop has a pre-header node. */
9898 = flow_loop_pre_header_find (loop->header, loops->cfg.dom);
9900 /* Find the blocks within the extended basic block of
9901 the loop pre-header. */
9902 flow_loop_pre_header_scan (loop);
9908 /* Find all the natural loops in the function and save in LOOPS structure
9909 and recalculate loop_depth information in basic block structures.
9910 FLAGS controls which loop information is collected.
9911 Return the number of natural loops found. */
9914 flow_loops_find (loops, flags)
9915 struct loops *loops;
9927 /* This function cannot be repeatedly called with different
9928 flags to build up the loop information. The loop tree
9929 must always be built if this function is called. */
9930 if (! (flags & LOOP_TREE))
9933 memset (loops, 0, sizeof (*loops));
9935 /* Taking care of this degenerate case makes the rest of
9936 this code simpler. */
9937 if (n_basic_blocks == 0)
9943 /* Compute the dominators. */
9944 dom = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
9945 calculate_dominance_info (NULL, dom, CDI_DOMINATORS);
9947 /* Count the number of loop edges (back edges). This should be the
9948 same as the number of natural loops. */
9951 for (b = 0; b < n_basic_blocks; b++)
9955 header = BASIC_BLOCK (b);
9956 header->loop_depth = 0;
9958 for (e = header->pred; e; e = e->pred_next)
9960 basic_block latch = e->src;
9962 /* Look for back edges where a predecessor is dominated
9963 by this block. A natural loop has a single entry
9964 node (header) that dominates all the nodes in the
9965 loop. It also has single back edge to the header
9966 from a latch node. Note that multiple natural loops
9967 may share the same header. */
9968 if (b != header->index)
9971 if (latch != ENTRY_BLOCK_PTR && TEST_BIT (dom[latch->index], b))
9978 /* Compute depth first search order of the CFG so that outer
9979 natural loops will be found before inner natural loops. */
9980 dfs_order = (int *) xmalloc (n_basic_blocks * sizeof (int));
9981 rc_order = (int *) xmalloc (n_basic_blocks * sizeof (int));
9982 flow_depth_first_order_compute (dfs_order, rc_order);
9984 /* Save CFG derived information to avoid recomputing it. */
9985 loops->cfg.dom = dom;
9986 loops->cfg.dfs_order = dfs_order;
9987 loops->cfg.rc_order = rc_order;
9989 /* Allocate loop structures. */
9991 = (struct loop *) xcalloc (num_loops, sizeof (struct loop));
9993 headers = sbitmap_alloc (n_basic_blocks);
9994 sbitmap_zero (headers);
9996 loops->shared_headers = sbitmap_alloc (n_basic_blocks);
9997 sbitmap_zero (loops->shared_headers);
9999 /* Find and record information about all the natural loops
10002 for (b = 0; b < n_basic_blocks; b++)
10004 basic_block header;
10006 /* Search the nodes of the CFG in reverse completion order
10007 so that we can find outer loops first. */
10008 header = BASIC_BLOCK (rc_order[b]);
10010 /* Look for all the possible latch blocks for this header. */
10011 for (e = header->pred; e; e = e->pred_next)
10013 basic_block latch = e->src;
10015 /* Look for back edges where a predecessor is dominated
10016 by this block. A natural loop has a single entry
10017 node (header) that dominates all the nodes in the
10018 loop. It also has single back edge to the header
10019 from a latch node. Note that multiple natural loops
10020 may share the same header. */
10021 if (latch != ENTRY_BLOCK_PTR
10022 && TEST_BIT (dom[latch->index], header->index))
10026 loop = loops->array + num_loops;
10028 loop->header = header;
10029 loop->latch = latch;
10030 loop->num = num_loops;
10037 for (i = 0; i < num_loops; i++)
10039 struct loop *loop = &loops->array[i];
10041 /* Keep track of blocks that are loop headers so
10042 that we can tell which loops should be merged. */
10043 if (TEST_BIT (headers, loop->header->index))
10044 SET_BIT (loops->shared_headers, loop->header->index);
10045 SET_BIT (headers, loop->header->index);
10047 /* Find nodes contained within the loop. */
10048 loop->nodes = sbitmap_alloc (n_basic_blocks);
10050 = flow_loop_nodes_find (loop->header, loop->latch, loop->nodes);
10052 /* Compute first and last blocks within the loop.
10053 These are often the same as the loop header and
10054 loop latch respectively, but this is not always
10057 = BASIC_BLOCK (sbitmap_first_set_bit (loop->nodes));
10059 = BASIC_BLOCK (sbitmap_last_set_bit (loop->nodes));
10061 flow_loop_scan (loops, loop, flags);
10064 /* Natural loops with shared headers may either be disjoint or
10065 nested. Disjoint loops with shared headers cannot be inner
10066 loops and should be merged. For now just mark loops that share
10068 for (i = 0; i < num_loops; i++)
10069 if (TEST_BIT (loops->shared_headers, loops->array[i].header->index))
10070 loops->array[i].shared = 1;
10072 sbitmap_free (headers);
10076 sbitmap_vector_free (dom);
10079 loops->num = num_loops;
10081 /* Build the loop hierarchy tree. */
10082 flow_loops_tree_build (loops);
10084 /* Assign the loop nesting depth and enclosed loop level for each
10086 loops->levels = flow_loops_level_compute (loops);
10092 /* Update the information regarding the loops in the CFG
10093 specified by LOOPS. */
10095 flow_loops_update (loops, flags)
10096 struct loops *loops;
10099 /* One day we may want to update the current loop data. For now
10100 throw away the old stuff and rebuild what we need. */
10102 flow_loops_free (loops);
10104 return flow_loops_find (loops, flags);
10108 /* Return non-zero if edge E enters header of LOOP from outside of LOOP. */
10111 flow_loop_outside_edge_p (loop, e)
10112 const struct loop *loop;
10115 if (e->dest != loop->header)
10117 return (e->src == ENTRY_BLOCK_PTR)
10118 || ! TEST_BIT (loop->nodes, e->src->index);
10121 /* Clear LOG_LINKS fields of insns in a chain.
10122 Also clear the global_live_at_{start,end} fields of the basic block
10126 clear_log_links (insns)
10132 for (i = insns; i; i = NEXT_INSN (i))
10136 for (b = 0; b < n_basic_blocks; b++)
10138 basic_block bb = BASIC_BLOCK (b);
10140 bb->global_live_at_start = NULL;
10141 bb->global_live_at_end = NULL;
10144 ENTRY_BLOCK_PTR->global_live_at_end = NULL;
10145 EXIT_BLOCK_PTR->global_live_at_start = NULL;
10148 /* Given a register bitmap, turn on the bits in a HARD_REG_SET that
10149 correspond to the hard registers, if any, set in that map. This
10150 could be done far more efficiently by having all sorts of special-cases
10151 with moving single words, but probably isn't worth the trouble. */
10154 reg_set_to_hard_reg_set (to, from)
10160 EXECUTE_IF_SET_IN_BITMAP
10163 if (i >= FIRST_PSEUDO_REGISTER)
10165 SET_HARD_REG_BIT (*to, i);
10169 /* Called once at intialization time. */
10174 static int initialized;
10178 gcc_obstack_init (&flow_obstack);
10179 flow_firstobj = (char *) obstack_alloc (&flow_obstack, 0);
10184 obstack_free (&flow_obstack, flow_firstobj);
10185 flow_firstobj = (char *) obstack_alloc (&flow_obstack, 0);
10189 /* Assume that the preceeding pass has possibly eliminated jump instructions
10190 or converted the unconditional jumps. Eliminate the edges from CFG.
10191 Return true if any edges are eliminated. */
10194 purge_dead_edges (bb)
10198 rtx insn = bb->end;
10199 bool purged = false;
10201 if (GET_CODE (insn) == JUMP_INSN && !simplejump_p (insn))
10203 if (GET_CODE (insn) == JUMP_INSN)
10207 /* We do care only about conditional jumps and simplejumps. */
10208 if (!any_condjump_p (insn)
10209 && !returnjump_p (insn)
10210 && !simplejump_p (insn))
10212 for (e = bb->succ; e; e = next)
10214 next = e->succ_next;
10216 /* Check purposes we can have edge. */
10217 if ((e->flags & EDGE_FALLTHRU)
10218 && any_condjump_p (insn))
10220 if (e->dest != EXIT_BLOCK_PTR
10221 && e->dest->head == JUMP_LABEL (insn))
10223 if (e->dest == EXIT_BLOCK_PTR
10224 && returnjump_p (insn))
10229 if (!bb->succ || !purged)
10232 fprintf (rtl_dump_file, "Purged edges from bb %i\n", bb->index);
10236 /* Redistribute probabilities. */
10237 if (!bb->succ->succ_next)
10239 bb->succ->probability = REG_BR_PROB_BASE;
10240 bb->succ->count = bb->count;
10244 note = find_reg_note (insn, REG_BR_PROB, NULL);
10247 b = BRANCH_EDGE (bb);
10248 f = FALLTHRU_EDGE (bb);
10249 b->probability = INTVAL (XEXP (note, 0));
10250 f->probability = REG_BR_PROB_BASE - b->probability;
10251 b->count = bb->count * b->probability / REG_BR_PROB_BASE;
10252 f->count = bb->count * f->probability / REG_BR_PROB_BASE;
10257 /* Cleanup abnormal edges caused by throwing insns that have been
10259 if (! can_throw_internal (bb->end))
10260 for (e = bb->succ; e; e = next)
10262 next = e->succ_next;
10263 if (e->flags & EDGE_EH)
10270 /* If we don't see a jump insn, we don't know exactly why the block would
10271 have been broken at this point. Look for a simple, non-fallthru edge,
10272 as these are only created by conditional branches. If we find such an
10273 edge we know that there used to be a jump here and can then safely
10274 remove all non-fallthru edges. */
10275 for (e = bb->succ; e && (e->flags & (EDGE_COMPLEX | EDGE_FALLTHRU));
10279 for (e = bb->succ; e; e = next)
10281 next = e->succ_next;
10282 if (!(e->flags & EDGE_FALLTHRU))
10283 remove_edge (e), purged = true;
10285 if (!bb->succ || bb->succ->succ_next)
10287 bb->succ->probability = REG_BR_PROB_BASE;
10288 bb->succ->count = bb->count;
10291 fprintf (rtl_dump_file, "Purged non-fallthru edges from bb %i\n",
10296 /* Search all basic blocks for potentionally dead edges and purge them.
10298 Return true ifif some edge has been elliminated.
10302 purge_all_dead_edges ()
10304 int i, purged = false;
10305 for (i = 0; i < n_basic_blocks; i++)
10306 purged |= purge_dead_edges (BASIC_BLOCK (i));