/* Data flow analysis for GNU compiler.
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */
/* This file contains the data flow analysis pass of the compiler.  It
   computes data flow information which tells combine_instructions
   which insns to consider combining and controls register allocation.

   Additional data flow information that is too bulky to record is
   generated during the analysis, and is used at that time to create
   autoincrement and autodecrement addressing.

   The first step is dividing the function into basic blocks.
   find_basic_blocks does this.  Then life_analysis determines
   where each register is live and where it is dead.
   ** find_basic_blocks **

   find_basic_blocks divides the current function's rtl into basic
   blocks and constructs the CFG.  The blocks are recorded in the
   basic_block_info array; the CFG exists in the edge structures
   referenced by the blocks.
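   For instance, a pass can visit every block and outgoing edge like
   this (an illustrative sketch only; process_edge stands for whatever
   the pass wants to do and is not a function defined in this file):

	int i;
	edge e;

	for (i = 0; i < n_basic_blocks; i++)
	  for (e = BASIC_BLOCK (i)->succ; e; e = e->succ_next)
	    process_edge (e);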
   find_basic_blocks also finds any unreachable loops and deletes them.

   ** life_analysis **

   life_analysis is called immediately after find_basic_blocks.
   It uses the basic block information to determine where each
   hard or pseudo register is live.
   ** live-register info **

   The information about where each register is live is in two parts:
   the REG_NOTES of insns, and the vector basic_block->global_live_at_start.

   basic_block->global_live_at_start has an element for each basic
   block, and the element is a bit-vector with a bit for each hard or
   pseudo register.  The bit is 1 if the register is live at the
   beginning of the basic block.
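   For example, whether register REGNO is live on entry to block BB
   can be tested with the regset bit directly (an illustrative sketch):

	if (REGNO_REG_SET_P (BB->global_live_at_start, REGNO))
	  ... REGNO is live at the start of BB ...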
   Two types of elements can be added to an insn's REG_NOTES.
   A REG_DEAD note is added to an insn's REG_NOTES for any register
   that meets both of two conditions:  The value in the register is not
   needed in subsequent insns and the insn does not replace the value in
   the register (in the case of multi-word hard registers, the value in
   each register must be replaced by the insn to avoid a REG_DEAD note).

   In the vast majority of cases, an object in a REG_DEAD note will be
   used somewhere in the insn.  The (rare) exception to this is if an
   insn uses a multi-word hard register and only some of the registers are
   needed in subsequent insns.  In that case, REG_DEAD notes will be
   provided for those hard registers that are not subsequently needed.
   Partial REG_DEAD notes of this type do not occur when an insn sets
   only some of the hard registers used in such a multi-word operand;
   omitting REG_DEAD notes for objects stored in an insn is optional and
   the desire to do so does not justify the complexity of the partial
   REG_DEAD notes.
   REG_UNUSED notes are added for each register that is set by the insn
   but is unused subsequently (if every register set by the insn is unused
   and the insn does not reference memory or have some other side-effect,
   the insn is deleted instead).  If only part of a multi-word hard
   register is used in a subsequent insn, REG_UNUSED notes are made for
   the parts that will not be used.
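   Schematically (an invented example, not actual compiler output), an
   insn that consumes the last use of pseudo 117 and whose own result
   in pseudo 118 is never needed could carry both kinds of notes:

	(insn 42 41 43 (set (reg:SI 118)
		(plus:SI (reg:SI 117) (const_int 4))) ...
	     (expr_list:REG_DEAD (reg:SI 117)
		(expr_list:REG_UNUSED (reg:SI 118)
		   (nil))))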
   To determine which registers are live after any insn, one can
   start from the beginning of the basic block and scan insns, noting
   which registers are set by each insn and which die there.
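   A forward scan of that kind might look like this (an illustrative
   sketch only; update_live_for_notes and update_live_for_sets are
   hypothetical helpers, not functions in this file):

	regset live = ...copy of bb->global_live_at_start...;
	rtx insn;

	for (insn = bb->head; ; insn = NEXT_INSN (insn))
	  {
	    if (INSN_P (insn))
	      {
		update_live_for_notes (live, REG_NOTES (insn));
		update_live_for_sets (live, PATTERN (insn));
	      }
	    if (insn == bb->end)
	      break;
	  }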
   ** Other actions of life_analysis **

   life_analysis sets up the LOG_LINKS fields of insns because the
   information needed to do so is readily available.

   life_analysis deletes insns whose only effect is to store a value
   that is never used.
   life_analysis notices cases where a reference to a register as
   a memory address can be combined with a preceding or following
   incrementation or decrementation of the register.  The separate
   instruction to increment or decrement is deleted and the address
   is changed to a POST_INC or similar rtx.

   Each time an incrementing or decrementing address is created,
   a REG_INC element is added to the insn's REG_NOTES list.
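   For example (schematically), the two-insn sequence

	(set (mem:SI (reg:SI 99)) (reg:SI 100))
	(set (reg:SI 99) (plus:SI (reg:SI 99) (const_int 4)))

   can be collapsed into the single insn

	(set (mem:SI (post_inc:SI (reg:SI 99))) (reg:SI 100))

   which then carries a REG_INC note for register 99.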
   life_analysis fills in certain vectors containing information about
   register usage: REG_N_REFS, REG_N_DEATHS, REG_N_SETS, REG_LIVE_LENGTH,
   REG_N_CALLS_CROSSED and REG_BASIC_BLOCK.

   life_analysis sets current_function_sp_is_unchanging if the function
   doesn't modify the stack pointer.  */
/* TODO:

   Split out from life_analysis:
	- local property discovery (bb->local_live, bb->local_set)
	- global property computation
	- pre/post modify transformation  */
#include "hard-reg-set.h"
#include "basic-block.h"
#include "insn-config.h"
#include "function.h"
#include "splay-tree.h"

#define obstack_chunk_alloc xmalloc
#define obstack_chunk_free free
/* EXIT_IGNORE_STACK should be nonzero if, when returning from a function,
   the stack pointer does not matter.  The value is tested only in
   functions that have frame pointers.
   No definition is equivalent to always zero.  */
#ifndef EXIT_IGNORE_STACK
#define EXIT_IGNORE_STACK 0
#endif

#ifndef HAVE_epilogue
#define HAVE_epilogue 0
#endif
#ifndef HAVE_prologue
#define HAVE_prologue 0
#endif
#ifndef HAVE_sibcall_epilogue
#define HAVE_sibcall_epilogue 0
#endif

#ifndef LOCAL_REGNO
#define LOCAL_REGNO(REGNO)  0
#endif
#ifndef EPILOGUE_USES
#define EPILOGUE_USES(REGNO)  0
#endif

#ifdef HAVE_conditional_execution
#ifndef REVERSE_CONDEXEC_PREDICATES_P
#define REVERSE_CONDEXEC_PREDICATES_P(x, y) ((x) == reverse_condition (y))
#endif
#endif
/* The obstack on which the flow graph components are allocated.  */

struct obstack flow_obstack;
static char *flow_firstobj;

/* Number of basic blocks in the current function.  */

int n_basic_blocks;

/* Number of edges in the current function.  */

int n_edges;

/* The basic block array.  */

varray_type basic_block_info;
/* The special entry and exit blocks.  */

struct basic_block_def entry_exit_blocks[2]
= {{NULL,			/* head */
    NULL,			/* end */
    NULL,			/* head_tree */
    NULL,			/* end_tree */
    NULL,			/* pred */
    NULL,			/* succ */
    NULL,			/* local_set */
    NULL,			/* cond_local_set */
    NULL,			/* global_live_at_start */
    NULL,			/* global_live_at_end */
    NULL,			/* aux */
    ENTRY_BLOCK,		/* index */
    0,				/* loop_depth */
    0,				/* count */
    0				/* frequency */
  },
  {
    NULL,			/* head */
    NULL,			/* end */
    NULL,			/* head_tree */
    NULL,			/* end_tree */
    NULL,			/* pred */
    NULL,			/* succ */
    NULL,			/* local_set */
    NULL,			/* cond_local_set */
    NULL,			/* global_live_at_start */
    NULL,			/* global_live_at_end */
    NULL,			/* aux */
    EXIT_BLOCK,			/* index */
    0,				/* loop_depth */
    0,				/* count */
    0				/* frequency */
  }
};
/* Nonzero if the second flow pass has completed.  */

int flow2_completed;

/* Maximum register number used in this function, plus one.  */

int max_regno;

/* Indexed by n, giving various register information.  */

varray_type reg_n_info;

/* Size of a regset for the current function,
   in (1) bytes and (2) elements.  */

int regset_bytes;
int regset_size;

/* Regset of regs live when calls to `setjmp'-like functions happen.  */
/* ??? Does this exist only for the setjmp-clobbered warning message?  */

regset regs_live_at_setjmp;
/* List made of EXPR_LIST rtx's which gives pairs of pseudo registers
   that have to go in the same hard reg.
   The first two regs in the list are a pair, and the next two
   are another pair, etc.  */

rtx regs_may_share;
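/* An illustrative shape of that list, with hypothetical pseudos A..D
   (A pairs with B, and C pairs with D):

	(expr_list (reg A)
	   (expr_list (reg B)
	      (expr_list (reg C)
		 (expr_list (reg D)
		    (nil)))))  */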
/* Callback that determines if it's ok for a function to have no
   noreturn attribute.  */

int (*lang_missing_noreturn_ok_p) PARAMS ((tree));

/* Set of registers that may be eliminable.  These are handled specially
   in updating regs_ever_live.  */

static HARD_REG_SET elim_reg_set;
/* The basic block structure for every insn, indexed by uid.  */

varray_type basic_block_for_insn;

/* The labels mentioned in non-jump rtl.  Valid during find_basic_blocks.  */
/* ??? Should probably be using LABEL_NUSES instead.  It would take a
   bit of surgery to be able to use or co-opt the routines in jump.  */

static rtx label_value_list;
static rtx tail_recursion_label_list;
/* Holds information for tracking conditional register life information.  */
struct reg_cond_life_info
{
  /* A boolean expression of conditions under which a register is dead.  */
  rtx condition;
  /* Conditions under which a register is dead at the basic block end.  */
  rtx orig_condition;

  /* A boolean expression of conditions under which a register has been
     stored into.  */
  rtx stores;

  /* ??? Could store mask of bytes that are dead, so that we could finally
     track lifetimes of multi-word registers accessed via subregs.  */
};
/* For use in communicating between propagate_block and its subroutines.
   Holds all information needed to compute life and def-use information.  */

struct propagate_block_info
{
  /* The basic block we're considering.  */
  basic_block bb;

  /* Bit N is set if register N is conditionally or unconditionally live.  */
  regset reg_live;

  /* Bit N is set if register N is set this insn.  */
  regset new_set;

  /* Element N is the next insn that uses (hard or pseudo) register N
     within the current basic block; or zero, if there is no such insn.  */
  rtx *reg_next_use;

  /* Contains a list of all the MEMs we are tracking for dead store
     elimination.  */
  rtx mem_set_list;

  /* If non-null, record the set of registers set unconditionally in the
     basic block.  */
  regset local_set;

  /* If non-null, record the set of registers set conditionally in the
     basic block.  */
  regset cond_local_set;

#ifdef HAVE_conditional_execution
  /* Indexed by register number, holds a reg_cond_life_info for each
     register that is not unconditionally live or dead.  */
  splay_tree reg_cond_dead;

  /* Bit N is set if register N is in an expression in reg_cond_dead.  */
  regset reg_cond_reg;
#endif

  /* The length of mem_set_list.  */
  int mem_set_list_len;

  /* Non-zero if the value of CC0 is live.  */
  int cc0_live;

  /* Flags controlling the set of information propagate_block collects.  */
  int flags;
};

/* Maximum length of pbi->mem_set_list before we start dropping
   new elements on the floor.  */
#define MAX_MEM_SET_LIST_LEN	100
/* Store the data structures necessary for depth-first search.  */
struct depth_first_search_dsS {
  /* stack for backtracking during the algorithm */
  basic_block *stack;

  /* number of edges in the stack.  That is, positions 0, ..., sp-1
     have edges.  */
  unsigned int sp;

  /* record of basic blocks already seen by depth-first search */
  sbitmap visited_blocks;
};
typedef struct depth_first_search_dsS *depth_first_search_ds;
/* Have print_rtl_and_abort give the same information that fancy_abort
   does.  */
#define print_rtl_and_abort() \
  print_rtl_and_abort_fcn (__FILE__, __LINE__, __FUNCTION__)
/* Forward declarations */
static bool try_crossjump_to_edge	PARAMS ((int, edge, edge));
static bool try_crossjump_bb		PARAMS ((int, basic_block));
static bool outgoing_edges_match	PARAMS ((basic_block, basic_block));
static int flow_find_cross_jump		PARAMS ((int, basic_block, basic_block,
						 rtx *, rtx *));
static int count_basic_blocks		PARAMS ((rtx));
static void find_basic_blocks_1		PARAMS ((rtx));
static rtx find_label_refs		PARAMS ((rtx, rtx));
static void make_edges			PARAMS ((rtx, int, int, int));
static void make_label_edge		PARAMS ((sbitmap *, basic_block,
						 rtx, int));
static void make_eh_edge		PARAMS ((sbitmap *, basic_block, rtx));
static void commit_one_edge_insertion	PARAMS ((edge));
static void delete_unreachable_blocks	PARAMS ((void));
static int can_delete_note_p		PARAMS ((rtx));
static void expunge_block		PARAMS ((basic_block));
static int can_delete_label_p		PARAMS ((rtx));
static int tail_recursion_label_p	PARAMS ((rtx));
static int merge_blocks_move_predecessor_nojumps PARAMS ((basic_block,
							  basic_block));
static int merge_blocks_move_successor_nojumps PARAMS ((basic_block,
							basic_block));
static int merge_blocks			PARAMS ((edge, basic_block,
						 basic_block, int));
static bool try_optimize_cfg		PARAMS ((int));
static bool can_fallthru		PARAMS ((basic_block, basic_block));
static bool try_redirect_by_replacing_jump PARAMS ((edge, basic_block));
static bool try_simplify_condjump	PARAMS ((basic_block));
static bool try_forward_edges		PARAMS ((int, basic_block));
static void tidy_fallthru_edges		PARAMS ((void));
static int verify_wide_reg_1		PARAMS ((rtx *, void *));
static void verify_wide_reg		PARAMS ((int, rtx, rtx));
static void verify_local_live_at_start	PARAMS ((regset, basic_block));
static void notice_stack_pointer_modification_1 PARAMS ((rtx, rtx, void *));
static void notice_stack_pointer_modification PARAMS ((rtx));
static void mark_reg			PARAMS ((rtx, void *));
static void mark_regs_live_at_end	PARAMS ((regset));
static int set_phi_alternative_reg	PARAMS ((rtx, int, int, void *));
static void calculate_global_regs_live	PARAMS ((sbitmap, sbitmap, int));
static void propagate_block_delete_insn PARAMS ((basic_block, rtx));
static rtx propagate_block_delete_libcall PARAMS ((basic_block, rtx, rtx));
static int insn_dead_p			PARAMS ((struct propagate_block_info *,
						 rtx, int, rtx));
static int libcall_dead_p		PARAMS ((struct propagate_block_info *,
						 rtx, rtx));
static void mark_set_regs		PARAMS ((struct propagate_block_info *,
						 rtx, rtx));
static void mark_set_1			PARAMS ((struct propagate_block_info *,
						 enum rtx_code, rtx, rtx,
						 rtx, int));
#ifdef HAVE_conditional_execution
static int mark_regno_cond_dead		PARAMS ((struct propagate_block_info *,
						 int, rtx));
static void free_reg_cond_life_info	PARAMS ((splay_tree_value));
static int flush_reg_cond_reg_1		PARAMS ((splay_tree_node, void *));
static void flush_reg_cond_reg		PARAMS ((struct propagate_block_info *,
						 int));
static rtx elim_reg_cond		PARAMS ((rtx, unsigned int));
static rtx ior_reg_cond			PARAMS ((rtx, rtx, int));
static rtx not_reg_cond			PARAMS ((rtx));
static rtx and_reg_cond			PARAMS ((rtx, rtx, int));
#endif
#ifdef AUTO_INC_DEC
static void attempt_auto_inc		PARAMS ((struct propagate_block_info *,
						 rtx, rtx, rtx, rtx, rtx));
static void find_auto_inc		PARAMS ((struct propagate_block_info *,
						 rtx, rtx));
static int try_pre_increment_1		PARAMS ((struct propagate_block_info *,
						 rtx));
static int try_pre_increment		PARAMS ((rtx, rtx, HOST_WIDE_INT));
#endif
static void mark_used_reg		PARAMS ((struct propagate_block_info *,
						 rtx, rtx, rtx));
static void mark_used_regs		PARAMS ((struct propagate_block_info *,
						 rtx, rtx, rtx));
void dump_flow_info			PARAMS ((FILE *));
void debug_flow_info			PARAMS ((void));
static void print_rtl_and_abort_fcn	PARAMS ((const char *, int,
						 const char *))
     ATTRIBUTE_NORETURN;
static void add_to_mem_set_list		PARAMS ((struct propagate_block_info *,
						 rtx));
static void invalidate_mems_from_autoinc PARAMS ((struct propagate_block_info *,
						  rtx));
static void invalidate_mems_from_set	PARAMS ((struct propagate_block_info *,
						 rtx));
static void remove_fake_successors	PARAMS ((basic_block));
static void flow_nodes_print		PARAMS ((const char *, const sbitmap,
						 FILE *));
static void flow_edge_list_print	PARAMS ((const char *, const edge *,
						 int, FILE *));
static void flow_loops_cfg_dump		PARAMS ((const struct loops *,
						 FILE *));
static int flow_loop_nested_p		PARAMS ((struct loop *,
						 struct loop *));
static int flow_loop_entry_edges_find	PARAMS ((basic_block, const sbitmap,
						 edge **));
static int flow_loop_exit_edges_find	PARAMS ((const sbitmap, edge **));
static int flow_loop_nodes_find	PARAMS ((basic_block, basic_block, sbitmap));
static void flow_dfs_compute_reverse_init
  PARAMS ((depth_first_search_ds));
static void flow_dfs_compute_reverse_add_bb
  PARAMS ((depth_first_search_ds, basic_block));
static basic_block flow_dfs_compute_reverse_execute
  PARAMS ((depth_first_search_ds));
static void flow_dfs_compute_reverse_finish
  PARAMS ((depth_first_search_ds));
static void flow_loop_pre_header_scan PARAMS ((struct loop *));
static basic_block flow_loop_pre_header_find PARAMS ((basic_block,
						      const sbitmap *));
static void flow_loop_tree_node_add	PARAMS ((struct loop *, struct loop *));
static void flow_loops_tree_build	PARAMS ((struct loops *));
static int flow_loop_level_compute	PARAMS ((struct loop *, int));
static int flow_loops_level_compute	PARAMS ((struct loops *));
static void delete_dead_jumptables	PARAMS ((void));
static bool back_edge_of_syntactic_loop_p PARAMS ((basic_block, basic_block));
static bool need_fake_edge_p		PARAMS ((rtx));
/* Find basic blocks of the current function.
   F is the first insn of the function and NREGS the number of register
   numbers in use.  */

void
find_basic_blocks (f, nregs, file)
     rtx f;
     int nregs ATTRIBUTE_UNUSED;
     FILE *file ATTRIBUTE_UNUSED;
{
  int max_uid;

  timevar_push (TV_CFG);

  /* Flush out existing data.  */
  if (basic_block_info != NULL)
    {
      int i;

      clear_edges ();

      /* Clear bb->aux on all extant basic blocks.  We'll use this as a
	 tag for reuse during create_basic_block, just in case some pass
	 copies around basic block notes improperly.  */
      for (i = 0; i < n_basic_blocks; ++i)
	BASIC_BLOCK (i)->aux = NULL;

      VARRAY_FREE (basic_block_info);
    }

  n_basic_blocks = count_basic_blocks (f);

  /* Size the basic block table.  The actual structures will be allocated
     by find_basic_blocks_1, since we want to keep the structure pointers
     stable across calls to find_basic_blocks.  */
  /* ??? This whole issue would be much simpler if we called find_basic_blocks
     exactly once, and thereafter we don't have a single long chain of
     instructions at all until close to the end of compilation when we
     actually lay them out.  */

  VARRAY_BB_INIT (basic_block_info, n_basic_blocks, "basic_block_info");

  find_basic_blocks_1 (f);

  /* Record the block to which an insn belongs.  */
  /* ??? This should be done another way, by which (perhaps) a label is
     tagged directly with the basic block that it starts.  It is used for
     more than that currently, but IMO that is the only valid use.  */

  max_uid = get_max_uid ();

  /* Leave space for insns life_analysis makes in some cases for auto-inc.
     These cases are rare, so we don't need too much space.  */
  max_uid += max_uid / 10;

  compute_bb_for_insn (max_uid);

  /* Discover the edges of our cfg.  */
  make_edges (label_value_list, 0, n_basic_blocks - 1, 0);

  /* Do very simple cleanup now, for the benefit of code that runs between
     here and cleanup_cfg, e.g. thread_prologue_and_epilogue_insns.  */
  tidy_fallthru_edges ();

  mark_critical_edges ();

#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif

  timevar_pop (TV_CFG);
}
void
check_function_return_warnings ()
{
  if (warn_missing_noreturn
      && !TREE_THIS_VOLATILE (cfun->decl)
      && EXIT_BLOCK_PTR->pred == NULL
      && (lang_missing_noreturn_ok_p
	  && !lang_missing_noreturn_ok_p (cfun->decl)))
    warning ("function might be possible candidate for attribute `noreturn'");

  /* If we have a path to EXIT, then we do return.  */
  if (TREE_THIS_VOLATILE (cfun->decl)
      && EXIT_BLOCK_PTR->pred != NULL)
    warning ("`noreturn' function does return");

  /* If the clobber_return_insn appears in some basic block, then we
     do reach the end without returning a value.  */
  else if (warn_return_type
	   && cfun->x_clobber_return_insn != NULL
	   && EXIT_BLOCK_PTR->pred != NULL)
    {
      int max_uid = get_max_uid ();

      /* If clobber_return_insn was excised by jump1, then renumber_insns
	 can make max_uid smaller than the number still recorded in our rtx.
	 That's fine, since this is a quick way of verifying that the insn
	 is no longer in the chain.  */
      if (INSN_UID (cfun->x_clobber_return_insn) < max_uid)
	{
	  /* Recompute insn->block mapping, since the initial mapping is
	     set before we delete unreachable blocks.  */
	  compute_bb_for_insn (max_uid);

	  if (BLOCK_FOR_INSN (cfun->x_clobber_return_insn) != NULL)
	    warning ("control reaches end of non-void function");
	}
    }
}
/* Count the basic blocks of the function.  */

static int
count_basic_blocks (f)
     rtx f;
{
  register rtx insn;
  register RTX_CODE prev_code;
  register int count = 0;
  int saw_abnormal_edge = 0;
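  /* Seed PREV_CODE as if the function began right after a jump, so the
     first real insn below is counted as starting a new basic block.  */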
  prev_code = JUMP_INSN;
  for (insn = f; insn; insn = NEXT_INSN (insn))
    {
      enum rtx_code code = GET_CODE (insn);

      if (code == CODE_LABEL
	  || (GET_RTX_CLASS (code) == 'i'
	      && (prev_code == JUMP_INSN
		  || prev_code == BARRIER
		  || saw_abnormal_edge)))
	{
	  saw_abnormal_edge = 0;
	  count++;
	}

      /* Record whether this insn created an edge.  */
      if (code == CALL_INSN)
	{
	  rtx note;

	  /* If there is a nonlocal goto label and the specified
	     region number isn't -1, we have an edge.  */
	  if (nonlocal_goto_handler_labels
	      && ((note = find_reg_note (insn, REG_EH_REGION, NULL_RTX)) == 0
		  || INTVAL (XEXP (note, 0)) >= 0))
	    saw_abnormal_edge = 1;

	  else if (can_throw_internal (insn))
	    saw_abnormal_edge = 1;
	}
      else if (flag_non_call_exceptions
	       && code == INSN
	       && can_throw_internal (insn))
	saw_abnormal_edge = 1;

      if (code != NOTE)
	prev_code = code;
    }

  /* The rest of the compiler works a bit smoother when we don't have to
     check for the edge case of do-nothing functions with no basic blocks.  */
  if (count == 0)
    {
      emit_insn (gen_rtx_USE (VOIDmode, const0_rtx));
      count = 1;
    }

  return count;
}
/* Scan a list of insns for labels referred to other than by jumps.
   This is used to scan the alternatives of a call placeholder.  */

static rtx
find_label_refs (f, lvl)
     rtx f;
     rtx lvl;
{
  rtx insn;

  for (insn = f; insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn) && GET_CODE (insn) != JUMP_INSN)
      {
	rtx note;

	/* Make a list of all labels referred to other than by jumps
	   (which just don't have the REG_LABEL notes).

	   Make a special exception for labels followed by an ADDR*VEC,
	   as this would be a part of the tablejump setup code.

	   Make a special exception to registers loaded with label
	   values just before jump insns that use them.  */

	for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
	  if (REG_NOTE_KIND (note) == REG_LABEL)
	    {
	      rtx lab = XEXP (note, 0), next;

	      if ((next = next_nonnote_insn (lab)) != NULL
		  && GET_CODE (next) == JUMP_INSN
		  && (GET_CODE (PATTERN (next)) == ADDR_VEC
		      || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
		;
	      else if (GET_CODE (lab) == NOTE)
		;
	      else if (GET_CODE (NEXT_INSN (insn)) == JUMP_INSN
		       && find_reg_note (NEXT_INSN (insn), REG_LABEL, lab))
		;
	      else
		lvl = alloc_EXPR_LIST (0, XEXP (note, 0), lvl);
	    }
      }

  return lvl;
}
/* Assume that someone emitted code with control flow instructions into the
   basic block.  Update the data structures.  */

void
find_sub_basic_blocks (bb)
     basic_block bb;
{
  rtx insn = bb->head;
  rtx end = bb->end;
  rtx jump_insn = NULL_RTX;
  edge falltru;
  basic_block first_bb = bb;
  int i;

  if (GET_CODE (insn) == CODE_LABEL)
    insn = NEXT_INSN (insn);

  /* Scan the insn chain and try to find new basic block boundaries.  */
  while (1)
    {
      enum rtx_code code = GET_CODE (insn);

      switch (code)
	{
	case CODE_LABEL:
	  /* On a code label, split the current basic block.  */
	  falltru = split_block (bb, PREV_INSN (insn));
	  if (jump_insn)
	    bb->end = jump_insn;
	  bb = falltru->dest;
	  remove_edge (falltru);
	  jump_insn = NULL_RTX;
	  if (LABEL_ALTERNATE_NAME (insn))
	    make_edge (NULL, ENTRY_BLOCK_PTR, bb, 0);
	  break;

	case INSN:
	case JUMP_INSN:
	  /* In case we've previously split an insn on the JUMP_INSN, move
	     the block header to the proper place.  */
	  if (jump_insn)
	    {
	      falltru = split_block (bb, PREV_INSN (insn));
	      bb->end = jump_insn;
	      bb = falltru->dest;
	      remove_edge (falltru);
	      jump_insn = NULL_RTX;
	    }

	  /* We need some special care for those expressions.  */
	  if (GET_CODE (insn) == JUMP_INSN)
	    {
	      if (GET_CODE (PATTERN (insn)) == ADDR_VEC
		  || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
		abort ();
	      jump_insn = insn;
	    }
	  break;

	default:
	  break;
	}

      if (insn == end)
	break;
      insn = NEXT_INSN (insn);
    }

  /* In case the expander replaced a normal insn by a sequence terminating
     with a return and a barrier, or possibly some other sequence not
     behaving like an ordinary jump, we need to take care and move the
     basic block boundary.  */
  if (jump_insn && GET_CODE (bb->end) != JUMP_INSN)
    bb->end = jump_insn;

  /* We've possibly replaced the conditional jump by a conditional jump
     followed by a cleanup at the fallthru edge, so the outgoing edges may
     be dead.  */
  purge_dead_edges (bb);

  /* Now re-scan and wire in all edges.  This expects simple (conditional)
     jumps at the end of each new basic block.  */
  make_edges (NULL, first_bb->index, bb->index, 1);

  /* Update branch probabilities.  Expect only (un)conditional jumps
     to be created with only the forward edges.  */
  for (i = first_bb->index; i <= bb->index; i++)
    {
      edge e, f;
      basic_block b = BASIC_BLOCK (i);

      if (b != first_bb)
	for (e = b->pred; e; e = e->pred_next)
	  {
	    b->count += e->count;
	    b->frequency += EDGE_FREQUENCY (e);
	  }

      if (b->succ && b->succ->succ_next && !b->succ->succ_next->succ_next)
	{
	  rtx note = find_reg_note (b->end, REG_BR_PROB, NULL);
	  int probability;

	  probability = INTVAL (XEXP (find_reg_note (b->end,
						     REG_BR_PROB, NULL), 0));
	  e = BRANCH_EDGE (b);
	  e->probability = probability;
	  e->count = ((b->count * probability + REG_BR_PROB_BASE / 2)
		      / REG_BR_PROB_BASE);
	  f = FALLTHRU_EDGE (b);
	  f->probability = REG_BR_PROB_BASE - probability;
	  f->count = b->count - e->count;
	}

      if (b->succ && !b->succ->succ_next)
	{
	  e = b->succ;
	  e->probability = REG_BR_PROB_BASE;
	  e->count = b->count;
	}
    }
}
/* Find all basic blocks of the function whose first insn is F.

   Collect and return a list of labels whose addresses are taken.  This
   will be used in make_edges for use with computed gotos.  */

static void
find_basic_blocks_1 (f)
     rtx f;
{
  register rtx insn, next;
  int i = 0;
  rtx bb_note = NULL_RTX;
  rtx lvl = NULL_RTX;
  rtx trll = NULL_RTX;
  rtx head = NULL_RTX;
  rtx end = NULL_RTX;

  /* We process the instructions in a slightly different way than we did
     previously.  This is so that we see a NOTE_BASIC_BLOCK after we have
     closed out the previous block, so that it gets attached at the proper
     place.  Since this form should be equivalent to the previous,
     count_basic_blocks continues to use the old form as a check.  */
  for (insn = f; insn; insn = next)
    {
      enum rtx_code code = GET_CODE (insn);

      next = NEXT_INSN (insn);

      switch (code)
	{
	case NOTE:
	  {
	    int kind = NOTE_LINE_NUMBER (insn);

	    /* Look for basic block notes with which to keep the
	       basic_block_info pointers stable.  Unthread the note now;
	       we'll put it back at the right place in create_basic_block.
	       Or not at all if we've already found a note in this block.  */
	    if (kind == NOTE_INSN_BASIC_BLOCK)
	      {
		if (bb_note == NULL_RTX)
		  bb_note = insn;
		else
		  next = flow_delete_insn (insn);
	      }
	    break;
	  }

	case CODE_LABEL:
	  /* A basic block starts at a label.  If we've closed one off due
	     to a barrier or some such, no need to do it again.  */
	  if (head != NULL_RTX)
	    {
	      create_basic_block (i++, head, end, bb_note);
	      bb_note = NULL_RTX;
	    }
	  head = end = insn;
	  break;

	case JUMP_INSN:
	  /* A basic block ends at a jump.  */
	  if (head == NULL_RTX)
	    head = insn;
	  else
	    {
	      /* ??? Make a special check for table jumps.  The way this
		 happens is truly and amazingly gross.  We are about to
		 create a basic block that contains just a code label and
		 an addr*vec jump insn.  Worse, an addr_diff_vec creates
		 its own natural loop.

		 Prevent this bit of brain damage, pasting things together
		 correctly in make_edges.

		 The correct solution involves emitting the table directly
		 on the tablejump instruction as a note, or JUMP_LABEL.  */

	      if (GET_CODE (PATTERN (insn)) == ADDR_VEC
		  || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
		break;
	    }
	  end = insn;
	  goto new_bb_inclusive;

	case BARRIER:
	  /* A basic block ends at a barrier.  It may be that an unconditional
	     jump already closed the basic block -- no need to do it again.  */
	  if (head == NULL_RTX)
	    break;
	  goto new_bb_exclusive;

	case CALL_INSN:
	  {
	    /* Record whether this call created an edge.  */
	    rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
	    int region = (note ? INTVAL (XEXP (note, 0)) : 0);

	    if (GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
	      {
		/* Scan each of the alternatives for label refs.  */
		lvl = find_label_refs (XEXP (PATTERN (insn), 0), lvl);
		lvl = find_label_refs (XEXP (PATTERN (insn), 1), lvl);
		lvl = find_label_refs (XEXP (PATTERN (insn), 2), lvl);
		/* Record its tail recursion label, if any.  */
		if (XEXP (PATTERN (insn), 3) != NULL_RTX)
		  trll = alloc_EXPR_LIST (0, XEXP (PATTERN (insn), 3), trll);
	      }

	    /* A basic block ends at a call that can either throw or
	       do a non-local goto.  */
	    if ((nonlocal_goto_handler_labels && region >= 0)
		|| can_throw_internal (insn))
	      {
	      new_bb_inclusive:
		if (head == NULL_RTX)
		  head = insn;
		end = insn;

	      new_bb_exclusive:
		create_basic_block (i++, head, end, bb_note);
		head = end = NULL_RTX;
		bb_note = NULL_RTX;
		break;
	      }
	  }
	  /* Fall through.  */

	case INSN:
	  /* Non-call exceptions generate new blocks just like calls.  */
	  if (flag_non_call_exceptions && can_throw_internal (insn))
	    goto new_bb_inclusive;

	  if (head == NULL_RTX)
	    head = insn;
	  end = insn;
	  break;

	default:
	  break;
	}

      if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN)
	{
	  rtx note;

	  /* Make a list of all labels referred to other than by jumps.

	     Make a special exception for labels followed by an ADDR*VEC,
	     as this would be a part of the tablejump setup code.

	     Make a special exception to registers loaded with label
	     values just before jump insns that use them.  */

	  for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
	    if (REG_NOTE_KIND (note) == REG_LABEL)
	      {
		rtx lab = XEXP (note, 0), next;

		if ((next = next_nonnote_insn (lab)) != NULL
		    && GET_CODE (next) == JUMP_INSN
		    && (GET_CODE (PATTERN (next)) == ADDR_VEC
			|| GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
		  ;
		else if (GET_CODE (lab) == NOTE)
		  ;
		else if (GET_CODE (NEXT_INSN (insn)) == JUMP_INSN
			 && find_reg_note (NEXT_INSN (insn), REG_LABEL, lab))
		  ;
		else
		  lvl = alloc_EXPR_LIST (0, XEXP (note, 0), lvl);
	      }
	}
    }

  if (head != NULL_RTX)
    create_basic_block (i++, head, end, bb_note);
  else if (bb_note)
    flow_delete_insn (bb_note);

  if (i != n_basic_blocks)
    abort ();

  label_value_list = lvl;
  tail_recursion_label_list = trll;
}
/* Tidy the CFG by deleting unreachable code and whatnot.  */

void
cleanup_cfg (mode)
     int mode;
{
  timevar_push (TV_CLEANUP_CFG);
  delete_unreachable_blocks ();
  if (try_optimize_cfg (mode))
    delete_unreachable_blocks ();
  mark_critical_edges ();

  /* Kill the data we won't maintain.  */
  free_EXPR_LIST_list (&label_value_list);
  free_EXPR_LIST_list (&tail_recursion_label_list);

  timevar_pop (TV_CLEANUP_CFG);
}
/* Create a new basic block consisting of the instructions between
   HEAD and END inclusive.  Reuses the note and basic block struct
   in BB_NOTE, if any.  */

void
create_basic_block (index, head, end, bb_note)
     int index;
     rtx head, end, bb_note;
{
  basic_block bb;

  if (bb_note
      && ! RTX_INTEGRATED_P (bb_note)
      && (bb = NOTE_BASIC_BLOCK (bb_note)) != NULL
      && bb->aux == NULL)
    {
      /* If we found an existing note, thread it back onto the chain.  */

      rtx after;

      if (GET_CODE (head) == CODE_LABEL)
	after = head;
      else
	{
	  after = PREV_INSN (head);
	  head = bb_note;
	}

      if (after != bb_note && NEXT_INSN (after) != bb_note)
	reorder_insns (bb_note, bb_note, after);
    }
  else
    {
      /* Otherwise we must create a note and a basic block structure.
	 Since we allow basic block structs in rtl, give the struct
	 the same lifetime by allocating it off the function obstack
	 rather than using malloc.  */

      bb = (basic_block) obstack_alloc (&flow_obstack, sizeof (*bb));
      memset (bb, 0, sizeof (*bb));

      if (GET_CODE (head) == CODE_LABEL)
	bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK, head);
      else
	{
	  bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK, head);
	  head = bb_note;
	}
      NOTE_BASIC_BLOCK (bb_note) = bb;
    }

  /* Always include the bb note in the block.  */
  if (NEXT_INSN (end) == bb_note)
    end = bb_note;

  bb->head = head;
  bb->end = end;
  bb->index = index;
  BASIC_BLOCK (index) = bb;

  /* Tag the block so that we know it has been used when considering
     other basic block notes.  */
  bb->aux = bb;
}
/* Return the INSN immediately following the NOTE_INSN_BASIC_BLOCK
   note associated with the BLOCK.  */

rtx
first_insn_after_basic_block_note (block)
     basic_block block;
{
  rtx insn;

  /* Get the first instruction in the block.  */
  insn = block->head;

  if (insn == NULL_RTX)
    return NULL_RTX;
  if (GET_CODE (insn) == CODE_LABEL)
    insn = NEXT_INSN (insn);
  if (!NOTE_INSN_BASIC_BLOCK_P (insn))
    abort ();

  return NEXT_INSN (insn);
}
/* Records the basic block struct in BB_FOR_INSN, for every instruction
   indexed by INSN_UID.  MAX is the size of the array.  */

void
compute_bb_for_insn (max)
     int max;
{
  int i;

  if (basic_block_for_insn)
    VARRAY_FREE (basic_block_for_insn);
  VARRAY_BB_INIT (basic_block_for_insn, max, "basic_block_for_insn");

  for (i = 0; i < n_basic_blocks; ++i)
    {
      basic_block bb = BASIC_BLOCK (i);
      rtx insn, end;

      end = bb->end;
      insn = bb->head;
      while (1)
	{
	  int uid = INSN_UID (insn);
	  if (uid < max)
	    VARRAY_BB (basic_block_for_insn, uid) = bb;
	  if (insn == end)
	    break;
	  insn = NEXT_INSN (insn);
	}
    }
}
/* Free the memory associated with the edge structures.  */

void
clear_edges ()
{
  int i;
  edge e, n;

  for (i = 0; i < n_basic_blocks; ++i)
    {
      basic_block bb = BASIC_BLOCK (i);

      for (e = bb->succ; e; e = n)
	{
	  n = e->succ_next;
	  free (e);
	}
    }

  for (e = ENTRY_BLOCK_PTR->succ; e; e = n)
    {
      n = e->succ_next;
      free (e);
    }

  ENTRY_BLOCK_PTR->succ = 0;
  EXIT_BLOCK_PTR->pred = 0;
}
/* Identify the edges between basic blocks MIN to MAX.

   NONLOCAL_LABEL_LIST is a list of non-local labels in the function.  Blocks
   that are otherwise unreachable may be reachable with a non-local goto.

   BB_EH_END is an array indexed by basic block number in which we record
   the list of exception regions active at the end of the basic block.  */

static void
make_edges (label_value_list, min, max, update_p)
     rtx label_value_list;
     int min, max, update_p;
{
  int i;
  sbitmap *edge_cache = NULL;

  /* Assume no computed jump; revise as we create edges.  */
  current_function_has_computed_jump = 0;

  /* Heavy use of computed goto in machine-generated code can lead to
     nearly fully-connected CFGs.  In that case we spend a significant
     amount of time searching the edge lists for duplicates.  */
  if (forced_labels || label_value_list)
    {
      edge_cache = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
      sbitmap_vector_zero (edge_cache, n_basic_blocks);

      if (update_p)
	for (i = min; i <= max; ++i)
	  {
	    edge e;

	    for (e = BASIC_BLOCK (i)->succ; e; e = e->succ_next)
	      if (e->dest != EXIT_BLOCK_PTR)
		SET_BIT (edge_cache[i], e->dest->index);
	  }
    }
  /* By nature of the way these get numbered, block 0 is always the entry.  */
  make_edge (edge_cache, ENTRY_BLOCK_PTR, BASIC_BLOCK (0), EDGE_FALLTHRU);

  for (i = min; i <= max; ++i)
    {
      basic_block bb = BASIC_BLOCK (i);
      rtx insn, x;
      enum rtx_code code;
      int force_fallthru = 0;

      if (GET_CODE (bb->head) == CODE_LABEL
	  && LABEL_ALTERNATE_NAME (bb->head))
	make_edge (NULL, ENTRY_BLOCK_PTR, bb, 0);

      /* Examine the last instruction of the block, and discover the
	 ways we can leave the block.  */

      insn = bb->end;
      code = GET_CODE (insn);

      /* A branch.  */
      if (code == JUMP_INSN)
	{
	  rtx tmp;

	  /* Recognize exception handling placeholders.  */
	  if (GET_CODE (PATTERN (insn)) == RESX)
	    make_eh_edge (edge_cache, bb, insn);

	  /* Recognize a non-local goto as a branch outside the
	     current function.  */
	  else if (find_reg_note (insn, REG_NON_LOCAL_GOTO, NULL_RTX))
	    ;

	  /* ??? Recognize a tablejump and do the right thing.  */
	  else if ((tmp = JUMP_LABEL (insn)) != NULL_RTX
		   && (tmp = NEXT_INSN (tmp)) != NULL_RTX
		   && GET_CODE (tmp) == JUMP_INSN
		   && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
		       || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
	    {
	      rtvec vec;
	      int j;

	      if (GET_CODE (PATTERN (tmp)) == ADDR_VEC)
		vec = XVEC (PATTERN (tmp), 0);
	      else
		vec = XVEC (PATTERN (tmp), 1);

	      for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
		make_label_edge (edge_cache, bb,
				 XEXP (RTVEC_ELT (vec, j), 0), 0);

	      /* Some targets (eg, ARM) emit a conditional jump that also
		 contains the out-of-range target.  Scan for these and
		 add an edge if necessary.  */
	      if ((tmp = single_set (insn)) != NULL
		  && SET_DEST (tmp) == pc_rtx
		  && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
		  && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF)
		make_label_edge (edge_cache, bb,
				 XEXP (XEXP (SET_SRC (tmp), 2), 0), 0);

#ifdef CASE_DROPS_THROUGH
	      /* Silly VAXen.  The ADDR_VEC is going to be in the way of
		 us naturally detecting fallthru into the next block.  */
	      force_fallthru = 1;
#endif
	    }

	  /* If this is a computed jump, then mark it as reaching
	     everything on the label_value_list and forced_labels list.  */
	  else if (computed_jump_p (insn))
	    {
	      current_function_has_computed_jump = 1;

	      for (x = label_value_list; x; x = XEXP (x, 1))
		make_label_edge (edge_cache, bb, XEXP (x, 0), EDGE_ABNORMAL);

	      for (x = forced_labels; x; x = XEXP (x, 1))
		make_label_edge (edge_cache, bb, XEXP (x, 0), EDGE_ABNORMAL);
	    }
	  /* A return insn creates an edge out to the exit block.  */
	  else if (returnjump_p (insn))
	    make_edge (edge_cache, bb, EXIT_BLOCK_PTR, 0);

	  /* Otherwise, we have a plain conditional or unconditional jump.  */
	  else
	    {
	      if (! JUMP_LABEL (insn))
		abort ();
	      make_label_edge (edge_cache, bb, JUMP_LABEL (insn), 0);
	    }
	}

      /* If this is a sibling call insn, then this is in effect a
	 combined call and return, and so we need an edge to the
	 exit block.  No need to worry about EH edges, since we
	 wouldn't have created the sibling call in the first place.  */

      if (code == CALL_INSN && SIBLING_CALL_P (insn))
	make_edge (edge_cache, bb, EXIT_BLOCK_PTR,
		   EDGE_ABNORMAL | EDGE_ABNORMAL_CALL);

      /* If this is a CALL_INSN, then mark it as reaching the active EH
	 handler for this CALL_INSN.  If we're handling non-call
	 exceptions then any insn can reach any of the active handlers.

	 Also mark the CALL_INSN as reaching any nonlocal goto handler.  */

      else if (code == CALL_INSN || flag_non_call_exceptions)
	{
	  /* Add any appropriate EH edges.  */
	  make_eh_edge (edge_cache, bb, insn);

	  if (code == CALL_INSN && nonlocal_goto_handler_labels)
	    {
	      /* ??? This could be made smarter: in some cases it's possible
		 to tell that certain calls will not do a nonlocal goto.

		 For example, if the nested functions that do the nonlocal
		 gotos do not have their addresses taken, then only calls to
		 those functions or to other nested functions that use them
		 could possibly do nonlocal gotos.  */

	      /* We do know that a REG_EH_REGION note with a value less
		 than 0 is guaranteed not to perform a non-local goto.  */
	      rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);

	      if (!note || INTVAL (XEXP (note, 0)) >= 0)
		for (x = nonlocal_goto_handler_labels; x; x = XEXP (x, 1))
		  make_label_edge (edge_cache, bb, XEXP (x, 0),
				   EDGE_ABNORMAL | EDGE_ABNORMAL_CALL);
	    }
	}

      /* Find out if we can drop through to the next block.  */
      insn = next_nonnote_insn (insn);
      if (!insn || (i + 1 == n_basic_blocks && force_fallthru))
	make_edge (edge_cache, bb, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
      else if (i + 1 < n_basic_blocks)
	{
	  rtx tmp = BLOCK_HEAD (i + 1);
	  if (GET_CODE (tmp) == NOTE)
	    tmp = next_nonnote_insn (tmp);
	  if (force_fallthru || insn == tmp)
	    make_edge (edge_cache, bb, BASIC_BLOCK (i + 1), EDGE_FALLTHRU);
	}
    }

  if (edge_cache)
    sbitmap_vector_free (edge_cache);
}
/* Create an edge between two basic blocks.  FLAGS are auxiliary information
   about the edge that is accumulated between calls.  */

void
make_edge (edge_cache, src, dst, flags)
     sbitmap *edge_cache;
     basic_block src, dst;
     int flags;
{
  int use_edge_cache;
  edge e;

  /* Don't bother with edge cache for ENTRY or EXIT; there aren't that
     many edges to them, and we didn't allocate memory for it.  */
  use_edge_cache = (edge_cache
		    && src != ENTRY_BLOCK_PTR
		    && dst != EXIT_BLOCK_PTR);

  /* Make sure we don't add duplicate edges.  */
  switch (use_edge_cache)
    {
    default:
      /* Quick test for non-existence of the edge.  */
      if (! TEST_BIT (edge_cache[src->index], dst->index))
	break;

      /* The edge exists; early exit if no work to do.  */
      if (flags == 0)
	return;

      /* FALLTHRU */
    case 0:
      for (e = src->succ; e; e = e->succ_next)
	if (e->dest == dst)
	  {
	    e->flags |= flags;
	    return;
	  }
      break;
    }

  e = (edge) xcalloc (1, sizeof (*e));

  e->succ_next = src->succ;
  e->pred_next = dst->pred;
  e->src = src;
  e->dest = dst;
  e->flags = flags;

  src->succ = e;
  dst->pred = e;

  if (use_edge_cache)
    SET_BIT (edge_cache[src->index], dst->index);
}
/* Create an edge from a basic block to a label.  */

static void
make_label_edge (edge_cache, src, label, flags)
     sbitmap *edge_cache;
     basic_block src;
     rtx label;
     int flags;
{
  if (GET_CODE (label) != CODE_LABEL)
    abort ();

  /* If the label was never emitted, this insn is junk, but avoid a
     crash trying to refer to BLOCK_FOR_INSN (label).  This can happen
     as a result of a syntax error and a diagnostic has already been
     printed.  */

  if (INSN_UID (label) == 0)
    return;

  make_edge (edge_cache, src, BLOCK_FOR_INSN (label), flags);
}
/* Create the edges generated by INSN in REGION.  */

static void
make_eh_edge (edge_cache, src, insn)
     sbitmap *edge_cache;
     basic_block src;
     rtx insn;
{
  int is_call = (GET_CODE (insn) == CALL_INSN ? EDGE_ABNORMAL_CALL : 0);
  rtx handlers, i;

  handlers = reachable_handlers (insn);

  for (i = handlers; i; i = XEXP (i, 1))
    make_label_edge (edge_cache, src, XEXP (i, 0),
		     EDGE_ABNORMAL | EDGE_EH | is_call);

  free_INSN_LIST_list (&handlers);
}
/* Identify critical edges and set the bits appropriately.  */

void
mark_critical_edges ()
{
  int i, n = n_basic_blocks;
  basic_block bb;

  /* We begin with the entry block.  This is not terribly important now,
     but could be if a front end (Fortran) implemented alternate entry
     points.  */
  bb = ENTRY_BLOCK_PTR;
  i = -1;

  while (1)
    {
      edge e;

      /* (1) Critical edges must have a source with multiple successors.  */
      if (bb->succ && bb->succ->succ_next)
	{
	  for (e = bb->succ; e; e = e->succ_next)
	    {
	      /* (2) Critical edges must have a destination with multiple
		 predecessors.  Note that we know there is at least one
		 predecessor -- the edge we followed to get here.  */
	      if (e->dest->pred->pred_next)
		e->flags |= EDGE_CRITICAL;
	      else
		e->flags &= ~EDGE_CRITICAL;
	    }
	}
      else
	{
	  for (e = bb->succ; e; e = e->succ_next)
	    e->flags &= ~EDGE_CRITICAL;
	}

      if (++i >= n)
	break;
      bb = BASIC_BLOCK (i);
    }
}
/* Mark the back edges in DFS traversal.
   Return non-zero if a loop (natural or otherwise) is present.
   Inspired by Depth_First_Search_PP described in:

     Advanced Compiler Design and Implementation
     Steven Muchnick
     Morgan Kaufmann, 1997

   and heavily borrowed from flow_depth_first_order_compute.  */
bool
mark_dfs_back_edges ()
{
  edge *stack;
  int *pre;
  int *post;
  int sp;
  int prenum = 1;
  int postnum = 1;
  sbitmap visited;
  bool found = false;

  /* Allocate the preorder and postorder number arrays.  */
  pre = (int *) xcalloc (n_basic_blocks, sizeof (int));
  post = (int *) xcalloc (n_basic_blocks, sizeof (int));

  /* Allocate stack for back-tracking up CFG.  */
  stack = (edge *) xmalloc ((n_basic_blocks + 1) * sizeof (edge));
  sp = 0;

  /* Allocate bitmap to track nodes that have been visited.  */
  visited = sbitmap_alloc (n_basic_blocks);

  /* None of the nodes in the CFG have been visited yet.  */
  sbitmap_zero (visited);

  /* Push the first edge on to the stack.  */
  stack[sp++] = ENTRY_BLOCK_PTR->succ;

  while (sp)
    {
      edge e;
      basic_block src;
      basic_block dest;

      /* Look at the edge on the top of the stack.  */
      e = stack[sp - 1];
      src = e->src;
      dest = e->dest;
      e->flags &= ~EDGE_DFS_BACK;

      /* Check if the edge destination has been visited yet.  */
      if (dest != EXIT_BLOCK_PTR && ! TEST_BIT (visited, dest->index))
	{
	  /* Mark that we have visited the destination.  */
	  SET_BIT (visited, dest->index);

	  pre[dest->index] = prenum++;

	  if (dest->succ)
	    {
	      /* Since the DEST node has been visited for the first
		 time, check its successors.  */
	      stack[sp++] = dest->succ;
	    }
	  else
	    post[dest->index] = postnum++;
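	}
      else
	{
	  /* Otherwise DEST was visited before.  The edge SRC->DEST is a
	     DFS back edge exactly when DEST is an ancestor of SRC that is
	     still open on the stack: it was entered no later than SRC
	     (pre[dest] <= pre[src]) but has not yet been finished
	     (post[dest] == 0).  */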
	  if (dest != EXIT_BLOCK_PTR && src != ENTRY_BLOCK_PTR
	      && pre[src->index] >= pre[dest->index]
	      && post[dest->index] == 0)
	    e->flags |= EDGE_DFS_BACK, found = true;

	  if (! e->succ_next && src != ENTRY_BLOCK_PTR)
	    post[src->index] = postnum++;

	  if (e->succ_next)
	    stack[sp - 1] = e->succ_next;
	  else
	    sp--;
	}
    }

  free (pre);
  free (post);
  free (stack);
  sbitmap_free (visited);

  return found;
}
/* Split a block BB after insn INSN creating a new fallthru edge.
   Return the new edge.  Note that to keep other parts of the compiler happy,
   this function renumbers all the basic blocks so that the new
   one has a number one greater than the block split.  */
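/* A typical use, as an illustrative sketch only (BB and INSN stand for
   whatever block and insn the caller is working with):

	edge e = split_block (bb, insn);
	basic_block new_bb = e->dest;

   The insns after INSN now live in NEW_BB, which falls through from BB
   via the returned edge.  */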
edge
split_block (bb, insn)
     basic_block bb;
     rtx insn;
{
  basic_block new_bb;
  edge new_edge;
  edge e;
  rtx bb_note;
  int i, j;

  /* There is no point splitting the block after its end.  */
  if (bb->end == insn)
    return 0;

  /* Create the new structures.  */
  new_bb = (basic_block) obstack_alloc (&flow_obstack, sizeof (*new_bb));
  new_edge = (edge) xcalloc (1, sizeof (*new_edge));

  memset (new_bb, 0, sizeof (*new_bb));

  new_bb->head = NEXT_INSN (insn);
  new_bb->end = bb->end;
  bb->end = insn;

  new_bb->succ = bb->succ;
  bb->succ = new_edge;
  new_bb->pred = new_edge;
  new_bb->count = bb->count;
  new_bb->frequency = bb->frequency;
  new_bb->loop_depth = bb->loop_depth;

  new_edge->src = bb;
  new_edge->dest = new_bb;
  new_edge->flags = EDGE_FALLTHRU;
  new_edge->probability = REG_BR_PROB_BASE;
  new_edge->count = bb->count;

  /* Redirect the src of the successor edges of bb to point to new_bb.  */
  for (e = new_bb->succ; e; e = e->succ_next)
    e->src = new_bb;

  /* Place the new block just after the block being split.  */
  VARRAY_GROW (basic_block_info, ++n_basic_blocks);

  /* Some parts of the compiler expect blocks to be numbered in
     sequential order, so insert the new block immediately after the
     block being split.  */
  j = bb->index;
  for (i = n_basic_blocks - 1; i > j + 1; --i)
    {
      basic_block tmp = BASIC_BLOCK (i - 1);
      BASIC_BLOCK (i) = tmp;
      tmp->index = i;
    }

  BASIC_BLOCK (i) = new_bb;
  new_bb->index = i;

  if (GET_CODE (new_bb->head) == CODE_LABEL)
    {
      /* Create the basic block note.  */
      bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK,
				 new_bb->head);
      NOTE_BASIC_BLOCK (bb_note) = new_bb;

      /* If the only thing in this new block was the label, make sure
	 the block note gets included.  */
      if (new_bb->head == new_bb->end)
	new_bb->end = bb_note;
    }
  else
    {
      /* Create the basic block note.  */
      bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK,
				  new_bb->head);
      NOTE_BASIC_BLOCK (bb_note) = new_bb;
      new_bb->head = bb_note;
    }

  update_bb_for_insn (new_bb);

  if (bb->global_live_at_start)
    {
      new_bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (&flow_obstack);
      new_bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (&flow_obstack);
      COPY_REG_SET (new_bb->global_live_at_end, bb->global_live_at_end);

      /* We now have to calculate which registers are live at the end
	 of the split basic block and at the start of the new basic
	 block.  Start with those registers that are known to be live
	 at the end of the original basic block and get
	 propagate_block to determine which registers are live.  */
      COPY_REG_SET (new_bb->global_live_at_start, bb->global_live_at_end);
      propagate_block (new_bb, new_bb->global_live_at_start, NULL, NULL, 0);
      COPY_REG_SET (bb->global_live_at_end,
		    new_bb->global_live_at_start);
    }

  return new_edge;
}
/* Return the label in the head of basic block BLOCK.  Create one if it
   doesn't exist.  */

rtx
block_label (block)
     basic_block block;
{
  if (block == EXIT_BLOCK_PTR)
    return NULL_RTX;
  if (GET_CODE (block->head) != CODE_LABEL)
    {
      block->head = emit_label_before (gen_label_rtx (), block->head);
      if (basic_block_for_insn)
	set_block_for_insn (block->head, block);
    }
  return block->head;
}
/* Return true if the block has no effect and only forwards control flow to
   its single destination.  */

bool
forwarder_block_p (bb)
     basic_block bb;
{
  rtx insn = bb->head;

  if (bb == EXIT_BLOCK_PTR || bb == ENTRY_BLOCK_PTR
      || !bb->succ || bb->succ->succ_next)
    return false;

  while (insn != bb->end)
    {
      if (active_insn_p (insn))
	return false;
      insn = NEXT_INSN (insn);
    }

  return (!active_insn_p (insn)
	  || (GET_CODE (insn) == JUMP_INSN && onlyjump_p (insn)));
}
/* Return nonzero if we can reach TARGET from SRC by falling through.  */

static bool
can_fallthru (src, target)
     basic_block src, target;
{
  rtx insn = src->end;
  rtx insn2 = target->head;

  if (src->index + 1 == target->index && !active_insn_p (insn2))
    insn2 = next_active_insn (insn2);

  /* ??? Later we may add code to move jump tables offline.  */
  return next_active_insn (insn) == insn2;
}
/* Attempt to perform edge redirection by replacing a possibly complex jump
   instruction by an unconditional jump, or by removing the jump completely.
   This can apply only if all edges now point to the same block.

   The parameters and return values are equivalent to
   redirect_edge_and_branch.  */

static bool
try_redirect_by_replacing_jump (e, target)
     edge e;
     basic_block target;
{
  basic_block src = e->src;
  rtx insn = src->end, kill_from;
  edge tmp;
  rtx set;
  rtx barrier;

  /* Verify that all targets will be TARGET.  */
  for (tmp = src->succ; tmp; tmp = tmp->succ_next)
    if (tmp->dest != target && tmp != e)
      break;

  if (tmp || !onlyjump_p (insn))
    return false;

  /* Avoid removing a branch with side effects.  */
  set = single_set (insn);
  if (!set || side_effects_p (set))
    return false;

  /* In case we zap a conditional jump, we'll need to kill
     the cc0 setter too.  */
  kill_from = insn;
#ifdef HAVE_cc0
  if (reg_mentioned_p (cc0_rtx, PATTERN (insn)))
    kill_from = PREV_INSN (insn);
#endif

  /* See if we can create the fallthru edge.  */
  if (can_fallthru (src, target))
    {
      src->end = PREV_INSN (kill_from);
      if (rtl_dump_file)
	fprintf (rtl_dump_file, "Removing jump %i.\n", INSN_UID (insn));

      /* Selectively unlink the whole insn chain.  */
      flow_delete_insn_chain (kill_from, PREV_INSN (target->head));
    }

  /* If this already is a simplejump, redirect it.  */
  else if (simplejump_p (insn))
    {
      if (e->dest == target)
	return false;
      if (rtl_dump_file)
	fprintf (rtl_dump_file, "Redirecting jump %i from %i to %i.\n",
		 INSN_UID (insn), e->dest->index, target->index);
      redirect_jump (insn, block_label (target), 0);
    }

  /* Or replace a possibly complicated jump insn by a simple jump insn.  */
  else
    {
      rtx target_label = block_label (target);

      src->end = emit_jump_insn_before (gen_jump (target_label), kill_from);
      JUMP_LABEL (src->end) = target_label;
      LABEL_NUSES (target_label)++;
      if (basic_block_for_insn)
	set_block_for_new_insns (src->end, src);
      if (rtl_dump_file)
	fprintf (rtl_dump_file, "Replacing insn %i by jump %i\n",
		 INSN_UID (insn), INSN_UID (src->end));
      flow_delete_insn_chain (kill_from, insn);

      barrier = next_nonnote_insn (src->end);
      if (!barrier || GET_CODE (barrier) != BARRIER)
	emit_barrier_after (src->end);
    }

  /* Keep only one edge out and set proper flags.  */
  while (src->succ->succ_next)
    remove_edge (src->succ);
  e = src->succ;
  e->flags = EDGE_FALLTHRU;
  e->probability = REG_BR_PROB_BASE;
  e->count = src->count;

  /* We don't want a block to end on a line-number note since that has
     the potential of changing the code between -g and not -g.  */
  while (GET_CODE (e->src->end) == NOTE
	 && NOTE_LINE_NUMBER (e->src->end) >= 0)
    {
      rtx prev = PREV_INSN (e->src->end);
      flow_delete_insn (e->src->end);
      e->src->end = prev;
    }

  if (e->dest != target)
    redirect_edge_succ (e, target);

  return true;
}
/* Return the last LOOP_BEG note appearing after INSN, before the start of
   the next basic block.  Return INSN if there are no such notes.

   When emitting a jump to redirect a fallthru edge, it should always appear
   after the LOOP_BEG notes, as the loop optimizer expects a loop to either
   start with a fallthru edge or with a jump that follows the LOOP_BEG note
   and jumps to the loop exit test.  */

static rtx
last_loop_beg_note (insn)
     rtx insn;
{
  rtx last = insn;

  insn = NEXT_INSN (insn);
  while (GET_CODE (insn) == NOTE
	 && NOTE_LINE_NUMBER (insn) != NOTE_INSN_BASIC_BLOCK)
    {
      if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG)
	last = insn;
      insn = NEXT_INSN (insn);
    }

  return last;
}
/* Attempt to change code to redirect edge E to TARGET.  Don't do so at the
   expense of adding new instructions or reordering basic blocks.

   This function can also be called with an edge destination equivalent to
   TARGET.  It should then try the simplifications and do nothing if none
   is possible.

   Return true if the transformation succeeded.  We still return false in
   case E already pointed to TARGET and we didn't manage to simplify the
   instruction stream.  */

bool
redirect_edge_and_branch (e, target)
     edge e;
     basic_block target;
{
  rtx tmp;
  rtx old_label = e->dest->head;
  basic_block src = e->src;
  rtx insn = src->end;

  if (e->flags & EDGE_COMPLEX)
    return false;

  if (try_redirect_by_replacing_jump (e, target))
    return true;

  /* Do this fast path late, as we want the above code to simplify for cases
     where called on a single edge leaving a basic block containing a
     nontrivial jump.  */
  else if (e->dest == target)
    return false;

  /* We can only redirect non-fallthru edges of a jump insn.  */
  if (e->flags & EDGE_FALLTHRU)
    return false;
  if (GET_CODE (insn) != JUMP_INSN)
    return false;

  /* Recognize a tablejump and adjust all matching cases.  */
  if ((tmp = JUMP_LABEL (insn)) != NULL_RTX
      && (tmp = NEXT_INSN (tmp)) != NULL_RTX
      && GET_CODE (tmp) == JUMP_INSN
      && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
	  || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
    {
      rtvec vec;
      int j;
      rtx new_label = block_label (target);

      if (GET_CODE (PATTERN (tmp)) == ADDR_VEC)
	vec = XVEC (PATTERN (tmp), 0);
      else
	vec = XVEC (PATTERN (tmp), 1);

      for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
	if (XEXP (RTVEC_ELT (vec, j), 0) == old_label)
	  {
	    RTVEC_ELT (vec, j) = gen_rtx_LABEL_REF (Pmode, new_label);
	    --LABEL_NUSES (old_label);
	    ++LABEL_NUSES (new_label);
	  }

      /* Handle casesi dispatch insns.  */
      if ((tmp = single_set (insn)) != NULL
	  && SET_DEST (tmp) == pc_rtx
	  && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
	  && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF
	  && XEXP (XEXP (SET_SRC (tmp), 2), 0) == old_label)
	{
	  XEXP (SET_SRC (tmp), 2) = gen_rtx_LABEL_REF (VOIDmode,
						       new_label);
	  --LABEL_NUSES (old_label);
	  ++LABEL_NUSES (new_label);
	}
    }
  else
    {
      /* ?? We may play the games with moving the named labels from
	 one basic block to the other in case only one computed_jump is
	 available.  */
      if (computed_jump_p (insn))
	return false;

      /* A return instruction can't be redirected.  */
      if (returnjump_p (insn))
	return false;

      /* If the insn doesn't go where we think, we're confused.  */
      if (JUMP_LABEL (insn) != old_label)
	abort ();
      redirect_jump (insn, block_label (target), 0);
    }

  if (rtl_dump_file)
    fprintf (rtl_dump_file, "Edge %i->%i redirected to %i\n",
	     e->src->index, e->dest->index, target->index);
  if (e->dest != target)
    redirect_edge_succ_nodup (e, target);

  return true;
}
/* Redirect edge E even at the expense of creating a new jump insn or a new
   basic block.  Return the new basic block if one was created, NULL
   otherwise.  Abort if the conversion is impossible.  */

basic_block
redirect_edge_and_branch_force (e, target)
     edge e;
     basic_block target;
{
  basic_block new_bb;
  edge new_edge;
  rtx label;
  rtx bb_note;
  int i, j;

  if (redirect_edge_and_branch (e, target))
    return NULL;
  if (e->dest == target)
    return NULL;
  if (e->flags & EDGE_ABNORMAL)
    abort ();
  if (!(e->flags & EDGE_FALLTHRU))
    abort ();

  e->flags &= ~EDGE_FALLTHRU;
  label = block_label (target);

  /* Case of the fallthru block.  */
  if (!e->src->succ->succ_next)
    {
      e->src->end = emit_jump_insn_after (gen_jump (label),
					  last_loop_beg_note (e->src->end));
      JUMP_LABEL (e->src->end) = label;
      LABEL_NUSES (label)++;
      if (basic_block_for_insn)
	set_block_for_new_insns (e->src->end, e->src);
      emit_barrier_after (e->src->end);
      if (rtl_dump_file)
	fprintf (rtl_dump_file,
		 "Emitting jump insn %i to redirect edge %i->%i to %i\n",
		 INSN_UID (e->src->end), e->src->index, e->dest->index,
		 target->index);
      redirect_edge_succ (e, target);
      return NULL;
    }

  /* Redirecting the fallthru edge of a conditional jump needs extra work.  */

  if (rtl_dump_file)
    fprintf (rtl_dump_file,
	     "Emitting jump insn %i in new BB to redirect edge %i->%i to %i\n",
	     INSN_UID (e->src->end), e->src->index, e->dest->index,
	     target->index);

  /* Create the new structures.  */
  new_bb = (basic_block) obstack_alloc (&flow_obstack, sizeof (*new_bb));
  new_edge = (edge) xcalloc (1, sizeof (*new_edge));

  memset (new_bb, 0, sizeof (*new_bb));

  new_bb->end = new_bb->head = last_loop_beg_note (e->src->end);
  new_bb->succ = NULL;
  new_bb->pred = new_edge;
  new_bb->count = e->count;
  new_bb->frequency = EDGE_FREQUENCY (e);
  new_bb->loop_depth = e->dest->loop_depth;

  new_edge->flags = EDGE_FALLTHRU;
  new_edge->probability = e->probability;
  new_edge->count = e->count;

  if (target->global_live_at_start)
    {
      new_bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (&flow_obstack);
      new_bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (&flow_obstack);
      COPY_REG_SET (new_bb->global_live_at_start,
		    target->global_live_at_start);
      COPY_REG_SET (new_bb->global_live_at_end, new_bb->global_live_at_start);
    }

  /* Wire the edge in.  */
  new_edge->src = e->src;
  new_edge->dest = new_bb;
  new_edge->succ_next = e->src->succ;
  e->src->succ = new_edge;
  new_edge->pred_next = NULL;

  /* Redirect old edge.  */
  redirect_edge_succ (e, target);
  redirect_edge_pred (e, new_bb);
  e->probability = REG_BR_PROB_BASE;

  /* Place the new block just after the block being split.  */
  VARRAY_GROW (basic_block_info, ++n_basic_blocks);

  /* Some parts of the compiler expect blocks to be numbered in
     sequential order, so insert the new block immediately after the
     block being split.  */
  j = new_edge->src->index;
  for (i = n_basic_blocks - 1; i > j + 1; --i)
    {
      basic_block tmp = BASIC_BLOCK (i - 1);
      BASIC_BLOCK (i) = tmp;
      tmp->index = i;
    }

  BASIC_BLOCK (i) = new_bb;
  new_bb->index = i;

  /* Create the basic block note.  */
  bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK, new_bb->head);
  NOTE_BASIC_BLOCK (bb_note) = new_bb;
  new_bb->head = bb_note;

  new_bb->end = emit_jump_insn_after (gen_jump (label), new_bb->head);
  JUMP_LABEL (new_bb->end) = label;
  LABEL_NUSES (label)++;
  if (basic_block_for_insn)
    set_block_for_new_insns (new_bb->end, new_bb);
  emit_barrier_after (new_bb->end);

  return new_bb;
}
/* Helper function for split_edge.  Return true in case the edge BB2 to BB1
   is a back edge of a syntactic loop.  */

static bool
back_edge_of_syntactic_loop_p (bb1, bb2)
     basic_block bb1, bb2;
{
  rtx insn;
  int count = 0;

  if (bb1->index > bb2->index)
    return false;

  if (bb1->index == bb2->index)
    return true;

  for (insn = bb1->end; insn != bb2->head && count >= 0;
       insn = NEXT_INSN (insn))
    if (GET_CODE (insn) == NOTE)
      {
	if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG)
	  count++;
	if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END)
	  count--;
      }

  return count >= 0;
}
/* Split a (typically critical) edge.  Return the new block.
   Abort on abnormal edges.

   ??? The code generally expects to be called on critical edges.
   The case of a block ending in an unconditional jump to a
   block with multiple predecessors is not handled optimally.  */
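/* As an illustrative sketch, a pass that must execute new insns on
   exactly one (possibly critical) edge E can do:

	basic_block bb = split_edge (e);

   and then emit the insns into BB, which now sits alone on the edge.  */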
2171 split_edge (edge_in)
2174 basic_block old_pred, bb, old_succ;
2179 /* Abnormal edges cannot be split. */
2180 if ((edge_in->flags & EDGE_ABNORMAL) != 0)
2183 old_pred = edge_in->src;
2184 old_succ = edge_in->dest;
2186 /* Create the new structures. */
2187 bb = (basic_block) obstack_alloc (&flow_obstack, sizeof (*bb));
2188 edge_out = (edge) xcalloc (1, sizeof (*edge_out));
2191 memset (bb, 0, sizeof (*bb));
2193 /* ??? This info is likely going to be out of date very soon. */
2194 if (old_succ->global_live_at_start)
2196 bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (&flow_obstack);
2197 bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (&flow_obstack);
2198 COPY_REG_SET (bb->global_live_at_start, old_succ->global_live_at_start);
2199 COPY_REG_SET (bb->global_live_at_end, old_succ->global_live_at_start);
2203 bb->succ = edge_out;
2204 bb->count = edge_in->count;
2205 bb->frequency = EDGE_FREQUENCY (edge_in);
2207 edge_in->flags &= ~EDGE_CRITICAL;
2209 edge_out->pred_next = old_succ->pred;
2210 edge_out->succ_next = NULL;
2212 edge_out->dest = old_succ;
2213 edge_out->flags = EDGE_FALLTHRU;
2214 edge_out->probability = REG_BR_PROB_BASE;
2215 edge_out->count = edge_in->count;
2217 old_succ->pred = edge_out;
2219 /* Tricky case -- if there existed a fallthru into the successor
2220 (and we're not it) we must add a new unconditional jump around
2221 the new block we're actually interested in.
2223 Further, if that edge is critical, this means a second new basic
2224 block must be created to hold it. In order to simplify correct
2225 insn placement, do this before we touch the existing basic block
2226 ordering for the block we really wanted. */
2227 if ((edge_in->flags & EDGE_FALLTHRU) == 0)
2230 for (e = edge_out->pred_next; e; e = e->pred_next)
2231 if (e->flags & EDGE_FALLTHRU)
2236 basic_block jump_block;
2239 if ((e->flags & EDGE_CRITICAL) == 0
2240 && e->src != ENTRY_BLOCK_PTR)
2242 /* Non-critical -- we can simply add a jump to the end
2243 of the existing predecessor. */
2244 jump_block = e->src;
2248 /* We need a new block to hold the jump. The simplest
2249 way to do the bulk of the work here is to recursively
2250 call ourselves. */
2251 jump_block = split_edge (e);
2252 e = jump_block->succ;
2255 /* Now add the jump insn ... */
2256 pos = emit_jump_insn_after (gen_jump (old_succ->head),
2257 last_loop_beg_note (jump_block->end));
2258 jump_block->end = pos;
2259 if (basic_block_for_insn)
2260 set_block_for_new_insns (pos, jump_block);
2261 emit_barrier_after (pos);
2263 /* ... let jump know that label is in use, ... */
2264 JUMP_LABEL (pos) = old_succ->head;
2265 ++LABEL_NUSES (old_succ->head);
2267 /* ... and clear fallthru on the outgoing edge. */
2268 e->flags &= ~EDGE_FALLTHRU;
2270 /* Continue splitting the interesting edge. */
2274 /* Place the new block just in front of the successor. */
2275 VARRAY_GROW (basic_block_info, ++n_basic_blocks);
2276 if (old_succ == EXIT_BLOCK_PTR)
2277 j = n_basic_blocks - 1;
2279 j = old_succ->index;
2280 for (i = n_basic_blocks - 1; i > j; --i)
2282 basic_block tmp = BASIC_BLOCK (i - 1);
2283 BASIC_BLOCK (i) = tmp;
2286 BASIC_BLOCK (i) = bb;
2289 /* Create the basic block note.
2291 Where we place the note can have a noticeable impact on the generated
2292 code. Consider a cfg where block 0 enters a loop at block 1, exiting to block 2.
2302 If we need to insert an insn on the edge from block 0 to block 1,
2303 we want to ensure the instructions we insert are outside of any
2304 loop notes that physically sit between block 0 and block 1. Otherwise
2305 we confuse the loop optimizer into thinking the loop is a phony. */
2306 if (old_succ != EXIT_BLOCK_PTR
2307 && PREV_INSN (old_succ->head)
2308 && GET_CODE (PREV_INSN (old_succ->head)) == NOTE
2309 && NOTE_LINE_NUMBER (PREV_INSN (old_succ->head)) == NOTE_INSN_LOOP_BEG
2310 && !back_edge_of_syntactic_loop_p (old_succ, old_pred))
2311 bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK,
2312 PREV_INSN (old_succ->head));
2313 else if (old_succ != EXIT_BLOCK_PTR)
2314 bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK, old_succ->head);
2316 bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK, get_last_insn ());
2317 NOTE_BASIC_BLOCK (bb_note) = bb;
2318 bb->head = bb->end = bb_note;
2320 /* For non-fallthru edges, we must adjust the predecessor's
2321 jump instruction to target our new block. */
2322 if ((edge_in->flags & EDGE_FALLTHRU) == 0)
2324 if (!redirect_edge_and_branch (edge_in, bb))
2328 redirect_edge_succ (edge_in, bb);
2333 /* Queue instructions for insertion on an edge between two basic blocks.
2334 The new instructions and basic blocks (if any) will not appear in the
2335 CFG until commit_edge_insertions is called. */
2338 insert_insn_on_edge (pattern, e)
2342 /* We cannot insert instructions on an abnormal critical edge.
2343 It will be easier to find the culprit if we die now. */
2344 if ((e->flags & (EDGE_ABNORMAL|EDGE_CRITICAL))
2345 == (EDGE_ABNORMAL|EDGE_CRITICAL))
2348 if (e->insns == NULL_RTX)
2351 push_to_sequence (e->insns);
2353 emit_insn (pattern);
2355 e->insns = get_insns ();
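/* A hypothetical caller, sketching the intended protocol: queue insns on
   any number of edges first, then materialize them all at once.  The
   CLOBBER pattern is arbitrary, chosen only for illustration.  */
#if 0
static void
example_edge_insertion (e, reg)
     edge e;
     rtx reg;
{
  insert_insn_on_edge (gen_rtx_CLOBBER (VOIDmode, reg), e);
  /* ... queue more insns on this or other edges as needed ... */
  commit_edge_insertions ();
}
#endif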
2359 /* Update the CFG for the instructions queued on edge E. */
2362 commit_one_edge_insertion (e)
2365 rtx before = NULL_RTX, after = NULL_RTX, insns, tmp, last;
2368 /* Pull the insns off the edge now since the edge might go away. */
2370 e->insns = NULL_RTX;
2372 /* Figure out where to put these things. If the destination has
2373 one predecessor, insert there. Except for the exit block. */
2374 if (e->dest->pred->pred_next == NULL
2375 && e->dest != EXIT_BLOCK_PTR)
2379 /* Get the location correct wrt a code label, and "nice" wrt
2380 a basic block note, and before everything else. */
2382 if (GET_CODE (tmp) == CODE_LABEL)
2383 tmp = NEXT_INSN (tmp);
2384 if (NOTE_INSN_BASIC_BLOCK_P (tmp))
2385 tmp = NEXT_INSN (tmp);
2386 if (tmp == bb->head)
2389 after = PREV_INSN (tmp);
2392 /* If the source has one successor and the edge is not abnormal,
2393 insert there. Except for the entry block. */
2394 else if ((e->flags & EDGE_ABNORMAL) == 0
2395 && e->src->succ->succ_next == NULL
2396 && e->src != ENTRY_BLOCK_PTR)
2399 /* It is possible to have a non-simple jump here. Consider a target
2400 where some forms of unconditional jumps clobber a register. This
2401 happens on the fr30 for example.
2403 We know this block has a single successor, so we can just emit
2404 the queued insns before the jump. */
2405 if (GET_CODE (bb->end) == JUMP_INSN)
2411 /* We'd better be fallthru, or we've lost track of what's what. */
2412 if ((e->flags & EDGE_FALLTHRU) == 0)
2419 /* Otherwise we must split the edge. */
2422 bb = split_edge (e);
2426 /* Now that we've found the spot, do the insertion. */
2428 /* Set the new block number for these insns, if structure is allocated. */
2429 if (basic_block_for_insn)
2432 for (i = insns; i != NULL_RTX; i = NEXT_INSN (i))
2433 set_block_for_insn (i, bb);
2438 emit_insns_before (insns, before);
2439 if (before == bb->head)
2442 last = prev_nonnote_insn (before);
2446 last = emit_insns_after (insns, after);
2447 if (after == bb->end)
2451 if (returnjump_p (last))
2453 /* ??? Remove all outgoing edges from BB and add one for EXIT.
2454 This is not currently a problem because this only happens
2455 for the (single) epilogue, which already has a fallthru edge
2456 to EXIT. */
2459 if (e->dest != EXIT_BLOCK_PTR
2460 || e->succ_next != NULL
2461 || (e->flags & EDGE_FALLTHRU) == 0)
2463 e->flags &= ~EDGE_FALLTHRU;
2465 emit_barrier_after (last);
2469 flow_delete_insn (before);
2471 else if (GET_CODE (last) == JUMP_INSN)
2473 find_sub_basic_blocks (bb);
2476 /* Update the CFG for all queued instructions. */
2479 commit_edge_insertions ()
2483 compute_bb_for_insn (get_max_uid ());
2485 #ifdef ENABLE_CHECKING
2486 verify_flow_info ();
2490 bb = ENTRY_BLOCK_PTR;
2495 for (e = bb->succ; e; e = next)
2497 next = e->succ_next;
2499 commit_one_edge_insertion (e);
2502 if (++i >= n_basic_blocks)
2504 bb = BASIC_BLOCK (i);
2508 /* Return true if we need to add a fake edge to the exit block.
2509 Helper function for flow_call_edges_add.
2511 need_fake_edge_p (insn)
2517 if ((GET_CODE (insn) == CALL_INSN
2518 && !SIBLING_CALL_P (insn)
2519 && !find_reg_note (insn, REG_NORETURN, NULL)
2520 && !find_reg_note (insn, REG_ALWAYS_RETURN, NULL)
2521 && !CONST_OR_PURE_CALL_P (insn)))
2524 return ((GET_CODE (PATTERN (insn)) == ASM_OPERANDS
2525 && MEM_VOLATILE_P (PATTERN (insn)))
2526 || (GET_CODE (PATTERN (insn)) == PARALLEL
2527 && asm_noperands (insn) != -1
2528 && MEM_VOLATILE_P (XVECEXP (PATTERN (insn), 0, 0)))
2529 || GET_CODE (PATTERN (insn)) == ASM_INPUT);
2532 /* Add fake edges to the function exit for any non-constant and non-noreturn
2533 calls, or volatile inline assembly, in the bitmap of blocks specified by
2534 BLOCKS, or to the whole CFG if BLOCKS is zero. Return the number of blocks
2535 that were split.
2537 The goal is to expose cases in which entering a basic block does not imply
2538 that all subsequent instructions must be executed. */
2541 flow_call_edges_add (blocks)
2545 int blocks_split = 0;
2548 bool check_last_block = false;
2550 /* Map bb indices into basic block pointers since split_block
2551 will renumber the basic blocks. */
2553 bbs = (basic_block *) xmalloc (n_basic_blocks * sizeof (*bbs));
2557 for (i = 0; i < n_basic_blocks; i++)
2558 bbs[bb_num++] = BASIC_BLOCK (i);
2559 check_last_block = true;
2563 EXECUTE_IF_SET_IN_SBITMAP (blocks, 0, i,
2565 bbs[bb_num++] = BASIC_BLOCK (i);
2566 if (i == n_basic_blocks - 1)
2567 check_last_block = true;
2571 /* In the last basic block, before epilogue generation, there will be
2572 a fallthru edge to EXIT. Special care is required if the last insn
2573 of the last basic block is a call because make_edge folds duplicate
2574 edges, which would result in the fallthru edge also being marked
2575 fake, which would result in the fallthru edge being removed by
2576 remove_fake_edges, which would result in an invalid CFG.
2578 Moreover, we can't elide the outgoing fake edge, since the block
2579 profiler needs to take this into account in order to solve the minimal
2580 spanning tree in the case that the call doesn't return.
2582 Handle this by adding a dummy instruction in a new last basic block. */
2583 if (check_last_block
2584 && need_fake_edge_p (BASIC_BLOCK (n_basic_blocks - 1)->end))
2587 for (e = BASIC_BLOCK (n_basic_blocks - 1)->succ; e; e = e->succ_next)
2588 if (e->dest == EXIT_BLOCK_PTR)
2590 insert_insn_on_edge (gen_rtx_USE (VOIDmode, const0_rtx), e);
2591 commit_edge_insertions ();
2595 /* Now add fake edges to the function exit for any non-constant
2596 calls, since there is no way that we can determine if they will
2597 or will not return. */
2599 for (i = 0; i < bb_num; i++)
2601 basic_block bb = bbs[i];
2605 for (insn = bb->end; ; insn = prev_insn)
2607 prev_insn = PREV_INSN (insn);
2608 if (need_fake_edge_p (insn))
2612 /* The above condition should be enough to verify that there is
2613 no edge to the exit block in the CFG already. Calling make_edge in
2614 such a case would make us mark that edge as fake and remove it
2615 later. */
2616 #ifdef ENABLE_CHECKING
2617 if (insn == bb->end)
2618 for (e = bb->succ; e; e = e->succ_next)
2619 if (e->dest == EXIT_BLOCK_PTR)
2623 /* Note that the following may create a new basic block
2624 and renumber the existing basic blocks. */
2625 e = split_block (bb, insn);
2629 make_edge (NULL, bb, EXIT_BLOCK_PTR, EDGE_FAKE);
2631 if (insn == bb->head)
2637 verify_flow_info ();
2640 return blocks_split;
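/* A hypothetical use of flow_call_edges_add; per its comment above, a
   null BLOCKS bitmap applies the transformation to the whole CFG.  */
#if 0
static void
example_add_fake_call_edges ()
{
  int n_split = flow_call_edges_add (NULL);

  if (rtl_dump_file)
    fprintf (rtl_dump_file, "%d blocks split for fake call edges\n",
	     n_split);
}
#endif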
2643 /* Find unreachable blocks. An unreachable block will have NULL in
2644 block->aux; a non-NULL value indicates the block is reachable. */
2647 find_unreachable_blocks ()
2651 basic_block *tos, *worklist;
2654 tos = worklist = (basic_block *) xmalloc (sizeof (basic_block) * n);
2656 /* Use basic_block->aux as a marker. Clear them all. */
2658 for (i = 0; i < n; ++i)
2659 BASIC_BLOCK (i)->aux = NULL;
2661 /* Add our starting points to the worklist. Almost always there will
2662 be only one. It isn't inconceivable that we might one day directly
2663 support Fortran alternate entry points. */
2665 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
2669 /* Mark the block with a handy non-null value. */
2673 /* Iterate: find everything reachable from what we've already seen. */
2675 while (tos != worklist)
2677 basic_block b = *--tos;
2679 for (e = b->succ; e; e = e->succ_next)
2690 /* Delete all unreachable basic blocks. */
2692 delete_unreachable_blocks ()
2696 find_unreachable_blocks ();
2698 /* Delete all unreachable basic blocks. Count down so that we
2699 don't interfere with the block renumbering that happens in
2700 flow_delete_block. */
2702 for (i = n_basic_blocks - 1; i >= 0; --i)
2704 basic_block b = BASIC_BLOCK (i);
2707 /* This block was found. Tidy up the mark. */
2710 flow_delete_block (b);
2713 tidy_fallthru_edges ();
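/* A hypothetical consumer of find_unreachable_blocks, relying only on
   the aux-marking contract documented above.  */
#if 0
static void
example_report_unreachable ()
{
  int i;

  find_unreachable_blocks ();
  for (i = 0; i < n_basic_blocks; i++)
    if (BASIC_BLOCK (i)->aux == NULL && rtl_dump_file)
      fprintf (rtl_dump_file, "Block %d is unreachable.\n", i);
}
#endif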
2716 /* Return true if NOTE is not one of the ones that must be kept paired,
2717 so that we may simply delete them. */
2720 can_delete_note_p (note)
2723 return (NOTE_LINE_NUMBER (note) == NOTE_INSN_DELETED
2724 || NOTE_LINE_NUMBER (note) == NOTE_INSN_BASIC_BLOCK);
2727 /* Unlink a chain of insns between START and FINISH, leaving notes
2728 that must be paired. */
2731 flow_delete_insn_chain (start, finish)
2734 /* Unchain the insns one by one. It would be quicker to delete all
2735 of these with a single unchaining, rather than one at a time, but
2736 we need to keep the NOTEs. */
2742 next = NEXT_INSN (start);
2743 if (GET_CODE (start) == NOTE && !can_delete_note_p (start))
2745 else if (GET_CODE (start) == CODE_LABEL
2746 && ! can_delete_label_p (start))
2748 const char *name = LABEL_NAME (start);
2749 PUT_CODE (start, NOTE);
2750 NOTE_LINE_NUMBER (start) = NOTE_INSN_DELETED_LABEL;
2751 NOTE_SOURCE_FILE (start) = name;
2754 next = flow_delete_insn (start);
2756 if (start == finish)
2762 /* Delete the insns in a (non-live) block. We physically delete every
2763 non-deleted-note insn, and update the flow graph appropriately.
2765 Return nonzero if we deleted an exception handler. */
2767 /* ??? Preserving all such notes strikes me as wrong. It would be nice
2768 to post-process the stream to remove empty blocks, loops, ranges, etc. */
2771 flow_delete_block (b)
2774 int deleted_handler = 0;
2777 /* If the head of this block is a CODE_LABEL, then it might be the
2778 label for an exception handler which can't be reached.
2780 We need to remove the label from the exception_handler_label list
2781 and remove the associated NOTE_INSN_EH_REGION_BEG and
2782 NOTE_INSN_EH_REGION_END notes. */
2786 never_reached_warning (insn);
2788 if (GET_CODE (insn) == CODE_LABEL)
2789 maybe_remove_eh_handler (insn);
2791 /* Include any jump table following the basic block. */
2793 if (GET_CODE (end) == JUMP_INSN
2794 && (tmp = JUMP_LABEL (end)) != NULL_RTX
2795 && (tmp = NEXT_INSN (tmp)) != NULL_RTX
2796 && GET_CODE (tmp) == JUMP_INSN
2797 && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
2798 || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
2801 /* Include any barrier that may follow the basic block. */
2802 tmp = next_nonnote_insn (end);
2803 if (tmp && GET_CODE (tmp) == BARRIER)
2806 /* Selectively delete the entire chain. */
2807 flow_delete_insn_chain (insn, end);
2809 /* Remove the edges into and out of this block. Note that there may
2810 indeed be edges in, if we are removing an unreachable loop. */
2814 for (e = b->pred; e; e = next)
2816 for (q = &e->src->succ; *q != e; q = &(*q)->succ_next)
2819 next = e->pred_next;
2823 for (e = b->succ; e; e = next)
2825 for (q = &e->dest->pred; *q != e; q = &(*q)->pred_next)
2828 next = e->succ_next;
2837 /* Remove the basic block from the array, and compact behind it. */
2840 return deleted_handler;
2843 /* Remove block B from the basic block array and compact behind it. */
2849 int i, n = n_basic_blocks;
2851 for (i = b->index; i + 1 < n; ++i)
2853 basic_block x = BASIC_BLOCK (i + 1);
2854 BASIC_BLOCK (i) = x;
2858 basic_block_info->num_elements--;
2862 /* Delete INSN by patching it out. Return the next insn. */
2865 flow_delete_insn (insn)
2868 rtx prev = PREV_INSN (insn);
2869 rtx next = NEXT_INSN (insn);
2872 PREV_INSN (insn) = NULL_RTX;
2873 NEXT_INSN (insn) = NULL_RTX;
2874 INSN_DELETED_P (insn) = 1;
2877 NEXT_INSN (prev) = next;
2879 PREV_INSN (next) = prev;
2881 set_last_insn (prev);
2883 if (GET_CODE (insn) == CODE_LABEL)
2884 remove_node_from_expr_list (insn, &nonlocal_goto_handler_labels);
2886 /* If deleting a jump, decrement the use count of the label. Deleting
2887 the label itself should happen in the normal course of block merging. */
2888 if (GET_CODE (insn) == JUMP_INSN
2889 && JUMP_LABEL (insn)
2890 && GET_CODE (JUMP_LABEL (insn)) == CODE_LABEL)
2891 LABEL_NUSES (JUMP_LABEL (insn))--;
2893 /* Also if deleting an insn that references a label. */
2894 else if ((note = find_reg_note (insn, REG_LABEL, NULL_RTX)) != NULL_RTX
2895 && GET_CODE (XEXP (note, 0)) == CODE_LABEL)
2896 LABEL_NUSES (XEXP (note, 0))--;
2898 if (GET_CODE (insn) == JUMP_INSN
2899 && (GET_CODE (PATTERN (insn)) == ADDR_VEC
2900 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC))
2902 rtx pat = PATTERN (insn);
2903 int diff_vec_p = GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC;
2904 int len = XVECLEN (pat, diff_vec_p);
2907 for (i = 0; i < len; i++)
2908 LABEL_NUSES (XEXP (XVECEXP (pat, diff_vec_p, i), 0))--;
2914 /* True if a given label can be deleted. */
2917 can_delete_label_p (label)
2922 if (LABEL_PRESERVE_P (label))
2925 for (x = forced_labels; x; x = XEXP (x, 1))
2926 if (label == XEXP (x, 0))
2928 for (x = label_value_list; x; x = XEXP (x, 1))
2929 if (label == XEXP (x, 0))
2931 for (x = exception_handler_labels; x; x = XEXP (x, 1))
2932 if (label == XEXP (x, 0))
2935 /* User declared labels must be preserved. */
2936 if (LABEL_NAME (label) != 0)
2943 tail_recursion_label_p (label)
2948 for (x = tail_recursion_label_list; x; x = XEXP (x, 1))
2949 if (label == XEXP (x, 0))
2955 /* Blocks A and B are to be merged into a single block A. The insns
2956 are already contiguous, hence `nomove'. */
2959 merge_blocks_nomove (a, b)
2963 rtx b_head, b_end, a_end;
2964 rtx del_first = NULL_RTX, del_last = NULL_RTX;
2967 /* If there was a CODE_LABEL beginning B, delete it. */
2970 if (GET_CODE (b_head) == CODE_LABEL)
2972 /* Detect basic blocks with nothing but a label. This can happen
2973 in particular at the end of a function. */
2974 if (b_head == b_end)
2976 del_first = del_last = b_head;
2977 b_head = NEXT_INSN (b_head);
2980 /* Delete the basic block note. */
2981 if (NOTE_INSN_BASIC_BLOCK_P (b_head))
2983 if (b_head == b_end)
2988 b_head = NEXT_INSN (b_head);
2991 /* If there was a jump out of A, delete it. */
2993 if (GET_CODE (a_end) == JUMP_INSN)
2997 for (prev = PREV_INSN (a_end); ; prev = PREV_INSN (prev))
2998 if (GET_CODE (prev) != NOTE
2999 || NOTE_LINE_NUMBER (prev) == NOTE_INSN_BASIC_BLOCK
3006 /* If this was a conditional jump, we need to also delete
3007 the insn that set cc0. */
3008 if (prev && sets_cc0_p (prev))
3011 prev = prev_nonnote_insn (prev);
3020 else if (GET_CODE (NEXT_INSN (a_end)) == BARRIER)
3021 del_first = NEXT_INSN (a_end);
3023 /* Delete everything marked above as well as crap that might be
3024 hanging out between the two blocks. */
3025 flow_delete_insn_chain (del_first, del_last);
3027 /* Normally there should only be one successor of A and that is B, but
3028 partway through the merge of blocks for conditional_execution we'll
3029 be merging a TEST block with THEN and ELSE successors. Free the
3030 whole lot of them and hope the caller knows what they're doing. */
3032 remove_edge (a->succ);
3034 /* Adjust the edges out of B for the new owner. */
3035 for (e = b->succ; e; e = e->succ_next)
3039 /* B hasn't quite yet ceased to exist. Attempt to prevent mishap. */
3040 b->pred = b->succ = NULL;
3042 /* Reassociate the insns of B with A. */
3045 if (basic_block_for_insn)
3047 BLOCK_FOR_INSN (b_head) = a;
3048 while (b_head != b_end)
3050 b_head = NEXT_INSN (b_head);
3051 BLOCK_FOR_INSN (b_head) = a;
3061 /* Blocks A and B are to be merged into a single block. A has no incoming
3062 fallthru edge, so it can be moved before B without adding or modifying
3063 any jumps (aside from the jump from A to B). */
3066 merge_blocks_move_predecessor_nojumps (a, b)
3069 rtx start, end, barrier;
3075 barrier = next_nonnote_insn (end);
3076 if (GET_CODE (barrier) != BARRIER)
3078 flow_delete_insn (barrier);
3080 /* Move block and loop notes out of the chain so that we do not
3081 disturb their order.
3083 ??? A better solution would be to squeeze out all the non-nested notes
3084 and adjust the block trees appropriately. Even better would be to have
3085 a tighter connection between block trees and rtl so that this is not
3086 necessary. */
3087 start = squeeze_notes (start, end);
3089 /* Scramble the insn chain. */
3090 if (end != PREV_INSN (b->head))
3091 reorder_insns (start, end, PREV_INSN (b->head));
3095 fprintf (rtl_dump_file, "Moved block %d before %d and merged.\n",
3096 a->index, b->index);
3099 /* Swap the records for the two blocks around. Although we are deleting B,
3100 A is now where B was and we want to compact the BB array from where
3101 A used to be. */
3102 BASIC_BLOCK (a->index) = b;
3103 BASIC_BLOCK (b->index) = a;
3105 a->index = b->index;
3108 /* Now blocks A and B are contiguous. Merge them. */
3109 merge_blocks_nomove (a, b);
3114 /* Blocks A and B are to be merged into a single block. B has no outgoing
3115 fallthru edge, so it can be moved after A without adding or modifying
3116 any jumps (aside from the jump from A to B). */
3119 merge_blocks_move_successor_nojumps (a, b)
3122 rtx start, end, barrier;
3126 barrier = NEXT_INSN (end);
3128 /* Recognize a jump table following block B. */
3130 && GET_CODE (barrier) == CODE_LABEL
3131 && NEXT_INSN (barrier)
3132 && GET_CODE (NEXT_INSN (barrier)) == JUMP_INSN
3133 && (GET_CODE (PATTERN (NEXT_INSN (barrier))) == ADDR_VEC
3134 || GET_CODE (PATTERN (NEXT_INSN (barrier))) == ADDR_DIFF_VEC))
3136 end = NEXT_INSN (barrier);
3137 barrier = NEXT_INSN (end);
3140 /* There had better have been a barrier there. Delete it. */
3141 if (barrier && GET_CODE (barrier) == BARRIER)
3142 flow_delete_insn (barrier);
3144 /* Move block and loop notes out of the chain so that we do not
3145 disturb their order.
3147 ??? A better solution would be to squeeze out all the non-nested notes
3148 and adjust the block trees appropriately. Even better would be to have
3149 a tighter connection between block trees and rtl so that this is not
3150 necessary. */
3151 start = squeeze_notes (start, end);
3153 /* Scramble the insn chain. */
3154 reorder_insns (start, end, a->end);
3156 /* Now blocks A and B are contiguous. Merge them. */
3157 merge_blocks_nomove (a, b);
3161 fprintf (rtl_dump_file, "Moved block %d after %d and merged.\n",
3162 b->index, a->index);
3168 /* Attempt to merge basic blocks that are potentially non-adjacent.
3169 Return true iff the attempt succeeded. */
3172 merge_blocks (e, b, c, mode)
3177 /* If C has a tail recursion label, do not merge. There is no
3178 edge recorded from the call_placeholder back to this label, as
3179 that would make optimize_sibling_and_tail_recursive_calls more
3180 complex for no gain. */
3181 if (GET_CODE (c->head) == CODE_LABEL
3182 && tail_recursion_label_p (c->head))
3185 /* If B has a fallthru edge to C, no need to move anything. */
3186 if (e->flags & EDGE_FALLTHRU)
3188 merge_blocks_nomove (b, c);
3192 fprintf (rtl_dump_file, "Merged %d and %d without moving.\n",
3193 b->index, c->index);
3198 /* Otherwise we will need to move code around. Do that only if expensive
3199 transformations are allowed. */
3200 else if (mode & CLEANUP_EXPENSIVE)
3202 edge tmp_edge, c_fallthru_edge;
3203 int c_has_outgoing_fallthru;
3204 int b_has_incoming_fallthru;
3206 /* Avoid overactive code motion, as the forwarder blocks should be
3207 eliminated by edge redirection instead. One exception might have
3208 been if B is a forwarder block and C has no fallthru edge, but
3209 that should be cleaned up by bb-reorder instead. */
3210 if (forwarder_block_p (b) || forwarder_block_p (c))
3213 /* We must make sure to not munge nesting of lexical blocks,
3214 and loop notes. This is done by squeezing out all the notes
3215 and leaving them there to lie. Not ideal, but functional. */
3217 for (tmp_edge = c->succ; tmp_edge; tmp_edge = tmp_edge->succ_next)
3218 if (tmp_edge->flags & EDGE_FALLTHRU)
3220 c_has_outgoing_fallthru = (tmp_edge != NULL);
3221 c_fallthru_edge = tmp_edge;
3223 for (tmp_edge = b->pred; tmp_edge; tmp_edge = tmp_edge->pred_next)
3224 if (tmp_edge->flags & EDGE_FALLTHRU)
3226 b_has_incoming_fallthru = (tmp_edge != NULL);
3228 /* If B does not have an incoming fallthru, then it can be moved
3229 immediately before C without introducing or modifying jumps.
3230 C cannot be the first block, so we do not have to worry about
3231 accessing a non-existent block. */
3232 if (! b_has_incoming_fallthru)
3233 return merge_blocks_move_predecessor_nojumps (b, c);
3235 /* Otherwise, we're going to try to move C after B. If C does
3236 not have an outgoing fallthru, then it can be moved
3237 immediately after B without introducing or modifying jumps. */
3238 if (! c_has_outgoing_fallthru)
3239 return merge_blocks_move_successor_nojumps (b, c);
3241 /* Otherwise, we'll need to insert an extra jump, and possibly
3242 a new block to contain it. We can't redirect to EXIT_BLOCK_PTR,
3243 as we don't have explicit return instructions before epilogues
3244 are generated, so give up on that case. */
3246 if (c_fallthru_edge->dest != EXIT_BLOCK_PTR
3247 && merge_blocks_move_successor_nojumps (b, c))
3249 basic_block target = c_fallthru_edge->dest;
3253 /* This is a dirty hack to avoid code duplication.
3255 Set the edge to point to the wrong basic block, so
3256 redirect_edge_and_branch_force will do the trick
3257 and rewire the edge back to the original location. */
3258 redirect_edge_succ (c_fallthru_edge, ENTRY_BLOCK_PTR);
3259 new = redirect_edge_and_branch_force (c_fallthru_edge, target);
3261 /* We've just created a barrier, but another barrier is
3262 already present in the stream. Avoid the duplicate. */
3263 barrier = next_nonnote_insn (new ? new->end : b->end);
3264 if (GET_CODE (barrier) != BARRIER)
3266 flow_delete_insn (barrier);
3276 /* Simplify a conditional jump around an unconditional jump.
3277 Return true if something changed. */
3280 try_simplify_condjump (cbranch_block)
3281 basic_block cbranch_block;
3283 basic_block jump_block, jump_dest_block, cbranch_dest_block;
3284 edge cbranch_jump_edge, cbranch_fallthru_edge;
3287 /* Verify that there are exactly two successors. */
3288 if (!cbranch_block->succ
3289 || !cbranch_block->succ->succ_next
3290 || cbranch_block->succ->succ_next->succ_next)
3293 /* Verify that we've got a normal conditional branch at the end
3294 of the block. */
3295 cbranch_insn = cbranch_block->end;
3296 if (!any_condjump_p (cbranch_insn))
3299 cbranch_fallthru_edge = FALLTHRU_EDGE (cbranch_block);
3300 cbranch_jump_edge = BRANCH_EDGE (cbranch_block);
3302 /* The next block must not have multiple predecessors, must not
3303 be the last block in the function, and must contain just the
3304 unconditional jump. */
3305 jump_block = cbranch_fallthru_edge->dest;
3306 if (jump_block->pred->pred_next
3307 || jump_block->index == n_basic_blocks - 1
3308 || !forwarder_block_p (jump_block))
3310 jump_dest_block = jump_block->succ->dest;
3312 /* The conditional branch must target the block after the
3313 unconditional branch. */
3314 cbranch_dest_block = cbranch_jump_edge->dest;
3316 if (!can_fallthru (jump_block, cbranch_dest_block))
3319 /* Invert the conditional branch. Prevent jump.c from deleting
3320 "unreachable" instructions. */
3321 LABEL_NUSES (JUMP_LABEL (cbranch_insn))++;
3322 if (!invert_jump (cbranch_insn, block_label (jump_dest_block), 1))
3324 LABEL_NUSES (JUMP_LABEL (cbranch_insn))--;
3329 fprintf (rtl_dump_file, "Simplifying condjump %i around jump %i\n",
3330 INSN_UID (cbranch_insn), INSN_UID (jump_block->end));
3332 /* Success. Update the CFG to match. Note that after this point
3333 the edge variable names appear backwards; the redirection is done
3334 this way to preserve edge profile data. */
3335 redirect_edge_succ_nodup (cbranch_jump_edge, cbranch_dest_block);
3336 redirect_edge_succ_nodup (cbranch_fallthru_edge, jump_dest_block);
3337 cbranch_jump_edge->flags |= EDGE_FALLTHRU;
3338 cbranch_fallthru_edge->flags &= ~EDGE_FALLTHRU;
3340 /* Delete the block with the unconditional jump, and clean up the mess. */
3341 flow_delete_block (jump_block);
3342 tidy_fallthru_edge (cbranch_jump_edge, cbranch_block, cbranch_dest_block);
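/* To illustrate the transformation above with hypothetical labels:

	if (cond) goto L1;		if (! cond) goto L2;
	goto L2;		=>	;; fall through to L1
     L1: ...			     L1: ...

   The forwarder block holding "goto L2" is deleted and the branch
   condition inverted, with the edge profile data carried over.  */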
3347 /* Attempt to forward edges leaving basic block B.
3348 Return true if successful. */
3351 try_forward_edges (mode, b)
3355 bool changed = false;
3358 for (e = b->succ; e ; e = next)
3360 basic_block target, first;
3363 next = e->succ_next;
3365 /* Skip complex edges because we don't know how to update them.
3367 Still handle fallthru edges, as we may succeed in forwarding a
3368 fallthru edge to the same place as the branch edge of a conditional
3369 branch, turning the conditional branch into an unconditional branch. */
3370 if (e->flags & EDGE_COMPLEX)
3373 target = first = e->dest;
3376 /* Look for the real destination of the jump.
3377 Avoid an infinite loop on an infinite empty loop by counting
3378 up to n_basic_blocks. */
3379 while (forwarder_block_p (target)
3380 && target->succ->dest != EXIT_BLOCK_PTR
3381 && counter < n_basic_blocks)
3383 /* Bypass trivial infinite loops. */
3384 if (target == target->succ->dest)
3385 counter = n_basic_blocks;
3387 /* Avoid killing loop pre-headers, as they are where the loop
3388 optimizer wants to hoist code to.
3390 For fallthru forwarders, the LOOP_BEG note must appear between
3391 the header of the block and the CODE_LABEL of the loop; for
3392 non-forwarders it must appear before the JUMP_INSN. */
3393 if (mode & CLEANUP_PRE_LOOP)
3395 rtx insn = (target->succ->flags & EDGE_FALLTHRU
3396 ? target->head : prev_nonnote_insn (target->end));
3398 if (GET_CODE (insn) != NOTE)
3399 insn = NEXT_INSN (insn);
3401 for (; insn && GET_CODE (insn) != CODE_LABEL && !INSN_P (insn);
3402 insn = NEXT_INSN (insn))
3403 if (GET_CODE (insn) == NOTE
3404 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG)
3407 if (GET_CODE (insn) == NOTE)
3410 target = target->succ->dest, counter++;
3413 if (counter >= n_basic_blocks)
3416 fprintf (rtl_dump_file, "Infinite loop in BB %i.\n",
3419 else if (target == first)
3420 ; /* We didn't do anything. */
3423 /* Save the values now, as the edge may get removed. */
3424 gcov_type edge_count = e->count;
3425 int edge_probability = e->probability;
3427 if (redirect_edge_and_branch (e, target))
3429 /* We successfully forwarded the edge. Now update profile
3430 data: for each edge we traversed in the chain, remove
3431 the original edge's execution count. */
3432 int edge_frequency = ((edge_probability * b->frequency
3433 + REG_BR_PROB_BASE / 2)
3434 / REG_BR_PROB_BASE);
3438 first->count -= edge_count;
3439 first->succ->count -= edge_count;
3440 first->frequency -= edge_frequency;
3441 first = first->succ->dest;
3443 while (first != target);
3450 fprintf (rtl_dump_file, "Forwarding edge %i->%i to %i failed.\n",
3451 b->index, e->dest->index, target->index);
3459 /* Look through the insns at the end of BB1 and BB2 and find the longest
3460 sequence of equivalent insns. Store the first insns for that sequence
3461 in *F1 and *F2 and return the sequence length.
3463 To simplify callers of this function, if the blocks match exactly,
3464 store the head of the blocks in *F1 and *F2. */
3467 flow_find_cross_jump (mode, bb1, bb2, f1, f2)
3468 int mode ATTRIBUTE_UNUSED;
3469 basic_block bb1, bb2;
3472 rtx i1, i2, p1, p2, last1, last2, afterlast1, afterlast2;
3475 /* Skip simple jumps at the end of the blocks. Complex jumps still
3476 need to be compared for equivalence, which we'll do below. */
3479 if (onlyjump_p (i1))
3480 i1 = PREV_INSN (i1);
3482 if (onlyjump_p (i2))
3483 i2 = PREV_INSN (i2);
3485 last1 = afterlast1 = last2 = afterlast2 = NULL_RTX;
3489 while ((GET_CODE (i1) == NOTE && i1 != bb1->head))
3490 i1 = PREV_INSN (i1);
3491 while ((GET_CODE (i2) == NOTE && i2 != bb2->head))
3492 i2 = PREV_INSN (i2);
3494 if (i1 == bb1->head || i2 == bb2->head)
3497 /* Verify that I1 and I2 are equivalent. */
3499 if (GET_CODE (i1) != GET_CODE (i2))
3505 /* If this is a CALL_INSN, compare register usage information.
3506 If we don't check this on stack register machines, the two
3507 CALL_INSNs might be merged leaving reg-stack.c with mismatching
3508 numbers of stack registers in the same basic block.
3509 If we don't check this on machines with delay slots, a delay slot may
3510 be filled that clobbers a parameter expected by the subroutine.
3512 ??? We take the simple route for now and assume that if they're
3513 equal, they were constructed identically. */
3515 if (GET_CODE (i1) == CALL_INSN
3516 && ! rtx_equal_p (CALL_INSN_FUNCTION_USAGE (i1),
3517 CALL_INSN_FUNCTION_USAGE (i2)))
3521 /* If cross_jump_death_matters is not 0, the insn's mode
3522 indicates whether or not the insn contains any stack-like
3523 regs. */
3525 if ((mode & CLEANUP_POST_REGSTACK) && stack_regs_mentioned (i1))
3527 /* If register stack conversion has already been done, then
3528 death notes must also be compared before it is certain that
3529 the two instruction streams match. */
3532 HARD_REG_SET i1_regset, i2_regset;
3534 CLEAR_HARD_REG_SET (i1_regset);
3535 CLEAR_HARD_REG_SET (i2_regset);
3537 for (note = REG_NOTES (i1); note; note = XEXP (note, 1))
3538 if (REG_NOTE_KIND (note) == REG_DEAD
3539 && STACK_REG_P (XEXP (note, 0)))
3540 SET_HARD_REG_BIT (i1_regset, REGNO (XEXP (note, 0)));
3542 for (note = REG_NOTES (i2); note; note = XEXP (note, 1))
3543 if (REG_NOTE_KIND (note) == REG_DEAD
3544 && STACK_REG_P (XEXP (note, 0)))
3545 SET_HARD_REG_BIT (i2_regset, REGNO (XEXP (note, 0)));
3547 GO_IF_HARD_REG_EQUAL (i1_regset, i2_regset, done);
3556 if (GET_CODE (p1) != GET_CODE (p2))
3559 if (! rtx_renumbered_equal_p (p1, p2))
3561 /* The following code helps take care of G++ cleanups. */
3562 rtx equiv1 = find_reg_equal_equiv_note (i1);
3563 rtx equiv2 = find_reg_equal_equiv_note (i2);
3565 if (equiv1 && equiv2
3566 /* If the equivalences are not to a constant, they may
3567 reference pseudos that no longer exist, so we can't
3568 use them. */
3569 && CONSTANT_P (XEXP (equiv1, 0))
3570 && rtx_equal_p (XEXP (equiv1, 0), XEXP (equiv2, 0)))
3572 rtx s1 = single_set (i1);
3573 rtx s2 = single_set (i2);
3574 if (s1 != 0 && s2 != 0
3575 && rtx_renumbered_equal_p (SET_DEST (s1), SET_DEST (s2)))
3577 validate_change (i1, &SET_SRC (s1), XEXP (equiv1, 0), 1);
3578 validate_change (i2, &SET_SRC (s2), XEXP (equiv2, 0), 1);
3579 if (! rtx_renumbered_equal_p (p1, p2))
3581 else if (apply_change_group ())
3589 /* Don't begin a cross-jump with a USE or CLOBBER insn. */
3590 if (GET_CODE (p1) != USE && GET_CODE (p1) != CLOBBER)
3592 afterlast1 = last1, afterlast2 = last2;
3593 last1 = i1, last2 = i2;
3596 i1 = PREV_INSN (i1);
3597 i2 = PREV_INSN (i2);
3603 /* Don't allow the insn after a compare to be shared by
3604 cross-jumping unless the compare is also shared. */
3605 if (reg_mentioned_p (cc0_rtx, last1) && ! sets_cc0_p (last1))
3606 last1 = afterlast1, last2 = afterlast2, ninsns--;
3610 /* Include preceding notes and labels in the cross-jump. One,
3611 this may bring us to the head of the blocks as requested above.
3612 Two, it keeps line number notes as matched as may be. */
3615 while (last1 != bb1->head && GET_CODE (PREV_INSN (last1)) == NOTE)
3616 last1 = PREV_INSN (last1);
3617 if (last1 != bb1->head && GET_CODE (PREV_INSN (last1)) == CODE_LABEL)
3618 last1 = PREV_INSN (last1);
3619 while (last2 != bb2->head && GET_CODE (PREV_INSN (last2)) == NOTE)
3620 last2 = PREV_INSN (last2);
3621 if (last2 != bb2->head && GET_CODE (PREV_INSN (last2)) == CODE_LABEL)
3622 last2 = PREV_INSN (last2);
3631 /* Return true iff the outgoing edges of BB1 and BB2 match, together with
3632 the branch instruction. This means that if we commonize the control
3633 flow before the end of the basic block, the semantics remain unchanged.
3635 We may assume that there exists one edge with a common destination. */
3638 outgoing_edges_match (bb1, bb2)
3642 /* If BB1 has only one successor, we must be looking at an unconditional
3643 jump. Which, by the assumption above, means that we only need to check
3644 that BB2 has one successor. */
3645 if (bb1->succ && !bb1->succ->succ_next)
3646 return (bb2->succ && !bb2->succ->succ_next);
3648 /* Match conditional jumps - this may get tricky when fallthru and branch
3649 edges are crossed. */
3651 && bb1->succ->succ_next
3652 && !bb1->succ->succ_next->succ_next
3653 && any_condjump_p (bb1->end))
3655 edge b1, f1, b2, f2;
3656 bool reverse, match;
3657 rtx set1, set2, cond1, cond2;
3658 enum rtx_code code1, code2;
3661 || !bb2->succ->succ_next
3662 || bb1->succ->succ_next->succ_next
3663 || !any_condjump_p (bb2->end))
3666 b1 = BRANCH_EDGE (bb1);
3667 b2 = BRANCH_EDGE (bb2);
3668 f1 = FALLTHRU_EDGE (bb1);
3669 f2 = FALLTHRU_EDGE (bb2);
3671 /* Get around possible forwarders on fallthru edges. Other cases
3672 should be optimized out already. */
3673 if (forwarder_block_p (f1->dest))
3674 f1 = f1->dest->succ;
3675 if (forwarder_block_p (f2->dest))
3676 f2 = f2->dest->succ;
3678 /* To simplify use of this function, return false if there are
3679 unneeded forwarder blocks. These will get eliminated later
3680 during cleanup_cfg. */
3681 if (forwarder_block_p (f1->dest)
3682 || forwarder_block_p (f2->dest)
3683 || forwarder_block_p (b1->dest)
3684 || forwarder_block_p (b2->dest))
3687 if (f1->dest == f2->dest && b1->dest == b2->dest)
3689 else if (f1->dest == b2->dest && b1->dest == f2->dest)
3694 set1 = pc_set (bb1->end);
3695 set2 = pc_set (bb2->end);
3696 if ((XEXP (SET_SRC (set1), 1) == pc_rtx)
3697 != (XEXP (SET_SRC (set2), 1) == pc_rtx))
3700 cond1 = XEXP (SET_SRC (set1), 0);
3701 cond2 = XEXP (SET_SRC (set2), 0);
3702 code1 = GET_CODE (cond1);
3704 code2 = reversed_comparison_code (cond2, bb2->end);
3706 code2 = GET_CODE (cond2);
3707 if (code2 == UNKNOWN)
3710 /* Verify codes and operands match. */
3711 match = ((code1 == code2
3712 && rtx_renumbered_equal_p (XEXP (cond1, 0), XEXP (cond2, 0))
3713 && rtx_renumbered_equal_p (XEXP (cond1, 1), XEXP (cond2, 1)))
3714 || (code1 == swap_condition (code2)
3715 && rtx_renumbered_equal_p (XEXP (cond1, 1),
3717 && rtx_renumbered_equal_p (XEXP (cond1, 0),
3720 /* If we return true, we will join the blocks. Which means that
3721 we will only have one branch prediction bit to work with. Thus
3722 we require the existing branches to have probabilities that are
3723 roughly similar. */
3724 /* ??? We should use bb->frequency to allow merging in infrequently
3725 executed blocks, but at the moment it is not available when
3726 cleanup_cfg is run. */
3727 if (match && !optimize_size)
3731 note1 = find_reg_note (bb1->end, REG_BR_PROB, 0);
3732 note2 = find_reg_note (bb2->end, REG_BR_PROB, 0);
3736 prob1 = INTVAL (XEXP (note1, 0));
3737 prob2 = INTVAL (XEXP (note2, 0));
3739 prob2 = REG_BR_PROB_BASE - prob2;
3741 /* Fail if the difference in probabilities is
3742 greater than 5% (i.e. REG_BR_PROB_BASE / 20). */
3743 if (abs (prob1 - prob2) > REG_BR_PROB_BASE / 20)
3746 else if (note1 || note2)
3750 if (rtl_dump_file && match)
3751 fprintf (rtl_dump_file, "Conditionals in bb %i and %i match.\n",
3752 bb1->index, bb2->index);
3757 /* ??? We can handle computed jumps too. This may be important for
3758 inlined functions containing switch statements. Also jumps w/o
3759 fallthru edges can be handled by simply matching the whole insn. */
3763 /* E1 and E2 are edges with the same destination block. Search their
3764 predecessors for common code. If found, redirect control flow from
3765 (maybe the middle of) E1->SRC to (maybe the middle of) E2->SRC. */
3768 try_crossjump_to_edge (mode, e1, e2)
3773 basic_block src1 = e1->src, src2 = e2->src;
3774 basic_block redirect_to;
3775 rtx newpos1, newpos2;
3781 /* Search backward through forwarder blocks. We don't need to worry
3782 about multiple entry or chained forwarders, as they will be optimized
3783 away. We do this to look past the unconditional jump following a
3784 conditional jump that is required due to the current CFG shape. */
3786 && !src1->pred->pred_next
3787 && forwarder_block_p (src1))
3793 && !src2->pred->pred_next
3794 && forwarder_block_p (src2))
3800 /* Nothing to do if we reach ENTRY, or a common source block. */
3801 if (src1 == ENTRY_BLOCK_PTR || src2 == ENTRY_BLOCK_PTR)
3806 /* Seeing more than one forwarder block would confuse us later... */
3807 if (forwarder_block_p (e1->dest)
3808 && forwarder_block_p (e1->dest->succ->dest))
3810 if (forwarder_block_p (e2->dest)
3811 && forwarder_block_p (e2->dest->succ->dest))
3814 /* Likewise with dead code (possibly newly created by the other optimizations
3815 of cleanup_cfg). */
3816 if (!src1->pred || !src2->pred)
3819 /* Likewise with complex edges.
3820 ??? We should be able to handle most complex edges later with some
3821 effort. */
3822 if (e1->flags & EDGE_COMPLEX)
3825 /* Look for the common insn sequence, part the first ... */
3826 if (!outgoing_edges_match (src1, src2))
3829 /* ... and part the second. */
3830 nmatch = flow_find_cross_jump (mode, src1, src2, &newpos1, &newpos2);
3834 /* Avoid splitting if possible. */
3835 if (newpos2 == src2->head)
3840 fprintf (rtl_dump_file, "Splitting bb %i before %i insns\n",
3841 src2->index, nmatch);
3842 redirect_to = split_block (src2, PREV_INSN (newpos2))->dest;
3846 fprintf (rtl_dump_file,
3847 "Cross jumping from bb %i to bb %i; %i common insns\n",
3848 src1->index, src2->index, nmatch);
3850 redirect_to->count += src1->count;
3851 redirect_to->frequency += src1->frequency;
3853 /* Recompute the frequencies and counts of outgoing edges. */
3854 for (s = redirect_to->succ; s; s = s->succ_next)
3857 basic_block d = s->dest;
3859 if (forwarder_block_p (d))
3861 for (s2 = src1->succ; ; s2 = s2->succ_next)
3863 basic_block d2 = s2->dest;
3864 if (forwarder_block_p (d2))
3865 d2 = d2->succ->dest;
3869 s->count += s2->count;
3871 /* Take care to update possible forwarder blocks. We verified
3872 that there is no more than one in the chain, so we can't run
3873 into an infinite loop. */
3874 if (forwarder_block_p (s->dest))
3876 s->dest->succ->count += s2->count;
3877 s->dest->count += s2->count;
3878 s->dest->frequency += EDGE_FREQUENCY (s);
3880 if (forwarder_block_p (s2->dest))
3882 s2->dest->succ->count -= s2->count;
3883 s2->dest->count -= s2->count;
3884 s2->dest->frequency -= EDGE_FREQUENCY (s);
3886 if (!redirect_to->frequency && !src1->frequency)
3887 s->probability = (s->probability + s2->probability) / 2;
3890 ((s->probability * redirect_to->frequency +
3891 s2->probability * src1->frequency)
3892 / (redirect_to->frequency + src1->frequency));
3895 note = find_reg_note (redirect_to->end, REG_BR_PROB, 0);
3897 XEXP (note, 0) = GEN_INT (BRANCH_EDGE (redirect_to)->probability);
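/* A worked example of the probability rebalancing above, using
   hypothetical numbers: if REDIRECT_TO has frequency 100 and its edge
   probability is 60%, while SRC1 has frequency 300 and probability 40%,
   the merged edge gets (60% * 100 + 40% * 300) / (100 + 300) = 45%,
   the frequency-weighted average of the two predictions.  */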
3899 /* Edit SRC1 to go to REDIRECT_TO at NEWPOS1. */
3901 /* Skip possible basic block header. */
3902 if (GET_CODE (newpos1) == CODE_LABEL)
3903 newpos1 = NEXT_INSN (newpos1);
3904 if (GET_CODE (newpos1) == NOTE)
3905 newpos1 = NEXT_INSN (newpos1);
3908 /* Emit the jump insn. */
3909 label = block_label (redirect_to);
3910 src1->end = emit_jump_insn_before (gen_jump (label), newpos1);
3911 JUMP_LABEL (src1->end) = label;
3912 LABEL_NUSES (label)++;
3913 if (basic_block_for_insn)
3914 set_block_for_new_insns (src1->end, src1);
3916 /* Delete the now unreachable instructions. */
3917 flow_delete_insn_chain (newpos1, last);
3919 /* Make sure there is a barrier after the new jump. */
3920 last = next_nonnote_insn (src1->end);
3921 if (!last || GET_CODE (last) != BARRIER)
3922 emit_barrier_after (src1->end);
3926 remove_edge (src1->succ);
3927 make_edge (NULL, src1, redirect_to, 0);
3928 src1->succ->probability = REG_BR_PROB_BASE;
3929 src1->succ->count = src1->count;
3934 /* Search the predecessors of BB for common insn sequences. When found,
3935 share code between them by redirecting control flow. Return true if
3936 any changes were made. */
3939 try_crossjump_bb (mode, bb)
3943 edge e, e2, nexte2, nexte, fallthru;
3946 /* Nothing to do if there are not at least two incoming edges. */
3947 if (!bb->pred || !bb->pred->pred_next)
3950 /* It is always cheapest to redirect a block that ends in a branch to
3951 a block that falls through into BB, as that adds no branches to the
3952 program. We'll try that combination first. */
3953 for (fallthru = bb->pred; fallthru; fallthru = fallthru->pred_next)
3954 if (fallthru->flags & EDGE_FALLTHRU)
3958 for (e = bb->pred; e; e = nexte)
3960 nexte = e->pred_next;
3962 /* Elide complex edges now, as neither try_crossjump_to_edge
3963 nor outgoing_edges_match can handle them. */
3964 if (e->flags & EDGE_COMPLEX)
3967 /* As noted above, first try with the fallthru predecessor. */
3970 /* Don't combine the fallthru edge into anything else.
3971 If there is a match, we'll do it the other way around. */
3975 if (try_crossjump_to_edge (mode, e, fallthru))
3983 /* Non-obvious work limiting check: Recognize that we're going
3984 to call try_crossjump_bb on every basic block. So if we have
3985 two blocks with lots of outgoing edges (a switch) and they
3986 share lots of common destinations, then we would do the
3987 cross-jump check once for each common destination.
3989 Now, if the blocks actually are cross-jump candidates, then
3990 all of their destinations will be shared. Which means that
3991 we only need check them for cross-jump candidacy once. We
3992 can eliminate redundant checks of crossjump(A,B) by arbitrarily
3993 choosing to do the check from the block for which the edge
3994 in question is the first successor of A. */
3995 if (e->src->succ != e)
3998 for (e2 = bb->pred; e2; e2 = nexte2)
4000 nexte2 = e2->pred_next;
4005 /* We've already checked the fallthru edge above. */
4009 /* Again, neither try_crossjump_to_edge nor outgoing_edges_match
4010 can handle complex edges. */
4011 if (e2->flags & EDGE_COMPLEX)
4014 /* The "first successor" check above only prevents multiple
4015 checks of crossjump(A,B). In order to prevent redundant
4016 checks of crossjump(B,A), require that A be the block
4017 with the lowest index. */
4018 if (e->src->index > e2->src->index)
4021 if (try_crossjump_to_edge (mode, e, e2))
4033 /* Do simple CFG optimizations - basic block merging, simplification of
4034 jump instructions, etc. Return nonzero if changes were made. */
4037 try_optimize_cfg (mode)
4041 bool changed_overall = false;
4045 /* Attempt to merge blocks as made possible by edge removal. If a block
4046 has only one successor, and the successor has only one predecessor,
4047 they may be combined. */
4055 fprintf (rtl_dump_file, "\n\ntry_optimize_cfg iteration %i\n\n",
4058 for (i = 0; i < n_basic_blocks;)
4060 basic_block c, b = BASIC_BLOCK (i);
4062 bool changed_here = false;
4064 /* Delete trivially dead basic blocks. */
4065 while (b->pred == NULL)
4067 c = BASIC_BLOCK (b->index - 1);
4069 fprintf (rtl_dump_file, "Deleting block %i.\n", b->index);
4070 flow_delete_block (b);
4075 /* Remove code labels no longer used. Don't do this before
4076 CALL_PLACEHOLDER is removed, as some branches may be hidden
4077 within. */
4078 if (b->pred->pred_next == NULL
4079 && (b->pred->flags & EDGE_FALLTHRU)
4080 && !(b->pred->flags & EDGE_COMPLEX)
4081 && GET_CODE (b->head) == CODE_LABEL
4082 && (!(mode & CLEANUP_PRE_SIBCALL)
4083 || !tail_recursion_label_p (b->head))
4084 /* If the previous block ends with a condjump jumping to the next
4085 BB, we can't delete the label. */
4086 && (b->pred->src == ENTRY_BLOCK_PTR
4087 || !reg_mentioned_p (b->head, b->pred->src->end)))
4089 rtx label = b->head;
4090 b->head = NEXT_INSN (b->head);
4091 flow_delete_insn_chain (label, label);
4093 fprintf (rtl_dump_file, "Deleted label in block %i.\n",
4097 /* If we fall through an empty block, we can remove it. */
4098 if (b->pred->pred_next == NULL
4099 && (b->pred->flags & EDGE_FALLTHRU)
4100 && GET_CODE (b->head) != CODE_LABEL
4101 && forwarder_block_p (b)
4102 /* Note that forwarder_block_p true ensures that there
4103 is a successor for this block. */
4104 && (b->succ->flags & EDGE_FALLTHRU)
4105 && n_basic_blocks > 1)
4108 fprintf (rtl_dump_file, "Deleting fallthru block %i.\n",
4110 c = BASIC_BLOCK (b->index ? b->index - 1 : 1);
4111 redirect_edge_succ_nodup (b->pred, b->succ->dest);
4112 flow_delete_block (b);
4117 /* Merge blocks. Loop because chains of blocks might be
4118 combinable. */
4119 while ((s = b->succ) != NULL
4120 && s->succ_next == NULL
4121 && !(s->flags & EDGE_COMPLEX)
4122 && (c = s->dest) != EXIT_BLOCK_PTR
4123 && c->pred->pred_next == NULL
4124 /* If the jump insn has side effects,
4125 we can't kill the edge. */
4126 && (GET_CODE (b->end) != JUMP_INSN
4127 || onlyjump_p (b->end))
4128 && merge_blocks (s, b, c, mode))
4129 changed_here = true;
4131 /* Simplify branch over branch. */
4132 if ((mode & CLEANUP_EXPENSIVE) && try_simplify_condjump (b))
4133 changed_here = true;
4135 /* If B has a single outgoing edge, but uses a non-trivial jump
4136 instruction without side-effects, we can either delete the
4137 jump entirely, or replace it with a simple unconditional jump.
4138 Use redirect_edge_and_branch to do the dirty work. */
4140 && ! b->succ->succ_next
4141 && b->succ->dest != EXIT_BLOCK_PTR
4142 && onlyjump_p (b->end)
4143 && redirect_edge_and_branch (b->succ, b->succ->dest))
4144 changed_here = true;
4146 /* Simplify branch to branch. */
4147 if (try_forward_edges (mode, b))
4148 changed_here = true;
4150 /* Look for shared code between blocks. */
4151 if ((mode & CLEANUP_CROSSJUMP)
4152 && try_crossjump_bb (mode, b))
4153 changed_here = true;
4155 /* Don't get confused by the index shift caused by deleting
4156 blocks. */
4163 if ((mode & CLEANUP_CROSSJUMP)
4164 && try_crossjump_bb (mode, EXIT_BLOCK_PTR))
4167 #ifdef ENABLE_CHECKING
4169 verify_flow_info ();
4172 changed_overall |= changed;
4175 return changed_overall;
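/* A minimal sketch of a driver for try_optimize_cfg (the actual entry
   point, cleanup_cfg, lives elsewhere in this file); the essential
   protocol is to delete unreachable blocks and iterate while changes
   are still being made.  */
#if 0
static void
example_cleanup (mode)
     int mode;
{
  delete_unreachable_blocks ();
  while (try_optimize_cfg (mode))
    delete_unreachable_blocks ();
}
#endif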
4178 /* The given edge should potentially be a fallthru edge. If that is in
4179 fact true, delete the jump and barriers that are in the way. */
4182 tidy_fallthru_edge (e, b, c)
4188 /* ??? In a late-running flow pass, other folks may have deleted basic
4189 blocks by nopping out blocks, leaving multiple BARRIERs between here
4190 and the target label. They ought to be chastised and fixed.
4192 We can also wind up with a sequence of undeletable labels between
4193 one block and the next.
4195 So search through a sequence of barriers, labels, and notes for
4196 the head of block C and assert that we really do fall through. */
4198 if (next_real_insn (b->end) != next_real_insn (PREV_INSN (c->head)))
4201 /* Remove what will soon cease being the jump insn from the source block.
4202 If block B consisted only of this single jump, turn it into a deleted
4203 note. */
4205 if (GET_CODE (q) == JUMP_INSN
4207 && (any_uncondjump_p (q)
4208 || (b->succ == e && e->succ_next == NULL)))
4211 /* If this was a conditional jump, we need to also delete
4212 the insn that set cc0. */
4213 if (any_condjump_p (q) && sets_cc0_p (PREV_INSN (q)))
4220 NOTE_LINE_NUMBER (q) = NOTE_INSN_DELETED;
4221 NOTE_SOURCE_FILE (q) = 0;
4227 /* We don't want a block to end on a line-number note since that has
4228 the potential of changing the code between -g and not -g. */
4229 while (GET_CODE (q) == NOTE && NOTE_LINE_NUMBER (q) >= 0)
4236 /* Selectively unlink the sequence. */
4237 if (q != PREV_INSN (c->head))
4238 flow_delete_insn_chain (NEXT_INSN (q), PREV_INSN (c->head));
4240 e->flags |= EDGE_FALLTHRU;
4243 /* Fix up edges that now fall through, or rather should now fall through
4244 but previously required a jump around now deleted blocks. Simplify
4245 the search by only examining blocks numerically adjacent, since this
4246 is how find_basic_blocks created them. */
4249 tidy_fallthru_edges ()
4253 for (i = 1; i < n_basic_blocks; ++i)
4255 basic_block b = BASIC_BLOCK (i - 1);
4256 basic_block c = BASIC_BLOCK (i);
4259 /* We care about simple conditional or unconditional jumps with
4260 a single successor.
4262 If we had a conditional branch to the next instruction when
4263 find_basic_blocks was called, then there will only be one
4264 out edge for the block which ended with the conditional
4265 branch (since we do not create duplicate edges).
4267 Furthermore, the edge will be marked as a fallthru because we
4268 merge the flags for the duplicate edges. So we do not want to
4269 check that the edge is not a FALLTHRU edge. */
4270 if ((s = b->succ) != NULL
4271 && ! (s->flags & EDGE_COMPLEX)
4272 && s->succ_next == NULL
4274 /* If the jump insn has side effects, we can't tidy the edge. */
4275 && (GET_CODE (b->end) != JUMP_INSN
4276 || onlyjump_p (b->end)))
4277 tidy_fallthru_edge (s, b, c);
4281 /* Perform data flow analysis.
4282 F is the first insn of the function; FLAGS is a set of PROP_* flags
4283 to be used in accumulating flow info. */
4286 life_analysis (f, file, flags)
4291 #ifdef ELIMINABLE_REGS
4293 static struct {int from, to; } eliminables[] = ELIMINABLE_REGS;
4296 /* Record which registers will be eliminated. We use this in
4297 mark_used_regs. */
4299 CLEAR_HARD_REG_SET (elim_reg_set);
4301 #ifdef ELIMINABLE_REGS
4302 for (i = 0; i < (int) ARRAY_SIZE (eliminables); i++)
4303 SET_HARD_REG_BIT (elim_reg_set, eliminables[i].from);
4305 SET_HARD_REG_BIT (elim_reg_set, FRAME_POINTER_REGNUM);
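/* For illustration: a target's ELIMINABLE_REGS is typically a list of
   {from, to} register pairs like the following (hypothetical; the real
   values live in the target's header file):  */
#if 0
#define ELIMINABLE_REGS					\
 {{ ARG_POINTER_REGNUM, STACK_POINTER_REGNUM },		\
  { ARG_POINTER_REGNUM, FRAME_POINTER_REGNUM },		\
  { FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM }}
#endif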
4309 flags &= ~(PROP_LOG_LINKS | PROP_AUTOINC | PROP_ALLOW_CFG_CHANGES);
4311 /* The post-reload life analysis has (on a global basis) the same
4312 registers live as were computed by reload itself; otherwise
4313 elimination offsets and such may be incorrect.
4315 Reload will mark some registers as live even though they do not
4316 appear in the rtl.
4318 We don't want to create new auto-incs after reload, since they
4319 are unlikely to be useful and can cause problems with shared
4320 stack slots. */
4321 if (reload_completed)
4322 flags &= ~(PROP_REG_INFO | PROP_AUTOINC);
4324 /* We want alias analysis information for local dead store elimination. */
4325 if (optimize && (flags & PROP_SCAN_DEAD_CODE))
4326 init_alias_analysis ();
4328 /* Always remove no-op moves. Do this before other processing so
4329 that we don't have to keep re-scanning them. */
4330 delete_noop_moves (f);
4332 /* Some targets can emit simpler epilogues if they know that sp was
4333 not ever modified during the function. After reload, of course,
4334 we've already emitted the epilogue so there's no sense searching. */
4335 if (! reload_completed)
4336 notice_stack_pointer_modification (f);
4338 /* Allocate and zero out data structures that will record the
4339 data from lifetime analysis. */
4340 allocate_reg_life_data ();
4341 allocate_bb_life_data ();
4343 /* Find the set of registers live on function exit. */
4344 mark_regs_live_at_end (EXIT_BLOCK_PTR->global_live_at_start);
4346 /* "Update" life info from zero. It'd be nice to begin the
4347 relaxation with just the exit and noreturn blocks, but that set
4348 is not immediately handy. */
4350 if (flags & PROP_REG_INFO)
4351 memset (regs_ever_live, 0, sizeof (regs_ever_live));
4352 update_life_info (NULL, UPDATE_LIFE_GLOBAL, flags);
4355 if (optimize && (flags & PROP_SCAN_DEAD_CODE))
4356 end_alias_analysis ();
4359 dump_flow_info (file);
4361 free_basic_block_vars (1);
4363 #ifdef ENABLE_CHECKING
4367 /* Search for any REG_LABEL notes which reference deleted labels. */
4368 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4370 rtx inote = find_reg_note (insn, REG_LABEL, NULL_RTX);
4372 if (inote && GET_CODE (XEXP (inote, 0)) == NOTE)
4377 /* Removing dead insns should've made jumptables really dead. */
4378 delete_dead_jumptables ();
4381 /* A subroutine of verify_wide_reg, called through for_each_rtx.
4382 Search for REGNO. If found, abort if it is not wider than word_mode. */
4385 verify_wide_reg_1 (px, pregno)
4390 unsigned int regno = *(int *) pregno;
4392 if (GET_CODE (x) == REG && REGNO (x) == regno)
4394 if (GET_MODE_BITSIZE (GET_MODE (x)) <= BITS_PER_WORD)
4401 /* A subroutine of verify_local_live_at_start. Search through insns
4402 between HEAD and END looking for register REGNO. */
4405 verify_wide_reg (regno, head, end)
4412 && for_each_rtx (&PATTERN (head), verify_wide_reg_1, &regno))
4416 head = NEXT_INSN (head);
4419 /* We didn't find the register at all. Something's way screwy. */
4421 fprintf (rtl_dump_file, "Aborting in verify_wide_reg; reg %d\n", regno);
4422 print_rtl_and_abort ();
4425 /* A subroutine of update_life_info. Verify that there are no untoward
4426 changes in live_at_start during a local update. */
4429 verify_local_live_at_start (new_live_at_start, bb)
4430 regset new_live_at_start;
4433 if (reload_completed)
4435 /* After reload, there are no pseudos, nor subregs of multi-word
4436 registers. The regsets should exactly match. */
4437 if (! REG_SET_EQUAL_P (new_live_at_start, bb->global_live_at_start))
4441 fprintf (rtl_dump_file,
4442 "live_at_start mismatch in bb %d, aborting\n",
4444 debug_bitmap_file (rtl_dump_file, bb->global_live_at_start);
4445 debug_bitmap_file (rtl_dump_file, new_live_at_start);
4447 print_rtl_and_abort ();
4454 /* Find the set of changed registers. */
4455 XOR_REG_SET (new_live_at_start, bb->global_live_at_start);
4457 EXECUTE_IF_SET_IN_REG_SET (new_live_at_start, 0, i,
4459 /* No registers should die. */
4460 if (REGNO_REG_SET_P (bb->global_live_at_start, i))
4463 fprintf (rtl_dump_file,
4464 "Register %d died unexpectedly in block %d\n", i,
4466 print_rtl_and_abort ();
4469 /* Verify that the now-live register is wider than word_mode. */
4470 verify_wide_reg (i, bb->head, bb->end);
4475 /* Updates life information starting with the basic blocks set in BLOCKS.
4476 If BLOCKS is null, consider it to be the universal set.
4478 If EXTENT is UPDATE_LIFE_LOCAL, such as after splitting or peephole optimization,
4479 we are only expecting local modifications to basic blocks. If we find
4480 extra registers live at the beginning of a block, then we either killed
4481 useful data, or we have a broken split that wants data not provided.
4482 If we find registers removed from live_at_start, that means we have
4483 a broken peephole that is killing a register it shouldn't.
4485 ??? This is not true in one situation -- when a pre-reload splitter
4486 generates subregs of a multi-word pseudo, current life analysis will
4487 lose the kill. So we _can_ have a pseudo go live. How irritating.
4489 Including PROP_REG_INFO does not properly refresh regs_ever_live
4490 unless the caller resets it to zero. */
4493 update_life_info (blocks, extent, prop_flags)
4495 enum update_life_extent extent;
4499 regset_head tmp_head;
4502 tmp = INITIALIZE_REG_SET (tmp_head);
4504 /* Changes to the CFG are only allowed when
4505 doing a global update for the entire CFG. */
4506 if ((prop_flags & PROP_ALLOW_CFG_CHANGES)
4507 && (extent == UPDATE_LIFE_LOCAL || blocks))
4510 /* For a global update, we go through the relaxation process again. */
4511 if (extent != UPDATE_LIFE_LOCAL)
4517 calculate_global_regs_live (blocks, blocks,
4518 prop_flags & (PROP_SCAN_DEAD_CODE
4519 | PROP_ALLOW_CFG_CHANGES));
4521 if ((prop_flags & (PROP_KILL_DEAD_CODE | PROP_ALLOW_CFG_CHANGES))
4522 != (PROP_KILL_DEAD_CODE | PROP_ALLOW_CFG_CHANGES))
4525 /* Removing dead code may allow the CFG to be simplified which
4526 in turn may allow for further dead code detection / removal. */
4527 for (i = n_basic_blocks - 1; i >= 0; --i)
4529 basic_block bb = BASIC_BLOCK (i);
4531 COPY_REG_SET (tmp, bb->global_live_at_end);
4532 changed |= propagate_block (bb, tmp, NULL, NULL,
4533 prop_flags & (PROP_SCAN_DEAD_CODE
4534 | PROP_KILL_DEAD_CODE));
4537 if (! changed || ! try_optimize_cfg (CLEANUP_EXPENSIVE))
4540 delete_unreachable_blocks ();
4541 mark_critical_edges ();
4544 /* If asked, remove notes from the blocks we'll update. */
4545 if (extent == UPDATE_LIFE_GLOBAL_RM_NOTES)
4546 count_or_remove_death_notes (blocks, 1);
4551 EXECUTE_IF_SET_IN_SBITMAP (blocks, 0, i,
4553 basic_block bb = BASIC_BLOCK (i);
4555 COPY_REG_SET (tmp, bb->global_live_at_end);
4556 propagate_block (bb, tmp, NULL, NULL, prop_flags);
4558 if (extent == UPDATE_LIFE_LOCAL)
4559 verify_local_live_at_start (tmp, bb);
4564 for (i = n_basic_blocks - 1; i >= 0; --i)
4566 basic_block bb = BASIC_BLOCK (i);
4568 COPY_REG_SET (tmp, bb->global_live_at_end);
4569 propagate_block (bb, tmp, NULL, NULL, prop_flags);
4571 if (extent == UPDATE_LIFE_LOCAL)
4572 verify_local_live_at_start (tmp, bb);
4578 if (prop_flags & PROP_REG_INFO)
4580 /* The only pseudos that are live at the beginning of the function
4581 are those that were not set anywhere in the function. local-alloc
4582 doesn't know how to handle these correctly, so mark them as not
4583 local to any one basic block. */
4584 EXECUTE_IF_SET_IN_REG_SET (ENTRY_BLOCK_PTR->global_live_at_end,
4585 FIRST_PSEUDO_REGISTER, i,
4586 { REG_BASIC_BLOCK (i) = REG_BLOCK_GLOBAL; });
4588 /* We have a problem with any pseudoreg that lives across the setjmp.
4589 ANSI says that if a user variable does not change in value between
4590 the setjmp and the longjmp, then the longjmp preserves it. This
4591 includes longjmp from a place where the pseudo appears dead.
4592 (In principle, the value still exists if it is in scope.)
4593 If the pseudo goes in a hard reg, some other value may occupy
4594 that hard reg where this pseudo is dead, thus clobbering the pseudo.
4595 Conclusion: such a pseudo must not go in a hard reg. */
4596 EXECUTE_IF_SET_IN_REG_SET (regs_live_at_setjmp,
4597 FIRST_PSEUDO_REGISTER, i,
4599 if (regno_reg_rtx[i] != 0)
4601 REG_LIVE_LENGTH (i) = -1;
4602 REG_BASIC_BLOCK (i) = REG_BLOCK_UNKNOWN;
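/* Purely illustrative sketch (not compiled in): how a pass that has just
   split insns in a single block might request a local update. The flag
   choice here is an assumption; real callers pick flags matching the
   transformation they performed. */
#if 0
static void
example_local_update (bb)
     basic_block bb;
{
  sbitmap blocks = sbitmap_alloc (n_basic_blocks);

  sbitmap_zero (blocks);
  SET_BIT (blocks, bb->index);

  /* Only local changes are expected; death notes are refreshed. */
  update_life_info (blocks, UPDATE_LIFE_LOCAL, PROP_DEATH_NOTES);
  sbitmap_free (blocks);
}
#endif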
4608 /* Free the variables allocated by find_basic_blocks.
4610 KEEP_HEAD_END_P is non-zero if basic_block_info is not to be freed. */
4613 free_basic_block_vars (keep_head_end_p)
4614 int keep_head_end_p;
4616 if (basic_block_for_insn)
4618 VARRAY_FREE (basic_block_for_insn);
4619 basic_block_for_insn = NULL;
4622 if (! keep_head_end_p)
4624 if (basic_block_info)
4627 VARRAY_FREE (basic_block_info);
4631 ENTRY_BLOCK_PTR->aux = NULL;
4632 ENTRY_BLOCK_PTR->global_live_at_end = NULL;
4633 EXIT_BLOCK_PTR->aux = NULL;
4634 EXIT_BLOCK_PTR->global_live_at_start = NULL;
4638 /* Delete any insns that copy a register to itself. */
4641 delete_noop_moves (f)
4642 rtx f ATTRIBUTE_UNUSED;
4648 for (i = 0; i < n_basic_blocks; i++)
4650 bb = BASIC_BLOCK (i);
4651 for (insn = bb->head; insn != NEXT_INSN (bb->end); insn = next)
4653 next = NEXT_INSN (insn);
4654 if (INSN_P (insn) && noop_move_p (insn))
4656 /* Do not call flow_delete_insn here, so as not to confuse the
4657 backward pointers of a LIBCALL block. */
4658 PUT_CODE (insn, NOTE);
4659 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
4660 NOTE_SOURCE_FILE (insn) = 0;
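/* For concreteness, the kind of insn removed above is simply a register
   copied to itself, e.g.

     (insn 42 41 43 (set (reg:SI 58) (reg:SI 58)) ...)

   (the insn UIDs, mode, and register number are illustrative only);
   noop_move_p makes the actual determination. */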
4666 /* Delete any jump tables never referenced. We can't delete them at the
4667 time of removing the tablejump insn, as they are referenced by the
4668 preceding insns computing the destination; so we delay deletion and
4669 garbage-collect them once life information is computed. */
4671 delete_dead_jumptables ()
4674 for (insn = get_insns (); insn; insn = next)
4676 next = NEXT_INSN (insn);
4677 if (GET_CODE (insn) == CODE_LABEL
4678 && LABEL_NUSES (insn) == 0
4679 && GET_CODE (next) == JUMP_INSN
4680 && (GET_CODE (PATTERN (next)) == ADDR_VEC
4681 || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
4684 fprintf (rtl_dump_file, "Dead jumptable %i removed\n", INSN_UID (insn));
4685 flow_delete_insn (NEXT_INSN (insn));
4686 flow_delete_insn (insn);
4687 next = NEXT_INSN (next);
4692 /* Determine if the stack pointer is constant over the life of the function.
4693 Only useful before prologues have been emitted. */
4696 notice_stack_pointer_modification_1 (x, pat, data)
4698 rtx pat ATTRIBUTE_UNUSED;
4699 void *data ATTRIBUTE_UNUSED;
4701 if (x == stack_pointer_rtx
4702 /* The stack pointer is only modified indirectly as the result
4703 of a push until later in flow. See the comments in rtl.texi
4704 regarding Embedded Side-Effects on Addresses. */
4705 || (GET_CODE (x) == MEM
4706 && GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == 'a'
4707 && XEXP (XEXP (x, 0), 0) == stack_pointer_rtx))
4708 current_function_sp_is_unchanging = 0;
4712 notice_stack_pointer_modification (f)
4717 /* Assume that the stack pointer is unchanging if alloca hasn't been used. */
4719 current_function_sp_is_unchanging = !current_function_calls_alloca;
4720 if (! current_function_sp_is_unchanging)
4723 for (insn = f; insn; insn = NEXT_INSN (insn))
4727 /* Check if insn modifies the stack pointer. */
4728 note_stores (PATTERN (insn), notice_stack_pointer_modification_1,
4730 if (! current_function_sp_is_unchanging)
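/* The indirect modification referred to above is, for a push on a
   downward-growing stack, an embedded side-effect such as

     (set (mem:SI (pre_dec:SI (reg/f:SI sp))) (reg:SI 58))

   where the PRE_DEC (rtx class 'a') hides the change to sp inside the
   address. The mode and register number are illustrative. */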
4736 /* Mark a register in SET. Hard registers in large modes get all
4737 of their component registers set as well. */
4740 mark_reg (reg, xset)
4744 regset set = (regset) xset;
4745 int regno = REGNO (reg);
4747 if (GET_MODE (reg) == BLKmode)
4750 SET_REGNO_REG_SET (set, regno);
4751 if (regno < FIRST_PSEUDO_REGISTER)
4753 int n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
4755 SET_REGNO_REG_SET (set, regno + n);
4759 /* Mark those regs which are needed at the end of the function as live
4760 at the end of the last basic block. */
4763 mark_regs_live_at_end (set)
4768 /* If exiting needs the right stack value, consider the stack pointer
4769 live at the end of the function. */
4770 if ((HAVE_epilogue && reload_completed)
4771 || ! EXIT_IGNORE_STACK
4772 || (! FRAME_POINTER_REQUIRED
4773 && ! current_function_calls_alloca
4774 && flag_omit_frame_pointer)
4775 || current_function_sp_is_unchanging)
4777 SET_REGNO_REG_SET (set, STACK_POINTER_REGNUM);
4780 /* Mark the frame pointer if needed at the end of the function. If
4781 we end up eliminating it, it will be removed from the live list
4782 of each basic block by reload. */
4784 if (! reload_completed || frame_pointer_needed)
4786 SET_REGNO_REG_SET (set, FRAME_POINTER_REGNUM);
4787 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
4788 /* If they are different, also mark the hard frame pointer as live. */
4789 if (! LOCAL_REGNO (HARD_FRAME_POINTER_REGNUM))
4790 SET_REGNO_REG_SET (set, HARD_FRAME_POINTER_REGNUM);
4794 #ifndef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
4795 /* Many architectures have a GP register even without flag_pic.
4796 Assume the pic register is not in use, or will be handled by
4797 other means, if it is not fixed. */
4798 if (PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
4799 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
4800 SET_REGNO_REG_SET (set, PIC_OFFSET_TABLE_REGNUM);
4803 /* Mark all global registers, and all registers used by the epilogue
4804 as being live at the end of the function since they may be
4805 referenced by our caller. */
4806 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4807 if (global_regs[i] || EPILOGUE_USES (i))
4808 SET_REGNO_REG_SET (set, i);
4810 if (HAVE_epilogue && reload_completed)
4812 /* Mark all call-saved registers that we actually used. */
4813 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4814 if (regs_ever_live[i] && ! call_used_regs[i] && ! LOCAL_REGNO (i))
4815 SET_REGNO_REG_SET (set, i);
4818 #ifdef EH_RETURN_DATA_REGNO
4819 /* Mark the registers that will contain data for the handler. */
4820 if (reload_completed && current_function_calls_eh_return)
4823 unsigned regno = EH_RETURN_DATA_REGNO(i);
4824 if (regno == INVALID_REGNUM)
4826 SET_REGNO_REG_SET (set, regno);
4829 #ifdef EH_RETURN_STACKADJ_RTX
4830 if ((! HAVE_epilogue || ! reload_completed)
4831 && current_function_calls_eh_return)
4833 rtx tmp = EH_RETURN_STACKADJ_RTX;
4834 if (tmp && REG_P (tmp))
4835 mark_reg (tmp, set);
4838 #ifdef EH_RETURN_HANDLER_RTX
4839 if ((! HAVE_epilogue || ! reload_completed)
4840 && current_function_calls_eh_return)
4842 rtx tmp = EH_RETURN_HANDLER_RTX;
4843 if (tmp && REG_P (tmp))
4844 mark_reg (tmp, set);
4848 /* Mark function return value. */
4849 diddle_return_value (mark_reg, set);
4852 /* Callback function for for_each_successor_phi. DATA is a regset.
4853 Sets the SRC_REGNO, the regno of the phi alternative for phi node
4854 INSN, in the regset. */
4857 set_phi_alternative_reg (insn, dest_regno, src_regno, data)
4858 rtx insn ATTRIBUTE_UNUSED;
4859 int dest_regno ATTRIBUTE_UNUSED;
4863 regset live = (regset) data;
4864 SET_REGNO_REG_SET (live, src_regno);
4868 /* Propagate global life info around the graph of basic blocks. Begin
4869 considering blocks with their corresponding bit set in BLOCKS_IN.
4870 If BLOCKS_IN is null, consider it the universal set.
4872 BLOCKS_OUT is set for every block that was changed. */
4875 calculate_global_regs_live (blocks_in, blocks_out, flags)
4876 sbitmap blocks_in, blocks_out;
4879 basic_block *queue, *qhead, *qtail, *qend;
4880 regset tmp, new_live_at_end, call_used;
4881 regset_head tmp_head, call_used_head;
4882 regset_head new_live_at_end_head;
4885 tmp = INITIALIZE_REG_SET (tmp_head);
4886 new_live_at_end = INITIALIZE_REG_SET (new_live_at_end_head);
4887 call_used = INITIALIZE_REG_SET (call_used_head);
4889 /* Inconveniently, this is only readily available in hard reg set form. */
4890 for (i = 0; i < FIRST_PSEUDO_REGISTER; ++i)
4891 if (call_used_regs[i])
4892 SET_REGNO_REG_SET (call_used, i);
4894 /* Create a worklist. Allocate an extra slot for ENTRY_BLOCK, and one
4895 because the `head == tail' style test for an empty queue doesn't
4896 work with a full queue. */
4897 queue = (basic_block *) xmalloc ((n_basic_blocks + 2) * sizeof (*queue));
4899 qhead = qend = queue + n_basic_blocks + 2;
4901 /* Queue the blocks set in the initial mask. Do this in reverse block
4902 number order so that the first round is more likely to do useful
4903 work. We use AUX non-null to flag that the block is queued. */
4906 /* Clear out the garbage that might be hanging out in bb->aux. */
4907 for (i = n_basic_blocks - 1; i >= 0; --i)
4908 BASIC_BLOCK (i)->aux = NULL;
4910 EXECUTE_IF_SET_IN_SBITMAP (blocks_in, 0, i,
4912 basic_block bb = BASIC_BLOCK (i);
4919 for (i = 0; i < n_basic_blocks; ++i)
4921 basic_block bb = BASIC_BLOCK (i);
4928 sbitmap_zero (blocks_out);
4930 /* We work through the queue until there are no more blocks. What
4931 is live at the end of this block is precisely the union of what
4932 is live at the beginning of all its successors. So, we set its
4933 GLOBAL_LIVE_AT_END field based on the GLOBAL_LIVE_AT_START field
4934 for its successors. Then, we compute GLOBAL_LIVE_AT_START for
4935 this block by walking through the instructions in this block in
4936 reverse order and updating as we go. If that changed
4937 GLOBAL_LIVE_AT_START, we add the predecessors of the block to the
4938 queue; they will now need to recalculate GLOBAL_LIVE_AT_END.
4940 We are guaranteed to terminate, because GLOBAL_LIVE_AT_START
4941 never shrinks. If a register appears in GLOBAL_LIVE_AT_START, it
4942 must either be live at the end of the block, or used within the
4943 block. In the latter case, it will certainly never disappear
4944 from GLOBAL_LIVE_AT_START. In the former case, the register
4945 could go away only if it disappeared from GLOBAL_LIVE_AT_START
4946 for one of the successor blocks. By induction, that cannot occur. */
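/* Stated as the usual backward dataflow equations, the loop below
   computes, for each block B, the least fixed point of

     live_at_end (B)   = UNION over successors S of live_at_start (S)
     live_at_start (B) = use (B) | (live_at_end (B) & ~def (B))

   where the use and def effects are derived insn by insn inside
   propagate_block. */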
4948 while (qhead != qtail)
4950 int rescan, changed;
4959 /* Begin by propagating live_at_start from the successor blocks. */
4960 CLEAR_REG_SET (new_live_at_end);
4961 for (e = bb->succ; e; e = e->succ_next)
4963 basic_block sb = e->dest;
4965 /* Call-clobbered registers die across exception and call edges. */
4966 /* ??? Abnormal call edges ignored for the moment, as this gets
4967 confused by sibling call edges, which crashes reg-stack. */
4968 if (e->flags & EDGE_EH)
4970 bitmap_operation (tmp, sb->global_live_at_start,
4971 call_used, BITMAP_AND_COMPL);
4972 IOR_REG_SET (new_live_at_end, tmp);
4975 IOR_REG_SET (new_live_at_end, sb->global_live_at_start);
4978 /* The all-important stack pointer must always be live. */
4979 SET_REGNO_REG_SET (new_live_at_end, STACK_POINTER_REGNUM);
4981 /* Before reload, there are a few registers that must be forced
4982 live everywhere -- which might not already be the case for
4983 blocks within infinite loops. */
4984 if (! reload_completed)
4986 /* Any reference to any pseudo before reload is a potential
4987 reference of the frame pointer. */
4988 SET_REGNO_REG_SET (new_live_at_end, FRAME_POINTER_REGNUM);
4990 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
4991 /* Pseudos with argument area equivalences may require
4992 reloading via the argument pointer. */
4993 if (fixed_regs[ARG_POINTER_REGNUM])
4994 SET_REGNO_REG_SET (new_live_at_end, ARG_POINTER_REGNUM);
4997 /* Any constant, or pseudo with constant equivalences, may
4998 require reloading from memory using the pic register. */
4999 if (PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
5000 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
5001 SET_REGNO_REG_SET (new_live_at_end, PIC_OFFSET_TABLE_REGNUM);
5004 /* Regs used in phi nodes are not included in
5005 global_live_at_start, since they are live only along a
5006 particular edge. Set those regs that are live because of a
5007 phi node alternative corresponding to this particular block. */
5009 for_each_successor_phi (bb, &set_phi_alternative_reg,
5012 if (bb == ENTRY_BLOCK_PTR)
5014 COPY_REG_SET (bb->global_live_at_end, new_live_at_end);
5018 /* On our first pass through this block, we'll go ahead and continue.
5019 Recognize the first pass by local_set being NULL. On subsequent passes, we
5020 get to skip out early if live_at_end wouldn't have changed. */
5022 if (bb->local_set == NULL)
5024 bb->local_set = OBSTACK_ALLOC_REG_SET (&flow_obstack);
5025 bb->cond_local_set = OBSTACK_ALLOC_REG_SET (&flow_obstack);
5030 /* If any bits were removed from live_at_end, we'll have to
5031 rescan the block. This wouldn't be necessary if we had
5032 precalculated local_live; however, with PROP_SCAN_DEAD_CODE
5033 local_live is really dependent on live_at_end. */
5034 CLEAR_REG_SET (tmp);
5035 rescan = bitmap_operation (tmp, bb->global_live_at_end,
5036 new_live_at_end, BITMAP_AND_COMPL);
5040 /* If any of the registers in the new live_at_end set are
5041 conditionally set in this basic block, we must rescan.
5042 This is because conditional lifetimes at the end of the
5043 block do not just take the live_at_end set into account,
5044 but also the liveness at the start of each successor
5045 block. We can miss changes in those sets if we only
5046 compare the new live_at_end against the previous one. */
5047 CLEAR_REG_SET (tmp);
5048 rescan = bitmap_operation (tmp, new_live_at_end,
5049 bb->cond_local_set, BITMAP_AND);
5054 /* Find the set of changed bits. Take this opportunity
5055 to notice whether this set is empty and, if so, exit early. */
5056 CLEAR_REG_SET (tmp);
5057 changed = bitmap_operation (tmp, bb->global_live_at_end,
5058 new_live_at_end, BITMAP_XOR);
5062 /* If any of the changed bits overlap with local_set,
5063 we'll have to rescan the block. Detect overlap by
5064 the AND with ~local_set turning off bits. */
5065 rescan = bitmap_operation (tmp, tmp, bb->local_set,
5070 /* Let our caller know that BB changed enough to require its
5071 death notes updated. */
5073 SET_BIT (blocks_out, bb->index);
5077 /* Add to live_at_start the set of all registers in
5078 new_live_at_end that aren't in the old live_at_end. */
5080 bitmap_operation (tmp, new_live_at_end, bb->global_live_at_end,
5082 COPY_REG_SET (bb->global_live_at_end, new_live_at_end);
5084 changed = bitmap_operation (bb->global_live_at_start,
5085 bb->global_live_at_start,
5092 COPY_REG_SET (bb->global_live_at_end, new_live_at_end);
5094 /* Rescan the block insn by insn to turn (a copy of) live_at_end
5095 into live_at_start. */
5096 propagate_block (bb, new_live_at_end, bb->local_set,
5097 bb->cond_local_set, flags);
5099 /* If live_at_start didn't change, no need to go farther. */
5100 if (REG_SET_EQUAL_P (bb->global_live_at_start, new_live_at_end))
5103 COPY_REG_SET (bb->global_live_at_start, new_live_at_end);
5106 /* Queue all predecessors of BB so that we may re-examine
5107 their live_at_end. */
5108 for (e = bb->pred; e; e = e->pred_next)
5110 basic_block pb = e->src;
5111 if (pb->aux == NULL)
5122 FREE_REG_SET (new_live_at_end);
5123 FREE_REG_SET (call_used);
5127 EXECUTE_IF_SET_IN_SBITMAP (blocks_out, 0, i,
5129 basic_block bb = BASIC_BLOCK (i);
5130 FREE_REG_SET (bb->local_set);
5131 FREE_REG_SET (bb->cond_local_set);
5136 for (i = n_basic_blocks - 1; i >= 0; --i)
5138 basic_block bb = BASIC_BLOCK (i);
5139 FREE_REG_SET (bb->local_set);
5140 FREE_REG_SET (bb->cond_local_set);
5147 /* Subroutines of life analysis. */
5149 /* Allocate the permanent data structures that represent the results
5150 of life analysis. Not static since used also for stupid life analysis. */
5153 allocate_bb_life_data ()
5157 for (i = 0; i < n_basic_blocks; i++)
5159 basic_block bb = BASIC_BLOCK (i);
5161 bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (&flow_obstack);
5162 bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (&flow_obstack);
5165 ENTRY_BLOCK_PTR->global_live_at_end
5166 = OBSTACK_ALLOC_REG_SET (&flow_obstack);
5167 EXIT_BLOCK_PTR->global_live_at_start
5168 = OBSTACK_ALLOC_REG_SET (&flow_obstack);
5170 regs_live_at_setjmp = OBSTACK_ALLOC_REG_SET (&flow_obstack);
5174 allocate_reg_life_data ()
5178 max_regno = max_reg_num ();
5180 /* Recalculate the register space, in case it has grown. Old style
5181 vector oriented regsets would set regset_{size,bytes} here also. */
5182 allocate_reg_info (max_regno, FALSE, FALSE);
5184 /* Reset all the data we'll collect in propagate_block and its subroutines. */
5186 for (i = 0; i < max_regno; i++)
5190 REG_N_DEATHS (i) = 0;
5191 REG_N_CALLS_CROSSED (i) = 0;
5192 REG_LIVE_LENGTH (i) = 0;
5193 REG_BASIC_BLOCK (i) = REG_BLOCK_UNKNOWN;
5197 /* Delete dead instructions for propagate_block. */
5200 propagate_block_delete_insn (bb, insn)
5204 rtx inote = find_reg_note (insn, REG_LABEL, NULL_RTX);
5206 /* If the insn referred to a label, and that label was attached to
5207 an ADDR_VEC, it's safe to delete the ADDR_VEC. In fact, it's
5208 pretty much mandatory to delete it, because the ADDR_VEC may be
5209 referencing labels that no longer exist.
5211 INSN may reference a deleted label, particularly when a jump
5212 table has been optimized into a direct jump. There's no
5213 good way to fix up the reference to the deleted label
5214 when the label is deleted, so we just allow it here.
5216 After dead code elimination is complete, we do search for
5217 any REG_LABEL notes which reference deleted labels as a sanity check. */
5220 if (inote && GET_CODE (XEXP (inote, 0)) == CODE_LABEL)
5222 rtx label = XEXP (inote, 0);
5225 /* The label may be forced if it has been put in the constant
5226 pool. If that is the only use we must discard the table
5227 jump following it, but not the label itself. */
5228 if (LABEL_NUSES (label) == 1 + LABEL_PRESERVE_P (label)
5229 && (next = next_nonnote_insn (label)) != NULL
5230 && GET_CODE (next) == JUMP_INSN
5231 && (GET_CODE (PATTERN (next)) == ADDR_VEC
5232 || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
5234 rtx pat = PATTERN (next);
5235 int diff_vec_p = GET_CODE (pat) == ADDR_DIFF_VEC;
5236 int len = XVECLEN (pat, diff_vec_p);
5239 for (i = 0; i < len; i++)
5240 LABEL_NUSES (XEXP (XVECEXP (pat, diff_vec_p, i), 0))--;
5242 flow_delete_insn (next);
5246 if (bb->end == insn)
5247 bb->end = PREV_INSN (insn);
5248 flow_delete_insn (insn);
5251 /* Delete dead libcalls for propagate_block. Return the insn
5252 before the libcall. */
5255 propagate_block_delete_libcall (bb, insn, note)
5259 rtx first = XEXP (note, 0);
5260 rtx before = PREV_INSN (first);
5262 if (insn == bb->end)
5265 flow_delete_insn_chain (first, insn);
5269 /* Update the life-status of regs for one insn. Return the previous insn. */
5272 propagate_one_insn (pbi, insn)
5273 struct propagate_block_info *pbi;
5276 rtx prev = PREV_INSN (insn);
5277 int flags = pbi->flags;
5278 int insn_is_dead = 0;
5279 int libcall_is_dead = 0;
5283 if (! INSN_P (insn))
5286 note = find_reg_note (insn, REG_RETVAL, NULL_RTX);
5287 if (flags & PROP_SCAN_DEAD_CODE)
5289 insn_is_dead = insn_dead_p (pbi, PATTERN (insn), 0, REG_NOTES (insn));
5290 libcall_is_dead = (insn_is_dead && note != 0
5291 && libcall_dead_p (pbi, note, insn));
5294 /* If an instruction consists of just dead store(s) on final pass, delete it. */
5296 if ((flags & PROP_KILL_DEAD_CODE) && insn_is_dead)
5298 /* If we're trying to delete a prologue or epilogue instruction
5299 that isn't flagged as possibly being dead, something is wrong.
5300 But if we are keeping the stack pointer depressed, we might well
5301 be deleting insns that are used to compute the amount to update
5302 it by, so they are fine. */
5303 if (reload_completed
5304 && !(TREE_CODE (TREE_TYPE (current_function_decl)) == FUNCTION_TYPE
5305 && (TYPE_RETURNS_STACK_DEPRESSED
5306 (TREE_TYPE (current_function_decl))))
5307 && (((HAVE_epilogue || HAVE_prologue)
5308 && prologue_epilogue_contains (insn))
5309 || (HAVE_sibcall_epilogue
5310 && sibcall_epilogue_contains (insn)))
5311 && find_reg_note (insn, REG_MAYBE_DEAD, NULL_RTX) == 0)
5314 /* Record sets. Do this even for dead instructions, since they
5315 would have killed the values if they hadn't been deleted. */
5316 mark_set_regs (pbi, PATTERN (insn), insn);
5318 /* CC0 is now known to be dead. Either this insn used it,
5319 in which case it doesn't anymore, or clobbered it,
5320 so the next insn can't use it. */
5323 if (libcall_is_dead)
5324 prev = propagate_block_delete_libcall (pbi->bb, insn, note);
5326 propagate_block_delete_insn (pbi->bb, insn);
5331 /* See if this is an increment or decrement that can be merged into
5332 a following memory address. */
5335 register rtx x = single_set (insn);
5337 /* Does this instruction increment or decrement a register? */
5338 if ((flags & PROP_AUTOINC)
5340 && GET_CODE (SET_DEST (x)) == REG
5341 && (GET_CODE (SET_SRC (x)) == PLUS
5342 || GET_CODE (SET_SRC (x)) == MINUS)
5343 && XEXP (SET_SRC (x), 0) == SET_DEST (x)
5344 && GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT
5345 /* Ok, look for a following memory ref we can combine with.
5346 If one is found, change the memory ref to a PRE_INC
5347 or PRE_DEC, cancel this insn, and return 1.
5348 Return 0 if nothing has been done. */
5349 && try_pre_increment_1 (pbi, insn))
5352 #endif /* AUTO_INC_DEC */
5354 CLEAR_REG_SET (pbi->new_set);
5356 /* If this is not the final pass, and this insn is copying the value of
5357 a library call and it's dead, don't scan the insns that perform the
5358 library call, so that the call's arguments are not marked live. */
5359 if (libcall_is_dead)
5361 /* Record the death of the dest reg. */
5362 mark_set_regs (pbi, PATTERN (insn), insn);
5364 insn = XEXP (note, 0);
5365 return PREV_INSN (insn);
5367 else if (GET_CODE (PATTERN (insn)) == SET
5368 && SET_DEST (PATTERN (insn)) == stack_pointer_rtx
5369 && GET_CODE (SET_SRC (PATTERN (insn))) == PLUS
5370 && XEXP (SET_SRC (PATTERN (insn)), 0) == stack_pointer_rtx
5371 && GET_CODE (XEXP (SET_SRC (PATTERN (insn)), 1)) == CONST_INT)
5372 /* We have an insn to pop a constant amount off the stack.
5373 (Such insns use PLUS regardless of the direction of the stack,
5374 and any insn to adjust the stack by a constant is always a pop.)
5375 These insns, if not dead stores, have no effect on life. */
5379 /* Any regs live at the time of a call instruction must not go
5380 in a register clobbered by calls. Find all regs now live and
5381 record this for them. */
5383 if (GET_CODE (insn) == CALL_INSN && (flags & PROP_REG_INFO))
5384 EXECUTE_IF_SET_IN_REG_SET (pbi->reg_live, 0, i,
5385 { REG_N_CALLS_CROSSED (i)++; });
5387 /* Record sets. Do this even for dead instructions, since they
5388 would have killed the values if they hadn't been deleted. */
5389 mark_set_regs (pbi, PATTERN (insn), insn);
5391 if (GET_CODE (insn) == CALL_INSN)
5397 if (GET_CODE (PATTERN (insn)) == COND_EXEC)
5398 cond = COND_EXEC_TEST (PATTERN (insn));
5400 /* Non-constant calls clobber memory. */
5401 if (! CONST_OR_PURE_CALL_P (insn))
5403 free_EXPR_LIST_list (&pbi->mem_set_list);
5404 pbi->mem_set_list_len = 0;
5407 /* There may be extra registers to be clobbered. */
5408 for (note = CALL_INSN_FUNCTION_USAGE (insn);
5410 note = XEXP (note, 1))
5411 if (GET_CODE (XEXP (note, 0)) == CLOBBER)
5412 mark_set_1 (pbi, CLOBBER, XEXP (XEXP (note, 0), 0),
5413 cond, insn, pbi->flags);
5415 /* Calls change all call-used and global registers. */
5416 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
5417 if (TEST_HARD_REG_BIT (regs_invalidated_by_call, i))
5419 /* We do not want REG_UNUSED notes for these registers. */
5420 mark_set_1 (pbi, CLOBBER, gen_rtx_REG (reg_raw_mode[i], i),
5422 pbi->flags & ~(PROP_DEATH_NOTES | PROP_REG_INFO));
5426 /* If an insn doesn't use CC0, it becomes dead since we assume
5427 that every insn clobbers it. So show it dead here;
5428 mark_used_regs will set it live if it is referenced. */
5433 mark_used_regs (pbi, PATTERN (insn), NULL_RTX, insn);
5435 /* Sometimes we may have inserted something before INSN (such as a move)
5436 when we make an auto-inc. So ensure we will scan those insns. */
5438 prev = PREV_INSN (insn);
5441 if (! insn_is_dead && GET_CODE (insn) == CALL_INSN)
5447 if (GET_CODE (PATTERN (insn)) == COND_EXEC)
5448 cond = COND_EXEC_TEST (PATTERN (insn));
5450 /* Calls use their arguments. */
5451 for (note = CALL_INSN_FUNCTION_USAGE (insn);
5453 note = XEXP (note, 1))
5454 if (GET_CODE (XEXP (note, 0)) == USE)
5455 mark_used_regs (pbi, XEXP (XEXP (note, 0), 0),
5458 /* The stack ptr is used (honorarily) by a CALL insn. */
5459 SET_REGNO_REG_SET (pbi->reg_live, STACK_POINTER_REGNUM);
5461 /* Calls may also reference any of the global registers,
5462 so they are made live. */
5463 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
5465 mark_used_reg (pbi, gen_rtx_REG (reg_raw_mode[i], i),
5470 /* On final pass, update counts of how many insns each reg is live in. */
5472 if (flags & PROP_REG_INFO)
5473 EXECUTE_IF_SET_IN_REG_SET (pbi->reg_live, 0, i,
5474 { REG_LIVE_LENGTH (i)++; });
5479 /* Initialize a propagate_block_info struct for public consumption.
5480 Note that the structure itself is opaque outside this file, but that
5481 the user can use the regsets provided here. */
5483 struct propagate_block_info *
5484 init_propagate_block_info (bb, live, local_set, cond_local_set, flags)
5486 regset live, local_set, cond_local_set;
5489 struct propagate_block_info *pbi = xmalloc (sizeof (*pbi));
5492 pbi->reg_live = live;
5493 pbi->mem_set_list = NULL_RTX;
5494 pbi->mem_set_list_len = 0;
5495 pbi->local_set = local_set;
5496 pbi->cond_local_set = cond_local_set;
5500 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
5501 pbi->reg_next_use = (rtx *) xcalloc (max_reg_num (), sizeof (rtx));
5503 pbi->reg_next_use = NULL;
5505 pbi->new_set = BITMAP_XMALLOC ();
5507 #ifdef HAVE_conditional_execution
5508 pbi->reg_cond_dead = splay_tree_new (splay_tree_compare_ints, NULL,
5509 free_reg_cond_life_info);
5510 pbi->reg_cond_reg = BITMAP_XMALLOC ();
5512 /* If this block ends in a conditional branch, for each register live
5513 on one side of the branch and not the other, record the register
5514 as conditionally dead. */
5515 if (GET_CODE (bb->end) == JUMP_INSN
5516 && any_condjump_p (bb->end))
5518 regset_head diff_head;
5519 regset diff = INITIALIZE_REG_SET (diff_head);
5520 basic_block bb_true, bb_false;
5521 rtx cond_true, cond_false, set_src;
5524 /* Identify the successor blocks. */
5525 bb_true = bb->succ->dest;
5526 if (bb->succ->succ_next != NULL)
5528 bb_false = bb->succ->succ_next->dest;
5530 if (bb->succ->flags & EDGE_FALLTHRU)
5532 basic_block t = bb_false;
5536 else if (! (bb->succ->succ_next->flags & EDGE_FALLTHRU))
5541 /* This can happen with a conditional jump to the next insn. */
5542 if (JUMP_LABEL (bb->end) != bb_true->head)
5545 /* Simplest way to do nothing. */
5549 /* Extract the condition from the branch. */
5550 set_src = SET_SRC (pc_set (bb->end));
5551 cond_true = XEXP (set_src, 0);
5552 cond_false = gen_rtx_fmt_ee (reverse_condition (GET_CODE (cond_true)),
5553 GET_MODE (cond_true), XEXP (cond_true, 0),
5554 XEXP (cond_true, 1));
5555 if (GET_CODE (XEXP (set_src, 1)) == PC)
5558 cond_false = cond_true;
5562 /* Compute which registers lead different lives in the successors. */
5563 if (bitmap_operation (diff, bb_true->global_live_at_start,
5564 bb_false->global_live_at_start, BITMAP_XOR))
5566 rtx reg = XEXP (cond_true, 0);
5568 if (GET_CODE (reg) == SUBREG)
5569 reg = SUBREG_REG (reg);
5571 if (GET_CODE (reg) != REG)
5574 SET_REGNO_REG_SET (pbi->reg_cond_reg, REGNO (reg));
5576 /* For each such register, mark it conditionally dead. */
5577 EXECUTE_IF_SET_IN_REG_SET
5580 struct reg_cond_life_info *rcli;
5583 rcli = (struct reg_cond_life_info *) xmalloc (sizeof (*rcli));
5585 if (REGNO_REG_SET_P (bb_true->global_live_at_start, i))
5589 rcli->condition = cond;
5590 rcli->stores = const0_rtx;
5591 rcli->orig_condition = cond;
5593 splay_tree_insert (pbi->reg_cond_dead, i,
5594 (splay_tree_value) rcli);
5598 FREE_REG_SET (diff);
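/* Concretely: if the block ends in

     (set (pc) (if_then_else (eq (reg 70) (const_int 0))
                             (label_ref 23) (pc)))

   and some reg 100 is live at the start of the true arm only, reg 100
   is recorded here as dead under (ne (reg 70) (const_int 0)). The
   register numbers and label are illustrative. */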
5602 /* If this block has no successors, any stores to the frame that aren't
5603 used later in the block are dead. So make a pass over the block
5604 recording any such stores and showing them dead at the end. We do
5605 a very conservative and simple job here. */
5607 && ! (TREE_CODE (TREE_TYPE (current_function_decl)) == FUNCTION_TYPE
5608 && (TYPE_RETURNS_STACK_DEPRESSED
5609 (TREE_TYPE (current_function_decl))))
5610 && (flags & PROP_SCAN_DEAD_CODE)
5611 && (bb->succ == NULL
5612 || (bb->succ->succ_next == NULL
5613 && bb->succ->dest == EXIT_BLOCK_PTR
5614 && ! current_function_calls_eh_return)))
5617 for (insn = bb->end; insn != bb->head; insn = PREV_INSN (insn))
5618 if (GET_CODE (insn) == INSN
5619 && (set = single_set (insn))
5620 && GET_CODE (SET_DEST (set)) == MEM)
5622 rtx mem = SET_DEST (set);
5623 rtx canon_mem = canon_rtx (mem);
5625 /* This optimization is performed by faking a store to the
5626 memory at the end of the block. This doesn't work for
5627 unchanging memories because multiple stores to unchanging
5628 memory are illegal and alias analysis doesn't consider it. */
5629 if (RTX_UNCHANGING_P (canon_mem))
5632 if (XEXP (canon_mem, 0) == frame_pointer_rtx
5633 || (GET_CODE (XEXP (canon_mem, 0)) == PLUS
5634 && XEXP (XEXP (canon_mem, 0), 0) == frame_pointer_rtx
5635 && GET_CODE (XEXP (XEXP (canon_mem, 0), 1)) == CONST_INT))
5636 add_to_mem_set_list (pbi, canon_mem);
5643 /* Release a propagate_block_info struct. */
5646 free_propagate_block_info (pbi)
5647 struct propagate_block_info *pbi;
5649 free_EXPR_LIST_list (&pbi->mem_set_list);
5651 BITMAP_XFREE (pbi->new_set);
5653 #ifdef HAVE_conditional_execution
5654 splay_tree_delete (pbi->reg_cond_dead);
5655 BITMAP_XFREE (pbi->reg_cond_reg);
5658 if (pbi->reg_next_use)
5659 free (pbi->reg_next_use);
5664 /* Compute the registers live at the beginning of a basic block BB from
5665 those live at the end.
5667 When called, REG_LIVE contains those live at the end. On return, it
5668 contains those live at the beginning.
5670 LOCAL_SET, if non-null, will be set with all registers killed
5671 unconditionally by this basic block.
5672 Likewise, COND_LOCAL_SET, if non-null, will be set with all registers
5673 killed conditionally by this basic block. If there is any unconditional
5674 set of a register, then the corresponding bit will be set in LOCAL_SET
5675 and cleared in COND_LOCAL_SET.
5676 It is valid for LOCAL_SET and COND_LOCAL_SET to be the same set. In this
5677 case, the resulting set will be equal to the union of the two sets that
5678 would otherwise be computed.
5680 Return non-zero if an INSN is deleted (i.e. by dead code removal). */
5683 propagate_block (bb, live, local_set, cond_local_set, flags)
5687 regset cond_local_set;
5690 struct propagate_block_info *pbi;
5694 pbi = init_propagate_block_info (bb, live, local_set, cond_local_set, flags);
5696 if (flags & PROP_REG_INFO)
5700 /* Process the regs live at the end of the block.
5701 Mark them as not local to any one basic block. */
5702 EXECUTE_IF_SET_IN_REG_SET (live, 0, i,
5703 { REG_BASIC_BLOCK (i) = REG_BLOCK_GLOBAL; });
5706 /* Scan the block an insn at a time from end to beginning. */
5709 for (insn = bb->end;; insn = prev)
5711 /* If this is a call to `setjmp' et al, warn if any
5712 non-volatile datum is live. */
5713 if ((flags & PROP_REG_INFO)
5714 && GET_CODE (insn) == CALL_INSN
5715 && find_reg_note (insn, REG_SETJMP, NULL))
5716 IOR_REG_SET (regs_live_at_setjmp, pbi->reg_live);
5718 prev = propagate_one_insn (pbi, insn);
5719 changed |= NEXT_INSN (prev) != insn;
5721 if (insn == bb->head)
5725 free_propagate_block_info (pbi);
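/* Illustrative sketch (not compiled in) of using the routine above to
   recover just the unconditional kill set of a block; the regset
   handling follows the conventions used elsewhere in this file. */
#if 0
static void
example_kill_set (bb)
     basic_block bb;
{
  regset_head live_head, local_head;
  regset live = INITIALIZE_REG_SET (live_head);
  regset local = INITIALIZE_REG_SET (local_head);

  COPY_REG_SET (live, bb->global_live_at_end);
  propagate_block (bb, live, local, NULL, 0);
  /* LOCAL now holds the registers killed unconditionally by BB;
     LIVE holds those live at its start. */
  FREE_REG_SET (live);
  FREE_REG_SET (local);
}
#endif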
5730 /* Return 1 if X (the body of an insn, or part of it) is just dead stores
5731 (SET expressions whose destinations are registers dead after the insn).
5732 NEEDED is the regset that says which regs are alive after the insn.
5734 Unless CALL_OK is non-zero, an insn is needed if it contains a CALL.
5736 If X is the entire body of an insn, NOTES contains the reg notes
5737 pertaining to the insn. */
5740 insn_dead_p (pbi, x, call_ok, notes)
5741 struct propagate_block_info *pbi;
5744 rtx notes ATTRIBUTE_UNUSED;
5746 enum rtx_code code = GET_CODE (x);
5749 /* If flow is invoked after reload, we must take existing AUTO_INC
5750 expressions into account. */
5751 if (reload_completed)
5753 for (; notes; notes = XEXP (notes, 1))
5755 if (REG_NOTE_KIND (notes) == REG_INC)
5757 int regno = REGNO (XEXP (notes, 0));
5759 /* Don't delete insns to set global regs. */
5760 if ((regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
5761 || REGNO_REG_SET_P (pbi->reg_live, regno))
5768 /* If setting something that's a reg or part of one,
5769 see if that register's altered value will be live. */
5773 rtx r = SET_DEST (x);
5776 if (GET_CODE (r) == CC0)
5777 return ! pbi->cc0_live;
5780 /* A SET that is a subroutine call cannot be dead. */
5781 if (GET_CODE (SET_SRC (x)) == CALL)
5787 /* Don't eliminate loads from volatile memory or volatile asms. */
5788 else if (volatile_refs_p (SET_SRC (x)))
5791 if (GET_CODE (r) == MEM)
5795 if (MEM_VOLATILE_P (r) || GET_MODE (r) == BLKmode)
5798 canon_r = canon_rtx (r);
5800 /* Walk the set of memory locations we are currently tracking
5801 and see if one is an identical match to this memory location.
5802 If so, this memory write is dead (remember, we're walking
5803 backwards from the end of the block to the start). Since
5804 rtx_equal_p does not check the alias set or flags, we also
5805 must have the potential for them to conflict (anti_dependence). */
5806 for (temp = pbi->mem_set_list; temp != 0; temp = XEXP (temp, 1))
5807 if (anti_dependence (r, XEXP (temp, 0)))
5809 rtx mem = XEXP (temp, 0);
5811 if (rtx_equal_p (XEXP (canon_r, 0), XEXP (mem, 0))
5812 && (GET_MODE_SIZE (GET_MODE (canon_r))
5813 <= GET_MODE_SIZE (GET_MODE (mem))))
5817 /* Check if memory reference matches an auto increment. Only
5818 post increment/decrement or modify are valid. */
5819 if (GET_MODE (mem) == GET_MODE (r)
5820 && (GET_CODE (XEXP (mem, 0)) == POST_DEC
5821 || GET_CODE (XEXP (mem, 0)) == POST_INC
5822 || GET_CODE (XEXP (mem, 0)) == POST_MODIFY)
5823 && GET_MODE (XEXP (mem, 0)) == GET_MODE (r)
5824 && rtx_equal_p (XEXP (XEXP (mem, 0), 0), XEXP (r, 0)))
5831 while (GET_CODE (r) == SUBREG
5832 || GET_CODE (r) == STRICT_LOW_PART
5833 || GET_CODE (r) == ZERO_EXTRACT)
5836 if (GET_CODE (r) == REG)
5838 int regno = REGNO (r);
5841 if (REGNO_REG_SET_P (pbi->reg_live, regno))
5844 /* If this is a hard register, verify that subsequent
5845 words are not needed. */
5846 if (regno < FIRST_PSEUDO_REGISTER)
5848 int n = HARD_REGNO_NREGS (regno, GET_MODE (r));
5851 if (REGNO_REG_SET_P (pbi->reg_live, regno+n))
5855 /* Don't delete insns to set global regs. */
5856 if (regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
5859 /* Make sure insns to set the stack pointer aren't deleted. */
5860 if (regno == STACK_POINTER_REGNUM)
5863 /* ??? These bits might be redundant with the force live bits
5864 in calculate_global_regs_live. We would delete from
5865 sequential sets; whether this actually affects real code
5866 for anything but the stack pointer I don't know. */
5867 /* Make sure insns to set the frame pointer aren't deleted. */
5868 if (regno == FRAME_POINTER_REGNUM
5869 && (! reload_completed || frame_pointer_needed))
5871 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
5872 if (regno == HARD_FRAME_POINTER_REGNUM
5873 && (! reload_completed || frame_pointer_needed))
5877 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
5878 /* Make sure insns to set arg pointer are never deleted
5879 (if the arg pointer isn't fixed, there will be a USE
5880 for it, so we can treat it normally). */
5881 if (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
5885 /* Otherwise, the set is dead. */
5891 /* If performing several activities, insn is dead if each activity
5892 is individually dead. Also, CLOBBERs and USEs can be ignored; a
5893 CLOBBER or USE that's inside a PARALLEL doesn't make the insn needed. */
5895 else if (code == PARALLEL)
5897 int i = XVECLEN (x, 0);
5899 for (i--; i >= 0; i--)
5900 if (GET_CODE (XVECEXP (x, 0, i)) != CLOBBER
5901 && GET_CODE (XVECEXP (x, 0, i)) != USE
5902 && ! insn_dead_p (pbi, XVECEXP (x, 0, i), call_ok, NULL_RTX))
5908 /* A CLOBBER of a pseudo-register that is dead serves no purpose. That
5909 is not necessarily true for hard registers. */
5910 else if (code == CLOBBER && GET_CODE (XEXP (x, 0)) == REG
5911 && REGNO (XEXP (x, 0)) >= FIRST_PSEUDO_REGISTER
5912 && ! REGNO_REG_SET_P (pbi->reg_live, REGNO (XEXP (x, 0))))
5915 /* We do not check other CLOBBER or USE here. An insn consisting of just
5916 a CLOBBER or just a USE should not be deleted. */
5920 /* If INSN is the last insn in a libcall, and assuming INSN is dead,
5921 return 1 if the entire library call is dead.
5922 This is true if INSN copies a register (hard or pseudo)
5923 and if the hard return reg of the call insn is dead.
5924 (The caller should have tested the destination of the SET inside
5925 INSN already for death.)
5927 If this insn doesn't just copy a register, then we don't
5928 have an ordinary libcall. In that case, cse could not have
5929 managed to substitute the source for the dest later on,
5930 so we can assume the libcall is dead.
5932 PBI is the block info giving pseudoregs live before this insn.
5933 NOTE is the REG_RETVAL note of the insn. */
5936 libcall_dead_p (pbi, note, insn)
5937 struct propagate_block_info *pbi;
5941 rtx x = single_set (insn);
5945 register rtx r = SET_SRC (x);
5947 if (GET_CODE (r) == REG)
5949 rtx call = XEXP (note, 0);
5953 /* Find the call insn. */
5954 while (call != insn && GET_CODE (call) != CALL_INSN)
5955 call = NEXT_INSN (call);
5957 /* If there is none, do nothing special,
5958 since ordinary death handling can understand these insns. */
5962 /* See if the hard reg holding the value is dead.
5963 If this is a PARALLEL, find the call within it. */
5964 call_pat = PATTERN (call);
5965 if (GET_CODE (call_pat) == PARALLEL)
5967 for (i = XVECLEN (call_pat, 0) - 1; i >= 0; i--)
5968 if (GET_CODE (XVECEXP (call_pat, 0, i)) == SET
5969 && GET_CODE (SET_SRC (XVECEXP (call_pat, 0, i))) == CALL)
5972 /* This may be a library call that is returning a value
5973 via invisible pointer. Do nothing special, since
5974 ordinary death handling can understand these insns. */
5978 call_pat = XVECEXP (call_pat, 0, i);
5981 return insn_dead_p (pbi, call_pat, 1, REG_NOTES (call));
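/* The shape of the sequence being tested, sketched in RTL (the UIDs
   and register numbers are illustrative):

     (insn 10 ... (set (reg:SI 58) ...) ... (REG_LIBCALL 14))
     (call_insn 12 ... (set (reg:SI 0) (call ...)) ...)
     (insn 14 ... (set (reg:SI 59) (reg:SI 0)) ... (REG_RETVAL 10))

   If reg 59 was already found dead and the hard return reg 0 is dead
   after insn 14, the whole sequence from insn 10 through 14 can go. */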
5987 /* Return 1 if register REGNO was used before it was set, i.e. if it is
5988 live at function entry. Don't count global register variables, variables
5989 in registers that can be used for function arg passing, or variables in
5990 fixed hard registers. */
5993 regno_uninitialized (regno)
5996 if (n_basic_blocks == 0
5997 || (regno < FIRST_PSEUDO_REGISTER
5998 && (global_regs[regno]
5999 || fixed_regs[regno]
6000 || FUNCTION_ARG_REGNO_P (regno))))
6003 return REGNO_REG_SET_P (BASIC_BLOCK (0)->global_live_at_start, regno);
6006 /* 1 if register REGNO was alive at a place where `setjmp' was called
6007 and was set more than once or is an argument.
6008 Such regs may be clobbered by `longjmp'. */
6011 regno_clobbered_at_setjmp (regno)
6014 if (n_basic_blocks == 0)
6017 return ((REG_N_SETS (regno) > 1
6018 || REGNO_REG_SET_P (BASIC_BLOCK (0)->global_live_at_start, regno))
6019 && REGNO_REG_SET_P (regs_live_at_setjmp, regno));
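/* The situation regno_clobbered_at_setjmp describes, in source terms
   (a sketch; BUF and G are placeholders):

     int x = 1;
     if (setjmp (buf) == 0)
       {
         x = 2;     -- set a second time, and live across the setjmp
         g ();      -- may longjmp (buf, 1)
       }
     return x;

   If x is allocated to a call-used hard register, the longjmp may
   return with that register clobbered by unrelated code. */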
6022 /* Add MEM to PBI->MEM_SET_LIST. MEM should be canonical. Respect the
6023 maximal list size; look for overlaps in mode and select the largest. */
6025 add_to_mem_set_list (pbi, mem)
6026 struct propagate_block_info *pbi;
6031 /* We don't know how large a BLKmode store is, so we must not
6032 take such stores into consideration. */
6033 if (GET_MODE (mem) == BLKmode)
6036 for (i = pbi->mem_set_list; i ; i = XEXP (i, 1))
6038 rtx e = XEXP (i, 0);
6039 if (rtx_equal_p (XEXP (mem, 0), XEXP (e, 0)))
6041 if (GET_MODE_SIZE (GET_MODE (mem)) > GET_MODE_SIZE (GET_MODE (e)))
6044 /* If we must store a copy of the mem, we can just modify
6045 the mode of the stored copy. */
6046 if (pbi->flags & PROP_AUTOINC)
6047 PUT_MODE (e, GET_MODE (mem));
6056 if (pbi->mem_set_list_len < MAX_MEM_SET_LIST_LEN)
6059 /* Store a copy of mem, otherwise the address may be
6060 scrogged by find_auto_inc. */
6061 if (pbi->flags & PROP_AUTOINC)
6062 mem = shallow_copy_rtx (mem);
6064 pbi->mem_set_list = alloc_EXPR_LIST (0, mem, pbi->mem_set_list);
6065 pbi->mem_set_list_len++;
6069 /* INSN references memory, possibly using autoincrement addressing modes.
6070 Find any entries on the mem_set_list that need to be invalidated due
6071 to an address change. */
6074 invalidate_mems_from_autoinc (pbi, insn)
6075 struct propagate_block_info *pbi;
6078 rtx note;
6079 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
6080 if (REG_NOTE_KIND (note) == REG_INC)
6081 invalidate_mems_from_set (pbi, XEXP (note, 0));
6084 /* EXP is a REG. Remove any dependent entries from pbi->mem_set_list. */
6087 invalidate_mems_from_set (pbi, exp)
6088 struct propagate_block_info *pbi;
6091 rtx temp = pbi->mem_set_list;
6092 rtx prev = NULL_RTX;
6097 next = XEXP (temp, 1);
6098 if (reg_overlap_mentioned_p (exp, XEXP (temp, 0)))
6100 /* Splice this entry out of the list. */
6102 XEXP (prev, 1) = next;
6104 pbi->mem_set_list = next;
6105 free_EXPR_LIST_node (temp);
6106 pbi->mem_set_list_len--;
6114 /* Process the registers that are set within X. Since the scan runs
6115 backward, the values they held before this insn are dead; the sets are recorded through PBI.
6117 If INSN is nonzero, it is the insn being processed.
6119 FLAGS is the set of operations to perform. */
6122 mark_set_regs (pbi, x, insn)
6123 struct propagate_block_info *pbi;
6126 rtx cond = NULL_RTX;
6131 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
6133 if (REG_NOTE_KIND (link) == REG_INC)
6134 mark_set_1 (pbi, SET, XEXP (link, 0),
6135 (GET_CODE (x) == COND_EXEC
6136 ? COND_EXEC_TEST (x) : NULL_RTX),
6140 switch (code = GET_CODE (x))
6144 mark_set_1 (pbi, code, SET_DEST (x), cond, insn, pbi->flags);
6148 cond = COND_EXEC_TEST (x);
6149 x = COND_EXEC_CODE (x);
6155 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
6157 rtx sub = XVECEXP (x, 0, i);
6158 switch (code = GET_CODE (sub))
6161 if (cond != NULL_RTX)
6164 cond = COND_EXEC_TEST (sub);
6165 sub = COND_EXEC_CODE (sub);
6166 if (GET_CODE (sub) != SET && GET_CODE (sub) != CLOBBER)
6172 mark_set_1 (pbi, code, SET_DEST (sub), cond, insn, pbi->flags);
6187 /* Process a single set, which appears in INSN. REG (which may not
6188 actually be a REG, it may also be a SUBREG, PARALLEL, etc.) is
6189 being set using the CODE (which may be SET, CLOBBER, or COND_EXEC).
6190 If the set is conditional (because it appears in a COND_EXEC), COND
6191 will be the condition. */
6194 mark_set_1 (pbi, code, reg, cond, insn, flags)
6195 struct propagate_block_info *pbi;
6197 rtx reg, cond, insn;
6200 int regno_first = -1, regno_last = -1;
6201 unsigned long not_dead = 0;
6204 /* Modifying just one hardware register of a multi-reg value or just a
6205 byte field of a register does not mean the value from before this insn
6206 is now dead. Of course, if it was dead after the insn, it's unused now. */
6208 switch (GET_CODE (reg))
6211 /* Some targets place small structures in registers for return values of
6212 functions. We have to detect this case specially here to get correct
6213 flow information. */
6214 for (i = XVECLEN (reg, 0) - 1; i >= 0; i--)
6215 if (XEXP (XVECEXP (reg, 0, i), 0) != 0)
6216 mark_set_1 (pbi, code, XEXP (XVECEXP (reg, 0, i), 0), cond, insn,
6222 case STRICT_LOW_PART:
6223 /* ??? Assumes STRICT_LOW_PART not used on multi-word registers. */
6224 do
6225 reg = XEXP (reg, 0);
6226 while (GET_CODE (reg) == SUBREG
6227 || GET_CODE (reg) == ZERO_EXTRACT
6228 || GET_CODE (reg) == SIGN_EXTRACT
6229 || GET_CODE (reg) == STRICT_LOW_PART);
6230 if (GET_CODE (reg) == MEM)
6232 not_dead = (unsigned long) REGNO_REG_SET_P (pbi->reg_live, REGNO (reg));
6236 regno_last = regno_first = REGNO (reg);
6237 if (regno_first < FIRST_PSEUDO_REGISTER)
6238 regno_last += HARD_REGNO_NREGS (regno_first, GET_MODE (reg)) - 1;
6242 if (GET_CODE (SUBREG_REG (reg)) == REG)
6244 enum machine_mode outer_mode = GET_MODE (reg);
6245 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (reg));
6247 /* Identify the range of registers affected. This is moderately
6248 tricky for hard registers. See alter_subreg. */
6250 regno_last = regno_first = REGNO (SUBREG_REG (reg));
6251 if (regno_first < FIRST_PSEUDO_REGISTER)
6253 regno_first += subreg_regno_offset (regno_first, inner_mode,
6256 regno_last = (regno_first
6257 + HARD_REGNO_NREGS (regno_first, outer_mode) - 1);
6259 /* Since we've just adjusted the register number ranges, make
6260 sure REG matches. Otherwise some_was_live will be clear
6261 when it shouldn't have been, and we'll create incorrect
6262 REG_UNUSED notes. */
6263 reg = gen_rtx_REG (outer_mode, regno_first);
6267 /* If the number of words in the subreg is less than the number
6268 of words in the full register, we have a well-defined partial
6269 set. Otherwise the high bits are undefined.
6271 This is only really applicable to pseudos, since we just took
6272 care of multi-word hard registers. */
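/* For instance, on a 32-bit target a store to (subreg:SI (reg:DI 100) 0)
   covers only one of the pseudo's two words, so the old value is not
   wholly dead; a store to (subreg:DI (reg:SI 100) 0) covers at least
   as many words as the inner register and is not a partial set. The
   register number is illustrative. */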
6273 if (((GET_MODE_SIZE (outer_mode)
6274 + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
6275 < ((GET_MODE_SIZE (inner_mode)
6276 + UNITS_PER_WORD - 1) / UNITS_PER_WORD))
6277 not_dead = (unsigned long) REGNO_REG_SET_P (pbi->reg_live,
6280 reg = SUBREG_REG (reg);
6284 reg = SUBREG_REG (reg);
6291 /* If this set is a MEM, then it kills any aliased writes.
6292 If this set is a REG, then it kills any MEMs which use the reg. */
6293 if (optimize && (flags & PROP_SCAN_DEAD_CODE))
6295 if (GET_CODE (reg) == REG)
6296 invalidate_mems_from_set (pbi, reg);
6298 /* If the memory reference had embedded side effects (autoincrement
6299 address modes), then we may need to kill some entries on the
6300 memory set list. */
6301 if (insn && GET_CODE (reg) == MEM)
6302 invalidate_mems_from_autoinc (pbi, insn);
6304 if (GET_CODE (reg) == MEM && ! side_effects_p (reg)
6305 /* ??? With more effort we could track conditional memory life. */
6307 /* There are no REG_INC notes for SP, so we can't assume we'll see
6308 everything that invalidates it. To be safe, don't eliminate any
6309 stores through SP; none of them should be redundant anyway. */
6310 && ! reg_mentioned_p (stack_pointer_rtx, reg))
6311 add_to_mem_set_list (pbi, canon_rtx (reg));
6314 if (GET_CODE (reg) == REG
6315 && ! (regno_first == FRAME_POINTER_REGNUM
6316 && (! reload_completed || frame_pointer_needed))
6317 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
6318 && ! (regno_first == HARD_FRAME_POINTER_REGNUM
6319 && (! reload_completed || frame_pointer_needed))
6321 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
6322 && ! (regno_first == ARG_POINTER_REGNUM && fixed_regs[regno_first])
6326 int some_was_live = 0, some_was_dead = 0;
6328 for (i = regno_first; i <= regno_last; ++i)
6330 int needed_regno = REGNO_REG_SET_P (pbi->reg_live, i);
6333 /* Order of the set operation matters here since both
6334 sets may be the same. */
6335 CLEAR_REGNO_REG_SET (pbi->cond_local_set, i);
6336 if (cond != NULL_RTX
6337 && ! REGNO_REG_SET_P (pbi->local_set, i))
6338 SET_REGNO_REG_SET (pbi->cond_local_set, i);
6340 SET_REGNO_REG_SET (pbi->local_set, i);
6342 if (code != CLOBBER)
6343 SET_REGNO_REG_SET (pbi->new_set, i);
6345 some_was_live |= needed_regno;
6346 some_was_dead |= ! needed_regno;
6349 #ifdef HAVE_conditional_execution
6350 /* Consider conditional death in deciding that the register needs
6351 a death note. */
6352 if (some_was_live && ! not_dead
6353 /* The stack pointer is never dead. Well, not strictly true,
6354 but it's very difficult to tell from here. Hopefully
6355 combine_stack_adjustments will fix up the most egregious errors. */
6357 && regno_first != STACK_POINTER_REGNUM)
6359 for (i = regno_first; i <= regno_last; ++i)
6360 if (! mark_regno_cond_dead (pbi, i, cond))
6361 not_dead |= ((unsigned long) 1) << (i - regno_first);
6365 /* Additional data to record if this is the final pass. */
6366 if (flags & (PROP_LOG_LINKS | PROP_REG_INFO
6367 | PROP_DEATH_NOTES | PROP_AUTOINC))
6370 register int blocknum = pbi->bb->index;
6373 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
6375 y = pbi->reg_next_use[regno_first];
6377 /* The next use is no longer next, since a store intervenes. */
6378 for (i = regno_first; i <= regno_last; ++i)
6379 pbi->reg_next_use[i] = 0;
6382 if (flags & PROP_REG_INFO)
6384 for (i = regno_first; i <= regno_last; ++i)
6386 /* Count (weighted) references, stores, etc. This counts a
6387 register twice if it is modified, but that is correct. */
6388 REG_N_SETS (i) += 1;
6389 REG_N_REFS (i) += 1;
6390 REG_FREQ (i) += REG_FREQ_FROM_BB (pbi->bb);
6392 /* The insns where a reg is live are normally counted
6393 elsewhere, but we want the count to include the insn
6394 where the reg is set, and the normal counting mechanism
6395 would not count it. */
6396 REG_LIVE_LENGTH (i) += 1;
6399 /* If this is a hard reg, record that this function uses the reg. */
6400 if (regno_first < FIRST_PSEUDO_REGISTER)
6402 for (i = regno_first; i <= regno_last; i++)
6403 regs_ever_live[i] = 1;
6407 /* Keep track of which basic blocks each reg appears in. */
6408 if (REG_BASIC_BLOCK (regno_first) == REG_BLOCK_UNKNOWN)
6409 REG_BASIC_BLOCK (regno_first) = blocknum;
6410 else if (REG_BASIC_BLOCK (regno_first) != blocknum)
6411 REG_BASIC_BLOCK (regno_first) = REG_BLOCK_GLOBAL;
6415 if (! some_was_dead)
6417 if (flags & PROP_LOG_LINKS)
6419 /* Make a logical link from the next following insn
6420 that uses this register, back to this insn.
6421 The following insns have already been processed.
6423 We don't build a LOG_LINK for hard registers contained
6424 in ASM_OPERANDs. If these registers get replaced,
6425 we might wind up changing the semantics of the insn,
6426 even if reload can make what appear to be valid
6427 assignments later. */
6428 if (y && (BLOCK_NUM (y) == blocknum)
6429 && (regno_first >= FIRST_PSEUDO_REGISTER
6430 || asm_noperands (PATTERN (y)) < 0))
6431 LOG_LINKS (y) = alloc_INSN_LIST (insn, LOG_LINKS (y));
6436 else if (! some_was_live)
6438 if (flags & PROP_REG_INFO)
6439 REG_N_DEATHS (regno_first) += 1;
6441 if (flags & PROP_DEATH_NOTES)
6443 /* Note that dead stores have already been deleted
6444 when possible. If we get here, we have found a
6445 dead store that cannot be eliminated (because the
6446 same insn does something useful). Indicate this
6447 by marking the reg being set as dying here. */
6449 = alloc_EXPR_LIST (REG_UNUSED, reg, REG_NOTES (insn));
6454 if (flags & PROP_DEATH_NOTES)
6456 /* This is a case where we have a multi-word hard register
6457 and some, but not all, of the words of the register are
6458 needed in subsequent insns. Write REG_UNUSED notes
6459 for those parts that were not needed. This case should be rare. */
6462 for (i = regno_first; i <= regno_last; ++i)
6463 if (! REGNO_REG_SET_P (pbi->reg_live, i))
6465 = alloc_EXPR_LIST (REG_UNUSED,
6466 gen_rtx_REG (reg_raw_mode[i], i),
6472 /* Mark the register as being dead. */
6474 /* The stack pointer is never dead. Well, not strictly true,
6475 but it's very difficult to tell from here. Hopefully
6476 combine_stack_adjustments will fix up the most egregious errors. */
6478 && regno_first != STACK_POINTER_REGNUM)
6480 for (i = regno_first; i <= regno_last; ++i)
6481 if (!(not_dead & (((unsigned long) 1) << (i - regno_first))))
6482 CLEAR_REGNO_REG_SET (pbi->reg_live, i);
6485 else if (GET_CODE (reg) == REG)
6487 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
6488 pbi->reg_next_use[regno_first] = 0;
6491 /* If this is the last pass and this is a SCRATCH, show it will be dying
6492 here and count it. */
6493 else if (GET_CODE (reg) == SCRATCH)
6495 if (flags & PROP_DEATH_NOTES)
6497 = alloc_EXPR_LIST (REG_UNUSED, reg, REG_NOTES (insn));
6501 #ifdef HAVE_conditional_execution
6502 /* Mark REGNO conditionally dead.
6503 Return true if the register is now unconditionally dead. */
6506 mark_regno_cond_dead (pbi, regno, cond)
6507 struct propagate_block_info *pbi;
6511 /* If this is a store to a predicate register, the value of the
6512 predicate is changing; we don't know that the predicate as seen
6513 before is the same as that seen after. Flush all dependent
6514 conditions from reg_cond_dead. This will make all such
6515 conditionally live registers unconditionally live. */
6516 if (REGNO_REG_SET_P (pbi->reg_cond_reg, regno))
6517 flush_reg_cond_reg (pbi, regno);
6519 /* If this is an unconditional store, remove any conditional
6520 life that may have existed. */
6521 if (cond == NULL_RTX)
6522 splay_tree_remove (pbi->reg_cond_dead, regno);
6525 splay_tree_node node;
6526 struct reg_cond_life_info *rcli;
6529 /* Otherwise this is a conditional set. Record that fact.
6530 It may have been conditionally used, or there may be a
6531 subsequent set with a complementary condition. */
6533 node = splay_tree_lookup (pbi->reg_cond_dead, regno);
6536 /* The register was unconditionally live previously.
6537 Record the current condition as the condition under
6538 which it is dead. */
6539 rcli = (struct reg_cond_life_info *) xmalloc (sizeof (*rcli));
6540 rcli->condition = cond;
6541 rcli->stores = cond;
6542 rcli->orig_condition = const0_rtx;
6543 splay_tree_insert (pbi->reg_cond_dead, regno,
6544 (splay_tree_value) rcli);
6546 SET_REGNO_REG_SET (pbi->reg_cond_reg, REGNO (XEXP (cond, 0)));
6548 /* Not unconditionally dead. */
6553 /* The register was conditionally live previously.
6554 Add the new condition to the old. */
6555 rcli = (struct reg_cond_life_info *) node->value;
6556 ncond = rcli->condition;
6557 ncond = ior_reg_cond (ncond, cond, 1);
6558 if (rcli->stores == const0_rtx)
6559 rcli->stores = cond;
6560 else if (rcli->stores != const1_rtx)
6561 rcli->stores = ior_reg_cond (rcli->stores, cond, 1);
6563 /* If the register is now unconditionally dead, remove the entry
6564 in the splay_tree. A register is unconditionally dead if the
6565 dead condition ncond is true. A register is also unconditionally
6566 dead if the sum of all conditional stores is an unconditional
6567 store (stores is true), and the dead condition is identical to
6568 the original dead condition initialized at the end of
6569 the block. This is a pointer compare, not an rtx_equal_p compare. */
6571 if (ncond == const1_rtx
6572 || (ncond == rcli->orig_condition && rcli->stores == const1_rtx))
6573 splay_tree_remove (pbi->reg_cond_dead, regno);
6576 rcli->condition = ncond;
6578 SET_REGNO_REG_SET (pbi->reg_cond_reg, REGNO (XEXP (cond, 0)));
6580 /* Not unconditionally dead. */
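/* Editorial sketch (not part of flow.c): the bookkeeping above is easier
   to follow on a toy lattice where 0 plays const0_rtx ("never"), 1 plays
   const1_rtx ("always"), and 2 stands for any real predicate.  All names
   below are hypothetical.  */

struct toy_cond_life { int condition; int stores; int orig_condition; };

static int
toy_ior3 (int a, int b)
{
  if (a == 1 || b == 1)
    return 1;			/* anything OR "always" = "always" */
  if (a == 0)
    return b;			/* "never" is the identity */
  if (b == 0)
    return a;
  return 2;			/* two real predicates stay symbolic */
}

/* Record a store under COND; return nonzero when the register has
   become unconditionally dead, mirroring the two tests above.  */
static int
toy_record_cond_store (struct toy_cond_life *cl, int cond)
{
  cl->condition = toy_ior3 (cl->condition, cond);
  if (cl->stores == 0)
    cl->stores = cond;
  else if (cl->stores != 1)
    cl->stores = toy_ior3 (cl->stores, cond);
  return (cl->condition == 1
	  || (cl->condition == cl->orig_condition && cl->stores == 1));
}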
6589 /* Called from splay_tree_delete for pbi->reg_cond_dead. */
6592 free_reg_cond_life_info (value)
6593 splay_tree_value value;
6595 struct reg_cond_life_info *rcli = (struct reg_cond_life_info *) value;
6599 /* Helper function for flush_reg_cond_reg. */
6602 flush_reg_cond_reg_1 (node, data)
6603 splay_tree_node node;
6606 struct reg_cond_life_info *rcli;
6607 int *xdata = (int *) data;
6608 unsigned int regno = xdata[0];
6610 /* Don't need to search if last flushed value was farther on in
6611 the in-order traversal. */
6612 if (xdata[1] >= (int) node->key)
6615 /* Splice out portions of the expression that refer to regno. */
6616 rcli = (struct reg_cond_life_info *) node->value;
6617 rcli->condition = elim_reg_cond (rcli->condition, regno);
6618 if (rcli->stores != const0_rtx && rcli->stores != const1_rtx)
6619 rcli->stores = elim_reg_cond (rcli->stores, regno);
6621 /* If the entire condition is now false, signal the node to be removed. */
6622 if (rcli->condition == const0_rtx)
6624 xdata[1] = node->key;
6627 else if (rcli->condition == const1_rtx)
6633 /* Flush all (sub) expressions referring to REGNO from REG_COND_LIVE. */
6636 flush_reg_cond_reg (pbi, regno)
6637 struct propagate_block_info *pbi;
6644 while (splay_tree_foreach (pbi->reg_cond_dead,
6645 flush_reg_cond_reg_1, pair) == -1)
6646 splay_tree_remove (pbi->reg_cond_dead, pair[1]);
6648 CLEAR_REGNO_REG_SET (pbi->reg_cond_reg, regno);
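/* Editorial sketch (not part of flow.c): flush_reg_cond_reg cannot delete
   nodes from inside splay_tree_foreach, so the callback reports one victim
   per pass (returning -1 and recording the key) and the caller removes it
   and rescans.  The same restart pattern over a hypothetical array-backed
   map: */

struct toy_regmap { int keys[16]; int vals[16]; int n; };

static int
toy_find_victim (const struct toy_regmap *m, int bad_val)
{
  int i;
  for (i = 0; i < m->n; i++)
    if (m->vals[i] == bad_val)
      return i;			/* Report at most one victim per scan.  */
  return -1;
}

static void
toy_flush (struct toy_regmap *m, int bad_val)
{
  int i;
  /* Remove outside the scan, then rescan from the top, just as
     flush_reg_cond_reg repeats the foreach after each removal.  */
  while ((i = toy_find_victim (m, bad_val)) >= 0)
    {
      m->keys[i] = m->keys[m->n - 1];
      m->vals[i] = m->vals[m->n - 1];
      m->n--;
    }
}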
6651 /* Logical arithmetic on predicate conditions. IOR, NOT and AND.
6652 For ior/and, the ADD flag determines whether we want to add the new
6653 condition X to the old one unconditionally. If it is zero, we will
6654 only return a new expression if X allows us to simplify part of
6655 OLD, otherwise we return OLD unchanged to the caller.
6656 If ADD is nonzero, we will return a new condition in all cases. The
6657 toplevel caller of one of these functions should always pass 1 for ADD. */
6661 ior_reg_cond (old, x, add)
6667 if (GET_RTX_CLASS (GET_CODE (old)) == '<')
6669 if (GET_RTX_CLASS (GET_CODE (x)) == '<'
6670 && REVERSE_CONDEXEC_PREDICATES_P (GET_CODE (x), GET_CODE (old))
6671 && REGNO (XEXP (x, 0)) == REGNO (XEXP (old, 0)))
6673 if (GET_CODE (x) == GET_CODE (old)
6674 && REGNO (XEXP (x, 0)) == REGNO (XEXP (old, 0)))
6678 return gen_rtx_IOR (0, old, x);
6681 switch (GET_CODE (old))
6684 op0 = ior_reg_cond (XEXP (old, 0), x, 0);
6685 op1 = ior_reg_cond (XEXP (old, 1), x, 0);
6686 if (op0 != XEXP (old, 0) || op1 != XEXP (old, 1))
6688 if (op0 == const0_rtx)
6690 if (op1 == const0_rtx)
6692 if (op0 == const1_rtx || op1 == const1_rtx)
6694 if (op0 == XEXP (old, 0))
6695 op0 = gen_rtx_IOR (0, op0, x);
6697 op1 = gen_rtx_IOR (0, op1, x);
6698 return gen_rtx_IOR (0, op0, op1);
6702 return gen_rtx_IOR (0, old, x);
6705 op0 = ior_reg_cond (XEXP (old, 0), x, 0);
6706 op1 = ior_reg_cond (XEXP (old, 1), x, 0);
6707 if (op0 != XEXP (old, 0) || op1 != XEXP (old, 1))
6709 if (op0 == const1_rtx)
6711 if (op1 == const1_rtx)
6713 if (op0 == const0_rtx || op1 == const0_rtx)
6715 if (op0 == XEXP (old, 0))
6716 op0 = gen_rtx_IOR (0, op0, x);
6718 op1 = gen_rtx_IOR (0, op1, x);
6719 return gen_rtx_AND (0, op0, op1);
6723 return gen_rtx_IOR (0, old, x);
6726 op0 = and_reg_cond (XEXP (old, 0), not_reg_cond (x), 0);
6727 if (op0 != XEXP (old, 0))
6728 return not_reg_cond (op0);
6731 return gen_rtx_IOR (0, old, x);
6742 enum rtx_code x_code;
6744 if (x == const0_rtx)
6746 else if (x == const1_rtx)
6748 x_code = GET_CODE (x);
6751 if (GET_RTX_CLASS (x_code) == '<'
6752 && GET_CODE (XEXP (x, 0)) == REG)
6754 if (XEXP (x, 1) != const0_rtx)
6757 return gen_rtx_fmt_ee (reverse_condition (x_code),
6758 VOIDmode, XEXP (x, 0), const0_rtx);
6760 return gen_rtx_NOT (0, x);
6764 and_reg_cond (old, x, add)
6770 if (GET_RTX_CLASS (GET_CODE (old)) == '<')
6772 if (GET_RTX_CLASS (GET_CODE (x)) == '<'
6773 && GET_CODE (x) == reverse_condition (GET_CODE (old))
6774 && REGNO (XEXP (x, 0)) == REGNO (XEXP (old, 0)))
6776 if (GET_CODE (x) == GET_CODE (old)
6777 && REGNO (XEXP (x, 0)) == REGNO (XEXP (old, 0)))
6781 return gen_rtx_AND (0, old, x);
6784 switch (GET_CODE (old))
6787 op0 = and_reg_cond (XEXP (old, 0), x, 0);
6788 op1 = and_reg_cond (XEXP (old, 1), x, 0);
6789 if (op0 != XEXP (old, 0) || op1 != XEXP (old, 1))
6791 if (op0 == const0_rtx)
6793 if (op1 == const0_rtx)
6795 if (op0 == const1_rtx || op1 == const1_rtx)
6797 if (op0 == XEXP (old, 0))
6798 op0 = gen_rtx_AND (0, op0, x);
6800 op1 = gen_rtx_AND (0, op1, x);
6801 return gen_rtx_IOR (0, op0, op1);
6805 return gen_rtx_AND (0, old, x);
6808 op0 = and_reg_cond (XEXP (old, 0), x, 0);
6809 op1 = and_reg_cond (XEXP (old, 1), x, 0);
6810 if (op0 != XEXP (old, 0) || op1 != XEXP (old, 1))
6812 if (op0 == const1_rtx)
6814 if (op1 == const1_rtx)
6816 if (op0 == const0_rtx || op1 == const0_rtx)
6818 if (op0 == XEXP (old, 0))
6819 op0 = gen_rtx_AND (0, op0, x);
6821 op1 = gen_rtx_AND (0, op1, x);
6822 return gen_rtx_AND (0, op0, op1);
6827 /* If X is identical to one of the existing terms of the AND,
6828 then just return what we already have. */
6829 /* ??? There really should be some sort of recursive check here in
6830 case there are nested ANDs. */
6831 if ((GET_CODE (XEXP (old, 0)) == GET_CODE (x)
6832 && REGNO (XEXP (XEXP (old, 0), 0)) == REGNO (XEXP (x, 0)))
6833 || (GET_CODE (XEXP (old, 1)) == GET_CODE (x)
6834 && REGNO (XEXP (XEXP (old, 1), 0)) == REGNO (XEXP (x, 0))))
6837 return gen_rtx_AND (0, old, x);
6840 op0 = ior_reg_cond (XEXP (old, 0), not_reg_cond (x), 0);
6841 if (op0 != XEXP (old, 0))
6842 return not_reg_cond (op0);
6845 return gen_rtx_AND (0, old, x);
6852 /* Given a condition X, remove references to reg REGNO and return the
6853 new condition. The removal will be done so that all conditions
6854 involving REGNO are considered to evaluate to false. This function
6855 is used when the value of REGNO changes. */
6858 elim_reg_cond (x, regno)
6864 if (GET_RTX_CLASS (GET_CODE (x)) == '<')
6866 if (REGNO (XEXP (x, 0)) == regno)
6871 switch (GET_CODE (x))
6874 op0 = elim_reg_cond (XEXP (x, 0), regno);
6875 op1 = elim_reg_cond (XEXP (x, 1), regno);
6876 if (op0 == const0_rtx || op1 == const0_rtx)
6878 if (op0 == const1_rtx)
6880 if (op1 == const1_rtx)
6882 if (op0 == XEXP (x, 0) && op1 == XEXP (x, 1))
6884 return gen_rtx_AND (0, op0, op1);
6887 op0 = elim_reg_cond (XEXP (x, 0), regno);
6888 op1 = elim_reg_cond (XEXP (x, 1), regno);
6889 if (op0 == const1_rtx || op1 == const1_rtx)
6891 if (op0 == const0_rtx)
6893 if (op1 == const0_rtx)
6895 if (op0 == XEXP (x, 0) && op1 == XEXP (x, 1))
6897 return gen_rtx_IOR (0, op0, op1);
6900 op0 = elim_reg_cond (XEXP (x, 0), regno);
6901 if (op0 == const0_rtx)
6903 if (op0 == const1_rtx)
6905 if (op0 != XEXP (x, 0))
6906 return not_reg_cond (op0);
6913 #endif /* HAVE_conditional_execution */
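/* Editorial sketch (not part of flow.c): the ADD-flag convention shared by
   ior_reg_cond and and_reg_cond, on the same toy encoding as above (0 =
   "false", 1 = "true", 2 = an opaque predicate).  toy_ior_with_add is
   hypothetical.  */

static int
toy_ior_with_add (int old_cond, int x, int add)
{
  /* Constant folding is a genuine simplification, so it is performed
     whether or not ADD is set.  */
  if (old_cond == 1 || x == 1)
    return 1;
  if (old_cond == 0)
    return x;
  if (x == 0)
    return old_cond;

  /* No simplification possible.  With ADD clear, hand back OLD unchanged
     so a recursive caller can detect that nothing happened; with ADD set,
     build the combined condition (just a marker here, a fresh IOR rtx in
     the real code).  */
  return add ? 2 : old_cond;
}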
6917 /* Try to substitute the auto-inc expression INC as the address inside
6918 MEM which occurs in INSN. Currently, the address of MEM is an expression
6919 involving INCR_REG, and INCR is the next use of INCR_REG; it is an insn
6920 that has a single set whose source is a PLUS of INCR_REG and something else. */
6924 attempt_auto_inc (pbi, inc, insn, mem, incr, incr_reg)
6925 struct propagate_block_info *pbi;
6926 rtx inc, insn, mem, incr, incr_reg;
6928 int regno = REGNO (incr_reg);
6929 rtx set = single_set (incr);
6930 rtx q = SET_DEST (set);
6931 rtx y = SET_SRC (set);
6932 int opnum = XEXP (y, 0) == incr_reg ? 0 : 1;
6934 /* Make sure this reg appears only once in this insn. */
6935 if (count_occurrences (PATTERN (insn), incr_reg, 1) != 1)
6938 if (dead_or_set_p (incr, incr_reg)
6939 /* Mustn't autoinc an eliminable register. */
6940 && (regno >= FIRST_PSEUDO_REGISTER
6941 || ! TEST_HARD_REG_BIT (elim_reg_set, regno)))
6943 /* This is the simple case. Try to make the auto-inc. If
6944 we can't, we are done. Otherwise, we will do any
6945 needed updates below. */
6946 if (! validate_change (insn, &XEXP (mem, 0), inc, 0))
6949 else if (GET_CODE (q) == REG
6950 /* PREV_INSN used here to check the semi-open interval [insn, incr). */
6952 && ! reg_used_between_p (q, PREV_INSN (insn), incr)
6953 /* We must also check for sets of q as q may be
6954 a call clobbered hard register and there may
6955 be a call between PREV_INSN (insn) and incr. */
6956 && ! reg_set_between_p (q, PREV_INSN (insn), incr))
6958 /* We have *p followed sometime later by q = p+size.
6959 Both p and q must be live afterward,
6960 and q is not used between INSN and its assignment.
6961 Change it to q = p, ...*q..., q = q+size.
6962 Then fall into the usual case. */
6966 emit_move_insn (q, incr_reg);
6967 insns = get_insns ();
6970 if (basic_block_for_insn)
6971 for (temp = insns; temp; temp = NEXT_INSN (temp))
6972 set_block_for_insn (temp, pbi->bb);
6974 /* If we can't make the auto-inc, or can't make the
6975 replacement into Y, exit. There's no point in making
6976 the change below if we can't do the auto-inc and doing
6977 so is not correct in the pre-inc case. */
6980 validate_change (insn, &XEXP (mem, 0), inc, 1);
6981 validate_change (incr, &XEXP (y, opnum), q, 1);
6982 if (! apply_change_group ())
6985 /* We now know we'll be doing this change, so emit the
6986 new insn(s) and do the updates. */
6987 emit_insns_before (insns, insn);
6989 if (pbi->bb->head == insn)
6990 pbi->bb->head = insns;
6992 /* INCR will become a NOTE and INSN won't contain a
6993 use of INCR_REG. If a use of INCR_REG was just placed in
6994 the insn before INSN, make that the next use.
6995 Otherwise, invalidate it. */
6996 if (GET_CODE (PREV_INSN (insn)) == INSN
6997 && GET_CODE (PATTERN (PREV_INSN (insn))) == SET
6998 && SET_SRC (PATTERN (PREV_INSN (insn))) == incr_reg)
6999 pbi->reg_next_use[regno] = PREV_INSN (insn);
7001 pbi->reg_next_use[regno] = 0;
7006 /* REGNO is now used in INCR which is below INSN, but
7007 it previously wasn't live here. If we don't mark
7008 it as live, we'll put a REG_DEAD note for it
7009 on this insn, which is incorrect. */
7010 SET_REGNO_REG_SET (pbi->reg_live, regno);
7012 /* If there are any calls between INSN and INCR, show
7013 that REGNO now crosses them. */
7014 for (temp = insn; temp != incr; temp = NEXT_INSN (temp))
7015 if (GET_CODE (temp) == CALL_INSN)
7016 REG_N_CALLS_CROSSED (regno)++;
7021 /* If we haven't returned, it means we were able to make the
7022 auto-inc, so update the status. First, record that this insn
7023 has an implicit side effect. */
7025 REG_NOTES (insn) = alloc_EXPR_LIST (REG_INC, incr_reg, REG_NOTES (insn));
7027 /* Modify the old increment-insn to simply copy
7028 the already-incremented value of our register. */
7029 if (! validate_change (incr, &SET_SRC (set), incr_reg, 0))
7032 /* If that makes it a no-op (copying the register into itself) delete
7033 it so it won't appear to be a "use" and a "set" of this register. */
7035 if (REGNO (SET_DEST (set)) == REGNO (incr_reg))
7037 /* If the original source was dead, it's dead now. */
7040 while ((note = find_reg_note (incr, REG_DEAD, NULL_RTX)) != NULL_RTX)
7042 remove_note (incr, note);
7043 if (XEXP (note, 0) != incr_reg)
7044 CLEAR_REGNO_REG_SET (pbi->reg_live, REGNO (XEXP (note, 0)));
7047 PUT_CODE (incr, NOTE);
7048 NOTE_LINE_NUMBER (incr) = NOTE_INSN_DELETED;
7049 NOTE_SOURCE_FILE (incr) = 0;
7052 if (regno >= FIRST_PSEUDO_REGISTER)
7054 /* Count an extra reference to the reg. When a reg is
7055 incremented, spilling it is worse, so we want to make
7056 that less likely. */
7057 REG_FREQ (regno) += REG_FREQ_FROM_BB (pbi->bb);
7059 /* Count the increment as a setting of the register,
7060 even though it isn't a SET in rtl. */
7061 REG_N_SETS (regno)++;
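/* Editorial sketch (not part of flow.c): attempt_auto_inc leans on the
   validate_change (..., in_group = 1) / apply_change_group protocol, in
   which several tentative edits are staged and then either all kept or
   all rolled back.  A stand-alone model of that all-or-nothing protocol,
   with hypothetical names and a fixed-size staging buffer: */

struct toy_change { int *loc; int old_val; };
static struct toy_change toy_pending[8];
static int toy_num_pending;

static void
toy_stage_change (int *loc, int new_val)
{
  /* Remember the old value, then apply tentatively (capacity checks
     omitted for brevity).  */
  toy_pending[toy_num_pending].loc = loc;
  toy_pending[toy_num_pending].old_val = *loc;
  toy_num_pending++;
  *loc = new_val;
}

static int
toy_apply_change_group (int all_valid)
{
  int i;
  if (! all_valid)
    for (i = toy_num_pending - 1; i >= 0; i--)
      *toy_pending[i].loc = toy_pending[i].old_val;	/* Roll back.  */
  toy_num_pending = 0;
  return all_valid;
}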
7065 /* X is a MEM found in INSN. See if we can convert it into an auto-increment address. */
7069 find_auto_inc (pbi, x, insn)
7070 struct propagate_block_info *pbi;
7074 rtx addr = XEXP (x, 0);
7075 HOST_WIDE_INT offset = 0;
7076 rtx set, y, incr, inc_val;
7078 int size = GET_MODE_SIZE (GET_MODE (x));
7080 if (GET_CODE (insn) == JUMP_INSN)
7083 /* Here we detect use of an index register which might be good for
7084 postincrement, postdecrement, preincrement, or predecrement. */
7086 if (GET_CODE (addr) == PLUS && GET_CODE (XEXP (addr, 1)) == CONST_INT)
7087 offset = INTVAL (XEXP (addr, 1)), addr = XEXP (addr, 0);
7089 if (GET_CODE (addr) != REG)
7092 regno = REGNO (addr);
7094 /* Is the next use an increment that might make auto-increment? */
7095 incr = pbi->reg_next_use[regno];
7096 if (incr == 0 || BLOCK_NUM (incr) != BLOCK_NUM (insn))
7098 set = single_set (incr);
7099 if (set == 0 || GET_CODE (set) != SET)
7103 if (GET_CODE (y) != PLUS)
7106 if (REG_P (XEXP (y, 0)) && REGNO (XEXP (y, 0)) == REGNO (addr))
7107 inc_val = XEXP (y, 1);
7108 else if (REG_P (XEXP (y, 1)) && REGNO (XEXP (y, 1)) == REGNO (addr))
7109 inc_val = XEXP (y, 0);
7113 if (GET_CODE (inc_val) == CONST_INT)
7115 if (HAVE_POST_INCREMENT
7116 && (INTVAL (inc_val) == size && offset == 0))
7117 attempt_auto_inc (pbi, gen_rtx_POST_INC (Pmode, addr), insn, x,
7119 else if (HAVE_POST_DECREMENT
7120 && (INTVAL (inc_val) == -size && offset == 0))
7121 attempt_auto_inc (pbi, gen_rtx_POST_DEC (Pmode, addr), insn, x,
7123 else if (HAVE_PRE_INCREMENT
7124 && (INTVAL (inc_val) == size && offset == size))
7125 attempt_auto_inc (pbi, gen_rtx_PRE_INC (Pmode, addr), insn, x,
7127 else if (HAVE_PRE_DECREMENT
7128 && (INTVAL (inc_val) == -size && offset == -size))
7129 attempt_auto_inc (pbi, gen_rtx_PRE_DEC (Pmode, addr), insn, x,
7131 else if (HAVE_POST_MODIFY_DISP && offset == 0)
7132 attempt_auto_inc (pbi, gen_rtx_POST_MODIFY (Pmode, addr,
7133 gen_rtx_PLUS (Pmode,
7136 insn, x, incr, addr);
7138 else if (GET_CODE (inc_val) == REG
7139 && ! reg_set_between_p (inc_val, PREV_INSN (insn),
7143 if (HAVE_POST_MODIFY_REG && offset == 0)
7144 attempt_auto_inc (pbi, gen_rtx_POST_MODIFY (Pmode, addr,
7145 gen_rtx_PLUS (Pmode,
7148 insn, x, incr, addr);
7152 #endif /* AUTO_INC_DEC */
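/* Editorial sketch (not part of flow.c): the addressing-mode selection in
   find_auto_inc boils down to four (increment, offset) patterns.  A toy
   classifier over those patterns; the names are hypothetical and the
   HAVE_* availability tests are omitted.  */

enum toy_auto_inc
{ TOY_NONE, TOY_POST_INC, TOY_POST_DEC, TOY_PRE_INC, TOY_PRE_DEC };

static enum toy_auto_inc
toy_classify_inc (long inc, long offset, long size)
{
  if (inc == size && offset == 0)
    return TOY_POST_INC;	/* MEM (r), then r += size   */
  if (inc == -size && offset == 0)
    return TOY_POST_DEC;	/* MEM (r), then r -= size   */
  if (inc == size && offset == size)
    return TOY_PRE_INC;		/* MEM (r + size), r += size */
  if (inc == -size && offset == -size)
    return TOY_PRE_DEC;		/* MEM (r - size), r -= size */
  return TOY_NONE;
}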
7155 mark_used_reg (pbi, reg, cond, insn)
7156 struct propagate_block_info *pbi;
7158 rtx cond ATTRIBUTE_UNUSED;
7161 unsigned int regno_first, regno_last, i;
7162 int some_was_live, some_was_dead, some_not_set;
7164 regno_last = regno_first = REGNO (reg);
7165 if (regno_first < FIRST_PSEUDO_REGISTER)
7166 regno_last += HARD_REGNO_NREGS (regno_first, GET_MODE (reg)) - 1;
7168 /* Find out if any of this register is live after this instruction. */
7169 some_was_live = some_was_dead = 0;
7170 for (i = regno_first; i <= regno_last; ++i)
7172 int needed_regno = REGNO_REG_SET_P (pbi->reg_live, i);
7173 some_was_live |= needed_regno;
7174 some_was_dead |= ! needed_regno;
7177 /* Find out if any of the register was set this insn. */
7179 for (i = regno_first; i <= regno_last; ++i)
7180 some_not_set |= ! REGNO_REG_SET_P (pbi->new_set, i);
7182 if (pbi->flags & (PROP_LOG_LINKS | PROP_AUTOINC))
7184 /* Record where each reg is used, so when the reg is set we know
7185 the next insn that uses it. */
7186 pbi->reg_next_use[regno_first] = insn;
7189 if (pbi->flags & PROP_REG_INFO)
7191 if (regno_first < FIRST_PSEUDO_REGISTER)
7193 /* If this is a register we are going to try to eliminate,
7194 don't mark it live here. If we are successful in
7195 eliminating it, it need not be live unless it is used for
7196 pseudos, in which case it will have been set live when it
7197 was allocated to the pseudos. If the register will not
7198 be eliminated, reload will set it live at that point.
7200 Otherwise, record that this function uses this register. */
7201 /* ??? The PPC backend tries to "eliminate" on the pic
7202 register to itself. This should be fixed. In the mean
7203 time, hack around it. */
7205 if (! (TEST_HARD_REG_BIT (elim_reg_set, regno_first)
7206 && (regno_first == FRAME_POINTER_REGNUM
7207 || regno_first == ARG_POINTER_REGNUM)))
7208 for (i = regno_first; i <= regno_last; ++i)
7209 regs_ever_live[i] = 1;
7213 /* Keep track of which basic block each reg appears in. */
7215 register int blocknum = pbi->bb->index;
7216 if (REG_BASIC_BLOCK (regno_first) == REG_BLOCK_UNKNOWN)
7217 REG_BASIC_BLOCK (regno_first) = blocknum;
7218 else if (REG_BASIC_BLOCK (regno_first) != blocknum)
7219 REG_BASIC_BLOCK (regno_first) = REG_BLOCK_GLOBAL;
7221 /* Count (weighted) number of uses of each reg. */
7222 REG_FREQ (regno_first) += REG_FREQ_FROM_BB (pbi->bb);
7223 REG_N_REFS (regno_first)++;
7227 /* Record and count the insns in which a reg dies. If it is used in
7228 this insn and was dead below the insn then it dies in this insn.
7229 If it was set in this insn, we do not make a REG_DEAD note;
7230 likewise if we already made such a note. */
7231 if ((pbi->flags & (PROP_DEATH_NOTES | PROP_REG_INFO))
7235 /* Check for the case where the register dying partially
7236 overlaps the register set by this insn. */
7237 if (regno_first != regno_last)
7238 for (i = regno_first; i <= regno_last; ++i)
7239 some_was_live |= REGNO_REG_SET_P (pbi->new_set, i);
7241 /* If none of the words in X is needed, make a REG_DEAD note.
7242 Otherwise, we must make partial REG_DEAD notes. */
7243 if (! some_was_live)
7245 if ((pbi->flags & PROP_DEATH_NOTES)
7246 && ! find_regno_note (insn, REG_DEAD, regno_first))
7248 = alloc_EXPR_LIST (REG_DEAD, reg, REG_NOTES (insn));
7250 if (pbi->flags & PROP_REG_INFO)
7251 REG_N_DEATHS (regno_first)++;
7255 /* Don't make a REG_DEAD note for a part of a register
7256 that is set in the insn. */
7257 for (i = regno_first; i <= regno_last; ++i)
7258 if (! REGNO_REG_SET_P (pbi->reg_live, i)
7259 && ! dead_or_set_regno_p (insn, i))
7261 = alloc_EXPR_LIST (REG_DEAD,
7262 gen_rtx_REG (reg_raw_mode[i], i),
7267 /* Mark the register as being live. */
7268 for (i = regno_first; i <= regno_last; ++i)
7270 SET_REGNO_REG_SET (pbi->reg_live, i);
7272 #ifdef HAVE_conditional_execution
7273 /* If this is a conditional use, record that fact. If it is later
7274 conditionally set, we'll know to kill the register. */
7275 if (cond != NULL_RTX)
7277 splay_tree_node node;
7278 struct reg_cond_life_info *rcli;
7283 node = splay_tree_lookup (pbi->reg_cond_dead, i);
7286 /* The register was unconditionally live previously.
7287 No need to do anything. */
7291 /* The register was conditionally live previously.
7292 Subtract the new life cond from the old death cond. */
7293 rcli = (struct reg_cond_life_info *) node->value;
7294 ncond = rcli->condition;
7295 ncond = and_reg_cond (ncond, not_reg_cond (cond), 1);
7297 /* If the register is now unconditionally live,
7298 remove the entry in the splay_tree. */
7299 if (ncond == const0_rtx)
7300 splay_tree_remove (pbi->reg_cond_dead, i);
7303 rcli->condition = ncond;
7304 SET_REGNO_REG_SET (pbi->reg_cond_reg,
7305 REGNO (XEXP (cond, 0)));
7311 /* The register was not previously live at all. Record
7312 the condition under which it is still dead. */
7313 rcli = (struct reg_cond_life_info *) xmalloc (sizeof (*rcli));
7314 rcli->condition = not_reg_cond (cond);
7315 rcli->stores = const0_rtx;
7316 rcli->orig_condition = const0_rtx;
7317 splay_tree_insert (pbi->reg_cond_dead, i,
7318 (splay_tree_value) rcli);
7320 SET_REGNO_REG_SET (pbi->reg_cond_reg, REGNO (XEXP (cond, 0)));
7323 else if (some_was_live)
7325 /* The register may have been conditionally live previously, but
7326 is now unconditionally live. Remove it from the conditionally
7327 dead list, so that a conditional set won't cause us to think it is dead. */
7329 splay_tree_remove (pbi->reg_cond_dead, i);
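/* Editorial sketch (not part of flow.c): the some_was_live /
   some_was_dead summary that drives the partial REG_DEAD logic above,
   computed here over a plain byte array for a multi-word hard register
   occupying words FIRST through LAST.  Names are hypothetical.  */

struct toy_liveness { int some_was_live; int some_was_dead; };

static struct toy_liveness
toy_scan_words (const unsigned char *live, int first, int last)
{
  struct toy_liveness r = { 0, 0 };
  int i;
  for (i = first; i <= last; i++)
    {
      if (live[i])
	r.some_was_live = 1;	/* At least one word still needed.  */
      else
	r.some_was_dead = 1;	/* At least one word already dead; a
				   partial note may be required.  */
    }
  return r;
}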
7335 /* Scan expression X and store a 1-bit in NEW_LIVE for each reg it uses.
7336 This is done assuming the registers needed from X are those that
7337 have 1-bits in PBI->REG_LIVE.
7339 INSN is the containing instruction. If INSN is dead, this function is not called. */
7343 mark_used_regs (pbi, x, cond, insn)
7344 struct propagate_block_info *pbi;
7347 register RTX_CODE code;
7349 int flags = pbi->flags;
7352 code = GET_CODE (x);
7372 /* If we are clobbering a MEM, mark any registers inside the address as being used. */
7374 if (GET_CODE (XEXP (x, 0)) == MEM)
7375 mark_used_regs (pbi, XEXP (XEXP (x, 0), 0), cond, insn);
7379 /* Don't bother watching stores to mems if this is not the
7380 final pass. We'll not be deleting dead stores this round. */
7381 if (optimize && (flags & PROP_SCAN_DEAD_CODE))
7383 /* Invalidate the data for the last MEM stored, but only if MEM is
7384 something that can be stored into. */
7385 if (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
7386 && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)))
7387 /* Needn't clear the memory set list. */
7391 rtx temp = pbi->mem_set_list;
7392 rtx prev = NULL_RTX;
7397 next = XEXP (temp, 1);
7398 if (anti_dependence (XEXP (temp, 0), x))
7400 /* Splice temp out of the list. */
7402 XEXP (prev, 1) = next;
7404 pbi->mem_set_list = next;
7405 free_EXPR_LIST_node (temp);
7406 pbi->mem_set_list_len--;
7414 /* If the memory reference had embedded side effects (autoincrement
7415 address modes), then we may need to kill some entries on the memory set list. */
7418 invalidate_mems_from_autoinc (pbi, insn);
7422 if (flags & PROP_AUTOINC)
7423 find_auto_inc (pbi, x, insn);
7428 #ifdef CLASS_CANNOT_CHANGE_MODE
7429 if (GET_CODE (SUBREG_REG (x)) == REG
7430 && REGNO (SUBREG_REG (x)) >= FIRST_PSEUDO_REGISTER
7431 && CLASS_CANNOT_CHANGE_MODE_P (GET_MODE (x),
7432 GET_MODE (SUBREG_REG (x))))
7433 REG_CHANGES_MODE (REGNO (SUBREG_REG (x))) = 1;
7436 /* While we're here, optimize this case. */
7438 if (GET_CODE (x) != REG)
7443 /* Seeing a register other than one being set => mark it as needed. */
7444 mark_used_reg (pbi, x, cond, insn);
7449 register rtx testreg = SET_DEST (x);
7452 /* If storing into MEM, don't show it as being used. But do
7453 show the address as being used. */
7454 if (GET_CODE (testreg) == MEM)
7457 if (flags & PROP_AUTOINC)
7458 find_auto_inc (pbi, testreg, insn);
7460 mark_used_regs (pbi, XEXP (testreg, 0), cond, insn);
7461 mark_used_regs (pbi, SET_SRC (x), cond, insn);
7465 /* Storing in STRICT_LOW_PART is like storing in a reg
7466 in that this SET might be dead, so ignore it in TESTREG,
7467 but in some other ways it is like using the reg.
7469 Storing in a SUBREG or a bit field is like storing the entire
7470 register in that if the register's value is not used
7471 then this SET is not needed. */
7472 while (GET_CODE (testreg) == STRICT_LOW_PART
7473 || GET_CODE (testreg) == ZERO_EXTRACT
7474 || GET_CODE (testreg) == SIGN_EXTRACT
7475 || GET_CODE (testreg) == SUBREG)
7477 #ifdef CLASS_CANNOT_CHANGE_MODE
7478 if (GET_CODE (testreg) == SUBREG
7479 && GET_CODE (SUBREG_REG (testreg)) == REG
7480 && REGNO (SUBREG_REG (testreg)) >= FIRST_PSEUDO_REGISTER
7481 && CLASS_CANNOT_CHANGE_MODE_P (GET_MODE (SUBREG_REG (testreg)),
7482 GET_MODE (testreg)))
7483 REG_CHANGES_MODE (REGNO (SUBREG_REG (testreg))) = 1;
7486 /* Modifying a single register in an alternate mode
7487 does not use any of the old value. But these other
7488 ways of storing in a register do use the old value. */
7489 if (GET_CODE (testreg) == SUBREG
7490 && !(REG_SIZE (SUBREG_REG (testreg)) > REG_SIZE (testreg)))
7495 testreg = XEXP (testreg, 0);
7498 /* If this is a store into a register or group of registers,
7499 recursively scan the value being stored. */
7501 if ((GET_CODE (testreg) == PARALLEL
7502 && GET_MODE (testreg) == BLKmode)
7503 || (GET_CODE (testreg) == REG
7504 && (regno = REGNO (testreg),
7505 ! (regno == FRAME_POINTER_REGNUM
7506 && (! reload_completed || frame_pointer_needed)))
7507 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
7508 && ! (regno == HARD_FRAME_POINTER_REGNUM
7509 && (! reload_completed || frame_pointer_needed))
7511 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
7512 && ! (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
7517 mark_used_regs (pbi, SET_DEST (x), cond, insn);
7518 mark_used_regs (pbi, SET_SRC (x), cond, insn);
7525 case UNSPEC_VOLATILE:
7529 /* Traditional and volatile asm instructions must be considered to use
7530 and clobber all hard registers, all pseudo-registers and all of
7531 memory. So must TRAP_IF and UNSPEC_VOLATILE operations.
7533 Consider for instance a volatile asm that changes the fpu rounding
7534 mode. An insn should not be moved across this even if it only uses
7535 pseudo-regs because it might give an incorrectly rounded result.
7537 ?!? Unfortunately, marking all hard registers as live causes massive
7538 problems for the register allocator and marking all pseudos as live
7539 creates mountains of uninitialized variable warnings.
7541 So for now, just clear the memory set list and mark any regs
7542 we can find in ASM_OPERANDS as used. */
7543 if (code != ASM_OPERANDS || MEM_VOLATILE_P (x))
7545 free_EXPR_LIST_list (&pbi->mem_set_list);
7546 pbi->mem_set_list_len = 0;
7549 /* For all ASM_OPERANDS, we must traverse the vector of input operands.
7550 We cannot just fall through here since then we would be confused
7551 by the ASM_INPUT rtx inside ASM_OPERANDS, which, unlike its normal
7552 usage, does not indicate a traditional asm. */
7553 if (code == ASM_OPERANDS)
7557 for (j = 0; j < ASM_OPERANDS_INPUT_LENGTH (x); j++)
7558 mark_used_regs (pbi, ASM_OPERANDS_INPUT (x, j), cond, insn);
7564 if (cond != NULL_RTX)
7567 mark_used_regs (pbi, COND_EXEC_TEST (x), NULL_RTX, insn);
7569 cond = COND_EXEC_TEST (x);
7570 x = COND_EXEC_CODE (x);
7574 /* We _do_not_ want to scan operands of phi nodes. Operands of
7575 a phi function are evaluated only when control reaches this
7576 block along a particular edge. Therefore, regs that appear
7577 as arguments to phi should not be added to the global live at start. */
7585 /* Recursively scan the operands of this expression. */
7588 register const char * const fmt = GET_RTX_FORMAT (code);
7591 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
7595 /* Tail recursive case: save a function call level. */
7601 mark_used_regs (pbi, XEXP (x, i), cond, insn);
7603 else if (fmt[i] == 'E')
7606 for (j = 0; j < XVECLEN (x, i); j++)
7607 mark_used_regs (pbi, XVECEXP (x, i, j), cond, insn);
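/* Editorial sketch (not part of flow.c): the traversal above is driven by
   GET_RTX_FORMAT strings, recursing on 'e' operands, looping over 'E'
   vectors, and converting the final recursion into iteration.  A toy
   analogue over a hypothetical node type with at most four operands: */

struct toy_node { const char *fmt; struct toy_node *op[4]; };

static void
toy_walk (struct toy_node *x, void (*visit) (struct toy_node *))
{
 restart:
  visit (x);
  {
    int i;
    for (i = 0; x->fmt[i]; i++)
      if (x->fmt[i] == 'e' && x->op[i])
	{
	  if (x->fmt[i + 1] == '\0')
	    {
	      /* Tail position: iterate instead of recursing,
		 saving a call frame per spine node.  */
	      x = x->op[i];
	      goto restart;
	    }
	  toy_walk (x->op[i], visit);
	}
  }
}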
7616 try_pre_increment_1 (pbi, insn)
7617 struct propagate_block_info *pbi;
7620 /* Find the next use of this reg. If in same basic block,
7621 make it do pre-increment or pre-decrement if appropriate. */
7622 rtx x = single_set (insn);
7623 HOST_WIDE_INT amount = ((GET_CODE (SET_SRC (x)) == PLUS ? 1 : -1)
7624 * INTVAL (XEXP (SET_SRC (x), 1)));
7625 int regno = REGNO (SET_DEST (x));
7626 rtx y = pbi->reg_next_use[regno];
7628 && SET_DEST (x) != stack_pointer_rtx
7629 && BLOCK_NUM (y) == BLOCK_NUM (insn)
7630 /* Don't do this if the reg dies, or gets set in y; a standard addressing
7631 mode would be better. */
7632 && ! dead_or_set_p (y, SET_DEST (x))
7633 && try_pre_increment (y, SET_DEST (x), amount))
7635 /* We have found a suitable auto-increment and already changed
7636 insn Y to do it. So flush this increment instruction. */
7637 propagate_block_delete_insn (pbi->bb, insn);
7639 /* Count a reference to this reg for the increment insn we are
7640 deleting. When a reg is incremented, spilling it is worse,
7641 so we want to make that less likely. */
7642 if (regno >= FIRST_PSEUDO_REGISTER)
7644 REG_FREQ (regno) += REG_FREQ_FROM_BB (pbi->bb);
7645 REG_N_SETS (regno)++;
7648 /* Flush any remembered memories depending on the value of
7649 the incremented register. */
7650 invalidate_mems_from_set (pbi, SET_DEST (x));
7657 /* Try to change INSN so that it does pre-increment or pre-decrement
7658 addressing on register REG in order to add AMOUNT to REG.
7659 AMOUNT is negative for pre-decrement.
7660 Returns 1 if the change could be made.
7661 This checks all about the validity of the result of modifying INSN. */
7664 try_pre_increment (insn, reg, amount)
7666 HOST_WIDE_INT amount;
7670 /* Nonzero if we can try to make a pre-increment or pre-decrement.
7671 For example, addl $4,r1; movl (r1),... can become movl +(r1),... */
7673 /* Nonzero if we can try to make a post-increment or post-decrement.
7674 For example, addl $4,r1; movl -4(r1),... can become movl (r1)+,...
7675 It is possible for both PRE_OK and POST_OK to be nonzero if the machine
7676 supports both pre-inc and post-inc, or both pre-dec and post-dec. */
7679 /* Nonzero if the opportunity actually requires post-inc or post-dec. */
7682 /* From the sign of increment, see which possibilities are conceivable
7683 on this target machine. */
7684 if (HAVE_PRE_INCREMENT && amount > 0)
7686 if (HAVE_POST_INCREMENT && amount > 0)
7689 if (HAVE_PRE_DECREMENT && amount < 0)
7691 if (HAVE_POST_DECREMENT && amount < 0)
7694 if (! (pre_ok || post_ok))
7697 /* It is not safe to add a side effect to a jump insn
7698 because if the incremented register is spilled and must be reloaded
7699 there would be no way to store the incremented value back in memory. */
7701 if (GET_CODE (insn) == JUMP_INSN)
7706 use = find_use_as_address (PATTERN (insn), reg, 0);
7707 if (post_ok && (use == 0 || use == (rtx) 1))
7709 use = find_use_as_address (PATTERN (insn), reg, -amount);
7713 if (use == 0 || use == (rtx) 1)
7716 if (GET_MODE_SIZE (GET_MODE (use)) != (amount > 0 ? amount : - amount))
7719 /* See if this combination of instruction and addressing mode exists. */
7720 if (! validate_change (insn, &XEXP (use, 0),
7721 gen_rtx_fmt_e (amount > 0
7722 ? (do_post ? POST_INC : PRE_INC)
7723 : (do_post ? POST_DEC : PRE_DEC),
7727 /* Record that this insn now has an implicit side effect on X. */
7728 REG_NOTES (insn) = alloc_EXPR_LIST (REG_INC, reg, REG_NOTES (insn));
7732 #endif /* AUTO_INC_DEC */
7734 /* Find the place in the rtx X where REG is used as a memory address.
7735 Return the MEM rtx that so uses it.
7736 If PLUSCONST is nonzero, search instead for a memory address equivalent to
7737 (plus REG (const_int PLUSCONST)).
7739 If such an address does not appear, return 0.
7740 If REG appears more than once, or is used other than in such an address, don't return it. */
7744 find_use_as_address (x, reg, plusconst)
7747 HOST_WIDE_INT plusconst;
7749 enum rtx_code code = GET_CODE (x);
7750 const char * const fmt = GET_RTX_FORMAT (code);
7752 register rtx value = 0;
7755 if (code == MEM && XEXP (x, 0) == reg && plusconst == 0)
7758 if (code == MEM && GET_CODE (XEXP (x, 0)) == PLUS
7759 && XEXP (XEXP (x, 0), 0) == reg
7760 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
7761 && INTVAL (XEXP (XEXP (x, 0), 1)) == plusconst)
7764 if (code == SIGN_EXTRACT || code == ZERO_EXTRACT)
7766 /* If REG occurs inside a MEM used in a bit-field reference,
7767 that is unacceptable. */
7768 if (find_use_as_address (XEXP (x, 0), reg, 0) != 0)
7769 return (rtx) (HOST_WIDE_INT) 1;
7773 return (rtx) (HOST_WIDE_INT) 1;
7775 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
7779 tem = find_use_as_address (XEXP (x, i), reg, plusconst);
7783 return (rtx) (HOST_WIDE_INT) 1;
7785 else if (fmt[i] == 'E')
7788 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
7790 tem = find_use_as_address (XVECEXP (x, i, j), reg, plusconst);
7794 return (rtx) (HOST_WIDE_INT) 1;
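/* Editorial sketch (not part of flow.c): find_use_as_address answers
   three ways through one pointer: 0 for "no use", the MEM itself for
   "exactly one good use", and the sentinel (rtx) 1 for "REG is used some
   other way, give up".  The combining rule for subexpression results, in
   isolation and with hypothetical names: */

#define TOY_INVALID_USE ((void *) 1)

static void *
toy_combine_use (void *so_far, void *sub)
{
  if (sub == 0)
    return so_far;		/* Subtree contributes nothing.  */
  if (sub == TOY_INVALID_USE || so_far != 0)
    return TOY_INVALID_USE;	/* A bad use, or a second use.  */
  return sub;			/* The first (and only) good use.  */
}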
7802 /* Write information about registers and basic blocks into FILE.
7803 This is part of making a debugging dump. */
7806 dump_regset (r, outf)
7813 fputs (" (nil)", outf);
7817 EXECUTE_IF_SET_IN_REG_SET (r, 0, i,
7819 fprintf (outf, " %d", i);
7820 if (i < FIRST_PSEUDO_REGISTER)
7821 fprintf (outf, " [%s]",
7826 /* Print a human-readable representation of R on the standard error
7827 stream. This function is designed to be used from within the debugger. */
7834 dump_regset (r, stderr);
7835 putc ('\n', stderr);
7839 dump_flow_info (file)
7843 static const char * const reg_class_names[] = REG_CLASS_NAMES;
7845 fprintf (file, "%d registers.\n", max_regno);
7846 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
7849 enum reg_class class, altclass;
7850 fprintf (file, "\nRegister %d used %d times across %d insns",
7851 i, REG_N_REFS (i), REG_LIVE_LENGTH (i));
7852 if (REG_BASIC_BLOCK (i) >= 0)
7853 fprintf (file, " in block %d", REG_BASIC_BLOCK (i));
7855 fprintf (file, "; set %d time%s", REG_N_SETS (i),
7856 (REG_N_SETS (i) == 1) ? "" : "s");
7857 if (REG_USERVAR_P (regno_reg_rtx[i]))
7858 fprintf (file, "; user var");
7859 if (REG_N_DEATHS (i) != 1)
7860 fprintf (file, "; dies in %d places", REG_N_DEATHS (i));
7861 if (REG_N_CALLS_CROSSED (i) == 1)
7862 fprintf (file, "; crosses 1 call");
7863 else if (REG_N_CALLS_CROSSED (i))
7864 fprintf (file, "; crosses %d calls", REG_N_CALLS_CROSSED (i));
7865 if (PSEUDO_REGNO_BYTES (i) != UNITS_PER_WORD)
7866 fprintf (file, "; %d bytes", PSEUDO_REGNO_BYTES (i));
7867 class = reg_preferred_class (i);
7868 altclass = reg_alternate_class (i);
7869 if (class != GENERAL_REGS || altclass != ALL_REGS)
7871 if (altclass == ALL_REGS || class == ALL_REGS)
7872 fprintf (file, "; pref %s", reg_class_names[(int) class]);
7873 else if (altclass == NO_REGS)
7874 fprintf (file, "; %s or none", reg_class_names[(int) class]);
7876 fprintf (file, "; pref %s, else %s",
7877 reg_class_names[(int) class],
7878 reg_class_names[(int) altclass]);
7880 if (REG_POINTER (regno_reg_rtx[i]))
7881 fprintf (file, "; pointer");
7882 fprintf (file, ".\n");
7885 fprintf (file, "\n%d basic blocks, %d edges.\n", n_basic_blocks, n_edges);
7886 for (i = 0; i < n_basic_blocks; i++)
7888 register basic_block bb = BASIC_BLOCK (i);
7891 fprintf (file, "\nBasic block %d: first insn %d, last %d, loop_depth %d, count ",
7892 i, INSN_UID (bb->head), INSN_UID (bb->end), bb->loop_depth);
7893 fprintf (file, HOST_WIDEST_INT_PRINT_DEC, (HOST_WIDEST_INT) bb->count);
7894 fprintf (file, ", freq %i.\n", bb->frequency);
7896 fprintf (file, "Predecessors: ");
7897 for (e = bb->pred; e; e = e->pred_next)
7898 dump_edge_info (file, e, 0);
7900 fprintf (file, "\nSuccessors: ");
7901 for (e = bb->succ; e; e = e->succ_next)
7902 dump_edge_info (file, e, 1);
7904 fprintf (file, "\nRegisters live at start:");
7905 dump_regset (bb->global_live_at_start, file);
7907 fprintf (file, "\nRegisters live at end:");
7908 dump_regset (bb->global_live_at_end, file);
7919 dump_flow_info (stderr);
7923 dump_edge_info (file, e, do_succ)
7928 basic_block side = (do_succ ? e->dest : e->src);
7930 if (side == ENTRY_BLOCK_PTR)
7931 fputs (" ENTRY", file);
7932 else if (side == EXIT_BLOCK_PTR)
7933 fputs (" EXIT", file);
7935 fprintf (file, " %d", side->index);
7938 fprintf (file, " [%.1f%%] ", e->probability * 100.0 / REG_BR_PROB_BASE);
7942 fprintf (file, " count:");
7943 fprintf (file, HOST_WIDEST_INT_PRINT_DEC, (HOST_WIDEST_INT) e->count);
7948 static const char * const bitnames[] = {
7949 "fallthru", "crit", "ab", "abcall", "eh", "fake", "dfs_back"
7952 int i, flags = e->flags;
7956 for (i = 0; flags; i++)
7957 if (flags & (1 << i))
7963 if (i < (int) ARRAY_SIZE (bitnames))
7964 fputs (bitnames[i], file);
7966 fprintf (file, "%d", i);
7973 /* Print out one basic block with live information at start and end. */
7984 fprintf (outf, ";; Basic block %d, loop depth %d, count ",
7985 bb->index, bb->loop_depth);
7986 fprintf (outf, HOST_WIDEST_INT_PRINT_DEC, (HOST_WIDEST_INT) bb->count);
7989 fputs (";; Predecessors: ", outf);
7990 for (e = bb->pred; e; e = e->pred_next)
7991 dump_edge_info (outf, e, 0);
7994 fputs (";; Registers live at start:", outf);
7995 dump_regset (bb->global_live_at_start, outf);
7998 for (insn = bb->head, last = NEXT_INSN (bb->end);
8000 insn = NEXT_INSN (insn))
8001 print_rtl_single (outf, insn);
8003 fputs (";; Registers live at end:", outf);
8004 dump_regset (bb->global_live_at_end, outf);
8007 fputs (";; Successors: ", outf);
8008 for (e = bb->succ; e; e = e->succ_next)
8009 dump_edge_info (outf, e, 1);
8017 dump_bb (bb, stderr);
8024 dump_bb (BASIC_BLOCK (n), stderr);
8027 /* Like print_rtl, but also print out live information for the start of each basic block. */
8031 print_rtl_with_bb (outf, rtx_first)
8035 register rtx tmp_rtx;
8038 fprintf (outf, "(nil)\n");
8042 enum bb_state { NOT_IN_BB, IN_ONE_BB, IN_MULTIPLE_BB };
8043 int max_uid = get_max_uid ();
8044 basic_block *start = (basic_block *)
8045 xcalloc (max_uid, sizeof (basic_block));
8046 basic_block *end = (basic_block *)
8047 xcalloc (max_uid, sizeof (basic_block));
8048 enum bb_state *in_bb_p = (enum bb_state *)
8049 xcalloc (max_uid, sizeof (enum bb_state));
8051 for (i = n_basic_blocks - 1; i >= 0; i--)
8053 basic_block bb = BASIC_BLOCK (i);
8056 start[INSN_UID (bb->head)] = bb;
8057 end[INSN_UID (bb->end)] = bb;
8058 for (x = bb->head; x != NULL_RTX; x = NEXT_INSN (x))
8060 enum bb_state state = IN_MULTIPLE_BB;
8061 if (in_bb_p[INSN_UID (x)] == NOT_IN_BB)
8063 in_bb_p[INSN_UID (x)] = state;
8070 for (tmp_rtx = rtx_first; NULL != tmp_rtx; tmp_rtx = NEXT_INSN (tmp_rtx))
8075 if ((bb = start[INSN_UID (tmp_rtx)]) != NULL)
8077 fprintf (outf, ";; Start of basic block %d, registers live:",
8079 dump_regset (bb->global_live_at_start, outf);
8083 if (in_bb_p[INSN_UID (tmp_rtx)] == NOT_IN_BB
8084 && GET_CODE (tmp_rtx) != NOTE
8085 && GET_CODE (tmp_rtx) != BARRIER)
8086 fprintf (outf, ";; Insn is not within a basic block\n");
8087 else if (in_bb_p[INSN_UID (tmp_rtx)] == IN_MULTIPLE_BB)
8088 fprintf (outf, ";; Insn is in multiple basic blocks\n");
8090 did_output = print_rtl_single (outf, tmp_rtx);
8092 if ((bb = end[INSN_UID (tmp_rtx)]) != NULL)
8094 fprintf (outf, ";; End of basic block %d, registers live:\n",
8096 dump_regset (bb->global_live_at_end, outf);
8109 if (current_function_epilogue_delay_list != 0)
8111 fprintf (outf, "\n;; Insns in epilogue delay list:\n\n");
8112 for (tmp_rtx = current_function_epilogue_delay_list; tmp_rtx != 0;
8113 tmp_rtx = XEXP (tmp_rtx, 1))
8114 print_rtl_single (outf, XEXP (tmp_rtx, 0));
8118 /* Dump the rtl into the current debugging dump file, then abort. */
8121 print_rtl_and_abort_fcn (file, line, function)
8124 const char *function;
8128 print_rtl_with_bb (rtl_dump_file, get_insns ());
8129 fclose (rtl_dump_file);
8132 fancy_abort (file, line, function);
8135 /* Recompute register set/reference counts immediately prior to register allocation.
8138 This avoids problems with set/reference counts changing to/from values
8139 which have special meanings to the register allocators.
8141 Additionally, the reference counts are the primary component used by the
8142 register allocators to prioritize pseudos for allocation to hard regs.
8143 More accurate reference counts generally lead to better register allocation.
8145 F is the first insn to be scanned.
8147 LOOP_STEP denotes how much loop_depth should be incremented per
8148 loop nesting level in order to increase the ref count more for
8149 references in a loop.
8151 It might be worthwhile to update REG_LIVE_LENGTH, REG_BASIC_BLOCK and
8152 possibly other information which is used by the register allocators. */
8155 recompute_reg_usage (f, loop_step)
8156 rtx f ATTRIBUTE_UNUSED;
8157 int loop_step ATTRIBUTE_UNUSED;
8159 allocate_reg_life_data ();
8160 update_life_info (NULL, UPDATE_LIFE_LOCAL, PROP_REG_INFO);
8163 /* Optionally removes all the REG_DEAD and REG_UNUSED notes from a set of
8164 blocks. If BLOCKS is NULL, assume the universal set. Returns a count
8165 of the number of registers that died. */
8168 count_or_remove_death_notes (blocks, kill)
8174 for (i = n_basic_blocks - 1; i >= 0; --i)
8179 if (blocks && ! TEST_BIT (blocks, i))
8182 bb = BASIC_BLOCK (i);
8184 for (insn = bb->head;; insn = NEXT_INSN (insn))
8188 rtx *pprev = ®_NOTES (insn);
8193 switch (REG_NOTE_KIND (link))
8196 if (GET_CODE (XEXP (link, 0)) == REG)
8198 rtx reg = XEXP (link, 0);
8201 if (REGNO (reg) >= FIRST_PSEUDO_REGISTER)
8204 n = HARD_REGNO_NREGS (REGNO (reg), GET_MODE (reg));
8212 rtx next = XEXP (link, 1);
8213 free_EXPR_LIST_node (link);
8214 *pprev = link = next;
8220 pprev = &XEXP (link, 1);
8227 if (insn == bb->end)
8236 /* Update the containing-block record of each insn within BB. */
8239 update_bb_for_insn (bb)
8244 if (! basic_block_for_insn)
8247 for (insn = bb->head; ; insn = NEXT_INSN (insn))
8249 set_block_for_insn (insn, bb);
8251 if (insn == bb->end)
8257 /* Record INSN's block as BB. */
8260 set_block_for_insn (insn, bb)
8264 size_t uid = INSN_UID (insn);
8265 if (uid >= basic_block_for_insn->num_elements)
8269 /* Add one-eighth the size so we don't keep calling xrealloc. */
8270 new_size = uid + (uid + 7) / 8;
8272 VARRAY_GROW (basic_block_for_insn, new_size);
8274 VARRAY_BB (basic_block_for_insn, uid) = bb;
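/* Editorial sketch (not part of flow.c): the growth policy above,
   new_size = uid + (uid + 7) / 8, overshoots the needed index by roughly
   an eighth, so a stream of one-past-the-end insertions causes a
   logarithmic number of reallocations instead of one per insn.  A toy
   resize with hypothetical names, assuming the xrealloc and memset
   already available to this file: */

static basic_block *toy_bb_map;
static size_t toy_bb_map_size;

static void
toy_set_block (size_t uid, basic_block bb)
{
  if (uid >= toy_bb_map_size)
    {
      size_t new_size = uid + (uid + 7) / 8;	/* uid plus ~1/8 slack */
      toy_bb_map = (basic_block *) xrealloc (toy_bb_map,
					     new_size * sizeof (basic_block));
      memset (toy_bb_map + toy_bb_map_size, 0,
	      (new_size - toy_bb_map_size) * sizeof (basic_block));
      toy_bb_map_size = new_size;
    }
  toy_bb_map[uid] = bb;
}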
8277 /* When a new insn has been inserted into an existing block, it will
8278 sometimes emit more than a single insn. This routine will set the
8279 block number for the specified insn, and look backwards in the insn
8280 chain to see if there are any other uninitialized insns immediately
8281 previous to this one, and set the block number for them too. */
8284 set_block_for_new_insns (insn, bb)
8288 set_block_for_insn (insn, bb);
8290 /* Scan the previous instructions setting the block number until we find
8291 an instruction that has the block number set, or we find a note of any kind. */
8293 for (insn = PREV_INSN (insn); insn != NULL_RTX; insn = PREV_INSN (insn))
8295 if (GET_CODE (insn) == NOTE)
8297 if ((unsigned) INSN_UID (insn) >= basic_block_for_insn->num_elements
8298 || BLOCK_FOR_INSN (insn) == 0)
8299 set_block_for_insn (insn, bb);
8305 /* Verify CFG consistency. This function checks some CFG invariants and
8306 aborts when something is wrong. The hope is that it will help to
8307 convert many optimization passes so that they preserve CFG consistency.
8309 Currently it performs the following checks:
8311 - test head/end pointers
8312 - overlapping of basic blocks
8313 - edge list correctness
8314 - headers of basic blocks (the NOTE_INSN_BASIC_BLOCK note)
8315 - tails of basic blocks (ensure that the boundary is necessary)
8316 - scans body of the basic block for JUMP_INSN, CODE_LABEL
8317 and NOTE_INSN_BASIC_BLOCK
8318 - check that all insns are in the basic blocks
8319 (except the switch handling code, barriers and notes)
8320 - check that all returns are followed by barriers
8322 In the future it can be extended to check a lot of other things as well
8323 (reachability of basic blocks, life information, etc. etc.). */
8328 const int max_uid = get_max_uid ();
8329 const rtx rtx_first = get_insns ();
8330 rtx last_head = get_last_insn ();
8331 basic_block *bb_info, *last_visited;
8333 int i, last_bb_num_seen, num_bb_notes, err = 0;
8335 bb_info = (basic_block *) xcalloc (max_uid, sizeof (basic_block));
8336 last_visited = (basic_block *) xcalloc (n_basic_blocks + 2,
8337 sizeof (basic_block));
8339 for (i = n_basic_blocks - 1; i >= 0; i--)
8341 basic_block bb = BASIC_BLOCK (i);
8342 rtx head = bb->head;
8345 /* Verify the end of the basic block is in the INSN chain. */
8346 for (x = last_head; x != NULL_RTX; x = PREV_INSN (x))
8351 error ("End insn %d for block %d not found in the insn stream.",
8352 INSN_UID (end), bb->index);
8356 /* Work backwards from the end to the head of the basic block
8357 to verify the head is in the RTL chain. */
8358 for (; x != NULL_RTX; x = PREV_INSN (x))
8360 /* While walking over the insn chain, verify insns appear
8361 in only one basic block and initialize the BB_INFO array
8362 used by other passes. */
8363 if (bb_info[INSN_UID (x)] != NULL)
8365 error ("Insn %d is in multiple basic blocks (%d and %d)",
8366 INSN_UID (x), bb->index, bb_info[INSN_UID (x)]->index);
8369 bb_info[INSN_UID (x)] = bb;
8376 error ("Head insn %d for block %d not found in the insn stream.",
8377 INSN_UID (head), bb->index);
8384 /* Now check the basic blocks (boundaries etc.) */
8385 for (i = n_basic_blocks - 1; i >= 0; i--)
8387 basic_block bb = BASIC_BLOCK (i);
8388 /* Check correctness of edge lists. */
8390 int has_fallthru = 0;
8395 if (last_visited [e->dest->index + 2] == bb)
8397 error ("verify_flow_info: Duplicate edge %i->%i",
8398 e->src->index, e->dest->index);
8401 last_visited [e->dest->index + 2] = bb;
8403 if (e->flags & EDGE_FALLTHRU)
8406 if ((e->flags & EDGE_FALLTHRU)
8407 && e->src != ENTRY_BLOCK_PTR
8408 && e->dest != EXIT_BLOCK_PTR)
8411 if (e->src->index + 1 != e->dest->index)
8413 error ("verify_flow_info: Incorrect blocks for fallthru %i->%i",
8414 e->src->index, e->dest->index);
8418 for (insn = NEXT_INSN (e->src->end); insn != e->dest->head;
8419 insn = NEXT_INSN (insn))
8420 if (GET_CODE (insn) == BARRIER || INSN_P (insn))
8422 error ("verify_flow_info: Incorrect fallthru %i->%i",
8423 e->src->index, e->dest->index);
8424 fatal_insn ("Wrong insn in the fallthru edge", insn);
8430 error ("verify_flow_info: Basic block %d succ edge is corrupted",
8432 fprintf (stderr, "Predecessor: ");
8433 dump_edge_info (stderr, e, 0);
8434 fprintf (stderr, "\nSuccessor: ");
8435 dump_edge_info (stderr, e, 1);
8436 fprintf (stderr, "\n");
8439 if (e->dest != EXIT_BLOCK_PTR)
8441 edge e2 = e->dest->pred;
8442 while (e2 && e2 != e)
8446 error ("Basic block %i edge lists are corrupted", bb->index);
8456 /* Ensure existence of barrier in BB with no fallthru edges. */
8457 for (insn = bb->end; GET_CODE (insn) != BARRIER;
8458 insn = NEXT_INSN (insn))
8460 || (GET_CODE (insn) == NOTE
8461 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_BASIC_BLOCK))
8463 error ("Missing barrier after block %i", bb->index);
8473 error ("Basic block %d pred edge is corrupted", bb->index);
8474 fputs ("Predecessor: ", stderr);
8475 dump_edge_info (stderr, e, 0);
8476 fputs ("\nSuccessor: ", stderr);
8477 dump_edge_info (stderr, e, 1);
8478 fputc ('\n', stderr);
8481 if (e->src != ENTRY_BLOCK_PTR)
8483 edge e2 = e->src->succ;
8484 while (e2 && e2 != e)
8488 error ("Basic block %i edge lists are corrupted", bb->index);
8495 /* OK, the pointers are correct. Now check the header of the basic
8496 block. It ought to contain an optional CODE_LABEL followed
8497 by NOTE_BASIC_BLOCK. */
8499 if (GET_CODE (x) == CODE_LABEL)
8503 error ("NOTE_INSN_BASIC_BLOCK is missing for block %d",
8509 if (!NOTE_INSN_BASIC_BLOCK_P (x) || NOTE_BASIC_BLOCK (x) != bb)
8511 error ("NOTE_INSN_BASIC_BLOCK is missing for block %d\n",
8518 /* Do checks for empty blocks here. */
8525 if (NOTE_INSN_BASIC_BLOCK_P (x))
8527 error ("NOTE_INSN_BASIC_BLOCK %d in the middle of basic block %d",
8528 INSN_UID (x), bb->index);
8535 if (GET_CODE (x) == JUMP_INSN
8536 || GET_CODE (x) == CODE_LABEL
8537 || GET_CODE (x) == BARRIER)
8539 error ("In basic block %d:", bb->index);
8540 fatal_insn ("Flow control insn inside a basic block", x);
8548 last_bb_num_seen = -1;
8553 if (NOTE_INSN_BASIC_BLOCK_P (x))
8555 basic_block bb = NOTE_BASIC_BLOCK (x);
8557 if (bb->index != last_bb_num_seen + 1)
8558 internal_error ("Basic blocks not numbered consecutively.");
8560 last_bb_num_seen = bb->index;
8563 if (!bb_info[INSN_UID (x)])
8565 switch (GET_CODE (x))
8572 /* An addr_vec is placed outside any basic block. */
8574 && GET_CODE (NEXT_INSN (x)) == JUMP_INSN
8575 && (GET_CODE (PATTERN (NEXT_INSN (x))) == ADDR_DIFF_VEC
8576 || GET_CODE (PATTERN (NEXT_INSN (x))) == ADDR_VEC))
8581 /* But in any case, non-deletable labels can appear anywhere. */
8585 fatal_insn ("Insn outside basic block", x);
8590 && GET_CODE (x) == JUMP_INSN
8591 && returnjump_p (x) && ! condjump_p (x)
8592 && ! (NEXT_INSN (x) && GET_CODE (NEXT_INSN (x)) == BARRIER))
8593 fatal_insn ("Return not followed by barrier", x);
8598 if (num_bb_notes != n_basic_blocks)
8600 ("number of bb notes in insn chain (%d) != n_basic_blocks (%d)",
8601 num_bb_notes, n_basic_blocks);
8604 internal_error ("verify_flow_info failed.");
8608 free (last_visited);
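/* Editorial sketch (not part of flow.c): the duplicate-edge test in
   verify_flow_info avoids clearing a "seen" array per block by recording,
   for each destination, the last source that visited it; a repeat visit
   from the same source means a duplicate edge.  A toy version with
   hypothetical names; LAST_VISITED_BY must start out holding a value
   never used as a source index (e.g. -1).  */

static int
toy_has_duplicate_succ (int src, const int *succs, int nsuccs,
			int *last_visited_by)
{
  int i;
  for (i = 0; i < nsuccs; i++)
    {
      if (last_visited_by[succs[i]] == src)
	return 1;		/* Same (src, dest) pair seen twice.  */
      last_visited_by[succs[i]] = src;
    }
  return 0;
}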
8611 /* Functions to access an edge list with a vector representation.
8612 Enough data is kept such that given an index number, the
8613 pred and succ that edge represents can be determined, or
8614 given a pred and a succ, its index number can be returned.
8615 This allows algorithms which consume a lot of memory to
8616 represent the normally full matrix of edge (pred,succ) with a
8617 single indexed vector, edge (EDGE_INDEX (pred, succ)), with no
8618 wasted space in the client code due to sparse flow graphs. */
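/* Editorial sketch (not part of flow.c): the representation described
   above in miniature.  All edges live in one dense array, and a
   (pred, succ) query walks it, so the full pred-by-succ matrix is never
   materialized.  Names are hypothetical.  */

struct toy_edge { int pred, succ; };
struct toy_edge_list { int num_edges; struct toy_edge *edges; };

static int
toy_edge_index (const struct toy_edge_list *el, int pred, int succ)
{
  int i;
  for (i = 0; i < el->num_edges; i++)
    if (el->edges[i].pred == pred && el->edges[i].succ == succ)
      return i;			/* The dense index of this edge.  */
  return -1;			/* Cf. EDGE_INDEX_NO_EDGE.  */
}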
8620 /* This function initializes the edge list. Basically the entire
8621 flowgraph is processed, and all edges are assigned a number,
8622 and the data structure is filled in. */
8627 struct edge_list *elist;
8633 block_count = n_basic_blocks + 2; /* Include the entry and exit blocks. */
8637 /* Determine the number of edges in the flow graph by counting successor
8638 edges on each basic block. */
8639 for (x = 0; x < n_basic_blocks; x++)
8641 basic_block bb = BASIC_BLOCK (x);
8643 for (e = bb->succ; e; e = e->succ_next)
8646 /* Don't forget successors of the entry block. */
8647 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
8650 elist = (struct edge_list *) xmalloc (sizeof (struct edge_list));
8651 elist->num_blocks = block_count;
8652 elist->num_edges = num_edges;
8653 elist->index_to_edge = (edge *) xmalloc (sizeof (edge) * num_edges);
8657 /* Follow successors of the entry block, and register these edges. */
8658 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
8660 elist->index_to_edge[num_edges] = e;
8664 for (x = 0; x < n_basic_blocks; x++)
8666 basic_block bb = BASIC_BLOCK (x);
8668 /* Follow all successors of blocks, and register these edges. */
8669 for (e = bb->succ; e; e = e->succ_next)
8671 elist->index_to_edge[num_edges] = e;
8678 /* This function frees the memory associated with an edge list. */
8681 free_edge_list (elist)
8682 struct edge_list *elist;
8686 free (elist->index_to_edge);
8691 /* This function provides debug output showing an edge list. */
8694 print_edge_list (f, elist)
8696 struct edge_list *elist;
8699 fprintf (f, "Compressed edge list, %d BBs + entry & exit, and %d edges\n",
8700 elist->num_blocks - 2, elist->num_edges);
8702 for (x = 0; x < elist->num_edges; x++)
8704 fprintf (f, " %-4d - edge(", x);
8705 if (INDEX_EDGE_PRED_BB (elist, x) == ENTRY_BLOCK_PTR)
8706 fprintf (f, "entry,");
8708 fprintf (f, "%d,", INDEX_EDGE_PRED_BB (elist, x)->index);
8710 if (INDEX_EDGE_SUCC_BB (elist, x) == EXIT_BLOCK_PTR)
8711 fprintf (f, "exit)\n");
8713 fprintf (f, "%d)\n", INDEX_EDGE_SUCC_BB (elist, x)->index);
8717 /* This function provides an internal consistency check of an edge list,
8718 verifying that all edges are present, and that there are no spurious edges. */
8722 verify_edge_list (f, elist)
8724 struct edge_list *elist;
8726 int x, pred, succ, index;
8729 for (x = 0; x < n_basic_blocks; x++)
8731 basic_block bb = BASIC_BLOCK (x);
8733 for (e = bb->succ; e; e = e->succ_next)
8735 pred = e->src->index;
8736 succ = e->dest->index;
8737 index = EDGE_INDEX (elist, e->src, e->dest);
8738 if (index == EDGE_INDEX_NO_EDGE)
8740 fprintf (f, "*p* No index for edge from %d to %d\n", pred, succ);
8743 if (INDEX_EDGE_PRED_BB (elist, index)->index != pred)
8744 fprintf (f, "*p* Pred for index %d should be %d not %d\n",
8745 index, pred, INDEX_EDGE_PRED_BB (elist, index)->index);
8746 if (INDEX_EDGE_SUCC_BB (elist, index)->index != succ)
8747 fprintf (f, "*p* Succ for index %d should be %d not %d\n",
8748 index, succ, INDEX_EDGE_SUCC_BB (elist, index)->index);
8751 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
8753 pred = e->src->index;
8754 succ = e->dest->index;
8755 index = EDGE_INDEX (elist, e->src, e->dest);
8756 if (index == EDGE_INDEX_NO_EDGE)
8758 fprintf (f, "*p* No index for edge from %d to %d\n", pred, succ);
8761 if (INDEX_EDGE_PRED_BB (elist, index)->index != pred)
8762 fprintf (f, "*p* Pred for index %d should be %d not %d\n",
8763 index, pred, INDEX_EDGE_PRED_BB (elist, index)->index);
8764 if (INDEX_EDGE_SUCC_BB (elist, index)->index != succ)
8765 fprintf (f, "*p* Succ for index %d should be %d not %d\n",
8766 index, succ, INDEX_EDGE_SUCC_BB (elist, index)->index);
8768 /* We've verified that all the edges are in the list; now let's make sure
8769 there are no spurious edges in the list. */
8771 for (pred = 0; pred < n_basic_blocks; pred++)
8772 for (succ = 0; succ < n_basic_blocks; succ++)
8774 basic_block p = BASIC_BLOCK (pred);
8775 basic_block s = BASIC_BLOCK (succ);
8779 for (e = p->succ; e; e = e->succ_next)
8785 for (e = s->pred; e; e = e->pred_next)
8791 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), BASIC_BLOCK (succ))
8792 == EDGE_INDEX_NO_EDGE && found_edge != 0)
8793 fprintf (f, "*** Edge (%d, %d) appears to not have an index\n",
8795 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), BASIC_BLOCK (succ))
8796 != EDGE_INDEX_NO_EDGE && found_edge == 0)
8797 fprintf (f, "*** Edge (%d, %d) has index %d, but there is no edge\n",
8798 pred, succ, EDGE_INDEX (elist, BASIC_BLOCK (pred),
8799 BASIC_BLOCK (succ)));
8801 for (succ = 0; succ < n_basic_blocks; succ++)
8803 basic_block p = ENTRY_BLOCK_PTR;
8804 basic_block s = BASIC_BLOCK (succ);
8808 for (e = p->succ; e; e = e->succ_next)
8814 for (e = s->pred; e; e = e->pred_next)
8820 if (EDGE_INDEX (elist, ENTRY_BLOCK_PTR, BASIC_BLOCK (succ))
8821 == EDGE_INDEX_NO_EDGE && found_edge != 0)
8822 fprintf (f, "*** Edge (entry, %d) appears to not have an index\n",
8824 if (EDGE_INDEX (elist, ENTRY_BLOCK_PTR, BASIC_BLOCK (succ))
8825 != EDGE_INDEX_NO_EDGE && found_edge == 0)
8826 fprintf (f, "*** Edge (entry, %d) has index %d, but no edge exists\n",
8827 succ, EDGE_INDEX (elist, ENTRY_BLOCK_PTR,
8828 BASIC_BLOCK (succ)));
8830 for (pred = 0; pred < n_basic_blocks; pred++)
8832 basic_block p = BASIC_BLOCK (pred);
8833 basic_block s = EXIT_BLOCK_PTR;
8837 for (e = p->succ; e; e = e->succ_next)
8843 for (e = s->pred; e; e = e->pred_next)
8849 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), EXIT_BLOCK_PTR)
8850 == EDGE_INDEX_NO_EDGE && found_edge != 0)
8851 fprintf (f, "*** Edge (%d, exit) appears to not have an index\n",
8853 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), EXIT_BLOCK_PTR)
8854 != EDGE_INDEX_NO_EDGE && found_edge == 0)
8855 fprintf (f, "*** Edge (%d, exit) has index %d, but no edge exists\n",
8856 pred, EDGE_INDEX (elist, BASIC_BLOCK (pred),
8861 /* This routine will determine what, if any, edge there is between
8862 a specified predecessor and successor. */
8865 find_edge_index (edge_list, pred, succ)
8866 struct edge_list *edge_list;
8867 basic_block pred, succ;
8870 for (x = 0; x < NUM_EDGES (edge_list); x++)
8872 if (INDEX_EDGE_PRED_BB (edge_list, x) == pred
8873 && INDEX_EDGE_SUCC_BB (edge_list, x) == succ)
8876 return (EDGE_INDEX_NO_EDGE);
8879 /* This function will remove an edge from the flow graph. */
8885 edge last_pred = NULL;
8886 edge last_succ = NULL;
8888 basic_block src, dest;
8891 for (tmp = src->succ; tmp && tmp != e; tmp = tmp->succ_next)
8897 last_succ->succ_next = e->succ_next;
8899 src->succ = e->succ_next;
8901 for (tmp = dest->pred; tmp && tmp != e; tmp = tmp->pred_next)
8907 last_pred->pred_next = e->pred_next;
8909 dest->pred = e->pred_next;
8915 /* This routine will remove any fake successor edges for a basic block.
8916 When the edge is removed, it is also removed from whatever predecessor list it is in. */
8920 remove_fake_successors (bb)
8924 for (e = bb->succ; e;)
8928 if ((tmp->flags & EDGE_FAKE) == EDGE_FAKE)
8933 /* This routine will remove all fake edges from the flow graph. If
8934 we remove all fake successors, it will automatically remove all
8935 fake predecessors. */
8938 remove_fake_edges ()
8942 for (x = 0; x < n_basic_blocks; x++)
8943 remove_fake_successors (BASIC_BLOCK (x));
8945 /* We've handled all successors except the entry block's. */
8946 remove_fake_successors (ENTRY_BLOCK_PTR);
8949 /* This function will add a fake edge between any block which has no
8950 successors, and the exit block. Some data flow equations require these edges to exist. */
8954 add_noreturn_fake_exit_edges ()
8958 for (x = 0; x < n_basic_blocks; x++)
8959 if (BASIC_BLOCK (x)->succ == NULL)
8960 make_edge (NULL, BASIC_BLOCK (x), EXIT_BLOCK_PTR, EDGE_FAKE);
8963 /* This function adds a fake edge between any infinite loops to the
8964 exit block. Some optimizations require a path from each node to
8967 See also Morgan, Figure 3.10, pp. 82-83.
8969 The current implementation is ugly, not attempting to minimize the
8970 number of inserted fake edges. To reduce the number of fake edges
8971 to insert, add fake edges from _innermost_ loops containing only
8972 nodes not reachable from the exit block. */
8975 connect_infinite_loops_to_exit ()
8977 basic_block unvisited_block;
8979 /* Perform depth-first search in the reverse graph to find nodes
8980 reachable from the exit block. */
8981 struct depth_first_search_dsS dfs_ds;
8983 flow_dfs_compute_reverse_init (&dfs_ds);
8984 flow_dfs_compute_reverse_add_bb (&dfs_ds, EXIT_BLOCK_PTR);
8986 /* Repeatedly add fake edges, updating the unreachable nodes. */
8989 unvisited_block = flow_dfs_compute_reverse_execute (&dfs_ds);
8990 if (!unvisited_block)
8992 make_edge (NULL, unvisited_block, EXIT_BLOCK_PTR, EDGE_FAKE);
8993 flow_dfs_compute_reverse_add_bb (&dfs_ds, unvisited_block);
8996 flow_dfs_compute_reverse_finish (&dfs_ds);
9001 /* Redirect an edge's successor from one block to another. */
9004 redirect_edge_succ (e, new_succ)
9006 basic_block new_succ;
9010 /* Disconnect the edge from the old successor block. */
9011 for (pe = &e->dest->pred; *pe != e; pe = &(*pe)->pred_next)
9013 *pe = (*pe)->pred_next;
9015 /* Reconnect the edge to the new successor block. */
9016 e->pred_next = new_succ->pred;
9021 /* Like the previous function, but avoid creating a possible duplicate edge. */
9024 redirect_edge_succ_nodup (e, new_succ)
9026 basic_block new_succ;
9029 /* Check whether the edge is already present. */
9030 for (s = e->src->succ; s; s = s->succ_next)
9031 if (s->dest == new_succ && s != e)
9035 s->flags |= e->flags;
9036 s->probability += e->probability;
9037 s->count += e->count;
9041 redirect_edge_succ (e, new_succ);
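
/* Illustrative sketch, not part of the pass: forward every edge that
   enters OLD_BB so that it enters NEW_BB instead.  Each call below
   unlinks one edge from OLD_BB's predecessor list, either by
   redirecting it or by merging it into an existing edge to NEW_BB, so
   the loop terminates.  The function name is hypothetical.  */

static void
example_forward_all_preds (old_bb, new_bb)
     basic_block old_bb, new_bb;
{
  while (old_bb->pred)
    redirect_edge_succ_nodup (old_bb->pred, new_bb);
}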
9044 /* Redirect an edge's predecessor from one block to another. */
9047 redirect_edge_pred (e, new_pred)
9049 basic_block new_pred;
9053 /* Disconnect the edge from the old predecessor block. */
9054 for (pe = &e->src->succ; *pe != e; pe = &(*pe)->succ_next)
9056 *pe = (*pe)->succ_next;
9058 /* Reconnect the edge to the new predecessor block. */
9059 e->succ_next = new_pred->succ;
9064 /* Dump the list of basic blocks in the bitmap NODES. */
9067 flow_nodes_print (str, nodes, file)
9069 const sbitmap nodes;
9077 fprintf (file, "%s { ", str);
9078 EXECUTE_IF_SET_IN_SBITMAP (nodes, 0, node, {fprintf (file, "%d ", node);});
9079 fputs ("}\n", file);
9083 /* Dump the list of edges in the array EDGE_LIST. */
9086 flow_edge_list_print (str, edge_list, num_edges, file)
9088 const edge *edge_list;
9097 fprintf (file, "%s { ", str);
9098 for (i = 0; i < num_edges; i++)
9099 fprintf (file, "%d->%d ", edge_list[i]->src->index,
9100 edge_list[i]->dest->index);
9101 fputs ("}\n", file);
9105 /* Dump loop related CFG information. */
9108 flow_loops_cfg_dump (loops, file)
9109 const struct loops *loops;
9114 if (! loops->num || ! file || ! loops->cfg.dom)
9117 for (i = 0; i < n_basic_blocks; i++)
9121 fprintf (file, ";; %d succs { ", i);
9122 for (succ = BASIC_BLOCK (i)->succ; succ; succ = succ->succ_next)
9123 fprintf (file, "%d ", succ->dest->index);
9124 flow_nodes_print ("} dom", loops->cfg.dom[i], file);
9127 /* Dump the DFS node order. */
9128 if (loops->cfg.dfs_order)
9130 fputs (";; DFS order: ", file);
9131 for (i = 0; i < n_basic_blocks; i++)
9132 fprintf (file, "%d ", loops->cfg.dfs_order[i]);
9135 /* Dump the reverse completion node order. */
9136 if (loops->cfg.rc_order)
9138 fputs (";; RC order: ", file);
9139 for (i = 0; i < n_basic_blocks; i++)
9140 fprintf (file, "%d ", loops->cfg.rc_order[i]);
9145 /* Return non-zero if the nodes of LOOP are a subset of OUTER. */
9148 flow_loop_nested_p (outer, loop)
9152 return sbitmap_a_subset_b_p (loop->nodes, outer->nodes);
9156 /* Dump the loop information specified by LOOP to the stream FILE
9157 using auxiliary dump callback function LOOP_DUMP_AUX if non-null. */
9159 flow_loop_dump (loop, file, loop_dump_aux, verbose)
9160 const struct loop *loop;
9162 void (*loop_dump_aux) PARAMS((const struct loop *, FILE *, int));
9165 if (! loop || ! loop->header)
9168 fprintf (file, ";;\n;; Loop %d (%d to %d):%s%s\n",
9169 loop->num, INSN_UID (loop->first->head),
9170 INSN_UID (loop->last->end),
9171 loop->shared ? " shared" : "",
9172 loop->invalid ? " invalid" : "");
9173 fprintf (file, ";; header %d, latch %d, pre-header %d, first %d, last %d\n",
9174 loop->header->index, loop->latch->index,
9175 loop->pre_header ? loop->pre_header->index : -1,
9176 loop->first->index, loop->last->index);
9177 fprintf (file, ";; depth %d, level %d, outer %ld\n",
9178 loop->depth, loop->level,
9179 (long) (loop->outer ? loop->outer->num : -1));
9181 if (loop->pre_header_edges)
9182 flow_edge_list_print (";; pre-header edges", loop->pre_header_edges,
9183 loop->num_pre_header_edges, file);
9184 flow_edge_list_print (";; entry edges", loop->entry_edges,
9185 loop->num_entries, file);
9186 fprintf (file, ";; %d", loop->num_nodes);
9187 flow_nodes_print (" nodes", loop->nodes, file);
9188 flow_edge_list_print (";; exit edges", loop->exit_edges,
9189 loop->num_exits, file);
9190 if (loop->exits_doms)
9191 flow_nodes_print (";; exit doms", loop->exits_doms, file);
9193 loop_dump_aux (loop, file, verbose);
9197 /* Dump the loop information specified by LOOPS to the stream FILE,
9198 using auxiliary dump callback function LOOP_DUMP_AUX if non-null. */
9200 flow_loops_dump (loops, file, loop_dump_aux, verbose)
9201 const struct loops *loops;
9203 void (*loop_dump_aux) PARAMS((const struct loop *, FILE *, int));
9209 num_loops = loops->num;
9210 if (! num_loops || ! file)
9213 fprintf (file, ";; %d loops found, %d levels\n",
9214 num_loops, loops->levels);
9216 for (i = 0; i < num_loops; i++)
9218 struct loop *loop = &loops->array[i];
9220 flow_loop_dump (loop, file, loop_dump_aux, verbose);
9226 for (j = 0; j < i; j++)
9228 struct loop *oloop = &loops->array[j];
9230 if (loop->header == oloop->header)
9235 smaller = loop->num_nodes < oloop->num_nodes;
9237 /* If the union of LOOP and OLOOP is different from
9238 the larger of LOOP and OLOOP, then LOOP and OLOOP
9239 must be disjoint. */
9240 disjoint = ! flow_loop_nested_p (smaller ? loop : oloop,
9241 smaller ? oloop : loop);
9243 ";; loop header %d shared by loops %d, %d %s\n",
9244 loop->header->index, i, j,
9245 disjoint ? "disjoint" : "nested");
9252 flow_loops_cfg_dump (loops, file);
9256 /* Free all the memory allocated for LOOPS. */
9259 flow_loops_free (loops)
9260 struct loops *loops;
9269 /* Free the loop descriptors. */
9270 for (i = 0; i < loops->num; i++)
9272 struct loop *loop = &loops->array[i];
9274 if (loop->pre_header_edges)
9275 free (loop->pre_header_edges);
9277 sbitmap_free (loop->nodes);
9278 if (loop->entry_edges)
9279 free (loop->entry_edges);
9280 if (loop->exit_edges)
9281 free (loop->exit_edges);
9282 if (loop->exits_doms)
9283 sbitmap_free (loop->exits_doms);
9285 free (loops->array);
9286 loops->array = NULL;
9289 sbitmap_vector_free (loops->cfg.dom);
9290 if (loops->cfg.dfs_order)
9291 free (loops->cfg.dfs_order);
9293 if (loops->shared_headers)
9294 sbitmap_free (loops->shared_headers);
9299 /* Find the entry edges into the loop with header HEADER and nodes
9300 NODES and store them in the ENTRY_EDGES array. Return the number of
9301 entry edges into the loop. */
9304 flow_loop_entry_edges_find (header, nodes, entry_edges)
9306 const sbitmap nodes;
9312 *entry_edges = NULL;
9315 for (e = header->pred; e; e = e->pred_next)
9317 basic_block src = e->src;
9319 if (src == ENTRY_BLOCK_PTR || ! TEST_BIT (nodes, src->index))
9326 *entry_edges = (edge *) xmalloc (num_entries * sizeof (edge));
9329 for (e = header->pred; e; e = e->pred_next)
9331 basic_block src = e->src;
9333 if (src == ENTRY_BLOCK_PTR || ! TEST_BIT (nodes, src->index))
9334 (*entry_edges)[num_entries++] = e;
9341 /* Find the exit edges from the loop using the bitmap of loop nodes
9342 NODES and store in EXIT_EDGES array. Return the number of
9343 exit edges from the loop. */
9346 flow_loop_exit_edges_find (nodes, exit_edges)
9347 const sbitmap nodes;
9356 /* Check all nodes within the loop to see if there are any
9357 successors not in the loop. Note that a node may have multiple
9358 exiting edges. For example, a node can have one jumping edge and one
9359 fallthru edge, only one of which can exit the loop. */
9361 EXECUTE_IF_SET_IN_SBITMAP (nodes, 0, node, {
9362 for (e = BASIC_BLOCK (node)->succ; e; e = e->succ_next)
9364 basic_block dest = e->dest;
9366 if (dest == EXIT_BLOCK_PTR || ! TEST_BIT (nodes, dest->index))
9374 *exit_edges = (edge *) xmalloc (num_exits * sizeof (edge));
9376 /* Store all exiting edges into an array. */
9378 EXECUTE_IF_SET_IN_SBITMAP (nodes, 0, node, {
9379 for (e = BASIC_BLOCK (node)->succ; e; e = e->succ_next)
9381 basic_block dest = e->dest;
9383 if (dest == EXIT_BLOCK_PTR || ! TEST_BIT (nodes, dest->index))
9384 (*exit_edges)[num_exits++] = e;
9392 /* Find the nodes contained within the loop with header HEADER and
9393 latch LATCH and store them in NODES. Return the number of nodes within the loop. */
9397 flow_loop_nodes_find (header, latch, nodes)
9406 stack = (basic_block *) xmalloc (n_basic_blocks * sizeof (basic_block));
9409 /* Start with only the loop header in the set of loop nodes. */
9410 sbitmap_zero (nodes);
9411 SET_BIT (nodes, header->index);
9413 header->loop_depth++;
9415 /* Push the loop latch on to the stack. */
9416 if (! TEST_BIT (nodes, latch->index))
9418 SET_BIT (nodes, latch->index);
9419 latch->loop_depth++;
9421 stack[sp++] = latch;
9430 for (e = node->pred; e; e = e->pred_next)
9432 basic_block ancestor = e->src;
9434 /* If the ancestor is not yet marked as part of the loop, add it
9435 to the set of loop nodes and push it onto the stack. */
9436 if (ancestor != ENTRY_BLOCK_PTR
9437 && ! TEST_BIT (nodes, ancestor->index))
9439 SET_BIT (nodes, ancestor->index);
9440 ancestor->loop_depth++;
9442 stack[sp++] = ancestor;
9450 /* Compute the depth first search order and store in the array
9451 DFS_ORDER if non-zero, keeping track of visited nodes in a local
9452 bitmap. If RC_ORDER is non-zero, return the reverse completion
9453 number for each node. Returns the number of nodes visited. A depth
9454 first search tries to get as far away from the starting point as quickly as possible. */
9458 flow_depth_first_order_compute (dfs_order, rc_order)
9465 int rcnum = n_basic_blocks - 1;
9468 /* Allocate stack for back-tracking up CFG. */
9469 stack = (edge *) xmalloc ((n_basic_blocks + 1) * sizeof (edge));
9472 /* Allocate bitmap to track nodes that have been visited. */
9473 visited = sbitmap_alloc (n_basic_blocks);
9475 /* None of the nodes in the CFG have been visited yet. */
9476 sbitmap_zero (visited);
9478 /* Push the first edge on to the stack. */
9479 stack[sp++] = ENTRY_BLOCK_PTR->succ;
9487 /* Look at the edge on the top of the stack. */
9492 /* Check if the edge destination has been visited yet. */
9493 if (dest != EXIT_BLOCK_PTR && ! TEST_BIT (visited, dest->index))
9495 /* Mark that we have visited the destination. */
9496 SET_BIT (visited, dest->index);
9499 dfs_order[dfsnum++] = dest->index;
9503 /* Since the DEST node has been visited for the first
9504 time, check its successors. */
9505 stack[sp++] = dest->succ;
9509 /* There are no successors for the DEST node so assign
9510 its reverse completion number. */
9512 rc_order[rcnum--] = dest->index;
9517 if (! e->succ_next && src != ENTRY_BLOCK_PTR)
9519 /* There are no more successors for the SRC node
9520 so assign its reverse completion number. */
9522 rc_order[rcnum--] = src->index;
9526 stack[sp - 1] = e->succ_next;
9533 sbitmap_free (visited);
9535 /* The number of nodes visited should not be greater than n_basic_blocks. */
9537 if (dfsnum > n_basic_blocks)
9540 /* There are some nodes left in the CFG that are unreachable. */
9541 if (dfsnum < n_basic_blocks)
9546 /* Perform a depth first search on the _reverse_ graph, marking the
9547 nodes visited in the search context's bitmap.
9550 The computation is split into four pieces:
9552 flow_dfs_compute_reverse_init () creates the necessary data
9555 flow_dfs_compute_reverse_add_bb () adds a basic block to the data
9556 structures. The block will start the search.
9558 flow_dfs_compute_reverse_execute () continues (or starts) the
9559 search using the block on the top of the stack, stopping when the stack is empty.
9562 flow_dfs_compute_reverse_finish () destroys the necessary data structures.
9565 Thus, the user will probably call ..._init(), call ..._add_bb() to
9566 add a beginning basic block to the stack, call ..._execute(),
9567 possibly add another bb to the stack and again call ..._execute(),
9568 ..., and finally call _finish(). */
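
/* Illustrative sketch, not part of the pass: the calling protocol
   described above, used here to find a single block that cannot reach
   the exit block, or NULL if every block can.  Compare
   connect_infinite_loops_to_exit above, which repeats the add/execute
   steps to patch every such block.  The function name is
   hypothetical.  */

static basic_block
example_block_not_reaching_exit ()
{
  struct depth_first_search_dsS dfs_ds;
  basic_block bb;

  flow_dfs_compute_reverse_init (&dfs_ds);
  flow_dfs_compute_reverse_add_bb (&dfs_ds, EXIT_BLOCK_PTR);
  bb = flow_dfs_compute_reverse_execute (&dfs_ds);
  flow_dfs_compute_reverse_finish (&dfs_ds);
  return bb;
}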
9570 /* Initialize the data structures used for depth-first search on the
9571 reverse graph. DATA is the current depth-first search context.
9572 The caller seeds the search by adding one or more starting blocks
9573 with flow_dfs_compute_reverse_add_bb. */
9577 flow_dfs_compute_reverse_init (data)
9578 depth_first_search_ds data;
9580 /* Allocate stack for back-tracking up CFG. */
9582 (basic_block *) xmalloc ((n_basic_blocks - (INVALID_BLOCK + 1))
9583 * sizeof (basic_block));
9586 /* Allocate bitmap to track nodes that have been visited. */
9587 data->visited_blocks = sbitmap_alloc (n_basic_blocks - (INVALID_BLOCK + 1));
9589 /* None of the nodes in the CFG have been visited yet. */
9590 sbitmap_zero (data->visited_blocks);
9595 /* Add the specified basic block to the top of the dfs data
9596 structures. When the search continues, it will start at the block. */
9600 flow_dfs_compute_reverse_add_bb (data, bb)
9601 depth_first_search_ds data;
9604 data->stack[data->sp++] = bb;
9608 /* Continue the depth-first search through the reverse graph starting
9609 with the block at the stack's top and ending when the stack is
9610 empty. Visited nodes are marked. Returns an unvisited basic
9611 block, or NULL if there is none available. */
9614 flow_dfs_compute_reverse_execute (data)
9615 depth_first_search_ds data;
9621 while (data->sp > 0)
9623 bb = data->stack[--data->sp];
9625 /* Mark that we have visited this node. */
9626 if (!TEST_BIT (data->visited_blocks, bb->index - (INVALID_BLOCK + 1)))
9628 SET_BIT (data->visited_blocks, bb->index - (INVALID_BLOCK + 1));
9630 /* Perform depth-first search on adjacent vertices. */
9631 for (e = bb->pred; e; e = e->pred_next)
9632 flow_dfs_compute_reverse_add_bb (data, e->src);
9636 /* Determine if there are unvisited basic blocks. */
9637 for (i = n_basic_blocks - (INVALID_BLOCK + 1); --i >= 0;)
9638 if (!TEST_BIT (data->visited_blocks, i))
9639 return BASIC_BLOCK (i + (INVALID_BLOCK + 1));
9643 /* Destroy the data structures needed for depth-first search on the reverse graph. */
9647 flow_dfs_compute_reverse_finish (data)
9648 depth_first_search_ds data;
9651 sbitmap_free (data->visited_blocks);
9656 /* Find the root node of the loop pre-header extended basic block and
9657 the edges along the trace from the root node to the loop header. */
9660 flow_loop_pre_header_scan (loop)
9666 loop->num_pre_header_edges = 0;
9668 if (loop->num_entries != 1)
9671 ebb = loop->entry_edges[0]->src;
9673 if (ebb != ENTRY_BLOCK_PTR)
9677 /* Count number of edges along trace from loop header to
9678 root of pre-header extended basic block. Usually this is
9679 only one or two edges. */
9681 while (ebb->pred->src != ENTRY_BLOCK_PTR && ! ebb->pred->pred_next)
9683 ebb = ebb->pred->src;
9687 loop->pre_header_edges = (edge *) xmalloc (num * sizeof (edge));
9688 loop->num_pre_header_edges = num;
9690 /* Store edges in the order that they are followed. The source
9691 of the first edge is the root node of the pre-header extended
9692 basic block and the destination of the last edge is the loop header. */
9694 for (e = loop->entry_edges[0]; num; e = e->src->pred)
9696 loop->pre_header_edges[--num] = e;
9702 /* Return the block for the pre-header of the loop with header
9703 HEADER where DOM specifies the dominator information. Return NULL if
9704 there is no pre-header. */
9707 flow_loop_pre_header_find (header, dom)
9711 basic_block pre_header;
9714 /* If block p is a predecessor of the header and is the only block
9715 that the header does not dominate, then it is the pre-header. */
9717 for (e = header->pred; e; e = e->pred_next)
9719 basic_block node = e->src;
9721 if (node != ENTRY_BLOCK_PTR
9722 && ! TEST_BIT (dom[node->index], header->index))
9724 if (pre_header == NULL)
9728 /* There are multiple edges into the header from outside
9729 the loop so there is no pre-header block. */
9738 /* Add LOOP to the loop hierarchy tree where PREVLOOP was the loop
9739 previously added. The insertion algorithm assumes that the loops
9740 are added in the order found by a depth first search of the CFG. */
9743 flow_loop_tree_node_add (prevloop, loop)
9744 struct loop *prevloop;
9748 if (flow_loop_nested_p (prevloop, loop))
9750 prevloop->inner = loop;
9751 loop->outer = prevloop;
9755 while (prevloop->outer)
9757 if (flow_loop_nested_p (prevloop->outer, loop))
9759 prevloop->next = loop;
9760 loop->outer = prevloop->outer;
9763 prevloop = prevloop->outer;
9766 prevloop->next = loop;
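
/* Illustrative sketch, not part of the pass: count LOOP, the siblings
   reached through its next links, and all of their inner loops, using
   the tree shape maintained by flow_loop_tree_node_add.  The function
   name is hypothetical.  */

static int
example_count_loops (loop)
     struct loop *loop;
{
  int num = 0;

  for (; loop; loop = loop->next)
    num += 1 + example_count_loops (loop->inner);
  return num;
}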
9770 /* Build the loop hierarchy tree for LOOPS. */
9773 flow_loops_tree_build (loops)
9774 struct loops *loops;
9779 num_loops = loops->num;
9783 /* Root the loop hierarchy tree with the first loop found.
9784 Since we used a depth first search this should be the outermost loop. */
9786 loops->tree_root = &loops->array[0];
9787 loops->tree_root->outer = loops->tree_root->inner = loops->tree_root->next = NULL;
9789 /* Add the remaining loops to the tree. */
9790 for (i = 1; i < num_loops; i++)
9791 flow_loop_tree_node_add (&loops->array[i - 1], &loops->array[i]);
9794 /* Helper function to compute loop nesting depth and enclosed loop level
9795 for the natural loop specified by LOOP at the loop depth DEPTH.
9796 Returns the loop level. */
9799 flow_loop_level_compute (loop, depth)
9809 /* Traverse loop tree assigning depth and computing level as the
9810 maximum level of all the inner loops of this loop. The loop
9811 level is equivalent to the height of the loop in the loop tree
9812 and corresponds to the number of enclosed loop levels (including itself). */
9814 for (inner = loop->inner; inner; inner = inner->next)
9818 ilevel = flow_loop_level_compute (inner, depth + 1) + 1;
9823 loop->level = level;
9824 loop->depth = depth;
9828 /* Compute the loop nesting depth and enclosed loop level for the loop
9829 hierarchy tree specified by LOOPS. Return the maximum enclosed loop level. */
9833 flow_loops_level_compute (loops)
9834 struct loops *loops;
9840 /* Traverse all the outer level loops. */
9841 for (loop = loops->tree_root; loop; loop = loop->next)
9843 level = flow_loop_level_compute (loop, 1);
9851 /* Scan a single natural loop specified by LOOP, collecting the
9852 information requested by FLAGS. */
9855 flow_loop_scan (loops, loop, flags)
9856 struct loops *loops;
9860 /* Determine prerequisites. */
9861 if ((flags & LOOP_EXITS_DOMS) && ! loop->exit_edges)
9862 flags |= LOOP_EXIT_EDGES;
9864 if (flags & LOOP_ENTRY_EDGES)
9866 /* Find edges which enter the loop header.
9867 Note that the entry edges should only
9868 enter the header of a natural loop. */
9870 = flow_loop_entry_edges_find (loop->header,
9872 &loop->entry_edges);
9875 if (flags & LOOP_EXIT_EDGES)
9877 /* Find edges which exit the loop. */
9879 = flow_loop_exit_edges_find (loop->nodes,
9883 if (flags & LOOP_EXITS_DOMS)
9887 /* Determine which loop nodes dominate all the exits of the loop. */
9889 loop->exits_doms = sbitmap_alloc (n_basic_blocks);
9890 sbitmap_copy (loop->exits_doms, loop->nodes);
9891 for (j = 0; j < loop->num_exits; j++)
9892 sbitmap_a_and_b (loop->exits_doms, loop->exits_doms,
9893 loops->cfg.dom[loop->exit_edges[j]->src->index]);
9895 /* The header of a natural loop must dominate all exits from the loop. */
9897 if (! TEST_BIT (loop->exits_doms, loop->header->index))
9901 if (flags & LOOP_PRE_HEADER)
9903 /* Look to see if the loop has a pre-header node. */
9905 = flow_loop_pre_header_find (loop->header, loops->cfg.dom);
9907 /* Find the blocks within the extended basic block of
9908 the loop pre-header. */
9909 flow_loop_pre_header_scan (loop);
9915 /* Find all the natural loops in the function, save them in the LOOPS
9916 structure, and recalculate the loop_depth information in the basic block structures.
9917 FLAGS controls which loop information is collected.
9918 Return the number of natural loops found. */
9921 flow_loops_find (loops, flags)
9922 struct loops *loops;
9934 /* This function cannot be repeatedly called with different
9935 flags to build up the loop information. The loop tree
9936 must always be built if this function is called. */
9937 if (! (flags & LOOP_TREE))
9940 memset (loops, 0, sizeof (*loops));
9942 /* Taking care of this degenerate case makes the rest of
9943 this code simpler. */
9944 if (n_basic_blocks == 0)
9950 /* Compute the dominators. */
9951 dom = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
9952 calculate_dominance_info (NULL, dom, CDI_DOMINATORS);
9954 /* Count the number of loop edges (back edges). This should be the
9955 same as the number of natural loops. */
9958 for (b = 0; b < n_basic_blocks; b++)
9962 header = BASIC_BLOCK (b);
9963 header->loop_depth = 0;
9965 for (e = header->pred; e; e = e->pred_next)
9967 basic_block latch = e->src;
9969 /* Look for back edges where a predecessor is dominated
9970 by this block. A natural loop has a single entry
9971 node (header) that dominates all the nodes in the
9972 loop. It also has a single back edge to the header
9973 from a latch node. Note that multiple natural loops
9974 may share the same header. */
9975 if (b != header->index)
9978 if (latch != ENTRY_BLOCK_PTR && TEST_BIT (dom[latch->index], b))
9985 /* Compute depth first search order of the CFG so that outer
9986 natural loops will be found before inner natural loops. */
9987 dfs_order = (int *) xmalloc (n_basic_blocks * sizeof (int));
9988 rc_order = (int *) xmalloc (n_basic_blocks * sizeof (int));
9989 flow_depth_first_order_compute (dfs_order, rc_order);
9991 /* Save CFG derived information to avoid recomputing it. */
9992 loops->cfg.dom = dom;
9993 loops->cfg.dfs_order = dfs_order;
9994 loops->cfg.rc_order = rc_order;
9996 /* Allocate loop structures. */
9998 = (struct loop *) xcalloc (num_loops, sizeof (struct loop));
10000 headers = sbitmap_alloc (n_basic_blocks);
10001 sbitmap_zero (headers);
10003 loops->shared_headers = sbitmap_alloc (n_basic_blocks);
10004 sbitmap_zero (loops->shared_headers);
10006 /* Find and record information about all the natural loops in the CFG. */
10009 for (b = 0; b < n_basic_blocks; b++)
10011 basic_block header;
10013 /* Search the nodes of the CFG in reverse completion order
10014 so that we can find outer loops first. */
10015 header = BASIC_BLOCK (rc_order[b]);
10017 /* Look for all the possible latch blocks for this header. */
10018 for (e = header->pred; e; e = e->pred_next)
10020 basic_block latch = e->src;
10022 /* Look for back edges where a predecessor is dominated
10023 by this block. A natural loop has a single entry
10024 node (header) that dominates all the nodes in the
10025 loop. It also has a single back edge to the header
10026 from a latch node. Note that multiple natural loops
10027 may share the same header. */
10028 if (latch != ENTRY_BLOCK_PTR
10029 && TEST_BIT (dom[latch->index], header->index))
10033 loop = loops->array + num_loops;
10035 loop->header = header;
10036 loop->latch = latch;
10037 loop->num = num_loops;
10044 for (i = 0; i < num_loops; i++)
10046 struct loop *loop = &loops->array[i];
10048 /* Keep track of blocks that are loop headers so
10049 that we can tell which loops should be merged. */
10050 if (TEST_BIT (headers, loop->header->index))
10051 SET_BIT (loops->shared_headers, loop->header->index);
10052 SET_BIT (headers, loop->header->index);
10054 /* Find nodes contained within the loop. */
10055 loop->nodes = sbitmap_alloc (n_basic_blocks);
10057 = flow_loop_nodes_find (loop->header, loop->latch, loop->nodes);
10059 /* Compute first and last blocks within the loop.
10060 These are often the same as the loop header and
10061 loop latch respectively, but this is not always the case. */
10064 = BASIC_BLOCK (sbitmap_first_set_bit (loop->nodes));
10066 = BASIC_BLOCK (sbitmap_last_set_bit (loop->nodes));
10068 flow_loop_scan (loops, loop, flags);
10071 /* Natural loops with shared headers may either be disjoint or
10072 nested. Disjoint loops with shared headers cannot be inner
10073 loops and should be merged. For now just mark loops that share headers. */
10075 for (i = 0; i < num_loops; i++)
10076 if (TEST_BIT (loops->shared_headers, loops->array[i].header->index))
10077 loops->array[i].shared = 1;
10079 sbitmap_free (headers);
10083 sbitmap_vector_free (dom);
10086 loops->num = num_loops;
10088 /* Build the loop hierarchy tree. */
10089 flow_loops_tree_build (loops);
10091 /* Assign the loop nesting depth and enclosed loop level for each loop. */
10093 loops->levels = flow_loops_level_compute (loops);
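
/* Illustrative sketch, not part of the pass: a typical client of the
   loop analysis.  It computes the natural loops together with their
   entry and exit edges, dumps them to FILE, and releases the memory
   again.  The function name is hypothetical.  */

static void
example_analyze_loops (file)
     FILE *file;
{
  struct loops loops;

  if (flow_loops_find (&loops, LOOP_TREE | LOOP_ENTRY_EDGES
		       | LOOP_EXIT_EDGES))
    flow_loops_dump (&loops, file, NULL, 0);
  flow_loops_free (&loops);
}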
10099 /* Update the information regarding the loops in the CFG
10100 specified by LOOPS. */
10102 flow_loops_update (loops, flags)
10103 struct loops *loops;
10106 /* One day we may want to update the current loop data. For now
10107 throw away the old stuff and rebuild what we need. */
10109 flow_loops_free (loops);
10111 return flow_loops_find (loops, flags);
10115 /* Return non-zero if edge E enters the header of LOOP from outside of LOOP. */
10118 flow_loop_outside_edge_p (loop, e)
10119 const struct loop *loop;
10122 if (e->dest != loop->header)
10124 return (e->src == ENTRY_BLOCK_PTR)
10125 || ! TEST_BIT (loop->nodes, e->src->index);
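
/* Illustrative sketch, not part of the pass: count the edges that
   enter LOOP from outside, using the predicate above.  For a natural
   loop this matches the number computed by flow_loop_entry_edges_find.
   The function name is hypothetical.  */

static int
example_count_outside_edges (loop)
     const struct loop *loop;
{
  edge e;
  int num = 0;

  for (e = loop->header->pred; e; e = e->pred_next)
    if (flow_loop_outside_edge_p (loop, e))
      num++;
  return num;
}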
10128 /* Clear LOG_LINKS fields of insns in a chain.
10129 Also clear the global_live_at_{start,end} fields of the basic block structures. */
10133 clear_log_links (insns)
10139 for (i = insns; i; i = NEXT_INSN (i))
10143 for (b = 0; b < n_basic_blocks; b++)
10145 basic_block bb = BASIC_BLOCK (b);
10147 bb->global_live_at_start = NULL;
10148 bb->global_live_at_end = NULL;
10151 ENTRY_BLOCK_PTR->global_live_at_end = NULL;
10152 EXIT_BLOCK_PTR->global_live_at_start = NULL;
10155 /* Given a register bitmap, turn on the bits in a HARD_REG_SET that
10156 correspond to the hard registers, if any, set in that map. This
10157 could be done far more efficiently by having all sorts of special-cases
10158 with moving single words, but probably isn't worth the trouble. */
10161 reg_set_to_hard_reg_set (to, from)
10167 EXECUTE_IF_SET_IN_BITMAP
10170 if (i >= FIRST_PSEUDO_REGISTER)
10172 SET_HARD_REG_BIT (*to, i);
10176 /* Called once at initialization time. */
10181 static int initialized;
10185 gcc_obstack_init (&flow_obstack);
10186 flow_firstobj = (char *) obstack_alloc (&flow_obstack, 0);
10191 obstack_free (&flow_obstack, flow_firstobj);
10192 flow_firstobj = (char *) obstack_alloc (&flow_obstack, 0);
10196 /* Assume that the preceding pass has possibly eliminated jump instructions
10197 or converted the unconditional jumps. Eliminate the corresponding edges from the CFG.
10198 Return true if any edges are eliminated. */
10201 purge_dead_edges (bb)
10205 rtx insn = bb->end;
10206 bool purged = false;
10208 if (GET_CODE (insn) == JUMP_INSN && !simplejump_p (insn))
10210 if (GET_CODE (insn) == JUMP_INSN)
10214 /* We care only about conditional jumps, return jumps and simplejumps. */
10215 if (!any_condjump_p (insn)
10216 && !returnjump_p (insn)
10217 && !simplejump_p (insn))
10219 for (e = bb->succ; e; e = next)
10221 next = e->succ_next;
10223 /* Check the reasons this edge can legitimately exist. */
10224 if ((e->flags & EDGE_FALLTHRU)
10225 && any_condjump_p (insn))
10227 if (e->dest != EXIT_BLOCK_PTR
10228 && e->dest->head == JUMP_LABEL (insn))
10230 if (e->dest == EXIT_BLOCK_PTR
10231 && returnjump_p (insn))
10236 if (!bb->succ || !purged)
10239 fprintf (rtl_dump_file, "Purged edges from bb %i\n", bb->index);
10243 /* Redistribute probabilities. */
10244 if (!bb->succ->succ_next)
10246 bb->succ->probability = REG_BR_PROB_BASE;
10247 bb->succ->count = bb->count;
10251 note = find_reg_note (insn, REG_BR_PROB, NULL);
10254 b = BRANCH_EDGE (bb);
10255 f = FALLTHRU_EDGE (bb);
10256 b->probability = INTVAL (XEXP (note, 0));
10257 f->probability = REG_BR_PROB_BASE - b->probability;
10258 b->count = bb->count * b->probability / REG_BR_PROB_BASE;
10259 f->count = bb->count * f->probability / REG_BR_PROB_BASE;
10264 /* Clean up abnormal edges caused by throwing insns that have been eliminated. */
10266 if (! can_throw_internal (bb->end))
10267 for (e = bb->succ; e; e = next)
10269 next = e->succ_next;
10270 if (e->flags & EDGE_EH)
10277 /* If we don't see a jump insn, we don't know exactly why the block would
10278 have been broken at this point. Look for a simple, non-fallthru edge,
10279 as these are only created by conditional branches. If we find such an
10280 edge we know that there used to be a jump here and can then safely
10281 remove all non-fallthru edges. */
10282 for (e = bb->succ; e && (e->flags & (EDGE_COMPLEX | EDGE_FALLTHRU));
10286 for (e = bb->succ; e; e = next)
10288 next = e->succ_next;
10289 if (!(e->flags & EDGE_FALLTHRU))
10290 remove_edge (e), purged = true;
10292 if (!bb->succ || bb->succ->succ_next)
10294 bb->succ->probability = REG_BR_PROB_BASE;
10295 bb->succ->count = bb->count;
10298 fprintf (rtl_dump_file, "Purged non-fallthru edges from bb %i\n",
10303 /* Search all basic blocks for potentially dead edges and purge them.
10305 Return true if some edge has been eliminated. */
10309 purge_all_dead_edges ()
10311 int i, purged = false;
10312 for (i = 0; i < n_basic_blocks; i++)
10313 purged |= purge_dead_edges (BASIC_BLOCK (i));