/* Data flow analysis for GNU compiler.
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */
/* This file contains the data flow analysis pass of the compiler.  It
   computes data flow information which tells combine_instructions
   which insns to consider combining and controls register allocation.

   Additional data flow information that is too bulky to record is
   generated during the analysis, and is used at that time to create
   autoincrement and autodecrement addressing.

   The first step is dividing the function into basic blocks.
   find_basic_blocks does this.  Then life_analysis determines
   where each register is live and where it is dead.

   ** find_basic_blocks **

   find_basic_blocks divides the current function's rtl into basic
   blocks and constructs the CFG.  The blocks are recorded in the
   basic_block_info array; the CFG exists in the edge structures
   referenced by the blocks.

   find_basic_blocks also finds any unreachable loops and deletes them.

   ** life_analysis **

   life_analysis is called immediately after find_basic_blocks.
   It uses the basic block information to determine where each
   hard or pseudo register is live.

   ** live-register info **

   The information about where each register is live is in two parts:
   the REG_NOTES of insns, and the vector basic_block->global_live_at_start.

   basic_block->global_live_at_start has an element for each basic
   block, and the element is a bit-vector with a bit for each hard or
   pseudo register.  The bit is 1 if the register is live at the
   beginning of the basic block.
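   With this representation, asking whether a given pseudo may be live on
   entry to a block is a single bit test.  A minimal sketch (REGNO_REG_SET_P
   is the real regset accessor; the register number 42 is made up):

	if (REGNO_REG_SET_P (bb->global_live_at_start, 42))
	  ...pseudo 42 may be used before being set in BB or a successor...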
   Two types of elements can be added to an insn's REG_NOTES.
   A REG_DEAD note is added to an insn's REG_NOTES for any register
   that meets both of two conditions:  The value in the register is not
   needed in subsequent insns and the insn does not replace the value in
   the register (in the case of multi-word hard registers, the value in
   each register must be replaced by the insn to avoid a REG_DEAD note).
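   In an RTL dump, a dying source operand might look like this (a purely
   illustrative fragment; the insn and register numbers are made up):

	(insn 10 9 11 (set (reg:SI 60)
		(plus:SI (reg:SI 58) (reg:SI 59))) -1 (nil)
	    (expr_list:REG_DEAD (reg:SI 59)
		(nil)))

   meaning the value in pseudo 59 is not needed after insn 10.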
   In the vast majority of cases, an object in a REG_DEAD note will be
   used somewhere in the insn.  The (rare) exception to this is if an
   insn uses a multi-word hard register and only some of the registers are
   needed in subsequent insns.  In that case, REG_DEAD notes will be
   provided for those hard registers that are not subsequently needed.
   Partial REG_DEAD notes of this type do not occur when an insn sets
   only some of the hard registers used in such a multi-word operand;
   omitting REG_DEAD notes for objects stored in an insn is optional and
   the desire to do so does not justify the complexity of the partial
   REG_DEAD notes.
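   As an illustration, if a DImode value occupies the hard register pair
   (2,3) and only the word in register 2 is needed afterwards, the last
   insn using the pair would carry just

	(expr_list:REG_DEAD (reg:SI 3)
	    (nil))

   (the register numbers here are illustrative, not target-specific).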
   REG_UNUSED notes are added for each register that is set by the insn
   but is unused subsequently (if every register set by the insn is unused
   and the insn does not reference memory or have some other side-effect,
   the insn is deleted instead).  If only part of a multi-word hard
   register is used in a subsequent insn, REG_UNUSED notes are made for
   the parts that will not be used.
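   A typical instance is an insn that also sets a condition-code register
   nobody reads; such an insn would carry a note of the form (the register
   number is again illustrative and target-specific)

	(expr_list:REG_UNUSED (reg:CC 17)
	    (nil))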
   To determine which registers are live after any insn, one can
   start from the beginning of the basic block and scan insns, noting
   which registers are set by each insn and which die there.
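   In outline, the scan looks like this (a sketch only; `live' stands for
   a regset, and the pseudo-statements abbreviate the real SET and
   REG_DEAD note processing):

	live = bb->global_live_at_start;
	for (insn = bb->head; ; insn = NEXT_INSN (insn))
	  {
	    mark in `live' every register set by INSN;
	    clear in `live' every register in a REG_DEAD note of INSN;
	    if (insn == bb->end)
	      break;
	  }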
   ** Other actions of life_analysis **

   life_analysis sets up the LOG_LINKS fields of insns because the
   information needed to do so is readily available.

   life_analysis deletes insns whose only effect is to store a value
   that is never used.

   life_analysis notices cases where a reference to a register as
   a memory address can be combined with a preceding or following
   incrementation or decrementation of the register.  The separate
   instruction to increment or decrement is deleted and the address
   is changed to a POST_INC or similar rtx.

   Each time an incrementing or decrementing address is created,
   a REG_INC element is added to the insn's REG_NOTES list.
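   For example, on a target with post-increment addressing, the pair

	(set (reg:SI 3) (mem:SI (reg:SI 2)))
	(set (reg:SI 2) (plus:SI (reg:SI 2) (const_int 4)))

   can be collapsed into the single insn

	(set (reg:SI 3) (mem:SI (post_inc:SI (reg:SI 2))))

   with a REG_INC note for register 2 added to the memory-using insn
   (a sketch; the register numbers are made up).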
   life_analysis fills in certain vectors containing information about
   register usage: REG_N_REFS, REG_N_DEATHS, REG_N_SETS, REG_LIVE_LENGTH,
   REG_N_CALLS_CROSSED and REG_BASIC_BLOCK.

   life_analysis sets current_function_sp_is_unchanging if the function
   doesn't modify the stack pointer.  */
/* TODO:

   Split out from life_analysis:
	- local property discovery (bb->local_live, bb->local_set)
	- global property computation
	- pre/post modify transformation
*/
#include "hard-reg-set.h"
#include "basic-block.h"
#include "insn-config.h"
#include "function.h"
#include "splay-tree.h"

#define obstack_chunk_alloc xmalloc
#define obstack_chunk_free free
/* EXIT_IGNORE_STACK should be nonzero if, when returning from a function,
   the stack pointer does not matter.  The value is tested only in
   functions that have frame pointers.
   No definition is equivalent to always zero.  */
#ifndef EXIT_IGNORE_STACK
#define EXIT_IGNORE_STACK 0
#endif

#ifndef HAVE_epilogue
#define HAVE_epilogue 0
#endif
#ifndef HAVE_prologue
#define HAVE_prologue 0
#endif
#ifndef HAVE_sibcall_epilogue
#define HAVE_sibcall_epilogue 0
#endif

#ifndef LOCAL_REGNO
#define LOCAL_REGNO(REGNO)  0
#endif
#ifndef EPILOGUE_USES
#define EPILOGUE_USES(REGNO)  0
#endif

#ifdef HAVE_conditional_execution
#ifndef REVERSE_CONDEXEC_PREDICATES_P
#define REVERSE_CONDEXEC_PREDICATES_P(x, y) ((x) == reverse_condition (y))
#endif
#endif
/* The obstack on which the flow graph components are allocated.  */

struct obstack flow_obstack;
static char *flow_firstobj;

/* Number of basic blocks in the current function.  */

int n_basic_blocks;

/* Number of edges in the current function.  */

int n_edges;

/* The basic block array.  */

varray_type basic_block_info;

/* The special entry and exit blocks.  */

struct basic_block_def entry_exit_blocks[2]
   NULL,			/* head_tree */
   NULL,			/* local_set */
   NULL,			/* cond_local_set */
   NULL,			/* global_live_at_start */
   NULL,			/* global_live_at_end */
   ENTRY_BLOCK,			/* index */

   NULL,			/* head_tree */
   NULL,			/* local_set */
   NULL,			/* cond_local_set */
   NULL,			/* global_live_at_start */
   NULL,			/* global_live_at_end */
   EXIT_BLOCK,			/* index */
/* Nonzero if the second flow pass has completed.  */
int flow2_completed;

/* Maximum register number used in this function, plus one.  */
int max_regno;

/* Indexed by n, giving various register information */

varray_type reg_n_info;

/* Size of a regset for the current function,
   in (1) bytes and (2) elements.  */

int regset_bytes;
int regset_size;

/* Regset of regs live when calls to `setjmp'-like functions happen.  */
/* ??? Does this exist only for the setjmp-clobbered warning message?  */

regset regs_live_at_setjmp;
/* List made of EXPR_LIST rtx's which gives pairs of pseudo registers
   that have to go in the same hard reg.
   The first two regs in the list are a pair, and the next two
   are another pair, etc.  */
rtx regs_may_share;

/* Callback that determines if it's ok for a function to have no
   noreturn attribute.  */
int (*lang_missing_noreturn_ok_p) PARAMS ((tree));
/* Set of registers that may be eliminable.  These are handled specially
   in updating regs_ever_live.  */

static HARD_REG_SET elim_reg_set;

/* The basic block structure for every insn, indexed by uid.  */

varray_type basic_block_for_insn;

/* The labels mentioned in non-jump rtl.  Valid during find_basic_blocks.  */
/* ??? Should probably be using LABEL_NUSES instead.  It would take a
   bit of surgery to be able to use or co-opt the routines in jump.  */

static rtx label_value_list;
static rtx tail_recursion_label_list;
/* Holds information for tracking conditional register life information.  */
struct reg_cond_life_info
{
  /* A boolean expression of conditions under which a register is dead.  */
  rtx condition;
  /* Conditions under which a register is dead at the basic block end.  */
  rtx orig_condition;

  /* A boolean expression of conditions under which a register has been
     stored into.  */
  rtx stores;

  /* ??? Could store mask of bytes that are dead, so that we could finally
     track lifetimes of multi-word registers accessed via subregs.  */
};
/* For use in communicating between propagate_block and its subroutines.
   Holds all information needed to compute life and def-use information.  */

struct propagate_block_info
{
  /* The basic block we're considering.  */
  basic_block bb;

  /* Bit N is set if register N is conditionally or unconditionally live.  */
  regset reg_live;

  /* Bit N is set if register N is set this insn.  */
  regset new_set;

  /* Element N is the next insn that uses (hard or pseudo) register N
     within the current basic block; or zero, if there is no such insn.  */
  rtx *reg_next_use;

  /* Contains a list of all the MEMs we are tracking for dead store
     elimination.  */
  rtx mem_set_list;

  /* If non-null, record the set of registers set unconditionally in the
     basic block.  */
  regset local_set;

  /* If non-null, record the set of registers set conditionally in the
     basic block.  */
  regset cond_local_set;

#ifdef HAVE_conditional_execution
  /* Indexed by register number, holds a reg_cond_life_info for each
     register that is not unconditionally live or dead.  */
  splay_tree reg_cond_dead;

  /* Bit N is set if register N is in an expression in reg_cond_dead.  */
  regset reg_cond_reg;
#endif

  /* The length of mem_set_list.  */
  int mem_set_list_len;

  /* Non-zero if the value of CC0 is live.  */
  int cc0_live;

  /* Flags controlling the set of information propagate_block collects.  */
  int flags;
};

/* Maximum length of pbi->mem_set_list before we start dropping
   new elements on the floor.  */
#define MAX_MEM_SET_LIST_LEN	100
/* Store the data structures necessary for depth-first search.  */
struct depth_first_search_dsS {
  /* stack for backtracking during the algorithm */
  basic_block *stack;

  /* number of edges in the stack.  That is, positions 0, ..., sp-1
     have edges.  */
  int sp;

  /* record of basic blocks already seen by depth-first search */
  sbitmap visited_blocks;
};
typedef struct depth_first_search_dsS *depth_first_search_ds;
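/* A sketch of the intended calling sequence for the DFS helpers declared
   below (a hedged reconstruction from the declarations, not a verbatim
   caller from this file); BB stands for some starting basic block:

	struct depth_first_search_dsS dfs;

	flow_dfs_compute_reverse_init (&dfs);
	flow_dfs_compute_reverse_add_bb (&dfs, bb);
	while ((bb = flow_dfs_compute_reverse_execute (&dfs)) != NULL)
	  ...BB is a block newly reached by walking predecessor edges...;
	flow_dfs_compute_reverse_finish (&dfs);  */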
/* Have print_rtl_and_abort give the same information that fancy_abort
   does.  */
#define print_rtl_and_abort() \
  print_rtl_and_abort_fcn (__FILE__, __LINE__, __FUNCTION__)
/* Forward declarations */
static bool try_crossjump_to_edge	PARAMS ((int, edge, edge));
static bool try_crossjump_bb		PARAMS ((int, basic_block));
static bool outgoing_edges_match	PARAMS ((basic_block, basic_block));
static int flow_find_cross_jump		PARAMS ((int, basic_block, basic_block,
static int count_basic_blocks		PARAMS ((rtx));
static void find_basic_blocks_1		PARAMS ((rtx));
static rtx find_label_refs		PARAMS ((rtx, rtx));
static void make_edges			PARAMS ((rtx, int, int, int));
static void make_label_edge		PARAMS ((sbitmap *, basic_block,
static void make_eh_edge		PARAMS ((sbitmap *, basic_block, rtx));
static void commit_one_edge_insertion	PARAMS ((edge));
static void delete_unreachable_blocks	PARAMS ((void));
static int can_delete_note_p		PARAMS ((rtx));
static void expunge_block		PARAMS ((basic_block));
static int can_delete_label_p		PARAMS ((rtx));
static int tail_recursion_label_p	PARAMS ((rtx));
static int merge_blocks_move_predecessor_nojumps PARAMS ((basic_block,
static int merge_blocks_move_successor_nojumps PARAMS ((basic_block,
static int merge_blocks			PARAMS ((edge,basic_block,basic_block,
static bool try_optimize_cfg		PARAMS ((int));
static bool can_fallthru		PARAMS ((basic_block, basic_block));
static bool try_redirect_by_replacing_jump PARAMS ((edge, basic_block));
static bool try_simplify_condjump	PARAMS ((basic_block));
static bool try_forward_edges		PARAMS ((int, basic_block));
static void tidy_fallthru_edges		PARAMS ((void));
static int verify_wide_reg_1		PARAMS ((rtx *, void *));
static void verify_wide_reg		PARAMS ((int, rtx, rtx));
static void verify_local_live_at_start	PARAMS ((regset, basic_block));
static void notice_stack_pointer_modification_1 PARAMS ((rtx, rtx, void *));
static void notice_stack_pointer_modification PARAMS ((rtx));
static void mark_reg			PARAMS ((rtx, void *));
static void mark_regs_live_at_end	PARAMS ((regset));
static int set_phi_alternative_reg	PARAMS ((rtx, int, int, void *));
static void calculate_global_regs_live	PARAMS ((sbitmap, sbitmap, int));
static void propagate_block_delete_insn PARAMS ((basic_block, rtx));
static rtx propagate_block_delete_libcall PARAMS ((basic_block, rtx, rtx));
static int insn_dead_p			PARAMS ((struct propagate_block_info *,
static int libcall_dead_p		PARAMS ((struct propagate_block_info *,
static void mark_set_regs		PARAMS ((struct propagate_block_info *,
static void mark_set_1			PARAMS ((struct propagate_block_info *,
					 enum rtx_code, rtx, rtx,
#ifdef HAVE_conditional_execution
static int mark_regno_cond_dead		PARAMS ((struct propagate_block_info *,
static void free_reg_cond_life_info	PARAMS ((splay_tree_value));
static int flush_reg_cond_reg_1		PARAMS ((splay_tree_node, void *));
static void flush_reg_cond_reg		PARAMS ((struct propagate_block_info *,
static rtx elim_reg_cond		PARAMS ((rtx, unsigned int));
static rtx ior_reg_cond			PARAMS ((rtx, rtx, int));
static rtx not_reg_cond			PARAMS ((rtx));
static rtx and_reg_cond			PARAMS ((rtx, rtx, int));
#endif
static void attempt_auto_inc		PARAMS ((struct propagate_block_info *,
					 rtx, rtx, rtx, rtx, rtx));
static void find_auto_inc		PARAMS ((struct propagate_block_info *,
static int try_pre_increment_1		PARAMS ((struct propagate_block_info *,
static int try_pre_increment		PARAMS ((rtx, rtx, HOST_WIDE_INT));
static void mark_used_reg		PARAMS ((struct propagate_block_info *,
static void mark_used_regs		PARAMS ((struct propagate_block_info *,
void dump_flow_info			PARAMS ((FILE *));
void debug_flow_info			PARAMS ((void));
static void print_rtl_and_abort_fcn	PARAMS ((const char *, int,
static void add_to_mem_set_list		PARAMS ((struct propagate_block_info *,
static void invalidate_mems_from_autoinc PARAMS ((struct propagate_block_info *,
static void invalidate_mems_from_set	PARAMS ((struct propagate_block_info *,
static void remove_fake_successors	PARAMS ((basic_block));
static void flow_nodes_print		PARAMS ((const char *, const sbitmap,
static void flow_edge_list_print	PARAMS ((const char *, const edge *,
static void flow_loops_cfg_dump		PARAMS ((const struct loops *,
static int flow_loop_nested_p		PARAMS ((struct loop *,
static int flow_loop_entry_edges_find	PARAMS ((basic_block, const sbitmap,
static int flow_loop_exit_edges_find	PARAMS ((const sbitmap, edge **));
static int flow_loop_nodes_find	PARAMS ((basic_block, basic_block, sbitmap));
static void flow_dfs_compute_reverse_init
  PARAMS ((depth_first_search_ds));
static void flow_dfs_compute_reverse_add_bb
  PARAMS ((depth_first_search_ds, basic_block));
static basic_block flow_dfs_compute_reverse_execute
  PARAMS ((depth_first_search_ds));
static void flow_dfs_compute_reverse_finish
  PARAMS ((depth_first_search_ds));
static void flow_loop_pre_header_scan PARAMS ((struct loop *));
static basic_block flow_loop_pre_header_find PARAMS ((basic_block,
static void flow_loop_tree_node_add	PARAMS ((struct loop *, struct loop *));
static void flow_loops_tree_build	PARAMS ((struct loops *));
static int flow_loop_level_compute	PARAMS ((struct loop *, int));
static int flow_loops_level_compute	PARAMS ((struct loops *));
static void delete_dead_jumptables	PARAMS ((void));
static bool back_edge_of_syntactic_loop_p PARAMS ((basic_block, basic_block));
static bool need_fake_edge_p		PARAMS ((rtx));
/* Find basic blocks of the current function.
   F is the first insn of the function and NREGS the number of register
   numbers in use.  */

void
find_basic_blocks (f, nregs, file)
     rtx f;
     int nregs ATTRIBUTE_UNUSED;
     FILE *file ATTRIBUTE_UNUSED;
{
  int max_uid;

  timevar_push (TV_CFG);

  /* Flush out existing data.  */
  if (basic_block_info != NULL)
    {
      int i;

      /* Clear bb->aux on all extant basic blocks.  We'll use this as a
	 tag for reuse during create_basic_block, just in case some pass
	 copies around basic block notes improperly.  */
      for (i = 0; i < n_basic_blocks; ++i)
	BASIC_BLOCK (i)->aux = NULL;

      VARRAY_FREE (basic_block_info);
    }

  n_basic_blocks = count_basic_blocks (f);

  /* Size the basic block table.  The actual structures will be allocated
     by find_basic_blocks_1, since we want to keep the structure pointers
     stable across calls to find_basic_blocks.  */
  /* ??? This whole issue would be much simpler if we called find_basic_blocks
     exactly once, and thereafter we don't have a single long chain of
     instructions at all until close to the end of compilation when we
     actually lay them out.  */

  VARRAY_BB_INIT (basic_block_info, n_basic_blocks, "basic_block_info");

  find_basic_blocks_1 (f);

  /* Record the block to which an insn belongs.  */
  /* ??? This should be done another way, by which (perhaps) a label is
     tagged directly with the basic block that it starts.  It is used for
     more than that currently, but IMO that is the only valid use.  */

  max_uid = get_max_uid ();

  /* Leave space for insns life_analysis makes in some cases for auto-inc.
     These cases are rare, so we don't need too much space.  */
  max_uid += max_uid / 10;

  compute_bb_for_insn (max_uid);

  /* Discover the edges of our cfg.  */
  make_edges (label_value_list, 0, n_basic_blocks - 1, 0);

  /* Do very simple cleanup now, for the benefit of code that runs between
     here and cleanup_cfg, e.g. thread_prologue_and_epilogue_insns.  */
  tidy_fallthru_edges ();

  mark_critical_edges ();

#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif

  timevar_pop (TV_CFG);
}
void
check_function_return_warnings ()
{
  if (warn_missing_noreturn
      && !TREE_THIS_VOLATILE (cfun->decl)
      && EXIT_BLOCK_PTR->pred == NULL
      && (lang_missing_noreturn_ok_p
	  && !lang_missing_noreturn_ok_p (cfun->decl)))
    warning ("function might be possible candidate for attribute `noreturn'");

  /* If we have a path to EXIT, then we do return.  */
  if (TREE_THIS_VOLATILE (cfun->decl)
      && EXIT_BLOCK_PTR->pred != NULL)
    warning ("`noreturn' function does return");

  /* If the clobber_return_insn appears in some basic block, then we
     do reach the end without returning a value.  */
  else if (warn_return_type
	   && cfun->x_clobber_return_insn != NULL
	   && EXIT_BLOCK_PTR->pred != NULL)
    {
      int max_uid = get_max_uid ();

      /* If clobber_return_insn was excised by jump1, then renumber_insns
	 can make max_uid smaller than the number still recorded in our rtx.
	 That's fine, since this is a quick way of verifying that the insn
	 is no longer in the chain.  */
      if (INSN_UID (cfun->x_clobber_return_insn) < max_uid)
	{
	  /* Recompute insn->block mapping, since the initial mapping is
	     set before we delete unreachable blocks.  */
	  compute_bb_for_insn (max_uid);

	  if (BLOCK_FOR_INSN (cfun->x_clobber_return_insn) != NULL)
	    warning ("control reaches end of non-void function");
	}
    }
}
/* Count the basic blocks of the function.  */

static int
count_basic_blocks (f)
     rtx f;
{
  register rtx insn;
  register RTX_CODE prev_code;
  register int count = 0;
  int saw_abnormal_edge = 0;

  prev_code = JUMP_INSN;
  for (insn = f; insn; insn = NEXT_INSN (insn))
    {
      enum rtx_code code = GET_CODE (insn);

      if (code == CODE_LABEL
	  || (GET_RTX_CLASS (code) == 'i'
	      && (prev_code == JUMP_INSN
		  || prev_code == BARRIER
		  || saw_abnormal_edge)))
	{
	  saw_abnormal_edge = 0;
	  count++;
	}

      /* Record whether this insn created an edge.  */
      if (code == CALL_INSN)
	{
	  rtx note;

	  /* If there is a nonlocal goto label and the specified
	     region number isn't -1, we have an edge.  */
	  if (nonlocal_goto_handler_labels
	      && ((note = find_reg_note (insn, REG_EH_REGION, NULL_RTX)) == 0
		  || INTVAL (XEXP (note, 0)) >= 0))
	    saw_abnormal_edge = 1;

	  else if (can_throw_internal (insn))
	    saw_abnormal_edge = 1;
	}
      else if (flag_non_call_exceptions
	       && can_throw_internal (insn))
	saw_abnormal_edge = 1;
    }

  /* The rest of the compiler works a bit more smoothly when we don't have to
     check for the edge case of do-nothing functions with no basic blocks.  */
  if (count == 0)
    {
      emit_insn (gen_rtx_USE (VOIDmode, const0_rtx));
      count = 1;
    }

  return count;
}
/* Scan a list of insns for labels referred to other than by jumps.
   This is used to scan the alternatives of a call placeholder.  */

static rtx
find_label_refs (f, lvl)
     rtx f, lvl;
{
  rtx insn;

  for (insn = f; insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn) && GET_CODE (insn) != JUMP_INSN)
      {
	rtx note;

	/* Make a list of all labels referred to other than by jumps
	   (which just don't have the REG_LABEL notes).

	   Make a special exception for labels followed by an ADDR*VEC,
	   as this would be a part of the tablejump setup code.

	   Make a special exception to registers loaded with label
	   values just before jump insns that use them.  */

	for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
	  if (REG_NOTE_KIND (note) == REG_LABEL)
	    {
	      rtx lab = XEXP (note, 0), next;

	      if ((next = next_nonnote_insn (lab)) != NULL
		  && GET_CODE (next) == JUMP_INSN
		  && (GET_CODE (PATTERN (next)) == ADDR_VEC
		      || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
		;
	      else if (GET_CODE (lab) == NOTE)
		;
	      else if (GET_CODE (NEXT_INSN (insn)) == JUMP_INSN
		       && find_reg_note (NEXT_INSN (insn), REG_LABEL, lab))
		;
	      else
		lvl = alloc_EXPR_LIST (0, XEXP (note, 0), lvl);
	    }
      }

  return lvl;
}
/* Assume that someone emitted code with control flow instructions to the
   basic block.  Update the data structure.  */

void
find_sub_basic_blocks (bb)
     basic_block bb;
{
  rtx insn = bb->head;
  rtx end = bb->end;
  rtx jump_insn = NULL_RTX;
  edge falltru = 0;
  basic_block first_bb = bb;
  int i;

  if (GET_CODE (insn) == CODE_LABEL)
    insn = NEXT_INSN (insn);

  /* Scan insn chain and try to find new basic block boundaries.  */
  while (insn != end)
    {
      enum rtx_code code = GET_CODE (insn);

      switch (code)
	{
	case CODE_LABEL:
	  /* On code label, split current basic block.  */
	  falltru = split_block (bb, PREV_INSN (insn));
	  bb = falltru->dest;
	  remove_edge (falltru);

	  if (LABEL_ALTERNATE_NAME (insn))
	    make_edge (NULL, ENTRY_BLOCK_PTR, bb, 0);
	  break;

	case INSN:
	case JUMP_INSN:
	  /* In case we've previously split an insn on the JUMP_INSN, move
	     the block header to the proper place.  */
	  if (jump_insn)
	    {
	      falltru = split_block (bb, PREV_INSN (insn));
	      bb = falltru->dest;
	      remove_edge (falltru);
	      jump_insn = NULL_RTX;
	    }

	  /* We need some special care for those expressions.  */
	  if (GET_CODE (insn) == JUMP_INSN)
	    {
	      if (GET_CODE (PATTERN (insn)) == ADDR_VEC
		  || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
		abort ();
	      jump_insn = insn;
	    }
	  break;

	default:
	  break;
	}

      insn = NEXT_INSN (insn);
    }

  /* In case the expander replaced a normal insn by a sequence terminating
     with a return and a barrier, or possibly some other sequence not
     behaving like an ordinary jump, we need to take care and move the
     basic block boundary.  */
  if (jump_insn && GET_CODE (bb->end) != JUMP_INSN)
    bb->end = jump_insn;

  /* We've possibly replaced the conditional jump by a conditional jump
     followed by a cleanup at the fallthru edge, so the outgoing edges may
     be dead.  */
  purge_dead_edges (bb);

  /* Now re-scan and wire in all edges.  This expects simple (conditional)
     jumps at the end of each new basic block.  */
  make_edges (NULL, first_bb->index, bb->index, 1);

  /* Update branch probabilities.  Expect only (un)conditional jumps
     to be created with only the forward edges.  */
  for (i = first_bb->index; i <= bb->index; i++)
    {
      edge e, f;
      int probability;
      basic_block b = BASIC_BLOCK (i);

      for (e = b->pred; e; e = e->pred_next)
	{
	  b->count += e->count;
	  b->frequency += EDGE_FREQUENCY (e);
	}

      if (b->succ && b->succ->succ_next && !b->succ->succ_next->succ_next)
	{
	  rtx note = find_reg_note (b->end, REG_BR_PROB, NULL);

	  probability = INTVAL (XEXP (find_reg_note (b->end,
						     REG_BR_PROB, NULL), 0));
	  e = BRANCH_EDGE (b);
	  e->probability = probability;
	  e->count = ((b->count * probability + REG_BR_PROB_BASE / 2)
		      / REG_BR_PROB_BASE);
	  f = FALLTHRU_EDGE (b);
	  f->probability = REG_BR_PROB_BASE - probability;
	  f->count = b->count - e->count;
	}
      if (b->succ && !b->succ->succ_next)
	{
	  e = b->succ;
	  e->probability = REG_BR_PROB_BASE;
	  e->count = b->count;
	}
    }
}
/* Find all basic blocks of the function whose first insn is F.

   Collect and return a list of labels whose addresses are taken.  This
   will be used in make_edges for use with computed gotos.  */

static void
find_basic_blocks_1 (f)
     rtx f;
{
  register rtx insn, next;
  int i = 0;
  rtx bb_note = NULL_RTX;
  rtx lvl = NULL_RTX;
  rtx trll = NULL_RTX;
  rtx head = NULL_RTX;
  rtx end = NULL_RTX;

  /* We process the instructions in a slightly different way than we did
     previously.  This is so that we see a NOTE_BASIC_BLOCK after we have
     closed out the previous block, so that it gets attached at the proper
     place.  Since this form should be equivalent to the previous,
     count_basic_blocks continues to use the old form as a check.  */

  for (insn = f; insn; insn = next)
    {
      enum rtx_code code = GET_CODE (insn);

      next = NEXT_INSN (insn);

      switch (code)
	{
	case NOTE:
	  {
	    int kind = NOTE_LINE_NUMBER (insn);

	    /* Look for basic block notes with which to keep the
	       basic_block_info pointers stable.  Unthread the note now;
	       we'll put it back at the right place in create_basic_block.
	       Or not at all if we've already found a note in this block.  */
	    if (kind == NOTE_INSN_BASIC_BLOCK)
	      {
		if (bb_note == NULL_RTX)
		  bb_note = insn;
		else
		  next = flow_delete_insn (insn);
	      }
	    break;
	  }

	case CODE_LABEL:
	  /* A basic block starts at a label.  If we've closed one off due
	     to a barrier or some such, no need to do it again.  */
	  if (head != NULL_RTX)
	    {
	      create_basic_block (i++, head, end, bb_note);
	      bb_note = NULL_RTX;
	    }
	  head = end = insn;
	  break;

	case JUMP_INSN:
	  /* A basic block ends at a jump.  */
	  if (head == NULL_RTX)
	    head = insn;
	  end = insn;

	  /* ??? Make a special check for table jumps.  The way this
	     happens is truly and amazingly gross.  We are about to
	     create a basic block that contains just a code label and
	     an addr*vec jump insn.  Worse, an addr_diff_vec creates
	     its own natural loop.

	     Prevent this bit of brain damage, pasting things together
	     correctly in make_edges.

	     The correct solution involves emitting the table directly
	     on the tablejump instruction as a note, or JUMP_LABEL.  */
	  if (GET_CODE (PATTERN (insn)) == ADDR_VEC
	      || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)

	  goto new_bb_inclusive;

	case BARRIER:
	  /* A basic block ends at a barrier.  It may be that an unconditional
	     jump already closed the basic block -- no need to do it again.  */
	  if (head == NULL_RTX)
	    continue;
	  goto new_bb_exclusive;

	case CALL_INSN:
	  {
	    /* Record whether this call created an edge.  */
	    rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
	    int region = (note ? INTVAL (XEXP (note, 0)) : 0);

	    if (GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
	      {
		/* Scan each of the alternatives for label refs.  */
		lvl = find_label_refs (XEXP (PATTERN (insn), 0), lvl);
		lvl = find_label_refs (XEXP (PATTERN (insn), 1), lvl);
		lvl = find_label_refs (XEXP (PATTERN (insn), 2), lvl);
		/* Record its tail recursion label, if any.  */
		if (XEXP (PATTERN (insn), 3) != NULL_RTX)
		  trll = alloc_EXPR_LIST (0, XEXP (PATTERN (insn), 3), trll);
	      }

	    /* A basic block ends at a call that can either throw or
	       do a non-local goto.  */
	    if ((nonlocal_goto_handler_labels && region >= 0)
		|| can_throw_internal (insn))
	      {
	      new_bb_inclusive:
		if (head == NULL_RTX)
		  head = insn;
		end = insn;

	      new_bb_exclusive:
		create_basic_block (i++, head, end, bb_note);
		head = end = NULL_RTX;
		bb_note = NULL_RTX;
		break;
	      }
	  }
	  /* Fall through.  */

	default:
	  /* Non-call exceptions generate new blocks just like calls.  */
	  if (flag_non_call_exceptions && can_throw_internal (insn))
	    goto new_bb_inclusive;

	  if (head == NULL_RTX)
	    head = insn;
	  end = insn;
	  break;
	}

      if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN)
	{
	  rtx note;

	  /* Make a list of all labels referred to other than by jumps.

	     Make a special exception for labels followed by an ADDR*VEC,
	     as this would be a part of the tablejump setup code.

	     Make a special exception to registers loaded with label
	     values just before jump insns that use them.  */

	  for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
	    if (REG_NOTE_KIND (note) == REG_LABEL)
	      {
		rtx lab = XEXP (note, 0), next;

		if ((next = next_nonnote_insn (lab)) != NULL
		    && GET_CODE (next) == JUMP_INSN
		    && (GET_CODE (PATTERN (next)) == ADDR_VEC
			|| GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
		  ;
		else if (GET_CODE (lab) == NOTE)
		  ;
		else if (GET_CODE (NEXT_INSN (insn)) == JUMP_INSN
			 && find_reg_note (NEXT_INSN (insn), REG_LABEL, lab))
		  ;
		else
		  lvl = alloc_EXPR_LIST (0, XEXP (note, 0), lvl);
	      }
	}
    }

  if (head != NULL_RTX)
    create_basic_block (i++, head, end, bb_note);
  else if (bb_note)
    flow_delete_insn (bb_note);

  if (i != n_basic_blocks)
    abort ();

  label_value_list = lvl;
  tail_recursion_label_list = trll;
}
/* Tidy the CFG by deleting unreachable code and whatnot.  */

void
cleanup_cfg (mode)
     int mode;
{
  timevar_push (TV_CLEANUP_CFG);
  delete_unreachable_blocks ();
  if (try_optimize_cfg (mode))
    delete_unreachable_blocks ();
  mark_critical_edges ();

  /* Kill the data we won't maintain.  */
  free_EXPR_LIST_list (&label_value_list);
  free_EXPR_LIST_list (&tail_recursion_label_list);
  timevar_pop (TV_CLEANUP_CFG);
}
/* Create a new basic block consisting of the instructions between
   HEAD and END inclusive.  Reuses the note and basic block struct
   in BB_NOTE, if any.  */

void
create_basic_block (index, head, end, bb_note)
     int index;
     rtx head, end, bb_note;
{
  basic_block bb;

  if (bb_note
      && ! RTX_INTEGRATED_P (bb_note)
      && (bb = NOTE_BASIC_BLOCK (bb_note)) != NULL
      && bb->aux == NULL)
    {
      /* If we found an existing note, thread it back onto the chain.  */

      rtx after;

      if (GET_CODE (head) == CODE_LABEL)
	after = head;
      else
	{
	  after = PREV_INSN (head);
	  head = bb_note;
	}

      if (after != bb_note && NEXT_INSN (after) != bb_note)
	reorder_insns (bb_note, bb_note, after);
    }
  else
    {
      /* Otherwise we must create a note and a basic block structure.
	 Since we allow basic block structs in rtl, give the struct
	 the same lifetime by allocating it off the function obstack
	 rather than using malloc.  */

      bb = (basic_block) obstack_alloc (&flow_obstack, sizeof (*bb));
      memset (bb, 0, sizeof (*bb));

      if (GET_CODE (head) == CODE_LABEL)
	bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK, head);
      else
	{
	  bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK, head);
	  head = bb_note;
	}
      NOTE_BASIC_BLOCK (bb_note) = bb;
    }

  /* Always include the bb note in the block.  */
  if (NEXT_INSN (end) == bb_note)
    end = bb_note;

  bb->head = head;
  bb->end = end;
  bb->index = index;
  BASIC_BLOCK (index) = bb;

  /* Tag the block so that we know it has been used when considering
     other basic block notes.  */
  bb->aux = bb;
}
/* Return the INSN immediately following the NOTE_INSN_BASIC_BLOCK
   note associated with the BLOCK.  */

rtx
first_insn_after_basic_block_note (block)
     basic_block block;
{
  rtx insn;

  /* Get the first instruction in the block.  */
  insn = block->head;

  if (insn == NULL_RTX)
    return NULL_RTX;
  if (GET_CODE (insn) == CODE_LABEL)
    insn = NEXT_INSN (insn);
  if (!NOTE_INSN_BASIC_BLOCK_P (insn))
    abort ();

  return NEXT_INSN (insn);
}

/* Records the basic block struct in BB_FOR_INSN, for every instruction
   indexed by INSN_UID.  MAX is the size of the array.  */

void
compute_bb_for_insn (max)
     int max;
{
  int i;

  if (basic_block_for_insn)
    VARRAY_FREE (basic_block_for_insn);
  VARRAY_BB_INIT (basic_block_for_insn, max, "basic_block_for_insn");

  for (i = 0; i < n_basic_blocks; ++i)
    {
      basic_block bb = BASIC_BLOCK (i);
      rtx insn, end;

      end = bb->end;
      insn = bb->head;
      while (1)
	{
	  int uid = INSN_UID (insn);
	  if (uid < max)
	    VARRAY_BB (basic_block_for_insn, uid) = bb;
	  if (insn == end)
	    break;
	  insn = NEXT_INSN (insn);
	}
    }
}
/* Free the memory associated with the edge structures.  */

void
clear_edges ()
{
  int i;
  edge n, e;

  for (i = 0; i < n_basic_blocks; ++i)
    {
      basic_block bb = BASIC_BLOCK (i);

      for (e = bb->succ; e; e = n)
	{
	  n = e->succ_next;
	  free (e);
	}

      bb->succ = NULL;
      bb->pred = NULL;
    }

  for (e = ENTRY_BLOCK_PTR->succ; e; e = n)
    {
      n = e->succ_next;
      free (e);
    }

  ENTRY_BLOCK_PTR->succ = 0;
  EXIT_BLOCK_PTR->pred = 0;
}
/* Identify the edges between basic blocks MIN to MAX.

   NONLOCAL_LABEL_LIST is a list of non-local labels in the function.  Blocks
   that are otherwise unreachable may be reachable with a non-local goto.

   BB_EH_END is an array indexed by basic block number in which we record
   the list of exception regions active at the end of the basic block.  */

static void
make_edges (label_value_list, min, max, update_p)
     rtx label_value_list;
     int min, max, update_p;
{
  int i;
  sbitmap *edge_cache = NULL;

  /* Assume no computed jump; revise as we create edges.  */
  current_function_has_computed_jump = 0;

  /* Heavy use of computed goto in machine-generated code can lead to
     nearly fully-connected CFGs.  In that case we spend a significant
     amount of time searching the edge lists for duplicates.  */
  if (forced_labels || label_value_list)
    {
      edge_cache = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
      sbitmap_vector_zero (edge_cache, n_basic_blocks);

      if (update_p)
	for (i = min; i <= max; ++i)
	  {
	    edge e;

	    for (e = BASIC_BLOCK (i)->succ; e ; e = e->succ_next)
	      if (e->dest != EXIT_BLOCK_PTR)
		SET_BIT (edge_cache[i], e->dest->index);
	  }
    }

  /* By nature of the way these get numbered, block 0 is always the entry.  */
  make_edge (edge_cache, ENTRY_BLOCK_PTR, BASIC_BLOCK (0), EDGE_FALLTHRU);

  for (i = min; i <= max; ++i)
    {
      basic_block bb = BASIC_BLOCK (i);
      rtx insn, x;
      enum rtx_code code;
      int force_fallthru = 0;

      if (GET_CODE (bb->head) == CODE_LABEL
	  && LABEL_ALTERNATE_NAME (bb->head))
	make_edge (NULL, ENTRY_BLOCK_PTR, bb, 0);

      /* Examine the last instruction of the block, and discover the
	 ways we can leave the block.  */

      insn = bb->end;
      code = GET_CODE (insn);

      /* A branch.  */
      if (code == JUMP_INSN)
	{
	  rtx tmp;

	  /* Recognize exception handling placeholders.  */
	  if (GET_CODE (PATTERN (insn)) == RESX)
	    make_eh_edge (edge_cache, bb, insn);

	  /* Recognize a non-local goto as a branch outside the
	     current function.  */
	  else if (find_reg_note (insn, REG_NON_LOCAL_GOTO, NULL_RTX))
	    ;

	  /* ??? Recognize a tablejump and do the right thing.  */
	  else if ((tmp = JUMP_LABEL (insn)) != NULL_RTX
		   && (tmp = NEXT_INSN (tmp)) != NULL_RTX
		   && GET_CODE (tmp) == JUMP_INSN
		   && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
		       || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
	    {
	      rtvec vec;
	      int j;

	      if (GET_CODE (PATTERN (tmp)) == ADDR_VEC)
		vec = XVEC (PATTERN (tmp), 0);
	      else
		vec = XVEC (PATTERN (tmp), 1);

	      for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
		make_label_edge (edge_cache, bb,
				 XEXP (RTVEC_ELT (vec, j), 0), 0);

	      /* Some targets (eg, ARM) emit a conditional jump that also
		 contains the out-of-range target.  Scan for these and
		 add an edge if necessary.  */
	      if ((tmp = single_set (insn)) != NULL
		  && SET_DEST (tmp) == pc_rtx
		  && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
		  && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF)
		make_label_edge (edge_cache, bb,
				 XEXP (XEXP (SET_SRC (tmp), 2), 0), 0);

#ifdef CASE_DROPS_THROUGH
	      /* Silly VAXen.  The ADDR_VEC is going to be in the way of
		 us naturally detecting fallthru into the next block.  */
	      force_fallthru = 1;
#endif
	    }

	  /* If this is a computed jump, then mark it as reaching
	     everything on the label_value_list and forced_labels list.  */
	  else if (computed_jump_p (insn))
	    {
	      current_function_has_computed_jump = 1;

	      for (x = label_value_list; x; x = XEXP (x, 1))
		make_label_edge (edge_cache, bb, XEXP (x, 0), EDGE_ABNORMAL);

	      for (x = forced_labels; x; x = XEXP (x, 1))
		make_label_edge (edge_cache, bb, XEXP (x, 0), EDGE_ABNORMAL);
	    }

	  /* A return insn creates an exit edge.  */
	  else if (returnjump_p (insn))
	    make_edge (edge_cache, bb, EXIT_BLOCK_PTR, 0);

	  /* Otherwise, we have a plain conditional or unconditional jump.  */
	  else
	    {
	      if (! JUMP_LABEL (insn))
		abort ();
	      make_label_edge (edge_cache, bb, JUMP_LABEL (insn), 0);
	    }
	}

      /* If this is a sibling call insn, then this is in effect a
	 combined call and return, and so we need an edge to the
	 exit block.  No need to worry about EH edges, since we
	 wouldn't have created the sibling call in the first place.  */

      if (code == CALL_INSN && SIBLING_CALL_P (insn))
	make_edge (edge_cache, bb, EXIT_BLOCK_PTR,
		   EDGE_ABNORMAL | EDGE_ABNORMAL_CALL);

      /* If this is a CALL_INSN, then mark it as reaching the active EH
	 handler for this CALL_INSN.  If we're handling non-call
	 exceptions then any insn can reach any of the active handlers.

	 Also mark the CALL_INSN as reaching any nonlocal goto handler.  */

      else if (code == CALL_INSN || flag_non_call_exceptions)
	{
	  /* Add any appropriate EH edges.  */
	  make_eh_edge (edge_cache, bb, insn);

	  if (code == CALL_INSN && nonlocal_goto_handler_labels)
	    {
	      /* ??? This could be made smarter: in some cases it's possible
		 to tell that certain calls will not do a nonlocal goto.

		 For example, if the nested functions that do the nonlocal
		 gotos do not have their addresses taken, then only calls to
		 those functions or to other nested functions that use them
		 could possibly do nonlocal gotos.  */
	      /* We do know that a REG_EH_REGION note with a value less
		 than 0 is guaranteed not to perform a non-local goto.  */
	      rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
	      if (!note || INTVAL (XEXP (note, 0)) >= 0)
		for (x = nonlocal_goto_handler_labels; x; x = XEXP (x, 1))
		  make_label_edge (edge_cache, bb, XEXP (x, 0),
				   EDGE_ABNORMAL | EDGE_ABNORMAL_CALL);
	    }
	}

      /* Find out if we can drop through to the next block.  */
      insn = next_nonnote_insn (insn);
      if (!insn || (i + 1 == n_basic_blocks && force_fallthru))
	make_edge (edge_cache, bb, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
      else if (i + 1 < n_basic_blocks)
	{
	  rtx tmp = BLOCK_HEAD (i + 1);
	  if (GET_CODE (tmp) == NOTE)
	    tmp = next_nonnote_insn (tmp);
	  if (force_fallthru || insn == tmp)
	    make_edge (edge_cache, bb, BASIC_BLOCK (i + 1), EDGE_FALLTHRU);
	}
    }

  if (edge_cache)
    sbitmap_vector_free (edge_cache);
}
/* Create an edge between two basic blocks.  FLAGS are auxiliary information
   about the edge that is accumulated between calls.  */

void
make_edge (edge_cache, src, dst, flags)
     sbitmap *edge_cache;
     basic_block src, dst;
     int flags;
{
  int use_edge_cache;
  edge e;

  /* Don't bother with edge cache for ENTRY or EXIT; there aren't that
     many edges to them, and we didn't allocate memory for it.  */
  use_edge_cache = (edge_cache
		    && src != ENTRY_BLOCK_PTR
		    && dst != EXIT_BLOCK_PTR);

  /* Make sure we don't add duplicate edges.  */
  switch (use_edge_cache)
    {
    default:
      /* Quick test for non-existence of the edge.  */
      if (! TEST_BIT (edge_cache[src->index], dst->index))
	break;

      /* The edge exists; early exit if no work to do.  */
      if (flags == 0)
	return;

      /* Fall through.  */
    case 0:
      for (e = src->succ; e; e = e->succ_next)
	if (e->dest == dst)
	  {
	    e->flags |= flags;
	    return;
	  }
      break;
    }

  e = (edge) xcalloc (1, sizeof (*e));

  e->succ_next = src->succ;
  e->pred_next = dst->pred;
  e->src = src;
  e->dest = dst;
  e->flags = flags;

  src->succ = e;
  dst->pred = e;

  if (use_edge_cache)
    SET_BIT (edge_cache[src->index], dst->index);
}
/* Create an edge from a basic block to a label.  */

static void
make_label_edge (edge_cache, src, label, flags)
     sbitmap *edge_cache;
     basic_block src;
     rtx label;
     int flags;
{
  if (GET_CODE (label) != CODE_LABEL)
    abort ();

  /* If the label was never emitted, this insn is junk, but avoid a
     crash trying to refer to BLOCK_FOR_INSN (label).  This can happen
     as a result of a syntax error and a diagnostic has already been
     printed.  */

  if (INSN_UID (label) == 0)
    return;

  make_edge (edge_cache, src, BLOCK_FOR_INSN (label), flags);
}
/* Create the edges generated by INSN in REGION.  */

static void
make_eh_edge (edge_cache, src, insn)
     sbitmap *edge_cache;
     basic_block src;
     rtx insn;
{
  int is_call = (GET_CODE (insn) == CALL_INSN ? EDGE_ABNORMAL_CALL : 0);
  rtx handlers, i;

  handlers = reachable_handlers (insn);

  for (i = handlers; i; i = XEXP (i, 1))
    make_label_edge (edge_cache, src, XEXP (i, 0),
		     EDGE_ABNORMAL | EDGE_EH | is_call);

  free_INSN_LIST_list (&handlers);
}
/* Identify critical edges and set the bits appropriately.  */

void
mark_critical_edges ()
{
  int i, n = n_basic_blocks;
  basic_block bb;

  /* We begin with the entry block.  This is not terribly important now,
     but could be if a front end (Fortran) implemented alternate entry
     points.  */
  bb = ENTRY_BLOCK_PTR;
  i = -1;

  while (1)
    {
      edge e;

      /* (1) Critical edges must have a source with multiple successors.  */
      if (bb->succ && bb->succ->succ_next)
	{
	  for (e = bb->succ; e; e = e->succ_next)
	    {
	      /* (2) Critical edges must have a destination with multiple
		 predecessors.  Note that we know there is at least one
		 predecessor -- the edge we followed to get here.  */
	      if (e->dest->pred->pred_next)
		e->flags |= EDGE_CRITICAL;
	      else
		e->flags &= ~EDGE_CRITICAL;
	    }
	}
      else
	{
	  for (e = bb->succ; e; e = e->succ_next)
	    e->flags &= ~EDGE_CRITICAL;
	}

      if (++i >= n)
	break;
      bb = BASIC_BLOCK (i);
    }
}
/* Mark the back edges in DFS traversal.
   Return non-zero if a loop (natural or otherwise) is present.
   Inspired by Depth_First_Search_PP described in:

     Advanced Compiler Design and Implementation
     Steven Muchnick
     Morgan Kaufmann, 1997

   and heavily borrowed from flow_depth_first_order_compute.  */

bool
mark_dfs_back_edges ()
{
  edge *stack;
  int *pre;
  int *post;
  int sp;
  int prenum = 1;
  int postnum = 1;
  sbitmap visited;
  bool found = false;

  /* Allocate the preorder and postorder number arrays.  */
  pre = (int *) xcalloc (n_basic_blocks, sizeof (int));
  post = (int *) xcalloc (n_basic_blocks, sizeof (int));

  /* Allocate stack for back-tracking up CFG.  */
  stack = (edge *) xmalloc ((n_basic_blocks + 1) * sizeof (edge));
  sp = 0;

  /* Allocate bitmap to track nodes that have been visited.  */
  visited = sbitmap_alloc (n_basic_blocks);

  /* None of the nodes in the CFG have been visited yet.  */
  sbitmap_zero (visited);

  /* Push the first edge on to the stack.  */
  stack[sp++] = ENTRY_BLOCK_PTR->succ;

  while (sp)
    {
      edge e;
      basic_block src;
      basic_block dest;

      /* Look at the edge on the top of the stack.  */
      e = stack[sp - 1];
      src = e->src;
      dest = e->dest;
      e->flags &= ~EDGE_DFS_BACK;

      /* Check if the edge destination has been visited yet.  */
      if (dest != EXIT_BLOCK_PTR && ! TEST_BIT (visited, dest->index))
	{
	  /* Mark that we have visited the destination.  */
	  SET_BIT (visited, dest->index);

	  pre[dest->index] = prenum++;

	  if (dest->succ)
	    {
	      /* Since the DEST node has been visited for the first
		 time, check its successors.  */
	      stack[sp++] = dest->succ;
	    }
	  else
	    post[dest->index] = postnum++;
	}
      else
	{
	  if (dest != EXIT_BLOCK_PTR && src != ENTRY_BLOCK_PTR
	      && pre[src->index] >= pre[dest->index]
	      && post[dest->index] == 0)
	    e->flags |= EDGE_DFS_BACK, found = true;

	  if (! e->succ_next && src != ENTRY_BLOCK_PTR)
	    post[src->index] = postnum++;

	  if (e->succ_next)
	    stack[sp - 1] = e->succ_next;
	  else
	    sp--;
	}
    }

  free (pre);
  free (post);
  free (stack);
  sbitmap_free (visited);

  return found;
}
/* Split a block BB after insn INSN creating a new fallthru edge.
   Return the new edge.  Note that to keep other parts of the compiler happy,
   this function renumbers all the basic blocks so that the new
   one has a number one greater than the block split.  */

edge
split_block (bb, insn)
     basic_block bb;
     rtx insn;
{
  basic_block new_bb;
  edge new_edge;
  edge e;
  rtx bb_note;
  int i, j;

  /* There is no point splitting the block after its end.  */
  if (bb->end == insn)
    return 0;

  /* Create the new structures.  */
  new_bb = (basic_block) obstack_alloc (&flow_obstack, sizeof (*new_bb));
  new_edge = (edge) xcalloc (1, sizeof (*new_edge));

  memset (new_bb, 0, sizeof (*new_bb));

  new_bb->head = NEXT_INSN (insn);
  new_bb->end = bb->end;
  bb->end = insn;

  new_bb->succ = bb->succ;
  bb->succ = new_edge;
  new_bb->pred = new_edge;
  new_bb->count = bb->count;
  new_bb->frequency = bb->frequency;
  new_bb->loop_depth = bb->loop_depth;

  new_edge->src = bb;
  new_edge->dest = new_bb;
  new_edge->flags = EDGE_FALLTHRU;
  new_edge->probability = REG_BR_PROB_BASE;
  new_edge->count = bb->count;

  /* Redirect the src of the successor edges of bb to point to new_bb.  */
  for (e = new_bb->succ; e; e = e->succ_next)
    e->src = new_bb;

  /* Place the new block just after the block being split.  */
  VARRAY_GROW (basic_block_info, ++n_basic_blocks);

  /* Some parts of the compiler expect blocks to be numbered in
     sequential order, so insert the new block immediately after the
     block being split.  */
  j = bb->index;
  for (i = n_basic_blocks - 1; i > j + 1; --i)
    {
      basic_block tmp = BASIC_BLOCK (i - 1);
      BASIC_BLOCK (i) = tmp;
      tmp->index = i;
    }

  BASIC_BLOCK (i) = new_bb;
  new_bb->index = i;

  if (GET_CODE (new_bb->head) == CODE_LABEL)
    {
      /* Create the basic block note.  */
      bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK,
				 new_bb->head);
      NOTE_BASIC_BLOCK (bb_note) = new_bb;

      /* If the only thing in this new block was the label, make sure
	 the block note gets included.  */
      if (new_bb->head == new_bb->end)
	new_bb->end = bb_note;
    }
  else
    {
      /* Create the basic block note.  */
      bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK,
				  new_bb->head);
      NOTE_BASIC_BLOCK (bb_note) = new_bb;
      new_bb->head = bb_note;
    }

  update_bb_for_insn (new_bb);

  if (bb->global_live_at_start)
    {
      new_bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (&flow_obstack);
      new_bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (&flow_obstack);
      COPY_REG_SET (new_bb->global_live_at_end, bb->global_live_at_end);

      /* We now have to calculate which registers are live at the end
	 of the split basic block and at the start of the new basic
	 block.  Start with those registers that are known to be live
	 at the end of the original basic block and get
	 propagate_block to determine which registers are live.  */
      COPY_REG_SET (new_bb->global_live_at_start, bb->global_live_at_end);
      propagate_block (new_bb, new_bb->global_live_at_start, NULL, NULL, 0);
      COPY_REG_SET (bb->global_live_at_end,
		    new_bb->global_live_at_start);
    }

  return new_edge;
}
/* Return the label in the head of basic block BLOCK.  Create one if it
   doesn't exist.  */

rtx
block_label (block)
     basic_block block;
{
  if (block == EXIT_BLOCK_PTR)
    return NULL_RTX;
  if (GET_CODE (block->head) != CODE_LABEL)
    {
      block->head = emit_label_before (gen_label_rtx (), block->head);
      if (basic_block_for_insn)
	set_block_for_insn (block->head, block);
    }
  return block->head;
}
/* Return true if the block has no effect and only forwards control flow to
   its single destination.  */

bool
forwarder_block_p (bb)
     basic_block bb;
{
  rtx insn = bb->head;
  if (bb == EXIT_BLOCK_PTR || bb == ENTRY_BLOCK_PTR
      || !bb->succ || bb->succ->succ_next)
    return false;

  while (insn != bb->end)
    {
      if (active_insn_p (insn))
	return false;
      insn = NEXT_INSN (insn);
    }
  return (!active_insn_p (insn)
	  || (GET_CODE (insn) == JUMP_INSN && onlyjump_p (insn)));
}
/* Return nonzero if we can reach TARGET from SRC by falling through.  */

static bool
can_fallthru (src, target)
     basic_block src, target;
{
  rtx insn = src->end;
  rtx insn2 = target->head;

  if (src->index + 1 == target->index && !active_insn_p (insn2))
    insn2 = next_active_insn (insn2);

  /* ??? Later we may add code to move jump tables offline.  */
  return next_active_insn (insn) == insn2;
}
/* Attempt to perform edge redirection by replacing a possibly complex
   jump instruction by an unconditional jump, or by removing the jump
   completely.  This can apply only if all edges now point to the same
   block.

   The parameters and return values are equivalent to
   redirect_edge_and_branch.  */

static bool
try_redirect_by_replacing_jump (e, target)
     edge e;
     basic_block target;
{
  basic_block src = e->src;
  rtx insn = src->end, kill_from;
  edge tmp;
  rtx set;
  int fallthru = 0;

  /* Verify that all targets will be TARGET.  */
  for (tmp = src->succ; tmp; tmp = tmp->succ_next)
    if (tmp->dest != target && tmp != e)
      break;
  if (tmp || !onlyjump_p (insn))
    return false;

  /* Avoid removing a branch with side effects.  */
  set = single_set (insn);
  if (!set || side_effects_p (set))
    return false;

  /* In case we zap a conditional jump, we'll need to kill
     the cc0 setter too.  */
  kill_from = insn;
#ifdef HAVE_cc0
  if (reg_mentioned_p (cc0_rtx, PATTERN (insn)))
    kill_from = PREV_INSN (insn);
#endif

  /* See if we can create the fallthru edge.  */
  if (can_fallthru (src, target))
    {
      src->end = PREV_INSN (kill_from);
      if (rtl_dump_file)
	fprintf (rtl_dump_file, "Removing jump %i.\n", INSN_UID (insn));
      fallthru = 1;

      /* Selectively unlink whole insn chain.  */
      flow_delete_insn_chain (kill_from, PREV_INSN (target->head));
    }

  /* If this already is simplejump, redirect it.  */
  else if (simplejump_p (insn))
    {
      if (e->dest == target)
	return false;
      if (rtl_dump_file)
	fprintf (rtl_dump_file, "Redirecting jump %i from %i to %i.\n",
		 INSN_UID (insn), e->dest->index, target->index);
      redirect_jump (insn, block_label (target), 0);
    }

  /* Or replace possibly complicated jump insn by simple jump insn.  */
  else
    {
      rtx target_label = block_label (target);
      rtx barrier;

      src->end = emit_jump_insn_before (gen_jump (target_label), kill_from);
      JUMP_LABEL (src->end) = target_label;
      LABEL_NUSES (target_label)++;
      if (basic_block_for_insn)
	set_block_for_new_insns (src->end, src);
      if (rtl_dump_file)
	fprintf (rtl_dump_file, "Replacing insn %i by jump %i\n",
		 INSN_UID (insn), INSN_UID (src->end));

      flow_delete_insn_chain (kill_from, insn);

      barrier = next_nonnote_insn (src->end);
      if (!barrier || GET_CODE (barrier) != BARRIER)
	emit_barrier_after (src->end);
    }

  /* Keep only one edge out and set proper flags.  */
  while (src->succ->succ_next)
    remove_edge (src->succ);
  e = src->succ;
  if (fallthru)
    e->flags = EDGE_FALLTHRU;
  else
    e->flags = 0;
  e->probability = REG_BR_PROB_BASE;
  e->count = src->count;

  /* We don't want a block to end on a line-number note since that has
     the potential of changing the code between -g and not -g.  */
  while (GET_CODE (e->src->end) == NOTE
	 && NOTE_LINE_NUMBER (e->src->end) >= 0)
    {
      rtx prev = PREV_INSN (e->src->end);
      flow_delete_insn (e->src->end);
      e->src->end = prev;
    }

  if (e->dest != target)
    redirect_edge_succ (e, target);

  return true;
}
/* Return the last LOOP_BEG note appearing after INSN, before the start of
   the next basic block.  Return INSN if there are no such notes.

   When emitting a jump to redirect a fallthru edge, it should always appear
   after the LOOP_BEG notes, as the loop optimizer expects a loop to either
   start with a fallthru edge or with a jump, following the LOOP_BEG note,
   to the loop exit test.  */

static rtx
last_loop_beg_note (insn)
     rtx insn;
{
  rtx last = insn;
  insn = NEXT_INSN (insn);
  while (GET_CODE (insn) == NOTE
	 && NOTE_LINE_NUMBER (insn) != NOTE_INSN_BASIC_BLOCK)
    {
      if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG)
	last = insn;
      insn = NEXT_INSN (insn);
    }
  return last;
}
/* Attempt to change code to redirect edge E to TARGET.  Don't do that at
   the expense of adding new instructions or reordering basic blocks.

   The function can also be called with an edge destination equal to
   TARGET.  In that case it should try the simplifications and do nothing
   if none are possible.

   Return true if the transformation succeeded.  We still return false in
   case E already pointed to TARGET and we did not manage to simplify the
   instruction stream.  */

bool
redirect_edge_and_branch (e, target)
     edge e;
     basic_block target;
{
  rtx tmp;
  rtx old_label = e->dest->head;
  basic_block src = e->src;
  rtx insn = src->end;

  if (e->flags & EDGE_COMPLEX)
    return false;

  if (try_redirect_by_replacing_jump (e, target))
    return true;
  /* Do this fast path late, as we want the above code to simplify for
     cases where we are called on a single edge leaving a basic block
     containing a nontrivial jump insn.  */
  else if (e->dest == target)
    return false;

  /* We can only redirect non-fallthru edges of jump insn.  */
  if (e->flags & EDGE_FALLTHRU)
    return false;
  if (GET_CODE (insn) != JUMP_INSN)
    return false;

  /* Recognize a tablejump and adjust all matching cases.  */
  if ((tmp = JUMP_LABEL (insn)) != NULL_RTX
      && (tmp = NEXT_INSN (tmp)) != NULL_RTX
      && GET_CODE (tmp) == JUMP_INSN
      && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
	  || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
    {
      rtvec vec;
      int j;
      rtx new_label = block_label (target);

      if (GET_CODE (PATTERN (tmp)) == ADDR_VEC)
	vec = XVEC (PATTERN (tmp), 0);
      else
	vec = XVEC (PATTERN (tmp), 1);

      for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
	if (XEXP (RTVEC_ELT (vec, j), 0) == old_label)
	  {
	    RTVEC_ELT (vec, j) = gen_rtx_LABEL_REF (Pmode, new_label);
	    --LABEL_NUSES (old_label);
	    ++LABEL_NUSES (new_label);
	  }

      /* Handle casesi dispatch insns.  */
      if ((tmp = single_set (insn)) != NULL
	  && SET_DEST (tmp) == pc_rtx
	  && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
	  && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF
	  && XEXP (XEXP (SET_SRC (tmp), 2), 0) == old_label)
	{
	  XEXP (SET_SRC (tmp), 2) = gen_rtx_LABEL_REF (VOIDmode,
						       new_label);
	  --LABEL_NUSES (old_label);
	  ++LABEL_NUSES (new_label);
	}
    }
  else
    {
      /* ??? We may play the games with moving the named labels from
	 one basic block to the other in case only one computed_jump is
	 available.  */
      if (computed_jump_p (insn))
	return false;

      /* A return instruction can't be redirected.  */
      if (returnjump_p (insn))
	return false;

      /* If the insn doesn't go where we think, we're confused.  */
      if (JUMP_LABEL (insn) != old_label)
	abort ();
      redirect_jump (insn, block_label (target), 0);
    }

  if (rtl_dump_file)
    fprintf (rtl_dump_file, "Edge %i->%i redirected to %i\n",
	     e->src->index, e->dest->index, target->index);
  if (e->dest != target)
    redirect_edge_succ_nodup (e, target);

  return true;
}
/* Redirect edge E even at the expense of creating a new jump insn or a
   new basic block.  Return the new basic block if one is created, NULL
   otherwise.  Abort if the conversion is impossible.  */

basic_block
redirect_edge_and_branch_force (e, target)
     edge e;
     basic_block target;
{
  basic_block new_bb;
  edge new_edge;
  rtx label;
  rtx bb_note;
  int i, j;

  if (redirect_edge_and_branch (e, target))
    return NULL;
  if (e->dest == target)
    return NULL;
  if (e->flags & EDGE_ABNORMAL)
    abort ();
  if (!(e->flags & EDGE_FALLTHRU))
    abort ();

  e->flags &= ~EDGE_FALLTHRU;
  label = block_label (target);
  /* Case of the fallthru block.  */
  if (!e->src->succ->succ_next)
    {
      e->src->end = emit_jump_insn_after (gen_jump (label),
					  last_loop_beg_note (e->src->end));
      JUMP_LABEL (e->src->end) = label;
      LABEL_NUSES (label)++;
      if (basic_block_for_insn)
	set_block_for_new_insns (e->src->end, e->src);
      emit_barrier_after (e->src->end);
      if (rtl_dump_file)
	fprintf (rtl_dump_file,
		 "Emitting jump insn %i to redirect edge %i->%i to %i\n",
		 INSN_UID (e->src->end), e->src->index, e->dest->index,
		 target->index);
      redirect_edge_succ (e, target);
      return NULL;
    }

  /* Redirecting the fallthru edge of the conditional needs extra work.  */

  if (rtl_dump_file)
    fprintf (rtl_dump_file,
	     "Emitting jump insn %i in new BB to redirect edge %i->%i to %i\n",
	     INSN_UID (e->src->end), e->src->index, e->dest->index,
	     target->index);

  /* Create the new structures.  */
  new_bb = (basic_block) obstack_alloc (&flow_obstack, sizeof (*new_bb));
  new_edge = (edge) xcalloc (1, sizeof (*new_edge));

  memset (new_bb, 0, sizeof (*new_bb));

  new_bb->end = new_bb->head = last_loop_beg_note (e->src->end);
  new_bb->succ = NULL;
  new_bb->pred = new_edge;
  new_bb->count = e->count;
  new_bb->frequency = EDGE_FREQUENCY (e);
  new_bb->loop_depth = e->dest->loop_depth;

  new_edge->flags = EDGE_FALLTHRU;
  new_edge->probability = e->probability;
  new_edge->count = e->count;

  if (target->global_live_at_start)
    {
      new_bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (&flow_obstack);
      new_bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (&flow_obstack);
      COPY_REG_SET (new_bb->global_live_at_start,
		    target->global_live_at_start);
      COPY_REG_SET (new_bb->global_live_at_end, new_bb->global_live_at_start);
    }

  new_edge->src = e->src;
  new_edge->dest = new_bb;
  new_edge->succ_next = e->src->succ;
  e->src->succ = new_edge;
  new_edge->pred_next = NULL;

  /* Redirect old edge.  */
  redirect_edge_succ (e, target);
  redirect_edge_pred (e, new_bb);
  e->probability = REG_BR_PROB_BASE;

  /* Place the new block just after the block being split.  */
  VARRAY_GROW (basic_block_info, ++n_basic_blocks);

  /* Some parts of the compiler expect blocks to be numbered in
     sequential order, so insert the new block immediately after the
     block being split.  */
  j = new_edge->src->index;
  for (i = n_basic_blocks - 1; i > j + 1; --i)
    {
      basic_block tmp = BASIC_BLOCK (i - 1);
      BASIC_BLOCK (i) = tmp;
      tmp->index = i;
    }

  BASIC_BLOCK (i) = new_bb;
  new_bb->index = i;

  /* Create the basic block note.  */
  bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK, new_bb->head);
  NOTE_BASIC_BLOCK (bb_note) = new_bb;
  new_bb->head = bb_note;

  new_bb->end = emit_jump_insn_after (gen_jump (label), new_bb->head);
  JUMP_LABEL (new_bb->end) = label;
  LABEL_NUSES (label)++;
  if (basic_block_for_insn)
    set_block_for_new_insns (new_bb->end, new_bb);
  emit_barrier_after (new_bb->end);

  return new_bb;
}
/* Helper function for split_edge.  Return true in case edge BB2 to BB1
   is a back edge of a syntactic loop.  */

static bool
back_edge_of_syntactic_loop_p (bb1, bb2)
     basic_block bb1, bb2;
{
  rtx insn;
  int count = 0;

  if (bb1->index > bb2->index)
    return false;

  if (bb1->index == bb2->index)
    return true;

  for (insn = bb1->end; insn != bb2->head && count >= 0;
       insn = NEXT_INSN (insn))
    if (GET_CODE (insn) == NOTE)
      {
	if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG)
	  count++;
	if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END)
	  count--;
      }

  return count >= 0;
}
2163 /* Split a (typically critical) edge. Return the new block.
2164 Abort on abnormal edges.
2166 ??? The code generally expects to be called on critical edges.
2167 The case of a block ending in an unconditional jump to a
2168 block with multiple predecessors is not handled optimally. */
2171 split_edge (edge_in)
2174 basic_block old_pred, bb, old_succ;
2179 /* Abnormal edges cannot be split. */
2180 if ((edge_in->flags & EDGE_ABNORMAL) != 0)
2183 old_pred = edge_in->src;
2184 old_succ = edge_in->dest;
2186 /* Create the new structures. */
2187 bb = (basic_block) obstack_alloc (&flow_obstack, sizeof (*bb));
2188 edge_out = (edge) xcalloc (1, sizeof (*edge_out));
2191 memset (bb, 0, sizeof (*bb));
2193 /* ??? This info is likely going to be out of date very soon. */
2194 if (old_succ->global_live_at_start)
2196 bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (&flow_obstack);
2197 bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (&flow_obstack);
2198 COPY_REG_SET (bb->global_live_at_start, old_succ->global_live_at_start);
2199 COPY_REG_SET (bb->global_live_at_end, old_succ->global_live_at_start);
2203 bb->succ = edge_out;
2204 bb->count = edge_in->count;
2205 bb->frequency = EDGE_FREQUENCY (edge_in);
2207 edge_in->flags &= ~EDGE_CRITICAL;
2209 edge_out->pred_next = old_succ->pred;
2210 edge_out->succ_next = NULL;
2212 edge_out->dest = old_succ;
2213 edge_out->flags = EDGE_FALLTHRU;
2214 edge_out->probability = REG_BR_PROB_BASE;
2215 edge_out->count = edge_in->count;
2217 old_succ->pred = edge_out;
2219 /* Tricky case -- if there existed a fallthru into the successor
2220 (and we're not it) we must add a new unconditional jump around
2221 the new block we're actually interested in.
2223 Further, if that edge is critical, this means a second new basic
2224 block must be created to hold it. In order to simplify correct
2225 insn placement, do this before we touch the existing basic block
2226 ordering for the block we were really wanting. */
2227 if ((edge_in->flags & EDGE_FALLTHRU) == 0)
2230 for (e = edge_out->pred_next; e; e = e->pred_next)
2231 if (e->flags & EDGE_FALLTHRU)
2236 basic_block jump_block;
2239 if ((e->flags & EDGE_CRITICAL) == 0
2240 && e->src != ENTRY_BLOCK_PTR)
2242 /* Non critical -- we can simply add a jump to the end
2243 of the existing predecessor. */
2244 jump_block = e->src;
2248 /* We need a new block to hold the jump. The simplest
2249 way to do the bulk of the work here is to recursively
2251 jump_block = split_edge (e);
2252 e = jump_block->succ;
2255 /* Now add the jump insn ... */
2256 pos = emit_jump_insn_after (gen_jump (old_succ->head),
2257 last_loop_beg_note (jump_block->end));
2258 jump_block->end = pos;
2259 if (basic_block_for_insn)
2260 set_block_for_new_insns (pos, jump_block);
2261 emit_barrier_after (pos);
2263 /* ... let jump know that label is in use, ... */
2264 JUMP_LABEL (pos) = old_succ->head;
2265 ++LABEL_NUSES (old_succ->head);
2267 /* ... and clear fallthru on the outgoing edge. */
2268 e->flags &= ~EDGE_FALLTHRU;
2270 /* Continue splitting the interesting edge. */
2274 /* Place the new block just in front of the successor. */
2275 VARRAY_GROW (basic_block_info, ++n_basic_blocks);
2276 if (old_succ == EXIT_BLOCK_PTR)
2277 j = n_basic_blocks - 1;
2279 j = old_succ->index;
2280 for (i = n_basic_blocks - 1; i > j; --i)
2282 basic_block tmp = BASIC_BLOCK (i - 1);
2283 BASIC_BLOCK (i) = tmp;
2286 BASIC_BLOCK (i) = bb;
2289 /* Create the basic block note.
2291 Where we place the note can have a noticeable impact on the generated
2292 code.  Consider this cfg:

		E
		|
		0
	       / \
	   +->1-->2--->E
	   |  |
	   +--+

2302 If we need to insert an insn on the edge from block 0 to block 1,
2303 we want to ensure the instructions we insert are outside of any
2304 loop notes that physically sit between block 0 and block 1. Otherwise
2305 we confuse the loop optimizer into thinking the loop is a phony. */
2306 if (old_succ != EXIT_BLOCK_PTR
2307 && PREV_INSN (old_succ->head)
2308 && GET_CODE (PREV_INSN (old_succ->head)) == NOTE
2309 && NOTE_LINE_NUMBER (PREV_INSN (old_succ->head)) == NOTE_INSN_LOOP_BEG
2310 && !back_edge_of_syntactic_loop_p (old_succ, old_pred))
2311 bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK,
2312 PREV_INSN (old_succ->head));
2313 else if (old_succ != EXIT_BLOCK_PTR)
2314 bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK, old_succ->head);
2316 bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK, get_last_insn ());
2317 NOTE_BASIC_BLOCK (bb_note) = bb;
2318 bb->head = bb->end = bb_note;
2320 /* For non-fallthru edges, we must adjust the predecessor's
2321 jump instruction to target our new block. */
2322 if ((edge_in->flags & EDGE_FALLTHRU) == 0)
2324 if (!redirect_edge_and_branch (edge_in, bb))
2328 redirect_edge_succ (edge_in, bb);
2333 /* Queue instructions for insertion on an edge between two basic blocks.
2334 The new instructions and basic blocks (if any) will not appear in the
2335 CFG until commit_edge_insertions is called. */
2338 insert_insn_on_edge (pattern, e)
2342 /* We cannot insert instructions on an abnormal critical edge.
2343 It will be easier to find the culprit if we die now. */
2344 if ((e->flags & (EDGE_ABNORMAL|EDGE_CRITICAL))
2345 == (EDGE_ABNORMAL|EDGE_CRITICAL))
2348 if (e->insns == NULL_RTX)
2351 push_to_sequence (e->insns);
2353 emit_insn (pattern);
2355 e->insns = get_insns ();
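/* A minimal usage sketch (hypothetical caller; DEST and SRC are
   assumed pseudo registers):

	insert_insn_on_edge (gen_rtx_SET (VOIDmode, dest, src), e);
	...
	commit_edge_insertions ();

   The set is queued on E's pending sequence and only enters the insn
   stream, possibly in a freshly split block, when
   commit_edge_insertions is called.  */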
2359 /* Update the CFG for the instructions queued on edge E. */
2362 commit_one_edge_insertion (e)
2365 rtx before = NULL_RTX, after = NULL_RTX, insns, tmp, last;
2368 /* Pull the insns off the edge now since the edge might go away. */
2370 e->insns = NULL_RTX;
2372 /* Figure out where to put these things. If the destination has
2373 one predecessor, insert there. Except for the exit block. */
2374 if (e->dest->pred->pred_next == NULL
2375 && e->dest != EXIT_BLOCK_PTR)
2379 /* Get the location correct wrt a code label, and "nice" wrt
2380 a basic block note, and before everything else. */
2382 if (GET_CODE (tmp) == CODE_LABEL)
2383 tmp = NEXT_INSN (tmp);
2384 if (NOTE_INSN_BASIC_BLOCK_P (tmp))
2385 tmp = NEXT_INSN (tmp);
2386 if (tmp == bb->head)
2389 after = PREV_INSN (tmp);
2392 /* If the source has one successor and the edge is not abnormal,
2393 insert there. Except for the entry block. */
2394 else if ((e->flags & EDGE_ABNORMAL) == 0
2395 && e->src->succ->succ_next == NULL
2396 && e->src != ENTRY_BLOCK_PTR)
2399 /* It is possible to have a non-simple jump here. Consider a target
2400 where some forms of unconditional jumps clobber a register. This
2401 happens on the fr30 for example.
2403 We know this block has a single successor, so we can just emit
2404 the queued insns before the jump. */
2405 if (GET_CODE (bb->end) == JUMP_INSN)
2408 while (GET_CODE (PREV_INSN (before)) == NOTE
2409 && NOTE_LINE_NUMBER (PREV_INSN (before)) == NOTE_INSN_LOOP_BEG)
2410 before = PREV_INSN (before);
2414 /* We'd better be fallthru, or we've lost track of what's what. */
2415 if ((e->flags & EDGE_FALLTHRU) == 0)
2422 /* Otherwise we must split the edge. */
2425 bb = split_edge (e);
2429 /* Now that we've found the spot, do the insertion. */
2431 /* Set the new block number for these insns, if structure is allocated. */
2432 if (basic_block_for_insn)
2435 for (i = insns; i != NULL_RTX; i = NEXT_INSN (i))
2436 set_block_for_insn (i, bb);
2441 emit_insns_before (insns, before);
2442 if (before == bb->head)
2445 last = prev_nonnote_insn (before);
2449 last = emit_insns_after (insns, after);
2450 if (after == bb->end)
2454 if (returnjump_p (last))
2456 /* ??? Remove all outgoing edges from BB and add one for EXIT.
2457 This is not currently a problem because this only happens
2458 for the (single) epilogue, which already has a fallthru edge
to EXIT.  */
2462 if (e->dest != EXIT_BLOCK_PTR
2463 || e->succ_next != NULL
2464 || (e->flags & EDGE_FALLTHRU) == 0)
2466 e->flags &= ~EDGE_FALLTHRU;
2468 emit_barrier_after (last);
2472 flow_delete_insn (before);
2474 else if (GET_CODE (last) == JUMP_INSN)
2476 find_sub_basic_blocks (bb);
2479 /* Update the CFG for all queued instructions. */
2482 commit_edge_insertions ()
2486 compute_bb_for_insn (get_max_uid ());
2488 #ifdef ENABLE_CHECKING
2489 verify_flow_info ();
2493 bb = ENTRY_BLOCK_PTR;
2498 for (e = bb->succ; e; e = next)
2500 next = e->succ_next;
2502 commit_one_edge_insertion (e);
2505 if (++i >= n_basic_blocks)
2507 bb = BASIC_BLOCK (i);
2511 /* Return true if we need to add a fake edge to the exit block.
2512 Helper function for flow_call_edges_add.  */
2514 need_fake_edge_p (insn)
2520 if ((GET_CODE (insn) == CALL_INSN
2521 && !SIBLING_CALL_P (insn)
2522 && !find_reg_note (insn, REG_NORETURN, NULL)
2523 && !find_reg_note (insn, REG_ALWAYS_RETURN, NULL)
2524 && !CONST_OR_PURE_CALL_P (insn)))
2527 return ((GET_CODE (PATTERN (insn)) == ASM_OPERANDS
2528 && MEM_VOLATILE_P (PATTERN (insn)))
2529 || (GET_CODE (PATTERN (insn)) == PARALLEL
2530 && asm_noperands (insn) != -1
2531 && MEM_VOLATILE_P (XVECEXP (PATTERN (insn), 0, 0)))
2532 || GET_CODE (PATTERN (insn)) == ASM_INPUT);
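/* For example (illustrative RTL): a block ending in

	(call_insn (call (mem (symbol_ref "qsort")) ...))

   gets a fake edge to EXIT, since the callee might call exit or
   longjmp and entering the block then does not imply reaching its
   end.  Calls known to be const/pure, noreturn, or always-returning
   are excluded by the predicate above.  */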
2535 /* Add fake edges to the function exit for any non-constant and
2536 non-noreturn calls, and for volatile inline assembly, in the bitmap of
2537 blocks specified by BLOCKS, or in the whole CFG if BLOCKS is zero.
Return the number of blocks that were split.
2540 The goal is to expose cases in which entering a basic block does not imply
2541 that all subsequent instructions must be executed. */
2544 flow_call_edges_add (blocks)
2548 int blocks_split = 0;
2551 bool check_last_block = false;
2553 /* Map bb indices into basic block pointers since split_block
2554 will renumber the basic blocks. */
2556 bbs = xmalloc (n_basic_blocks * sizeof (*bbs));
2560 for (i = 0; i < n_basic_blocks; i++)
2561 bbs[bb_num++] = BASIC_BLOCK (i);
2562 check_last_block = true;
2566 EXECUTE_IF_SET_IN_SBITMAP (blocks, 0, i,
2568 bbs[bb_num++] = BASIC_BLOCK (i);
2569 if (i == n_basic_blocks - 1)
2570 check_last_block = true;
2574 /* In the last basic block, before epilogue generation, there will be
2575 a fallthru edge to EXIT. Special care is required if the last insn
2576 of the last basic block is a call because make_edge folds duplicate
2577 edges, which would result in the fallthru edge also being marked
2578 fake, which would result in the fallthru edge being removed by
2579 remove_fake_edges, which would result in an invalid CFG.
2581 Moreover, we can't elide the outgoing fake edge, since the block
2582 profiler needs to take this into account in order to solve the minimal
2583 spanning tree in the case that the call doesn't return.
2585 Handle this by adding a dummy instruction in a new last basic block. */
2586 if (check_last_block
2587 && need_fake_edge_p (BASIC_BLOCK (n_basic_blocks - 1)->end))
2590 for (e = BASIC_BLOCK (n_basic_blocks - 1)->succ; e; e = e->succ_next)
2591 if (e->dest == EXIT_BLOCK_PTR)
2593 insert_insn_on_edge (gen_rtx_USE (VOIDmode, const0_rtx), e);
2594 commit_edge_insertions ();
2598 /* Now add fake edges to the function exit for any non-constant
2599 calls, since there is no way that we can determine if they will
return or not...  */
2602 for (i = 0; i < bb_num; i++)
2604 basic_block bb = bbs[i];
2608 for (insn = bb->end; ; insn = prev_insn)
2610 prev_insn = PREV_INSN (insn);
2611 if (need_fake_edge_p (insn))
2615 /* The above condition should be enough to verify that there is
2616 no edge to the exit block in the CFG already.  Calling make_edge in
2617 such a case would make us mark that edge as fake and remove it
later.  */
2619 #ifdef ENABLE_CHECKING
2620 if (insn == bb->end)
2621 for (e = bb->succ; e; e = e->succ_next)
2622 if (e->dest == EXIT_BLOCK_PTR)
2626 /* Note that the following may create a new basic block
2627 and renumber the existing basic blocks. */
2628 e = split_block (bb, insn);
2632 make_edge (NULL, bb, EXIT_BLOCK_PTR, EDGE_FAKE);
2634 if (insn == bb->head)
2640 verify_flow_info ();
2643 return blocks_split;
2646 /* Find unreachable blocks.  An unreachable block will have NULL in
2647 block->aux; a non-NULL value indicates the block is reachable.  */
2650 find_unreachable_blocks ()
2654 basic_block *tos, *worklist;
2657 tos = worklist = (basic_block *) xmalloc (sizeof (basic_block) * n);
2659 /* Use basic_block->aux as a marker. Clear them all. */
2661 for (i = 0; i < n; ++i)
2662 BASIC_BLOCK (i)->aux = NULL;
2664 /* Add our starting points to the worklist. Almost always there will
2665 be only one.  It isn't inconceivable that we might one day directly
2666 support Fortran alternate entry points. */
2668 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
2672 /* Mark the block with a handy non-null value. */
2676 /* Iterate: find everything reachable from what we've already seen. */
2678 while (tos != worklist)
2680 basic_block b = *--tos;
2682 for (e = b->succ; e; e = e->succ_next)
2693 /* Delete all unreachable basic blocks. */
2695 delete_unreachable_blocks ()
2699 find_unreachable_blocks ();
2701 /* Delete all unreachable basic blocks. Count down so that we
2702 don't interfere with the block renumbering that happens in
2703 flow_delete_block. */
2705 for (i = n_basic_blocks - 1; i >= 0; --i)
2707 basic_block b = BASIC_BLOCK (i);
2710 /* This block was found. Tidy up the mark. */
2713 flow_delete_block (b);
2716 tidy_fallthru_edges ();
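/* Sketch of the marking step used by find_unreachable_blocks above
   (illustrative; the exit block needs no mark):

	for (e = b->succ; e; e = e->succ_next)
	  if (!e->dest->aux && e->dest != EXIT_BLOCK_PTR)
	    {
	      e->dest->aux = e;		/* any non-NULL marker */
	      *tos++ = e->dest;		/* push on the worklist */
	    }

   Afterwards "aux == NULL" is exactly "unreachable".  */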
2719 /* Return true if NOTE is not one of the ones that must be kept paired,
2720 so that we may simply delete them. */
2723 can_delete_note_p (note)
2726 return (NOTE_LINE_NUMBER (note) == NOTE_INSN_DELETED
2727 || NOTE_LINE_NUMBER (note) == NOTE_INSN_BASIC_BLOCK);
2730 /* Unlink a chain of insns between START and FINISH, leaving notes
2731 that must be paired. */
2734 flow_delete_insn_chain (start, finish)
2737 /* Unchain the insns one by one. It would be quicker to delete all
2738 of these with a single unchaining, rather than one at a time, but
2739 we need to keep the NOTEs.  */
2745 next = NEXT_INSN (start);
2746 if (GET_CODE (start) == NOTE && !can_delete_note_p (start))
2748 else if (GET_CODE (start) == CODE_LABEL
2749 && ! can_delete_label_p (start))
2751 const char *name = LABEL_NAME (start);
2752 PUT_CODE (start, NOTE);
2753 NOTE_LINE_NUMBER (start) = NOTE_INSN_DELETED_LABEL;
2754 NOTE_SOURCE_FILE (start) = name;
2757 next = flow_delete_insn (start);
2759 if (start == finish)
2765 /* Delete the insns in a (non-live) block. We physically delete every
2766 non-deleted-note insn, and update the flow graph appropriately.
2768 Return nonzero if we deleted an exception handler. */
2770 /* ??? Preserving all such notes strikes me as wrong. It would be nice
2771 to post-process the stream to remove empty blocks, loops, ranges, etc. */
2774 flow_delete_block (b)
2777 int deleted_handler = 0;
2780 /* If the head of this block is a CODE_LABEL, then it might be the
2781 label for an exception handler which can't be reached.
2783 We need to remove the label from the exception_handler_label list
2784 and remove the associated NOTE_INSN_EH_REGION_BEG and
2785 NOTE_INSN_EH_REGION_END notes. */
2789 never_reached_warning (insn);
2791 if (GET_CODE (insn) == CODE_LABEL)
2792 maybe_remove_eh_handler (insn);
2794 /* Include any jump table following the basic block. */
2796 if (GET_CODE (end) == JUMP_INSN
2797 && (tmp = JUMP_LABEL (end)) != NULL_RTX
2798 && (tmp = NEXT_INSN (tmp)) != NULL_RTX
2799 && GET_CODE (tmp) == JUMP_INSN
2800 && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
2801 || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
2804 /* Include any barrier that may follow the basic block. */
2805 tmp = next_nonnote_insn (end);
2806 if (tmp && GET_CODE (tmp) == BARRIER)
2809 /* Selectively delete the entire chain. */
2810 flow_delete_insn_chain (insn, end);
2812 /* Remove the edges into and out of this block. Note that there may
2813 indeed be edges in, if we are removing an unreachable loop. */
2817 for (e = b->pred; e; e = next)
2819 for (q = &e->src->succ; *q != e; q = &(*q)->succ_next)
2822 next = e->pred_next;
2826 for (e = b->succ; e; e = next)
2828 for (q = &e->dest->pred; *q != e; q = &(*q)->pred_next)
2831 next = e->succ_next;
2840 /* Remove the basic block from the array, and compact behind it. */
2843 return deleted_handler;
2846 /* Remove block B from the basic block array and compact behind it. */
2852 int i, n = n_basic_blocks;
2854 for (i = b->index; i + 1 < n; ++i)
2856 basic_block x = BASIC_BLOCK (i + 1);
2857 BASIC_BLOCK (i) = x;
2861 basic_block_info->num_elements--;
2865 /* Delete INSN by patching it out. Return the next insn. */
2868 flow_delete_insn (insn)
2871 rtx prev = PREV_INSN (insn);
2872 rtx next = NEXT_INSN (insn);
2875 PREV_INSN (insn) = NULL_RTX;
2876 NEXT_INSN (insn) = NULL_RTX;
2877 INSN_DELETED_P (insn) = 1;
2880 NEXT_INSN (prev) = next;
2882 PREV_INSN (next) = prev;
2884 set_last_insn (prev);
2886 if (GET_CODE (insn) == CODE_LABEL)
2887 remove_node_from_expr_list (insn, &nonlocal_goto_handler_labels);
2889 /* If deleting a jump, decrement the use count of the label. Deleting
2890 the label itself should happen in the normal course of block merging. */
2891 if (GET_CODE (insn) == JUMP_INSN
2892 && JUMP_LABEL (insn)
2893 && GET_CODE (JUMP_LABEL (insn)) == CODE_LABEL)
2894 LABEL_NUSES (JUMP_LABEL (insn))--;
2896 /* Also if deleting an insn that references a label. */
2897 else if ((note = find_reg_note (insn, REG_LABEL, NULL_RTX)) != NULL_RTX
2898 && GET_CODE (XEXP (note, 0)) == CODE_LABEL)
2899 LABEL_NUSES (XEXP (note, 0))--;
2901 if (GET_CODE (insn) == JUMP_INSN
2902 && (GET_CODE (PATTERN (insn)) == ADDR_VEC
2903 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC))
2905 rtx pat = PATTERN (insn);
2906 int diff_vec_p = GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC;
2907 int len = XVECLEN (pat, diff_vec_p);
2910 for (i = 0; i < len; i++)
2911 LABEL_NUSES (XEXP (XVECEXP (pat, diff_vec_p, i), 0))--;
2917 /* True if a given label can be deleted. */
2920 can_delete_label_p (label)
2925 if (LABEL_PRESERVE_P (label))
2928 for (x = forced_labels; x; x = XEXP (x, 1))
2929 if (label == XEXP (x, 0))
2931 for (x = label_value_list; x; x = XEXP (x, 1))
2932 if (label == XEXP (x, 0))
2934 for (x = exception_handler_labels; x; x = XEXP (x, 1))
2935 if (label == XEXP (x, 0))
2938 /* User-declared labels must be preserved.  */
2939 if (LABEL_NAME (label) != 0)
2946 tail_recursion_label_p (label)
2951 for (x = tail_recursion_label_list; x; x = XEXP (x, 1))
2952 if (label == XEXP (x, 0))
2958 /* Blocks A and B are to be merged into a single block A. The insns
2959 are already contiguous, hence `nomove'. */
2962 merge_blocks_nomove (a, b)
2966 rtx b_head, b_end, a_end;
2967 rtx del_first = NULL_RTX, del_last = NULL_RTX;
2970 /* If there was a CODE_LABEL beginning B, delete it. */
2973 if (GET_CODE (b_head) == CODE_LABEL)
2975 /* Detect basic blocks with nothing but a label. This can happen
2976 in particular at the end of a function. */
2977 if (b_head == b_end)
2979 del_first = del_last = b_head;
2980 b_head = NEXT_INSN (b_head);
2983 /* Delete the basic block note. */
2984 if (NOTE_INSN_BASIC_BLOCK_P (b_head))
2986 if (b_head == b_end)
2991 b_head = NEXT_INSN (b_head);
2994 /* If there was a jump out of A, delete it. */
2996 if (GET_CODE (a_end) == JUMP_INSN)
3000 for (prev = PREV_INSN (a_end); ; prev = PREV_INSN (prev))
3001 if (GET_CODE (prev) != NOTE
3002 || NOTE_LINE_NUMBER (prev) == NOTE_INSN_BASIC_BLOCK
3009 /* If this was a conditional jump, we need to also delete
3010 the insn that set cc0. */
3011 if (only_sets_cc0_p (prev))
3014 prev = prev_nonnote_insn (prev);
3023 else if (GET_CODE (NEXT_INSN (a_end)) == BARRIER)
3024 del_first = NEXT_INSN (a_end);
3026 /* Delete everything marked above as well as crap that might be
3027 hanging out between the two blocks. */
3028 flow_delete_insn_chain (del_first, del_last);
3030 /* Normally there should only be one successor of A and that is B, but
3031 partway through the merge of blocks for conditional_execution we'll
3032 be merging a TEST block with THEN and ELSE successors. Free the
3033 whole lot of them and hope the caller knows what they're doing. */
3035 remove_edge (a->succ);
3037 /* Adjust the edges out of B for the new owner. */
3038 for (e = b->succ; e; e = e->succ_next)
3042 /* B hasn't quite yet ceased to exist. Attempt to prevent mishap. */
3043 b->pred = b->succ = NULL;
3045 /* Reassociate the insns of B with A. */
3048 if (basic_block_for_insn)
3050 BLOCK_FOR_INSN (b_head) = a;
3051 while (b_head != b_end)
3053 b_head = NEXT_INSN (b_head);
3054 BLOCK_FOR_INSN (b_head) = a;
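/* Illustrative before/after for merge_blocks_nomove (assumed layout):

	A:   ...			A:   ...
	     goto L;		=>	     (B's insns)
	B: L: (B's insns)

   A's jump, B's label, and B's basic block note are all deleted; B's
   remaining insns now belong to A.  */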
3064 /* Blocks A and B are to be merged into a single block. A has no incoming
3065 fallthru edge, so it can be moved before B without adding or modifying
3066 any jumps (aside from the jump from A to B). */
3069 merge_blocks_move_predecessor_nojumps (a, b)
3072 rtx start, end, barrier;
3078 barrier = next_nonnote_insn (end);
3079 if (GET_CODE (barrier) != BARRIER)
3081 flow_delete_insn (barrier);
3083 /* Move block and loop notes out of the chain so that we do not
3084 disturb their order.
3086 ??? A better solution would be to squeeze out all the non-nested notes
3087 and adjust the block trees appropriately. Even better would be to have
3088 a tighter connection between block trees and rtl so that this is not
necessary.  */
3090 start = squeeze_notes (start, end);
3092 /* Scramble the insn chain. */
3093 if (end != PREV_INSN (b->head))
3094 reorder_insns (start, end, PREV_INSN (b->head));
3098 fprintf (rtl_dump_file, "Moved block %d before %d and merged.\n",
3099 a->index, b->index);
3102 /* Swap the records for the two blocks around. Although we are deleting B,
3103 A is now where B was and we want to compact the BB array from where
A used to be.  */
3105 BASIC_BLOCK (a->index) = b;
3106 BASIC_BLOCK (b->index) = a;
3108 a->index = b->index;
3111 /* Now blocks A and B are contiguous. Merge them. */
3112 merge_blocks_nomove (a, b);
3117 /* Blocks A and B are to be merged into a single block. B has no outgoing
3118 fallthru edge, so it can be moved after A without adding or modifying
3119 any jumps (aside from the jump from A to B). */
3122 merge_blocks_move_successor_nojumps (a, b)
3125 rtx start, end, barrier;
3129 barrier = NEXT_INSN (end);
3131 /* Recognize a jump table following block B. */
3133 && GET_CODE (barrier) == CODE_LABEL
3134 && NEXT_INSN (barrier)
3135 && GET_CODE (NEXT_INSN (barrier)) == JUMP_INSN
3136 && (GET_CODE (PATTERN (NEXT_INSN (barrier))) == ADDR_VEC
3137 || GET_CODE (PATTERN (NEXT_INSN (barrier))) == ADDR_DIFF_VEC))
3139 end = NEXT_INSN (barrier);
3140 barrier = NEXT_INSN (end);
3143 /* There had better have been a barrier there. Delete it. */
3144 if (barrier && GET_CODE (barrier) == BARRIER)
3145 flow_delete_insn (barrier);
3147 /* Move block and loop notes out of the chain so that we do not
3148 disturb their order.
3150 ??? A better solution would be to squeeze out all the non-nested notes
3151 and adjust the block trees appropriately. Even better would be to have
3152 a tighter connection between block trees and rtl so that this is not
necessary.  */
3154 start = squeeze_notes (start, end);
3156 /* Scramble the insn chain. */
3157 reorder_insns (start, end, a->end);
3159 /* Now blocks A and B are contiguous. Merge them. */
3160 merge_blocks_nomove (a, b);
3164 fprintf (rtl_dump_file, "Moved block %d after %d and merged.\n",
3165 b->index, a->index);
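/* The two movers above are mirror images (illustrative summary):

	predecessor case:  move A (no incoming fallthru) up before B;
	successor case:    move B (no outgoing fallthru) down after A.

   Either way A and B become physically contiguous, and the actual
   merge is delegated to merge_blocks_nomove.  */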
3171 /* Attempt to merge basic blocks that are potentially non-adjacent.
3172 Return true iff the attempt succeeded. */
3175 merge_blocks (e, b, c, mode)
3180 /* If C has a tail recursion label, do not merge. There is no
3181 edge recorded from the call_placeholder back to this label, as
3182 that would make optimize_sibling_and_tail_recursive_calls more
3183 complex for no gain. */
3184 if (GET_CODE (c->head) == CODE_LABEL
3185 && tail_recursion_label_p (c->head))
3188 /* If B has a fallthru edge to C, no need to move anything. */
3189 if (e->flags & EDGE_FALLTHRU)
3191 merge_blocks_nomove (b, c);
3195 fprintf (rtl_dump_file, "Merged %d and %d without moving.\n",
3196 b->index, c->index);
3201 /* Otherwise we will need to move code around. Do that only if expensive
3202 transformations are allowed. */
3203 else if (mode & CLEANUP_EXPENSIVE)
3205 edge tmp_edge, c_fallthru_edge;
3206 int c_has_outgoing_fallthru;
3207 int b_has_incoming_fallthru;
3209 /* Avoid overactive code motion, as the forwarder blocks should be
3210 eliminated by edge redirection instead. One exception might have
3211 been if B is a forwarder block and C has no fallthru edge, but
3212 that should be cleaned up by bb-reorder instead. */
3213 if (forwarder_block_p (b) || forwarder_block_p (c))
3216 /* We must make sure to not munge nesting of lexical blocks,
3217 and loop notes. This is done by squeezing out all the notes
3218 and leaving them there to lie. Not ideal, but functional. */
3220 for (tmp_edge = c->succ; tmp_edge; tmp_edge = tmp_edge->succ_next)
3221 if (tmp_edge->flags & EDGE_FALLTHRU)
3223 c_has_outgoing_fallthru = (tmp_edge != NULL);
3224 c_fallthru_edge = tmp_edge;
3226 for (tmp_edge = b->pred; tmp_edge; tmp_edge = tmp_edge->pred_next)
3227 if (tmp_edge->flags & EDGE_FALLTHRU)
3229 b_has_incoming_fallthru = (tmp_edge != NULL);
3231 /* If B does not have an incoming fallthru, then it can be moved
3232 immediately before C without introducing or modifying jumps.
3233 C cannot be the first block, so we do not have to worry about
3234 accessing a non-existent block. */
3235 if (! b_has_incoming_fallthru)
3236 return merge_blocks_move_predecessor_nojumps (b, c);
3238 /* Otherwise, we're going to try to move C after B. If C does
3239 not have an outgoing fallthru, then it can be moved
3240 immediately after B without introducing or modifying jumps. */
3241 if (! c_has_outgoing_fallthru)
3242 return merge_blocks_move_successor_nojumps (b, c);
3244 /* Otherwise, we'll need to insert an extra jump, and possibly
3245 a new block to contain it. We can't redirect to EXIT_BLOCK_PTR,
3246 as we don't have explicit return instructions before epilogues
3247 are generated, so give up on that case. */
3249 if (c_fallthru_edge->dest != EXIT_BLOCK_PTR
3250 && merge_blocks_move_successor_nojumps (b, c))
3252 basic_block target = c_fallthru_edge->dest;
3256 /* This is a dirty hack to avoid code duplication.
3258 Set the edge to point to the wrong basic block, so that
3259 redirect_edge_and_branch_force will do the trick
3260 and rewire the edge back to the original location.  */
3261 redirect_edge_succ (c_fallthru_edge, ENTRY_BLOCK_PTR);
3262 new = redirect_edge_and_branch_force (c_fallthru_edge, target);
3264 /* We've just created a barrier, but another barrier is
3265 already present in the stream. Avoid the duplicate. */
3266 barrier = next_nonnote_insn (new ? new->end : b->end);
3267 if (GET_CODE (barrier) != BARRIER)
3269 flow_delete_insn (barrier);
3279 /* Simplify a conditional jump around an unconditional jump.
3280 Return true if something changed. */
3283 try_simplify_condjump (cbranch_block)
3284 basic_block cbranch_block;
3286 basic_block jump_block, jump_dest_block, cbranch_dest_block;
3287 edge cbranch_jump_edge, cbranch_fallthru_edge;
3290 /* Verify that there are exactly two successors. */
3291 if (!cbranch_block->succ
3292 || !cbranch_block->succ->succ_next
3293 || cbranch_block->succ->succ_next->succ_next)
3296 /* Verify that we've got a normal conditional branch at the end
3298 cbranch_insn = cbranch_block->end;
3299 if (!any_condjump_p (cbranch_insn))
3302 cbranch_fallthru_edge = FALLTHRU_EDGE (cbranch_block);
3303 cbranch_jump_edge = BRANCH_EDGE (cbranch_block);
3305 /* The next block must not have multiple predecessors, must not
3306 be the last block in the function, and must contain just the
3307 unconditional jump. */
3308 jump_block = cbranch_fallthru_edge->dest;
3309 if (jump_block->pred->pred_next
3310 || jump_block->index == n_basic_blocks - 1
3311 || !forwarder_block_p (jump_block))
3313 jump_dest_block = jump_block->succ->dest;
3315 /* The conditional branch must target the block after the
3316 unconditional branch. */
3317 cbranch_dest_block = cbranch_jump_edge->dest;
3319 if (!can_fallthru (jump_block, cbranch_dest_block))
3322 /* Invert the conditional branch. Prevent jump.c from deleting
3323 "unreachable" instructions. */
3324 LABEL_NUSES (JUMP_LABEL (cbranch_insn))++;
3325 if (!invert_jump (cbranch_insn, block_label (jump_dest_block), 1))
3327 LABEL_NUSES (JUMP_LABEL (cbranch_insn))--;
3332 fprintf (rtl_dump_file, "Simplifying condjump %i around jump %i\n",
3333 INSN_UID (cbranch_insn), INSN_UID (jump_block->end));
3335 /* Success. Update the CFG to match. Note that after this point
3336 the edge variable names appear backwards; the redirection is done
3337 this way to preserve edge profile data. */
3338 redirect_edge_succ_nodup (cbranch_jump_edge, cbranch_dest_block);
3339 redirect_edge_succ_nodup (cbranch_fallthru_edge, jump_dest_block);
3340 cbranch_jump_edge->flags |= EDGE_FALLTHRU;
3341 cbranch_fallthru_edge->flags &= ~EDGE_FALLTHRU;
3343 /* Delete the block with the unconditional jump, and clean up the mess. */
3344 flow_delete_block (jump_block);
3345 tidy_fallthru_edge (cbranch_jump_edge, cbranch_block, cbranch_dest_block);
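/* Illustrative before/after (labels hypothetical):

	if (cond) goto L1;		if (! cond) goto L2;
	goto L2;		=>	L1: ...
	L1: ...

   The inverted conditional branch takes over the unconditional jump's
   target, and the block that held only the jump is deleted.  */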
3350 /* Attempt to forward edges leaving basic block B.
3351 Return true if successful.  */
3354 try_forward_edges (mode, b)
3358 bool changed = false;
3361 for (e = b->succ; e ; e = next)
3363 basic_block target, first;
3366 next = e->succ_next;
3368 /* Skip complex edges because we don't know how to update them.
3370 Still handle fallthru edges, as we can succeed in forwarding the
3371 fallthru edge to the same place as the branch edge of a conditional
3372 branch, turning the conditional branch into an unconditional one.  */
3373 if (e->flags & EDGE_COMPLEX)
3376 target = first = e->dest;
3379 /* Look for the real destination of the jump.
3380 Avoid looping forever through an infinite empty loop by counting
3381 up to n_basic_blocks.  */
3382 while (forwarder_block_p (target)
3383 && target->succ->dest != EXIT_BLOCK_PTR
3384 && counter < n_basic_blocks)
3386 /* Bypass trivial infinite loops. */
3387 if (target == target->succ->dest)
3388 counter = n_basic_blocks;
3390 /* Avoid killing loop pre-headers, as they are where the loop
3391 optimizer wants to hoist code to.

3393 For fallthru forwarders, the LOOP_BEG note must appear between
3394 the header of the block and the CODE_LABEL of the loop; for
3395 non-forwarders it must appear before the JUMP_INSN.  */
3396 if (mode & CLEANUP_PRE_LOOP)
3398 rtx insn = (target->succ->flags & EDGE_FALLTHRU
3399 ? target->head : prev_nonnote_insn (target->end));
3401 if (GET_CODE (insn) != NOTE)
3402 insn = NEXT_INSN (insn);
3404 for (;insn && GET_CODE (insn) != CODE_LABEL && !INSN_P (insn);
3405 insn = NEXT_INSN (insn))
3406 if (GET_CODE (insn) == NOTE
3407 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG)
3410 if (GET_CODE (insn) == NOTE)
3413 target = target->succ->dest, counter++;
3416 if (counter >= n_basic_blocks)
3419 fprintf (rtl_dump_file, "Infinite loop in BB %i.\n",
3422 else if (target == first)
3423 ; /* We didn't do anything. */
3426 /* Save the values now, as the edge may get removed. */
3427 gcov_type edge_count = e->count;
3428 int edge_probability = e->probability;
3430 if (redirect_edge_and_branch (e, target))
3432 /* We successfully forwarded the edge. Now update profile
3433 data: for each edge we traversed in the chain, remove
3434 the original edge's execution count. */
3435 int edge_frequency = ((edge_probability * b->frequency
3436 + REG_BR_PROB_BASE / 2)
3437 / REG_BR_PROB_BASE);
3441 first->count -= edge_count;
3442 first->succ->count -= edge_count;
3443 first->frequency -= edge_frequency;
3444 first = first->succ->dest;
3446 while (first != target);
3453 fprintf (rtl_dump_file, "Forwarding edge %i->%i to %i failed.\n",
3454 b->index, e->dest->index, target->index);
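/* Illustrative forwarding (assumed chain): an edge into forwarder
   blocks

	B --> F1 --> F2 --> D	becomes		B --------> D

   and for each block skipped, F1's and F2's counts and frequencies
   are reduced by the redirected edge's traffic, keeping the profile
   consistent.  */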
3462 /* Look through the insns at the end of BB1 and BB2 and find the longest
3463 sequence that is equivalent.  Store the first insns for that sequence
3464 in *F1 and *F2 and return the sequence length.
3466 To simplify callers of this function, if the blocks match exactly,
3467 store the head of the blocks in *F1 and *F2. */
3470 flow_find_cross_jump (mode, bb1, bb2, f1, f2)
3471 int mode ATTRIBUTE_UNUSED;
3472 basic_block bb1, bb2;
3475 rtx i1, i2, p1, p2, last1, last2, afterlast1, afterlast2;
3478 /* Skip simple jumps at the end of the blocks. Complex jumps still
3479 need to be compared for equivalence, which we'll do below. */
3482 if (onlyjump_p (i1))
3483 i1 = PREV_INSN (i1);
3485 if (onlyjump_p (i2))
3486 i2 = PREV_INSN (i2);
3488 last1 = afterlast1 = last2 = afterlast2 = NULL_RTX;
3492 while ((GET_CODE (i1) == NOTE && i1 != bb1->head))
3493 i1 = PREV_INSN (i1);
3494 while ((GET_CODE (i2) == NOTE && i2 != bb2->head))
3495 i2 = PREV_INSN (i2);
3497 if (i1 == bb1->head || i2 == bb2->head)
3500 /* Verify that I1 and I2 are equivalent. */
3502 if (GET_CODE (i1) != GET_CODE (i2))
3508 /* If this is a CALL_INSN, compare register usage information.
3509 If we don't check this on stack register machines, the two
3510 CALL_INSNs might be merged leaving reg-stack.c with mismatching
3511 numbers of stack registers in the same basic block.
3512 If we don't check this on machines with delay slots, a delay slot may
3513 be filled that clobbers a parameter expected by the subroutine.
3515 ??? We take the simple route for now and assume that if they're
3516 equal, they were constructed identically. */
3518 if (GET_CODE (i1) == CALL_INSN
3519 && ! rtx_equal_p (CALL_INSN_FUNCTION_USAGE (i1),
3520 CALL_INSN_FUNCTION_USAGE (i2)))
3524 /* If cross_jump_death_matters is not 0, the insn's mode
3525 indicates whether or not the insn contains any stack-like
regs.  */
3528 if ((mode & CLEANUP_POST_REGSTACK) && stack_regs_mentioned (i1))
3530 /* If register stack conversion has already been done, then
3531 death notes must also be compared before it is certain that
3532 the two instruction streams match. */
3535 HARD_REG_SET i1_regset, i2_regset;
3537 CLEAR_HARD_REG_SET (i1_regset);
3538 CLEAR_HARD_REG_SET (i2_regset);
3540 for (note = REG_NOTES (i1); note; note = XEXP (note, 1))
3541 if (REG_NOTE_KIND (note) == REG_DEAD
3542 && STACK_REG_P (XEXP (note, 0)))
3543 SET_HARD_REG_BIT (i1_regset, REGNO (XEXP (note, 0)));
3545 for (note = REG_NOTES (i2); note; note = XEXP (note, 1))
3546 if (REG_NOTE_KIND (note) == REG_DEAD
3547 && STACK_REG_P (XEXP (note, 0)))
3548 SET_HARD_REG_BIT (i2_regset, REGNO (XEXP (note, 0)));
3550 GO_IF_HARD_REG_EQUAL (i1_regset, i2_regset, done);
3559 if (GET_CODE (p1) != GET_CODE (p2))
3562 if (! rtx_renumbered_equal_p (p1, p2))
3564 /* The following code helps take care of G++ cleanups. */
3565 rtx equiv1 = find_reg_equal_equiv_note (i1);
3566 rtx equiv2 = find_reg_equal_equiv_note (i2);
3568 if (equiv1 && equiv2
3569 /* If the equivalences are not to a constant, they may
3570 reference pseudos that no longer exist, so we can't
use them.  */
3572 && CONSTANT_P (XEXP (equiv1, 0))
3573 && rtx_equal_p (XEXP (equiv1, 0), XEXP (equiv2, 0)))
3575 rtx s1 = single_set (i1);
3576 rtx s2 = single_set (i2);
3577 if (s1 != 0 && s2 != 0
3578 && rtx_renumbered_equal_p (SET_DEST (s1), SET_DEST (s2)))
3580 validate_change (i1, &SET_SRC (s1), XEXP (equiv1, 0), 1);
3581 validate_change (i2, &SET_SRC (s2), XEXP (equiv2, 0), 1);
3582 if (! rtx_renumbered_equal_p (p1, p2))
3584 else if (apply_change_group ())
3592 /* Don't begin a cross-jump with a USE or CLOBBER insn. */
3593 if (GET_CODE (p1) != USE && GET_CODE (p1) != CLOBBER)
3595 afterlast1 = last1, afterlast2 = last2;
3596 last1 = i1, last2 = i2;
3599 i1 = PREV_INSN (i1);
3600 i2 = PREV_INSN (i2);
3606 /* Don't allow the insn after a compare to be shared by
3607 cross-jumping unless the compare is also shared. */
3608 if (reg_mentioned_p (cc0_rtx, last1) && ! sets_cc0_p (last1))
3609 last1 = afterlast1, last2 = afterlast2, ninsns--;
3613 /* Include preceding notes and labels in the cross-jump.  One,
3614 this may bring us to the head of the blocks as requested above.
3615 Two, it keeps line number notes as matched as may be. */
3618 while (last1 != bb1->head && GET_CODE (PREV_INSN (last1)) == NOTE)
3619 last1 = PREV_INSN (last1);
3620 if (last1 != bb1->head && GET_CODE (PREV_INSN (last1)) == CODE_LABEL)
3621 last1 = PREV_INSN (last1);
3622 while (last2 != bb2->head && GET_CODE (PREV_INSN (last2)) == NOTE)
3623 last2 = PREV_INSN (last2);
3624 if (last2 != bb2->head && GET_CODE (PREV_INSN (last2)) == CODE_LABEL)
3625 last2 = PREV_INSN (last2);
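/* For example (schematic RTL, illustrative only): if both blocks end
   in the equivalent tail

	(set (reg x) (plus (reg a) (reg b)))
	(use (reg x))

   the backward scan above matches those two insns, the returned count
   is 2, and *F1/*F2 point at the first insn of each matched run.  */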
3634 /* Return true iff outgoing edges of BB1 and BB2 match, together with
3635 the branch instruction. This means that if we commonize the control
3636 flow before the end of the basic block, the semantics remain unchanged.
3638 We may assume that there exists one edge with a common destination. */
3641 outgoing_edges_match (bb1, bb2)
3645 /* If BB1 has only one successor, we must be looking at an unconditional
3646 jump. Which, by the assumption above, means that we only need to check
3647 that BB2 has one successor. */
3648 if (bb1->succ && !bb1->succ->succ_next)
3649 return (bb2->succ && !bb2->succ->succ_next);
3651 /* Match conditional jumps - this may get tricky when fallthru and branch
3652 edges are crossed. */
3654 && bb1->succ->succ_next
3655 && !bb1->succ->succ_next->succ_next
3656 && any_condjump_p (bb1->end))
3658 edge b1, f1, b2, f2;
3659 bool reverse, match;
3660 rtx set1, set2, cond1, cond2;
3661 enum rtx_code code1, code2;
3664 || !bb2->succ->succ_next
3665 || bb1->succ->succ_next->succ_next
3666 || !any_condjump_p (bb2->end))
3669 b1 = BRANCH_EDGE (bb1);
3670 b2 = BRANCH_EDGE (bb2);
3671 f1 = FALLTHRU_EDGE (bb1);
3672 f2 = FALLTHRU_EDGE (bb2);
3674 /* Get around possible forwarders on fallthru edges. Other cases
3675 should be optimized out already. */
3676 if (forwarder_block_p (f1->dest))
3677 f1 = f1->dest->succ;
3678 if (forwarder_block_p (f2->dest))
3679 f2 = f2->dest->succ;
3681 /* To simplify use of this function, return false if there are
3682 unneeded forwarder blocks. These will get eliminated later
3683 during cleanup_cfg. */
3684 if (forwarder_block_p (f1->dest)
3685 || forwarder_block_p (f2->dest)
3686 || forwarder_block_p (b1->dest)
3687 || forwarder_block_p (b2->dest))
3690 if (f1->dest == f2->dest && b1->dest == b2->dest)
3692 else if (f1->dest == b2->dest && b1->dest == f2->dest)
3697 set1 = pc_set (bb1->end);
3698 set2 = pc_set (bb2->end);
3699 if ((XEXP (SET_SRC (set1), 1) == pc_rtx)
3700 != (XEXP (SET_SRC (set2), 1) == pc_rtx))
3703 cond1 = XEXP (SET_SRC (set1), 0);
3704 cond2 = XEXP (SET_SRC (set2), 0);
3705 code1 = GET_CODE (cond1);
3707 code2 = reversed_comparison_code (cond2, bb2->end);
3709 code2 = GET_CODE (cond2);
3710 if (code2 == UNKNOWN)
3713 /* Verify codes and operands match. */
3714 match = ((code1 == code2
3715 && rtx_renumbered_equal_p (XEXP (cond1, 0), XEXP (cond2, 0))
3716 && rtx_renumbered_equal_p (XEXP (cond1, 1), XEXP (cond2, 1)))
3717 || (code1 == swap_condition (code2)
3718 && rtx_renumbered_equal_p (XEXP (cond1, 1),
3720 && rtx_renumbered_equal_p (XEXP (cond1, 0),
3723 /* If we return true, we will join the blocks. Which means that
3724 we will only have one branch prediction bit to work with. Thus
3725 we require the existing branches to have probabilities that are
3727 /* ??? We should use bb->frequency to allow merging in infrequently
3728 executed blocks, but at the moment it is not available when
3729 cleanup_cfg is run. */
3730 if (match && !optimize_size)
3734 note1 = find_reg_note (bb1->end, REG_BR_PROB, 0);
3735 note2 = find_reg_note (bb2->end, REG_BR_PROB, 0);
3739 prob1 = INTVAL (XEXP (note1, 0));
3740 prob2 = INTVAL (XEXP (note2, 0));
3742 prob2 = REG_BR_PROB_BASE - prob2;
3744 /* Fail if the difference in probabilities is
greater than 5%.  */
3746 if (abs (prob1 - prob2) > REG_BR_PROB_BASE / 20)
3749 else if (note1 || note2)
3753 if (rtl_dump_file && match)
3754 fprintf (rtl_dump_file, "Conditionals in bb %i and %i match.\n",
3755 bb1->index, bb2->index);
3760 /* ??? We can handle computed jumps too. This may be important for
3761 inlined functions containing switch statements. Also jumps w/o
3762 fallthru edges can be handled by simply matching the whole insn.  */
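/* For instance (illustrative): "if (a < b) goto L" in BB1 and
   "if (b > a) goto L" in BB2 match through the swap_condition check
   above; and when the branch and fallthru edges are crossed, the
   second condition is first reversed with reversed_comparison_code
   before the comparison.  */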
3766 /* E1 and E2 are edges with the same destination block. Search their
3767 predecessors for common code. If found, redirect control flow from
3768 (maybe the middle of) E1->SRC to (maybe the middle of) E2->SRC. */
3771 try_crossjump_to_edge (mode, e1, e2)
3776 basic_block src1 = e1->src, src2 = e2->src;
3777 basic_block redirect_to;
3778 rtx newpos1, newpos2;
3784 /* Search backward through forwarder blocks. We don't need to worry
3785 about multiple entry or chained forwarders, as they will be optimized
3786 away. We do this to look past the unconditional jump following a
3787 conditional jump that is required due to the current CFG shape. */
3789 && !src1->pred->pred_next
3790 && forwarder_block_p (src1))
3796 && !src2->pred->pred_next
3797 && forwarder_block_p (src2))
3803 /* Nothing to do if we reach ENTRY, or a common source block. */
3804 if (src1 == ENTRY_BLOCK_PTR || src2 == ENTRY_BLOCK_PTR)
3809 /* Seeing more than one forwarder block would confuse us later...  */
3810 if (forwarder_block_p (e1->dest)
3811 && forwarder_block_p (e1->dest->succ->dest))
3813 if (forwarder_block_p (e2->dest)
3814 && forwarder_block_p (e2->dest->succ->dest))
3817 /* Likewise with dead code (possibly newly created by the other
optimizations of cfg_cleanup).  */
3819 if (!src1->pred || !src2->pred)
3822 /* Likewise with complex edges.
3823 ??? We should be able to handle most complex edges later with some
care.  */
3825 if (e1->flags & EDGE_COMPLEX)
3828 /* Look for the common insn sequence, part the first ... */
3829 if (!outgoing_edges_match (src1, src2))
3832 /* ... and part the second. */
3833 nmatch = flow_find_cross_jump (mode, src1, src2, &newpos1, &newpos2);
3837 /* Avoid splitting if possible. */
3838 if (newpos2 == src2->head)
3843 fprintf (rtl_dump_file, "Splitting bb %i before %i insns\n",
3844 src2->index, nmatch);
3845 redirect_to = split_block (src2, PREV_INSN (newpos2))->dest;
3849 fprintf (rtl_dump_file,
3850 "Cross jumping from bb %i to bb %i; %i common insns\n",
3851 src1->index, src2->index, nmatch);
3853 redirect_to->count += src1->count;
3854 redirect_to->frequency += src1->frequency;
3856 /* Recompute the frequencies and counts of outgoing edges. */
3857 for (s = redirect_to->succ; s; s = s->succ_next)
3860 basic_block d = s->dest;
3862 if (forwarder_block_p (d))
3864 for (s2 = src1->succ; ; s2 = s2->succ_next)
3866 basic_block d2 = s2->dest;
3867 if (forwarder_block_p (d2))
3868 d2 = d2->succ->dest;
3872 s->count += s2->count;
3874 /* Take care to update possible forwarder blocks. We verified
3875 that there is no more than one in the chain, so we can't run
3876 into an infinite loop.  */
3877 if (forwarder_block_p (s->dest))
3879 s->dest->succ->count += s2->count;
3880 s->dest->count += s2->count;
3881 s->dest->frequency += EDGE_FREQUENCY (s);
3883 if (forwarder_block_p (s2->dest))
3885 s2->dest->succ->count -= s2->count;
3886 s2->dest->count -= s2->count;
3887 s2->dest->frequency -= EDGE_FREQUENCY (s);
3889 if (!redirect_to->frequency && !src1->frequency)
3890 s->probability = (s->probability + s2->probability) / 2;
3893 ((s->probability * redirect_to->frequency +
3894 s2->probability * src1->frequency)
3895 / (redirect_to->frequency + src1->frequency));
3898 note = find_reg_note (redirect_to->end, REG_BR_PROB, 0);
3900 XEXP (note, 0) = GEN_INT (BRANCH_EDGE (redirect_to)->probability);
3902 /* Edit SRC1 to go to REDIRECT_TO at NEWPOS1. */
3904 /* Skip possible basic block header. */
3905 if (GET_CODE (newpos1) == CODE_LABEL)
3906 newpos1 = NEXT_INSN (newpos1);
3907 if (GET_CODE (newpos1) == NOTE)
3908 newpos1 = NEXT_INSN (newpos1);
3911 /* Emit the jump insn. */
3912 label = block_label (redirect_to);
3913 src1->end = emit_jump_insn_before (gen_jump (label), newpos1);
3914 JUMP_LABEL (src1->end) = label;
3915 LABEL_NUSES (label)++;
3916 if (basic_block_for_insn)
3917 set_block_for_new_insns (src1->end, src1);
3919 /* Delete the now unreachable instructions. */
3920 flow_delete_insn_chain (newpos1, last);
3922 /* Make sure there is a barrier after the new jump. */
3923 last = next_nonnote_insn (src1->end);
3924 if (!last || GET_CODE (last) != BARRIER)
3925 emit_barrier_after (src1->end);
3929 remove_edge (src1->succ);
3930 make_edge (NULL, src1, redirect_to, 0);
3931 src1->succ->probability = REG_BR_PROB_BASE;
3932 src1->succ->count = src1->count;
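/* Illustrative result of a successful cross-jump (assumed layout):

	SRC1: ...a...; TAIL		SRC1: ...a...; goto L;
	SRC2: ...b...; TAIL	=>	SRC2: ...b...;
					L:    TAIL

   SRC2 is split before the matched TAIL, SRC1's copy of TAIL is
   deleted, and SRC1 jumps into the shared copy.  */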
3937 /* Search the predecessors of BB for common insn sequences. When found,
3938 share code between them by redirecting control flow. Return true if
3939 any changes were made.  */
3942 try_crossjump_bb (mode, bb)
3946 edge e, e2, nexte2, nexte, fallthru;
3949 /* Nothing to do if there are not at least two incoming edges.  */
3950 if (!bb->pred || !bb->pred->pred_next)
3953 /* It is always cheapest to redirect a block that ends in a branch to
3954 a block that falls through into BB, as that adds no branches to the
3955 program. We'll try that combination first. */
3956 for (fallthru = bb->pred; fallthru; fallthru = fallthru->pred_next)
3957 if (fallthru->flags & EDGE_FALLTHRU)
3961 for (e = bb->pred; e; e = nexte)
3963 nexte = e->pred_next;
3965 /* Elide complex edges now, as neither try_crossjump_to_edge
3966 nor outgoing_edges_match can handle them. */
3967 if (e->flags & EDGE_COMPLEX)
3970 /* As noted above, first try with the fallthru predecessor. */
3973 /* Don't combine the fallthru edge into anything else.
3974 If there is a match, we'll do it the other way around. */
3978 if (try_crossjump_to_edge (mode, e, fallthru))
3986 /* Non-obvious work limiting check: Recognize that we're going
3987 to call try_crossjump_bb on every basic block. So if we have
3988 two blocks with lots of outgoing edges (a switch) and they
3989 share lots of common destinations, then we would do the
3990 cross-jump check once for each common destination.
3992 Now, if the blocks actually are cross-jump candidates, then
3993 all of their destinations will be shared. Which means that
3994 we only need check them for cross-jump candidacy once. We
3995 can eliminate redundant checks of crossjump(A,B) by arbitrarily
3996 choosing to do the check from the block for which the edge
3997 in question is the first successor of A. */
3998 if (e->src->succ != e)
4001 for (e2 = bb->pred; e2; e2 = nexte2)
4003 nexte2 = e2->pred_next;
4008 /* We've already checked the fallthru edge above. */
4012 /* Again, neither try_crossjump_to_edge nor outgoing_edges_match
4013 can handle complex edges. */
4014 if (e2->flags & EDGE_COMPLEX)
4017 /* The "first successor" check above only prevents multiple
4018 checks of crossjump(A,B). In order to prevent redundant
4019 checks of crossjump(B,A), require that A be the block
4020 with the lowest index. */
4021 if (e->src->index > e2->src->index)
4024 if (try_crossjump_to_edge (mode, e, e2))
4036 /* Do simple CFG optimizations - basic block merging, simplification of
4037 jump instructions, etc.  Return nonzero if changes were made.  */
4040 try_optimize_cfg (mode)
4044 bool changed_overall = false;
4048 /* Attempt to merge blocks as made possible by edge removal. If a block
4049 has only one successor, and the successor has only one predecessor,
4050 they may be combined. */
4058 fprintf (rtl_dump_file, "\n\ntry_optimize_cfg iteration %i\n\n",
4061 for (i = 0; i < n_basic_blocks;)
4063 basic_block c, b = BASIC_BLOCK (i);
4065 bool changed_here = false;
4067 /* Delete trivially dead basic blocks. */
4068 while (b->pred == NULL)
4070 c = BASIC_BLOCK (b->index - 1);
4072 fprintf (rtl_dump_file, "Deleting block %i.\n", b->index);
4073 flow_delete_block (b);
4078 /* Remove code labels no longer used. Don't do this before
4079 CALL_PLACEHOLDER is removed, as some branches may be hidden
within.  */
4081 if (b->pred->pred_next == NULL
4082 && (b->pred->flags & EDGE_FALLTHRU)
4083 && !(b->pred->flags & EDGE_COMPLEX)
4084 && GET_CODE (b->head) == CODE_LABEL
4085 && (!(mode & CLEANUP_PRE_SIBCALL)
4086 || !tail_recursion_label_p (b->head))
4087 /* If the previous block ends with a condjump jumping to the next BB,
4088 we can't delete the label.  */
4089 && (b->pred->src == ENTRY_BLOCK_PTR
4090 || !reg_mentioned_p (b->head, b->pred->src->end)))
4092 rtx label = b->head;
4093 b->head = NEXT_INSN (b->head);
4094 flow_delete_insn_chain (label, label);
4096 fprintf (rtl_dump_file, "Deleted label in block %i.\n",
4100 /* If we fall through an empty block, we can remove it. */
4101 if (b->pred->pred_next == NULL
4102 && (b->pred->flags & EDGE_FALLTHRU)
4103 && GET_CODE (b->head) != CODE_LABEL
4104 && forwarder_block_p (b)
4105 /* Note that forwarder_block_p true ensures that there
4106 is a successor for this block. */
4107 && (b->succ->flags & EDGE_FALLTHRU)
4108 && n_basic_blocks > 1)
4111 fprintf (rtl_dump_file, "Deleting fallthru block %i.\n",
4113 c = BASIC_BLOCK (b->index ? b->index - 1 : 1);
4114 redirect_edge_succ_nodup (b->pred, b->succ->dest);
4115 flow_delete_block (b);
4120 /* Merge blocks.  Loop because chains of blocks might be
combinable.  */
4122 while ((s = b->succ) != NULL
4123 && s->succ_next == NULL
4124 && !(s->flags & EDGE_COMPLEX)
4125 && (c = s->dest) != EXIT_BLOCK_PTR
4126 && c->pred->pred_next == NULL
4127 /* If the jump insn has side effects,
4128 we can't kill the edge. */
4129 && (GET_CODE (b->end) != JUMP_INSN
4130 || onlyjump_p (b->end))
4131 && merge_blocks (s, b, c, mode))
4132 changed_here = true;
4134 /* Simplify branch over branch. */
4135 if ((mode & CLEANUP_EXPENSIVE) && try_simplify_condjump (b))
4136 changed_here = true;
4138 /* If B has a single outgoing edge, but uses a non-trivial jump
4139 instruction without side-effects, we can either delete the
4140 jump entirely, or replace it with a simple unconditional jump.
4141 Use redirect_edge_and_branch to do the dirty work. */
4143 && ! b->succ->succ_next
4144 && b->succ->dest != EXIT_BLOCK_PTR
4145 && onlyjump_p (b->end)
4146 && redirect_edge_and_branch (b->succ, b->succ->dest))
4147 changed_here = true;
4149 /* Simplify branch to branch. */
4150 if (try_forward_edges (mode, b))
4151 changed_here = true;
4153 /* Look for shared code between blocks. */
4154 if ((mode & CLEANUP_CROSSJUMP)
4155 && try_crossjump_bb (mode, b))
4156 changed_here = true;
4158 /* Don't get confused by the index shift caused by deleting
blocks.  */
4166 if ((mode & CLEANUP_CROSSJUMP)
4167 && try_crossjump_bb (mode, EXIT_BLOCK_PTR))
4170 #ifdef ENABLE_CHECKING
4172 verify_flow_info ();
4175 changed_overall |= changed;
4178 return changed_overall;
4181 /* The given edge should potentially be a fallthru edge. If that is in
4182 fact true, delete the jump and barriers that are in the way. */
4185 tidy_fallthru_edge (e, b, c)
4191 /* ??? In a late-running flow pass, other folks may have deleted basic
4192 blocks by nopping out blocks, leaving multiple BARRIERs between here
4193 and the target label.  They ought to be chastised and fixed.
4195 We can also wind up with a sequence of undeletable labels between
4196 one block and the next.
4198 So search through a sequence of barriers, labels, and notes for
4199 the head of block C and assert that we really do fall through. */
4201 if (next_real_insn (b->end) != next_real_insn (PREV_INSN (c->head)))
4204 /* Remove what will soon cease being the jump insn from the source block.
4205 If block B consisted only of this single jump, turn it into a deleted
note.  */
4208 if (GET_CODE (q) == JUMP_INSN
4210 && (any_uncondjump_p (q)
4211 || (b->succ == e && e->succ_next == NULL)))
4214 /* If this was a conditional jump, we need to also delete
4215 the insn that set cc0. */
4216 if (any_condjump_p (q) && only_sets_cc0_p (PREV_INSN (q)))
4223 NOTE_LINE_NUMBER (q) = NOTE_INSN_DELETED;
4224 NOTE_SOURCE_FILE (q) = 0;
4230 /* We don't want a block to end on a line-number note since that has
4231 the potential of changing the code between -g and not -g. */
4232 while (GET_CODE (q) == NOTE && NOTE_LINE_NUMBER (q) >= 0)
4239 /* Selectively unlink the sequence. */
4240 if (q != PREV_INSN (c->head))
4241 flow_delete_insn_chain (NEXT_INSN (q), PREV_INSN (c->head));
4243 e->flags |= EDGE_FALLTHRU;
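/* Illustrative before/after (B physically precedes C):

	B:  ...			B:  ...
	    goto L;	=>	C:  ...
	    barrier
	C:  L: ...

   The jump and barrier are unlinked and the edge is marked
   EDGE_FALLTHRU.  */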
4246 /* Fix up edges that now fall through, or rather should now fall through
4247 but previously required a jump around now deleted blocks. Simplify
4248 the search by only examining blocks numerically adjacent, since this
4249 is how find_basic_blocks created them. */
4252 tidy_fallthru_edges ()
4256 for (i = 1; i < n_basic_blocks; ++i)
4258 basic_block b = BASIC_BLOCK (i - 1);
4259 basic_block c = BASIC_BLOCK (i);
4262 /* We care about simple conditional or unconditional jumps with
a single successor.

4265 If we had a conditional branch to the next instruction when
4266 find_basic_blocks was called, then there will only be one
4267 out edge for the block which ended with the conditional
4268 branch (since we do not create duplicate edges).
4270 Furthermore, the edge will be marked as a fallthru because we
4271 merge the flags for the duplicate edges. So we do not want to
4272 check that the edge is not a FALLTHRU edge. */
4273 if ((s = b->succ) != NULL
4274 && ! (s->flags & EDGE_COMPLEX)
4275 && s->succ_next == NULL
4277 /* If the jump insn has side effects, we can't tidy the edge. */
4278 && (GET_CODE (b->end) != JUMP_INSN
4279 || onlyjump_p (b->end)))
4280 tidy_fallthru_edge (s, b, c);
4284 /* Perform data flow analysis.
4285 F is the first insn of the function; FLAGS is a set of PROP_* flags
4286 to be used in accumulating flow info. */
4289 life_analysis (f, file, flags)
4294 #ifdef ELIMINABLE_REGS
4296 static struct {int from, to; } eliminables[] = ELIMINABLE_REGS;
4299 /* Record which registers will be eliminated.  We use this in
mark_used_regs.  */
4302 CLEAR_HARD_REG_SET (elim_reg_set);
4304 #ifdef ELIMINABLE_REGS
4305 for (i = 0; i < (int) ARRAY_SIZE (eliminables); i++)
4306 SET_HARD_REG_BIT (elim_reg_set, eliminables[i].from);
4308 SET_HARD_REG_BIT (elim_reg_set, FRAME_POINTER_REGNUM);
4312 flags &= ~(PROP_LOG_LINKS | PROP_AUTOINC | PROP_ALLOW_CFG_CHANGES);
4314 /* The post-reload life analysis has (on a global basis) the same
4315 registers live as were computed by reload itself.  Otherwise
4316 elimination offsets and such may be incorrect.

4318 Reload will mark some registers as live even though they do not
appear in the rtl.

4321 We don't want to create new auto-incs after reload, since they
4322 are unlikely to be useful and can cause problems with shared
stack slots.  */
4324 if (reload_completed)
4325 flags &= ~(PROP_REG_INFO | PROP_AUTOINC);
4327 /* We want alias analysis information for local dead store elimination. */
4328 if (optimize && (flags & PROP_SCAN_DEAD_CODE))
4329 init_alias_analysis ();
4331 /* Always remove no-op moves. Do this before other processing so
4332 that we don't have to keep re-scanning them. */
4333 delete_noop_moves (f);
4335 /* Some targets can emit simpler epilogues if they know that sp was
4336 not ever modified during the function. After reload, of course,
4337 we've already emitted the epilogue so there's no sense searching. */
4338 if (! reload_completed)
4339 notice_stack_pointer_modification (f);
4341 /* Allocate and zero out data structures that will record the
4342 data from lifetime analysis. */
4343 allocate_reg_life_data ();
4344 allocate_bb_life_data ();
4346 /* Find the set of registers live on function exit. */
4347 mark_regs_live_at_end (EXIT_BLOCK_PTR->global_live_at_start);
4349 /* "Update" life info from zero. It'd be nice to begin the
4350 relaxation with just the exit and noreturn blocks, but that set
4351 is not immediately handy. */
4353 if (flags & PROP_REG_INFO)
4354 memset (regs_ever_live, 0, sizeof (regs_ever_live));
4355 update_life_info (NULL, UPDATE_LIFE_GLOBAL, flags);
4358 if (optimize && (flags & PROP_SCAN_DEAD_CODE))
4359 end_alias_analysis ();
4362 dump_flow_info (file);
4364 free_basic_block_vars (1);
4366 #ifdef ENABLE_CHECKING
4370 /* Search for any REG_LABEL notes which reference deleted labels. */
4371 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4373 rtx inote = find_reg_note (insn, REG_LABEL, NULL_RTX);
4375 if (inote && GET_CODE (XEXP (inote, 0)) == NOTE)
4380 /* Removing dead insns should've made jumptables really dead. */
4381 delete_dead_jumptables ();
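/* A typical invocation (illustrative; the exact flags vary by pass):

	life_analysis (get_insns (), rtl_dump_file, PROP_FINAL);

   where PROP_FINAL bundles the usual PROP_* bits (death notes, log
   links, register info, dead code removal and friends).  */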
4384 /* A subroutine of verify_wide_reg, called through for_each_rtx.
4385 Search for REGNO. If found, abort if it is not wider than word_mode. */
4388 verify_wide_reg_1 (px, pregno)
4393 unsigned int regno = *(int *) pregno;
4395 if (GET_CODE (x) == REG && REGNO (x) == regno)
4397 if (GET_MODE_BITSIZE (GET_MODE (x)) <= BITS_PER_WORD)
4404 /* A subroutine of verify_local_live_at_start. Search through insns
4405 between HEAD and END looking for register REGNO. */
4408 verify_wide_reg (regno, head, end)
4415 && for_each_rtx (&PATTERN (head), verify_wide_reg_1, &regno))
4419 head = NEXT_INSN (head);
4422 /* We didn't find the register at all. Something's way screwy. */
4424 fprintf (rtl_dump_file, "Aborting in verify_wide_reg; reg %d\n", regno);
4425 print_rtl_and_abort ();
4428 /* A subroutine of update_life_info. Verify that there are no untoward
4429 changes in live_at_start during a local update. */
4432 verify_local_live_at_start (new_live_at_start, bb)
4433 regset new_live_at_start;
4436 if (reload_completed)
4438 /* After reload, there are no pseudos, nor subregs of multi-word
4439 registers. The regsets should exactly match. */
4440 if (! REG_SET_EQUAL_P (new_live_at_start, bb->global_live_at_start))
4444 fprintf (rtl_dump_file,
4445 "live_at_start mismatch in bb %d, aborting\n",
4447 debug_bitmap_file (rtl_dump_file, bb->global_live_at_start);
4448 debug_bitmap_file (rtl_dump_file, new_live_at_start);
4450 print_rtl_and_abort ();
4457 /* Find the set of changed registers. */
4458 XOR_REG_SET (new_live_at_start, bb->global_live_at_start);
4460 EXECUTE_IF_SET_IN_REG_SET (new_live_at_start, 0, i,
4462 /* No registers should die. */
4463 if (REGNO_REG_SET_P (bb->global_live_at_start, i))
4466 fprintf (rtl_dump_file,
4467 "Register %d died unexpectedly in block %d\n", i,
4469 print_rtl_and_abort ();
4472 /* Verify that the now-live register is wider than word_mode. */
4473 verify_wide_reg (i, bb->head, bb->end);
4478 /* Updates life information starting with the basic blocks set in BLOCKS.
4479 If BLOCKS is null, consider it to be the universal set.
4481 If EXTENT is UPDATE_LIFE_LOCAL, such as after splitting or peepholing,
4482 we are only expecting local modifications to basic blocks. If we find
4483 extra registers live at the beginning of a block, then we either killed
4484 useful data, or we have a broken split that wants data not provided.
4485 If we find registers removed from live_at_start, that means we have
4486 a broken peephole that is killing a register it shouldn't.
4488 ??? This is not true in one situation -- when a pre-reload splitter
4489 generates subregs of a multi-word pseudo, current life analysis will
4490 lose the kill. So we _can_ have a pseudo go live. How irritating.
4492 Including PROP_REG_INFO does not properly refresh regs_ever_live
4493 unless the caller resets it to zero. */
4496 update_life_info (blocks, extent, prop_flags)
4498 enum update_life_extent extent;
4502 regset_head tmp_head;
4505 tmp = INITIALIZE_REG_SET (tmp_head);
4507 /* Changes to the CFG are only allowed when
4508 doing a global update for the entire CFG. */
4509 if ((prop_flags & PROP_ALLOW_CFG_CHANGES)
4510 && (extent == UPDATE_LIFE_LOCAL || blocks))
4513 /* For a global update, we go through the relaxation process again. */
4514 if (extent != UPDATE_LIFE_LOCAL)
4520 calculate_global_regs_live (blocks, blocks,
4521 prop_flags & (PROP_SCAN_DEAD_CODE
4522 | PROP_ALLOW_CFG_CHANGES));
4524 if ((prop_flags & (PROP_KILL_DEAD_CODE | PROP_ALLOW_CFG_CHANGES))
4525 != (PROP_KILL_DEAD_CODE | PROP_ALLOW_CFG_CHANGES))
4528 /* Removing dead code may allow the CFG to be simplified which
4529 in turn may allow for further dead code detection / removal. */
4530 for (i = n_basic_blocks - 1; i >= 0; --i)
4532 basic_block bb = BASIC_BLOCK (i);
4534 COPY_REG_SET (tmp, bb->global_live_at_end);
4535 changed |= propagate_block (bb, tmp, NULL, NULL,
4536 prop_flags & (PROP_SCAN_DEAD_CODE
4537 | PROP_KILL_DEAD_CODE));
4540 if (! changed || ! try_optimize_cfg (CLEANUP_EXPENSIVE))
4543 delete_unreachable_blocks ();
4544 mark_critical_edges ();
4547 /* If asked, remove notes from the blocks we'll update. */
4548 if (extent == UPDATE_LIFE_GLOBAL_RM_NOTES)
4549 count_or_remove_death_notes (blocks, 1);
4554 EXECUTE_IF_SET_IN_SBITMAP (blocks, 0, i,
4556 basic_block bb = BASIC_BLOCK (i);
4558 COPY_REG_SET (tmp, bb->global_live_at_end);
4559 propagate_block (bb, tmp, NULL, NULL, prop_flags);
4561 if (extent == UPDATE_LIFE_LOCAL)
4562 verify_local_live_at_start (tmp, bb);
4567 for (i = n_basic_blocks - 1; i >= 0; --i)
4569 basic_block bb = BASIC_BLOCK (i);
4571 COPY_REG_SET (tmp, bb->global_live_at_end);
4572 propagate_block (bb, tmp, NULL, NULL, prop_flags);
4574 if (extent == UPDATE_LIFE_LOCAL)
4575 verify_local_live_at_start (tmp, bb);
4581 if (prop_flags & PROP_REG_INFO)
4583 /* The only pseudos that are live at the beginning of the function
4584 are those that were not set anywhere in the function. local-alloc
4585 doesn't know how to handle these correctly, so mark them as not
4586 local to any one basic block. */
4587 EXECUTE_IF_SET_IN_REG_SET (ENTRY_BLOCK_PTR->global_live_at_end,
4588 FIRST_PSEUDO_REGISTER, i,
4589 { REG_BASIC_BLOCK (i) = REG_BLOCK_GLOBAL; });
4591 /* We have a problem with any pseudoreg that lives across the setjmp.
4592 ANSI says that if a user variable does not change in value between
4593 the setjmp and the longjmp, then the longjmp preserves it. This
4594 includes longjmp from a place where the pseudo appears dead.
4595 (In principle, the value still exists if it is in scope.)
4596 If the pseudo goes in a hard reg, some other value may occupy
4597 that hard reg where this pseudo is dead, thus clobbering the pseudo.
4598 Conclusion: such a pseudo must not go in a hard reg. */
4599 EXECUTE_IF_SET_IN_REG_SET (regs_live_at_setjmp,
4600 FIRST_PSEUDO_REGISTER, i,
4602 if (regno_reg_rtx[i] != 0)
4604 REG_LIVE_LENGTH (i) = -1;
4605 REG_BASIC_BLOCK (i) = REG_BLOCK_UNKNOWN;
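/* Illustrative sketch (not part of this pass): a typical invocation of
   update_life_info after a transformation that only touched insns inside
   one known block.  MODIFIED_BB and the flag choice are hypothetical
   examples, not a fixed recipe.  */
#if 0
{
  sbitmap blocks = sbitmap_alloc (n_basic_blocks);

  sbitmap_zero (blocks);
  SET_BIT (blocks, modified_bb->index);	/* hypothetical block */

  /* Verify that only local changes occurred; PROP_DEATH_NOTES refreshes
     the REG_DEAD / REG_UNUSED notes within those blocks.  */
  update_life_info (blocks, UPDATE_LIFE_LOCAL, PROP_DEATH_NOTES);
  sbitmap_free (blocks);
}
#endif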
4611 /* Free the variables allocated by find_basic_blocks.
4613 KEEP_HEAD_END_P is non-zero if basic_block_info is not to be freed. */
4616 free_basic_block_vars (keep_head_end_p)
4617 int keep_head_end_p;
4619 if (basic_block_for_insn)
4621 VARRAY_FREE (basic_block_for_insn);
4622 basic_block_for_insn = NULL;
4625 if (! keep_head_end_p)
4627 if (basic_block_info)
4630 VARRAY_FREE (basic_block_info);
4634 ENTRY_BLOCK_PTR->aux = NULL;
4635 ENTRY_BLOCK_PTR->global_live_at_end = NULL;
4636 EXIT_BLOCK_PTR->aux = NULL;
4637 EXIT_BLOCK_PTR->global_live_at_start = NULL;
4641 /* Delete any insns that copy a register to itself. */
4644 delete_noop_moves (f)
4645 rtx f ATTRIBUTE_UNUSED;
4651 for (i = 0; i < n_basic_blocks; i++)
4653 bb = BASIC_BLOCK (i);
4654 for (insn = bb->head; insn != NEXT_INSN (bb->end); insn = next)
4656 next = NEXT_INSN (insn);
4657 if (INSN_P (insn) && noop_move_p (insn))
4659 /* Do not call flow_delete_insn here, so as not to confuse the
4660 backward pointers of the LIBCALL block. */
4661 PUT_CODE (insn, NOTE);
4662 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
4663 NOTE_SOURCE_FILE (insn) = 0;
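/* For illustration only: conceptually, a no-op move is a single SET
   copying a register to itself.  A minimal sketch of that core test;
   the real noop_move_p also handles libcall blocks and other cases.  */
#if 0
static int
noop_move_sketch (insn)
     rtx insn;
{
  rtx set = single_set (insn);

  return (set != 0
	  && GET_CODE (SET_SRC (set)) == REG
	  && GET_CODE (SET_DEST (set)) == REG
	  && REGNO (SET_SRC (set)) == REGNO (SET_DEST (set)));
}
#endif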
4669 /* Delete any jump tables never referenced. We can't delete them at the
4670 time of removing the tablejump insn, as they are referenced by the preceding
4671 insns computing the destination; so we delay deleting them and garbage-collect
4672 them once life information is computed. */
4674 delete_dead_jumptables ()
4677 for (insn = get_insns (); insn; insn = next)
4679 next = NEXT_INSN (insn);
4680 if (GET_CODE (insn) == CODE_LABEL
4681 && LABEL_NUSES (insn) == 0
4682 && GET_CODE (next) == JUMP_INSN
4683 && (GET_CODE (PATTERN (next)) == ADDR_VEC
4684 || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
4687 fprintf (rtl_dump_file, "Dead jumptable %i removed\n", INSN_UID (insn));
4688 flow_delete_insn (NEXT_INSN (insn));
4689 flow_delete_insn (insn);
4690 next = NEXT_INSN (next);
4695 /* Determine if the stack pointer is constant over the life of the function.
4696 Only useful before prologues have been emitted. */
4699 notice_stack_pointer_modification_1 (x, pat, data)
4701 rtx pat ATTRIBUTE_UNUSED;
4702 void *data ATTRIBUTE_UNUSED;
4704 if (x == stack_pointer_rtx
4705 /* The stack pointer is only modified indirectly as the result
4706 of a push until later in flow. See the comments in rtl.texi
4707 regarding Embedded Side-Effects on Addresses. */
4708 || (GET_CODE (x) == MEM
4709 && GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == 'a'
4710 && XEXP (XEXP (x, 0), 0) == stack_pointer_rtx))
4711 current_function_sp_is_unchanging = 0;
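/* For example (illustrative only), a push insn such as

       (set (mem (pre_dec (reg sp))) (reg X))

   changes sp only through the embedded PRE_DEC; the address is of
   autoincrement class 'a', so the MEM test above catches it.  */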
4715 notice_stack_pointer_modification (f)
4720 /* Assume that the stack pointer is unchanging if alloca hasn't
4721 been used. */
4722 current_function_sp_is_unchanging = !current_function_calls_alloca;
4723 if (! current_function_sp_is_unchanging)
4726 for (insn = f; insn; insn = NEXT_INSN (insn))
4730 /* Check if insn modifies the stack pointer. */
4731 note_stores (PATTERN (insn), notice_stack_pointer_modification_1,
4733 if (! current_function_sp_is_unchanging)
4739 /* Mark a register in SET. Hard registers in large modes get all
4740 of their component registers set as well. */
4743 mark_reg (reg, xset)
4747 regset set = (regset) xset;
4748 int regno = REGNO (reg);
4750 if (GET_MODE (reg) == BLKmode)
4753 SET_REGNO_REG_SET (set, regno);
4754 if (regno < FIRST_PSEUDO_REGISTER)
4756 int n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
4757 while (--n > 0)
4758 SET_REGNO_REG_SET (set, regno + n);
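/* For instance (illustrative only): on a 32-bit target, a DImode value
   in hard register N has HARD_REGNO_NREGS == 2, so the loop above
   enters both N and N + 1 in SET.  */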
4762 /* Mark those regs which are needed at the end of the function as live
4763 at the end of the last basic block. */
4766 mark_regs_live_at_end (set)
4771 /* If exiting needs the right stack value, consider the stack pointer
4772 live at the end of the function. */
4773 if ((HAVE_epilogue && reload_completed)
4774 || ! EXIT_IGNORE_STACK
4775 || (! FRAME_POINTER_REQUIRED
4776 && ! current_function_calls_alloca
4777 && flag_omit_frame_pointer)
4778 || current_function_sp_is_unchanging)
4780 SET_REGNO_REG_SET (set, STACK_POINTER_REGNUM);
4783 /* Mark the frame pointer if needed at the end of the function. If
4784 we end up eliminating it, it will be removed from the live list
4785 of each basic block by reload. */
4787 if (! reload_completed || frame_pointer_needed)
4789 SET_REGNO_REG_SET (set, FRAME_POINTER_REGNUM);
4790 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
4791 /* If they are different, also mark the hard frame pointer as live. */
4792 if (! LOCAL_REGNO (HARD_FRAME_POINTER_REGNUM))
4793 SET_REGNO_REG_SET (set, HARD_FRAME_POINTER_REGNUM);
4797 #ifndef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
4798 /* Many architectures have a GP register even without flag_pic.
4799 Assume the pic register is not in use, or will be handled by
4800 other means, if it is not fixed. */
4801 if (PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
4802 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
4803 SET_REGNO_REG_SET (set, PIC_OFFSET_TABLE_REGNUM);
4806 /* Mark all global registers, and all registers used by the epilogue
4807 as being live at the end of the function since they may be
4808 referenced by our caller. */
4809 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4810 if (global_regs[i] || EPILOGUE_USES (i))
4811 SET_REGNO_REG_SET (set, i);
4813 if (HAVE_epilogue && reload_completed)
4815 /* Mark all call-saved registers that we actually used. */
4816 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4817 if (regs_ever_live[i] && ! LOCAL_REGNO (i)
4818 && ! TEST_HARD_REG_BIT (regs_invalidated_by_call, i))
4819 SET_REGNO_REG_SET (set, i);
4822 #ifdef EH_RETURN_DATA_REGNO
4823 /* Mark the registers that will contain data for the handler. */
4824 if (reload_completed && current_function_calls_eh_return)
4827 unsigned regno = EH_RETURN_DATA_REGNO(i);
4828 if (regno == INVALID_REGNUM)
4830 SET_REGNO_REG_SET (set, regno);
4833 #ifdef EH_RETURN_STACKADJ_RTX
4834 if ((! HAVE_epilogue || ! reload_completed)
4835 && current_function_calls_eh_return)
4837 rtx tmp = EH_RETURN_STACKADJ_RTX;
4838 if (tmp && REG_P (tmp))
4839 mark_reg (tmp, set);
4842 #ifdef EH_RETURN_HANDLER_RTX
4843 if ((! HAVE_epilogue || ! reload_completed)
4844 && current_function_calls_eh_return)
4846 rtx tmp = EH_RETURN_HANDLER_RTX;
4847 if (tmp && REG_P (tmp))
4848 mark_reg (tmp, set);
4852 /* Mark function return value. */
4853 diddle_return_value (mark_reg, set);
4856 /* Callback function for for_each_successor_phi. DATA is a regset.
4857 Sets the SRC_REGNO, the regno of the phi alternative for phi node
4858 INSN, in the regset. */
4861 set_phi_alternative_reg (insn, dest_regno, src_regno, data)
4862 rtx insn ATTRIBUTE_UNUSED;
4863 int dest_regno ATTRIBUTE_UNUSED;
4867 regset live = (regset) data;
4868 SET_REGNO_REG_SET (live, src_regno);
4872 /* Propagate global life info around the graph of basic blocks. Begin
4873 considering blocks with their corresponding bit set in BLOCKS_IN.
4874 If BLOCKS_IN is null, consider it the universal set.
4876 BLOCKS_OUT is set for every block that was changed. */
4879 calculate_global_regs_live (blocks_in, blocks_out, flags)
4880 sbitmap blocks_in, blocks_out;
4883 basic_block *queue, *qhead, *qtail, *qend;
4884 regset tmp, new_live_at_end, call_used;
4885 regset_head tmp_head, call_used_head;
4886 regset_head new_live_at_end_head;
4889 tmp = INITIALIZE_REG_SET (tmp_head);
4890 new_live_at_end = INITIALIZE_REG_SET (new_live_at_end_head);
4891 call_used = INITIALIZE_REG_SET (call_used_head);
4893 /* Inconveniently, this is only readily available in hard reg set form. */
4894 for (i = 0; i < FIRST_PSEUDO_REGISTER; ++i)
4895 if (call_used_regs[i])
4896 SET_REGNO_REG_SET (call_used, i);
4898 /* Create a worklist. Allocate an extra slot for ENTRY_BLOCK, and one
4899 because the `head == tail' style test for an empty queue doesn't
4900 work with a full queue. */
4901 queue = (basic_block *) xmalloc ((n_basic_blocks + 2) * sizeof (*queue));
4903 qhead = qend = queue + n_basic_blocks + 2;
4905 /* Queue the blocks set in the initial mask. Do this in reverse block
4906 number order so that the first round is more likely to do
4907 useful work. We use AUX non-null to flag that the block is queued. */
4910 /* Clear out the garbage that might be hanging out in bb->aux. */
4911 for (i = n_basic_blocks - 1; i >= 0; --i)
4912 BASIC_BLOCK (i)->aux = NULL;
4914 EXECUTE_IF_SET_IN_SBITMAP (blocks_in, 0, i,
4916 basic_block bb = BASIC_BLOCK (i);
4923 for (i = 0; i < n_basic_blocks; ++i)
4925 basic_block bb = BASIC_BLOCK (i);
4932 sbitmap_zero (blocks_out);
4934 /* We work through the queue until there are no more blocks. What
4935 is live at the end of this block is precisely the union of what
4936 is live at the beginning of all its successors. So, we set its
4937 GLOBAL_LIVE_AT_END field based on the GLOBAL_LIVE_AT_START field
4938 for its successors. Then, we compute GLOBAL_LIVE_AT_START for
4939 this block by walking through the instructions in this block in
4940 reverse order and updating as we go. If that changed
4941 GLOBAL_LIVE_AT_START, we add the predecessors of the block to the
4942 queue; they will now need to recalculate GLOBAL_LIVE_AT_END.
4944 We are guaranteed to terminate, because GLOBAL_LIVE_AT_START
4945 never shrinks. If a register appears in GLOBAL_LIVE_AT_START, it
4946 must either be live at the end of the block, or used within the
4947 block. In the latter case, it will certainly never disappear
4948 from GLOBAL_LIVE_AT_START. In the former case, the register
4949 could go away only if it disappeared from GLOBAL_LIVE_AT_START
4950 for one of the successor blocks. By induction, that cannot
4951 occur. */
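/* In standard dataflow terms, the loop below iterates the backward
   liveness equations

       live_at_end (B)   = UNION over successors S of live_at_start (S)
       live_at_start (B) = use (B) | (live_at_end (B) & ~def (B))

   to a fixed point, re-queueing B's predecessors whenever
   live_at_start (B) grows.  (Summary for illustration only; the code
   below also forces some hard registers live and rescans for
   conditional lifetimes.)  */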
4952 while (qhead != qtail)
4954 int rescan, changed;
4963 /* Begin by propagating live_at_start from the successor blocks. */
4964 CLEAR_REG_SET (new_live_at_end);
4965 for (e = bb->succ; e; e = e->succ_next)
4967 basic_block sb = e->dest;
4969 /* Call-clobbered registers die across exception and call edges. */
4970 /* ??? Abnormal call edges ignored for the moment, as this gets
4971 confused by sibling call edges, which crashes reg-stack. */
4972 if (e->flags & EDGE_EH)
4974 bitmap_operation (tmp, sb->global_live_at_start,
4975 call_used, BITMAP_AND_COMPL);
4976 IOR_REG_SET (new_live_at_end, tmp);
4979 IOR_REG_SET (new_live_at_end, sb->global_live_at_start);
4982 /* The all-important stack pointer must always be live. */
4983 SET_REGNO_REG_SET (new_live_at_end, STACK_POINTER_REGNUM);
4985 /* Before reload, there are a few registers that must be forced
4986 live everywhere -- which might not already be the case for
4987 blocks within infinite loops. */
4988 if (! reload_completed)
4990 /* Any reference to any pseudo before reload is a potential
4991 reference of the frame pointer. */
4992 SET_REGNO_REG_SET (new_live_at_end, FRAME_POINTER_REGNUM);
4994 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
4995 /* Pseudos with argument area equivalences may require
4996 reloading via the argument pointer. */
4997 if (fixed_regs[ARG_POINTER_REGNUM])
4998 SET_REGNO_REG_SET (new_live_at_end, ARG_POINTER_REGNUM);
5001 /* Any constant, or pseudo with constant equivalences, may
5002 require reloading from memory using the pic register. */
5003 if (PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
5004 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
5005 SET_REGNO_REG_SET (new_live_at_end, PIC_OFFSET_TABLE_REGNUM);
5008 /* Regs used in phi nodes are not included in
5009 global_live_at_start, since they are live only along a
5010 particular edge. Set those regs that are live because of a
5011 phi node alternative corresponding to this particular block. */
5013 for_each_successor_phi (bb, &set_phi_alternative_reg,
5016 if (bb == ENTRY_BLOCK_PTR)
5018 COPY_REG_SET (bb->global_live_at_end, new_live_at_end);
5022 /* On our first pass through this block, we'll go ahead and continue.
5023 Recognize first pass by local_set NULL. On subsequent passes, we
5024 get to skip out early if live_at_end wouldn't have changed. */
5026 if (bb->local_set == NULL)
5028 bb->local_set = OBSTACK_ALLOC_REG_SET (&flow_obstack);
5029 bb->cond_local_set = OBSTACK_ALLOC_REG_SET (&flow_obstack);
5034 /* If any bits were removed from live_at_end, we'll have to
5035 rescan the block. This wouldn't be necessary if we had
5036 precalculated local_live, however with PROP_SCAN_DEAD_CODE
5037 local_live is really dependent on live_at_end. */
5038 CLEAR_REG_SET (tmp);
5039 rescan = bitmap_operation (tmp, bb->global_live_at_end,
5040 new_live_at_end, BITMAP_AND_COMPL);
5044 /* If any of the registers in the new live_at_end set are
5045 conditionally set in this basic block, we must rescan.
5046 This is because conditional lifetimes at the end of the
5047 block do not just take the live_at_end set into account,
5048 but also the liveness at the start of each successor
5049 block. We can miss changes in those sets if we only
5050 compare the new live_at_end against the previous one. */
5051 CLEAR_REG_SET (tmp);
5052 rescan = bitmap_operation (tmp, new_live_at_end,
5053 bb->cond_local_set, BITMAP_AND);
5058 /* Find the set of changed bits. Take this opportunity
5059 to notice that this set is empty and early out. */
5060 CLEAR_REG_SET (tmp);
5061 changed = bitmap_operation (tmp, bb->global_live_at_end,
5062 new_live_at_end, BITMAP_XOR);
5066 /* If any of the changed bits overlap with local_set,
5067 we'll have to rescan the block. Detect overlap by
5068 the AND with ~local_set turning off bits. */
5069 rescan = bitmap_operation (tmp, tmp, bb->local_set,
5074 /* Let our caller know that BB changed enough to require its
5075 death notes updated. */
5077 SET_BIT (blocks_out, bb->index);
5081 /* Add to live_at_start the set of all registers in
5082 new_live_at_end that aren't in the old live_at_end. */
5084 bitmap_operation (tmp, new_live_at_end, bb->global_live_at_end,
5086 COPY_REG_SET (bb->global_live_at_end, new_live_at_end);
5088 changed = bitmap_operation (bb->global_live_at_start,
5089 bb->global_live_at_start,
5096 COPY_REG_SET (bb->global_live_at_end, new_live_at_end);
5098 /* Rescan the block insn by insn to turn (a copy of) live_at_end
5099 into live_at_start. */
5100 propagate_block (bb, new_live_at_end, bb->local_set,
5101 bb->cond_local_set, flags);
5103 /* If live_at_start didn't change, no need to go farther. */
5104 if (REG_SET_EQUAL_P (bb->global_live_at_start, new_live_at_end))
5107 COPY_REG_SET (bb->global_live_at_start, new_live_at_end);
5110 /* Queue all predecessors of BB so that we may re-examine
5111 their live_at_end. */
5112 for (e = bb->pred; e; e = e->pred_next)
5114 basic_block pb = e->src;
5115 if (pb->aux == NULL)
5126 FREE_REG_SET (new_live_at_end);
5127 FREE_REG_SET (call_used);
5131 EXECUTE_IF_SET_IN_SBITMAP (blocks_out, 0, i,
5133 basic_block bb = BASIC_BLOCK (i);
5134 FREE_REG_SET (bb->local_set);
5135 FREE_REG_SET (bb->cond_local_set);
5140 for (i = n_basic_blocks - 1; i >= 0; --i)
5142 basic_block bb = BASIC_BLOCK (i);
5143 FREE_REG_SET (bb->local_set);
5144 FREE_REG_SET (bb->cond_local_set);
5151 /* Subroutines of life analysis. */
5153 /* Allocate the permanent data structures that represent the results
5154 of life analysis. Not static since used also for stupid life analysis. */
5157 allocate_bb_life_data ()
5161 for (i = 0; i < n_basic_blocks; i++)
5163 basic_block bb = BASIC_BLOCK (i);
5165 bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (&flow_obstack);
5166 bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (&flow_obstack);
5169 ENTRY_BLOCK_PTR->global_live_at_end
5170 = OBSTACK_ALLOC_REG_SET (&flow_obstack);
5171 EXIT_BLOCK_PTR->global_live_at_start
5172 = OBSTACK_ALLOC_REG_SET (&flow_obstack);
5174 regs_live_at_setjmp = OBSTACK_ALLOC_REG_SET (&flow_obstack);
5178 allocate_reg_life_data ()
5182 max_regno = max_reg_num ();
5184 /* Recalculate the register space, in case it has grown. Old style
5185 vector oriented regsets would set regset_{size,bytes} here also. */
5186 allocate_reg_info (max_regno, FALSE, FALSE);
5188 /* Reset all the data we'll collect in propagate_block and its
5189 subroutines. */
5190 for (i = 0; i < max_regno; i++)
5194 REG_N_DEATHS (i) = 0;
5195 REG_N_CALLS_CROSSED (i) = 0;
5196 REG_LIVE_LENGTH (i) = 0;
5197 REG_BASIC_BLOCK (i) = REG_BLOCK_UNKNOWN;
5201 /* Delete dead instructions for propagate_block. */
5204 propagate_block_delete_insn (bb, insn)
5208 rtx inote = find_reg_note (insn, REG_LABEL, NULL_RTX);
5210 /* If the insn referred to a label, and that label was attached to
5211 an ADDR_VEC, it's safe to delete the ADDR_VEC. In fact, it's
5212 pretty much mandatory to delete it, because the ADDR_VEC may be
5213 referencing labels that no longer exist.
5215 INSN may reference a deleted label, particularly when a jump
5216 table has been optimized into a direct jump. There's no
5217 really good way to fix up the reference to the deleted label
5218 when the label is deleted, so we just allow it here.
5220 After dead code elimination is complete, we do search for
5221 any REG_LABEL notes which reference deleted labels as a
5222 sanity check. */
5224 if (inote && GET_CODE (inote) == CODE_LABEL)
5226 rtx label = XEXP (inote, 0);
5229 /* The label may be forced if it has been put in the constant
5230 pool. If that is the only use we must discard the table
5231 jump following it, but not the label itself. */
5232 if (LABEL_NUSES (label) == 1 + LABEL_PRESERVE_P (label)
5233 && (next = next_nonnote_insn (label)) != NULL
5234 && GET_CODE (next) == JUMP_INSN
5235 && (GET_CODE (PATTERN (next)) == ADDR_VEC
5236 || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
5238 rtx pat = PATTERN (next);
5239 int diff_vec_p = GET_CODE (pat) == ADDR_DIFF_VEC;
5240 int len = XVECLEN (pat, diff_vec_p);
5243 for (i = 0; i < len; i++)
5244 LABEL_NUSES (XEXP (XVECEXP (pat, diff_vec_p, i), 0))--;
5246 flow_delete_insn (next);
5250 if (bb->end == insn)
5251 bb->end = PREV_INSN (insn);
5252 flow_delete_insn (insn);
5255 /* Delete dead libcalls for propagate_block. Return the insn
5256 before the libcall. */
5259 propagate_block_delete_libcall (bb, insn, note)
5263 rtx first = XEXP (note, 0);
5264 rtx before = PREV_INSN (first);
5266 if (insn == bb->end)
5269 flow_delete_insn_chain (first, insn);
5273 /* Update the life-status of regs for one insn. Return the previous insn. */
5276 propagate_one_insn (pbi, insn)
5277 struct propagate_block_info *pbi;
5280 rtx prev = PREV_INSN (insn);
5281 int flags = pbi->flags;
5282 int insn_is_dead = 0;
5283 int libcall_is_dead = 0;
5287 if (! INSN_P (insn))
5290 note = find_reg_note (insn, REG_RETVAL, NULL_RTX);
5291 if (flags & PROP_SCAN_DEAD_CODE)
5293 insn_is_dead = insn_dead_p (pbi, PATTERN (insn), 0, REG_NOTES (insn));
5294 libcall_is_dead = (insn_is_dead && note != 0
5295 && libcall_dead_p (pbi, note, insn));
5298 /* If an instruction consists of just dead store(s) on final pass,
5299 delete it. */
5300 if ((flags & PROP_KILL_DEAD_CODE) && insn_is_dead)
5302 /* If we're trying to delete a prologue or epilogue instruction
5303 that isn't flagged as possibly being dead, something is wrong.
5304 But if we are keeping the stack pointer depressed, we might well
5305 be deleting insns that are used to compute the amount to update
5306 it by, so they are fine. */
5307 if (reload_completed
5308 && !(TREE_CODE (TREE_TYPE (current_function_decl)) == FUNCTION_TYPE
5309 && (TYPE_RETURNS_STACK_DEPRESSED
5310 (TREE_TYPE (current_function_decl))))
5311 && (((HAVE_epilogue || HAVE_prologue)
5312 && prologue_epilogue_contains (insn))
5313 || (HAVE_sibcall_epilogue
5314 && sibcall_epilogue_contains (insn)))
5315 && find_reg_note (insn, REG_MAYBE_DEAD, NULL_RTX) == 0)
5318 /* Record sets. Do this even for dead instructions, since they
5319 would have killed the values if they hadn't been deleted. */
5320 mark_set_regs (pbi, PATTERN (insn), insn);
5322 /* CC0 is now known to be dead. Either this insn used it,
5323 in which case it doesn't anymore, or clobbered it,
5324 so the next insn can't use it. */
5327 if (libcall_is_dead)
5328 prev = propagate_block_delete_libcall (pbi->bb, insn, note);
5330 propagate_block_delete_insn (pbi->bb, insn);
5335 /* See if this is an increment or decrement that can be merged into
5336 a following memory address. */
5339 register rtx x = single_set (insn);
5341 /* Does this instruction increment or decrement a register? */
5342 if ((flags & PROP_AUTOINC)
5344 && GET_CODE (SET_DEST (x)) == REG
5345 && (GET_CODE (SET_SRC (x)) == PLUS
5346 || GET_CODE (SET_SRC (x)) == MINUS)
5347 && XEXP (SET_SRC (x), 0) == SET_DEST (x)
5348 && GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT
5349 /* Ok, look for a following memory ref we can combine with.
5350 If one is found, change the memory ref to a PRE_INC
5351 or PRE_DEC, cancel this insn, and return 1.
5352 Return 0 if nothing has been done. */
5353 && try_pre_increment_1 (pbi, insn))
5356 #endif /* AUTO_INC_DEC */
5358 CLEAR_REG_SET (pbi->new_set);
5360 /* If this is not the final pass, and this insn is copying the value of
5361 a library call and it's dead, don't scan the insns that perform the
5362 library call, so that the call's arguments are not marked live. */
5363 if (libcall_is_dead)
5365 /* Record the death of the dest reg. */
5366 mark_set_regs (pbi, PATTERN (insn), insn);
5368 insn = XEXP (note, 0);
5369 return PREV_INSN (insn);
5371 else if (GET_CODE (PATTERN (insn)) == SET
5372 && SET_DEST (PATTERN (insn)) == stack_pointer_rtx
5373 && GET_CODE (SET_SRC (PATTERN (insn))) == PLUS
5374 && XEXP (SET_SRC (PATTERN (insn)), 0) == stack_pointer_rtx
5375 && GET_CODE (XEXP (SET_SRC (PATTERN (insn)), 1)) == CONST_INT)
5376 /* We have an insn to pop a constant amount off the stack.
5377 (Such insns use PLUS regardless of the direction of the stack,
5378 and any insn to adjust the stack by a constant is always a pop.)
5379 These insns, if not dead stores, have no effect on life. */
5383 /* Any regs live at the time of a call instruction must not go
5384 in a register clobbered by calls. Find all regs now live and
5385 record this for them. */
5387 if (GET_CODE (insn) == CALL_INSN && (flags & PROP_REG_INFO))
5388 EXECUTE_IF_SET_IN_REG_SET (pbi->reg_live, 0, i,
5389 { REG_N_CALLS_CROSSED (i)++; });
5391 /* Record sets. Do this even for dead instructions, since they
5392 would have killed the values if they hadn't been deleted. */
5393 mark_set_regs (pbi, PATTERN (insn), insn);
5395 if (GET_CODE (insn) == CALL_INSN)
5401 if (GET_CODE (PATTERN (insn)) == COND_EXEC)
5402 cond = COND_EXEC_TEST (PATTERN (insn));
5404 /* Non-constant calls clobber memory. */
5405 if (! CONST_OR_PURE_CALL_P (insn))
5407 free_EXPR_LIST_list (&pbi->mem_set_list);
5408 pbi->mem_set_list_len = 0;
5411 /* There may be extra registers to be clobbered. */
5412 for (note = CALL_INSN_FUNCTION_USAGE (insn);
5414 note = XEXP (note, 1))
5415 if (GET_CODE (XEXP (note, 0)) == CLOBBER)
5416 mark_set_1 (pbi, CLOBBER, XEXP (XEXP (note, 0), 0),
5417 cond, insn, pbi->flags);
5419 /* Calls change all call-used and global registers. */
5420 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
5421 if (TEST_HARD_REG_BIT (regs_invalidated_by_call, i))
5423 /* We do not want REG_UNUSED notes for these registers. */
5424 mark_set_1 (pbi, CLOBBER, gen_rtx_REG (reg_raw_mode[i], i),
5426 pbi->flags & ~(PROP_DEATH_NOTES | PROP_REG_INFO));
5430 /* If an insn doesn't use CC0, it becomes dead since we assume
5431 that every insn clobbers it. So show it dead here;
5432 mark_used_regs will set it live if it is referenced. */
5437 mark_used_regs (pbi, PATTERN (insn), NULL_RTX, insn);
5439 /* Sometimes we may have inserted something before INSN (such as a move)
5440 when we make an auto-inc. So ensure we will scan those insns. */
5442 prev = PREV_INSN (insn);
5445 if (! insn_is_dead && GET_CODE (insn) == CALL_INSN)
5451 if (GET_CODE (PATTERN (insn)) == COND_EXEC)
5452 cond = COND_EXEC_TEST (PATTERN (insn));
5454 /* Calls use their arguments. */
5455 for (note = CALL_INSN_FUNCTION_USAGE (insn);
5457 note = XEXP (note, 1))
5458 if (GET_CODE (XEXP (note, 0)) == USE)
5459 mark_used_regs (pbi, XEXP (XEXP (note, 0), 0),
5462 /* The stack ptr is used (honorarily) by a CALL insn. */
5463 SET_REGNO_REG_SET (pbi->reg_live, STACK_POINTER_REGNUM);
5465 /* Calls may also reference any of the global registers,
5466 so they are made live. */
5467 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
5469 mark_used_reg (pbi, gen_rtx_REG (reg_raw_mode[i], i),
5474 /* On final pass, update counts of how many insns in which each reg
5475 is live. */
5476 if (flags & PROP_REG_INFO)
5477 EXECUTE_IF_SET_IN_REG_SET (pbi->reg_live, 0, i,
5478 { REG_LIVE_LENGTH (i)++; });
5483 /* Initialize a propagate_block_info struct for public consumption.
5484 Note that the structure itself is opaque to this file, but that
5485 the user can use the regsets provided here. */
5487 struct propagate_block_info *
5488 init_propagate_block_info (bb, live, local_set, cond_local_set, flags)
5490 regset live, local_set, cond_local_set;
5493 struct propagate_block_info *pbi = xmalloc (sizeof (*pbi));
5496 pbi->reg_live = live;
5497 pbi->mem_set_list = NULL_RTX;
5498 pbi->mem_set_list_len = 0;
5499 pbi->local_set = local_set;
5500 pbi->cond_local_set = cond_local_set;
5504 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
5505 pbi->reg_next_use = (rtx *) xcalloc (max_reg_num (), sizeof (rtx));
5507 pbi->reg_next_use = NULL;
5509 pbi->new_set = BITMAP_XMALLOC ();
5511 #ifdef HAVE_conditional_execution
5512 pbi->reg_cond_dead = splay_tree_new (splay_tree_compare_ints, NULL,
5513 free_reg_cond_life_info);
5514 pbi->reg_cond_reg = BITMAP_XMALLOC ();
5516 /* If this block ends in a conditional branch, for each register live
5517 from one side of the branch and not the other, record the register
5518 as conditionally dead. */
5519 if (GET_CODE (bb->end) == JUMP_INSN
5520 && any_condjump_p (bb->end))
5522 regset_head diff_head;
5523 regset diff = INITIALIZE_REG_SET (diff_head);
5524 basic_block bb_true, bb_false;
5525 rtx cond_true, cond_false, set_src;
5528 /* Identify the successor blocks. */
5529 bb_true = bb->succ->dest;
5530 if (bb->succ->succ_next != NULL)
5532 bb_false = bb->succ->succ_next->dest;
5534 if (bb->succ->flags & EDGE_FALLTHRU)
5536 basic_block t = bb_false;
5540 else if (! (bb->succ->succ_next->flags & EDGE_FALLTHRU))
5545 /* This can happen with a conditional jump to the next insn. */
5546 if (JUMP_LABEL (bb->end) != bb_true->head)
5549 /* Simplest way to do nothing. */
5553 /* Extract the condition from the branch. */
5554 set_src = SET_SRC (pc_set (bb->end));
5555 cond_true = XEXP (set_src, 0);
5556 cond_false = gen_rtx_fmt_ee (reverse_condition (GET_CODE (cond_true)),
5557 GET_MODE (cond_true), XEXP (cond_true, 0),
5558 XEXP (cond_true, 1));
5559 if (GET_CODE (XEXP (set_src, 1)) == PC)
5562 cond_false = cond_true;
5566 /* Compute which registers lead different lives in the successors. */
5567 if (bitmap_operation (diff, bb_true->global_live_at_start,
5568 bb_false->global_live_at_start, BITMAP_XOR))
5570 rtx reg = XEXP (cond_true, 0);
5572 if (GET_CODE (reg) == SUBREG)
5573 reg = SUBREG_REG (reg);
5575 if (GET_CODE (reg) != REG)
5578 SET_REGNO_REG_SET (pbi->reg_cond_reg, REGNO (reg));
5580 /* For each such register, mark it conditionally dead. */
5581 EXECUTE_IF_SET_IN_REG_SET
5584 struct reg_cond_life_info *rcli;
5587 rcli = (struct reg_cond_life_info *) xmalloc (sizeof (*rcli));
5589 if (REGNO_REG_SET_P (bb_true->global_live_at_start, i))
5593 rcli->condition = cond;
5594 rcli->stores = const0_rtx;
5595 rcli->orig_condition = cond;
5597 splay_tree_insert (pbi->reg_cond_dead, i,
5598 (splay_tree_value) rcli);
5602 FREE_REG_SET (diff);
5606 /* If this block has no successors, any stores to the frame that aren't
5607 used later in the block are dead. So make a pass over the block
5608 recording any such that are made and show them dead at the end. We do
5609 a very conservative and simple job here. */
5611 && ! (TREE_CODE (TREE_TYPE (current_function_decl)) == FUNCTION_TYPE
5612 && (TYPE_RETURNS_STACK_DEPRESSED
5613 (TREE_TYPE (current_function_decl))))
5614 && (flags & PROP_SCAN_DEAD_CODE)
5615 && (bb->succ == NULL
5616 || (bb->succ->succ_next == NULL
5617 && bb->succ->dest == EXIT_BLOCK_PTR
5618 && ! current_function_calls_eh_return)))
5621 for (insn = bb->end; insn != bb->head; insn = PREV_INSN (insn))
5622 if (GET_CODE (insn) == INSN
5623 && (set = single_set (insn))
5624 && GET_CODE (SET_DEST (set)) == MEM)
5626 rtx mem = SET_DEST (set);
5627 rtx canon_mem = canon_rtx (mem);
5629 /* This optimization is performed by faking a store to the
5630 memory at the end of the block. This doesn't work for
5631 unchanging memories because multiple stores to unchanging
5632 memory are illegal and alias analysis doesn't consider them. */
5633 if (RTX_UNCHANGING_P (canon_mem))
5636 if (XEXP (canon_mem, 0) == frame_pointer_rtx
5637 || (GET_CODE (XEXP (canon_mem, 0)) == PLUS
5638 && XEXP (XEXP (canon_mem, 0), 0) == frame_pointer_rtx
5639 && GET_CODE (XEXP (XEXP (canon_mem, 0), 1)) == CONST_INT))
5640 add_to_mem_set_list (pbi, canon_mem);
5647 /* Release a propagate_block_info struct. */
5650 free_propagate_block_info (pbi)
5651 struct propagate_block_info *pbi;
5653 free_EXPR_LIST_list (&pbi->mem_set_list);
5655 BITMAP_XFREE (pbi->new_set);
5657 #ifdef HAVE_conditional_execution
5658 splay_tree_delete (pbi->reg_cond_dead);
5659 BITMAP_XFREE (pbi->reg_cond_reg);
5662 if (pbi->reg_next_use)
5663 free (pbi->reg_next_use);
5668 /* Compute the registers live at the beginning of a basic block BB from
5669 those live at the end.
5671 When called, REG_LIVE contains those live at the end. On return, it
5672 contains those live at the beginning.
5674 LOCAL_SET, if non-null, will be set with all registers killed
5675 unconditionally by this basic block.
5676 Likewise, COND_LOCAL_SET, if non-null, will be set with all registers
5677 killed conditionally by this basic block. If there is any unconditional
5678 set of a register, then the corresponding bit will be set in LOCAL_SET
5679 and cleared in COND_LOCAL_SET.
5680 It is valid for LOCAL_SET and COND_LOCAL_SET to be the same set. In this
5681 case, the resulting set will be equal to the union of the two sets that
5682 would otherwise be computed.
5684 Return non-zero if an INSN is deleted (i.e. by dead code removal). */
5687 propagate_block (bb, live, local_set, cond_local_set, flags)
5691 regset cond_local_set;
5694 struct propagate_block_info *pbi;
5698 pbi = init_propagate_block_info (bb, live, local_set, cond_local_set, flags);
5700 if (flags & PROP_REG_INFO)
5704 /* Process the regs live at the end of the block.
5705 Mark them as not local to any one basic block. */
5706 EXECUTE_IF_SET_IN_REG_SET (live, 0, i,
5707 { REG_BASIC_BLOCK (i) = REG_BLOCK_GLOBAL; });
5710 /* Scan the block an insn at a time from end to beginning. */
5713 for (insn = bb->end;; insn = prev)
5715 /* If this is a call to `setjmp' et al, warn if any
5716 non-volatile datum is live. */
5717 if ((flags & PROP_REG_INFO)
5718 && GET_CODE (insn) == CALL_INSN
5719 && find_reg_note (insn, REG_SETJMP, NULL))
5720 IOR_REG_SET (regs_live_at_setjmp, pbi->reg_live);
5722 prev = propagate_one_insn (pbi, insn);
5723 changed |= NEXT_INSN (prev) != insn;
5725 if (insn == bb->head)
5729 free_propagate_block_info (pbi);
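/* Illustrative sketch of a hypothetical caller: recomputing life for a
   single block from its live-at-end set, mirroring what
   update_life_info does for each block it visits.  */
#if 0
{
  regset_head live_head;
  regset live = INITIALIZE_REG_SET (live_head);

  COPY_REG_SET (live, bb->global_live_at_end);
  propagate_block (bb, live, NULL, NULL, PROP_DEATH_NOTES);
  /* LIVE now holds the registers live at the start of BB.  */
  FREE_REG_SET (live);
}
#endif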
5734 /* Return 1 if X (the body of an insn, or part of it) is just dead stores
5735 (SET expressions whose destinations are registers dead after the insn).
5736 NEEDED is the regset that says which regs are alive after the insn.
5738 Unless CALL_OK is non-zero, an insn is needed if it contains a CALL.
5740 If X is the entire body of an insn, NOTES contains the reg notes
5741 pertaining to the insn. */
5744 insn_dead_p (pbi, x, call_ok, notes)
5745 struct propagate_block_info *pbi;
5748 rtx notes ATTRIBUTE_UNUSED;
5750 enum rtx_code code = GET_CODE (x);
5753 /* If flow is invoked after reload, we must take existing AUTO_INC
5754 expressions into account. */
5755 if (reload_completed)
5757 for (; notes; notes = XEXP (notes, 1))
5759 if (REG_NOTE_KIND (notes) == REG_INC)
5761 int regno = REGNO (XEXP (notes, 0));
5763 /* Don't delete insns to set global regs. */
5764 if ((regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
5765 || REGNO_REG_SET_P (pbi->reg_live, regno))
5772 /* If setting something that's a reg or part of one,
5773 see if that register's altered value will be live. */
5777 rtx r = SET_DEST (x);
5780 if (GET_CODE (r) == CC0)
5781 return ! pbi->cc0_live;
5784 /* A SET that is a subroutine call cannot be dead. */
5785 if (GET_CODE (SET_SRC (x)) == CALL)
5791 /* Don't eliminate loads from volatile memory or volatile asms. */
5792 else if (volatile_refs_p (SET_SRC (x)))
5795 if (GET_CODE (r) == MEM)
5799 if (MEM_VOLATILE_P (r) || GET_MODE (r) == BLKmode)
5802 canon_r = canon_rtx (r);
5804 /* Walk the set of memory locations we are currently tracking
5805 and see if one is an identical match to this memory location.
5806 If so, this memory write is dead (remember, we're walking
5807 backwards from the end of the block to the start). Since
5808 rtx_equal_p does not check the alias set or flags, we also
5809 must have the potential for them to conflict (anti_dependence). */
5810 for (temp = pbi->mem_set_list; temp != 0; temp = XEXP (temp, 1))
5811 if (anti_dependence (r, XEXP (temp, 0)))
5813 rtx mem = XEXP (temp, 0);
5815 if (rtx_equal_p (XEXP (canon_r, 0), XEXP (mem, 0))
5816 && (GET_MODE_SIZE (GET_MODE (canon_r))
5817 <= GET_MODE_SIZE (GET_MODE (mem))))
5821 /* Check if memory reference matches an auto increment. Only
5822 post increment/decrement or modify are valid. */
5823 if (GET_MODE (mem) == GET_MODE (r)
5824 && (GET_CODE (XEXP (mem, 0)) == POST_DEC
5825 || GET_CODE (XEXP (mem, 0)) == POST_INC
5826 || GET_CODE (XEXP (mem, 0)) == POST_MODIFY)
5827 && GET_MODE (XEXP (mem, 0)) == GET_MODE (r)
5828 && rtx_equal_p (XEXP (XEXP (mem, 0), 0), XEXP (r, 0)))
5835 while (GET_CODE (r) == SUBREG
5836 || GET_CODE (r) == STRICT_LOW_PART
5837 || GET_CODE (r) == ZERO_EXTRACT)
5838 r = XEXP (r, 0);
5840 if (GET_CODE (r) == REG)
5842 int regno = REGNO (r);
5845 if (REGNO_REG_SET_P (pbi->reg_live, regno))
5848 /* If this is a hard register, verify that subsequent
5849 words are not needed. */
5850 if (regno < FIRST_PSEUDO_REGISTER)
5852 int n = HARD_REGNO_NREGS (regno, GET_MODE (r));
5855 if (REGNO_REG_SET_P (pbi->reg_live, regno+n))
5859 /* Don't delete insns to set global regs. */
5860 if (regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
5863 /* Make sure insns to set the stack pointer aren't deleted. */
5864 if (regno == STACK_POINTER_REGNUM)
5867 /* ??? These bits might be redundant with the force live bits
5868 in calculate_global_regs_live. We would delete from
5869 sequential sets; whether this actually affects real code
5870 for anything but the stack pointer I don't know. */
5871 /* Make sure insns to set the frame pointer aren't deleted. */
5872 if (regno == FRAME_POINTER_REGNUM
5873 && (! reload_completed || frame_pointer_needed))
5875 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
5876 if (regno == HARD_FRAME_POINTER_REGNUM
5877 && (! reload_completed || frame_pointer_needed))
5881 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
5882 /* Make sure insns to set arg pointer are never deleted
5883 (if the arg pointer isn't fixed, there will be a USE
5884 for it, so we can treat it normally). */
5885 if (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
5889 /* Otherwise, the set is dead. */
5895 /* If performing several activities, insn is dead if each activity
5896 is individually dead. Also, CLOBBERs and USEs can be ignored; a
5897 CLOBBER or USE that's inside a PARALLEL doesn't make the insn
5898 needed. */
5899 else if (code == PARALLEL)
5901 int i = XVECLEN (x, 0);
5903 for (i--; i >= 0; i--)
5904 if (GET_CODE (XVECEXP (x, 0, i)) != CLOBBER
5905 && GET_CODE (XVECEXP (x, 0, i)) != USE
5906 && ! insn_dead_p (pbi, XVECEXP (x, 0, i), call_ok, NULL_RTX))
5912 /* A CLOBBER of a pseudo-register that is dead serves no purpose. That
5913 is not necessarily true for hard registers. */
5914 else if (code == CLOBBER && GET_CODE (XEXP (x, 0)) == REG
5915 && REGNO (XEXP (x, 0)) >= FIRST_PSEUDO_REGISTER
5916 && ! REGNO_REG_SET_P (pbi->reg_live, REGNO (XEXP (x, 0))))
5919 /* We do not check other CLOBBER or USE here. An insn consisting of just
5920 a CLOBBER or just a USE should not be deleted. */
5924 /* If INSN is the last insn in a libcall, and assuming INSN is dead,
5925 return 1 if the entire library call is dead.
5926 This is true if INSN copies a register (hard or pseudo)
5927 and if the hard return reg of the call insn is dead.
5928 (The caller should have tested the destination of the SET inside
5929 INSN already for death.)
5931 If this insn doesn't just copy a register, then we don't
5932 have an ordinary libcall. In that case, cse could not have
5933 managed to substitute the source for the dest later on,
5934 so we can assume the libcall is dead.
5936 PBI is the block info giving pseudoregs live before this insn.
5937 NOTE is the REG_RETVAL note of the insn. */
5940 libcall_dead_p (pbi, note, insn)
5941 struct propagate_block_info *pbi;
5945 rtx x = single_set (insn);
5949 register rtx r = SET_SRC (x);
5951 if (GET_CODE (r) == REG)
5953 rtx call = XEXP (note, 0);
5957 /* Find the call insn. */
5958 while (call != insn && GET_CODE (call) != CALL_INSN)
5959 call = NEXT_INSN (call);
5961 /* If there is none, do nothing special,
5962 since ordinary death handling can understand these insns. */
5966 /* See if the hard reg holding the value is dead.
5967 If this is a PARALLEL, find the call within it. */
5968 call_pat = PATTERN (call);
5969 if (GET_CODE (call_pat) == PARALLEL)
5971 for (i = XVECLEN (call_pat, 0) - 1; i >= 0; i--)
5972 if (GET_CODE (XVECEXP (call_pat, 0, i)) == SET
5973 && GET_CODE (SET_SRC (XVECEXP (call_pat, 0, i))) == CALL)
5976 /* This may be a library call that is returning a value
5977 via invisible pointer. Do nothing special, since
5978 ordinary death handling can understand these insns. */
5982 call_pat = XVECEXP (call_pat, 0, i);
5985 return insn_dead_p (pbi, call_pat, 1, REG_NOTES (call));
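/* Illustrative only: the libcall block shape assumed here, in
   schematic RTL (insn and register numbers hypothetical):

       (insn 10 ...)                             ;; REG_LIBCALL -> insn 12
       (call_insn 11 (set (reg 0) (call ...)))
       (insn 12 (set (reg 60) (reg 0)))          ;; REG_RETVAL -> insn 10

   The whole block is dead when insn 12's destination is dead (the
   caller tested that) and the hard return register (reg 0) is dead
   afterward, which is what the insn_dead_p call above checks.  */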
5991 /* Return 1 if register REGNO was used before it was set, i.e. if it is
5992 live at function entry. Don't count global register variables, variables
5993 in registers that can be used for function arg passing, or variables in
5994 fixed hard registers. */
5997 regno_uninitialized (regno)
6000 if (n_basic_blocks == 0
6001 || (regno < FIRST_PSEUDO_REGISTER
6002 && (global_regs[regno]
6003 || fixed_regs[regno]
6004 || FUNCTION_ARG_REGNO_P (regno))))
6007 return REGNO_REG_SET_P (BASIC_BLOCK (0)->global_live_at_start, regno);
6010 /* 1 if register REGNO was alive at a place where `setjmp' was called
6011 and was set more than once or is an argument.
6012 Such regs may be clobbered by `longjmp'. */
6015 regno_clobbered_at_setjmp (regno)
6018 if (n_basic_blocks == 0)
6021 return ((REG_N_SETS (regno) > 1
6022 || REGNO_REG_SET_P (BASIC_BLOCK (0)->global_live_at_start, regno))
6023 && REGNO_REG_SET_P (regs_live_at_setjmp, regno));
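/* Illustrative user-level example of the situation tested above, where
   G is a hypothetical function that calls longjmp (env, 1):

       #include <setjmp.h>
       jmp_buf env;
       int f (void)
       {
         int v = 1;
         if (setjmp (env) == 0)
           {
             v = 2;
             g ();
           }
         return v;
       }

   V is set more than once and is live where setjmp was called, so if it
   were allocated to a call-clobbered hard register the longjmp could
   bring back a stale value; C guarantees the value only for volatile or
   unmodified locals.  */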
6026 /* Add MEM to PBI->MEM_SET_LIST. MEM should be canonical. Respect the
6027 maximal list size; look for overlaps in mode and select the largest. */
6029 add_to_mem_set_list (pbi, mem)
6030 struct propagate_block_info *pbi;
6035 /* We don't know how large a BLKmode store is, so we must not
6036 take it into consideration. */
6037 if (GET_MODE (mem) == BLKmode)
6040 for (i = pbi->mem_set_list; i ; i = XEXP (i, 1))
6042 rtx e = XEXP (i, 0);
6043 if (rtx_equal_p (XEXP (mem, 0), XEXP (e, 0)))
6045 if (GET_MODE_SIZE (GET_MODE (mem)) > GET_MODE_SIZE (GET_MODE (e)))
6048 /* If we must store a copy of the mem, we can just modify
6049 the mode of the stored copy. */
6050 if (pbi->flags & PROP_AUTOINC)
6051 PUT_MODE (e, GET_MODE (mem));
6060 if (pbi->mem_set_list_len < MAX_MEM_SET_LIST_LEN)
6063 /* Store a copy of mem, otherwise the address may be
6064 scrogged by find_auto_inc. */
6065 if (pbi->flags & PROP_AUTOINC)
6066 mem = shallow_copy_rtx (mem);
6068 pbi->mem_set_list = alloc_EXPR_LIST (0, mem, pbi->mem_set_list);
6069 pbi->mem_set_list_len++;
6073 /* INSN references memory, possibly using autoincrement addressing modes.
6074 Find any entries on the mem_set_list that need to be invalidated due
6075 to an address change. */
6078 invalidate_mems_from_autoinc (pbi, insn)
6079 struct propagate_block_info *pbi;
6082 rtx note = REG_NOTES (insn);
6083 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
6084 if (REG_NOTE_KIND (note) == REG_INC)
6085 invalidate_mems_from_set (pbi, XEXP (note, 0));
6088 /* EXP is a REG. Remove any dependent entries from pbi->mem_set_list. */
6091 invalidate_mems_from_set (pbi, exp)
6092 struct propagate_block_info *pbi;
6095 rtx temp = pbi->mem_set_list;
6096 rtx prev = NULL_RTX;
6101 next = XEXP (temp, 1);
6102 if (reg_overlap_mentioned_p (exp, XEXP (temp, 0)))
6104 /* Splice this entry out of the list. */
6106 XEXP (prev, 1) = next;
6108 pbi->mem_set_list = next;
6109 free_EXPR_LIST_node (temp);
6110 pbi->mem_set_list_len--;
6118 /* Process the registers that are set within X. Their bits are set to
6119 1 in the regset DEAD, because they are dead prior to this insn.
6121 If INSN is nonzero, it is the insn being processed.
6123 FLAGS is the set of operations to perform. */
6126 mark_set_regs (pbi, x, insn)
6127 struct propagate_block_info *pbi;
6130 rtx cond = NULL_RTX;
6135 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
6137 if (REG_NOTE_KIND (link) == REG_INC)
6138 mark_set_1 (pbi, SET, XEXP (link, 0),
6139 (GET_CODE (x) == COND_EXEC
6140 ? COND_EXEC_TEST (x) : NULL_RTX),
6144 switch (code = GET_CODE (x))
6148 mark_set_1 (pbi, code, SET_DEST (x), cond, insn, pbi->flags);
6152 cond = COND_EXEC_TEST (x);
6153 x = COND_EXEC_CODE (x);
6159 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
6161 rtx sub = XVECEXP (x, 0, i);
6162 switch (code = GET_CODE (sub))
6165 if (cond != NULL_RTX)
6168 cond = COND_EXEC_TEST (sub);
6169 sub = COND_EXEC_CODE (sub);
6170 if (GET_CODE (sub) != SET && GET_CODE (sub) != CLOBBER)
6176 mark_set_1 (pbi, code, SET_DEST (sub), cond, insn, pbi->flags);
6191 /* Process a single set, which appears in INSN. REG (which may not
6192 actually be a REG, it may also be a SUBREG, PARALLEL, etc.) is
6193 being set using the CODE (which may be SET, CLOBBER, or COND_EXEC).
6194 If the set is conditional (because it appears in a COND_EXEC), COND
6195 will be the condition. */
6198 mark_set_1 (pbi, code, reg, cond, insn, flags)
6199 struct propagate_block_info *pbi;
6201 rtx reg, cond, insn;
6204 int regno_first = -1, regno_last = -1;
6205 unsigned long not_dead = 0;
6208 /* Modifying just one hardware register of a multi-reg value or just a
6209 byte field of a register does not mean the value from before this insn
6210 is now dead. Of course, if it was dead after the insn, it is unused now. */
6212 switch (GET_CODE (reg))
6215 /* Some targets place small structures in registers for return values of
6216 functions. We have to detect this case specially here to get correct
6217 flow information. */
6218 for (i = XVECLEN (reg, 0) - 1; i >= 0; i--)
6219 if (XEXP (XVECEXP (reg, 0, i), 0) != 0)
6220 mark_set_1 (pbi, code, XEXP (XVECEXP (reg, 0, i), 0), cond, insn,
6226 case STRICT_LOW_PART:
6227 /* ??? Assumes STRICT_LOW_PART not used on multi-word registers. */
6228 do
6229 reg = XEXP (reg, 0);
6230 while (GET_CODE (reg) == SUBREG
6231 || GET_CODE (reg) == ZERO_EXTRACT
6232 || GET_CODE (reg) == SIGN_EXTRACT
6233 || GET_CODE (reg) == STRICT_LOW_PART);
6234 if (GET_CODE (reg) == MEM)
6236 not_dead = (unsigned long) REGNO_REG_SET_P (pbi->reg_live, REGNO (reg));
6240 regno_last = regno_first = REGNO (reg);
6241 if (regno_first < FIRST_PSEUDO_REGISTER)
6242 regno_last += HARD_REGNO_NREGS (regno_first, GET_MODE (reg)) - 1;
6246 if (GET_CODE (SUBREG_REG (reg)) == REG)
6248 enum machine_mode outer_mode = GET_MODE (reg);
6249 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (reg));
6251 /* Identify the range of registers affected. This is moderately
6252 tricky for hard registers. See alter_subreg. */
6254 regno_last = regno_first = REGNO (SUBREG_REG (reg));
6255 if (regno_first < FIRST_PSEUDO_REGISTER)
6257 regno_first += subreg_regno_offset (regno_first, inner_mode,
6260 regno_last = (regno_first
6261 + HARD_REGNO_NREGS (regno_first, outer_mode) - 1);
6263 /* Since we've just adjusted the register number ranges, make
6264 sure REG matches. Otherwise some_was_live will be clear
6265 when it shouldn't have been, and we'll create incorrect
6266 REG_UNUSED notes. */
6267 reg = gen_rtx_REG (outer_mode, regno_first);
6271 /* If the number of words in the subreg is less than the number
6272 of words in the full register, we have a well-defined partial
6273 set. Otherwise the high bits are undefined.
6275 This is only really applicable to pseudos, since we just took
6276 care of multi-word hard registers. */
6277 if (((GET_MODE_SIZE (outer_mode)
6278 + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
6279 < ((GET_MODE_SIZE (inner_mode)
6280 + UNITS_PER_WORD - 1) / UNITS_PER_WORD))
6281 not_dead = (unsigned long) REGNO_REG_SET_P (pbi->reg_live,
6284 reg = SUBREG_REG (reg);
6288 reg = SUBREG_REG (reg);
6295 /* If this set is a MEM, then it kills any aliased writes.
6296 If this set is a REG, then it kills any MEMs which use the reg. */
6297 if (optimize && (flags & PROP_SCAN_DEAD_CODE))
6299 if (GET_CODE (reg) == REG)
6300 invalidate_mems_from_set (pbi, reg);
6302 /* If the memory reference had embedded side effects (autoincrement
6303 address modes), then we may need to kill some entries on the
6304 memory set list. */
6305 if (insn && GET_CODE (reg) == MEM)
6306 invalidate_mems_from_autoinc (pbi, insn);
6308 if (GET_CODE (reg) == MEM && ! side_effects_p (reg)
6309 /* ??? With more effort we could track conditional memory life. */
6311 /* There are no REG_INC notes for SP, so we can't assume we'll see
6312 everything that invalidates it. To be safe, don't eliminate any
6313 stores through SP; none of them should be redundant anyway. */
6314 && ! reg_mentioned_p (stack_pointer_rtx, reg))
6315 add_to_mem_set_list (pbi, canon_rtx (reg));
6318 if (GET_CODE (reg) == REG
6319 && ! (regno_first == FRAME_POINTER_REGNUM
6320 && (! reload_completed || frame_pointer_needed))
6321 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
6322 && ! (regno_first == HARD_FRAME_POINTER_REGNUM
6323 && (! reload_completed || frame_pointer_needed))
6325 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
6326 && ! (regno_first == ARG_POINTER_REGNUM && fixed_regs[regno_first])
6330 int some_was_live = 0, some_was_dead = 0;
6332 for (i = regno_first; i <= regno_last; ++i)
6334 int needed_regno = REGNO_REG_SET_P (pbi->reg_live, i);
6337 /* Order of the set operation matters here since both
6338 sets may be the same. */
6339 CLEAR_REGNO_REG_SET (pbi->cond_local_set, i);
6340 if (cond != NULL_RTX
6341 && ! REGNO_REG_SET_P (pbi->local_set, i))
6342 SET_REGNO_REG_SET (pbi->cond_local_set, i);
6344 SET_REGNO_REG_SET (pbi->local_set, i);
6346 if (code != CLOBBER)
6347 SET_REGNO_REG_SET (pbi->new_set, i);
6349 some_was_live |= needed_regno;
6350 some_was_dead |= ! needed_regno;
6353 #ifdef HAVE_conditional_execution
6354 /* Consider conditional death in deciding that the register needs
6355 a death note. */
6356 if (some_was_live && ! not_dead
6357 /* The stack pointer is never dead. Well, not strictly true,
6358 but it's very difficult to tell from here. Hopefully
6359 combine_stack_adjustments will fix up the most egregious
6361 && regno_first != STACK_POINTER_REGNUM)
6363 for (i = regno_first; i <= regno_last; ++i)
6364 if (! mark_regno_cond_dead (pbi, i, cond))
6365 not_dead |= ((unsigned long) 1) << (i - regno_first);
6369 /* Additional data to record if this is the final pass. */
6370 if (flags & (PROP_LOG_LINKS | PROP_REG_INFO
6371 | PROP_DEATH_NOTES | PROP_AUTOINC))
6374 register int blocknum = pbi->bb->index;
6377 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
6379 y = pbi->reg_next_use[regno_first];
6381 /* The next use is no longer next, since a store intervenes. */
6382 for (i = regno_first; i <= regno_last; ++i)
6383 pbi->reg_next_use[i] = 0;
6386 if (flags & PROP_REG_INFO)
6388 for (i = regno_first; i <= regno_last; ++i)
6390 /* Count (weighted) references, stores, etc. This counts a
6391 register twice if it is modified, but that is correct. */
6392 REG_N_SETS (i) += 1;
6393 REG_N_REFS (i) += 1;
6394 REG_FREQ (i) += REG_FREQ_FROM_BB (pbi->bb);
6396 /* The insns where a reg is live are normally counted
6397 elsewhere, but we want the count to include the insn
6398 where the reg is set, and the normal counting mechanism
6399 would not count it. */
6400 REG_LIVE_LENGTH (i) += 1;
6403 /* If this is a hard reg, record this function uses the reg. */
6404 if (regno_first < FIRST_PSEUDO_REGISTER)
6406 for (i = regno_first; i <= regno_last; i++)
6407 regs_ever_live[i] = 1;
6411 /* Keep track of which basic blocks each reg appears in. */
6412 if (REG_BASIC_BLOCK (regno_first) == REG_BLOCK_UNKNOWN)
6413 REG_BASIC_BLOCK (regno_first) = blocknum;
6414 else if (REG_BASIC_BLOCK (regno_first) != blocknum)
6415 REG_BASIC_BLOCK (regno_first) = REG_BLOCK_GLOBAL;
6419 if (! some_was_dead)
6421 if (flags & PROP_LOG_LINKS)
6423 /* Make a logical link from the next following insn
6424 that uses this register, back to this insn.
6425 The following insns have already been processed.
6427 We don't build a LOG_LINK for hard registers contained
6428 in ASM_OPERANDs. If these registers get replaced,
6429 we might wind up changing the semantics of the insn,
6430 even if reload can make what appear to be valid
6431 assignments later. */
6432 if (y && (BLOCK_NUM (y) == blocknum)
6433 && (regno_first >= FIRST_PSEUDO_REGISTER
6434 || asm_noperands (PATTERN (y)) < 0))
6435 LOG_LINKS (y) = alloc_INSN_LIST (insn, LOG_LINKS (y));
6440 else if (! some_was_live)
6442 if (flags & PROP_REG_INFO)
6443 REG_N_DEATHS (regno_first) += 1;
6445 if (flags & PROP_DEATH_NOTES)
6447 /* Note that dead stores have already been deleted
6448 when possible. If we get here, we have found a
6449 dead store that cannot be eliminated (because the
6450 same insn does something useful). Indicate this
6451 by marking the reg being set as dying here. */
6453 = alloc_EXPR_LIST (REG_UNUSED, reg, REG_NOTES (insn));
6458 if (flags & PROP_DEATH_NOTES)
6460 /* This is a case where we have a multi-word hard register
6461 and some, but not all, of the words of the register are
6462 needed in subsequent insns. Write REG_UNUSED notes
6463 for those parts that were not needed. This case should
6464 be rare. */
6466 for (i = regno_first; i <= regno_last; ++i)
6467 if (! REGNO_REG_SET_P (pbi->reg_live, i))
6469 = alloc_EXPR_LIST (REG_UNUSED,
6470 gen_rtx_REG (reg_raw_mode[i], i),
6476 /* Mark the register as being dead. */
6477 if (some_was_live
6478 /* The stack pointer is never dead. Well, not strictly true,
6479 but it's very difficult to tell from here. Hopefully
6480 combine_stack_adjustments will fix up the most egregious
6482 && regno_first != STACK_POINTER_REGNUM)
6484 for (i = regno_first; i <= regno_last; ++i)
6485 if (!(not_dead & (((unsigned long) 1) << (i - regno_first))))
6486 CLEAR_REGNO_REG_SET (pbi->reg_live, i);
6489 else if (GET_CODE (reg) == REG)
6491 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
6492 pbi->reg_next_use[regno_first] = 0;
6495 /* If this is the last pass and this is a SCRATCH, show it will be dying
6496 here and count it. */
6497 else if (GET_CODE (reg) == SCRATCH)
6499 if (flags & PROP_DEATH_NOTES)
6501 = alloc_EXPR_LIST (REG_UNUSED, reg, REG_NOTES (insn));
6505 #ifdef HAVE_conditional_execution
6506 /* Mark REGNO conditionally dead.
6507 Return true if the register is now unconditionally dead. */
6510 mark_regno_cond_dead (pbi, regno, cond)
6511 struct propagate_block_info *pbi;
6515 /* If this is a store to a predicate register, the value of the
6516 predicate is changing, so we don't know that the predicate as seen
6517 before is the same as that seen after. Flush all dependent
6518 conditions from reg_cond_dead. This will make all such
6519 conditionally live registers unconditionally live. */
6520 if (REGNO_REG_SET_P (pbi->reg_cond_reg, regno))
6521 flush_reg_cond_reg (pbi, regno);
6523 /* If this is an unconditional store, remove any conditional
6524 life that may have existed. */
6525 if (cond == NULL_RTX)
6526 splay_tree_remove (pbi->reg_cond_dead, regno);
6529 splay_tree_node node;
6530 struct reg_cond_life_info *rcli;
6533 /* Otherwise this is a conditional set. Record that fact.
6534 It may have been conditionally used, or there may be a
6535 subsequent set with a complementary condition. */
6537 node = splay_tree_lookup (pbi->reg_cond_dead, regno);
6540 /* The register was unconditionally live previously.
6541 Record the current condition as the condition under
6542 which it is dead. */
6543 rcli = (struct reg_cond_life_info *) xmalloc (sizeof (*rcli));
6544 rcli->condition = cond;
6545 rcli->stores = cond;
6546 rcli->orig_condition = const0_rtx;
6547 splay_tree_insert (pbi->reg_cond_dead, regno,
6548 (splay_tree_value) rcli);
6550 SET_REGNO_REG_SET (pbi->reg_cond_reg, REGNO (XEXP (cond, 0)));
6552 /* Not unconditionally dead. */
6557 /* The register was conditionally live previously.
6558 Add the new condition to the old. */
6559 rcli = (struct reg_cond_life_info *) node->value;
6560 ncond = rcli->condition;
6561 ncond = ior_reg_cond (ncond, cond, 1);
6562 if (rcli->stores == const0_rtx)
6563 rcli->stores = cond;
6564 else if (rcli->stores != const1_rtx)
6565 rcli->stores = ior_reg_cond (rcli->stores, cond, 1);
6567 /* If the register is now unconditionally dead, remove the entry
6568 in the splay_tree. A register is unconditionally dead if the
6569 dead condition ncond is true. A register is also unconditionally
6570 dead if the sum of all conditional stores is an unconditional
6571 store (stores is true), and the dead condition is identically the
6572 same as the original dead condition initialized at the end of
6573 the block. This is a pointer compare, not an rtx_equal_p compare. */
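/* Illustrative example: if this block previously recorded (reg 60) as
   dead under (ne (reg p) 0) and we now see a store under (eq (reg p) 0),
   the IOR of the two conditions folds to const1_rtx, the register is
   dead on every path, and the splay tree entry can be removed.  */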
6575 if (ncond == const1_rtx
6576 || (ncond == rcli->orig_condition && rcli->stores == const1_rtx))
6577 splay_tree_remove (pbi->reg_cond_dead, regno);
6580 rcli->condition = ncond;
6582 SET_REGNO_REG_SET (pbi->reg_cond_reg, REGNO (XEXP (cond, 0)));
6584 /* Not unconditionally dead. */
6593 /* Called from splay_tree_delete for pbi->reg_cond_life. */
6596 free_reg_cond_life_info (value)
6597 splay_tree_value value;
6599 struct reg_cond_life_info *rcli = (struct reg_cond_life_info *) value;
6603 /* Helper function for flush_reg_cond_reg. */
6606 flush_reg_cond_reg_1 (node, data)
6607 splay_tree_node node;
6610 struct reg_cond_life_info *rcli;
6611 int *xdata = (int *) data;
6612 unsigned int regno = xdata[0];
6614 /* Don't need to search if last flushed value was farther on in
6615 the in-order traversal. */
6616 if (xdata[1] >= (int) node->key)
6619 /* Splice out portions of the expression that refer to regno. */
6620 rcli = (struct reg_cond_life_info *) node->value;
6621 rcli->condition = elim_reg_cond (rcli->condition, regno);
6622 if (rcli->stores != const0_rtx && rcli->stores != const1_rtx)
6623 rcli->stores = elim_reg_cond (rcli->stores, regno);
6625 /* If the entire condition is now false, signal the node to be removed. */
6626 if (rcli->condition == const0_rtx)
6628 xdata[1] = node->key;
6631 else if (rcli->condition == const1_rtx)
6637 /* Flush all (sub) expressions referring to REGNO from REG_COND_LIVE. */
6640 flush_reg_cond_reg (pbi, regno)
6641 struct propagate_block_info *pbi;
6648 while (splay_tree_foreach (pbi->reg_cond_dead,
6649 flush_reg_cond_reg_1, pair) == -1)
6650 splay_tree_remove (pbi->reg_cond_dead, pair[1]);
6652 CLEAR_REGNO_REG_SET (pbi->reg_cond_reg, regno);
6655 /* Logical arithmetic on predicate conditions. IOR, NOT and AND.
6656 For ior/and, the ADD flag determines whether we want to add the new
6657 condition X to the old one unconditionally. If it is zero, we will
6658 only return a new expression if X allows us to simplify part of
6659 OLD, otherwise we return OLD unchanged to the caller.
6660 If ADD is nonzero, we will return a new condition in all cases. The
6661 toplevel caller of one of these functions should always pass 1 for ADD. */
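/* A worked example with hypothetical RTL: for OLD = (ne (reg p) 0) and
   X = (eq (reg p) 0), ior_reg_cond returns const1_rtx, since the two
   tests of the same predicate register are exact complements.  For an
   unrelated X it returns OLD untouched when ADD is zero, and builds
   (ior OLD X) when ADD is nonzero.  */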
6665 ior_reg_cond (old, x, add)
6671 if (GET_RTX_CLASS (GET_CODE (old)) == '<')
6673 if (GET_RTX_CLASS (GET_CODE (x)) == '<'
6674 && REVERSE_CONDEXEC_PREDICATES_P (GET_CODE (x), GET_CODE (old))
6675 && REGNO (XEXP (x, 0)) == REGNO (XEXP (old, 0)))
6677 if (GET_CODE (x) == GET_CODE (old)
6678 && REGNO (XEXP (x, 0)) == REGNO (XEXP (old, 0)))
6682 return gen_rtx_IOR (0, old, x);
6685 switch (GET_CODE (old))
6688 op0 = ior_reg_cond (XEXP (old, 0), x, 0);
6689 op1 = ior_reg_cond (XEXP (old, 1), x, 0);
6690 if (op0 != XEXP (old, 0) || op1 != XEXP (old, 1))
6692 if (op0 == const0_rtx)
6694 if (op1 == const0_rtx)
6696 if (op0 == const1_rtx || op1 == const1_rtx)
6698 if (op0 == XEXP (old, 0))
6699 op0 = gen_rtx_IOR (0, op0, x);
6701 op1 = gen_rtx_IOR (0, op1, x);
6702 return gen_rtx_IOR (0, op0, op1);
6706 return gen_rtx_IOR (0, old, x);
6709 op0 = ior_reg_cond (XEXP (old, 0), x, 0);
6710 op1 = ior_reg_cond (XEXP (old, 1), x, 0);
6711 if (op0 != XEXP (old, 0) || op1 != XEXP (old, 1))
6713 if (op0 == const1_rtx)
6715 if (op1 == const1_rtx)
6717 if (op0 == const0_rtx || op1 == const0_rtx)
6719 if (op0 == XEXP (old, 0))
6720 op0 = gen_rtx_IOR (0, op0, x);
6722 op1 = gen_rtx_IOR (0, op1, x);
6723 return gen_rtx_AND (0, op0, op1);
6727 return gen_rtx_IOR (0, old, x);
6730 op0 = and_reg_cond (XEXP (old, 0), not_reg_cond (x), 0);
6731 if (op0 != XEXP (old, 0))
6732 return not_reg_cond (op0);
6735 return gen_rtx_IOR (0, old, x);
6746 enum rtx_code x_code;
6748 if (x == const0_rtx)
6750 else if (x == const1_rtx)
6752 x_code = GET_CODE (x);
6755 if (GET_RTX_CLASS (x_code) == '<'
6756 && GET_CODE (XEXP (x, 0)) == REG)
6758 if (XEXP (x, 1) != const0_rtx)
6761 return gen_rtx_fmt_ee (reverse_condition (x_code),
6762 VOIDmode, XEXP (x, 0), const0_rtx);
6764 return gen_rtx_NOT (0, x);
6768 and_reg_cond (old, x, add)
6774 if (GET_RTX_CLASS (GET_CODE (old)) == '<')
6776 if (GET_RTX_CLASS (GET_CODE (x)) == '<'
6777 && GET_CODE (x) == reverse_condition (GET_CODE (old))
6778 && REGNO (XEXP (x, 0)) == REGNO (XEXP (old, 0)))
6780 if (GET_CODE (x) == GET_CODE (old)
6781 && REGNO (XEXP (x, 0)) == REGNO (XEXP (old, 0)))
6785 return gen_rtx_AND (0, old, x);
6788 switch (GET_CODE (old))
6791 op0 = and_reg_cond (XEXP (old, 0), x, 0);
6792 op1 = and_reg_cond (XEXP (old, 1), x, 0);
6793 if (op0 != XEXP (old, 0) || op1 != XEXP (old, 1))
6795 if (op0 == const0_rtx)
6797 if (op1 == const0_rtx)
6799 if (op0 == const1_rtx || op1 == const1_rtx)
6801 if (op0 == XEXP (old, 0))
6802 op0 = gen_rtx_AND (0, op0, x);
6804 op1 = gen_rtx_AND (0, op1, x);
6805 return gen_rtx_IOR (0, op0, op1);
6809 return gen_rtx_AND (0, old, x);
6812 op0 = and_reg_cond (XEXP (old, 0), x, 0);
6813 op1 = and_reg_cond (XEXP (old, 1), x, 0);
6814 if (op0 != XEXP (old, 0) || op1 != XEXP (old, 1))
6816 if (op0 == const1_rtx)
6818 if (op1 == const1_rtx)
6820 if (op0 == const0_rtx || op1 == const0_rtx)
6822 if (op0 == XEXP (old, 0))
6823 op0 = gen_rtx_AND (0, op0, x);
6825 op1 = gen_rtx_AND (0, op1, x);
6826 return gen_rtx_AND (0, op0, op1);
6831 /* If X is identical to one of the existing terms of the AND,
6832 then just return what we already have. */
6833 /* ??? There really should be some sort of recursive check here in
6834 case there are nested ANDs. */
6835 if ((GET_CODE (XEXP (old, 0)) == GET_CODE (x)
6836 && REGNO (XEXP (XEXP (old, 0), 0)) == REGNO (XEXP (x, 0)))
6837 || (GET_CODE (XEXP (old, 1)) == GET_CODE (x)
6838 && REGNO (XEXP (XEXP (old, 1), 0)) == REGNO (XEXP (x, 0))))
6841 return gen_rtx_AND (0, old, x);
6844 op0 = ior_reg_cond (XEXP (old, 0), not_reg_cond (x), 0);
6845 if (op0 != XEXP (old, 0))
6846 return not_reg_cond (op0);
6849 return gen_rtx_AND (0, old, x);
6856 /* Given a condition X, remove references to reg REGNO and return the
6857 new condition. The removal will be done so that all conditions
6858 involving REGNO are considered to evaluate to false. This function
6859 is used when the value of REGNO changes. */
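/* For instance, eliminating the register of (reg p) from the condition
   (ior (ne (reg p) 0) (ne (reg q) 0)) first turns the p test into
   const0_rtx and then folds the IOR, leaving just (ne (reg q) 0).  */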
6862 elim_reg_cond (x, regno)
6868 if (GET_RTX_CLASS (GET_CODE (x)) == '<')
6870 if (REGNO (XEXP (x, 0)) == regno)
6875 switch (GET_CODE (x))
6878 op0 = elim_reg_cond (XEXP (x, 0), regno);
6879 op1 = elim_reg_cond (XEXP (x, 1), regno);
6880 if (op0 == const0_rtx || op1 == const0_rtx)
6882 if (op0 == const1_rtx)
6884 if (op1 == const1_rtx)
6886 if (op0 == XEXP (x, 0) && op1 == XEXP (x, 1))
6888 return gen_rtx_AND (0, op0, op1);
6891 op0 = elim_reg_cond (XEXP (x, 0), regno);
6892 op1 = elim_reg_cond (XEXP (x, 1), regno);
6893 if (op0 == const1_rtx || op1 == const1_rtx)
6895 if (op0 == const0_rtx)
6897 if (op1 == const0_rtx)
6899 if (op0 == XEXP (x, 0) && op1 == XEXP (x, 1))
6901 return gen_rtx_IOR (0, op0, op1);
6904 op0 = elim_reg_cond (XEXP (x, 0), regno);
6905 if (op0 == const0_rtx)
6907 if (op0 == const1_rtx)
6909 if (op0 != XEXP (x, 0))
6910 return not_reg_cond (op0);
6917 #endif /* HAVE_conditional_execution */
6921 /* Try to substitute the auto-inc expression INC as the address inside
6922 MEM which occurs in INSN. Currently, the address of MEM is an expression
6923 involving INCR_REG, and INCR is the next use of INCR_REG; it is an insn
6924 that has a single set whose source is a PLUS of INCR_REG and something else. */
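/* A concrete (hypothetical) example: INSN contains MEM = (mem (reg 65)),
   INCR is (set (reg 65) (plus (reg 65) (const_int 4))), and INC would be
   (post_inc (reg 65)).  On success the separate add disappears into the
   addressing mode of MEM.  */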
6928 attempt_auto_inc (pbi, inc, insn, mem, incr, incr_reg)
6929 struct propagate_block_info *pbi;
6930 rtx inc, insn, mem, incr, incr_reg;
6932 int regno = REGNO (incr_reg);
6933 rtx set = single_set (incr);
6934 rtx q = SET_DEST (set);
6935 rtx y = SET_SRC (set);
6936 int opnum = XEXP (y, 0) == incr_reg ? 0 : 1;
6938 /* Make sure this reg appears only once in this insn. */
6939 if (count_occurrences (PATTERN (insn), incr_reg, 1) != 1)
6942 if (dead_or_set_p (incr, incr_reg)
6943 /* Mustn't autoinc an eliminable register. */
6944 && (regno >= FIRST_PSEUDO_REGISTER
6945 || ! TEST_HARD_REG_BIT (elim_reg_set, regno)))
6947 /* This is the simple case. Try to make the auto-inc. If
6948 we can't, we are done. Otherwise, we will do any
6949 needed updates below. */
6950 if (! validate_change (insn, &XEXP (mem, 0), inc, 0))
6953 else if (GET_CODE (q) == REG
6954 /* PREV_INSN used here to check the semi-open interval [INSN, INCR). */
6956 && ! reg_used_between_p (q, PREV_INSN (insn), incr)
6957 /* We must also check for sets of q as q may be
6958 a call clobbered hard register and there may
6959 be a call between PREV_INSN (insn) and incr. */
6960 && ! reg_set_between_p (q, PREV_INSN (insn), incr))
6962 /* We have *p followed sometime later by q = p+size.
6963 Both p and q must be live afterward,
6964 and q is not used between INSN and its assignment.
6965 Change it to q = p, ...*q..., q = q+size.
6966 Then fall into the usual case. */
6970 emit_move_insn (q, incr_reg);
6971 insns = get_insns ();
6974 if (basic_block_for_insn)
6975 for (temp = insns; temp; temp = NEXT_INSN (temp))
6976 set_block_for_insn (temp, pbi->bb);
6978 /* If we can't make the auto-inc, or can't make the
6979 replacement into Y, exit. There's no point in making
6980 the change below if we can't do the auto-inc and doing
6981 so is not correct in the pre-inc case. */
6984 validate_change (insn, &XEXP (mem, 0), inc, 1);
6985 validate_change (incr, &XEXP (y, opnum), q, 1);
6986 if (! apply_change_group ())
6989 /* We now know we'll be doing this change, so emit the
6990 new insn(s) and do the updates. */
6991 emit_insns_before (insns, insn);
6993 if (pbi->bb->head == insn)
6994 pbi->bb->head = insns;
6996 /* INCR will become a NOTE and INSN won't contain a
6997 use of INCR_REG. If a use of INCR_REG was just placed in
6998 the insn before INSN, make that the next use.
6999 Otherwise, invalidate it. */
7000 if (GET_CODE (PREV_INSN (insn)) == INSN
7001 && GET_CODE (PATTERN (PREV_INSN (insn))) == SET
7002 && SET_SRC (PATTERN (PREV_INSN (insn))) == incr_reg)
7003 pbi->reg_next_use[regno] = PREV_INSN (insn);
7005 pbi->reg_next_use[regno] = 0;
7010 /* REGNO is now used in INCR which is below INSN, but
7011 it previously wasn't live here. If we don't mark
7012 it as live, we'll put a REG_DEAD note for it
7013 on this insn, which is incorrect. */
7014 SET_REGNO_REG_SET (pbi->reg_live, regno);
7016 /* If there are any calls between INSN and INCR, show
7017 that REGNO now crosses them. */
7018 for (temp = insn; temp != incr; temp = NEXT_INSN (temp))
7019 if (GET_CODE (temp) == CALL_INSN)
7020 REG_N_CALLS_CROSSED (regno)++;
7025 /* If we haven't returned, it means we were able to make the
7026 auto-inc, so update the status. First, record that this insn
7027 has an implicit side effect. */
7029 REG_NOTES (insn) = alloc_EXPR_LIST (REG_INC, incr_reg, REG_NOTES (insn));
7031 /* Modify the old increment-insn to simply copy
7032 the already-incremented value of our register. */
7033 if (! validate_change (incr, &SET_SRC (set), incr_reg, 0))
7036 /* If that makes it a no-op (copying the register into itself), delete
7037 it so it won't appear to be a "use" and a "set" of this register. */
7039 if (REGNO (SET_DEST (set)) == REGNO (incr_reg))
7041 /* If the original source was dead, it's dead now. */
7044 while ((note = find_reg_note (incr, REG_DEAD, NULL_RTX)) != NULL_RTX)
7046 remove_note (incr, note);
7047 if (XEXP (note, 0) != incr_reg)
7048 CLEAR_REGNO_REG_SET (pbi->reg_live, REGNO (XEXP (note, 0)));
7051 PUT_CODE (incr, NOTE);
7052 NOTE_LINE_NUMBER (incr) = NOTE_INSN_DELETED;
7053 NOTE_SOURCE_FILE (incr) = 0;
7056 if (regno >= FIRST_PSEUDO_REGISTER)
7058 /* Count an extra reference to the reg. When a reg is
7059 incremented, spilling it is worse, so we want to make
7060 that less likely. */
7061 REG_FREQ (regno) += REG_FREQ_FROM_BB (pbi->bb);
7063 /* Count the increment as a setting of the register,
7064 even though it isn't a SET in rtl. */
7065 REG_N_SETS (regno)++;
7069 /* X is a MEM found in INSN. See if we can convert it into an auto-increment address. */
7073 find_auto_inc (pbi, x, insn)
7074 struct propagate_block_info *pbi;
7078 rtx addr = XEXP (x, 0);
7079 HOST_WIDE_INT offset = 0;
7080 rtx set, y, incr, inc_val;
7082 int size = GET_MODE_SIZE (GET_MODE (x));
7084 if (GET_CODE (insn) == JUMP_INSN)
7087 /* Here we detect use of an index register which might be good for
7088 postincrement, postdecrement, preincrement, or predecrement. */
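/* E.g. for a 4-byte access, (mem (reg 65)) followed later by
   reg 65 = reg 65 + 4 suggests POST_INC, while
   (mem (plus (reg 65) (const_int 4))) followed by the same increment
   suggests PRE_INC, since there the offset equals the step size.
   (The register numbers are illustrative only.)  */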
7090 if (GET_CODE (addr) == PLUS && GET_CODE (XEXP (addr, 1)) == CONST_INT)
7091 offset = INTVAL (XEXP (addr, 1)), addr = XEXP (addr, 0);
7093 if (GET_CODE (addr) != REG)
7096 regno = REGNO (addr);
7098 /* Is the next use an increment that might make auto-increment? */
7099 incr = pbi->reg_next_use[regno];
7100 if (incr == 0 || BLOCK_NUM (incr) != BLOCK_NUM (insn))
7102 set = single_set (incr);
7103 if (set == 0 || GET_CODE (set) != SET)
7107 if (GET_CODE (y) != PLUS)
7110 if (REG_P (XEXP (y, 0)) && REGNO (XEXP (y, 0)) == REGNO (addr))
7111 inc_val = XEXP (y, 1);
7112 else if (REG_P (XEXP (y, 1)) && REGNO (XEXP (y, 1)) == REGNO (addr))
7113 inc_val = XEXP (y, 0);
7117 if (GET_CODE (inc_val) == CONST_INT)
7119 if (HAVE_POST_INCREMENT
7120 && (INTVAL (inc_val) == size && offset == 0))
7121 attempt_auto_inc (pbi, gen_rtx_POST_INC (Pmode, addr), insn, x,
7123 else if (HAVE_POST_DECREMENT
7124 && (INTVAL (inc_val) == -size && offset == 0))
7125 attempt_auto_inc (pbi, gen_rtx_POST_DEC (Pmode, addr), insn, x,
7127 else if (HAVE_PRE_INCREMENT
7128 && (INTVAL (inc_val) == size && offset == size))
7129 attempt_auto_inc (pbi, gen_rtx_PRE_INC (Pmode, addr), insn, x,
7131 else if (HAVE_PRE_DECREMENT
7132 && (INTVAL (inc_val) == -size && offset == -size))
7133 attempt_auto_inc (pbi, gen_rtx_PRE_DEC (Pmode, addr), insn, x,
7135 else if (HAVE_POST_MODIFY_DISP && offset == 0)
7136 attempt_auto_inc (pbi, gen_rtx_POST_MODIFY (Pmode, addr,
7137 gen_rtx_PLUS (Pmode,
7140 insn, x, incr, addr);
7142 else if (GET_CODE (inc_val) == REG
7143 && ! reg_set_between_p (inc_val, PREV_INSN (insn),
7147 if (HAVE_POST_MODIFY_REG && offset == 0)
7148 attempt_auto_inc (pbi, gen_rtx_POST_MODIFY (Pmode, addr,
7149 gen_rtx_PLUS (Pmode,
7152 insn, x, incr, addr);
7156 #endif /* AUTO_INC_DEC */
7159 mark_used_reg (pbi, reg, cond, insn)
7160 struct propagate_block_info *pbi;
7162 rtx cond ATTRIBUTE_UNUSED;
7165 unsigned int regno_first, regno_last, i;
7166 int some_was_live, some_was_dead, some_not_set;
7168 regno_last = regno_first = REGNO (reg);
7169 if (regno_first < FIRST_PSEUDO_REGISTER)
7170 regno_last += HARD_REGNO_NREGS (regno_first, GET_MODE (reg)) - 1;
7172 /* Find out if any of this register is live after this instruction. */
7173 some_was_live = some_was_dead = 0;
7174 for (i = regno_first; i <= regno_last; ++i)
7176 int needed_regno = REGNO_REG_SET_P (pbi->reg_live, i);
7177 some_was_live |= needed_regno;
7178 some_was_dead |= ! needed_regno;
7181 /* Find out if any of the register was set this insn. */
7183 for (i = regno_first; i <= regno_last; ++i)
7184 some_not_set |= ! REGNO_REG_SET_P (pbi->new_set, i);
7186 if (pbi->flags & (PROP_LOG_LINKS | PROP_AUTOINC))
7188 /* Record where each reg is used, so when the reg is set we know
7189 the next insn that uses it. */
7190 pbi->reg_next_use[regno_first] = insn;
7193 if (pbi->flags & PROP_REG_INFO)
7195 if (regno_first < FIRST_PSEUDO_REGISTER)
7197 /* If this is a register we are going to try to eliminate,
7198 don't mark it live here. If we are successful in
7199 eliminating it, it need not be live unless it is used for
7200 pseudos, in which case it will have been set live when it
7201 was allocated to the pseudos. If the register will not
7202 be eliminated, reload will set it live at that point.
7204 Otherwise, record that this function uses this register. */
7205 /* ??? The PPC backend tries to "eliminate" on the pic
7206 register to itself. This should be fixed. In the mean
7207 time, hack around it. */
7209 if (! (TEST_HARD_REG_BIT (elim_reg_set, regno_first)
7210 && (regno_first == FRAME_POINTER_REGNUM
7211 || regno_first == ARG_POINTER_REGNUM)))
7212 for (i = regno_first; i <= regno_last; ++i)
7213 regs_ever_live[i] = 1;
7217 /* Keep track of which basic block each reg appears in. */
7219 register int blocknum = pbi->bb->index;
7220 if (REG_BASIC_BLOCK (regno_first) == REG_BLOCK_UNKNOWN)
7221 REG_BASIC_BLOCK (regno_first) = blocknum;
7222 else if (REG_BASIC_BLOCK (regno_first) != blocknum)
7223 REG_BASIC_BLOCK (regno_first) = REG_BLOCK_GLOBAL;
7225 /* Count (weighted) number of uses of each reg. */
7226 REG_FREQ (regno_first) += REG_FREQ_FROM_BB (pbi->bb);
7227 REG_N_REFS (regno_first)++;
7231 /* Record and count the insns in which a reg dies. If it is used in
7232 this insn and was dead below the insn then it dies in this insn.
7233 If it was set in this insn, we do not make a REG_DEAD note;
7234 likewise if we already made such a note. */
7235 if ((pbi->flags & (PROP_DEATH_NOTES | PROP_REG_INFO))
7239 /* Check for the case where the register dying partially
7240 overlaps the register set by this insn. */
7241 if (regno_first != regno_last)
7242 for (i = regno_first; i <= regno_last; ++i)
7243 some_was_live |= REGNO_REG_SET_P (pbi->new_set, i);
7245 /* If none of the words in X is needed, make a REG_DEAD note.
7246 Otherwise, we must make partial REG_DEAD notes. */
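/* Hypothetical illustration: a DImode value living in the hard register
   pair (r4,r5), of which only r5 is needed below, gets a single partial
   REG_DEAD note for the word-sized register r4 rather than one note for
   the whole DImode pair.  */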
7247 if (! some_was_live)
7249 if ((pbi->flags & PROP_DEATH_NOTES)
7250 && ! find_regno_note (insn, REG_DEAD, regno_first))
7252 = alloc_EXPR_LIST (REG_DEAD, reg, REG_NOTES (insn));
7254 if (pbi->flags & PROP_REG_INFO)
7255 REG_N_DEATHS (regno_first)++;
7259 /* Don't make a REG_DEAD note for a part of a register
7260 that is set in the insn. */
7261 for (i = regno_first; i <= regno_last; ++i)
7262 if (! REGNO_REG_SET_P (pbi->reg_live, i)
7263 && ! dead_or_set_regno_p (insn, i))
7265 = alloc_EXPR_LIST (REG_DEAD,
7266 gen_rtx_REG (reg_raw_mode[i], i),
7271 /* Mark the register as being live. */
7272 for (i = regno_first; i <= regno_last; ++i)
7274 SET_REGNO_REG_SET (pbi->reg_live, i);
7276 #ifdef HAVE_conditional_execution
7277 /* If this is a conditional use, record that fact. If it is later
7278 conditionally set, we'll know to kill the register. */
7279 if (cond != NULL_RTX)
7281 splay_tree_node node;
7282 struct reg_cond_life_info *rcli;
7287 node = splay_tree_lookup (pbi->reg_cond_dead, i);
7290 /* The register was unconditionally live previously.
7291 No need to do anything. */
7295 /* The register was conditionally live previously.
7296 Subtract the new life cond from the old death cond. */
7297 rcli = (struct reg_cond_life_info *) node->value;
7298 ncond = rcli->condition;
7299 ncond = and_reg_cond (ncond, not_reg_cond (cond), 1);
7301 /* If the register is now unconditionally live,
7302 remove the entry in the splay_tree. */
7303 if (ncond == const0_rtx)
7304 splay_tree_remove (pbi->reg_cond_dead, i);
7307 rcli->condition = ncond;
7308 SET_REGNO_REG_SET (pbi->reg_cond_reg,
7309 REGNO (XEXP (cond, 0)));
7315 /* The register was not previously live at all. Record
7316 the condition under which it is still dead. */
7317 rcli = (struct reg_cond_life_info *) xmalloc (sizeof (*rcli));
7318 rcli->condition = not_reg_cond (cond);
7319 rcli->stores = const0_rtx;
7320 rcli->orig_condition = const0_rtx;
7321 splay_tree_insert (pbi->reg_cond_dead, i,
7322 (splay_tree_value) rcli);
7324 SET_REGNO_REG_SET (pbi->reg_cond_reg, REGNO (XEXP (cond, 0)));
7327 else if (some_was_live)
7329 /* The register may have been conditionally live previously, but
7330 is now unconditionally live. Remove it from the conditionally
7331 dead list, so that a conditional set won't cause us to think
7333 splay_tree_remove (pbi->reg_cond_dead, i);
7339 /* Scan expression X and store a 1-bit in NEW_LIVE for each reg it uses.
7340 This is done assuming the registers needed from X are those that
7341 have 1-bits in PBI->REG_LIVE.
7343 INSN is the containing instruction. If INSN is dead, this function is not called. */
7347 mark_used_regs (pbi, x, cond, insn)
7348 struct propagate_block_info *pbi;
7351 register RTX_CODE code;
7353 int flags = pbi->flags;
7356 code = GET_CODE (x);
7376 /* If we are clobbering a MEM, mark any registers inside the address as being used. */
7378 if (GET_CODE (XEXP (x, 0)) == MEM)
7379 mark_used_regs (pbi, XEXP (XEXP (x, 0), 0), cond, insn);
7383 /* Don't bother watching stores to mems if this is not the
7384 final pass. We'll not be deleting dead stores this round. */
7385 if (optimize && (flags & PROP_SCAN_DEAD_CODE))
7387 /* Invalidate the data for the last MEM stored, but only if MEM is
7388 something that can be stored into. */
7389 if (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
7390 && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)))
7391 /* Needn't clear the memory set list. */
7395 rtx temp = pbi->mem_set_list;
7396 rtx prev = NULL_RTX;
7401 next = XEXP (temp, 1);
7402 if (anti_dependence (XEXP (temp, 0), x))
7404 /* Splice temp out of the list. */
7406 XEXP (prev, 1) = next;
7408 pbi->mem_set_list = next;
7409 free_EXPR_LIST_node (temp);
7410 pbi->mem_set_list_len--;
7418 /* If the memory reference had embedded side effects (autoincrement
7419 address modes), then we may need to kill some entries on the memory set list. */
7422 invalidate_mems_from_autoinc (pbi, insn);
7426 if (flags & PROP_AUTOINC)
7427 find_auto_inc (pbi, x, insn);
7432 #ifdef CLASS_CANNOT_CHANGE_MODE
7433 if (GET_CODE (SUBREG_REG (x)) == REG
7434 && REGNO (SUBREG_REG (x)) >= FIRST_PSEUDO_REGISTER
7435 && CLASS_CANNOT_CHANGE_MODE_P (GET_MODE (x),
7436 GET_MODE (SUBREG_REG (x))))
7437 REG_CHANGES_MODE (REGNO (SUBREG_REG (x))) = 1;
7440 /* While we're here, optimize this case. */
7442 if (GET_CODE (x) != REG)
7447 /* We see a register being used rather than set => mark it as needed. */
7448 mark_used_reg (pbi, x, cond, insn);
7453 register rtx testreg = SET_DEST (x);
7456 /* If storing into MEM, don't show it as being used. But do
7457 show the address as being used. */
7458 if (GET_CODE (testreg) == MEM)
7461 if (flags & PROP_AUTOINC)
7462 find_auto_inc (pbi, testreg, insn);
7464 mark_used_regs (pbi, XEXP (testreg, 0), cond, insn);
7465 mark_used_regs (pbi, SET_SRC (x), cond, insn);
7469 /* Storing in STRICT_LOW_PART is like storing in a reg
7470 in that this SET might be dead, so ignore it in TESTREG,
7471 but in some other ways it is like using the reg.
7473 Storing in a SUBREG or a bit field is like storing the entire
7474 register in that if the register's value is not used
7475 then this SET is not needed. */
7476 while (GET_CODE (testreg) == STRICT_LOW_PART
7477 || GET_CODE (testreg) == ZERO_EXTRACT
7478 || GET_CODE (testreg) == SIGN_EXTRACT
7479 || GET_CODE (testreg) == SUBREG)
7481 #ifdef CLASS_CANNOT_CHANGE_MODE
7482 if (GET_CODE (testreg) == SUBREG
7483 && GET_CODE (SUBREG_REG (testreg)) == REG
7484 && REGNO (SUBREG_REG (testreg)) >= FIRST_PSEUDO_REGISTER
7485 && CLASS_CANNOT_CHANGE_MODE_P (GET_MODE (SUBREG_REG (testreg)),
7486 GET_MODE (testreg)))
7487 REG_CHANGES_MODE (REGNO (SUBREG_REG (testreg))) = 1;
7490 /* Modifying a single register in an alternate mode
7491 does not use any of the old value. But these other
7492 ways of storing in a register do use the old value. */
7493 if (GET_CODE (testreg) == SUBREG
7494 && !(REG_SIZE (SUBREG_REG (testreg)) > REG_SIZE (testreg)))
7499 testreg = XEXP (testreg, 0);
7502 /* If this is a store into a register or group of registers,
7503 recursively scan the value being stored. */
7505 if ((GET_CODE (testreg) == PARALLEL
7506 && GET_MODE (testreg) == BLKmode)
7507 || (GET_CODE (testreg) == REG
7508 && (regno = REGNO (testreg),
7509 ! (regno == FRAME_POINTER_REGNUM
7510 && (! reload_completed || frame_pointer_needed)))
7511 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
7512 && ! (regno == HARD_FRAME_POINTER_REGNUM
7513 && (! reload_completed || frame_pointer_needed))
7515 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
7516 && ! (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
7521 mark_used_regs (pbi, SET_DEST (x), cond, insn);
7522 mark_used_regs (pbi, SET_SRC (x), cond, insn);
7529 case UNSPEC_VOLATILE:
7533 /* Traditional and volatile asm instructions must be considered to use
7534 and clobber all hard registers, all pseudo-registers and all of
7535 memory. So must TRAP_IF and UNSPEC_VOLATILE operations.
7537 Consider for instance a volatile asm that changes the fpu rounding
7538 mode. An insn should not be moved across this even if it only uses
7539 pseudo-regs because it might give an incorrectly rounded result.
7541 ?!? Unfortunately, marking all hard registers as live causes massive
7542 problems for the register allocator and marking all pseudos as live
7543 creates mountains of uninitialized variable warnings.
7545 So for now, just clear the memory set list and mark any regs
7546 we can find in ASM_OPERANDS as used. */
7547 if (code != ASM_OPERANDS || MEM_VOLATILE_P (x))
7549 free_EXPR_LIST_list (&pbi->mem_set_list);
7550 pbi->mem_set_list_len = 0;
7553 /* For all ASM_OPERANDS, we must traverse the vector of input operands.
7554 We cannot just fall through here, since then we would be confused
7555 by the ASM_INPUT rtx inside ASM_OPERANDS, which does not indicate
7556 a traditional asm, unlike its normal usage. */
7557 if (code == ASM_OPERANDS)
7561 for (j = 0; j < ASM_OPERANDS_INPUT_LENGTH (x); j++)
7562 mark_used_regs (pbi, ASM_OPERANDS_INPUT (x, j), cond, insn);
7568 if (cond != NULL_RTX)
7571 mark_used_regs (pbi, COND_EXEC_TEST (x), NULL_RTX, insn);
7573 cond = COND_EXEC_TEST (x);
7574 x = COND_EXEC_CODE (x);
7578 /* We _do_not_ want to scan operands of phi nodes. Operands of
7579 a phi function are evaluated only when control reaches this
7580 block along a particular edge. Therefore, regs that appear
7581 as arguments to phi should not be added to the global live at start. */
7589 /* Recursively scan the operands of this expression. */
7592 register const char * const fmt = GET_RTX_FORMAT (code);
7595 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
7599 /* Tail recursive case: save a function call level. */
7605 mark_used_regs (pbi, XEXP (x, i), cond, insn);
7607 else if (fmt[i] == 'E')
7610 for (j = 0; j < XVECLEN (x, i); j++)
7611 mark_used_regs (pbi, XVECEXP (x, i, j), cond, insn);
7620 try_pre_increment_1 (pbi, insn)
7621 struct propagate_block_info *pbi;
7624 /* Find the next use of this reg. If in same basic block,
7625 make it do pre-increment or pre-decrement if appropriate. */
7626 rtx x = single_set (insn);
7627 HOST_WIDE_INT amount = ((GET_CODE (SET_SRC (x)) == PLUS ? 1 : -1)
7628 * INTVAL (XEXP (SET_SRC (x), 1)));
7629 int regno = REGNO (SET_DEST (x));
7630 rtx y = pbi->reg_next_use[regno];
7632 && SET_DEST (x) != stack_pointer_rtx
7633 && BLOCK_NUM (y) == BLOCK_NUM (insn)
7634 /* Don't do this if the reg dies, or gets set in y; a standard addressing
7635 mode would be better. */
7636 && ! dead_or_set_p (y, SET_DEST (x))
7637 && try_pre_increment (y, SET_DEST (x), amount))
7639 /* We have found a suitable auto-increment and already changed
7640 insn Y to do it. So flush this increment instruction. */
7641 propagate_block_delete_insn (pbi->bb, insn);
7643 /* Count a reference to this reg for the increment insn we are
7644 deleting. When a reg is incremented, spilling it is worse,
7645 so we want to make that less likely. */
7646 if (regno >= FIRST_PSEUDO_REGISTER)
7648 REG_FREQ (regno) += REG_FREQ_FROM_BB (pbi->bb);
7649 REG_N_SETS (regno)++;
7652 /* Flush any remembered memories depending on the value of
7653 the incremented register. */
7654 invalidate_mems_from_set (pbi, SET_DEST (x));
7661 /* Try to change INSN so that it does pre-increment or pre-decrement
7662 addressing on register REG in order to add AMOUNT to REG.
7663 AMOUNT is negative for pre-decrement.
7664 Returns 1 if the change could be made.
7665 This checks everything about the validity of the result of modifying INSN. */
7668 try_pre_increment (insn, reg, amount)
7670 HOST_WIDE_INT amount;
7674 /* Nonzero if we can try to make a pre-increment or pre-decrement.
7675 For example, addl $4,r1; movl (r1),... can become movl +(r1),... */
7677 /* Nonzero if we can try to make a post-increment or post-decrement.
7678 For example, addl $4,r1; movl -4(r1),... can become movl (r1)+,...
7679 It is possible for both PRE_OK and POST_OK to be nonzero if the machine
7680 supports both pre-inc and post-inc, or both pre-dec and post-dec. */
7683 /* Nonzero if the opportunity actually requires post-inc or post-dec. */
7686 /* From the sign of increment, see which possibilities are conceivable
7687 on this target machine. */
7688 if (HAVE_PRE_INCREMENT && amount > 0)
7690 if (HAVE_POST_INCREMENT && amount > 0)
7693 if (HAVE_PRE_DECREMENT && amount < 0)
7695 if (HAVE_POST_DECREMENT && amount < 0)
7698 if (! (pre_ok || post_ok))
7701 /* It is not safe to add a side effect to a jump insn
7702 because if the incremented register is spilled and must be reloaded
7703 there would be no way to store the incremented value back in memory. */
7705 if (GET_CODE (insn) == JUMP_INSN)
7710 use = find_use_as_address (PATTERN (insn), reg, 0);
7711 if (post_ok && (use == 0 || use == (rtx) 1))
7713 use = find_use_as_address (PATTERN (insn), reg, -amount);
7717 if (use == 0 || use == (rtx) 1)
7720 if (GET_MODE_SIZE (GET_MODE (use)) != (amount > 0 ? amount : - amount))
7723 /* See if this combination of instruction and addressing mode exists. */
7724 if (! validate_change (insn, &XEXP (use, 0),
7725 gen_rtx_fmt_e (amount > 0
7726 ? (do_post ? POST_INC : PRE_INC)
7727 : (do_post ? POST_DEC : PRE_DEC),
7731 /* Record that this insn now has an implicit side effect on X. */
7732 REG_NOTES (insn) = alloc_EXPR_LIST (REG_INC, reg, REG_NOTES (insn));
7736 #endif /* AUTO_INC_DEC */
7738 /* Find the place in the rtx X where REG is used as a memory address.
7739 Return the MEM rtx that so uses it.
7740 If PLUSCONST is nonzero, search instead for a memory address equivalent to
7741 (plus REG (const_int PLUSCONST)).
7743 If such an address does not appear, return 0.
7744 If REG appears more than once, or is used other than in such an address, don't return it; return (rtx) 1. */
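/* Example: given X = (set (reg 66) (mem (plus (reg 65) (const_int 4)))),
   REG = (reg 65) and PLUSCONST = 4, the inner MEM is returned.  With
   PLUSCONST = 0 the result is (rtx) 1 instead, since the register is
   used, but not as a bare address.  (Hypothetical register numbers.)  */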
7748 find_use_as_address (x, reg, plusconst)
7751 HOST_WIDE_INT plusconst;
7753 enum rtx_code code = GET_CODE (x);
7754 const char * const fmt = GET_RTX_FORMAT (code);
7756 register rtx value = 0;
7759 if (code == MEM && XEXP (x, 0) == reg && plusconst == 0)
7762 if (code == MEM && GET_CODE (XEXP (x, 0)) == PLUS
7763 && XEXP (XEXP (x, 0), 0) == reg
7764 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
7765 && INTVAL (XEXP (XEXP (x, 0), 1)) == plusconst)
7768 if (code == SIGN_EXTRACT || code == ZERO_EXTRACT)
7770 /* If REG occurs inside a MEM used in a bit-field reference,
7771 that is unacceptable. */
7772 if (find_use_as_address (XEXP (x, 0), reg, 0) != 0)
7773 return (rtx) (HOST_WIDE_INT) 1;
7777 return (rtx) (HOST_WIDE_INT) 1;
7779 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
7783 tem = find_use_as_address (XEXP (x, i), reg, plusconst);
7787 return (rtx) (HOST_WIDE_INT) 1;
7789 else if (fmt[i] == 'E')
7792 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
7794 tem = find_use_as_address (XVECEXP (x, i, j), reg, plusconst);
7798 return (rtx) (HOST_WIDE_INT) 1;
7806 /* Write information about registers and basic blocks into FILE.
7807 This is part of making a debugging dump. */
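/* A typical register line produced by dump_flow_info below looks
   roughly like
       Register 67 used 3 times across 14 insns; set 2 times; pref GENERAL_REGS.
   (Illustrative output only; the exact fields printed depend on the
   register's properties.)  */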
7810 dump_regset (r, outf)
7817 fputs (" (nil)", outf);
7821 EXECUTE_IF_SET_IN_REG_SET (r, 0, i,
7823 fprintf (outf, " %d", i);
7824 if (i < FIRST_PSEUDO_REGISTER)
7825 fprintf (outf, " [%s]",
7830 /* Print a human-readable representation of R on the standard error
7831 stream. This function is designed to be used from within the debugger. */
7838 dump_regset (r, stderr);
7839 putc ('\n', stderr);
7843 dump_flow_info (file)
7847 static const char * const reg_class_names[] = REG_CLASS_NAMES;
7849 fprintf (file, "%d registers.\n", max_regno);
7850 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
7853 enum reg_class class, altclass;
7854 fprintf (file, "\nRegister %d used %d times across %d insns",
7855 i, REG_N_REFS (i), REG_LIVE_LENGTH (i));
7856 if (REG_BASIC_BLOCK (i) >= 0)
7857 fprintf (file, " in block %d", REG_BASIC_BLOCK (i));
7859 fprintf (file, "; set %d time%s", REG_N_SETS (i),
7860 (REG_N_SETS (i) == 1) ? "" : "s");
7861 if (REG_USERVAR_P (regno_reg_rtx[i]))
7862 fprintf (file, "; user var");
7863 if (REG_N_DEATHS (i) != 1)
7864 fprintf (file, "; dies in %d places", REG_N_DEATHS (i));
7865 if (REG_N_CALLS_CROSSED (i) == 1)
7866 fprintf (file, "; crosses 1 call");
7867 else if (REG_N_CALLS_CROSSED (i))
7868 fprintf (file, "; crosses %d calls", REG_N_CALLS_CROSSED (i));
7869 if (PSEUDO_REGNO_BYTES (i) != UNITS_PER_WORD)
7870 fprintf (file, "; %d bytes", PSEUDO_REGNO_BYTES (i));
7871 class = reg_preferred_class (i);
7872 altclass = reg_alternate_class (i);
7873 if (class != GENERAL_REGS || altclass != ALL_REGS)
7875 if (altclass == ALL_REGS || class == ALL_REGS)
7876 fprintf (file, "; pref %s", reg_class_names[(int) class]);
7877 else if (altclass == NO_REGS)
7878 fprintf (file, "; %s or none", reg_class_names[(int) class]);
7880 fprintf (file, "; pref %s, else %s",
7881 reg_class_names[(int) class],
7882 reg_class_names[(int) altclass]);
7884 if (REG_POINTER (regno_reg_rtx[i]))
7885 fprintf (file, "; pointer");
7886 fprintf (file, ".\n");
7889 fprintf (file, "\n%d basic blocks, %d edges.\n", n_basic_blocks, n_edges);
7890 for (i = 0; i < n_basic_blocks; i++)
7892 register basic_block bb = BASIC_BLOCK (i);
7895 fprintf (file, "\nBasic block %d: first insn %d, last %d, loop_depth %d, count ",
7896 i, INSN_UID (bb->head), INSN_UID (bb->end), bb->loop_depth);
7897 fprintf (file, HOST_WIDEST_INT_PRINT_DEC, (HOST_WIDEST_INT) bb->count);
7898 fprintf (file, ", freq %i.\n", bb->frequency);
7900 fprintf (file, "Predecessors: ");
7901 for (e = bb->pred; e; e = e->pred_next)
7902 dump_edge_info (file, e, 0);
7904 fprintf (file, "\nSuccessors: ");
7905 for (e = bb->succ; e; e = e->succ_next)
7906 dump_edge_info (file, e, 1);
7908 fprintf (file, "\nRegisters live at start:");
7909 dump_regset (bb->global_live_at_start, file);
7911 fprintf (file, "\nRegisters live at end:");
7912 dump_regset (bb->global_live_at_end, file);
7923 dump_flow_info (stderr);
7927 dump_edge_info (file, e, do_succ)
7932 basic_block side = (do_succ ? e->dest : e->src);
7934 if (side == ENTRY_BLOCK_PTR)
7935 fputs (" ENTRY", file);
7936 else if (side == EXIT_BLOCK_PTR)
7937 fputs (" EXIT", file);
7939 fprintf (file, " %d", side->index);
7942 fprintf (file, " [%.1f%%] ", e->probability * 100.0 / REG_BR_PROB_BASE);
7946 fprintf (file, " count:");
7947 fprintf (file, HOST_WIDEST_INT_PRINT_DEC, (HOST_WIDEST_INT) e->count);
7952 static const char * const bitnames[] = {
7953 "fallthru", "crit", "ab", "abcall", "eh", "fake", "dfs_back"
7956 int i, flags = e->flags;
7960 for (i = 0; flags; i++)
7961 if (flags & (1 << i))
7967 if (i < (int) ARRAY_SIZE (bitnames))
7968 fputs (bitnames[i], file);
7970 fprintf (file, "%d", i);
7977 /* Print out one basic block with live information at start and end. */
7988 fprintf (outf, ";; Basic block %d, loop depth %d, count ",
7989 bb->index, bb->loop_depth);
7990 fprintf (outf, HOST_WIDEST_INT_PRINT_DEC, (HOST_WIDEST_INT) bb->count);
7993 fputs (";; Predecessors: ", outf);
7994 for (e = bb->pred; e; e = e->pred_next)
7995 dump_edge_info (outf, e, 0);
7998 fputs (";; Registers live at start:", outf);
7999 dump_regset (bb->global_live_at_start, outf);
8002 for (insn = bb->head, last = NEXT_INSN (bb->end);
8004 insn = NEXT_INSN (insn))
8005 print_rtl_single (outf, insn);
8007 fputs (";; Registers live at end:", outf);
8008 dump_regset (bb->global_live_at_end, outf);
8011 fputs (";; Successors: ", outf);
8012 for (e = bb->succ; e; e = e->succ_next)
8013 dump_edge_info (outf, e, 1);
8021 dump_bb (bb, stderr);
8028 dump_bb (BASIC_BLOCK (n), stderr);
8031 /* Like print_rtl, but also print out live information for the start of each
8035 print_rtl_with_bb (outf, rtx_first)
8039 register rtx tmp_rtx;
8042 fprintf (outf, "(nil)\n");
8046 enum bb_state { NOT_IN_BB, IN_ONE_BB, IN_MULTIPLE_BB };
8047 int max_uid = get_max_uid ();
8048 basic_block *start = (basic_block *)
8049 xcalloc (max_uid, sizeof (basic_block));
8050 basic_block *end = (basic_block *)
8051 xcalloc (max_uid, sizeof (basic_block));
8052 enum bb_state *in_bb_p = (enum bb_state *)
8053 xcalloc (max_uid, sizeof (enum bb_state));
8055 for (i = n_basic_blocks - 1; i >= 0; i--)
8057 basic_block bb = BASIC_BLOCK (i);
8060 start[INSN_UID (bb->head)] = bb;
8061 end[INSN_UID (bb->end)] = bb;
8062 for (x = bb->head; x != NULL_RTX; x = NEXT_INSN (x))
8064 enum bb_state state = IN_MULTIPLE_BB;
8065 if (in_bb_p[INSN_UID (x)] == NOT_IN_BB)
8067 in_bb_p[INSN_UID (x)] = state;
8074 for (tmp_rtx = rtx_first; NULL != tmp_rtx; tmp_rtx = NEXT_INSN (tmp_rtx))
8079 if ((bb = start[INSN_UID (tmp_rtx)]) != NULL)
8081 fprintf (outf, ";; Start of basic block %d, registers live:",
8083 dump_regset (bb->global_live_at_start, outf);
8087 if (in_bb_p[INSN_UID (tmp_rtx)] == NOT_IN_BB
8088 && GET_CODE (tmp_rtx) != NOTE
8089 && GET_CODE (tmp_rtx) != BARRIER)
8090 fprintf (outf, ";; Insn is not within a basic block\n");
8091 else if (in_bb_p[INSN_UID (tmp_rtx)] == IN_MULTIPLE_BB)
8092 fprintf (outf, ";; Insn is in multiple basic blocks\n");
8094 did_output = print_rtl_single (outf, tmp_rtx);
8096 if ((bb = end[INSN_UID (tmp_rtx)]) != NULL)
8098 fprintf (outf, ";; End of basic block %d, registers live:\n",
8100 dump_regset (bb->global_live_at_end, outf);
8113 if (current_function_epilogue_delay_list != 0)
8115 fprintf (outf, "\n;; Insns in epilogue delay list:\n\n");
8116 for (tmp_rtx = current_function_epilogue_delay_list; tmp_rtx != 0;
8117 tmp_rtx = XEXP (tmp_rtx, 1))
8118 print_rtl_single (outf, XEXP (tmp_rtx, 0));
8122 /* Dump the rtl into the current debugging dump file, then abort. */
8125 print_rtl_and_abort_fcn (file, line, function)
8128 const char *function;
8132 print_rtl_with_bb (rtl_dump_file, get_insns ());
8133 fclose (rtl_dump_file);
8136 fancy_abort (file, line, function);
8139 /* Recompute register set/reference counts immediately prior to register allocation.
8142 This avoids problems with set/reference counts changing to/from values
8143 which have special meanings to the register allocators.
8145 Additionally, the reference counts are the primary component used by the
8146 register allocators to prioritize pseudos for allocation to hard regs.
8147 More accurate reference counts generally lead to better register allocation.
8149 F is the first insn to be scanned.
8151 LOOP_STEP denotes how much loop_depth should be incremented per
8152 loop nesting level in order to increase the ref count more for
8153 references in a loop.
8155 It might be worthwhile to update REG_LIVE_LENGTH, REG_BASIC_BLOCK and
8156 possibly other information which is used by the register allocators. */
8159 recompute_reg_usage (f, loop_step)
8160 rtx f ATTRIBUTE_UNUSED;
8161 int loop_step ATTRIBUTE_UNUSED;
8163 allocate_reg_life_data ();
8164 update_life_info (NULL, UPDATE_LIFE_LOCAL, PROP_REG_INFO);
8167 /* Optionally removes all the REG_DEAD and REG_UNUSED notes from a set of
8168 blocks. If BLOCKS is NULL, assume the universal set. Returns a count
8169 of the number of registers that died. */
8172 count_or_remove_death_notes (blocks, kill)
8178 for (i = n_basic_blocks - 1; i >= 0; --i)
8183 if (blocks && ! TEST_BIT (blocks, i))
8186 bb = BASIC_BLOCK (i);
8188 for (insn = bb->head;; insn = NEXT_INSN (insn))
8192 rtx *pprev = ®_NOTES (insn);
8197 switch (REG_NOTE_KIND (link))
8200 if (GET_CODE (XEXP (link, 0)) == REG)
8202 rtx reg = XEXP (link, 0);
8205 if (REGNO (reg) >= FIRST_PSEUDO_REGISTER)
8208 n = HARD_REGNO_NREGS (REGNO (reg), GET_MODE (reg));
8216 rtx next = XEXP (link, 1);
8217 free_EXPR_LIST_node (link);
8218 *pprev = link = next;
8224 pprev = &XEXP (link, 1);
8231 if (insn == bb->end)
8240 /* Update the block-for-insn mapping for every insn within BB. */
8243 update_bb_for_insn (bb)
8248 if (! basic_block_for_insn)
8251 for (insn = bb->head; ; insn = NEXT_INSN (insn))
8253 set_block_for_insn (insn, bb);
8255 if (insn == bb->end)
8261 /* Record INSN's block as BB. */
8264 set_block_for_insn (insn, bb)
8268 size_t uid = INSN_UID (insn);
8269 if (uid >= basic_block_for_insn->num_elements)
8273 /* Add one-eighth the size so we don't keep calling xrealloc. */
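/* For example, a uid of 800 grows the varray to 800 + (800 + 7) / 8
   == 900 elements.  */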
8274 new_size = uid + (uid + 7) / 8;
8276 VARRAY_GROW (basic_block_for_insn, new_size);
8278 VARRAY_BB (basic_block_for_insn, uid) = bb;
8281 /* When a new insn has been inserted into an existing block, it will
8282 sometimes emit more than a single insn. This routine will set the
8283 block number for the specified insn, and look backwards in the insn
8284 chain to see if there are any other uninitialized insns immediately
8285 previous to this one, and set the block number for them too. */
8288 set_block_for_new_insns (insn, bb)
8292 set_block_for_insn (insn, bb);
8294 /* Scan the previous instructions setting the block number until we find
8295 an instruction that has the block number set, or we find a note of any kind. */
8297 for (insn = PREV_INSN (insn); insn != NULL_RTX; insn = PREV_INSN (insn))
8299 if (GET_CODE (insn) == NOTE)
8301 if ((unsigned) INSN_UID (insn) >= basic_block_for_insn->num_elements
8302 || BLOCK_FOR_INSN (insn) == 0)
8303 set_block_for_insn (insn, bb);
8309 /* Verify the consistency of the CFG. This function checks some CFG invariants and
8310 aborts when something is wrong. The hope is that it will help to
8311 convert many optimization passes to preserve the CFG consistently.
8313 Currently it performs the following checks:
8315 - test head/end pointers
8316 - overlapping of basic blocks
8317 - edge list correctness
8318 - headers of basic blocks (the NOTE_INSN_BASIC_BLOCK note)
8319 - tails of basic blocks (ensure that boundary is necessary)
8320 - scans body of the basic block for JUMP_INSN, CODE_LABEL
8321 and NOTE_INSN_BASIC_BLOCK
8322 - check that all insns are in the basic blocks
8323 (except the switch handling code, barriers and notes)
8324 - check that all returns are followed by barriers
8326 In the future it can be extended to check a lot of other things as well
8327 (reachability of basic blocks, life information, etc.). */
8332 const int max_uid = get_max_uid ();
8333 const rtx rtx_first = get_insns ();
8334 rtx last_head = get_last_insn ();
8335 basic_block *bb_info, *last_visited;
8337 int i, last_bb_num_seen, num_bb_notes, err = 0;
8339 bb_info = (basic_block *) xcalloc (max_uid, sizeof (basic_block));
8340 last_visited = (basic_block *) xcalloc (n_basic_blocks + 2,
8341 sizeof (basic_block));
8343 for (i = n_basic_blocks - 1; i >= 0; i--)
8345 basic_block bb = BASIC_BLOCK (i);
8346 rtx head = bb->head;
8349 /* Verify the end of the basic block is in the INSN chain. */
8350 for (x = last_head; x != NULL_RTX; x = PREV_INSN (x))
8355 error ("End insn %d for block %d not found in the insn stream.",
8356 INSN_UID (end), bb->index);
8360 /* Work backwards from the end to the head of the basic block
8361 to verify the head is in the RTL chain. */
8362 for (; x != NULL_RTX; x = PREV_INSN (x))
8364 /* While walking over the insn chain, verify insns appear
8365 in only one basic block and initialize the BB_INFO array
8366 used by other passes. */
8367 if (bb_info[INSN_UID (x)] != NULL)
8369 error ("Insn %d is in multiple basic blocks (%d and %d)",
8370 INSN_UID (x), bb->index, bb_info[INSN_UID (x)]->index);
8373 bb_info[INSN_UID (x)] = bb;
8380 error ("Head insn %d for block %d not found in the insn stream.",
8381 INSN_UID (head), bb->index);
8388 /* Now check the basic blocks (boundaries etc.) */
8389 for (i = n_basic_blocks - 1; i >= 0; i--)
8391 basic_block bb = BASIC_BLOCK (i);
8392 /* Check correctness of edge lists. */
8394 int has_fallthru = 0;
8399 if (last_visited [e->dest->index + 2] == bb)
8401 error ("verify_flow_info: Duplicate edge %i->%i",
8402 e->src->index, e->dest->index);
8405 last_visited [e->dest->index + 2] = bb;
8407 if (e->flags & EDGE_FALLTHRU)
8410 if ((e->flags & EDGE_FALLTHRU)
8411 && e->src != ENTRY_BLOCK_PTR
8412 && e->dest != EXIT_BLOCK_PTR)
8415 if (e->src->index + 1 != e->dest->index)
8417 error ("verify_flow_info: Incorrect blocks for fallthru %i->%i",
8418 e->src->index, e->dest->index);
8422 for (insn = NEXT_INSN (e->src->end); insn != e->dest->head;
8423 insn = NEXT_INSN (insn))
8424 if (GET_CODE (insn) == BARRIER || INSN_P (insn))
8426 error ("verify_flow_info: Incorrect fallthru %i->%i",
8427 e->src->index, e->dest->index);
8428 fatal_insn ("Wrong insn in the fallthru edge", insn);
8434 error ("verify_flow_info: Basic block %d succ edge is corrupted",
8436 fprintf (stderr, "Predecessor: ");
8437 dump_edge_info (stderr, e, 0);
8438 fprintf (stderr, "\nSuccessor: ");
8439 dump_edge_info (stderr, e, 1);
8440 fprintf (stderr, "\n");
8443 if (e->dest != EXIT_BLOCK_PTR)
8445 edge e2 = e->dest->pred;
8446 while (e2 && e2 != e)
8450 error ("Basic block %i edge lists are corrupted", bb->index);
8460 /* Ensure existence of barrier in BB with no fallthru edges. */
8461 for (insn = bb->end; GET_CODE (insn) != BARRIER;
8462 insn = NEXT_INSN (insn))
8464 || (GET_CODE (insn) == NOTE
8465 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_BASIC_BLOCK))
8467 error ("Missing barrier after block %i", bb->index);
8477 error ("Basic block %d pred edge is corrupted", bb->index);
8478 fputs ("Predecessor: ", stderr);
8479 dump_edge_info (stderr, e, 0);
8480 fputs ("\nSuccessor: ", stderr);
8481 dump_edge_info (stderr, e, 1);
8482 fputc ('\n', stderr);
8485 if (e->src != ENTRY_BLOCK_PTR)
8487 edge e2 = e->src->succ;
8488 while (e2 && e2 != e)
8492 error ("Basic block %i edge lists are corrupted", bb->index);
8499 /* OK, the pointers are correct. Now check the header of the basic
8500 block. It ought to contain an optional CODE_LABEL followed
8501 by NOTE_BASIC_BLOCK. */
8503 if (GET_CODE (x) == CODE_LABEL)
8507 error ("NOTE_INSN_BASIC_BLOCK is missing for block %d",
8513 if (!NOTE_INSN_BASIC_BLOCK_P (x) || NOTE_BASIC_BLOCK (x) != bb)
8515 error ("NOTE_INSN_BASIC_BLOCK is missing for block %d\n",
8522 /* Do checks for empty blocks here. */
8529 if (NOTE_INSN_BASIC_BLOCK_P (x))
8531 error ("NOTE_INSN_BASIC_BLOCK %d in the middle of basic block %d",
8532 INSN_UID (x), bb->index);
8539 if (GET_CODE (x) == JUMP_INSN
8540 || GET_CODE (x) == CODE_LABEL
8541 || GET_CODE (x) == BARRIER)
8543 error ("In basic block %d:", bb->index);
8544 fatal_insn ("Flow control insn inside a basic block", x);
8552 last_bb_num_seen = -1;
8557 if (NOTE_INSN_BASIC_BLOCK_P (x))
8559 basic_block bb = NOTE_BASIC_BLOCK (x);
8561 if (bb->index != last_bb_num_seen + 1)
8562 internal_error ("Basic blocks not numbered consecutively.");
8564 last_bb_num_seen = bb->index;
8567 if (!bb_info[INSN_UID (x)])
8569 switch (GET_CODE (x))
8576 /* An addr_vec is placed outside any basic block. */
8578 && GET_CODE (NEXT_INSN (x)) == JUMP_INSN
8579 && (GET_CODE (PATTERN (NEXT_INSN (x))) == ADDR_DIFF_VEC
8580 || GET_CODE (PATTERN (NEXT_INSN (x))) == ADDR_VEC))
8585 /* But in any case, non-deletable labels can appear anywhere. */
8589 fatal_insn ("Insn outside basic block", x);
8594 && GET_CODE (x) == JUMP_INSN
8595 && returnjump_p (x) && ! condjump_p (x)
8596 && ! (NEXT_INSN (x) && GET_CODE (NEXT_INSN (x)) == BARRIER))
8597 fatal_insn ("Return not followed by barrier", x);
8602 if (num_bb_notes != n_basic_blocks)
8604 ("number of bb notes in insn chain (%d) != n_basic_blocks (%d)",
8605 num_bb_notes, n_basic_blocks);
8608 internal_error ("verify_flow_info failed.");
8612 free (last_visited);
8615 /* Functions to access an edge list with a vector representation.
8616 Enough data is kept such that given an index number, the
8617 pred and succ that edge represents can be determined, or
8618 given a pred and a succ, its index number can be returned.
8619 This allows algorithms which consume a lot of memory to
8620 represent the normally full matrix of edge (pred,succ) with a
8621 single indexed vector, edge (EDGE_INDEX (pred, succ)), with no
8622 wasted space in the client code due to sparse flow graphs. */
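/* A minimal usage sketch, assuming the declarations in basic-block.h:

     struct edge_list *elist = create_edge_list ();
     int ix = EDGE_INDEX (elist, BASIC_BLOCK (0), BASIC_BLOCK (1));
     if (ix != EDGE_INDEX_NO_EDGE)
       fprintf (stderr, "edge 0->1 has index %d\n", ix);
     free_edge_list (elist);

   The block indices used here are purely illustrative.  */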
8624 /* This function initializes the edge list. Basically the entire
8625 flowgraph is processed, and all edges are assigned a number,
8626 and the data structure is filled in. */
8631 struct edge_list *elist;
8637 block_count = n_basic_blocks + 2; /* Include the entry and exit blocks. */
8641 /* Determine the number of edges in the flow graph by counting successor
8642 edges on each basic block. */
8643 for (x = 0; x < n_basic_blocks; x++)
8645 basic_block bb = BASIC_BLOCK (x);
8647 for (e = bb->succ; e; e = e->succ_next)
8650 /* Don't forget successors of the entry block. */
8651 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
8654 elist = (struct edge_list *) xmalloc (sizeof (struct edge_list));
8655 elist->num_blocks = block_count;
8656 elist->num_edges = num_edges;
8657 elist->index_to_edge = (edge *) xmalloc (sizeof (edge) * num_edges);
8661 /* Follow successors of the entry block, and register these edges. */
8662 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
8664 elist->index_to_edge[num_edges] = e;
8668 for (x = 0; x < n_basic_blocks; x++)
8670 basic_block bb = BASIC_BLOCK (x);
8672 /* Follow all successors of blocks, and register these edges. */
8673 for (e = bb->succ; e; e = e->succ_next)
8675 elist->index_to_edge[num_edges] = e;
8682 /* This function frees the memory associated with an edge list. */
8685 free_edge_list (elist)
8686 struct edge_list *elist;
8690 free (elist->index_to_edge);
8695 /* This function provides debug output showing an edge list. */
8698 print_edge_list (f, elist)
8700 struct edge_list *elist;
8703 fprintf (f, "Compressed edge list, %d BBs + entry & exit, and %d edges\n",
8704 elist->num_blocks - 2, elist->num_edges);
8706 for (x = 0; x < elist->num_edges; x++)
8708 fprintf (f, " %-4d - edge(", x);
8709 if (INDEX_EDGE_PRED_BB (elist, x) == ENTRY_BLOCK_PTR)
8710 fprintf (f, "entry,");
8712 fprintf (f, "%d,", INDEX_EDGE_PRED_BB (elist, x)->index);
8714 if (INDEX_EDGE_SUCC_BB (elist, x) == EXIT_BLOCK_PTR)
8715 fprintf (f, "exit)\n");
8717 fprintf (f, "%d)\n", INDEX_EDGE_SUCC_BB (elist, x)->index);
8721 /* This function provides an internal consistency check of an edge list,
8722 verifying that all edges are present, and that there are no extra edges. */
8726 verify_edge_list (f, elist)
8728 struct edge_list *elist;
8730 int x, pred, succ, index;
8733 for (x = 0; x < n_basic_blocks; x++)
8735 basic_block bb = BASIC_BLOCK (x);
8737 for (e = bb->succ; e; e = e->succ_next)
8739 pred = e->src->index;
8740 succ = e->dest->index;
8741 index = EDGE_INDEX (elist, e->src, e->dest);
8742 if (index == EDGE_INDEX_NO_EDGE)
8744 fprintf (f, "*p* No index for edge from %d to %d\n", pred, succ);
8747 if (INDEX_EDGE_PRED_BB (elist, index)->index != pred)
8748 fprintf (f, "*p* Pred for index %d should be %d not %d\n",
8749 index, pred, INDEX_EDGE_PRED_BB (elist, index)->index);
8750 if (INDEX_EDGE_SUCC_BB (elist, index)->index != succ)
8751 fprintf (f, "*p* Succ for index %d should be %d not %d\n",
8752 index, succ, INDEX_EDGE_SUCC_BB (elist, index)->index);
8755 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
8757 pred = e->src->index;
8758 succ = e->dest->index;
8759 index = EDGE_INDEX (elist, e->src, e->dest);
8760 if (index == EDGE_INDEX_NO_EDGE)
8762 fprintf (f, "*p* No index for edge from %d to %d\n", pred, succ);
8765 if (INDEX_EDGE_PRED_BB (elist, index)->index != pred)
8766 fprintf (f, "*p* Pred for index %d should be %d not %d\n",
8767 index, pred, INDEX_EDGE_PRED_BB (elist, index)->index);
8768 if (INDEX_EDGE_SUCC_BB (elist, index)->index != succ)
8769 fprintf (f, "*p* Succ for index %d should be %d not %d\n",
8770 index, succ, INDEX_EDGE_SUCC_BB (elist, index)->index);
8772 /* We've verified that all the edges are in the list; now let's make sure
8773 there are no spurious edges in the list. */
8775 for (pred = 0; pred < n_basic_blocks; pred++)
8776 for (succ = 0; succ < n_basic_blocks; succ++)
8778 basic_block p = BASIC_BLOCK (pred);
8779 basic_block s = BASIC_BLOCK (succ);
8783 for (e = p->succ; e; e = e->succ_next)
8789 for (e = s->pred; e; e = e->pred_next)
8795 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), BASIC_BLOCK (succ))
8796 == EDGE_INDEX_NO_EDGE && found_edge != 0)
8797 fprintf (f, "*** Edge (%d, %d) appears to not have an index\n",
8799 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), BASIC_BLOCK (succ))
8800 != EDGE_INDEX_NO_EDGE && found_edge == 0)
8801 fprintf (f, "*** Edge (%d, %d) has index %d, but there is no edge\n",
8802 pred, succ, EDGE_INDEX (elist, BASIC_BLOCK (pred),
8803 BASIC_BLOCK (succ)));
8805 for (succ = 0; succ < n_basic_blocks; succ++)
8807 basic_block p = ENTRY_BLOCK_PTR;
8808 basic_block s = BASIC_BLOCK (succ);
8812 for (e = p->succ; e; e = e->succ_next)
8818 for (e = s->pred; e; e = e->pred_next)
8824 if (EDGE_INDEX (elist, ENTRY_BLOCK_PTR, BASIC_BLOCK (succ))
8825 == EDGE_INDEX_NO_EDGE && found_edge != 0)
8826 fprintf (f, "*** Edge (entry, %d) appears to not have an index\n",
8828 if (EDGE_INDEX (elist, ENTRY_BLOCK_PTR, BASIC_BLOCK (succ))
8829 != EDGE_INDEX_NO_EDGE && found_edge == 0)
8830 fprintf (f, "*** Edge (entry, %d) has index %d, but no edge exists\n",
8831 succ, EDGE_INDEX (elist, ENTRY_BLOCK_PTR,
8832 BASIC_BLOCK (succ)));
8834 for (pred = 0; pred < n_basic_blocks; pred++)
8836 basic_block p = BASIC_BLOCK (pred);
8837 basic_block s = EXIT_BLOCK_PTR;
8841 for (e = p->succ; e; e = e->succ_next)
8847 for (e = s->pred; e; e = e->pred_next)
8853 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), EXIT_BLOCK_PTR)
8854 == EDGE_INDEX_NO_EDGE && found_edge != 0)
8855 fprintf (f, "*** Edge (%d, exit) appears to not have an index\n",
8857 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), EXIT_BLOCK_PTR)
8858 != EDGE_INDEX_NO_EDGE && found_edge == 0)
8859 fprintf (f, "*** Edge (%d, exit) has index %d, but no edge exists\n",
8860 pred, EDGE_INDEX (elist, BASIC_BLOCK (pred),
8865 /* This routine will determine what, if any, edge there is between
8866 a specified predecessor and successor. */
8869 find_edge_index (edge_list, pred, succ)
8870 struct edge_list *edge_list;
8871 basic_block pred, succ;
8874 for (x = 0; x < NUM_EDGES (edge_list); x++)
8876 if (INDEX_EDGE_PRED_BB (edge_list, x) == pred
8877 && INDEX_EDGE_SUCC_BB (edge_list, x) == succ)
8880 return (EDGE_INDEX_NO_EDGE);
8883 /* This function will remove an edge from the flow graph. */
8889 edge last_pred = NULL;
8890 edge last_succ = NULL;
8892 basic_block src, dest;
8895 for (tmp = src->succ; tmp && tmp != e; tmp = tmp->succ_next)
8901 last_succ->succ_next = e->succ_next;
8903 src->succ = e->succ_next;
8905 for (tmp = dest->pred; tmp && tmp != e; tmp = tmp->pred_next)
8911 last_pred->pred_next = e->pred_next;
8913 dest->pred = e->pred_next;
8919 /* This routine will remove any fake successor edges for a basic block.
8920 When the edge is removed, it is also removed from whatever predecessor list it is in. */
8924 remove_fake_successors (bb)
8928 for (e = bb->succ; e;)
8932 if ((tmp->flags & EDGE_FAKE) == EDGE_FAKE)
8937 /* This routine will remove all fake edges from the flow graph. If
8938 we remove all fake successors, it will automatically remove all
8939 fake predecessors. */
8942 remove_fake_edges ()
8946 for (x = 0; x < n_basic_blocks; x++)
8947 remove_fake_successors (BASIC_BLOCK (x));
8949 /* We've handled all successors except the entry block's. */
8950 remove_fake_successors (ENTRY_BLOCK_PTR);
8953 /* This function will add a fake edge between any block which has no
8954 successors, and the exit block. Some data flow equations require these edges to exist. */
8958 add_noreturn_fake_exit_edges ()
8962 for (x = 0; x < n_basic_blocks; x++)
8963 if (BASIC_BLOCK (x)->succ == NULL)
8964 make_edge (NULL, BASIC_BLOCK (x), EXIT_BLOCK_PTR, EDGE_FAKE);
8967 /* This function adds a fake edge from any infinite loop to the
8968 exit block. Some optimizations require a path from each node to the exit node.
8971 See also Morgan, Figure 3.10, pp. 82-83.
8973 The current implementation is ugly, not attempting to minimize the
8974 number of inserted fake edges. To reduce the number of fake edges
8975 to insert, add fake edges from _innermost_ loops containing only
8976 nodes not reachable from the exit block. */
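/* For example, a function whose body ends in "for (;;);" produces a
   block with only a self-loop successor and no path to EXIT_BLOCK_PTR;
   the reverse depth-first search below never reaches it, so it is given
   a fake edge to the exit block.  */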
8979 connect_infinite_loops_to_exit ()
8981 basic_block unvisited_block;
8983 /* Perform depth-first search in the reverse graph to find nodes
8984 reachable from the exit block. */
8985 struct depth_first_search_dsS dfs_ds;
8987 flow_dfs_compute_reverse_init (&dfs_ds);
8988 flow_dfs_compute_reverse_add_bb (&dfs_ds, EXIT_BLOCK_PTR);
8990 /* Repeatedly add fake edges, updating the unreachable nodes. */
8993 unvisited_block = flow_dfs_compute_reverse_execute (&dfs_ds);
8994 if (!unvisited_block)
8996 make_edge (NULL, unvisited_block, EXIT_BLOCK_PTR, EDGE_FAKE);
8997 flow_dfs_compute_reverse_add_bb (&dfs_ds, unvisited_block);
9000 flow_dfs_compute_reverse_finish (&dfs_ds);
/* Redirect an edge's successor from one block to another.  */

void
redirect_edge_succ (e, new_succ)
     edge e;
     basic_block new_succ;
{
  edge *pe;

  /* Disconnect the edge from the old successor block.  */
  for (pe = &e->dest->pred; *pe != e; pe = &(*pe)->pred_next)
    continue;
  *pe = (*pe)->pred_next;

  /* Reconnect the edge to the new successor block.  */
  e->pred_next = new_succ->pred;
  new_succ->pred = e;
  e->dest = new_succ;
}
/* Like previous but avoid a possible duplicate edge.  */

void
redirect_edge_succ_nodup (e, new_succ)
     edge e;
     basic_block new_succ;
{
  edge s;

  /* Check whether the edge is already present.  */
  for (s = e->src->succ; s; s = s->succ_next)
    if (s->dest == new_succ && s != e)
      break;

  if (s)
    {
      /* Merge E into the existing edge and delete E.  */
      s->flags |= e->flags;
      s->probability += e->probability;
      s->count += e->count;
      remove_edge (e);
    }
  else
    redirect_edge_succ (e, new_succ);
}
/* Redirect an edge's predecessor from one block to another.  */

void
redirect_edge_pred (e, new_pred)
     edge e;
     basic_block new_pred;
{
  edge *pe;

  /* Disconnect the edge from the old predecessor block.  */
  for (pe = &e->src->succ; *pe != e; pe = &(*pe)->succ_next)
    continue;
  *pe = (*pe)->succ_next;

  /* Reconnect the edge to the new predecessor block.  */
  e->succ_next = new_pred->succ;
  new_pred->succ = e;
  e->src = new_pred;
}
/* Dump the list of basic blocks in the bitmap NODES.  */

void
flow_nodes_print (str, nodes, file)
     const char *str;
     const sbitmap nodes;
     FILE *file;
{
  int node;

  if (! nodes)
    return;

  fprintf (file, "%s { ", str);
  EXECUTE_IF_SET_IN_SBITMAP (nodes, 0, node, {fprintf (file, "%d ", node);});
  fputs ("}\n", file);
}

/* Dump the list of edges in the array EDGE_LIST.  */

void
flow_edge_list_print (str, edge_list, num_edges, file)
     const char *str;
     const edge *edge_list;
     int num_edges;
     FILE *file;
{
  int i;

  if (! edge_list)
    return;

  fprintf (file, "%s { ", str);
  for (i = 0; i < num_edges; i++)
    fprintf (file, "%d->%d ", edge_list[i]->src->index,
             edge_list[i]->dest->index);
  fputs ("}\n", file);
}
/* Dump loop related CFG information.  */

static void
flow_loops_cfg_dump (loops, file)
     const struct loops *loops;
     FILE *file;
{
  int i;

  if (! loops->num || ! file || ! loops->cfg.dom)
    return;

  for (i = 0; i < n_basic_blocks; i++)
    {
      edge succ;

      fprintf (file, ";; %d succs { ", i);
      for (succ = BASIC_BLOCK (i)->succ; succ; succ = succ->succ_next)
        fprintf (file, "%d ", succ->dest->index);
      flow_nodes_print ("} dom", loops->cfg.dom[i], file);
    }

  /* Dump the DFS node order.  */
  if (loops->cfg.dfs_order)
    {
      fputs (";; DFS order: ", file);
      for (i = 0; i < n_basic_blocks; i++)
        fprintf (file, "%d ", loops->cfg.dfs_order[i]);
      fputs ("\n", file);
    }

  /* Dump the reverse completion node order.  */
  if (loops->cfg.rc_order)
    {
      fputs (";; RC order: ", file);
      for (i = 0; i < n_basic_blocks; i++)
        fprintf (file, "%d ", loops->cfg.rc_order[i]);
      fputs ("\n", file);
    }
}

/* Return non-zero if the nodes of LOOP are a subset of OUTER.  */

static int
flow_loop_nested_p (outer, loop)
     struct loop *outer;
     struct loop *loop;
{
  return sbitmap_a_subset_b_p (loop->nodes, outer->nodes);
}
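/* For example, if an inner loop has nodes {3,4} and an outer loop has
   nodes {2,3,4,5}, the inner node bitmap is a subset of the outer one
   and this returns non-zero.  Two disjoint loops that merely share a
   header (see flow_loops_dump below) fail the subset test in both
   directions.  (Block numbers here are illustrative only.)  */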
/* Dump the loop information specified by LOOP to the stream FILE
   using auxiliary dump callback function LOOP_DUMP_AUX if non null.  */

void
flow_loop_dump (loop, file, loop_dump_aux, verbose)
     const struct loop *loop;
     FILE *file;
     void (*loop_dump_aux) PARAMS((const struct loop *, FILE *, int));
     int verbose;
{
  if (! loop || ! loop->header)
    return;

  fprintf (file, ";;\n;; Loop %d (%d to %d):%s%s\n",
           loop->num, INSN_UID (loop->first->head),
           INSN_UID (loop->last->end),
           loop->shared ? " shared" : "",
           loop->invalid ? " invalid" : "");
  fprintf (file, ";; header %d, latch %d, pre-header %d, first %d, last %d\n",
           loop->header->index, loop->latch->index,
           loop->pre_header ? loop->pre_header->index : -1,
           loop->first->index, loop->last->index);
  fprintf (file, ";; depth %d, level %d, outer %ld\n",
           loop->depth, loop->level,
           (long) (loop->outer ? loop->outer->num : -1));

  if (loop->pre_header_edges)
    flow_edge_list_print (";; pre-header edges", loop->pre_header_edges,
                          loop->num_pre_header_edges, file);
  flow_edge_list_print (";; entry edges", loop->entry_edges,
                        loop->num_entries, file);
  fprintf (file, ";; %d", loop->num_nodes);
  flow_nodes_print (" nodes", loop->nodes, file);
  flow_edge_list_print (";; exit edges", loop->exit_edges,
                        loop->num_exits, file);
  if (loop->exits_doms)
    flow_nodes_print (";; exit doms", loop->exits_doms, file);
  if (loop_dump_aux)
    loop_dump_aux (loop, file, verbose);
}
/* Dump the loop information specified by LOOPS to the stream FILE,
   using auxiliary dump callback function LOOP_DUMP_AUX if non null.  */

void
flow_loops_dump (loops, file, loop_dump_aux, verbose)
     const struct loops *loops;
     FILE *file;
     void (*loop_dump_aux) PARAMS((const struct loop *, FILE *, int));
     int verbose;
{
  int i;
  int num_loops;

  num_loops = loops->num;
  if (! num_loops || ! file)
    return;

  fprintf (file, ";; %d loops found, %d levels\n",
           num_loops, loops->levels);

  for (i = 0; i < num_loops; i++)
    {
      struct loop *loop = &loops->array[i];

      flow_loop_dump (loop, file, loop_dump_aux, verbose);

      if (loop->shared)
        {
          int j;

          for (j = 0; j < i; j++)
            {
              struct loop *oloop = &loops->array[j];

              if (loop->header == oloop->header)
                {
                  int disjoint;
                  int smaller;

                  smaller = loop->num_nodes < oloop->num_nodes;

                  /* If the union of LOOP and OLOOP is different than
                     the larger of LOOP and OLOOP then LOOP and OLOOP
                     must be disjoint.  */
                  disjoint = ! flow_loop_nested_p (smaller ? loop : oloop,
                                                   smaller ? oloop : loop);
                  fprintf (file,
                           ";; loop header %d shared by loops %d, %d %s\n",
                           loop->header->index, i, j,
                           disjoint ? "disjoint" : "nested");
                }
            }
        }
    }

  if (verbose)
    flow_loops_cfg_dump (loops, file);
}
/* Free all the memory allocated for LOOPS.  */

void
flow_loops_free (loops)
     struct loops *loops;
{
  if (loops->array)
    {
      int i;

      if (! loops->num)
        abort ();

      /* Free the loop descriptors.  */
      for (i = 0; i < loops->num; i++)
        {
          struct loop *loop = &loops->array[i];

          if (loop->pre_header_edges)
            free (loop->pre_header_edges);
          if (loop->nodes)
            sbitmap_free (loop->nodes);
          if (loop->entry_edges)
            free (loop->entry_edges);
          if (loop->exit_edges)
            free (loop->exit_edges);
          if (loop->exits_doms)
            sbitmap_free (loop->exits_doms);
        }

      free (loops->array);
      loops->array = NULL;

      if (loops->cfg.dom)
        sbitmap_vector_free (loops->cfg.dom);
      if (loops->cfg.dfs_order)
        free (loops->cfg.dfs_order);
      if (loops->cfg.rc_order)
        free (loops->cfg.rc_order);
      if (loops->shared_headers)
        sbitmap_free (loops->shared_headers);
    }
}
/* Find the entry edges into the loop with header HEADER and nodes
   NODES and store in ENTRY_EDGES array.  Return the number of entry
   edges from the loop.  */

static int
flow_loop_entry_edges_find (header, nodes, entry_edges)
     basic_block header;
     const sbitmap nodes;
     edge **entry_edges;
{
  edge e;
  int num_entries;

  *entry_edges = NULL;

  num_entries = 0;
  for (e = header->pred; e; e = e->pred_next)
    {
      basic_block src = e->src;

      if (src == ENTRY_BLOCK_PTR || ! TEST_BIT (nodes, src->index))
        num_entries++;
    }

  if (! num_entries)
    abort ();

  *entry_edges = (edge *) xmalloc (num_entries * sizeof (edge));

  num_entries = 0;
  for (e = header->pred; e; e = e->pred_next)
    {
      basic_block src = e->src;

      if (src == ENTRY_BLOCK_PTR || ! TEST_BIT (nodes, src->index))
        (*entry_edges)[num_entries++] = e;
    }

  return num_entries;
}
/* Find the exit edges from the loop using the bitmap of loop nodes
   NODES and store in EXIT_EDGES array.  Return the number of
   exit edges from the loop.  */

static int
flow_loop_exit_edges_find (nodes, exit_edges)
     const sbitmap nodes;
     edge **exit_edges;
{
  edge e;
  int node;
  int num_exits;

  *exit_edges = NULL;

  /* Check all nodes within the loop to see if there are any
     successors not in the loop.  Note that a node may have multiple
     exiting edges: a conditional jump, for example, has both a jump
     edge and a fallthru edge, and either or both may leave the loop.  */
  num_exits = 0;
  EXECUTE_IF_SET_IN_SBITMAP (nodes, 0, node, {
    for (e = BASIC_BLOCK (node)->succ; e; e = e->succ_next)
      {
        basic_block dest = e->dest;

        if (dest == EXIT_BLOCK_PTR || ! TEST_BIT (nodes, dest->index))
          num_exits++;
      }
  });

  if (! num_exits)
    return 0;

  *exit_edges = (edge *) xmalloc (num_exits * sizeof (edge));

  /* Store all exiting edges into an array.  */
  num_exits = 0;
  EXECUTE_IF_SET_IN_SBITMAP (nodes, 0, node, {
    for (e = BASIC_BLOCK (node)->succ; e; e = e->succ_next)
      {
        basic_block dest = e->dest;

        if (dest == EXIT_BLOCK_PTR || ! TEST_BIT (nodes, dest->index))
          (*exit_edges)[num_exits++] = e;
      }
  });

  return num_exits;
}
/* Find the nodes contained within the loop with header HEADER and
   latch LATCH and store in NODES.  Return the number of nodes within
   the loop.  */

static int
flow_loop_nodes_find (header, latch, nodes)
     basic_block header;
     basic_block latch;
     sbitmap nodes;
{
  basic_block *stack;
  int sp;
  int num_nodes = 1;

  stack = (basic_block *) xmalloc (n_basic_blocks * sizeof (basic_block));
  sp = 0;

  /* Start with only the loop header in the set of loop nodes.  */
  sbitmap_zero (nodes);
  SET_BIT (nodes, header->index);
  header->loop_depth++;

  /* Push the loop latch on to the stack.  */
  if (! TEST_BIT (nodes, latch->index))
    {
      SET_BIT (nodes, latch->index);
      latch->loop_depth++;
      num_nodes++;
      stack[sp++] = latch;
    }

  while (sp)
    {
      basic_block node;
      edge e;

      node = stack[--sp];
      for (e = node->pred; e; e = e->pred_next)
        {
          basic_block ancestor = e->src;

          /* If an ancestor is not yet marked as part of the loop, add
             it to the set of loop nodes and push it on to the stack.  */
          if (ancestor != ENTRY_BLOCK_PTR
              && ! TEST_BIT (nodes, ancestor->index))
            {
              SET_BIT (nodes, ancestor->index);
              ancestor->loop_depth++;
              num_nodes++;
              stack[sp++] = ancestor;
            }
        }
    }

  free (stack);
  return num_nodes;
}
/* Compute the depth first search order and store in the array
   DFS_ORDER if non-zero, marking the nodes visited in VISITED.  If
   RC_ORDER is non-zero, return the reverse completion number for each
   node.  Returns the number of nodes visited.  A depth first search
   tries to get as far away from the starting point as quickly as
   possible.  */

static int
flow_depth_first_order_compute (dfs_order, rc_order)
     int *dfs_order;
     int *rc_order;
{
  edge *stack;
  int sp;
  int dfsnum = 0;
  int rcnum = n_basic_blocks - 1;
  sbitmap visited;

  /* Allocate stack for back-tracking up CFG.  */
  stack = (edge *) xmalloc ((n_basic_blocks + 1) * sizeof (edge));
  sp = 0;

  /* Allocate bitmap to track nodes that have been visited.  */
  visited = sbitmap_alloc (n_basic_blocks);

  /* None of the nodes in the CFG have been visited yet.  */
  sbitmap_zero (visited);

  /* Push the first edge on to the stack.  */
  stack[sp++] = ENTRY_BLOCK_PTR->succ;

  while (sp)
    {
      edge e;
      basic_block src;
      basic_block dest;

      /* Look at the edge on the top of the stack.  */
      e = stack[sp - 1];
      src = e->src;
      dest = e->dest;

      /* Check if the edge destination has been visited yet.  */
      if (dest != EXIT_BLOCK_PTR && ! TEST_BIT (visited, dest->index))
        {
          /* Mark that we have visited the destination.  */
          SET_BIT (visited, dest->index);

          if (dfs_order)
            dfs_order[dfsnum++] = dest->index;

          if (dest->succ)
            /* Since the DEST node has been visited for the first
               time, check its successors.  */
            stack[sp++] = dest->succ;
          else if (rc_order)
            /* There are no successors for the DEST node so assign
               its reverse completion number.  */
            rc_order[rcnum--] = dest->index;
        }
      else
        {
          if (! e->succ_next && src != ENTRY_BLOCK_PTR && rc_order)
            /* There are no more successors for the SRC node
               so assign its reverse completion number.  */
            rc_order[rcnum--] = src->index;

          if (e->succ_next)
            stack[sp - 1] = e->succ_next;
          else
            sp--;
        }
    }

  free (stack);
  sbitmap_free (visited);

  /* The number of nodes visited should not be greater than
     n_basic_blocks.  */
  if (dfsnum > n_basic_blocks)
    abort ();

  /* There are some nodes left in the CFG that are unreachable.  */
  if (dfsnum < n_basic_blocks)
    abort ();

  return dfsnum;
}
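/* Usage sketch (flow_loops_find below allocates the arrays the same
   way and then keeps them in loops->cfg):

     int *dfs_order = (int *) xmalloc (n_basic_blocks * sizeof (int));
     int *rc_order = (int *) xmalloc (n_basic_blocks * sizeof (int));

     flow_depth_first_order_compute (dfs_order, rc_order);
     (visit blocks via rc_order so outer loop headers come first)
     free (dfs_order);
     free (rc_order);  */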
/* Compute the depth first search order on the _reverse_ graph and
   store in the array DFS_ORDER, marking the nodes visited in VISITED.
   Returns the number of nodes visited.

   The computation is split into four pieces:

   flow_dfs_compute_reverse_init () creates the necessary data
   structures.

   flow_dfs_compute_reverse_add_bb () adds a basic block to the data
   structures.  The block will start the search.

   flow_dfs_compute_reverse_execute () continues (or starts) the
   search using the block on the top of the stack, stopping when the
   stack is empty.

   flow_dfs_compute_reverse_finish () destroys the necessary data
   structures.

   Thus, the user will probably call ..._init(), call ..._add_bb() to
   add a beginning basic block to the stack, call ..._execute(),
   possibly add another bb to the stack and again call ..._execute(),
   ..., and finally call _finish().  */
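/* For example, connect_infinite_loops_to_exit above drives these entry
   points as follows (a condensed sketch of that function, omitting the
   make_edge call performed per unvisited block):

     struct depth_first_search_dsS dfs_ds;
     basic_block bb;

     flow_dfs_compute_reverse_init (&dfs_ds);
     flow_dfs_compute_reverse_add_bb (&dfs_ds, EXIT_BLOCK_PTR);
     while ((bb = flow_dfs_compute_reverse_execute (&dfs_ds)) != NULL)
       flow_dfs_compute_reverse_add_bb (&dfs_ds, bb);
     flow_dfs_compute_reverse_finish (&dfs_ds);  */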
/* Initialize the data structures used for depth-first search on the
   reverse graph.  DATA is the depth-first search context to be set
   up; the caller seeds the search by pushing one or more blocks with
   flow_dfs_compute_reverse_add_bb.  */

static void
flow_dfs_compute_reverse_init (data)
     depth_first_search_ds data;
{
  /* Allocate stack for back-tracking up CFG.  */
  data->stack
    = (basic_block *) xmalloc ((n_basic_blocks - (INVALID_BLOCK + 1))
                               * sizeof (basic_block));
  data->sp = 0;

  /* Allocate bitmap to track nodes that have been visited.  */
  data->visited_blocks = sbitmap_alloc (n_basic_blocks - (INVALID_BLOCK + 1));

  /* None of the nodes in the CFG have been visited yet.  */
  sbitmap_zero (data->visited_blocks);
}
/* Add the specified basic block to the top of the dfs data
   structures.  When the search continues, it will start at the
   block.  */

static void
flow_dfs_compute_reverse_add_bb (data, bb)
     depth_first_search_ds data;
     basic_block bb;
{
  data->stack[data->sp++] = bb;
}
/* Continue the depth-first search through the reverse graph starting
   with the block at the stack's top and ending when the stack is
   empty.  Visited nodes are marked.  Returns an unvisited basic
   block, or NULL if there is none available.  */

static basic_block
flow_dfs_compute_reverse_execute (data)
     depth_first_search_ds data;
{
  basic_block bb;
  edge e;
  int i;

  while (data->sp > 0)
    {
      bb = data->stack[--data->sp];

      /* Mark that we have visited this node.  */
      if (!TEST_BIT (data->visited_blocks, bb->index - (INVALID_BLOCK + 1)))
        {
          SET_BIT (data->visited_blocks, bb->index - (INVALID_BLOCK + 1));

          /* Perform depth-first search on adjacent vertices.  */
          for (e = bb->pred; e; e = e->pred_next)
            flow_dfs_compute_reverse_add_bb (data, e->src);
        }
    }

  /* Determine if there are unvisited basic blocks.  */
  for (i = n_basic_blocks - (INVALID_BLOCK + 1); --i >= 0;)
    if (!TEST_BIT (data->visited_blocks, i))
      return BASIC_BLOCK (i + (INVALID_BLOCK + 1));
  return NULL;
}
/* Destroy the data structures needed for depth-first search on the
   reverse graph.  */

static void
flow_dfs_compute_reverse_finish (data)
     depth_first_search_ds data;
{
  free (data->stack);
  sbitmap_free (data->visited_blocks);
}
/* Find the root node of the loop pre-header extended basic block and
   the edges along the trace from the root node to the loop header.  */

static void
flow_loop_pre_header_scan (loop)
     struct loop *loop;
{
  int num = 0;
  basic_block ebb;

  loop->num_pre_header_edges = 0;

  if (loop->num_entries != 1)
    return;

  ebb = loop->entry_edges[0]->src;

  if (ebb != ENTRY_BLOCK_PTR)
    {
      edge e;

      /* Count number of edges along trace from loop header to
         root of pre-header extended basic block.  Usually this is
         only one or two edges.  */
      num = 1;
      while (ebb->pred->src != ENTRY_BLOCK_PTR && ! ebb->pred->pred_next)
        {
          ebb = ebb->pred->src;
          num++;
        }

      loop->pre_header_edges = (edge *) xmalloc (num * sizeof (edge));
      loop->num_pre_header_edges = num;

      /* Store edges in the order that they are followed.  The source
         of the first edge is the root node of the pre-header extended
         basic block and the destination of the last edge is the loop
         header.  */
      for (e = loop->entry_edges[0]; num; e = e->src->pred)
        loop->pre_header_edges[--num] = e;
    }
}
/* Return the block for the pre-header of the loop with header
   HEADER where DOM specifies the dominator information.  Return NULL if
   there is no pre-header.  */

static basic_block
flow_loop_pre_header_find (header, dom)
     basic_block header;
     const sbitmap *dom;
{
  basic_block pre_header;
  edge e;

  /* If block p is a predecessor of the header and is the only block
     that the header does not dominate, then it is the pre-header.  */
  pre_header = NULL;
  for (e = header->pred; e; e = e->pred_next)
    {
      basic_block node = e->src;

      if (node != ENTRY_BLOCK_PTR
          && ! TEST_BIT (dom[node->index], header->index))
        {
          if (pre_header == NULL)
            pre_header = node;
          else
            {
              /* There are multiple edges into the header from outside
                 the loop so there is no pre-header block.  */
              pre_header = NULL;
              break;
            }
        }
    }

  return pre_header;
}
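/* For instance, in a CFG fragment like

     B1 -> B2,  B2 -> B3,  B3 -> B2

   where B2 is the loop header and B3 the latch, B1 is the only
   predecessor of B2 that B2 does not dominate, so B1 is the
   pre-header.  Had a second edge entered B2 from outside the loop,
   the function would return NULL.  (Block numbers are illustrative
   only.)  */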
/* Add LOOP to the loop hierarchy tree where PREVLOOP was the loop
   previously added.  The insertion algorithm assumes that the loops
   are added in the order found by a depth first search of the CFG.  */

static void
flow_loop_tree_node_add (prevloop, loop)
     struct loop *prevloop;
     struct loop *loop;
{
  if (flow_loop_nested_p (prevloop, loop))
    {
      prevloop->inner = loop;
      loop->outer = prevloop;
      return;
    }

  while (prevloop->outer)
    {
      if (flow_loop_nested_p (prevloop->outer, loop))
        {
          prevloop->next = loop;
          loop->outer = prevloop->outer;
          return;
        }

      prevloop = prevloop->outer;
    }

  prevloop->next = loop;
  loop->outer = NULL;
}
/* Build the loop hierarchy tree for LOOPS.  */

static void
flow_loops_tree_build (loops)
     struct loops *loops;
{
  int i;
  int num_loops;

  num_loops = loops->num;
  if (! num_loops)
    return;

  /* Root the loop hierarchy tree with the first loop found.
     Since we used a depth first search this should be the
     outermost loop.  */
  loops->tree_root = &loops->array[0];
  loops->tree_root->outer = loops->tree_root->inner
    = loops->tree_root->next = NULL;

  /* Add the remaining loops to the tree.  */
  for (i = 1; i < num_loops; i++)
    flow_loop_tree_node_add (&loops->array[i - 1], &loops->array[i]);
}
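/* Once built, the tree hangs off loops->tree_root: top-level loops are
   chained by next, and each loop's children by inner.  A sketch of a
   generic traversal (visit_loop is a hypothetical callback;
   flow_loop_level_compute below is a real in-file instance of this
   pattern):

     static void
     walk_loop_tree (loop)
          struct loop *loop;
     {
       struct loop *inner;

       visit_loop (loop);
       for (inner = loop->inner; inner; inner = inner->next)
         walk_loop_tree (inner);
     }
*/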
/* Helper function to compute loop nesting depth and enclosed loop level
   for the natural loop specified by LOOP at the loop depth DEPTH.
   Returns the loop level.  */

static int
flow_loop_level_compute (loop, depth)
     struct loop *loop;
     int depth;
{
  struct loop *inner;
  int level = 1;

  if (! loop)
    return 0;

  /* Traverse loop tree assigning depth and computing level as the
     maximum level of all the inner loops of this loop.  The loop
     level is equivalent to the height of the loop in the loop tree
     and corresponds to the number of enclosed loop levels (including
     itself).  */
  for (inner = loop->inner; inner; inner = inner->next)
    {
      int ilevel;

      ilevel = flow_loop_level_compute (inner, depth + 1) + 1;
      if (ilevel > level)
        level = ilevel;
    }

  loop->level = level;
  loop->depth = depth;
  return level;
}
/* Compute the loop nesting depth and enclosed loop level for the loop
   hierarchy tree specified by LOOPS.  Return the maximum enclosed loop
   level.  */

static int
flow_loops_level_compute (loops)
     struct loops *loops;
{
  struct loop *loop;
  int level;
  int levels = 0;

  /* Traverse all the outer level loops.  */
  for (loop = loops->tree_root; loop; loop = loop->next)
    {
      level = flow_loop_level_compute (loop, 1);
      if (level > levels)
        levels = level;
    }

  return levels;
}
/* Scan a single natural loop specified by LOOP collecting information
   about it specified by FLAGS.  */

static int
flow_loop_scan (loops, loop, flags)
     struct loops *loops;
     struct loop *loop;
     int flags;
{
  /* Determine prerequisites.  */
  if ((flags & LOOP_EXITS_DOMS) && ! loop->exit_edges)
    flags |= LOOP_EXIT_EDGES;

  if (flags & LOOP_ENTRY_EDGES)
    /* Find edges which enter the loop header.  Note that the entry
       edges should only enter the header of a natural loop.  */
    loop->num_entries
      = flow_loop_entry_edges_find (loop->header,
                                    loop->nodes,
                                    &loop->entry_edges);

  if (flags & LOOP_EXIT_EDGES)
    /* Find edges which exit the loop.  */
    loop->num_exits
      = flow_loop_exit_edges_find (loop->nodes,
                                   &loop->exit_edges);

  if (flags & LOOP_EXITS_DOMS)
    {
      int j;

      /* Determine which loop nodes dominate all the exits
         of the loop.  */
      loop->exits_doms = sbitmap_alloc (n_basic_blocks);
      sbitmap_copy (loop->exits_doms, loop->nodes);
      for (j = 0; j < loop->num_exits; j++)
        sbitmap_a_and_b (loop->exits_doms, loop->exits_doms,
                         loops->cfg.dom[loop->exit_edges[j]->src->index]);

      /* The header of a natural loop must dominate
         all exits.  */
      if (! TEST_BIT (loop->exits_doms, loop->header->index))
        abort ();
    }

  if (flags & LOOP_PRE_HEADER)
    {
      /* Look to see if the loop has a pre-header node.  */
      loop->pre_header
        = flow_loop_pre_header_find (loop->header, loops->cfg.dom);

      /* Find the blocks within the extended basic block of
         the loop pre-header.  */
      flow_loop_pre_header_scan (loop);
    }

  return 1;
}
/* Find all the natural loops in the function and save in LOOPS structure
   and recalculate loop_depth information in basic block structures.
   FLAGS controls which loop information is collected.
   Return the number of natural loops found.  */

int
flow_loops_find (loops, flags)
     struct loops *loops;
     int flags;
{
  int i;
  int b;
  int num_loops;
  edge e;
  sbitmap headers;
  sbitmap *dom;
  int *dfs_order;
  int *rc_order;

  /* This function cannot be repeatedly called with different
     flags to build up the loop information.  The loop tree
     must always be built if this function is called.  */
  if (! (flags & LOOP_TREE))
    abort ();

  memset (loops, 0, sizeof (*loops));

  /* Taking care of this degenerate case makes the rest of
     this code simpler.  */
  if (n_basic_blocks == 0)
    return 0;

  dfs_order = NULL;
  rc_order = NULL;

  /* Compute the dominators.  */
  dom = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
  calculate_dominance_info (NULL, dom, CDI_DOMINATORS);

  /* Count the number of loop edges (back edges).  This should be the
     same as the number of natural loops.  */
  num_loops = 0;
  for (b = 0; b < n_basic_blocks; b++)
    {
      basic_block header;

      header = BASIC_BLOCK (b);
      header->loop_depth = 0;

      for (e = header->pred; e; e = e->pred_next)
        {
          basic_block latch = e->src;

          /* Look for back edges where a predecessor is dominated
             by this block.  A natural loop has a single entry
             node (header) that dominates all the nodes in the
             loop.  It also has a single back edge to the header
             from a latch node.  Note that multiple natural loops
             may share the same header.  */
          if (b != header->index)
            abort ();

          if (latch != ENTRY_BLOCK_PTR && TEST_BIT (dom[latch->index], b))
            num_loops++;
        }
    }

  if (num_loops)
    {
      /* Compute depth first search order of the CFG so that outer
         natural loops will be found before inner natural loops.  */
      dfs_order = (int *) xmalloc (n_basic_blocks * sizeof (int));
      rc_order = (int *) xmalloc (n_basic_blocks * sizeof (int));
      flow_depth_first_order_compute (dfs_order, rc_order);

      /* Save CFG derived information to avoid recomputing it.  */
      loops->cfg.dom = dom;
      loops->cfg.dfs_order = dfs_order;
      loops->cfg.rc_order = rc_order;

      /* Allocate loop structures.  */
      loops->array
        = (struct loop *) xcalloc (num_loops, sizeof (struct loop));

      headers = sbitmap_alloc (n_basic_blocks);
      sbitmap_zero (headers);

      loops->shared_headers = sbitmap_alloc (n_basic_blocks);
      sbitmap_zero (loops->shared_headers);

      /* Find and record information about all the natural loops
         in the CFG.  */
      num_loops = 0;
      for (b = 0; b < n_basic_blocks; b++)
        {
          basic_block header;

          /* Search the nodes of the CFG in reverse completion order
             so that we can find outer loops first.  */
          header = BASIC_BLOCK (rc_order[b]);

          /* Look for all the possible latch blocks for this header.  */
          for (e = header->pred; e; e = e->pred_next)
            {
              basic_block latch = e->src;

              /* Look for back edges where a predecessor is dominated
                 by this block; see the comment above.  */
              if (latch != ENTRY_BLOCK_PTR
                  && TEST_BIT (dom[latch->index], header->index))
                {
                  struct loop *loop;

                  loop = loops->array + num_loops;

                  loop->header = header;
                  loop->latch = latch;
                  loop->num = num_loops;

                  num_loops++;
                }
            }
        }

      for (i = 0; i < num_loops; i++)
        {
          struct loop *loop = &loops->array[i];

          /* Keep track of blocks that are loop headers so
             that we can tell which loops should be merged.  */
          if (TEST_BIT (headers, loop->header->index))
            SET_BIT (loops->shared_headers, loop->header->index);
          SET_BIT (headers, loop->header->index);

          /* Find nodes contained within the loop.  */
          loop->nodes = sbitmap_alloc (n_basic_blocks);
          loop->num_nodes
            = flow_loop_nodes_find (loop->header, loop->latch, loop->nodes);

          /* Compute first and last blocks within the loop.
             These are often the same as the loop header and
             loop latch respectively, but this is not always
             the case.  */
          loop->first
            = BASIC_BLOCK (sbitmap_first_set_bit (loop->nodes));
          loop->last
            = BASIC_BLOCK (sbitmap_last_set_bit (loop->nodes));

          flow_loop_scan (loops, loop, flags);
        }

      /* Natural loops with shared headers may either be disjoint or
         nested.  Disjoint loops with shared headers cannot be inner
         loops and should be merged.  For now just mark loops that share
         headers.  */
      for (i = 0; i < num_loops; i++)
        if (TEST_BIT (loops->shared_headers, loops->array[i].header->index))
          loops->array[i].shared = 1;

      sbitmap_free (headers);
    }
  else
    /* No loops were found, so the dominator information was not saved
       in loops->cfg and can be freed now.  */
    sbitmap_vector_free (dom);

  loops->num = num_loops;

  /* Build the loop hierarchy tree.  */
  flow_loops_tree_build (loops);

  /* Assign the loop nesting depth and enclosed loop level for each
     loop.  */
  loops->levels = flow_loops_level_compute (loops);

  return num_loops;
}
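/* Usage sketch (a hypothetical caller): LOOP_TREE is mandatory; the
   other flags request the optional per-loop data collected by
   flow_loop_scan.

     struct loops loops;

     if (flow_loops_find (&loops, LOOP_TREE | LOOP_ENTRY_EDGES
                          | LOOP_EXIT_EDGES | LOOP_PRE_HEADER))
       flow_loops_dump (&loops, rtl_dump_file, NULL, 1);
     flow_loops_free (&loops);
*/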
/* Update the information regarding the loops in the CFG
   specified by LOOPS.  */

int
flow_loops_update (loops, flags)
     struct loops *loops;
     int flags;
{
  /* One day we may want to update the current loop data.  For now
     throw away the old stuff and rebuild what we need.  */
  if (loops->array)
    flow_loops_free (loops);

  return flow_loops_find (loops, flags);
}
/* Return non-zero if edge E enters header of LOOP from outside of LOOP.  */

int
flow_loop_outside_edge_p (loop, e)
     const struct loop *loop;
     edge e;
{
  if (e->dest != loop->header)
    abort ();

  return (e->src == ENTRY_BLOCK_PTR)
    || ! TEST_BIT (loop->nodes, e->src->index);
}
/* Clear LOG_LINKS fields of insns in a chain.
   Also clear the global_live_at_{start,end} fields of the basic block
   structures.  */

static void
clear_log_links (insns)
     rtx insns;
{
  rtx i;
  int b;

  for (i = insns; i; i = NEXT_INSN (i))
    if (INSN_P (i))
      LOG_LINKS (i) = 0;

  for (b = 0; b < n_basic_blocks; b++)
    {
      basic_block bb = BASIC_BLOCK (b);

      bb->global_live_at_start = NULL;
      bb->global_live_at_end = NULL;
    }

  ENTRY_BLOCK_PTR->global_live_at_end = NULL;
  EXIT_BLOCK_PTR->global_live_at_start = NULL;
}
/* Given a register bitmap, turn on the bits in a HARD_REG_SET that
   correspond to the hard registers, if any, set in that map.  This
   could be done far more efficiently by having all sorts of special-cases
   with moving single words, but probably isn't worth the trouble.  */

void
reg_set_to_hard_reg_set (to, from)
     HARD_REG_SET *to;
     bitmap from;
{
  int i;

  EXECUTE_IF_SET_IN_BITMAP
    (from, 0, i,
     {
       /* The bitmap is traversed in increasing order, so we can stop
          as soon as we reach the first pseudo register.  */
       if (i >= FIRST_PSEUDO_REGISTER)
         return;
       SET_HARD_REG_BIT (*to, i);
     });
}
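/* A caller typically clears the destination set first, since this
   function only turns bits on (a sketch; live_regs stands in for any
   bitmap of register numbers):

     HARD_REG_SET hard_regs;

     CLEAR_HARD_REG_SET (hard_regs);
     reg_set_to_hard_reg_set (&hard_regs, live_regs);  */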
/* Called once at initialization time.  */

void
init_flow ()
{
  static int initialized;

  if (!initialized)
    {
      gcc_obstack_init (&flow_obstack);
      flow_firstobj = (char *) obstack_alloc (&flow_obstack, 0);
      initialized = 1;
    }
  else
    {
      obstack_free (&flow_obstack, flow_firstobj);
      flow_firstobj = (char *) obstack_alloc (&flow_obstack, 0);
    }
}
/* Assume that the preceding pass has possibly eliminated jump instructions
   or converted the unconditional jumps.  Eliminate the edges from CFG.
   Return true if any edges are eliminated.  */

bool
purge_dead_edges (bb)
     basic_block bb;
{
  edge e, next;
  rtx insn = bb->end;
  bool purged = false;

  if (GET_CODE (insn) == JUMP_INSN && !simplejump_p (insn))
    return purged;

  if (GET_CODE (insn) == JUMP_INSN)
    {
      rtx note;
      edge b, f;

      /* We care only about conditional jumps and simplejumps.  */
      if (!any_condjump_p (insn)
          && !returnjump_p (insn)
          && !simplejump_p (insn))
        return purged;

      for (e = bb->succ; e; e = next)
        {
          next = e->succ_next;

          /* Check the ways in which this edge can legitimately remain.  */
          if ((e->flags & EDGE_FALLTHRU)
              && any_condjump_p (insn))
            continue;
          if (e->dest != EXIT_BLOCK_PTR
              && e->dest->head == JUMP_LABEL (insn))
            continue;
          if (e->dest == EXIT_BLOCK_PTR
              && returnjump_p (insn))
            continue;

          remove_edge (e);
          purged = true;
        }

      if (!bb->succ || !purged)
        return purged;

      if (rtl_dump_file)
        fprintf (rtl_dump_file, "Purged edges from bb %i\n", bb->index);

      /* Redistribute probabilities.  */
      if (!bb->succ->succ_next)
        {
          bb->succ->probability = REG_BR_PROB_BASE;
          bb->succ->count = bb->count;
        }
      else
        {
          note = find_reg_note (insn, REG_BR_PROB, NULL);
          if (!note)
            return purged;

          b = BRANCH_EDGE (bb);
          f = FALLTHRU_EDGE (bb);
          b->probability = INTVAL (XEXP (note, 0));
          f->probability = REG_BR_PROB_BASE - b->probability;
          b->count = bb->count * b->probability / REG_BR_PROB_BASE;
          f->count = bb->count * f->probability / REG_BR_PROB_BASE;
        }

      return purged;
    }

  /* Clean up abnormal edges caused by throwing insns that have been
     eliminated.  */
  if (! can_throw_internal (bb->end))
    for (e = bb->succ; e; e = next)
      {
        next = e->succ_next;
        if (e->flags & EDGE_EH)
          {
            remove_edge (e);
            purged = true;
          }
      }

  /* If we don't see a jump insn, we don't know exactly why the block would
     have been broken at this point.  Look for a simple, non-fallthru edge,
     as these are only created by conditional branches.  If we find such an
     edge we know that there used to be a jump here and can then safely
     remove all non-fallthru edges.  */
  for (e = bb->succ; e && (e->flags & (EDGE_COMPLEX | EDGE_FALLTHRU));
       e = e->succ_next)
    continue;

  if (!e)
    return purged;

  for (e = bb->succ; e; e = next)
    {
      next = e->succ_next;
      if (!(e->flags & EDGE_FALLTHRU))
        remove_edge (e), purged = true;
    }

  if (!bb->succ || bb->succ->succ_next)
    abort ();

  bb->succ->probability = REG_BR_PROB_BASE;
  bb->succ->count = bb->count;

  if (rtl_dump_file)
    fprintf (rtl_dump_file, "Purged non-fallthru edges from bb %i\n",
             bb->index);

  return purged;
}
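/* To make the probability redistribution above concrete: REG_BR_PROB_BASE
   is 10000, so a REG_BR_PROB note of 6000 on the branch of a block with
   count 500 yields a branch edge with probability 6000 (60%) and count
   300, and a fallthru edge with probability 4000 (40%) and count 200.
   (The numbers are illustrative only.)  */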
/* Search all basic blocks for potentially dead edges and purge them.

   Return true if some edge has been eliminated.  */

bool
purge_all_dead_edges ()
{
  int i;
  bool purged = false;

  for (i = 0; i < n_basic_blocks; i++)
    purged |= purge_dead_edges (BASIC_BLOCK (i));

  return purged;
}