1 /* Data flow analysis for GNU compiler.
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001 Free Software Foundation, Inc.
5 This file is part of GNU CC.
7 GNU CC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
12 GNU CC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GNU CC; see the file COPYING. If not, write to
19 the Free Software Foundation, 59 Temple Place - Suite 330,
20 Boston, MA 02111-1307, USA. */
22 /* This file contains the data flow analysis pass of the compiler. It
23 computes data flow information which tells combine_instructions
24 which insns to consider combining and controls register allocation.
26 Additional data flow information that is too bulky to record is
27 generated during the analysis, and is used at that time to create
28 autoincrement and autodecrement addressing.
30 The first step is dividing the function into basic blocks.
31 find_basic_blocks does this. Then life_analysis determines
32 where each register is live and where it is dead.
34 ** find_basic_blocks **
36 find_basic_blocks divides the current function's rtl into basic
37 blocks and constructs the CFG. The blocks are recorded in the
38 basic_block_info array; the CFG exists in the edge structures
39 referenced by the blocks.
41 find_basic_blocks also finds any unreachable loops and deletes them.
45 life_analysis is called immediately after find_basic_blocks.
46 It uses the basic block information to determine where each
47 hard or pseudo register is live.
49 ** live-register info **
51 The information about where each register is live is in two parts:
52 the REG_NOTES of insns, and the vector basic_block->global_live_at_start.
54 basic_block->global_live_at_start has an element for each basic
55 block, and the element is a bit-vector with a bit for each hard or
56 pseudo register. The bit is 1 if the register is live at the
57 beginning of the basic block.
59 Two types of elements can be added to an insn's REG_NOTES.
60 A REG_DEAD note is added to an insn's REG_NOTES for any register
61 that meets both of two conditions: The value in the register is not
62 needed in subsequent insns and the insn does not replace the value in
63 the register (in the case of multi-word hard registers, the value in
64 each register must be replaced by the insn to avoid a REG_DEAD note).
66 In the vast majority of cases, an object in a REG_DEAD note will be
67 used somewhere in the insn. The (rare) exception to this is if an
68 insn uses a multi-word hard register and only some of the registers are
69 needed in subsequent insns. In that case, REG_DEAD notes will be
70 provided for those hard registers that are not subsequently needed.
71 Partial REG_DEAD notes of this type do not occur when an insn sets
72 only some of the hard registers used in such a multi-word operand;
73 omitting REG_DEAD notes for objects stored in an insn is optional and
74 the desire to do so does not justify the complexity of the partial
77 REG_UNUSED notes are added for each register that is set by the insn
78 but is unused subsequently (if every register set by the insn is unused
79 and the insn does not reference memory or have some other side-effect,
80 the insn is deleted instead). If only part of a multi-word hard
81 register is used in a subsequent insn, REG_UNUSED notes are made for
82 the parts that will not be used.
84 To determine which registers are live after any insn, one can
85 start from the beginning of the basic block and scan insns, noting
86 which registers are set by each insn and which die there.
88 ** Other actions of life_analysis **
90 life_analysis sets up the LOG_LINKS fields of insns because the
91 information needed to do so is readily available.
93 life_analysis deletes insns whose only effect is to store a value
96 life_analysis notices cases where a reference to a register as
97 a memory address can be combined with a preceding or following
98 incrementation or decrementation of the register. The separate
99 instruction to increment or decrement is deleted and the address
100 is changed to a POST_INC or similar rtx.
102 Each time an incrementing or decrementing address is created,
103 a REG_INC element is added to the insn's REG_NOTES list.
105 life_analysis fills in certain vectors containing information about
106 register usage: REG_N_REFS, REG_N_DEATHS, REG_N_SETS, REG_LIVE_LENGTH,
107 REG_N_CALLS_CROSSED and REG_BASIC_BLOCK.
109 life_analysis sets current_function_sp_is_unchanging if the function
110 doesn't modify the stack pointer. */
114 Split out from life_analysis:
115 - local property discovery (bb->local_live, bb->local_set)
116 - global property computation
118 - pre/post modify transformation
126 #include "hard-reg-set.h"
127 #include "basic-block.h"
128 #include "insn-config.h"
132 #include "function.h"
140 #include "splay-tree.h"
142 #define obstack_chunk_alloc xmalloc
143 #define obstack_chunk_free free
145 /* EXIT_IGNORE_STACK should be nonzero if, when returning from a function,
146 the stack pointer does not matter. The value is tested only in
147 functions that have frame pointers.
148 No definition is equivalent to always zero. */
149 #ifndef EXIT_IGNORE_STACK
150 #define EXIT_IGNORE_STACK 0
153 #ifndef HAVE_epilogue
154 #define HAVE_epilogue 0
156 #ifndef HAVE_prologue
157 #define HAVE_prologue 0
159 #ifndef HAVE_sibcall_epilogue
160 #define HAVE_sibcall_epilogue 0
164 #define LOCAL_REGNO(REGNO) 0
166 #ifndef EPILOGUE_USES
167 #define EPILOGUE_USES(REGNO) 0
170 #ifdef HAVE_conditional_execution
171 #ifndef REVERSE_CONDEXEC_PREDICATES_P
172 #define REVERSE_CONDEXEC_PREDICATES_P(x, y) ((x) == reverse_condition (y))
176 /* The obstack on which the flow graph components are allocated. */
178 struct obstack flow_obstack;
179 static char *flow_firstobj;
181 /* Number of basic blocks in the current function. */
185 /* Number of edges in the current function. */
189 /* The basic block array. */
191 varray_type basic_block_info;
193 /* The special entry and exit blocks. */
195 struct basic_block_def entry_exit_blocks[2]
200 NULL, /* local_set */
201 NULL, /* cond_local_set */
202 NULL, /* global_live_at_start */
203 NULL, /* global_live_at_end */
205 ENTRY_BLOCK, /* index */
215 NULL, /* local_set */
216 NULL, /* cond_local_set */
217 NULL, /* global_live_at_start */
218 NULL, /* global_live_at_end */
220 EXIT_BLOCK, /* index */
227 /* Nonzero if the second flow pass has completed. */
230 /* Maximum register number used in this function, plus one. */
234 /* Indexed by n, giving various register information */
236 varray_type reg_n_info;
238 /* Size of a regset for the current function,
239 in (1) bytes and (2) elements. */
244 /* Regset of regs live when calls to `setjmp'-like functions happen. */
245 /* ??? Does this exist only for the setjmp-clobbered warning message? */
247 regset regs_live_at_setjmp;
249 /* List made of EXPR_LIST rtx's which gives pairs of pseudo registers
250 that have to go in the same hard reg.
251 The first two regs in the list are a pair, and the next two
252 are another pair, etc. */
255 /* Callback that determines if it's ok for a function to have no
256 noreturn attribute. */
257 int (*lang_missing_noreturn_ok_p) PARAMS ((tree));
259 /* Set of registers that may be eliminable. These are handled specially
260 in updating regs_ever_live. */
262 static HARD_REG_SET elim_reg_set;
264 /* The basic block structure for every insn, indexed by uid. */
266 varray_type basic_block_for_insn;
268 /* The labels mentioned in non-jump rtl. Valid during find_basic_blocks. */
269 /* ??? Should probably be using LABEL_NUSES instead. It would take a
270 bit of surgery to be able to use or co-opt the routines in jump. */
272 static rtx label_value_list;
273 static rtx tail_recursion_label_list;
275 /* Holds information for tracking conditional register life information. */
276 struct reg_cond_life_info
278 /* A boolean expression of conditions under which a register is dead. */
280 /* Conditions under which a register is dead at the basic block end. */
283 /* A boolean expression of conditions under which a register has been
287 /* ??? Could store mask of bytes that are dead, so that we could finally
288 track lifetimes of multi-word registers accessed via subregs. */
291 /* For use in communicating between propagate_block and its subroutines.
292 Holds all information needed to compute life and def-use information. */
294 struct propagate_block_info
296 /* The basic block we're considering. */
299 /* Bit N is set if register N is conditionally or unconditionally live. */
302 /* Bit N is set if register N is set this insn. */
305 /* Element N is the next insn that uses (hard or pseudo) register N
306 within the current basic block; or zero, if there is no such insn. */
309 /* Contains a list of all the MEMs we are tracking for dead store
313 /* If non-null, record the set of registers set unconditionally in the
317 /* If non-null, record the set of registers set conditionally in the
319 regset cond_local_set;
321 #ifdef HAVE_conditional_execution
322 /* Indexed by register number, holds a reg_cond_life_info for each
323 register that is not unconditionally live or dead. */
324 splay_tree reg_cond_dead;
326 /* Bit N is set if register N is in an expression in reg_cond_dead. */
330 /* The length of mem_set_list. */
331 int mem_set_list_len;
333 /* Non-zero if the value of CC0 is live. */
336 /* Flags controling the set of information propagate_block collects. */
340 /* Maximum length of pbi->mem_set_list before we start dropping
341 new elements on the floor. */
342 #define MAX_MEM_SET_LIST_LEN 100
344 /* Store the data structures necessary for depth-first search. */
345 struct depth_first_search_dsS {
346 /* stack for backtracking during the algorithm */
349 /* number of edges in the stack. That is, positions 0, ..., sp-1
353 /* record of basic blocks already seen by depth-first search */
354 sbitmap visited_blocks;
356 typedef struct depth_first_search_dsS *depth_first_search_ds;
358 /* Have print_rtl_and_abort give the same information that fancy_abort
360 #define print_rtl_and_abort() \
361 print_rtl_and_abort_fcn (__FILE__, __LINE__, __FUNCTION__)
363 /* Forward declarations */
364 static int count_basic_blocks PARAMS ((rtx));
365 static void find_basic_blocks_1 PARAMS ((rtx));
366 static rtx find_label_refs PARAMS ((rtx, rtx));
367 static void make_edges PARAMS ((rtx));
368 static void make_label_edge PARAMS ((sbitmap *, basic_block,
370 static void make_eh_edge PARAMS ((sbitmap *, basic_block, rtx));
372 static void commit_one_edge_insertion PARAMS ((edge));
374 static void delete_unreachable_blocks PARAMS ((void));
375 static int can_delete_note_p PARAMS ((rtx));
376 static void expunge_block PARAMS ((basic_block));
377 static int can_delete_label_p PARAMS ((rtx));
378 static int tail_recursion_label_p PARAMS ((rtx));
379 static int merge_blocks_move_predecessor_nojumps PARAMS ((basic_block,
381 static int merge_blocks_move_successor_nojumps PARAMS ((basic_block,
383 static int merge_blocks PARAMS ((edge,basic_block,basic_block));
384 static bool try_optimize_cfg PARAMS ((void));
385 static bool forwarder_block_p PARAMS ((basic_block));
386 static bool can_fallthru PARAMS ((basic_block, basic_block));
387 static bool try_redirect_by_replacing_jump PARAMS ((edge, basic_block));
388 static bool try_simplify_condjump PARAMS ((basic_block));
389 static bool try_forward_edges PARAMS ((basic_block));
390 static void tidy_fallthru_edges PARAMS ((void));
391 static int verify_wide_reg_1 PARAMS ((rtx *, void *));
392 static void verify_wide_reg PARAMS ((int, rtx, rtx));
393 static void verify_local_live_at_start PARAMS ((regset, basic_block));
394 static int noop_move_p PARAMS ((rtx));
395 static void delete_noop_moves PARAMS ((rtx));
396 static void notice_stack_pointer_modification_1 PARAMS ((rtx, rtx, void *));
397 static void notice_stack_pointer_modification PARAMS ((rtx));
398 static void mark_reg PARAMS ((rtx, void *));
399 static void mark_regs_live_at_end PARAMS ((regset));
400 static int set_phi_alternative_reg PARAMS ((rtx, int, int, void *));
401 static void calculate_global_regs_live PARAMS ((sbitmap, sbitmap, int));
402 static void propagate_block_delete_insn PARAMS ((basic_block, rtx));
403 static rtx propagate_block_delete_libcall PARAMS ((basic_block, rtx, rtx));
404 static int insn_dead_p PARAMS ((struct propagate_block_info *,
406 static int libcall_dead_p PARAMS ((struct propagate_block_info *,
408 static void mark_set_regs PARAMS ((struct propagate_block_info *,
410 static void mark_set_1 PARAMS ((struct propagate_block_info *,
411 enum rtx_code, rtx, rtx,
413 #ifdef HAVE_conditional_execution
414 static int mark_regno_cond_dead PARAMS ((struct propagate_block_info *,
416 static void free_reg_cond_life_info PARAMS ((splay_tree_value));
417 static int flush_reg_cond_reg_1 PARAMS ((splay_tree_node, void *));
418 static void flush_reg_cond_reg PARAMS ((struct propagate_block_info *,
420 static rtx elim_reg_cond PARAMS ((rtx, unsigned int));
421 static rtx ior_reg_cond PARAMS ((rtx, rtx, int));
422 static rtx not_reg_cond PARAMS ((rtx));
423 static rtx and_reg_cond PARAMS ((rtx, rtx, int));
426 static void attempt_auto_inc PARAMS ((struct propagate_block_info *,
427 rtx, rtx, rtx, rtx, rtx));
428 static void find_auto_inc PARAMS ((struct propagate_block_info *,
430 static int try_pre_increment_1 PARAMS ((struct propagate_block_info *,
432 static int try_pre_increment PARAMS ((rtx, rtx, HOST_WIDE_INT));
434 static void mark_used_reg PARAMS ((struct propagate_block_info *,
436 static void mark_used_regs PARAMS ((struct propagate_block_info *,
438 void dump_flow_info PARAMS ((FILE *));
439 void debug_flow_info PARAMS ((void));
440 static void print_rtl_and_abort_fcn PARAMS ((const char *, int,
444 static void invalidate_mems_from_autoinc PARAMS ((struct propagate_block_info *,
446 static void invalidate_mems_from_set PARAMS ((struct propagate_block_info *,
448 static void remove_fake_successors PARAMS ((basic_block));
449 static void flow_nodes_print PARAMS ((const char *, const sbitmap,
451 static void flow_edge_list_print PARAMS ((const char *, const edge *,
453 static void flow_loops_cfg_dump PARAMS ((const struct loops *,
455 static int flow_loop_nested_p PARAMS ((struct loop *,
457 static int flow_loop_entry_edges_find PARAMS ((basic_block, const sbitmap,
459 static int flow_loop_exit_edges_find PARAMS ((const sbitmap, edge **));
460 static int flow_loop_nodes_find PARAMS ((basic_block, basic_block, sbitmap));
461 static void flow_dfs_compute_reverse_init
462 PARAMS ((depth_first_search_ds));
463 static void flow_dfs_compute_reverse_add_bb
464 PARAMS ((depth_first_search_ds, basic_block));
465 static basic_block flow_dfs_compute_reverse_execute
466 PARAMS ((depth_first_search_ds));
467 static void flow_dfs_compute_reverse_finish
468 PARAMS ((depth_first_search_ds));
469 static void flow_loop_pre_header_scan PARAMS ((struct loop *));
470 static basic_block flow_loop_pre_header_find PARAMS ((basic_block,
472 static void flow_loop_tree_node_add PARAMS ((struct loop *, struct loop *));
473 static void flow_loops_tree_build PARAMS ((struct loops *));
474 static int flow_loop_level_compute PARAMS ((struct loop *, int));
475 static int flow_loops_level_compute PARAMS ((struct loops *));
476 static void allocate_bb_life_data PARAMS ((void));
477 static void find_sub_basic_blocks PARAMS ((basic_block));
478 static bool redirect_edge_and_branch PARAMS ((edge, basic_block));
479 static rtx block_label PARAMS ((basic_block));
481 /* Find basic blocks of the current function.
482 F is the first insn of the function and NREGS the number of register
486 find_basic_blocks (f, nregs, file)
488 int nregs ATTRIBUTE_UNUSED;
489 FILE *file ATTRIBUTE_UNUSED;
493 /* Flush out existing data. */
494 if (basic_block_info != NULL)
500 /* Clear bb->aux on all extant basic blocks. We'll use this as a
501 tag for reuse during create_basic_block, just in case some pass
502 copies around basic block notes improperly. */
503 for (i = 0; i < n_basic_blocks; ++i)
504 BASIC_BLOCK (i)->aux = NULL;
506 VARRAY_FREE (basic_block_info);
509 n_basic_blocks = count_basic_blocks (f);
511 /* Size the basic block table. The actual structures will be allocated
512 by find_basic_blocks_1, since we want to keep the structure pointers
513 stable across calls to find_basic_blocks. */
514 /* ??? This whole issue would be much simpler if we called find_basic_blocks
515 exactly once, and thereafter we don't have a single long chain of
516 instructions at all until close to the end of compilation when we
517 actually lay them out. */
519 VARRAY_BB_INIT (basic_block_info, n_basic_blocks, "basic_block_info");
521 find_basic_blocks_1 (f);
523 /* Record the block to which an insn belongs. */
524 /* ??? This should be done another way, by which (perhaps) a label is
525 tagged directly with the basic block that it starts. It is used for
526 more than that currently, but IMO that is the only valid use. */
528 max_uid = get_max_uid ();
530 /* Leave space for insns life_analysis makes in some cases for auto-inc.
531 These cases are rare, so we don't need too much space. */
532 max_uid += max_uid / 10;
535 compute_bb_for_insn (max_uid);
537 /* Discover the edges of our cfg. */
538 make_edges (label_value_list);
540 /* Do very simple cleanup now, for the benefit of code that runs between
541 here and cleanup_cfg, e.g. thread_prologue_and_epilogue_insns. */
542 tidy_fallthru_edges ();
544 mark_critical_edges ();
546 #ifdef ENABLE_CHECKING
552 check_function_return_warnings ()
554 if (warn_missing_noreturn
555 && !TREE_THIS_VOLATILE (cfun->decl)
556 && EXIT_BLOCK_PTR->pred == NULL
557 && (lang_missing_noreturn_ok_p
558 && !lang_missing_noreturn_ok_p (cfun->decl)))
559 warning ("function might be possible candidate for attribute `noreturn'");
561 /* If we have a path to EXIT, then we do return. */
562 if (TREE_THIS_VOLATILE (cfun->decl)
563 && EXIT_BLOCK_PTR->pred != NULL)
564 warning ("`noreturn' function does return");
566 /* If the clobber_return_insn appears in some basic block, then we
567 do reach the end without returning a value. */
568 else if (warn_return_type
569 && cfun->x_clobber_return_insn != NULL
570 && EXIT_BLOCK_PTR->pred != NULL)
572 int max_uid = get_max_uid ();
574 /* If clobber_return_insn was excised by jump1, then renumber_insns
575 can make max_uid smaller than the number still recorded in our rtx.
576 That's fine, since this is a quick way of verifying that the insn
577 is no longer in the chain. */
578 if (INSN_UID (cfun->x_clobber_return_insn) < max_uid)
580 /* Recompute insn->block mapping, since the initial mapping is
581 set before we delete unreachable blocks. */
582 compute_bb_for_insn (max_uid);
584 if (BLOCK_FOR_INSN (cfun->x_clobber_return_insn) != NULL)
585 warning ("control reaches end of non-void function");
590 /* Count the basic blocks of the function. */
593 count_basic_blocks (f)
597 register RTX_CODE prev_code;
598 register int count = 0;
599 int saw_abnormal_edge = 0;
601 prev_code = JUMP_INSN;
602 for (insn = f; insn; insn = NEXT_INSN (insn))
604 enum rtx_code code = GET_CODE (insn);
606 if (code == CODE_LABEL
607 || (GET_RTX_CLASS (code) == 'i'
608 && (prev_code == JUMP_INSN
609 || prev_code == BARRIER
610 || saw_abnormal_edge)))
612 saw_abnormal_edge = 0;
616 /* Record whether this insn created an edge. */
617 if (code == CALL_INSN)
621 /* If there is a nonlocal goto label and the specified
622 region number isn't -1, we have an edge. */
623 if (nonlocal_goto_handler_labels
624 && ((note = find_reg_note (insn, REG_EH_REGION, NULL_RTX)) == 0
625 || INTVAL (XEXP (note, 0)) >= 0))
626 saw_abnormal_edge = 1;
628 else if (can_throw_internal (insn))
629 saw_abnormal_edge = 1;
631 else if (flag_non_call_exceptions
633 && can_throw_internal (insn))
634 saw_abnormal_edge = 1;
640 /* The rest of the compiler works a bit smoother when we don't have to
641 check for the edge case of do-nothing functions with no basic blocks. */
644 emit_insn (gen_rtx_USE (VOIDmode, const0_rtx));
651 /* Scan a list of insns for labels referred to other than by jumps.
652 This is used to scan the alternatives of a call placeholder. */
654 find_label_refs (f, lvl)
660 for (insn = f; insn; insn = NEXT_INSN (insn))
661 if (INSN_P (insn) && GET_CODE (insn) != JUMP_INSN)
665 /* Make a list of all labels referred to other than by jumps
666 (which just don't have the REG_LABEL notes).
668 Make a special exception for labels followed by an ADDR*VEC,
669 as this would be a part of the tablejump setup code.
671 Make a special exception to registers loaded with label
672 values just before jump insns that use them. */
674 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
675 if (REG_NOTE_KIND (note) == REG_LABEL)
677 rtx lab = XEXP (note, 0), next;
679 if ((next = next_nonnote_insn (lab)) != NULL
680 && GET_CODE (next) == JUMP_INSN
681 && (GET_CODE (PATTERN (next)) == ADDR_VEC
682 || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
684 else if (GET_CODE (lab) == NOTE)
686 else if (GET_CODE (NEXT_INSN (insn)) == JUMP_INSN
687 && find_reg_note (NEXT_INSN (insn), REG_LABEL, lab))
690 lvl = alloc_EXPR_LIST (0, XEXP (note, 0), lvl);
697 /* Assume that someone emitted code with control flow instructions to the
698 basic block. Update the data structure. */
700 find_sub_basic_blocks (bb)
703 rtx first_insn = bb->head, insn;
705 edge succ_list = bb->succ;
706 rtx jump_insn = NULL_RTX;
710 basic_block first_bb = bb, last_bb;
713 if (GET_CODE (first_insn) == LABEL_REF)
714 first_insn = NEXT_INSN (first_insn);
715 first_insn = NEXT_INSN (first_insn);
719 /* Scan insn chain and try to find new basic block boundaries. */
722 enum rtx_code code = GET_CODE (insn);
726 /* We need some special care for those expressions. */
727 if (GET_CODE (PATTERN (insn)) == ADDR_VEC
728 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
737 /* On code label, split current basic block. */
739 falltru = split_block (bb, PREV_INSN (insn));
744 remove_edge (falltru);
748 if (LABEL_ALTERNATE_NAME (insn))
749 make_edge (NULL, ENTRY_BLOCK_PTR, bb, 0);
752 /* In case we've previously split insn on the JUMP_INSN, move the
753 block header to proper place. */
756 falltru = split_block (bb, PREV_INSN (insn));
766 insn = NEXT_INSN (insn);
768 /* Last basic block must end in the original BB end. */
772 /* Wire in the original edges for last basic block. */
775 bb->succ = succ_list;
777 succ_list->src = bb, succ_list = succ_list->succ_next;
780 bb->succ = succ_list;
782 /* Now re-scan and wire in all edges. This expect simple (conditional)
783 jumps at the end of each new basic blocks. */
785 for (i = first_bb->index; i < last_bb->index; i++)
787 bb = BASIC_BLOCK (i);
788 if (GET_CODE (bb->end) == JUMP_INSN)
790 mark_jump_label (PATTERN (bb->end), bb->end, 0, 0);
791 make_label_edge (NULL, bb, JUMP_LABEL (bb->end), 0);
793 insn = NEXT_INSN (insn);
797 /* Find all basic blocks of the function whose first insn is F.
799 Collect and return a list of labels whose addresses are taken. This
800 will be used in make_edges for use with computed gotos. */
803 find_basic_blocks_1 (f)
806 register rtx insn, next;
808 rtx bb_note = NULL_RTX;
814 /* We process the instructions in a slightly different way than we did
815 previously. This is so that we see a NOTE_BASIC_BLOCK after we have
816 closed out the previous block, so that it gets attached at the proper
817 place. Since this form should be equivalent to the previous,
818 count_basic_blocks continues to use the old form as a check. */
820 for (insn = f; insn; insn = next)
822 enum rtx_code code = GET_CODE (insn);
824 next = NEXT_INSN (insn);
830 int kind = NOTE_LINE_NUMBER (insn);
832 /* Look for basic block notes with which to keep the
833 basic_block_info pointers stable. Unthread the note now;
834 we'll put it back at the right place in create_basic_block.
835 Or not at all if we've already found a note in this block. */
836 if (kind == NOTE_INSN_BASIC_BLOCK)
838 if (bb_note == NULL_RTX)
841 next = flow_delete_insn (insn);
847 /* A basic block starts at a label. If we've closed one off due
848 to a barrier or some such, no need to do it again. */
849 if (head != NULL_RTX)
851 /* While we now have edge lists with which other portions of
852 the compiler might determine a call ending a basic block
853 does not imply an abnormal edge, it will be a bit before
854 everything can be updated. So continue to emit a noop at
855 the end of such a block. */
856 if (GET_CODE (end) == CALL_INSN && ! SIBLING_CALL_P (end))
858 rtx nop = gen_rtx_USE (VOIDmode, const0_rtx);
859 end = emit_insn_after (nop, end);
862 create_basic_block (i++, head, end, bb_note);
870 /* A basic block ends at a jump. */
871 if (head == NULL_RTX)
875 /* ??? Make a special check for table jumps. The way this
876 happens is truly and amazingly gross. We are about to
877 create a basic block that contains just a code label and
878 an addr*vec jump insn. Worse, an addr_diff_vec creates
879 its own natural loop.
881 Prevent this bit of brain damage, pasting things together
882 correctly in make_edges.
884 The correct solution involves emitting the table directly
885 on the tablejump instruction as a note, or JUMP_LABEL. */
887 if (GET_CODE (PATTERN (insn)) == ADDR_VEC
888 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
896 goto new_bb_inclusive;
899 /* A basic block ends at a barrier. It may be that an unconditional
900 jump already closed the basic block -- no need to do it again. */
901 if (head == NULL_RTX)
904 /* While we now have edge lists with which other portions of the
905 compiler might determine a call ending a basic block does not
906 imply an abnormal edge, it will be a bit before everything can
907 be updated. So continue to emit a noop at the end of such a
909 if (GET_CODE (end) == CALL_INSN && ! SIBLING_CALL_P (end))
911 rtx nop = gen_rtx_USE (VOIDmode, const0_rtx);
912 end = emit_insn_after (nop, end);
914 goto new_bb_exclusive;
918 /* Record whether this call created an edge. */
919 rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
920 int region = (note ? INTVAL (XEXP (note, 0)) : 0);
922 if (GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
924 /* Scan each of the alternatives for label refs. */
925 lvl = find_label_refs (XEXP (PATTERN (insn), 0), lvl);
926 lvl = find_label_refs (XEXP (PATTERN (insn), 1), lvl);
927 lvl = find_label_refs (XEXP (PATTERN (insn), 2), lvl);
928 /* Record its tail recursion label, if any. */
929 if (XEXP (PATTERN (insn), 3) != NULL_RTX)
930 trll = alloc_EXPR_LIST (0, XEXP (PATTERN (insn), 3), trll);
933 /* A basic block ends at a call that can either throw or
934 do a non-local goto. */
935 if ((nonlocal_goto_handler_labels && region >= 0)
936 || can_throw_internal (insn))
939 if (head == NULL_RTX)
944 create_basic_block (i++, head, end, bb_note);
945 head = end = NULL_RTX;
953 /* Non-call exceptions generate new blocks just like calls. */
954 if (flag_non_call_exceptions && can_throw_internal (insn))
955 goto new_bb_inclusive;
957 if (head == NULL_RTX)
966 if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN)
970 /* Make a list of all labels referred to other than by jumps.
972 Make a special exception for labels followed by an ADDR*VEC,
973 as this would be a part of the tablejump setup code.
975 Make a special exception to registers loaded with label
976 values just before jump insns that use them. */
978 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
979 if (REG_NOTE_KIND (note) == REG_LABEL)
981 rtx lab = XEXP (note, 0), next;
983 if ((next = next_nonnote_insn (lab)) != NULL
984 && GET_CODE (next) == JUMP_INSN
985 && (GET_CODE (PATTERN (next)) == ADDR_VEC
986 || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
988 else if (GET_CODE (lab) == NOTE)
990 else if (GET_CODE (NEXT_INSN (insn)) == JUMP_INSN
991 && find_reg_note (NEXT_INSN (insn), REG_LABEL, lab))
994 lvl = alloc_EXPR_LIST (0, XEXP (note, 0), lvl);
999 if (head != NULL_RTX)
1000 create_basic_block (i++, head, end, bb_note);
1002 flow_delete_insn (bb_note);
1004 if (i != n_basic_blocks)
1007 label_value_list = lvl;
1008 tail_recursion_label_list = trll;
1011 /* Tidy the CFG by deleting unreachable code and whatnot. */
1016 delete_unreachable_blocks ();
1017 if (try_optimize_cfg ())
1018 delete_unreachable_blocks ();
1019 mark_critical_edges ();
1021 /* Kill the data we won't maintain. */
1022 free_EXPR_LIST_list (&label_value_list);
1023 free_EXPR_LIST_list (&tail_recursion_label_list);
1026 /* Create a new basic block consisting of the instructions between
1027 HEAD and END inclusive. Reuses the note and basic block struct
1028 in BB_NOTE, if any. */
1031 create_basic_block (index, head, end, bb_note)
1033 rtx head, end, bb_note;
1038 && ! RTX_INTEGRATED_P (bb_note)
1039 && (bb = NOTE_BASIC_BLOCK (bb_note)) != NULL
1042 /* If we found an existing note, thread it back onto the chain. */
1046 if (GET_CODE (head) == CODE_LABEL)
1050 after = PREV_INSN (head);
1054 if (after != bb_note && NEXT_INSN (after) != bb_note)
1055 reorder_insns (bb_note, bb_note, after);
1059 /* Otherwise we must create a note and a basic block structure.
1060 Since we allow basic block structs in rtl, give the struct
1061 the same lifetime by allocating it off the function obstack
1062 rather than using malloc. */
1064 bb = (basic_block) obstack_alloc (&flow_obstack, sizeof (*bb));
1065 memset (bb, 0, sizeof (*bb));
1067 if (GET_CODE (head) == CODE_LABEL)
1068 bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK, head);
1071 bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK, head);
1074 NOTE_BASIC_BLOCK (bb_note) = bb;
1077 /* Always include the bb note in the block. */
1078 if (NEXT_INSN (end) == bb_note)
1084 BASIC_BLOCK (index) = bb;
1086 /* Tag the block so that we know it has been used when considering
1087 other basic block notes. */
1091 /* Records the basic block struct in BB_FOR_INSN, for every instruction
1092 indexed by INSN_UID. MAX is the size of the array. */
1095 compute_bb_for_insn (max)
1100 if (basic_block_for_insn)
1101 VARRAY_FREE (basic_block_for_insn);
1102 VARRAY_BB_INIT (basic_block_for_insn, max, "basic_block_for_insn");
1104 for (i = 0; i < n_basic_blocks; ++i)
1106 basic_block bb = BASIC_BLOCK (i);
1113 int uid = INSN_UID (insn);
1115 VARRAY_BB (basic_block_for_insn, uid) = bb;
1118 insn = NEXT_INSN (insn);
1123 /* Free the memory associated with the edge structures. */
1131 for (i = 0; i < n_basic_blocks; ++i)
1133 basic_block bb = BASIC_BLOCK (i);
1135 for (e = bb->succ; e; e = n)
1145 for (e = ENTRY_BLOCK_PTR->succ; e; e = n)
1151 ENTRY_BLOCK_PTR->succ = 0;
1152 EXIT_BLOCK_PTR->pred = 0;
1157 /* Identify the edges between basic blocks.
1159 NONLOCAL_LABEL_LIST is a list of non-local labels in the function. Blocks
1160 that are otherwise unreachable may be reachable with a non-local goto.
1162 BB_EH_END is an array indexed by basic block number in which we record
1163 the list of exception regions active at the end of the basic block. */
1166 make_edges (label_value_list)
1167 rtx label_value_list;
1170 sbitmap *edge_cache = NULL;
1172 /* Assume no computed jump; revise as we create edges. */
1173 current_function_has_computed_jump = 0;
1175 /* Heavy use of computed goto in machine-generated code can lead to
1176 nearly fully-connected CFGs. In that case we spend a significant
1177 amount of time searching the edge lists for duplicates. */
1178 if (forced_labels || label_value_list)
1180 edge_cache = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
1181 sbitmap_vector_zero (edge_cache, n_basic_blocks);
1184 /* By nature of the way these get numbered, block 0 is always the entry. */
1185 make_edge (edge_cache, ENTRY_BLOCK_PTR, BASIC_BLOCK (0), EDGE_FALLTHRU);
1187 for (i = 0; i < n_basic_blocks; ++i)
1189 basic_block bb = BASIC_BLOCK (i);
1192 int force_fallthru = 0;
1194 if (GET_CODE (bb->head) == CODE_LABEL
1195 && LABEL_ALTERNATE_NAME (bb->head))
1196 make_edge (NULL, ENTRY_BLOCK_PTR, bb, 0);
1198 /* Examine the last instruction of the block, and discover the
1199 ways we can leave the block. */
1202 code = GET_CODE (insn);
1205 if (code == JUMP_INSN)
1209 /* Recognize exception handling placeholders. */
1210 if (GET_CODE (PATTERN (insn)) == RESX)
1211 make_eh_edge (edge_cache, bb, insn);
1213 /* Recognize a non-local goto as a branch outside the
1214 current function. */
1215 else if (find_reg_note (insn, REG_NON_LOCAL_GOTO, NULL_RTX))
1218 /* ??? Recognize a tablejump and do the right thing. */
1219 else if ((tmp = JUMP_LABEL (insn)) != NULL_RTX
1220 && (tmp = NEXT_INSN (tmp)) != NULL_RTX
1221 && GET_CODE (tmp) == JUMP_INSN
1222 && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
1223 || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
1228 if (GET_CODE (PATTERN (tmp)) == ADDR_VEC)
1229 vec = XVEC (PATTERN (tmp), 0);
1231 vec = XVEC (PATTERN (tmp), 1);
1233 for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
1234 make_label_edge (edge_cache, bb,
1235 XEXP (RTVEC_ELT (vec, j), 0), 0);
1237 /* Some targets (eg, ARM) emit a conditional jump that also
1238 contains the out-of-range target. Scan for these and
1239 add an edge if necessary. */
1240 if ((tmp = single_set (insn)) != NULL
1241 && SET_DEST (tmp) == pc_rtx
1242 && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
1243 && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF)
1244 make_label_edge (edge_cache, bb,
1245 XEXP (XEXP (SET_SRC (tmp), 2), 0), 0);
1247 #ifdef CASE_DROPS_THROUGH
1248 /* Silly VAXen. The ADDR_VEC is going to be in the way of
1249 us naturally detecting fallthru into the next block. */
1254 /* If this is a computed jump, then mark it as reaching
1255 everything on the label_value_list and forced_labels list. */
1256 else if (computed_jump_p (insn))
1258 current_function_has_computed_jump = 1;
1260 for (x = label_value_list; x; x = XEXP (x, 1))
1261 make_label_edge (edge_cache, bb, XEXP (x, 0), EDGE_ABNORMAL);
1263 for (x = forced_labels; x; x = XEXP (x, 1))
1264 make_label_edge (edge_cache, bb, XEXP (x, 0), EDGE_ABNORMAL);
1267 /* Returns create an exit out. */
1268 else if (returnjump_p (insn))
1269 make_edge (edge_cache, bb, EXIT_BLOCK_PTR, 0);
1271 /* Otherwise, we have a plain conditional or unconditional jump. */
1274 if (! JUMP_LABEL (insn))
1276 make_label_edge (edge_cache, bb, JUMP_LABEL (insn), 0);
1280 /* If this is a sibling call insn, then this is in effect a
1281 combined call and return, and so we need an edge to the
1282 exit block. No need to worry about EH edges, since we
1283 wouldn't have created the sibling call in the first place. */
1285 if (code == CALL_INSN && SIBLING_CALL_P (insn))
1286 make_edge (edge_cache, bb, EXIT_BLOCK_PTR,
1287 EDGE_ABNORMAL | EDGE_ABNORMAL_CALL);
1289 /* If this is a CALL_INSN, then mark it as reaching the active EH
1290 handler for this CALL_INSN. If we're handling non-call
1291 exceptions then any insn can reach any of the active handlers.
1293 Also mark the CALL_INSN as reaching any nonlocal goto handler. */
1295 else if (code == CALL_INSN || flag_non_call_exceptions)
1297 /* Add any appropriate EH edges. */
1298 make_eh_edge (edge_cache, bb, insn);
1300 if (code == CALL_INSN && nonlocal_goto_handler_labels)
1302 /* ??? This could be made smarter: in some cases it's possible
1303 to tell that certain calls will not do a nonlocal goto.
1305 For example, if the nested functions that do the nonlocal
1306 gotos do not have their addresses taken, then only calls to
1307 those functions or to other nested functions that use them
1308 could possibly do nonlocal gotos. */
1309 /* We do know that a REG_EH_REGION note with a value less
1310 than 0 is guaranteed not to perform a non-local goto. */
1311 rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
1312 if (!note || INTVAL (XEXP (note, 0)) >= 0)
1313 for (x = nonlocal_goto_handler_labels; x; x = XEXP (x, 1))
1314 make_label_edge (edge_cache, bb, XEXP (x, 0),
1315 EDGE_ABNORMAL | EDGE_ABNORMAL_CALL);
1319 /* Find out if we can drop through to the next block. */
1320 insn = next_nonnote_insn (insn);
1321 if (!insn || (i + 1 == n_basic_blocks && force_fallthru))
1322 make_edge (edge_cache, bb, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
1323 else if (i + 1 < n_basic_blocks)
1325 rtx tmp = BLOCK_HEAD (i + 1);
1326 if (GET_CODE (tmp) == NOTE)
1327 tmp = next_nonnote_insn (tmp);
1328 if (force_fallthru || insn == tmp)
1329 make_edge (edge_cache, bb, BASIC_BLOCK (i + 1), EDGE_FALLTHRU);
1334 sbitmap_vector_free (edge_cache);
1337 /* Create an edge between two basic blocks. FLAGS are auxiliary information
1338 about the edge that is accumulated between calls. */
1341 make_edge (edge_cache, src, dst, flags)
1342 sbitmap *edge_cache;
1343 basic_block src, dst;
1349 /* Don't bother with edge cache for ENTRY or EXIT; there aren't that
1350 many edges to them, and we didn't allocate memory for it. */
1351 use_edge_cache = (edge_cache
1352 && src != ENTRY_BLOCK_PTR
1353 && dst != EXIT_BLOCK_PTR);
1355 /* Make sure we don't add duplicate edges. */
1356 switch (use_edge_cache)
1359 /* Quick test for non-existance of the edge. */
1360 if (! TEST_BIT (edge_cache[src->index], dst->index))
1363 /* The edge exists; early exit if no work to do. */
1369 for (e = src->succ; e; e = e->succ_next)
1378 e = (edge) xcalloc (1, sizeof (*e));
1381 e->succ_next = src->succ;
1382 e->pred_next = dst->pred;
1391 SET_BIT (edge_cache[src->index], dst->index);
1394 /* Create an edge from a basic block to a label. */
1397 make_label_edge (edge_cache, src, label, flags)
1398 sbitmap *edge_cache;
1403 if (GET_CODE (label) != CODE_LABEL)
1406 /* If the label was never emitted, this insn is junk, but avoid a
1407 crash trying to refer to BLOCK_FOR_INSN (label). This can happen
1408 as a result of a syntax error and a diagnostic has already been
1411 if (INSN_UID (label) == 0)
1414 make_edge (edge_cache, src, BLOCK_FOR_INSN (label), flags);
1417 /* Create the edges generated by INSN in REGION. */
1420 make_eh_edge (edge_cache, src, insn)
1421 sbitmap *edge_cache;
1425 int is_call = (GET_CODE (insn) == CALL_INSN ? EDGE_ABNORMAL_CALL : 0);
1428 handlers = reachable_handlers (insn);
1430 for (i = handlers; i; i = XEXP (i, 1))
1431 make_label_edge (edge_cache, src, XEXP (i, 0),
1432 EDGE_ABNORMAL | EDGE_EH | is_call);
1434 free_INSN_LIST_list (&handlers);
1437 /* Identify critical edges and set the bits appropriately. */
1440 mark_critical_edges ()
1442 int i, n = n_basic_blocks;
1445 /* We begin with the entry block. This is not terribly important now,
1446 but could be if a front end (Fortran) implemented alternate entry
1448 bb = ENTRY_BLOCK_PTR;
1455 /* (1) Critical edges must have a source with multiple successors. */
1456 if (bb->succ && bb->succ->succ_next)
1458 for (e = bb->succ; e; e = e->succ_next)
1460 /* (2) Critical edges must have a destination with multiple
1461 predecessors. Note that we know there is at least one
1462 predecessor -- the edge we followed to get here. */
1463 if (e->dest->pred->pred_next)
1464 e->flags |= EDGE_CRITICAL;
1466 e->flags &= ~EDGE_CRITICAL;
1471 for (e = bb->succ; e; e = e->succ_next)
1472 e->flags &= ~EDGE_CRITICAL;
1477 bb = BASIC_BLOCK (i);
1481 /* Split a block BB after insn INSN creating a new fallthru edge.
1482 Return the new edge. Note that to keep other parts of the compiler happy,
1483 this function renumbers all the basic blocks so that the new
1484 one has a number one greater than the block split. */
1487 split_block (bb, insn)
1497 /* There is no point splitting the block after its end. */
1498 if (bb->end == insn)
1501 /* Create the new structures. */
1502 new_bb = (basic_block) obstack_alloc (&flow_obstack, sizeof (*new_bb));
1503 new_edge = (edge) xcalloc (1, sizeof (*new_edge));
1506 memset (new_bb, 0, sizeof (*new_bb));
1508 new_bb->head = NEXT_INSN (insn);
1509 new_bb->end = bb->end;
1512 new_bb->succ = bb->succ;
1513 bb->succ = new_edge;
1514 new_bb->pred = new_edge;
1515 new_bb->count = bb->count;
1516 new_bb->frequency = bb->frequency;
1517 new_bb->loop_depth = bb->loop_depth;
1520 new_edge->dest = new_bb;
1521 new_edge->flags = EDGE_FALLTHRU;
1522 new_edge->probability = REG_BR_PROB_BASE;
1523 new_edge->count = bb->count;
1525 /* Redirect the src of the successor edges of bb to point to new_bb. */
1526 for (e = new_bb->succ; e; e = e->succ_next)
1529 /* Place the new block just after the block being split. */
1530 VARRAY_GROW (basic_block_info, ++n_basic_blocks);
1532 /* Some parts of the compiler expect blocks to be number in
1533 sequential order so insert the new block immediately after the
1534 block being split.. */
1536 for (i = n_basic_blocks - 1; i > j + 1; --i)
1538 basic_block tmp = BASIC_BLOCK (i - 1);
1539 BASIC_BLOCK (i) = tmp;
1543 BASIC_BLOCK (i) = new_bb;
1546 if (GET_CODE (new_bb->head) == CODE_LABEL)
1548 /* Create the basic block note. */
1549 bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK,
1551 NOTE_BASIC_BLOCK (bb_note) = new_bb;
1555 /* Create the basic block note. */
1556 bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK,
1558 NOTE_BASIC_BLOCK (bb_note) = new_bb;
1559 new_bb->head = bb_note;
1562 update_bb_for_insn (new_bb);
1564 if (bb->global_live_at_start)
1566 new_bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (&flow_obstack);
1567 new_bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (&flow_obstack);
1568 COPY_REG_SET (new_bb->global_live_at_end, bb->global_live_at_end);
1570 /* We now have to calculate which registers are live at the end
1571 of the split basic block and at the start of the new basic
1572 block. Start with those registers that are known to be live
1573 at the end of the original basic block and get
1574 propagate_block to determine which registers are live. */
1575 COPY_REG_SET (new_bb->global_live_at_start, bb->global_live_at_end);
1576 propagate_block (new_bb, new_bb->global_live_at_start, NULL, NULL, 0);
1577 COPY_REG_SET (bb->global_live_at_end,
1578 new_bb->global_live_at_start);
1584 /* Return label in the head of basic block. Create one if it doesn't exist. */
1589 if (GET_CODE (block->head) != CODE_LABEL)
1590 block->head = emit_label_before (gen_label_rtx (), block->head);
1594 /* Return true if the block has no effect and only forwards control flow to
1595 its single destination. */
1597 forwarder_block_p (bb)
1601 if (bb == EXIT_BLOCK_PTR || bb == ENTRY_BLOCK_PTR
1602 || !bb->succ || bb->succ->succ_next)
1605 insn = next_active_insn (bb->head);
1608 if (GET_CODE (insn) == CODE_LABEL
1609 || (GET_CODE (insn) == JUMP_INSN && onlyjump_p (insn)))
1614 /* Return nonzero if we can reach target from src by falling trought. */
1616 can_fallthru (src, target)
1617 basic_block src, target;
1619 rtx insn = src->end;
1620 rtx insn2 = target->head;
1622 if (!active_insn_p (insn2))
1623 insn2 = next_active_insn (insn2);
1624 /* ??? Later we may add code to move jump tables offline. */
1625 return next_active_insn (insn) == insn2;
1628 /* Attempt to perform edge redirection by replacing possibly complex jump
1629 instruction by unconditional jump or removing jump completely.
1630 This can apply only if all edges now point to the same block.
1632 The parameters and return values are equivalent to redirect_edge_and_branch.
1635 try_redirect_by_replacing_jump (e, target)
1639 basic_block src = e->src;
1640 rtx insn = src->end;
1646 /* Verify that all targets will be TARGET. */
1647 for (tmp = src->succ; tmp; tmp = tmp->succ_next)
1648 if (tmp->dest != target && tmp != e)
1650 if (tmp || GET_CODE (insn) != JUMP_INSN)
1653 /* Avoid removing branch with side effects. */
1654 set = single_set (insn);
1655 if (!set || side_effects_p (set))
1658 /* See if we can create the fallthru edge. */
1659 if (can_fallthru (src, target))
1661 src->end = PREV_INSN (insn);
1663 fprintf (rtl_dump_file, "Removing jump %i.\n", INSN_UID (insn));
1664 flow_delete_insn (insn);
1668 /* If this already is simplejump, redirect it. */
1669 else if (simplejump_p (insn))
1671 if (e->dest == target)
1674 fprintf (rtl_dump_file, "Redirecting jump %i from %i to %i.\n",
1675 INSN_UID (insn), e->dest->index, target->index);
1676 redirect_jump (insn, block_label (target), 0);
1678 /* Or replace possibly complicated jump insn by simple jump insn. */
1681 rtx target_label = block_label (target);
1683 src->end = PREV_INSN (insn);
1684 src->end = emit_jump_insn_after (gen_jump (target_label), src->end);
1685 JUMP_LABEL (src->end) = target_label;
1686 LABEL_NUSES (target_label)++;
1688 fprintf (rtl_dump_file, "Replacing insn %i by jump %i\n",
1689 INSN_UID (insn), INSN_UID (src->end));
1690 flow_delete_insn (insn);
1694 /* Keep only one edge out and set proper flags. */
1695 while (src->succ->succ_next)
1696 remove_edge (src->succ);
1699 e->flags = EDGE_FALLTHRU;
1703 /* Fixup barriers. */
1704 barrier = next_nonnote_insn (insn);
1705 if (fallthru && GET_CODE (barrier) == BARRIER)
1706 flow_delete_insn (barrier);
1707 else if (!fallthru && GET_CODE (barrier) != BARRIER)
1708 emit_barrier_after (insn);
1710 if (e->dest != target)
1711 redirect_edge_succ (e, target);
1715 /* Attempt to change code to redirect edge E to TARGET.
1716 Don't do that on expense of adding new instructions or reordering
1719 Function can be also called with edge destionation equivalent to the
1720 TARGET. Then it should try the simplifications and do nothing if
1723 Return true if transformation suceeded. We still return flase in case
1724 E already destinated TARGET and we didn't managed to simplify instruction
1727 redirect_edge_and_branch (e, target)
1732 rtx old_label = e->dest->head;
1733 basic_block src = e->src;
1734 rtx insn = src->end;
1736 if (try_redirect_by_replacing_jump (e, target))
1738 /* Do this fast path late, as we want above code to simplify for cases
1739 where called on single edge leaving basic block containing nontrivial
1741 else if (e->dest == target)
1744 /* We can only redirect non-fallthru edges of jump insn. */
1745 if (e->flags & EDGE_FALLTHRU)
1747 if (GET_CODE (insn) != JUMP_INSN)
1750 /* Recognize a tablejump and adjust all matching cases. */
1751 if ((tmp = JUMP_LABEL (insn)) != NULL_RTX
1752 && (tmp = NEXT_INSN (tmp)) != NULL_RTX
1753 && GET_CODE (tmp) == JUMP_INSN
1754 && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
1755 || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
1759 rtx new_label = block_label (target);
1761 if (GET_CODE (PATTERN (tmp)) == ADDR_VEC)
1762 vec = XVEC (PATTERN (tmp), 0);
1764 vec = XVEC (PATTERN (tmp), 1);
1766 for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
1767 if (XEXP (RTVEC_ELT (vec, j), 0) == old_label)
1769 RTVEC_ELT (vec, j) = gen_rtx_LABEL_REF (VOIDmode, new_label);
1770 --LABEL_NUSES (old_label);
1771 ++LABEL_NUSES (new_label);
1774 /* Handle casesi dispatch insns */
1775 if ((tmp = single_set (insn)) != NULL
1776 && SET_DEST (tmp) == pc_rtx
1777 && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
1778 && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF
1779 && XEXP (XEXP (SET_SRC (tmp), 2), 0) == old_label)
1781 XEXP (SET_SRC (tmp), 2) = gen_rtx_LABEL_REF (VOIDmode,
1783 --LABEL_NUSES (old_label);
1784 ++LABEL_NUSES (new_label);
1789 /* ?? We may play the games with moving the named labels from
1790 one basic block to the other in case only one computed_jump is
1792 if (computed_jump_p (insn))
1795 /* A return instruction can't be redirected. */
1796 if (returnjump_p (insn))
1799 /* If the insn doesn't go where we think, we're confused. */
1800 if (JUMP_LABEL (insn) != old_label)
1802 redirect_jump (insn, block_label (target), 0);
1806 fprintf (rtl_dump_file, "Edge %i->%i redirected to %i\n",
1807 e->src->index, e->dest->index, target->index);
1808 if (e->dest != target)
1811 /* Check whether the edge is already present. */
1812 for (s = src->succ; s; s=s->succ_next)
1813 if (s->dest == target)
1817 s->flags |= e->flags;
1821 redirect_edge_succ (e, target);
1826 /* Split a (typically critical) edge. Return the new block.
1827 Abort on abnormal edges.
1829 ??? The code generally expects to be called on critical edges.
1830 The case of a block ending in an unconditional jump to a
1831 block with multiple predecessors is not handled optimally. */
1834 split_edge (edge_in)
1837 basic_block old_pred, bb, old_succ;
1842 /* Abnormal edges cannot be split. */
1843 if ((edge_in->flags & EDGE_ABNORMAL) != 0)
1846 old_pred = edge_in->src;
1847 old_succ = edge_in->dest;
1849 /* Create the new structures. */
1850 bb = (basic_block) obstack_alloc (&flow_obstack, sizeof (*bb));
1851 edge_out = (edge) xcalloc (1, sizeof (*edge_out));
1854 memset (bb, 0, sizeof (*bb));
1856 /* ??? This info is likely going to be out of date very soon. */
1857 if (old_succ->global_live_at_start)
1859 bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (&flow_obstack);
1860 bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (&flow_obstack);
1861 COPY_REG_SET (bb->global_live_at_start, old_succ->global_live_at_start);
1862 COPY_REG_SET (bb->global_live_at_end, old_succ->global_live_at_start);
1866 bb->succ = edge_out;
1867 bb->count = edge_in->count;
1868 bb->frequency = (edge_in->probability * edge_in->src->frequency
1869 / REG_BR_PROB_BASE);
1871 edge_in->flags &= ~EDGE_CRITICAL;
1873 edge_out->pred_next = old_succ->pred;
1874 edge_out->succ_next = NULL;
1876 edge_out->dest = old_succ;
1877 edge_out->flags = EDGE_FALLTHRU;
1878 edge_out->probability = REG_BR_PROB_BASE;
1879 edge_out->count = edge_in->count;
1881 old_succ->pred = edge_out;
1883 /* Tricky case -- if there existed a fallthru into the successor
1884 (and we're not it) we must add a new unconditional jump around
1885 the new block we're actually interested in.
1887 Further, if that edge is critical, this means a second new basic
1888 block must be created to hold it. In order to simplify correct
1889 insn placement, do this before we touch the existing basic block
1890 ordering for the block we were really wanting. */
1891 if ((edge_in->flags & EDGE_FALLTHRU) == 0)
1894 for (e = edge_out->pred_next; e; e = e->pred_next)
1895 if (e->flags & EDGE_FALLTHRU)
1900 basic_block jump_block;
1903 if ((e->flags & EDGE_CRITICAL) == 0
1904 && e->src != ENTRY_BLOCK_PTR)
1906 /* Non critical -- we can simply add a jump to the end
1907 of the existing predecessor. */
1908 jump_block = e->src;
1912 /* We need a new block to hold the jump. The simplest
1913 way to do the bulk of the work here is to recursively
1915 jump_block = split_edge (e);
1916 e = jump_block->succ;
1919 /* Now add the jump insn ... */
1920 pos = emit_jump_insn_after (gen_jump (old_succ->head),
1922 jump_block->end = pos;
1923 if (basic_block_for_insn)
1924 set_block_for_insn (pos, jump_block);
1925 emit_barrier_after (pos);
1927 /* ... let jump know that label is in use, ... */
1928 JUMP_LABEL (pos) = old_succ->head;
1929 ++LABEL_NUSES (old_succ->head);
1931 /* ... and clear fallthru on the outgoing edge. */
1932 e->flags &= ~EDGE_FALLTHRU;
1934 /* Continue splitting the interesting edge. */
1938 /* Place the new block just in front of the successor. */
1939 VARRAY_GROW (basic_block_info, ++n_basic_blocks);
1940 if (old_succ == EXIT_BLOCK_PTR)
1941 j = n_basic_blocks - 1;
1943 j = old_succ->index;
1944 for (i = n_basic_blocks - 1; i > j; --i)
1946 basic_block tmp = BASIC_BLOCK (i - 1);
1947 BASIC_BLOCK (i) = tmp;
1950 BASIC_BLOCK (i) = bb;
1953 /* Create the basic block note.
1955 Where we place the note can have a noticable impact on the generated
1956 code. Consider this cfg:
1966 If we need to insert an insn on the edge from block 0 to block 1,
1967 we want to ensure the instructions we insert are outside of any
1968 loop notes that physically sit between block 0 and block 1. Otherwise
1969 we confuse the loop optimizer into thinking the loop is a phony. */
1970 if (old_succ != EXIT_BLOCK_PTR
1971 && PREV_INSN (old_succ->head)
1972 && GET_CODE (PREV_INSN (old_succ->head)) == NOTE
1973 && NOTE_LINE_NUMBER (PREV_INSN (old_succ->head)) == NOTE_INSN_LOOP_BEG)
1974 bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK,
1975 PREV_INSN (old_succ->head));
1976 else if (old_succ != EXIT_BLOCK_PTR)
1977 bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK, old_succ->head);
1979 bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK, get_last_insn ());
1980 NOTE_BASIC_BLOCK (bb_note) = bb;
1981 bb->head = bb->end = bb_note;
1983 /* For non-fallthry edges, we must adjust the predecessor's
1984 jump instruction to target our new block. */
1985 if ((edge_in->flags & EDGE_FALLTHRU) == 0)
1987 if (!redirect_edge_and_branch (edge_in, bb))
1991 redirect_edge_succ (edge_in, bb);
1996 /* Queue instructions for insertion on an edge between two basic blocks.
1997 The new instructions and basic blocks (if any) will not appear in the
1998 CFG until commit_edge_insertions is called. */
2001 insert_insn_on_edge (pattern, e)
2005 /* We cannot insert instructions on an abnormal critical edge.
2006 It will be easier to find the culprit if we die now. */
2007 if ((e->flags & (EDGE_ABNORMAL|EDGE_CRITICAL))
2008 == (EDGE_ABNORMAL|EDGE_CRITICAL))
2011 if (e->insns == NULL_RTX)
2014 push_to_sequence (e->insns);
2016 emit_insn (pattern);
2018 e->insns = get_insns ();
2022 /* Update the CFG for the instructions queued on edge E. */
2025 commit_one_edge_insertion (e)
2028 rtx before = NULL_RTX, after = NULL_RTX, insns, tmp, last;
2031 /* Pull the insns off the edge now since the edge might go away. */
2033 e->insns = NULL_RTX;
2035 /* Figure out where to put these things. If the destination has
2036 one predecessor, insert there. Except for the exit block. */
2037 if (e->dest->pred->pred_next == NULL
2038 && e->dest != EXIT_BLOCK_PTR)
2042 /* Get the location correct wrt a code label, and "nice" wrt
2043 a basic block note, and before everything else. */
2045 if (GET_CODE (tmp) == CODE_LABEL)
2046 tmp = NEXT_INSN (tmp);
2047 if (NOTE_INSN_BASIC_BLOCK_P (tmp))
2048 tmp = NEXT_INSN (tmp);
2049 if (tmp == bb->head)
2052 after = PREV_INSN (tmp);
2055 /* If the source has one successor and the edge is not abnormal,
2056 insert there. Except for the entry block. */
2057 else if ((e->flags & EDGE_ABNORMAL) == 0
2058 && e->src->succ->succ_next == NULL
2059 && e->src != ENTRY_BLOCK_PTR)
2062 /* It is possible to have a non-simple jump here. Consider a target
2063 where some forms of unconditional jumps clobber a register. This
2064 happens on the fr30 for example.
2066 We know this block has a single successor, so we can just emit
2067 the queued insns before the jump. */
2068 if (GET_CODE (bb->end) == JUMP_INSN)
2074 /* We'd better be fallthru, or we've lost track of what's what. */
2075 if ((e->flags & EDGE_FALLTHRU) == 0)
2082 /* Otherwise we must split the edge. */
2085 bb = split_edge (e);
2089 /* Now that we've found the spot, do the insertion. */
2091 /* Set the new block number for these insns, if structure is allocated. */
2092 if (basic_block_for_insn)
2095 for (i = insns; i != NULL_RTX; i = NEXT_INSN (i))
2096 set_block_for_insn (i, bb);
2101 emit_insns_before (insns, before);
2102 if (before == bb->head)
2105 last = prev_nonnote_insn (before);
2109 last = emit_insns_after (insns, after);
2110 if (after == bb->end)
2114 if (returnjump_p (last))
2116 /* ??? Remove all outgoing edges from BB and add one for EXIT.
2117 This is not currently a problem because this only happens
2118 for the (single) epilogue, which already has a fallthru edge
2122 if (e->dest != EXIT_BLOCK_PTR
2123 || e->succ_next != NULL
2124 || (e->flags & EDGE_FALLTHRU) == 0)
2126 e->flags &= ~EDGE_FALLTHRU;
2128 emit_barrier_after (last);
2132 flow_delete_insn (before);
2134 else if (GET_CODE (last) == JUMP_INSN)
2136 find_sub_basic_blocks (bb);
2139 /* Update the CFG for all queued instructions. */
2142 commit_edge_insertions ()
2147 #ifdef ENABLE_CHECKING
2148 verify_flow_info ();
2152 bb = ENTRY_BLOCK_PTR;
2157 for (e = bb->succ; e; e = next)
2159 next = e->succ_next;
2161 commit_one_edge_insertion (e);
2164 if (++i >= n_basic_blocks)
2166 bb = BASIC_BLOCK (i);
2170 /* Add fake edges to the function exit for any non constant calls in
2171 the bitmap of blocks specified by BLOCKS or to the whole CFG if
2172 BLOCKS is zero. Return the nuber of blocks that were split. */
2175 flow_call_edges_add (blocks)
2179 int blocks_split = 0;
2183 /* Map bb indicies into basic block pointers since split_block
2184 will renumber the basic blocks. */
2186 bbs = xmalloc (n_basic_blocks * sizeof (*bbs));
2190 for (i = 0; i < n_basic_blocks; i++)
2191 bbs[bb_num++] = BASIC_BLOCK (i);
2195 EXECUTE_IF_SET_IN_SBITMAP (blocks, 0, i,
2197 bbs[bb_num++] = BASIC_BLOCK (i);
2202 /* Now add fake edges to the function exit for any non constant
2203 calls since there is no way that we can determine if they will
2206 for (i = 0; i < bb_num; i++)
2208 basic_block bb = bbs[i];
2212 for (insn = bb->end; ; insn = prev_insn)
2214 prev_insn = PREV_INSN (insn);
2215 if (GET_CODE (insn) == CALL_INSN && ! CONST_CALL_P (insn))
2219 /* Note that the following may create a new basic block
2220 and renumber the existing basic blocks. */
2221 e = split_block (bb, insn);
2225 make_edge (NULL, bb, EXIT_BLOCK_PTR, EDGE_FAKE);
2227 if (insn == bb->head)
2233 verify_flow_info ();
2236 return blocks_split;
2239 /* Find unreachable blocks. An unreachable block will have NULL in
2240 block->aux, a non-NULL value indicates the block is reachable. */
2243 find_unreachable_blocks ()
2247 basic_block *tos, *worklist;
2250 tos = worklist = (basic_block *) xmalloc (sizeof (basic_block) * n);
2252 /* Use basic_block->aux as a marker. Clear them all. */
2254 for (i = 0; i < n; ++i)
2255 BASIC_BLOCK (i)->aux = NULL;
2257 /* Add our starting points to the worklist. Almost always there will
2258 be only one. It isn't inconcievable that we might one day directly
2259 support Fortran alternate entry points. */
2261 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
2265 /* Mark the block with a handy non-null value. */
2269 /* Iterate: find everything reachable from what we've already seen. */
2271 while (tos != worklist)
2273 basic_block b = *--tos;
2275 for (e = b->succ; e; e = e->succ_next)
2286 /* Delete all unreachable basic blocks. */
2288 delete_unreachable_blocks ()
2292 find_unreachable_blocks ();
2294 /* Delete all unreachable basic blocks. Count down so that we
2295 don't interfere with the block renumbering that happens in
2296 flow_delete_block. */
2298 for (i = n_basic_blocks - 1; i >= 0; --i)
2300 basic_block b = BASIC_BLOCK (i);
2303 /* This block was found. Tidy up the mark. */
2306 flow_delete_block (b);
2309 tidy_fallthru_edges ();
2312 /* Return true if NOTE is not one of the ones that must be kept paired,
2313 so that we may simply delete them. */
2316 can_delete_note_p (note)
2319 return (NOTE_LINE_NUMBER (note) == NOTE_INSN_DELETED
2320 || NOTE_LINE_NUMBER (note) == NOTE_INSN_BASIC_BLOCK);
2323 /* Unlink a chain of insns between START and FINISH, leaving notes
2324 that must be paired. */
2327 flow_delete_insn_chain (start, finish)
2330 /* Unchain the insns one by one. It would be quicker to delete all
2331 of these with a single unchaining, rather than one at a time, but
2332 we need to keep the NOTE's. */
2338 next = NEXT_INSN (start);
2339 if (GET_CODE (start) == NOTE && !can_delete_note_p (start))
2341 else if (GET_CODE (start) == CODE_LABEL
2342 && ! can_delete_label_p (start))
2344 const char *name = LABEL_NAME (start);
2345 PUT_CODE (start, NOTE);
2346 NOTE_LINE_NUMBER (start) = NOTE_INSN_DELETED_LABEL;
2347 NOTE_SOURCE_FILE (start) = name;
2350 next = flow_delete_insn (start);
2352 if (start == finish)
2358 /* Delete the insns in a (non-live) block. We physically delete every
2359 non-deleted-note insn, and update the flow graph appropriately.
2361 Return nonzero if we deleted an exception handler. */
2363 /* ??? Preserving all such notes strikes me as wrong. It would be nice
2364 to post-process the stream to remove empty blocks, loops, ranges, etc. */
2367 flow_delete_block (b)
2370 int deleted_handler = 0;
2373 /* If the head of this block is a CODE_LABEL, then it might be the
2374 label for an exception handler which can't be reached.
2376 We need to remove the label from the exception_handler_label list
2377 and remove the associated NOTE_INSN_EH_REGION_BEG and
2378 NOTE_INSN_EH_REGION_END notes. */
2382 never_reached_warning (insn);
2384 if (GET_CODE (insn) == CODE_LABEL)
2385 maybe_remove_eh_handler (insn);
2387 /* Include any jump table following the basic block. */
2389 if (GET_CODE (end) == JUMP_INSN
2390 && (tmp = JUMP_LABEL (end)) != NULL_RTX
2391 && (tmp = NEXT_INSN (tmp)) != NULL_RTX
2392 && GET_CODE (tmp) == JUMP_INSN
2393 && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
2394 || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
2397 /* Include any barrier that may follow the basic block. */
2398 tmp = next_nonnote_insn (end);
2399 if (tmp && GET_CODE (tmp) == BARRIER)
2402 /* Selectively delete the entire chain. */
2403 flow_delete_insn_chain (insn, end);
2405 /* Remove the edges into and out of this block. Note that there may
2406 indeed be edges in, if we are removing an unreachable loop. */
2410 for (e = b->pred; e; e = next)
2412 for (q = &e->src->succ; *q != e; q = &(*q)->succ_next)
2415 next = e->pred_next;
2419 for (e = b->succ; e; e = next)
2421 for (q = &e->dest->pred; *q != e; q = &(*q)->pred_next)
2424 next = e->succ_next;
2433 /* Remove the basic block from the array, and compact behind it. */
2436 return deleted_handler;
2439 /* Remove block B from the basic block array and compact behind it. */
2445 int i, n = n_basic_blocks;
2447 for (i = b->index; i + 1 < n; ++i)
2449 basic_block x = BASIC_BLOCK (i + 1);
2450 BASIC_BLOCK (i) = x;
2454 basic_block_info->num_elements--;
2458 /* Delete INSN by patching it out. Return the next insn. */
2461 flow_delete_insn (insn)
2464 rtx prev = PREV_INSN (insn);
2465 rtx next = NEXT_INSN (insn);
2468 PREV_INSN (insn) = NULL_RTX;
2469 NEXT_INSN (insn) = NULL_RTX;
2470 INSN_DELETED_P (insn) = 1;
2473 NEXT_INSN (prev) = next;
2475 PREV_INSN (next) = prev;
2477 set_last_insn (prev);
2479 if (GET_CODE (insn) == CODE_LABEL)
2480 remove_node_from_expr_list (insn, &nonlocal_goto_handler_labels);
2482 /* If deleting a jump, decrement the use count of the label. Deleting
2483 the label itself should happen in the normal course of block merging. */
2484 if (GET_CODE (insn) == JUMP_INSN
2485 && JUMP_LABEL (insn)
2486 && GET_CODE (JUMP_LABEL (insn)) == CODE_LABEL)
2487 LABEL_NUSES (JUMP_LABEL (insn))--;
2489 /* Also if deleting an insn that references a label. */
2490 else if ((note = find_reg_note (insn, REG_LABEL, NULL_RTX)) != NULL_RTX
2491 && GET_CODE (XEXP (note, 0)) == CODE_LABEL)
2492 LABEL_NUSES (XEXP (note, 0))--;
2494 if (GET_CODE (insn) == JUMP_INSN
2495 && (GET_CODE (PATTERN (insn)) == ADDR_VEC
2496 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC))
2498 rtx pat = PATTERN (insn);
2499 int diff_vec_p = GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC;
2500 int len = XVECLEN (pat, diff_vec_p);
2503 for (i = 0; i < len; i++)
2504 LABEL_NUSES (XEXP (XVECEXP (pat, diff_vec_p, i), 0))--;
2510 /* True if a given label can be deleted. */
2513 can_delete_label_p (label)
2518 if (LABEL_PRESERVE_P (label))
2521 for (x = forced_labels; x; x = XEXP (x, 1))
2522 if (label == XEXP (x, 0))
2524 for (x = label_value_list; x; x = XEXP (x, 1))
2525 if (label == XEXP (x, 0))
2527 for (x = exception_handler_labels; x; x = XEXP (x, 1))
2528 if (label == XEXP (x, 0))
2531 /* User declared labels must be preserved. */
2532 if (LABEL_NAME (label) != 0)
2539 tail_recursion_label_p (label)
2544 for (x = tail_recursion_label_list; x; x = XEXP (x, 1))
2545 if (label == XEXP (x, 0))
2551 /* Blocks A and B are to be merged into a single block A. The insns
2552 are already contiguous, hence `nomove'. */
2555 merge_blocks_nomove (a, b)
2559 rtx b_head, b_end, a_end;
2560 rtx del_first = NULL_RTX, del_last = NULL_RTX;
2563 /* If there was a CODE_LABEL beginning B, delete it. */
2566 if (GET_CODE (b_head) == CODE_LABEL)
2568 /* Detect basic blocks with nothing but a label. This can happen
2569 in particular at the end of a function. */
2570 if (b_head == b_end)
2572 del_first = del_last = b_head;
2573 b_head = NEXT_INSN (b_head);
2576 /* Delete the basic block note. */
2577 if (NOTE_INSN_BASIC_BLOCK_P (b_head))
2579 if (b_head == b_end)
2584 b_head = NEXT_INSN (b_head);
2587 /* If there was a jump out of A, delete it. */
2589 if (GET_CODE (a_end) == JUMP_INSN)
2593 for (prev = PREV_INSN (a_end); ; prev = PREV_INSN (prev))
2594 if (GET_CODE (prev) != NOTE
2595 || NOTE_LINE_NUMBER (prev) == NOTE_INSN_BASIC_BLOCK
2602 /* If this was a conditional jump, we need to also delete
2603 the insn that set cc0. */
2604 if (prev && sets_cc0_p (prev))
2607 prev = prev_nonnote_insn (prev);
2616 else if (GET_CODE (NEXT_INSN (a_end)) == BARRIER)
2617 del_first = NEXT_INSN (a_end);
2619 /* Delete everything marked above as well as crap that might be
2620 hanging out between the two blocks. */
2621 flow_delete_insn_chain (del_first, del_last);
2623 /* Normally there should only be one successor of A and that is B, but
2624 partway though the merge of blocks for conditional_execution we'll
2625 be merging a TEST block with THEN and ELSE successors. Free the
2626 whole lot of them and hope the caller knows what they're doing. */
2628 remove_edge (a->succ);
2630 /* Adjust the edges out of B for the new owner. */
2631 for (e = b->succ; e; e = e->succ_next)
2635 /* B hasn't quite yet ceased to exist. Attempt to prevent mishap. */
2636 b->pred = b->succ = NULL;
2638 /* Reassociate the insns of B with A. */
2641 if (basic_block_for_insn)
2643 BLOCK_FOR_INSN (b_head) = a;
2644 while (b_head != b_end)
2646 b_head = NEXT_INSN (b_head);
2647 BLOCK_FOR_INSN (b_head) = a;
2657 /* Blocks A and B are to be merged into a single block. A has no incoming
2658 fallthru edge, so it can be moved before B without adding or modifying
2659 any jumps (aside from the jump from A to B). */
2662 merge_blocks_move_predecessor_nojumps (a, b)
2665 rtx start, end, barrier;
2671 barrier = next_nonnote_insn (end);
2672 if (GET_CODE (barrier) != BARRIER)
2674 flow_delete_insn (barrier);
2676 /* Move block and loop notes out of the chain so that we do not
2677 disturb their order.
2679 ??? A better solution would be to squeeze out all the non-nested notes
2680 and adjust the block trees appropriately. Even better would be to have
2681 a tighter connection between block trees and rtl so that this is not
2683 start = squeeze_notes (start, end);
2685 /* Scramble the insn chain. */
2686 if (end != PREV_INSN (b->head))
2687 reorder_insns (start, end, PREV_INSN (b->head));
2691 fprintf (rtl_dump_file, "Moved block %d before %d and merged.\n",
2692 a->index, b->index);
2695 /* Swap the records for the two blocks around. Although we are deleting B,
2696 A is now where B was and we want to compact the BB array from where
2698 BASIC_BLOCK (a->index) = b;
2699 BASIC_BLOCK (b->index) = a;
2701 a->index = b->index;
2704 /* Now blocks A and B are contiguous. Merge them. */
2705 merge_blocks_nomove (a, b);
2710 /* Blocks A and B are to be merged into a single block. B has no outgoing
2711 fallthru edge, so it can be moved after A without adding or modifying
2712 any jumps (aside from the jump from A to B). */
2715 merge_blocks_move_successor_nojumps (a, b)
2718 rtx start, end, barrier;
2722 barrier = NEXT_INSN (end);
2724 /* Recognize a jump table following block B. */
2725 if (GET_CODE (barrier) == CODE_LABEL
2726 && NEXT_INSN (barrier)
2727 && GET_CODE (NEXT_INSN (barrier)) == JUMP_INSN
2728 && (GET_CODE (PATTERN (NEXT_INSN (barrier))) == ADDR_VEC
2729 || GET_CODE (PATTERN (NEXT_INSN (barrier))) == ADDR_DIFF_VEC))
2731 end = NEXT_INSN (barrier);
2732 barrier = NEXT_INSN (end);
2735 /* There had better have been a barrier there. Delete it. */
2736 if (GET_CODE (barrier) != BARRIER)
2738 flow_delete_insn (barrier);
2740 /* Move block and loop notes out of the chain so that we do not
2741 disturb their order.
2743 ??? A better solution would be to squeeze out all the non-nested notes
2744 and adjust the block trees appropriately. Even better would be to have
2745 a tighter connection between block trees and rtl so that this is not
2747 start = squeeze_notes (start, end);
2749 /* Scramble the insn chain. */
2750 reorder_insns (start, end, a->end);
2752 /* Now blocks A and B are contiguous. Merge them. */
2753 merge_blocks_nomove (a, b);
2757 fprintf (rtl_dump_file, "Moved block %d after %d and merged.\n",
2758 b->index, a->index);
2764 /* Attempt to merge basic blocks that are potentially non-adjacent.
2765 Return true iff the attempt succeeded. */
2768 merge_blocks (e, b, c)
2772 /* If C has a tail recursion label, do not merge. There is no
2773 edge recorded from the call_placeholder back to this label, as
2774 that would make optimize_sibling_and_tail_recursive_calls more
2775 complex for no gain. */
2776 if (GET_CODE (c->head) == CODE_LABEL
2777 && tail_recursion_label_p (c->head))
2780 /* If B has a fallthru edge to C, no need to move anything. */
2781 if (e->flags & EDGE_FALLTHRU)
2783 merge_blocks_nomove (b, c);
2787 fprintf (rtl_dump_file, "Merged %d and %d without moving.\n",
2788 b->index, c->index);
2796 int c_has_outgoing_fallthru;
2797 int b_has_incoming_fallthru;
2799 /* We must make sure to not munge nesting of exception regions,
2800 lexical blocks, and loop notes.
2802 The first is taken care of by requiring that the active eh
2803 region at the end of one block always matches the active eh
2804 region at the beginning of the next block.
2806 The later two are taken care of by squeezing out all the notes. */
2808 /* ??? A throw/catch edge (or any abnormal edge) should be rarely
2809 executed and we may want to treat blocks which have two out
2810 edges, one normal, one abnormal as only having one edge for
2811 block merging purposes. */
2813 for (tmp_edge = c->succ; tmp_edge; tmp_edge = tmp_edge->succ_next)
2814 if (tmp_edge->flags & EDGE_FALLTHRU)
2816 c_has_outgoing_fallthru = (tmp_edge != NULL);
2818 for (tmp_edge = b->pred; tmp_edge; tmp_edge = tmp_edge->pred_next)
2819 if (tmp_edge->flags & EDGE_FALLTHRU)
2821 b_has_incoming_fallthru = (tmp_edge != NULL);
2823 /* If B does not have an incoming fallthru, then it can be moved
2824 immediately before C without introducing or modifying jumps.
2825 C cannot be the first block, so we do not have to worry about
2826 accessing a non-existent block. */
2827 if (! b_has_incoming_fallthru)
2828 return merge_blocks_move_predecessor_nojumps (b, c);
2830 /* Otherwise, we're going to try to move C after B. If C does
2831 not have an outgoing fallthru, then it can be moved
2832 immediately after B without introducing or modifying jumps. */
2833 if (! c_has_outgoing_fallthru)
2834 return merge_blocks_move_successor_nojumps (b, c);
2836 /* Otherwise, we'll need to insert an extra jump, and possibly
2837 a new block to contain it. */
2838 /* ??? Not implemented yet. */
2844 /* Simplify conditional jump around an jump.
2845 Return nonzero in case optimization matched. */
2848 try_simplify_condjump (src)
2851 basic_block final_block, next_block;
2852 rtx insn = src->end;
2853 edge branch, fallthru;
2855 if (!any_condjump_p (insn))
2858 fallthru = FALLTHRU_EDGE (src);
2860 /* Following block must be simple forwarder block with single
2861 entry and must not be last in the stream. */
2862 next_block = fallthru->dest;
2863 if (!forwarder_block_p (next_block)
2864 || next_block->pred->pred_next
2865 || next_block->index == n_basic_blocks - 1)
2868 /* The branch must target to block afterwards. */
2869 final_block = BASIC_BLOCK (next_block->index + 1);
2871 branch = BRANCH_EDGE (src);
2873 if (branch->dest != final_block)
2876 /* Avoid jump.c from being overactive on removin ureachable insns. */
2877 LABEL_NUSES (JUMP_LABEL (insn))++;
2878 if (!invert_jump (insn, block_label (next_block->succ->dest), 1))
2880 LABEL_NUSES (JUMP_LABEL (insn))--;
2884 fprintf (rtl_dump_file, "Simplifying condjump %i around jump %i\n",
2885 INSN_UID (insn), INSN_UID (next_block->end));
2887 redirect_edge_succ (branch, final_block);
2888 redirect_edge_succ (fallthru, next_block->succ->dest);
2890 branch->flags |= EDGE_FALLTHRU;
2891 fallthru->flags &= ~EDGE_FALLTHRU;
2893 flow_delete_block (next_block);
2897 /* Attempt to forward edges leaving basic block B.
2898 Return nonzero if sucessfull. */
2901 try_forward_edges (b)
2906 for (e = b->succ; e; e = e->succ_next)
2908 basic_block target = e->dest, first = e->dest;
2911 /* Look for the real destination of jump.
2912 Avoid inifinite loop in the infinite empty loop by counting
2913 up to n_basic_blocks. */
2914 while (forwarder_block_p (target)
2915 && target->succ->dest != EXIT_BLOCK_PTR
2916 && counter < n_basic_blocks)
2918 /* Bypass trivial infinite loops. */
2919 if (target == target->succ->dest)
2920 counter = n_basic_blocks;
2921 target = target->succ->dest, counter++;
2924 if (target != first && counter < n_basic_blocks
2925 && redirect_edge_and_branch (e, target))
2927 while (first != target)
2929 first->count -= e->count;
2930 first->succ->count -= e->count;
2931 first->frequency -= ((e->probability * b->frequency
2932 + REG_BR_PROB_BASE / 2)
2933 / REG_BR_PROB_BASE);
2934 first = first->succ->dest;
2936 /* We've possibly removed the edge. */
2940 else if (rtl_dump_file && counter == n_basic_blocks)
2941 fprintf (rtl_dump_file, "Infinite loop in BB %i.\n", target->index);
2942 else if (rtl_dump_file && first != target)
2943 fprintf (rtl_dump_file,
2944 "Forwarding edge %i->%i to %i failed.\n", b->index,
2945 e->dest->index, target->index);
2950 /* Do simple CFG optimizations - basic block merging, simplifying of jump
2953 Return nonzero in case some optimizations matched. */
2959 bool changed_overall = 0;
2962 /* Attempt to merge blocks as made possible by edge removal. If a block
2963 has only one successor, and the successor has only one predecessor,
2964 they may be combined. */
2969 for (i = 0; i < n_basic_blocks;)
2971 basic_block c, b = BASIC_BLOCK (i);
2973 int changed_here = 0;
2975 /* Delete trivially dead basic block. */
2976 if (b->pred == NULL)
2978 c = BASIC_BLOCK (i - 1);
2980 fprintf (rtl_dump_file, "Deleting block %i.\n", b->index);
2981 flow_delete_block (b);
2985 /* The fallthru forwarder block can be deleted. */
2986 if (b->pred->pred_next == NULL
2987 && forwarder_block_p (b)
2988 && (b->pred->flags & EDGE_FALLTHRU)
2989 && (b->succ->flags & EDGE_FALLTHRU))
2992 fprintf (rtl_dump_file, "Deleting fallthru block %i.\n",
2994 c = BASIC_BLOCK (i ? i - 1 : i + 1);
2995 redirect_edge_succ (b->pred, b->succ->dest);
2996 flow_delete_block (b);
3001 /* A loop because chains of blocks might be combineable. */
3002 while ((s = b->succ) != NULL
3003 && s->succ_next == NULL
3004 && (s->flags & EDGE_EH) == 0
3005 && (c = s->dest) != EXIT_BLOCK_PTR
3006 && c->pred->pred_next == NULL
3007 /* If the jump insn has side effects, we can't kill the edge. */
3008 && (GET_CODE (b->end) != JUMP_INSN
3009 || onlyjump_p (b->end)) && merge_blocks (s, b, c))
3012 if (try_simplify_condjump (b))
3015 /* In the case basic blocks has single outgoing edge, but over by the
3016 non-trivial jump instruction, we can replace it by unconditional
3017 jump, or delete the jump completely. Use logic of
3018 redirect_edge_and_branch to do the dirty job for us.
3020 We match cases as conditional jumps jumping to the next block or
3024 && b->succ->succ_next == NULL
3025 && GET_CODE (b->end) == JUMP_INSN
3026 && b->succ->dest != EXIT_BLOCK_PTR
3027 && redirect_edge_and_branch (b->succ, b->succ->dest))
3030 if (try_forward_edges (b))
3033 /* Don't get confused by the index shift caused by deleting
3040 changed_overall |= changed;
3044 #ifdef ENABLE_CHECKING
3046 verify_flow_info ();
3048 return changed_overall;
3051 /* The given edge should potentially be a fallthru edge. If that is in
3052 fact true, delete the jump and barriers that are in the way. */
3055 tidy_fallthru_edge (e, b, c)
3061 /* ??? In a late-running flow pass, other folks may have deleted basic
3062 blocks by nopping out blocks, leaving multiple BARRIERs between here
3063 and the target label. They ought to be chastized and fixed.
3065 We can also wind up with a sequence of undeletable labels between
3066 one block and the next.
3068 So search through a sequence of barriers, labels, and notes for
3069 the head of block C and assert that we really do fall through. */
3071 if (next_real_insn (b->end) != next_real_insn (PREV_INSN (c->head)))
3074 /* Remove what will soon cease being the jump insn from the source block.
3075 If block B consisted only of this single jump, turn it into a deleted
3078 if (GET_CODE (q) == JUMP_INSN
3080 && (any_uncondjump_p (q)
3081 || (b->succ == e && e->succ_next == NULL)))
3084 /* If this was a conditional jump, we need to also delete
3085 the insn that set cc0. */
3086 if (any_condjump_p (q) && sets_cc0_p (PREV_INSN (q)))
3093 NOTE_LINE_NUMBER (q) = NOTE_INSN_DELETED;
3094 NOTE_SOURCE_FILE (q) = 0;
3100 /* We don't want a block to end on a line-number note since that has
3101 the potential of changing the code between -g and not -g. */
3102 while (GET_CODE (q) == NOTE && NOTE_LINE_NUMBER (q) >= 0)
3109 /* Selectively unlink the sequence. */
3110 if (q != PREV_INSN (c->head))
3111 flow_delete_insn_chain (NEXT_INSN (q), PREV_INSN (c->head));
3113 e->flags |= EDGE_FALLTHRU;
3116 /* Fix up edges that now fall through, or rather should now fall through
3117 but previously required a jump around now deleted blocks. Simplify
3118 the search by only examining blocks numerically adjacent, since this
3119 is how find_basic_blocks created them. */
3122 tidy_fallthru_edges ()
3126 for (i = 1; i < n_basic_blocks; ++i)
3128 basic_block b = BASIC_BLOCK (i - 1);
3129 basic_block c = BASIC_BLOCK (i);
3132 /* We care about simple conditional or unconditional jumps with
3135 If we had a conditional branch to the next instruction when
3136 find_basic_blocks was called, then there will only be one
3137 out edge for the block which ended with the conditional
3138 branch (since we do not create duplicate edges).
3140 Furthermore, the edge will be marked as a fallthru because we
3141 merge the flags for the duplicate edges. So we do not want to
3142 check that the edge is not a FALLTHRU edge. */
3143 if ((s = b->succ) != NULL
3144 && ! (s->flags & EDGE_COMPLEX)
3145 && s->succ_next == NULL
3147 /* If the jump insn has side effects, we can't tidy the edge. */
3148 && (GET_CODE (b->end) != JUMP_INSN
3149 || onlyjump_p (b->end)))
3150 tidy_fallthru_edge (s, b, c);
3154 /* Perform data flow analysis.
3155 F is the first insn of the function; FLAGS is a set of PROP_* flags
3156 to be used in accumulating flow info. */
3159 life_analysis (f, file, flags)
3164 #ifdef ELIMINABLE_REGS
3166 static struct {int from, to; } eliminables[] = ELIMINABLE_REGS;
3169 /* Record which registers will be eliminated. We use this in
3172 CLEAR_HARD_REG_SET (elim_reg_set);
3174 #ifdef ELIMINABLE_REGS
3175 for (i = 0; i < (int) ARRAY_SIZE (eliminables); i++)
3176 SET_HARD_REG_BIT (elim_reg_set, eliminables[i].from);
3178 SET_HARD_REG_BIT (elim_reg_set, FRAME_POINTER_REGNUM);
3182 flags &= ~(PROP_LOG_LINKS | PROP_AUTOINC);
3184 /* The post-reload life analysis have (on a global basis) the same
3185 registers live as was computed by reload itself. elimination
3186 Otherwise offsets and such may be incorrect.
3188 Reload will make some registers as live even though they do not
3191 We don't want to create new auto-incs after reload, since they
3192 are unlikely to be useful and can cause problems with shared
3194 if (reload_completed)
3195 flags &= ~(PROP_REG_INFO | PROP_AUTOINC);
3197 /* We want alias analysis information for local dead store elimination. */
3198 if (optimize && (flags & PROP_SCAN_DEAD_CODE))
3199 init_alias_analysis ();
3201 /* Always remove no-op moves. Do this before other processing so
3202 that we don't have to keep re-scanning them. */
3203 delete_noop_moves (f);
3205 /* Some targets can emit simpler epilogues if they know that sp was
3206 not ever modified during the function. After reload, of course,
3207 we've already emitted the epilogue so there's no sense searching. */
3208 if (! reload_completed)
3209 notice_stack_pointer_modification (f);
3211 /* Allocate and zero out data structures that will record the
3212 data from lifetime analysis. */
3213 allocate_reg_life_data ();
3214 allocate_bb_life_data ();
3216 /* Find the set of registers live on function exit. */
3217 mark_regs_live_at_end (EXIT_BLOCK_PTR->global_live_at_start);
3219 /* "Update" life info from zero. It'd be nice to begin the
3220 relaxation with just the exit and noreturn blocks, but that set
3221 is not immediately handy. */
3223 if (flags & PROP_REG_INFO)
3224 memset (regs_ever_live, 0, sizeof (regs_ever_live));
3225 update_life_info (NULL, UPDATE_LIFE_GLOBAL, flags);
3228 if (optimize && (flags & PROP_SCAN_DEAD_CODE))
3229 end_alias_analysis ();
3232 dump_flow_info (file);
3234 free_basic_block_vars (1);
3236 #ifdef ENABLE_CHECKING
3240 /* Search for any REG_LABEL notes which reference deleted labels. */
3241 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
3243 rtx inote = find_reg_note (insn, REG_LABEL, NULL_RTX);
3245 if (inote && GET_CODE (inote) == NOTE_INSN_DELETED_LABEL)
3252 /* A subroutine of verify_wide_reg, called through for_each_rtx.
3253 Search for REGNO. If found, abort if it is not wider than word_mode. */
3256 verify_wide_reg_1 (px, pregno)
3261 unsigned int regno = *(int *) pregno;
3263 if (GET_CODE (x) == REG && REGNO (x) == regno)
3265 if (GET_MODE_BITSIZE (GET_MODE (x)) <= BITS_PER_WORD)
3272 /* A subroutine of verify_local_live_at_start. Search through insns
3273 between HEAD and END looking for register REGNO. */
3276 verify_wide_reg (regno, head, end)
3283 && for_each_rtx (&PATTERN (head), verify_wide_reg_1, ®no))
3287 head = NEXT_INSN (head);
3290 /* We didn't find the register at all. Something's way screwy. */
3292 fprintf (rtl_dump_file, "Aborting in verify_wide_reg; reg %d\n", regno);
3293 print_rtl_and_abort ();
3296 /* A subroutine of update_life_info. Verify that there are no untoward
3297 changes in live_at_start during a local update. */
3300 verify_local_live_at_start (new_live_at_start, bb)
3301 regset new_live_at_start;
3304 if (reload_completed)
3306 /* After reload, there are no pseudos, nor subregs of multi-word
3307 registers. The regsets should exactly match. */
3308 if (! REG_SET_EQUAL_P (new_live_at_start, bb->global_live_at_start))
3312 fprintf (rtl_dump_file,
3313 "live_at_start mismatch in bb %d, aborting\n",
3315 debug_bitmap_file (rtl_dump_file, bb->global_live_at_start);
3316 debug_bitmap_file (rtl_dump_file, new_live_at_start);
3318 print_rtl_and_abort ();
3325 /* Find the set of changed registers. */
3326 XOR_REG_SET (new_live_at_start, bb->global_live_at_start);
3328 EXECUTE_IF_SET_IN_REG_SET (new_live_at_start, 0, i,
3330 /* No registers should die. */
3331 if (REGNO_REG_SET_P (bb->global_live_at_start, i))
3334 fprintf (rtl_dump_file,
3335 "Register %d died unexpectedly in block %d\n", i,
3337 print_rtl_and_abort ();
3340 /* Verify that the now-live register is wider than word_mode. */
3341 verify_wide_reg (i, bb->head, bb->end);
3346 /* Updates life information starting with the basic blocks set in BLOCKS.
3347 If BLOCKS is null, consider it to be the universal set.
3349 If EXTENT is UPDATE_LIFE_LOCAL, such as after splitting or peepholeing,
3350 we are only expecting local modifications to basic blocks. If we find
3351 extra registers live at the beginning of a block, then we either killed
3352 useful data, or we have a broken split that wants data not provided.
3353 If we find registers removed from live_at_start, that means we have
3354 a broken peephole that is killing a register it shouldn't.
3356 ??? This is not true in one situation -- when a pre-reload splitter
3357 generates subregs of a multi-word pseudo, current life analysis will
3358 lose the kill. So we _can_ have a pseudo go live. How irritating.
3360 Including PROP_REG_INFO does not properly refresh regs_ever_live
3361 unless the caller resets it to zero. */
3364 update_life_info (blocks, extent, prop_flags)
3366 enum update_life_extent extent;
3370 regset_head tmp_head;
3373 tmp = INITIALIZE_REG_SET (tmp_head);
3375 /* For a global update, we go through the relaxation process again. */
3376 if (extent != UPDATE_LIFE_LOCAL)
3378 calculate_global_regs_live (blocks, blocks,
3379 prop_flags & PROP_SCAN_DEAD_CODE);
3381 /* If asked, remove notes from the blocks we'll update. */
3382 if (extent == UPDATE_LIFE_GLOBAL_RM_NOTES)
3383 count_or_remove_death_notes (blocks, 1);
3388 EXECUTE_IF_SET_IN_SBITMAP (blocks, 0, i,
3390 basic_block bb = BASIC_BLOCK (i);
3392 COPY_REG_SET (tmp, bb->global_live_at_end);
3393 propagate_block (bb, tmp, NULL, NULL, prop_flags);
3395 if (extent == UPDATE_LIFE_LOCAL)
3396 verify_local_live_at_start (tmp, bb);
3401 for (i = n_basic_blocks - 1; i >= 0; --i)
3403 basic_block bb = BASIC_BLOCK (i);
3405 COPY_REG_SET (tmp, bb->global_live_at_end);
3406 propagate_block (bb, tmp, NULL, NULL, prop_flags);
3408 if (extent == UPDATE_LIFE_LOCAL)
3409 verify_local_live_at_start (tmp, bb);
3415 if (prop_flags & PROP_REG_INFO)
3417 /* The only pseudos that are live at the beginning of the function
3418 are those that were not set anywhere in the function. local-alloc
3419 doesn't know how to handle these correctly, so mark them as not
3420 local to any one basic block. */
3421 EXECUTE_IF_SET_IN_REG_SET (ENTRY_BLOCK_PTR->global_live_at_end,
3422 FIRST_PSEUDO_REGISTER, i,
3423 { REG_BASIC_BLOCK (i) = REG_BLOCK_GLOBAL; });
3425 /* We have a problem with any pseudoreg that lives across the setjmp.
3426 ANSI says that if a user variable does not change in value between
3427 the setjmp and the longjmp, then the longjmp preserves it. This
3428 includes longjmp from a place where the pseudo appears dead.
3429 (In principle, the value still exists if it is in scope.)
3430 If the pseudo goes in a hard reg, some other value may occupy
3431 that hard reg where this pseudo is dead, thus clobbering the pseudo.
3432 Conclusion: such a pseudo must not go in a hard reg. */
3433 EXECUTE_IF_SET_IN_REG_SET (regs_live_at_setjmp,
3434 FIRST_PSEUDO_REGISTER, i,
3436 if (regno_reg_rtx[i] != 0)
3438 REG_LIVE_LENGTH (i) = -1;
3439 REG_BASIC_BLOCK (i) = REG_BLOCK_UNKNOWN;
3445 /* Free the variables allocated by find_basic_blocks.
3447 KEEP_HEAD_END_P is non-zero if basic_block_info is not to be freed. */
3450 free_basic_block_vars (keep_head_end_p)
3451 int keep_head_end_p;
3453 if (basic_block_for_insn)
3455 VARRAY_FREE (basic_block_for_insn);
3456 basic_block_for_insn = NULL;
3459 if (! keep_head_end_p)
3461 if (basic_block_info)
3464 VARRAY_FREE (basic_block_info);
3468 ENTRY_BLOCK_PTR->aux = NULL;
3469 ENTRY_BLOCK_PTR->global_live_at_end = NULL;
3470 EXIT_BLOCK_PTR->aux = NULL;
3471 EXIT_BLOCK_PTR->global_live_at_start = NULL;
3475 /* Return nonzero if an insn consists only of SETs, each of which only sets a
3482 rtx pat = PATTERN (insn);
3484 /* Insns carrying these notes are useful later on. */
3485 if (find_reg_note (insn, REG_EQUAL, NULL_RTX))
3488 if (GET_CODE (pat) == SET && set_noop_p (pat))
3491 if (GET_CODE (pat) == PARALLEL)
3494 /* If nothing but SETs of registers to themselves,
3495 this insn can also be deleted. */
3496 for (i = 0; i < XVECLEN (pat, 0); i++)
3498 rtx tem = XVECEXP (pat, 0, i);
3500 if (GET_CODE (tem) == USE
3501 || GET_CODE (tem) == CLOBBER)
3504 if (GET_CODE (tem) != SET || ! set_noop_p (tem))
3513 /* Delete any insns that copy a register to itself. */
3516 delete_noop_moves (f)
3520 for (insn = f; insn; insn = NEXT_INSN (insn))
3522 if (GET_CODE (insn) == INSN && noop_move_p (insn))
3524 PUT_CODE (insn, NOTE);
3525 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
3526 NOTE_SOURCE_FILE (insn) = 0;
3531 /* Determine if the stack pointer is constant over the life of the function.
3532 Only useful before prologues have been emitted. */
3535 notice_stack_pointer_modification_1 (x, pat, data)
3537 rtx pat ATTRIBUTE_UNUSED;
3538 void *data ATTRIBUTE_UNUSED;
3540 if (x == stack_pointer_rtx
3541 /* The stack pointer is only modified indirectly as the result
3542 of a push until later in flow. See the comments in rtl.texi
3543 regarding Embedded Side-Effects on Addresses. */
3544 || (GET_CODE (x) == MEM
3545 && GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == 'a'
3546 && XEXP (XEXP (x, 0), 0) == stack_pointer_rtx))
3547 current_function_sp_is_unchanging = 0;
3551 notice_stack_pointer_modification (f)
3556 /* Assume that the stack pointer is unchanging if alloca hasn't
3558 current_function_sp_is_unchanging = !current_function_calls_alloca;
3559 if (! current_function_sp_is_unchanging)
3562 for (insn = f; insn; insn = NEXT_INSN (insn))
3566 /* Check if insn modifies the stack pointer. */
3567 note_stores (PATTERN (insn), notice_stack_pointer_modification_1,
3569 if (! current_function_sp_is_unchanging)
3575 /* Mark a register in SET. Hard registers in large modes get all
3576 of their component registers set as well. */
3579 mark_reg (reg, xset)
3583 regset set = (regset) xset;
3584 int regno = REGNO (reg);
3586 if (GET_MODE (reg) == BLKmode)
3589 SET_REGNO_REG_SET (set, regno);
3590 if (regno < FIRST_PSEUDO_REGISTER)
3592 int n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
3594 SET_REGNO_REG_SET (set, regno + n);
3598 /* Mark those regs which are needed at the end of the function as live
3599 at the end of the last basic block. */
3602 mark_regs_live_at_end (set)
3607 /* If exiting needs the right stack value, consider the stack pointer
3608 live at the end of the function. */
3609 if ((HAVE_epilogue && reload_completed)
3610 || ! EXIT_IGNORE_STACK
3611 || (! FRAME_POINTER_REQUIRED
3612 && ! current_function_calls_alloca
3613 && flag_omit_frame_pointer)
3614 || current_function_sp_is_unchanging)
3616 SET_REGNO_REG_SET (set, STACK_POINTER_REGNUM);
3619 /* Mark the frame pointer if needed at the end of the function. If
3620 we end up eliminating it, it will be removed from the live list
3621 of each basic block by reload. */
3623 if (! reload_completed || frame_pointer_needed)
3625 SET_REGNO_REG_SET (set, FRAME_POINTER_REGNUM);
3626 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3627 /* If they are different, also mark the hard frame pointer as live. */
3628 if (! LOCAL_REGNO (HARD_FRAME_POINTER_REGNUM))
3629 SET_REGNO_REG_SET (set, HARD_FRAME_POINTER_REGNUM);
3633 #ifndef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
3634 /* Many architectures have a GP register even without flag_pic.
3635 Assume the pic register is not in use, or will be handled by
3636 other means, if it is not fixed. */
3637 if (PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
3638 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
3639 SET_REGNO_REG_SET (set, PIC_OFFSET_TABLE_REGNUM);
3642 /* Mark all global registers, and all registers used by the epilogue
3643 as being live at the end of the function since they may be
3644 referenced by our caller. */
3645 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3646 if (global_regs[i] || EPILOGUE_USES (i))
3647 SET_REGNO_REG_SET (set, i);
3649 if (HAVE_epilogue && reload_completed)
3651 /* Mark all call-saved registers that we actually used. */
3652 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3653 if (regs_ever_live[i] && ! call_used_regs[i] && ! LOCAL_REGNO (i))
3654 SET_REGNO_REG_SET (set, i);
3657 #ifdef EH_RETURN_DATA_REGNO
3658 /* Mark the registers that will contain data for the handler. */
3659 if (reload_completed && current_function_calls_eh_return)
3662 unsigned regno = EH_RETURN_DATA_REGNO(i);
3663 if (regno == INVALID_REGNUM)
3665 SET_REGNO_REG_SET (set, regno);
3668 #ifdef EH_RETURN_STACKADJ_RTX
3669 if ((! HAVE_epilogue || ! reload_completed)
3670 && current_function_calls_eh_return)
3672 rtx tmp = EH_RETURN_STACKADJ_RTX;
3673 if (tmp && REG_P (tmp))
3674 mark_reg (tmp, set);
3677 #ifdef EH_RETURN_HANDLER_RTX
3678 if ((! HAVE_epilogue || ! reload_completed)
3679 && current_function_calls_eh_return)
3681 rtx tmp = EH_RETURN_HANDLER_RTX;
3682 if (tmp && REG_P (tmp))
3683 mark_reg (tmp, set);
3687 /* Mark function return value. */
3688 diddle_return_value (mark_reg, set);
3691 /* Callback function for for_each_successor_phi. DATA is a regset.
3692 Sets the SRC_REGNO, the regno of the phi alternative for phi node
3693 INSN, in the regset. */
3696 set_phi_alternative_reg (insn, dest_regno, src_regno, data)
3697 rtx insn ATTRIBUTE_UNUSED;
3698 int dest_regno ATTRIBUTE_UNUSED;
3702 regset live = (regset) data;
3703 SET_REGNO_REG_SET (live, src_regno);
3707 /* Propagate global life info around the graph of basic blocks. Begin
3708 considering blocks with their corresponding bit set in BLOCKS_IN.
3709 If BLOCKS_IN is null, consider it the universal set.
3711 BLOCKS_OUT is set for every block that was changed. */
3714 calculate_global_regs_live (blocks_in, blocks_out, flags)
3715 sbitmap blocks_in, blocks_out;
3718 basic_block *queue, *qhead, *qtail, *qend;
3719 regset tmp, new_live_at_end, call_used;
3720 regset_head tmp_head, call_used_head;
3721 regset_head new_live_at_end_head;
3724 tmp = INITIALIZE_REG_SET (tmp_head);
3725 new_live_at_end = INITIALIZE_REG_SET (new_live_at_end_head);
3726 call_used = INITIALIZE_REG_SET (call_used_head);
3728 /* Inconveniently, this is only redily available in hard reg set form. */
3729 for (i = 0; i < FIRST_PSEUDO_REGISTER; ++i)
3730 if (call_used_regs[i])
3731 SET_REGNO_REG_SET (call_used, i);
3733 /* Create a worklist. Allocate an extra slot for ENTRY_BLOCK, and one
3734 because the `head == tail' style test for an empty queue doesn't
3735 work with a full queue. */
3736 queue = (basic_block *) xmalloc ((n_basic_blocks + 2) * sizeof (*queue));
3738 qhead = qend = queue + n_basic_blocks + 2;
3740 /* Queue the blocks set in the initial mask. Do this in reverse block
3741 number order so that we are more likely for the first round to do
3742 useful work. We use AUX non-null to flag that the block is queued. */
3745 /* Clear out the garbage that might be hanging out in bb->aux. */
3746 for (i = n_basic_blocks - 1; i >= 0; --i)
3747 BASIC_BLOCK (i)->aux = NULL;
3749 EXECUTE_IF_SET_IN_SBITMAP (blocks_in, 0, i,
3751 basic_block bb = BASIC_BLOCK (i);
3758 for (i = 0; i < n_basic_blocks; ++i)
3760 basic_block bb = BASIC_BLOCK (i);
3767 sbitmap_zero (blocks_out);
3769 /* We work through the queue until there are no more blocks. What
3770 is live at the end of this block is precisely the union of what
3771 is live at the beginning of all its successors. So, we set its
3772 GLOBAL_LIVE_AT_END field based on the GLOBAL_LIVE_AT_START field
3773 for its successors. Then, we compute GLOBAL_LIVE_AT_START for
3774 this block by walking through the instructions in this block in
3775 reverse order and updating as we go. If that changed
3776 GLOBAL_LIVE_AT_START, we add the predecessors of the block to the
3777 queue; they will now need to recalculate GLOBAL_LIVE_AT_END.
3779 We are guaranteed to terminate, because GLOBAL_LIVE_AT_START
3780 never shrinks. If a register appears in GLOBAL_LIVE_AT_START, it
3781 must either be live at the end of the block, or used within the
3782 block. In the latter case, it will certainly never disappear
3783 from GLOBAL_LIVE_AT_START. In the former case, the register
3784 could go away only if it disappeared from GLOBAL_LIVE_AT_START
3785 for one of the successor blocks. By induction, that cannot
3787 while (qhead != qtail)
3789 int rescan, changed;
3798 /* Begin by propagating live_at_start from the successor blocks. */
3799 CLEAR_REG_SET (new_live_at_end);
3800 for (e = bb->succ; e; e = e->succ_next)
3802 basic_block sb = e->dest;
3804 /* Call-clobbered registers die across exception and call edges. */
3805 /* ??? Abnormal call edges ignored for the moment, as this gets
3806 confused by sibling call edges, which crashes reg-stack. */
3807 if (e->flags & EDGE_EH)
3809 bitmap_operation (tmp, sb->global_live_at_start,
3810 call_used, BITMAP_AND_COMPL);
3811 IOR_REG_SET (new_live_at_end, tmp);
3814 IOR_REG_SET (new_live_at_end, sb->global_live_at_start);
3817 /* The all-important stack pointer must always be live. */
3818 SET_REGNO_REG_SET (new_live_at_end, STACK_POINTER_REGNUM);
3820 /* Before reload, there are a few registers that must be forced
3821 live everywhere -- which might not already be the case for
3822 blocks within infinite loops. */
3823 if (! reload_completed)
3825 /* Any reference to any pseudo before reload is a potential
3826 reference of the frame pointer. */
3827 SET_REGNO_REG_SET (new_live_at_end, FRAME_POINTER_REGNUM);
3829 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3830 /* Pseudos with argument area equivalences may require
3831 reloading via the argument pointer. */
3832 if (fixed_regs[ARG_POINTER_REGNUM])
3833 SET_REGNO_REG_SET (new_live_at_end, ARG_POINTER_REGNUM);
3836 /* Any constant, or pseudo with constant equivalences, may
3837 require reloading from memory using the pic register. */
3838 if (PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
3839 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
3840 SET_REGNO_REG_SET (new_live_at_end, PIC_OFFSET_TABLE_REGNUM);
3843 /* Regs used in phi nodes are not included in
3844 global_live_at_start, since they are live only along a
3845 particular edge. Set those regs that are live because of a
3846 phi node alternative corresponding to this particular block. */
3848 for_each_successor_phi (bb, &set_phi_alternative_reg,
3851 if (bb == ENTRY_BLOCK_PTR)
3853 COPY_REG_SET (bb->global_live_at_end, new_live_at_end);
3857 /* On our first pass through this block, we'll go ahead and continue.
3858 Recognize first pass by local_set NULL. On subsequent passes, we
3859 get to skip out early if live_at_end wouldn't have changed. */
3861 if (bb->local_set == NULL)
3863 bb->local_set = OBSTACK_ALLOC_REG_SET (&flow_obstack);
3864 bb->cond_local_set = OBSTACK_ALLOC_REG_SET (&flow_obstack);
3869 /* If any bits were removed from live_at_end, we'll have to
3870 rescan the block. This wouldn't be necessary if we had
3871 precalculated local_live, however with PROP_SCAN_DEAD_CODE
3872 local_live is really dependent on live_at_end. */
3873 CLEAR_REG_SET (tmp);
3874 rescan = bitmap_operation (tmp, bb->global_live_at_end,
3875 new_live_at_end, BITMAP_AND_COMPL);
3879 /* If any of the registers in the new live_at_end set are
3880 conditionally set in this basic block, we must rescan.
3881 This is because conditional lifetimes at the end of the
3882 block do not just take the live_at_end set into account,
3883 but also the liveness at the start of each successor
3884 block. We can miss changes in those sets if we only
3885 compare the new live_at_end against the previous one. */
3886 CLEAR_REG_SET (tmp);
3887 rescan = bitmap_operation (tmp, new_live_at_end,
3888 bb->cond_local_set, BITMAP_AND);
3893 /* Find the set of changed bits. Take this opportunity
3894 to notice that this set is empty and early out. */
3895 CLEAR_REG_SET (tmp);
3896 changed = bitmap_operation (tmp, bb->global_live_at_end,
3897 new_live_at_end, BITMAP_XOR);
3901 /* If any of the changed bits overlap with local_set,
3902 we'll have to rescan the block. Detect overlap by
3903 the AND with ~local_set turning off bits. */
3904 rescan = bitmap_operation (tmp, tmp, bb->local_set,
3909 /* Let our caller know that BB changed enough to require its
3910 death notes updated. */
3912 SET_BIT (blocks_out, bb->index);
3916 /* Add to live_at_start the set of all registers in
3917 new_live_at_end that aren't in the old live_at_end. */
3919 bitmap_operation (tmp, new_live_at_end, bb->global_live_at_end,
3921 COPY_REG_SET (bb->global_live_at_end, new_live_at_end);
3923 changed = bitmap_operation (bb->global_live_at_start,
3924 bb->global_live_at_start,
3931 COPY_REG_SET (bb->global_live_at_end, new_live_at_end);
3933 /* Rescan the block insn by insn to turn (a copy of) live_at_end
3934 into live_at_start. */
3935 propagate_block (bb, new_live_at_end, bb->local_set,
3936 bb->cond_local_set, flags);
3938 /* If live_at start didn't change, no need to go farther. */
3939 if (REG_SET_EQUAL_P (bb->global_live_at_start, new_live_at_end))
3942 COPY_REG_SET (bb->global_live_at_start, new_live_at_end);
3945 /* Queue all predecessors of BB so that we may re-examine
3946 their live_at_end. */
3947 for (e = bb->pred; e; e = e->pred_next)
3949 basic_block pb = e->src;
3950 if (pb->aux == NULL)
3961 FREE_REG_SET (new_live_at_end);
3962 FREE_REG_SET (call_used);
3966 EXECUTE_IF_SET_IN_SBITMAP (blocks_out, 0, i,
3968 basic_block bb = BASIC_BLOCK (i);
3969 FREE_REG_SET (bb->local_set);
3970 FREE_REG_SET (bb->cond_local_set);
3975 for (i = n_basic_blocks - 1; i >= 0; --i)
3977 basic_block bb = BASIC_BLOCK (i);
3978 FREE_REG_SET (bb->local_set);
3979 FREE_REG_SET (bb->cond_local_set);
3986 /* Subroutines of life analysis. */
3988 /* Allocate the permanent data structures that represent the results
3989 of life analysis. Not static since used also for stupid life analysis. */
3992 allocate_bb_life_data ()
3996 for (i = 0; i < n_basic_blocks; i++)
3998 basic_block bb = BASIC_BLOCK (i);
4000 bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (&flow_obstack);
4001 bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (&flow_obstack);
4004 ENTRY_BLOCK_PTR->global_live_at_end
4005 = OBSTACK_ALLOC_REG_SET (&flow_obstack);
4006 EXIT_BLOCK_PTR->global_live_at_start
4007 = OBSTACK_ALLOC_REG_SET (&flow_obstack);
4009 regs_live_at_setjmp = OBSTACK_ALLOC_REG_SET (&flow_obstack);
4013 allocate_reg_life_data ()
4017 max_regno = max_reg_num ();
4019 /* Recalculate the register space, in case it has grown. Old style
4020 vector oriented regsets would set regset_{size,bytes} here also. */
4021 allocate_reg_info (max_regno, FALSE, FALSE);
4023 /* Reset all the data we'll collect in propagate_block and its
4025 for (i = 0; i < max_regno; i++)
4029 REG_N_DEATHS (i) = 0;
4030 REG_N_CALLS_CROSSED (i) = 0;
4031 REG_LIVE_LENGTH (i) = 0;
4032 REG_BASIC_BLOCK (i) = REG_BLOCK_UNKNOWN;
4036 /* Delete dead instructions for propagate_block. */
4039 propagate_block_delete_insn (bb, insn)
4043 rtx inote = find_reg_note (insn, REG_LABEL, NULL_RTX);
4045 /* If the insn referred to a label, and that label was attached to
4046 an ADDR_VEC, it's safe to delete the ADDR_VEC. In fact, it's
4047 pretty much mandatory to delete it, because the ADDR_VEC may be
4048 referencing labels that no longer exist.
4050 INSN may reference a deleted label, particularly when a jump
4051 table has been optimized into a direct jump. There's no
4052 real good way to fix up the reference to the deleted label
4053 when the label is deleted, so we just allow it here.
4055 After dead code elimination is complete, we do search for
4056 any REG_LABEL notes which reference deleted labels as a
4059 if (inote && GET_CODE (inote) == CODE_LABEL)
4061 rtx label = XEXP (inote, 0);
4064 /* The label may be forced if it has been put in the constant
4065 pool. If that is the only use we must discard the table
4066 jump following it, but not the label itself. */
4067 if (LABEL_NUSES (label) == 1 + LABEL_PRESERVE_P (label)
4068 && (next = next_nonnote_insn (label)) != NULL
4069 && GET_CODE (next) == JUMP_INSN
4070 && (GET_CODE (PATTERN (next)) == ADDR_VEC
4071 || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
4073 rtx pat = PATTERN (next);
4074 int diff_vec_p = GET_CODE (pat) == ADDR_DIFF_VEC;
4075 int len = XVECLEN (pat, diff_vec_p);
4078 for (i = 0; i < len; i++)
4079 LABEL_NUSES (XEXP (XVECEXP (pat, diff_vec_p, i), 0))--;
4081 flow_delete_insn (next);
4085 if (bb->end == insn)
4086 bb->end = PREV_INSN (insn);
4087 flow_delete_insn (insn);
4090 /* Delete dead libcalls for propagate_block. Return the insn
4091 before the libcall. */
4094 propagate_block_delete_libcall (bb, insn, note)
4098 rtx first = XEXP (note, 0);
4099 rtx before = PREV_INSN (first);
4101 if (insn == bb->end)
4104 flow_delete_insn_chain (first, insn);
4108 /* Update the life-status of regs for one insn. Return the previous insn. */
4111 propagate_one_insn (pbi, insn)
4112 struct propagate_block_info *pbi;
4115 rtx prev = PREV_INSN (insn);
4116 int flags = pbi->flags;
4117 int insn_is_dead = 0;
4118 int libcall_is_dead = 0;
4122 if (! INSN_P (insn))
4125 note = find_reg_note (insn, REG_RETVAL, NULL_RTX);
4126 if (flags & PROP_SCAN_DEAD_CODE)
4128 insn_is_dead = insn_dead_p (pbi, PATTERN (insn), 0, REG_NOTES (insn));
4129 libcall_is_dead = (insn_is_dead && note != 0
4130 && libcall_dead_p (pbi, note, insn));
4133 /* If an instruction consists of just dead store(s) on final pass,
4135 if ((flags & PROP_KILL_DEAD_CODE) && insn_is_dead)
4137 /* If we're trying to delete a prologue or epilogue instruction
4138 that isn't flagged as possibly being dead, something is wrong.
4139 But if we are keeping the stack pointer depressed, we might well
4140 be deleting insns that are used to compute the amount to update
4141 it by, so they are fine. */
4142 if (reload_completed
4143 && !(TREE_CODE (TREE_TYPE (current_function_decl)) == FUNCTION_TYPE
4144 && (TYPE_RETURNS_STACK_DEPRESSED
4145 (TREE_TYPE (current_function_decl))))
4146 && (((HAVE_epilogue || HAVE_prologue)
4147 && prologue_epilogue_contains (insn))
4148 || (HAVE_sibcall_epilogue
4149 && sibcall_epilogue_contains (insn)))
4150 && find_reg_note (insn, REG_MAYBE_DEAD, NULL_RTX) == 0)
4153 /* Record sets. Do this even for dead instructions, since they
4154 would have killed the values if they hadn't been deleted. */
4155 mark_set_regs (pbi, PATTERN (insn), insn);
4157 /* CC0 is now known to be dead. Either this insn used it,
4158 in which case it doesn't anymore, or clobbered it,
4159 so the next insn can't use it. */
4162 if (libcall_is_dead)
4163 prev = propagate_block_delete_libcall (pbi->bb, insn, note);
4165 propagate_block_delete_insn (pbi->bb, insn);
4170 /* See if this is an increment or decrement that can be merged into
4171 a following memory address. */
4174 register rtx x = single_set (insn);
4176 /* Does this instruction increment or decrement a register? */
4177 if ((flags & PROP_AUTOINC)
4179 && GET_CODE (SET_DEST (x)) == REG
4180 && (GET_CODE (SET_SRC (x)) == PLUS
4181 || GET_CODE (SET_SRC (x)) == MINUS)
4182 && XEXP (SET_SRC (x), 0) == SET_DEST (x)
4183 && GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT
4184 /* Ok, look for a following memory ref we can combine with.
4185 If one is found, change the memory ref to a PRE_INC
4186 or PRE_DEC, cancel this insn, and return 1.
4187 Return 0 if nothing has been done. */
4188 && try_pre_increment_1 (pbi, insn))
4191 #endif /* AUTO_INC_DEC */
4193 CLEAR_REG_SET (pbi->new_set);
4195 /* If this is not the final pass, and this insn is copying the value of
4196 a library call and it's dead, don't scan the insns that perform the
4197 library call, so that the call's arguments are not marked live. */
4198 if (libcall_is_dead)
4200 /* Record the death of the dest reg. */
4201 mark_set_regs (pbi, PATTERN (insn), insn);
4203 insn = XEXP (note, 0);
4204 return PREV_INSN (insn);
4206 else if (GET_CODE (PATTERN (insn)) == SET
4207 && SET_DEST (PATTERN (insn)) == stack_pointer_rtx
4208 && GET_CODE (SET_SRC (PATTERN (insn))) == PLUS
4209 && XEXP (SET_SRC (PATTERN (insn)), 0) == stack_pointer_rtx
4210 && GET_CODE (XEXP (SET_SRC (PATTERN (insn)), 1)) == CONST_INT)
4211 /* We have an insn to pop a constant amount off the stack.
4212 (Such insns use PLUS regardless of the direction of the stack,
4213 and any insn to adjust the stack by a constant is always a pop.)
4214 These insns, if not dead stores, have no effect on life. */
4218 /* Any regs live at the time of a call instruction must not go
4219 in a register clobbered by calls. Find all regs now live and
4220 record this for them. */
4222 if (GET_CODE (insn) == CALL_INSN && (flags & PROP_REG_INFO))
4223 EXECUTE_IF_SET_IN_REG_SET (pbi->reg_live, 0, i,
4224 { REG_N_CALLS_CROSSED (i)++; });
4226 /* Record sets. Do this even for dead instructions, since they
4227 would have killed the values if they hadn't been deleted. */
4228 mark_set_regs (pbi, PATTERN (insn), insn);
4230 if (GET_CODE (insn) == CALL_INSN)
4236 if (GET_CODE (PATTERN (insn)) == COND_EXEC)
4237 cond = COND_EXEC_TEST (PATTERN (insn));
4239 /* Non-constant calls clobber memory. */
4240 if (! CONST_CALL_P (insn))
4242 free_EXPR_LIST_list (&pbi->mem_set_list);
4243 pbi->mem_set_list_len = 0;
4246 /* There may be extra registers to be clobbered. */
4247 for (note = CALL_INSN_FUNCTION_USAGE (insn);
4249 note = XEXP (note, 1))
4250 if (GET_CODE (XEXP (note, 0)) == CLOBBER)
4251 mark_set_1 (pbi, CLOBBER, XEXP (XEXP (note, 0), 0),
4252 cond, insn, pbi->flags);
4254 /* Calls change all call-used and global registers. */
4255 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4256 if (call_used_regs[i] && ! global_regs[i]
4259 /* We do not want REG_UNUSED notes for these registers. */
4260 mark_set_1 (pbi, CLOBBER, gen_rtx_REG (reg_raw_mode[i], i),
4262 pbi->flags & ~(PROP_DEATH_NOTES | PROP_REG_INFO));
4266 /* If an insn doesn't use CC0, it becomes dead since we assume
4267 that every insn clobbers it. So show it dead here;
4268 mark_used_regs will set it live if it is referenced. */
4273 mark_used_regs (pbi, PATTERN (insn), NULL_RTX, insn);
4275 /* Sometimes we may have inserted something before INSN (such as a move)
4276 when we make an auto-inc. So ensure we will scan those insns. */
4278 prev = PREV_INSN (insn);
4281 if (! insn_is_dead && GET_CODE (insn) == CALL_INSN)
4287 if (GET_CODE (PATTERN (insn)) == COND_EXEC)
4288 cond = COND_EXEC_TEST (PATTERN (insn));
4290 /* Calls use their arguments. */
4291 for (note = CALL_INSN_FUNCTION_USAGE (insn);
4293 note = XEXP (note, 1))
4294 if (GET_CODE (XEXP (note, 0)) == USE)
4295 mark_used_regs (pbi, XEXP (XEXP (note, 0), 0),
4298 /* The stack ptr is used (honorarily) by a CALL insn. */
4299 SET_REGNO_REG_SET (pbi->reg_live, STACK_POINTER_REGNUM);
4301 /* Calls may also reference any of the global registers,
4302 so they are made live. */
4303 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4305 mark_used_reg (pbi, gen_rtx_REG (reg_raw_mode[i], i),
4310 /* On final pass, update counts of how many insns in which each reg
4312 if (flags & PROP_REG_INFO)
4313 EXECUTE_IF_SET_IN_REG_SET (pbi->reg_live, 0, i,
4314 { REG_LIVE_LENGTH (i)++; });
4319 /* Initialize a propagate_block_info struct for public consumption.
4320 Note that the structure itself is opaque to this file, but that
4321 the user can use the regsets provided here. */
4323 struct propagate_block_info *
4324 init_propagate_block_info (bb, live, local_set, cond_local_set, flags)
4326 regset live, local_set, cond_local_set;
4329 struct propagate_block_info *pbi = xmalloc (sizeof (*pbi));
4332 pbi->reg_live = live;
4333 pbi->mem_set_list = NULL_RTX;
4334 pbi->mem_set_list_len = 0;
4335 pbi->local_set = local_set;
4336 pbi->cond_local_set = cond_local_set;
4340 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
4341 pbi->reg_next_use = (rtx *) xcalloc (max_reg_num (), sizeof (rtx));
4343 pbi->reg_next_use = NULL;
4345 pbi->new_set = BITMAP_XMALLOC ();
4347 #ifdef HAVE_conditional_execution
4348 pbi->reg_cond_dead = splay_tree_new (splay_tree_compare_ints, NULL,
4349 free_reg_cond_life_info);
4350 pbi->reg_cond_reg = BITMAP_XMALLOC ();
4352 /* If this block ends in a conditional branch, for each register live
4353 from one side of the branch and not the other, record the register
4354 as conditionally dead. */
4355 if (GET_CODE (bb->end) == JUMP_INSN
4356 && any_condjump_p (bb->end))
4358 regset_head diff_head;
4359 regset diff = INITIALIZE_REG_SET (diff_head);
4360 basic_block bb_true, bb_false;
4361 rtx cond_true, cond_false, set_src;
4364 /* Identify the successor blocks. */
4365 bb_true = bb->succ->dest;
4366 if (bb->succ->succ_next != NULL)
4368 bb_false = bb->succ->succ_next->dest;
4370 if (bb->succ->flags & EDGE_FALLTHRU)
4372 basic_block t = bb_false;
4376 else if (! (bb->succ->succ_next->flags & EDGE_FALLTHRU))
4381 /* This can happen with a conditional jump to the next insn. */
4382 if (JUMP_LABEL (bb->end) != bb_true->head)
4385 /* Simplest way to do nothing. */
4389 /* Extract the condition from the branch. */
4390 set_src = SET_SRC (pc_set (bb->end));
4391 cond_true = XEXP (set_src, 0);
4392 cond_false = gen_rtx_fmt_ee (reverse_condition (GET_CODE (cond_true)),
4393 GET_MODE (cond_true), XEXP (cond_true, 0),
4394 XEXP (cond_true, 1));
4395 if (GET_CODE (XEXP (set_src, 1)) == PC)
4398 cond_false = cond_true;
4402 /* Compute which register lead different lives in the successors. */
4403 if (bitmap_operation (diff, bb_true->global_live_at_start,
4404 bb_false->global_live_at_start, BITMAP_XOR))
4406 rtx reg = XEXP (cond_true, 0);
4408 if (GET_CODE (reg) == SUBREG)
4409 reg = SUBREG_REG (reg);
4411 if (GET_CODE (reg) != REG)
4414 SET_REGNO_REG_SET (pbi->reg_cond_reg, REGNO (reg));
4416 /* For each such register, mark it conditionally dead. */
4417 EXECUTE_IF_SET_IN_REG_SET
4420 struct reg_cond_life_info *rcli;
4423 rcli = (struct reg_cond_life_info *) xmalloc (sizeof (*rcli));
4425 if (REGNO_REG_SET_P (bb_true->global_live_at_start, i))
4429 rcli->condition = cond;
4430 rcli->stores = const0_rtx;
4431 rcli->orig_condition = cond;
4433 splay_tree_insert (pbi->reg_cond_dead, i,
4434 (splay_tree_value) rcli);
4438 FREE_REG_SET (diff);
4442 /* If this block has no successors, any stores to the frame that aren't
4443 used later in the block are dead. So make a pass over the block
4444 recording any such that are made and show them dead at the end. We do
4445 a very conservative and simple job here. */
4447 && ! (TREE_CODE (TREE_TYPE (current_function_decl)) == FUNCTION_TYPE
4448 && (TYPE_RETURNS_STACK_DEPRESSED
4449 (TREE_TYPE (current_function_decl))))
4450 && (flags & PROP_SCAN_DEAD_CODE)
4451 && (bb->succ == NULL
4452 || (bb->succ->succ_next == NULL
4453 && bb->succ->dest == EXIT_BLOCK_PTR
4454 && ! current_function_calls_eh_return)))
4457 for (insn = bb->end; insn != bb->head; insn = PREV_INSN (insn))
4458 if (GET_CODE (insn) == INSN
4459 && (set = single_set (insn))
4460 && GET_CODE (SET_DEST (set)) == MEM)
4462 rtx mem = SET_DEST (set);
4463 rtx canon_mem = canon_rtx (mem);
4465 /* This optimization is performed by faking a store to the
4466 memory at the end of the block. This doesn't work for
4467 unchanging memories because multiple stores to unchanging
4468 memory is illegal and alias analysis doesn't consider it. */
4469 if (RTX_UNCHANGING_P (canon_mem))
4472 if (XEXP (canon_mem, 0) == frame_pointer_rtx
4473 || (GET_CODE (XEXP (canon_mem, 0)) == PLUS
4474 && XEXP (XEXP (canon_mem, 0), 0) == frame_pointer_rtx
4475 && GET_CODE (XEXP (XEXP (canon_mem, 0), 1)) == CONST_INT))
4478 /* Store a copy of mem, otherwise the address may be scrogged
4479 by find_auto_inc. This matters because insn_dead_p uses
4480 an rtx_equal_p check to determine if two addresses are
4481 the same. This works before find_auto_inc, but fails
4482 after find_auto_inc, causing discrepencies between the
4483 set of live registers calculated during the
4484 calculate_global_regs_live phase and what actually exists
4485 after flow completes, leading to aborts. */
4486 if (flags & PROP_AUTOINC)
4487 mem = shallow_copy_rtx (mem);
4489 pbi->mem_set_list = alloc_EXPR_LIST (0, mem, pbi->mem_set_list);
4490 if (++pbi->mem_set_list_len >= MAX_MEM_SET_LIST_LEN)
4499 /* Release a propagate_block_info struct. */
4502 free_propagate_block_info (pbi)
4503 struct propagate_block_info *pbi;
4505 free_EXPR_LIST_list (&pbi->mem_set_list);
4507 BITMAP_XFREE (pbi->new_set);
4509 #ifdef HAVE_conditional_execution
4510 splay_tree_delete (pbi->reg_cond_dead);
4511 BITMAP_XFREE (pbi->reg_cond_reg);
4514 if (pbi->reg_next_use)
4515 free (pbi->reg_next_use);
4520 /* Compute the registers live at the beginning of a basic block BB from
4521 those live at the end.
4523 When called, REG_LIVE contains those live at the end. On return, it
4524 contains those live at the beginning.
4526 LOCAL_SET, if non-null, will be set with all registers killed
4527 unconditionally by this basic block.
4528 Likewise, COND_LOCAL_SET, if non-null, will be set with all registers
4529 killed conditionally by this basic block. If there is any unconditional
4530 set of a register, then the corresponding bit will be set in LOCAL_SET
4531 and cleared in COND_LOCAL_SET.
4532 It is valid for LOCAL_SET and COND_LOCAL_SET to be the same set. In this
4533 case, the resulting set will be equal to the union of the two sets that
4534 would otherwise be computed. */
4537 propagate_block (bb, live, local_set, cond_local_set, flags)
4541 regset cond_local_set;
4544 struct propagate_block_info *pbi;
4547 pbi = init_propagate_block_info (bb, live, local_set, cond_local_set, flags);
4549 if (flags & PROP_REG_INFO)
4553 /* Process the regs live at the end of the block.
4554 Mark them as not local to any one basic block. */
4555 EXECUTE_IF_SET_IN_REG_SET (live, 0, i,
4556 { REG_BASIC_BLOCK (i) = REG_BLOCK_GLOBAL; });
4559 /* Scan the block an insn at a time from end to beginning. */
4561 for (insn = bb->end;; insn = prev)
4563 /* If this is a call to `setjmp' et al, warn if any
4564 non-volatile datum is live. */
4565 if ((flags & PROP_REG_INFO)
4566 && GET_CODE (insn) == NOTE
4567 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_SETJMP)
4568 IOR_REG_SET (regs_live_at_setjmp, pbi->reg_live);
4570 prev = propagate_one_insn (pbi, insn);
4572 if (insn == bb->head)
4576 free_propagate_block_info (pbi);
4579 /* Return 1 if X (the body of an insn, or part of it) is just dead stores
4580 (SET expressions whose destinations are registers dead after the insn).
4581 NEEDED is the regset that says which regs are alive after the insn.
4583 Unless CALL_OK is non-zero, an insn is needed if it contains a CALL.
4585 If X is the entire body of an insn, NOTES contains the reg notes
4586 pertaining to the insn. */
4589 insn_dead_p (pbi, x, call_ok, notes)
4590 struct propagate_block_info *pbi;
4593 rtx notes ATTRIBUTE_UNUSED;
4595 enum rtx_code code = GET_CODE (x);
4598 /* If flow is invoked after reload, we must take existing AUTO_INC
4599 expresions into account. */
4600 if (reload_completed)
4602 for (; notes; notes = XEXP (notes, 1))
4604 if (REG_NOTE_KIND (notes) == REG_INC)
4606 int regno = REGNO (XEXP (notes, 0));
4608 /* Don't delete insns to set global regs. */
4609 if ((regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
4610 || REGNO_REG_SET_P (pbi->reg_live, regno))
4617 /* If setting something that's a reg or part of one,
4618 see if that register's altered value will be live. */
4622 rtx r = SET_DEST (x);
4625 if (GET_CODE (r) == CC0)
4626 return ! pbi->cc0_live;
4629 /* A SET that is a subroutine call cannot be dead. */
4630 if (GET_CODE (SET_SRC (x)) == CALL)
4636 /* Don't eliminate loads from volatile memory or volatile asms. */
4637 else if (volatile_refs_p (SET_SRC (x)))
4640 if (GET_CODE (r) == MEM)
4644 if (MEM_VOLATILE_P (r))
4647 /* Walk the set of memory locations we are currently tracking
4648 and see if one is an identical match to this memory location.
4649 If so, this memory write is dead (remember, we're walking
4650 backwards from the end of the block to the start). Since
4651 rtx_equal_p does not check the alias set or flags, we also
4652 must have the potential for them to conflict (anti_dependence). */
4653 for (temp = pbi->mem_set_list; temp != 0; temp = XEXP (temp, 1))
4654 if (anti_dependence (r, XEXP (temp, 0)))
4656 rtx mem = XEXP (temp, 0);
4658 if (rtx_equal_p (mem, r))
4661 /* Check if memory reference matches an auto increment. Only
4662 post increment/decrement or modify are valid. */
4663 if (GET_MODE (mem) == GET_MODE (r)
4664 && (GET_CODE (XEXP (mem, 0)) == POST_DEC
4665 || GET_CODE (XEXP (mem, 0)) == POST_INC
4666 || GET_CODE (XEXP (mem, 0)) == POST_MODIFY)
4667 && GET_MODE (XEXP (mem, 0)) == GET_MODE (r)
4668 && rtx_equal_p (XEXP (XEXP (mem, 0), 0), XEXP (r, 0)))
4675 while (GET_CODE (r) == SUBREG
4676 || GET_CODE (r) == STRICT_LOW_PART
4677 || GET_CODE (r) == ZERO_EXTRACT)
4680 if (GET_CODE (r) == REG)
4682 int regno = REGNO (r);
4685 if (REGNO_REG_SET_P (pbi->reg_live, regno))
4688 /* If this is a hard register, verify that subsequent
4689 words are not needed. */
4690 if (regno < FIRST_PSEUDO_REGISTER)
4692 int n = HARD_REGNO_NREGS (regno, GET_MODE (r));
4695 if (REGNO_REG_SET_P (pbi->reg_live, regno+n))
4699 /* Don't delete insns to set global regs. */
4700 if (regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
4703 /* Make sure insns to set the stack pointer aren't deleted. */
4704 if (regno == STACK_POINTER_REGNUM)
4707 /* ??? These bits might be redundant with the force live bits
4708 in calculate_global_regs_live. We would delete from
4709 sequential sets; whether this actually affects real code
4710 for anything but the stack pointer I don't know. */
4711 /* Make sure insns to set the frame pointer aren't deleted. */
4712 if (regno == FRAME_POINTER_REGNUM
4713 && (! reload_completed || frame_pointer_needed))
4715 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
4716 if (regno == HARD_FRAME_POINTER_REGNUM
4717 && (! reload_completed || frame_pointer_needed))
4721 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
4722 /* Make sure insns to set arg pointer are never deleted
4723 (if the arg pointer isn't fixed, there will be a USE
4724 for it, so we can treat it normally). */
4725 if (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
4729 /* Otherwise, the set is dead. */
4735 /* If performing several activities, insn is dead if each activity
4736 is individually dead. Also, CLOBBERs and USEs can be ignored; a
4737 CLOBBER or USE that's inside a PARALLEL doesn't make the insn
4739 else if (code == PARALLEL)
4741 int i = XVECLEN (x, 0);
4743 for (i--; i >= 0; i--)
4744 if (GET_CODE (XVECEXP (x, 0, i)) != CLOBBER
4745 && GET_CODE (XVECEXP (x, 0, i)) != USE
4746 && ! insn_dead_p (pbi, XVECEXP (x, 0, i), call_ok, NULL_RTX))
4752 /* A CLOBBER of a pseudo-register that is dead serves no purpose. That
4753 is not necessarily true for hard registers. */
4754 else if (code == CLOBBER && GET_CODE (XEXP (x, 0)) == REG
4755 && REGNO (XEXP (x, 0)) >= FIRST_PSEUDO_REGISTER
4756 && ! REGNO_REG_SET_P (pbi->reg_live, REGNO (XEXP (x, 0))))
4759 /* We do not check other CLOBBER or USE here. An insn consisting of just
4760 a CLOBBER or just a USE should not be deleted. */
4764 /* If INSN is the last insn in a libcall, and assuming INSN is dead,
4765 return 1 if the entire library call is dead.
4766 This is true if INSN copies a register (hard or pseudo)
4767 and if the hard return reg of the call insn is dead.
4768 (The caller should have tested the destination of the SET inside
4769 INSN already for death.)
4771 If this insn doesn't just copy a register, then we don't
4772 have an ordinary libcall. In that case, cse could not have
4773 managed to substitute the source for the dest later on,
4774 so we can assume the libcall is dead.
4776 PBI is the block info giving pseudoregs live before this insn.
4777 NOTE is the REG_RETVAL note of the insn. */
4780 libcall_dead_p (pbi, note, insn)
4781 struct propagate_block_info *pbi;
4785 rtx x = single_set (insn);
4789 register rtx r = SET_SRC (x);
4790 if (GET_CODE (r) == REG)
4792 rtx call = XEXP (note, 0);
4796 /* Find the call insn. */
4797 while (call != insn && GET_CODE (call) != CALL_INSN)
4798 call = NEXT_INSN (call);
4800 /* If there is none, do nothing special,
4801 since ordinary death handling can understand these insns. */
4805 /* See if the hard reg holding the value is dead.
4806 If this is a PARALLEL, find the call within it. */
4807 call_pat = PATTERN (call);
4808 if (GET_CODE (call_pat) == PARALLEL)
4810 for (i = XVECLEN (call_pat, 0) - 1; i >= 0; i--)
4811 if (GET_CODE (XVECEXP (call_pat, 0, i)) == SET
4812 && GET_CODE (SET_SRC (XVECEXP (call_pat, 0, i))) == CALL)
4815 /* This may be a library call that is returning a value
4816 via invisible pointer. Do nothing special, since
4817 ordinary death handling can understand these insns. */
4821 call_pat = XVECEXP (call_pat, 0, i);
4824 return insn_dead_p (pbi, call_pat, 1, REG_NOTES (call));
4830 /* Return 1 if register REGNO was used before it was set, i.e. if it is
4831 live at function entry. Don't count global register variables, variables
4832 in registers that can be used for function arg passing, or variables in
4833 fixed hard registers. */
4836 regno_uninitialized (regno)
4839 if (n_basic_blocks == 0
4840 || (regno < FIRST_PSEUDO_REGISTER
4841 && (global_regs[regno]
4842 || fixed_regs[regno]
4843 || FUNCTION_ARG_REGNO_P (regno))))
4846 return REGNO_REG_SET_P (BASIC_BLOCK (0)->global_live_at_start, regno);
4849 /* 1 if register REGNO was alive at a place where `setjmp' was called
4850 and was set more than once or is an argument.
4851 Such regs may be clobbered by `longjmp'. */
4854 regno_clobbered_at_setjmp (regno)
4857 if (n_basic_blocks == 0)
4860 return ((REG_N_SETS (regno) > 1
4861 || REGNO_REG_SET_P (BASIC_BLOCK (0)->global_live_at_start, regno))
4862 && REGNO_REG_SET_P (regs_live_at_setjmp, regno));
4865 /* INSN references memory, possibly using autoincrement addressing modes.
4866 Find any entries on the mem_set_list that need to be invalidated due
4867 to an address change. */
4870 invalidate_mems_from_autoinc (pbi, insn)
4871 struct propagate_block_info *pbi;
4874 rtx note = REG_NOTES (insn);
4875 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
4877 if (REG_NOTE_KIND (note) == REG_INC)
4879 rtx temp = pbi->mem_set_list;
4880 rtx prev = NULL_RTX;
4885 next = XEXP (temp, 1);
4886 if (reg_overlap_mentioned_p (XEXP (note, 0), XEXP (temp, 0)))
4888 /* Splice temp out of list. */
4890 XEXP (prev, 1) = next;
4892 pbi->mem_set_list = next;
4893 free_EXPR_LIST_node (temp);
4894 pbi->mem_set_list_len--;
4904 /* EXP is either a MEM or a REG. Remove any dependant entries
4905 from pbi->mem_set_list. */
4908 invalidate_mems_from_set (pbi, exp)
4909 struct propagate_block_info *pbi;
4912 rtx temp = pbi->mem_set_list;
4913 rtx prev = NULL_RTX;
4918 next = XEXP (temp, 1);
4919 if ((GET_CODE (exp) == MEM
4920 && output_dependence (XEXP (temp, 0), exp))
4921 || (GET_CODE (exp) == REG
4922 && reg_overlap_mentioned_p (exp, XEXP (temp, 0))))
4924 /* Splice this entry out of the list. */
4926 XEXP (prev, 1) = next;
4928 pbi->mem_set_list = next;
4929 free_EXPR_LIST_node (temp);
4930 pbi->mem_set_list_len--;
4938 /* Process the registers that are set within X. Their bits are set to
4939 1 in the regset DEAD, because they are dead prior to this insn.
4941 If INSN is nonzero, it is the insn being processed.
4943 FLAGS is the set of operations to perform. */
4946 mark_set_regs (pbi, x, insn)
4947 struct propagate_block_info *pbi;
4950 rtx cond = NULL_RTX;
4955 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
4957 if (REG_NOTE_KIND (link) == REG_INC)
4958 mark_set_1 (pbi, SET, XEXP (link, 0),
4959 (GET_CODE (x) == COND_EXEC
4960 ? COND_EXEC_TEST (x) : NULL_RTX),
4964 switch (code = GET_CODE (x))
4968 mark_set_1 (pbi, code, SET_DEST (x), cond, insn, pbi->flags);
4972 cond = COND_EXEC_TEST (x);
4973 x = COND_EXEC_CODE (x);
4979 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
4981 rtx sub = XVECEXP (x, 0, i);
4982 switch (code = GET_CODE (sub))
4985 if (cond != NULL_RTX)
4988 cond = COND_EXEC_TEST (sub);
4989 sub = COND_EXEC_CODE (sub);
4990 if (GET_CODE (sub) != SET && GET_CODE (sub) != CLOBBER)
4996 mark_set_1 (pbi, code, SET_DEST (sub), cond, insn, pbi->flags);
5011 /* Process a single set, which appears in INSN. REG (which may not
5012 actually be a REG, it may also be a SUBREG, PARALLEL, etc.) is
5013 being set using the CODE (which may be SET, CLOBBER, or COND_EXEC).
5014 If the set is conditional (because it appear in a COND_EXEC), COND
5015 will be the condition. */
5018 mark_set_1 (pbi, code, reg, cond, insn, flags)
5019 struct propagate_block_info *pbi;
5021 rtx reg, cond, insn;
5024 int regno_first = -1, regno_last = -1;
5025 unsigned long not_dead = 0;
5028 /* Modifying just one hardware register of a multi-reg value or just a
5029 byte field of a register does not mean the value from before this insn
5030 is now dead. Of course, if it was dead after it's unused now. */
5032 switch (GET_CODE (reg))
5035 /* Some targets place small structures in registers for return values of
5036 functions. We have to detect this case specially here to get correct
5037 flow information. */
5038 for (i = XVECLEN (reg, 0) - 1; i >= 0; i--)
5039 if (XEXP (XVECEXP (reg, 0, i), 0) != 0)
5040 mark_set_1 (pbi, code, XEXP (XVECEXP (reg, 0, i), 0), cond, insn,
5046 case STRICT_LOW_PART:
5047 /* ??? Assumes STRICT_LOW_PART not used on multi-word registers. */
5049 reg = XEXP (reg, 0);
5050 while (GET_CODE (reg) == SUBREG
5051 || GET_CODE (reg) == ZERO_EXTRACT
5052 || GET_CODE (reg) == SIGN_EXTRACT
5053 || GET_CODE (reg) == STRICT_LOW_PART);
5054 if (GET_CODE (reg) == MEM)
5056 not_dead = (unsigned long) REGNO_REG_SET_P (pbi->reg_live, REGNO (reg));
5060 regno_last = regno_first = REGNO (reg);
5061 if (regno_first < FIRST_PSEUDO_REGISTER)
5062 regno_last += HARD_REGNO_NREGS (regno_first, GET_MODE (reg)) - 1;
5066 if (GET_CODE (SUBREG_REG (reg)) == REG)
5068 enum machine_mode outer_mode = GET_MODE (reg);
5069 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (reg));
5071 /* Identify the range of registers affected. This is moderately
5072 tricky for hard registers. See alter_subreg. */
5074 regno_last = regno_first = REGNO (SUBREG_REG (reg));
5075 if (regno_first < FIRST_PSEUDO_REGISTER)
5077 regno_first += subreg_regno_offset (regno_first, inner_mode,
5080 regno_last = (regno_first
5081 + HARD_REGNO_NREGS (regno_first, outer_mode) - 1);
5083 /* Since we've just adjusted the register number ranges, make
5084 sure REG matches. Otherwise some_was_live will be clear
5085 when it shouldn't have been, and we'll create incorrect
5086 REG_UNUSED notes. */
5087 reg = gen_rtx_REG (outer_mode, regno_first);
5091 /* If the number of words in the subreg is less than the number
5092 of words in the full register, we have a well-defined partial
5093 set. Otherwise the high bits are undefined.
5095 This is only really applicable to pseudos, since we just took
5096 care of multi-word hard registers. */
5097 if (((GET_MODE_SIZE (outer_mode)
5098 + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
5099 < ((GET_MODE_SIZE (inner_mode)
5100 + UNITS_PER_WORD - 1) / UNITS_PER_WORD))
5101 not_dead = (unsigned long) REGNO_REG_SET_P (pbi->reg_live,
5104 reg = SUBREG_REG (reg);
5108 reg = SUBREG_REG (reg);
5115 /* If this set is a MEM, then it kills any aliased writes.
5116 If this set is a REG, then it kills any MEMs which use the reg. */
5117 if (optimize && (flags & PROP_SCAN_DEAD_CODE))
5119 if (GET_CODE (reg) == MEM || GET_CODE (reg) == REG)
5120 invalidate_mems_from_set (pbi, reg);
5122 /* If the memory reference had embedded side effects (autoincrement
5123 address modes. Then we may need to kill some entries on the
5125 if (insn && GET_CODE (reg) == MEM)
5126 invalidate_mems_from_autoinc (pbi, insn);
5128 if (pbi->mem_set_list_len < MAX_MEM_SET_LIST_LEN
5129 && GET_CODE (reg) == MEM && ! side_effects_p (reg)
5130 /* ??? With more effort we could track conditional memory life. */
5132 /* We do not know the size of a BLKmode store, so we do not track
5133 them for redundant store elimination. */
5134 && GET_MODE (reg) != BLKmode
5135 /* There are no REG_INC notes for SP, so we can't assume we'll see
5136 everything that invalidates it. To be safe, don't eliminate any
5137 stores though SP; none of them should be redundant anyway. */
5138 && ! reg_mentioned_p (stack_pointer_rtx, reg))
5141 /* Store a copy of mem, otherwise the address may be
5142 scrogged by find_auto_inc. */
5143 if (flags & PROP_AUTOINC)
5144 reg = shallow_copy_rtx (reg);
5146 pbi->mem_set_list = alloc_EXPR_LIST (0, reg, pbi->mem_set_list);
5147 pbi->mem_set_list_len++;
5151 if (GET_CODE (reg) == REG
5152 && ! (regno_first == FRAME_POINTER_REGNUM
5153 && (! reload_completed || frame_pointer_needed))
5154 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
5155 && ! (regno_first == HARD_FRAME_POINTER_REGNUM
5156 && (! reload_completed || frame_pointer_needed))
5158 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
5159 && ! (regno_first == ARG_POINTER_REGNUM && fixed_regs[regno_first])
5163 int some_was_live = 0, some_was_dead = 0;
5165 for (i = regno_first; i <= regno_last; ++i)
5167 int needed_regno = REGNO_REG_SET_P (pbi->reg_live, i);
5170 /* Order of the set operation matters here since both
5171 sets may be the same. */
5172 CLEAR_REGNO_REG_SET (pbi->cond_local_set, i);
5173 if (cond != NULL_RTX
5174 && ! REGNO_REG_SET_P (pbi->local_set, i))
5175 SET_REGNO_REG_SET (pbi->cond_local_set, i);
5177 SET_REGNO_REG_SET (pbi->local_set, i);
5179 if (code != CLOBBER)
5180 SET_REGNO_REG_SET (pbi->new_set, i);
5182 some_was_live |= needed_regno;
5183 some_was_dead |= ! needed_regno;
5186 #ifdef HAVE_conditional_execution
5187 /* Consider conditional death in deciding that the register needs
5189 if (some_was_live && ! not_dead
5190 /* The stack pointer is never dead. Well, not strictly true,
5191 but it's very difficult to tell from here. Hopefully
5192 combine_stack_adjustments will fix up the most egregious
5194 && regno_first != STACK_POINTER_REGNUM)
5196 for (i = regno_first; i <= regno_last; ++i)
5197 if (! mark_regno_cond_dead (pbi, i, cond))
5198 not_dead |= ((unsigned long) 1) << (i - regno_first);
5202 /* Additional data to record if this is the final pass. */
5203 if (flags & (PROP_LOG_LINKS | PROP_REG_INFO
5204 | PROP_DEATH_NOTES | PROP_AUTOINC))
5207 register int blocknum = pbi->bb->index;
5210 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
5212 y = pbi->reg_next_use[regno_first];
5214 /* The next use is no longer next, since a store intervenes. */
5215 for (i = regno_first; i <= regno_last; ++i)
5216 pbi->reg_next_use[i] = 0;
5219 if (flags & PROP_REG_INFO)
5221 for (i = regno_first; i <= regno_last; ++i)
5223 /* Count (weighted) references, stores, etc. This counts a
5224 register twice if it is modified, but that is correct. */
5225 REG_N_SETS (i) += 1;
5226 REG_N_REFS (i) += 1;
5227 REG_FREQ (i) += (optimize_size || !pbi->bb->frequency
5228 ? 1 : pbi->bb->frequency);
5230 /* The insns where a reg is live are normally counted
5231 elsewhere, but we want the count to include the insn
5232 where the reg is set, and the normal counting mechanism
5233 would not count it. */
5234 REG_LIVE_LENGTH (i) += 1;
5237 /* If this is a hard reg, record this function uses the reg. */
5238 if (regno_first < FIRST_PSEUDO_REGISTER)
5240 for (i = regno_first; i <= regno_last; i++)
5241 regs_ever_live[i] = 1;
5245 /* Keep track of which basic blocks each reg appears in. */
5246 if (REG_BASIC_BLOCK (regno_first) == REG_BLOCK_UNKNOWN)
5247 REG_BASIC_BLOCK (regno_first) = blocknum;
5248 else if (REG_BASIC_BLOCK (regno_first) != blocknum)
5249 REG_BASIC_BLOCK (regno_first) = REG_BLOCK_GLOBAL;
5253 if (! some_was_dead)
5255 if (flags & PROP_LOG_LINKS)
5257 /* Make a logical link from the next following insn
5258 that uses this register, back to this insn.
5259 The following insns have already been processed.
5261 We don't build a LOG_LINK for hard registers containing
5262 in ASM_OPERANDs. If these registers get replaced,
5263 we might wind up changing the semantics of the insn,
5264 even if reload can make what appear to be valid
5265 assignments later. */
5266 if (y && (BLOCK_NUM (y) == blocknum)
5267 && (regno_first >= FIRST_PSEUDO_REGISTER
5268 || asm_noperands (PATTERN (y)) < 0))
5269 LOG_LINKS (y) = alloc_INSN_LIST (insn, LOG_LINKS (y));
5274 else if (! some_was_live)
5276 if (flags & PROP_REG_INFO)
5277 REG_N_DEATHS (regno_first) += 1;
5279 if (flags & PROP_DEATH_NOTES)
5281 /* Note that dead stores have already been deleted
5282 when possible. If we get here, we have found a
5283 dead store that cannot be eliminated (because the
5284 same insn does something useful). Indicate this
5285 by marking the reg being set as dying here. */
5287 = alloc_EXPR_LIST (REG_UNUSED, reg, REG_NOTES (insn));
5292 if (flags & PROP_DEATH_NOTES)
5294 /* This is a case where we have a multi-word hard register
5295 and some, but not all, of the words of the register are
5296 needed in subsequent insns. Write REG_UNUSED notes
5297 for those parts that were not needed. This case should
5300 for (i = regno_first; i <= regno_last; ++i)
5301 if (! REGNO_REG_SET_P (pbi->reg_live, i))
5303 = alloc_EXPR_LIST (REG_UNUSED,
5304 gen_rtx_REG (reg_raw_mode[i], i),
5310 /* Mark the register as being dead. */
5312 /* The stack pointer is never dead. Well, not strictly true,
5313 but it's very difficult to tell from here. Hopefully
5314 combine_stack_adjustments will fix up the most egregious
5316 && regno_first != STACK_POINTER_REGNUM)
5318 for (i = regno_first; i <= regno_last; ++i)
5319 if (!(not_dead & (((unsigned long) 1) << (i - regno_first))))
5320 CLEAR_REGNO_REG_SET (pbi->reg_live, i);
5323 else if (GET_CODE (reg) == REG)
5325 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
5326 pbi->reg_next_use[regno_first] = 0;
5329 /* If this is the last pass and this is a SCRATCH, show it will be dying
5330 here and count it. */
5331 else if (GET_CODE (reg) == SCRATCH)
5333 if (flags & PROP_DEATH_NOTES)
5335 = alloc_EXPR_LIST (REG_UNUSED, reg, REG_NOTES (insn));
5339 #ifdef HAVE_conditional_execution
5340 /* Mark REGNO conditionally dead.
5341 Return true if the register is now unconditionally dead. */
5344 mark_regno_cond_dead (pbi, regno, cond)
5345 struct propagate_block_info *pbi;
5349 /* If this is a store to a predicate register, the value of the
5350 predicate is changing, we don't know that the predicate as seen
5351 before is the same as that seen after. Flush all dependent
5352 conditions from reg_cond_dead. This will make all such
5353 conditionally live registers unconditionally live. */
5354 if (REGNO_REG_SET_P (pbi->reg_cond_reg, regno))
5355 flush_reg_cond_reg (pbi, regno);
5357 /* If this is an unconditional store, remove any conditional
5358 life that may have existed. */
5359 if (cond == NULL_RTX)
5360 splay_tree_remove (pbi->reg_cond_dead, regno);
5363 splay_tree_node node;
5364 struct reg_cond_life_info *rcli;
5367 /* Otherwise this is a conditional set. Record that fact.
5368 It may have been conditionally used, or there may be a
5369 subsequent set with a complimentary condition. */
5371 node = splay_tree_lookup (pbi->reg_cond_dead, regno);
5374 /* The register was unconditionally live previously.
5375 Record the current condition as the condition under
5376 which it is dead. */
5377 rcli = (struct reg_cond_life_info *) xmalloc (sizeof (*rcli));
5378 rcli->condition = cond;
5379 rcli->stores = cond;
5380 rcli->orig_condition = const0_rtx;
5381 splay_tree_insert (pbi->reg_cond_dead, regno,
5382 (splay_tree_value) rcli);
5384 SET_REGNO_REG_SET (pbi->reg_cond_reg, REGNO (XEXP (cond, 0)));
5386 /* Not unconditionaly dead. */
5391 /* The register was conditionally live previously.
5392 Add the new condition to the old. */
5393 rcli = (struct reg_cond_life_info *) node->value;
5394 ncond = rcli->condition;
5395 ncond = ior_reg_cond (ncond, cond, 1);
5396 if (rcli->stores == const0_rtx)
5397 rcli->stores = cond;
5398 else if (rcli->stores != const1_rtx)
5399 rcli->stores = ior_reg_cond (rcli->stores, cond, 1);
5401 /* If the register is now unconditionally dead, remove the entry
5402 in the splay_tree. A register is unconditionally dead if the
5403 dead condition ncond is true. A register is also unconditionally
5404 dead if the sum of all conditional stores is an unconditional
5405 store (stores is true), and the dead condition is identically the
5406 same as the original dead condition initialized at the end of
5407 the block. This is a pointer compare, not an rtx_equal_p
5409 if (ncond == const1_rtx
5410 || (ncond == rcli->orig_condition && rcli->stores == const1_rtx))
5411 splay_tree_remove (pbi->reg_cond_dead, regno);
5414 rcli->condition = ncond;
5416 SET_REGNO_REG_SET (pbi->reg_cond_reg, REGNO (XEXP (cond, 0)));
5418 /* Not unconditionaly dead. */
5427 /* Called from splay_tree_delete for pbi->reg_cond_life. */
5430 free_reg_cond_life_info (value)
5431 splay_tree_value value;
5433 struct reg_cond_life_info *rcli = (struct reg_cond_life_info *) value;
5437 /* Helper function for flush_reg_cond_reg. */
5440 flush_reg_cond_reg_1 (node, data)
5441 splay_tree_node node;
5444 struct reg_cond_life_info *rcli;
5445 int *xdata = (int *) data;
5446 unsigned int regno = xdata[0];
5448 /* Don't need to search if last flushed value was farther on in
5449 the in-order traversal. */
5450 if (xdata[1] >= (int) node->key)
5453 /* Splice out portions of the expression that refer to regno. */
5454 rcli = (struct reg_cond_life_info *) node->value;
5455 rcli->condition = elim_reg_cond (rcli->condition, regno);
5456 if (rcli->stores != const0_rtx && rcli->stores != const1_rtx)
5457 rcli->stores = elim_reg_cond (rcli->stores, regno);
5459 /* If the entire condition is now false, signal the node to be removed. */
5460 if (rcli->condition == const0_rtx)
5462 xdata[1] = node->key;
5465 else if (rcli->condition == const1_rtx)
5471 /* Flush all (sub) expressions referring to REGNO from REG_COND_LIVE. */
5474 flush_reg_cond_reg (pbi, regno)
5475 struct propagate_block_info *pbi;
5482 while (splay_tree_foreach (pbi->reg_cond_dead,
5483 flush_reg_cond_reg_1, pair) == -1)
5484 splay_tree_remove (pbi->reg_cond_dead, pair[1]);
5486 CLEAR_REGNO_REG_SET (pbi->reg_cond_reg, regno);
5489 /* Logical arithmetic on predicate conditions. IOR, NOT and AND.
5490 For ior/and, the ADD flag determines whether we want to add the new
5491 condition X to the old one unconditionally. If it is zero, we will
5492 only return a new expression if X allows us to simplify part of
5493 OLD, otherwise we return OLD unchanged to the caller.
5494 If ADD is nonzero, we will return a new condition in all cases. The
5495 toplevel caller of one of these functions should always pass 1 for
5499 ior_reg_cond (old, x, add)
5505 if (GET_RTX_CLASS (GET_CODE (old)) == '<')
5507 if (GET_RTX_CLASS (GET_CODE (x)) == '<'
5508 && REVERSE_CONDEXEC_PREDICATES_P (GET_CODE (x), GET_CODE (old))
5509 && REGNO (XEXP (x, 0)) == REGNO (XEXP (old, 0)))
5511 if (GET_CODE (x) == GET_CODE (old)
5512 && REGNO (XEXP (x, 0)) == REGNO (XEXP (old, 0)))
5516 return gen_rtx_IOR (0, old, x);
5519 switch (GET_CODE (old))
5522 op0 = ior_reg_cond (XEXP (old, 0), x, 0);
5523 op1 = ior_reg_cond (XEXP (old, 1), x, 0);
5524 if (op0 != XEXP (old, 0) || op1 != XEXP (old, 1))
5526 if (op0 == const0_rtx)
5528 if (op1 == const0_rtx)
5530 if (op0 == const1_rtx || op1 == const1_rtx)
5532 if (op0 == XEXP (old, 0))
5533 op0 = gen_rtx_IOR (0, op0, x);
5535 op1 = gen_rtx_IOR (0, op1, x);
5536 return gen_rtx_IOR (0, op0, op1);
5540 return gen_rtx_IOR (0, old, x);
5543 op0 = ior_reg_cond (XEXP (old, 0), x, 0);
5544 op1 = ior_reg_cond (XEXP (old, 1), x, 0);
5545 if (op0 != XEXP (old, 0) || op1 != XEXP (old, 1))
5547 if (op0 == const1_rtx)
5549 if (op1 == const1_rtx)
5551 if (op0 == const0_rtx || op1 == const0_rtx)
5553 if (op0 == XEXP (old, 0))
5554 op0 = gen_rtx_IOR (0, op0, x);
5556 op1 = gen_rtx_IOR (0, op1, x);
5557 return gen_rtx_AND (0, op0, op1);
5561 return gen_rtx_IOR (0, old, x);
5564 op0 = and_reg_cond (XEXP (old, 0), not_reg_cond (x), 0);
5565 if (op0 != XEXP (old, 0))
5566 return not_reg_cond (op0);
5569 return gen_rtx_IOR (0, old, x);
5580 enum rtx_code x_code;
5582 if (x == const0_rtx)
5584 else if (x == const1_rtx)
5586 x_code = GET_CODE (x);
5589 if (GET_RTX_CLASS (x_code) == '<'
5590 && GET_CODE (XEXP (x, 0)) == REG)
5592 if (XEXP (x, 1) != const0_rtx)
5595 return gen_rtx_fmt_ee (reverse_condition (x_code),
5596 VOIDmode, XEXP (x, 0), const0_rtx);
5598 return gen_rtx_NOT (0, x);
5602 and_reg_cond (old, x, add)
5608 if (GET_RTX_CLASS (GET_CODE (old)) == '<')
5610 if (GET_RTX_CLASS (GET_CODE (x)) == '<'
5611 && GET_CODE (x) == reverse_condition (GET_CODE (old))
5612 && REGNO (XEXP (x, 0)) == REGNO (XEXP (old, 0)))
5614 if (GET_CODE (x) == GET_CODE (old)
5615 && REGNO (XEXP (x, 0)) == REGNO (XEXP (old, 0)))
5619 return gen_rtx_AND (0, old, x);
5622 switch (GET_CODE (old))
5625 op0 = and_reg_cond (XEXP (old, 0), x, 0);
5626 op1 = and_reg_cond (XEXP (old, 1), x, 0);
5627 if (op0 != XEXP (old, 0) || op1 != XEXP (old, 1))
5629 if (op0 == const0_rtx)
5631 if (op1 == const0_rtx)
5633 if (op0 == const1_rtx || op1 == const1_rtx)
5635 if (op0 == XEXP (old, 0))
5636 op0 = gen_rtx_AND (0, op0, x);
5638 op1 = gen_rtx_AND (0, op1, x);
5639 return gen_rtx_IOR (0, op0, op1);
5643 return gen_rtx_AND (0, old, x);
5646 op0 = and_reg_cond (XEXP (old, 0), x, 0);
5647 op1 = and_reg_cond (XEXP (old, 1), x, 0);
5648 if (op0 != XEXP (old, 0) || op1 != XEXP (old, 1))
5650 if (op0 == const1_rtx)
5652 if (op1 == const1_rtx)
5654 if (op0 == const0_rtx || op1 == const0_rtx)
5656 if (op0 == XEXP (old, 0))
5657 op0 = gen_rtx_AND (0, op0, x);
5659 op1 = gen_rtx_AND (0, op1, x);
5660 return gen_rtx_AND (0, op0, op1);
5665 /* If X is identical to one of the existing terms of the AND,
5666 then just return what we already have. */
5667 /* ??? There really should be some sort of recursive check here in
5668 case there are nested ANDs. */
5669 if ((GET_CODE (XEXP (old, 0)) == GET_CODE (x)
5670 && REGNO (XEXP (XEXP (old, 0), 0)) == REGNO (XEXP (x, 0)))
5671 || (GET_CODE (XEXP (old, 1)) == GET_CODE (x)
5672 && REGNO (XEXP (XEXP (old, 1), 0)) == REGNO (XEXP (x, 0))))
5675 return gen_rtx_AND (0, old, x);
5678 op0 = ior_reg_cond (XEXP (old, 0), not_reg_cond (x), 0);
5679 if (op0 != XEXP (old, 0))
5680 return not_reg_cond (op0);
5683 return gen_rtx_AND (0, old, x);
5690 /* Given a condition X, remove references to reg REGNO and return the
5691 new condition. The removal will be done so that all conditions
5692 involving REGNO are considered to evaluate to false. This function
5693 is used when the value of REGNO changes. */
5696 elim_reg_cond (x, regno)
5702 if (GET_RTX_CLASS (GET_CODE (x)) == '<')
5704 if (REGNO (XEXP (x, 0)) == regno)
5709 switch (GET_CODE (x))
5712 op0 = elim_reg_cond (XEXP (x, 0), regno);
5713 op1 = elim_reg_cond (XEXP (x, 1), regno);
5714 if (op0 == const0_rtx || op1 == const0_rtx)
5716 if (op0 == const1_rtx)
5718 if (op1 == const1_rtx)
5720 if (op0 == XEXP (x, 0) && op1 == XEXP (x, 1))
5722 return gen_rtx_AND (0, op0, op1);
5725 op0 = elim_reg_cond (XEXP (x, 0), regno);
5726 op1 = elim_reg_cond (XEXP (x, 1), regno);
5727 if (op0 == const1_rtx || op1 == const1_rtx)
5729 if (op0 == const0_rtx)
5731 if (op1 == const0_rtx)
5733 if (op0 == XEXP (x, 0) && op1 == XEXP (x, 1))
5735 return gen_rtx_IOR (0, op0, op1);
5738 op0 = elim_reg_cond (XEXP (x, 0), regno);
5739 if (op0 == const0_rtx)
5741 if (op0 == const1_rtx)
5743 if (op0 != XEXP (x, 0))
5744 return not_reg_cond (op0);
5751 #endif /* HAVE_conditional_execution */
5755 /* Try to substitute the auto-inc expression INC as the address inside
5756 MEM which occurs in INSN. Currently, the address of MEM is an expression
5757 involving INCR_REG, and INCR is the next use of INCR_REG; it is an insn
5758 that has a single set whose source is a PLUS of INCR_REG and something
5762 attempt_auto_inc (pbi, inc, insn, mem, incr, incr_reg)
5763 struct propagate_block_info *pbi;
5764 rtx inc, insn, mem, incr, incr_reg;
5766 int regno = REGNO (incr_reg);
5767 rtx set = single_set (incr);
5768 rtx q = SET_DEST (set);
5769 rtx y = SET_SRC (set);
5770 int opnum = XEXP (y, 0) == incr_reg ? 0 : 1;
5772 /* Make sure this reg appears only once in this insn. */
5773 if (count_occurrences (PATTERN (insn), incr_reg, 1) != 1)
5776 if (dead_or_set_p (incr, incr_reg)
5777 /* Mustn't autoinc an eliminable register. */
5778 && (regno >= FIRST_PSEUDO_REGISTER
5779 || ! TEST_HARD_REG_BIT (elim_reg_set, regno)))
5781 /* This is the simple case. Try to make the auto-inc. If
5782 we can't, we are done. Otherwise, we will do any
5783 needed updates below. */
5784 if (! validate_change (insn, &XEXP (mem, 0), inc, 0))
5787 else if (GET_CODE (q) == REG
5788 /* PREV_INSN used here to check the semi-open interval
5790 && ! reg_used_between_p (q, PREV_INSN (insn), incr)
5791 /* We must also check for sets of q as q may be
5792 a call clobbered hard register and there may
5793 be a call between PREV_INSN (insn) and incr. */
5794 && ! reg_set_between_p (q, PREV_INSN (insn), incr))
5796 /* We have *p followed sometime later by q = p+size.
5797 Both p and q must be live afterward,
5798 and q is not used between INSN and its assignment.
5799 Change it to q = p, ...*q..., q = q+size.
5800 Then fall into the usual case. */
5804 emit_move_insn (q, incr_reg);
5805 insns = get_insns ();
5808 if (basic_block_for_insn)
5809 for (temp = insns; temp; temp = NEXT_INSN (temp))
5810 set_block_for_insn (temp, pbi->bb);
5812 /* If we can't make the auto-inc, or can't make the
5813 replacement into Y, exit. There's no point in making
5814 the change below if we can't do the auto-inc and doing
5815 so is not correct in the pre-inc case. */
5818 validate_change (insn, &XEXP (mem, 0), inc, 1);
5819 validate_change (incr, &XEXP (y, opnum), q, 1);
5820 if (! apply_change_group ())
5823 /* We now know we'll be doing this change, so emit the
5824 new insn(s) and do the updates. */
5825 emit_insns_before (insns, insn);
5827 if (pbi->bb->head == insn)
5828 pbi->bb->head = insns;
5830 /* INCR will become a NOTE and INSN won't contain a
5831 use of INCR_REG. If a use of INCR_REG was just placed in
5832 the insn before INSN, make that the next use.
5833 Otherwise, invalidate it. */
5834 if (GET_CODE (PREV_INSN (insn)) == INSN
5835 && GET_CODE (PATTERN (PREV_INSN (insn))) == SET
5836 && SET_SRC (PATTERN (PREV_INSN (insn))) == incr_reg)
5837 pbi->reg_next_use[regno] = PREV_INSN (insn);
5839 pbi->reg_next_use[regno] = 0;
5844 /* REGNO is now used in INCR which is below INSN, but
5845 it previously wasn't live here. If we don't mark
5846 it as live, we'll put a REG_DEAD note for it
5847 on this insn, which is incorrect. */
5848 SET_REGNO_REG_SET (pbi->reg_live, regno);
5850 /* If there are any calls between INSN and INCR, show
5851 that REGNO now crosses them. */
5852 for (temp = insn; temp != incr; temp = NEXT_INSN (temp))
5853 if (GET_CODE (temp) == CALL_INSN)
5854 REG_N_CALLS_CROSSED (regno)++;
5859 /* If we haven't returned, it means we were able to make the
5860 auto-inc, so update the status. First, record that this insn
5861 has an implicit side effect. */
5863 REG_NOTES (insn) = alloc_EXPR_LIST (REG_INC, incr_reg, REG_NOTES (insn));
5865 /* Modify the old increment-insn to simply copy
5866 the already-incremented value of our register. */
5867 if (! validate_change (incr, &SET_SRC (set), incr_reg, 0))
5870 /* If that makes it a no-op (copying the register into itself) delete
5871 it so it won't appear to be a "use" and a "set" of this
5873 if (REGNO (SET_DEST (set)) == REGNO (incr_reg))
5875 /* If the original source was dead, it's dead now. */
5878 while ((note = find_reg_note (incr, REG_DEAD, NULL_RTX)) != NULL_RTX)
5880 remove_note (incr, note);
5881 if (XEXP (note, 0) != incr_reg)
5882 CLEAR_REGNO_REG_SET (pbi->reg_live, REGNO (XEXP (note, 0)));
5885 PUT_CODE (incr, NOTE);
5886 NOTE_LINE_NUMBER (incr) = NOTE_INSN_DELETED;
5887 NOTE_SOURCE_FILE (incr) = 0;
5890 if (regno >= FIRST_PSEUDO_REGISTER)
5892 /* Count an extra reference to the reg. When a reg is
5893 incremented, spilling it is worse, so we want to make
5894 that less likely. */
5895 REG_FREQ (regno) += (optimize_size || !pbi->bb->frequency
5896 ? 1 : pbi->bb->frequency);
5898 /* Count the increment as a setting of the register,
5899 even though it isn't a SET in rtl. */
5900 REG_N_SETS (regno)++;
5904 /* X is a MEM found in INSN. See if we can convert it into an auto-increment
5908 find_auto_inc (pbi, x, insn)
5909 struct propagate_block_info *pbi;
5913 rtx addr = XEXP (x, 0);
5914 HOST_WIDE_INT offset = 0;
5915 rtx set, y, incr, inc_val;
5917 int size = GET_MODE_SIZE (GET_MODE (x));
5919 if (GET_CODE (insn) == JUMP_INSN)
5922 /* Here we detect use of an index register which might be good for
5923 postincrement, postdecrement, preincrement, or predecrement. */
5925 if (GET_CODE (addr) == PLUS && GET_CODE (XEXP (addr, 1)) == CONST_INT)
5926 offset = INTVAL (XEXP (addr, 1)), addr = XEXP (addr, 0);
5928 if (GET_CODE (addr) != REG)
5931 regno = REGNO (addr);
5933 /* Is the next use an increment that might make auto-increment? */
5934 incr = pbi->reg_next_use[regno];
5935 if (incr == 0 || BLOCK_NUM (incr) != BLOCK_NUM (insn))
5937 set = single_set (incr);
5938 if (set == 0 || GET_CODE (set) != SET)
5942 if (GET_CODE (y) != PLUS)
5945 if (REG_P (XEXP (y, 0)) && REGNO (XEXP (y, 0)) == REGNO (addr))
5946 inc_val = XEXP (y, 1);
5947 else if (REG_P (XEXP (y, 1)) && REGNO (XEXP (y, 1)) == REGNO (addr))
5948 inc_val = XEXP (y, 0);
5952 if (GET_CODE (inc_val) == CONST_INT)
5954 if (HAVE_POST_INCREMENT
5955 && (INTVAL (inc_val) == size && offset == 0))
5956 attempt_auto_inc (pbi, gen_rtx_POST_INC (Pmode, addr), insn, x,
5958 else if (HAVE_POST_DECREMENT
5959 && (INTVAL (inc_val) == -size && offset == 0))
5960 attempt_auto_inc (pbi, gen_rtx_POST_DEC (Pmode, addr), insn, x,
5962 else if (HAVE_PRE_INCREMENT
5963 && (INTVAL (inc_val) == size && offset == size))
5964 attempt_auto_inc (pbi, gen_rtx_PRE_INC (Pmode, addr), insn, x,
5966 else if (HAVE_PRE_DECREMENT
5967 && (INTVAL (inc_val) == -size && offset == -size))
5968 attempt_auto_inc (pbi, gen_rtx_PRE_DEC (Pmode, addr), insn, x,
5970 else if (HAVE_POST_MODIFY_DISP && offset == 0)
5971 attempt_auto_inc (pbi, gen_rtx_POST_MODIFY (Pmode, addr,
5972 gen_rtx_PLUS (Pmode,
5975 insn, x, incr, addr);
5977 else if (GET_CODE (inc_val) == REG
5978 && ! reg_set_between_p (inc_val, PREV_INSN (insn),
5982 if (HAVE_POST_MODIFY_REG && offset == 0)
5983 attempt_auto_inc (pbi, gen_rtx_POST_MODIFY (Pmode, addr,
5984 gen_rtx_PLUS (Pmode,
5987 insn, x, incr, addr);
5991 #endif /* AUTO_INC_DEC */
5994 mark_used_reg (pbi, reg, cond, insn)
5995 struct propagate_block_info *pbi;
5997 rtx cond ATTRIBUTE_UNUSED;
6000 unsigned int regno_first, regno_last, i;
6001 int some_was_live, some_was_dead, some_not_set;
6003 regno_last = regno_first = REGNO (reg);
6004 if (regno_first < FIRST_PSEUDO_REGISTER)
6005 regno_last += HARD_REGNO_NREGS (regno_first, GET_MODE (reg)) - 1;
6007 /* Find out if any of this register is live after this instruction. */
6008 some_was_live = some_was_dead = 0;
6009 for (i = regno_first; i <= regno_last; ++i)
6011 int needed_regno = REGNO_REG_SET_P (pbi->reg_live, i);
6012 some_was_live |= needed_regno;
6013 some_was_dead |= ! needed_regno;
6016 /* Find out if any of the register was set this insn. */
6018 for (i = regno_first; i <= regno_last; ++i)
6019 some_not_set |= ! REGNO_REG_SET_P (pbi->new_set, i);
6021 if (pbi->flags & (PROP_LOG_LINKS | PROP_AUTOINC))
6023 /* Record where each reg is used, so when the reg is set we know
6024 the next insn that uses it. */
6025 pbi->reg_next_use[regno_first] = insn;
6028 if (pbi->flags & PROP_REG_INFO)
6030 if (regno_first < FIRST_PSEUDO_REGISTER)
6032 /* If this is a register we are going to try to eliminate,
6033 don't mark it live here. If we are successful in
6034 eliminating it, it need not be live unless it is used for
6035 pseudos, in which case it will have been set live when it
6036 was allocated to the pseudos. If the register will not
6037 be eliminated, reload will set it live at that point.
6039 Otherwise, record that this function uses this register. */
6040 /* ??? The PPC backend tries to "eliminate" on the pic
6041 register to itself. This should be fixed. In the mean
6042 time, hack around it. */
6044 if (! (TEST_HARD_REG_BIT (elim_reg_set, regno_first)
6045 && (regno_first == FRAME_POINTER_REGNUM
6046 || regno_first == ARG_POINTER_REGNUM)))
6047 for (i = regno_first; i <= regno_last; ++i)
6048 regs_ever_live[i] = 1;
6052 /* Keep track of which basic block each reg appears in. */
6054 register int blocknum = pbi->bb->index;
6055 if (REG_BASIC_BLOCK (regno_first) == REG_BLOCK_UNKNOWN)
6056 REG_BASIC_BLOCK (regno_first) = blocknum;
6057 else if (REG_BASIC_BLOCK (regno_first) != blocknum)
6058 REG_BASIC_BLOCK (regno_first) = REG_BLOCK_GLOBAL;
6060 /* Count (weighted) number of uses of each reg. */
6061 REG_FREQ (regno_first)
6062 += (optimize_size || !pbi->bb->frequency ? 1 : pbi->bb->frequency);
6063 REG_N_REFS (regno_first)++;
6067 /* Record and count the insns in which a reg dies. If it is used in
6068 this insn and was dead below the insn then it dies in this insn.
6069 If it was set in this insn, we do not make a REG_DEAD note;
6070 likewise if we already made such a note. */
6071 if ((pbi->flags & (PROP_DEATH_NOTES | PROP_REG_INFO))
6075 /* Check for the case where the register dying partially
6076 overlaps the register set by this insn. */
6077 if (regno_first != regno_last)
6078 for (i = regno_first; i <= regno_last; ++i)
6079 some_was_live |= REGNO_REG_SET_P (pbi->new_set, i);
6081 /* If none of the words in X is needed, make a REG_DEAD note.
6082 Otherwise, we must make partial REG_DEAD notes. */
6083 if (! some_was_live)
6085 if ((pbi->flags & PROP_DEATH_NOTES)
6086 && ! find_regno_note (insn, REG_DEAD, regno_first))
6088 = alloc_EXPR_LIST (REG_DEAD, reg, REG_NOTES (insn));
6090 if (pbi->flags & PROP_REG_INFO)
6091 REG_N_DEATHS (regno_first)++;
6095 /* Don't make a REG_DEAD note for a part of a register
6096 that is set in the insn. */
6097 for (i = regno_first; i <= regno_last; ++i)
6098 if (! REGNO_REG_SET_P (pbi->reg_live, i)
6099 && ! dead_or_set_regno_p (insn, i))
6101 = alloc_EXPR_LIST (REG_DEAD,
6102 gen_rtx_REG (reg_raw_mode[i], i),
6107 /* Mark the register as being live. */
6108 for (i = regno_first; i <= regno_last; ++i)
6110 SET_REGNO_REG_SET (pbi->reg_live, i);
6112 #ifdef HAVE_conditional_execution
6113 /* If this is a conditional use, record that fact. If it is later
6114 conditionally set, we'll know to kill the register. */
6115 if (cond != NULL_RTX)
6117 splay_tree_node node;
6118 struct reg_cond_life_info *rcli;
6123 node = splay_tree_lookup (pbi->reg_cond_dead, i);
6126 /* The register was unconditionally live previously.
6127 No need to do anything. */
6131 /* The register was conditionally live previously.
6132 Subtract the new life cond from the old death cond. */
6133 rcli = (struct reg_cond_life_info *) node->value;
6134 ncond = rcli->condition;
6135 ncond = and_reg_cond (ncond, not_reg_cond (cond), 1);
6137 /* If the register is now unconditionally live,
6138 remove the entry in the splay_tree. */
6139 if (ncond == const0_rtx)
6140 splay_tree_remove (pbi->reg_cond_dead, i);
6143 rcli->condition = ncond;
6144 SET_REGNO_REG_SET (pbi->reg_cond_reg,
6145 REGNO (XEXP (cond, 0)));
6151 /* The register was not previously live at all. Record
6152 the condition under which it is still dead. */
6153 rcli = (struct reg_cond_life_info *) xmalloc (sizeof (*rcli));
6154 rcli->condition = not_reg_cond (cond);
6155 rcli->stores = const0_rtx;
6156 rcli->orig_condition = const0_rtx;
6157 splay_tree_insert (pbi->reg_cond_dead, i,
6158 (splay_tree_value) rcli);
6160 SET_REGNO_REG_SET (pbi->reg_cond_reg, REGNO (XEXP (cond, 0)));
6163 else if (some_was_live)
6165 /* The register may have been conditionally live previously, but
6166 is now unconditionally live. Remove it from the conditionally
6167 dead list, so that a conditional set won't cause us to think
6169 splay_tree_remove (pbi->reg_cond_dead, i);
6175 /* Scan expression X and store a 1-bit in NEW_LIVE for each reg it uses.
6176 This is done assuming the registers needed from X are those that
6177 have 1-bits in PBI->REG_LIVE.
6179 INSN is the containing instruction. If INSN is dead, this function
6183 mark_used_regs (pbi, x, cond, insn)
6184 struct propagate_block_info *pbi;
6187 register RTX_CODE code;
6189 int flags = pbi->flags;
6192 code = GET_CODE (x);
6212 /* If we are clobbering a MEM, mark any registers inside the address
6214 if (GET_CODE (XEXP (x, 0)) == MEM)
6215 mark_used_regs (pbi, XEXP (XEXP (x, 0), 0), cond, insn);
6219 /* Don't bother watching stores to mems if this is not the
6220 final pass. We'll not be deleting dead stores this round. */
6221 if (optimize && (flags & PROP_SCAN_DEAD_CODE))
6223 /* Invalidate the data for the last MEM stored, but only if MEM is
6224 something that can be stored into. */
6225 if (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
6226 && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)))
6227 /* Needn't clear the memory set list. */
6231 rtx temp = pbi->mem_set_list;
6232 rtx prev = NULL_RTX;
6237 next = XEXP (temp, 1);
6238 if (anti_dependence (XEXP (temp, 0), x))
6240 /* Splice temp out of the list. */
6242 XEXP (prev, 1) = next;
6244 pbi->mem_set_list = next;
6245 free_EXPR_LIST_node (temp);
6246 pbi->mem_set_list_len--;
6254 /* If the memory reference had embedded side effects (autoincrement
6255 address modes. Then we may need to kill some entries on the
6258 invalidate_mems_from_autoinc (pbi, insn);
6262 if (flags & PROP_AUTOINC)
6263 find_auto_inc (pbi, x, insn);
6268 #ifdef CLASS_CANNOT_CHANGE_MODE
6269 if (GET_CODE (SUBREG_REG (x)) == REG
6270 && REGNO (SUBREG_REG (x)) >= FIRST_PSEUDO_REGISTER
6271 && CLASS_CANNOT_CHANGE_MODE_P (GET_MODE (x),
6272 GET_MODE (SUBREG_REG (x))))
6273 REG_CHANGES_MODE (REGNO (SUBREG_REG (x))) = 1;
6276 /* While we're here, optimize this case. */
6278 if (GET_CODE (x) != REG)
6283 /* See a register other than being set => mark it as needed. */
6284 mark_used_reg (pbi, x, cond, insn);
6289 register rtx testreg = SET_DEST (x);
6292 /* If storing into MEM, don't show it as being used. But do
6293 show the address as being used. */
6294 if (GET_CODE (testreg) == MEM)
6297 if (flags & PROP_AUTOINC)
6298 find_auto_inc (pbi, testreg, insn);
6300 mark_used_regs (pbi, XEXP (testreg, 0), cond, insn);
6301 mark_used_regs (pbi, SET_SRC (x), cond, insn);
6305 /* Storing in STRICT_LOW_PART is like storing in a reg
6306 in that this SET might be dead, so ignore it in TESTREG.
6307 but in some other ways it is like using the reg.
6309 Storing in a SUBREG or a bit field is like storing the entire
6310 register in that if the register's value is not used
6311 then this SET is not needed. */
6312 while (GET_CODE (testreg) == STRICT_LOW_PART
6313 || GET_CODE (testreg) == ZERO_EXTRACT
6314 || GET_CODE (testreg) == SIGN_EXTRACT
6315 || GET_CODE (testreg) == SUBREG)
6317 #ifdef CLASS_CANNOT_CHANGE_MODE
6318 if (GET_CODE (testreg) == SUBREG
6319 && GET_CODE (SUBREG_REG (testreg)) == REG
6320 && REGNO (SUBREG_REG (testreg)) >= FIRST_PSEUDO_REGISTER
6321 && CLASS_CANNOT_CHANGE_MODE_P (GET_MODE (SUBREG_REG (testreg)),
6322 GET_MODE (testreg)))
6323 REG_CHANGES_MODE (REGNO (SUBREG_REG (testreg))) = 1;
6326 /* Modifying a single register in an alternate mode
6327 does not use any of the old value. But these other
6328 ways of storing in a register do use the old value. */
6329 if (GET_CODE (testreg) == SUBREG
6330 && !(REG_SIZE (SUBREG_REG (testreg)) > REG_SIZE (testreg)))
6335 testreg = XEXP (testreg, 0);
6338 /* If this is a store into a register or group of registers,
6339 recursively scan the value being stored. */
6341 if ((GET_CODE (testreg) == PARALLEL
6342 && GET_MODE (testreg) == BLKmode)
6343 || (GET_CODE (testreg) == REG
6344 && (regno = REGNO (testreg),
6345 ! (regno == FRAME_POINTER_REGNUM
6346 && (! reload_completed || frame_pointer_needed)))
6347 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
6348 && ! (regno == HARD_FRAME_POINTER_REGNUM
6349 && (! reload_completed || frame_pointer_needed))
6351 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
6352 && ! (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
6357 mark_used_regs (pbi, SET_DEST (x), cond, insn);
6358 mark_used_regs (pbi, SET_SRC (x), cond, insn);
6365 case UNSPEC_VOLATILE:
6369 /* Traditional and volatile asm instructions must be considered to use
6370 and clobber all hard registers, all pseudo-registers and all of
6371 memory. So must TRAP_IF and UNSPEC_VOLATILE operations.
6373 Consider for instance a volatile asm that changes the fpu rounding
6374 mode. An insn should not be moved across this even if it only uses
6375 pseudo-regs because it might give an incorrectly rounded result.
6377 ?!? Unfortunately, marking all hard registers as live causes massive
6378 problems for the register allocator and marking all pseudos as live
6379 creates mountains of uninitialized variable warnings.
6381 So for now, just clear the memory set list and mark any regs
6382 we can find in ASM_OPERANDS as used. */
6383 if (code != ASM_OPERANDS || MEM_VOLATILE_P (x))
6385 free_EXPR_LIST_list (&pbi->mem_set_list);
6386 pbi->mem_set_list_len = 0;
6389 /* For all ASM_OPERANDS, we must traverse the vector of input operands.
6390 We can not just fall through here since then we would be confused
6391 by the ASM_INPUT rtx inside ASM_OPERANDS, which do not indicate
6392 traditional asms unlike their normal usage. */
6393 if (code == ASM_OPERANDS)
6397 for (j = 0; j < ASM_OPERANDS_INPUT_LENGTH (x); j++)
6398 mark_used_regs (pbi, ASM_OPERANDS_INPUT (x, j), cond, insn);
6404 if (cond != NULL_RTX)
6407 mark_used_regs (pbi, COND_EXEC_TEST (x), NULL_RTX, insn);
6409 cond = COND_EXEC_TEST (x);
6410 x = COND_EXEC_CODE (x);
6414 /* We _do_not_ want to scan operands of phi nodes. Operands of
6415 a phi function are evaluated only when control reaches this
6416 block along a particular edge. Therefore, regs that appear
6417 as arguments to phi should not be added to the global live at
6425 /* Recursively scan the operands of this expression. */
6428 register const char *fmt = GET_RTX_FORMAT (code);
6431 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
6435 /* Tail recursive case: save a function call level. */
6441 mark_used_regs (pbi, XEXP (x, i), cond, insn);
6443 else if (fmt[i] == 'E')
6446 for (j = 0; j < XVECLEN (x, i); j++)
6447 mark_used_regs (pbi, XVECEXP (x, i, j), cond, insn);
6456 try_pre_increment_1 (pbi, insn)
6457 struct propagate_block_info *pbi;
6460 /* Find the next use of this reg. If in same basic block,
6461 make it do pre-increment or pre-decrement if appropriate. */
6462 rtx x = single_set (insn);
6463 HOST_WIDE_INT amount = ((GET_CODE (SET_SRC (x)) == PLUS ? 1 : -1)
6464 * INTVAL (XEXP (SET_SRC (x), 1)));
6465 int regno = REGNO (SET_DEST (x));
6466 rtx y = pbi->reg_next_use[regno];
6468 && SET_DEST (x) != stack_pointer_rtx
6469 && BLOCK_NUM (y) == BLOCK_NUM (insn)
6470 /* Don't do this if the reg dies, or gets set in y; a standard addressing
6471 mode would be better. */
6472 && ! dead_or_set_p (y, SET_DEST (x))
6473 && try_pre_increment (y, SET_DEST (x), amount))
6475 /* We have found a suitable auto-increment and already changed
6476 insn Y to do it. So flush this increment instruction. */
6477 propagate_block_delete_insn (pbi->bb, insn);
6479 /* Count a reference to this reg for the increment insn we are
6480 deleting. When a reg is incremented, spilling it is worse,
6481 so we want to make that less likely. */
6482 if (regno >= FIRST_PSEUDO_REGISTER)
6484 REG_FREQ (regno) += (optimize_size || !pbi->bb->frequency
6485 ? 1 : pbi->bb->frequency);
6486 REG_N_SETS (regno)++;
6489 /* Flush any remembered memories depending on the value of
6490 the incremented register. */
6491 invalidate_mems_from_set (pbi, SET_DEST (x));
6498 /* Try to change INSN so that it does pre-increment or pre-decrement
6499 addressing on register REG in order to add AMOUNT to REG.
6500 AMOUNT is negative for pre-decrement.
6501 Returns 1 if the change could be made.
6502 This checks all about the validity of the result of modifying INSN. */
6505 try_pre_increment (insn, reg, amount)
6507 HOST_WIDE_INT amount;
6511 /* Nonzero if we can try to make a pre-increment or pre-decrement.
6512 For example, addl $4,r1; movl (r1),... can become movl +(r1),... */
6514 /* Nonzero if we can try to make a post-increment or post-decrement.
6515 For example, addl $4,r1; movl -4(r1),... can become movl (r1)+,...
6516 It is possible for both PRE_OK and POST_OK to be nonzero if the machine
6517 supports both pre-inc and post-inc, or both pre-dec and post-dec. */
6520 /* Nonzero if the opportunity actually requires post-inc or post-dec. */
6523 /* From the sign of increment, see which possibilities are conceivable
6524 on this target machine. */
6525 if (HAVE_PRE_INCREMENT && amount > 0)
6527 if (HAVE_POST_INCREMENT && amount > 0)
6530 if (HAVE_PRE_DECREMENT && amount < 0)
6532 if (HAVE_POST_DECREMENT && amount < 0)
6535 if (! (pre_ok || post_ok))
6538 /* It is not safe to add a side effect to a jump insn
6539 because if the incremented register is spilled and must be reloaded
6540 there would be no way to store the incremented value back in memory. */
6542 if (GET_CODE (insn) == JUMP_INSN)
6547 use = find_use_as_address (PATTERN (insn), reg, 0);
6548 if (post_ok && (use == 0 || use == (rtx) 1))
6550 use = find_use_as_address (PATTERN (insn), reg, -amount);
6554 if (use == 0 || use == (rtx) 1)
6557 if (GET_MODE_SIZE (GET_MODE (use)) != (amount > 0 ? amount : - amount))
6560 /* See if this combination of instruction and addressing mode exists. */
6561 if (! validate_change (insn, &XEXP (use, 0),
6562 gen_rtx_fmt_e (amount > 0
6563 ? (do_post ? POST_INC : PRE_INC)
6564 : (do_post ? POST_DEC : PRE_DEC),
6568 /* Record that this insn now has an implicit side effect on X. */
6569 REG_NOTES (insn) = alloc_EXPR_LIST (REG_INC, reg, REG_NOTES (insn));
6573 #endif /* AUTO_INC_DEC */
6575 /* Find the place in the rtx X where REG is used as a memory address.
6576 Return the MEM rtx that so uses it.
6577 If PLUSCONST is nonzero, search instead for a memory address equivalent to
6578 (plus REG (const_int PLUSCONST)).
6580 If such an address does not appear, return 0.
6581 If REG appears more than once, or is used other than in such an address,
6585 find_use_as_address (x, reg, plusconst)
6588 HOST_WIDE_INT plusconst;
6590 enum rtx_code code = GET_CODE (x);
6591 const char *fmt = GET_RTX_FORMAT (code);
6593 register rtx value = 0;
6596 if (code == MEM && XEXP (x, 0) == reg && plusconst == 0)
6599 if (code == MEM && GET_CODE (XEXP (x, 0)) == PLUS
6600 && XEXP (XEXP (x, 0), 0) == reg
6601 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
6602 && INTVAL (XEXP (XEXP (x, 0), 1)) == plusconst)
6605 if (code == SIGN_EXTRACT || code == ZERO_EXTRACT)
6607 /* If REG occurs inside a MEM used in a bit-field reference,
6608 that is unacceptable. */
6609 if (find_use_as_address (XEXP (x, 0), reg, 0) != 0)
6610 return (rtx) (HOST_WIDE_INT) 1;
6614 return (rtx) (HOST_WIDE_INT) 1;
6616 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
6620 tem = find_use_as_address (XEXP (x, i), reg, plusconst);
6624 return (rtx) (HOST_WIDE_INT) 1;
6626 else if (fmt[i] == 'E')
6629 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
6631 tem = find_use_as_address (XVECEXP (x, i, j), reg, plusconst);
6635 return (rtx) (HOST_WIDE_INT) 1;
6643 /* Write information about registers and basic blocks into FILE.
6644 This is part of making a debugging dump. */
6647 dump_regset (r, outf)
6654 fputs (" (nil)", outf);
6658 EXECUTE_IF_SET_IN_REG_SET (r, 0, i,
6660 fprintf (outf, " %d", i);
6661 if (i < FIRST_PSEUDO_REGISTER)
6662 fprintf (outf, " [%s]",
6667 /* Print a human-reaable representation of R on the standard error
6668 stream. This function is designed to be used from within the
6675 dump_regset (r, stderr);
6676 putc ('\n', stderr);
6680 dump_flow_info (file)
6684 static const char * const reg_class_names[] = REG_CLASS_NAMES;
6686 fprintf (file, "%d registers.\n", max_regno);
6687 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
6690 enum reg_class class, altclass;
6691 fprintf (file, "\nRegister %d used %d times across %d insns",
6692 i, REG_N_REFS (i), REG_LIVE_LENGTH (i));
6693 if (REG_BASIC_BLOCK (i) >= 0)
6694 fprintf (file, " in block %d", REG_BASIC_BLOCK (i));
6696 fprintf (file, "; set %d time%s", REG_N_SETS (i),
6697 (REG_N_SETS (i) == 1) ? "" : "s");
6698 if (REG_USERVAR_P (regno_reg_rtx[i]))
6699 fprintf (file, "; user var");
6700 if (REG_N_DEATHS (i) != 1)
6701 fprintf (file, "; dies in %d places", REG_N_DEATHS (i));
6702 if (REG_N_CALLS_CROSSED (i) == 1)
6703 fprintf (file, "; crosses 1 call");
6704 else if (REG_N_CALLS_CROSSED (i))
6705 fprintf (file, "; crosses %d calls", REG_N_CALLS_CROSSED (i));
6706 if (PSEUDO_REGNO_BYTES (i) != UNITS_PER_WORD)
6707 fprintf (file, "; %d bytes", PSEUDO_REGNO_BYTES (i));
6708 class = reg_preferred_class (i);
6709 altclass = reg_alternate_class (i);
6710 if (class != GENERAL_REGS || altclass != ALL_REGS)
6712 if (altclass == ALL_REGS || class == ALL_REGS)
6713 fprintf (file, "; pref %s", reg_class_names[(int) class]);
6714 else if (altclass == NO_REGS)
6715 fprintf (file, "; %s or none", reg_class_names[(int) class]);
6717 fprintf (file, "; pref %s, else %s",
6718 reg_class_names[(int) class],
6719 reg_class_names[(int) altclass]);
6721 if (REG_POINTER (regno_reg_rtx[i]))
6722 fprintf (file, "; pointer");
6723 fprintf (file, ".\n");
6726 fprintf (file, "\n%d basic blocks, %d edges.\n", n_basic_blocks, n_edges);
6727 for (i = 0; i < n_basic_blocks; i++)
6729 register basic_block bb = BASIC_BLOCK (i);
6732 fprintf (file, "\nBasic block %d: first insn %d, last %d, loop_depth %d, count ",
6733 i, INSN_UID (bb->head), INSN_UID (bb->end), bb->loop_depth);
6734 fprintf (file, HOST_WIDEST_INT_PRINT_DEC, (HOST_WIDEST_INT) bb->count);
6735 fprintf (file, ", freq %i.\n", bb->frequency);
6737 fprintf (file, "Predecessors: ");
6738 for (e = bb->pred; e; e = e->pred_next)
6739 dump_edge_info (file, e, 0);
6741 fprintf (file, "\nSuccessors: ");
6742 for (e = bb->succ; e; e = e->succ_next)
6743 dump_edge_info (file, e, 1);
6745 fprintf (file, "\nRegisters live at start:");
6746 dump_regset (bb->global_live_at_start, file);
6748 fprintf (file, "\nRegisters live at end:");
6749 dump_regset (bb->global_live_at_end, file);
6760 dump_flow_info (stderr);
6764 dump_edge_info (file, e, do_succ)
6769 basic_block side = (do_succ ? e->dest : e->src);
6771 if (side == ENTRY_BLOCK_PTR)
6772 fputs (" ENTRY", file);
6773 else if (side == EXIT_BLOCK_PTR)
6774 fputs (" EXIT", file);
6776 fprintf (file, " %d", side->index);
6779 fprintf (file, " [%.1f%%] ", e->probability * 100.0 / REG_BR_PROB_BASE);
6783 fprintf (file, " count:");
6784 fprintf (file, HOST_WIDEST_INT_PRINT_DEC, (HOST_WIDEST_INT) e->count);
6789 static const char * const bitnames[] = {
6790 "fallthru", "crit", "ab", "abcall", "eh", "fake"
6793 int i, flags = e->flags;
6797 for (i = 0; flags; i++)
6798 if (flags & (1 << i))
6804 if (i < (int) ARRAY_SIZE (bitnames))
6805 fputs (bitnames[i], file);
6807 fprintf (file, "%d", i);
6814 /* Print out one basic block with live information at start and end. */
6825 fprintf (outf, ";; Basic block %d, loop depth %d, count ",
6826 bb->index, bb->loop_depth);
6827 fprintf (outf, HOST_WIDEST_INT_PRINT_DEC, (HOST_WIDEST_INT) bb->count);
6830 fputs (";; Predecessors: ", outf);
6831 for (e = bb->pred; e; e = e->pred_next)
6832 dump_edge_info (outf, e, 0);
6835 fputs (";; Registers live at start:", outf);
6836 dump_regset (bb->global_live_at_start, outf);
6839 for (insn = bb->head, last = NEXT_INSN (bb->end);
6841 insn = NEXT_INSN (insn))
6842 print_rtl_single (outf, insn);
6844 fputs (";; Registers live at end:", outf);
6845 dump_regset (bb->global_live_at_end, outf);
6848 fputs (";; Successors: ", outf);
6849 for (e = bb->succ; e; e = e->succ_next)
6850 dump_edge_info (outf, e, 1);
6858 dump_bb (bb, stderr);
6865 dump_bb (BASIC_BLOCK (n), stderr);
6868 /* Like print_rtl, but also print out live information for the start of each
6872 print_rtl_with_bb (outf, rtx_first)
6876 register rtx tmp_rtx;
6879 fprintf (outf, "(nil)\n");
6883 enum bb_state { NOT_IN_BB, IN_ONE_BB, IN_MULTIPLE_BB };
6884 int max_uid = get_max_uid ();
6885 basic_block *start = (basic_block *)
6886 xcalloc (max_uid, sizeof (basic_block));
6887 basic_block *end = (basic_block *)
6888 xcalloc (max_uid, sizeof (basic_block));
6889 enum bb_state *in_bb_p = (enum bb_state *)
6890 xcalloc (max_uid, sizeof (enum bb_state));
6892 for (i = n_basic_blocks - 1; i >= 0; i--)
6894 basic_block bb = BASIC_BLOCK (i);
6897 start[INSN_UID (bb->head)] = bb;
6898 end[INSN_UID (bb->end)] = bb;
6899 for (x = bb->head; x != NULL_RTX; x = NEXT_INSN (x))
6901 enum bb_state state = IN_MULTIPLE_BB;
6902 if (in_bb_p[INSN_UID (x)] == NOT_IN_BB)
6904 in_bb_p[INSN_UID (x)] = state;
6911 for (tmp_rtx = rtx_first; NULL != tmp_rtx; tmp_rtx = NEXT_INSN (tmp_rtx))
6916 if ((bb = start[INSN_UID (tmp_rtx)]) != NULL)
6918 fprintf (outf, ";; Start of basic block %d, registers live:",
6920 dump_regset (bb->global_live_at_start, outf);
6924 if (in_bb_p[INSN_UID (tmp_rtx)] == NOT_IN_BB
6925 && GET_CODE (tmp_rtx) != NOTE
6926 && GET_CODE (tmp_rtx) != BARRIER)
6927 fprintf (outf, ";; Insn is not within a basic block\n");
6928 else if (in_bb_p[INSN_UID (tmp_rtx)] == IN_MULTIPLE_BB)
6929 fprintf (outf, ";; Insn is in multiple basic blocks\n");
6931 did_output = print_rtl_single (outf, tmp_rtx);
6933 if ((bb = end[INSN_UID (tmp_rtx)]) != NULL)
6935 fprintf (outf, ";; End of basic block %d, registers live:\n",
6937 dump_regset (bb->global_live_at_end, outf);
6950 if (current_function_epilogue_delay_list != 0)
6952 fprintf (outf, "\n;; Insns in epilogue delay list:\n\n");
6953 for (tmp_rtx = current_function_epilogue_delay_list; tmp_rtx != 0;
6954 tmp_rtx = XEXP (tmp_rtx, 1))
6955 print_rtl_single (outf, XEXP (tmp_rtx, 0));
6959 /* Dump the rtl into the current debugging dump file, then abort. */
6962 print_rtl_and_abort_fcn (file, line, function)
6965 const char *function;
6969 print_rtl_with_bb (rtl_dump_file, get_insns ());
6970 fclose (rtl_dump_file);
6973 fancy_abort (file, line, function);
6976 /* Recompute register set/reference counts immediately prior to register
6979 This avoids problems with set/reference counts changing to/from values
6980 which have special meanings to the register allocators.
6982 Additionally, the reference counts are the primary component used by the
6983 register allocators to prioritize pseudos for allocation to hard regs.
6984 More accurate reference counts generally lead to better register allocation.
6986 F is the first insn to be scanned.
6988 LOOP_STEP denotes how much loop_depth should be incremented per
6989 loop nesting level in order to increase the ref count more for
6990 references in a loop.
6992 It might be worthwhile to update REG_LIVE_LENGTH, REG_BASIC_BLOCK and
6993 possibly other information which is used by the register allocators. */
6996 recompute_reg_usage (f, loop_step)
6997 rtx f ATTRIBUTE_UNUSED;
6998 int loop_step ATTRIBUTE_UNUSED;
7000 allocate_reg_life_data ();
7001 update_life_info (NULL, UPDATE_LIFE_LOCAL, PROP_REG_INFO);
7004 /* Optionally removes all the REG_DEAD and REG_UNUSED notes from a set of
7005 blocks. If BLOCKS is NULL, assume the universal set. Returns a count
7006 of the number of registers that died. */
7009 count_or_remove_death_notes (blocks, kill)
7015 for (i = n_basic_blocks - 1; i >= 0; --i)
7020 if (blocks && ! TEST_BIT (blocks, i))
7023 bb = BASIC_BLOCK (i);
7025 for (insn = bb->head;; insn = NEXT_INSN (insn))
7029 rtx *pprev = ®_NOTES (insn);
7034 switch (REG_NOTE_KIND (link))
7037 if (GET_CODE (XEXP (link, 0)) == REG)
7039 rtx reg = XEXP (link, 0);
7042 if (REGNO (reg) >= FIRST_PSEUDO_REGISTER)
7045 n = HARD_REGNO_NREGS (REGNO (reg), GET_MODE (reg));
7053 rtx next = XEXP (link, 1);
7054 free_EXPR_LIST_node (link);
7055 *pprev = link = next;
7061 pprev = &XEXP (link, 1);
7068 if (insn == bb->end)
7077 /* Update insns block within BB. */
7080 update_bb_for_insn (bb)
7085 if (! basic_block_for_insn)
7088 for (insn = bb->head; ; insn = NEXT_INSN (insn))
7090 set_block_for_insn (insn, bb);
7092 if (insn == bb->end)
7098 /* Record INSN's block as BB. */
7101 set_block_for_insn (insn, bb)
7105 size_t uid = INSN_UID (insn);
7106 if (uid >= basic_block_for_insn->num_elements)
7110 /* Add one-eighth the size so we don't keep calling xrealloc. */
7111 new_size = uid + (uid + 7) / 8;
7113 VARRAY_GROW (basic_block_for_insn, new_size);
7115 VARRAY_BB (basic_block_for_insn, uid) = bb;
7118 /* When a new insn has been inserted into an existing block, it will
7119 sometimes emit more than a single insn. This routine will set the
7120 block number for the specified insn, and look backwards in the insn
7121 chain to see if there are any other uninitialized insns immediately
7122 previous to this one, and set the block number for them too. */
7125 set_block_for_new_insns (insn, bb)
7129 set_block_for_insn (insn, bb);
7131 /* Scan the previous instructions setting the block number until we find
7132 an instruction that has the block number set, or we find a note
7134 for (insn = PREV_INSN (insn); insn != NULL_RTX; insn = PREV_INSN (insn))
7136 if (GET_CODE (insn) == NOTE)
7138 if (INSN_UID (insn) >= basic_block_for_insn->num_elements
7139 || BLOCK_FOR_INSN (insn) == 0)
7140 set_block_for_insn (insn, bb);
7146 /* Verify the CFG consistency. This function check some CFG invariants and
7147 aborts when something is wrong. Hope that this function will help to
7148 convert many optimization passes to preserve CFG consistent.
7150 Currently it does following checks:
7152 - test head/end pointers
7153 - overlapping of basic blocks
7154 - edge list corectness
7155 - headers of basic blocks (the NOTE_INSN_BASIC_BLOCK note)
7156 - tails of basic blocks (ensure that boundary is necesary)
7157 - scans body of the basic block for JUMP_INSN, CODE_LABEL
7158 and NOTE_INSN_BASIC_BLOCK
7159 - check that all insns are in the basic blocks
7160 (except the switch handling code, barriers and notes)
7161 - check that all returns are followed by barriers
7163 In future it can be extended check a lot of other stuff as well
7164 (reachability of basic blocks, life information, etc. etc.). */
7169 const int max_uid = get_max_uid ();
7170 const rtx rtx_first = get_insns ();
7171 rtx last_head = get_last_insn ();
7172 basic_block *bb_info;
7174 int i, last_bb_num_seen, num_bb_notes, err = 0;
7176 bb_info = (basic_block *) xcalloc (max_uid, sizeof (basic_block));
7178 for (i = n_basic_blocks - 1; i >= 0; i--)
7180 basic_block bb = BASIC_BLOCK (i);
7181 rtx head = bb->head;
7184 /* Verify the end of the basic block is in the INSN chain. */
7185 for (x = last_head; x != NULL_RTX; x = PREV_INSN (x))
7190 error ("End insn %d for block %d not found in the insn stream.",
7191 INSN_UID (end), bb->index);
7195 /* Work backwards from the end to the head of the basic block
7196 to verify the head is in the RTL chain. */
7197 for (; x != NULL_RTX; x = PREV_INSN (x))
7199 /* While walking over the insn chain, verify insns appear
7200 in only one basic block and initialize the BB_INFO array
7201 used by other passes. */
7202 if (bb_info[INSN_UID (x)] != NULL)
7204 error ("Insn %d is in multiple basic blocks (%d and %d)",
7205 INSN_UID (x), bb->index, bb_info[INSN_UID (x)]->index);
7208 bb_info[INSN_UID (x)] = bb;
7215 error ("Head insn %d for block %d not found in the insn stream.",
7216 INSN_UID (head), bb->index);
7223 /* Now check the basic blocks (boundaries etc.) */
7224 for (i = n_basic_blocks - 1; i >= 0; i--)
7226 basic_block bb = BASIC_BLOCK (i);
7227 /* Check corectness of edge lists */
7236 "verify_flow_info: Basic block %d succ edge is corrupted\n",
7238 fprintf (stderr, "Predecessor: ");
7239 dump_edge_info (stderr, e, 0);
7240 fprintf (stderr, "\nSuccessor: ");
7241 dump_edge_info (stderr, e, 1);
7245 if (e->dest != EXIT_BLOCK_PTR)
7247 edge e2 = e->dest->pred;
7248 while (e2 && e2 != e)
7252 error ("Basic block %i edge lists are corrupted", bb->index);
7264 error ("Basic block %d pred edge is corrupted", bb->index);
7265 fputs ("Predecessor: ", stderr);
7266 dump_edge_info (stderr, e, 0);
7267 fputs ("\nSuccessor: ", stderr);
7268 dump_edge_info (stderr, e, 1);
7269 fputc ('\n', stderr);
7272 if (e->src != ENTRY_BLOCK_PTR)
7274 edge e2 = e->src->succ;
7275 while (e2 && e2 != e)
7279 error ("Basic block %i edge lists are corrupted", bb->index);
7286 /* OK pointers are correct. Now check the header of basic
7287 block. It ought to contain optional CODE_LABEL followed
7288 by NOTE_BASIC_BLOCK. */
7290 if (GET_CODE (x) == CODE_LABEL)
7294 error ("NOTE_INSN_BASIC_BLOCK is missing for block %d",
7300 if (!NOTE_INSN_BASIC_BLOCK_P (x) || NOTE_BASIC_BLOCK (x) != bb)
7302 error ("NOTE_INSN_BASIC_BLOCK is missing for block %d\n",
7309 /* Do checks for empty blocks here */
7316 if (NOTE_INSN_BASIC_BLOCK_P (x))
7318 error ("NOTE_INSN_BASIC_BLOCK %d in the middle of basic block %d",
7319 INSN_UID (x), bb->index);
7326 if (GET_CODE (x) == JUMP_INSN
7327 || GET_CODE (x) == CODE_LABEL
7328 || GET_CODE (x) == BARRIER)
7330 error ("In basic block %d:", bb->index);
7331 fatal_insn ("Flow control insn inside a basic block", x);
7339 last_bb_num_seen = -1;
7344 if (NOTE_INSN_BASIC_BLOCK_P (x))
7346 basic_block bb = NOTE_BASIC_BLOCK (x);
7348 if (bb->index != last_bb_num_seen + 1)
7349 /* Basic blocks not numbered consecutively. */
7352 last_bb_num_seen = bb->index;
7355 if (!bb_info[INSN_UID (x)])
7357 switch (GET_CODE (x))
7364 /* An addr_vec is placed outside any block block. */
7366 && GET_CODE (NEXT_INSN (x)) == JUMP_INSN
7367 && (GET_CODE (PATTERN (NEXT_INSN (x))) == ADDR_DIFF_VEC
7368 || GET_CODE (PATTERN (NEXT_INSN (x))) == ADDR_VEC))
7373 /* But in any case, non-deletable labels can appear anywhere. */
7377 fatal_insn ("Insn outside basic block", x);
7382 && GET_CODE (x) == JUMP_INSN
7383 && returnjump_p (x) && ! condjump_p (x)
7384 && ! (NEXT_INSN (x) && GET_CODE (NEXT_INSN (x)) == BARRIER))
7385 fatal_insn ("Return not followed by barrier", x);
7390 if (num_bb_notes != n_basic_blocks)
7392 ("number of bb notes in insn chain (%d) != n_basic_blocks (%d)",
7393 num_bb_notes, n_basic_blocks);
7402 /* Functions to access an edge list with a vector representation.
7403 Enough data is kept such that given an index number, the
7404 pred and succ that edge represents can be determined, or
7405 given a pred and a succ, its index number can be returned.
7406 This allows algorithms which consume a lot of memory to
7407 represent the normally full matrix of edge (pred,succ) with a
7408 single indexed vector, edge (EDGE_INDEX (pred, succ)), with no
7409 wasted space in the client code due to sparse flow graphs. */
7411 /* This functions initializes the edge list. Basically the entire
7412 flowgraph is processed, and all edges are assigned a number,
7413 and the data structure is filled in. */
7418 struct edge_list *elist;
7424 block_count = n_basic_blocks + 2; /* Include the entry and exit blocks. */
7428 /* Determine the number of edges in the flow graph by counting successor
7429 edges on each basic block. */
7430 for (x = 0; x < n_basic_blocks; x++)
7432 basic_block bb = BASIC_BLOCK (x);
7434 for (e = bb->succ; e; e = e->succ_next)
7437 /* Don't forget successors of the entry block. */
7438 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
7441 elist = (struct edge_list *) xmalloc (sizeof (struct edge_list));
7442 elist->num_blocks = block_count;
7443 elist->num_edges = num_edges;
7444 elist->index_to_edge = (edge *) xmalloc (sizeof (edge) * num_edges);
7448 /* Follow successors of the entry block, and register these edges. */
7449 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
7451 elist->index_to_edge[num_edges] = e;
7455 for (x = 0; x < n_basic_blocks; x++)
7457 basic_block bb = BASIC_BLOCK (x);
7459 /* Follow all successors of blocks, and register these edges. */
7460 for (e = bb->succ; e; e = e->succ_next)
7462 elist->index_to_edge[num_edges] = e;
7469 /* This function free's memory associated with an edge list. */
7472 free_edge_list (elist)
7473 struct edge_list *elist;
7477 free (elist->index_to_edge);
7482 /* This function provides debug output showing an edge list. */
7485 print_edge_list (f, elist)
7487 struct edge_list *elist;
7490 fprintf (f, "Compressed edge list, %d BBs + entry & exit, and %d edges\n",
7491 elist->num_blocks - 2, elist->num_edges);
7493 for (x = 0; x < elist->num_edges; x++)
7495 fprintf (f, " %-4d - edge(", x);
7496 if (INDEX_EDGE_PRED_BB (elist, x) == ENTRY_BLOCK_PTR)
7497 fprintf (f, "entry,");
7499 fprintf (f, "%d,", INDEX_EDGE_PRED_BB (elist, x)->index);
7501 if (INDEX_EDGE_SUCC_BB (elist, x) == EXIT_BLOCK_PTR)
7502 fprintf (f, "exit)\n");
7504 fprintf (f, "%d)\n", INDEX_EDGE_SUCC_BB (elist, x)->index);
7508 /* This function provides an internal consistency check of an edge list,
7509 verifying that all edges are present, and that there are no
7513 verify_edge_list (f, elist)
7515 struct edge_list *elist;
7517 int x, pred, succ, index;
7520 for (x = 0; x < n_basic_blocks; x++)
7522 basic_block bb = BASIC_BLOCK (x);
7524 for (e = bb->succ; e; e = e->succ_next)
7526 pred = e->src->index;
7527 succ = e->dest->index;
7528 index = EDGE_INDEX (elist, e->src, e->dest);
7529 if (index == EDGE_INDEX_NO_EDGE)
7531 fprintf (f, "*p* No index for edge from %d to %d\n", pred, succ);
7534 if (INDEX_EDGE_PRED_BB (elist, index)->index != pred)
7535 fprintf (f, "*p* Pred for index %d should be %d not %d\n",
7536 index, pred, INDEX_EDGE_PRED_BB (elist, index)->index);
7537 if (INDEX_EDGE_SUCC_BB (elist, index)->index != succ)
7538 fprintf (f, "*p* Succ for index %d should be %d not %d\n",
7539 index, succ, INDEX_EDGE_SUCC_BB (elist, index)->index);
7542 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
7544 pred = e->src->index;
7545 succ = e->dest->index;
7546 index = EDGE_INDEX (elist, e->src, e->dest);
7547 if (index == EDGE_INDEX_NO_EDGE)
7549 fprintf (f, "*p* No index for edge from %d to %d\n", pred, succ);
7552 if (INDEX_EDGE_PRED_BB (elist, index)->index != pred)
7553 fprintf (f, "*p* Pred for index %d should be %d not %d\n",
7554 index, pred, INDEX_EDGE_PRED_BB (elist, index)->index);
7555 if (INDEX_EDGE_SUCC_BB (elist, index)->index != succ)
7556 fprintf (f, "*p* Succ for index %d should be %d not %d\n",
7557 index, succ, INDEX_EDGE_SUCC_BB (elist, index)->index);
7559 /* We've verified that all the edges are in the list, no lets make sure
7560 there are no spurious edges in the list. */
7562 for (pred = 0; pred < n_basic_blocks; pred++)
7563 for (succ = 0; succ < n_basic_blocks; succ++)
7565 basic_block p = BASIC_BLOCK (pred);
7566 basic_block s = BASIC_BLOCK (succ);
7570 for (e = p->succ; e; e = e->succ_next)
7576 for (e = s->pred; e; e = e->pred_next)
7582 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), BASIC_BLOCK (succ))
7583 == EDGE_INDEX_NO_EDGE && found_edge != 0)
7584 fprintf (f, "*** Edge (%d, %d) appears to not have an index\n",
7586 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), BASIC_BLOCK (succ))
7587 != EDGE_INDEX_NO_EDGE && found_edge == 0)
7588 fprintf (f, "*** Edge (%d, %d) has index %d, but there is no edge\n",
7589 pred, succ, EDGE_INDEX (elist, BASIC_BLOCK (pred),
7590 BASIC_BLOCK (succ)));
7592 for (succ = 0; succ < n_basic_blocks; succ++)
7594 basic_block p = ENTRY_BLOCK_PTR;
7595 basic_block s = BASIC_BLOCK (succ);
7599 for (e = p->succ; e; e = e->succ_next)
7605 for (e = s->pred; e; e = e->pred_next)
7611 if (EDGE_INDEX (elist, ENTRY_BLOCK_PTR, BASIC_BLOCK (succ))
7612 == EDGE_INDEX_NO_EDGE && found_edge != 0)
7613 fprintf (f, "*** Edge (entry, %d) appears to not have an index\n",
7615 if (EDGE_INDEX (elist, ENTRY_BLOCK_PTR, BASIC_BLOCK (succ))
7616 != EDGE_INDEX_NO_EDGE && found_edge == 0)
7617 fprintf (f, "*** Edge (entry, %d) has index %d, but no edge exists\n",
7618 succ, EDGE_INDEX (elist, ENTRY_BLOCK_PTR,
7619 BASIC_BLOCK (succ)));
7621 for (pred = 0; pred < n_basic_blocks; pred++)
7623 basic_block p = BASIC_BLOCK (pred);
7624 basic_block s = EXIT_BLOCK_PTR;
7628 for (e = p->succ; e; e = e->succ_next)
7634 for (e = s->pred; e; e = e->pred_next)
7640 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), EXIT_BLOCK_PTR)
7641 == EDGE_INDEX_NO_EDGE && found_edge != 0)
7642 fprintf (f, "*** Edge (%d, exit) appears to not have an index\n",
7644 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), EXIT_BLOCK_PTR)
7645 != EDGE_INDEX_NO_EDGE && found_edge == 0)
7646 fprintf (f, "*** Edge (%d, exit) has index %d, but no edge exists\n",
7647 pred, EDGE_INDEX (elist, BASIC_BLOCK (pred),
7652 /* This routine will determine what, if any, edge there is between
7653 a specified predecessor and successor. */
7656 find_edge_index (edge_list, pred, succ)
7657 struct edge_list *edge_list;
7658 basic_block pred, succ;
7661 for (x = 0; x < NUM_EDGES (edge_list); x++)
7663 if (INDEX_EDGE_PRED_BB (edge_list, x) == pred
7664 && INDEX_EDGE_SUCC_BB (edge_list, x) == succ)
7667 return (EDGE_INDEX_NO_EDGE);
7670 /* This function will remove an edge from the flow graph. */
7676 edge last_pred = NULL;
7677 edge last_succ = NULL;
7679 basic_block src, dest;
7682 for (tmp = src->succ; tmp && tmp != e; tmp = tmp->succ_next)
7688 last_succ->succ_next = e->succ_next;
7690 src->succ = e->succ_next;
7692 for (tmp = dest->pred; tmp && tmp != e; tmp = tmp->pred_next)
7698 last_pred->pred_next = e->pred_next;
7700 dest->pred = e->pred_next;
7706 /* This routine will remove any fake successor edges for a basic block.
7707 When the edge is removed, it is also removed from whatever predecessor
7711 remove_fake_successors (bb)
7715 for (e = bb->succ; e;)
7719 if ((tmp->flags & EDGE_FAKE) == EDGE_FAKE)
7724 /* This routine will remove all fake edges from the flow graph. If
7725 we remove all fake successors, it will automatically remove all
7726 fake predecessors. */
7729 remove_fake_edges ()
7733 for (x = 0; x < n_basic_blocks; x++)
7734 remove_fake_successors (BASIC_BLOCK (x));
7736 /* We've handled all successors except the entry block's. */
7737 remove_fake_successors (ENTRY_BLOCK_PTR);
7740 /* This function will add a fake edge between any block which has no
7741 successors, and the exit block. Some data flow equations require these
7745 add_noreturn_fake_exit_edges ()
7749 for (x = 0; x < n_basic_blocks; x++)
7750 if (BASIC_BLOCK (x)->succ == NULL)
7751 make_edge (NULL, BASIC_BLOCK (x), EXIT_BLOCK_PTR, EDGE_FAKE);
7754 /* This function adds a fake edge between any infinite loops to the
7755 exit block. Some optimizations require a path from each node to
7758 See also Morgan, Figure 3.10, pp. 82-83.
7760 The current implementation is ugly, not attempting to minimize the
7761 number of inserted fake edges. To reduce the number of fake edges
7762 to insert, add fake edges from _innermost_ loops containing only
7763 nodes not reachable from the exit block. */
7766 connect_infinite_loops_to_exit ()
7768 basic_block unvisited_block;
7770 /* Perform depth-first search in the reverse graph to find nodes
7771 reachable from the exit block. */
7772 struct depth_first_search_dsS dfs_ds;
7774 flow_dfs_compute_reverse_init (&dfs_ds);
7775 flow_dfs_compute_reverse_add_bb (&dfs_ds, EXIT_BLOCK_PTR);
7777 /* Repeatedly add fake edges, updating the unreachable nodes. */
7780 unvisited_block = flow_dfs_compute_reverse_execute (&dfs_ds);
7781 if (!unvisited_block)
7783 make_edge (NULL, unvisited_block, EXIT_BLOCK_PTR, EDGE_FAKE);
7784 flow_dfs_compute_reverse_add_bb (&dfs_ds, unvisited_block);
7787 flow_dfs_compute_reverse_finish (&dfs_ds);
7792 /* Redirect an edge's successor from one block to another. */
7795 redirect_edge_succ (e, new_succ)
7797 basic_block new_succ;
7801 /* Disconnect the edge from the old successor block. */
7802 for (pe = &e->dest->pred; *pe != e; pe = &(*pe)->pred_next)
7804 *pe = (*pe)->pred_next;
7806 /* Reconnect the edge to the new successor block. */
7807 e->pred_next = new_succ->pred;
7812 /* Redirect an edge's predecessor from one block to another. */
7815 redirect_edge_pred (e, new_pred)
7817 basic_block new_pred;
7821 /* Disconnect the edge from the old predecessor block. */
7822 for (pe = &e->src->succ; *pe != e; pe = &(*pe)->succ_next)
7824 *pe = (*pe)->succ_next;
7826 /* Reconnect the edge to the new predecessor block. */
7827 e->succ_next = new_pred->succ;
7832 /* Dump the list of basic blocks in the bitmap NODES. */
7835 flow_nodes_print (str, nodes, file)
7837 const sbitmap nodes;
7845 fprintf (file, "%s { ", str);
7846 EXECUTE_IF_SET_IN_SBITMAP (nodes, 0, node, {fprintf (file, "%d ", node);});
7847 fputs ("}\n", file);
7851 /* Dump the list of edges in the array EDGE_LIST. */
7854 flow_edge_list_print (str, edge_list, num_edges, file)
7856 const edge *edge_list;
7865 fprintf (file, "%s { ", str);
7866 for (i = 0; i < num_edges; i++)
7867 fprintf (file, "%d->%d ", edge_list[i]->src->index,
7868 edge_list[i]->dest->index);
7869 fputs ("}\n", file);
7873 /* Dump loop related CFG information. */
7876 flow_loops_cfg_dump (loops, file)
7877 const struct loops *loops;
7882 if (! loops->num || ! file || ! loops->cfg.dom)
7885 for (i = 0; i < n_basic_blocks; i++)
7889 fprintf (file, ";; %d succs { ", i);
7890 for (succ = BASIC_BLOCK (i)->succ; succ; succ = succ->succ_next)
7891 fprintf (file, "%d ", succ->dest->index);
7892 flow_nodes_print ("} dom", loops->cfg.dom[i], file);
7895 /* Dump the DFS node order. */
7896 if (loops->cfg.dfs_order)
7898 fputs (";; DFS order: ", file);
7899 for (i = 0; i < n_basic_blocks; i++)
7900 fprintf (file, "%d ", loops->cfg.dfs_order[i]);
7903 /* Dump the reverse completion node order. */
7904 if (loops->cfg.rc_order)
7906 fputs (";; RC order: ", file);
7907 for (i = 0; i < n_basic_blocks; i++)
7908 fprintf (file, "%d ", loops->cfg.rc_order[i]);
7913 /* Return non-zero if the nodes of LOOP are a subset of OUTER. */
7916 flow_loop_nested_p (outer, loop)
7920 return sbitmap_a_subset_b_p (loop->nodes, outer->nodes);
7924 /* Dump the loop information specified by LOOP to the stream FILE
7925 using auxiliary dump callback function LOOP_DUMP_AUX if non null. */
7927 flow_loop_dump (loop, file, loop_dump_aux, verbose)
7928 const struct loop *loop;
7930 void (*loop_dump_aux) PARAMS((const struct loop *, FILE *, int));
7933 if (! loop || ! loop->header)
7936 fprintf (file, ";;\n;; Loop %d (%d to %d):%s%s\n",
7937 loop->num, INSN_UID (loop->first->head),
7938 INSN_UID (loop->last->end),
7939 loop->shared ? " shared" : "",
7940 loop->invalid ? " invalid" : "");
7941 fprintf (file, ";; header %d, latch %d, pre-header %d, first %d, last %d\n",
7942 loop->header->index, loop->latch->index,
7943 loop->pre_header ? loop->pre_header->index : -1,
7944 loop->first->index, loop->last->index);
7945 fprintf (file, ";; depth %d, level %d, outer %ld\n",
7946 loop->depth, loop->level,
7947 (long) (loop->outer ? loop->outer->num : -1));
7949 if (loop->pre_header_edges)
7950 flow_edge_list_print (";; pre-header edges", loop->pre_header_edges,
7951 loop->num_pre_header_edges, file);
7952 flow_edge_list_print (";; entry edges", loop->entry_edges,
7953 loop->num_entries, file);
7954 fprintf (file, ";; %d", loop->num_nodes);
7955 flow_nodes_print (" nodes", loop->nodes, file);
7956 flow_edge_list_print (";; exit edges", loop->exit_edges,
7957 loop->num_exits, file);
7958 if (loop->exits_doms)
7959 flow_nodes_print (";; exit doms", loop->exits_doms, file);
7961 loop_dump_aux (loop, file, verbose);
7965 /* Dump the loop information specified by LOOPS to the stream FILE,
7966 using auxiliary dump callback function LOOP_DUMP_AUX if non null. */
7968 flow_loops_dump (loops, file, loop_dump_aux, verbose)
7969 const struct loops *loops;
7971 void (*loop_dump_aux) PARAMS((const struct loop *, FILE *, int));
7977 num_loops = loops->num;
7978 if (! num_loops || ! file)
7981 fprintf (file, ";; %d loops found, %d levels\n",
7982 num_loops, loops->levels);
7984 for (i = 0; i < num_loops; i++)
7986 struct loop *loop = &loops->array[i];
7988 flow_loop_dump (loop, file, loop_dump_aux, verbose);
7994 for (j = 0; j < i; j++)
7996 struct loop *oloop = &loops->array[j];
7998 if (loop->header == oloop->header)
8003 smaller = loop->num_nodes < oloop->num_nodes;
8005 /* If the union of LOOP and OLOOP is different than
8006 the larger of LOOP and OLOOP then LOOP and OLOOP
8007 must be disjoint. */
8008 disjoint = ! flow_loop_nested_p (smaller ? loop : oloop,
8009 smaller ? oloop : loop);
8011 ";; loop header %d shared by loops %d, %d %s\n",
8012 loop->header->index, i, j,
8013 disjoint ? "disjoint" : "nested");
8020 flow_loops_cfg_dump (loops, file);
8024 /* Free all the memory allocated for LOOPS. */
8027 flow_loops_free (loops)
8028 struct loops *loops;
8037 /* Free the loop descriptors. */
8038 for (i = 0; i < loops->num; i++)
8040 struct loop *loop = &loops->array[i];
8042 if (loop->pre_header_edges)
8043 free (loop->pre_header_edges);
8045 sbitmap_free (loop->nodes);
8046 if (loop->entry_edges)
8047 free (loop->entry_edges);
8048 if (loop->exit_edges)
8049 free (loop->exit_edges);
8050 if (loop->exits_doms)
8051 sbitmap_free (loop->exits_doms);
8053 free (loops->array);
8054 loops->array = NULL;
8057 sbitmap_vector_free (loops->cfg.dom);
8058 if (loops->cfg.dfs_order)
8059 free (loops->cfg.dfs_order);
8061 if (loops->shared_headers)
8062 sbitmap_free (loops->shared_headers);
8067 /* Find the entry edges into the loop with header HEADER and nodes
8068 NODES and store in ENTRY_EDGES array. Return the number of entry
8069 edges from the loop. */
8072 flow_loop_entry_edges_find (header, nodes, entry_edges)
8074 const sbitmap nodes;
8080 *entry_edges = NULL;
8083 for (e = header->pred; e; e = e->pred_next)
8085 basic_block src = e->src;
8087 if (src == ENTRY_BLOCK_PTR || ! TEST_BIT (nodes, src->index))
8094 *entry_edges = (edge *) xmalloc (num_entries * sizeof (edge *));
8097 for (e = header->pred; e; e = e->pred_next)
8099 basic_block src = e->src;
8101 if (src == ENTRY_BLOCK_PTR || ! TEST_BIT (nodes, src->index))
8102 (*entry_edges)[num_entries++] = e;
8109 /* Find the exit edges from the loop using the bitmap of loop nodes
8110 NODES and store in EXIT_EDGES array. Return the number of
8111 exit edges from the loop. */
8114 flow_loop_exit_edges_find (nodes, exit_edges)
8115 const sbitmap nodes;
8124 /* Check all nodes within the loop to see if there are any
8125 successors not in the loop. Note that a node may have multiple
8126 exiting edges ????? A node can have one jumping edge and one fallthru
8127 edge so only one of these can exit the loop. */
8129 EXECUTE_IF_SET_IN_SBITMAP (nodes, 0, node, {
8130 for (e = BASIC_BLOCK (node)->succ; e; e = e->succ_next)
8132 basic_block dest = e->dest;
8134 if (dest == EXIT_BLOCK_PTR || ! TEST_BIT (nodes, dest->index))
8142 *exit_edges = (edge *) xmalloc (num_exits * sizeof (edge *));
8144 /* Store all exiting edges into an array. */
8146 EXECUTE_IF_SET_IN_SBITMAP (nodes, 0, node, {
8147 for (e = BASIC_BLOCK (node)->succ; e; e = e->succ_next)
8149 basic_block dest = e->dest;
8151 if (dest == EXIT_BLOCK_PTR || ! TEST_BIT (nodes, dest->index))
8152 (*exit_edges)[num_exits++] = e;
8160 /* Find the nodes contained within the loop with header HEADER and
8161 latch LATCH and store in NODES. Return the number of nodes within
8165 flow_loop_nodes_find (header, latch, nodes)
8174 stack = (basic_block *) xmalloc (n_basic_blocks * sizeof (basic_block));
8177 /* Start with only the loop header in the set of loop nodes. */
8178 sbitmap_zero (nodes);
8179 SET_BIT (nodes, header->index);
8181 header->loop_depth++;
8183 /* Push the loop latch on to the stack. */
8184 if (! TEST_BIT (nodes, latch->index))
8186 SET_BIT (nodes, latch->index);
8187 latch->loop_depth++;
8189 stack[sp++] = latch;
8198 for (e = node->pred; e; e = e->pred_next)
8200 basic_block ancestor = e->src;
8202 /* If each ancestor not marked as part of loop, add to set of
8203 loop nodes and push on to stack. */
8204 if (ancestor != ENTRY_BLOCK_PTR
8205 && ! TEST_BIT (nodes, ancestor->index))
8207 SET_BIT (nodes, ancestor->index);
8208 ancestor->loop_depth++;
8210 stack[sp++] = ancestor;
8218 /* Compute the depth first search order and store in the array
8219 DFS_ORDER if non-zero, marking the nodes visited in VISITED. If
8220 RC_ORDER is non-zero, return the reverse completion number for each
8221 node. Returns the number of nodes visited. A depth first search
8222 tries to get as far away from the starting point as quickly as
8226 flow_depth_first_order_compute (dfs_order, rc_order)
8233 int rcnum = n_basic_blocks - 1;
8236 /* Allocate stack for back-tracking up CFG. */
8237 stack = (edge *) xmalloc ((n_basic_blocks + 1) * sizeof (edge));
8240 /* Allocate bitmap to track nodes that have been visited. */
8241 visited = sbitmap_alloc (n_basic_blocks);
8243 /* None of the nodes in the CFG have been visited yet. */
8244 sbitmap_zero (visited);
8246 /* Push the first edge on to the stack. */
8247 stack[sp++] = ENTRY_BLOCK_PTR->succ;
8255 /* Look at the edge on the top of the stack. */
8260 /* Check if the edge destination has been visited yet. */
8261 if (dest != EXIT_BLOCK_PTR && ! TEST_BIT (visited, dest->index))
8263 /* Mark that we have visited the destination. */
8264 SET_BIT (visited, dest->index);
8267 dfs_order[dfsnum++] = dest->index;
8271 /* Since the DEST node has been visited for the first
8272 time, check its successors. */
8273 stack[sp++] = dest->succ;
8277 /* There are no successors for the DEST node so assign
8278 its reverse completion number. */
8280 rc_order[rcnum--] = dest->index;
8285 if (! e->succ_next && src != ENTRY_BLOCK_PTR)
8287 /* There are no more successors for the SRC node
8288 so assign its reverse completion number. */
8290 rc_order[rcnum--] = src->index;
8294 stack[sp - 1] = e->succ_next;
8301 sbitmap_free (visited);
8303 /* The number of nodes visited should not be greater than
8305 if (dfsnum > n_basic_blocks)
8308 /* There are some nodes left in the CFG that are unreachable. */
8309 if (dfsnum < n_basic_blocks)
8314 /* Compute the depth first search order on the _reverse_ graph and
8315 store in the array DFS_ORDER, marking the nodes visited in VISITED.
8316 Returns the number of nodes visited.
8318 The computation is split into three pieces:
8320 flow_dfs_compute_reverse_init () creates the necessary data
8323 flow_dfs_compute_reverse_add_bb () adds a basic block to the data
8324 structures. The block will start the search.
8326 flow_dfs_compute_reverse_execute () continues (or starts) the
8327 search using the block on the top of the stack, stopping when the
8330 flow_dfs_compute_reverse_finish () destroys the necessary data
8333 Thus, the user will probably call ..._init(), call ..._add_bb() to
8334 add a beginning basic block to the stack, call ..._execute(),
8335 possibly add another bb to the stack and again call ..._execute(),
8336 ..., and finally call _finish(). */
8338 /* Initialize the data structures used for depth-first search on the
8339 reverse graph. If INITIALIZE_STACK is nonzero, the exit block is
8340 added to the basic block stack. DATA is the current depth-first
8341 search context. If INITIALIZE_STACK is non-zero, there is an
8342 element on the stack. */
8345 flow_dfs_compute_reverse_init (data)
8346 depth_first_search_ds data;
8348 /* Allocate stack for back-tracking up CFG. */
8350 (basic_block *) xmalloc ((n_basic_blocks - (INVALID_BLOCK + 1))
8351 * sizeof (basic_block));
8354 /* Allocate bitmap to track nodes that have been visited. */
8355 data->visited_blocks = sbitmap_alloc (n_basic_blocks - (INVALID_BLOCK + 1));
8357 /* None of the nodes in the CFG have been visited yet. */
8358 sbitmap_zero (data->visited_blocks);
8363 /* Add the specified basic block to the top of the dfs data
8364 structures. When the search continues, it will start at the
8368 flow_dfs_compute_reverse_add_bb (data, bb)
8369 depth_first_search_ds data;
8372 data->stack[data->sp++] = bb;
8376 /* Continue the depth-first search through the reverse graph starting
8377 with the block at the stack's top and ending when the stack is
8378 empty. Visited nodes are marked. Returns an unvisited basic
8379 block, or NULL if there is none available. */
8382 flow_dfs_compute_reverse_execute (data)
8383 depth_first_search_ds data;
8389 while (data->sp > 0)
8391 bb = data->stack[--data->sp];
8393 /* Mark that we have visited this node. */
8394 if (!TEST_BIT (data->visited_blocks, bb->index - (INVALID_BLOCK + 1)))
8396 SET_BIT (data->visited_blocks, bb->index - (INVALID_BLOCK + 1));
8398 /* Perform depth-first search on adjacent vertices. */
8399 for (e = bb->pred; e; e = e->pred_next)
8400 flow_dfs_compute_reverse_add_bb (data, e->src);
8404 /* Determine if there are unvisited basic blocks. */
8405 for (i = n_basic_blocks - (INVALID_BLOCK + 1); --i >= 0;)
8406 if (!TEST_BIT (data->visited_blocks, i))
8407 return BASIC_BLOCK (i + (INVALID_BLOCK + 1));
8411 /* Destroy the data structures needed for depth-first search on the
8415 flow_dfs_compute_reverse_finish (data)
8416 depth_first_search_ds data;
8419 sbitmap_free (data->visited_blocks);
8424 /* Find the root node of the loop pre-header extended basic block and
8425 the edges along the trace from the root node to the loop header. */
8428 flow_loop_pre_header_scan (loop)
8434 loop->num_pre_header_edges = 0;
8436 if (loop->num_entries != 1)
8439 ebb = loop->entry_edges[0]->src;
8441 if (ebb != ENTRY_BLOCK_PTR)
8445 /* Count number of edges along trace from loop header to
8446 root of pre-header extended basic block. Usually this is
8447 only one or two edges. */
8449 while (ebb->pred->src != ENTRY_BLOCK_PTR && ! ebb->pred->pred_next)
8451 ebb = ebb->pred->src;
8455 loop->pre_header_edges = (edge *) xmalloc (num * sizeof (edge *));
8456 loop->num_pre_header_edges = num;
8458 /* Store edges in order that they are followed. The source
8459 of the first edge is the root node of the pre-header extended
8460 basic block and the destination of the last last edge is
8462 for (e = loop->entry_edges[0]; num; e = e->src->pred)
8464 loop->pre_header_edges[--num] = e;
8470 /* Return the block for the pre-header of the loop with header
8471 HEADER where DOM specifies the dominator information. Return NULL if
8472 there is no pre-header. */
8475 flow_loop_pre_header_find (header, dom)
8479 basic_block pre_header;
8482 /* If block p is a predecessor of the header and is the only block
8483 that the header does not dominate, then it is the pre-header. */
8485 for (e = header->pred; e; e = e->pred_next)
8487 basic_block node = e->src;
8489 if (node != ENTRY_BLOCK_PTR
8490 && ! TEST_BIT (dom[node->index], header->index))
8492 if (pre_header == NULL)
8496 /* There are multiple edges into the header from outside
8497 the loop so there is no pre-header block. */
8506 /* Add LOOP to the loop hierarchy tree where PREVLOOP was the loop
8507 previously added. The insertion algorithm assumes that the loops
8508 are added in the order found by a depth first search of the CFG. */
8511 flow_loop_tree_node_add (prevloop, loop)
8512 struct loop *prevloop;
8516 if (flow_loop_nested_p (prevloop, loop))
8518 prevloop->inner = loop;
8519 loop->outer = prevloop;
8523 while (prevloop->outer)
8525 if (flow_loop_nested_p (prevloop->outer, loop))
8527 prevloop->next = loop;
8528 loop->outer = prevloop->outer;
8531 prevloop = prevloop->outer;
8534 prevloop->next = loop;
8538 /* Build the loop hierarchy tree for LOOPS. */
8541 flow_loops_tree_build (loops)
8542 struct loops *loops;
8547 num_loops = loops->num;
8551 /* Root the loop hierarchy tree with the first loop found.
8552 Since we used a depth first search this should be the
8554 loops->tree = &loops->array[0];
8555 loops->tree->outer = loops->tree->inner = loops->tree->next = NULL;
8557 /* Add the remaining loops to the tree. */
8558 for (i = 1; i < num_loops; i++)
8559 flow_loop_tree_node_add (&loops->array[i - 1], &loops->array[i]);
8562 /* Helper function to compute loop nesting depth and enclosed loop level
8563 for the natural loop specified by LOOP at the loop depth DEPTH.
8564 Returns the loop level. */
8567 flow_loop_level_compute (loop, depth)
8577 /* Traverse loop tree assigning depth and computing level as the
8578 maximum level of all the inner loops of this loop. The loop
8579 level is equivalent to the height of the loop in the loop tree
8580 and corresponds to the number of enclosed loop levels (including
8582 for (inner = loop->inner; inner; inner = inner->next)
8586 ilevel = flow_loop_level_compute (inner, depth + 1) + 1;
8591 loop->level = level;
8592 loop->depth = depth;
8596 /* Compute the loop nesting depth and enclosed loop level for the loop
8597 hierarchy tree specfied by LOOPS. Return the maximum enclosed loop
8601 flow_loops_level_compute (loops)
8602 struct loops *loops;
8608 /* Traverse all the outer level loops. */
8609 for (loop = loops->tree; loop; loop = loop->next)
8611 level = flow_loop_level_compute (loop, 1);
8619 /* Scan a single natural loop specified by LOOP collecting information
8620 about it specified by FLAGS. */
8623 flow_loop_scan (loops, loop, flags)
8624 struct loops *loops;
8628 /* Determine prerequisites. */
8629 if ((flags & LOOP_EXITS_DOMS) && ! loop->exit_edges)
8630 flags |= LOOP_EXIT_EDGES;
8632 if (flags & LOOP_ENTRY_EDGES)
8634 /* Find edges which enter the loop header.
8635 Note that the entry edges should only
8636 enter the header of a natural loop. */
8638 = flow_loop_entry_edges_find (loop->header,
8640 &loop->entry_edges);
8643 if (flags & LOOP_EXIT_EDGES)
8645 /* Find edges which exit the loop. */
8647 = flow_loop_exit_edges_find (loop->nodes,
8651 if (flags & LOOP_EXITS_DOMS)
8655 /* Determine which loop nodes dominate all the exits
8657 loop->exits_doms = sbitmap_alloc (n_basic_blocks);
8658 sbitmap_copy (loop->exits_doms, loop->nodes);
8659 for (j = 0; j < loop->num_exits; j++)
8660 sbitmap_a_and_b (loop->exits_doms, loop->exits_doms,
8661 loops->cfg.dom[loop->exit_edges[j]->src->index]);
8663 /* The header of a natural loop must dominate
8665 if (! TEST_BIT (loop->exits_doms, loop->header->index))
8669 if (flags & LOOP_PRE_HEADER)
8671 /* Look to see if the loop has a pre-header node. */
8673 = flow_loop_pre_header_find (loop->header, loops->cfg.dom);
8675 /* Find the blocks within the extended basic block of
8676 the loop pre-header. */
8677 flow_loop_pre_header_scan (loop);
8683 /* Find all the natural loops in the function and save in LOOPS structure
8684 and recalculate loop_depth information in basic block structures.
8685 FLAGS controls which loop information is collected.
8686 Return the number of natural loops found. */
8689 flow_loops_find (loops, flags)
8690 struct loops *loops;
8702 /* This function cannot be repeatedly called with different
8703 flags to build up the loop information. The loop tree
8704 must always be built if this function is called. */
8705 if (! (flags & LOOP_TREE))
8708 memset (loops, 0, sizeof (*loops));
8710 /* Taking care of this degenerate case makes the rest of
8711 this code simpler. */
8712 if (n_basic_blocks == 0)
8718 /* Compute the dominators. */
8719 dom = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
8720 calculate_dominance_info (NULL, dom, CDI_DOMINATORS);
8722 /* Count the number of loop edges (back edges). This should be the
8723 same as the number of natural loops. */
8726 for (b = 0; b < n_basic_blocks; b++)
8730 header = BASIC_BLOCK (b);
8731 header->loop_depth = 0;
8733 for (e = header->pred; e; e = e->pred_next)
8735 basic_block latch = e->src;
8737 /* Look for back edges where a predecessor is dominated
8738 by this block. A natural loop has a single entry
8739 node (header) that dominates all the nodes in the
8740 loop. It also has single back edge to the header
8741 from a latch node. Note that multiple natural loops
8742 may share the same header. */
8743 if (b != header->index)
8746 if (latch != ENTRY_BLOCK_PTR && TEST_BIT (dom[latch->index], b))
8753 /* Compute depth first search order of the CFG so that outer
8754 natural loops will be found before inner natural loops. */
8755 dfs_order = (int *) xmalloc (n_basic_blocks * sizeof (int));
8756 rc_order = (int *) xmalloc (n_basic_blocks * sizeof (int));
8757 flow_depth_first_order_compute (dfs_order, rc_order);
8759 /* Save CFG derived information to avoid recomputing it. */
8760 loops->cfg.dom = dom;
8761 loops->cfg.dfs_order = dfs_order;
8762 loops->cfg.rc_order = rc_order;
8764 /* Allocate loop structures. */
8766 = (struct loop *) xcalloc (num_loops, sizeof (struct loop));
8768 headers = sbitmap_alloc (n_basic_blocks);
8769 sbitmap_zero (headers);
8771 loops->shared_headers = sbitmap_alloc (n_basic_blocks);
8772 sbitmap_zero (loops->shared_headers);
8774 /* Find and record information about all the natural loops
8777 for (b = 0; b < n_basic_blocks; b++)
8781 /* Search the nodes of the CFG in reverse completion order
8782 so that we can find outer loops first. */
8783 header = BASIC_BLOCK (rc_order[b]);
8785 /* Look for all the possible latch blocks for this header. */
8786 for (e = header->pred; e; e = e->pred_next)
8788 basic_block latch = e->src;
8790 /* Look for back edges where a predecessor is dominated
8791 by this block. A natural loop has a single entry
8792 node (header) that dominates all the nodes in the
8793 loop. It also has single back edge to the header
8794 from a latch node. Note that multiple natural loops
8795 may share the same header. */
8796 if (latch != ENTRY_BLOCK_PTR
8797 && TEST_BIT (dom[latch->index], header->index))
8801 loop = loops->array + num_loops;
8803 loop->header = header;
8804 loop->latch = latch;
8805 loop->num = num_loops;
8812 for (i = 0; i < num_loops; i++)
8814 struct loop *loop = &loops->array[i];
8816 /* Keep track of blocks that are loop headers so
8817 that we can tell which loops should be merged. */
8818 if (TEST_BIT (headers, loop->header->index))
8819 SET_BIT (loops->shared_headers, loop->header->index);
8820 SET_BIT (headers, loop->header->index);
8822 /* Find nodes contained within the loop. */
8823 loop->nodes = sbitmap_alloc (n_basic_blocks);
8825 = flow_loop_nodes_find (loop->header, loop->latch, loop->nodes);
8827 /* Compute first and last blocks within the loop.
8828 These are often the same as the loop header and
8829 loop latch respectively, but this is not always
8832 = BASIC_BLOCK (sbitmap_first_set_bit (loop->nodes));
8834 = BASIC_BLOCK (sbitmap_last_set_bit (loop->nodes));
8836 flow_loop_scan (loops, loop, flags);
8839 /* Natural loops with shared headers may either be disjoint or
8840 nested. Disjoint loops with shared headers cannot be inner
8841 loops and should be merged. For now just mark loops that share
8843 for (i = 0; i < num_loops; i++)
8844 if (TEST_BIT (loops->shared_headers, loops->array[i].header->index))
8845 loops->array[i].shared = 1;
8847 sbitmap_free (headers);
8850 loops->num = num_loops;
8852 /* Build the loop hierarchy tree. */
8853 flow_loops_tree_build (loops);
8855 /* Assign the loop nesting depth and enclosed loop level for each
8857 loops->levels = flow_loops_level_compute (loops);
8863 /* Update the information regarding the loops in the CFG
8864 specified by LOOPS. */
8866 flow_loops_update (loops, flags)
8867 struct loops *loops;
8870 /* One day we may want to update the current loop data. For now
8871 throw away the old stuff and rebuild what we need. */
8873 flow_loops_free (loops);
8875 return flow_loops_find (loops, flags);
8879 /* Return non-zero if edge E enters header of LOOP from outside of LOOP. */
8882 flow_loop_outside_edge_p (loop, e)
8883 const struct loop *loop;
8886 if (e->dest != loop->header)
8888 return (e->src == ENTRY_BLOCK_PTR)
8889 || ! TEST_BIT (loop->nodes, e->src->index);
8892 /* Clear LOG_LINKS fields of insns in a chain.
8893 Also clear the global_live_at_{start,end} fields of the basic block
8897 clear_log_links (insns)
8903 for (i = insns; i; i = NEXT_INSN (i))
8907 for (b = 0; b < n_basic_blocks; b++)
8909 basic_block bb = BASIC_BLOCK (b);
8911 bb->global_live_at_start = NULL;
8912 bb->global_live_at_end = NULL;
8915 ENTRY_BLOCK_PTR->global_live_at_end = NULL;
8916 EXIT_BLOCK_PTR->global_live_at_start = NULL;
8919 /* Given a register bitmap, turn on the bits in a HARD_REG_SET that
8920 correspond to the hard registers, if any, set in that map. This
8921 could be done far more efficiently by having all sorts of special-cases
8922 with moving single words, but probably isn't worth the trouble. */
8925 reg_set_to_hard_reg_set (to, from)
8931 EXECUTE_IF_SET_IN_BITMAP
8934 if (i >= FIRST_PSEUDO_REGISTER)
8936 SET_HARD_REG_BIT (*to, i);
8940 /* Called once at intialization time. */
8945 static int initialized;
8949 gcc_obstack_init (&flow_obstack);
8950 flow_firstobj = (char *) obstack_alloc (&flow_obstack, 0);
8955 obstack_free (&flow_obstack, flow_firstobj);
8956 flow_firstobj = (char *) obstack_alloc (&flow_obstack, 0);