1 /* Data flow analysis for GNU compiler.
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001 Free Software Foundation, Inc.
5 This file is part of GNU CC.
7 GNU CC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
12 GNU CC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GNU CC; see the file COPYING. If not, write to
19 the Free Software Foundation, 59 Temple Place - Suite 330,
20 Boston, MA 02111-1307, USA. */
22 /* This file contains the data flow analysis pass of the compiler. It
23 computes data flow information which tells combine_instructions
24 which insns to consider combining and controls register allocation.
26 Additional data flow information that is too bulky to record is
27 generated during the analysis, and is used at that time to create
28 autoincrement and autodecrement addressing.
30 The first step is dividing the function into basic blocks.
31 find_basic_blocks does this. Then life_analysis determines
32 where each register is live and where it is dead.
34 ** find_basic_blocks **
36 find_basic_blocks divides the current function's rtl into basic
37 blocks and constructs the CFG. The blocks are recorded in the
38 basic_block_info array; the CFG exists in the edge structures
39 referenced by the blocks.
41 find_basic_blocks also finds any unreachable loops and deletes them.
45 life_analysis is called immediately after find_basic_blocks.
46 It uses the basic block information to determine where each
47 hard or pseudo register is live.
49 ** live-register info **
51 The information about where each register is live is in two parts:
52 the REG_NOTES of insns, and the vector basic_block->global_live_at_start.
54 basic_block->global_live_at_start has an element for each basic
55 block, and the element is a bit-vector with a bit for each hard or
56 pseudo register. The bit is 1 if the register is live at the
57 beginning of the basic block.
59 Two types of elements can be added to an insn's REG_NOTES.
60 A REG_DEAD note is added to an insn's REG_NOTES for any register
61 that meets both of two conditions: The value in the register is not
62 needed in subsequent insns and the insn does not replace the value in
63 the register (in the case of multi-word hard registers, the value in
64 each register must be replaced by the insn to avoid a REG_DEAD note).
66 In the vast majority of cases, an object in a REG_DEAD note will be
67 used somewhere in the insn. The (rare) exception to this is if an
68 insn uses a multi-word hard register and only some of the registers are
69 needed in subsequent insns. In that case, REG_DEAD notes will be
70 provided for those hard registers that are not subsequently needed.
71 Partial REG_DEAD notes of this type do not occur when an insn sets
72 only some of the hard registers used in such a multi-word operand;
73 omitting REG_DEAD notes for objects stored in an insn is optional and
74 the desire to do so does not justify the complexity of the partial
77 REG_UNUSED notes are added for each register that is set by the insn
78 but is unused subsequently (if every register set by the insn is unused
79 and the insn does not reference memory or have some other side-effect,
80 the insn is deleted instead). If only part of a multi-word hard
81 register is used in a subsequent insn, REG_UNUSED notes are made for
82 the parts that will not be used.
84 To determine which registers are live after any insn, one can
85 start from the beginning of the basic block and scan insns, noting
86 which registers are set by each insn and which die there.
88 ** Other actions of life_analysis **
90 life_analysis sets up the LOG_LINKS fields of insns because the
91 information needed to do so is readily available.
93 life_analysis deletes insns whose only effect is to store a value
96 life_analysis notices cases where a reference to a register as
97 a memory address can be combined with a preceding or following
98 incrementation or decrementation of the register. The separate
99 instruction to increment or decrement is deleted and the address
100 is changed to a POST_INC or similar rtx.
102 Each time an incrementing or decrementing address is created,
103 a REG_INC element is added to the insn's REG_NOTES list.
105 life_analysis fills in certain vectors containing information about
106 register usage: REG_N_REFS, REG_N_DEATHS, REG_N_SETS, REG_LIVE_LENGTH,
107 REG_N_CALLS_CROSSED and REG_BASIC_BLOCK.
109 life_analysis sets current_function_sp_is_unchanging if the function
110 doesn't modify the stack pointer. */
114 Split out from life_analysis:
115 - local property discovery (bb->local_live, bb->local_set)
116 - global property computation
118 - pre/post modify transformation
126 #include "hard-reg-set.h"
127 #include "basic-block.h"
128 #include "insn-config.h"
132 #include "function.h"
140 #include "splay-tree.h"
142 #define obstack_chunk_alloc xmalloc
143 #define obstack_chunk_free free
145 /* EXIT_IGNORE_STACK should be nonzero if, when returning from a function,
146 the stack pointer does not matter. The value is tested only in
147 functions that have frame pointers.
148 No definition is equivalent to always zero. */
149 #ifndef EXIT_IGNORE_STACK
150 #define EXIT_IGNORE_STACK 0
153 #ifndef HAVE_epilogue
154 #define HAVE_epilogue 0
156 #ifndef HAVE_prologue
157 #define HAVE_prologue 0
159 #ifndef HAVE_sibcall_epilogue
160 #define HAVE_sibcall_epilogue 0
164 #define LOCAL_REGNO(REGNO) 0
166 #ifndef EPILOGUE_USES
167 #define EPILOGUE_USES(REGNO) 0
170 #ifdef HAVE_conditional_execution
171 #ifndef REVERSE_CONDEXEC_PREDICATES_P
172 #define REVERSE_CONDEXEC_PREDICATES_P(x, y) ((x) == reverse_condition (y))
176 /* The obstack on which the flow graph components are allocated. */
178 struct obstack flow_obstack;
179 static char *flow_firstobj;
181 /* Number of basic blocks in the current function. */
185 /* Number of edges in the current function. */
189 /* The basic block array. */
191 varray_type basic_block_info;
193 /* The special entry and exit blocks. */
195 struct basic_block_def entry_exit_blocks[2]
200 NULL, /* local_set */
201 NULL, /* cond_local_set */
202 NULL, /* global_live_at_start */
203 NULL, /* global_live_at_end */
205 ENTRY_BLOCK, /* index */
215 NULL, /* local_set */
216 NULL, /* cond_local_set */
217 NULL, /* global_live_at_start */
218 NULL, /* global_live_at_end */
220 EXIT_BLOCK, /* index */
227 /* Nonzero if the second flow pass has completed. */
230 /* Maximum register number used in this function, plus one. */
234 /* Indexed by n, giving various register information */
236 varray_type reg_n_info;
238 /* Size of a regset for the current function,
239 in (1) bytes and (2) elements. */
244 /* Regset of regs live when calls to `setjmp'-like functions happen. */
245 /* ??? Does this exist only for the setjmp-clobbered warning message? */
247 regset regs_live_at_setjmp;
249 /* List made of EXPR_LIST rtx's which gives pairs of pseudo registers
250 that have to go in the same hard reg.
251 The first two regs in the list are a pair, and the next two
252 are another pair, etc. */
255 /* Callback that determines if it's ok for a function to have no
256 noreturn attribute. */
257 int (*lang_missing_noreturn_ok_p) PARAMS ((tree));
259 /* Set of registers that may be eliminable. These are handled specially
260 in updating regs_ever_live. */
262 static HARD_REG_SET elim_reg_set;
264 /* The basic block structure for every insn, indexed by uid. */
266 varray_type basic_block_for_insn;
268 /* The labels mentioned in non-jump rtl. Valid during find_basic_blocks. */
269 /* ??? Should probably be using LABEL_NUSES instead. It would take a
270 bit of surgery to be able to use or co-opt the routines in jump. */
272 static rtx label_value_list;
273 static rtx tail_recursion_label_list;
275 /* Holds information for tracking conditional register life information. */
276 struct reg_cond_life_info
278 /* A boolean expression of conditions under which a register is dead. */
280 /* Conditions under which a register is dead at the basic block end. */
283 /* A boolean expression of conditions under which a register has been
287 /* ??? Could store mask of bytes that are dead, so that we could finally
288 track lifetimes of multi-word registers accessed via subregs. */
291 /* For use in communicating between propagate_block and its subroutines.
292 Holds all information needed to compute life and def-use information. */
294 struct propagate_block_info
296 /* The basic block we're considering. */
299 /* Bit N is set if register N is conditionally or unconditionally live. */
302 /* Bit N is set if register N is set this insn. */
305 /* Element N is the next insn that uses (hard or pseudo) register N
306 within the current basic block; or zero, if there is no such insn. */
309 /* Contains a list of all the MEMs we are tracking for dead store
313 /* If non-null, record the set of registers set unconditionally in the
317 /* If non-null, record the set of registers set conditionally in the
319 regset cond_local_set;
321 #ifdef HAVE_conditional_execution
322 /* Indexed by register number, holds a reg_cond_life_info for each
323 register that is not unconditionally live or dead. */
324 splay_tree reg_cond_dead;
326 /* Bit N is set if register N is in an expression in reg_cond_dead. */
330 /* The length of mem_set_list. */
331 int mem_set_list_len;
333 /* Non-zero if the value of CC0 is live. */
336 /* Flags controling the set of information propagate_block collects. */
340 /* Maximum length of pbi->mem_set_list before we start dropping
341 new elements on the floor. */
342 #define MAX_MEM_SET_LIST_LEN 100
344 /* Store the data structures necessary for depth-first search. */
345 struct depth_first_search_dsS {
346 /* stack for backtracking during the algorithm */
349 /* number of edges in the stack. That is, positions 0, ..., sp-1
353 /* record of basic blocks already seen by depth-first search */
354 sbitmap visited_blocks;
356 typedef struct depth_first_search_dsS *depth_first_search_ds;
358 /* Have print_rtl_and_abort give the same information that fancy_abort
360 #define print_rtl_and_abort() \
361 print_rtl_and_abort_fcn (__FILE__, __LINE__, __FUNCTION__)
363 /* Forward declarations */
364 static int count_basic_blocks PARAMS ((rtx));
365 static void find_basic_blocks_1 PARAMS ((rtx));
366 static rtx find_label_refs PARAMS ((rtx, rtx));
367 static void make_edges PARAMS ((rtx));
368 static void make_label_edge PARAMS ((sbitmap *, basic_block,
370 static void make_eh_edge PARAMS ((sbitmap *, basic_block, rtx));
372 static void commit_one_edge_insertion PARAMS ((edge));
374 static void delete_unreachable_blocks PARAMS ((void));
375 static int can_delete_note_p PARAMS ((rtx));
376 static void expunge_block PARAMS ((basic_block));
377 static int can_delete_label_p PARAMS ((rtx));
378 static int tail_recursion_label_p PARAMS ((rtx));
379 static int merge_blocks_move_predecessor_nojumps PARAMS ((basic_block,
381 static int merge_blocks_move_successor_nojumps PARAMS ((basic_block,
383 static int merge_blocks PARAMS ((edge,basic_block,basic_block));
384 static bool try_optimize_cfg PARAMS ((void));
385 static bool forwarder_block_p PARAMS ((basic_block));
386 static bool can_fallthru PARAMS ((basic_block, basic_block));
387 static bool try_redirect_by_replacing_jump PARAMS ((edge, basic_block));
388 static bool try_simplify_condjump PARAMS ((basic_block));
389 static bool try_forward_edges PARAMS ((basic_block));
390 static void tidy_fallthru_edges PARAMS ((void));
391 static int verify_wide_reg_1 PARAMS ((rtx *, void *));
392 static void verify_wide_reg PARAMS ((int, rtx, rtx));
393 static void verify_local_live_at_start PARAMS ((regset, basic_block));
394 static int noop_move_p PARAMS ((rtx));
395 static void delete_noop_moves PARAMS ((rtx));
396 static void notice_stack_pointer_modification_1 PARAMS ((rtx, rtx, void *));
397 static void notice_stack_pointer_modification PARAMS ((rtx));
398 static void mark_reg PARAMS ((rtx, void *));
399 static void mark_regs_live_at_end PARAMS ((regset));
400 static int set_phi_alternative_reg PARAMS ((rtx, int, int, void *));
401 static void calculate_global_regs_live PARAMS ((sbitmap, sbitmap, int));
402 static void propagate_block_delete_insn PARAMS ((basic_block, rtx));
403 static rtx propagate_block_delete_libcall PARAMS ((basic_block, rtx, rtx));
404 static int insn_dead_p PARAMS ((struct propagate_block_info *,
406 static int libcall_dead_p PARAMS ((struct propagate_block_info *,
408 static void mark_set_regs PARAMS ((struct propagate_block_info *,
410 static void mark_set_1 PARAMS ((struct propagate_block_info *,
411 enum rtx_code, rtx, rtx,
413 #ifdef HAVE_conditional_execution
414 static int mark_regno_cond_dead PARAMS ((struct propagate_block_info *,
416 static void free_reg_cond_life_info PARAMS ((splay_tree_value));
417 static int flush_reg_cond_reg_1 PARAMS ((splay_tree_node, void *));
418 static void flush_reg_cond_reg PARAMS ((struct propagate_block_info *,
420 static rtx elim_reg_cond PARAMS ((rtx, unsigned int));
421 static rtx ior_reg_cond PARAMS ((rtx, rtx, int));
422 static rtx not_reg_cond PARAMS ((rtx));
423 static rtx and_reg_cond PARAMS ((rtx, rtx, int));
426 static void attempt_auto_inc PARAMS ((struct propagate_block_info *,
427 rtx, rtx, rtx, rtx, rtx));
428 static void find_auto_inc PARAMS ((struct propagate_block_info *,
430 static int try_pre_increment_1 PARAMS ((struct propagate_block_info *,
432 static int try_pre_increment PARAMS ((rtx, rtx, HOST_WIDE_INT));
434 static void mark_used_reg PARAMS ((struct propagate_block_info *,
436 static void mark_used_regs PARAMS ((struct propagate_block_info *,
438 void dump_flow_info PARAMS ((FILE *));
439 void debug_flow_info PARAMS ((void));
440 static void print_rtl_and_abort_fcn PARAMS ((const char *, int,
444 static void invalidate_mems_from_autoinc PARAMS ((struct propagate_block_info *,
446 static void invalidate_mems_from_set PARAMS ((struct propagate_block_info *,
448 static void remove_fake_successors PARAMS ((basic_block));
449 static void flow_nodes_print PARAMS ((const char *, const sbitmap,
451 static void flow_edge_list_print PARAMS ((const char *, const edge *,
453 static void flow_loops_cfg_dump PARAMS ((const struct loops *,
455 static int flow_loop_nested_p PARAMS ((struct loop *,
457 static int flow_loop_entry_edges_find PARAMS ((basic_block, const sbitmap,
459 static int flow_loop_exit_edges_find PARAMS ((const sbitmap, edge **));
460 static int flow_loop_nodes_find PARAMS ((basic_block, basic_block, sbitmap));
461 static int flow_depth_first_order_compute PARAMS ((int *, int *));
462 static void flow_dfs_compute_reverse_init
463 PARAMS ((depth_first_search_ds));
464 static void flow_dfs_compute_reverse_add_bb
465 PARAMS ((depth_first_search_ds, basic_block));
466 static basic_block flow_dfs_compute_reverse_execute
467 PARAMS ((depth_first_search_ds));
468 static void flow_dfs_compute_reverse_finish
469 PARAMS ((depth_first_search_ds));
470 static void flow_loop_pre_header_scan PARAMS ((struct loop *));
471 static basic_block flow_loop_pre_header_find PARAMS ((basic_block,
473 static void flow_loop_tree_node_add PARAMS ((struct loop *, struct loop *));
474 static void flow_loops_tree_build PARAMS ((struct loops *));
475 static int flow_loop_level_compute PARAMS ((struct loop *, int));
476 static int flow_loops_level_compute PARAMS ((struct loops *));
477 static void allocate_bb_life_data PARAMS ((void));
478 static void find_sub_basic_blocks PARAMS ((basic_block));
479 static bool redirect_edge_and_branch PARAMS ((edge, basic_block));
480 static rtx block_label PARAMS ((basic_block));
482 /* Find basic blocks of the current function.
483 F is the first insn of the function and NREGS the number of register
487 find_basic_blocks (f, nregs, file)
489 int nregs ATTRIBUTE_UNUSED;
490 FILE *file ATTRIBUTE_UNUSED;
494 /* Flush out existing data. */
495 if (basic_block_info != NULL)
501 /* Clear bb->aux on all extant basic blocks. We'll use this as a
502 tag for reuse during create_basic_block, just in case some pass
503 copies around basic block notes improperly. */
504 for (i = 0; i < n_basic_blocks; ++i)
505 BASIC_BLOCK (i)->aux = NULL;
507 VARRAY_FREE (basic_block_info);
510 n_basic_blocks = count_basic_blocks (f);
512 /* Size the basic block table. The actual structures will be allocated
513 by find_basic_blocks_1, since we want to keep the structure pointers
514 stable across calls to find_basic_blocks. */
515 /* ??? This whole issue would be much simpler if we called find_basic_blocks
516 exactly once, and thereafter we don't have a single long chain of
517 instructions at all until close to the end of compilation when we
518 actually lay them out. */
520 VARRAY_BB_INIT (basic_block_info, n_basic_blocks, "basic_block_info");
522 find_basic_blocks_1 (f);
524 /* Record the block to which an insn belongs. */
525 /* ??? This should be done another way, by which (perhaps) a label is
526 tagged directly with the basic block that it starts. It is used for
527 more than that currently, but IMO that is the only valid use. */
529 max_uid = get_max_uid ();
531 /* Leave space for insns life_analysis makes in some cases for auto-inc.
532 These cases are rare, so we don't need too much space. */
533 max_uid += max_uid / 10;
536 compute_bb_for_insn (max_uid);
538 /* Discover the edges of our cfg. */
539 make_edges (label_value_list);
541 /* Do very simple cleanup now, for the benefit of code that runs between
542 here and cleanup_cfg, e.g. thread_prologue_and_epilogue_insns. */
543 tidy_fallthru_edges ();
545 mark_critical_edges ();
547 #ifdef ENABLE_CHECKING
553 check_function_return_warnings ()
555 if (warn_missing_noreturn
556 && !TREE_THIS_VOLATILE (cfun->decl)
557 && EXIT_BLOCK_PTR->pred == NULL
558 && (lang_missing_noreturn_ok_p
559 && !lang_missing_noreturn_ok_p (cfun->decl)))
560 warning ("function might be possible candidate for attribute `noreturn'");
562 /* If we have a path to EXIT, then we do return. */
563 if (TREE_THIS_VOLATILE (cfun->decl)
564 && EXIT_BLOCK_PTR->pred != NULL)
565 warning ("`noreturn' function does return");
567 /* If the clobber_return_insn appears in some basic block, then we
568 do reach the end without returning a value. */
569 else if (warn_return_type
570 && cfun->x_clobber_return_insn != NULL
571 && EXIT_BLOCK_PTR->pred != NULL)
573 int max_uid = get_max_uid ();
575 /* If clobber_return_insn was excised by jump1, then renumber_insns
576 can make max_uid smaller than the number still recorded in our rtx.
577 That's fine, since this is a quick way of verifying that the insn
578 is no longer in the chain. */
579 if (INSN_UID (cfun->x_clobber_return_insn) < max_uid)
581 /* Recompute insn->block mapping, since the initial mapping is
582 set before we delete unreachable blocks. */
583 compute_bb_for_insn (max_uid);
585 if (BLOCK_FOR_INSN (cfun->x_clobber_return_insn) != NULL)
586 warning ("control reaches end of non-void function");
591 /* Count the basic blocks of the function. */
594 count_basic_blocks (f)
598 register RTX_CODE prev_code;
599 register int count = 0;
600 int saw_abnormal_edge = 0;
602 prev_code = JUMP_INSN;
603 for (insn = f; insn; insn = NEXT_INSN (insn))
605 enum rtx_code code = GET_CODE (insn);
607 if (code == CODE_LABEL
608 || (GET_RTX_CLASS (code) == 'i'
609 && (prev_code == JUMP_INSN
610 || prev_code == BARRIER
611 || saw_abnormal_edge)))
613 saw_abnormal_edge = 0;
617 /* Record whether this insn created an edge. */
618 if (code == CALL_INSN)
622 /* If there is a nonlocal goto label and the specified
623 region number isn't -1, we have an edge. */
624 if (nonlocal_goto_handler_labels
625 && ((note = find_reg_note (insn, REG_EH_REGION, NULL_RTX)) == 0
626 || INTVAL (XEXP (note, 0)) >= 0))
627 saw_abnormal_edge = 1;
629 else if (can_throw_internal (insn))
630 saw_abnormal_edge = 1;
632 else if (flag_non_call_exceptions
634 && can_throw_internal (insn))
635 saw_abnormal_edge = 1;
641 /* The rest of the compiler works a bit smoother when we don't have to
642 check for the edge case of do-nothing functions with no basic blocks. */
645 emit_insn (gen_rtx_USE (VOIDmode, const0_rtx));
652 /* Scan a list of insns for labels referred to other than by jumps.
653 This is used to scan the alternatives of a call placeholder. */
655 find_label_refs (f, lvl)
661 for (insn = f; insn; insn = NEXT_INSN (insn))
662 if (INSN_P (insn) && GET_CODE (insn) != JUMP_INSN)
666 /* Make a list of all labels referred to other than by jumps
667 (which just don't have the REG_LABEL notes).
669 Make a special exception for labels followed by an ADDR*VEC,
670 as this would be a part of the tablejump setup code.
672 Make a special exception to registers loaded with label
673 values just before jump insns that use them. */
675 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
676 if (REG_NOTE_KIND (note) == REG_LABEL)
678 rtx lab = XEXP (note, 0), next;
680 if ((next = next_nonnote_insn (lab)) != NULL
681 && GET_CODE (next) == JUMP_INSN
682 && (GET_CODE (PATTERN (next)) == ADDR_VEC
683 || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
685 else if (GET_CODE (lab) == NOTE)
687 else if (GET_CODE (NEXT_INSN (insn)) == JUMP_INSN
688 && find_reg_note (NEXT_INSN (insn), REG_LABEL, lab))
691 lvl = alloc_EXPR_LIST (0, XEXP (note, 0), lvl);
698 /* Assume that someone emitted code with control flow instructions to the
699 basic block. Update the data structure. */
701 find_sub_basic_blocks (bb)
704 rtx first_insn = bb->head, insn;
706 edge succ_list = bb->succ;
707 rtx jump_insn = NULL_RTX;
711 basic_block first_bb = bb, last_bb;
714 if (GET_CODE (first_insn) == LABEL_REF)
715 first_insn = NEXT_INSN (first_insn);
716 first_insn = NEXT_INSN (first_insn);
720 /* Scan insn chain and try to find new basic block boundaries. */
723 enum rtx_code code = GET_CODE (insn);
727 /* We need some special care for those expressions. */
728 if (GET_CODE (PATTERN (insn)) == ADDR_VEC
729 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
738 /* On code label, split current basic block. */
740 falltru = split_block (bb, PREV_INSN (insn));
745 remove_edge (falltru);
749 if (LABEL_ALTERNATE_NAME (insn))
750 make_edge (NULL, ENTRY_BLOCK_PTR, bb, 0);
753 /* In case we've previously split insn on the JUMP_INSN, move the
754 block header to proper place. */
757 falltru = split_block (bb, PREV_INSN (insn));
767 insn = NEXT_INSN (insn);
769 /* Last basic block must end in the original BB end. */
773 /* Wire in the original edges for last basic block. */
776 bb->succ = succ_list;
778 succ_list->src = bb, succ_list = succ_list->succ_next;
781 bb->succ = succ_list;
783 /* Now re-scan and wire in all edges. This expect simple (conditional)
784 jumps at the end of each new basic blocks. */
786 for (i = first_bb->index; i < last_bb->index; i++)
788 bb = BASIC_BLOCK (i);
789 if (GET_CODE (bb->end) == JUMP_INSN)
791 mark_jump_label (PATTERN (bb->end), bb->end, 0, 0);
792 make_label_edge (NULL, bb, JUMP_LABEL (bb->end), 0);
794 insn = NEXT_INSN (insn);
798 /* Find all basic blocks of the function whose first insn is F.
800 Collect and return a list of labels whose addresses are taken. This
801 will be used in make_edges for use with computed gotos. */
804 find_basic_blocks_1 (f)
807 register rtx insn, next;
809 rtx bb_note = NULL_RTX;
815 /* We process the instructions in a slightly different way than we did
816 previously. This is so that we see a NOTE_BASIC_BLOCK after we have
817 closed out the previous block, so that it gets attached at the proper
818 place. Since this form should be equivalent to the previous,
819 count_basic_blocks continues to use the old form as a check. */
821 for (insn = f; insn; insn = next)
823 enum rtx_code code = GET_CODE (insn);
825 next = NEXT_INSN (insn);
831 int kind = NOTE_LINE_NUMBER (insn);
833 /* Look for basic block notes with which to keep the
834 basic_block_info pointers stable. Unthread the note now;
835 we'll put it back at the right place in create_basic_block.
836 Or not at all if we've already found a note in this block. */
837 if (kind == NOTE_INSN_BASIC_BLOCK)
839 if (bb_note == NULL_RTX)
842 next = flow_delete_insn (insn);
848 /* A basic block starts at a label. If we've closed one off due
849 to a barrier or some such, no need to do it again. */
850 if (head != NULL_RTX)
852 /* While we now have edge lists with which other portions of
853 the compiler might determine a call ending a basic block
854 does not imply an abnormal edge, it will be a bit before
855 everything can be updated. So continue to emit a noop at
856 the end of such a block. */
857 if (GET_CODE (end) == CALL_INSN && ! SIBLING_CALL_P (end))
859 rtx nop = gen_rtx_USE (VOIDmode, const0_rtx);
860 end = emit_insn_after (nop, end);
863 create_basic_block (i++, head, end, bb_note);
871 /* A basic block ends at a jump. */
872 if (head == NULL_RTX)
876 /* ??? Make a special check for table jumps. The way this
877 happens is truly and amazingly gross. We are about to
878 create a basic block that contains just a code label and
879 an addr*vec jump insn. Worse, an addr_diff_vec creates
880 its own natural loop.
882 Prevent this bit of brain damage, pasting things together
883 correctly in make_edges.
885 The correct solution involves emitting the table directly
886 on the tablejump instruction as a note, or JUMP_LABEL. */
888 if (GET_CODE (PATTERN (insn)) == ADDR_VEC
889 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
897 goto new_bb_inclusive;
900 /* A basic block ends at a barrier. It may be that an unconditional
901 jump already closed the basic block -- no need to do it again. */
902 if (head == NULL_RTX)
905 /* While we now have edge lists with which other portions of the
906 compiler might determine a call ending a basic block does not
907 imply an abnormal edge, it will be a bit before everything can
908 be updated. So continue to emit a noop at the end of such a
910 if (GET_CODE (end) == CALL_INSN && ! SIBLING_CALL_P (end))
912 rtx nop = gen_rtx_USE (VOIDmode, const0_rtx);
913 end = emit_insn_after (nop, end);
915 goto new_bb_exclusive;
919 /* Record whether this call created an edge. */
920 rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
921 int region = (note ? INTVAL (XEXP (note, 0)) : 0);
923 if (GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
925 /* Scan each of the alternatives for label refs. */
926 lvl = find_label_refs (XEXP (PATTERN (insn), 0), lvl);
927 lvl = find_label_refs (XEXP (PATTERN (insn), 1), lvl);
928 lvl = find_label_refs (XEXP (PATTERN (insn), 2), lvl);
929 /* Record its tail recursion label, if any. */
930 if (XEXP (PATTERN (insn), 3) != NULL_RTX)
931 trll = alloc_EXPR_LIST (0, XEXP (PATTERN (insn), 3), trll);
934 /* A basic block ends at a call that can either throw or
935 do a non-local goto. */
936 if ((nonlocal_goto_handler_labels && region >= 0)
937 || can_throw_internal (insn))
940 if (head == NULL_RTX)
945 create_basic_block (i++, head, end, bb_note);
946 head = end = NULL_RTX;
954 /* Non-call exceptions generate new blocks just like calls. */
955 if (flag_non_call_exceptions && can_throw_internal (insn))
956 goto new_bb_inclusive;
958 if (head == NULL_RTX)
967 if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN)
971 /* Make a list of all labels referred to other than by jumps.
973 Make a special exception for labels followed by an ADDR*VEC,
974 as this would be a part of the tablejump setup code.
976 Make a special exception to registers loaded with label
977 values just before jump insns that use them. */
979 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
980 if (REG_NOTE_KIND (note) == REG_LABEL)
982 rtx lab = XEXP (note, 0), next;
984 if ((next = next_nonnote_insn (lab)) != NULL
985 && GET_CODE (next) == JUMP_INSN
986 && (GET_CODE (PATTERN (next)) == ADDR_VEC
987 || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
989 else if (GET_CODE (lab) == NOTE)
991 else if (GET_CODE (NEXT_INSN (insn)) == JUMP_INSN
992 && find_reg_note (NEXT_INSN (insn), REG_LABEL, lab))
995 lvl = alloc_EXPR_LIST (0, XEXP (note, 0), lvl);
1000 if (head != NULL_RTX)
1001 create_basic_block (i++, head, end, bb_note);
1003 flow_delete_insn (bb_note);
1005 if (i != n_basic_blocks)
1008 label_value_list = lvl;
1009 tail_recursion_label_list = trll;
1012 /* Tidy the CFG by deleting unreachable code and whatnot. */
1017 delete_unreachable_blocks ();
1018 if (try_optimize_cfg ())
1019 delete_unreachable_blocks ();
1020 mark_critical_edges ();
1022 /* Kill the data we won't maintain. */
1023 free_EXPR_LIST_list (&label_value_list);
1024 free_EXPR_LIST_list (&tail_recursion_label_list);
1027 /* Create a new basic block consisting of the instructions between
1028 HEAD and END inclusive. Reuses the note and basic block struct
1029 in BB_NOTE, if any. */
1032 create_basic_block (index, head, end, bb_note)
1034 rtx head, end, bb_note;
1039 && ! RTX_INTEGRATED_P (bb_note)
1040 && (bb = NOTE_BASIC_BLOCK (bb_note)) != NULL
1043 /* If we found an existing note, thread it back onto the chain. */
1047 if (GET_CODE (head) == CODE_LABEL)
1051 after = PREV_INSN (head);
1055 if (after != bb_note && NEXT_INSN (after) != bb_note)
1056 reorder_insns (bb_note, bb_note, after);
1060 /* Otherwise we must create a note and a basic block structure.
1061 Since we allow basic block structs in rtl, give the struct
1062 the same lifetime by allocating it off the function obstack
1063 rather than using malloc. */
1065 bb = (basic_block) obstack_alloc (&flow_obstack, sizeof (*bb));
1066 memset (bb, 0, sizeof (*bb));
1068 if (GET_CODE (head) == CODE_LABEL)
1069 bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK, head);
1072 bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK, head);
1075 NOTE_BASIC_BLOCK (bb_note) = bb;
1078 /* Always include the bb note in the block. */
1079 if (NEXT_INSN (end) == bb_note)
1085 BASIC_BLOCK (index) = bb;
1087 /* Tag the block so that we know it has been used when considering
1088 other basic block notes. */
1092 /* Records the basic block struct in BB_FOR_INSN, for every instruction
1093 indexed by INSN_UID. MAX is the size of the array. */
1096 compute_bb_for_insn (max)
1101 if (basic_block_for_insn)
1102 VARRAY_FREE (basic_block_for_insn);
1103 VARRAY_BB_INIT (basic_block_for_insn, max, "basic_block_for_insn");
1105 for (i = 0; i < n_basic_blocks; ++i)
1107 basic_block bb = BASIC_BLOCK (i);
1114 int uid = INSN_UID (insn);
1116 VARRAY_BB (basic_block_for_insn, uid) = bb;
1119 insn = NEXT_INSN (insn);
1124 /* Free the memory associated with the edge structures. */
1132 for (i = 0; i < n_basic_blocks; ++i)
1134 basic_block bb = BASIC_BLOCK (i);
1136 for (e = bb->succ; e; e = n)
1146 for (e = ENTRY_BLOCK_PTR->succ; e; e = n)
1152 ENTRY_BLOCK_PTR->succ = 0;
1153 EXIT_BLOCK_PTR->pred = 0;
1158 /* Identify the edges between basic blocks.
1160 NONLOCAL_LABEL_LIST is a list of non-local labels in the function. Blocks
1161 that are otherwise unreachable may be reachable with a non-local goto.
1163 BB_EH_END is an array indexed by basic block number in which we record
1164 the list of exception regions active at the end of the basic block. */
1167 make_edges (label_value_list)
1168 rtx label_value_list;
1171 sbitmap *edge_cache = NULL;
1173 /* Assume no computed jump; revise as we create edges. */
1174 current_function_has_computed_jump = 0;
1176 /* Heavy use of computed goto in machine-generated code can lead to
1177 nearly fully-connected CFGs. In that case we spend a significant
1178 amount of time searching the edge lists for duplicates. */
1179 if (forced_labels || label_value_list)
1181 edge_cache = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
1182 sbitmap_vector_zero (edge_cache, n_basic_blocks);
1185 /* By nature of the way these get numbered, block 0 is always the entry. */
1186 make_edge (edge_cache, ENTRY_BLOCK_PTR, BASIC_BLOCK (0), EDGE_FALLTHRU);
1188 for (i = 0; i < n_basic_blocks; ++i)
1190 basic_block bb = BASIC_BLOCK (i);
1193 int force_fallthru = 0;
1195 if (GET_CODE (bb->head) == CODE_LABEL
1196 && LABEL_ALTERNATE_NAME (bb->head))
1197 make_edge (NULL, ENTRY_BLOCK_PTR, bb, 0);
1199 /* Examine the last instruction of the block, and discover the
1200 ways we can leave the block. */
1203 code = GET_CODE (insn);
1206 if (code == JUMP_INSN)
1210 /* Recognize exception handling placeholders. */
1211 if (GET_CODE (PATTERN (insn)) == RESX)
1212 make_eh_edge (edge_cache, bb, insn);
1214 /* Recognize a non-local goto as a branch outside the
1215 current function. */
1216 else if (find_reg_note (insn, REG_NON_LOCAL_GOTO, NULL_RTX))
1219 /* ??? Recognize a tablejump and do the right thing. */
1220 else if ((tmp = JUMP_LABEL (insn)) != NULL_RTX
1221 && (tmp = NEXT_INSN (tmp)) != NULL_RTX
1222 && GET_CODE (tmp) == JUMP_INSN
1223 && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
1224 || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
1229 if (GET_CODE (PATTERN (tmp)) == ADDR_VEC)
1230 vec = XVEC (PATTERN (tmp), 0);
1232 vec = XVEC (PATTERN (tmp), 1);
1234 for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
1235 make_label_edge (edge_cache, bb,
1236 XEXP (RTVEC_ELT (vec, j), 0), 0);
1238 /* Some targets (eg, ARM) emit a conditional jump that also
1239 contains the out-of-range target. Scan for these and
1240 add an edge if necessary. */
1241 if ((tmp = single_set (insn)) != NULL
1242 && SET_DEST (tmp) == pc_rtx
1243 && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
1244 && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF)
1245 make_label_edge (edge_cache, bb,
1246 XEXP (XEXP (SET_SRC (tmp), 2), 0), 0);
1248 #ifdef CASE_DROPS_THROUGH
1249 /* Silly VAXen. The ADDR_VEC is going to be in the way of
1250 us naturally detecting fallthru into the next block. */
1255 /* If this is a computed jump, then mark it as reaching
1256 everything on the label_value_list and forced_labels list. */
1257 else if (computed_jump_p (insn))
1259 current_function_has_computed_jump = 1;
1261 for (x = label_value_list; x; x = XEXP (x, 1))
1262 make_label_edge (edge_cache, bb, XEXP (x, 0), EDGE_ABNORMAL);
1264 for (x = forced_labels; x; x = XEXP (x, 1))
1265 make_label_edge (edge_cache, bb, XEXP (x, 0), EDGE_ABNORMAL);
1268 /* Returns create an exit out. */
1269 else if (returnjump_p (insn))
1270 make_edge (edge_cache, bb, EXIT_BLOCK_PTR, 0);
1272 /* Otherwise, we have a plain conditional or unconditional jump. */
1275 if (! JUMP_LABEL (insn))
1277 make_label_edge (edge_cache, bb, JUMP_LABEL (insn), 0);
1281 /* If this is a sibling call insn, then this is in effect a
1282 combined call and return, and so we need an edge to the
1283 exit block. No need to worry about EH edges, since we
1284 wouldn't have created the sibling call in the first place. */
1286 if (code == CALL_INSN && SIBLING_CALL_P (insn))
1287 make_edge (edge_cache, bb, EXIT_BLOCK_PTR,
1288 EDGE_ABNORMAL | EDGE_ABNORMAL_CALL);
1290 /* If this is a CALL_INSN, then mark it as reaching the active EH
1291 handler for this CALL_INSN. If we're handling non-call
1292 exceptions then any insn can reach any of the active handlers.
1294 Also mark the CALL_INSN as reaching any nonlocal goto handler. */
1296 else if (code == CALL_INSN || flag_non_call_exceptions)
1298 /* Add any appropriate EH edges. */
1299 make_eh_edge (edge_cache, bb, insn);
1301 if (code == CALL_INSN && nonlocal_goto_handler_labels)
1303 /* ??? This could be made smarter: in some cases it's possible
1304 to tell that certain calls will not do a nonlocal goto.
1306 For example, if the nested functions that do the nonlocal
1307 gotos do not have their addresses taken, then only calls to
1308 those functions or to other nested functions that use them
1309 could possibly do nonlocal gotos. */
1310 /* We do know that a REG_EH_REGION note with a value less
1311 than 0 is guaranteed not to perform a non-local goto. */
1312 rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
1313 if (!note || INTVAL (XEXP (note, 0)) >= 0)
1314 for (x = nonlocal_goto_handler_labels; x; x = XEXP (x, 1))
1315 make_label_edge (edge_cache, bb, XEXP (x, 0),
1316 EDGE_ABNORMAL | EDGE_ABNORMAL_CALL);
1320 /* Find out if we can drop through to the next block. */
1321 insn = next_nonnote_insn (insn);
1322 if (!insn || (i + 1 == n_basic_blocks && force_fallthru))
1323 make_edge (edge_cache, bb, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
1324 else if (i + 1 < n_basic_blocks)
1326 rtx tmp = BLOCK_HEAD (i + 1);
1327 if (GET_CODE (tmp) == NOTE)
1328 tmp = next_nonnote_insn (tmp);
1329 if (force_fallthru || insn == tmp)
1330 make_edge (edge_cache, bb, BASIC_BLOCK (i + 1), EDGE_FALLTHRU);
1335 sbitmap_vector_free (edge_cache);
1338 /* Create an edge between two basic blocks. FLAGS are auxiliary information
1339 about the edge that is accumulated between calls. */
1342 make_edge (edge_cache, src, dst, flags)
1343 sbitmap *edge_cache;
1344 basic_block src, dst;
1350 /* Don't bother with edge cache for ENTRY or EXIT; there aren't that
1351 many edges to them, and we didn't allocate memory for it. */
1352 use_edge_cache = (edge_cache
1353 && src != ENTRY_BLOCK_PTR
1354 && dst != EXIT_BLOCK_PTR);
1356 /* Make sure we don't add duplicate edges. */
1357 switch (use_edge_cache)
1360 /* Quick test for non-existance of the edge. */
1361 if (! TEST_BIT (edge_cache[src->index], dst->index))
1364 /* The edge exists; early exit if no work to do. */
1370 for (e = src->succ; e; e = e->succ_next)
1379 e = (edge) xcalloc (1, sizeof (*e));
1382 e->succ_next = src->succ;
1383 e->pred_next = dst->pred;
1392 SET_BIT (edge_cache[src->index], dst->index);
1395 /* Create an edge from a basic block to a label. */
1398 make_label_edge (edge_cache, src, label, flags)
1399 sbitmap *edge_cache;
1404 if (GET_CODE (label) != CODE_LABEL)
1407 /* If the label was never emitted, this insn is junk, but avoid a
1408 crash trying to refer to BLOCK_FOR_INSN (label). This can happen
1409 as a result of a syntax error and a diagnostic has already been
1412 if (INSN_UID (label) == 0)
1415 make_edge (edge_cache, src, BLOCK_FOR_INSN (label), flags);
1418 /* Create the edges generated by INSN in REGION. */
1421 make_eh_edge (edge_cache, src, insn)
1422 sbitmap *edge_cache;
1426 int is_call = (GET_CODE (insn) == CALL_INSN ? EDGE_ABNORMAL_CALL : 0);
1429 handlers = reachable_handlers (insn);
1431 for (i = handlers; i; i = XEXP (i, 1))
1432 make_label_edge (edge_cache, src, XEXP (i, 0),
1433 EDGE_ABNORMAL | EDGE_EH | is_call);
1435 free_INSN_LIST_list (&handlers);
1438 /* Identify critical edges and set the bits appropriately. */
1441 mark_critical_edges ()
1443 int i, n = n_basic_blocks;
1446 /* We begin with the entry block. This is not terribly important now,
1447 but could be if a front end (Fortran) implemented alternate entry
1449 bb = ENTRY_BLOCK_PTR;
1456 /* (1) Critical edges must have a source with multiple successors. */
1457 if (bb->succ && bb->succ->succ_next)
1459 for (e = bb->succ; e; e = e->succ_next)
1461 /* (2) Critical edges must have a destination with multiple
1462 predecessors. Note that we know there is at least one
1463 predecessor -- the edge we followed to get here. */
1464 if (e->dest->pred->pred_next)
1465 e->flags |= EDGE_CRITICAL;
1467 e->flags &= ~EDGE_CRITICAL;
1472 for (e = bb->succ; e; e = e->succ_next)
1473 e->flags &= ~EDGE_CRITICAL;
1478 bb = BASIC_BLOCK (i);
1482 /* Split a block BB after insn INSN creating a new fallthru edge.
1483 Return the new edge. Note that to keep other parts of the compiler happy,
1484 this function renumbers all the basic blocks so that the new
1485 one has a number one greater than the block split. */
1488 split_block (bb, insn)
1498 /* There is no point splitting the block after its end. */
1499 if (bb->end == insn)
1502 /* Create the new structures. */
1503 new_bb = (basic_block) obstack_alloc (&flow_obstack, sizeof (*new_bb));
1504 new_edge = (edge) xcalloc (1, sizeof (*new_edge));
1507 memset (new_bb, 0, sizeof (*new_bb));
1509 new_bb->head = NEXT_INSN (insn);
1510 new_bb->end = bb->end;
1513 new_bb->succ = bb->succ;
1514 bb->succ = new_edge;
1515 new_bb->pred = new_edge;
1516 new_bb->count = bb->count;
1517 new_bb->frequency = bb->frequency;
1518 new_bb->loop_depth = bb->loop_depth;
1521 new_edge->dest = new_bb;
1522 new_edge->flags = EDGE_FALLTHRU;
1523 new_edge->probability = REG_BR_PROB_BASE;
1524 new_edge->count = bb->count;
1526 /* Redirect the src of the successor edges of bb to point to new_bb. */
1527 for (e = new_bb->succ; e; e = e->succ_next)
1530 /* Place the new block just after the block being split. */
1531 VARRAY_GROW (basic_block_info, ++n_basic_blocks);
1533 /* Some parts of the compiler expect blocks to be number in
1534 sequential order so insert the new block immediately after the
1535 block being split.. */
1537 for (i = n_basic_blocks - 1; i > j + 1; --i)
1539 basic_block tmp = BASIC_BLOCK (i - 1);
1540 BASIC_BLOCK (i) = tmp;
1544 BASIC_BLOCK (i) = new_bb;
1547 if (GET_CODE (new_bb->head) == CODE_LABEL)
1549 /* Create the basic block note. */
1550 bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK,
1552 NOTE_BASIC_BLOCK (bb_note) = new_bb;
1556 /* Create the basic block note. */
1557 bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK,
1559 NOTE_BASIC_BLOCK (bb_note) = new_bb;
1560 new_bb->head = bb_note;
1563 update_bb_for_insn (new_bb);
1565 if (bb->global_live_at_start)
1567 new_bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (&flow_obstack);
1568 new_bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (&flow_obstack);
1569 COPY_REG_SET (new_bb->global_live_at_end, bb->global_live_at_end);
1571 /* We now have to calculate which registers are live at the end
1572 of the split basic block and at the start of the new basic
1573 block. Start with those registers that are known to be live
1574 at the end of the original basic block and get
1575 propagate_block to determine which registers are live. */
1576 COPY_REG_SET (new_bb->global_live_at_start, bb->global_live_at_end);
1577 propagate_block (new_bb, new_bb->global_live_at_start, NULL, NULL, 0);
1578 COPY_REG_SET (bb->global_live_at_end,
1579 new_bb->global_live_at_start);
1585 /* Return label in the head of basic block. Create one if it doesn't exist. */
1590 if (GET_CODE (block->head) != CODE_LABEL)
1591 block->head = emit_label_before (gen_label_rtx (), block->head);
1595 /* Return true if the block has no effect and only forwards control flow to
1596 its single destination. */
1598 forwarder_block_p (bb)
1602 if (bb == EXIT_BLOCK_PTR || bb == ENTRY_BLOCK_PTR
1603 || !bb->succ || bb->succ->succ_next)
1606 insn = next_active_insn (bb->head);
1609 if (GET_CODE (insn) == CODE_LABEL
1610 || (GET_CODE (insn) == JUMP_INSN && onlyjump_p (insn)))
1615 /* Return nonzero if we can reach target from src by falling trought. */
1617 can_fallthru (src, target)
1618 basic_block src, target;
1620 rtx insn = src->end;
1621 rtx insn2 = target->head;
1623 if (!active_insn_p (insn2))
1624 insn2 = next_active_insn (insn2);
1625 /* ??? Later we may add code to move jump tables offline. */
1626 return next_active_insn (insn) == insn2;
1629 /* Attempt to perform edge redirection by replacing possibly complex jump
1630 instruction by unconditional jump or removing jump completely.
1631 This can apply only if all edges now point to the same block.
1633 The parameters and return values are equivalent to redirect_edge_and_branch.
1636 try_redirect_by_replacing_jump (e, target)
1640 basic_block src = e->src;
1641 rtx insn = src->end;
1647 /* Verify that all targets will be TARGET. */
1648 for (tmp = src->succ; tmp; tmp = tmp->succ_next)
1649 if (tmp->dest != target && tmp != e)
1651 if (tmp || GET_CODE (insn) != JUMP_INSN)
1654 /* Avoid removing branch with side effects. */
1655 set = single_set (insn);
1656 if (!set || side_effects_p (set))
1659 /* See if we can create the fallthru edge. */
1660 if (can_fallthru (src, target))
1662 src->end = PREV_INSN (insn);
1664 fprintf (rtl_dump_file, "Removing jump %i.\n", INSN_UID (insn));
1665 flow_delete_insn (insn);
1669 /* If this already is simplejump, redirect it. */
1670 else if (simplejump_p (insn))
1672 if (e->dest == target)
1675 fprintf (rtl_dump_file, "Redirecting jump %i from %i to %i.\n",
1676 INSN_UID (insn), e->dest->index, target->index);
1677 redirect_jump (insn, block_label (target), 0);
1679 /* Or replace possibly complicated jump insn by simple jump insn. */
1682 rtx target_label = block_label (target);
1684 src->end = PREV_INSN (insn);
1685 src->end = emit_jump_insn_after (gen_jump (target_label), src->end);
1686 JUMP_LABEL (src->end) = target_label;
1687 LABEL_NUSES (target_label)++;
1689 fprintf (rtl_dump_file, "Replacing insn %i by jump %i\n",
1690 INSN_UID (insn), INSN_UID (src->end));
1691 flow_delete_insn (insn);
1695 /* Keep only one edge out and set proper flags. */
1696 while (src->succ->succ_next)
1697 remove_edge (src->succ);
1700 e->flags = EDGE_FALLTHRU;
1704 /* Fixup barriers. */
1705 barrier = next_nonnote_insn (insn);
1706 if (fallthru && GET_CODE (barrier) == BARRIER)
1707 flow_delete_insn (barrier);
1708 else if (!fallthru && GET_CODE (barrier) != BARRIER)
1709 emit_barrier_after (insn);
1711 if (e->dest != target)
1712 redirect_edge_succ (e, target);
1716 /* Attempt to change code to redirect edge E to TARGET.
1717 Don't do that on expense of adding new instructions or reordering
1720 Function can be also called with edge destionation equivalent to the
1721 TARGET. Then it should try the simplifications and do nothing if
1724 Return true if transformation suceeded. We still return flase in case
1725 E already destinated TARGET and we didn't managed to simplify instruction
1728 redirect_edge_and_branch (e, target)
1733 rtx old_label = e->dest->head;
1734 basic_block src = e->src;
1735 rtx insn = src->end;
1737 if (try_redirect_by_replacing_jump (e, target))
1739 /* Do this fast path late, as we want above code to simplify for cases
1740 where called on single edge leaving basic block containing nontrivial
1742 else if (e->dest == target)
1745 /* We can only redirect non-fallthru edges of jump insn. */
1746 if (e->flags & EDGE_FALLTHRU)
1748 if (GET_CODE (insn) != JUMP_INSN)
1751 /* Recognize a tablejump and adjust all matching cases. */
1752 if ((tmp = JUMP_LABEL (insn)) != NULL_RTX
1753 && (tmp = NEXT_INSN (tmp)) != NULL_RTX
1754 && GET_CODE (tmp) == JUMP_INSN
1755 && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
1756 || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
1760 rtx new_label = block_label (target);
1762 if (GET_CODE (PATTERN (tmp)) == ADDR_VEC)
1763 vec = XVEC (PATTERN (tmp), 0);
1765 vec = XVEC (PATTERN (tmp), 1);
1767 for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
1768 if (XEXP (RTVEC_ELT (vec, j), 0) == old_label)
1770 RTVEC_ELT (vec, j) = gen_rtx_LABEL_REF (VOIDmode, new_label);
1771 --LABEL_NUSES (old_label);
1772 ++LABEL_NUSES (new_label);
1775 /* Handle casesi dispatch insns */
1776 if ((tmp = single_set (insn)) != NULL
1777 && SET_DEST (tmp) == pc_rtx
1778 && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
1779 && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF
1780 && XEXP (XEXP (SET_SRC (tmp), 2), 0) == old_label)
1782 XEXP (SET_SRC (tmp), 2) = gen_rtx_LABEL_REF (VOIDmode,
1784 --LABEL_NUSES (old_label);
1785 ++LABEL_NUSES (new_label);
1790 /* ?? We may play the games with moving the named labels from
1791 one basic block to the other in case only one computed_jump is
1793 if (computed_jump_p (insn))
1796 /* A return instruction can't be redirected. */
1797 if (returnjump_p (insn))
1800 /* If the insn doesn't go where we think, we're confused. */
1801 if (JUMP_LABEL (insn) != old_label)
1803 redirect_jump (insn, block_label (target), 0);
1807 fprintf (rtl_dump_file, "Edge %i->%i redirected to %i\n",
1808 e->src->index, e->dest->index, target->index);
1809 if (e->dest != target)
1812 /* Check whether the edge is already present. */
1813 for (s = src->succ; s; s=s->succ_next)
1814 if (s->dest == target)
1818 s->flags |= e->flags;
1822 redirect_edge_succ (e, target);
1827 /* Split a (typically critical) edge. Return the new block.
1828 Abort on abnormal edges.
1830 ??? The code generally expects to be called on critical edges.
1831 The case of a block ending in an unconditional jump to a
1832 block with multiple predecessors is not handled optimally. */
1835 split_edge (edge_in)
1838 basic_block old_pred, bb, old_succ;
1843 /* Abnormal edges cannot be split. */
1844 if ((edge_in->flags & EDGE_ABNORMAL) != 0)
1847 old_pred = edge_in->src;
1848 old_succ = edge_in->dest;
1850 /* Create the new structures. */
1851 bb = (basic_block) obstack_alloc (&flow_obstack, sizeof (*bb));
1852 edge_out = (edge) xcalloc (1, sizeof (*edge_out));
1855 memset (bb, 0, sizeof (*bb));
1857 /* ??? This info is likely going to be out of date very soon. */
1858 if (old_succ->global_live_at_start)
1860 bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (&flow_obstack);
1861 bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (&flow_obstack);
1862 COPY_REG_SET (bb->global_live_at_start, old_succ->global_live_at_start);
1863 COPY_REG_SET (bb->global_live_at_end, old_succ->global_live_at_start);
1867 bb->succ = edge_out;
1868 bb->count = edge_in->count;
1869 bb->frequency = (edge_in->probability * edge_in->src->frequency
1870 / REG_BR_PROB_BASE);
1872 edge_in->flags &= ~EDGE_CRITICAL;
1874 edge_out->pred_next = old_succ->pred;
1875 edge_out->succ_next = NULL;
1877 edge_out->dest = old_succ;
1878 edge_out->flags = EDGE_FALLTHRU;
1879 edge_out->probability = REG_BR_PROB_BASE;
1880 edge_out->count = edge_in->count;
1882 old_succ->pred = edge_out;
1884 /* Tricky case -- if there existed a fallthru into the successor
1885 (and we're not it) we must add a new unconditional jump around
1886 the new block we're actually interested in.
1888 Further, if that edge is critical, this means a second new basic
1889 block must be created to hold it. In order to simplify correct
1890 insn placement, do this before we touch the existing basic block
1891 ordering for the block we were really wanting. */
1892 if ((edge_in->flags & EDGE_FALLTHRU) == 0)
1895 for (e = edge_out->pred_next; e; e = e->pred_next)
1896 if (e->flags & EDGE_FALLTHRU)
1901 basic_block jump_block;
1904 if ((e->flags & EDGE_CRITICAL) == 0
1905 && e->src != ENTRY_BLOCK_PTR)
1907 /* Non critical -- we can simply add a jump to the end
1908 of the existing predecessor. */
1909 jump_block = e->src;
1913 /* We need a new block to hold the jump. The simplest
1914 way to do the bulk of the work here is to recursively
1916 jump_block = split_edge (e);
1917 e = jump_block->succ;
1920 /* Now add the jump insn ... */
1921 pos = emit_jump_insn_after (gen_jump (old_succ->head),
1923 jump_block->end = pos;
1924 if (basic_block_for_insn)
1925 set_block_for_insn (pos, jump_block);
1926 emit_barrier_after (pos);
1928 /* ... let jump know that label is in use, ... */
1929 JUMP_LABEL (pos) = old_succ->head;
1930 ++LABEL_NUSES (old_succ->head);
1932 /* ... and clear fallthru on the outgoing edge. */
1933 e->flags &= ~EDGE_FALLTHRU;
1935 /* Continue splitting the interesting edge. */
1939 /* Place the new block just in front of the successor. */
1940 VARRAY_GROW (basic_block_info, ++n_basic_blocks);
1941 if (old_succ == EXIT_BLOCK_PTR)
1942 j = n_basic_blocks - 1;
1944 j = old_succ->index;
1945 for (i = n_basic_blocks - 1; i > j; --i)
1947 basic_block tmp = BASIC_BLOCK (i - 1);
1948 BASIC_BLOCK (i) = tmp;
1951 BASIC_BLOCK (i) = bb;
1954 /* Create the basic block note.
1956 Where we place the note can have a noticable impact on the generated
1957 code. Consider this cfg:
1967 If we need to insert an insn on the edge from block 0 to block 1,
1968 we want to ensure the instructions we insert are outside of any
1969 loop notes that physically sit between block 0 and block 1. Otherwise
1970 we confuse the loop optimizer into thinking the loop is a phony. */
1971 if (old_succ != EXIT_BLOCK_PTR
1972 && PREV_INSN (old_succ->head)
1973 && GET_CODE (PREV_INSN (old_succ->head)) == NOTE
1974 && NOTE_LINE_NUMBER (PREV_INSN (old_succ->head)) == NOTE_INSN_LOOP_BEG)
1975 bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK,
1976 PREV_INSN (old_succ->head));
1977 else if (old_succ != EXIT_BLOCK_PTR)
1978 bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK, old_succ->head);
1980 bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK, get_last_insn ());
1981 NOTE_BASIC_BLOCK (bb_note) = bb;
1982 bb->head = bb->end = bb_note;
1984 /* For non-fallthry edges, we must adjust the predecessor's
1985 jump instruction to target our new block. */
1986 if ((edge_in->flags & EDGE_FALLTHRU) == 0)
1988 if (!redirect_edge_and_branch (edge_in, bb))
1992 redirect_edge_succ (edge_in, bb);
1997 /* Queue instructions for insertion on an edge between two basic blocks.
1998 The new instructions and basic blocks (if any) will not appear in the
1999 CFG until commit_edge_insertions is called. */
2002 insert_insn_on_edge (pattern, e)
2006 /* We cannot insert instructions on an abnormal critical edge.
2007 It will be easier to find the culprit if we die now. */
2008 if ((e->flags & (EDGE_ABNORMAL|EDGE_CRITICAL))
2009 == (EDGE_ABNORMAL|EDGE_CRITICAL))
2012 if (e->insns == NULL_RTX)
2015 push_to_sequence (e->insns);
2017 emit_insn (pattern);
2019 e->insns = get_insns ();
2023 /* Update the CFG for the instructions queued on edge E. */
2026 commit_one_edge_insertion (e)
2029 rtx before = NULL_RTX, after = NULL_RTX, insns, tmp, last;
2032 /* Pull the insns off the edge now since the edge might go away. */
2034 e->insns = NULL_RTX;
2036 /* Figure out where to put these things. If the destination has
2037 one predecessor, insert there. Except for the exit block. */
2038 if (e->dest->pred->pred_next == NULL
2039 && e->dest != EXIT_BLOCK_PTR)
2043 /* Get the location correct wrt a code label, and "nice" wrt
2044 a basic block note, and before everything else. */
2046 if (GET_CODE (tmp) == CODE_LABEL)
2047 tmp = NEXT_INSN (tmp);
2048 if (NOTE_INSN_BASIC_BLOCK_P (tmp))
2049 tmp = NEXT_INSN (tmp);
2050 if (tmp == bb->head)
2053 after = PREV_INSN (tmp);
2056 /* If the source has one successor and the edge is not abnormal,
2057 insert there. Except for the entry block. */
2058 else if ((e->flags & EDGE_ABNORMAL) == 0
2059 && e->src->succ->succ_next == NULL
2060 && e->src != ENTRY_BLOCK_PTR)
2063 /* It is possible to have a non-simple jump here. Consider a target
2064 where some forms of unconditional jumps clobber a register. This
2065 happens on the fr30 for example.
2067 We know this block has a single successor, so we can just emit
2068 the queued insns before the jump. */
2069 if (GET_CODE (bb->end) == JUMP_INSN)
2075 /* We'd better be fallthru, or we've lost track of what's what. */
2076 if ((e->flags & EDGE_FALLTHRU) == 0)
2083 /* Otherwise we must split the edge. */
2086 bb = split_edge (e);
2090 /* Now that we've found the spot, do the insertion. */
2092 /* Set the new block number for these insns, if structure is allocated. */
2093 if (basic_block_for_insn)
2096 for (i = insns; i != NULL_RTX; i = NEXT_INSN (i))
2097 set_block_for_insn (i, bb);
2102 emit_insns_before (insns, before);
2103 if (before == bb->head)
2106 last = prev_nonnote_insn (before);
2110 last = emit_insns_after (insns, after);
2111 if (after == bb->end)
2115 if (returnjump_p (last))
2117 /* ??? Remove all outgoing edges from BB and add one for EXIT.
2118 This is not currently a problem because this only happens
2119 for the (single) epilogue, which already has a fallthru edge
2123 if (e->dest != EXIT_BLOCK_PTR
2124 || e->succ_next != NULL
2125 || (e->flags & EDGE_FALLTHRU) == 0)
2127 e->flags &= ~EDGE_FALLTHRU;
2129 emit_barrier_after (last);
2133 flow_delete_insn (before);
2135 else if (GET_CODE (last) == JUMP_INSN)
2137 find_sub_basic_blocks (bb);
2140 /* Update the CFG for all queued instructions. */
2143 commit_edge_insertions ()
2148 #ifdef ENABLE_CHECKING
2149 verify_flow_info ();
2153 bb = ENTRY_BLOCK_PTR;
2158 for (e = bb->succ; e; e = next)
2160 next = e->succ_next;
2162 commit_one_edge_insertion (e);
2165 if (++i >= n_basic_blocks)
2167 bb = BASIC_BLOCK (i);
2171 /* Add fake edges to the function exit for any non constant calls in
2172 the bitmap of blocks specified by BLOCKS or to the whole CFG if
2173 BLOCKS is zero. Return the nuber of blocks that were split. */
2176 flow_call_edges_add (blocks)
2180 int blocks_split = 0;
2184 /* Map bb indicies into basic block pointers since split_block
2185 will renumber the basic blocks. */
2187 bbs = xmalloc (n_basic_blocks * sizeof (*bbs));
2191 for (i = 0; i < n_basic_blocks; i++)
2192 bbs[bb_num++] = BASIC_BLOCK (i);
2196 EXECUTE_IF_SET_IN_SBITMAP (blocks, 0, i,
2198 bbs[bb_num++] = BASIC_BLOCK (i);
2203 /* Now add fake edges to the function exit for any non constant
2204 calls since there is no way that we can determine if they will
2207 for (i = 0; i < bb_num; i++)
2209 basic_block bb = bbs[i];
2213 for (insn = bb->end; ; insn = prev_insn)
2215 prev_insn = PREV_INSN (insn);
2216 if (GET_CODE (insn) == CALL_INSN && ! CONST_CALL_P (insn))
2220 /* Note that the following may create a new basic block
2221 and renumber the existing basic blocks. */
2222 e = split_block (bb, insn);
2226 make_edge (NULL, bb, EXIT_BLOCK_PTR, EDGE_FAKE);
2228 if (insn == bb->head)
2234 verify_flow_info ();
2237 return blocks_split;
2240 /* Find unreachable blocks. An unreachable block will have NULL in
2241 block->aux, a non-NULL value indicates the block is reachable. */
2244 find_unreachable_blocks ()
2248 basic_block *tos, *worklist;
2251 tos = worklist = (basic_block *) xmalloc (sizeof (basic_block) * n);
2253 /* Use basic_block->aux as a marker. Clear them all. */
2255 for (i = 0; i < n; ++i)
2256 BASIC_BLOCK (i)->aux = NULL;
2258 /* Add our starting points to the worklist. Almost always there will
2259 be only one. It isn't inconcievable that we might one day directly
2260 support Fortran alternate entry points. */
2262 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
2266 /* Mark the block with a handy non-null value. */
2270 /* Iterate: find everything reachable from what we've already seen. */
2272 while (tos != worklist)
2274 basic_block b = *--tos;
2276 for (e = b->succ; e; e = e->succ_next)
2287 /* Delete all unreachable basic blocks. */
2289 delete_unreachable_blocks ()
2293 find_unreachable_blocks ();
2295 /* Delete all unreachable basic blocks. Count down so that we
2296 don't interfere with the block renumbering that happens in
2297 flow_delete_block. */
2299 for (i = n_basic_blocks - 1; i >= 0; --i)
2301 basic_block b = BASIC_BLOCK (i);
2304 /* This block was found. Tidy up the mark. */
2307 flow_delete_block (b);
2310 tidy_fallthru_edges ();
2313 /* Return true if NOTE is not one of the ones that must be kept paired,
2314 so that we may simply delete them. */
2317 can_delete_note_p (note)
2320 return (NOTE_LINE_NUMBER (note) == NOTE_INSN_DELETED
2321 || NOTE_LINE_NUMBER (note) == NOTE_INSN_BASIC_BLOCK);
2324 /* Unlink a chain of insns between START and FINISH, leaving notes
2325 that must be paired. */
2328 flow_delete_insn_chain (start, finish)
2331 /* Unchain the insns one by one. It would be quicker to delete all
2332 of these with a single unchaining, rather than one at a time, but
2333 we need to keep the NOTE's. */
2339 next = NEXT_INSN (start);
2340 if (GET_CODE (start) == NOTE && !can_delete_note_p (start))
2342 else if (GET_CODE (start) == CODE_LABEL
2343 && ! can_delete_label_p (start))
2345 const char *name = LABEL_NAME (start);
2346 PUT_CODE (start, NOTE);
2347 NOTE_LINE_NUMBER (start) = NOTE_INSN_DELETED_LABEL;
2348 NOTE_SOURCE_FILE (start) = name;
2351 next = flow_delete_insn (start);
2353 if (start == finish)
2359 /* Delete the insns in a (non-live) block. We physically delete every
2360 non-deleted-note insn, and update the flow graph appropriately.
2362 Return nonzero if we deleted an exception handler. */
2364 /* ??? Preserving all such notes strikes me as wrong. It would be nice
2365 to post-process the stream to remove empty blocks, loops, ranges, etc. */
2368 flow_delete_block (b)
2371 int deleted_handler = 0;
2374 /* If the head of this block is a CODE_LABEL, then it might be the
2375 label for an exception handler which can't be reached.
2377 We need to remove the label from the exception_handler_label list
2378 and remove the associated NOTE_INSN_EH_REGION_BEG and
2379 NOTE_INSN_EH_REGION_END notes. */
2383 never_reached_warning (insn);
2385 if (GET_CODE (insn) == CODE_LABEL)
2386 maybe_remove_eh_handler (insn);
2388 /* Include any jump table following the basic block. */
2390 if (GET_CODE (end) == JUMP_INSN
2391 && (tmp = JUMP_LABEL (end)) != NULL_RTX
2392 && (tmp = NEXT_INSN (tmp)) != NULL_RTX
2393 && GET_CODE (tmp) == JUMP_INSN
2394 && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
2395 || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
2398 /* Include any barrier that may follow the basic block. */
2399 tmp = next_nonnote_insn (end);
2400 if (tmp && GET_CODE (tmp) == BARRIER)
2403 /* Selectively delete the entire chain. */
2404 flow_delete_insn_chain (insn, end);
2406 /* Remove the edges into and out of this block. Note that there may
2407 indeed be edges in, if we are removing an unreachable loop. */
2411 for (e = b->pred; e; e = next)
2413 for (q = &e->src->succ; *q != e; q = &(*q)->succ_next)
2416 next = e->pred_next;
2420 for (e = b->succ; e; e = next)
2422 for (q = &e->dest->pred; *q != e; q = &(*q)->pred_next)
2425 next = e->succ_next;
2434 /* Remove the basic block from the array, and compact behind it. */
2437 return deleted_handler;
2440 /* Remove block B from the basic block array and compact behind it. */
2446 int i, n = n_basic_blocks;
2448 for (i = b->index; i + 1 < n; ++i)
2450 basic_block x = BASIC_BLOCK (i + 1);
2451 BASIC_BLOCK (i) = x;
2455 basic_block_info->num_elements--;
2459 /* Delete INSN by patching it out. Return the next insn. */
2462 flow_delete_insn (insn)
2465 rtx prev = PREV_INSN (insn);
2466 rtx next = NEXT_INSN (insn);
2469 PREV_INSN (insn) = NULL_RTX;
2470 NEXT_INSN (insn) = NULL_RTX;
2471 INSN_DELETED_P (insn) = 1;
2474 NEXT_INSN (prev) = next;
2476 PREV_INSN (next) = prev;
2478 set_last_insn (prev);
2480 if (GET_CODE (insn) == CODE_LABEL)
2481 remove_node_from_expr_list (insn, &nonlocal_goto_handler_labels);
2483 /* If deleting a jump, decrement the use count of the label. Deleting
2484 the label itself should happen in the normal course of block merging. */
2485 if (GET_CODE (insn) == JUMP_INSN
2486 && JUMP_LABEL (insn)
2487 && GET_CODE (JUMP_LABEL (insn)) == CODE_LABEL)
2488 LABEL_NUSES (JUMP_LABEL (insn))--;
2490 /* Also if deleting an insn that references a label. */
2491 else if ((note = find_reg_note (insn, REG_LABEL, NULL_RTX)) != NULL_RTX
2492 && GET_CODE (XEXP (note, 0)) == CODE_LABEL)
2493 LABEL_NUSES (XEXP (note, 0))--;
2495 if (GET_CODE (insn) == JUMP_INSN
2496 && (GET_CODE (PATTERN (insn)) == ADDR_VEC
2497 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC))
2499 rtx pat = PATTERN (insn);
2500 int diff_vec_p = GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC;
2501 int len = XVECLEN (pat, diff_vec_p);
2504 for (i = 0; i < len; i++)
2505 LABEL_NUSES (XEXP (XVECEXP (pat, diff_vec_p, i), 0))--;
2511 /* True if a given label can be deleted. */
2514 can_delete_label_p (label)
2519 if (LABEL_PRESERVE_P (label))
2522 for (x = forced_labels; x; x = XEXP (x, 1))
2523 if (label == XEXP (x, 0))
2525 for (x = label_value_list; x; x = XEXP (x, 1))
2526 if (label == XEXP (x, 0))
2528 for (x = exception_handler_labels; x; x = XEXP (x, 1))
2529 if (label == XEXP (x, 0))
2532 /* User declared labels must be preserved. */
2533 if (LABEL_NAME (label) != 0)
2540 tail_recursion_label_p (label)
2545 for (x = tail_recursion_label_list; x; x = XEXP (x, 1))
2546 if (label == XEXP (x, 0))
2552 /* Blocks A and B are to be merged into a single block A. The insns
2553 are already contiguous, hence `nomove'. */
2556 merge_blocks_nomove (a, b)
2560 rtx b_head, b_end, a_end;
2561 rtx del_first = NULL_RTX, del_last = NULL_RTX;
2564 /* If there was a CODE_LABEL beginning B, delete it. */
2567 if (GET_CODE (b_head) == CODE_LABEL)
2569 /* Detect basic blocks with nothing but a label. This can happen
2570 in particular at the end of a function. */
2571 if (b_head == b_end)
2573 del_first = del_last = b_head;
2574 b_head = NEXT_INSN (b_head);
2577 /* Delete the basic block note. */
2578 if (NOTE_INSN_BASIC_BLOCK_P (b_head))
2580 if (b_head == b_end)
2585 b_head = NEXT_INSN (b_head);
2588 /* If there was a jump out of A, delete it. */
2590 if (GET_CODE (a_end) == JUMP_INSN)
2594 for (prev = PREV_INSN (a_end); ; prev = PREV_INSN (prev))
2595 if (GET_CODE (prev) != NOTE
2596 || NOTE_LINE_NUMBER (prev) == NOTE_INSN_BASIC_BLOCK
2603 /* If this was a conditional jump, we need to also delete
2604 the insn that set cc0. */
2605 if (prev && sets_cc0_p (prev))
2608 prev = prev_nonnote_insn (prev);
2617 else if (GET_CODE (NEXT_INSN (a_end)) == BARRIER)
2618 del_first = NEXT_INSN (a_end);
2620 /* Delete everything marked above as well as crap that might be
2621 hanging out between the two blocks. */
2622 flow_delete_insn_chain (del_first, del_last);
2624 /* Normally there should only be one successor of A and that is B, but
2625 partway though the merge of blocks for conditional_execution we'll
2626 be merging a TEST block with THEN and ELSE successors. Free the
2627 whole lot of them and hope the caller knows what they're doing. */
2629 remove_edge (a->succ);
2631 /* Adjust the edges out of B for the new owner. */
2632 for (e = b->succ; e; e = e->succ_next)
2636 /* B hasn't quite yet ceased to exist. Attempt to prevent mishap. */
2637 b->pred = b->succ = NULL;
2639 /* Reassociate the insns of B with A. */
2642 if (basic_block_for_insn)
2644 BLOCK_FOR_INSN (b_head) = a;
2645 while (b_head != b_end)
2647 b_head = NEXT_INSN (b_head);
2648 BLOCK_FOR_INSN (b_head) = a;
2658 /* Blocks A and B are to be merged into a single block. A has no incoming
2659 fallthru edge, so it can be moved before B without adding or modifying
2660 any jumps (aside from the jump from A to B). */
2663 merge_blocks_move_predecessor_nojumps (a, b)
2666 rtx start, end, barrier;
2672 barrier = next_nonnote_insn (end);
2673 if (GET_CODE (barrier) != BARRIER)
2675 flow_delete_insn (barrier);
2677 /* Move block and loop notes out of the chain so that we do not
2678 disturb their order.
2680 ??? A better solution would be to squeeze out all the non-nested notes
2681 and adjust the block trees appropriately. Even better would be to have
2682 a tighter connection between block trees and rtl so that this is not
2684 start = squeeze_notes (start, end);
2686 /* Scramble the insn chain. */
2687 if (end != PREV_INSN (b->head))
2688 reorder_insns (start, end, PREV_INSN (b->head));
2692 fprintf (rtl_dump_file, "Moved block %d before %d and merged.\n",
2693 a->index, b->index);
2696 /* Swap the records for the two blocks around. Although we are deleting B,
2697 A is now where B was and we want to compact the BB array from where
2699 BASIC_BLOCK (a->index) = b;
2700 BASIC_BLOCK (b->index) = a;
2702 a->index = b->index;
2705 /* Now blocks A and B are contiguous. Merge them. */
2706 merge_blocks_nomove (a, b);
2711 /* Blocks A and B are to be merged into a single block. B has no outgoing
2712 fallthru edge, so it can be moved after A without adding or modifying
2713 any jumps (aside from the jump from A to B). */
2716 merge_blocks_move_successor_nojumps (a, b)
2719 rtx start, end, barrier;
2723 barrier = NEXT_INSN (end);
2725 /* Recognize a jump table following block B. */
2726 if (GET_CODE (barrier) == CODE_LABEL
2727 && NEXT_INSN (barrier)
2728 && GET_CODE (NEXT_INSN (barrier)) == JUMP_INSN
2729 && (GET_CODE (PATTERN (NEXT_INSN (barrier))) == ADDR_VEC
2730 || GET_CODE (PATTERN (NEXT_INSN (barrier))) == ADDR_DIFF_VEC))
2732 end = NEXT_INSN (barrier);
2733 barrier = NEXT_INSN (end);
2736 /* There had better have been a barrier there. Delete it. */
2737 if (GET_CODE (barrier) != BARRIER)
2739 flow_delete_insn (barrier);
2741 /* Move block and loop notes out of the chain so that we do not
2742 disturb their order.
2744 ??? A better solution would be to squeeze out all the non-nested notes
2745 and adjust the block trees appropriately. Even better would be to have
2746 a tighter connection between block trees and rtl so that this is not
2748 start = squeeze_notes (start, end);
2750 /* Scramble the insn chain. */
2751 reorder_insns (start, end, a->end);
2753 /* Now blocks A and B are contiguous. Merge them. */
2754 merge_blocks_nomove (a, b);
2758 fprintf (rtl_dump_file, "Moved block %d after %d and merged.\n",
2759 b->index, a->index);
2765 /* Attempt to merge basic blocks that are potentially non-adjacent.
2766 Return true iff the attempt succeeded. */
2769 merge_blocks (e, b, c)
2773 /* If C has a tail recursion label, do not merge. There is no
2774 edge recorded from the call_placeholder back to this label, as
2775 that would make optimize_sibling_and_tail_recursive_calls more
2776 complex for no gain. */
2777 if (GET_CODE (c->head) == CODE_LABEL
2778 && tail_recursion_label_p (c->head))
2781 /* If B has a fallthru edge to C, no need to move anything. */
2782 if (e->flags & EDGE_FALLTHRU)
2784 merge_blocks_nomove (b, c);
2788 fprintf (rtl_dump_file, "Merged %d and %d without moving.\n",
2789 b->index, c->index);
2797 int c_has_outgoing_fallthru;
2798 int b_has_incoming_fallthru;
2800 /* We must make sure to not munge nesting of exception regions,
2801 lexical blocks, and loop notes.
2803 The first is taken care of by requiring that the active eh
2804 region at the end of one block always matches the active eh
2805 region at the beginning of the next block.
2807 The later two are taken care of by squeezing out all the notes. */
2809 /* ??? A throw/catch edge (or any abnormal edge) should be rarely
2810 executed and we may want to treat blocks which have two out
2811 edges, one normal, one abnormal as only having one edge for
2812 block merging purposes. */
2814 for (tmp_edge = c->succ; tmp_edge; tmp_edge = tmp_edge->succ_next)
2815 if (tmp_edge->flags & EDGE_FALLTHRU)
2817 c_has_outgoing_fallthru = (tmp_edge != NULL);
2819 for (tmp_edge = b->pred; tmp_edge; tmp_edge = tmp_edge->pred_next)
2820 if (tmp_edge->flags & EDGE_FALLTHRU)
2822 b_has_incoming_fallthru = (tmp_edge != NULL);
2824 /* If B does not have an incoming fallthru, then it can be moved
2825 immediately before C without introducing or modifying jumps.
2826 C cannot be the first block, so we do not have to worry about
2827 accessing a non-existent block. */
2828 if (! b_has_incoming_fallthru)
2829 return merge_blocks_move_predecessor_nojumps (b, c);
2831 /* Otherwise, we're going to try to move C after B. If C does
2832 not have an outgoing fallthru, then it can be moved
2833 immediately after B without introducing or modifying jumps. */
2834 if (! c_has_outgoing_fallthru)
2835 return merge_blocks_move_successor_nojumps (b, c);
2837 /* Otherwise, we'll need to insert an extra jump, and possibly
2838 a new block to contain it. */
2839 /* ??? Not implemented yet. */
2845 /* Simplify conditional jump around an jump.
2846 Return nonzero in case optimization matched. */
2849 try_simplify_condjump (src)
2852 basic_block final_block, next_block;
2853 rtx insn = src->end;
2854 edge branch, fallthru;
2856 if (!any_condjump_p (insn))
2859 fallthru = FALLTHRU_EDGE (src);
2861 /* Following block must be simple forwarder block with single
2862 entry and must not be last in the stream. */
2863 next_block = fallthru->dest;
2864 if (!forwarder_block_p (next_block)
2865 || next_block->pred->pred_next
2866 || next_block->index == n_basic_blocks - 1)
2869 /* The branch must target to block afterwards. */
2870 final_block = BASIC_BLOCK (next_block->index + 1);
2872 branch = BRANCH_EDGE (src);
2874 if (branch->dest != final_block)
2877 /* Avoid jump.c from being overactive on removin ureachable insns. */
2878 LABEL_NUSES (JUMP_LABEL (insn))++;
2879 if (!invert_jump (insn, block_label (next_block->succ->dest), 1))
2881 LABEL_NUSES (JUMP_LABEL (insn))--;
2885 fprintf (rtl_dump_file, "Simplifying condjump %i around jump %i\n",
2886 INSN_UID (insn), INSN_UID (next_block->end));
2888 redirect_edge_succ (branch, final_block);
2889 redirect_edge_succ (fallthru, next_block->succ->dest);
2891 branch->flags |= EDGE_FALLTHRU;
2892 fallthru->flags &= EDGE_FALLTHRU;
2894 flow_delete_block (next_block);
2898 /* Attempt to forward edges leaving basic block B.
2899 Return nonzero if sucessfull. */
2902 try_forward_edges (b)
2907 for (e = b->succ; e; e = e->succ_next)
2909 basic_block target = e->dest, first = e->dest;
2912 /* Look for the real destination of jump.
2913 Avoid inifinite loop in the infinite empty loop by counting
2914 up to n_basic_blocks. */
2915 while (forwarder_block_p (target)
2916 && target->succ->dest != EXIT_BLOCK_PTR
2917 && counter < n_basic_blocks)
2919 /* Bypass trivial infinite loops. */
2920 if (target == target->succ->dest)
2921 counter = n_basic_blocks;
2922 target = target->succ->dest, counter++;
2925 if (target != first && counter < n_basic_blocks
2926 && redirect_edge_and_branch (e, target))
2928 while (first != target)
2930 first->count -= e->count;
2931 first->succ->count -= e->count;
2932 first->frequency -= ((e->probability * b->frequency
2933 + REG_BR_PROB_BASE / 2)
2934 / REG_BR_PROB_BASE);
2935 first = first->succ->dest;
2937 /* We've possibly removed the edge. */
2941 else if (rtl_dump_file && counter == n_basic_blocks)
2942 fprintf (rtl_dump_file, "Infinite loop in BB %i.\n", target->index);
2943 else if (rtl_dump_file && first != target)
2944 fprintf (rtl_dump_file,
2945 "Forwarding edge %i->%i to %i failed.\n", b->index,
2946 e->dest->index, target->index);
2951 /* Do simple CFG optimizations - basic block merging, simplifying of jump
2954 Return nonzero in case some optimizations matched. */
2960 bool changed_overall = 0;
2963 /* Attempt to merge blocks as made possible by edge removal. If a block
2964 has only one successor, and the successor has only one predecessor,
2965 they may be combined. */
2970 for (i = 0; i < n_basic_blocks;)
2972 basic_block c, b = BASIC_BLOCK (i);
2974 int changed_here = 0;
2976 /* Delete trivially dead basic block. */
2977 if (b->pred == NULL)
2979 c = BASIC_BLOCK (i - 1);
2981 fprintf (rtl_dump_file, "Deleting block %i.\n", b->index);
2982 flow_delete_block (b);
2986 /* The fallthru forwarder block can be deleted. */
2987 if (b->pred->pred_next == NULL
2988 && forwarder_block_p (b)
2989 && (b->pred->flags & EDGE_FALLTHRU)
2990 && (b->succ->flags & EDGE_FALLTHRU))
2993 fprintf (rtl_dump_file, "Deleting fallthru block %i.\n",
2995 c = BASIC_BLOCK (i ? i - 1 : i + 1);
2996 redirect_edge_succ (b->pred, b->succ->dest);
2997 flow_delete_block (b);
3002 /* A loop because chains of blocks might be combineable. */
3003 while ((s = b->succ) != NULL
3004 && s->succ_next == NULL
3005 && (s->flags & EDGE_EH) == 0
3006 && (c = s->dest) != EXIT_BLOCK_PTR
3007 && c->pred->pred_next == NULL
3008 /* If the jump insn has side effects, we can't kill the edge. */
3009 && (GET_CODE (b->end) != JUMP_INSN
3010 || onlyjump_p (b->end)) && merge_blocks (s, b, c))
3013 if (try_simplify_condjump (b))
3016 /* In the case basic blocks has single outgoing edge, but over by the
3017 non-trivial jump instruction, we can replace it by unconditional
3018 jump, or delete the jump completely. Use logic of
3019 redirect_edge_and_branch to do the dirty job for us.
3021 We match cases as conditional jumps jumping to the next block or
3025 && b->succ->succ_next == NULL
3026 && GET_CODE (b->end) == JUMP_INSN
3027 && b->succ->dest != EXIT_BLOCK_PTR
3028 && redirect_edge_and_branch (b->succ, b->succ->dest))
3031 if (try_forward_edges (b))
3034 /* Don't get confused by the index shift caused by deleting
3041 changed_overall |= changed;
3045 #ifdef ENABLE_CHECKING
3047 verify_flow_info ();
3049 return changed_overall;
3052 /* The given edge should potentially be a fallthru edge. If that is in
3053 fact true, delete the jump and barriers that are in the way. */
3056 tidy_fallthru_edge (e, b, c)
3062 /* ??? In a late-running flow pass, other folks may have deleted basic
3063 blocks by nopping out blocks, leaving multiple BARRIERs between here
3064 and the target label. They ought to be chastized and fixed.
3066 We can also wind up with a sequence of undeletable labels between
3067 one block and the next.
3069 So search through a sequence of barriers, labels, and notes for
3070 the head of block C and assert that we really do fall through. */
3072 if (next_real_insn (b->end) != next_real_insn (PREV_INSN (c->head)))
3075 /* Remove what will soon cease being the jump insn from the source block.
3076 If block B consisted only of this single jump, turn it into a deleted
3079 if (GET_CODE (q) == JUMP_INSN
3081 && (any_uncondjump_p (q)
3082 || (b->succ == e && e->succ_next == NULL)))
3085 /* If this was a conditional jump, we need to also delete
3086 the insn that set cc0. */
3087 if (any_condjump_p (q) && sets_cc0_p (PREV_INSN (q)))
3094 NOTE_LINE_NUMBER (q) = NOTE_INSN_DELETED;
3095 NOTE_SOURCE_FILE (q) = 0;
3101 /* We don't want a block to end on a line-number note since that has
3102 the potential of changing the code between -g and not -g. */
3103 while (GET_CODE (q) == NOTE && NOTE_LINE_NUMBER (q) >= 0)
3110 /* Selectively unlink the sequence. */
3111 if (q != PREV_INSN (c->head))
3112 flow_delete_insn_chain (NEXT_INSN (q), PREV_INSN (c->head));
3114 e->flags |= EDGE_FALLTHRU;
3117 /* Fix up edges that now fall through, or rather should now fall through
3118 but previously required a jump around now deleted blocks. Simplify
3119 the search by only examining blocks numerically adjacent, since this
3120 is how find_basic_blocks created them. */
3123 tidy_fallthru_edges ()
3127 for (i = 1; i < n_basic_blocks; ++i)
3129 basic_block b = BASIC_BLOCK (i - 1);
3130 basic_block c = BASIC_BLOCK (i);
3133 /* We care about simple conditional or unconditional jumps with
3136 If we had a conditional branch to the next instruction when
3137 find_basic_blocks was called, then there will only be one
3138 out edge for the block which ended with the conditional
3139 branch (since we do not create duplicate edges).
3141 Furthermore, the edge will be marked as a fallthru because we
3142 merge the flags for the duplicate edges. So we do not want to
3143 check that the edge is not a FALLTHRU edge. */
3144 if ((s = b->succ) != NULL
3145 && ! (s->flags & EDGE_COMPLEX)
3146 && s->succ_next == NULL
3148 /* If the jump insn has side effects, we can't tidy the edge. */
3149 && (GET_CODE (b->end) != JUMP_INSN
3150 || onlyjump_p (b->end)))
3151 tidy_fallthru_edge (s, b, c);
3155 /* Perform data flow analysis.
3156 F is the first insn of the function; FLAGS is a set of PROP_* flags
3157 to be used in accumulating flow info. */
3160 life_analysis (f, file, flags)
3165 #ifdef ELIMINABLE_REGS
3167 static struct {int from, to; } eliminables[] = ELIMINABLE_REGS;
3170 /* Record which registers will be eliminated. We use this in
3173 CLEAR_HARD_REG_SET (elim_reg_set);
3175 #ifdef ELIMINABLE_REGS
3176 for (i = 0; i < (int) ARRAY_SIZE (eliminables); i++)
3177 SET_HARD_REG_BIT (elim_reg_set, eliminables[i].from);
3179 SET_HARD_REG_BIT (elim_reg_set, FRAME_POINTER_REGNUM);
3183 flags &= ~(PROP_LOG_LINKS | PROP_AUTOINC);
3185 /* The post-reload life analysis have (on a global basis) the same
3186 registers live as was computed by reload itself. elimination
3187 Otherwise offsets and such may be incorrect.
3189 Reload will make some registers as live even though they do not
3192 We don't want to create new auto-incs after reload, since they
3193 are unlikely to be useful and can cause problems with shared
3195 if (reload_completed)
3196 flags &= ~(PROP_REG_INFO | PROP_AUTOINC);
3198 /* We want alias analysis information for local dead store elimination. */
3199 if (optimize && (flags & PROP_SCAN_DEAD_CODE))
3200 init_alias_analysis ();
3202 /* Always remove no-op moves. Do this before other processing so
3203 that we don't have to keep re-scanning them. */
3204 delete_noop_moves (f);
3206 /* Some targets can emit simpler epilogues if they know that sp was
3207 not ever modified during the function. After reload, of course,
3208 we've already emitted the epilogue so there's no sense searching. */
3209 if (! reload_completed)
3210 notice_stack_pointer_modification (f);
3212 /* Allocate and zero out data structures that will record the
3213 data from lifetime analysis. */
3214 allocate_reg_life_data ();
3215 allocate_bb_life_data ();
3217 /* Find the set of registers live on function exit. */
3218 mark_regs_live_at_end (EXIT_BLOCK_PTR->global_live_at_start);
3220 /* "Update" life info from zero. It'd be nice to begin the
3221 relaxation with just the exit and noreturn blocks, but that set
3222 is not immediately handy. */
3224 if (flags & PROP_REG_INFO)
3225 memset (regs_ever_live, 0, sizeof (regs_ever_live));
3226 update_life_info (NULL, UPDATE_LIFE_GLOBAL, flags);
3229 if (optimize && (flags & PROP_SCAN_DEAD_CODE))
3230 end_alias_analysis ();
3233 dump_flow_info (file);
3235 free_basic_block_vars (1);
3237 #ifdef ENABLE_CHECKING
3241 /* Search for any REG_LABEL notes which reference deleted labels. */
3242 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
3244 rtx inote = find_reg_note (insn, REG_LABEL, NULL_RTX);
3246 if (inote && GET_CODE (inote) == NOTE_INSN_DELETED_LABEL)
3253 /* A subroutine of verify_wide_reg, called through for_each_rtx.
3254 Search for REGNO. If found, abort if it is not wider than word_mode. */
3257 verify_wide_reg_1 (px, pregno)
3262 unsigned int regno = *(int *) pregno;
3264 if (GET_CODE (x) == REG && REGNO (x) == regno)
3266 if (GET_MODE_BITSIZE (GET_MODE (x)) <= BITS_PER_WORD)
3273 /* A subroutine of verify_local_live_at_start. Search through insns
3274 between HEAD and END looking for register REGNO. */
3277 verify_wide_reg (regno, head, end)
3284 && for_each_rtx (&PATTERN (head), verify_wide_reg_1, ®no))
3288 head = NEXT_INSN (head);
3291 /* We didn't find the register at all. Something's way screwy. */
3293 fprintf (rtl_dump_file, "Aborting in verify_wide_reg; reg %d\n", regno);
3294 print_rtl_and_abort ();
3297 /* A subroutine of update_life_info. Verify that there are no untoward
3298 changes in live_at_start during a local update. */
3301 verify_local_live_at_start (new_live_at_start, bb)
3302 regset new_live_at_start;
3305 if (reload_completed)
3307 /* After reload, there are no pseudos, nor subregs of multi-word
3308 registers. The regsets should exactly match. */
3309 if (! REG_SET_EQUAL_P (new_live_at_start, bb->global_live_at_start))
3313 fprintf (rtl_dump_file,
3314 "live_at_start mismatch in bb %d, aborting\n",
3316 debug_bitmap_file (rtl_dump_file, bb->global_live_at_start);
3317 debug_bitmap_file (rtl_dump_file, new_live_at_start);
3319 print_rtl_and_abort ();
3326 /* Find the set of changed registers. */
3327 XOR_REG_SET (new_live_at_start, bb->global_live_at_start);
3329 EXECUTE_IF_SET_IN_REG_SET (new_live_at_start, 0, i,
3331 /* No registers should die. */
3332 if (REGNO_REG_SET_P (bb->global_live_at_start, i))
3335 fprintf (rtl_dump_file,
3336 "Register %d died unexpectedly in block %d\n", i,
3338 print_rtl_and_abort ();
3341 /* Verify that the now-live register is wider than word_mode. */
3342 verify_wide_reg (i, bb->head, bb->end);
3347 /* Updates life information starting with the basic blocks set in BLOCKS.
3348 If BLOCKS is null, consider it to be the universal set.
3350 If EXTENT is UPDATE_LIFE_LOCAL, such as after splitting or peepholeing,
3351 we are only expecting local modifications to basic blocks. If we find
3352 extra registers live at the beginning of a block, then we either killed
3353 useful data, or we have a broken split that wants data not provided.
3354 If we find registers removed from live_at_start, that means we have
3355 a broken peephole that is killing a register it shouldn't.
3357 ??? This is not true in one situation -- when a pre-reload splitter
3358 generates subregs of a multi-word pseudo, current life analysis will
3359 lose the kill. So we _can_ have a pseudo go live. How irritating.
3361 Including PROP_REG_INFO does not properly refresh regs_ever_live
3362 unless the caller resets it to zero. */
3365 update_life_info (blocks, extent, prop_flags)
3367 enum update_life_extent extent;
3371 regset_head tmp_head;
3374 tmp = INITIALIZE_REG_SET (tmp_head);
3376 /* For a global update, we go through the relaxation process again. */
3377 if (extent != UPDATE_LIFE_LOCAL)
3379 calculate_global_regs_live (blocks, blocks,
3380 prop_flags & PROP_SCAN_DEAD_CODE);
3382 /* If asked, remove notes from the blocks we'll update. */
3383 if (extent == UPDATE_LIFE_GLOBAL_RM_NOTES)
3384 count_or_remove_death_notes (blocks, 1);
3389 EXECUTE_IF_SET_IN_SBITMAP (blocks, 0, i,
3391 basic_block bb = BASIC_BLOCK (i);
3393 COPY_REG_SET (tmp, bb->global_live_at_end);
3394 propagate_block (bb, tmp, NULL, NULL, prop_flags);
3396 if (extent == UPDATE_LIFE_LOCAL)
3397 verify_local_live_at_start (tmp, bb);
3402 for (i = n_basic_blocks - 1; i >= 0; --i)
3404 basic_block bb = BASIC_BLOCK (i);
3406 COPY_REG_SET (tmp, bb->global_live_at_end);
3407 propagate_block (bb, tmp, NULL, NULL, prop_flags);
3409 if (extent == UPDATE_LIFE_LOCAL)
3410 verify_local_live_at_start (tmp, bb);
3416 if (prop_flags & PROP_REG_INFO)
3418 /* The only pseudos that are live at the beginning of the function
3419 are those that were not set anywhere in the function. local-alloc
3420 doesn't know how to handle these correctly, so mark them as not
3421 local to any one basic block. */
3422 EXECUTE_IF_SET_IN_REG_SET (ENTRY_BLOCK_PTR->global_live_at_end,
3423 FIRST_PSEUDO_REGISTER, i,
3424 { REG_BASIC_BLOCK (i) = REG_BLOCK_GLOBAL; });
3426 /* We have a problem with any pseudoreg that lives across the setjmp.
3427 ANSI says that if a user variable does not change in value between
3428 the setjmp and the longjmp, then the longjmp preserves it. This
3429 includes longjmp from a place where the pseudo appears dead.
3430 (In principle, the value still exists if it is in scope.)
3431 If the pseudo goes in a hard reg, some other value may occupy
3432 that hard reg where this pseudo is dead, thus clobbering the pseudo.
3433 Conclusion: such a pseudo must not go in a hard reg. */
3434 EXECUTE_IF_SET_IN_REG_SET (regs_live_at_setjmp,
3435 FIRST_PSEUDO_REGISTER, i,
3437 if (regno_reg_rtx[i] != 0)
3439 REG_LIVE_LENGTH (i) = -1;
3440 REG_BASIC_BLOCK (i) = REG_BLOCK_UNKNOWN;
3446 /* Free the variables allocated by find_basic_blocks.
3448 KEEP_HEAD_END_P is non-zero if basic_block_info is not to be freed. */
3451 free_basic_block_vars (keep_head_end_p)
3452 int keep_head_end_p;
3454 if (basic_block_for_insn)
3456 VARRAY_FREE (basic_block_for_insn);
3457 basic_block_for_insn = NULL;
3460 if (! keep_head_end_p)
3462 if (basic_block_info)
3465 VARRAY_FREE (basic_block_info);
3469 ENTRY_BLOCK_PTR->aux = NULL;
3470 ENTRY_BLOCK_PTR->global_live_at_end = NULL;
3471 EXIT_BLOCK_PTR->aux = NULL;
3472 EXIT_BLOCK_PTR->global_live_at_start = NULL;
3476 /* Return nonzero if an insn consists only of SETs, each of which only sets a
3483 rtx pat = PATTERN (insn);
3485 /* Insns carrying these notes are useful later on. */
3486 if (find_reg_note (insn, REG_EQUAL, NULL_RTX))
3489 if (GET_CODE (pat) == SET && set_noop_p (pat))
3492 if (GET_CODE (pat) == PARALLEL)
3495 /* If nothing but SETs of registers to themselves,
3496 this insn can also be deleted. */
3497 for (i = 0; i < XVECLEN (pat, 0); i++)
3499 rtx tem = XVECEXP (pat, 0, i);
3501 if (GET_CODE (tem) == USE
3502 || GET_CODE (tem) == CLOBBER)
3505 if (GET_CODE (tem) != SET || ! set_noop_p (tem))
3514 /* Delete any insns that copy a register to itself. */
3517 delete_noop_moves (f)
3521 for (insn = f; insn; insn = NEXT_INSN (insn))
3523 if (GET_CODE (insn) == INSN && noop_move_p (insn))
3525 PUT_CODE (insn, NOTE);
3526 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
3527 NOTE_SOURCE_FILE (insn) = 0;
3532 /* Determine if the stack pointer is constant over the life of the function.
3533 Only useful before prologues have been emitted. */
3536 notice_stack_pointer_modification_1 (x, pat, data)
3538 rtx pat ATTRIBUTE_UNUSED;
3539 void *data ATTRIBUTE_UNUSED;
3541 if (x == stack_pointer_rtx
3542 /* The stack pointer is only modified indirectly as the result
3543 of a push until later in flow. See the comments in rtl.texi
3544 regarding Embedded Side-Effects on Addresses. */
3545 || (GET_CODE (x) == MEM
3546 && GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == 'a'
3547 && XEXP (XEXP (x, 0), 0) == stack_pointer_rtx))
3548 current_function_sp_is_unchanging = 0;
3552 notice_stack_pointer_modification (f)
3557 /* Assume that the stack pointer is unchanging if alloca hasn't
3559 current_function_sp_is_unchanging = !current_function_calls_alloca;
3560 if (! current_function_sp_is_unchanging)
3563 for (insn = f; insn; insn = NEXT_INSN (insn))
3567 /* Check if insn modifies the stack pointer. */
3568 note_stores (PATTERN (insn), notice_stack_pointer_modification_1,
3570 if (! current_function_sp_is_unchanging)
3576 /* Mark a register in SET. Hard registers in large modes get all
3577 of their component registers set as well. */
3580 mark_reg (reg, xset)
3584 regset set = (regset) xset;
3585 int regno = REGNO (reg);
3587 if (GET_MODE (reg) == BLKmode)
3590 SET_REGNO_REG_SET (set, regno);
3591 if (regno < FIRST_PSEUDO_REGISTER)
3593 int n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
3595 SET_REGNO_REG_SET (set, regno + n);
3599 /* Mark those regs which are needed at the end of the function as live
3600 at the end of the last basic block. */
3603 mark_regs_live_at_end (set)
3608 /* If exiting needs the right stack value, consider the stack pointer
3609 live at the end of the function. */
3610 if ((HAVE_epilogue && reload_completed)
3611 || ! EXIT_IGNORE_STACK
3612 || (! FRAME_POINTER_REQUIRED
3613 && ! current_function_calls_alloca
3614 && flag_omit_frame_pointer)
3615 || current_function_sp_is_unchanging)
3617 SET_REGNO_REG_SET (set, STACK_POINTER_REGNUM);
3620 /* Mark the frame pointer if needed at the end of the function. If
3621 we end up eliminating it, it will be removed from the live list
3622 of each basic block by reload. */
3624 if (! reload_completed || frame_pointer_needed)
3626 SET_REGNO_REG_SET (set, FRAME_POINTER_REGNUM);
3627 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3628 /* If they are different, also mark the hard frame pointer as live. */
3629 if (! LOCAL_REGNO (HARD_FRAME_POINTER_REGNUM))
3630 SET_REGNO_REG_SET (set, HARD_FRAME_POINTER_REGNUM);
3634 #ifndef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
3635 /* Many architectures have a GP register even without flag_pic.
3636 Assume the pic register is not in use, or will be handled by
3637 other means, if it is not fixed. */
3638 if (PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
3639 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
3640 SET_REGNO_REG_SET (set, PIC_OFFSET_TABLE_REGNUM);
3643 /* Mark all global registers, and all registers used by the epilogue
3644 as being live at the end of the function since they may be
3645 referenced by our caller. */
3646 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3647 if (global_regs[i] || EPILOGUE_USES (i))
3648 SET_REGNO_REG_SET (set, i);
3650 if (HAVE_epilogue && reload_completed)
3652 /* Mark all call-saved registers that we actually used. */
3653 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3654 if (regs_ever_live[i] && ! call_used_regs[i] && ! LOCAL_REGNO (i))
3655 SET_REGNO_REG_SET (set, i);
3658 #ifdef EH_RETURN_DATA_REGNO
3659 /* Mark the registers that will contain data for the handler. */
3660 if (reload_completed && current_function_calls_eh_return)
3663 unsigned regno = EH_RETURN_DATA_REGNO(i);
3664 if (regno == INVALID_REGNUM)
3666 SET_REGNO_REG_SET (set, regno);
3669 #ifdef EH_RETURN_STACKADJ_RTX
3670 if ((! HAVE_epilogue || ! reload_completed)
3671 && current_function_calls_eh_return)
3673 rtx tmp = EH_RETURN_STACKADJ_RTX;
3674 if (tmp && REG_P (tmp))
3675 mark_reg (tmp, set);
3678 #ifdef EH_RETURN_HANDLER_RTX
3679 if ((! HAVE_epilogue || ! reload_completed)
3680 && current_function_calls_eh_return)
3682 rtx tmp = EH_RETURN_HANDLER_RTX;
3683 if (tmp && REG_P (tmp))
3684 mark_reg (tmp, set);
3688 /* Mark function return value. */
3689 diddle_return_value (mark_reg, set);
3692 /* Callback function for for_each_successor_phi. DATA is a regset.
3693 Sets the SRC_REGNO, the regno of the phi alternative for phi node
3694 INSN, in the regset. */
3697 set_phi_alternative_reg (insn, dest_regno, src_regno, data)
3698 rtx insn ATTRIBUTE_UNUSED;
3699 int dest_regno ATTRIBUTE_UNUSED;
3703 regset live = (regset) data;
3704 SET_REGNO_REG_SET (live, src_regno);
3708 /* Propagate global life info around the graph of basic blocks. Begin
3709 considering blocks with their corresponding bit set in BLOCKS_IN.
3710 If BLOCKS_IN is null, consider it the universal set.
3712 BLOCKS_OUT is set for every block that was changed. */
3715 calculate_global_regs_live (blocks_in, blocks_out, flags)
3716 sbitmap blocks_in, blocks_out;
3719 basic_block *queue, *qhead, *qtail, *qend;
3720 regset tmp, new_live_at_end, call_used;
3721 regset_head tmp_head, call_used_head;
3722 regset_head new_live_at_end_head;
3725 tmp = INITIALIZE_REG_SET (tmp_head);
3726 new_live_at_end = INITIALIZE_REG_SET (new_live_at_end_head);
3727 call_used = INITIALIZE_REG_SET (call_used_head);
3729 /* Inconveniently, this is only redily available in hard reg set form. */
3730 for (i = 0; i < FIRST_PSEUDO_REGISTER; ++i)
3731 if (call_used_regs[i])
3732 SET_REGNO_REG_SET (call_used, i);
3734 /* Create a worklist. Allocate an extra slot for ENTRY_BLOCK, and one
3735 because the `head == tail' style test for an empty queue doesn't
3736 work with a full queue. */
3737 queue = (basic_block *) xmalloc ((n_basic_blocks + 2) * sizeof (*queue));
3739 qhead = qend = queue + n_basic_blocks + 2;
3741 /* Queue the blocks set in the initial mask. Do this in reverse block
3742 number order so that we are more likely for the first round to do
3743 useful work. We use AUX non-null to flag that the block is queued. */
3746 /* Clear out the garbage that might be hanging out in bb->aux. */
3747 for (i = n_basic_blocks - 1; i >= 0; --i)
3748 BASIC_BLOCK (i)->aux = NULL;
3750 EXECUTE_IF_SET_IN_SBITMAP (blocks_in, 0, i,
3752 basic_block bb = BASIC_BLOCK (i);
3759 for (i = 0; i < n_basic_blocks; ++i)
3761 basic_block bb = BASIC_BLOCK (i);
3768 sbitmap_zero (blocks_out);
3770 /* We work through the queue until there are no more blocks. What
3771 is live at the end of this block is precisely the union of what
3772 is live at the beginning of all its successors. So, we set its
3773 GLOBAL_LIVE_AT_END field based on the GLOBAL_LIVE_AT_START field
3774 for its successors. Then, we compute GLOBAL_LIVE_AT_START for
3775 this block by walking through the instructions in this block in
3776 reverse order and updating as we go. If that changed
3777 GLOBAL_LIVE_AT_START, we add the predecessors of the block to the
3778 queue; they will now need to recalculate GLOBAL_LIVE_AT_END.
3780 We are guaranteed to terminate, because GLOBAL_LIVE_AT_START
3781 never shrinks. If a register appears in GLOBAL_LIVE_AT_START, it
3782 must either be live at the end of the block, or used within the
3783 block. In the latter case, it will certainly never disappear
3784 from GLOBAL_LIVE_AT_START. In the former case, the register
3785 could go away only if it disappeared from GLOBAL_LIVE_AT_START
3786 for one of the successor blocks. By induction, that cannot
3788 while (qhead != qtail)
3790 int rescan, changed;
3799 /* Begin by propagating live_at_start from the successor blocks. */
3800 CLEAR_REG_SET (new_live_at_end);
3801 for (e = bb->succ; e; e = e->succ_next)
3803 basic_block sb = e->dest;
3805 /* Call-clobbered registers die across exception and call edges. */
3806 /* ??? Abnormal call edges ignored for the moment, as this gets
3807 confused by sibling call edges, which crashes reg-stack. */
3808 if (e->flags & EDGE_EH)
3810 bitmap_operation (tmp, sb->global_live_at_start,
3811 call_used, BITMAP_AND_COMPL);
3812 IOR_REG_SET (new_live_at_end, tmp);
3815 IOR_REG_SET (new_live_at_end, sb->global_live_at_start);
3818 /* The all-important stack pointer must always be live. */
3819 SET_REGNO_REG_SET (new_live_at_end, STACK_POINTER_REGNUM);
3821 /* Before reload, there are a few registers that must be forced
3822 live everywhere -- which might not already be the case for
3823 blocks within infinite loops. */
3824 if (! reload_completed)
3826 /* Any reference to any pseudo before reload is a potential
3827 reference of the frame pointer. */
3828 SET_REGNO_REG_SET (new_live_at_end, FRAME_POINTER_REGNUM);
3830 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3831 /* Pseudos with argument area equivalences may require
3832 reloading via the argument pointer. */
3833 if (fixed_regs[ARG_POINTER_REGNUM])
3834 SET_REGNO_REG_SET (new_live_at_end, ARG_POINTER_REGNUM);
3837 /* Any constant, or pseudo with constant equivalences, may
3838 require reloading from memory using the pic register. */
3839 if (PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
3840 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
3841 SET_REGNO_REG_SET (new_live_at_end, PIC_OFFSET_TABLE_REGNUM);
3844 /* Regs used in phi nodes are not included in
3845 global_live_at_start, since they are live only along a
3846 particular edge. Set those regs that are live because of a
3847 phi node alternative corresponding to this particular block. */
3849 for_each_successor_phi (bb, &set_phi_alternative_reg,
3852 if (bb == ENTRY_BLOCK_PTR)
3854 COPY_REG_SET (bb->global_live_at_end, new_live_at_end);
3858 /* On our first pass through this block, we'll go ahead and continue.
3859 Recognize first pass by local_set NULL. On subsequent passes, we
3860 get to skip out early if live_at_end wouldn't have changed. */
3862 if (bb->local_set == NULL)
3864 bb->local_set = OBSTACK_ALLOC_REG_SET (&flow_obstack);
3865 bb->cond_local_set = OBSTACK_ALLOC_REG_SET (&flow_obstack);
3870 /* If any bits were removed from live_at_end, we'll have to
3871 rescan the block. This wouldn't be necessary if we had
3872 precalculated local_live, however with PROP_SCAN_DEAD_CODE
3873 local_live is really dependent on live_at_end. */
3874 CLEAR_REG_SET (tmp);
3875 rescan = bitmap_operation (tmp, bb->global_live_at_end,
3876 new_live_at_end, BITMAP_AND_COMPL);
3880 /* If any of the registers in the new live_at_end set are
3881 conditionally set in this basic block, we must rescan.
3882 This is because conditional lifetimes at the end of the
3883 block do not just take the live_at_end set into account,
3884 but also the liveness at the start of each successor
3885 block. We can miss changes in those sets if we only
3886 compare the new live_at_end against the previous one. */
3887 CLEAR_REG_SET (tmp);
3888 rescan = bitmap_operation (tmp, new_live_at_end,
3889 bb->cond_local_set, BITMAP_AND);
3894 /* Find the set of changed bits. Take this opportunity
3895 to notice that this set is empty and early out. */
3896 CLEAR_REG_SET (tmp);
3897 changed = bitmap_operation (tmp, bb->global_live_at_end,
3898 new_live_at_end, BITMAP_XOR);
3902 /* If any of the changed bits overlap with local_set,
3903 we'll have to rescan the block. Detect overlap by
3904 the AND with ~local_set turning off bits. */
3905 rescan = bitmap_operation (tmp, tmp, bb->local_set,
3910 /* Let our caller know that BB changed enough to require its
3911 death notes updated. */
3913 SET_BIT (blocks_out, bb->index);
3917 /* Add to live_at_start the set of all registers in
3918 new_live_at_end that aren't in the old live_at_end. */
3920 bitmap_operation (tmp, new_live_at_end, bb->global_live_at_end,
3922 COPY_REG_SET (bb->global_live_at_end, new_live_at_end);
3924 changed = bitmap_operation (bb->global_live_at_start,
3925 bb->global_live_at_start,
3932 COPY_REG_SET (bb->global_live_at_end, new_live_at_end);
3934 /* Rescan the block insn by insn to turn (a copy of) live_at_end
3935 into live_at_start. */
3936 propagate_block (bb, new_live_at_end, bb->local_set,
3937 bb->cond_local_set, flags);
3939 /* If live_at start didn't change, no need to go farther. */
3940 if (REG_SET_EQUAL_P (bb->global_live_at_start, new_live_at_end))
3943 COPY_REG_SET (bb->global_live_at_start, new_live_at_end);
3946 /* Queue all predecessors of BB so that we may re-examine
3947 their live_at_end. */
3948 for (e = bb->pred; e; e = e->pred_next)
3950 basic_block pb = e->src;
3951 if (pb->aux == NULL)
3962 FREE_REG_SET (new_live_at_end);
3963 FREE_REG_SET (call_used);
3967 EXECUTE_IF_SET_IN_SBITMAP (blocks_out, 0, i,
3969 basic_block bb = BASIC_BLOCK (i);
3970 FREE_REG_SET (bb->local_set);
3971 FREE_REG_SET (bb->cond_local_set);
3976 for (i = n_basic_blocks - 1; i >= 0; --i)
3978 basic_block bb = BASIC_BLOCK (i);
3979 FREE_REG_SET (bb->local_set);
3980 FREE_REG_SET (bb->cond_local_set);
3987 /* Subroutines of life analysis. */
3989 /* Allocate the permanent data structures that represent the results
3990 of life analysis. Not static since used also for stupid life analysis. */
3993 allocate_bb_life_data ()
3997 for (i = 0; i < n_basic_blocks; i++)
3999 basic_block bb = BASIC_BLOCK (i);
4001 bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (&flow_obstack);
4002 bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (&flow_obstack);
4005 ENTRY_BLOCK_PTR->global_live_at_end
4006 = OBSTACK_ALLOC_REG_SET (&flow_obstack);
4007 EXIT_BLOCK_PTR->global_live_at_start
4008 = OBSTACK_ALLOC_REG_SET (&flow_obstack);
4010 regs_live_at_setjmp = OBSTACK_ALLOC_REG_SET (&flow_obstack);
4014 allocate_reg_life_data ()
4018 max_regno = max_reg_num ();
4020 /* Recalculate the register space, in case it has grown. Old style
4021 vector oriented regsets would set regset_{size,bytes} here also. */
4022 allocate_reg_info (max_regno, FALSE, FALSE);
4024 /* Reset all the data we'll collect in propagate_block and its
4026 for (i = 0; i < max_regno; i++)
4030 REG_N_DEATHS (i) = 0;
4031 REG_N_CALLS_CROSSED (i) = 0;
4032 REG_LIVE_LENGTH (i) = 0;
4033 REG_BASIC_BLOCK (i) = REG_BLOCK_UNKNOWN;
4037 /* Delete dead instructions for propagate_block. */
4040 propagate_block_delete_insn (bb, insn)
4044 rtx inote = find_reg_note (insn, REG_LABEL, NULL_RTX);
4046 /* If the insn referred to a label, and that label was attached to
4047 an ADDR_VEC, it's safe to delete the ADDR_VEC. In fact, it's
4048 pretty much mandatory to delete it, because the ADDR_VEC may be
4049 referencing labels that no longer exist.
4051 INSN may reference a deleted label, particularly when a jump
4052 table has been optimized into a direct jump. There's no
4053 real good way to fix up the reference to the deleted label
4054 when the label is deleted, so we just allow it here.
4056 After dead code elimination is complete, we do search for
4057 any REG_LABEL notes which reference deleted labels as a
4060 if (inote && GET_CODE (inote) == CODE_LABEL)
4062 rtx label = XEXP (inote, 0);
4065 /* The label may be forced if it has been put in the constant
4066 pool. If that is the only use we must discard the table
4067 jump following it, but not the label itself. */
4068 if (LABEL_NUSES (label) == 1 + LABEL_PRESERVE_P (label)
4069 && (next = next_nonnote_insn (label)) != NULL
4070 && GET_CODE (next) == JUMP_INSN
4071 && (GET_CODE (PATTERN (next)) == ADDR_VEC
4072 || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
4074 rtx pat = PATTERN (next);
4075 int diff_vec_p = GET_CODE (pat) == ADDR_DIFF_VEC;
4076 int len = XVECLEN (pat, diff_vec_p);
4079 for (i = 0; i < len; i++)
4080 LABEL_NUSES (XEXP (XVECEXP (pat, diff_vec_p, i), 0))--;
4082 flow_delete_insn (next);
4086 if (bb->end == insn)
4087 bb->end = PREV_INSN (insn);
4088 flow_delete_insn (insn);
4091 /* Delete dead libcalls for propagate_block. Return the insn
4092 before the libcall. */
4095 propagate_block_delete_libcall (bb, insn, note)
4099 rtx first = XEXP (note, 0);
4100 rtx before = PREV_INSN (first);
4102 if (insn == bb->end)
4105 flow_delete_insn_chain (first, insn);
4109 /* Update the life-status of regs for one insn. Return the previous insn. */
4112 propagate_one_insn (pbi, insn)
4113 struct propagate_block_info *pbi;
4116 rtx prev = PREV_INSN (insn);
4117 int flags = pbi->flags;
4118 int insn_is_dead = 0;
4119 int libcall_is_dead = 0;
4123 if (! INSN_P (insn))
4126 note = find_reg_note (insn, REG_RETVAL, NULL_RTX);
4127 if (flags & PROP_SCAN_DEAD_CODE)
4129 insn_is_dead = insn_dead_p (pbi, PATTERN (insn), 0, REG_NOTES (insn));
4130 libcall_is_dead = (insn_is_dead && note != 0
4131 && libcall_dead_p (pbi, note, insn));
4134 /* If an instruction consists of just dead store(s) on final pass,
4136 if ((flags & PROP_KILL_DEAD_CODE) && insn_is_dead)
4138 /* If we're trying to delete a prologue or epilogue instruction
4139 that isn't flagged as possibly being dead, something is wrong.
4140 But if we are keeping the stack pointer depressed, we might well
4141 be deleting insns that are used to compute the amount to update
4142 it by, so they are fine. */
4143 if (reload_completed
4144 && !(TREE_CODE (TREE_TYPE (current_function_decl)) == FUNCTION_TYPE
4145 && (TYPE_RETURNS_STACK_DEPRESSED
4146 (TREE_TYPE (current_function_decl))))
4147 && (((HAVE_epilogue || HAVE_prologue)
4148 && prologue_epilogue_contains (insn))
4149 || (HAVE_sibcall_epilogue
4150 && sibcall_epilogue_contains (insn)))
4151 && find_reg_note (insn, REG_MAYBE_DEAD, NULL_RTX) == 0)
4154 /* Record sets. Do this even for dead instructions, since they
4155 would have killed the values if they hadn't been deleted. */
4156 mark_set_regs (pbi, PATTERN (insn), insn);
4158 /* CC0 is now known to be dead. Either this insn used it,
4159 in which case it doesn't anymore, or clobbered it,
4160 so the next insn can't use it. */
4163 if (libcall_is_dead)
4164 prev = propagate_block_delete_libcall (pbi->bb, insn, note);
4166 propagate_block_delete_insn (pbi->bb, insn);
4171 /* See if this is an increment or decrement that can be merged into
4172 a following memory address. */
4175 register rtx x = single_set (insn);
4177 /* Does this instruction increment or decrement a register? */
4178 if ((flags & PROP_AUTOINC)
4180 && GET_CODE (SET_DEST (x)) == REG
4181 && (GET_CODE (SET_SRC (x)) == PLUS
4182 || GET_CODE (SET_SRC (x)) == MINUS)
4183 && XEXP (SET_SRC (x), 0) == SET_DEST (x)
4184 && GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT
4185 /* Ok, look for a following memory ref we can combine with.
4186 If one is found, change the memory ref to a PRE_INC
4187 or PRE_DEC, cancel this insn, and return 1.
4188 Return 0 if nothing has been done. */
4189 && try_pre_increment_1 (pbi, insn))
4192 #endif /* AUTO_INC_DEC */
4194 CLEAR_REG_SET (pbi->new_set);
4196 /* If this is not the final pass, and this insn is copying the value of
4197 a library call and it's dead, don't scan the insns that perform the
4198 library call, so that the call's arguments are not marked live. */
4199 if (libcall_is_dead)
4201 /* Record the death of the dest reg. */
4202 mark_set_regs (pbi, PATTERN (insn), insn);
4204 insn = XEXP (note, 0);
4205 return PREV_INSN (insn);
4207 else if (GET_CODE (PATTERN (insn)) == SET
4208 && SET_DEST (PATTERN (insn)) == stack_pointer_rtx
4209 && GET_CODE (SET_SRC (PATTERN (insn))) == PLUS
4210 && XEXP (SET_SRC (PATTERN (insn)), 0) == stack_pointer_rtx
4211 && GET_CODE (XEXP (SET_SRC (PATTERN (insn)), 1)) == CONST_INT)
4212 /* We have an insn to pop a constant amount off the stack.
4213 (Such insns use PLUS regardless of the direction of the stack,
4214 and any insn to adjust the stack by a constant is always a pop.)
4215 These insns, if not dead stores, have no effect on life. */
4219 /* Any regs live at the time of a call instruction must not go
4220 in a register clobbered by calls. Find all regs now live and
4221 record this for them. */
4223 if (GET_CODE (insn) == CALL_INSN && (flags & PROP_REG_INFO))
4224 EXECUTE_IF_SET_IN_REG_SET (pbi->reg_live, 0, i,
4225 { REG_N_CALLS_CROSSED (i)++; });
4227 /* Record sets. Do this even for dead instructions, since they
4228 would have killed the values if they hadn't been deleted. */
4229 mark_set_regs (pbi, PATTERN (insn), insn);
4231 if (GET_CODE (insn) == CALL_INSN)
4237 if (GET_CODE (PATTERN (insn)) == COND_EXEC)
4238 cond = COND_EXEC_TEST (PATTERN (insn));
4240 /* Non-constant calls clobber memory. */
4241 if (! CONST_CALL_P (insn))
4243 free_EXPR_LIST_list (&pbi->mem_set_list);
4244 pbi->mem_set_list_len = 0;
4247 /* There may be extra registers to be clobbered. */
4248 for (note = CALL_INSN_FUNCTION_USAGE (insn);
4250 note = XEXP (note, 1))
4251 if (GET_CODE (XEXP (note, 0)) == CLOBBER)
4252 mark_set_1 (pbi, CLOBBER, XEXP (XEXP (note, 0), 0),
4253 cond, insn, pbi->flags);
4255 /* Calls change all call-used and global registers. */
4256 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4257 if (call_used_regs[i] && ! global_regs[i]
4260 /* We do not want REG_UNUSED notes for these registers. */
4261 mark_set_1 (pbi, CLOBBER, gen_rtx_REG (reg_raw_mode[i], i),
4263 pbi->flags & ~(PROP_DEATH_NOTES | PROP_REG_INFO));
4267 /* If an insn doesn't use CC0, it becomes dead since we assume
4268 that every insn clobbers it. So show it dead here;
4269 mark_used_regs will set it live if it is referenced. */
4274 mark_used_regs (pbi, PATTERN (insn), NULL_RTX, insn);
4276 /* Sometimes we may have inserted something before INSN (such as a move)
4277 when we make an auto-inc. So ensure we will scan those insns. */
4279 prev = PREV_INSN (insn);
4282 if (! insn_is_dead && GET_CODE (insn) == CALL_INSN)
4288 if (GET_CODE (PATTERN (insn)) == COND_EXEC)
4289 cond = COND_EXEC_TEST (PATTERN (insn));
4291 /* Calls use their arguments. */
4292 for (note = CALL_INSN_FUNCTION_USAGE (insn);
4294 note = XEXP (note, 1))
4295 if (GET_CODE (XEXP (note, 0)) == USE)
4296 mark_used_regs (pbi, XEXP (XEXP (note, 0), 0),
4299 /* The stack ptr is used (honorarily) by a CALL insn. */
4300 SET_REGNO_REG_SET (pbi->reg_live, STACK_POINTER_REGNUM);
4302 /* Calls may also reference any of the global registers,
4303 so they are made live. */
4304 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4306 mark_used_reg (pbi, gen_rtx_REG (reg_raw_mode[i], i),
4311 /* On final pass, update counts of how many insns in which each reg
4313 if (flags & PROP_REG_INFO)
4314 EXECUTE_IF_SET_IN_REG_SET (pbi->reg_live, 0, i,
4315 { REG_LIVE_LENGTH (i)++; });
4320 /* Initialize a propagate_block_info struct for public consumption.
4321 Note that the structure itself is opaque to this file, but that
4322 the user can use the regsets provided here. */
4324 struct propagate_block_info *
4325 init_propagate_block_info (bb, live, local_set, cond_local_set, flags)
4327 regset live, local_set, cond_local_set;
4330 struct propagate_block_info *pbi = xmalloc (sizeof (*pbi));
4333 pbi->reg_live = live;
4334 pbi->mem_set_list = NULL_RTX;
4335 pbi->mem_set_list_len = 0;
4336 pbi->local_set = local_set;
4337 pbi->cond_local_set = cond_local_set;
4341 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
4342 pbi->reg_next_use = (rtx *) xcalloc (max_reg_num (), sizeof (rtx));
4344 pbi->reg_next_use = NULL;
4346 pbi->new_set = BITMAP_XMALLOC ();
4348 #ifdef HAVE_conditional_execution
4349 pbi->reg_cond_dead = splay_tree_new (splay_tree_compare_ints, NULL,
4350 free_reg_cond_life_info);
4351 pbi->reg_cond_reg = BITMAP_XMALLOC ();
4353 /* If this block ends in a conditional branch, for each register live
4354 from one side of the branch and not the other, record the register
4355 as conditionally dead. */
4356 if (GET_CODE (bb->end) == JUMP_INSN
4357 && any_condjump_p (bb->end))
4359 regset_head diff_head;
4360 regset diff = INITIALIZE_REG_SET (diff_head);
4361 basic_block bb_true, bb_false;
4362 rtx cond_true, cond_false, set_src;
4365 /* Identify the successor blocks. */
4366 bb_true = bb->succ->dest;
4367 if (bb->succ->succ_next != NULL)
4369 bb_false = bb->succ->succ_next->dest;
4371 if (bb->succ->flags & EDGE_FALLTHRU)
4373 basic_block t = bb_false;
4377 else if (! (bb->succ->succ_next->flags & EDGE_FALLTHRU))
4382 /* This can happen with a conditional jump to the next insn. */
4383 if (JUMP_LABEL (bb->end) != bb_true->head)
4386 /* Simplest way to do nothing. */
4390 /* Extract the condition from the branch. */
4391 set_src = SET_SRC (pc_set (bb->end));
4392 cond_true = XEXP (set_src, 0);
4393 cond_false = gen_rtx_fmt_ee (reverse_condition (GET_CODE (cond_true)),
4394 GET_MODE (cond_true), XEXP (cond_true, 0),
4395 XEXP (cond_true, 1));
4396 if (GET_CODE (XEXP (set_src, 1)) == PC)
4399 cond_false = cond_true;
4403 /* Compute which register lead different lives in the successors. */
4404 if (bitmap_operation (diff, bb_true->global_live_at_start,
4405 bb_false->global_live_at_start, BITMAP_XOR))
4407 rtx reg = XEXP (cond_true, 0);
4409 if (GET_CODE (reg) == SUBREG)
4410 reg = SUBREG_REG (reg);
4412 if (GET_CODE (reg) != REG)
4415 SET_REGNO_REG_SET (pbi->reg_cond_reg, REGNO (reg));
4417 /* For each such register, mark it conditionally dead. */
4418 EXECUTE_IF_SET_IN_REG_SET
4421 struct reg_cond_life_info *rcli;
4424 rcli = (struct reg_cond_life_info *) xmalloc (sizeof (*rcli));
4426 if (REGNO_REG_SET_P (bb_true->global_live_at_start, i))
4430 rcli->condition = cond;
4431 rcli->stores = const0_rtx;
4432 rcli->orig_condition = cond;
4434 splay_tree_insert (pbi->reg_cond_dead, i,
4435 (splay_tree_value) rcli);
4439 FREE_REG_SET (diff);
4443 /* If this block has no successors, any stores to the frame that aren't
4444 used later in the block are dead. So make a pass over the block
4445 recording any such that are made and show them dead at the end. We do
4446 a very conservative and simple job here. */
4448 && ! (TREE_CODE (TREE_TYPE (current_function_decl)) == FUNCTION_TYPE
4449 && (TYPE_RETURNS_STACK_DEPRESSED
4450 (TREE_TYPE (current_function_decl))))
4451 && (flags & PROP_SCAN_DEAD_CODE)
4452 && (bb->succ == NULL
4453 || (bb->succ->succ_next == NULL
4454 && bb->succ->dest == EXIT_BLOCK_PTR
4455 && ! current_function_calls_eh_return)))
4458 for (insn = bb->end; insn != bb->head; insn = PREV_INSN (insn))
4459 if (GET_CODE (insn) == INSN
4460 && (set = single_set (insn))
4461 && GET_CODE (SET_DEST (set)) == MEM)
4463 rtx mem = SET_DEST (set);
4464 rtx canon_mem = canon_rtx (mem);
4466 /* This optimization is performed by faking a store to the
4467 memory at the end of the block. This doesn't work for
4468 unchanging memories because multiple stores to unchanging
4469 memory is illegal and alias analysis doesn't consider it. */
4470 if (RTX_UNCHANGING_P (canon_mem))
4473 if (XEXP (canon_mem, 0) == frame_pointer_rtx
4474 || (GET_CODE (XEXP (canon_mem, 0)) == PLUS
4475 && XEXP (XEXP (canon_mem, 0), 0) == frame_pointer_rtx
4476 && GET_CODE (XEXP (XEXP (canon_mem, 0), 1)) == CONST_INT))
4479 /* Store a copy of mem, otherwise the address may be scrogged
4480 by find_auto_inc. This matters because insn_dead_p uses
4481 an rtx_equal_p check to determine if two addresses are
4482 the same. This works before find_auto_inc, but fails
4483 after find_auto_inc, causing discrepencies between the
4484 set of live registers calculated during the
4485 calculate_global_regs_live phase and what actually exists
4486 after flow completes, leading to aborts. */
4487 if (flags & PROP_AUTOINC)
4488 mem = shallow_copy_rtx (mem);
4490 pbi->mem_set_list = alloc_EXPR_LIST (0, mem, pbi->mem_set_list);
4491 if (++pbi->mem_set_list_len >= MAX_MEM_SET_LIST_LEN)
4500 /* Release a propagate_block_info struct. */
4503 free_propagate_block_info (pbi)
4504 struct propagate_block_info *pbi;
4506 free_EXPR_LIST_list (&pbi->mem_set_list);
4508 BITMAP_XFREE (pbi->new_set);
4510 #ifdef HAVE_conditional_execution
4511 splay_tree_delete (pbi->reg_cond_dead);
4512 BITMAP_XFREE (pbi->reg_cond_reg);
4515 if (pbi->reg_next_use)
4516 free (pbi->reg_next_use);
4521 /* Compute the registers live at the beginning of a basic block BB from
4522 those live at the end.
4524 When called, REG_LIVE contains those live at the end. On return, it
4525 contains those live at the beginning.
4527 LOCAL_SET, if non-null, will be set with all registers killed
4528 unconditionally by this basic block.
4529 Likewise, COND_LOCAL_SET, if non-null, will be set with all registers
4530 killed conditionally by this basic block. If there is any unconditional
4531 set of a register, then the corresponding bit will be set in LOCAL_SET
4532 and cleared in COND_LOCAL_SET.
4533 It is valid for LOCAL_SET and COND_LOCAL_SET to be the same set. In this
4534 case, the resulting set will be equal to the union of the two sets that
4535 would otherwise be computed. */
4538 propagate_block (bb, live, local_set, cond_local_set, flags)
4542 regset cond_local_set;
4545 struct propagate_block_info *pbi;
4548 pbi = init_propagate_block_info (bb, live, local_set, cond_local_set, flags);
4550 if (flags & PROP_REG_INFO)
4554 /* Process the regs live at the end of the block.
4555 Mark them as not local to any one basic block. */
4556 EXECUTE_IF_SET_IN_REG_SET (live, 0, i,
4557 { REG_BASIC_BLOCK (i) = REG_BLOCK_GLOBAL; });
4560 /* Scan the block an insn at a time from end to beginning. */
4562 for (insn = bb->end;; insn = prev)
4564 /* If this is a call to `setjmp' et al, warn if any
4565 non-volatile datum is live. */
4566 if ((flags & PROP_REG_INFO)
4567 && GET_CODE (insn) == NOTE
4568 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_SETJMP)
4569 IOR_REG_SET (regs_live_at_setjmp, pbi->reg_live);
4571 prev = propagate_one_insn (pbi, insn);
4573 if (insn == bb->head)
4577 free_propagate_block_info (pbi);
4580 /* Return 1 if X (the body of an insn, or part of it) is just dead stores
4581 (SET expressions whose destinations are registers dead after the insn).
4582 NEEDED is the regset that says which regs are alive after the insn.
4584 Unless CALL_OK is non-zero, an insn is needed if it contains a CALL.
4586 If X is the entire body of an insn, NOTES contains the reg notes
4587 pertaining to the insn. */
4590 insn_dead_p (pbi, x, call_ok, notes)
4591 struct propagate_block_info *pbi;
4594 rtx notes ATTRIBUTE_UNUSED;
4596 enum rtx_code code = GET_CODE (x);
4599 /* If flow is invoked after reload, we must take existing AUTO_INC
4600 expresions into account. */
4601 if (reload_completed)
4603 for (; notes; notes = XEXP (notes, 1))
4605 if (REG_NOTE_KIND (notes) == REG_INC)
4607 int regno = REGNO (XEXP (notes, 0));
4609 /* Don't delete insns to set global regs. */
4610 if ((regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
4611 || REGNO_REG_SET_P (pbi->reg_live, regno))
4618 /* If setting something that's a reg or part of one,
4619 see if that register's altered value will be live. */
4623 rtx r = SET_DEST (x);
4626 if (GET_CODE (r) == CC0)
4627 return ! pbi->cc0_live;
4630 /* A SET that is a subroutine call cannot be dead. */
4631 if (GET_CODE (SET_SRC (x)) == CALL)
4637 /* Don't eliminate loads from volatile memory or volatile asms. */
4638 else if (volatile_refs_p (SET_SRC (x)))
4641 if (GET_CODE (r) == MEM)
4645 if (MEM_VOLATILE_P (r))
4648 /* Walk the set of memory locations we are currently tracking
4649 and see if one is an identical match to this memory location.
4650 If so, this memory write is dead (remember, we're walking
4651 backwards from the end of the block to the start). Since
4652 rtx_equal_p does not check the alias set or flags, we also
4653 must have the potential for them to conflict (anti_dependence). */
4654 for (temp = pbi->mem_set_list; temp != 0; temp = XEXP (temp, 1))
4655 if (anti_dependence (r, XEXP (temp, 0)))
4657 rtx mem = XEXP (temp, 0);
4659 if (rtx_equal_p (mem, r))
4662 /* Check if memory reference matches an auto increment. Only
4663 post increment/decrement or modify are valid. */
4664 if (GET_MODE (mem) == GET_MODE (r)
4665 && (GET_CODE (XEXP (mem, 0)) == POST_DEC
4666 || GET_CODE (XEXP (mem, 0)) == POST_INC
4667 || GET_CODE (XEXP (mem, 0)) == POST_MODIFY)
4668 && GET_MODE (XEXP (mem, 0)) == GET_MODE (r)
4669 && rtx_equal_p (XEXP (XEXP (mem, 0), 0), XEXP (r, 0)))
4676 while (GET_CODE (r) == SUBREG
4677 || GET_CODE (r) == STRICT_LOW_PART
4678 || GET_CODE (r) == ZERO_EXTRACT)
4681 if (GET_CODE (r) == REG)
4683 int regno = REGNO (r);
4686 if (REGNO_REG_SET_P (pbi->reg_live, regno))
4689 /* If this is a hard register, verify that subsequent
4690 words are not needed. */
4691 if (regno < FIRST_PSEUDO_REGISTER)
4693 int n = HARD_REGNO_NREGS (regno, GET_MODE (r));
4696 if (REGNO_REG_SET_P (pbi->reg_live, regno+n))
4700 /* Don't delete insns to set global regs. */
4701 if (regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
4704 /* Make sure insns to set the stack pointer aren't deleted. */
4705 if (regno == STACK_POINTER_REGNUM)
4708 /* ??? These bits might be redundant with the force live bits
4709 in calculate_global_regs_live. We would delete from
4710 sequential sets; whether this actually affects real code
4711 for anything but the stack pointer I don't know. */
4712 /* Make sure insns to set the frame pointer aren't deleted. */
4713 if (regno == FRAME_POINTER_REGNUM
4714 && (! reload_completed || frame_pointer_needed))
4716 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
4717 if (regno == HARD_FRAME_POINTER_REGNUM
4718 && (! reload_completed || frame_pointer_needed))
4722 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
4723 /* Make sure insns to set arg pointer are never deleted
4724 (if the arg pointer isn't fixed, there will be a USE
4725 for it, so we can treat it normally). */
4726 if (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
4730 /* Otherwise, the set is dead. */
4736 /* If performing several activities, insn is dead if each activity
4737 is individually dead. Also, CLOBBERs and USEs can be ignored; a
4738 CLOBBER or USE that's inside a PARALLEL doesn't make the insn
4740 else if (code == PARALLEL)
4742 int i = XVECLEN (x, 0);
4744 for (i--; i >= 0; i--)
4745 if (GET_CODE (XVECEXP (x, 0, i)) != CLOBBER
4746 && GET_CODE (XVECEXP (x, 0, i)) != USE
4747 && ! insn_dead_p (pbi, XVECEXP (x, 0, i), call_ok, NULL_RTX))
4753 /* A CLOBBER of a pseudo-register that is dead serves no purpose. That
4754 is not necessarily true for hard registers. */
4755 else if (code == CLOBBER && GET_CODE (XEXP (x, 0)) == REG
4756 && REGNO (XEXP (x, 0)) >= FIRST_PSEUDO_REGISTER
4757 && ! REGNO_REG_SET_P (pbi->reg_live, REGNO (XEXP (x, 0))))
4760 /* We do not check other CLOBBER or USE here. An insn consisting of just
4761 a CLOBBER or just a USE should not be deleted. */
4765 /* If INSN is the last insn in a libcall, and assuming INSN is dead,
4766 return 1 if the entire library call is dead.
4767 This is true if INSN copies a register (hard or pseudo)
4768 and if the hard return reg of the call insn is dead.
4769 (The caller should have tested the destination of the SET inside
4770 INSN already for death.)
4772 If this insn doesn't just copy a register, then we don't
4773 have an ordinary libcall. In that case, cse could not have
4774 managed to substitute the source for the dest later on,
4775 so we can assume the libcall is dead.
4777 PBI is the block info giving pseudoregs live before this insn.
4778 NOTE is the REG_RETVAL note of the insn. */
4781 libcall_dead_p (pbi, note, insn)
4782 struct propagate_block_info *pbi;
4786 rtx x = single_set (insn);
4790 register rtx r = SET_SRC (x);
4791 if (GET_CODE (r) == REG)
4793 rtx call = XEXP (note, 0);
4797 /* Find the call insn. */
4798 while (call != insn && GET_CODE (call) != CALL_INSN)
4799 call = NEXT_INSN (call);
4801 /* If there is none, do nothing special,
4802 since ordinary death handling can understand these insns. */
4806 /* See if the hard reg holding the value is dead.
4807 If this is a PARALLEL, find the call within it. */
4808 call_pat = PATTERN (call);
4809 if (GET_CODE (call_pat) == PARALLEL)
4811 for (i = XVECLEN (call_pat, 0) - 1; i >= 0; i--)
4812 if (GET_CODE (XVECEXP (call_pat, 0, i)) == SET
4813 && GET_CODE (SET_SRC (XVECEXP (call_pat, 0, i))) == CALL)
4816 /* This may be a library call that is returning a value
4817 via invisible pointer. Do nothing special, since
4818 ordinary death handling can understand these insns. */
4822 call_pat = XVECEXP (call_pat, 0, i);
4825 return insn_dead_p (pbi, call_pat, 1, REG_NOTES (call));
4831 /* Return 1 if register REGNO was used before it was set, i.e. if it is
4832 live at function entry. Don't count global register variables, variables
4833 in registers that can be used for function arg passing, or variables in
4834 fixed hard registers. */
4837 regno_uninitialized (regno)
4840 if (n_basic_blocks == 0
4841 || (regno < FIRST_PSEUDO_REGISTER
4842 && (global_regs[regno]
4843 || fixed_regs[regno]
4844 || FUNCTION_ARG_REGNO_P (regno))))
4847 return REGNO_REG_SET_P (BASIC_BLOCK (0)->global_live_at_start, regno);
4850 /* 1 if register REGNO was alive at a place where `setjmp' was called
4851 and was set more than once or is an argument.
4852 Such regs may be clobbered by `longjmp'. */
4855 regno_clobbered_at_setjmp (regno)
4858 if (n_basic_blocks == 0)
4861 return ((REG_N_SETS (regno) > 1
4862 || REGNO_REG_SET_P (BASIC_BLOCK (0)->global_live_at_start, regno))
4863 && REGNO_REG_SET_P (regs_live_at_setjmp, regno));
4866 /* INSN references memory, possibly using autoincrement addressing modes.
4867 Find any entries on the mem_set_list that need to be invalidated due
4868 to an address change. */
4871 invalidate_mems_from_autoinc (pbi, insn)
4872 struct propagate_block_info *pbi;
4875 rtx note = REG_NOTES (insn);
4876 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
4878 if (REG_NOTE_KIND (note) == REG_INC)
4880 rtx temp = pbi->mem_set_list;
4881 rtx prev = NULL_RTX;
4886 next = XEXP (temp, 1);
4887 if (reg_overlap_mentioned_p (XEXP (note, 0), XEXP (temp, 0)))
4889 /* Splice temp out of list. */
4891 XEXP (prev, 1) = next;
4893 pbi->mem_set_list = next;
4894 free_EXPR_LIST_node (temp);
4895 pbi->mem_set_list_len--;
4905 /* EXP is either a MEM or a REG. Remove any dependant entries
4906 from pbi->mem_set_list. */
4909 invalidate_mems_from_set (pbi, exp)
4910 struct propagate_block_info *pbi;
4913 rtx temp = pbi->mem_set_list;
4914 rtx prev = NULL_RTX;
4919 next = XEXP (temp, 1);
4920 if ((GET_CODE (exp) == MEM
4921 && output_dependence (XEXP (temp, 0), exp))
4922 || (GET_CODE (exp) == REG
4923 && reg_overlap_mentioned_p (exp, XEXP (temp, 0))))
4925 /* Splice this entry out of the list. */
4927 XEXP (prev, 1) = next;
4929 pbi->mem_set_list = next;
4930 free_EXPR_LIST_node (temp);
4931 pbi->mem_set_list_len--;
4939 /* Process the registers that are set within X. Their bits are set to
4940 1 in the regset DEAD, because they are dead prior to this insn.
4942 If INSN is nonzero, it is the insn being processed.
4944 FLAGS is the set of operations to perform. */
4947 mark_set_regs (pbi, x, insn)
4948 struct propagate_block_info *pbi;
4951 rtx cond = NULL_RTX;
4956 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
4958 if (REG_NOTE_KIND (link) == REG_INC)
4959 mark_set_1 (pbi, SET, XEXP (link, 0),
4960 (GET_CODE (x) == COND_EXEC
4961 ? COND_EXEC_TEST (x) : NULL_RTX),
4965 switch (code = GET_CODE (x))
4969 mark_set_1 (pbi, code, SET_DEST (x), cond, insn, pbi->flags);
4973 cond = COND_EXEC_TEST (x);
4974 x = COND_EXEC_CODE (x);
4980 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
4982 rtx sub = XVECEXP (x, 0, i);
4983 switch (code = GET_CODE (sub))
4986 if (cond != NULL_RTX)
4989 cond = COND_EXEC_TEST (sub);
4990 sub = COND_EXEC_CODE (sub);
4991 if (GET_CODE (sub) != SET && GET_CODE (sub) != CLOBBER)
4997 mark_set_1 (pbi, code, SET_DEST (sub), cond, insn, pbi->flags);
5012 /* Process a single set, which appears in INSN. REG (which may not
5013 actually be a REG, it may also be a SUBREG, PARALLEL, etc.) is
5014 being set using the CODE (which may be SET, CLOBBER, or COND_EXEC).
5015 If the set is conditional (because it appear in a COND_EXEC), COND
5016 will be the condition. */
5019 mark_set_1 (pbi, code, reg, cond, insn, flags)
5020 struct propagate_block_info *pbi;
5022 rtx reg, cond, insn;
5025 int regno_first = -1, regno_last = -1;
5026 unsigned long not_dead = 0;
5029 /* Modifying just one hardware register of a multi-reg value or just a
5030 byte field of a register does not mean the value from before this insn
5031 is now dead. Of course, if it was dead after it's unused now. */
5033 switch (GET_CODE (reg))
5036 /* Some targets place small structures in registers for return values of
5037 functions. We have to detect this case specially here to get correct
5038 flow information. */
5039 for (i = XVECLEN (reg, 0) - 1; i >= 0; i--)
5040 if (XEXP (XVECEXP (reg, 0, i), 0) != 0)
5041 mark_set_1 (pbi, code, XEXP (XVECEXP (reg, 0, i), 0), cond, insn,
5047 case STRICT_LOW_PART:
5048 /* ??? Assumes STRICT_LOW_PART not used on multi-word registers. */
5050 reg = XEXP (reg, 0);
5051 while (GET_CODE (reg) == SUBREG
5052 || GET_CODE (reg) == ZERO_EXTRACT
5053 || GET_CODE (reg) == SIGN_EXTRACT
5054 || GET_CODE (reg) == STRICT_LOW_PART);
5055 if (GET_CODE (reg) == MEM)
5057 not_dead = (unsigned long) REGNO_REG_SET_P (pbi->reg_live, REGNO (reg));
5061 regno_last = regno_first = REGNO (reg);
5062 if (regno_first < FIRST_PSEUDO_REGISTER)
5063 regno_last += HARD_REGNO_NREGS (regno_first, GET_MODE (reg)) - 1;
5067 if (GET_CODE (SUBREG_REG (reg)) == REG)
5069 enum machine_mode outer_mode = GET_MODE (reg);
5070 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (reg));
5072 /* Identify the range of registers affected. This is moderately
5073 tricky for hard registers. See alter_subreg. */
5075 regno_last = regno_first = REGNO (SUBREG_REG (reg));
5076 if (regno_first < FIRST_PSEUDO_REGISTER)
5078 regno_first += subreg_regno_offset (regno_first, inner_mode,
5081 regno_last = (regno_first
5082 + HARD_REGNO_NREGS (regno_first, outer_mode) - 1);
5084 /* Since we've just adjusted the register number ranges, make
5085 sure REG matches. Otherwise some_was_live will be clear
5086 when it shouldn't have been, and we'll create incorrect
5087 REG_UNUSED notes. */
5088 reg = gen_rtx_REG (outer_mode, regno_first);
5092 /* If the number of words in the subreg is less than the number
5093 of words in the full register, we have a well-defined partial
5094 set. Otherwise the high bits are undefined.
5096 This is only really applicable to pseudos, since we just took
5097 care of multi-word hard registers. */
5098 if (((GET_MODE_SIZE (outer_mode)
5099 + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
5100 < ((GET_MODE_SIZE (inner_mode)
5101 + UNITS_PER_WORD - 1) / UNITS_PER_WORD))
5102 not_dead = (unsigned long) REGNO_REG_SET_P (pbi->reg_live,
5105 reg = SUBREG_REG (reg);
5109 reg = SUBREG_REG (reg);
5116 /* If this set is a MEM, then it kills any aliased writes.
5117 If this set is a REG, then it kills any MEMs which use the reg. */
5118 if (optimize && (flags & PROP_SCAN_DEAD_CODE))
5120 if (GET_CODE (reg) == MEM || GET_CODE (reg) == REG)
5121 invalidate_mems_from_set (pbi, reg);
5123 /* If the memory reference had embedded side effects (autoincrement
5124 address modes. Then we may need to kill some entries on the
5126 if (insn && GET_CODE (reg) == MEM)
5127 invalidate_mems_from_autoinc (pbi, insn);
5129 if (pbi->mem_set_list_len < MAX_MEM_SET_LIST_LEN
5130 && GET_CODE (reg) == MEM && ! side_effects_p (reg)
5131 /* ??? With more effort we could track conditional memory life. */
5133 /* We do not know the size of a BLKmode store, so we do not track
5134 them for redundant store elimination. */
5135 && GET_MODE (reg) != BLKmode
5136 /* There are no REG_INC notes for SP, so we can't assume we'll see
5137 everything that invalidates it. To be safe, don't eliminate any
5138 stores though SP; none of them should be redundant anyway. */
5139 && ! reg_mentioned_p (stack_pointer_rtx, reg))
5142 /* Store a copy of mem, otherwise the address may be
5143 scrogged by find_auto_inc. */
5144 if (flags & PROP_AUTOINC)
5145 reg = shallow_copy_rtx (reg);
5147 pbi->mem_set_list = alloc_EXPR_LIST (0, reg, pbi->mem_set_list);
5148 pbi->mem_set_list_len++;
5152 if (GET_CODE (reg) == REG
5153 && ! (regno_first == FRAME_POINTER_REGNUM
5154 && (! reload_completed || frame_pointer_needed))
5155 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
5156 && ! (regno_first == HARD_FRAME_POINTER_REGNUM
5157 && (! reload_completed || frame_pointer_needed))
5159 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
5160 && ! (regno_first == ARG_POINTER_REGNUM && fixed_regs[regno_first])
5164 int some_was_live = 0, some_was_dead = 0;
5166 for (i = regno_first; i <= regno_last; ++i)
5168 int needed_regno = REGNO_REG_SET_P (pbi->reg_live, i);
5171 /* Order of the set operation matters here since both
5172 sets may be the same. */
5173 CLEAR_REGNO_REG_SET (pbi->cond_local_set, i);
5174 if (cond != NULL_RTX
5175 && ! REGNO_REG_SET_P (pbi->local_set, i))
5176 SET_REGNO_REG_SET (pbi->cond_local_set, i);
5178 SET_REGNO_REG_SET (pbi->local_set, i);
5180 if (code != CLOBBER)
5181 SET_REGNO_REG_SET (pbi->new_set, i);
5183 some_was_live |= needed_regno;
5184 some_was_dead |= ! needed_regno;
5187 #ifdef HAVE_conditional_execution
5188 /* Consider conditional death in deciding that the register needs
5190 if (some_was_live && ! not_dead
5191 /* The stack pointer is never dead. Well, not strictly true,
5192 but it's very difficult to tell from here. Hopefully
5193 combine_stack_adjustments will fix up the most egregious
5195 && regno_first != STACK_POINTER_REGNUM)
5197 for (i = regno_first; i <= regno_last; ++i)
5198 if (! mark_regno_cond_dead (pbi, i, cond))
5199 not_dead |= ((unsigned long) 1) << (i - regno_first);
5203 /* Additional data to record if this is the final pass. */
5204 if (flags & (PROP_LOG_LINKS | PROP_REG_INFO
5205 | PROP_DEATH_NOTES | PROP_AUTOINC))
5208 register int blocknum = pbi->bb->index;
5211 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
5213 y = pbi->reg_next_use[regno_first];
5215 /* The next use is no longer next, since a store intervenes. */
5216 for (i = regno_first; i <= regno_last; ++i)
5217 pbi->reg_next_use[i] = 0;
5220 if (flags & PROP_REG_INFO)
5222 for (i = regno_first; i <= regno_last; ++i)
5224 /* Count (weighted) references, stores, etc. This counts a
5225 register twice if it is modified, but that is correct. */
5226 REG_N_SETS (i) += 1;
5227 REG_N_REFS (i) += 1;
5228 REG_FREQ (i) += (optimize_size || !pbi->bb->frequency
5229 ? 1 : pbi->bb->frequency);
5231 /* The insns where a reg is live are normally counted
5232 elsewhere, but we want the count to include the insn
5233 where the reg is set, and the normal counting mechanism
5234 would not count it. */
5235 REG_LIVE_LENGTH (i) += 1;
5238 /* If this is a hard reg, record this function uses the reg. */
5239 if (regno_first < FIRST_PSEUDO_REGISTER)
5241 for (i = regno_first; i <= regno_last; i++)
5242 regs_ever_live[i] = 1;
5246 /* Keep track of which basic blocks each reg appears in. */
5247 if (REG_BASIC_BLOCK (regno_first) == REG_BLOCK_UNKNOWN)
5248 REG_BASIC_BLOCK (regno_first) = blocknum;
5249 else if (REG_BASIC_BLOCK (regno_first) != blocknum)
5250 REG_BASIC_BLOCK (regno_first) = REG_BLOCK_GLOBAL;
5254 if (! some_was_dead)
5256 if (flags & PROP_LOG_LINKS)
5258 /* Make a logical link from the next following insn
5259 that uses this register, back to this insn.
5260 The following insns have already been processed.
5262 We don't build a LOG_LINK for hard registers containing
5263 in ASM_OPERANDs. If these registers get replaced,
5264 we might wind up changing the semantics of the insn,
5265 even if reload can make what appear to be valid
5266 assignments later. */
5267 if (y && (BLOCK_NUM (y) == blocknum)
5268 && (regno_first >= FIRST_PSEUDO_REGISTER
5269 || asm_noperands (PATTERN (y)) < 0))
5270 LOG_LINKS (y) = alloc_INSN_LIST (insn, LOG_LINKS (y));
5275 else if (! some_was_live)
5277 if (flags & PROP_REG_INFO)
5278 REG_N_DEATHS (regno_first) += 1;
5280 if (flags & PROP_DEATH_NOTES)
5282 /* Note that dead stores have already been deleted
5283 when possible. If we get here, we have found a
5284 dead store that cannot be eliminated (because the
5285 same insn does something useful). Indicate this
5286 by marking the reg being set as dying here. */
5288 = alloc_EXPR_LIST (REG_UNUSED, reg, REG_NOTES (insn));
5293 if (flags & PROP_DEATH_NOTES)
5295 /* This is a case where we have a multi-word hard register
5296 and some, but not all, of the words of the register are
5297 needed in subsequent insns. Write REG_UNUSED notes
5298 for those parts that were not needed. This case should
5301 for (i = regno_first; i <= regno_last; ++i)
5302 if (! REGNO_REG_SET_P (pbi->reg_live, i))
5304 = alloc_EXPR_LIST (REG_UNUSED,
5305 gen_rtx_REG (reg_raw_mode[i], i),
5311 /* Mark the register as being dead. */
5313 /* The stack pointer is never dead. Well, not strictly true,
5314 but it's very difficult to tell from here. Hopefully
5315 combine_stack_adjustments will fix up the most egregious
5317 && regno_first != STACK_POINTER_REGNUM)
5319 for (i = regno_first; i <= regno_last; ++i)
5320 if (!(not_dead & (((unsigned long) 1) << (i - regno_first))))
5321 CLEAR_REGNO_REG_SET (pbi->reg_live, i);
5324 else if (GET_CODE (reg) == REG)
5326 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
5327 pbi->reg_next_use[regno_first] = 0;
5330 /* If this is the last pass and this is a SCRATCH, show it will be dying
5331 here and count it. */
5332 else if (GET_CODE (reg) == SCRATCH)
5334 if (flags & PROP_DEATH_NOTES)
5336 = alloc_EXPR_LIST (REG_UNUSED, reg, REG_NOTES (insn));
5340 #ifdef HAVE_conditional_execution
5341 /* Mark REGNO conditionally dead.
5342 Return true if the register is now unconditionally dead. */
5345 mark_regno_cond_dead (pbi, regno, cond)
5346 struct propagate_block_info *pbi;
5350 /* If this is a store to a predicate register, the value of the
5351 predicate is changing, we don't know that the predicate as seen
5352 before is the same as that seen after. Flush all dependent
5353 conditions from reg_cond_dead. This will make all such
5354 conditionally live registers unconditionally live. */
5355 if (REGNO_REG_SET_P (pbi->reg_cond_reg, regno))
5356 flush_reg_cond_reg (pbi, regno);
5358 /* If this is an unconditional store, remove any conditional
5359 life that may have existed. */
5360 if (cond == NULL_RTX)
5361 splay_tree_remove (pbi->reg_cond_dead, regno);
5364 splay_tree_node node;
5365 struct reg_cond_life_info *rcli;
5368 /* Otherwise this is a conditional set. Record that fact.
5369 It may have been conditionally used, or there may be a
5370 subsequent set with a complimentary condition. */
5372 node = splay_tree_lookup (pbi->reg_cond_dead, regno);
5375 /* The register was unconditionally live previously.
5376 Record the current condition as the condition under
5377 which it is dead. */
5378 rcli = (struct reg_cond_life_info *) xmalloc (sizeof (*rcli));
5379 rcli->condition = cond;
5380 rcli->stores = cond;
5381 rcli->orig_condition = const0_rtx;
5382 splay_tree_insert (pbi->reg_cond_dead, regno,
5383 (splay_tree_value) rcli);
5385 SET_REGNO_REG_SET (pbi->reg_cond_reg, REGNO (XEXP (cond, 0)));
5387 /* Not unconditionaly dead. */
5392 /* The register was conditionally live previously.
5393 Add the new condition to the old. */
5394 rcli = (struct reg_cond_life_info *) node->value;
5395 ncond = rcli->condition;
5396 ncond = ior_reg_cond (ncond, cond, 1);
5397 if (rcli->stores == const0_rtx)
5398 rcli->stores = cond;
5399 else if (rcli->stores != const1_rtx)
5400 rcli->stores = ior_reg_cond (rcli->stores, cond, 1);
5402 /* If the register is now unconditionally dead, remove the entry
5403 in the splay_tree. A register is unconditionally dead if the
5404 dead condition ncond is true. A register is also unconditionally
5405 dead if the sum of all conditional stores is an unconditional
5406 store (stores is true), and the dead condition is identically the
5407 same as the original dead condition initialized at the end of
5408 the block. This is a pointer compare, not an rtx_equal_p
5410 if (ncond == const1_rtx
5411 || (ncond == rcli->orig_condition && rcli->stores == const1_rtx))
5412 splay_tree_remove (pbi->reg_cond_dead, regno);
5415 rcli->condition = ncond;
5417 SET_REGNO_REG_SET (pbi->reg_cond_reg, REGNO (XEXP (cond, 0)));
5419 /* Not unconditionaly dead. */
5428 /* Called from splay_tree_delete for pbi->reg_cond_life. */
5431 free_reg_cond_life_info (value)
5432 splay_tree_value value;
5434 struct reg_cond_life_info *rcli = (struct reg_cond_life_info *) value;
5438 /* Helper function for flush_reg_cond_reg. */
5441 flush_reg_cond_reg_1 (node, data)
5442 splay_tree_node node;
5445 struct reg_cond_life_info *rcli;
5446 int *xdata = (int *) data;
5447 unsigned int regno = xdata[0];
5449 /* Don't need to search if last flushed value was farther on in
5450 the in-order traversal. */
5451 if (xdata[1] >= (int) node->key)
5454 /* Splice out portions of the expression that refer to regno. */
5455 rcli = (struct reg_cond_life_info *) node->value;
5456 rcli->condition = elim_reg_cond (rcli->condition, regno);
5457 if (rcli->stores != const0_rtx && rcli->stores != const1_rtx)
5458 rcli->stores = elim_reg_cond (rcli->stores, regno);
5460 /* If the entire condition is now false, signal the node to be removed. */
5461 if (rcli->condition == const0_rtx)
5463 xdata[1] = node->key;
5466 else if (rcli->condition == const1_rtx)
5472 /* Flush all (sub) expressions referring to REGNO from REG_COND_LIVE. */
5475 flush_reg_cond_reg (pbi, regno)
5476 struct propagate_block_info *pbi;
5483 while (splay_tree_foreach (pbi->reg_cond_dead,
5484 flush_reg_cond_reg_1, pair) == -1)
5485 splay_tree_remove (pbi->reg_cond_dead, pair[1]);
5487 CLEAR_REGNO_REG_SET (pbi->reg_cond_reg, regno);
5490 /* Logical arithmetic on predicate conditions. IOR, NOT and AND.
5491 For ior/and, the ADD flag determines whether we want to add the new
5492 condition X to the old one unconditionally. If it is zero, we will
5493 only return a new expression if X allows us to simplify part of
5494 OLD, otherwise we return OLD unchanged to the caller.
5495 If ADD is nonzero, we will return a new condition in all cases. The
5496 toplevel caller of one of these functions should always pass 1 for
5500 ior_reg_cond (old, x, add)
5506 if (GET_RTX_CLASS (GET_CODE (old)) == '<')
5508 if (GET_RTX_CLASS (GET_CODE (x)) == '<'
5509 && REVERSE_CONDEXEC_PREDICATES_P (GET_CODE (x), GET_CODE (old))
5510 && REGNO (XEXP (x, 0)) == REGNO (XEXP (old, 0)))
5512 if (GET_CODE (x) == GET_CODE (old)
5513 && REGNO (XEXP (x, 0)) == REGNO (XEXP (old, 0)))
5517 return gen_rtx_IOR (0, old, x);
5520 switch (GET_CODE (old))
5523 op0 = ior_reg_cond (XEXP (old, 0), x, 0);
5524 op1 = ior_reg_cond (XEXP (old, 1), x, 0);
5525 if (op0 != XEXP (old, 0) || op1 != XEXP (old, 1))
5527 if (op0 == const0_rtx)
5529 if (op1 == const0_rtx)
5531 if (op0 == const1_rtx || op1 == const1_rtx)
5533 if (op0 == XEXP (old, 0))
5534 op0 = gen_rtx_IOR (0, op0, x);
5536 op1 = gen_rtx_IOR (0, op1, x);
5537 return gen_rtx_IOR (0, op0, op1);
5541 return gen_rtx_IOR (0, old, x);
5544 op0 = ior_reg_cond (XEXP (old, 0), x, 0);
5545 op1 = ior_reg_cond (XEXP (old, 1), x, 0);
5546 if (op0 != XEXP (old, 0) || op1 != XEXP (old, 1))
5548 if (op0 == const1_rtx)
5550 if (op1 == const1_rtx)
5552 if (op0 == const0_rtx || op1 == const0_rtx)
5554 if (op0 == XEXP (old, 0))
5555 op0 = gen_rtx_IOR (0, op0, x);
5557 op1 = gen_rtx_IOR (0, op1, x);
5558 return gen_rtx_AND (0, op0, op1);
5562 return gen_rtx_IOR (0, old, x);
5565 op0 = and_reg_cond (XEXP (old, 0), not_reg_cond (x), 0);
5566 if (op0 != XEXP (old, 0))
5567 return not_reg_cond (op0);
5570 return gen_rtx_IOR (0, old, x);
5581 enum rtx_code x_code;
5583 if (x == const0_rtx)
5585 else if (x == const1_rtx)
5587 x_code = GET_CODE (x);
5590 if (GET_RTX_CLASS (x_code) == '<'
5591 && GET_CODE (XEXP (x, 0)) == REG)
5593 if (XEXP (x, 1) != const0_rtx)
5596 return gen_rtx_fmt_ee (reverse_condition (x_code),
5597 VOIDmode, XEXP (x, 0), const0_rtx);
5599 return gen_rtx_NOT (0, x);
5603 and_reg_cond (old, x, add)
5609 if (GET_RTX_CLASS (GET_CODE (old)) == '<')
5611 if (GET_RTX_CLASS (GET_CODE (x)) == '<'
5612 && GET_CODE (x) == reverse_condition (GET_CODE (old))
5613 && REGNO (XEXP (x, 0)) == REGNO (XEXP (old, 0)))
5615 if (GET_CODE (x) == GET_CODE (old)
5616 && REGNO (XEXP (x, 0)) == REGNO (XEXP (old, 0)))
5620 return gen_rtx_AND (0, old, x);
5623 switch (GET_CODE (old))
5626 op0 = and_reg_cond (XEXP (old, 0), x, 0);
5627 op1 = and_reg_cond (XEXP (old, 1), x, 0);
5628 if (op0 != XEXP (old, 0) || op1 != XEXP (old, 1))
5630 if (op0 == const0_rtx)
5632 if (op1 == const0_rtx)
5634 if (op0 == const1_rtx || op1 == const1_rtx)
5636 if (op0 == XEXP (old, 0))
5637 op0 = gen_rtx_AND (0, op0, x);
5639 op1 = gen_rtx_AND (0, op1, x);
5640 return gen_rtx_IOR (0, op0, op1);
5644 return gen_rtx_AND (0, old, x);
5647 op0 = and_reg_cond (XEXP (old, 0), x, 0);
5648 op1 = and_reg_cond (XEXP (old, 1), x, 0);
5649 if (op0 != XEXP (old, 0) || op1 != XEXP (old, 1))
5651 if (op0 == const1_rtx)
5653 if (op1 == const1_rtx)
5655 if (op0 == const0_rtx || op1 == const0_rtx)
5657 if (op0 == XEXP (old, 0))
5658 op0 = gen_rtx_AND (0, op0, x);
5660 op1 = gen_rtx_AND (0, op1, x);
5661 return gen_rtx_AND (0, op0, op1);
5666 /* If X is identical to one of the existing terms of the AND,
5667 then just return what we already have. */
5668 /* ??? There really should be some sort of recursive check here in
5669 case there are nested ANDs. */
5670 if ((GET_CODE (XEXP (old, 0)) == GET_CODE (x)
5671 && REGNO (XEXP (XEXP (old, 0), 0)) == REGNO (XEXP (x, 0)))
5672 || (GET_CODE (XEXP (old, 1)) == GET_CODE (x)
5673 && REGNO (XEXP (XEXP (old, 1), 0)) == REGNO (XEXP (x, 0))))
5676 return gen_rtx_AND (0, old, x);
5679 op0 = ior_reg_cond (XEXP (old, 0), not_reg_cond (x), 0);
5680 if (op0 != XEXP (old, 0))
5681 return not_reg_cond (op0);
5684 return gen_rtx_AND (0, old, x);
5691 /* Given a condition X, remove references to reg REGNO and return the
5692 new condition. The removal will be done so that all conditions
5693 involving REGNO are considered to evaluate to false. This function
5694 is used when the value of REGNO changes. */
5697 elim_reg_cond (x, regno)
5703 if (GET_RTX_CLASS (GET_CODE (x)) == '<')
5705 if (REGNO (XEXP (x, 0)) == regno)
5710 switch (GET_CODE (x))
5713 op0 = elim_reg_cond (XEXP (x, 0), regno);
5714 op1 = elim_reg_cond (XEXP (x, 1), regno);
5715 if (op0 == const0_rtx || op1 == const0_rtx)
5717 if (op0 == const1_rtx)
5719 if (op1 == const1_rtx)
5721 if (op0 == XEXP (x, 0) && op1 == XEXP (x, 1))
5723 return gen_rtx_AND (0, op0, op1);
5726 op0 = elim_reg_cond (XEXP (x, 0), regno);
5727 op1 = elim_reg_cond (XEXP (x, 1), regno);
5728 if (op0 == const1_rtx || op1 == const1_rtx)
5730 if (op0 == const0_rtx)
5732 if (op1 == const0_rtx)
5734 if (op0 == XEXP (x, 0) && op1 == XEXP (x, 1))
5736 return gen_rtx_IOR (0, op0, op1);
5739 op0 = elim_reg_cond (XEXP (x, 0), regno);
5740 if (op0 == const0_rtx)
5742 if (op0 == const1_rtx)
5744 if (op0 != XEXP (x, 0))
5745 return not_reg_cond (op0);
5752 #endif /* HAVE_conditional_execution */
5756 /* Try to substitute the auto-inc expression INC as the address inside
5757 MEM which occurs in INSN. Currently, the address of MEM is an expression
5758 involving INCR_REG, and INCR is the next use of INCR_REG; it is an insn
5759 that has a single set whose source is a PLUS of INCR_REG and something
5763 attempt_auto_inc (pbi, inc, insn, mem, incr, incr_reg)
5764 struct propagate_block_info *pbi;
5765 rtx inc, insn, mem, incr, incr_reg;
5767 int regno = REGNO (incr_reg);
5768 rtx set = single_set (incr);
5769 rtx q = SET_DEST (set);
5770 rtx y = SET_SRC (set);
5771 int opnum = XEXP (y, 0) == incr_reg ? 0 : 1;
5773 /* Make sure this reg appears only once in this insn. */
5774 if (count_occurrences (PATTERN (insn), incr_reg, 1) != 1)
5777 if (dead_or_set_p (incr, incr_reg)
5778 /* Mustn't autoinc an eliminable register. */
5779 && (regno >= FIRST_PSEUDO_REGISTER
5780 || ! TEST_HARD_REG_BIT (elim_reg_set, regno)))
5782 /* This is the simple case. Try to make the auto-inc. If
5783 we can't, we are done. Otherwise, we will do any
5784 needed updates below. */
5785 if (! validate_change (insn, &XEXP (mem, 0), inc, 0))
5788 else if (GET_CODE (q) == REG
5789 /* PREV_INSN used here to check the semi-open interval
5791 && ! reg_used_between_p (q, PREV_INSN (insn), incr)
5792 /* We must also check for sets of q as q may be
5793 a call clobbered hard register and there may
5794 be a call between PREV_INSN (insn) and incr. */
5795 && ! reg_set_between_p (q, PREV_INSN (insn), incr))
5797 /* We have *p followed sometime later by q = p+size.
5798 Both p and q must be live afterward,
5799 and q is not used between INSN and its assignment.
5800 Change it to q = p, ...*q..., q = q+size.
5801 Then fall into the usual case. */
5805 emit_move_insn (q, incr_reg);
5806 insns = get_insns ();
5809 if (basic_block_for_insn)
5810 for (temp = insns; temp; temp = NEXT_INSN (temp))
5811 set_block_for_insn (temp, pbi->bb);
5813 /* If we can't make the auto-inc, or can't make the
5814 replacement into Y, exit. There's no point in making
5815 the change below if we can't do the auto-inc and doing
5816 so is not correct in the pre-inc case. */
5819 validate_change (insn, &XEXP (mem, 0), inc, 1);
5820 validate_change (incr, &XEXP (y, opnum), q, 1);
5821 if (! apply_change_group ())
5824 /* We now know we'll be doing this change, so emit the
5825 new insn(s) and do the updates. */
5826 emit_insns_before (insns, insn);
5828 if (pbi->bb->head == insn)
5829 pbi->bb->head = insns;
5831 /* INCR will become a NOTE and INSN won't contain a
5832 use of INCR_REG. If a use of INCR_REG was just placed in
5833 the insn before INSN, make that the next use.
5834 Otherwise, invalidate it. */
5835 if (GET_CODE (PREV_INSN (insn)) == INSN
5836 && GET_CODE (PATTERN (PREV_INSN (insn))) == SET
5837 && SET_SRC (PATTERN (PREV_INSN (insn))) == incr_reg)
5838 pbi->reg_next_use[regno] = PREV_INSN (insn);
5840 pbi->reg_next_use[regno] = 0;
5845 /* REGNO is now used in INCR which is below INSN, but
5846 it previously wasn't live here. If we don't mark
5847 it as live, we'll put a REG_DEAD note for it
5848 on this insn, which is incorrect. */
5849 SET_REGNO_REG_SET (pbi->reg_live, regno);
5851 /* If there are any calls between INSN and INCR, show
5852 that REGNO now crosses them. */
5853 for (temp = insn; temp != incr; temp = NEXT_INSN (temp))
5854 if (GET_CODE (temp) == CALL_INSN)
5855 REG_N_CALLS_CROSSED (regno)++;
5860 /* If we haven't returned, it means we were able to make the
5861 auto-inc, so update the status. First, record that this insn
5862 has an implicit side effect. */
5864 REG_NOTES (insn) = alloc_EXPR_LIST (REG_INC, incr_reg, REG_NOTES (insn));
5866 /* Modify the old increment-insn to simply copy
5867 the already-incremented value of our register. */
5868 if (! validate_change (incr, &SET_SRC (set), incr_reg, 0))
5871 /* If that makes it a no-op (copying the register into itself) delete
5872 it so it won't appear to be a "use" and a "set" of this
5874 if (REGNO (SET_DEST (set)) == REGNO (incr_reg))
5876 /* If the original source was dead, it's dead now. */
5879 while ((note = find_reg_note (incr, REG_DEAD, NULL_RTX)) != NULL_RTX)
5881 remove_note (incr, note);
5882 if (XEXP (note, 0) != incr_reg)
5883 CLEAR_REGNO_REG_SET (pbi->reg_live, REGNO (XEXP (note, 0)));
5886 PUT_CODE (incr, NOTE);
5887 NOTE_LINE_NUMBER (incr) = NOTE_INSN_DELETED;
5888 NOTE_SOURCE_FILE (incr) = 0;
5891 if (regno >= FIRST_PSEUDO_REGISTER)
5893 /* Count an extra reference to the reg. When a reg is
5894 incremented, spilling it is worse, so we want to make
5895 that less likely. */
5896 REG_FREQ (regno) += (optimize_size || !pbi->bb->frequency
5897 ? 1 : pbi->bb->frequency);
5899 /* Count the increment as a setting of the register,
5900 even though it isn't a SET in rtl. */
5901 REG_N_SETS (regno)++;
5905 /* X is a MEM found in INSN. See if we can convert it into an auto-increment
5909 find_auto_inc (pbi, x, insn)
5910 struct propagate_block_info *pbi;
5914 rtx addr = XEXP (x, 0);
5915 HOST_WIDE_INT offset = 0;
5916 rtx set, y, incr, inc_val;
5918 int size = GET_MODE_SIZE (GET_MODE (x));
5920 if (GET_CODE (insn) == JUMP_INSN)
5923 /* Here we detect use of an index register which might be good for
5924 postincrement, postdecrement, preincrement, or predecrement. */
5926 if (GET_CODE (addr) == PLUS && GET_CODE (XEXP (addr, 1)) == CONST_INT)
5927 offset = INTVAL (XEXP (addr, 1)), addr = XEXP (addr, 0);
5929 if (GET_CODE (addr) != REG)
5932 regno = REGNO (addr);
5934 /* Is the next use an increment that might make auto-increment? */
5935 incr = pbi->reg_next_use[regno];
5936 if (incr == 0 || BLOCK_NUM (incr) != BLOCK_NUM (insn))
5938 set = single_set (incr);
5939 if (set == 0 || GET_CODE (set) != SET)
5943 if (GET_CODE (y) != PLUS)
5946 if (REG_P (XEXP (y, 0)) && REGNO (XEXP (y, 0)) == REGNO (addr))
5947 inc_val = XEXP (y, 1);
5948 else if (REG_P (XEXP (y, 1)) && REGNO (XEXP (y, 1)) == REGNO (addr))
5949 inc_val = XEXP (y, 0);
5953 if (GET_CODE (inc_val) == CONST_INT)
5955 if (HAVE_POST_INCREMENT
5956 && (INTVAL (inc_val) == size && offset == 0))
5957 attempt_auto_inc (pbi, gen_rtx_POST_INC (Pmode, addr), insn, x,
5959 else if (HAVE_POST_DECREMENT
5960 && (INTVAL (inc_val) == -size && offset == 0))
5961 attempt_auto_inc (pbi, gen_rtx_POST_DEC (Pmode, addr), insn, x,
5963 else if (HAVE_PRE_INCREMENT
5964 && (INTVAL (inc_val) == size && offset == size))
5965 attempt_auto_inc (pbi, gen_rtx_PRE_INC (Pmode, addr), insn, x,
5967 else if (HAVE_PRE_DECREMENT
5968 && (INTVAL (inc_val) == -size && offset == -size))
5969 attempt_auto_inc (pbi, gen_rtx_PRE_DEC (Pmode, addr), insn, x,
5971 else if (HAVE_POST_MODIFY_DISP && offset == 0)
5972 attempt_auto_inc (pbi, gen_rtx_POST_MODIFY (Pmode, addr,
5973 gen_rtx_PLUS (Pmode,
5976 insn, x, incr, addr);
5978 else if (GET_CODE (inc_val) == REG
5979 && ! reg_set_between_p (inc_val, PREV_INSN (insn),
5983 if (HAVE_POST_MODIFY_REG && offset == 0)
5984 attempt_auto_inc (pbi, gen_rtx_POST_MODIFY (Pmode, addr,
5985 gen_rtx_PLUS (Pmode,
5988 insn, x, incr, addr);
5992 #endif /* AUTO_INC_DEC */
5995 mark_used_reg (pbi, reg, cond, insn)
5996 struct propagate_block_info *pbi;
5998 rtx cond ATTRIBUTE_UNUSED;
6001 unsigned int regno_first, regno_last, i;
6002 int some_was_live, some_was_dead, some_not_set;
6004 regno_last = regno_first = REGNO (reg);
6005 if (regno_first < FIRST_PSEUDO_REGISTER)
6006 regno_last += HARD_REGNO_NREGS (regno_first, GET_MODE (reg)) - 1;
6008 /* Find out if any of this register is live after this instruction. */
6009 some_was_live = some_was_dead = 0;
6010 for (i = regno_first; i <= regno_last; ++i)
6012 int needed_regno = REGNO_REG_SET_P (pbi->reg_live, i);
6013 some_was_live |= needed_regno;
6014 some_was_dead |= ! needed_regno;
6017 /* Find out if any of the register was set this insn. */
6019 for (i = regno_first; i <= regno_last; ++i)
6020 some_not_set |= ! REGNO_REG_SET_P (pbi->new_set, i);
6022 if (pbi->flags & (PROP_LOG_LINKS | PROP_AUTOINC))
6024 /* Record where each reg is used, so when the reg is set we know
6025 the next insn that uses it. */
6026 pbi->reg_next_use[regno_first] = insn;
6029 if (pbi->flags & PROP_REG_INFO)
6031 if (regno_first < FIRST_PSEUDO_REGISTER)
6033 /* If this is a register we are going to try to eliminate,
6034 don't mark it live here. If we are successful in
6035 eliminating it, it need not be live unless it is used for
6036 pseudos, in which case it will have been set live when it
6037 was allocated to the pseudos. If the register will not
6038 be eliminated, reload will set it live at that point.
6040 Otherwise, record that this function uses this register. */
6041 /* ??? The PPC backend tries to "eliminate" on the pic
6042 register to itself. This should be fixed. In the mean
6043 time, hack around it. */
6045 if (! (TEST_HARD_REG_BIT (elim_reg_set, regno_first)
6046 && (regno_first == FRAME_POINTER_REGNUM
6047 || regno_first == ARG_POINTER_REGNUM)))
6048 for (i = regno_first; i <= regno_last; ++i)
6049 regs_ever_live[i] = 1;
6053 /* Keep track of which basic block each reg appears in. */
6055 register int blocknum = pbi->bb->index;
6056 if (REG_BASIC_BLOCK (regno_first) == REG_BLOCK_UNKNOWN)
6057 REG_BASIC_BLOCK (regno_first) = blocknum;
6058 else if (REG_BASIC_BLOCK (regno_first) != blocknum)
6059 REG_BASIC_BLOCK (regno_first) = REG_BLOCK_GLOBAL;
6061 /* Count (weighted) number of uses of each reg. */
6062 REG_FREQ (regno_first)
6063 += (optimize_size || !pbi->bb->frequency ? 1 : pbi->bb->frequency);
6064 REG_N_REFS (regno_first)++;
6068 /* Record and count the insns in which a reg dies. If it is used in
6069 this insn and was dead below the insn then it dies in this insn.
6070 If it was set in this insn, we do not make a REG_DEAD note;
6071 likewise if we already made such a note. */
6072 if ((pbi->flags & (PROP_DEATH_NOTES | PROP_REG_INFO))
6076 /* Check for the case where the register dying partially
6077 overlaps the register set by this insn. */
6078 if (regno_first != regno_last)
6079 for (i = regno_first; i <= regno_last; ++i)
6080 some_was_live |= REGNO_REG_SET_P (pbi->new_set, i);
6082 /* If none of the words in X is needed, make a REG_DEAD note.
6083 Otherwise, we must make partial REG_DEAD notes. */
6084 if (! some_was_live)
6086 if ((pbi->flags & PROP_DEATH_NOTES)
6087 && ! find_regno_note (insn, REG_DEAD, regno_first))
6089 = alloc_EXPR_LIST (REG_DEAD, reg, REG_NOTES (insn));
6091 if (pbi->flags & PROP_REG_INFO)
6092 REG_N_DEATHS (regno_first)++;
6096 /* Don't make a REG_DEAD note for a part of a register
6097 that is set in the insn. */
6098 for (i = regno_first; i <= regno_last; ++i)
6099 if (! REGNO_REG_SET_P (pbi->reg_live, i)
6100 && ! dead_or_set_regno_p (insn, i))
6102 = alloc_EXPR_LIST (REG_DEAD,
6103 gen_rtx_REG (reg_raw_mode[i], i),
6108 /* Mark the register as being live. */
6109 for (i = regno_first; i <= regno_last; ++i)
6111 SET_REGNO_REG_SET (pbi->reg_live, i);
6113 #ifdef HAVE_conditional_execution
6114 /* If this is a conditional use, record that fact. If it is later
6115 conditionally set, we'll know to kill the register. */
6116 if (cond != NULL_RTX)
6118 splay_tree_node node;
6119 struct reg_cond_life_info *rcli;
6124 node = splay_tree_lookup (pbi->reg_cond_dead, i);
6127 /* The register was unconditionally live previously.
6128 No need to do anything. */
6132 /* The register was conditionally live previously.
6133 Subtract the new life cond from the old death cond. */
6134 rcli = (struct reg_cond_life_info *) node->value;
6135 ncond = rcli->condition;
6136 ncond = and_reg_cond (ncond, not_reg_cond (cond), 1);
6138 /* If the register is now unconditionally live,
6139 remove the entry in the splay_tree. */
6140 if (ncond == const0_rtx)
6141 splay_tree_remove (pbi->reg_cond_dead, i);
6144 rcli->condition = ncond;
6145 SET_REGNO_REG_SET (pbi->reg_cond_reg,
6146 REGNO (XEXP (cond, 0)));
6152 /* The register was not previously live at all. Record
6153 the condition under which it is still dead. */
6154 rcli = (struct reg_cond_life_info *) xmalloc (sizeof (*rcli));
6155 rcli->condition = not_reg_cond (cond);
6156 rcli->stores = const0_rtx;
6157 rcli->orig_condition = const0_rtx;
6158 splay_tree_insert (pbi->reg_cond_dead, i,
6159 (splay_tree_value) rcli);
6161 SET_REGNO_REG_SET (pbi->reg_cond_reg, REGNO (XEXP (cond, 0)));
6164 else if (some_was_live)
6166 /* The register may have been conditionally live previously, but
6167 is now unconditionally live. Remove it from the conditionally
6168 dead list, so that a conditional set won't cause us to think
6170 splay_tree_remove (pbi->reg_cond_dead, i);
6176 /* Scan expression X and store a 1-bit in NEW_LIVE for each reg it uses.
6177 This is done assuming the registers needed from X are those that
6178 have 1-bits in PBI->REG_LIVE.
6180 INSN is the containing instruction. If INSN is dead, this function
6184 mark_used_regs (pbi, x, cond, insn)
6185 struct propagate_block_info *pbi;
6188 register RTX_CODE code;
6190 int flags = pbi->flags;
6193 code = GET_CODE (x);
6213 /* If we are clobbering a MEM, mark any registers inside the address
6215 if (GET_CODE (XEXP (x, 0)) == MEM)
6216 mark_used_regs (pbi, XEXP (XEXP (x, 0), 0), cond, insn);
6220 /* Don't bother watching stores to mems if this is not the
6221 final pass. We'll not be deleting dead stores this round. */
6222 if (optimize && (flags & PROP_SCAN_DEAD_CODE))
6224 /* Invalidate the data for the last MEM stored, but only if MEM is
6225 something that can be stored into. */
6226 if (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
6227 && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)))
6228 /* Needn't clear the memory set list. */
6232 rtx temp = pbi->mem_set_list;
6233 rtx prev = NULL_RTX;
6238 next = XEXP (temp, 1);
6239 if (anti_dependence (XEXP (temp, 0), x))
6241 /* Splice temp out of the list. */
6243 XEXP (prev, 1) = next;
6245 pbi->mem_set_list = next;
6246 free_EXPR_LIST_node (temp);
6247 pbi->mem_set_list_len--;
6255 /* If the memory reference had embedded side effects (autoincrement
6256 address modes. Then we may need to kill some entries on the
6259 invalidate_mems_from_autoinc (pbi, insn);
6263 if (flags & PROP_AUTOINC)
6264 find_auto_inc (pbi, x, insn);
6269 #ifdef CLASS_CANNOT_CHANGE_MODE
6270 if (GET_CODE (SUBREG_REG (x)) == REG
6271 && REGNO (SUBREG_REG (x)) >= FIRST_PSEUDO_REGISTER
6272 && CLASS_CANNOT_CHANGE_MODE_P (GET_MODE (x),
6273 GET_MODE (SUBREG_REG (x))))
6274 REG_CHANGES_MODE (REGNO (SUBREG_REG (x))) = 1;
6277 /* While we're here, optimize this case. */
6279 if (GET_CODE (x) != REG)
6284 /* See a register other than being set => mark it as needed. */
6285 mark_used_reg (pbi, x, cond, insn);
6290 register rtx testreg = SET_DEST (x);
6293 /* If storing into MEM, don't show it as being used. But do
6294 show the address as being used. */
6295 if (GET_CODE (testreg) == MEM)
6298 if (flags & PROP_AUTOINC)
6299 find_auto_inc (pbi, testreg, insn);
6301 mark_used_regs (pbi, XEXP (testreg, 0), cond, insn);
6302 mark_used_regs (pbi, SET_SRC (x), cond, insn);
6306 /* Storing in STRICT_LOW_PART is like storing in a reg
6307 in that this SET might be dead, so ignore it in TESTREG.
6308 but in some other ways it is like using the reg.
6310 Storing in a SUBREG or a bit field is like storing the entire
6311 register in that if the register's value is not used
6312 then this SET is not needed. */
6313 while (GET_CODE (testreg) == STRICT_LOW_PART
6314 || GET_CODE (testreg) == ZERO_EXTRACT
6315 || GET_CODE (testreg) == SIGN_EXTRACT
6316 || GET_CODE (testreg) == SUBREG)
6318 #ifdef CLASS_CANNOT_CHANGE_MODE
6319 if (GET_CODE (testreg) == SUBREG
6320 && GET_CODE (SUBREG_REG (testreg)) == REG
6321 && REGNO (SUBREG_REG (testreg)) >= FIRST_PSEUDO_REGISTER
6322 && CLASS_CANNOT_CHANGE_MODE_P (GET_MODE (SUBREG_REG (testreg)),
6323 GET_MODE (testreg)))
6324 REG_CHANGES_MODE (REGNO (SUBREG_REG (testreg))) = 1;
6327 /* Modifying a single register in an alternate mode
6328 does not use any of the old value. But these other
6329 ways of storing in a register do use the old value. */
6330 if (GET_CODE (testreg) == SUBREG
6331 && !(REG_SIZE (SUBREG_REG (testreg)) > REG_SIZE (testreg)))
6336 testreg = XEXP (testreg, 0);
6339 /* If this is a store into a register or group of registers,
6340 recursively scan the value being stored. */
6342 if ((GET_CODE (testreg) == PARALLEL
6343 && GET_MODE (testreg) == BLKmode)
6344 || (GET_CODE (testreg) == REG
6345 && (regno = REGNO (testreg),
6346 ! (regno == FRAME_POINTER_REGNUM
6347 && (! reload_completed || frame_pointer_needed)))
6348 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
6349 && ! (regno == HARD_FRAME_POINTER_REGNUM
6350 && (! reload_completed || frame_pointer_needed))
6352 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
6353 && ! (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
6358 mark_used_regs (pbi, SET_DEST (x), cond, insn);
6359 mark_used_regs (pbi, SET_SRC (x), cond, insn);
6366 case UNSPEC_VOLATILE:
6370 /* Traditional and volatile asm instructions must be considered to use
6371 and clobber all hard registers, all pseudo-registers and all of
6372 memory. So must TRAP_IF and UNSPEC_VOLATILE operations.
6374 Consider for instance a volatile asm that changes the fpu rounding
6375 mode. An insn should not be moved across this even if it only uses
6376 pseudo-regs because it might give an incorrectly rounded result.
6378 ?!? Unfortunately, marking all hard registers as live causes massive
6379 problems for the register allocator and marking all pseudos as live
6380 creates mountains of uninitialized variable warnings.
6382 So for now, just clear the memory set list and mark any regs
6383 we can find in ASM_OPERANDS as used. */
6384 if (code != ASM_OPERANDS || MEM_VOLATILE_P (x))
6386 free_EXPR_LIST_list (&pbi->mem_set_list);
6387 pbi->mem_set_list_len = 0;
6390 /* For all ASM_OPERANDS, we must traverse the vector of input operands.
6391 We can not just fall through here since then we would be confused
6392 by the ASM_INPUT rtx inside ASM_OPERANDS, which do not indicate
6393 traditional asms unlike their normal usage. */
6394 if (code == ASM_OPERANDS)
6398 for (j = 0; j < ASM_OPERANDS_INPUT_LENGTH (x); j++)
6399 mark_used_regs (pbi, ASM_OPERANDS_INPUT (x, j), cond, insn);
6405 if (cond != NULL_RTX)
6408 mark_used_regs (pbi, COND_EXEC_TEST (x), NULL_RTX, insn);
6410 cond = COND_EXEC_TEST (x);
6411 x = COND_EXEC_CODE (x);
6415 /* We _do_not_ want to scan operands of phi nodes. Operands of
6416 a phi function are evaluated only when control reaches this
6417 block along a particular edge. Therefore, regs that appear
6418 as arguments to phi should not be added to the global live at
6426 /* Recursively scan the operands of this expression. */
6429 register const char *fmt = GET_RTX_FORMAT (code);
6432 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
6436 /* Tail recursive case: save a function call level. */
6442 mark_used_regs (pbi, XEXP (x, i), cond, insn);
6444 else if (fmt[i] == 'E')
6447 for (j = 0; j < XVECLEN (x, i); j++)
6448 mark_used_regs (pbi, XVECEXP (x, i, j), cond, insn);
6457 try_pre_increment_1 (pbi, insn)
6458 struct propagate_block_info *pbi;
6461 /* Find the next use of this reg. If in same basic block,
6462 make it do pre-increment or pre-decrement if appropriate. */
6463 rtx x = single_set (insn);
6464 HOST_WIDE_INT amount = ((GET_CODE (SET_SRC (x)) == PLUS ? 1 : -1)
6465 * INTVAL (XEXP (SET_SRC (x), 1)));
6466 int regno = REGNO (SET_DEST (x));
6467 rtx y = pbi->reg_next_use[regno];
6469 && SET_DEST (x) != stack_pointer_rtx
6470 && BLOCK_NUM (y) == BLOCK_NUM (insn)
6471 /* Don't do this if the reg dies, or gets set in y; a standard addressing
6472 mode would be better. */
6473 && ! dead_or_set_p (y, SET_DEST (x))
6474 && try_pre_increment (y, SET_DEST (x), amount))
6476 /* We have found a suitable auto-increment and already changed
6477 insn Y to do it. So flush this increment instruction. */
6478 propagate_block_delete_insn (pbi->bb, insn);
6480 /* Count a reference to this reg for the increment insn we are
6481 deleting. When a reg is incremented, spilling it is worse,
6482 so we want to make that less likely. */
6483 if (regno >= FIRST_PSEUDO_REGISTER)
6485 REG_FREQ (regno) += (optimize_size || !pbi->bb->frequency
6486 ? 1 : pbi->bb->frequency);
6487 REG_N_SETS (regno)++;
6490 /* Flush any remembered memories depending on the value of
6491 the incremented register. */
6492 invalidate_mems_from_set (pbi, SET_DEST (x));
6499 /* Try to change INSN so that it does pre-increment or pre-decrement
6500 addressing on register REG in order to add AMOUNT to REG.
6501 AMOUNT is negative for pre-decrement.
6502 Returns 1 if the change could be made.
6503 This checks all about the validity of the result of modifying INSN. */
6506 try_pre_increment (insn, reg, amount)
6508 HOST_WIDE_INT amount;
6512 /* Nonzero if we can try to make a pre-increment or pre-decrement.
6513 For example, addl $4,r1; movl (r1),... can become movl +(r1),... */
6515 /* Nonzero if we can try to make a post-increment or post-decrement.
6516 For example, addl $4,r1; movl -4(r1),... can become movl (r1)+,...
6517 It is possible for both PRE_OK and POST_OK to be nonzero if the machine
6518 supports both pre-inc and post-inc, or both pre-dec and post-dec. */
6521 /* Nonzero if the opportunity actually requires post-inc or post-dec. */
6524 /* From the sign of increment, see which possibilities are conceivable
6525 on this target machine. */
6526 if (HAVE_PRE_INCREMENT && amount > 0)
6528 if (HAVE_POST_INCREMENT && amount > 0)
6531 if (HAVE_PRE_DECREMENT && amount < 0)
6533 if (HAVE_POST_DECREMENT && amount < 0)
6536 if (! (pre_ok || post_ok))
6539 /* It is not safe to add a side effect to a jump insn
6540 because if the incremented register is spilled and must be reloaded
6541 there would be no way to store the incremented value back in memory. */
6543 if (GET_CODE (insn) == JUMP_INSN)
6548 use = find_use_as_address (PATTERN (insn), reg, 0);
6549 if (post_ok && (use == 0 || use == (rtx) 1))
6551 use = find_use_as_address (PATTERN (insn), reg, -amount);
6555 if (use == 0 || use == (rtx) 1)
6558 if (GET_MODE_SIZE (GET_MODE (use)) != (amount > 0 ? amount : - amount))
6561 /* See if this combination of instruction and addressing mode exists. */
6562 if (! validate_change (insn, &XEXP (use, 0),
6563 gen_rtx_fmt_e (amount > 0
6564 ? (do_post ? POST_INC : PRE_INC)
6565 : (do_post ? POST_DEC : PRE_DEC),
6569 /* Record that this insn now has an implicit side effect on X. */
6570 REG_NOTES (insn) = alloc_EXPR_LIST (REG_INC, reg, REG_NOTES (insn));
6574 #endif /* AUTO_INC_DEC */
6576 /* Find the place in the rtx X where REG is used as a memory address.
6577 Return the MEM rtx that so uses it.
6578 If PLUSCONST is nonzero, search instead for a memory address equivalent to
6579 (plus REG (const_int PLUSCONST)).
6581 If such an address does not appear, return 0.
6582 If REG appears more than once, or is used other than in such an address,
6586 find_use_as_address (x, reg, plusconst)
6589 HOST_WIDE_INT plusconst;
6591 enum rtx_code code = GET_CODE (x);
6592 const char *fmt = GET_RTX_FORMAT (code);
6594 register rtx value = 0;
6597 if (code == MEM && XEXP (x, 0) == reg && plusconst == 0)
6600 if (code == MEM && GET_CODE (XEXP (x, 0)) == PLUS
6601 && XEXP (XEXP (x, 0), 0) == reg
6602 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
6603 && INTVAL (XEXP (XEXP (x, 0), 1)) == plusconst)
6606 if (code == SIGN_EXTRACT || code == ZERO_EXTRACT)
6608 /* If REG occurs inside a MEM used in a bit-field reference,
6609 that is unacceptable. */
6610 if (find_use_as_address (XEXP (x, 0), reg, 0) != 0)
6611 return (rtx) (HOST_WIDE_INT) 1;
6615 return (rtx) (HOST_WIDE_INT) 1;
6617 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
6621 tem = find_use_as_address (XEXP (x, i), reg, plusconst);
6625 return (rtx) (HOST_WIDE_INT) 1;
6627 else if (fmt[i] == 'E')
6630 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
6632 tem = find_use_as_address (XVECEXP (x, i, j), reg, plusconst);
6636 return (rtx) (HOST_WIDE_INT) 1;
6644 /* Write information about registers and basic blocks into FILE.
6645 This is part of making a debugging dump. */
6648 dump_regset (r, outf)
6655 fputs (" (nil)", outf);
6659 EXECUTE_IF_SET_IN_REG_SET (r, 0, i,
6661 fprintf (outf, " %d", i);
6662 if (i < FIRST_PSEUDO_REGISTER)
6663 fprintf (outf, " [%s]",
6668 /* Print a human-reaable representation of R on the standard error
6669 stream. This function is designed to be used from within the
6676 dump_regset (r, stderr);
6677 putc ('\n', stderr);
6681 dump_flow_info (file)
6685 static const char * const reg_class_names[] = REG_CLASS_NAMES;
6687 fprintf (file, "%d registers.\n", max_regno);
6688 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
6691 enum reg_class class, altclass;
6692 fprintf (file, "\nRegister %d used %d times across %d insns",
6693 i, REG_N_REFS (i), REG_LIVE_LENGTH (i));
6694 if (REG_BASIC_BLOCK (i) >= 0)
6695 fprintf (file, " in block %d", REG_BASIC_BLOCK (i));
6697 fprintf (file, "; set %d time%s", REG_N_SETS (i),
6698 (REG_N_SETS (i) == 1) ? "" : "s");
6699 if (REG_USERVAR_P (regno_reg_rtx[i]))
6700 fprintf (file, "; user var");
6701 if (REG_N_DEATHS (i) != 1)
6702 fprintf (file, "; dies in %d places", REG_N_DEATHS (i));
6703 if (REG_N_CALLS_CROSSED (i) == 1)
6704 fprintf (file, "; crosses 1 call");
6705 else if (REG_N_CALLS_CROSSED (i))
6706 fprintf (file, "; crosses %d calls", REG_N_CALLS_CROSSED (i));
6707 if (PSEUDO_REGNO_BYTES (i) != UNITS_PER_WORD)
6708 fprintf (file, "; %d bytes", PSEUDO_REGNO_BYTES (i));
6709 class = reg_preferred_class (i);
6710 altclass = reg_alternate_class (i);
6711 if (class != GENERAL_REGS || altclass != ALL_REGS)
6713 if (altclass == ALL_REGS || class == ALL_REGS)
6714 fprintf (file, "; pref %s", reg_class_names[(int) class]);
6715 else if (altclass == NO_REGS)
6716 fprintf (file, "; %s or none", reg_class_names[(int) class]);
6718 fprintf (file, "; pref %s, else %s",
6719 reg_class_names[(int) class],
6720 reg_class_names[(int) altclass]);
6722 if (REG_POINTER (regno_reg_rtx[i]))
6723 fprintf (file, "; pointer");
6724 fprintf (file, ".\n");
6727 fprintf (file, "\n%d basic blocks, %d edges.\n", n_basic_blocks, n_edges);
6728 for (i = 0; i < n_basic_blocks; i++)
6730 register basic_block bb = BASIC_BLOCK (i);
6733 fprintf (file, "\nBasic block %d: first insn %d, last %d, loop_depth %d, count ",
6734 i, INSN_UID (bb->head), INSN_UID (bb->end), bb->loop_depth);
6735 fprintf (file, HOST_WIDEST_INT_PRINT_DEC, (HOST_WIDEST_INT) bb->count);
6736 fprintf (file, ", freq %i.\n", bb->frequency);
6738 fprintf (file, "Predecessors: ");
6739 for (e = bb->pred; e; e = e->pred_next)
6740 dump_edge_info (file, e, 0);
6742 fprintf (file, "\nSuccessors: ");
6743 for (e = bb->succ; e; e = e->succ_next)
6744 dump_edge_info (file, e, 1);
6746 fprintf (file, "\nRegisters live at start:");
6747 dump_regset (bb->global_live_at_start, file);
6749 fprintf (file, "\nRegisters live at end:");
6750 dump_regset (bb->global_live_at_end, file);
6761 dump_flow_info (stderr);
6765 dump_edge_info (file, e, do_succ)
6770 basic_block side = (do_succ ? e->dest : e->src);
6772 if (side == ENTRY_BLOCK_PTR)
6773 fputs (" ENTRY", file);
6774 else if (side == EXIT_BLOCK_PTR)
6775 fputs (" EXIT", file);
6777 fprintf (file, " %d", side->index);
6780 fprintf (file, " [%.1f%%] ", e->probability * 100.0 / REG_BR_PROB_BASE);
6784 fprintf (file, " count:");
6785 fprintf (file, HOST_WIDEST_INT_PRINT_DEC, (HOST_WIDEST_INT) e->count);
6790 static const char * const bitnames[] = {
6791 "fallthru", "crit", "ab", "abcall", "eh", "fake"
6794 int i, flags = e->flags;
6798 for (i = 0; flags; i++)
6799 if (flags & (1 << i))
6805 if (i < (int) ARRAY_SIZE (bitnames))
6806 fputs (bitnames[i], file);
6808 fprintf (file, "%d", i);
6815 /* Print out one basic block with live information at start and end. */
6826 fprintf (outf, ";; Basic block %d, loop depth %d, count ",
6827 bb->index, bb->loop_depth, bb->count);
6828 fprintf (outf, HOST_WIDEST_INT_PRINT_DEC, (HOST_WIDEST_INT) bb->count);
6831 fputs (";; Predecessors: ", outf);
6832 for (e = bb->pred; e; e = e->pred_next)
6833 dump_edge_info (outf, e, 0);
6836 fputs (";; Registers live at start:", outf);
6837 dump_regset (bb->global_live_at_start, outf);
6840 for (insn = bb->head, last = NEXT_INSN (bb->end);
6842 insn = NEXT_INSN (insn))
6843 print_rtl_single (outf, insn);
6845 fputs (";; Registers live at end:", outf);
6846 dump_regset (bb->global_live_at_end, outf);
6849 fputs (";; Successors: ", outf);
6850 for (e = bb->succ; e; e = e->succ_next)
6851 dump_edge_info (outf, e, 1);
6859 dump_bb (bb, stderr);
6866 dump_bb (BASIC_BLOCK (n), stderr);
6869 /* Like print_rtl, but also print out live information for the start of each
6873 print_rtl_with_bb (outf, rtx_first)
6877 register rtx tmp_rtx;
6880 fprintf (outf, "(nil)\n");
6884 enum bb_state { NOT_IN_BB, IN_ONE_BB, IN_MULTIPLE_BB };
6885 int max_uid = get_max_uid ();
6886 basic_block *start = (basic_block *)
6887 xcalloc (max_uid, sizeof (basic_block));
6888 basic_block *end = (basic_block *)
6889 xcalloc (max_uid, sizeof (basic_block));
6890 enum bb_state *in_bb_p = (enum bb_state *)
6891 xcalloc (max_uid, sizeof (enum bb_state));
6893 for (i = n_basic_blocks - 1; i >= 0; i--)
6895 basic_block bb = BASIC_BLOCK (i);
6898 start[INSN_UID (bb->head)] = bb;
6899 end[INSN_UID (bb->end)] = bb;
6900 for (x = bb->head; x != NULL_RTX; x = NEXT_INSN (x))
6902 enum bb_state state = IN_MULTIPLE_BB;
6903 if (in_bb_p[INSN_UID (x)] == NOT_IN_BB)
6905 in_bb_p[INSN_UID (x)] = state;
6912 for (tmp_rtx = rtx_first; NULL != tmp_rtx; tmp_rtx = NEXT_INSN (tmp_rtx))
6917 if ((bb = start[INSN_UID (tmp_rtx)]) != NULL)
6919 fprintf (outf, ";; Start of basic block %d, registers live:",
6921 dump_regset (bb->global_live_at_start, outf);
6925 if (in_bb_p[INSN_UID (tmp_rtx)] == NOT_IN_BB
6926 && GET_CODE (tmp_rtx) != NOTE
6927 && GET_CODE (tmp_rtx) != BARRIER)
6928 fprintf (outf, ";; Insn is not within a basic block\n");
6929 else if (in_bb_p[INSN_UID (tmp_rtx)] == IN_MULTIPLE_BB)
6930 fprintf (outf, ";; Insn is in multiple basic blocks\n");
6932 did_output = print_rtl_single (outf, tmp_rtx);
6934 if ((bb = end[INSN_UID (tmp_rtx)]) != NULL)
6936 fprintf (outf, ";; End of basic block %d, registers live:\n",
6938 dump_regset (bb->global_live_at_end, outf);
6951 if (current_function_epilogue_delay_list != 0)
6953 fprintf (outf, "\n;; Insns in epilogue delay list:\n\n");
6954 for (tmp_rtx = current_function_epilogue_delay_list; tmp_rtx != 0;
6955 tmp_rtx = XEXP (tmp_rtx, 1))
6956 print_rtl_single (outf, XEXP (tmp_rtx, 0));
6960 /* Dump the rtl into the current debugging dump file, then abort. */
6963 print_rtl_and_abort_fcn (file, line, function)
6966 const char *function;
6970 print_rtl_with_bb (rtl_dump_file, get_insns ());
6971 fclose (rtl_dump_file);
6974 fancy_abort (file, line, function);
6977 /* Recompute register set/reference counts immediately prior to register
6980 This avoids problems with set/reference counts changing to/from values
6981 which have special meanings to the register allocators.
6983 Additionally, the reference counts are the primary component used by the
6984 register allocators to prioritize pseudos for allocation to hard regs.
6985 More accurate reference counts generally lead to better register allocation.
6987 F is the first insn to be scanned.
6989 LOOP_STEP denotes how much loop_depth should be incremented per
6990 loop nesting level in order to increase the ref count more for
6991 references in a loop.
6993 It might be worthwhile to update REG_LIVE_LENGTH, REG_BASIC_BLOCK and
6994 possibly other information which is used by the register allocators. */
6997 recompute_reg_usage (f, loop_step)
6998 rtx f ATTRIBUTE_UNUSED;
6999 int loop_step ATTRIBUTE_UNUSED;
7001 allocate_reg_life_data ();
7002 update_life_info (NULL, UPDATE_LIFE_LOCAL, PROP_REG_INFO);
7005 /* Optionally removes all the REG_DEAD and REG_UNUSED notes from a set of
7006 blocks. If BLOCKS is NULL, assume the universal set. Returns a count
7007 of the number of registers that died. */
7010 count_or_remove_death_notes (blocks, kill)
7016 for (i = n_basic_blocks - 1; i >= 0; --i)
7021 if (blocks && ! TEST_BIT (blocks, i))
7024 bb = BASIC_BLOCK (i);
7026 for (insn = bb->head;; insn = NEXT_INSN (insn))
7030 rtx *pprev = ®_NOTES (insn);
7035 switch (REG_NOTE_KIND (link))
7038 if (GET_CODE (XEXP (link, 0)) == REG)
7040 rtx reg = XEXP (link, 0);
7043 if (REGNO (reg) >= FIRST_PSEUDO_REGISTER)
7046 n = HARD_REGNO_NREGS (REGNO (reg), GET_MODE (reg));
7054 rtx next = XEXP (link, 1);
7055 free_EXPR_LIST_node (link);
7056 *pprev = link = next;
7062 pprev = &XEXP (link, 1);
7069 if (insn == bb->end)
7078 /* Update insns block within BB. */
7081 update_bb_for_insn (bb)
7086 if (! basic_block_for_insn)
7089 for (insn = bb->head; ; insn = NEXT_INSN (insn))
7091 set_block_for_insn (insn, bb);
7093 if (insn == bb->end)
7099 /* Record INSN's block as BB. */
7102 set_block_for_insn (insn, bb)
7106 size_t uid = INSN_UID (insn);
7107 if (uid >= basic_block_for_insn->num_elements)
7111 /* Add one-eighth the size so we don't keep calling xrealloc. */
7112 new_size = uid + (uid + 7) / 8;
7114 VARRAY_GROW (basic_block_for_insn, new_size);
7116 VARRAY_BB (basic_block_for_insn, uid) = bb;
7119 /* When a new insn has been inserted into an existing block, it will
7120 sometimes emit more than a single insn. This routine will set the
7121 block number for the specified insn, and look backwards in the insn
7122 chain to see if there are any other uninitialized insns immediately
7123 previous to this one, and set the block number for them too. */
7126 set_block_for_new_insns (insn, bb)
7130 set_block_for_insn (insn, bb);
7132 /* Scan the previous instructions setting the block number until we find
7133 an instruction that has the block number set, or we find a note
7135 for (insn = PREV_INSN (insn); insn != NULL_RTX; insn = PREV_INSN (insn))
7137 if (GET_CODE (insn) == NOTE)
7139 if (INSN_UID (insn) >= basic_block_for_insn->num_elements
7140 || BLOCK_FOR_INSN (insn) == 0)
7141 set_block_for_insn (insn, bb);
7147 /* Verify the CFG consistency. This function check some CFG invariants and
7148 aborts when something is wrong. Hope that this function will help to
7149 convert many optimization passes to preserve CFG consistent.
7151 Currently it does following checks:
7153 - test head/end pointers
7154 - overlapping of basic blocks
7155 - edge list corectness
7156 - headers of basic blocks (the NOTE_INSN_BASIC_BLOCK note)
7157 - tails of basic blocks (ensure that boundary is necesary)
7158 - scans body of the basic block for JUMP_INSN, CODE_LABEL
7159 and NOTE_INSN_BASIC_BLOCK
7160 - check that all insns are in the basic blocks
7161 (except the switch handling code, barriers and notes)
7162 - check that all returns are followed by barriers
7164 In future it can be extended check a lot of other stuff as well
7165 (reachability of basic blocks, life information, etc. etc.). */
7170 const int max_uid = get_max_uid ();
7171 const rtx rtx_first = get_insns ();
7172 rtx last_head = get_last_insn ();
7173 basic_block *bb_info;
7175 int i, last_bb_num_seen, num_bb_notes, err = 0;
7177 bb_info = (basic_block *) xcalloc (max_uid, sizeof (basic_block));
7179 for (i = n_basic_blocks - 1; i >= 0; i--)
7181 basic_block bb = BASIC_BLOCK (i);
7182 rtx head = bb->head;
7185 /* Verify the end of the basic block is in the INSN chain. */
7186 for (x = last_head; x != NULL_RTX; x = PREV_INSN (x))
7191 error ("End insn %d for block %d not found in the insn stream.",
7192 INSN_UID (end), bb->index);
7196 /* Work backwards from the end to the head of the basic block
7197 to verify the head is in the RTL chain. */
7198 for (; x != NULL_RTX; x = PREV_INSN (x))
7200 /* While walking over the insn chain, verify insns appear
7201 in only one basic block and initialize the BB_INFO array
7202 used by other passes. */
7203 if (bb_info[INSN_UID (x)] != NULL)
7205 error ("Insn %d is in multiple basic blocks (%d and %d)",
7206 INSN_UID (x), bb->index, bb_info[INSN_UID (x)]->index);
7209 bb_info[INSN_UID (x)] = bb;
7216 error ("Head insn %d for block %d not found in the insn stream.",
7217 INSN_UID (head), bb->index);
7224 /* Now check the basic blocks (boundaries etc.) */
7225 for (i = n_basic_blocks - 1; i >= 0; i--)
7227 basic_block bb = BASIC_BLOCK (i);
7228 /* Check corectness of edge lists */
7237 "verify_flow_info: Basic block %d succ edge is corrupted\n",
7239 fprintf (stderr, "Predecessor: ");
7240 dump_edge_info (stderr, e, 0);
7241 fprintf (stderr, "\nSuccessor: ");
7242 dump_edge_info (stderr, e, 1);
7246 if (e->dest != EXIT_BLOCK_PTR)
7248 edge e2 = e->dest->pred;
7249 while (e2 && e2 != e)
7253 error ("Basic block %i edge lists are corrupted", bb->index);
7265 error ("Basic block %d pred edge is corrupted", bb->index);
7266 fputs ("Predecessor: ", stderr);
7267 dump_edge_info (stderr, e, 0);
7268 fputs ("\nSuccessor: ", stderr);
7269 dump_edge_info (stderr, e, 1);
7270 fputc ('\n', stderr);
7273 if (e->src != ENTRY_BLOCK_PTR)
7275 edge e2 = e->src->succ;
7276 while (e2 && e2 != e)
7280 error ("Basic block %i edge lists are corrupted", bb->index);
7287 /* OK pointers are correct. Now check the header of basic
7288 block. It ought to contain optional CODE_LABEL followed
7289 by NOTE_BASIC_BLOCK. */
7291 if (GET_CODE (x) == CODE_LABEL)
7295 error ("NOTE_INSN_BASIC_BLOCK is missing for block %d",
7301 if (!NOTE_INSN_BASIC_BLOCK_P (x) || NOTE_BASIC_BLOCK (x) != bb)
7303 error ("NOTE_INSN_BASIC_BLOCK is missing for block %d\n",
7310 /* Do checks for empty blocks here */
7317 if (NOTE_INSN_BASIC_BLOCK_P (x))
7319 error ("NOTE_INSN_BASIC_BLOCK %d in the middle of basic block %d",
7320 INSN_UID (x), bb->index);
7327 if (GET_CODE (x) == JUMP_INSN
7328 || GET_CODE (x) == CODE_LABEL
7329 || GET_CODE (x) == BARRIER)
7331 error ("In basic block %d:", bb->index);
7332 fatal_insn ("Flow control insn inside a basic block", x);
7340 last_bb_num_seen = -1;
7345 if (NOTE_INSN_BASIC_BLOCK_P (x))
7347 basic_block bb = NOTE_BASIC_BLOCK (x);
7349 if (bb->index != last_bb_num_seen + 1)
7350 /* Basic blocks not numbered consecutively. */
7353 last_bb_num_seen = bb->index;
7356 if (!bb_info[INSN_UID (x)])
7358 switch (GET_CODE (x))
7365 /* An addr_vec is placed outside any block block. */
7367 && GET_CODE (NEXT_INSN (x)) == JUMP_INSN
7368 && (GET_CODE (PATTERN (NEXT_INSN (x))) == ADDR_DIFF_VEC
7369 || GET_CODE (PATTERN (NEXT_INSN (x))) == ADDR_VEC))
7374 /* But in any case, non-deletable labels can appear anywhere. */
7378 fatal_insn ("Insn outside basic block", x);
7383 && GET_CODE (x) == JUMP_INSN
7384 && returnjump_p (x) && ! condjump_p (x)
7385 && ! (NEXT_INSN (x) && GET_CODE (NEXT_INSN (x)) == BARRIER))
7386 fatal_insn ("Return not followed by barrier", x);
7391 if (num_bb_notes != n_basic_blocks)
7393 ("number of bb notes in insn chain (%d) != n_basic_blocks (%d)",
7394 num_bb_notes, n_basic_blocks);
7403 /* Functions to access an edge list with a vector representation.
7404 Enough data is kept such that given an index number, the
7405 pred and succ that edge represents can be determined, or
7406 given a pred and a succ, its index number can be returned.
7407 This allows algorithms which consume a lot of memory to
7408 represent the normally full matrix of edge (pred,succ) with a
7409 single indexed vector, edge (EDGE_INDEX (pred, succ)), with no
7410 wasted space in the client code due to sparse flow graphs. */
7412 /* This functions initializes the edge list. Basically the entire
7413 flowgraph is processed, and all edges are assigned a number,
7414 and the data structure is filled in. */
7419 struct edge_list *elist;
7425 block_count = n_basic_blocks + 2; /* Include the entry and exit blocks. */
7429 /* Determine the number of edges in the flow graph by counting successor
7430 edges on each basic block. */
7431 for (x = 0; x < n_basic_blocks; x++)
7433 basic_block bb = BASIC_BLOCK (x);
7435 for (e = bb->succ; e; e = e->succ_next)
7438 /* Don't forget successors of the entry block. */
7439 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
7442 elist = (struct edge_list *) xmalloc (sizeof (struct edge_list));
7443 elist->num_blocks = block_count;
7444 elist->num_edges = num_edges;
7445 elist->index_to_edge = (edge *) xmalloc (sizeof (edge) * num_edges);
7449 /* Follow successors of the entry block, and register these edges. */
7450 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
7452 elist->index_to_edge[num_edges] = e;
7456 for (x = 0; x < n_basic_blocks; x++)
7458 basic_block bb = BASIC_BLOCK (x);
7460 /* Follow all successors of blocks, and register these edges. */
7461 for (e = bb->succ; e; e = e->succ_next)
7463 elist->index_to_edge[num_edges] = e;
7470 /* This function free's memory associated with an edge list. */
7473 free_edge_list (elist)
7474 struct edge_list *elist;
7478 free (elist->index_to_edge);
7483 /* This function provides debug output showing an edge list. */
7486 print_edge_list (f, elist)
7488 struct edge_list *elist;
7491 fprintf (f, "Compressed edge list, %d BBs + entry & exit, and %d edges\n",
7492 elist->num_blocks - 2, elist->num_edges);
7494 for (x = 0; x < elist->num_edges; x++)
7496 fprintf (f, " %-4d - edge(", x);
7497 if (INDEX_EDGE_PRED_BB (elist, x) == ENTRY_BLOCK_PTR)
7498 fprintf (f, "entry,");
7500 fprintf (f, "%d,", INDEX_EDGE_PRED_BB (elist, x)->index);
7502 if (INDEX_EDGE_SUCC_BB (elist, x) == EXIT_BLOCK_PTR)
7503 fprintf (f, "exit)\n");
7505 fprintf (f, "%d)\n", INDEX_EDGE_SUCC_BB (elist, x)->index);
7509 /* This function provides an internal consistency check of an edge list,
7510 verifying that all edges are present, and that there are no
7514 verify_edge_list (f, elist)
7516 struct edge_list *elist;
7518 int x, pred, succ, index;
7521 for (x = 0; x < n_basic_blocks; x++)
7523 basic_block bb = BASIC_BLOCK (x);
7525 for (e = bb->succ; e; e = e->succ_next)
7527 pred = e->src->index;
7528 succ = e->dest->index;
7529 index = EDGE_INDEX (elist, e->src, e->dest);
7530 if (index == EDGE_INDEX_NO_EDGE)
7532 fprintf (f, "*p* No index for edge from %d to %d\n", pred, succ);
7535 if (INDEX_EDGE_PRED_BB (elist, index)->index != pred)
7536 fprintf (f, "*p* Pred for index %d should be %d not %d\n",
7537 index, pred, INDEX_EDGE_PRED_BB (elist, index)->index);
7538 if (INDEX_EDGE_SUCC_BB (elist, index)->index != succ)
7539 fprintf (f, "*p* Succ for index %d should be %d not %d\n",
7540 index, succ, INDEX_EDGE_SUCC_BB (elist, index)->index);
7543 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
7545 pred = e->src->index;
7546 succ = e->dest->index;
7547 index = EDGE_INDEX (elist, e->src, e->dest);
7548 if (index == EDGE_INDEX_NO_EDGE)
7550 fprintf (f, "*p* No index for edge from %d to %d\n", pred, succ);
7553 if (INDEX_EDGE_PRED_BB (elist, index)->index != pred)
7554 fprintf (f, "*p* Pred for index %d should be %d not %d\n",
7555 index, pred, INDEX_EDGE_PRED_BB (elist, index)->index);
7556 if (INDEX_EDGE_SUCC_BB (elist, index)->index != succ)
7557 fprintf (f, "*p* Succ for index %d should be %d not %d\n",
7558 index, succ, INDEX_EDGE_SUCC_BB (elist, index)->index);
7560 /* We've verified that all the edges are in the list, no lets make sure
7561 there are no spurious edges in the list. */
7563 for (pred = 0; pred < n_basic_blocks; pred++)
7564 for (succ = 0; succ < n_basic_blocks; succ++)
7566 basic_block p = BASIC_BLOCK (pred);
7567 basic_block s = BASIC_BLOCK (succ);
7571 for (e = p->succ; e; e = e->succ_next)
7577 for (e = s->pred; e; e = e->pred_next)
7583 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), BASIC_BLOCK (succ))
7584 == EDGE_INDEX_NO_EDGE && found_edge != 0)
7585 fprintf (f, "*** Edge (%d, %d) appears to not have an index\n",
7587 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), BASIC_BLOCK (succ))
7588 != EDGE_INDEX_NO_EDGE && found_edge == 0)
7589 fprintf (f, "*** Edge (%d, %d) has index %d, but there is no edge\n",
7590 pred, succ, EDGE_INDEX (elist, BASIC_BLOCK (pred),
7591 BASIC_BLOCK (succ)));
7593 for (succ = 0; succ < n_basic_blocks; succ++)
7595 basic_block p = ENTRY_BLOCK_PTR;
7596 basic_block s = BASIC_BLOCK (succ);
7600 for (e = p->succ; e; e = e->succ_next)
7606 for (e = s->pred; e; e = e->pred_next)
7612 if (EDGE_INDEX (elist, ENTRY_BLOCK_PTR, BASIC_BLOCK (succ))
7613 == EDGE_INDEX_NO_EDGE && found_edge != 0)
7614 fprintf (f, "*** Edge (entry, %d) appears to not have an index\n",
7616 if (EDGE_INDEX (elist, ENTRY_BLOCK_PTR, BASIC_BLOCK (succ))
7617 != EDGE_INDEX_NO_EDGE && found_edge == 0)
7618 fprintf (f, "*** Edge (entry, %d) has index %d, but no edge exists\n",
7619 succ, EDGE_INDEX (elist, ENTRY_BLOCK_PTR,
7620 BASIC_BLOCK (succ)));
7622 for (pred = 0; pred < n_basic_blocks; pred++)
7624 basic_block p = BASIC_BLOCK (pred);
7625 basic_block s = EXIT_BLOCK_PTR;
7629 for (e = p->succ; e; e = e->succ_next)
7635 for (e = s->pred; e; e = e->pred_next)
7641 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), EXIT_BLOCK_PTR)
7642 == EDGE_INDEX_NO_EDGE && found_edge != 0)
7643 fprintf (f, "*** Edge (%d, exit) appears to not have an index\n",
7645 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), EXIT_BLOCK_PTR)
7646 != EDGE_INDEX_NO_EDGE && found_edge == 0)
7647 fprintf (f, "*** Edge (%d, exit) has index %d, but no edge exists\n",
7648 pred, EDGE_INDEX (elist, BASIC_BLOCK (pred),
7653 /* This routine will determine what, if any, edge there is between
7654 a specified predecessor and successor. */
7657 find_edge_index (edge_list, pred, succ)
7658 struct edge_list *edge_list;
7659 basic_block pred, succ;
7662 for (x = 0; x < NUM_EDGES (edge_list); x++)
7664 if (INDEX_EDGE_PRED_BB (edge_list, x) == pred
7665 && INDEX_EDGE_SUCC_BB (edge_list, x) == succ)
7668 return (EDGE_INDEX_NO_EDGE);
7671 /* This function will remove an edge from the flow graph. */
7677 edge last_pred = NULL;
7678 edge last_succ = NULL;
7680 basic_block src, dest;
7683 for (tmp = src->succ; tmp && tmp != e; tmp = tmp->succ_next)
7689 last_succ->succ_next = e->succ_next;
7691 src->succ = e->succ_next;
7693 for (tmp = dest->pred; tmp && tmp != e; tmp = tmp->pred_next)
7699 last_pred->pred_next = e->pred_next;
7701 dest->pred = e->pred_next;
7707 /* This routine will remove any fake successor edges for a basic block.
7708 When the edge is removed, it is also removed from whatever predecessor
7712 remove_fake_successors (bb)
7716 for (e = bb->succ; e;)
7720 if ((tmp->flags & EDGE_FAKE) == EDGE_FAKE)
7725 /* This routine will remove all fake edges from the flow graph. If
7726 we remove all fake successors, it will automatically remove all
7727 fake predecessors. */
7730 remove_fake_edges ()
7734 for (x = 0; x < n_basic_blocks; x++)
7735 remove_fake_successors (BASIC_BLOCK (x));
7737 /* We've handled all successors except the entry block's. */
7738 remove_fake_successors (ENTRY_BLOCK_PTR);
7741 /* This function will add a fake edge between any block which has no
7742 successors, and the exit block. Some data flow equations require these
7746 add_noreturn_fake_exit_edges ()
7750 for (x = 0; x < n_basic_blocks; x++)
7751 if (BASIC_BLOCK (x)->succ == NULL)
7752 make_edge (NULL, BASIC_BLOCK (x), EXIT_BLOCK_PTR, EDGE_FAKE);
7755 /* This function adds a fake edge between any infinite loops to the
7756 exit block. Some optimizations require a path from each node to
7759 See also Morgan, Figure 3.10, pp. 82-83.
7761 The current implementation is ugly, not attempting to minimize the
7762 number of inserted fake edges. To reduce the number of fake edges
7763 to insert, add fake edges from _innermost_ loops containing only
7764 nodes not reachable from the exit block. */
7767 connect_infinite_loops_to_exit ()
7769 basic_block unvisited_block;
7771 /* Perform depth-first search in the reverse graph to find nodes
7772 reachable from the exit block. */
7773 struct depth_first_search_dsS dfs_ds;
7775 flow_dfs_compute_reverse_init (&dfs_ds);
7776 flow_dfs_compute_reverse_add_bb (&dfs_ds, EXIT_BLOCK_PTR);
7778 /* Repeatedly add fake edges, updating the unreachable nodes. */
7781 unvisited_block = flow_dfs_compute_reverse_execute (&dfs_ds);
7782 if (!unvisited_block)
7784 make_edge (NULL, unvisited_block, EXIT_BLOCK_PTR, EDGE_FAKE);
7785 flow_dfs_compute_reverse_add_bb (&dfs_ds, unvisited_block);
7788 flow_dfs_compute_reverse_finish (&dfs_ds);
7793 /* Redirect an edge's successor from one block to another. */
7796 redirect_edge_succ (e, new_succ)
7798 basic_block new_succ;
7802 /* Disconnect the edge from the old successor block. */
7803 for (pe = &e->dest->pred; *pe != e; pe = &(*pe)->pred_next)
7805 *pe = (*pe)->pred_next;
7807 /* Reconnect the edge to the new successor block. */
7808 e->pred_next = new_succ->pred;
7813 /* Redirect an edge's predecessor from one block to another. */
7816 redirect_edge_pred (e, new_pred)
7818 basic_block new_pred;
7822 /* Disconnect the edge from the old predecessor block. */
7823 for (pe = &e->src->succ; *pe != e; pe = &(*pe)->succ_next)
7825 *pe = (*pe)->succ_next;
7827 /* Reconnect the edge to the new predecessor block. */
7828 e->succ_next = new_pred->succ;
7833 /* Dump the list of basic blocks in the bitmap NODES. */
7836 flow_nodes_print (str, nodes, file)
7838 const sbitmap nodes;
7846 fprintf (file, "%s { ", str);
7847 EXECUTE_IF_SET_IN_SBITMAP (nodes, 0, node, {fprintf (file, "%d ", node);});
7848 fputs ("}\n", file);
7852 /* Dump the list of edges in the array EDGE_LIST. */
7855 flow_edge_list_print (str, edge_list, num_edges, file)
7857 const edge *edge_list;
7866 fprintf (file, "%s { ", str);
7867 for (i = 0; i < num_edges; i++)
7868 fprintf (file, "%d->%d ", edge_list[i]->src->index,
7869 edge_list[i]->dest->index);
7870 fputs ("}\n", file);
7874 /* Dump loop related CFG information. */
7877 flow_loops_cfg_dump (loops, file)
7878 const struct loops *loops;
7883 if (! loops->num || ! file || ! loops->cfg.dom)
7886 for (i = 0; i < n_basic_blocks; i++)
7890 fprintf (file, ";; %d succs { ", i);
7891 for (succ = BASIC_BLOCK (i)->succ; succ; succ = succ->succ_next)
7892 fprintf (file, "%d ", succ->dest->index);
7893 flow_nodes_print ("} dom", loops->cfg.dom[i], file);
7896 /* Dump the DFS node order. */
7897 if (loops->cfg.dfs_order)
7899 fputs (";; DFS order: ", file);
7900 for (i = 0; i < n_basic_blocks; i++)
7901 fprintf (file, "%d ", loops->cfg.dfs_order[i]);
7904 /* Dump the reverse completion node order. */
7905 if (loops->cfg.rc_order)
7907 fputs (";; RC order: ", file);
7908 for (i = 0; i < n_basic_blocks; i++)
7909 fprintf (file, "%d ", loops->cfg.rc_order[i]);
7914 /* Return non-zero if the nodes of LOOP are a subset of OUTER. */
7917 flow_loop_nested_p (outer, loop)
7921 return sbitmap_a_subset_b_p (loop->nodes, outer->nodes);
7925 /* Dump the loop information specified by LOOP to the stream FILE
7926 using auxiliary dump callback function LOOP_DUMP_AUX if non null. */
7928 flow_loop_dump (loop, file, loop_dump_aux, verbose)
7929 const struct loop *loop;
7931 void (*loop_dump_aux) PARAMS((const struct loop *, FILE *, int));
7934 if (! loop || ! loop->header)
7937 fprintf (file, ";;\n;; Loop %d (%d to %d):%s%s\n",
7938 loop->num, INSN_UID (loop->first->head),
7939 INSN_UID (loop->last->end),
7940 loop->shared ? " shared" : "",
7941 loop->invalid ? " invalid" : "");
7942 fprintf (file, ";; header %d, latch %d, pre-header %d, first %d, last %d\n",
7943 loop->header->index, loop->latch->index,
7944 loop->pre_header ? loop->pre_header->index : -1,
7945 loop->first->index, loop->last->index);
7946 fprintf (file, ";; depth %d, level %d, outer %ld\n",
7947 loop->depth, loop->level,
7948 (long) (loop->outer ? loop->outer->num : -1));
7950 if (loop->pre_header_edges)
7951 flow_edge_list_print (";; pre-header edges", loop->pre_header_edges,
7952 loop->num_pre_header_edges, file);
7953 flow_edge_list_print (";; entry edges", loop->entry_edges,
7954 loop->num_entries, file);
7955 fprintf (file, ";; %d", loop->num_nodes);
7956 flow_nodes_print (" nodes", loop->nodes, file);
7957 flow_edge_list_print (";; exit edges", loop->exit_edges,
7958 loop->num_exits, file);
7959 if (loop->exits_doms)
7960 flow_nodes_print (";; exit doms", loop->exits_doms, file);
7962 loop_dump_aux (loop, file, verbose);
7966 /* Dump the loop information specified by LOOPS to the stream FILE,
7967 using auxiliary dump callback function LOOP_DUMP_AUX if non null. */
7969 flow_loops_dump (loops, file, loop_dump_aux, verbose)
7970 const struct loops *loops;
7972 void (*loop_dump_aux) PARAMS((const struct loop *, FILE *, int));
7978 num_loops = loops->num;
7979 if (! num_loops || ! file)
7982 fprintf (file, ";; %d loops found, %d levels\n",
7983 num_loops, loops->levels);
7985 for (i = 0; i < num_loops; i++)
7987 struct loop *loop = &loops->array[i];
7989 flow_loop_dump (loop, file, loop_dump_aux, verbose);
7995 for (j = 0; j < i; j++)
7997 struct loop *oloop = &loops->array[j];
7999 if (loop->header == oloop->header)
8004 smaller = loop->num_nodes < oloop->num_nodes;
8006 /* If the union of LOOP and OLOOP is different than
8007 the larger of LOOP and OLOOP then LOOP and OLOOP
8008 must be disjoint. */
8009 disjoint = ! flow_loop_nested_p (smaller ? loop : oloop,
8010 smaller ? oloop : loop);
8012 ";; loop header %d shared by loops %d, %d %s\n",
8013 loop->header->index, i, j,
8014 disjoint ? "disjoint" : "nested");
8021 flow_loops_cfg_dump (loops, file);
8025 /* Free all the memory allocated for LOOPS. */
8028 flow_loops_free (loops)
8029 struct loops *loops;
8038 /* Free the loop descriptors. */
8039 for (i = 0; i < loops->num; i++)
8041 struct loop *loop = &loops->array[i];
8043 if (loop->pre_header_edges)
8044 free (loop->pre_header_edges);
8046 sbitmap_free (loop->nodes);
8047 if (loop->entry_edges)
8048 free (loop->entry_edges);
8049 if (loop->exit_edges)
8050 free (loop->exit_edges);
8051 if (loop->exits_doms)
8052 sbitmap_free (loop->exits_doms);
8054 free (loops->array);
8055 loops->array = NULL;
8058 sbitmap_vector_free (loops->cfg.dom);
8059 if (loops->cfg.dfs_order)
8060 free (loops->cfg.dfs_order);
8062 if (loops->shared_headers)
8063 sbitmap_free (loops->shared_headers);
8068 /* Find the entry edges into the loop with header HEADER and nodes
8069 NODES and store in ENTRY_EDGES array. Return the number of entry
8070 edges from the loop. */
8073 flow_loop_entry_edges_find (header, nodes, entry_edges)
8075 const sbitmap nodes;
8081 *entry_edges = NULL;
8084 for (e = header->pred; e; e = e->pred_next)
8086 basic_block src = e->src;
8088 if (src == ENTRY_BLOCK_PTR || ! TEST_BIT (nodes, src->index))
8095 *entry_edges = (edge *) xmalloc (num_entries * sizeof (edge *));
8098 for (e = header->pred; e; e = e->pred_next)
8100 basic_block src = e->src;
8102 if (src == ENTRY_BLOCK_PTR || ! TEST_BIT (nodes, src->index))
8103 (*entry_edges)[num_entries++] = e;
8110 /* Find the exit edges from the loop using the bitmap of loop nodes
8111 NODES and store in EXIT_EDGES array. Return the number of
8112 exit edges from the loop. */
8115 flow_loop_exit_edges_find (nodes, exit_edges)
8116 const sbitmap nodes;
8125 /* Check all nodes within the loop to see if there are any
8126 successors not in the loop. Note that a node may have multiple
8127 exiting edges ????? A node can have one jumping edge and one fallthru
8128 edge so only one of these can exit the loop. */
8130 EXECUTE_IF_SET_IN_SBITMAP (nodes, 0, node, {
8131 for (e = BASIC_BLOCK (node)->succ; e; e = e->succ_next)
8133 basic_block dest = e->dest;
8135 if (dest == EXIT_BLOCK_PTR || ! TEST_BIT (nodes, dest->index))
8143 *exit_edges = (edge *) xmalloc (num_exits * sizeof (edge *));
8145 /* Store all exiting edges into an array. */
8147 EXECUTE_IF_SET_IN_SBITMAP (nodes, 0, node, {
8148 for (e = BASIC_BLOCK (node)->succ; e; e = e->succ_next)
8150 basic_block dest = e->dest;
8152 if (dest == EXIT_BLOCK_PTR || ! TEST_BIT (nodes, dest->index))
8153 (*exit_edges)[num_exits++] = e;
8161 /* Find the nodes contained within the loop with header HEADER and
8162 latch LATCH and store in NODES. Return the number of nodes within
8166 flow_loop_nodes_find (header, latch, nodes)
8175 stack = (basic_block *) xmalloc (n_basic_blocks * sizeof (basic_block));
8178 /* Start with only the loop header in the set of loop nodes. */
8179 sbitmap_zero (nodes);
8180 SET_BIT (nodes, header->index);
8182 header->loop_depth++;
8184 /* Push the loop latch on to the stack. */
8185 if (! TEST_BIT (nodes, latch->index))
8187 SET_BIT (nodes, latch->index);
8188 latch->loop_depth++;
8190 stack[sp++] = latch;
8199 for (e = node->pred; e; e = e->pred_next)
8201 basic_block ancestor = e->src;
8203 /* If each ancestor not marked as part of loop, add to set of
8204 loop nodes and push on to stack. */
8205 if (ancestor != ENTRY_BLOCK_PTR
8206 && ! TEST_BIT (nodes, ancestor->index))
8208 SET_BIT (nodes, ancestor->index);
8209 ancestor->loop_depth++;
8211 stack[sp++] = ancestor;
8219 /* Compute the depth first search order and store in the array
8220 DFS_ORDER if non-zero, marking the nodes visited in VISITED. If
8221 RC_ORDER is non-zero, return the reverse completion number for each
8222 node. Returns the number of nodes visited. A depth first search
8223 tries to get as far away from the starting point as quickly as
8227 flow_depth_first_order_compute (dfs_order, rc_order)
8234 int rcnum = n_basic_blocks - 1;
8237 /* Allocate stack for back-tracking up CFG. */
8238 stack = (edge *) xmalloc ((n_basic_blocks + 1) * sizeof (edge));
8241 /* Allocate bitmap to track nodes that have been visited. */
8242 visited = sbitmap_alloc (n_basic_blocks);
8244 /* None of the nodes in the CFG have been visited yet. */
8245 sbitmap_zero (visited);
8247 /* Push the first edge on to the stack. */
8248 stack[sp++] = ENTRY_BLOCK_PTR->succ;
8256 /* Look at the edge on the top of the stack. */
8261 /* Check if the edge destination has been visited yet. */
8262 if (dest != EXIT_BLOCK_PTR && ! TEST_BIT (visited, dest->index))
8264 /* Mark that we have visited the destination. */
8265 SET_BIT (visited, dest->index);
8268 dfs_order[dfsnum++] = dest->index;
8272 /* Since the DEST node has been visited for the first
8273 time, check its successors. */
8274 stack[sp++] = dest->succ;
8278 /* There are no successors for the DEST node so assign
8279 its reverse completion number. */
8281 rc_order[rcnum--] = dest->index;
8286 if (! e->succ_next && src != ENTRY_BLOCK_PTR)
8288 /* There are no more successors for the SRC node
8289 so assign its reverse completion number. */
8291 rc_order[rcnum--] = src->index;
8295 stack[sp - 1] = e->succ_next;
8302 sbitmap_free (visited);
8304 /* The number of nodes visited should not be greater than
8306 if (dfsnum > n_basic_blocks)
8309 /* There are some nodes left in the CFG that are unreachable. */
8310 if (dfsnum < n_basic_blocks)
8315 /* Compute the depth first search order on the _reverse_ graph and
8316 store in the array DFS_ORDER, marking the nodes visited in VISITED.
8317 Returns the number of nodes visited.
8319 The computation is split into three pieces:
8321 flow_dfs_compute_reverse_init () creates the necessary data
8324 flow_dfs_compute_reverse_add_bb () adds a basic block to the data
8325 structures. The block will start the search.
8327 flow_dfs_compute_reverse_execute () continues (or starts) the
8328 search using the block on the top of the stack, stopping when the
8331 flow_dfs_compute_reverse_finish () destroys the necessary data
8334 Thus, the user will probably call ..._init(), call ..._add_bb() to
8335 add a beginning basic block to the stack, call ..._execute(),
8336 possibly add another bb to the stack and again call ..._execute(),
8337 ..., and finally call _finish(). */
8339 /* Initialize the data structures used for depth-first search on the
8340 reverse graph. If INITIALIZE_STACK is nonzero, the exit block is
8341 added to the basic block stack. DATA is the current depth-first
8342 search context. If INITIALIZE_STACK is non-zero, there is an
8343 element on the stack. */
8346 flow_dfs_compute_reverse_init (data)
8347 depth_first_search_ds data;
8349 /* Allocate stack for back-tracking up CFG. */
8351 (basic_block *) xmalloc ((n_basic_blocks - (INVALID_BLOCK + 1))
8352 * sizeof (basic_block));
8355 /* Allocate bitmap to track nodes that have been visited. */
8356 data->visited_blocks = sbitmap_alloc (n_basic_blocks - (INVALID_BLOCK + 1));
8358 /* None of the nodes in the CFG have been visited yet. */
8359 sbitmap_zero (data->visited_blocks);
8364 /* Add the specified basic block to the top of the dfs data
8365 structures. When the search continues, it will start at the
8369 flow_dfs_compute_reverse_add_bb (data, bb)
8370 depth_first_search_ds data;
8373 data->stack[data->sp++] = bb;
8377 /* Continue the depth-first search through the reverse graph starting
8378 with the block at the stack's top and ending when the stack is
8379 empty. Visited nodes are marked. Returns an unvisited basic
8380 block, or NULL if there is none available. */
8383 flow_dfs_compute_reverse_execute (data)
8384 depth_first_search_ds data;
8390 while (data->sp > 0)
8392 bb = data->stack[--data->sp];
8394 /* Mark that we have visited this node. */
8395 if (!TEST_BIT (data->visited_blocks, bb->index - (INVALID_BLOCK + 1)))
8397 SET_BIT (data->visited_blocks, bb->index - (INVALID_BLOCK + 1));
8399 /* Perform depth-first search on adjacent vertices. */
8400 for (e = bb->pred; e; e = e->pred_next)
8401 flow_dfs_compute_reverse_add_bb (data, e->src);
8405 /* Determine if there are unvisited basic blocks. */
8406 for (i = n_basic_blocks - (INVALID_BLOCK + 1); --i >= 0;)
8407 if (!TEST_BIT (data->visited_blocks, i))
8408 return BASIC_BLOCK (i + (INVALID_BLOCK + 1));
8412 /* Destroy the data structures needed for depth-first search on the
8416 flow_dfs_compute_reverse_finish (data)
8417 depth_first_search_ds data;
8420 sbitmap_free (data->visited_blocks);
8425 /* Find the root node of the loop pre-header extended basic block and
8426 the edges along the trace from the root node to the loop header. */
8429 flow_loop_pre_header_scan (loop)
8435 loop->num_pre_header_edges = 0;
8437 if (loop->num_entries != 1)
8440 ebb = loop->entry_edges[0]->src;
8442 if (ebb != ENTRY_BLOCK_PTR)
8446 /* Count number of edges along trace from loop header to
8447 root of pre-header extended basic block. Usually this is
8448 only one or two edges. */
8450 while (ebb->pred->src != ENTRY_BLOCK_PTR && ! ebb->pred->pred_next)
8452 ebb = ebb->pred->src;
8456 loop->pre_header_edges = (edge *) xmalloc (num * sizeof (edge *));
8457 loop->num_pre_header_edges = num;
8459 /* Store edges in order that they are followed. The source
8460 of the first edge is the root node of the pre-header extended
8461 basic block and the destination of the last last edge is
8463 for (e = loop->entry_edges[0]; num; e = e->src->pred)
8465 loop->pre_header_edges[--num] = e;
8471 /* Return the block for the pre-header of the loop with header
8472 HEADER where DOM specifies the dominator information. Return NULL if
8473 there is no pre-header. */
8476 flow_loop_pre_header_find (header, dom)
8480 basic_block pre_header;
8483 /* If block p is a predecessor of the header and is the only block
8484 that the header does not dominate, then it is the pre-header. */
8486 for (e = header->pred; e; e = e->pred_next)
8488 basic_block node = e->src;
8490 if (node != ENTRY_BLOCK_PTR
8491 && ! TEST_BIT (dom[node->index], header->index))
8493 if (pre_header == NULL)
8497 /* There are multiple edges into the header from outside
8498 the loop so there is no pre-header block. */
8507 /* Add LOOP to the loop hierarchy tree where PREVLOOP was the loop
8508 previously added. The insertion algorithm assumes that the loops
8509 are added in the order found by a depth first search of the CFG. */
8512 flow_loop_tree_node_add (prevloop, loop)
8513 struct loop *prevloop;
8517 if (flow_loop_nested_p (prevloop, loop))
8519 prevloop->inner = loop;
8520 loop->outer = prevloop;
8524 while (prevloop->outer)
8526 if (flow_loop_nested_p (prevloop->outer, loop))
8528 prevloop->next = loop;
8529 loop->outer = prevloop->outer;
8532 prevloop = prevloop->outer;
8535 prevloop->next = loop;
8539 /* Build the loop hierarchy tree for LOOPS. */
8542 flow_loops_tree_build (loops)
8543 struct loops *loops;
8548 num_loops = loops->num;
8552 /* Root the loop hierarchy tree with the first loop found.
8553 Since we used a depth first search this should be the
8555 loops->tree = &loops->array[0];
8556 loops->tree->outer = loops->tree->inner = loops->tree->next = NULL;
8558 /* Add the remaining loops to the tree. */
8559 for (i = 1; i < num_loops; i++)
8560 flow_loop_tree_node_add (&loops->array[i - 1], &loops->array[i]);
8563 /* Helper function to compute loop nesting depth and enclosed loop level
8564 for the natural loop specified by LOOP at the loop depth DEPTH.
8565 Returns the loop level. */
8568 flow_loop_level_compute (loop, depth)
8578 /* Traverse loop tree assigning depth and computing level as the
8579 maximum level of all the inner loops of this loop. The loop
8580 level is equivalent to the height of the loop in the loop tree
8581 and corresponds to the number of enclosed loop levels (including
8583 for (inner = loop->inner; inner; inner = inner->next)
8587 ilevel = flow_loop_level_compute (inner, depth + 1) + 1;
8592 loop->level = level;
8593 loop->depth = depth;
8597 /* Compute the loop nesting depth and enclosed loop level for the loop
8598 hierarchy tree specfied by LOOPS. Return the maximum enclosed loop
8602 flow_loops_level_compute (loops)
8603 struct loops *loops;
8609 /* Traverse all the outer level loops. */
8610 for (loop = loops->tree; loop; loop = loop->next)
8612 level = flow_loop_level_compute (loop, 1);
8620 /* Scan a single natural loop specified by LOOP collecting information
8621 about it specified by FLAGS. */
8624 flow_loop_scan (loops, loop, flags)
8625 struct loops *loops;
8629 /* Determine prerequisites. */
8630 if ((flags & LOOP_EXITS_DOMS) && ! loop->exit_edges)
8631 flags |= LOOP_EXIT_EDGES;
8633 if (flags & LOOP_ENTRY_EDGES)
8635 /* Find edges which enter the loop header.
8636 Note that the entry edges should only
8637 enter the header of a natural loop. */
8639 = flow_loop_entry_edges_find (loop->header,
8641 &loop->entry_edges);
8644 if (flags & LOOP_EXIT_EDGES)
8646 /* Find edges which exit the loop. */
8648 = flow_loop_exit_edges_find (loop->nodes,
8652 if (flags & LOOP_EXITS_DOMS)
8656 /* Determine which loop nodes dominate all the exits
8658 loop->exits_doms = sbitmap_alloc (n_basic_blocks);
8659 sbitmap_copy (loop->exits_doms, loop->nodes);
8660 for (j = 0; j < loop->num_exits; j++)
8661 sbitmap_a_and_b (loop->exits_doms, loop->exits_doms,
8662 loops->cfg.dom[loop->exit_edges[j]->src->index]);
8664 /* The header of a natural loop must dominate
8666 if (! TEST_BIT (loop->exits_doms, loop->header->index))
8670 if (flags & LOOP_PRE_HEADER)
8672 /* Look to see if the loop has a pre-header node. */
8674 = flow_loop_pre_header_find (loop->header, loops->cfg.dom);
8676 /* Find the blocks within the extended basic block of
8677 the loop pre-header. */
8678 flow_loop_pre_header_scan (loop);
8684 /* Find all the natural loops in the function and save in LOOPS structure
8685 and recalculate loop_depth information in basic block structures.
8686 FLAGS controls which loop information is collected.
8687 Return the number of natural loops found. */
8690 flow_loops_find (loops, flags)
8691 struct loops *loops;
8703 /* This function cannot be repeatedly called with different
8704 flags to build up the loop information. The loop tree
8705 must always be built if this function is called. */
8706 if (! (flags & LOOP_TREE))
8709 memset (loops, 0, sizeof (*loops));
8711 /* Taking care of this degenerate case makes the rest of
8712 this code simpler. */
8713 if (n_basic_blocks == 0)
8719 /* Compute the dominators. */
8720 dom = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
8721 calculate_dominance_info (NULL, dom, CDI_DOMINATORS);
8723 /* Count the number of loop edges (back edges). This should be the
8724 same as the number of natural loops. */
8727 for (b = 0; b < n_basic_blocks; b++)
8731 header = BASIC_BLOCK (b);
8732 header->loop_depth = 0;
8734 for (e = header->pred; e; e = e->pred_next)
8736 basic_block latch = e->src;
8738 /* Look for back edges where a predecessor is dominated
8739 by this block. A natural loop has a single entry
8740 node (header) that dominates all the nodes in the
8741 loop. It also has single back edge to the header
8742 from a latch node. Note that multiple natural loops
8743 may share the same header. */
8744 if (b != header->index)
8747 if (latch != ENTRY_BLOCK_PTR && TEST_BIT (dom[latch->index], b))
8754 /* Compute depth first search order of the CFG so that outer
8755 natural loops will be found before inner natural loops. */
8756 dfs_order = (int *) xmalloc (n_basic_blocks * sizeof (int));
8757 rc_order = (int *) xmalloc (n_basic_blocks * sizeof (int));
8758 flow_depth_first_order_compute (dfs_order, rc_order);
8760 /* Save CFG derived information to avoid recomputing it. */
8761 loops->cfg.dom = dom;
8762 loops->cfg.dfs_order = dfs_order;
8763 loops->cfg.rc_order = rc_order;
8765 /* Allocate loop structures. */
8767 = (struct loop *) xcalloc (num_loops, sizeof (struct loop));
8769 headers = sbitmap_alloc (n_basic_blocks);
8770 sbitmap_zero (headers);
8772 loops->shared_headers = sbitmap_alloc (n_basic_blocks);
8773 sbitmap_zero (loops->shared_headers);
8775 /* Find and record information about all the natural loops
8778 for (b = 0; b < n_basic_blocks; b++)
8782 /* Search the nodes of the CFG in reverse completion order
8783 so that we can find outer loops first. */
8784 header = BASIC_BLOCK (rc_order[b]);
8786 /* Look for all the possible latch blocks for this header. */
8787 for (e = header->pred; e; e = e->pred_next)
8789 basic_block latch = e->src;
8791 /* Look for back edges where a predecessor is dominated
8792 by this block. A natural loop has a single entry
8793 node (header) that dominates all the nodes in the
8794 loop. It also has single back edge to the header
8795 from a latch node. Note that multiple natural loops
8796 may share the same header. */
8797 if (latch != ENTRY_BLOCK_PTR
8798 && TEST_BIT (dom[latch->index], header->index))
8802 loop = loops->array + num_loops;
8804 loop->header = header;
8805 loop->latch = latch;
8806 loop->num = num_loops;
8813 for (i = 0; i < num_loops; i++)
8815 struct loop *loop = &loops->array[i];
8817 /* Keep track of blocks that are loop headers so
8818 that we can tell which loops should be merged. */
8819 if (TEST_BIT (headers, loop->header->index))
8820 SET_BIT (loops->shared_headers, loop->header->index);
8821 SET_BIT (headers, loop->header->index);
8823 /* Find nodes contained within the loop. */
8824 loop->nodes = sbitmap_alloc (n_basic_blocks);
8826 = flow_loop_nodes_find (loop->header, loop->latch, loop->nodes);
8828 /* Compute first and last blocks within the loop.
8829 These are often the same as the loop header and
8830 loop latch respectively, but this is not always
8833 = BASIC_BLOCK (sbitmap_first_set_bit (loop->nodes));
8835 = BASIC_BLOCK (sbitmap_last_set_bit (loop->nodes));
8837 flow_loop_scan (loops, loop, flags);
8840 /* Natural loops with shared headers may either be disjoint or
8841 nested. Disjoint loops with shared headers cannot be inner
8842 loops and should be merged. For now just mark loops that share
8844 for (i = 0; i < num_loops; i++)
8845 if (TEST_BIT (loops->shared_headers, loops->array[i].header->index))
8846 loops->array[i].shared = 1;
8848 sbitmap_free (headers);
8851 loops->num = num_loops;
8853 /* Build the loop hierarchy tree. */
8854 flow_loops_tree_build (loops);
8856 /* Assign the loop nesting depth and enclosed loop level for each
8858 loops->levels = flow_loops_level_compute (loops);
8864 /* Update the information regarding the loops in the CFG
8865 specified by LOOPS. */
8867 flow_loops_update (loops, flags)
8868 struct loops *loops;
8871 /* One day we may want to update the current loop data. For now
8872 throw away the old stuff and rebuild what we need. */
8874 flow_loops_free (loops);
8876 return flow_loops_find (loops, flags);
8880 /* Return non-zero if edge E enters header of LOOP from outside of LOOP. */
8883 flow_loop_outside_edge_p (loop, e)
8884 const struct loop *loop;
8887 if (e->dest != loop->header)
8889 return (e->src == ENTRY_BLOCK_PTR)
8890 || ! TEST_BIT (loop->nodes, e->src->index);
8893 /* Clear LOG_LINKS fields of insns in a chain.
8894 Also clear the global_live_at_{start,end} fields of the basic block
8898 clear_log_links (insns)
8904 for (i = insns; i; i = NEXT_INSN (i))
8908 for (b = 0; b < n_basic_blocks; b++)
8910 basic_block bb = BASIC_BLOCK (b);
8912 bb->global_live_at_start = NULL;
8913 bb->global_live_at_end = NULL;
8916 ENTRY_BLOCK_PTR->global_live_at_end = NULL;
8917 EXIT_BLOCK_PTR->global_live_at_start = NULL;
8920 /* Given a register bitmap, turn on the bits in a HARD_REG_SET that
8921 correspond to the hard registers, if any, set in that map. This
8922 could be done far more efficiently by having all sorts of special-cases
8923 with moving single words, but probably isn't worth the trouble. */
8926 reg_set_to_hard_reg_set (to, from)
8932 EXECUTE_IF_SET_IN_BITMAP
8935 if (i >= FIRST_PSEUDO_REGISTER)
8937 SET_HARD_REG_BIT (*to, i);
8941 /* Called once at intialization time. */
8946 static int initialized;
8950 gcc_obstack_init (&flow_obstack);
8951 flow_firstobj = (char *) obstack_alloc (&flow_obstack, 0);
8956 obstack_free (&flow_obstack, flow_firstobj);
8957 flow_firstobj = (char *) obstack_alloc (&flow_obstack, 0);