1 /* Perform instruction reorganizations for delay slot filling.
2 Copyright (C) 1992, 93, 94, 95, 96, 97, 1998 Free Software Foundation, Inc.
3 Contributed by Richard Kenner (kenner@vlsi1.ultra.nyu.edu).
4 Hacked by Michael Tiemann (tiemann@cygnus.com).
6 This file is part of GNU CC.
8 GNU CC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 2, or (at your option)
13 GNU CC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GNU CC; see the file COPYING. If not, write to
20 the Free Software Foundation, 59 Temple Place - Suite 330,
21 Boston, MA 02111-1307, USA. */
23 /* Instruction reorganization pass.
25 This pass runs after register allocation and final jump
26 optimization. It should be the last pass to run before peephole.
27 It serves primarily to fill delay slots of insns, typically branch
28 and call insns. Other insns typically involve more complicated
29 interactions of data dependencies and resource constraints, and
30 are better handled by scheduling before register allocation (by the
31 function `schedule_insns').
33 The Branch Penalty is the number of extra cycles that are needed to
34 execute a branch insn. On an ideal machine, branches take a single
35 cycle, and the Branch Penalty is 0. Several RISC machines approach
36 branch delays differently:
38 The MIPS and AMD 29000 have a single branch delay slot. Most insns
39 (except other branches) can be used to fill this slot. When the
40 slot is filled, two insns execute in two cycles, reducing the
41 branch penalty to zero.
43 The Motorola 88000 conditionally exposes its branch delay slot,
44 so code is shorter when it is turned off, but will run faster
45 when useful insns are scheduled there.
47 The IBM ROMP has two forms of branch and call insns, both with and
48 without a delay slot. Much like the 88k, insns not using the delay
49 slot can be shorted (2 bytes vs. 4 bytes), but will run slowed.
51 The SPARC always has a branch delay slot, but its effects can be
52 annulled when the branch is not taken. This means that failing to
53 find other sources of insns, we can hoist an insn from the branch
54 target that would only be safe to execute knowing that the branch
57 The HP-PA always has a branch delay slot. For unconditional branches
58 its effects can be annulled when the branch is taken. The effects
59 of the delay slot in a conditional branch can be nullified for forward
60 taken branches, or for untaken backward branches. This means
61 we can hoist insns from the fall-through path for forward branches or
62 steal insns from the target of backward branches.
64 Three techniques for filling delay slots have been implemented so far:
66 (1) `fill_simple_delay_slots' is the simplest, most efficient way
67 to fill delay slots. This pass first looks for insns which come
68 from before the branch and which are safe to execute after the
69 branch. Then it searches after the insn requiring delay slots or,
70 in the case of a branch, for insns that are after the point at
71 which the branch merges into the fallthrough code, if such a point
72 exists. When such insns are found, the branch penalty decreases
73 and no code expansion takes place.
75 (2) `fill_eager_delay_slots' is more complicated: it is used for
76 scheduling conditional jumps, or for scheduling jumps which cannot
77 be filled using (1). A machine need not have annulled jumps to use
78 this strategy, but it helps (by keeping more options open).
79 `fill_eager_delay_slots' tries to guess the direction the branch
80 will go; if it guesses right 100% of the time, it can reduce the
81 branch penalty as much as `fill_simple_delay_slots' does. If it
82 guesses wrong 100% of the time, it might as well schedule nops (or
83 on the m88k, unexpose the branch slot). When
84 `fill_eager_delay_slots' takes insns from the fall-through path of
85 the jump, usually there is no code expansion; when it takes insns
86 from the branch target, there is code expansion if it is not the
87 only way to reach that target.
89 (3) `relax_delay_slots' uses a set of rules to simplify code that
90 has been reorganized by (1) and (2). It finds cases where
91 conditional test can be eliminated, jumps can be threaded, extra
92 insns can be eliminated, etc. It is the job of (1) and (2) to do a
93 good job of scheduling locally; `relax_delay_slots' takes care of
94 making the various individual schedules work well together. It is
95 especially tuned to handle the control flow interactions of branch
96 insns. It does nothing for insns with delay slots that do not
99 On machines that use CC0, we are very conservative. We will not make
100 a copy of an insn involving CC0 since we want to maintain a 1-1
101 correspondence between the insn that sets and uses CC0. The insns are
102 allowed to be separated by placing an insn that sets CC0 (but not an insn
103 that uses CC0; we could do this, but it doesn't seem worthwhile) in a
104 delay slot. In that case, we point each insn at the other with REG_CC_USER
105 and REG_CC_SETTER notes. Note that these restrictions affect very few
106 machines because most RISC machines with delay slots will not use CC0
107 (the RT is the only known exception at this point).
111 The Acorn Risc Machine can conditionally execute most insns, so
112 it is profitable to move single insns into a position to execute
113 based on the condition code of the previous insn.
115 The HP-PA can conditionally nullify insns, providing a similar
116 effect to the ARM, differing mostly in which insn is "in charge". */
122 #include "insn-config.h"
123 #include "conditions.h"
124 #include "hard-reg-set.h"
125 #include "basic-block.h"
127 #include "insn-flags.h"
132 #include "insn-attr.h"
137 #define obstack_chunk_alloc xmalloc
138 #define obstack_chunk_free free
140 #ifndef ANNUL_IFTRUE_SLOTS
141 #define eligible_for_annul_true(INSN, SLOTS, TRIAL, FLAGS) 0
143 #ifndef ANNUL_IFFALSE_SLOTS
144 #define eligible_for_annul_false(INSN, SLOTS, TRIAL, FLAGS) 0
147 /* Insns which have delay slots that have not yet been filled. */
149 static struct obstack unfilled_slots_obstack;
150 static rtx *unfilled_firstobj;
152 /* Define macros to refer to the first and last slot containing unfilled
153 insns. These are used because the list may move and its address
154 should be recomputed at each use. */
156 #define unfilled_slots_base \
157 ((rtx *) obstack_base (&unfilled_slots_obstack))
159 #define unfilled_slots_next \
160 ((rtx *) obstack_next_free (&unfilled_slots_obstack))
162 /* This structure is used to indicate which hardware resources are set or
163 needed by insns so far. */
167 char memory; /* Insn sets or needs a memory location. */
168 char unch_memory; /* Insn sets of needs a "unchanging" MEM. */
169 char volatil; /* Insn sets or needs a volatile memory loc. */
170 char cc; /* Insn sets or needs the condition codes. */
171 HARD_REG_SET regs; /* Which registers are set or needed. */
174 /* Macro to clear all resources. */
175 #define CLEAR_RESOURCE(RES) \
176 do { (RES)->memory = (RES)->unch_memory = (RES)->volatil = (RES)->cc = 0; \
177 CLEAR_HARD_REG_SET ((RES)->regs); } while (0)
179 /* Indicates what resources are required at the beginning of the epilogue. */
180 static struct resources start_of_epilogue_needs;
182 /* Indicates what resources are required at function end. */
183 static struct resources end_of_function_needs;
185 /* Points to the label before the end of the function. */
186 static rtx end_of_function_label;
188 /* This structure is used to record liveness information at the targets or
189 fallthrough insns of branches. We will most likely need the information
190 at targets again, so save them in a hash table rather than recomputing them
195 int uid; /* INSN_UID of target. */
196 struct target_info *next; /* Next info for same hash bucket. */
197 HARD_REG_SET live_regs; /* Registers live at target. */
198 int block; /* Basic block number containing target. */
199 int bb_tick; /* Generation count of basic block info. */
202 #define TARGET_HASH_PRIME 257
204 /* Define the hash table itself. */
205 static struct target_info **target_hash_table;
207 /* For each basic block, we maintain a generation number of its basic
208 block info, which is updated each time we move an insn from the
209 target of a jump. This is the generation number indexed by block
212 static int *bb_ticks;
214 /* Mapping between INSN_UID's and position in the code since INSN_UID's do
215 not always monotonically increase. */
216 static int *uid_to_ruid;
218 /* Highest valid index in `uid_to_ruid'. */
221 static void mark_referenced_resources PROTO((rtx, struct resources *, int));
222 static void mark_set_resources PROTO((rtx, struct resources *, int, int));
223 static int stop_search_p PROTO((rtx, int));
224 static int resource_conflicts_p PROTO((struct resources *,
225 struct resources *));
226 static int insn_references_resource_p PROTO((rtx, struct resources *, int));
227 static int insn_sets_resource_p PROTO((rtx, struct resources *, int));
228 static rtx find_end_label PROTO((void));
229 static rtx emit_delay_sequence PROTO((rtx, rtx, int));
230 static rtx add_to_delay_list PROTO((rtx, rtx));
231 static rtx delete_from_delay_slot PROTO((rtx));
232 static void delete_scheduled_jump PROTO((rtx));
233 static void note_delay_statistics PROTO((int, int));
234 static rtx optimize_skip PROTO((rtx));
235 static int get_jump_flags PROTO((rtx, rtx));
236 static int rare_destination PROTO((rtx));
237 static int mostly_true_jump PROTO((rtx, rtx));
238 static rtx get_branch_condition PROTO((rtx, rtx));
239 static int condition_dominates_p PROTO((rtx, rtx));
240 static rtx steal_delay_list_from_target PROTO((rtx, rtx, rtx, rtx,
244 int, int *, int *, rtx *));
245 static rtx steal_delay_list_from_fallthrough PROTO((rtx, rtx, rtx, rtx,
250 static void try_merge_delay_insns PROTO((rtx, rtx));
251 static rtx redundant_insn PROTO((rtx, rtx, rtx));
252 static int own_thread_p PROTO((rtx, rtx, int));
253 static int find_basic_block PROTO((rtx));
254 static void update_block PROTO((rtx, rtx));
255 static int reorg_redirect_jump PROTO((rtx, rtx));
256 static void update_reg_dead_notes PROTO((rtx, rtx));
257 static void fix_reg_dead_note PROTO((rtx, rtx));
258 static void update_reg_unused_notes PROTO((rtx, rtx));
259 static void update_live_status PROTO((rtx, rtx));
260 static rtx next_insn_no_annul PROTO((rtx));
261 static rtx find_dead_or_set_registers PROTO ((rtx, struct resources *, rtx *,
262 int, struct resources,
264 static void mark_target_live_regs PROTO((rtx, struct resources *));
265 static void fill_simple_delay_slots PROTO((int));
266 static rtx fill_slots_from_thread PROTO((rtx, rtx, rtx, rtx, int, int,
267 int, int, int *, rtx));
268 static void fill_eager_delay_slots PROTO((void));
269 static void relax_delay_slots PROTO((rtx));
270 static void make_return_insns PROTO((rtx));
271 static int redirect_with_delay_slots_safe_p PROTO ((rtx, rtx, rtx));
272 static int redirect_with_delay_list_safe_p PROTO ((rtx, rtx, rtx));
274 /* Given X, some rtl, and RES, a pointer to a `struct resource', mark
275 which resources are references by the insn. If INCLUDE_DELAYED_EFFECTS
276 is TRUE, resources used by the called routine will be included for
280 mark_referenced_resources (x, res, include_delayed_effects)
282 register struct resources *res;
283 register int include_delayed_effects;
285 register enum rtx_code code = GET_CODE (x);
287 register char *format_ptr;
289 /* Handle leaf items for which we set resource flags. Also, special-case
290 CALL, SET and CLOBBER operators. */
302 if (GET_CODE (SUBREG_REG (x)) != REG)
303 mark_referenced_resources (SUBREG_REG (x), res, 0);
306 int regno = REGNO (SUBREG_REG (x)) + SUBREG_WORD (x);
307 int last_regno = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
308 for (i = regno; i < last_regno; i++)
309 SET_HARD_REG_BIT (res->regs, i);
314 for (i = 0; i < HARD_REGNO_NREGS (REGNO (x), GET_MODE (x)); i++)
315 SET_HARD_REG_BIT (res->regs, REGNO (x) + i);
319 /* If this memory shouldn't change, it really isn't referencing
321 if (RTX_UNCHANGING_P (x))
322 res->unch_memory = 1;
325 res->volatil = MEM_VOLATILE_P (x);
327 /* Mark registers used to access memory. */
328 mark_referenced_resources (XEXP (x, 0), res, 0);
335 case UNSPEC_VOLATILE:
337 /* Traditional asm's are always volatile. */
346 res->volatil = MEM_VOLATILE_P (x);
348 /* For all ASM_OPERANDS, we must traverse the vector of input operands.
349 We can not just fall through here since then we would be confused
350 by the ASM_INPUT rtx inside ASM_OPERANDS, which do not indicate
351 traditional asms unlike their normal usage. */
353 for (i = 0; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
354 mark_referenced_resources (ASM_OPERANDS_INPUT (x, i), res, 0);
358 /* The first operand will be a (MEM (xxx)) but doesn't really reference
359 memory. The second operand may be referenced, though. */
360 mark_referenced_resources (XEXP (XEXP (x, 0), 0), res, 0);
361 mark_referenced_resources (XEXP (x, 1), res, 0);
365 /* Usually, the first operand of SET is set, not referenced. But
366 registers used to access memory are referenced. SET_DEST is
367 also referenced if it is a ZERO_EXTRACT or SIGN_EXTRACT. */
369 mark_referenced_resources (SET_SRC (x), res, 0);
372 if (GET_CODE (x) == SIGN_EXTRACT || GET_CODE (x) == ZERO_EXTRACT)
373 mark_referenced_resources (x, res, 0);
374 else if (GET_CODE (x) == SUBREG)
376 if (GET_CODE (x) == MEM)
377 mark_referenced_resources (XEXP (x, 0), res, 0);
384 if (include_delayed_effects)
386 /* A CALL references memory, the frame pointer if it exists, the
387 stack pointer, any global registers and any registers given in
388 USE insns immediately in front of the CALL.
390 However, we may have moved some of the parameter loading insns
391 into the delay slot of this CALL. If so, the USE's for them
392 don't count and should be skipped. */
393 rtx insn = PREV_INSN (x);
396 rtx next = NEXT_INSN (x);
399 /* If we are part of a delay slot sequence, point at the SEQUENCE. */
400 if (NEXT_INSN (insn) != x)
402 next = NEXT_INSN (NEXT_INSN (insn));
403 sequence = PATTERN (NEXT_INSN (insn));
404 seq_size = XVECLEN (sequence, 0);
405 if (GET_CODE (sequence) != SEQUENCE)
410 SET_HARD_REG_BIT (res->regs, STACK_POINTER_REGNUM);
411 if (frame_pointer_needed)
413 SET_HARD_REG_BIT (res->regs, FRAME_POINTER_REGNUM);
414 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
415 SET_HARD_REG_BIT (res->regs, HARD_FRAME_POINTER_REGNUM);
419 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
421 SET_HARD_REG_BIT (res->regs, i);
423 /* Check for a NOTE_INSN_SETJMP. If it exists, then we must
424 assume that this call can need any register.
426 This is done to be more conservative about how we handle setjmp.
427 We assume that they both use and set all registers. Using all
428 registers ensures that a register will not be considered dead
429 just because it crosses a setjmp call. A register should be
430 considered dead only if the setjmp call returns non-zero. */
431 if (next && GET_CODE (next) == NOTE
432 && NOTE_LINE_NUMBER (next) == NOTE_INSN_SETJMP)
433 SET_HARD_REG_SET (res->regs);
438 for (link = CALL_INSN_FUNCTION_USAGE (x);
440 link = XEXP (link, 1))
441 if (GET_CODE (XEXP (link, 0)) == USE)
443 for (i = 1; i < seq_size; i++)
445 rtx slot_pat = PATTERN (XVECEXP (sequence, 0, i));
446 if (GET_CODE (slot_pat) == SET
447 && rtx_equal_p (SET_DEST (slot_pat),
448 SET_DEST (XEXP (link, 0))))
452 mark_referenced_resources (SET_DEST (XEXP (link, 0)),
458 /* ... fall through to other INSN processing ... */
463 #ifdef INSN_REFERENCES_ARE_DELAYED
464 if (! include_delayed_effects
465 && INSN_REFERENCES_ARE_DELAYED (x))
469 /* No special processing, just speed up. */
470 mark_referenced_resources (PATTERN (x), res, include_delayed_effects);
477 /* Process each sub-expression and flag what it needs. */
478 format_ptr = GET_RTX_FORMAT (code);
479 for (i = 0; i < GET_RTX_LENGTH (code); i++)
480 switch (*format_ptr++)
483 mark_referenced_resources (XEXP (x, i), res, include_delayed_effects);
487 for (j = 0; j < XVECLEN (x, i); j++)
488 mark_referenced_resources (XVECEXP (x, i, j), res,
489 include_delayed_effects);
494 /* Given X, a part of an insn, and a pointer to a `struct resource',
495 RES, indicate which resources are modified by the insn. If
496 INCLUDE_DELAYED_EFFECTS is nonzero, also mark resources potentially
497 set by the called routine.
499 If IN_DEST is nonzero, it means we are inside a SET. Otherwise,
500 objects are being referenced instead of set.
502 We never mark the insn as modifying the condition code unless it explicitly
503 SETs CC0 even though this is not totally correct. The reason for this is
504 that we require a SET of CC0 to immediately precede the reference to CC0.
505 So if some other insn sets CC0 as a side-effect, we know it cannot affect
506 our computation and thus may be placed in a delay slot. */
509 mark_set_resources (x, res, in_dest, include_delayed_effects)
511 register struct resources *res;
513 int include_delayed_effects;
515 register enum rtx_code code;
517 register char *format_ptr;
535 /* These don't set any resources. */
544 /* Called routine modifies the condition code, memory, any registers
545 that aren't saved across calls, global registers and anything
546 explicitly CLOBBERed immediately after the CALL_INSN. */
548 if (include_delayed_effects)
550 rtx next = NEXT_INSN (x);
551 rtx prev = PREV_INSN (x);
554 res->cc = res->memory = 1;
555 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
556 if (call_used_regs[i] || global_regs[i])
557 SET_HARD_REG_BIT (res->regs, i);
559 /* If X is part of a delay slot sequence, then NEXT should be
560 the first insn after the sequence. */
561 if (NEXT_INSN (prev) != x)
562 next = NEXT_INSN (NEXT_INSN (prev));
564 for (link = CALL_INSN_FUNCTION_USAGE (x);
565 link; link = XEXP (link, 1))
566 if (GET_CODE (XEXP (link, 0)) == CLOBBER)
567 mark_set_resources (SET_DEST (XEXP (link, 0)), res, 1, 0);
569 /* Check for a NOTE_INSN_SETJMP. If it exists, then we must
570 assume that this call can clobber any register. */
571 if (next && GET_CODE (next) == NOTE
572 && NOTE_LINE_NUMBER (next) == NOTE_INSN_SETJMP)
573 SET_HARD_REG_SET (res->regs);
576 /* ... and also what its RTL says it modifies, if anything. */
581 /* An insn consisting of just a CLOBBER (or USE) is just for flow
582 and doesn't actually do anything, so we ignore it. */
584 #ifdef INSN_SETS_ARE_DELAYED
585 if (! include_delayed_effects
586 && INSN_SETS_ARE_DELAYED (x))
591 if (GET_CODE (x) != USE && GET_CODE (x) != CLOBBER)
596 /* If the source of a SET is a CALL, this is actually done by
597 the called routine. So only include it if we are to include the
598 effects of the calling routine. */
600 mark_set_resources (SET_DEST (x), res,
601 (include_delayed_effects
602 || GET_CODE (SET_SRC (x)) != CALL),
605 mark_set_resources (SET_SRC (x), res, 0, 0);
609 mark_set_resources (XEXP (x, 0), res, 1, 0);
613 for (i = 0; i < XVECLEN (x, 0); i++)
614 if (! (INSN_ANNULLED_BRANCH_P (XVECEXP (x, 0, 0))
615 && INSN_FROM_TARGET_P (XVECEXP (x, 0, i))))
616 mark_set_resources (XVECEXP (x, 0, i), res, 0,
617 include_delayed_effects);
624 mark_set_resources (XEXP (x, 0), res, 1, 0);
628 mark_set_resources (XEXP (x, 0), res, in_dest, 0);
629 mark_set_resources (XEXP (x, 1), res, 0, 0);
630 mark_set_resources (XEXP (x, 2), res, 0, 0);
637 res->unch_memory = RTX_UNCHANGING_P (x);
638 res->volatil = MEM_VOLATILE_P (x);
641 mark_set_resources (XEXP (x, 0), res, 0, 0);
647 if (GET_CODE (SUBREG_REG (x)) != REG)
648 mark_set_resources (SUBREG_REG (x), res,
649 in_dest, include_delayed_effects);
652 int regno = REGNO (SUBREG_REG (x)) + SUBREG_WORD (x);
653 int last_regno = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
654 for (i = regno; i < last_regno; i++)
655 SET_HARD_REG_BIT (res->regs, i);
662 for (i = 0; i < HARD_REGNO_NREGS (REGNO (x), GET_MODE (x)); i++)
663 SET_HARD_REG_BIT (res->regs, REGNO (x) + i);
670 /* Process each sub-expression and flag what it needs. */
671 format_ptr = GET_RTX_FORMAT (code);
672 for (i = 0; i < GET_RTX_LENGTH (code); i++)
673 switch (*format_ptr++)
676 mark_set_resources (XEXP (x, i), res, in_dest, include_delayed_effects);
680 for (j = 0; j < XVECLEN (x, i); j++)
681 mark_set_resources (XVECEXP (x, i, j), res, in_dest,
682 include_delayed_effects);
687 /* Return TRUE if this insn should stop the search for insn to fill delay
688 slots. LABELS_P indicates that labels should terminate the search.
689 In all cases, jumps terminate the search. */
692 stop_search_p (insn, labels_p)
699 switch (GET_CODE (insn))
713 /* OK unless it contains a delay slot or is an `asm' insn of some type.
714 We don't know anything about these. */
715 return (GET_CODE (PATTERN (insn)) == SEQUENCE
716 || GET_CODE (PATTERN (insn)) == ASM_INPUT
717 || asm_noperands (PATTERN (insn)) >= 0);
724 /* Return TRUE if any resources are marked in both RES1 and RES2 or if either
725 resource set contains a volatile memory reference. Otherwise, return FALSE. */
728 resource_conflicts_p (res1, res2)
729 struct resources *res1, *res2;
731 if ((res1->cc && res2->cc) || (res1->memory && res2->memory)
732 || (res1->unch_memory && res2->unch_memory)
733 || res1->volatil || res2->volatil)
737 return (res1->regs & res2->regs) != HARD_CONST (0);
742 for (i = 0; i < HARD_REG_SET_LONGS; i++)
743 if ((res1->regs[i] & res2->regs[i]) != 0)
750 /* Return TRUE if any resource marked in RES, a `struct resources', is
751 referenced by INSN. If INCLUDE_DELAYED_EFFECTS is set, return if the called
752 routine is using those resources.
754 We compute this by computing all the resources referenced by INSN and
755 seeing if this conflicts with RES. It might be faster to directly check
756 ourselves, and this is the way it used to work, but it means duplicating
757 a large block of complex code. */
760 insn_references_resource_p (insn, res, include_delayed_effects)
762 register struct resources *res;
763 int include_delayed_effects;
765 struct resources insn_res;
767 CLEAR_RESOURCE (&insn_res);
768 mark_referenced_resources (insn, &insn_res, include_delayed_effects);
769 return resource_conflicts_p (&insn_res, res);
772 /* Return TRUE if INSN modifies resources that are marked in RES.
773 INCLUDE_DELAYED_EFFECTS is set if the actions of that routine should be
774 included. CC0 is only modified if it is explicitly set; see comments
775 in front of mark_set_resources for details. */
778 insn_sets_resource_p (insn, res, include_delayed_effects)
780 register struct resources *res;
781 int include_delayed_effects;
783 struct resources insn_sets;
785 CLEAR_RESOURCE (&insn_sets);
786 mark_set_resources (insn, &insn_sets, 0, include_delayed_effects);
787 return resource_conflicts_p (&insn_sets, res);
790 /* Find a label at the end of the function or before a RETURN. If there is
798 /* If we found one previously, return it. */
799 if (end_of_function_label)
800 return end_of_function_label;
802 /* Otherwise, see if there is a label at the end of the function. If there
803 is, it must be that RETURN insns aren't needed, so that is our return
804 label and we don't have to do anything else. */
806 insn = get_last_insn ();
807 while (GET_CODE (insn) == NOTE
808 || (GET_CODE (insn) == INSN
809 && (GET_CODE (PATTERN (insn)) == USE
810 || GET_CODE (PATTERN (insn)) == CLOBBER)))
811 insn = PREV_INSN (insn);
813 /* When a target threads its epilogue we might already have a
814 suitable return insn. If so put a label before it for the
815 end_of_function_label. */
816 if (GET_CODE (insn) == BARRIER
817 && GET_CODE (PREV_INSN (insn)) == JUMP_INSN
818 && GET_CODE (PATTERN (PREV_INSN (insn))) == RETURN)
820 rtx temp = PREV_INSN (PREV_INSN (insn));
821 end_of_function_label = gen_label_rtx ();
822 LABEL_NUSES (end_of_function_label) = 0;
824 /* Put the label before an USE insns that may proceed the RETURN insn. */
825 while (GET_CODE (temp) == USE)
826 temp = PREV_INSN (temp);
828 emit_label_after (end_of_function_label, temp);
831 else if (GET_CODE (insn) == CODE_LABEL)
832 end_of_function_label = insn;
835 /* Otherwise, make a new label and emit a RETURN and BARRIER,
837 end_of_function_label = gen_label_rtx ();
838 LABEL_NUSES (end_of_function_label) = 0;
839 emit_label (end_of_function_label);
843 /* The return we make may have delay slots too. */
844 rtx insn = gen_return ();
845 insn = emit_jump_insn (insn);
847 if (num_delay_slots (insn) > 0)
848 obstack_ptr_grow (&unfilled_slots_obstack, insn);
853 /* Show one additional use for this label so it won't go away until
855 ++LABEL_NUSES (end_of_function_label);
857 return end_of_function_label;
860 /* Put INSN and LIST together in a SEQUENCE rtx of LENGTH, and replace
861 the pattern of INSN with the SEQUENCE.
863 Chain the insns so that NEXT_INSN of each insn in the sequence points to
864 the next and NEXT_INSN of the last insn in the sequence points to
865 the first insn after the sequence. Similarly for PREV_INSN. This makes
866 it easier to scan all insns.
868 Returns the SEQUENCE that replaces INSN. */
871 emit_delay_sequence (insn, list, length)
880 /* Allocate the rtvec to hold the insns and the SEQUENCE. */
881 rtvec seqv = rtvec_alloc (length + 1);
882 rtx seq = gen_rtx_SEQUENCE (VOIDmode, seqv);
883 rtx seq_insn = make_insn_raw (seq);
884 rtx first = get_insns ();
885 rtx last = get_last_insn ();
887 /* Make a copy of the insn having delay slots. */
888 rtx delay_insn = copy_rtx (insn);
890 /* If INSN is followed by a BARRIER, delete the BARRIER since it will only
891 confuse further processing. Update LAST in case it was the last insn.
892 We will put the BARRIER back in later. */
893 if (NEXT_INSN (insn) && GET_CODE (NEXT_INSN (insn)) == BARRIER)
895 delete_insn (NEXT_INSN (insn));
896 last = get_last_insn ();
900 /* Splice our SEQUENCE into the insn stream where INSN used to be. */
901 NEXT_INSN (seq_insn) = NEXT_INSN (insn);
902 PREV_INSN (seq_insn) = PREV_INSN (insn);
905 PREV_INSN (NEXT_INSN (seq_insn)) = seq_insn;
908 NEXT_INSN (PREV_INSN (seq_insn)) = seq_insn;
910 /* Note the calls to set_new_first_and_last_insn must occur after
911 SEQ_INSN has been completely spliced into the insn stream.
913 Otherwise CUR_INSN_UID will get set to an incorrect value because
914 set_new_first_and_last_insn will not find SEQ_INSN in the chain. */
916 set_new_first_and_last_insn (first, seq_insn);
919 set_new_first_and_last_insn (seq_insn, last);
921 /* Build our SEQUENCE and rebuild the insn chain. */
922 XVECEXP (seq, 0, 0) = delay_insn;
923 INSN_DELETED_P (delay_insn) = 0;
924 PREV_INSN (delay_insn) = PREV_INSN (seq_insn);
926 for (li = list; li; li = XEXP (li, 1), i++)
928 rtx tem = XEXP (li, 0);
931 /* Show that this copy of the insn isn't deleted. */
932 INSN_DELETED_P (tem) = 0;
934 XVECEXP (seq, 0, i) = tem;
935 PREV_INSN (tem) = XVECEXP (seq, 0, i - 1);
936 NEXT_INSN (XVECEXP (seq, 0, i - 1)) = tem;
938 /* Remove any REG_DEAD notes because we can't rely on them now
939 that the insn has been moved. */
940 for (note = REG_NOTES (tem); note; note = XEXP (note, 1))
941 if (REG_NOTE_KIND (note) == REG_DEAD)
942 XEXP (note, 0) = const0_rtx;
945 NEXT_INSN (XVECEXP (seq, 0, length)) = NEXT_INSN (seq_insn);
947 /* If the previous insn is a SEQUENCE, update the NEXT_INSN pointer on the
948 last insn in that SEQUENCE to point to us. Similarly for the first
949 insn in the following insn if it is a SEQUENCE. */
951 if (PREV_INSN (seq_insn) && GET_CODE (PREV_INSN (seq_insn)) == INSN
952 && GET_CODE (PATTERN (PREV_INSN (seq_insn))) == SEQUENCE)
953 NEXT_INSN (XVECEXP (PATTERN (PREV_INSN (seq_insn)), 0,
954 XVECLEN (PATTERN (PREV_INSN (seq_insn)), 0) - 1))
957 if (NEXT_INSN (seq_insn) && GET_CODE (NEXT_INSN (seq_insn)) == INSN
958 && GET_CODE (PATTERN (NEXT_INSN (seq_insn))) == SEQUENCE)
959 PREV_INSN (XVECEXP (PATTERN (NEXT_INSN (seq_insn)), 0, 0)) = seq_insn;
961 /* If there used to be a BARRIER, put it back. */
963 emit_barrier_after (seq_insn);
971 /* Add INSN to DELAY_LIST and return the head of the new list. The list must
972 be in the order in which the insns are to be executed. */
975 add_to_delay_list (insn, delay_list)
979 /* If we have an empty list, just make a new list element. If
980 INSN has its block number recorded, clear it since we may
981 be moving the insn to a new block. */
985 struct target_info *tinfo;
987 for (tinfo = target_hash_table[INSN_UID (insn) % TARGET_HASH_PRIME];
988 tinfo; tinfo = tinfo->next)
989 if (tinfo->uid == INSN_UID (insn))
995 return gen_rtx_INSN_LIST (VOIDmode, insn, NULL_RTX);
998 /* Otherwise this must be an INSN_LIST. Add INSN to the end of the
1000 XEXP (delay_list, 1) = add_to_delay_list (insn, XEXP (delay_list, 1));
1005 /* Delete INSN from the delay slot of the insn that it is in. This may
1006 produce an insn without anything in its delay slots. */
1009 delete_from_delay_slot (insn)
1012 rtx trial, seq_insn, seq, prev;
1016 /* We first must find the insn containing the SEQUENCE with INSN in its
1017 delay slot. Do this by finding an insn, TRIAL, where
1018 PREV_INSN (NEXT_INSN (TRIAL)) != TRIAL. */
1021 PREV_INSN (NEXT_INSN (trial)) == trial;
1022 trial = NEXT_INSN (trial))
1025 seq_insn = PREV_INSN (NEXT_INSN (trial));
1026 seq = PATTERN (seq_insn);
1028 /* Create a delay list consisting of all the insns other than the one
1029 we are deleting (unless we were the only one). */
1030 if (XVECLEN (seq, 0) > 2)
1031 for (i = 1; i < XVECLEN (seq, 0); i++)
1032 if (XVECEXP (seq, 0, i) != insn)
1033 delay_list = add_to_delay_list (XVECEXP (seq, 0, i), delay_list);
1035 /* Delete the old SEQUENCE, re-emit the insn that used to have the delay
1036 list, and rebuild the delay list if non-empty. */
1037 prev = PREV_INSN (seq_insn);
1038 trial = XVECEXP (seq, 0, 0);
1039 delete_insn (seq_insn);
1040 add_insn_after (trial, prev);
1042 if (GET_CODE (trial) == JUMP_INSN
1043 && (simplejump_p (trial) || GET_CODE (PATTERN (trial)) == RETURN))
1044 emit_barrier_after (trial);
1046 /* If there are any delay insns, remit them. Otherwise clear the
1049 trial = emit_delay_sequence (trial, delay_list, XVECLEN (seq, 0) - 2);
1051 INSN_ANNULLED_BRANCH_P (trial) = 0;
1053 INSN_FROM_TARGET_P (insn) = 0;
1055 /* Show we need to fill this insn again. */
1056 obstack_ptr_grow (&unfilled_slots_obstack, trial);
1061 /* Delete INSN, a JUMP_INSN. If it is a conditional jump, we must track down
1062 the insn that sets CC0 for it and delete it too. */
1065 delete_scheduled_jump (insn)
1068 /* Delete the insn that sets cc0 for us. On machines without cc0, we could
1069 delete the insn that sets the condition code, but it is hard to find it.
1070 Since this case is rare anyway, don't bother trying; there would likely
1071 be other insns that became dead anyway, which we wouldn't know to
1075 if (reg_mentioned_p (cc0_rtx, insn))
1077 rtx note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
1079 /* If a reg-note was found, it points to an insn to set CC0. This
1080 insn is in the delay list of some other insn. So delete it from
1081 the delay list it was in. */
1084 if (! FIND_REG_INC_NOTE (XEXP (note, 0), NULL_RTX)
1085 && sets_cc0_p (PATTERN (XEXP (note, 0))) == 1)
1086 delete_from_delay_slot (XEXP (note, 0));
1090 /* The insn setting CC0 is our previous insn, but it may be in
1091 a delay slot. It will be the last insn in the delay slot, if
1093 rtx trial = previous_insn (insn);
1094 if (GET_CODE (trial) == NOTE)
1095 trial = prev_nonnote_insn (trial);
1096 if (sets_cc0_p (PATTERN (trial)) != 1
1097 || FIND_REG_INC_NOTE (trial, 0))
1099 if (PREV_INSN (NEXT_INSN (trial)) == trial)
1100 delete_insn (trial);
1102 delete_from_delay_slot (trial);
1110 /* Counters for delay-slot filling. */
1112 #define NUM_REORG_FUNCTIONS 2
1113 #define MAX_DELAY_HISTOGRAM 3
1114 #define MAX_REORG_PASSES 2
1116 static int num_insns_needing_delays[NUM_REORG_FUNCTIONS][MAX_REORG_PASSES];
1118 static int num_filled_delays[NUM_REORG_FUNCTIONS][MAX_DELAY_HISTOGRAM+1][MAX_REORG_PASSES];
1120 static int reorg_pass_number;
1123 note_delay_statistics (slots_filled, index)
1124 int slots_filled, index;
1126 num_insns_needing_delays[index][reorg_pass_number]++;
1127 if (slots_filled > MAX_DELAY_HISTOGRAM)
1128 slots_filled = MAX_DELAY_HISTOGRAM;
1129 num_filled_delays[index][slots_filled][reorg_pass_number]++;
1132 #if defined(ANNUL_IFFALSE_SLOTS) || defined(ANNUL_IFTRUE_SLOTS)
1134 /* Optimize the following cases:
1136 1. When a conditional branch skips over only one instruction,
1137 use an annulling branch and put that insn in the delay slot.
1138 Use either a branch that annuls when the condition if true or
1139 invert the test with a branch that annuls when the condition is
1140 false. This saves insns, since otherwise we must copy an insn
1143 (orig) (skip) (otherwise)
1144 Bcc.n L1 Bcc',a L1 Bcc,a L1'
1151 2. When a conditional branch skips over only one instruction,
1152 and after that, it unconditionally branches somewhere else,
1153 perform the similar optimization. This saves executing the
1154 second branch in the case where the inverted condition is true.
1161 INSN is a JUMP_INSN.
1163 This should be expanded to skip over N insns, where N is the number
1164 of delay slots required. */
1167 optimize_skip (insn)
1170 register rtx trial = next_nonnote_insn (insn);
1171 rtx next_trial = next_active_insn (trial);
1176 flags = get_jump_flags (insn, JUMP_LABEL (insn));
1179 || GET_CODE (trial) != INSN
1180 || GET_CODE (PATTERN (trial)) == SEQUENCE
1181 || recog_memoized (trial) < 0
1182 || (! eligible_for_annul_false (insn, 0, trial, flags)
1183 && ! eligible_for_annul_true (insn, 0, trial, flags)))
1186 /* There are two cases where we are just executing one insn (we assume
1187 here that a branch requires only one insn; this should be generalized
1188 at some point): Where the branch goes around a single insn or where
1189 we have one insn followed by a branch to the same label we branch to.
1190 In both of these cases, inverting the jump and annulling the delay
1191 slot give the same effect in fewer insns. */
1192 if ((next_trial == next_active_insn (JUMP_LABEL (insn)))
1194 && GET_CODE (next_trial) == JUMP_INSN
1195 && JUMP_LABEL (insn) == JUMP_LABEL (next_trial)
1196 && (simplejump_p (next_trial)
1197 || GET_CODE (PATTERN (next_trial)) == RETURN)))
1199 if (eligible_for_annul_false (insn, 0, trial, flags))
1201 if (invert_jump (insn, JUMP_LABEL (insn)))
1202 INSN_FROM_TARGET_P (trial) = 1;
1203 else if (! eligible_for_annul_true (insn, 0, trial, flags))
1207 delay_list = add_to_delay_list (trial, NULL_RTX);
1208 next_trial = next_active_insn (trial);
1209 update_block (trial, trial);
1210 delete_insn (trial);
1212 /* Also, if we are targeting an unconditional
1213 branch, thread our jump to the target of that branch. Don't
1214 change this into a RETURN here, because it may not accept what
1215 we have in the delay slot. We'll fix this up later. */
1216 if (next_trial && GET_CODE (next_trial) == JUMP_INSN
1217 && (simplejump_p (next_trial)
1218 || GET_CODE (PATTERN (next_trial)) == RETURN))
1220 target_label = JUMP_LABEL (next_trial);
1221 if (target_label == 0)
1222 target_label = find_end_label ();
1224 /* Recompute the flags based on TARGET_LABEL since threading
1225 the jump to TARGET_LABEL may change the direction of the
1226 jump (which may change the circumstances in which the
1227 delay slot is nullified). */
1228 flags = get_jump_flags (insn, target_label);
1229 if (eligible_for_annul_true (insn, 0, trial, flags))
1230 reorg_redirect_jump (insn, target_label);
1233 INSN_ANNULLED_BRANCH_P (insn) = 1;
1241 /* Encode and return branch direction and prediction information for
1242 INSN assuming it will jump to LABEL.
1244 Non conditional branches return no direction information and
1245 are predicted as very likely taken. */
1248 get_jump_flags (insn, label)
1253 /* get_jump_flags can be passed any insn with delay slots, these may
1254 be INSNs, CALL_INSNs, or JUMP_INSNs. Only JUMP_INSNs have branch
1255 direction information, and only if they are conditional jumps.
1257 If LABEL is zero, then there is no way to determine the branch
1259 if (GET_CODE (insn) == JUMP_INSN
1260 && (condjump_p (insn) || condjump_in_parallel_p (insn))
1261 && INSN_UID (insn) <= max_uid
1263 && INSN_UID (label) <= max_uid)
1265 = (uid_to_ruid[INSN_UID (label)] > uid_to_ruid[INSN_UID (insn)])
1266 ? ATTR_FLAG_forward : ATTR_FLAG_backward;
1267 /* No valid direction information. */
1271 /* If insn is a conditional branch call mostly_true_jump to get
1272 determine the branch prediction.
1274 Non conditional branches are predicted as very likely taken. */
1275 if (GET_CODE (insn) == JUMP_INSN
1276 && (condjump_p (insn) || condjump_in_parallel_p (insn)))
1280 prediction = mostly_true_jump (insn, get_branch_condition (insn, label));
1284 flags |= (ATTR_FLAG_very_likely | ATTR_FLAG_likely);
1287 flags |= ATTR_FLAG_likely;
1290 flags |= ATTR_FLAG_unlikely;
1293 flags |= (ATTR_FLAG_very_unlikely | ATTR_FLAG_unlikely);
1301 flags |= (ATTR_FLAG_very_likely | ATTR_FLAG_likely);
1306 /* Return 1 if INSN is a destination that will be branched to rarely (the
1307 return point of a function); return 2 if DEST will be branched to very
1308 rarely (a call to a function that doesn't return). Otherwise,
1312 rare_destination (insn)
1318 for (; insn; insn = next)
1320 if (GET_CODE (insn) == INSN && GET_CODE (PATTERN (insn)) == SEQUENCE)
1321 insn = XVECEXP (PATTERN (insn), 0, 0);
1323 next = NEXT_INSN (insn);
1325 switch (GET_CODE (insn))
1330 /* A BARRIER can either be after a JUMP_INSN or a CALL_INSN. We
1331 don't scan past JUMP_INSNs, so any barrier we find here must
1332 have been after a CALL_INSN and hence mean the call doesn't
1336 if (GET_CODE (PATTERN (insn)) == RETURN)
1338 else if (simplejump_p (insn)
1339 && jump_count++ < 10)
1340 next = JUMP_LABEL (insn);
1349 /* If we got here it means we hit the end of the function. So this
1350 is an unlikely destination. */
1355 /* Return truth value of the statement that this branch
1356 is mostly taken. If we think that the branch is extremely likely
1357 to be taken, we return 2. If the branch is slightly more likely to be
1358 taken, return 1. If the branch is slightly less likely to be taken,
1359 return 0 and if the branch is highly unlikely to be taken, return -1.
1361 CONDITION, if non-zero, is the condition that JUMP_INSN is testing. */
1364 mostly_true_jump (jump_insn, condition)
1365 rtx jump_insn, condition;
1367 rtx target_label = JUMP_LABEL (jump_insn);
1369 int rare_dest = rare_destination (target_label);
1370 int rare_fallthrough = rare_destination (NEXT_INSN (jump_insn));
1372 /* If branch probabilities are available, then use that number since it
1373 always gives a correct answer. */
1374 if (flag_branch_probabilities)
1376 rtx note = find_reg_note (jump_insn, REG_BR_PROB, 0);;
1379 int prob = XINT (note, 0);
1381 if (prob >= REG_BR_PROB_BASE * 9 / 10)
1383 else if (prob >= REG_BR_PROB_BASE / 2)
1385 else if (prob >= REG_BR_PROB_BASE / 10)
1392 /* If this is a branch outside a loop, it is highly unlikely. */
1393 if (GET_CODE (PATTERN (jump_insn)) == SET
1394 && GET_CODE (SET_SRC (PATTERN (jump_insn))) == IF_THEN_ELSE
1395 && ((GET_CODE (XEXP (SET_SRC (PATTERN (jump_insn)), 1)) == LABEL_REF
1396 && LABEL_OUTSIDE_LOOP_P (XEXP (SET_SRC (PATTERN (jump_insn)), 1)))
1397 || (GET_CODE (XEXP (SET_SRC (PATTERN (jump_insn)), 2)) == LABEL_REF
1398 && LABEL_OUTSIDE_LOOP_P (XEXP (SET_SRC (PATTERN (jump_insn)), 2)))))
1403 /* If this is the test of a loop, it is very likely true. We scan
1404 backwards from the target label. If we find a NOTE_INSN_LOOP_BEG
1405 before the next real insn, we assume the branch is to the top of
1407 for (insn = PREV_INSN (target_label);
1408 insn && GET_CODE (insn) == NOTE;
1409 insn = PREV_INSN (insn))
1410 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG)
1413 /* If this is a jump to the test of a loop, it is likely true. We scan
1414 forwards from the target label. If we find a NOTE_INSN_LOOP_VTOP
1415 before the next real insn, we assume the branch is to the loop branch
1417 for (insn = NEXT_INSN (target_label);
1418 insn && GET_CODE (insn) == NOTE;
1419 insn = PREV_INSN (insn))
1420 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_VTOP)
1424 /* Look at the relative rarities of the fallthrough and destination. If
1425 they differ, we can predict the branch that way. */
1427 switch (rare_fallthrough - rare_dest)
1441 /* If we couldn't figure out what this jump was, assume it won't be
1442 taken. This should be rare. */
1446 /* EQ tests are usually false and NE tests are usually true. Also,
1447 most quantities are positive, so we can make the appropriate guesses
1448 about signed comparisons against zero. */
1449 switch (GET_CODE (condition))
1452 /* Unconditional branch. */
1460 if (XEXP (condition, 1) == const0_rtx)
1465 if (XEXP (condition, 1) == const0_rtx)
1473 /* Predict backward branches usually take, forward branches usually not. If
1474 we don't know whether this is forward or backward, assume the branch
1475 will be taken, since most are. */
1476 return (target_label == 0 || INSN_UID (jump_insn) > max_uid
1477 || INSN_UID (target_label) > max_uid
1478 || (uid_to_ruid[INSN_UID (jump_insn)]
1479 > uid_to_ruid[INSN_UID (target_label)]));;
1482 /* Return the condition under which INSN will branch to TARGET. If TARGET
1483 is zero, return the condition under which INSN will return. If INSN is
1484 an unconditional branch, return const_true_rtx. If INSN isn't a simple
1485 type of jump, or it doesn't go to TARGET, return 0. */
1488 get_branch_condition (insn, target)
1492 rtx pat = PATTERN (insn);
1495 if (condjump_in_parallel_p (insn))
1496 pat = XVECEXP (pat, 0, 0);
1498 if (GET_CODE (pat) == RETURN)
1499 return target == 0 ? const_true_rtx : 0;
1501 else if (GET_CODE (pat) != SET || SET_DEST (pat) != pc_rtx)
1504 src = SET_SRC (pat);
1505 if (GET_CODE (src) == LABEL_REF && XEXP (src, 0) == target)
1506 return const_true_rtx;
1508 else if (GET_CODE (src) == IF_THEN_ELSE
1509 && ((target == 0 && GET_CODE (XEXP (src, 1)) == RETURN)
1510 || (GET_CODE (XEXP (src, 1)) == LABEL_REF
1511 && XEXP (XEXP (src, 1), 0) == target))
1512 && XEXP (src, 2) == pc_rtx)
1513 return XEXP (src, 0);
1515 else if (GET_CODE (src) == IF_THEN_ELSE
1516 && ((target == 0 && GET_CODE (XEXP (src, 2)) == RETURN)
1517 || (GET_CODE (XEXP (src, 2)) == LABEL_REF
1518 && XEXP (XEXP (src, 2), 0) == target))
1519 && XEXP (src, 1) == pc_rtx)
1520 return gen_rtx_fmt_ee (reverse_condition (GET_CODE (XEXP (src, 0))),
1521 GET_MODE (XEXP (src, 0)),
1522 XEXP (XEXP (src, 0), 0), XEXP (XEXP (src, 0), 1));
1527 /* Return non-zero if CONDITION is more strict than the condition of
1528 INSN, i.e., if INSN will always branch if CONDITION is true. */
1531 condition_dominates_p (condition, insn)
1535 rtx other_condition = get_branch_condition (insn, JUMP_LABEL (insn));
1536 enum rtx_code code = GET_CODE (condition);
1537 enum rtx_code other_code;
1539 if (rtx_equal_p (condition, other_condition)
1540 || other_condition == const_true_rtx)
1543 else if (condition == const_true_rtx || other_condition == 0)
1546 other_code = GET_CODE (other_condition);
1547 if (GET_RTX_LENGTH (code) != 2 || GET_RTX_LENGTH (other_code) != 2
1548 || ! rtx_equal_p (XEXP (condition, 0), XEXP (other_condition, 0))
1549 || ! rtx_equal_p (XEXP (condition, 1), XEXP (other_condition, 1)))
1552 return comparison_dominates_p (code, other_code);
1555 /* Return non-zero if redirecting JUMP to NEWLABEL does not invalidate
1556 any insns already in the delay slot of JUMP. */
1559 redirect_with_delay_slots_safe_p (jump, newlabel, seq)
1560 rtx jump, newlabel, seq;
1563 rtx pat = PATTERN (seq);
1565 /* Make sure all the delay slots of this jump would still
1566 be valid after threading the jump. If they are still
1567 valid, then return non-zero. */
1569 flags = get_jump_flags (jump, newlabel);
1570 for (i = 1; i < XVECLEN (pat, 0); i++)
1572 #ifdef ANNUL_IFFALSE_SLOTS
1573 (INSN_ANNULLED_BRANCH_P (jump)
1574 && INSN_FROM_TARGET_P (XVECEXP (pat, 0, i)))
1575 ? eligible_for_annul_false (jump, i - 1,
1576 XVECEXP (pat, 0, i), flags) :
1578 #ifdef ANNUL_IFTRUE_SLOTS
1579 (INSN_ANNULLED_BRANCH_P (jump)
1580 && ! INSN_FROM_TARGET_P (XVECEXP (pat, 0, i)))
1581 ? eligible_for_annul_true (jump, i - 1,
1582 XVECEXP (pat, 0, i), flags) :
1584 eligible_for_delay (jump, i -1, XVECEXP (pat, 0, i), flags)))
1587 return (i == XVECLEN (pat, 0));
1590 /* Return non-zero if redirecting JUMP to NEWLABEL does not invalidate
1591 any insns we wish to place in the delay slot of JUMP. */
1594 redirect_with_delay_list_safe_p (jump, newlabel, delay_list)
1595 rtx jump, newlabel, delay_list;
1600 /* Make sure all the insns in DELAY_LIST would still be
1601 valid after threading the jump. If they are still
1602 valid, then return non-zero. */
1604 flags = get_jump_flags (jump, newlabel);
1605 for (li = delay_list, i = 0; li; li = XEXP (li, 1), i++)
1607 #ifdef ANNUL_IFFALSE_SLOTS
1608 (INSN_ANNULLED_BRANCH_P (jump)
1609 && INSN_FROM_TARGET_P (XEXP (li, 0)))
1610 ? eligible_for_annul_false (jump, i, XEXP (li, 0), flags) :
1612 #ifdef ANNUL_IFTRUE_SLOTS
1613 (INSN_ANNULLED_BRANCH_P (jump)
1614 && ! INSN_FROM_TARGET_P (XEXP (li, 0)))
1615 ? eligible_for_annul_true (jump, i, XEXP (li, 0), flags) :
1617 eligible_for_delay (jump, i, XEXP (li, 0), flags)))
1620 return (li == NULL);
1623 /* DELAY_LIST is a list of insns that have already been placed into delay
1624 slots. See if all of them have the same annulling status as ANNUL_TRUE_P.
1625 If not, return 0; otherwise return 1. */
1628 check_annul_list_true_false (annul_true_p, delay_list)
1636 for (temp = delay_list; temp; temp = XEXP (temp, 1))
1638 rtx trial = XEXP (temp, 0);
1640 if ((annul_true_p && INSN_FROM_TARGET_P (trial))
1641 || (!annul_true_p && !INSN_FROM_TARGET_P (trial)))
1649 /* INSN branches to an insn whose pattern SEQ is a SEQUENCE. Given that
1650 the condition tested by INSN is CONDITION and the resources shown in
1651 OTHER_NEEDED are needed after INSN, see whether INSN can take all the insns
1652 from SEQ's delay list, in addition to whatever insns it may execute
1653 (in DELAY_LIST). SETS and NEEDED are denote resources already set and
1654 needed while searching for delay slot insns. Return the concatenated
1655 delay list if possible, otherwise, return 0.
1657 SLOTS_TO_FILL is the total number of slots required by INSN, and
1658 PSLOTS_FILLED points to the number filled so far (also the number of
1659 insns in DELAY_LIST). It is updated with the number that have been
1660 filled from the SEQUENCE, if any.
1662 PANNUL_P points to a non-zero value if we already know that we need
1663 to annul INSN. If this routine determines that annulling is needed,
1664 it may set that value non-zero.
1666 PNEW_THREAD points to a location that is to receive the place at which
1667 execution should continue. */
1670 steal_delay_list_from_target (insn, condition, seq, delay_list,
1671 sets, needed, other_needed,
1672 slots_to_fill, pslots_filled, pannul_p,
1674 rtx insn, condition;
1677 struct resources *sets, *needed, *other_needed;
1684 int slots_remaining = slots_to_fill - *pslots_filled;
1685 int total_slots_filled = *pslots_filled;
1686 rtx new_delay_list = 0;
1687 int must_annul = *pannul_p;
1691 /* We can't do anything if there are more delay slots in SEQ than we
1692 can handle, or if we don't know that it will be a taken branch.
1693 We know that it will be a taken branch if it is either an unconditional
1694 branch or a conditional branch with a stricter branch condition.
1696 Also, exit if the branch has more than one set, since then it is computing
1697 other results that can't be ignored, e.g. the HPPA mov&branch instruction.
1698 ??? It may be possible to move other sets into INSN in addition to
1699 moving the instructions in the delay slots. */
1701 if (XVECLEN (seq, 0) - 1 > slots_remaining
1702 || ! condition_dominates_p (condition, XVECEXP (seq, 0, 0))
1703 || ! single_set (XVECEXP (seq, 0, 0)))
1706 for (i = 1; i < XVECLEN (seq, 0); i++)
1708 rtx trial = XVECEXP (seq, 0, i);
1711 if (insn_references_resource_p (trial, sets, 0)
1712 || insn_sets_resource_p (trial, needed, 0)
1713 || insn_sets_resource_p (trial, sets, 0)
1715 /* If TRIAL sets CC0, we can't copy it, so we can't steal this
1717 || find_reg_note (trial, REG_CC_USER, NULL_RTX)
1719 /* If TRIAL is from the fallthrough code of an annulled branch insn
1720 in SEQ, we cannot use it. */
1721 || (INSN_ANNULLED_BRANCH_P (XVECEXP (seq, 0, 0))
1722 && ! INSN_FROM_TARGET_P (trial)))
1725 /* If this insn was already done (usually in a previous delay slot),
1726 pretend we put it in our delay slot. */
1727 if (redundant_insn (trial, insn, new_delay_list))
1730 /* We will end up re-vectoring this branch, so compute flags
1731 based on jumping to the new label. */
1732 flags = get_jump_flags (insn, JUMP_LABEL (XVECEXP (seq, 0, 0)));
1735 && ((condition == const_true_rtx
1736 || (! insn_sets_resource_p (trial, other_needed, 0)
1737 && ! may_trap_p (PATTERN (trial)))))
1738 ? eligible_for_delay (insn, total_slots_filled, trial, flags)
1739 : (must_annul || (delay_list == NULL && new_delay_list == NULL))
1741 check_annul_list_true_false (0, delay_list)
1742 && check_annul_list_true_false (0, new_delay_list)
1743 && eligible_for_annul_false (insn, total_slots_filled,
1748 temp = copy_rtx (trial);
1749 INSN_FROM_TARGET_P (temp) = 1;
1750 new_delay_list = add_to_delay_list (temp, new_delay_list);
1751 total_slots_filled++;
1753 if (--slots_remaining == 0)
1760 /* Show the place to which we will be branching. */
1761 *pnew_thread = next_active_insn (JUMP_LABEL (XVECEXP (seq, 0, 0)));
1763 /* Add any new insns to the delay list and update the count of the
1764 number of slots filled. */
1765 *pslots_filled = total_slots_filled;
1769 if (delay_list == 0)
1770 return new_delay_list;
1772 for (temp = new_delay_list; temp; temp = XEXP (temp, 1))
1773 delay_list = add_to_delay_list (XEXP (temp, 0), delay_list);
1778 /* Similar to steal_delay_list_from_target except that SEQ is on the
1779 fallthrough path of INSN. Here we only do something if the delay insn
1780 of SEQ is an unconditional branch. In that case we steal its delay slot
1781 for INSN since unconditional branches are much easier to fill. */
1784 steal_delay_list_from_fallthrough (insn, condition, seq,
1785 delay_list, sets, needed, other_needed,
1786 slots_to_fill, pslots_filled, pannul_p)
1787 rtx insn, condition;
1790 struct resources *sets, *needed, *other_needed;
1797 int must_annul = *pannul_p;
1800 flags = get_jump_flags (insn, JUMP_LABEL (insn));
1802 /* We can't do anything if SEQ's delay insn isn't an
1803 unconditional branch. */
1805 if (! simplejump_p (XVECEXP (seq, 0, 0))
1806 && GET_CODE (PATTERN (XVECEXP (seq, 0, 0))) != RETURN)
1809 for (i = 1; i < XVECLEN (seq, 0); i++)
1811 rtx trial = XVECEXP (seq, 0, i);
1813 /* If TRIAL sets CC0, stealing it will move it too far from the use
1815 if (insn_references_resource_p (trial, sets, 0)
1816 || insn_sets_resource_p (trial, needed, 0)
1817 || insn_sets_resource_p (trial, sets, 0)
1819 || sets_cc0_p (PATTERN (trial))
1825 /* If this insn was already done, we don't need it. */
1826 if (redundant_insn (trial, insn, delay_list))
1828 delete_from_delay_slot (trial);
1833 && ((condition == const_true_rtx
1834 || (! insn_sets_resource_p (trial, other_needed, 0)
1835 && ! may_trap_p (PATTERN (trial)))))
1836 ? eligible_for_delay (insn, *pslots_filled, trial, flags)
1837 : (must_annul || delay_list == NULL) && (must_annul = 1,
1838 check_annul_list_true_false (1, delay_list)
1839 && eligible_for_annul_true (insn, *pslots_filled, trial, flags)))
1843 delete_from_delay_slot (trial);
1844 delay_list = add_to_delay_list (trial, delay_list);
1846 if (++(*pslots_filled) == slots_to_fill)
1859 /* Try merging insns starting at THREAD which match exactly the insns in
1862 If all insns were matched and the insn was previously annulling, the
1863 annul bit will be cleared.
1865 For each insn that is merged, if the branch is or will be non-annulling,
1866 we delete the merged insn. */
1869 try_merge_delay_insns (insn, thread)
1872 rtx trial, next_trial;
1873 rtx delay_insn = XVECEXP (PATTERN (insn), 0, 0);
1874 int annul_p = INSN_ANNULLED_BRANCH_P (delay_insn);
1875 int slot_number = 1;
1876 int num_slots = XVECLEN (PATTERN (insn), 0);
1877 rtx next_to_match = XVECEXP (PATTERN (insn), 0, slot_number);
1878 struct resources set, needed;
1879 rtx merged_insns = 0;
1883 flags = get_jump_flags (delay_insn, JUMP_LABEL (delay_insn));
1885 CLEAR_RESOURCE (&needed);
1886 CLEAR_RESOURCE (&set);
1888 /* If this is not an annulling branch, take into account anything needed in
1889 INSN's delay slot. This prevents two increments from being incorrectly
1890 folded into one. If we are annulling, this would be the correct
1891 thing to do. (The alternative, looking at things set in NEXT_TO_MATCH
1892 will essentially disable this optimization. This method is somewhat of
1893 a kludge, but I don't see a better way.) */
1895 for (i = 1 ; i < num_slots ; i++)
1896 if (XVECEXP (PATTERN (insn), 0, i))
1897 mark_referenced_resources (XVECEXP (PATTERN (insn), 0, i), &needed, 1);
1899 for (trial = thread; !stop_search_p (trial, 1); trial = next_trial)
1901 rtx pat = PATTERN (trial);
1902 rtx oldtrial = trial;
1904 next_trial = next_nonnote_insn (trial);
1906 /* TRIAL must be a CALL_INSN or INSN. Skip USE and CLOBBER. */
1907 if (GET_CODE (trial) == INSN
1908 && (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER))
1911 if (GET_CODE (next_to_match) == GET_CODE (trial)
1913 /* We can't share an insn that sets cc0. */
1914 && ! sets_cc0_p (pat)
1916 && ! insn_references_resource_p (trial, &set, 1)
1917 && ! insn_sets_resource_p (trial, &set, 1)
1918 && ! insn_sets_resource_p (trial, &needed, 1)
1919 && (trial = try_split (pat, trial, 0)) != 0
1920 /* Update next_trial, in case try_split succeeded. */
1921 && (next_trial = next_nonnote_insn (trial))
1922 /* Likewise THREAD. */
1923 && (thread = oldtrial == thread ? trial : thread)
1924 && rtx_equal_p (PATTERN (next_to_match), PATTERN (trial))
1925 /* Have to test this condition if annul condition is different
1926 from (and less restrictive than) non-annulling one. */
1927 && eligible_for_delay (delay_insn, slot_number - 1, trial, flags))
1932 update_block (trial, thread);
1933 if (trial == thread)
1934 thread = next_active_insn (thread);
1936 delete_insn (trial);
1937 INSN_FROM_TARGET_P (next_to_match) = 0;
1940 merged_insns = gen_rtx_INSN_LIST (VOIDmode, trial, merged_insns);
1942 if (++slot_number == num_slots)
1945 next_to_match = XVECEXP (PATTERN (insn), 0, slot_number);
1948 mark_set_resources (trial, &set, 0, 1);
1949 mark_referenced_resources (trial, &needed, 1);
1952 /* See if we stopped on a filled insn. If we did, try to see if its
1953 delay slots match. */
1954 if (slot_number != num_slots
1955 && trial && GET_CODE (trial) == INSN
1956 && GET_CODE (PATTERN (trial)) == SEQUENCE
1957 && ! INSN_ANNULLED_BRANCH_P (XVECEXP (PATTERN (trial), 0, 0)))
1959 rtx pat = PATTERN (trial);
1960 rtx filled_insn = XVECEXP (pat, 0, 0);
1962 /* Account for resources set/needed by the filled insn. */
1963 mark_set_resources (filled_insn, &set, 0, 1);
1964 mark_referenced_resources (filled_insn, &needed, 1);
1966 for (i = 1; i < XVECLEN (pat, 0); i++)
1968 rtx dtrial = XVECEXP (pat, 0, i);
1970 if (! insn_references_resource_p (dtrial, &set, 1)
1971 && ! insn_sets_resource_p (dtrial, &set, 1)
1972 && ! insn_sets_resource_p (dtrial, &needed, 1)
1974 && ! sets_cc0_p (PATTERN (dtrial))
1976 && rtx_equal_p (PATTERN (next_to_match), PATTERN (dtrial))
1977 && eligible_for_delay (delay_insn, slot_number - 1, dtrial, flags))
1983 update_block (dtrial, thread);
1984 new = delete_from_delay_slot (dtrial);
1985 if (INSN_DELETED_P (thread))
1987 INSN_FROM_TARGET_P (next_to_match) = 0;
1990 merged_insns = gen_rtx_INSN_LIST (SImode, dtrial,
1993 if (++slot_number == num_slots)
1996 next_to_match = XVECEXP (PATTERN (insn), 0, slot_number);
2000 /* Keep track of the set/referenced resources for the delay
2001 slots of any trial insns we encounter. */
2002 mark_set_resources (dtrial, &set, 0, 1);
2003 mark_referenced_resources (dtrial, &needed, 1);
2008 /* If all insns in the delay slot have been matched and we were previously
2009 annulling the branch, we need not any more. In that case delete all the
2010 merged insns. Also clear the INSN_FROM_TARGET_P bit of each insn in
2011 the delay list so that we know that it isn't only being used at the
2013 if (slot_number == num_slots && annul_p)
2015 for (; merged_insns; merged_insns = XEXP (merged_insns, 1))
2017 if (GET_MODE (merged_insns) == SImode)
2021 update_block (XEXP (merged_insns, 0), thread);
2022 new = delete_from_delay_slot (XEXP (merged_insns, 0));
2023 if (INSN_DELETED_P (thread))
2028 update_block (XEXP (merged_insns, 0), thread);
2029 delete_insn (XEXP (merged_insns, 0));
2033 INSN_ANNULLED_BRANCH_P (delay_insn) = 0;
2035 for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
2036 INSN_FROM_TARGET_P (XVECEXP (PATTERN (insn), 0, i)) = 0;
2040 /* See if INSN is redundant with an insn in front of TARGET. Often this
2041 is called when INSN is a candidate for a delay slot of TARGET.
2042 DELAY_LIST are insns that will be placed in delay slots of TARGET in front
2043 of INSN. Often INSN will be redundant with an insn in a delay slot of
2044 some previous insn. This happens when we have a series of branches to the
2045 same label; in that case the first insn at the target might want to go
2046 into each of the delay slots.
2048 If we are not careful, this routine can take up a significant fraction
2049 of the total compilation time (4%), but only wins rarely. Hence we
2050 speed this routine up by making two passes. The first pass goes back
2051 until it hits a label and sees if it find an insn with an identical
2052 pattern. Only in this (relatively rare) event does it check for
2055 We do not split insns we encounter. This could cause us not to find a
2056 redundant insn, but the cost of splitting seems greater than the possible
2057 gain in rare cases. */
2060 redundant_insn (insn, target, delay_list)
2065 rtx target_main = target;
2066 rtx ipat = PATTERN (insn);
2068 struct resources needed, set;
2071 /* If INSN has any REG_UNUSED notes, it can't match anything since we
2072 are allowed to not actually assign to such a register. */
2073 if (find_reg_note (insn, REG_UNUSED, NULL_RTX) != 0)
2076 /* Scan backwards looking for a match. */
2077 for (trial = PREV_INSN (target); trial; trial = PREV_INSN (trial))
2079 if (GET_CODE (trial) == CODE_LABEL)
2082 if (GET_RTX_CLASS (GET_CODE (trial)) != 'i')
2085 pat = PATTERN (trial);
2086 if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
2089 if (GET_CODE (pat) == SEQUENCE)
2091 /* Stop for a CALL and its delay slots because it is difficult to
2092 track its resource needs correctly. */
2093 if (GET_CODE (XVECEXP (pat, 0, 0)) == CALL_INSN)
2096 /* Stop for an INSN or JUMP_INSN with delayed effects and its delay
2097 slots because it is difficult to track its resource needs
2100 #ifdef INSN_SETS_ARE_DELAYED
2101 if (INSN_SETS_ARE_DELAYED (XVECEXP (pat, 0, 0)))
2105 #ifdef INSN_REFERENCES_ARE_DELAYED
2106 if (INSN_REFERENCES_ARE_DELAYED (XVECEXP (pat, 0, 0)))
2110 /* See if any of the insns in the delay slot match, updating
2111 resource requirements as we go. */
2112 for (i = XVECLEN (pat, 0) - 1; i > 0; i--)
2113 if (GET_CODE (XVECEXP (pat, 0, i)) == GET_CODE (insn)
2114 && rtx_equal_p (PATTERN (XVECEXP (pat, 0, i)), ipat)
2115 && ! find_reg_note (XVECEXP (pat, 0, i), REG_UNUSED, NULL_RTX))
2118 /* If found a match, exit this loop early. */
2123 else if (GET_CODE (trial) == GET_CODE (insn) && rtx_equal_p (pat, ipat)
2124 && ! find_reg_note (trial, REG_UNUSED, NULL_RTX))
2128 /* If we didn't find an insn that matches, return 0. */
2132 /* See what resources this insn sets and needs. If they overlap, or
2133 if this insn references CC0, it can't be redundant. */
2135 CLEAR_RESOURCE (&needed);
2136 CLEAR_RESOURCE (&set);
2137 mark_set_resources (insn, &set, 0, 1);
2138 mark_referenced_resources (insn, &needed, 1);
2140 /* If TARGET is a SEQUENCE, get the main insn. */
2141 if (GET_CODE (target) == INSN && GET_CODE (PATTERN (target)) == SEQUENCE)
2142 target_main = XVECEXP (PATTERN (target), 0, 0);
2144 if (resource_conflicts_p (&needed, &set)
2146 || reg_mentioned_p (cc0_rtx, ipat)
2148 /* The insn requiring the delay may not set anything needed or set by
2150 || insn_sets_resource_p (target_main, &needed, 1)
2151 || insn_sets_resource_p (target_main, &set, 1))
2154 /* Insns we pass may not set either NEEDED or SET, so merge them for
2156 needed.memory |= set.memory;
2157 needed.unch_memory |= set.unch_memory;
2158 IOR_HARD_REG_SET (needed.regs, set.regs);
2160 /* This insn isn't redundant if it conflicts with an insn that either is
2161 or will be in a delay slot of TARGET. */
2165 if (insn_sets_resource_p (XEXP (delay_list, 0), &needed, 1))
2167 delay_list = XEXP (delay_list, 1);
2170 if (GET_CODE (target) == INSN && GET_CODE (PATTERN (target)) == SEQUENCE)
2171 for (i = 1; i < XVECLEN (PATTERN (target), 0); i++)
2172 if (insn_sets_resource_p (XVECEXP (PATTERN (target), 0, i), &needed, 1))
2175 /* Scan backwards until we reach a label or an insn that uses something
2176 INSN sets or sets something insn uses or sets. */
2178 for (trial = PREV_INSN (target);
2179 trial && GET_CODE (trial) != CODE_LABEL;
2180 trial = PREV_INSN (trial))
2182 if (GET_CODE (trial) != INSN && GET_CODE (trial) != CALL_INSN
2183 && GET_CODE (trial) != JUMP_INSN)
2186 pat = PATTERN (trial);
2187 if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
2190 if (GET_CODE (pat) == SEQUENCE)
2192 /* If this is a CALL_INSN and its delay slots, it is hard to track
2193 the resource needs properly, so give up. */
2194 if (GET_CODE (XVECEXP (pat, 0, 0)) == CALL_INSN)
2197 /* If this is an INSN or JUMP_INSN with delayed effects, it
2198 is hard to track the resource needs properly, so give up. */
2200 #ifdef INSN_SETS_ARE_DELAYED
2201 if (INSN_SETS_ARE_DELAYED (XVECEXP (pat, 0, 0)))
2205 #ifdef INSN_REFERENCES_ARE_DELAYED
2206 if (INSN_REFERENCES_ARE_DELAYED (XVECEXP (pat, 0, 0)))
2210 /* See if any of the insns in the delay slot match, updating
2211 resource requirements as we go. */
2212 for (i = XVECLEN (pat, 0) - 1; i > 0; i--)
2214 rtx candidate = XVECEXP (pat, 0, i);
2216 /* If an insn will be annulled if the branch is false, it isn't
2217 considered as a possible duplicate insn. */
2218 if (rtx_equal_p (PATTERN (candidate), ipat)
2219 && ! (INSN_ANNULLED_BRANCH_P (XVECEXP (pat, 0, 0))
2220 && INSN_FROM_TARGET_P (candidate)))
2222 /* Show that this insn will be used in the sequel. */
2223 INSN_FROM_TARGET_P (candidate) = 0;
2227 /* Unless this is an annulled insn from the target of a branch,
2228 we must stop if it sets anything needed or set by INSN. */
2229 if ((! INSN_ANNULLED_BRANCH_P (XVECEXP (pat, 0, 0))
2230 || ! INSN_FROM_TARGET_P (candidate))
2231 && insn_sets_resource_p (candidate, &needed, 1))
2236 /* If the insn requiring the delay slot conflicts with INSN, we
2238 if (insn_sets_resource_p (XVECEXP (pat, 0, 0), &needed, 1))
2243 /* See if TRIAL is the same as INSN. */
2244 pat = PATTERN (trial);
2245 if (rtx_equal_p (pat, ipat))
2248 /* Can't go any further if TRIAL conflicts with INSN. */
2249 if (insn_sets_resource_p (trial, &needed, 1))
2257 /* Return 1 if THREAD can only be executed in one way. If LABEL is non-zero,
2258 it is the target of the branch insn being scanned. If ALLOW_FALLTHROUGH
2259 is non-zero, we are allowed to fall into this thread; otherwise, we are
2262 If LABEL is used more than one or we pass a label other than LABEL before
2263 finding an active insn, we do not own this thread. */
2266 own_thread_p (thread, label, allow_fallthrough)
2269 int allow_fallthrough;
2274 /* We don't own the function end. */
2278 /* Get the first active insn, or THREAD, if it is an active insn. */
2279 active_insn = next_active_insn (PREV_INSN (thread));
2281 for (insn = thread; insn != active_insn; insn = NEXT_INSN (insn))
2282 if (GET_CODE (insn) == CODE_LABEL
2283 && (insn != label || LABEL_NUSES (insn) != 1))
2286 if (allow_fallthrough)
2289 /* Ensure that we reach a BARRIER before any insn or label. */
2290 for (insn = prev_nonnote_insn (thread);
2291 insn == 0 || GET_CODE (insn) != BARRIER;
2292 insn = prev_nonnote_insn (insn))
2294 || GET_CODE (insn) == CODE_LABEL
2295 || (GET_CODE (insn) == INSN
2296 && GET_CODE (PATTERN (insn)) != USE
2297 && GET_CODE (PATTERN (insn)) != CLOBBER))
2303 /* Find the number of the basic block that starts closest to INSN. Return -1
2304 if we couldn't find such a basic block. */
2307 find_basic_block (insn)
2312 /* Scan backwards to the previous BARRIER. Then see if we can find a
2313 label that starts a basic block. Return the basic block number. */
2315 for (insn = prev_nonnote_insn (insn);
2316 insn && GET_CODE (insn) != BARRIER;
2317 insn = prev_nonnote_insn (insn))
2320 /* The start of the function is basic block zero. */
2324 /* See if any of the upcoming CODE_LABELs start a basic block. If we reach
2325 anything other than a CODE_LABEL or note, we can't find this code. */
2326 for (insn = next_nonnote_insn (insn);
2327 insn && GET_CODE (insn) == CODE_LABEL;
2328 insn = next_nonnote_insn (insn))
2330 for (i = 0; i < n_basic_blocks; i++)
2331 if (insn == basic_block_head[i])
2338 /* Called when INSN is being moved from a location near the target of a jump.
2339 We leave a marker of the form (use (INSN)) immediately in front
2340 of WHERE for mark_target_live_regs. These markers will be deleted when
2343 We used to try to update the live status of registers if WHERE is at
2344 the start of a basic block, but that can't work since we may remove a
2345 BARRIER in relax_delay_slots. */
2348 update_block (insn, where)
2354 /* Ignore if this was in a delay slot and it came from the target of
2356 if (INSN_FROM_TARGET_P (insn))
2359 emit_insn_before (gen_rtx_USE (VOIDmode, insn), where);
2361 /* INSN might be making a value live in a block where it didn't use to
2362 be. So recompute liveness information for this block. */
2364 b = find_basic_block (insn);
2369 /* Similar to REDIRECT_JUMP except that we update the BB_TICKS entry for
2370 the basic block containing the jump. */
2373 reorg_redirect_jump (jump, nlabel)
2377 int b = find_basic_block (jump);
2382 return redirect_jump (jump, nlabel);
2385 /* Called when INSN is being moved forward into a delay slot of DELAYED_INSN.
2386 We check every instruction between INSN and DELAYED_INSN for REG_DEAD notes
2387 that reference values used in INSN. If we find one, then we move the
2388 REG_DEAD note to INSN.
2390 This is needed to handle the case where an later insn (after INSN) has a
2391 REG_DEAD note for a register used by INSN, and this later insn subsequently
2392 gets moved before a CODE_LABEL because it is a redundant insn. In this
2393 case, mark_target_live_regs may be confused into thinking the register
2394 is dead because it sees a REG_DEAD note immediately before a CODE_LABEL. */
2397 update_reg_dead_notes (insn, delayed_insn)
2398 rtx insn, delayed_insn;
2402 for (p = next_nonnote_insn (insn); p != delayed_insn;
2403 p = next_nonnote_insn (p))
2404 for (link = REG_NOTES (p); link; link = next)
2406 next = XEXP (link, 1);
2408 if (REG_NOTE_KIND (link) != REG_DEAD
2409 || GET_CODE (XEXP (link, 0)) != REG)
2412 if (reg_referenced_p (XEXP (link, 0), PATTERN (insn)))
2414 /* Move the REG_DEAD note from P to INSN. */
2415 remove_note (p, link);
2416 XEXP (link, 1) = REG_NOTES (insn);
2417 REG_NOTES (insn) = link;
2422 /* Called when an insn redundant with start_insn is deleted. If there
2423 is a REG_DEAD note for the target of start_insn between start_insn
2424 and stop_insn, then the REG_DEAD note needs to be deleted since the
2425 value no longer dies there.
2427 If the REG_DEAD note isn't deleted, then mark_target_live_regs may be
2428 confused into thinking the register is dead. */
2431 fix_reg_dead_note (start_insn, stop_insn)
2432 rtx start_insn, stop_insn;
2436 for (p = next_nonnote_insn (start_insn); p != stop_insn;
2437 p = next_nonnote_insn (p))
2438 for (link = REG_NOTES (p); link; link = next)
2440 next = XEXP (link, 1);
2442 if (REG_NOTE_KIND (link) != REG_DEAD
2443 || GET_CODE (XEXP (link, 0)) != REG)
2446 if (reg_set_p (XEXP (link, 0), PATTERN (start_insn)))
2448 remove_note (p, link);
2454 /* Delete any REG_UNUSED notes that exist on INSN but not on REDUNDANT_INSN.
2456 This handles the case of udivmodXi4 instructions which optimize their
2457 output depending on whether any REG_UNUSED notes are present.
2458 we must make sure that INSN calculates as many results as REDUNDANT_INSN
2462 update_reg_unused_notes (insn, redundant_insn)
2463 rtx insn, redundant_insn;
2467 for (link = REG_NOTES (insn); link; link = next)
2469 next = XEXP (link, 1);
2471 if (REG_NOTE_KIND (link) != REG_UNUSED
2472 || GET_CODE (XEXP (link, 0)) != REG)
2475 if (! find_regno_note (redundant_insn, REG_UNUSED,
2476 REGNO (XEXP (link, 0))))
2477 remove_note (insn, link);
2481 /* Marks registers possibly live at the current place being scanned by
2482 mark_target_live_regs. Used only by next two function. */
2484 static HARD_REG_SET current_live_regs;
2486 /* Marks registers for which we have seen a REG_DEAD note but no assignment.
2487 Also only used by the next two functions. */
2489 static HARD_REG_SET pending_dead_regs;
2491 /* Utility function called from mark_target_live_regs via note_stores.
2492 It deadens any CLOBBERed registers and livens any SET registers. */
2495 update_live_status (dest, x)
2499 int first_regno, last_regno;
2502 if (GET_CODE (dest) != REG
2503 && (GET_CODE (dest) != SUBREG || GET_CODE (SUBREG_REG (dest)) != REG))
2506 if (GET_CODE (dest) == SUBREG)
2507 first_regno = REGNO (SUBREG_REG (dest)) + SUBREG_WORD (dest);
2509 first_regno = REGNO (dest);
2511 last_regno = first_regno + HARD_REGNO_NREGS (first_regno, GET_MODE (dest));
2513 if (GET_CODE (x) == CLOBBER)
2514 for (i = first_regno; i < last_regno; i++)
2515 CLEAR_HARD_REG_BIT (current_live_regs, i);
2517 for (i = first_regno; i < last_regno; i++)
2519 SET_HARD_REG_BIT (current_live_regs, i);
2520 CLEAR_HARD_REG_BIT (pending_dead_regs, i);
2524 /* Similar to next_insn, but ignores insns in the delay slots of
2525 an annulled branch. */
2528 next_insn_no_annul (insn)
2533 /* If INSN is an annulled branch, skip any insns from the target
2535 if (INSN_ANNULLED_BRANCH_P (insn)
2536 && NEXT_INSN (PREV_INSN (insn)) != insn)
2537 while (INSN_FROM_TARGET_P (NEXT_INSN (insn)))
2538 insn = NEXT_INSN (insn);
2540 insn = NEXT_INSN (insn);
2541 if (insn && GET_CODE (insn) == INSN
2542 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2543 insn = XVECEXP (PATTERN (insn), 0, 0);
2549 /* A subroutine of mark_target_live_regs. Search forward from TARGET
2550 looking for registers that are set before they are used. These are dead.
2551 Stop after passing a few conditional jumps, and/or a small
2552 number of unconditional branches. */
2555 find_dead_or_set_registers (target, res, jump_target, jump_count, set, needed)
2557 struct resources *res;
2560 struct resources set, needed;
2562 HARD_REG_SET scratch;
2567 for (insn = target; insn; insn = next)
2569 rtx this_jump_insn = insn;
2571 next = NEXT_INSN (insn);
2572 switch (GET_CODE (insn))
2575 /* After a label, any pending dead registers that weren't yet
2576 used can be made dead. */
2577 AND_COMPL_HARD_REG_SET (pending_dead_regs, needed.regs);
2578 AND_COMPL_HARD_REG_SET (res->regs, pending_dead_regs);
2579 CLEAR_HARD_REG_SET (pending_dead_regs);
2588 if (GET_CODE (PATTERN (insn)) == USE)
2590 /* If INSN is a USE made by update_block, we care about the
2591 underlying insn. Any registers set by the underlying insn
2592 are live since the insn is being done somewhere else. */
2593 if (GET_RTX_CLASS (GET_CODE (XEXP (PATTERN (insn), 0))) == 'i')
2594 mark_set_resources (XEXP (PATTERN (insn), 0), res, 0, 1);
2596 /* All other USE insns are to be ignored. */
2599 else if (GET_CODE (PATTERN (insn)) == CLOBBER)
2601 else if (GET_CODE (PATTERN (insn)) == SEQUENCE)
2603 /* An unconditional jump can be used to fill the delay slot
2604 of a call, so search for a JUMP_INSN in any position. */
2605 for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
2607 this_jump_insn = XVECEXP (PATTERN (insn), 0, i);
2608 if (GET_CODE (this_jump_insn) == JUMP_INSN)
2617 if (GET_CODE (this_jump_insn) == JUMP_INSN)
2619 if (jump_count++ < 10)
2621 if (simplejump_p (this_jump_insn)
2622 || GET_CODE (PATTERN (this_jump_insn)) == RETURN)
2624 next = JUMP_LABEL (this_jump_insn);
2629 *jump_target = JUMP_LABEL (this_jump_insn);
2632 else if (condjump_p (this_jump_insn)
2633 || condjump_in_parallel_p (this_jump_insn))
2635 struct resources target_set, target_res;
2636 struct resources fallthrough_res;
2638 /* We can handle conditional branches here by following
2639 both paths, and then IOR the results of the two paths
2640 together, which will give us registers that are dead
2641 on both paths. Since this is expensive, we give it
2642 a much higher cost than unconditional branches. The
2643 cost was chosen so that we will follow at most 1
2644 conditional branch. */
2647 if (jump_count >= 10)
2650 mark_referenced_resources (insn, &needed, 1);
2652 /* For an annulled branch, mark_set_resources ignores slots
2653 filled by instructions from the target. This is correct
2654 if the branch is not taken. Since we are following both
2655 paths from the branch, we must also compute correct info
2656 if the branch is taken. We do this by inverting all of
2657 the INSN_FROM_TARGET_P bits, calling mark_set_resources,
2658 and then inverting the INSN_FROM_TARGET_P bits again. */
2660 if (GET_CODE (PATTERN (insn)) == SEQUENCE
2661 && INSN_ANNULLED_BRANCH_P (this_jump_insn))
2663 for (i = 1; i < XVECLEN (PATTERN (insn), 0); i++)
2664 INSN_FROM_TARGET_P (XVECEXP (PATTERN (insn), 0, i))
2665 = ! INSN_FROM_TARGET_P (XVECEXP (PATTERN (insn), 0, i));
2668 mark_set_resources (insn, &target_set, 0, 1);
2670 for (i = 1; i < XVECLEN (PATTERN (insn), 0); i++)
2671 INSN_FROM_TARGET_P (XVECEXP (PATTERN (insn), 0, i))
2672 = ! INSN_FROM_TARGET_P (XVECEXP (PATTERN (insn), 0, i));
2674 mark_set_resources (insn, &set, 0, 1);
2678 mark_set_resources (insn, &set, 0, 1);
2683 COPY_HARD_REG_SET (scratch, target_set.regs);
2684 AND_COMPL_HARD_REG_SET (scratch, needed.regs);
2685 AND_COMPL_HARD_REG_SET (target_res.regs, scratch);
2687 fallthrough_res = *res;
2688 COPY_HARD_REG_SET (scratch, set.regs);
2689 AND_COMPL_HARD_REG_SET (scratch, needed.regs);
2690 AND_COMPL_HARD_REG_SET (fallthrough_res.regs, scratch);
2692 find_dead_or_set_registers (JUMP_LABEL (this_jump_insn),
2693 &target_res, 0, jump_count,
2694 target_set, needed);
2695 find_dead_or_set_registers (next,
2696 &fallthrough_res, 0, jump_count,
2698 IOR_HARD_REG_SET (fallthrough_res.regs, target_res.regs);
2699 AND_HARD_REG_SET (res->regs, fallthrough_res.regs);
2707 /* Don't try this optimization if we expired our jump count
2708 above, since that would mean there may be an infinite loop
2709 in the function being compiled. */
2715 mark_referenced_resources (insn, &needed, 1);
2716 mark_set_resources (insn, &set, 0, 1);
2718 COPY_HARD_REG_SET (scratch, set.regs);
2719 AND_COMPL_HARD_REG_SET (scratch, needed.regs);
2720 AND_COMPL_HARD_REG_SET (res->regs, scratch);
2726 /* Set the resources that are live at TARGET.
2728 If TARGET is zero, we refer to the end of the current function and can
2729 return our precomputed value.
2731 Otherwise, we try to find out what is live by consulting the basic block
2732 information. This is tricky, because we must consider the actions of
2733 reload and jump optimization, which occur after the basic block information
2736 Accordingly, we proceed as follows::
2738 We find the previous BARRIER and look at all immediately following labels
2739 (with no intervening active insns) to see if any of them start a basic
2740 block. If we hit the start of the function first, we use block 0.
2742 Once we have found a basic block and a corresponding first insns, we can
2743 accurately compute the live status from basic_block_live_regs and
2744 reg_renumber. (By starting at a label following a BARRIER, we are immune
2745 to actions taken by reload and jump.) Then we scan all insns between
2746 that point and our target. For each CLOBBER (or for call-clobbered regs
2747 when we pass a CALL_INSN), mark the appropriate registers are dead. For
2748 a SET, mark them as live.
2750 We have to be careful when using REG_DEAD notes because they are not
2751 updated by such things as find_equiv_reg. So keep track of registers
2752 marked as dead that haven't been assigned to, and mark them dead at the
2753 next CODE_LABEL since reload and jump won't propagate values across labels.
2755 If we cannot find the start of a basic block (should be a very rare
2756 case, if it can happen at all), mark everything as potentially live.
2758 Next, scan forward from TARGET looking for things set or clobbered
2759 before they are used. These are not live.
2761 Because we can be called many times on the same target, save our results
2762 in a hash table indexed by INSN_UID. */
2765 mark_target_live_regs (target, res)
2767 struct resources *res;
2771 struct target_info *tinfo;
2775 HARD_REG_SET scratch;
2776 struct resources set, needed;
2778 /* Handle end of function. */
2781 *res = end_of_function_needs;
2785 /* We have to assume memory is needed, but the CC isn't. */
2787 res->volatil = res->unch_memory = 0;
2790 /* See if we have computed this value already. */
2791 for (tinfo = target_hash_table[INSN_UID (target) % TARGET_HASH_PRIME];
2792 tinfo; tinfo = tinfo->next)
2793 if (tinfo->uid == INSN_UID (target))
2796 /* Start by getting the basic block number. If we have saved information,
2797 we can get it from there unless the insn at the start of the basic block
2798 has been deleted. */
2799 if (tinfo && tinfo->block != -1
2800 && ! INSN_DELETED_P (basic_block_head[tinfo->block]))
2804 b = find_basic_block (target);
2808 /* If the information is up-to-date, use it. Otherwise, we will
2810 if (b == tinfo->block && b != -1 && tinfo->bb_tick == bb_ticks[b])
2812 COPY_HARD_REG_SET (res->regs, tinfo->live_regs);
2818 /* Allocate a place to put our results and chain it into the
2820 tinfo = (struct target_info *) oballoc (sizeof (struct target_info));
2821 tinfo->uid = INSN_UID (target);
2823 tinfo->next = target_hash_table[INSN_UID (target) % TARGET_HASH_PRIME];
2824 target_hash_table[INSN_UID (target) % TARGET_HASH_PRIME] = tinfo;
2827 CLEAR_HARD_REG_SET (pending_dead_regs);
2829 /* If we found a basic block, get the live registers from it and update
2830 them with anything set or killed between its start and the insn before
2831 TARGET. Otherwise, we must assume everything is live. */
2834 regset regs_live = basic_block_live_at_start[b];
2837 rtx start_insn, stop_insn;
2839 /* Compute hard regs live at start of block -- this is the real hard regs
2840 marked live, plus live pseudo regs that have been renumbered to
2843 REG_SET_TO_HARD_REG_SET (current_live_regs, regs_live);
2845 EXECUTE_IF_SET_IN_REG_SET
2846 (regs_live, FIRST_PSEUDO_REGISTER, i,
2848 if ((regno = reg_renumber[i]) >= 0)
2850 j < regno + HARD_REGNO_NREGS (regno,
2851 PSEUDO_REGNO_MODE (i));
2853 SET_HARD_REG_BIT (current_live_regs, j);
2856 /* Get starting and ending insn, handling the case where each might
2858 start_insn = (b == 0 ? get_insns () : basic_block_head[b]);
2861 if (GET_CODE (start_insn) == INSN
2862 && GET_CODE (PATTERN (start_insn)) == SEQUENCE)
2863 start_insn = XVECEXP (PATTERN (start_insn), 0, 0);
2865 if (GET_CODE (stop_insn) == INSN
2866 && GET_CODE (PATTERN (stop_insn)) == SEQUENCE)
2867 stop_insn = next_insn (PREV_INSN (stop_insn));
2869 for (insn = start_insn; insn != stop_insn;
2870 insn = next_insn_no_annul (insn))
2873 rtx real_insn = insn;
2875 /* If this insn is from the target of a branch, it isn't going to
2876 be used in the sequel. If it is used in both cases, this
2877 test will not be true. */
2878 if (INSN_FROM_TARGET_P (insn))
2881 /* If this insn is a USE made by update_block, we care about the
2883 if (GET_CODE (insn) == INSN && GET_CODE (PATTERN (insn)) == USE
2884 && GET_RTX_CLASS (GET_CODE (XEXP (PATTERN (insn), 0))) == 'i')
2885 real_insn = XEXP (PATTERN (insn), 0);
2887 if (GET_CODE (real_insn) == CALL_INSN)
2889 /* CALL clobbers all call-used regs that aren't fixed except
2890 sp, ap, and fp. Do this before setting the result of the
2892 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2893 if (call_used_regs[i]
2894 && i != STACK_POINTER_REGNUM && i != FRAME_POINTER_REGNUM
2895 && i != ARG_POINTER_REGNUM
2896 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
2897 && i != HARD_FRAME_POINTER_REGNUM
2899 #if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
2900 && ! (i == ARG_POINTER_REGNUM && fixed_regs[i])
2902 #ifdef PIC_OFFSET_TABLE_REGNUM
2903 && ! (i == PIC_OFFSET_TABLE_REGNUM && flag_pic)
2906 CLEAR_HARD_REG_BIT (current_live_regs, i);
2908 /* A CALL_INSN sets any global register live, since it may
2909 have been modified by the call. */
2910 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2912 SET_HARD_REG_BIT (current_live_regs, i);
2915 /* Mark anything killed in an insn to be deadened at the next
2916 label. Ignore USE insns; the only REG_DEAD notes will be for
2917 parameters. But they might be early. A CALL_INSN will usually
2918 clobber registers used for parameters. It isn't worth bothering
2919 with the unlikely case when it won't. */
2920 if ((GET_CODE (real_insn) == INSN
2921 && GET_CODE (PATTERN (real_insn)) != USE
2922 && GET_CODE (PATTERN (real_insn)) != CLOBBER)
2923 || GET_CODE (real_insn) == JUMP_INSN
2924 || GET_CODE (real_insn) == CALL_INSN)
2926 for (link = REG_NOTES (real_insn); link; link = XEXP (link, 1))
2927 if (REG_NOTE_KIND (link) == REG_DEAD
2928 && GET_CODE (XEXP (link, 0)) == REG
2929 && REGNO (XEXP (link, 0)) < FIRST_PSEUDO_REGISTER)
2931 int first_regno = REGNO (XEXP (link, 0));
2934 + HARD_REGNO_NREGS (first_regno,
2935 GET_MODE (XEXP (link, 0))));
2937 for (i = first_regno; i < last_regno; i++)
2938 SET_HARD_REG_BIT (pending_dead_regs, i);
2941 note_stores (PATTERN (real_insn), update_live_status);
2943 /* If any registers were unused after this insn, kill them.
2944 These notes will always be accurate. */
2945 for (link = REG_NOTES (real_insn); link; link = XEXP (link, 1))
2946 if (REG_NOTE_KIND (link) == REG_UNUSED
2947 && GET_CODE (XEXP (link, 0)) == REG
2948 && REGNO (XEXP (link, 0)) < FIRST_PSEUDO_REGISTER)
2950 int first_regno = REGNO (XEXP (link, 0));
2953 + HARD_REGNO_NREGS (first_regno,
2954 GET_MODE (XEXP (link, 0))));
2956 for (i = first_regno; i < last_regno; i++)
2957 CLEAR_HARD_REG_BIT (current_live_regs, i);
2961 else if (GET_CODE (real_insn) == CODE_LABEL)
2963 /* A label clobbers the pending dead registers since neither
2964 reload nor jump will propagate a value across a label. */
2965 AND_COMPL_HARD_REG_SET (current_live_regs, pending_dead_regs);
2966 CLEAR_HARD_REG_SET (pending_dead_regs);
2969 /* The beginning of the epilogue corresponds to the end of the
2970 RTL chain when there are no epilogue insns. Certain resources
2971 are implicitly required at that point. */
2972 else if (GET_CODE (real_insn) == NOTE
2973 && NOTE_LINE_NUMBER (real_insn) == NOTE_INSN_EPILOGUE_BEG)
2974 IOR_HARD_REG_SET (current_live_regs, start_of_epilogue_needs.regs);
2977 COPY_HARD_REG_SET (res->regs, current_live_regs);
2979 tinfo->bb_tick = bb_ticks[b];
2982 /* We didn't find the start of a basic block. Assume everything
2983 in use. This should happen only extremely rarely. */
2984 SET_HARD_REG_SET (res->regs);
2986 CLEAR_RESOURCE (&set);
2987 CLEAR_RESOURCE (&needed);
2989 jump_insn = find_dead_or_set_registers (target, res, &jump_target, 0,
2992 /* If we hit an unconditional branch, we have another way of finding out
2993 what is live: we can see what is live at the branch target and include
2994 anything used but not set before the branch. The only things that are
2995 live are those that are live using the above test and the test below. */
2999 struct resources new_resources;
3000 rtx stop_insn = next_active_insn (jump_insn);
3002 mark_target_live_regs (next_active_insn (jump_target), &new_resources);
3003 CLEAR_RESOURCE (&set);
3004 CLEAR_RESOURCE (&needed);
3006 /* Include JUMP_INSN in the needed registers. */
3007 for (insn = target; insn != stop_insn; insn = next_active_insn (insn))
3009 mark_referenced_resources (insn, &needed, 1);
3011 COPY_HARD_REG_SET (scratch, needed.regs);
3012 AND_COMPL_HARD_REG_SET (scratch, set.regs);
3013 IOR_HARD_REG_SET (new_resources.regs, scratch);
3015 mark_set_resources (insn, &set, 0, 1);
3018 AND_HARD_REG_SET (res->regs, new_resources.regs);
3021 COPY_HARD_REG_SET (tinfo->live_regs, res->regs);
3024 /* Scan a function looking for insns that need a delay slot and find insns to
3025 put into the delay slot.
3027 NON_JUMPS_P is non-zero if we are to only try to fill non-jump insns (such
3028 as calls). We do these first since we don't want jump insns (that are
3029 easier to fill) to get the only insns that could be used for non-jump insns.
3030 When it is zero, only try to fill JUMP_INSNs.
3032 When slots are filled in this manner, the insns (including the
3033 delay_insn) are put together in a SEQUENCE rtx. In this fashion,
3034 it is possible to tell whether a delay slot has really been filled
3035 or not. `final' knows how to deal with this, by communicating
3036 through FINAL_SEQUENCE. */
3039 fill_simple_delay_slots (non_jumps_p)
3042 register rtx insn, pat, trial, next_trial;
3044 int num_unfilled_slots = unfilled_slots_next - unfilled_slots_base;
3045 struct resources needed, set;
3046 int slots_to_fill, slots_filled;
3049 for (i = 0; i < num_unfilled_slots; i++)
3052 /* Get the next insn to fill. If it has already had any slots assigned,
3053 we can't do anything with it. Maybe we'll improve this later. */
3055 insn = unfilled_slots_base[i];
3057 || INSN_DELETED_P (insn)
3058 || (GET_CODE (insn) == INSN
3059 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3060 || (GET_CODE (insn) == JUMP_INSN && non_jumps_p)
3061 || (GET_CODE (insn) != JUMP_INSN && ! non_jumps_p))
3064 if (GET_CODE (insn) == JUMP_INSN)
3065 flags = get_jump_flags (insn, JUMP_LABEL (insn));
3067 flags = get_jump_flags (insn, NULL_RTX);
3068 slots_to_fill = num_delay_slots (insn);
3070 /* Some machine description have defined instructions to have
3071 delay slots only in certain circumstances which may depend on
3072 nearby insns (which change due to reorg's actions).
3074 For example, the PA port normally has delay slots for unconditional
3077 However, the PA port claims such jumps do not have a delay slot
3078 if they are immediate successors of certain CALL_INSNs. This
3079 allows the port to favor filling the delay slot of the call with
3080 the unconditional jump. */
3081 if (slots_to_fill == 0)
3084 /* This insn needs, or can use, some delay slots. SLOTS_TO_FILL
3085 says how many. After initialization, first try optimizing
3088 nop add %o7,.-L1,%o7
3092 If this case applies, the delay slot of the call is filled with
3093 the unconditional jump. This is done first to avoid having the
3094 delay slot of the call filled in the backward scan. Also, since
3095 the unconditional jump is likely to also have a delay slot, that
3096 insn must exist when it is subsequently scanned.
3098 This is tried on each insn with delay slots as some machines
3099 have insns which perform calls, but are not represented as
3105 if ((trial = next_active_insn (insn))
3106 && GET_CODE (trial) == JUMP_INSN
3107 && simplejump_p (trial)
3108 && eligible_for_delay (insn, slots_filled, trial, flags)
3109 && no_labels_between_p (insn, trial))
3113 delay_list = add_to_delay_list (trial, delay_list);
3115 /* TRIAL may have had its delay slot filled, then unfilled. When
3116 the delay slot is unfilled, TRIAL is placed back on the unfilled
3117 slots obstack. Unfortunately, it is placed on the end of the
3118 obstack, not in its original location. Therefore, we must search
3119 from entry i + 1 to the end of the unfilled slots obstack to
3120 try and find TRIAL. */
3121 tmp = &unfilled_slots_base[i + 1];
3122 while (*tmp != trial && tmp != unfilled_slots_next)
3125 /* Remove the unconditional jump from consideration for delay slot
3126 filling and unthread it. */
3130 rtx next = NEXT_INSN (trial);
3131 rtx prev = PREV_INSN (trial);
3133 NEXT_INSN (prev) = next;
3135 PREV_INSN (next) = prev;
3139 /* Now, scan backwards from the insn to search for a potential
3140 delay-slot candidate. Stop searching when a label or jump is hit.
3142 For each candidate, if it is to go into the delay slot (moved
3143 forward in execution sequence), it must not need or set any resources
3144 that were set by later insns and must not set any resources that
3145 are needed for those insns.
3147 The delay slot insn itself sets resources unless it is a call
3148 (in which case the called routine, not the insn itself, is doing
3151 if (slots_filled < slots_to_fill)
3153 CLEAR_RESOURCE (&needed);
3154 CLEAR_RESOURCE (&set);
3155 mark_set_resources (insn, &set, 0, 0);
3156 mark_referenced_resources (insn, &needed, 0);
3158 for (trial = prev_nonnote_insn (insn); ! stop_search_p (trial, 1);
3161 next_trial = prev_nonnote_insn (trial);
3163 /* This must be an INSN or CALL_INSN. */
3164 pat = PATTERN (trial);
3166 /* USE and CLOBBER at this level was just for flow; ignore it. */
3167 if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
3170 /* Check for resource conflict first, to avoid unnecessary
3172 if (! insn_references_resource_p (trial, &set, 1)
3173 && ! insn_sets_resource_p (trial, &set, 1)
3174 && ! insn_sets_resource_p (trial, &needed, 1)
3176 /* Can't separate set of cc0 from its use. */
3177 && ! (reg_mentioned_p (cc0_rtx, pat)
3178 && ! sets_cc0_p (cc0_rtx, pat))
3182 trial = try_split (pat, trial, 1);
3183 next_trial = prev_nonnote_insn (trial);
3184 if (eligible_for_delay (insn, slots_filled, trial, flags))
3186 /* In this case, we are searching backward, so if we
3187 find insns to put on the delay list, we want
3188 to put them at the head, rather than the
3189 tail, of the list. */
3191 update_reg_dead_notes (trial, insn);
3192 delay_list = gen_rtx_INSN_LIST (VOIDmode,
3194 update_block (trial, trial);
3195 delete_insn (trial);
3196 if (slots_to_fill == ++slots_filled)
3202 mark_set_resources (trial, &set, 0, 1);
3203 mark_referenced_resources (trial, &needed, 1);
3207 /* If all needed slots haven't been filled, we come here. */
3209 /* Try to optimize case of jumping around a single insn. */
3210 #if defined(ANNUL_IFFALSE_SLOTS) || defined(ANNUL_IFTRUE_SLOTS)
3211 if (slots_filled != slots_to_fill
3213 && GET_CODE (insn) == JUMP_INSN
3214 && (condjump_p (insn) || condjump_in_parallel_p (insn)))
3216 delay_list = optimize_skip (insn);
3222 /* Try to get insns from beyond the insn needing the delay slot.
3223 These insns can neither set or reference resources set in insns being
3224 skipped, cannot set resources in the insn being skipped, and, if this
3225 is a CALL_INSN (or a CALL_INSN is passed), cannot trap (because the
3226 call might not return).
3228 There used to be code which continued past the target label if
3229 we saw all uses of the target label. This code did not work,
3230 because it failed to account for some instructions which were
3231 both annulled and marked as from the target. This can happen as a
3232 result of optimize_skip. Since this code was redundant with
3233 fill_eager_delay_slots anyways, it was just deleted. */
3235 if (slots_filled != slots_to_fill
3236 && (GET_CODE (insn) != JUMP_INSN
3237 || ((condjump_p (insn) || condjump_in_parallel_p (insn))
3238 && ! simplejump_p (insn)
3239 && JUMP_LABEL (insn) != 0)))
3242 int maybe_never = 0;
3243 struct resources needed_at_jump;
3245 CLEAR_RESOURCE (&needed);
3246 CLEAR_RESOURCE (&set);
3248 if (GET_CODE (insn) == CALL_INSN)
3250 mark_set_resources (insn, &set, 0, 1);
3251 mark_referenced_resources (insn, &needed, 1);
3256 mark_set_resources (insn, &set, 0, 1);
3257 mark_referenced_resources (insn, &needed, 1);
3258 if (GET_CODE (insn) == JUMP_INSN)
3259 target = JUMP_LABEL (insn);
3262 for (trial = next_nonnote_insn (insn); trial; trial = next_trial)
3264 rtx pat, trial_delay;
3266 next_trial = next_nonnote_insn (trial);
3268 if (GET_CODE (trial) == CODE_LABEL
3269 || GET_CODE (trial) == BARRIER)
3272 /* We must have an INSN, JUMP_INSN, or CALL_INSN. */
3273 pat = PATTERN (trial);
3275 /* Stand-alone USE and CLOBBER are just for flow. */
3276 if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
3279 /* If this already has filled delay slots, get the insn needing
3281 if (GET_CODE (pat) == SEQUENCE)
3282 trial_delay = XVECEXP (pat, 0, 0);
3284 trial_delay = trial;
3286 /* If this is a jump insn to our target, indicate that we have
3287 seen another jump to it. If we aren't handling a conditional
3288 jump, stop our search. Otherwise, compute the needs at its
3289 target and add them to NEEDED. */
3290 if (GET_CODE (trial_delay) == JUMP_INSN)
3294 else if (JUMP_LABEL (trial_delay) != target)
3296 mark_target_live_regs
3297 (next_active_insn (JUMP_LABEL (trial_delay)),
3299 needed.memory |= needed_at_jump.memory;
3300 needed.unch_memory |= needed_at_jump.unch_memory;
3301 IOR_HARD_REG_SET (needed.regs, needed_at_jump.regs);
3305 /* See if we have a resource problem before we try to
3308 && GET_CODE (pat) != SEQUENCE
3309 && ! insn_references_resource_p (trial, &set, 1)
3310 && ! insn_sets_resource_p (trial, &set, 1)
3311 && ! insn_sets_resource_p (trial, &needed, 1)
3313 && ! (reg_mentioned_p (cc0_rtx, pat) && ! sets_cc0_p (pat))
3315 && ! (maybe_never && may_trap_p (pat))
3316 && (trial = try_split (pat, trial, 0))
3317 && eligible_for_delay (insn, slots_filled, trial, flags))
3319 next_trial = next_nonnote_insn (trial);
3320 delay_list = add_to_delay_list (trial, delay_list);
3323 if (reg_mentioned_p (cc0_rtx, pat))
3324 link_cc0_insns (trial);
3327 delete_insn (trial);
3328 if (slots_to_fill == ++slots_filled)
3333 mark_set_resources (trial, &set, 0, 1);
3334 mark_referenced_resources (trial, &needed, 1);
3336 /* Ensure we don't put insns between the setting of cc and the
3337 comparison by moving a setting of cc into an earlier delay
3338 slot since these insns could clobber the condition code. */
3341 /* If this is a call or jump, we might not get here. */
3342 if (GET_CODE (trial_delay) == CALL_INSN
3343 || GET_CODE (trial_delay) == JUMP_INSN)
3347 /* If there are slots left to fill and our search was stopped by an
3348 unconditional branch, try the insn at the branch target. We can
3349 redirect the branch if it works.
3351 Don't do this if the insn at the branch target is a branch. */
3352 if (slots_to_fill != slots_filled
3354 && GET_CODE (trial) == JUMP_INSN
3355 && simplejump_p (trial)
3356 && (target == 0 || JUMP_LABEL (trial) == target)
3357 && (next_trial = next_active_insn (JUMP_LABEL (trial))) != 0
3358 && ! (GET_CODE (next_trial) == INSN
3359 && GET_CODE (PATTERN (next_trial)) == SEQUENCE)
3360 && GET_CODE (next_trial) != JUMP_INSN
3361 && ! insn_references_resource_p (next_trial, &set, 1)
3362 && ! insn_sets_resource_p (next_trial, &set, 1)
3363 && ! insn_sets_resource_p (next_trial, &needed, 1)
3365 && ! reg_mentioned_p (cc0_rtx, PATTERN (next_trial))
3367 && ! (maybe_never && may_trap_p (PATTERN (next_trial)))
3368 && (next_trial = try_split (PATTERN (next_trial), next_trial, 0))
3369 && eligible_for_delay (insn, slots_filled, next_trial, flags))
3371 rtx new_label = next_active_insn (next_trial);
3374 new_label = get_label_before (new_label);
3376 new_label = find_end_label ();
3379 = add_to_delay_list (copy_rtx (next_trial), delay_list);
3381 reorg_redirect_jump (trial, new_label);
3383 /* If we merged because we both jumped to the same place,
3384 redirect the original insn also. */
3386 reorg_redirect_jump (insn, new_label);
3390 /* If this is an unconditional jump, then try to get insns from the
3391 target of the jump. */
3392 if (GET_CODE (insn) == JUMP_INSN
3393 && simplejump_p (insn)
3394 && slots_filled != slots_to_fill)
3396 = fill_slots_from_thread (insn, const_true_rtx,
3397 next_active_insn (JUMP_LABEL (insn)),
3399 own_thread_p (JUMP_LABEL (insn),
3400 JUMP_LABEL (insn), 0),
3401 slots_to_fill, &slots_filled,
3405 unfilled_slots_base[i]
3406 = emit_delay_sequence (insn, delay_list, slots_filled);
3408 if (slots_to_fill == slots_filled)
3409 unfilled_slots_base[i] = 0;
3411 note_delay_statistics (slots_filled, 0);
3414 #ifdef DELAY_SLOTS_FOR_EPILOGUE
3415 /* See if the epilogue needs any delay slots. Try to fill them if so.
3416 The only thing we can do is scan backwards from the end of the
3417 function. If we did this in a previous pass, it is incorrect to do it
3419 if (current_function_epilogue_delay_list)
3422 slots_to_fill = DELAY_SLOTS_FOR_EPILOGUE;
3423 if (slots_to_fill == 0)
3427 CLEAR_RESOURCE (&set);
3429 /* The frame pointer and stack pointer are needed at the beginning of
3430 the epilogue, so instructions setting them can not be put in the
3431 epilogue delay slot. However, everything else needed at function
3432 end is safe, so we don't want to use end_of_function_needs here. */
3433 CLEAR_RESOURCE (&needed);
3434 if (frame_pointer_needed)
3436 SET_HARD_REG_BIT (needed.regs, FRAME_POINTER_REGNUM);
3437 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
3438 SET_HARD_REG_BIT (needed.regs, HARD_FRAME_POINTER_REGNUM);
3440 #ifdef EXIT_IGNORE_STACK
3441 if (! EXIT_IGNORE_STACK
3442 || current_function_sp_is_unchanging)
3444 SET_HARD_REG_BIT (needed.regs, STACK_POINTER_REGNUM);
3447 SET_HARD_REG_BIT (needed.regs, STACK_POINTER_REGNUM);
3449 #ifdef EPILOGUE_USES
3450 for (i = 0; i <FIRST_PSEUDO_REGISTER; i++)
3452 if (EPILOGUE_USES (i))
3453 SET_HARD_REG_BIT (needed.regs, i);
3457 for (trial = get_last_insn (); ! stop_search_p (trial, 1);
3458 trial = PREV_INSN (trial))
3460 if (GET_CODE (trial) == NOTE)
3462 pat = PATTERN (trial);
3463 if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
3466 if (! insn_references_resource_p (trial, &set, 1)
3467 && ! insn_sets_resource_p (trial, &needed, 1)
3468 && ! insn_sets_resource_p (trial, &set, 1)
3470 /* Don't want to mess with cc0 here. */
3471 && ! reg_mentioned_p (cc0_rtx, pat)
3475 trial = try_split (pat, trial, 1);
3476 if (ELIGIBLE_FOR_EPILOGUE_DELAY (trial, slots_filled))
3478 /* Here as well we are searching backward, so put the
3479 insns we find on the head of the list. */
3481 current_function_epilogue_delay_list
3482 = gen_rtx_INSN_LIST (VOIDmode, trial,
3483 current_function_epilogue_delay_list);
3484 mark_referenced_resources (trial, &end_of_function_needs, 1);
3485 update_block (trial, trial);
3486 delete_insn (trial);
3488 /* Clear deleted bit so final.c will output the insn. */
3489 INSN_DELETED_P (trial) = 0;
3491 if (slots_to_fill == ++slots_filled)
3497 mark_set_resources (trial, &set, 0, 1);
3498 mark_referenced_resources (trial, &needed, 1);
3501 note_delay_statistics (slots_filled, 0);
3505 /* Try to find insns to place in delay slots.
3507 INSN is the jump needing SLOTS_TO_FILL delay slots. It tests CONDITION
3508 or is an unconditional branch if CONDITION is const_true_rtx.
3509 *PSLOTS_FILLED is updated with the number of slots that we have filled.
3511 THREAD is a flow-of-control, either the insns to be executed if the
3512 branch is true or if the branch is false, THREAD_IF_TRUE says which.
3514 OPPOSITE_THREAD is the thread in the opposite direction. It is used
3515 to see if any potential delay slot insns set things needed there.
3517 LIKELY is non-zero if it is extremely likely that the branch will be
3518 taken and THREAD_IF_TRUE is set. This is used for the branch at the
3519 end of a loop back up to the top.
3521 OWN_THREAD and OWN_OPPOSITE_THREAD are true if we are the only user of the
3522 thread. I.e., it is the fallthrough code of our jump or the target of the
3523 jump when we are the only jump going there.
3525 If OWN_THREAD is false, it must be the "true" thread of a jump. In that
3526 case, we can only take insns from the head of the thread for our delay
3527 slot. We then adjust the jump to point after the insns we have taken. */
3530 fill_slots_from_thread (insn, condition, thread, opposite_thread, likely,
3531 thread_if_true, own_thread,
3532 slots_to_fill, pslots_filled, delay_list)
3535 rtx thread, opposite_thread;
3539 int slots_to_fill, *pslots_filled;
3543 struct resources opposite_needed, set, needed;
3549 /* Validate our arguments. */
3550 if ((condition == const_true_rtx && ! thread_if_true)
3551 || (! own_thread && ! thread_if_true))
3554 flags = get_jump_flags (insn, JUMP_LABEL (insn));
3556 /* If our thread is the end of subroutine, we can't get any delay
3561 /* If this is an unconditional branch, nothing is needed at the
3562 opposite thread. Otherwise, compute what is needed there. */
3563 if (condition == const_true_rtx)
3564 CLEAR_RESOURCE (&opposite_needed);
3566 mark_target_live_regs (opposite_thread, &opposite_needed);
3568 /* If the insn at THREAD can be split, do it here to avoid having to
3569 update THREAD and NEW_THREAD if it is done in the loop below. Also
3570 initialize NEW_THREAD. */
3572 new_thread = thread = try_split (PATTERN (thread), thread, 0);
3574 /* Scan insns at THREAD. We are looking for an insn that can be removed
3575 from THREAD (it neither sets nor references resources that were set
3576 ahead of it and it doesn't set anything needs by the insns ahead of
3577 it) and that either can be placed in an annulling insn or aren't
3578 needed at OPPOSITE_THREAD. */
3580 CLEAR_RESOURCE (&needed);
3581 CLEAR_RESOURCE (&set);
3583 /* If we do not own this thread, we must stop as soon as we find
3584 something that we can't put in a delay slot, since all we can do
3585 is branch into THREAD at a later point. Therefore, labels stop
3586 the search if this is not the `true' thread. */
3588 for (trial = thread;
3589 ! stop_search_p (trial, ! thread_if_true) && (! lose || own_thread);
3590 trial = next_nonnote_insn (trial))
3594 /* If we have passed a label, we no longer own this thread. */
3595 if (GET_CODE (trial) == CODE_LABEL)
3601 pat = PATTERN (trial);
3602 if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
3605 /* If TRIAL conflicts with the insns ahead of it, we lose. Also,
3606 don't separate or copy insns that set and use CC0. */
3607 if (! insn_references_resource_p (trial, &set, 1)
3608 && ! insn_sets_resource_p (trial, &set, 1)
3609 && ! insn_sets_resource_p (trial, &needed, 1)
3611 && ! (reg_mentioned_p (cc0_rtx, pat)
3612 && (! own_thread || ! sets_cc0_p (pat)))
3618 /* If TRIAL is redundant with some insn before INSN, we don't
3619 actually need to add it to the delay list; we can merely pretend
3621 if ((prior_insn = redundant_insn (trial, insn, delay_list)))
3623 fix_reg_dead_note (prior_insn, insn);
3626 update_block (trial, thread);
3627 if (trial == thread)
3629 thread = next_active_insn (thread);
3630 if (new_thread == trial)
3631 new_thread = thread;
3634 delete_insn (trial);
3638 update_reg_unused_notes (prior_insn, trial);
3639 new_thread = next_active_insn (trial);
3645 /* There are two ways we can win: If TRIAL doesn't set anything
3646 needed at the opposite thread and can't trap, or if it can
3647 go into an annulled delay slot. */
3649 && (condition == const_true_rtx
3650 || (! insn_sets_resource_p (trial, &opposite_needed, 1)
3651 && ! may_trap_p (pat))))
3654 trial = try_split (pat, trial, 0);
3655 if (new_thread == old_trial)
3657 if (thread == old_trial)
3659 pat = PATTERN (trial);
3660 if (eligible_for_delay (insn, *pslots_filled, trial, flags))
3664 #ifdef ANNUL_IFTRUE_SLOTS
3667 #ifdef ANNUL_IFFALSE_SLOTS
3673 trial = try_split (pat, trial, 0);
3674 if (new_thread == old_trial)
3676 if (thread == old_trial)
3678 pat = PATTERN (trial);
3679 if ((must_annul || delay_list == NULL) && (thread_if_true
3680 ? check_annul_list_true_false (0, delay_list)
3681 && eligible_for_annul_false (insn, *pslots_filled, trial, flags)
3682 : check_annul_list_true_false (1, delay_list)
3683 && eligible_for_annul_true (insn, *pslots_filled, trial, flags)))
3691 if (reg_mentioned_p (cc0_rtx, pat))
3692 link_cc0_insns (trial);
3695 /* If we own this thread, delete the insn. If this is the
3696 destination of a branch, show that a basic block status
3697 may have been updated. In any case, mark the new
3698 starting point of this thread. */
3701 update_block (trial, thread);
3702 if (trial == thread)
3704 thread = next_active_insn (thread);
3705 if (new_thread == trial)
3706 new_thread = thread;
3708 delete_insn (trial);
3711 new_thread = next_active_insn (trial);
3713 temp = own_thread ? trial : copy_rtx (trial);
3715 INSN_FROM_TARGET_P (temp) = 1;
3717 delay_list = add_to_delay_list (temp, delay_list);
3719 mark_set_resources (trial, &opposite_needed, 0, 1);
3721 if (slots_to_fill == ++(*pslots_filled))
3723 /* Even though we have filled all the slots, we
3724 may be branching to a location that has a
3725 redundant insn. Skip any if so. */
3726 while (new_thread && ! own_thread
3727 && ! insn_sets_resource_p (new_thread, &set, 1)
3728 && ! insn_sets_resource_p (new_thread, &needed, 1)
3729 && ! insn_references_resource_p (new_thread,
3732 = redundant_insn (new_thread, insn,
3735 /* We know we do not own the thread, so no need
3736 to call update_block and delete_insn. */
3737 fix_reg_dead_note (prior_insn, insn);
3738 update_reg_unused_notes (prior_insn, new_thread);
3739 new_thread = next_active_insn (new_thread);
3749 /* This insn can't go into a delay slot. */
3751 mark_set_resources (trial, &set, 0, 1);
3752 mark_referenced_resources (trial, &needed, 1);
3754 /* Ensure we don't put insns between the setting of cc and the comparison
3755 by moving a setting of cc into an earlier delay slot since these insns
3756 could clobber the condition code. */
3759 /* If this insn is a register-register copy and the next insn has
3760 a use of our destination, change it to use our source. That way,
3761 it will become a candidate for our delay slot the next time
3762 through this loop. This case occurs commonly in loops that
3765 We could check for more complex cases than those tested below,
3766 but it doesn't seem worth it. It might also be a good idea to try
3767 to swap the two insns. That might do better.
3769 We can't do this if the next insn modifies our destination, because
3770 that would make the replacement into the insn invalid. We also can't
3771 do this if it modifies our source, because it might be an earlyclobber
3772 operand. This latter test also prevents updating the contents of
3775 if (GET_CODE (trial) == INSN && GET_CODE (pat) == SET
3776 && GET_CODE (SET_SRC (pat)) == REG
3777 && GET_CODE (SET_DEST (pat)) == REG)
3779 rtx next = next_nonnote_insn (trial);
3781 if (next && GET_CODE (next) == INSN
3782 && GET_CODE (PATTERN (next)) != USE
3783 && ! reg_set_p (SET_DEST (pat), next)
3784 && ! reg_set_p (SET_SRC (pat), next)
3785 && reg_referenced_p (SET_DEST (pat), PATTERN (next)))
3786 validate_replace_rtx (SET_DEST (pat), SET_SRC (pat), next);
3790 /* If we stopped on a branch insn that has delay slots, see if we can
3791 steal some of the insns in those slots. */
3792 if (trial && GET_CODE (trial) == INSN
3793 && GET_CODE (PATTERN (trial)) == SEQUENCE
3794 && GET_CODE (XVECEXP (PATTERN (trial), 0, 0)) == JUMP_INSN)
3796 /* If this is the `true' thread, we will want to follow the jump,
3797 so we can only do this if we have taken everything up to here. */
3798 if (thread_if_true && trial == new_thread
3799 && ! insn_references_resource_p (XVECEXP (PATTERN (trial), 0, 0),
3800 &opposite_needed, 0))
3802 = steal_delay_list_from_target (insn, condition, PATTERN (trial),
3803 delay_list, &set, &needed,
3804 &opposite_needed, slots_to_fill,
3805 pslots_filled, &must_annul,
3807 else if (! thread_if_true)
3809 = steal_delay_list_from_fallthrough (insn, condition,
3811 delay_list, &set, &needed,
3812 &opposite_needed, slots_to_fill,
3813 pslots_filled, &must_annul);
3816 /* If we haven't found anything for this delay slot and it is very
3817 likely that the branch will be taken, see if the insn at our target
3818 increments or decrements a register with an increment that does not
3819 depend on the destination register. If so, try to place the opposite
3820 arithmetic insn after the jump insn and put the arithmetic insn in the
3821 delay slot. If we can't do this, return. */
3822 if (delay_list == 0 && likely && new_thread
3823 && GET_CODE (new_thread) == INSN
3824 && GET_CODE (PATTERN (new_thread)) != ASM_INPUT
3825 && asm_noperands (PATTERN (new_thread)) < 0)
3827 rtx pat = PATTERN (new_thread);
3832 pat = PATTERN (trial);
3834 if (GET_CODE (trial) != INSN || GET_CODE (pat) != SET
3835 || ! eligible_for_delay (insn, 0, trial, flags))
3838 dest = SET_DEST (pat), src = SET_SRC (pat);
3839 if ((GET_CODE (src) == PLUS || GET_CODE (src) == MINUS)
3840 && rtx_equal_p (XEXP (src, 0), dest)
3841 && ! reg_overlap_mentioned_p (dest, XEXP (src, 1)))
3843 rtx other = XEXP (src, 1);
3847 /* If this is a constant adjustment, use the same code with
3848 the negated constant. Otherwise, reverse the sense of the
3850 if (GET_CODE (other) == CONST_INT)
3851 new_arith = gen_rtx_fmt_ee (GET_CODE (src), GET_MODE (src), dest,
3852 negate_rtx (GET_MODE (src), other));
3854 new_arith = gen_rtx_fmt_ee (GET_CODE (src) == PLUS ? MINUS : PLUS,
3855 GET_MODE (src), dest, other);
3857 ninsn = emit_insn_after (gen_rtx_SET (VOIDmode, dest, new_arith),
3860 if (recog_memoized (ninsn) < 0
3861 || (insn_extract (ninsn),
3862 ! constrain_operands (INSN_CODE (ninsn), 1)))
3864 delete_insn (ninsn);
3870 update_block (trial, thread);
3871 if (trial == thread)
3873 thread = next_active_insn (thread);
3874 if (new_thread == trial)
3875 new_thread = thread;
3877 delete_insn (trial);
3880 new_thread = next_active_insn (trial);
3882 ninsn = own_thread ? trial : copy_rtx (trial);
3884 INSN_FROM_TARGET_P (ninsn) = 1;
3886 delay_list = add_to_delay_list (ninsn, NULL_RTX);
3891 if (delay_list && must_annul)
3892 INSN_ANNULLED_BRANCH_P (insn) = 1;
3894 /* If we are to branch into the middle of this thread, find an appropriate
3895 label or make a new one if none, and redirect INSN to it. If we hit the
3896 end of the function, use the end-of-function label. */
3897 if (new_thread != thread)
3901 if (! thread_if_true)
3904 if (new_thread && GET_CODE (new_thread) == JUMP_INSN
3905 && (simplejump_p (new_thread)
3906 || GET_CODE (PATTERN (new_thread)) == RETURN)
3907 && redirect_with_delay_list_safe_p (insn,
3908 JUMP_LABEL (new_thread),
3910 new_thread = follow_jumps (JUMP_LABEL (new_thread));
3912 if (new_thread == 0)
3913 label = find_end_label ();
3914 else if (GET_CODE (new_thread) == CODE_LABEL)
3917 label = get_label_before (new_thread);
3919 reorg_redirect_jump (insn, label);
3925 /* Make another attempt to find insns to place in delay slots.
3927 We previously looked for insns located in front of the delay insn
3928 and, for non-jump delay insns, located behind the delay insn.
3930 Here only try to schedule jump insns and try to move insns from either
3931 the target or the following insns into the delay slot. If annulling is
3932 supported, we will be likely to do this. Otherwise, we can do this only
3936 fill_eager_delay_slots ()
3940 int num_unfilled_slots = unfilled_slots_next - unfilled_slots_base;
3942 for (i = 0; i < num_unfilled_slots; i++)
3945 rtx target_label, insn_at_target, fallthrough_insn;
3948 int own_fallthrough;
3949 int prediction, slots_to_fill, slots_filled;
3951 insn = unfilled_slots_base[i];
3953 || INSN_DELETED_P (insn)
3954 || GET_CODE (insn) != JUMP_INSN
3955 || ! (condjump_p (insn) || condjump_in_parallel_p (insn)))
3958 slots_to_fill = num_delay_slots (insn);
3959 /* Some machine description have defined instructions to have
3960 delay slots only in certain circumstances which may depend on
3961 nearby insns (which change due to reorg's actions).
3963 For example, the PA port normally has delay slots for unconditional
3966 However, the PA port claims such jumps do not have a delay slot
3967 if they are immediate successors of certain CALL_INSNs. This
3968 allows the port to favor filling the delay slot of the call with
3969 the unconditional jump. */
3970 if (slots_to_fill == 0)
3974 target_label = JUMP_LABEL (insn);
3975 condition = get_branch_condition (insn, target_label);
3980 /* Get the next active fallthrough and target insns and see if we own
3981 them. Then see whether the branch is likely true. We don't need
3982 to do a lot of this for unconditional branches. */
3984 insn_at_target = next_active_insn (target_label);
3985 own_target = own_thread_p (target_label, target_label, 0);
3987 if (condition == const_true_rtx)
3989 own_fallthrough = 0;
3990 fallthrough_insn = 0;
3995 fallthrough_insn = next_active_insn (insn);
3996 own_fallthrough = own_thread_p (NEXT_INSN (insn), NULL_RTX, 1);
3997 prediction = mostly_true_jump (insn, condition);
4000 /* If this insn is expected to branch, first try to get insns from our
4001 target, then our fallthrough insns. If it is not, expected to branch,
4002 try the other order. */
4007 = fill_slots_from_thread (insn, condition, insn_at_target,
4008 fallthrough_insn, prediction == 2, 1,
4010 slots_to_fill, &slots_filled, delay_list);
4012 if (delay_list == 0 && own_fallthrough)
4014 /* Even though we didn't find anything for delay slots,
4015 we might have found a redundant insn which we deleted
4016 from the thread that was filled. So we have to recompute
4017 the next insn at the target. */
4018 target_label = JUMP_LABEL (insn);
4019 insn_at_target = next_active_insn (target_label);
4022 = fill_slots_from_thread (insn, condition, fallthrough_insn,
4023 insn_at_target, 0, 0,
4025 slots_to_fill, &slots_filled,
4031 if (own_fallthrough)
4033 = fill_slots_from_thread (insn, condition, fallthrough_insn,
4034 insn_at_target, 0, 0,
4036 slots_to_fill, &slots_filled,
4039 if (delay_list == 0)
4041 = fill_slots_from_thread (insn, condition, insn_at_target,
4042 next_active_insn (insn), 0, 1,
4044 slots_to_fill, &slots_filled,
4049 unfilled_slots_base[i]
4050 = emit_delay_sequence (insn, delay_list, slots_filled);
4052 if (slots_to_fill == slots_filled)
4053 unfilled_slots_base[i] = 0;
4055 note_delay_statistics (slots_filled, 1);
4059 /* Once we have tried two ways to fill a delay slot, make a pass over the
4060 code to try to improve the results and to do such things as more jump
4064 relax_delay_slots (first)
4067 register rtx insn, next, pat;
4068 register rtx trial, delay_insn, target_label;
4070 /* Look at every JUMP_INSN and see if we can improve it. */
4071 for (insn = first; insn; insn = next)
4075 next = next_active_insn (insn);
4077 /* If this is a jump insn, see if it now jumps to a jump, jumps to
4078 the next insn, or jumps to a label that is not the last of a
4079 group of consecutive labels. */
4080 if (GET_CODE (insn) == JUMP_INSN
4081 && (condjump_p (insn) || condjump_in_parallel_p (insn))
4082 && (target_label = JUMP_LABEL (insn)) != 0)
4084 target_label = follow_jumps (target_label);
4085 target_label = prev_label (next_active_insn (target_label));
4087 if (target_label == 0)
4088 target_label = find_end_label ();
4090 if (next_active_insn (target_label) == next
4091 && ! condjump_in_parallel_p (insn))
4097 if (target_label != JUMP_LABEL (insn))
4098 reorg_redirect_jump (insn, target_label);
4100 /* See if this jump branches around a unconditional jump.
4101 If so, invert this jump and point it to the target of the
4103 if (next && GET_CODE (next) == JUMP_INSN
4104 && (simplejump_p (next) || GET_CODE (PATTERN (next)) == RETURN)
4105 && next_active_insn (target_label) == next_active_insn (next)
4106 && no_labels_between_p (insn, next))
4108 rtx label = JUMP_LABEL (next);
4110 /* Be careful how we do this to avoid deleting code or
4111 labels that are momentarily dead. See similar optimization
4114 We also need to ensure we properly handle the case when
4115 invert_jump fails. */
4117 ++LABEL_NUSES (target_label);
4119 ++LABEL_NUSES (label);
4121 if (invert_jump (insn, label))
4128 --LABEL_NUSES (label);
4130 if (--LABEL_NUSES (target_label) == 0)
4131 delete_insn (target_label);
4137 /* If this is an unconditional jump and the previous insn is a
4138 conditional jump, try reversing the condition of the previous
4139 insn and swapping our targets. The next pass might be able to
4142 Don't do this if we expect the conditional branch to be true, because
4143 we would then be making the more common case longer. */
4145 if (GET_CODE (insn) == JUMP_INSN
4146 && (simplejump_p (insn) || GET_CODE (PATTERN (insn)) == RETURN)
4147 && (other = prev_active_insn (insn)) != 0
4148 && (condjump_p (other) || condjump_in_parallel_p (other))
4149 && no_labels_between_p (other, insn)
4150 && 0 < mostly_true_jump (other,
4151 get_branch_condition (other,
4152 JUMP_LABEL (other))))
4154 rtx other_target = JUMP_LABEL (other);
4155 target_label = JUMP_LABEL (insn);
4157 /* Increment the count of OTHER_TARGET, so it doesn't get deleted
4158 as we move the label. */
4160 ++LABEL_NUSES (other_target);
4162 if (invert_jump (other, target_label))
4163 reorg_redirect_jump (insn, other_target);
4166 --LABEL_NUSES (other_target);
4169 /* Now look only at cases where we have filled a delay slot. */
4170 if (GET_CODE (insn) != INSN
4171 || GET_CODE (PATTERN (insn)) != SEQUENCE)
4174 pat = PATTERN (insn);
4175 delay_insn = XVECEXP (pat, 0, 0);
4177 /* See if the first insn in the delay slot is redundant with some
4178 previous insn. Remove it from the delay slot if so; then set up
4179 to reprocess this insn. */
4180 if (redundant_insn (XVECEXP (pat, 0, 1), delay_insn, 0))
4182 delete_from_delay_slot (XVECEXP (pat, 0, 1));
4183 next = prev_active_insn (next);
4187 /* Now look only at the cases where we have a filled JUMP_INSN. */
4188 if (GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) != JUMP_INSN
4189 || ! (condjump_p (XVECEXP (PATTERN (insn), 0, 0))
4190 || condjump_in_parallel_p (XVECEXP (PATTERN (insn), 0, 0))))
4193 target_label = JUMP_LABEL (delay_insn);
4197 /* If this jump goes to another unconditional jump, thread it, but
4198 don't convert a jump into a RETURN here. */
4199 trial = follow_jumps (target_label);
4200 /* We use next_real_insn instead of next_active_insn, so that
4201 the special USE insns emitted by reorg won't be ignored.
4202 If they are ignored, then they will get deleted if target_label
4203 is now unreachable, and that would cause mark_target_live_regs
4205 trial = prev_label (next_real_insn (trial));
4206 if (trial == 0 && target_label != 0)
4207 trial = find_end_label ();
4209 if (trial != target_label
4210 && redirect_with_delay_slots_safe_p (delay_insn, trial, insn))
4212 reorg_redirect_jump (delay_insn, trial);
4213 target_label = trial;
4216 /* If the first insn at TARGET_LABEL is redundant with a previous
4217 insn, redirect the jump to the following insn process again. */
4218 trial = next_active_insn (target_label);
4219 if (trial && GET_CODE (PATTERN (trial)) != SEQUENCE
4220 && redundant_insn (trial, insn, 0))
4224 /* Figure out where to emit the special USE insn so we don't
4225 later incorrectly compute register live/death info. */
4226 tmp = next_active_insn (trial);
4228 tmp = find_end_label ();
4230 /* Insert the special USE insn and update dataflow info. */
4231 update_block (trial, tmp);
4233 /* Now emit a label before the special USE insn, and
4234 redirect our jump to the new label. */
4235 target_label = get_label_before (PREV_INSN (tmp));
4236 reorg_redirect_jump (delay_insn, target_label);
4241 /* Similarly, if it is an unconditional jump with one insn in its
4242 delay list and that insn is redundant, thread the jump. */
4243 if (trial && GET_CODE (PATTERN (trial)) == SEQUENCE
4244 && XVECLEN (PATTERN (trial), 0) == 2
4245 && GET_CODE (XVECEXP (PATTERN (trial), 0, 0)) == JUMP_INSN
4246 && (simplejump_p (XVECEXP (PATTERN (trial), 0, 0))
4247 || GET_CODE (PATTERN (XVECEXP (PATTERN (trial), 0, 0))) == RETURN)
4248 && redundant_insn (XVECEXP (PATTERN (trial), 0, 1), insn, 0))
4250 target_label = JUMP_LABEL (XVECEXP (PATTERN (trial), 0, 0));
4251 if (target_label == 0)
4252 target_label = find_end_label ();
4254 if (redirect_with_delay_slots_safe_p (delay_insn, target_label,
4257 reorg_redirect_jump (delay_insn, target_label);
4264 if (! INSN_ANNULLED_BRANCH_P (delay_insn)
4265 && prev_active_insn (target_label) == insn
4266 && ! condjump_in_parallel_p (delay_insn)
4268 /* If the last insn in the delay slot sets CC0 for some insn,
4269 various code assumes that it is in a delay slot. We could
4270 put it back where it belonged and delete the register notes,
4271 but it doesn't seem worthwhile in this uncommon case. */
4272 && ! find_reg_note (XVECEXP (pat, 0, XVECLEN (pat, 0) - 1),
4273 REG_CC_USER, NULL_RTX)
4279 /* All this insn does is execute its delay list and jump to the
4280 following insn. So delete the jump and just execute the delay
4283 We do this by deleting the INSN containing the SEQUENCE, then
4284 re-emitting the insns separately, and then deleting the jump.
4285 This allows the count of the jump target to be properly
4288 /* Clear the from target bit, since these insns are no longer
4290 for (i = 0; i < XVECLEN (pat, 0); i++)
4291 INSN_FROM_TARGET_P (XVECEXP (pat, 0, i)) = 0;
4293 trial = PREV_INSN (insn);
4295 emit_insn_after (pat, trial);
4296 delete_scheduled_jump (delay_insn);
4300 /* See if this is an unconditional jump around a single insn which is
4301 identical to the one in its delay slot. In this case, we can just
4302 delete the branch and the insn in its delay slot. */
4303 if (next && GET_CODE (next) == INSN
4304 && prev_label (next_active_insn (next)) == target_label
4305 && simplejump_p (insn)
4306 && XVECLEN (pat, 0) == 2
4307 && rtx_equal_p (PATTERN (next), PATTERN (XVECEXP (pat, 0, 1))))
4313 /* See if this jump (with its delay slots) branches around another
4314 jump (without delay slots). If so, invert this jump and point
4315 it to the target of the second jump. We cannot do this for
4316 annulled jumps, though. Again, don't convert a jump to a RETURN
4318 if (! INSN_ANNULLED_BRANCH_P (delay_insn)
4319 && next && GET_CODE (next) == JUMP_INSN
4320 && (simplejump_p (next) || GET_CODE (PATTERN (next)) == RETURN)
4321 && next_active_insn (target_label) == next_active_insn (next)
4322 && no_labels_between_p (insn, next))
4324 rtx label = JUMP_LABEL (next);
4325 rtx old_label = JUMP_LABEL (delay_insn);
4328 label = find_end_label ();
4330 if (redirect_with_delay_slots_safe_p (delay_insn, label, insn))
4332 /* Be careful how we do this to avoid deleting code or labels
4333 that are momentarily dead. See similar optimization in
4336 ++LABEL_NUSES (old_label);
4338 if (invert_jump (delay_insn, label))
4342 /* Must update the INSN_FROM_TARGET_P bits now that
4343 the branch is reversed, so that mark_target_live_regs
4344 will handle the delay slot insn correctly. */
4345 for (i = 1; i < XVECLEN (PATTERN (insn), 0); i++)
4347 rtx slot = XVECEXP (PATTERN (insn), 0, i);
4348 INSN_FROM_TARGET_P (slot) = ! INSN_FROM_TARGET_P (slot);
4355 if (old_label && --LABEL_NUSES (old_label) == 0)
4356 delete_insn (old_label);
4361 /* If we own the thread opposite the way this insn branches, see if we
4362 can merge its delay slots with following insns. */
4363 if (INSN_FROM_TARGET_P (XVECEXP (pat, 0, 1))
4364 && own_thread_p (NEXT_INSN (insn), 0, 1))
4365 try_merge_delay_insns (insn, next);
4366 else if (! INSN_FROM_TARGET_P (XVECEXP (pat, 0, 1))
4367 && own_thread_p (target_label, target_label, 0))
4368 try_merge_delay_insns (insn, next_active_insn (target_label));
4370 /* If we get here, we haven't deleted INSN. But we may have deleted
4371 NEXT, so recompute it. */
4372 next = next_active_insn (insn);
4378 /* Look for filled jumps to the end of function label. We can try to convert
4379 them into RETURN insns if the insns in the delay slot are valid for the
4383 make_return_insns (first)
4386 rtx insn, jump_insn, pat;
4387 rtx real_return_label = end_of_function_label;
4390 /* See if there is a RETURN insn in the function other than the one we
4391 made for END_OF_FUNCTION_LABEL. If so, set up anything we can't change
4392 into a RETURN to jump to it. */
4393 for (insn = first; insn; insn = NEXT_INSN (insn))
4394 if (GET_CODE (insn) == JUMP_INSN && GET_CODE (PATTERN (insn)) == RETURN)
4396 real_return_label = get_label_before (insn);
4400 /* Show an extra usage of REAL_RETURN_LABEL so it won't go away if it
4401 was equal to END_OF_FUNCTION_LABEL. */
4402 LABEL_NUSES (real_return_label)++;
4404 /* Clear the list of insns to fill so we can use it. */
4405 obstack_free (&unfilled_slots_obstack, unfilled_firstobj);
4407 for (insn = first; insn; insn = NEXT_INSN (insn))
4411 /* Only look at filled JUMP_INSNs that go to the end of function
4413 if (GET_CODE (insn) != INSN
4414 || GET_CODE (PATTERN (insn)) != SEQUENCE
4415 || GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) != JUMP_INSN
4416 || JUMP_LABEL (XVECEXP (PATTERN (insn), 0, 0)) != end_of_function_label)
4419 pat = PATTERN (insn);
4420 jump_insn = XVECEXP (pat, 0, 0);
4422 /* If we can't make the jump into a RETURN, try to redirect it to the best
4423 RETURN and go on to the next insn. */
4424 if (! reorg_redirect_jump (jump_insn, NULL_RTX))
4426 /* Make sure redirecting the jump will not invalidate the delay
4428 if (redirect_with_delay_slots_safe_p (jump_insn,
4431 reorg_redirect_jump (jump_insn, real_return_label);
4435 /* See if this RETURN can accept the insns current in its delay slot.
4436 It can if it has more or an equal number of slots and the contents
4437 of each is valid. */
4439 flags = get_jump_flags (jump_insn, JUMP_LABEL (jump_insn));
4440 slots = num_delay_slots (jump_insn);
4441 if (slots >= XVECLEN (pat, 0) - 1)
4443 for (i = 1; i < XVECLEN (pat, 0); i++)
4445 #ifdef ANNUL_IFFALSE_SLOTS
4446 (INSN_ANNULLED_BRANCH_P (jump_insn)
4447 && INSN_FROM_TARGET_P (XVECEXP (pat, 0, i)))
4448 ? eligible_for_annul_false (jump_insn, i - 1,
4449 XVECEXP (pat, 0, i), flags) :
4451 #ifdef ANNUL_IFTRUE_SLOTS
4452 (INSN_ANNULLED_BRANCH_P (jump_insn)
4453 && ! INSN_FROM_TARGET_P (XVECEXP (pat, 0, i)))
4454 ? eligible_for_annul_true (jump_insn, i - 1,
4455 XVECEXP (pat, 0, i), flags) :
4457 eligible_for_delay (jump_insn, i -1, XVECEXP (pat, 0, i), flags)))
4463 if (i == XVECLEN (pat, 0))
4466 /* We have to do something with this insn. If it is an unconditional
4467 RETURN, delete the SEQUENCE and output the individual insns,
4468 followed by the RETURN. Then set things up so we try to find
4469 insns for its delay slots, if it needs some. */
4470 if (GET_CODE (PATTERN (jump_insn)) == RETURN)
4472 rtx prev = PREV_INSN (insn);
4475 for (i = 1; i < XVECLEN (pat, 0); i++)
4476 prev = emit_insn_after (PATTERN (XVECEXP (pat, 0, i)), prev);
4478 insn = emit_jump_insn_after (PATTERN (jump_insn), prev);
4479 emit_barrier_after (insn);
4482 obstack_ptr_grow (&unfilled_slots_obstack, insn);
4485 /* It is probably more efficient to keep this with its current
4486 delay slot as a branch to a RETURN. */
4487 reorg_redirect_jump (jump_insn, real_return_label);
4490 /* Now delete REAL_RETURN_LABEL if we never used it. Then try to fill any
4491 new delay slots we have created. */
4492 if (--LABEL_NUSES (real_return_label) == 0)
4493 delete_insn (real_return_label);
4495 fill_simple_delay_slots (1);
4496 fill_simple_delay_slots (0);
4500 /* Try to find insns to place in delay slots. */
4503 dbr_schedule (first, file)
4507 rtx insn, next, epilogue_insn = 0;
4510 int old_flag_no_peephole = flag_no_peephole;
4512 /* Execute `final' once in prescan mode to delete any insns that won't be
4513 used. Don't let final try to do any peephole optimization--it will
4514 ruin dataflow information for this pass. */
4516 flag_no_peephole = 1;
4517 final (first, 0, NO_DEBUG, 1, 1);
4518 flag_no_peephole = old_flag_no_peephole;
4521 /* If the current function has no insns other than the prologue and
4522 epilogue, then do not try to fill any delay slots. */
4523 if (n_basic_blocks == 0)
4526 /* Find the highest INSN_UID and allocate and initialize our map from
4527 INSN_UID's to position in code. */
4528 for (max_uid = 0, insn = first; insn; insn = NEXT_INSN (insn))
4530 if (INSN_UID (insn) > max_uid)
4531 max_uid = INSN_UID (insn);
4532 if (GET_CODE (insn) == NOTE
4533 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_EPILOGUE_BEG)
4534 epilogue_insn = insn;
4537 uid_to_ruid = (int *) alloca ((max_uid + 1) * sizeof (int));
4538 for (i = 0, insn = first; insn; i++, insn = NEXT_INSN (insn))
4539 uid_to_ruid[INSN_UID (insn)] = i;
4541 /* Initialize the list of insns that need filling. */
4542 if (unfilled_firstobj == 0)
4544 gcc_obstack_init (&unfilled_slots_obstack);
4545 unfilled_firstobj = (rtx *) obstack_alloc (&unfilled_slots_obstack, 0);
4548 for (insn = next_active_insn (first); insn; insn = next_active_insn (insn))
4552 INSN_ANNULLED_BRANCH_P (insn) = 0;
4553 INSN_FROM_TARGET_P (insn) = 0;
4555 /* Skip vector tables. We can't get attributes for them. */
4556 if (GET_CODE (insn) == JUMP_INSN
4557 && (GET_CODE (PATTERN (insn)) == ADDR_VEC
4558 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC))
4561 if (num_delay_slots (insn) > 0)
4562 obstack_ptr_grow (&unfilled_slots_obstack, insn);
4564 /* Ensure all jumps go to the last of a set of consecutive labels. */
4565 if (GET_CODE (insn) == JUMP_INSN
4566 && (condjump_p (insn) || condjump_in_parallel_p (insn))
4567 && JUMP_LABEL (insn) != 0
4568 && ((target = prev_label (next_active_insn (JUMP_LABEL (insn))))
4569 != JUMP_LABEL (insn)))
4570 redirect_jump (insn, target);
4573 /* Indicate what resources are required to be valid at the end of the current
4574 function. The condition code never is and memory always is. If the
4575 frame pointer is needed, it is and so is the stack pointer unless
4576 EXIT_IGNORE_STACK is non-zero. If the frame pointer is not needed, the
4577 stack pointer is. Registers used to return the function value are
4578 needed. Registers holding global variables are needed. */
4580 end_of_function_needs.cc = 0;
4581 end_of_function_needs.memory = 1;
4582 end_of_function_needs.unch_memory = 0;
4583 CLEAR_HARD_REG_SET (end_of_function_needs.regs);
4585 if (frame_pointer_needed)
4587 SET_HARD_REG_BIT (end_of_function_needs.regs, FRAME_POINTER_REGNUM);
4588 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
4589 SET_HARD_REG_BIT (end_of_function_needs.regs, HARD_FRAME_POINTER_REGNUM);
4591 #ifdef EXIT_IGNORE_STACK
4592 if (! EXIT_IGNORE_STACK
4593 || current_function_sp_is_unchanging)
4595 SET_HARD_REG_BIT (end_of_function_needs.regs, STACK_POINTER_REGNUM);
4598 SET_HARD_REG_BIT (end_of_function_needs.regs, STACK_POINTER_REGNUM);
4600 if (current_function_return_rtx != 0)
4601 mark_referenced_resources (current_function_return_rtx,
4602 &end_of_function_needs, 1);
4604 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4606 #ifdef EPILOGUE_USES
4607 || EPILOGUE_USES (i)
4610 SET_HARD_REG_BIT (end_of_function_needs.regs, i);
4612 /* The registers required to be live at the end of the function are
4613 represented in the flow information as being dead just prior to
4614 reaching the end of the function. For example, the return of a value
4615 might be represented by a USE of the return register immediately
4616 followed by an unconditional jump to the return label where the
4617 return label is the end of the RTL chain. The end of the RTL chain
4618 is then taken to mean that the return register is live.
4620 This sequence is no longer maintained when epilogue instructions are
4621 added to the RTL chain. To reconstruct the original meaning, the
4622 start of the epilogue (NOTE_INSN_EPILOGUE_BEG) is regarded as the
4623 point where these registers become live (start_of_epilogue_needs).
4624 If epilogue instructions are present, the registers set by those
4625 instructions won't have been processed by flow. Thus, those
4626 registers are additionally required at the end of the RTL chain
4627 (end_of_function_needs). */
4629 start_of_epilogue_needs = end_of_function_needs;
4631 while ((epilogue_insn = next_nonnote_insn (epilogue_insn)))
4632 mark_set_resources (epilogue_insn, &end_of_function_needs, 0, 1);
4634 /* Show we haven't computed an end-of-function label yet. */
4635 end_of_function_label = 0;
4637 /* Allocate and initialize the tables used by mark_target_live_regs. */
4639 = (struct target_info **) alloca ((TARGET_HASH_PRIME
4640 * sizeof (struct target_info *)));
4641 bzero ((char *) target_hash_table,
4642 TARGET_HASH_PRIME * sizeof (struct target_info *));
4644 bb_ticks = (int *) alloca (n_basic_blocks * sizeof (int));
4645 bzero ((char *) bb_ticks, n_basic_blocks * sizeof (int));
4647 /* Initialize the statistics for this function. */
4648 bzero ((char *) num_insns_needing_delays, sizeof num_insns_needing_delays);
4649 bzero ((char *) num_filled_delays, sizeof num_filled_delays);
4651 /* Now do the delay slot filling. Try everything twice in case earlier
4652 changes make more slots fillable. */
4654 for (reorg_pass_number = 0;
4655 reorg_pass_number < MAX_REORG_PASSES;
4656 reorg_pass_number++)
4658 fill_simple_delay_slots (1);
4659 fill_simple_delay_slots (0);
4660 fill_eager_delay_slots ();
4661 relax_delay_slots (first);
4664 /* Delete any USE insns made by update_block; subsequent passes don't need
4665 them or know how to deal with them. */
4666 for (insn = first; insn; insn = next)
4668 next = NEXT_INSN (insn);
4670 if (GET_CODE (insn) == INSN && GET_CODE (PATTERN (insn)) == USE
4671 && GET_RTX_CLASS (GET_CODE (XEXP (PATTERN (insn), 0))) == 'i')
4672 next = delete_insn (insn);
4675 /* If we made an end of function label, indicate that it is now
4676 safe to delete it by undoing our prior adjustment to LABEL_NUSES.
4677 If it is now unused, delete it. */
4678 if (end_of_function_label && --LABEL_NUSES (end_of_function_label) == 0)
4679 delete_insn (end_of_function_label);
4682 if (HAVE_return && end_of_function_label != 0)
4683 make_return_insns (first);
4686 obstack_free (&unfilled_slots_obstack, unfilled_firstobj);
4688 /* It is not clear why the line below is needed, but it does seem to be. */
4689 unfilled_firstobj = (rtx *) obstack_alloc (&unfilled_slots_obstack, 0);
4691 /* Reposition the prologue and epilogue notes in case we moved the
4692 prologue/epilogue insns. */
4693 reposition_prologue_and_epilogue_notes (first);
4697 register int i, j, need_comma;
4699 for (reorg_pass_number = 0;
4700 reorg_pass_number < MAX_REORG_PASSES;
4701 reorg_pass_number++)
4703 fprintf (file, ";; Reorg pass #%d:\n", reorg_pass_number + 1);
4704 for (i = 0; i < NUM_REORG_FUNCTIONS; i++)
4707 fprintf (file, ";; Reorg function #%d\n", i);
4709 fprintf (file, ";; %d insns needing delay slots\n;; ",
4710 num_insns_needing_delays[i][reorg_pass_number]);
4712 for (j = 0; j < MAX_DELAY_HISTOGRAM; j++)
4713 if (num_filled_delays[i][j][reorg_pass_number])
4716 fprintf (file, ", ");
4718 fprintf (file, "%d got %d delays",
4719 num_filled_delays[i][j][reorg_pass_number], j);
4721 fprintf (file, "\n");
4726 /* For all JUMP insns, fill in branch prediction notes, so that during
4727 assembler output a target can set branch prediction bits in the code.
4728 We have to do this now, as up until this point the destinations of
4729 JUMPS can be moved around and changed, but past right here that cannot
4731 for (insn = first; insn; insn = NEXT_INSN (insn))
4735 if (GET_CODE (insn) == INSN)
4737 rtx pat = PATTERN (insn);
4739 if (GET_CODE (pat) == SEQUENCE)
4740 insn = XVECEXP (pat, 0, 0);
4742 if (GET_CODE (insn) != JUMP_INSN)
4745 pred_flags = get_jump_flags (insn, JUMP_LABEL (insn));
4746 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_BR_PRED,
4747 GEN_INT (pred_flags),
4751 #endif /* DELAY_SLOTS */