/* Perform instruction reorganizations for delay slot filling.
   Copyright (C) 1992, 93, 94, 95, 96, 97, 1998 Free Software Foundation, Inc.
   Contributed by Richard Kenner (kenner@vlsi1.ultra.nyu.edu).
   Hacked by Michael Tiemann (tiemann@cygnus.com).

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */
/* Instruction reorganization pass.

   This pass runs after register allocation and final jump
   optimization.  It should be the last pass to run before peephole.
   It serves primarily to fill delay slots of insns, typically branch
   and call insns.  Other insns typically involve more complicated
   interactions of data dependencies and resource constraints, and
   are better handled by scheduling before register allocation (by the
   function `schedule_insns').

   The Branch Penalty is the number of extra cycles that are needed to
   execute a branch insn.  On an ideal machine, branches take a single
   cycle, and the Branch Penalty is 0.  Several RISC machines approach
   branch delays differently:

   The MIPS and AMD 29000 have a single branch delay slot.  Most insns
   (except other branches) can be used to fill this slot.  When the
   slot is filled, two insns execute in two cycles, reducing the
   branch penalty to zero.

   The Motorola 88000 conditionally exposes its branch delay slot,
   so code is shorter when it is turned off, but will run faster
   when useful insns are scheduled there.

   The IBM ROMP has two forms of branch and call insns, both with and
   without a delay slot.  Much like the 88k, insns not using the delay
   slot can be shorter (2 bytes vs. 4 bytes), but will run slower.

   The SPARC always has a branch delay slot, but its effects can be
   annulled when the branch is not taken.  This means that failing to
   find other sources of insns, we can hoist an insn from the branch
   target that would only be safe to execute knowing that the branch
   is taken.

   The HP-PA always has a branch delay slot.  For unconditional branches
   its effects can be annulled when the branch is taken.  The effects
   of the delay slot in a conditional branch can be nullified for forward
   taken branches, or for untaken backward branches.  This means
   we can hoist insns from the fall-through path for forward branches or
   steal insns from the target of backward branches.
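
   For illustration (a made-up, SPARC-flavored sketch, not the output of
   any particular compiler), filling a single delay slot with an insn
   from before the branch turns

	add  %o1, 4, %o1		cmp  %o2, 0
	cmp  %o2, 0		into	be   L2
	be   L2				 add  %o1, 4, %o1  ! delay slot
	 nop

   which is safe because the add does not affect the tested condition.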

   Three techniques for filling delay slots have been implemented so far:

   (1) `fill_simple_delay_slots' is the simplest, most efficient way
   to fill delay slots.  This pass first looks for insns which come
   from before the branch and which are safe to execute after the
   branch.  Then it searches after the insn requiring delay slots or,
   in the case of a branch, for insns that are after the point at
   which the branch merges into the fallthrough code, if such a point
   exists.  When such insns are found, the branch penalty decreases
   and no code expansion takes place.

   (2) `fill_eager_delay_slots' is more complicated: it is used for
   scheduling conditional jumps, or for scheduling jumps which cannot
   be filled using (1).  A machine need not have annulled jumps to use
   this strategy, but it helps (by keeping more options open).
   `fill_eager_delay_slots' tries to guess the direction the branch
   will go; if it guesses right 100% of the time, it can reduce the
   branch penalty as much as `fill_simple_delay_slots' does.  If it
   guesses wrong 100% of the time, it might as well schedule nops (or
   on the m88k, unexpose the branch slot).  When
   `fill_eager_delay_slots' takes insns from the fall-through path of
   the jump, usually there is no code expansion; when it takes insns
   from the branch target, there is code expansion if it is not the
   only way to reach that target.

   (3) `relax_delay_slots' uses a set of rules to simplify code that
   has been reorganized by (1) and (2).  It finds cases where a
   conditional test can be eliminated, jumps can be threaded, extra
   insns can be eliminated, etc.  It is the job of (1) and (2) to do a
   good job of scheduling locally; `relax_delay_slots' takes care of
   making the various individual schedules work well together.  It is
   especially tuned to handle the control flow interactions of branch
   insns.  It does nothing for insns with delay slots that do not
   branch.
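
   For illustration (again a made-up, SPARC-flavored sketch), when (2)
   steals an insn from the branch target, it copies the insn into an
   annulled delay slot and redirects the branch past the original:

	be   L2				be,a  L2'	 ! annul if not taken
	 nop			into	 ld   [%o0], %o1 ! copied from L2
	...				...
      L2:			      L2:
	ld   [%o0], %o1			ld   [%o0], %o1
	...			      L2':
					...

   The copy costs code size unless this branch was the only way to
   reach L2.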

   On machines that use CC0, we are very conservative.  We will not make
   a copy of an insn involving CC0 since we want to maintain a 1-1
   correspondence between the insn that sets CC0 and the insn that uses
   CC0.  The insns are allowed to be separated by placing an insn that
   sets CC0 (but not an insn that uses CC0; we could do this, but it
   doesn't seem worthwhile) in a delay slot.  In that case, we point each
   insn at the other with REG_CC_USER and REG_CC_SETTER notes.  Note that
   these restrictions affect very few machines because most RISC machines
   with delay slots will not use CC0 (the RT is the only known exception
   at this point).
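
   For illustration, linking such a pair might look like the following
   sketch, where `setter' and `user' are hypothetical insns and the
   note-building calls are a sketch rather than a quote from this file:

	REG_NOTES (user) = gen_rtx_EXPR_LIST (REG_CC_SETTER, setter,
					      REG_NOTES (user));
	REG_NOTES (setter) = gen_rtx_EXPR_LIST (REG_CC_USER, user,
						REG_NOTES (setter));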

   Not yet implemented:

   The Acorn RISC Machine can conditionally execute most insns, so
   it is profitable to move single insns into a position to execute
   based on the condition code of the previous insn.

   The HP-PA can conditionally nullify insns, providing a similar
   effect to the ARM, differing mostly in which insn is "in charge".  */
#include "insn-config.h"
#include "conditions.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "insn-flags.h"
#include "insn-attr.h"

/* Import list of registers used as spill regs from reload.  */
extern HARD_REG_SET used_spill_regs;

/* Import highest label used in function at end of reload.  */
extern int max_label_num_after_reload;

#define obstack_chunk_alloc xmalloc
#define obstack_chunk_free free

#ifndef ANNUL_IFTRUE_SLOTS
#define eligible_for_annul_true(INSN, SLOTS, TRIAL, FLAGS) 0
#endif
#ifndef ANNUL_IFFALSE_SLOTS
#define eligible_for_annul_false(INSN, SLOTS, TRIAL, FLAGS) 0
#endif

/* Insns which have delay slots that have not yet been filled.  */

static struct obstack unfilled_slots_obstack;
static rtx *unfilled_firstobj;

/* Define macros to refer to the first and last slot containing unfilled
   insns.  These are used because the list may move and its address
   should be recomputed at each use.  */

#define unfilled_slots_base \
  ((rtx *) obstack_base (&unfilled_slots_obstack))

#define unfilled_slots_next \
  ((rtx *) obstack_next_free (&unfilled_slots_obstack))
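
/* For illustration (a sketch, not code from this pass): a typical scan
   over the currently unfilled insns using the macros above would look
   like

       rtx *slot;

       for (slot = unfilled_slots_base; slot < unfilled_slots_next; slot++)
	 if (*slot != 0)
	   ... consider filling the delay slots of *slot ...

   Recomputing the base at each use matters because growing the obstack
   may move the vector.  */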

/* This structure is used to indicate which hardware resources are set or
   needed by insns so far.  */

struct resources
{
  char memory;		/* Insn sets or needs a memory location.  */
  char unch_memory;	/* Insn sets or needs an "unchanging" MEM.  */
  char volatil;		/* Insn sets or needs a volatile memory loc.  */
  char cc;		/* Insn sets or needs the condition codes.  */
  HARD_REG_SET regs;	/* Which registers are set or needed.  */
};

/* Macro to clear all resources.  */
#define CLEAR_RESOURCE(RES)	\
 do { (RES)->memory = (RES)->unch_memory = (RES)->volatil = (RES)->cc = 0; \
      CLEAR_HARD_REG_SET ((RES)->regs); } while (0)
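
/* A sketch of the typical usage pattern for these resource sets (compare
   insn_references_resource_p below):

       struct resources insn_res;

       CLEAR_RESOURCE (&insn_res);
       mark_referenced_resources (insn, &insn_res, include_delayed_effects);
       if (resource_conflicts_p (&insn_res, res))
	 ...  */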

/* Indicates what resources are required at the beginning of the epilogue.  */
static struct resources start_of_epilogue_needs;

/* Indicates what resources are required at function end.  */
static struct resources end_of_function_needs;

/* Points to the label before the end of the function.  */
static rtx end_of_function_label;

/* This structure is used to record liveness information at the targets or
   fallthrough insns of branches.  We will most likely need the information
   at targets again, so save them in a hash table rather than recomputing them
   each time.  */

struct target_info
{
  int uid;			/* INSN_UID of target.  */
  struct target_info *next;	/* Next info for same hash bucket.  */
  HARD_REG_SET live_regs;	/* Registers live at target.  */
  int block;			/* Basic block number containing target.  */
  int bb_tick;			/* Generation count of basic block info.  */
};

#define TARGET_HASH_PRIME 257

/* Define the hash table itself.  */
static struct target_info **target_hash_table;

/* For each basic block, we maintain a generation number of its basic
   block info, which is updated each time we move an insn from the
   target of a jump.  This is the generation number indexed by block
   number.  */

static int *bb_ticks;

/* Mapping between INSN_UID's and position in the code since INSN_UID's do
   not always monotonically increase.  */
static int *uid_to_ruid;

/* Highest valid index in `uid_to_ruid'.  */
static int max_uid;
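
/* For illustration (a sketch): given the two tables above, "insn A appears
   before insn B in the current insn stream" can be tested in constant time
   as

       uid_to_ruid[INSN_UID (a)] < uid_to_ruid[INSN_UID (b)]

   which is how get_jump_flags below classifies a jump as forward or
   backward.  */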

static void mark_referenced_resources PROTO((rtx, struct resources *, int));
static void mark_set_resources PROTO((rtx, struct resources *, int, int));
static int stop_search_p PROTO((rtx, int));
static int resource_conflicts_p PROTO((struct resources *,
				       struct resources *));
static int insn_references_resource_p PROTO((rtx, struct resources *, int));
static int insn_sets_resource_p PROTO((rtx, struct resources *, int));
static rtx find_end_label PROTO((void));
static rtx emit_delay_sequence PROTO((rtx, rtx, int));
static rtx add_to_delay_list PROTO((rtx, rtx));
static rtx delete_from_delay_slot PROTO((rtx));
static void delete_scheduled_jump PROTO((rtx));
static void note_delay_statistics PROTO((int, int));
static rtx optimize_skip PROTO((rtx));
static int get_jump_flags PROTO((rtx, rtx));
static int rare_destination PROTO((rtx));
static int mostly_true_jump PROTO((rtx, rtx));
static rtx get_branch_condition PROTO((rtx, rtx));
static int condition_dominates_p PROTO((rtx, rtx));
static rtx steal_delay_list_from_target PROTO((rtx, rtx, rtx, rtx,
					       int, int *, int *, rtx *));
static rtx steal_delay_list_from_fallthrough PROTO((rtx, rtx, rtx, rtx,
static void try_merge_delay_insns PROTO((rtx, rtx));
static rtx redundant_insn PROTO((rtx, rtx, rtx));
static int own_thread_p PROTO((rtx, rtx, int));
static int find_basic_block PROTO((rtx));
static void update_block PROTO((rtx, rtx));
static int reorg_redirect_jump PROTO((rtx, rtx));
static void update_reg_dead_notes PROTO((rtx, rtx));
static void fix_reg_dead_note PROTO((rtx, rtx));
static void update_reg_unused_notes PROTO((rtx, rtx));
static void update_live_status PROTO((rtx, rtx));
static rtx next_insn_no_annul PROTO((rtx));
static rtx find_dead_or_set_registers PROTO ((rtx, struct resources *, rtx *,
					      int, struct resources,
static void mark_target_live_regs PROTO((rtx, struct resources *));
static void fill_simple_delay_slots PROTO((int));
static rtx fill_slots_from_thread PROTO((rtx, rtx, rtx, rtx, int, int,
					 int, int, int *, rtx));
static void fill_eager_delay_slots PROTO((void));
static void relax_delay_slots PROTO((rtx));
static void make_return_insns PROTO((rtx));
static int redirect_with_delay_slots_safe_p PROTO ((rtx, rtx, rtx));
static int redirect_with_delay_list_safe_p PROTO ((rtx, rtx, rtx));

/* Given X, some rtl, and RES, a pointer to a `struct resources', mark
   which resources are referenced by the insn.  If INCLUDE_DELAYED_EFFECTS
   is TRUE, resources used by the called routine will be included for
   CALL_INSNs.  */

mark_referenced_resources (x, res, include_delayed_effects)
     register struct resources *res;
     register int include_delayed_effects;

  register enum rtx_code code = GET_CODE (x);

  register char *format_ptr;

  /* Handle leaf items for which we set resource flags.  Also, special-case
     CALL, SET and CLOBBER operators.  */

      if (GET_CODE (SUBREG_REG (x)) != REG)
	mark_referenced_resources (SUBREG_REG (x), res, 0);

	  int regno = REGNO (SUBREG_REG (x)) + SUBREG_WORD (x);
	  int last_regno = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));

	  for (i = regno; i < last_regno; i++)
	    SET_HARD_REG_BIT (res->regs, i);

      for (i = 0; i < HARD_REGNO_NREGS (REGNO (x), GET_MODE (x)); i++)
	SET_HARD_REG_BIT (res->regs, REGNO (x) + i);

      /* If this memory shouldn't change, it really isn't referencing
	 memory.  */
      if (RTX_UNCHANGING_P (x))
	res->unch_memory = 1;

      res->volatil = MEM_VOLATILE_P (x);

      /* Mark registers used to access memory.  */
      mark_referenced_resources (XEXP (x, 0), res, 0);

    case UNSPEC_VOLATILE:

      /* Traditional asm's are always volatile.  */

      res->volatil = MEM_VOLATILE_P (x);

      /* For all ASM_OPERANDS, we must traverse the vector of input operands.
	 We cannot just fall through here since then we would be confused
	 by the ASM_INPUT rtx inside ASM_OPERANDS, which do not indicate
	 traditional asms unlike their normal usage.  */

      for (i = 0; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
	mark_referenced_resources (ASM_OPERANDS_INPUT (x, i), res, 0);

      /* The first operand will be a (MEM (xxx)) but doesn't really reference
	 memory.  The second operand may be referenced, though.  */
      mark_referenced_resources (XEXP (XEXP (x, 0), 0), res, 0);
      mark_referenced_resources (XEXP (x, 1), res, 0);

      /* Usually, the first operand of SET is set, not referenced.  But
	 registers used to access memory are referenced.  SET_DEST is
	 also referenced if it is a ZERO_EXTRACT or SIGN_EXTRACT.  */

      mark_referenced_resources (SET_SRC (x), res, 0);

      if (GET_CODE (x) == SIGN_EXTRACT || GET_CODE (x) == ZERO_EXTRACT)
	mark_referenced_resources (x, res, 0);
      else if (GET_CODE (x) == SUBREG)

      if (GET_CODE (x) == MEM)
	mark_referenced_resources (XEXP (x, 0), res, 0);

      if (include_delayed_effects)

	  /* A CALL references memory, the frame pointer if it exists, the
	     stack pointer, any global registers and any registers given in
	     USE insns immediately in front of the CALL.

	     However, we may have moved some of the parameter loading insns
	     into the delay slot of this CALL.  If so, the USE's for them
	     don't count and should be skipped.  */
	  rtx insn = PREV_INSN (x);

	  rtx next = NEXT_INSN (x);

	  /* If we are part of a delay slot sequence, point at the SEQUENCE.  */
	  if (NEXT_INSN (insn) != x)

	      next = NEXT_INSN (NEXT_INSN (insn));
	      sequence = PATTERN (NEXT_INSN (insn));
	      seq_size = XVECLEN (sequence, 0);
	      if (GET_CODE (sequence) != SEQUENCE)

	  SET_HARD_REG_BIT (res->regs, STACK_POINTER_REGNUM);
	  if (frame_pointer_needed)

	      SET_HARD_REG_BIT (res->regs, FRAME_POINTER_REGNUM);
#if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
	      SET_HARD_REG_BIT (res->regs, HARD_FRAME_POINTER_REGNUM);

	  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
	      SET_HARD_REG_BIT (res->regs, i);

	  /* Check for a NOTE_INSN_SETJMP.  If it exists, then we must
	     assume that this call can need any register.

	     This is done to be more conservative about how we handle setjmp.
	     We assume that they both use and set all registers.  Using all
	     registers ensures that a register will not be considered dead
	     just because it crosses a setjmp call.  A register should be
	     considered dead only if the setjmp call returns non-zero.  */
	  if (next && GET_CODE (next) == NOTE
	      && NOTE_LINE_NUMBER (next) == NOTE_INSN_SETJMP)
	    SET_HARD_REG_SET (res->regs);

	  for (link = CALL_INSN_FUNCTION_USAGE (x);
	       link = XEXP (link, 1))
	    if (GET_CODE (XEXP (link, 0)) == USE)

		for (i = 1; i < seq_size; i++)

		    rtx slot_pat = PATTERN (XVECEXP (sequence, 0, i));
		    if (GET_CODE (slot_pat) == SET
			&& rtx_equal_p (SET_DEST (slot_pat),
					SET_DEST (XEXP (link, 0))))

		  mark_referenced_resources (SET_DEST (XEXP (link, 0)),

      /* ... fall through to other INSN processing ...  */

#ifdef INSN_REFERENCES_ARE_DELAYED
      if (! include_delayed_effects
	  && INSN_REFERENCES_ARE_DELAYED (x))

      /* No special processing, just speed up.  */
      mark_referenced_resources (PATTERN (x), res, include_delayed_effects);

  /* Process each sub-expression and flag what it needs.  */
  format_ptr = GET_RTX_FORMAT (code);
  for (i = 0; i < GET_RTX_LENGTH (code); i++)
    switch (*format_ptr++)

	mark_referenced_resources (XEXP (x, i), res, include_delayed_effects);

	for (j = 0; j < XVECLEN (x, i); j++)
	  mark_referenced_resources (XVECEXP (x, i, j), res,
				     include_delayed_effects);

/* Given X, a part of an insn, and a pointer to a `struct resources',
   RES, indicate which resources are modified by the insn.  If
   INCLUDE_DELAYED_EFFECTS is nonzero, also mark resources potentially
   set by the called routine.

   If IN_DEST is nonzero, it means we are inside a SET.  Otherwise,
   objects are being referenced instead of set.

   We never mark the insn as modifying the condition code unless it explicitly
   SETs CC0 even though this is not totally correct.  The reason for this is
   that we require a SET of CC0 to immediately precede the reference to CC0.
   So if some other insn sets CC0 as a side-effect, we know it cannot affect
   our computation and thus may be placed in a delay slot.  */

mark_set_resources (x, res, in_dest, include_delayed_effects)
     register struct resources *res;
     int include_delayed_effects;

  register enum rtx_code code;

  register char *format_ptr;

  /* These don't set any resources.  */

      /* Called routine modifies the condition code, memory, any registers
	 that aren't saved across calls, global registers and anything
	 explicitly CLOBBERed immediately after the CALL_INSN.  */

      if (include_delayed_effects)

	  rtx next = NEXT_INSN (x);
	  rtx prev = PREV_INSN (x);

	  res->cc = res->memory = 1;
	  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
	    if (call_used_regs[i] || global_regs[i])
	      SET_HARD_REG_BIT (res->regs, i);

	  /* If X is part of a delay slot sequence, then NEXT should be
	     the first insn after the sequence.  */
	  if (NEXT_INSN (prev) != x)
	    next = NEXT_INSN (NEXT_INSN (prev));

	  for (link = CALL_INSN_FUNCTION_USAGE (x);
	       link; link = XEXP (link, 1))
	    if (GET_CODE (XEXP (link, 0)) == CLOBBER)
	      mark_set_resources (SET_DEST (XEXP (link, 0)), res, 1, 0);

	  /* Check for a NOTE_INSN_SETJMP.  If it exists, then we must
	     assume that this call can clobber any register.  */
	  if (next && GET_CODE (next) == NOTE
	      && NOTE_LINE_NUMBER (next) == NOTE_INSN_SETJMP)
	    SET_HARD_REG_SET (res->regs);

      /* ... and also what its RTL says it modifies, if anything.  */

	/* An insn consisting of just a CLOBBER (or USE) is just for flow
	   and doesn't actually do anything, so we ignore it.  */

#ifdef INSN_SETS_ARE_DELAYED
      if (! include_delayed_effects
	  && INSN_SETS_ARE_DELAYED (x))

      if (GET_CODE (x) != USE && GET_CODE (x) != CLOBBER)

      /* If the source of a SET is a CALL, this is actually done by
	 the called routine.  So only include it if we are to include the
	 effects of the calling routine.  */

      mark_set_resources (SET_DEST (x), res,
			  (include_delayed_effects
			   || GET_CODE (SET_SRC (x)) != CALL),

      mark_set_resources (SET_SRC (x), res, 0, 0);

      mark_set_resources (XEXP (x, 0), res, 1, 0);

      for (i = 0; i < XVECLEN (x, 0); i++)
	if (! (INSN_ANNULLED_BRANCH_P (XVECEXP (x, 0, 0))
	       && INSN_FROM_TARGET_P (XVECEXP (x, 0, i))))
	  mark_set_resources (XVECEXP (x, 0, i), res, 0,
			      include_delayed_effects);

      mark_set_resources (XEXP (x, 0), res, 1, 0);

      mark_set_resources (XEXP (x, 0), res, in_dest, 0);
      mark_set_resources (XEXP (x, 1), res, 0, 0);
      mark_set_resources (XEXP (x, 2), res, 0, 0);

      res->unch_memory = RTX_UNCHANGING_P (x);
      res->volatil = MEM_VOLATILE_P (x);

      mark_set_resources (XEXP (x, 0), res, 0, 0);

      if (GET_CODE (SUBREG_REG (x)) != REG)
	mark_set_resources (SUBREG_REG (x), res,
			    in_dest, include_delayed_effects);

	  int regno = REGNO (SUBREG_REG (x)) + SUBREG_WORD (x);
	  int last_regno = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));

	  for (i = regno; i < last_regno; i++)
	    SET_HARD_REG_BIT (res->regs, i);

      for (i = 0; i < HARD_REGNO_NREGS (REGNO (x), GET_MODE (x)); i++)
	SET_HARD_REG_BIT (res->regs, REGNO (x) + i);

  /* Process each sub-expression and flag what it needs.  */
  format_ptr = GET_RTX_FORMAT (code);
  for (i = 0; i < GET_RTX_LENGTH (code); i++)
    switch (*format_ptr++)

	mark_set_resources (XEXP (x, i), res, in_dest, include_delayed_effects);

	for (j = 0; j < XVECLEN (x, i); j++)
	  mark_set_resources (XVECEXP (x, i, j), res, in_dest,
			      include_delayed_effects);

/* Return TRUE if this insn should stop the search for insns to fill delay
   slots.  LABELS_P indicates that labels should terminate the search.
   In all cases, jumps terminate the search.  */

stop_search_p (insn, labels_p)

  switch (GET_CODE (insn))

      /* OK unless it contains a delay slot or is an `asm' insn of some type.
	 We don't know anything about these.  */
      return (GET_CODE (PATTERN (insn)) == SEQUENCE
	      || GET_CODE (PATTERN (insn)) == ASM_INPUT
	      || asm_noperands (PATTERN (insn)) >= 0);

/* Return TRUE if any resources are marked in both RES1 and RES2 or if either
   resource set contains a volatile memory reference.  Otherwise, return
   FALSE.  */

resource_conflicts_p (res1, res2)
     struct resources *res1, *res2;

  if ((res1->cc && res2->cc) || (res1->memory && res2->memory)
      || (res1->unch_memory && res2->unch_memory)
      || res1->volatil || res2->volatil)

  return (res1->regs & res2->regs) != HARD_CONST (0);

  for (i = 0; i < HARD_REG_SET_LONGS; i++)
    if ((res1->regs[i] & res2->regs[i]) != 0)

/* Return TRUE if any resource marked in RES, a `struct resources', is
   referenced by INSN.  If INCLUDE_DELAYED_EFFECTS is set, also count
   the resources used by the called routine.

   We compute this by computing all the resources referenced by INSN and
   seeing if this conflicts with RES.  It might be faster to directly check
   ourselves, and this is the way it used to work, but it means duplicating
   a large block of complex code.  */

insn_references_resource_p (insn, res, include_delayed_effects)
     register struct resources *res;
     int include_delayed_effects;

  struct resources insn_res;

  CLEAR_RESOURCE (&insn_res);
  mark_referenced_resources (insn, &insn_res, include_delayed_effects);
  return resource_conflicts_p (&insn_res, res);

/* Return TRUE if INSN modifies resources that are marked in RES.
   INCLUDE_DELAYED_EFFECTS is set if the actions of that routine should be
   included.  CC0 is only modified if it is explicitly set; see comments
   in front of mark_set_resources for details.  */

insn_sets_resource_p (insn, res, include_delayed_effects)
     register struct resources *res;
     int include_delayed_effects;

  struct resources insn_sets;

  CLEAR_RESOURCE (&insn_sets);
  mark_set_resources (insn, &insn_sets, 0, include_delayed_effects);
  return resource_conflicts_p (&insn_sets, res);

/* Find a label at the end of the function or before a RETURN.  If there
   is none, make one.  */

  /* If we found one previously, return it.  */
  if (end_of_function_label)
    return end_of_function_label;

  /* Otherwise, see if there is a label at the end of the function.  If there
     is, it must be that RETURN insns aren't needed, so that is our return
     label and we don't have to do anything else.  */

  insn = get_last_insn ();
  while (GET_CODE (insn) == NOTE
	 || (GET_CODE (insn) == INSN
	     && (GET_CODE (PATTERN (insn)) == USE
		 || GET_CODE (PATTERN (insn)) == CLOBBER)))
    insn = PREV_INSN (insn);

  /* When a target threads its epilogue we might already have a
     suitable return insn.  If so put a label before it for the
     end_of_function_label.  */
  if (GET_CODE (insn) == BARRIER
      && GET_CODE (PREV_INSN (insn)) == JUMP_INSN
      && GET_CODE (PATTERN (PREV_INSN (insn))) == RETURN)

      rtx temp = PREV_INSN (PREV_INSN (insn));
      end_of_function_label = gen_label_rtx ();
      LABEL_NUSES (end_of_function_label) = 0;

      /* Put the label before any USE insns that may precede the
	 RETURN insn.  */
      while (GET_CODE (temp) == USE)
	temp = PREV_INSN (temp);

      emit_label_after (end_of_function_label, temp);

  else if (GET_CODE (insn) == CODE_LABEL)
    end_of_function_label = insn;

      /* Otherwise, make a new label and emit a RETURN and BARRIER,
	 if needed.  */
      end_of_function_label = gen_label_rtx ();
      LABEL_NUSES (end_of_function_label) = 0;
      emit_label (end_of_function_label);

	  /* The return we make may have delay slots too.  */
	  rtx insn = gen_return ();
	  insn = emit_jump_insn (insn);

	  if (num_delay_slots (insn) > 0)
	    obstack_ptr_grow (&unfilled_slots_obstack, insn);

  /* Show one additional use for this label so it won't go away until
     we are done.  */
  ++LABEL_NUSES (end_of_function_label);

  return end_of_function_label;

/* Put INSN and LIST together in a SEQUENCE rtx of LENGTH, and replace
   the pattern of INSN with the SEQUENCE.

   Chain the insns so that NEXT_INSN of each insn in the sequence points to
   the next and NEXT_INSN of the last insn in the sequence points to
   the first insn after the sequence.  Similarly for PREV_INSN.  This makes
   it easier to scan all insns.

   Returns the SEQUENCE that replaces INSN.  */

emit_delay_sequence (insn, list, length)

  /* Allocate the rtvec to hold the insns and the SEQUENCE.  */
  rtvec seqv = rtvec_alloc (length + 1);
  rtx seq = gen_rtx_SEQUENCE (VOIDmode, seqv);
  rtx seq_insn = make_insn_raw (seq);
  rtx first = get_insns ();
  rtx last = get_last_insn ();

  /* Make a copy of the insn having delay slots.  */
  rtx delay_insn = copy_rtx (insn);

  /* If INSN is followed by a BARRIER, delete the BARRIER since it will only
     confuse further processing.  Update LAST in case it was the last insn.
     We will put the BARRIER back in later.  */
  if (NEXT_INSN (insn) && GET_CODE (NEXT_INSN (insn)) == BARRIER)

      delete_insn (NEXT_INSN (insn));
      last = get_last_insn ();

  /* Splice our SEQUENCE into the insn stream where INSN used to be.  */
  NEXT_INSN (seq_insn) = NEXT_INSN (insn);
  PREV_INSN (seq_insn) = PREV_INSN (insn);

    PREV_INSN (NEXT_INSN (seq_insn)) = seq_insn;

    NEXT_INSN (PREV_INSN (seq_insn)) = seq_insn;

  /* Note the calls to set_new_first_and_last_insn must occur after
     SEQ_INSN has been completely spliced into the insn stream.

     Otherwise CUR_INSN_UID will get set to an incorrect value because
     set_new_first_and_last_insn will not find SEQ_INSN in the chain.  */

    set_new_first_and_last_insn (first, seq_insn);

    set_new_first_and_last_insn (seq_insn, last);

  /* Build our SEQUENCE and rebuild the insn chain.  */
  XVECEXP (seq, 0, 0) = delay_insn;
  INSN_DELETED_P (delay_insn) = 0;
  PREV_INSN (delay_insn) = PREV_INSN (seq_insn);

  for (li = list; li; li = XEXP (li, 1), i++)

      rtx tem = XEXP (li, 0);

      /* Show that this copy of the insn isn't deleted.  */
      INSN_DELETED_P (tem) = 0;

      XVECEXP (seq, 0, i) = tem;
      PREV_INSN (tem) = XVECEXP (seq, 0, i - 1);
      NEXT_INSN (XVECEXP (seq, 0, i - 1)) = tem;

      /* Remove any REG_DEAD notes because we can't rely on them now
	 that the insn has been moved.  */
      for (note = REG_NOTES (tem); note; note = XEXP (note, 1))
	if (REG_NOTE_KIND (note) == REG_DEAD)
	  XEXP (note, 0) = const0_rtx;

  NEXT_INSN (XVECEXP (seq, 0, length)) = NEXT_INSN (seq_insn);

  /* If the previous insn is a SEQUENCE, update the NEXT_INSN pointer on the
     last insn in that SEQUENCE to point to us.  Similarly update the
     PREV_INSN pointer on the first insn in the following SEQUENCE, if
     there is one.  */

  if (PREV_INSN (seq_insn) && GET_CODE (PREV_INSN (seq_insn)) == INSN
      && GET_CODE (PATTERN (PREV_INSN (seq_insn))) == SEQUENCE)
    NEXT_INSN (XVECEXP (PATTERN (PREV_INSN (seq_insn)), 0,
			XVECLEN (PATTERN (PREV_INSN (seq_insn)), 0) - 1))

  if (NEXT_INSN (seq_insn) && GET_CODE (NEXT_INSN (seq_insn)) == INSN
      && GET_CODE (PATTERN (NEXT_INSN (seq_insn))) == SEQUENCE)
    PREV_INSN (XVECEXP (PATTERN (NEXT_INSN (seq_insn)), 0, 0)) = seq_insn;

  /* If there used to be a BARRIER, put it back.  */

    emit_barrier_after (seq_insn);

/* Add INSN to DELAY_LIST and return the head of the new list.  The list must
   be in the order in which the insns are to be executed.  */

add_to_delay_list (insn, delay_list)

  /* If we have an empty list, just make a new list element.  If
     INSN has its block number recorded, clear it since we may
     be moving the insn to a new block.  */

      struct target_info *tinfo;

      for (tinfo = target_hash_table[INSN_UID (insn) % TARGET_HASH_PRIME];
	   tinfo; tinfo = tinfo->next)
	if (tinfo->uid == INSN_UID (insn))

      return gen_rtx_INSN_LIST (VOIDmode, insn, NULL_RTX);

  /* Otherwise this must be an INSN_LIST.  Add INSN to the end of the
     list.  */
  XEXP (delay_list, 1) = add_to_delay_list (insn, XEXP (delay_list, 1));

/* Delete INSN from the delay slot of the insn that it is in.  This may
   produce an insn without anything in its delay slots.  */

delete_from_delay_slot (insn)

  rtx trial, seq_insn, seq, prev;

  /* We first must find the insn containing the SEQUENCE with INSN in its
     delay slot.  Do this by finding an insn, TRIAL, where
     PREV_INSN (NEXT_INSN (TRIAL)) != TRIAL.  */

       PREV_INSN (NEXT_INSN (trial)) == trial;
       trial = NEXT_INSN (trial))

  seq_insn = PREV_INSN (NEXT_INSN (trial));
  seq = PATTERN (seq_insn);

  /* Create a delay list consisting of all the insns other than the one
     we are deleting (unless we were the only one).  */
  if (XVECLEN (seq, 0) > 2)
    for (i = 1; i < XVECLEN (seq, 0); i++)
      if (XVECEXP (seq, 0, i) != insn)
	delay_list = add_to_delay_list (XVECEXP (seq, 0, i), delay_list);

  /* Delete the old SEQUENCE, re-emit the insn that used to have the delay
     list, and rebuild the delay list if non-empty.  */
  prev = PREV_INSN (seq_insn);
  trial = XVECEXP (seq, 0, 0);
  delete_insn (seq_insn);
  add_insn_after (trial, prev);

  if (GET_CODE (trial) == JUMP_INSN
      && (simplejump_p (trial) || GET_CODE (PATTERN (trial)) == RETURN))
    emit_barrier_after (trial);

  /* If there are any delay insns, re-emit them.  Otherwise clear the
     annul flag.  */
    trial = emit_delay_sequence (trial, delay_list, XVECLEN (seq, 0) - 2);

    INSN_ANNULLED_BRANCH_P (trial) = 0;

  INSN_FROM_TARGET_P (insn) = 0;

  /* Show we need to fill this insn again.  */
  obstack_ptr_grow (&unfilled_slots_obstack, trial);

/* Delete INSN, a JUMP_INSN.  If it is a conditional jump, we must track down
   the insn that sets CC0 for it and delete it too.  */

delete_scheduled_jump (insn)

  /* Delete the insn that sets cc0 for us.  On machines without cc0, we could
     delete the insn that sets the condition code, but it is hard to find it.
     Since this case is rare anyway, don't bother trying; there would likely
     be other insns that became dead anyway, which we wouldn't know to
     delete.  */

  if (reg_mentioned_p (cc0_rtx, insn))

      rtx note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);

      /* If a reg-note was found, it points to an insn to set CC0.  This
	 insn is in the delay list of some other insn.  So delete it from
	 the delay list it was in.  */

	  if (! FIND_REG_INC_NOTE (XEXP (note, 0), NULL_RTX)
	      && sets_cc0_p (PATTERN (XEXP (note, 0))) == 1)
	    delete_from_delay_slot (XEXP (note, 0));

	  /* The insn setting CC0 is our previous insn, but it may be in
	     a delay slot.  It will be the last insn in the delay slot, if
	     it is.  */
	  rtx trial = previous_insn (insn);
	  if (GET_CODE (trial) == NOTE)
	    trial = prev_nonnote_insn (trial);
	  if (sets_cc0_p (PATTERN (trial)) != 1
	      || FIND_REG_INC_NOTE (trial, 0))

	  if (PREV_INSN (NEXT_INSN (trial)) == trial)
	    delete_insn (trial);

	    delete_from_delay_slot (trial);

/* Counters for delay-slot filling.  */

#define NUM_REORG_FUNCTIONS 2
#define MAX_DELAY_HISTOGRAM 3
#define MAX_REORG_PASSES 2

static int num_insns_needing_delays[NUM_REORG_FUNCTIONS][MAX_REORG_PASSES];

static int num_filled_delays[NUM_REORG_FUNCTIONS][MAX_DELAY_HISTOGRAM+1][MAX_REORG_PASSES];

static int reorg_pass_number;

note_delay_statistics (slots_filled, index)
     int slots_filled, index;

  num_insns_needing_delays[index][reorg_pass_number]++;
  if (slots_filled > MAX_DELAY_HISTOGRAM)
    slots_filled = MAX_DELAY_HISTOGRAM;
  num_filled_delays[index][slots_filled][reorg_pass_number]++;

#if defined(ANNUL_IFFALSE_SLOTS) || defined(ANNUL_IFTRUE_SLOTS)

/* Optimize the following cases:

   1.  When a conditional branch skips over only one instruction,
       use an annulling branch and put that insn in the delay slot.
       Use either a branch that annuls when the condition is true or
       invert the test with a branch that annuls when the condition is
       false.  This saves insns, since otherwise we must copy an insn
       from the L1 target.

	(orig)		(skip)		(otherwise)
	Bcc.n L1	Bcc',a L1	Bcc,a L1'

   2.  When a conditional branch skips over only one instruction,
       and after that, it unconditionally branches somewhere else,
       perform the similar optimization.  This saves executing the
       second branch in the case where the inverted condition is true.

   INSN is a JUMP_INSN.

   This should be expanded to skip over N insns, where N is the number
   of delay slots required.  */

optimize_skip (insn)

  register rtx trial = next_nonnote_insn (insn);
  rtx next_trial = next_active_insn (trial);

  flags = get_jump_flags (insn, JUMP_LABEL (insn));

      || GET_CODE (trial) != INSN
      || GET_CODE (PATTERN (trial)) == SEQUENCE
      || recog_memoized (trial) < 0
      || (! eligible_for_annul_false (insn, 0, trial, flags)
	  && ! eligible_for_annul_true (insn, 0, trial, flags)))

  /* There are two cases where we are just executing one insn (we assume
     here that a branch requires only one insn; this should be generalized
     at some point):  Where the branch goes around a single insn or where
     we have one insn followed by a branch to the same label we branch to.
     In both of these cases, inverting the jump and annulling the delay
     slot give the same effect in fewer insns.  */
  if ((next_trial == next_active_insn (JUMP_LABEL (insn)))
	  && GET_CODE (next_trial) == JUMP_INSN
	  && JUMP_LABEL (insn) == JUMP_LABEL (next_trial)
	  && (simplejump_p (next_trial)
	      || GET_CODE (PATTERN (next_trial)) == RETURN)))

      if (eligible_for_annul_false (insn, 0, trial, flags))

	  if (invert_jump (insn, JUMP_LABEL (insn)))
	    INSN_FROM_TARGET_P (trial) = 1;
	  else if (! eligible_for_annul_true (insn, 0, trial, flags))

      delay_list = add_to_delay_list (trial, NULL_RTX);
      next_trial = next_active_insn (trial);
      update_block (trial, trial);
      delete_insn (trial);

      /* Also, if we are targeting an unconditional
	 branch, thread our jump to the target of that branch.  Don't
	 change this into a RETURN here, because it may not accept what
	 we have in the delay slot.  We'll fix this up later.  */
      if (next_trial && GET_CODE (next_trial) == JUMP_INSN
	  && (simplejump_p (next_trial)
	      || GET_CODE (PATTERN (next_trial)) == RETURN))

	  target_label = JUMP_LABEL (next_trial);
	  if (target_label == 0)
	    target_label = find_end_label ();

	  /* Recompute the flags based on TARGET_LABEL since threading
	     the jump to TARGET_LABEL may change the direction of the
	     jump (which may change the circumstances in which the
	     delay slot is nullified).  */
	  flags = get_jump_flags (insn, target_label);
	  if (eligible_for_annul_true (insn, 0, trial, flags))
	    reorg_redirect_jump (insn, target_label);

      INSN_ANNULLED_BRANCH_P (insn) = 1;

/* Encode and return branch direction and prediction information for
   INSN assuming it will jump to LABEL.

   Non-conditional branches return no direction information and
   are predicted as very likely taken.  */

get_jump_flags (insn, label)

  /* get_jump_flags can be passed any insn with delay slots, these may
     be INSNs, CALL_INSNs, or JUMP_INSNs.  Only JUMP_INSNs have branch
     direction information, and only if they are conditional jumps.

     If LABEL is zero, then there is no way to determine the branch
     direction.  */
  if (GET_CODE (insn) == JUMP_INSN
      && (condjump_p (insn) || condjump_in_parallel_p (insn))
      && INSN_UID (insn) <= max_uid
      && INSN_UID (label) <= max_uid)
      = (uid_to_ruid[INSN_UID (label)] > uid_to_ruid[INSN_UID (insn)])
	 ? ATTR_FLAG_forward : ATTR_FLAG_backward;
  /* No valid direction information.  */

  /* If insn is a conditional branch, call mostly_true_jump to
     determine the branch prediction.

     Non-conditional branches are predicted as very likely taken.  */
  if (GET_CODE (insn) == JUMP_INSN
      && (condjump_p (insn) || condjump_in_parallel_p (insn)))

      prediction = mostly_true_jump (insn, get_branch_condition (insn, label));

	  flags |= (ATTR_FLAG_very_likely | ATTR_FLAG_likely);

	  flags |= ATTR_FLAG_likely;

	  flags |= ATTR_FLAG_unlikely;

	  flags |= (ATTR_FLAG_very_unlikely | ATTR_FLAG_unlikely);

      flags |= (ATTR_FLAG_very_likely | ATTR_FLAG_likely);

/* Return 1 if INSN is a destination that will be branched to rarely (the
   return point of a function); return 2 if INSN will be branched to very
   rarely (a call to a function that doesn't return).  Otherwise,
   return 0.  */

rare_destination (insn)

  for (; insn; insn = next)

      if (GET_CODE (insn) == INSN && GET_CODE (PATTERN (insn)) == SEQUENCE)
	insn = XVECEXP (PATTERN (insn), 0, 0);

      next = NEXT_INSN (insn);

      switch (GET_CODE (insn))

	  /* A BARRIER can either be after a JUMP_INSN or a CALL_INSN.  We
	     don't scan past JUMP_INSNs, so any barrier we find here must
	     have been after a CALL_INSN and hence mean the call doesn't
	     return.  */

	  if (GET_CODE (PATTERN (insn)) == RETURN)

	  else if (simplejump_p (insn)
		   && jump_count++ < 10)
	    next = JUMP_LABEL (insn);

  /* If we got here it means we hit the end of the function.  So this
     is an unlikely destination.  */

/* Return truth value of the statement that this branch
   is mostly taken.  If we think that the branch is extremely likely
   to be taken, we return 2.  If the branch is slightly more likely to be
   taken, return 1.  If the branch is slightly less likely to be taken,
   return 0 and if the branch is highly unlikely to be taken, return -1.

   CONDITION, if non-zero, is the condition that JUMP_INSN is testing.  */

mostly_true_jump (jump_insn, condition)
     rtx jump_insn, condition;

  rtx target_label = JUMP_LABEL (jump_insn);

  int rare_dest = rare_destination (target_label);
  int rare_fallthrough = rare_destination (NEXT_INSN (jump_insn));

  /* If branch probabilities are available, then use that number since it
     always gives a correct answer.  */
  if (flag_branch_probabilities)

      rtx note = find_reg_note (jump_insn, REG_BR_PROB, 0);

	  int prob = XINT (note, 0);

	  if (prob >= REG_BR_PROB_BASE * 9 / 10)

	  else if (prob >= REG_BR_PROB_BASE / 2)

	  else if (prob >= REG_BR_PROB_BASE / 10)

  /* If this is a branch outside a loop, it is highly unlikely.  */
  if (GET_CODE (PATTERN (jump_insn)) == SET
      && GET_CODE (SET_SRC (PATTERN (jump_insn))) == IF_THEN_ELSE
      && ((GET_CODE (XEXP (SET_SRC (PATTERN (jump_insn)), 1)) == LABEL_REF
	   && LABEL_OUTSIDE_LOOP_P (XEXP (SET_SRC (PATTERN (jump_insn)), 1)))
	  || (GET_CODE (XEXP (SET_SRC (PATTERN (jump_insn)), 2)) == LABEL_REF
	      && LABEL_OUTSIDE_LOOP_P (XEXP (SET_SRC (PATTERN (jump_insn)), 2)))))

      /* If this is the test of a loop, it is very likely true.  We scan
	 backwards from the target label.  If we find a NOTE_INSN_LOOP_BEG
	 before the next real insn, we assume the branch is to the top of
	 the loop.  */
      for (insn = PREV_INSN (target_label);
	   insn && GET_CODE (insn) == NOTE;
	   insn = PREV_INSN (insn))
	if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG)

      /* If this is a jump to the test of a loop, it is likely true.  We scan
	 forwards from the target label.  If we find a NOTE_INSN_LOOP_VTOP
	 before the next real insn, we assume the branch is to the loop branch
	 test.  */
      for (insn = NEXT_INSN (target_label);
	   insn && GET_CODE (insn) == NOTE;
	   insn = NEXT_INSN (insn))
	if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_VTOP)

  /* Look at the relative rarities of the fallthrough and destination.  If
     they differ, we can predict the branch that way.  */

  switch (rare_fallthrough - rare_dest)

  /* If we couldn't figure out what this jump was, assume it won't be
     taken.  This should be rare.  */

  /* EQ tests are usually false and NE tests are usually true.  Also,
     most quantities are positive, so we can make the appropriate guesses
     about signed comparisons against zero.  */
  switch (GET_CODE (condition))

      /* Unconditional branch.  */

      if (XEXP (condition, 1) == const0_rtx)

      if (XEXP (condition, 1) == const0_rtx)

  /* Predict backward branches as usually taken and forward branches as
     usually not taken.  If we don't know whether this is forward or
     backward, assume the branch will be taken, since most are.  */
  return (target_label == 0 || INSN_UID (jump_insn) > max_uid
	  || INSN_UID (target_label) > max_uid
	  || (uid_to_ruid[INSN_UID (jump_insn)]
	      > uid_to_ruid[INSN_UID (target_label)]));

/* Return the condition under which INSN will branch to TARGET.  If TARGET
   is zero, return the condition under which INSN will return.  If INSN is
   an unconditional branch, return const_true_rtx.  If INSN isn't a simple
   type of jump, or it doesn't go to TARGET, return 0.  */
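
/* For example (a sketch of the RTL shapes handled below), a conditional
   branch to TARGET typically has the form

       (set (pc) (if_then_else (COND) (label_ref TARGET) (pc)))

   for which we return COND, while the reversed form

       (set (pc) (if_then_else (COND) (pc) (label_ref TARGET)))

   yields the reverse of COND.  */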

get_branch_condition (insn, target)

  rtx pat = PATTERN (insn);

  if (condjump_in_parallel_p (insn))
    pat = XVECEXP (pat, 0, 0);

  if (GET_CODE (pat) == RETURN)
    return target == 0 ? const_true_rtx : 0;

  else if (GET_CODE (pat) != SET || SET_DEST (pat) != pc_rtx)

  src = SET_SRC (pat);
  if (GET_CODE (src) == LABEL_REF && XEXP (src, 0) == target)
    return const_true_rtx;

  else if (GET_CODE (src) == IF_THEN_ELSE
	   && ((target == 0 && GET_CODE (XEXP (src, 1)) == RETURN)
	       || (GET_CODE (XEXP (src, 1)) == LABEL_REF
		   && XEXP (XEXP (src, 1), 0) == target))
	   && XEXP (src, 2) == pc_rtx)
    return XEXP (src, 0);

  else if (GET_CODE (src) == IF_THEN_ELSE
	   && ((target == 0 && GET_CODE (XEXP (src, 2)) == RETURN)
	       || (GET_CODE (XEXP (src, 2)) == LABEL_REF
		   && XEXP (XEXP (src, 2), 0) == target))
	   && XEXP (src, 1) == pc_rtx)
    return gen_rtx_fmt_ee (reverse_condition (GET_CODE (XEXP (src, 0))),
			   GET_MODE (XEXP (src, 0)),
			   XEXP (XEXP (src, 0), 0), XEXP (XEXP (src, 0), 1));

/* Return non-zero if CONDITION is more strict than the condition of
   INSN, i.e., if INSN will always branch if CONDITION is true.  */

condition_dominates_p (condition, insn)

  rtx other_condition = get_branch_condition (insn, JUMP_LABEL (insn));
  enum rtx_code code = GET_CODE (condition);
  enum rtx_code other_code;

  if (rtx_equal_p (condition, other_condition)
      || other_condition == const_true_rtx)

  else if (condition == const_true_rtx || other_condition == 0)

  other_code = GET_CODE (other_condition);
  if (GET_RTX_LENGTH (code) != 2 || GET_RTX_LENGTH (other_code) != 2
      || ! rtx_equal_p (XEXP (condition, 0), XEXP (other_condition, 0))
      || ! rtx_equal_p (XEXP (condition, 1), XEXP (other_condition, 1)))

  return comparison_dominates_p (code, other_code);

/* Return non-zero if redirecting JUMP to NEWLABEL does not invalidate
   any insns already in the delay slot of JUMP.  */

redirect_with_delay_slots_safe_p (jump, newlabel, seq)
     rtx jump, newlabel, seq;

  rtx pat = PATTERN (seq);

  /* Make sure all the delay slots of this jump would still
     be valid after threading the jump.  If they are still
     valid, then return non-zero.  */

  flags = get_jump_flags (jump, newlabel);
  for (i = 1; i < XVECLEN (pat, 0); i++)

#ifdef ANNUL_IFFALSE_SLOTS
	   (INSN_ANNULLED_BRANCH_P (jump)
	    && INSN_FROM_TARGET_P (XVECEXP (pat, 0, i)))
	   ? eligible_for_annul_false (jump, i - 1,
				       XVECEXP (pat, 0, i), flags) :
#ifdef ANNUL_IFTRUE_SLOTS
	   (INSN_ANNULLED_BRANCH_P (jump)
	    && ! INSN_FROM_TARGET_P (XVECEXP (pat, 0, i)))
	   ? eligible_for_annul_true (jump, i - 1,
				      XVECEXP (pat, 0, i), flags) :
	   eligible_for_delay (jump, i - 1, XVECEXP (pat, 0, i), flags)))

  return (i == XVECLEN (pat, 0));

/* Return non-zero if redirecting JUMP to NEWLABEL does not invalidate
   any insns we wish to place in the delay slot of JUMP.  */

redirect_with_delay_list_safe_p (jump, newlabel, delay_list)
     rtx jump, newlabel, delay_list;

  /* Make sure all the insns in DELAY_LIST would still be
     valid after threading the jump.  If they are still
     valid, then return non-zero.  */

  flags = get_jump_flags (jump, newlabel);
  for (li = delay_list, i = 0; li; li = XEXP (li, 1), i++)

#ifdef ANNUL_IFFALSE_SLOTS
	   (INSN_ANNULLED_BRANCH_P (jump)
	    && INSN_FROM_TARGET_P (XEXP (li, 0)))
	   ? eligible_for_annul_false (jump, i, XEXP (li, 0), flags) :
#ifdef ANNUL_IFTRUE_SLOTS
	   (INSN_ANNULLED_BRANCH_P (jump)
	    && ! INSN_FROM_TARGET_P (XEXP (li, 0)))
	   ? eligible_for_annul_true (jump, i, XEXP (li, 0), flags) :
	   eligible_for_delay (jump, i, XEXP (li, 0), flags)))

  return (li == NULL);

/* DELAY_LIST is a list of insns that have already been placed into delay
   slots.  See if all of them have the same annulling status as ANNUL_TRUE_P.
   If not, return 0; otherwise return 1.  */

check_annul_list_true_false (annul_true_p, delay_list)

  for (temp = delay_list; temp; temp = XEXP (temp, 1))

      rtx trial = XEXP (temp, 0);

      if ((annul_true_p && INSN_FROM_TARGET_P (trial))
	  || (!annul_true_p && !INSN_FROM_TARGET_P (trial)))

/* INSN branches to an insn whose pattern SEQ is a SEQUENCE.  Given that
   the condition tested by INSN is CONDITION and the resources shown in
   OTHER_NEEDED are needed after INSN, see whether INSN can take all the insns
   from SEQ's delay list, in addition to whatever insns it may execute
   (in DELAY_LIST).  SETS and NEEDED denote resources already set and
   needed while searching for delay slot insns.  Return the concatenated
   delay list if possible, otherwise, return 0.

   SLOTS_TO_FILL is the total number of slots required by INSN, and
   PSLOTS_FILLED points to the number filled so far (also the number of
   insns in DELAY_LIST).  It is updated with the number that have been
   filled from the SEQUENCE, if any.

   PANNUL_P points to a non-zero value if we already know that we need
   to annul INSN.  If this routine determines that annulling is needed,
   it may set that value non-zero.

   PNEW_THREAD points to a location that is to receive the place at which
   execution should continue.  */

steal_delay_list_from_target (insn, condition, seq, delay_list,
			      sets, needed, other_needed,
			      slots_to_fill, pslots_filled, pannul_p,
     rtx insn, condition;
     struct resources *sets, *needed, *other_needed;

  int slots_remaining = slots_to_fill - *pslots_filled;
  int total_slots_filled = *pslots_filled;
  rtx new_delay_list = 0;
  int must_annul = *pannul_p;

  /* We can't do anything if there are more delay slots in SEQ than we
     can handle, or if we don't know that it will be a taken branch.
     We know that it will be a taken branch if it is either an unconditional
     branch or a conditional branch with a stricter branch condition.

     Also, exit if the branch has more than one set, since then it is
     computing other results that can't be ignored, e.g. the HPPA
     mov&branch instruction.
     ??? It may be possible to move other sets into INSN in addition to
     moving the instructions in the delay slots.  */

  if (XVECLEN (seq, 0) - 1 > slots_remaining
      || ! condition_dominates_p (condition, XVECEXP (seq, 0, 0))
      || ! single_set (XVECEXP (seq, 0, 0)))

  for (i = 1; i < XVECLEN (seq, 0); i++)

      rtx trial = XVECEXP (seq, 0, i);

      if (insn_references_resource_p (trial, sets, 0)
	  || insn_sets_resource_p (trial, needed, 0)
	  || insn_sets_resource_p (trial, sets, 0)
	  /* If TRIAL sets CC0, we can't copy it, so we can't steal this
	     delay slot.  */
	  || find_reg_note (trial, REG_CC_USER, NULL_RTX)
	  /* If TRIAL is from the fallthrough code of an annulled branch insn
	     in SEQ, we cannot use it.  */
	  || (INSN_ANNULLED_BRANCH_P (XVECEXP (seq, 0, 0))
	      && ! INSN_FROM_TARGET_P (trial)))

      /* If this insn was already done (usually in a previous delay slot),
	 pretend we put it in our delay slot.  */
      if (redundant_insn (trial, insn, new_delay_list))

      /* We will end up re-vectoring this branch, so compute flags
	 based on jumping to the new label.  */
      flags = get_jump_flags (insn, JUMP_LABEL (XVECEXP (seq, 0, 0)));

	  && ((condition == const_true_rtx
	       || (! insn_sets_resource_p (trial, other_needed, 0)
		   && ! may_trap_p (PATTERN (trial)))))
	  ? eligible_for_delay (insn, total_slots_filled, trial, flags)
	  : (must_annul || (delay_list == NULL && new_delay_list == NULL))

	     check_annul_list_true_false (0, delay_list)
	     && check_annul_list_true_false (0, new_delay_list)
	     && eligible_for_annul_false (insn, total_slots_filled,

	  temp = copy_rtx (trial);
	  INSN_FROM_TARGET_P (temp) = 1;
	  new_delay_list = add_to_delay_list (temp, new_delay_list);
	  total_slots_filled++;

	  if (--slots_remaining == 0)

  /* Show the place to which we will be branching.  */
  *pnew_thread = next_active_insn (JUMP_LABEL (XVECEXP (seq, 0, 0)));

  /* Add any new insns to the delay list and update the count of the
     number of slots filled.  */
  *pslots_filled = total_slots_filled;

  if (delay_list == 0)
    return new_delay_list;

  for (temp = new_delay_list; temp; temp = XEXP (temp, 1))
    delay_list = add_to_delay_list (XEXP (temp, 0), delay_list);

/* Similar to steal_delay_list_from_target except that SEQ is on the
   fallthrough path of INSN.  Here we only do something if the delay insn
   of SEQ is an unconditional branch.  In that case we steal its delay slot
   for INSN since unconditional branches are much easier to fill.  */

steal_delay_list_from_fallthrough (insn, condition, seq,
				   delay_list, sets, needed, other_needed,
				   slots_to_fill, pslots_filled, pannul_p)
     rtx insn, condition;
     struct resources *sets, *needed, *other_needed;

  int must_annul = *pannul_p;

  flags = get_jump_flags (insn, JUMP_LABEL (insn));

  /* We can't do anything if SEQ's delay insn isn't an
     unconditional branch.  */

  if (! simplejump_p (XVECEXP (seq, 0, 0))
      && GET_CODE (PATTERN (XVECEXP (seq, 0, 0))) != RETURN)

  for (i = 1; i < XVECLEN (seq, 0); i++)

      rtx trial = XVECEXP (seq, 0, i);

      /* If TRIAL sets CC0, stealing it will move it too far from the use
	 of CC0.  */
      if (insn_references_resource_p (trial, sets, 0)
	  || insn_sets_resource_p (trial, needed, 0)
	  || insn_sets_resource_p (trial, sets, 0)
	  || sets_cc0_p (PATTERN (trial))

      /* If this insn was already done, we don't need it.  */
      if (redundant_insn (trial, insn, delay_list))

	  delete_from_delay_slot (trial);

	  && ((condition == const_true_rtx
	       || (! insn_sets_resource_p (trial, other_needed, 0)
		   && ! may_trap_p (PATTERN (trial)))))
	  ? eligible_for_delay (insn, *pslots_filled, trial, flags)
	  : (must_annul || delay_list == NULL) && (must_annul = 1,
	     check_annul_list_true_false (1, delay_list)
	     && eligible_for_annul_true (insn, *pslots_filled, trial, flags)))

	  delete_from_delay_slot (trial);
	  delay_list = add_to_delay_list (trial, delay_list);

	  if (++(*pslots_filled) == slots_to_fill)

/* Try merging insns starting at THREAD which match exactly the insns in
   INSN's delay slot.

   If all insns were matched and the insn was previously annulling, the
   annul bit will be cleared.

   For each insn that is merged, if the branch is or will be non-annulling,
   we delete the merged insn.  */

try_merge_delay_insns (insn, thread)

  rtx trial, next_trial;
  rtx delay_insn = XVECEXP (PATTERN (insn), 0, 0);
  int annul_p = INSN_ANNULLED_BRANCH_P (delay_insn);
  int slot_number = 1;
  int num_slots = XVECLEN (PATTERN (insn), 0);
  rtx next_to_match = XVECEXP (PATTERN (insn), 0, slot_number);
  struct resources set, needed;
  rtx merged_insns = 0;

  flags = get_jump_flags (delay_insn, JUMP_LABEL (delay_insn));

  CLEAR_RESOURCE (&needed);
  CLEAR_RESOURCE (&set);

  /* If this is not an annulling branch, take into account anything needed in
     INSN's delay slot.  This prevents two increments from being incorrectly
     folded into one.  If we are annulling, this would be the correct
     thing to do.  (The alternative, looking at things set in NEXT_TO_MATCH
     will essentially disable this optimization.  This method is somewhat of
     a kludge, but I don't see a better way.)  */

    for (i = 1; i < num_slots; i++)
      if (XVECEXP (PATTERN (insn), 0, i))
	mark_referenced_resources (XVECEXP (PATTERN (insn), 0, i), &needed, 1);

  for (trial = thread; !stop_search_p (trial, 1); trial = next_trial)

      rtx pat = PATTERN (trial);
      rtx oldtrial = trial;

      next_trial = next_nonnote_insn (trial);

      /* TRIAL must be a CALL_INSN or INSN.  Skip USE and CLOBBER.  */
      if (GET_CODE (trial) == INSN
	  && (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER))

      if (GET_CODE (next_to_match) == GET_CODE (trial)
	  /* We can't share an insn that sets cc0.  */
	  && ! sets_cc0_p (pat)
	  && ! insn_references_resource_p (trial, &set, 1)
	  && ! insn_sets_resource_p (trial, &set, 1)
	  && ! insn_sets_resource_p (trial, &needed, 1)
	  && (trial = try_split (pat, trial, 0)) != 0
	  /* Update next_trial, in case try_split succeeded.  */
	  && (next_trial = next_nonnote_insn (trial))
	  /* Likewise THREAD.  */
	  && (thread = oldtrial == thread ? trial : thread)
	  && rtx_equal_p (PATTERN (next_to_match), PATTERN (trial))
	  /* Have to test this condition if annul condition is different
	     from (and less restrictive than) non-annulling one.  */
	  && eligible_for_delay (delay_insn, slot_number - 1, trial, flags))

	      update_block (trial, thread);
	      if (trial == thread)
		thread = next_active_insn (thread);

	      delete_insn (trial);
	      INSN_FROM_TARGET_P (next_to_match) = 0;

	    merged_insns = gen_rtx_INSN_LIST (VOIDmode, trial, merged_insns);

	  if (++slot_number == num_slots)

	  next_to_match = XVECEXP (PATTERN (insn), 0, slot_number);

      mark_set_resources (trial, &set, 0, 1);
      mark_referenced_resources (trial, &needed, 1);

  /* See if we stopped on a filled insn.  If we did, try to see if its
     delay slots match.  */
  if (slot_number != num_slots
      && trial && GET_CODE (trial) == INSN
      && GET_CODE (PATTERN (trial)) == SEQUENCE
      && ! INSN_ANNULLED_BRANCH_P (XVECEXP (PATTERN (trial), 0, 0)))

      rtx pat = PATTERN (trial);
      rtx filled_insn = XVECEXP (pat, 0, 0);

      /* Account for resources set/needed by the filled insn.  */
      mark_set_resources (filled_insn, &set, 0, 1);
      mark_referenced_resources (filled_insn, &needed, 1);

      for (i = 1; i < XVECLEN (pat, 0); i++)

	  rtx dtrial = XVECEXP (pat, 0, i);

	  if (! insn_references_resource_p (dtrial, &set, 1)
	      && ! insn_sets_resource_p (dtrial, &set, 1)
	      && ! insn_sets_resource_p (dtrial, &needed, 1)
	      && ! sets_cc0_p (PATTERN (dtrial))
	      && rtx_equal_p (PATTERN (next_to_match), PATTERN (dtrial))
	      && eligible_for_delay (delay_insn, slot_number - 1, dtrial, flags))

		  update_block (dtrial, thread);
		  new = delete_from_delay_slot (dtrial);
		  if (INSN_DELETED_P (thread))
		  INSN_FROM_TARGET_P (next_to_match) = 0;

		merged_insns = gen_rtx_INSN_LIST (SImode, dtrial,

	      if (++slot_number == num_slots)

	      next_to_match = XVECEXP (PATTERN (insn), 0, slot_number);

	      /* Keep track of the set/referenced resources for the delay
		 slots of any trial insns we encounter.  */
	      mark_set_resources (dtrial, &set, 0, 1);
	      mark_referenced_resources (dtrial, &needed, 1);
2012 /* If all insns in the delay slot have been matched and we were previously
2013 annulling the branch, we need not do so any more.  In that case delete all the
2014 merged insns.  Also clear the INSN_FROM_TARGET_P bit of each insn in
2015 the delay list so that we know that it isn't only being used at the
2016 target.  */
2017 if (slot_number == num_slots && annul_p)
2019 for (; merged_insns; merged_insns = XEXP (merged_insns, 1))
2021 if (GET_MODE (merged_insns) == SImode)
2025 update_block (XEXP (merged_insns, 0), thread);
2026 new = delete_from_delay_slot (XEXP (merged_insns, 0));
2027 if (INSN_DELETED_P (thread))
2028 thread = new;
2032 update_block (XEXP (merged_insns, 0), thread);
2033 delete_insn (XEXP (merged_insns, 0));
2037 INSN_ANNULLED_BRANCH_P (delay_insn) = 0;
2039 for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
2040 INSN_FROM_TARGET_P (XVECEXP (PATTERN (insn), 0, i)) = 0;
2044 /* See if INSN is redundant with an insn in front of TARGET. Often this
2045 is called when INSN is a candidate for a delay slot of TARGET.
2046 DELAY_LIST are insns that will be placed in delay slots of TARGET in front
2047 of INSN. Often INSN will be redundant with an insn in a delay slot of
2048 some previous insn. This happens when we have a series of branches to the
2049 same label; in that case the first insn at the target might want to go
2050 into each of the delay slots.
2052 If we are not careful, this routine can take up a significant fraction
2053 of the total compilation time (4%), but only wins rarely. Hence we
2054 speed this routine up by making two passes.  The first pass goes back
2055 until it hits a label and sees if it can find an insn with an identical
2056 pattern.  Only in this (relatively rare) event does it check for
2057 data conflicts.
2059 We do not split insns we encounter. This could cause us not to find a
2060 redundant insn, but the cost of splitting seems greater than the possible
2061 gain in rare cases. */
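/* As an invented illustration: if several branches all jump to L1 and L1
   begins with "add r2,1,r2", the filler tends to propose that same add
   for each branch's delay slot in turn.  The backward scan below spots
   an identical insn already placed in an earlier delay slot (or already
   present in the instruction stream) on the path to TARGET, so the new
   copy can be dropped instead of emitted again.  */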
2064 redundant_insn (insn, target, delay_list)
2069 rtx target_main = target;
2070 rtx ipat = PATTERN (insn);
2072 struct resources needed, set;
2075 /* If INSN has any REG_UNUSED notes, it can't match anything since we
2076 are allowed to not actually assign to such a register. */
2077 if (find_reg_note (insn, REG_UNUSED, NULL_RTX) != 0)
2078 return 0;
2080 /* Scan backwards looking for a match. */
2081 for (trial = PREV_INSN (target); trial; trial = PREV_INSN (trial))
2083 if (GET_CODE (trial) == CODE_LABEL)
2086 if (GET_RTX_CLASS (GET_CODE (trial)) != 'i')
2089 pat = PATTERN (trial);
2090 if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
2093 if (GET_CODE (pat) == SEQUENCE)
2095 /* Stop for a CALL and its delay slots because it is difficult to
2096 track its resource needs correctly. */
2097 if (GET_CODE (XVECEXP (pat, 0, 0)) == CALL_INSN)
2098 return 0;
2100 /* Stop for an INSN or JUMP_INSN with delayed effects and its delay
2101 slots because it is difficult to track its resource needs
2102 correctly.  */
2104 #ifdef INSN_SETS_ARE_DELAYED
2105 if (INSN_SETS_ARE_DELAYED (XVECEXP (pat, 0, 0)))
2106 return 0;
2109 #ifdef INSN_REFERENCES_ARE_DELAYED
2110 if (INSN_REFERENCES_ARE_DELAYED (XVECEXP (pat, 0, 0)))
2111 return 0;
2114 /* See if any of the insns in the delay slot match, updating
2115 resource requirements as we go. */
2116 for (i = XVECLEN (pat, 0) - 1; i > 0; i--)
2117 if (GET_CODE (XVECEXP (pat, 0, i)) == GET_CODE (insn)
2118 && rtx_equal_p (PATTERN (XVECEXP (pat, 0, i)), ipat)
2119 && ! find_reg_note (XVECEXP (pat, 0, i), REG_UNUSED, NULL_RTX))
2120 break;
2122 /* If found a match, exit this loop early.  */
2123 if (i > 0)
2124 break;
2127 else if (GET_CODE (trial) == GET_CODE (insn) && rtx_equal_p (pat, ipat)
2128 && ! find_reg_note (trial, REG_UNUSED, NULL_RTX))
2129 break;
2132 /* If we didn't find an insn that matches, return 0.  */
2133 if (trial == 0)
2134 return 0;
2136 /* See what resources this insn sets and needs. If they overlap, or
2137 if this insn references CC0, it can't be redundant. */
2139 CLEAR_RESOURCE (&needed);
2140 CLEAR_RESOURCE (&set);
2141 mark_set_resources (insn, &set, 0, 1);
2142 mark_referenced_resources (insn, &needed, 1);
2144 /* If TARGET is a SEQUENCE, get the main insn. */
2145 if (GET_CODE (target) == INSN && GET_CODE (PATTERN (target)) == SEQUENCE)
2146 target_main = XVECEXP (PATTERN (target), 0, 0);
2148 if (resource_conflicts_p (&needed, &set)
2150 || reg_mentioned_p (cc0_rtx, ipat)
2152 /* The insn requiring the delay may not set anything needed or set by
2153 INSN.  */
2154 || insn_sets_resource_p (target_main, &needed, 1)
2155 || insn_sets_resource_p (target_main, &set, 1))
2158 /* Insns we pass may not set either NEEDED or SET, so merge them for
2159 the easy case.  */
2160 needed.memory |= set.memory;
2161 needed.unch_memory |= set.unch_memory;
2162 IOR_HARD_REG_SET (needed.regs, set.regs);
2164 /* This insn isn't redundant if it conflicts with an insn that either is
2165 or will be in a delay slot of TARGET. */
2169 if (insn_sets_resource_p (XEXP (delay_list, 0), &needed, 1))
2170 return 0;
2171 delay_list = XEXP (delay_list, 1);
2174 if (GET_CODE (target) == INSN && GET_CODE (PATTERN (target)) == SEQUENCE)
2175 for (i = 1; i < XVECLEN (PATTERN (target), 0); i++)
2176 if (insn_sets_resource_p (XVECEXP (PATTERN (target), 0, i), &needed, 1))
2177 return 0;
2179 /* Scan backwards until we reach a label or an insn that uses something
2180 INSN sets or sets something INSN uses or sets.  */
2182 for (trial = PREV_INSN (target);
2183 trial && GET_CODE (trial) != CODE_LABEL;
2184 trial = PREV_INSN (trial))
2186 if (GET_CODE (trial) != INSN && GET_CODE (trial) != CALL_INSN
2187 && GET_CODE (trial) != JUMP_INSN)
2190 pat = PATTERN (trial);
2191 if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
2194 if (GET_CODE (pat) == SEQUENCE)
2196 /* If this is a CALL_INSN and its delay slots, it is hard to track
2197 the resource needs properly, so give up. */
2198 if (GET_CODE (XVECEXP (pat, 0, 0)) == CALL_INSN)
2199 return 0;
2201 /* If this is an INSN or JUMP_INSN with delayed effects, it
2202 is hard to track the resource needs properly, so give up. */
2204 #ifdef INSN_SETS_ARE_DELAYED
2205 if (INSN_SETS_ARE_DELAYED (XVECEXP (pat, 0, 0)))
2206 return 0;
2209 #ifdef INSN_REFERENCES_ARE_DELAYED
2210 if (INSN_REFERENCES_ARE_DELAYED (XVECEXP (pat, 0, 0)))
2211 return 0;
2214 /* See if any of the insns in the delay slot match, updating
2215 resource requirements as we go. */
2216 for (i = XVECLEN (pat, 0) - 1; i > 0; i--)
2218 rtx candidate = XVECEXP (pat, 0, i);
2220 /* If an insn will be annulled if the branch is false, it isn't
2221 considered as a possible duplicate insn. */
2222 if (rtx_equal_p (PATTERN (candidate), ipat)
2223 && ! (INSN_ANNULLED_BRANCH_P (XVECEXP (pat, 0, 0))
2224 && INSN_FROM_TARGET_P (candidate)))
2226 /* Show that this insn will be used in the sequel. */
2227 INSN_FROM_TARGET_P (candidate) = 0;
2228 return candidate;
2231 /* Unless this is an annulled insn from the target of a branch,
2232 we must stop if it sets anything needed or set by INSN. */
2233 if ((! INSN_ANNULLED_BRANCH_P (XVECEXP (pat, 0, 0))
2234 || ! INSN_FROM_TARGET_P (candidate))
2235 && insn_sets_resource_p (candidate, &needed, 1))
2236 return 0;
2240 /* If the insn requiring the delay slot conflicts with INSN, we
2241 must stop.  */
2242 if (insn_sets_resource_p (XVECEXP (pat, 0, 0), &needed, 1))
2243 return 0;
2247 /* See if TRIAL is the same as INSN. */
2248 pat = PATTERN (trial);
2249 if (rtx_equal_p (pat, ipat))
2250 return trial;
2252 /* Can't go any further if TRIAL conflicts with INSN. */
2253 if (insn_sets_resource_p (trial, &needed, 1))
2254 return 0;
2261 /* Return 1 if THREAD can only be executed in one way. If LABEL is non-zero,
2262 it is the target of the branch insn being scanned. If ALLOW_FALLTHROUGH
2263 is non-zero, we are allowed to fall into this thread; otherwise, we are
2264 not.
2266 If LABEL is used more than once or we pass a label other than LABEL before
2267 finding an active insn, we do not own this thread. */
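/* A sketch with invented labels and insns:

	beq	L1
	...
   L1:	insn A		; L1 referenced only by the branch above
	insn B

   Here the branch owns the thread starting at insn A, so insns may be
   removed from it.  If L1 had other users, or some other label appeared
   before the first active insn, we could only copy insns out of the
   thread, never delete them.  */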
2270 own_thread_p (thread, label, allow_fallthrough)
2273 int allow_fallthrough;
2278 /* We don't own the function end. */
2282 /* Get the first active insn, or THREAD, if it is an active insn. */
2283 active_insn = next_active_insn (PREV_INSN (thread));
2285 for (insn = thread; insn != active_insn; insn = NEXT_INSN (insn))
2286 if (GET_CODE (insn) == CODE_LABEL
2287 && (insn != label || LABEL_NUSES (insn) != 1))
2288 return 0;
2290 if (allow_fallthrough)
2291 return 1;
2293 /* Ensure that we reach a BARRIER before any insn or label. */
2294 for (insn = prev_nonnote_insn (thread);
2295 insn == 0 || GET_CODE (insn) != BARRIER;
2296 insn = prev_nonnote_insn (insn))
2297 if (insn == 0
2298 || GET_CODE (insn) == CODE_LABEL
2299 || (GET_CODE (insn) == INSN
2300 && GET_CODE (PATTERN (insn)) != USE
2301 && GET_CODE (PATTERN (insn)) != CLOBBER))
2302 return 0;
2304 return 1;
2307 /* Find the number of the basic block that starts closest to INSN. Return -1
2308 if we couldn't find such a basic block. */
2311 find_basic_block (insn)
2316 /* Scan backwards to the previous BARRIER. Then see if we can find a
2317 label that starts a basic block. Return the basic block number. */
2319 for (insn = prev_nonnote_insn (insn);
2320 insn && GET_CODE (insn) != BARRIER;
2321 insn = prev_nonnote_insn (insn))
2324 /* The start of the function is basic block zero. */
2328 /* See if any of the upcoming CODE_LABELs start a basic block. If we reach
2329 anything other than a CODE_LABEL or note, we can't find this code. */
2330 for (insn = next_nonnote_insn (insn);
2331 insn && GET_CODE (insn) == CODE_LABEL;
2332 insn = next_nonnote_insn (insn))
2334 for (i = 0; i < n_basic_blocks; i++)
2335 if (insn == basic_block_head[i])
2342 /* Called when INSN is being moved from a location near the target of a jump.
2343 We leave a marker of the form (use (INSN)) immediately in front
2344 of WHERE for mark_target_live_regs.  These markers will be deleted when
2345 reorg finishes.
2347 We used to try to update the live status of registers if WHERE is at
2348 the start of a basic block, but that can't work since we may remove a
2349 BARRIER in relax_delay_slots. */
2352 update_block (insn, where)
2358 /* Ignore if this was in a delay slot and it came from the target of
2359 a branch.  */
2360 if (INSN_FROM_TARGET_P (insn))
2361 return;
2363 emit_insn_before (gen_rtx_USE (VOIDmode, insn), where);
2365 /* INSN might be making a value live in a block where it didn't use to
2366 be. So recompute liveness information for this block. */
2368 b = find_basic_block (insn);
2369 if (b != -1)
2370 bb_ticks[b]++;
2373 /* Similar to REDIRECT_JUMP except that we update the BB_TICKS entry for
2374 the basic block containing the jump. */
2377 reorg_redirect_jump (jump, nlabel)
2381 int b = find_basic_block (jump);
2383 if (b != -1)
2384 bb_ticks[b]++;
2386 return redirect_jump (jump, nlabel);
2389 /* Called when INSN is being moved forward into a delay slot of DELAYED_INSN.
2390 We check every instruction between INSN and DELAYED_INSN for REG_DEAD notes
2391 that reference values used in INSN. If we find one, then we move the
2392 REG_DEAD note to INSN.
2394 This is needed to handle the case where a later insn (after INSN) has a
2395 REG_DEAD note for a register used by INSN, and this later insn subsequently
2396 gets moved before a CODE_LABEL because it is a redundant insn. In this
2397 case, mark_target_live_regs may be confused into thinking the register
2398 is dead because it sees a REG_DEAD note immediately before a CODE_LABEL. */
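/* Sketch of the hazard with invented insns: INSN uses r4 and is moved
   into a delay slot, while a later insn P still carries "REG_DEAD r4".
   If P is then moved in front of a CODE_LABEL as a redundant insn, the
   stale note sits immediately before the label and would make
   mark_target_live_regs think r4 is dead across it.  Moving the note
   from P to INSN keeps the recorded death at the real last use.  */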
2401 update_reg_dead_notes (insn, delayed_insn)
2402 rtx insn, delayed_insn;
2406 for (p = next_nonnote_insn (insn); p != delayed_insn;
2407 p = next_nonnote_insn (p))
2408 for (link = REG_NOTES (p); link; link = next)
2410 next = XEXP (link, 1);
2412 if (REG_NOTE_KIND (link) != REG_DEAD
2413 || GET_CODE (XEXP (link, 0)) != REG)
2416 if (reg_referenced_p (XEXP (link, 0), PATTERN (insn)))
2418 /* Move the REG_DEAD note from P to INSN. */
2419 remove_note (p, link);
2420 XEXP (link, 1) = REG_NOTES (insn);
2421 REG_NOTES (insn) = link;
2426 /* Called when an insn redundant with start_insn is deleted. If there
2427 is a REG_DEAD note for the target of start_insn between start_insn
2428 and stop_insn, then the REG_DEAD note needs to be deleted since the
2429 value no longer dies there.
2431 If the REG_DEAD note isn't deleted, then mark_target_live_regs may be
2432 confused into thinking the register is dead. */
2435 fix_reg_dead_note (start_insn, stop_insn)
2436 rtx start_insn, stop_insn;
2440 for (p = next_nonnote_insn (start_insn); p != stop_insn;
2441 p = next_nonnote_insn (p))
2442 for (link = REG_NOTES (p); link; link = next)
2444 next = XEXP (link, 1);
2446 if (REG_NOTE_KIND (link) != REG_DEAD
2447 || GET_CODE (XEXP (link, 0)) != REG)
2450 if (reg_set_p (XEXP (link, 0), PATTERN (start_insn)))
2452 remove_note (p, link);
2458 /* Delete any REG_UNUSED notes that exist on INSN but not on REDUNDANT_INSN.
2460 This handles the case of udivmodXi4 instructions which optimize their
2461 output depending on whether any REG_UNUSED notes are present.
2462 We must make sure that INSN calculates as many results as REDUNDANT_INSN
2463 does.  */
2466 update_reg_unused_notes (insn, redundant_insn)
2467 rtx insn, redundant_insn;
2471 for (link = REG_NOTES (insn); link; link = next)
2473 next = XEXP (link, 1);
2475 if (REG_NOTE_KIND (link) != REG_UNUSED
2476 || GET_CODE (XEXP (link, 0)) != REG)
2479 if (! find_regno_note (redundant_insn, REG_UNUSED,
2480 REGNO (XEXP (link, 0))))
2481 remove_note (insn, link);
2485 /* Marks registers possibly live at the current place being scanned by
2486 mark_target_live_regs.  Used only by the next two functions.  */
2488 static HARD_REG_SET current_live_regs;
2490 /* Marks registers for which we have seen a REG_DEAD note but no assignment.
2491 Also only used by the next two functions. */
2493 static HARD_REG_SET pending_dead_regs;
2495 /* Utility function called from mark_target_live_regs via note_stores.
2496 It deadens any CLOBBERed registers and livens any SET registers. */
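/* For example, seeing (set (reg:SI 3) ...) via note_stores sets bit 3 of
   current_live_regs, while (clobber (reg:SI 3)) clears it; a SUBREG or a
   multi-word mode updates every hard register that the store spans.  */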
2499 update_live_status (dest, x)
2503 int first_regno, last_regno;
2506 if (GET_CODE (dest) != REG
2507 && (GET_CODE (dest) != SUBREG || GET_CODE (SUBREG_REG (dest)) != REG))
2510 if (GET_CODE (dest) == SUBREG)
2511 first_regno = REGNO (SUBREG_REG (dest)) + SUBREG_WORD (dest);
2512 else
2513 first_regno = REGNO (dest);
2515 last_regno = first_regno + HARD_REGNO_NREGS (first_regno, GET_MODE (dest));
2517 if (GET_CODE (x) == CLOBBER)
2518 for (i = first_regno; i < last_regno; i++)
2519 CLEAR_HARD_REG_BIT (current_live_regs, i);
2520 else
2521 for (i = first_regno; i < last_regno; i++)
2523 SET_HARD_REG_BIT (current_live_regs, i);
2524 CLEAR_HARD_REG_BIT (pending_dead_regs, i);
2528 /* Similar to next_insn, but ignores insns in the delay slots of
2529 an annulled branch. */
2532 next_insn_no_annul (insn)
2537 /* If INSN is an annulled branch, skip any insns from the target
2538 of the branch.  */
2539 if (INSN_ANNULLED_BRANCH_P (insn)
2540 && NEXT_INSN (PREV_INSN (insn)) != insn)
2541 while (INSN_FROM_TARGET_P (NEXT_INSN (insn)))
2542 insn = NEXT_INSN (insn);
2544 insn = NEXT_INSN (insn);
2545 if (insn && GET_CODE (insn) == INSN
2546 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2547 insn = XVECEXP (PATTERN (insn), 0, 0);
2553 /* A subroutine of mark_target_live_regs. Search forward from TARGET
2554 looking for registers that are set before they are used. These are dead.
2555 Stop after passing a few conditional jumps, and/or a small
2556 number of unconditional branches. */
2559 find_dead_or_set_registers (target, res, jump_target, jump_count, set, needed)
2561 struct resources *res;
2564 struct resources set, needed;
2566 HARD_REG_SET scratch;
2571 for (insn = target; insn; insn = next)
2573 rtx this_jump_insn = insn;
2575 next = NEXT_INSN (insn);
2576 switch (GET_CODE (insn))
2579 /* After a label, any pending dead registers that weren't yet
2580 used can be made dead. */
2581 AND_COMPL_HARD_REG_SET (pending_dead_regs, needed.regs);
2582 AND_COMPL_HARD_REG_SET (res->regs, pending_dead_regs);
2583 CLEAR_HARD_REG_SET (pending_dead_regs);
2585 if (CODE_LABEL_NUMBER (insn) < max_label_num_after_reload)
2587 /* All spill registers are dead at a label, so kill all of the
2588 ones that aren't needed also. */
2589 COPY_HARD_REG_SET (scratch, used_spill_regs);
2590 AND_COMPL_HARD_REG_SET (scratch, needed.regs);
2591 AND_COMPL_HARD_REG_SET (res->regs, scratch);
2600 if (GET_CODE (PATTERN (insn)) == USE)
2602 /* If INSN is a USE made by update_block, we care about the
2603 underlying insn. Any registers set by the underlying insn
2604 are live since the insn is being done somewhere else. */
2605 if (GET_RTX_CLASS (GET_CODE (XEXP (PATTERN (insn), 0))) == 'i')
2606 mark_set_resources (XEXP (PATTERN (insn), 0), res, 0, 1);
2608 /* All other USE insns are to be ignored. */
2611 else if (GET_CODE (PATTERN (insn)) == CLOBBER)
2613 else if (GET_CODE (PATTERN (insn)) == SEQUENCE)
2615 /* An unconditional jump can be used to fill the delay slot
2616 of a call, so search for a JUMP_INSN in any position. */
2617 for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
2619 this_jump_insn = XVECEXP (PATTERN (insn), 0, i);
2620 if (GET_CODE (this_jump_insn) == JUMP_INSN)
2629 if (GET_CODE (this_jump_insn) == JUMP_INSN)
2631 if (jump_count++ < 10)
2633 if (simplejump_p (this_jump_insn)
2634 || GET_CODE (PATTERN (this_jump_insn)) == RETURN)
2636 next = JUMP_LABEL (this_jump_insn);
2641 *jump_target = JUMP_LABEL (this_jump_insn);
2644 else if (condjump_p (this_jump_insn)
2645 || condjump_in_parallel_p (this_jump_insn))
2647 struct resources target_set, target_res;
2648 struct resources fallthrough_res;
2650 /* We can handle conditional branches here by following
2651 both paths, and then IOR the results of the two paths
2652 together, which will give us registers that are dead
2653 on both paths. Since this is expensive, we give it
2654 a much higher cost than unconditional branches. The
2655 cost was chosen so that we will follow at most 1
2656 conditional branch. */
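/* Concretely: a register found dead (or set before use) along the taken
   path and also along the fallthrough path is removed from the live set
   below; a register dead on only one of the two paths stays live.  */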
2658 jump_count += 4;
2659 if (jump_count >= 10)
2660 break;
2662 mark_referenced_resources (insn, &needed, 1);
2664 /* For an annulled branch, mark_set_resources ignores slots
2665 filled by instructions from the target. This is correct
2666 if the branch is not taken. Since we are following both
2667 paths from the branch, we must also compute correct info
2668 if the branch is taken. We do this by inverting all of
2669 the INSN_FROM_TARGET_P bits, calling mark_set_resources,
2670 and then inverting the INSN_FROM_TARGET_P bits again. */
2672 if (GET_CODE (PATTERN (insn)) == SEQUENCE
2673 && INSN_ANNULLED_BRANCH_P (this_jump_insn))
2675 for (i = 1; i < XVECLEN (PATTERN (insn), 0); i++)
2676 INSN_FROM_TARGET_P (XVECEXP (PATTERN (insn), 0, i))
2677 = ! INSN_FROM_TARGET_P (XVECEXP (PATTERN (insn), 0, i));
2680 mark_set_resources (insn, &target_set, 0, 1);
2682 for (i = 1; i < XVECLEN (PATTERN (insn), 0); i++)
2683 INSN_FROM_TARGET_P (XVECEXP (PATTERN (insn), 0, i))
2684 = ! INSN_FROM_TARGET_P (XVECEXP (PATTERN (insn), 0, i));
2686 mark_set_resources (insn, &set, 0, 1);
2690 mark_set_resources (insn, &set, 0, 1);
2695 COPY_HARD_REG_SET (scratch, target_set.regs);
2696 AND_COMPL_HARD_REG_SET (scratch, needed.regs);
2697 AND_COMPL_HARD_REG_SET (target_res.regs, scratch);
2699 fallthrough_res = *res;
2700 COPY_HARD_REG_SET (scratch, set.regs);
2701 AND_COMPL_HARD_REG_SET (scratch, needed.regs);
2702 AND_COMPL_HARD_REG_SET (fallthrough_res.regs, scratch);
2704 find_dead_or_set_registers (JUMP_LABEL (this_jump_insn),
2705 &target_res, 0, jump_count,
2706 target_set, needed);
2707 find_dead_or_set_registers (next,
2708 &fallthrough_res, 0, jump_count,
2710 IOR_HARD_REG_SET (fallthrough_res.regs, target_res.regs);
2711 AND_HARD_REG_SET (res->regs, fallthrough_res.regs);
2719 /* Don't try this optimization if we expired our jump count
2720 above, since that would mean there may be an infinite loop
2721 in the function being compiled. */
2727 mark_referenced_resources (insn, &needed, 1);
2728 mark_set_resources (insn, &set, 0, 1);
2730 COPY_HARD_REG_SET (scratch, set.regs);
2731 AND_COMPL_HARD_REG_SET (scratch, needed.regs);
2732 AND_COMPL_HARD_REG_SET (res->regs, scratch);
2738 /* Set the resources that are live at TARGET.
2740 If TARGET is zero, we refer to the end of the current function and can
2741 return our precomputed value.
2743 Otherwise, we try to find out what is live by consulting the basic block
2744 information. This is tricky, because we must consider the actions of
2745 reload and jump optimization, which occur after the basic block information
2748 Accordingly, we proceed as follows:
2750 We find the previous BARRIER and look at all immediately following labels
2751 (with no intervening active insns) to see if any of them start a basic
2752 block. If we hit the start of the function first, we use block 0.
2754 Once we have found a basic block and a corresponding first insn, we can
2755 accurately compute the live status from basic_block_live_regs and
2756 reg_renumber. (By starting at a label following a BARRIER, we are immune
2757 to actions taken by reload and jump.) Then we scan all insns between
2758 that point and our target. For each CLOBBER (or for call-clobbered regs
2759 when we pass a CALL_INSN), mark the appropriate registers as dead.  For
2760 a SET, mark them as live.
2762 We have to be careful when using REG_DEAD notes because they are not
2763 updated by such things as find_equiv_reg. So keep track of registers
2764 marked as dead that haven't been assigned to, and mark them dead at the
2765 next CODE_LABEL since reload and jump won't propagate values across labels.
2767 If we cannot find the start of a basic block (should be a very rare
2768 case, if it can happen at all), mark everything as potentially live.
2770 Next, scan forward from TARGET looking for things set or clobbered
2771 before they are used. These are not live.
2773 Because we can be called many times on the same target, save our results
2774 in a hash table indexed by INSN_UID. */
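/* The cache is a simple chained hash over insn UIDs, sketched here with
   the names used below:

	hash = INSN_UID (target) % TARGET_HASH_PRIME;
	for (tinfo = target_hash_table[hash]; tinfo; tinfo = tinfo->next)
	  if (tinfo->uid == INSN_UID (target))
	    ... reuse tinfo->live_regs if tinfo->bb_tick is still current ...

   bb_ticks[b] is bumped whenever basic block B is disturbed, which
   invalidates stale entries without walking the whole table.  */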
2777 mark_target_live_regs (target, res)
2779 struct resources *res;
2783 struct target_info *tinfo;
2787 HARD_REG_SET scratch;
2788 struct resources set, needed;
2790 /* Handle end of function.  */
2791 if (target == 0)
2793 *res = end_of_function_needs;
2794 return;
2797 /* We have to assume memory is needed, but the CC isn't.  */
2798 res->memory = 1;
2799 res->volatil = res->unch_memory = 0;
2800 res->cc = 0;
2802 /* See if we have computed this value already. */
2803 for (tinfo = target_hash_table[INSN_UID (target) % TARGET_HASH_PRIME];
2804 tinfo; tinfo = tinfo->next)
2805 if (tinfo->uid == INSN_UID (target))
2808 /* Start by getting the basic block number. If we have saved information,
2809 we can get it from there unless the insn at the start of the basic block
2810 has been deleted. */
2811 if (tinfo && tinfo->block != -1
2812 && ! INSN_DELETED_P (basic_block_head[tinfo->block]))
2816 b = find_basic_block (target);
2820 /* If the information is up-to-date, use it.  Otherwise, we will
2821 update it below.  */
2822 if (b == tinfo->block && b != -1 && tinfo->bb_tick == bb_ticks[b])
2824 COPY_HARD_REG_SET (res->regs, tinfo->live_regs);
2830 /* Allocate a place to put our results and chain it into the
2831 hash table.  */
2832 tinfo = (struct target_info *) oballoc (sizeof (struct target_info));
2833 tinfo->uid = INSN_UID (target);
2835 tinfo->next = target_hash_table[INSN_UID (target) % TARGET_HASH_PRIME];
2836 target_hash_table[INSN_UID (target) % TARGET_HASH_PRIME] = tinfo;
2839 CLEAR_HARD_REG_SET (pending_dead_regs);
2841 /* If we found a basic block, get the live registers from it and update
2842 them with anything set or killed between its start and the insn before
2843 TARGET. Otherwise, we must assume everything is live. */
2846 regset regs_live = basic_block_live_at_start[b];
2849 rtx start_insn, stop_insn;
2851 /* Compute hard regs live at start of block -- this is the real hard regs
2852 marked live, plus live pseudo regs that have been renumbered to
2853 hard regs.  */
2855 REG_SET_TO_HARD_REG_SET (current_live_regs, regs_live);
2857 EXECUTE_IF_SET_IN_REG_SET
2858 (regs_live, FIRST_PSEUDO_REGISTER, i,
2860 if ((regno = reg_renumber[i]) >= 0)
2862 j < regno + HARD_REGNO_NREGS (regno,
2863 PSEUDO_REGNO_MODE (i));
2865 SET_HARD_REG_BIT (current_live_regs, j);
2868 /* Get starting and ending insn, handling the case where each might
2869 be a SEQUENCE.  */
2870 start_insn = (b == 0 ? get_insns () : basic_block_head[b]);
2873 if (GET_CODE (start_insn) == INSN
2874 && GET_CODE (PATTERN (start_insn)) == SEQUENCE)
2875 start_insn = XVECEXP (PATTERN (start_insn), 0, 0);
2877 if (GET_CODE (stop_insn) == INSN
2878 && GET_CODE (PATTERN (stop_insn)) == SEQUENCE)
2879 stop_insn = next_insn (PREV_INSN (stop_insn));
2881 for (insn = start_insn; insn != stop_insn;
2882 insn = next_insn_no_annul (insn))
2885 rtx real_insn = insn;
2887 /* If this insn is from the target of a branch, it isn't going to
2888 be used in the sequel. If it is used in both cases, this
2889 test will not be true. */
2890 if (INSN_FROM_TARGET_P (insn))
2893 /* If this insn is a USE made by update_block, we care about the
2894 underlying insn.  */
2895 if (GET_CODE (insn) == INSN && GET_CODE (PATTERN (insn)) == USE
2896 && GET_RTX_CLASS (GET_CODE (XEXP (PATTERN (insn), 0))) == 'i')
2897 real_insn = XEXP (PATTERN (insn), 0);
2899 if (GET_CODE (real_insn) == CALL_INSN)
2901 /* CALL clobbers all call-used regs that aren't fixed except
2902 sp, ap, and fp.  Do this before setting the result of the
2903 call live.  */
2904 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2905 if (call_used_regs[i]
2906 && i != STACK_POINTER_REGNUM && i != FRAME_POINTER_REGNUM
2907 && i != ARG_POINTER_REGNUM
2908 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
2909 && i != HARD_FRAME_POINTER_REGNUM
2911 #if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
2912 && ! (i == ARG_POINTER_REGNUM && fixed_regs[i])
2914 #ifdef PIC_OFFSET_TABLE_REGNUM
2915 && ! (i == PIC_OFFSET_TABLE_REGNUM && flag_pic)
2918 CLEAR_HARD_REG_BIT (current_live_regs, i);
2920 /* A CALL_INSN sets any global register live, since it may
2921 have been modified by the call. */
2922 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2924 SET_HARD_REG_BIT (current_live_regs, i);
2927 /* Mark anything killed in an insn to be deadened at the next
2928 label. Ignore USE insns; the only REG_DEAD notes will be for
2929 parameters. But they might be early. A CALL_INSN will usually
2930 clobber registers used for parameters. It isn't worth bothering
2931 with the unlikely case when it won't. */
2932 if ((GET_CODE (real_insn) == INSN
2933 && GET_CODE (PATTERN (real_insn)) != USE
2934 && GET_CODE (PATTERN (real_insn)) != CLOBBER)
2935 || GET_CODE (real_insn) == JUMP_INSN
2936 || GET_CODE (real_insn) == CALL_INSN)
2938 for (link = REG_NOTES (real_insn); link; link = XEXP (link, 1))
2939 if (REG_NOTE_KIND (link) == REG_DEAD
2940 && GET_CODE (XEXP (link, 0)) == REG
2941 && REGNO (XEXP (link, 0)) < FIRST_PSEUDO_REGISTER)
2943 int first_regno = REGNO (XEXP (link, 0));
2946 + HARD_REGNO_NREGS (first_regno,
2947 GET_MODE (XEXP (link, 0))));
2949 for (i = first_regno; i < last_regno; i++)
2950 SET_HARD_REG_BIT (pending_dead_regs, i);
2953 note_stores (PATTERN (real_insn), update_live_status);
2955 /* If any registers were unused after this insn, kill them.
2956 These notes will always be accurate. */
2957 for (link = REG_NOTES (real_insn); link; link = XEXP (link, 1))
2958 if (REG_NOTE_KIND (link) == REG_UNUSED
2959 && GET_CODE (XEXP (link, 0)) == REG
2960 && REGNO (XEXP (link, 0)) < FIRST_PSEUDO_REGISTER)
2962 int first_regno = REGNO (XEXP (link, 0));
2965 + HARD_REGNO_NREGS (first_regno,
2966 GET_MODE (XEXP (link, 0))));
2968 for (i = first_regno; i < last_regno; i++)
2969 CLEAR_HARD_REG_BIT (current_live_regs, i);
2973 else if (GET_CODE (real_insn) == CODE_LABEL)
2975 /* A label clobbers the pending dead registers since neither
2976 reload nor jump will propagate a value across a label. */
2977 AND_COMPL_HARD_REG_SET (current_live_regs, pending_dead_regs);
2978 CLEAR_HARD_REG_SET (pending_dead_regs);
2981 /* The beginning of the epilogue corresponds to the end of the
2982 RTL chain when there are no epilogue insns. Certain resources
2983 are implicitly required at that point. */
2984 else if (GET_CODE (real_insn) == NOTE
2985 && NOTE_LINE_NUMBER (real_insn) == NOTE_INSN_EPILOGUE_BEG)
2986 IOR_HARD_REG_SET (current_live_regs, start_of_epilogue_needs.regs);
2989 COPY_HARD_REG_SET (res->regs, current_live_regs);
2991 tinfo->bb_tick = bb_ticks[b];
2994 /* We didn't find the start of a basic block. Assume everything
2995 in use. This should happen only extremely rarely. */
2996 SET_HARD_REG_SET (res->regs);
2998 CLEAR_RESOURCE (&set);
2999 CLEAR_RESOURCE (&needed);
3001 jump_insn = find_dead_or_set_registers (target, res, &jump_target, 0,
3004 /* If we hit an unconditional branch, we have another way of finding out
3005 what is live: we can see what is live at the branch target and include
3006 anything used but not set before the branch. The only things that are
3007 live are those that are live using the above test and the test below. */
3011 struct resources new_resources;
3012 rtx stop_insn = next_active_insn (jump_insn);
3014 mark_target_live_regs (next_active_insn (jump_target), &new_resources);
3015 CLEAR_RESOURCE (&set);
3016 CLEAR_RESOURCE (&needed);
3018 /* Include JUMP_INSN in the needed registers. */
3019 for (insn = target; insn != stop_insn; insn = next_active_insn (insn))
3021 mark_referenced_resources (insn, &needed, 1);
3023 COPY_HARD_REG_SET (scratch, needed.regs);
3024 AND_COMPL_HARD_REG_SET (scratch, set.regs);
3025 IOR_HARD_REG_SET (new_resources.regs, scratch);
3027 mark_set_resources (insn, &set, 0, 1);
3030 AND_HARD_REG_SET (res->regs, new_resources.regs);
3033 COPY_HARD_REG_SET (tinfo->live_regs, res->regs);
3036 /* Scan a function looking for insns that need a delay slot and find insns to
3037 put into the delay slot.
3039 NON_JUMPS_P is non-zero if we are to only try to fill non-jump insns (such
3040 as calls). We do these first since we don't want jump insns (that are
3041 easier to fill) to get the only insns that could be used for non-jump insns.
3042 When it is zero, only try to fill JUMP_INSNs.
3044 When slots are filled in this manner, the insns (including the
3045 delay_insn) are put together in a SEQUENCE rtx. In this fashion,
3046 it is possible to tell whether a delay slot has really been filled
3047 or not. `final' knows how to deal with this, by communicating
3048 through FINAL_SEQUENCE. */
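/* Schematically, a filled insn becomes one insn whose pattern is a
   SEQUENCE (operands elided):

	(insn (sequence [
	    (call_insn ...)	; the insn that needed the slots
	    (insn ...)		; delay slot 1
	    (insn ...)		; delay slot 2, on machines that have one
	  ]))
 */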
3051 fill_simple_delay_slots (non_jumps_p)
3054 register rtx insn, pat, trial, next_trial;
3056 int num_unfilled_slots = unfilled_slots_next - unfilled_slots_base;
3057 struct resources needed, set;
3058 int slots_to_fill, slots_filled;
3061 for (i = 0; i < num_unfilled_slots; i++)
3064 /* Get the next insn to fill. If it has already had any slots assigned,
3065 we can't do anything with it. Maybe we'll improve this later. */
3067 insn = unfilled_slots_base[i];
3069 || INSN_DELETED_P (insn)
3070 || (GET_CODE (insn) == INSN
3071 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3072 || (GET_CODE (insn) == JUMP_INSN && non_jumps_p)
3073 || (GET_CODE (insn) != JUMP_INSN && ! non_jumps_p))
3076 if (GET_CODE (insn) == JUMP_INSN)
3077 flags = get_jump_flags (insn, JUMP_LABEL (insn));
3078 else
3079 flags = get_jump_flags (insn, NULL_RTX);
3080 slots_to_fill = num_delay_slots (insn);
3082 /* Some machine descriptions have defined instructions to have
3083 delay slots only in certain circumstances which may depend on
3084 nearby insns (which change due to reorg's actions).
3086 For example, the PA port normally has delay slots for unconditional
3087 jumps.
3089 However, the PA port claims such jumps do not have a delay slot
3090 if they are immediate successors of certain CALL_INSNs. This
3091 allows the port to favor filling the delay slot of the call with
3092 the unconditional jump. */
3093 if (slots_to_fill == 0)
3094 continue;
3096 /* This insn needs, or can use, some delay slots. SLOTS_TO_FILL
3097 says how many. After initialization, first try optimizing
3099 call _foo		call _foo
3100 nop			add %o7,.-L1,%o7
3101 b,a L1			b,a L1
3102 ...
3104 If this case applies, the delay slot of the call is filled with
3105 the unconditional jump. This is done first to avoid having the
3106 delay slot of the call filled in the backward scan. Also, since
3107 the unconditional jump is likely to also have a delay slot, that
3108 insn must exist when it is subsequently scanned.
3110 This is tried on each insn with delay slots as some machines
3111 have insns which perform calls, but are not represented as
3112 CALL_INSNs.  */
3117 if ((trial = next_active_insn (insn))
3118 && GET_CODE (trial) == JUMP_INSN
3119 && simplejump_p (trial)
3120 && eligible_for_delay (insn, slots_filled, trial, flags)
3121 && no_labels_between_p (insn, trial))
3125 delay_list = add_to_delay_list (trial, delay_list);
3127 /* TRIAL may have had its delay slot filled, then unfilled. When
3128 the delay slot is unfilled, TRIAL is placed back on the unfilled
3129 slots obstack. Unfortunately, it is placed on the end of the
3130 obstack, not in its original location. Therefore, we must search
3131 from entry i + 1 to the end of the unfilled slots obstack to
3132 try and find TRIAL. */
3133 tmp = &unfilled_slots_base[i + 1];
3134 while (*tmp != trial && tmp != unfilled_slots_next)
3137 /* Remove the unconditional jump from consideration for delay slot
3138 filling and unthread it. */
3142 rtx next = NEXT_INSN (trial);
3143 rtx prev = PREV_INSN (trial);
3145 NEXT_INSN (prev) = next;
3147 PREV_INSN (next) = prev;
3151 /* Now, scan backwards from the insn to search for a potential
3152 delay-slot candidate. Stop searching when a label or jump is hit.
3154 For each candidate, if it is to go into the delay slot (moved
3155 forward in execution sequence), it must not need or set any resources
3156 that were set by later insns and must not set any resources that
3157 are needed for those insns.
3159 The delay slot insn itself sets resources unless it is a call
3160 (in which case the called routine, not the insn itself, is doing
3163 if (slots_filled < slots_to_fill)
3165 CLEAR_RESOURCE (&needed);
3166 CLEAR_RESOURCE (&set);
3167 mark_set_resources (insn, &set, 0, 0);
3168 mark_referenced_resources (insn, &needed, 0);
3170 for (trial = prev_nonnote_insn (insn); ! stop_search_p (trial, 1);
3173 next_trial = prev_nonnote_insn (trial);
3175 /* This must be an INSN or CALL_INSN. */
3176 pat = PATTERN (trial);
3178 /* USE and CLOBBER at this level are just for flow; ignore them.  */
3179 if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
3182 /* Check for resource conflict first, to avoid unnecessary
3183 splitting.  */
3184 if (! insn_references_resource_p (trial, &set, 1)
3185 && ! insn_sets_resource_p (trial, &set, 1)
3186 && ! insn_sets_resource_p (trial, &needed, 1)
3188 /* Can't separate set of cc0 from its use. */
3189 && ! (reg_mentioned_p (cc0_rtx, pat)
3190 && ! sets_cc0_p (cc0_rtx, pat))
3194 trial = try_split (pat, trial, 1);
3195 next_trial = prev_nonnote_insn (trial);
3196 if (eligible_for_delay (insn, slots_filled, trial, flags))
3198 /* In this case, we are searching backward, so if we
3199 find insns to put on the delay list, we want
3200 to put them at the head, rather than the
3201 tail, of the list. */
3203 update_reg_dead_notes (trial, insn);
3204 delay_list = gen_rtx_INSN_LIST (VOIDmode,
3206 update_block (trial, trial);
3207 delete_insn (trial);
3208 if (slots_to_fill == ++slots_filled)
3209 break;
3214 mark_set_resources (trial, &set, 0, 1);
3215 mark_referenced_resources (trial, &needed, 1);
3219 /* If all needed slots haven't been filled, we come here. */
3221 /* Try to optimize case of jumping around a single insn. */
3222 #if defined(ANNUL_IFFALSE_SLOTS) || defined(ANNUL_IFTRUE_SLOTS)
3223 if (slots_filled != slots_to_fill
3225 && GET_CODE (insn) == JUMP_INSN
3226 && (condjump_p (insn) || condjump_in_parallel_p (insn)))
3228 delay_list = optimize_skip (insn);
3234 /* Try to get insns from beyond the insn needing the delay slot.
3235 These insns can neither set nor reference resources set in insns being
3236 skipped, cannot set resources in the insn being skipped, and, if this
3237 is a CALL_INSN (or a CALL_INSN is passed), cannot trap (because the
3238 call might not return).
3240 There used to be code which continued past the target label if
3241 we saw all uses of the target label. This code did not work,
3242 because it failed to account for some instructions which were
3243 both annulled and marked as from the target. This can happen as a
3244 result of optimize_skip. Since this code was redundant with
3245 fill_eager_delay_slots anyways, it was just deleted. */
3247 if (slots_filled != slots_to_fill
3248 && (GET_CODE (insn) != JUMP_INSN
3249 || ((condjump_p (insn) || condjump_in_parallel_p (insn))
3250 && ! simplejump_p (insn)
3251 && JUMP_LABEL (insn) != 0)))
3254 int maybe_never = 0;
3255 struct resources needed_at_jump;
3257 CLEAR_RESOURCE (&needed);
3258 CLEAR_RESOURCE (&set);
3260 if (GET_CODE (insn) == CALL_INSN)
3262 mark_set_resources (insn, &set, 0, 1);
3263 mark_referenced_resources (insn, &needed, 1);
3268 mark_set_resources (insn, &set, 0, 1);
3269 mark_referenced_resources (insn, &needed, 1);
3270 if (GET_CODE (insn) == JUMP_INSN)
3271 target = JUMP_LABEL (insn);
3274 for (trial = next_nonnote_insn (insn); trial; trial = next_trial)
3276 rtx pat, trial_delay;
3278 next_trial = next_nonnote_insn (trial);
3280 if (GET_CODE (trial) == CODE_LABEL
3281 || GET_CODE (trial) == BARRIER)
3284 /* We must have an INSN, JUMP_INSN, or CALL_INSN. */
3285 pat = PATTERN (trial);
3287 /* Stand-alone USE and CLOBBER are just for flow. */
3288 if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
3291 /* If this already has filled delay slots, get the insn needing
3292 the delay slots.  */
3293 if (GET_CODE (pat) == SEQUENCE)
3294 trial_delay = XVECEXP (pat, 0, 0);
3296 trial_delay = trial;
3298 /* If this is a jump insn to our target, indicate that we have
3299 seen another jump to it. If we aren't handling a conditional
3300 jump, stop our search. Otherwise, compute the needs at its
3301 target and add them to NEEDED. */
3302 if (GET_CODE (trial_delay) == JUMP_INSN)
3306 else if (JUMP_LABEL (trial_delay) != target)
3308 mark_target_live_regs
3309 (next_active_insn (JUMP_LABEL (trial_delay)),
3311 needed.memory |= needed_at_jump.memory;
3312 needed.unch_memory |= needed_at_jump.unch_memory;
3313 IOR_HARD_REG_SET (needed.regs, needed_at_jump.regs);
3317 /* See if we have a resource problem before we try to
3318 split.  */
3320 && GET_CODE (pat) != SEQUENCE
3321 && ! insn_references_resource_p (trial, &set, 1)
3322 && ! insn_sets_resource_p (trial, &set, 1)
3323 && ! insn_sets_resource_p (trial, &needed, 1)
3325 && ! (reg_mentioned_p (cc0_rtx, pat) && ! sets_cc0_p (pat))
3327 && ! (maybe_never && may_trap_p (pat))
3328 && (trial = try_split (pat, trial, 0))
3329 && eligible_for_delay (insn, slots_filled, trial, flags))
3331 next_trial = next_nonnote_insn (trial);
3332 delay_list = add_to_delay_list (trial, delay_list);
3335 if (reg_mentioned_p (cc0_rtx, pat))
3336 link_cc0_insns (trial);
3339 delete_insn (trial);
3340 if (slots_to_fill == ++slots_filled)
3341 break;
3345 mark_set_resources (trial, &set, 0, 1);
3346 mark_referenced_resources (trial, &needed, 1);
3348 /* Ensure we don't put insns between the setting of cc and the
3349 comparison by moving a setting of cc into an earlier delay
3350 slot since these insns could clobber the condition code. */
3353 /* If this is a call or jump, we might not get here. */
3354 if (GET_CODE (trial_delay) == CALL_INSN
3355 || GET_CODE (trial_delay) == JUMP_INSN)
3359 /* If there are slots left to fill and our search was stopped by an
3360 unconditional branch, try the insn at the branch target. We can
3361 redirect the branch if it works.
3363 Don't do this if the insn at the branch target is a branch. */
3364 if (slots_to_fill != slots_filled
3366 && GET_CODE (trial) == JUMP_INSN
3367 && simplejump_p (trial)
3368 && (target == 0 || JUMP_LABEL (trial) == target)
3369 && (next_trial = next_active_insn (JUMP_LABEL (trial))) != 0
3370 && ! (GET_CODE (next_trial) == INSN
3371 && GET_CODE (PATTERN (next_trial)) == SEQUENCE)
3372 && GET_CODE (next_trial) != JUMP_INSN
3373 && ! insn_references_resource_p (next_trial, &set, 1)
3374 && ! insn_sets_resource_p (next_trial, &set, 1)
3375 && ! insn_sets_resource_p (next_trial, &needed, 1)
3377 && ! reg_mentioned_p (cc0_rtx, PATTERN (next_trial))
3379 && ! (maybe_never && may_trap_p (PATTERN (next_trial)))
3380 && (next_trial = try_split (PATTERN (next_trial), next_trial, 0))
3381 && eligible_for_delay (insn, slots_filled, next_trial, flags))
3383 rtx new_label = next_active_insn (next_trial);
3385 if (new_label != 0)
3386 new_label = get_label_before (new_label);
3387 else
3388 new_label = find_end_label ();
3391 = add_to_delay_list (copy_rtx (next_trial), delay_list);
3393 reorg_redirect_jump (trial, new_label);
3395 /* If we merged because we both jumped to the same place,
3396 redirect the original insn also. */
3398 reorg_redirect_jump (insn, new_label);
3402 /* If this is an unconditional jump, then try to get insns from the
3403 target of the jump. */
3404 if (GET_CODE (insn) == JUMP_INSN
3405 && simplejump_p (insn)
3406 && slots_filled != slots_to_fill)
3407 delay_list
3408 = fill_slots_from_thread (insn, const_true_rtx,
3409 next_active_insn (JUMP_LABEL (insn)),
3411 own_thread_p (JUMP_LABEL (insn),
3412 JUMP_LABEL (insn), 0),
3413 slots_to_fill, &slots_filled,
3414 delay_list);
3416 if (delay_list)
3417 unfilled_slots_base[i]
3418 = emit_delay_sequence (insn, delay_list, slots_filled);
3420 if (slots_to_fill == slots_filled)
3421 unfilled_slots_base[i] = 0;
3423 note_delay_statistics (slots_filled, 0);
3426 #ifdef DELAY_SLOTS_FOR_EPILOGUE
3427 /* See if the epilogue needs any delay slots. Try to fill them if so.
3428 The only thing we can do is scan backwards from the end of the
3429 function.  If we did this in a previous pass, it is incorrect to do it
3430 again.  */
3431 if (current_function_epilogue_delay_list)
3432 return;
3434 slots_to_fill = DELAY_SLOTS_FOR_EPILOGUE;
3435 if (slots_to_fill == 0)
3436 return;
3439 CLEAR_RESOURCE (&set);
3441 /* The frame pointer and stack pointer are needed at the beginning of
3442 the epilogue, so instructions setting them can not be put in the
3443 epilogue delay slot. However, everything else needed at function
3444 end is safe, so we don't want to use end_of_function_needs here. */
3445 CLEAR_RESOURCE (&needed);
3446 if (frame_pointer_needed)
3448 SET_HARD_REG_BIT (needed.regs, FRAME_POINTER_REGNUM);
3449 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
3450 SET_HARD_REG_BIT (needed.regs, HARD_FRAME_POINTER_REGNUM);
3452 #ifdef EXIT_IGNORE_STACK
3453 if (! EXIT_IGNORE_STACK)
3455 SET_HARD_REG_BIT (needed.regs, STACK_POINTER_REGNUM);
3458 SET_HARD_REG_BIT (needed.regs, STACK_POINTER_REGNUM);
3460 #ifdef EPILOGUE_USES
3461 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3463 if (EPILOGUE_USES (i))
3464 SET_HARD_REG_BIT (needed.regs, i);
3468 for (trial = get_last_insn (); ! stop_search_p (trial, 1);
3469 trial = PREV_INSN (trial))
3471 if (GET_CODE (trial) == NOTE)
3473 pat = PATTERN (trial);
3474 if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
3477 if (! insn_references_resource_p (trial, &set, 1)
3478 && ! insn_sets_resource_p (trial, &needed, 1)
3479 && ! insn_sets_resource_p (trial, &set, 1)
3481 /* Don't want to mess with cc0 here. */
3482 && ! reg_mentioned_p (cc0_rtx, pat)
3486 trial = try_split (pat, trial, 1);
3487 if (ELIGIBLE_FOR_EPILOGUE_DELAY (trial, slots_filled))
3489 /* Here as well we are searching backward, so put the
3490 insns we find on the head of the list. */
3492 current_function_epilogue_delay_list
3493 = gen_rtx_INSN_LIST (VOIDmode, trial,
3494 current_function_epilogue_delay_list);
3495 mark_referenced_resources (trial, &end_of_function_needs, 1);
3496 update_block (trial, trial);
3497 delete_insn (trial);
3499 /* Clear deleted bit so final.c will output the insn. */
3500 INSN_DELETED_P (trial) = 0;
3502 if (slots_to_fill == ++slots_filled)
3503 break;
3508 mark_set_resources (trial, &set, 0, 1);
3509 mark_referenced_resources (trial, &needed, 1);
3512 note_delay_statistics (slots_filled, 0);
3516 /* Try to find insns to place in delay slots.
3518 INSN is the jump needing SLOTS_TO_FILL delay slots. It tests CONDITION
3519 or is an unconditional branch if CONDITION is const_true_rtx.
3520 *PSLOTS_FILLED is updated with the number of slots that we have filled.
3522 THREAD is a flow-of-control, either the insns to be executed if the
3523 branch is true or if the branch is false, THREAD_IF_TRUE says which.
3525 OPPOSITE_THREAD is the thread in the opposite direction. It is used
3526 to see if any potential delay slot insns set things needed there.
3528 LIKELY is non-zero if it is extremely likely that the branch will be
3529 taken and THREAD_IF_TRUE is set. This is used for the branch at the
3530 end of a loop back up to the top.
3532 OWN_THREAD and OWN_OPPOSITE_THREAD are true if we are the only user of the
3533 thread. I.e., it is the fallthrough code of our jump or the target of the
3534 jump when we are the only jump going there.
3536 If OWN_THREAD is false, it must be the "true" thread of a jump. In that
3537 case, we can only take insns from the head of the thread for our delay
3538 slot. We then adjust the jump to point after the insns we have taken. */
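/* A sketch of the non-owned case, with invented code:

	beq	L1			beq	L1'
	 nop				 insn A		; taken from L1
	...				...
   L1:	insn A			L1:	insn A		; kept for other users
	insn B			L1':	insn B

   Since other paths also reach L1, insn A is copied rather than removed,
   and the branch is redirected to a new label past the insns taken.  */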
3541 fill_slots_from_thread (insn, condition, thread, opposite_thread, likely,
3542 thread_if_true, own_thread,
3543 slots_to_fill, pslots_filled, delay_list)
3546 rtx thread, opposite_thread;
3550 int slots_to_fill, *pslots_filled;
3554 struct resources opposite_needed, set, needed;
3560 /* Validate our arguments. */
3561 if ((condition == const_true_rtx && ! thread_if_true)
3562 || (! own_thread && ! thread_if_true))
3563 abort ();
3565 flags = get_jump_flags (insn, JUMP_LABEL (insn));
3567 /* If our thread is the end of subroutine, we can't get any delay
3568 insns from that.  */
3569 if (thread == 0)
3570 return delay_list;
3572 /* If this is an unconditional branch, nothing is needed at the
3573 opposite thread. Otherwise, compute what is needed there. */
3574 if (condition == const_true_rtx)
3575 CLEAR_RESOURCE (&opposite_needed);
3576 else
3577 mark_target_live_regs (opposite_thread, &opposite_needed);
3579 /* If the insn at THREAD can be split, do it here to avoid having to
3580 update THREAD and NEW_THREAD if it is done in the loop below. Also
3581 initialize NEW_THREAD. */
3583 new_thread = thread = try_split (PATTERN (thread), thread, 0);
3585 /* Scan insns at THREAD. We are looking for an insn that can be removed
3586 from THREAD (it neither sets nor references resources that were set
3587 ahead of it and it doesn't set anything needed by the insns ahead of
3588 it) and that either can be placed in an annulling insn or isn't
3589 needed at OPPOSITE_THREAD.  */
3591 CLEAR_RESOURCE (&needed);
3592 CLEAR_RESOURCE (&set);
3594 /* If we do not own this thread, we must stop as soon as we find
3595 something that we can't put in a delay slot, since all we can do
3596 is branch into THREAD at a later point. Therefore, labels stop
3597 the search if this is not the `true' thread. */
3599 for (trial = thread;
3600 ! stop_search_p (trial, ! thread_if_true) && (! lose || own_thread);
3601 trial = next_nonnote_insn (trial))
3605 /* If we have passed a label, we no longer own this thread. */
3606 if (GET_CODE (trial) == CODE_LABEL)
3612 pat = PATTERN (trial);
3613 if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
3616 /* If TRIAL conflicts with the insns ahead of it, we lose. Also,
3617 don't separate or copy insns that set and use CC0. */
3618 if (! insn_references_resource_p (trial, &set, 1)
3619 && ! insn_sets_resource_p (trial, &set, 1)
3620 && ! insn_sets_resource_p (trial, &needed, 1)
3622 && ! (reg_mentioned_p (cc0_rtx, pat)
3623 && (! own_thread || ! sets_cc0_p (pat)))
3629 /* If TRIAL is redundant with some insn before INSN, we don't
3630 actually need to add it to the delay list; we can merely pretend
3631 we did.  */
3632 if ((prior_insn = redundant_insn (trial, insn, delay_list)))
3634 fix_reg_dead_note (prior_insn, insn);
3637 update_block (trial, thread);
3638 if (trial == thread)
3640 thread = next_active_insn (thread);
3641 if (new_thread == trial)
3642 new_thread = thread;
3645 delete_insn (trial);
3649 update_reg_unused_notes (prior_insn, trial);
3650 new_thread = next_active_insn (trial);
3656 /* There are two ways we can win: If TRIAL doesn't set anything
3657 needed at the opposite thread and can't trap, or if it can
3658 go into an annulled delay slot. */
3660 && (condition == const_true_rtx
3661 || (! insn_sets_resource_p (trial, &opposite_needed, 1)
3662 && ! may_trap_p (pat))))
3665 trial = try_split (pat, trial, 0);
3666 if (new_thread == old_trial)
3668 if (thread == old_trial)
3670 pat = PATTERN (trial);
3671 if (eligible_for_delay (insn, *pslots_filled, trial, flags))
3675 #ifdef ANNUL_IFTRUE_SLOTS
3678 #ifdef ANNUL_IFFALSE_SLOTS
3684 trial = try_split (pat, trial, 0);
3685 if (new_thread == old_trial)
3687 if (thread == old_trial)
3689 pat = PATTERN (trial);
3690 if ((must_annul || delay_list == NULL) && (thread_if_true
3691 ? check_annul_list_true_false (0, delay_list)
3692 && eligible_for_annul_false (insn, *pslots_filled, trial, flags)
3693 : check_annul_list_true_false (1, delay_list)
3694 && eligible_for_annul_true (insn, *pslots_filled, trial, flags)))
3702 if (reg_mentioned_p (cc0_rtx, pat))
3703 link_cc0_insns (trial);
3706 /* If we own this thread, delete the insn. If this is the
3707 destination of a branch, show that a basic block status
3708 may have been updated. In any case, mark the new
3709 starting point of this thread. */
3712 update_block (trial, thread);
3713 if (trial == thread)
3715 thread = next_active_insn (thread);
3716 if (new_thread == trial)
3717 new_thread = thread;
3719 delete_insn (trial);
3722 new_thread = next_active_insn (trial);
3724 temp = own_thread ? trial : copy_rtx (trial);
3726 INSN_FROM_TARGET_P (temp) = 1;
3728 delay_list = add_to_delay_list (temp, delay_list);
3730 mark_set_resources (trial, &opposite_needed, 0, 1);
3732 if (slots_to_fill == ++(*pslots_filled))
3734 /* Even though we have filled all the slots, we
3735 may be branching to a location that has a
3736 redundant insn. Skip any if so. */
3737 while (new_thread && ! own_thread
3738 && ! insn_sets_resource_p (new_thread, &set, 1)
3739 && ! insn_sets_resource_p (new_thread, &needed, 1)
3740 && ! insn_references_resource_p (new_thread,
3743 = redundant_insn (new_thread, insn,
3746 /* We know we do not own the thread, so no need
3747 to call update_block and delete_insn. */
3748 fix_reg_dead_note (prior_insn, insn);
3749 update_reg_unused_notes (prior_insn, new_thread);
3750 new_thread = next_active_insn (new_thread);
3760 /* This insn can't go into a delay slot. */
3762 mark_set_resources (trial, &set, 0, 1);
3763 mark_referenced_resources (trial, &needed, 1);
3765 /* Ensure we don't put insns between the setting of cc and the comparison
3766 by moving a setting of cc into an earlier delay slot since these insns
3767 could clobber the condition code. */
3770 /* If this insn is a register-register copy and the next insn has
3771 a use of our destination, change it to use our source. That way,
3772 it will become a candidate for our delay slot the next time
3773 through this loop.  This case occurs commonly in loops that
3774 branch to their tails.
3776 We could check for more complex cases than those tested below,
3777 but it doesn't seem worth it. It might also be a good idea to try
3778 to swap the two insns. That might do better.
3780 We can't do this if the next insn modifies our destination, because
3781 that would make the replacement into the insn invalid. We also can't
3782 do this if it modifies our source, because it might be an earlyclobber
3783 operand.  This latter test also prevents updating the contents of
3784 a PRE_INC.  */
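/* Sketch with invented registers:

	mov	r1,r2		; TRIAL, stuck in the thread
	add	r2,r3,r4	; reads r2, so it conflicts with the copy

   Rewriting the add to read r1 removes its dependence on the copy's
   destination, so on the next iteration the add itself becomes a legal
   candidate for the delay slot.  */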
3786 if (GET_CODE (trial) == INSN && GET_CODE (pat) == SET
3787 && GET_CODE (SET_SRC (pat)) == REG
3788 && GET_CODE (SET_DEST (pat)) == REG)
3790 rtx next = next_nonnote_insn (trial);
3792 if (next && GET_CODE (next) == INSN
3793 && GET_CODE (PATTERN (next)) != USE
3794 && ! reg_set_p (SET_DEST (pat), next)
3795 && ! reg_set_p (SET_SRC (pat), next)
3796 && reg_referenced_p (SET_DEST (pat), PATTERN (next)))
3797 validate_replace_rtx (SET_DEST (pat), SET_SRC (pat), next);
3801 /* If we stopped on a branch insn that has delay slots, see if we can
3802 steal some of the insns in those slots. */
3803 if (trial && GET_CODE (trial) == INSN
3804 && GET_CODE (PATTERN (trial)) == SEQUENCE
3805 && GET_CODE (XVECEXP (PATTERN (trial), 0, 0)) == JUMP_INSN)
3807 /* If this is the `true' thread, we will want to follow the jump,
3808 so we can only do this if we have taken everything up to here. */
3809 if (thread_if_true && trial == new_thread
3810 && ! insn_references_resource_p (XVECEXP (PATTERN (trial), 0, 0),
3811 &opposite_needed, 0))
3813 = steal_delay_list_from_target (insn, condition, PATTERN (trial),
3814 delay_list, &set, &needed,
3815 &opposite_needed, slots_to_fill,
3816 pslots_filled, &must_annul,
3818 else if (! thread_if_true)
3820 = steal_delay_list_from_fallthrough (insn, condition,
3822 delay_list, &set, &needed,
3823 &opposite_needed, slots_to_fill,
3824 pslots_filled, &must_annul);
3827 /* If we haven't found anything for this delay slot and it is very
3828 likely that the branch will be taken, see if the insn at our target
3829 increments or decrements a register with an increment that does not
3830 depend on the destination register. If so, try to place the opposite
3831 arithmetic insn after the jump insn and put the arithmetic insn in the
3832 delay slot.  If we can't do this, return.  */
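/* For instance (invented code): if the likely-taken branch targets code
   beginning with "r1 = r1 + 4", that add can go into the delay slot so
   it executes on the taken path, a compensating "r1 = r1 - 4" is placed
   right after the branch so the rare fallthrough path undoes it, and
   the branch is redirected past the add at the target.  */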
3833 if (delay_list == 0 && likely && new_thread
3834 && GET_CODE (new_thread) == INSN
3835 && GET_CODE (PATTERN (new_thread)) != ASM_INPUT
3836 && asm_noperands (PATTERN (new_thread)) < 0)
3838 rtx pat = PATTERN (new_thread);
3843 pat = PATTERN (trial);
3845 if (GET_CODE (trial) != INSN || GET_CODE (pat) != SET
3846 || ! eligible_for_delay (insn, 0, trial, flags))
3849 dest = SET_DEST (pat), src = SET_SRC (pat);
3850 if ((GET_CODE (src) == PLUS || GET_CODE (src) == MINUS)
3851 && rtx_equal_p (XEXP (src, 0), dest)
3852 && ! reg_overlap_mentioned_p (dest, XEXP (src, 1)))
3854 rtx other = XEXP (src, 1);
3858 /* If this is a constant adjustment, use the same code with
3859 the negated constant.  Otherwise, reverse the sense of the
3860 arithmetic.  */
3861 if (GET_CODE (other) == CONST_INT)
3862 new_arith = gen_rtx_fmt_ee (GET_CODE (src), GET_MODE (src), dest,
3863 negate_rtx (GET_MODE (src), other));
3865 new_arith = gen_rtx_fmt_ee (GET_CODE (src) == PLUS ? MINUS : PLUS,
3866 GET_MODE (src), dest, other);
3868 ninsn = emit_insn_after (gen_rtx_SET (VOIDmode, dest, new_arith),
3871 if (recog_memoized (ninsn) < 0
3872 || (insn_extract (ninsn),
3873 ! constrain_operands (INSN_CODE (ninsn), 1)))
3875 delete_insn (ninsn);
3881 update_block (trial, thread);
3882 if (trial == thread)
3884 thread = next_active_insn (thread);
3885 if (new_thread == trial)
3886 new_thread = thread;
3888 delete_insn (trial);
3891 new_thread = next_active_insn (trial);
3893 ninsn = own_thread ? trial : copy_rtx (trial);
3895 INSN_FROM_TARGET_P (ninsn) = 1;
3897 delay_list = add_to_delay_list (ninsn, NULL_RTX);
3902 if (delay_list && must_annul)
3903 INSN_ANNULLED_BRANCH_P (insn) = 1;
3905 /* If we are to branch into the middle of this thread, find an appropriate
3906 label or make a new one if none, and redirect INSN to it. If we hit the
3907 end of the function, use the end-of-function label. */
3908 if (new_thread != thread)
3912 if (! thread_if_true)
3913 abort ();
3915 if (new_thread && GET_CODE (new_thread) == JUMP_INSN
3916 && (simplejump_p (new_thread)
3917 || GET_CODE (PATTERN (new_thread)) == RETURN)
3918 && redirect_with_delay_list_safe_p (insn,
3919 JUMP_LABEL (new_thread),
3921 new_thread = follow_jumps (JUMP_LABEL (new_thread));
3923 if (new_thread == 0)
3924 label = find_end_label ();
3925 else if (GET_CODE (new_thread) == CODE_LABEL)
3926 label = new_thread;
3927 else
3928 label = get_label_before (new_thread);
3930 reorg_redirect_jump (insn, label);
3936 /* Make another attempt to find insns to place in delay slots.
3938 We previously looked for insns located in front of the delay insn
3939 and, for non-jump delay insns, located behind the delay insn.
3941 Here only try to schedule jump insns and try to move insns from either
3942 the target or the following insns into the delay slot. If annulling is
3943 supported, we will be likely to do this.  Otherwise, we can do this only
3944 if safe.  */
3947 fill_eager_delay_slots ()
3951 int num_unfilled_slots = unfilled_slots_next - unfilled_slots_base;
3953 for (i = 0; i < num_unfilled_slots; i++)
3956 rtx target_label, insn_at_target, fallthrough_insn;
3959 int own_fallthrough;
3960 int prediction, slots_to_fill, slots_filled;
3962 insn = unfilled_slots_base[i];
3964 || INSN_DELETED_P (insn)
3965 || GET_CODE (insn) != JUMP_INSN
3966 || ! (condjump_p (insn) || condjump_in_parallel_p (insn)))
3969 slots_to_fill = num_delay_slots (insn);
3970 /* Some machine descriptions have defined instructions to have
3971 delay slots only in certain circumstances which may depend on
3972 nearby insns (which change due to reorg's actions).
3974 For example, the PA port normally has delay slots for unconditional
3975 jumps.
3977 However, the PA port claims such jumps do not have a delay slot
3978 if they are immediate successors of certain CALL_INSNs. This
3979 allows the port to favor filling the delay slot of the call with
3980 the unconditional jump. */
3981 if (slots_to_fill == 0)
3982 continue;
3985 target_label = JUMP_LABEL (insn);
3986 condition = get_branch_condition (insn, target_label);
      /* Get the next active fallthrough and target insns and see if we own
	 them.  Then see whether the branch is likely true.  We don't need
	 to do a lot of this for unconditional branches.  */

      insn_at_target = next_active_insn (target_label);
      own_target = own_thread_p (target_label, target_label, 0);

      if (condition == const_true_rtx)
	{
	  own_fallthrough = 0;
	  fallthrough_insn = 0;
	  prediction = 2;
	}
      else
	{
	  fallthrough_insn = next_active_insn (insn);
	  own_fallthrough = own_thread_p (NEXT_INSN (insn), NULL_RTX, 1);
	  prediction = mostly_true_jump (insn, condition);
	}
      /* If this insn is expected to branch, first try to get insns from our
	 target, then our fallthrough insns.  If it is not expected to branch,
	 try the other order.  */
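
      /* E.g. prediction == 2 (set above for branches that always take,
	 and returned by mostly_true_jump for branches judged very
	 likely) raids the target thread first; a prediction of zero or
	 less tries the fall-through thread first.  */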
      if (prediction > 0)
	{
	  delay_list
	    = fill_slots_from_thread (insn, condition, insn_at_target,
				      fallthrough_insn, prediction == 2, 1,
				      own_target,
				      slots_to_fill, &slots_filled, delay_list);

	  if (delay_list == 0 && own_fallthrough)
	    {
	      /* Even though we didn't find anything for delay slots,
		 we might have found a redundant insn which we deleted
		 from the thread that was filled.  So we have to recompute
		 the next insn at the target.  */
	      target_label = JUMP_LABEL (insn);
	      insn_at_target = next_active_insn (target_label);

	      delay_list
		= fill_slots_from_thread (insn, condition, fallthrough_insn,
					  insn_at_target, 0, 0,
					  own_fallthrough,
					  slots_to_fill, &slots_filled,
					  delay_list);
	    }
	}
      else
	{
	  if (own_fallthrough)
	    delay_list
	      = fill_slots_from_thread (insn, condition, fallthrough_insn,
					insn_at_target, 0, 0,
					own_fallthrough,
					slots_to_fill, &slots_filled,
					delay_list);

	  if (delay_list == 0)
	    delay_list
	      = fill_slots_from_thread (insn, condition, insn_at_target,
					next_active_insn (insn), 0, 1,
					own_target,
					slots_to_fill, &slots_filled,
					delay_list);
	}

      if (delay_list)
	unfilled_slots_base[i]
	  = emit_delay_sequence (insn, delay_list, slots_filled);

      if (slots_to_fill == slots_filled)
	unfilled_slots_base[i] = 0;

      note_delay_statistics (slots_filled, 1);
    }
}
/* Once we have tried two ways to fill a delay slot, make a pass over the
   code to try to improve the results and to do such things as more jump
   threading.  */

static void
relax_delay_slots (first)
     rtx first;
{
  register rtx insn, next, pat;
  register rtx trial, delay_insn, target_label;

  /* Look at every JUMP_INSN and see if we can improve it.  */
  for (insn = first; insn; insn = next)
    {
      rtx other;

      next = next_active_insn (insn);
      /* If this is a jump insn, see if it now jumps to a jump, jumps to
	 the next insn, or jumps to a label that is not the last of a
	 group of consecutive labels.  */
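
      /* For instance (labels invented): a jump to L1 where the insn at
	 L1 is itself `jump L2', or where L1 is one of several
	 consecutive labels ending in L2, is redirected to L2.  */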
      if (GET_CODE (insn) == JUMP_INSN
	  && (condjump_p (insn) || condjump_in_parallel_p (insn))
	  && (target_label = JUMP_LABEL (insn)) != 0)
	{
	  target_label = follow_jumps (target_label);
	  target_label = prev_label (next_active_insn (target_label));

	  if (target_label == 0)
	    target_label = find_end_label ();

	  /* If the jump now goes to the next insn, delete it.  */
	  if (next_active_insn (target_label) == next
	      && ! condjump_in_parallel_p (insn))
	    {
	      delete_jump (insn);
	      continue;
	    }

	  if (target_label != JUMP_LABEL (insn))
	    reorg_redirect_jump (insn, target_label);
	  /* See if this jump branches around an unconditional jump.
	     If so, invert this jump and point it to the target of the
	     second jump.  */
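
	  /* Schematically (labels invented):

		beq L1			bne L2
		jump L2	       ==>
	     L1: ...		     L1: ...
	  */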
	  if (next && GET_CODE (next) == JUMP_INSN
	      && (simplejump_p (next) || GET_CODE (PATTERN (next)) == RETURN)
	      && next_active_insn (target_label) == next_active_insn (next)
	      && no_labels_between_p (insn, next))
	    {
	      rtx label = JUMP_LABEL (next);

	      /* Be careful how we do this to avoid deleting code or
		 labels that are momentarily dead.  See similar optimization
		 in jump.c.

		 We also need to ensure we properly handle the case when
		 invert_jump fails.  */

	      ++LABEL_NUSES (target_label);
	      if (label)
		++LABEL_NUSES (label);

	      if (invert_jump (insn, label))
		{
		  delete_insn (next);
		  next = insn;
		}

	      if (label)
		--LABEL_NUSES (label);

	      if (--LABEL_NUSES (target_label) == 0)
		delete_insn (target_label);

	      continue;
	    }
	}
      /* If this is an unconditional jump and the previous insn is a
	 conditional jump, try reversing the condition of the previous
	 insn and swapping our targets.  The next pass might be able to
	 fill the slots.

	 Don't do this if we expect the conditional branch to be true, because
	 we would then be making the more common case longer.  */
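
      /* Schematically (labels invented):

		beq L3			bne L9
		jump L9	       ==>	jump L3

	 which may let a later pass fill the unconditional jump's delay
	 slots from L3's thread.  */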
      if (GET_CODE (insn) == JUMP_INSN
	  && (simplejump_p (insn) || GET_CODE (PATTERN (insn)) == RETURN)
	  && (other = prev_active_insn (insn)) != 0
	  && (condjump_p (other) || condjump_in_parallel_p (other))
	  && no_labels_between_p (other, insn)
	  && 0 > mostly_true_jump (other,
				   get_branch_condition (other,
							 JUMP_LABEL (other))))
	{
	  rtx other_target = JUMP_LABEL (other);
	  target_label = JUMP_LABEL (insn);

	  /* Increment the count of OTHER_TARGET, so it doesn't get deleted
	     as we move the label.  */
	  ++LABEL_NUSES (other_target);

	  if (invert_jump (other, target_label))
	    reorg_redirect_jump (insn, other_target);

	  --LABEL_NUSES (other_target);
	}
      /* Now look only at cases where we have filled a delay slot.  */
      if (GET_CODE (insn) != INSN
	  || GET_CODE (PATTERN (insn)) != SEQUENCE)
	continue;

      pat = PATTERN (insn);
      delay_insn = XVECEXP (pat, 0, 0);

      /* See if the first insn in the delay slot is redundant with some
	 previous insn.  Remove it from the delay slot if so; then set up
	 to reprocess this insn.  */
      if (redundant_insn (XVECEXP (pat, 0, 1), delay_insn, 0))
	{
	  delete_from_delay_slot (XVECEXP (pat, 0, 1));
	  next = prev_active_insn (next);
	  continue;
	}
      /* Now look only at the cases where we have a filled JUMP_INSN.  */
      if (GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) != JUMP_INSN
	  || ! (condjump_p (XVECEXP (PATTERN (insn), 0, 0))
		|| condjump_in_parallel_p (XVECEXP (PATTERN (insn), 0, 0))))
	continue;

      target_label = JUMP_LABEL (delay_insn);

      if (target_label)
	{
	  /* If this jump goes to another unconditional jump, thread it, but
	     don't convert a jump into a RETURN here.  */
	  trial = follow_jumps (target_label);
	  /* We use next_real_insn instead of next_active_insn, so that
	     the special USE insns emitted by reorg won't be ignored.
	     If they are ignored, then they will get deleted if target_label
	     is now unreachable, and that would cause mark_target_live_regs
	     to fail.  */
	  trial = prev_label (next_real_insn (trial));
	  if (trial == 0 && target_label != 0)
	    trial = find_end_label ();

	  if (trial != target_label
	      && redirect_with_delay_slots_safe_p (delay_insn, trial, insn))
	    {
	      reorg_redirect_jump (delay_insn, trial);
	      target_label = trial;
	    }
	  /* If the first insn at TARGET_LABEL is redundant with a previous
	     insn, redirect the jump to the following insn and process again.  */
	  trial = next_active_insn (target_label);
	  if (trial && GET_CODE (PATTERN (trial)) != SEQUENCE
	      && redundant_insn (trial, insn, 0))
	    {
	      rtx tmp;

	      /* Figure out where to emit the special USE insn so we don't
		 later incorrectly compute register live/death info.  */
	      tmp = next_active_insn (trial);
	      if (tmp == 0)
		tmp = find_end_label ();

	      /* Insert the special USE insn and update dataflow info.  */
	      update_block (trial, tmp);

	      /* Now emit a label before the special USE insn, and
		 redirect our jump to the new label.  */
	      target_label = get_label_before (PREV_INSN (tmp));
	      reorg_redirect_jump (delay_insn, target_label);
	      next = insn;
	      continue;
	    }
	  /* Similarly, if it is an unconditional jump with one insn in its
	     delay list and that insn is redundant, thread the jump.  */
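
	  /* E.g. (invented): if our jump targets a `jump L8' whose
	     single delay-slot insn duplicates one already executed on
	     our own path, we can aim our jump straight at L8.  */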
	  if (trial && GET_CODE (PATTERN (trial)) == SEQUENCE
	      && XVECLEN (PATTERN (trial), 0) == 2
	      && GET_CODE (XVECEXP (PATTERN (trial), 0, 0)) == JUMP_INSN
	      && (simplejump_p (XVECEXP (PATTERN (trial), 0, 0))
		  || GET_CODE (PATTERN (XVECEXP (PATTERN (trial), 0, 0))) == RETURN)
	      && redundant_insn (XVECEXP (PATTERN (trial), 0, 1), insn, 0))
	    {
	      target_label = JUMP_LABEL (XVECEXP (PATTERN (trial), 0, 0));
	      if (target_label == 0)
		target_label = find_end_label ();

	      if (redirect_with_delay_slots_safe_p (delay_insn, target_label,
						    insn))
		{
		  reorg_redirect_jump (delay_insn, target_label);
		  next = insn;
		  continue;
		}
	    }
	}
      if (! INSN_ANNULLED_BRANCH_P (delay_insn)
	  && prev_active_insn (target_label) == insn
	  && ! condjump_in_parallel_p (delay_insn)
#ifdef HAVE_cc0
	  /* If the last insn in the delay slot sets CC0 for some insn,
	     various code assumes that it is in a delay slot.  We could
	     put it back where it belonged and delete the register notes,
	     but it doesn't seem worthwhile in this uncommon case.  */
	  && ! find_reg_note (XVECEXP (pat, 0, XVECLEN (pat, 0) - 1),
			      REG_CC_USER, NULL_RTX)
#endif
	  )
	{
	  int i;

	  /* All this insn does is execute its delay list and jump to the
	     following insn.  So delete the jump and just execute the delay
	     list insns.

	     We do this by deleting the INSN containing the SEQUENCE, then
	     re-emitting the insns separately, and then deleting the jump.
	     This allows the count of the jump target to be properly
	     decremented.  */

	  /* Clear the from target bit, since these insns are no longer
	     in delay slots.  */
	  for (i = 0; i < XVECLEN (pat, 0); i++)
	    INSN_FROM_TARGET_P (XVECEXP (pat, 0, i)) = 0;

	  trial = PREV_INSN (insn);
	  delete_insn (insn);
	  emit_insn_after (pat, trial);
	  delete_scheduled_jump (delay_insn);
	  continue;
	}
      /* See if this is an unconditional jump around a single insn which is
	 identical to the one in its delay slot.  In this case, we can just
	 delete the branch and the insn in its delay slot.  */
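
      /* Schematically (invented insns):

		jump L4
		 add r1,r1,1	; delay slot
		add r1,r1,1	; the insn branched around
	     L4: ...

	 either path performs the add exactly once, so the jump and its
	 delay slot can be deleted, leaving only the plain add.  */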
      if (next && GET_CODE (next) == INSN
	  && prev_label (next_active_insn (next)) == target_label
	  && simplejump_p (insn)
	  && XVECLEN (pat, 0) == 2
	  && rtx_equal_p (PATTERN (next), PATTERN (XVECEXP (pat, 0, 1))))
	{
	  delete_insn (insn);
	  continue;
	}
      /* See if this jump (with its delay slots) branches around another
	 jump (without delay slots).  If so, invert this jump and point
	 it to the target of the second jump.  We cannot do this for
	 annulled jumps, though.  Again, don't convert a jump to a RETURN
	 here.  */
      if (! INSN_ANNULLED_BRANCH_P (delay_insn)
	  && next && GET_CODE (next) == JUMP_INSN
	  && (simplejump_p (next) || GET_CODE (PATTERN (next)) == RETURN)
	  && next_active_insn (target_label) == next_active_insn (next)
	  && no_labels_between_p (insn, next))
	{
	  rtx label = JUMP_LABEL (next);
	  rtx old_label = JUMP_LABEL (delay_insn);

	  if (label == 0)
	    label = find_end_label ();

	  if (redirect_with_delay_slots_safe_p (delay_insn, label, insn))
	    {
	      /* Be careful how we do this to avoid deleting code or labels
		 that are momentarily dead.  See similar optimization in
		 jump.c.  */
	      if (old_label)
		++LABEL_NUSES (old_label);

	      if (invert_jump (delay_insn, label))
		{
		  int i;

		  /* Must update the INSN_FROM_TARGET_P bits now that
		     the branch is reversed, so that mark_target_live_regs
		     will handle the delay slot insn correctly.  */
		  for (i = 1; i < XVECLEN (PATTERN (insn), 0); i++)
		    {
		      rtx slot = XVECEXP (PATTERN (insn), 0, i);
		      INSN_FROM_TARGET_P (slot) = ! INSN_FROM_TARGET_P (slot);
		    }

		  delete_insn (next);
		  next = insn;
		}

	      if (old_label && --LABEL_NUSES (old_label) == 0)
		delete_insn (old_label);
	      continue;
	    }
	}
      /* If we own the thread opposite the way this insn branches, see if we
	 can merge its delay slots with following insns.  */
      if (INSN_FROM_TARGET_P (XVECEXP (pat, 0, 1))
	  && own_thread_p (NEXT_INSN (insn), 0, 1))
	try_merge_delay_insns (insn, next);
      else if (! INSN_FROM_TARGET_P (XVECEXP (pat, 0, 1))
	       && own_thread_p (target_label, target_label, 0))
	try_merge_delay_insns (insn, next_active_insn (target_label));

      /* If we get here, we haven't deleted INSN.  But we may have deleted
	 NEXT, so recompute it.  */
      next = next_active_insn (insn);
    }
}
#ifdef HAVE_return

/* Look for filled jumps to the end of function label.  We can try to convert
   them into RETURN insns if the insns in the delay slot are valid for the
   RETURN as well.  */
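
/* E.g. (schematic): a filled `jump end_of_function_label' SEQUENCE can
   become a filled RETURN when the port's return pattern accepts the
   same delay-slot insns; otherwise it is redirected to a real RETURN
   found elsewhere in the function.  */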
static void
make_return_insns (first)
     rtx first;
{
  rtx insn, jump_insn, pat;
  rtx real_return_label = end_of_function_label;
  int slots, i;
  /* See if there is a RETURN insn in the function other than the one we
     made for END_OF_FUNCTION_LABEL.  If so, set up anything we can't change
     into a RETURN to jump to it.  */
  for (insn = first; insn; insn = NEXT_INSN (insn))
    if (GET_CODE (insn) == JUMP_INSN && GET_CODE (PATTERN (insn)) == RETURN)
      {
	real_return_label = get_label_before (insn);
	break;
      }

  /* Show an extra usage of REAL_RETURN_LABEL so it won't go away if it
     was equal to END_OF_FUNCTION_LABEL.  */
  LABEL_NUSES (real_return_label)++;
  /* Clear the list of insns to fill so we can use it.  */
  obstack_free (&unfilled_slots_obstack, unfilled_firstobj);

  for (insn = first; insn; insn = NEXT_INSN (insn))
    {
      int flags;

      /* Only look at filled JUMP_INSNs that go to the end of function
	 label.  */
      if (GET_CODE (insn) != INSN
	  || GET_CODE (PATTERN (insn)) != SEQUENCE
	  || GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) != JUMP_INSN
	  || JUMP_LABEL (XVECEXP (PATTERN (insn), 0, 0)) != end_of_function_label)
	continue;

      pat = PATTERN (insn);
      jump_insn = XVECEXP (pat, 0, 0);

      /* If we can't make the jump into a RETURN, try to redirect it to the best
	 RETURN and go on to the next insn.  */
      if (! reorg_redirect_jump (jump_insn, NULL_RTX))
	{
	  /* Make sure redirecting the jump will not invalidate the delay
	     slot insns.  */
	  if (redirect_with_delay_slots_safe_p (jump_insn,
						real_return_label,
						insn))
	    reorg_redirect_jump (jump_insn, real_return_label);
	  continue;
	}
      /* See if this RETURN can accept the insns currently in its delay slot.
	 It can if it has more or an equal number of slots and the contents
	 of each is valid.  */
      flags = get_jump_flags (jump_insn, JUMP_LABEL (jump_insn));
      slots = num_delay_slots (jump_insn);
      if (slots >= XVECLEN (pat, 0) - 1)
	{
	  for (i = 1; i < XVECLEN (pat, 0); i++)
	    if (! (
#ifdef ANNUL_IFFALSE_SLOTS
		   (INSN_ANNULLED_BRANCH_P (jump_insn)
		    && INSN_FROM_TARGET_P (XVECEXP (pat, 0, i)))
		   ? eligible_for_annul_false (jump_insn, i - 1,
					       XVECEXP (pat, 0, i), flags) :
#endif
#ifdef ANNUL_IFTRUE_SLOTS
		   (INSN_ANNULLED_BRANCH_P (jump_insn)
		    && ! INSN_FROM_TARGET_P (XVECEXP (pat, 0, i)))
		   ? eligible_for_annul_true (jump_insn, i - 1,
					      XVECEXP (pat, 0, i), flags) :
#endif
		   eligible_for_delay (jump_insn, i - 1, XVECEXP (pat, 0, i), flags)))
	      break;
	}
      else
	i = 0;

      if (i == XVECLEN (pat, 0))
	continue;
      /* We have to do something with this insn.  If it is an unconditional
	 RETURN, delete the SEQUENCE and output the individual insns,
	 followed by the RETURN.  Then set things up so we try to find
	 insns for its delay slots, if it needs some.  */
      if (GET_CODE (PATTERN (jump_insn)) == RETURN)
	{
	  rtx prev = PREV_INSN (insn);

	  delete_insn (insn);
	  for (i = 1; i < XVECLEN (pat, 0); i++)
	    prev = emit_insn_after (PATTERN (XVECEXP (pat, 0, i)), prev);

	  insn = emit_jump_insn_after (PATTERN (jump_insn), prev);
	  emit_barrier_after (insn);

	  if (slots)
	    obstack_ptr_grow (&unfilled_slots_obstack, insn);
	}
      else
	/* It is probably more efficient to keep this with its current
	   delay slot as a branch to a RETURN.  */
	reorg_redirect_jump (jump_insn, real_return_label);
    }
  /* Now delete REAL_RETURN_LABEL if we never used it.  Then try to fill any
     new delay slots we have created.  */
  if (--LABEL_NUSES (real_return_label) == 0)
    delete_insn (real_return_label);

  fill_simple_delay_slots (1);
  fill_simple_delay_slots (0);
}
#endif
/* Try to find insns to place in delay slots.  */

void
dbr_schedule (first, file)
     rtx first;
     FILE *file;
{
  rtx insn, next, epilogue_insn = 0;
  int i;
  int old_flag_no_peephole = flag_no_peephole;

  /* Execute `final' once in prescan mode to delete any insns that won't be
     used.  Don't let final try to do any peephole optimization--it will
     ruin dataflow information for this pass.  */

  flag_no_peephole = 1;
  final (first, 0, NO_DEBUG, 1, 1);
  flag_no_peephole = old_flag_no_peephole;
  /* If the current function has no insns other than the prologue and
     epilogue, then do not try to fill any delay slots.  */
  if (n_basic_blocks == 0)
    return;

  /* Find the highest INSN_UID and allocate and initialize our map from
     INSN_UID's to position in code.  */
  for (max_uid = 0, insn = first; insn; insn = NEXT_INSN (insn))
    {
      if (INSN_UID (insn) > max_uid)
	max_uid = INSN_UID (insn);
      if (GET_CODE (insn) == NOTE
	  && NOTE_LINE_NUMBER (insn) == NOTE_INSN_EPILOGUE_BEG)
	epilogue_insn = insn;
    }

  uid_to_ruid = (int *) alloca ((max_uid + 1) * sizeof (int));
  for (i = 0, insn = first; insn; i++, insn = NEXT_INSN (insn))
    uid_to_ruid[INSN_UID (insn)] = i;
  /* Initialize the list of insns that need filling.  */
  if (unfilled_firstobj == 0)
    {
      gcc_obstack_init (&unfilled_slots_obstack);
      unfilled_firstobj = (rtx *) obstack_alloc (&unfilled_slots_obstack, 0);
    }

  for (insn = next_active_insn (first); insn; insn = next_active_insn (insn))
    {
      rtx target;

      INSN_ANNULLED_BRANCH_P (insn) = 0;
      INSN_FROM_TARGET_P (insn) = 0;

      /* Skip vector tables.  We can't get attributes for them.  */
      if (GET_CODE (insn) == JUMP_INSN
	  && (GET_CODE (PATTERN (insn)) == ADDR_VEC
	      || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC))
	continue;

      if (num_delay_slots (insn) > 0)
	obstack_ptr_grow (&unfilled_slots_obstack, insn);

      /* Ensure all jumps go to the last of a set of consecutive labels.  */
      if (GET_CODE (insn) == JUMP_INSN
	  && (condjump_p (insn) || condjump_in_parallel_p (insn))
	  && JUMP_LABEL (insn) != 0
	  && ((target = prev_label (next_active_insn (JUMP_LABEL (insn))))
	      != JUMP_LABEL (insn)))
	redirect_jump (insn, target);
    }
  /* Indicate what resources are required to be valid at the end of the current
     function.  The condition code never is and memory always is.  If the
     frame pointer is needed, it is and so is the stack pointer unless
     EXIT_IGNORE_STACK is non-zero.  If the frame pointer is not needed, the
     stack pointer is.  Registers used to return the function value are
     needed.  Registers holding global variables are needed.  */
  end_of_function_needs.cc = 0;
  end_of_function_needs.memory = 1;
  end_of_function_needs.unch_memory = 0;
  CLEAR_HARD_REG_SET (end_of_function_needs.regs);

  if (frame_pointer_needed)
    {
      SET_HARD_REG_BIT (end_of_function_needs.regs, FRAME_POINTER_REGNUM);
#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
      SET_HARD_REG_BIT (end_of_function_needs.regs, HARD_FRAME_POINTER_REGNUM);
#endif
#ifdef EXIT_IGNORE_STACK
      if (! EXIT_IGNORE_STACK)
#endif
	SET_HARD_REG_BIT (end_of_function_needs.regs, STACK_POINTER_REGNUM);
    }
  else
    SET_HARD_REG_BIT (end_of_function_needs.regs, STACK_POINTER_REGNUM);

  if (current_function_return_rtx != 0)
    mark_referenced_resources (current_function_return_rtx,
			       &end_of_function_needs, 1);

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    if (global_regs[i]
#ifdef EPILOGUE_USES
	|| EPILOGUE_USES (i)
#endif
	)
      SET_HARD_REG_BIT (end_of_function_needs.regs, i);
  /* The registers required to be live at the end of the function are
     represented in the flow information as being dead just prior to
     reaching the end of the function.  For example, the return of a value
     might be represented by a USE of the return register immediately
     followed by an unconditional jump to the return label where the
     return label is the end of the RTL chain.  The end of the RTL chain
     is then taken to mean that the return register is live.

     This sequence is no longer maintained when epilogue instructions are
     added to the RTL chain.  To reconstruct the original meaning, the
     start of the epilogue (NOTE_INSN_EPILOGUE_BEG) is regarded as the
     point where these registers become live (start_of_epilogue_needs).
     If epilogue instructions are present, the registers set by those
     instructions won't have been processed by flow.  Thus, those
     registers are additionally required at the end of the RTL chain
     (end_of_function_needs).  */
  start_of_epilogue_needs = end_of_function_needs;

  while ((epilogue_insn = next_nonnote_insn (epilogue_insn)))
    mark_set_resources (epilogue_insn, &end_of_function_needs, 0, 1);

  /* Show we haven't computed an end-of-function label yet.  */
  end_of_function_label = 0;

  /* Allocate and initialize the tables used by mark_target_live_regs.  */
  target_hash_table
    = (struct target_info **) alloca ((TARGET_HASH_PRIME
				       * sizeof (struct target_info *)));
  bzero ((char *) target_hash_table,
	 TARGET_HASH_PRIME * sizeof (struct target_info *));

  bb_ticks = (int *) alloca (n_basic_blocks * sizeof (int));
  bzero ((char *) bb_ticks, n_basic_blocks * sizeof (int));

  /* Initialize the statistics for this function.  */
  bzero ((char *) num_insns_needing_delays, sizeof num_insns_needing_delays);
  bzero ((char *) num_filled_delays, sizeof num_filled_delays);
  /* Now do the delay slot filling.  Try everything twice in case earlier
     changes make more slots fillable.  */

  for (reorg_pass_number = 0;
       reorg_pass_number < MAX_REORG_PASSES;
       reorg_pass_number++)
    {
      fill_simple_delay_slots (1);
      fill_simple_delay_slots (0);
      fill_eager_delay_slots ();
      relax_delay_slots (first);
    }
  /* Delete any USE insns made by update_block; subsequent passes don't need
     them or know how to deal with them.  */
  for (insn = first; insn; insn = next)
    {
      next = NEXT_INSN (insn);

      if (GET_CODE (insn) == INSN && GET_CODE (PATTERN (insn)) == USE
	  && GET_RTX_CLASS (GET_CODE (XEXP (PATTERN (insn), 0))) == 'i')
	next = delete_insn (insn);
    }
  /* If we made an end of function label, indicate that it is now
     safe to delete it by undoing our prior adjustment to LABEL_NUSES.
     If it is now unused, delete it.  */
  if (end_of_function_label && --LABEL_NUSES (end_of_function_label) == 0)
    delete_insn (end_of_function_label);

#ifdef HAVE_return
  if (HAVE_return && end_of_function_label != 0)
    make_return_insns (first);
#endif
  obstack_free (&unfilled_slots_obstack, unfilled_firstobj);

  /* It is not clear why the line below is needed, but it does seem to be.  */
  unfilled_firstobj = (rtx *) obstack_alloc (&unfilled_slots_obstack, 0);

  /* Reposition the prologue and epilogue notes in case we moved the
     prologue/epilogue insns.  */
  reposition_prologue_and_epilogue_notes (first);
  if (file)
    {
      register int i, j, need_comma;

      for (reorg_pass_number = 0;
	   reorg_pass_number < MAX_REORG_PASSES;
	   reorg_pass_number++)
	{
	  fprintf (file, ";; Reorg pass #%d:\n", reorg_pass_number + 1);
	  for (i = 0; i < NUM_REORG_FUNCTIONS; i++)
	    {
	      need_comma = 0;
	      fprintf (file, ";; Reorg function #%d\n", i);

	      fprintf (file, ";; %d insns needing delay slots\n;; ",
		       num_insns_needing_delays[i][reorg_pass_number]);

	      for (j = 0; j < MAX_DELAY_HISTOGRAM; j++)
		if (num_filled_delays[i][j][reorg_pass_number])
		  {
		    if (need_comma)
		      fprintf (file, ", ");
		    need_comma = 1;
		    fprintf (file, "%d got %d delays",
			     num_filled_delays[i][j][reorg_pass_number], j);
		  }
	      fprintf (file, "\n");
	    }
	}
    }
  /* For all JUMP insns, fill in branch prediction notes, so that during
     assembler output a target can set branch prediction bits in the code.
     We have to do this now, as up until this point the destinations of
     JUMPS can be moved around and changed, but past right here that cannot
     happen.  */
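
  /* E.g. a port's branch output template can inspect this REG_BR_PRED
     note to choose a predict-taken or predict-not-taken encoding of the
     branch (SPARC-V9-style `,pt'/`,pn' hints, say).  */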
  for (insn = first; insn; insn = NEXT_INSN (insn))
    {
      int pred_flags;

      if (GET_CODE (insn) == INSN)
	{
	  rtx pat = PATTERN (insn);

	  if (GET_CODE (pat) == SEQUENCE)
	    insn = XVECEXP (pat, 0, 0);
	}
      if (GET_CODE (insn) != JUMP_INSN)
	continue;

      pred_flags = get_jump_flags (insn, JUMP_LABEL (insn));
      REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_BR_PRED,
					    GEN_INT (pred_flags),
					    REG_NOTES (insn));
    }
}
#endif /* DELAY_SLOTS */