/* Perform instruction reorganizations for delay slot filling.
   Copyright (C) 1992, 1993 Free Software Foundation, Inc.
   Contributed by Richard Kenner (kenner@nyu.edu).
   Hacked by Michael Tiemann (tiemann@cygnus.com).

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA.  */
/* Instruction reorganization pass.

   This pass runs after register allocation and final jump
   optimization.  It should be the last pass to run before peephole.
   It serves primarily to fill delay slots of insns, typically branch
   and call insns.  Other insns typically involve more complicated
   interactions of data dependencies and resource constraints, and
   are better handled by scheduling before register allocation (by the
   function `schedule_insns').
   The Branch Penalty is the number of extra cycles that are needed to
   execute a branch insn.  On an ideal machine, branches take a single
   cycle, and the Branch Penalty is 0.  Several RISC machines approach
   branch delays differently:

   The MIPS and AMD 29000 have a single branch delay slot.  Most insns
   (except other branches) can be used to fill this slot.  When the
   slot is filled, two insns execute in two cycles, reducing the
   branch penalty to zero.

   The Motorola 88000 conditionally exposes its branch delay slot,
   so code is shorter when it is turned off, but will run faster
   when useful insns are scheduled there.
   The IBM ROMP has two forms of branch and call insns, both with and
   without a delay slot.  Much like the 88k, insns not using the delay
   slot can be shorter (2 bytes vs. 4 bytes), but will run slower.
   The SPARC always has a branch delay slot, but its effects can be
   annulled when the branch is not taken.  This means that, failing to
   find other sources of insns, we can hoist an insn from the branch
   target that would only be safe to execute knowing that the branch
   is taken.
   The HP-PA always has a branch delay slot.  For unconditional branches
   its effects can be annulled when the branch is taken.  The effects
   of the delay slot in a conditional branch can be nullified for forward
   taken branches, or for untaken backward branches.  This means
   we can hoist insns from the fall-through path for forward branches or
   steal insns from the target of backward branches.
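
   For orientation, here is a schematic example of a simple fill
   (invented mnemonics, not the syntax of any particular port).  An
   insn from before the branch moves into the slot:

	add  r1,r2,r3			b    L1
	b    L1			==>	add  r1,r2,r3	; delay slot
	nop

   With annulling, the slot can instead hold an insn hoisted from the
   branch target, cancelled on the path where the prediction was wrong.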
   Three techniques for filling delay slots have been implemented so far:

   (1) `fill_simple_delay_slots' is the simplest, most efficient way
   to fill delay slots.  This pass first looks for insns which come
   from before the branch and which are safe to execute after the
   branch.  Then it searches after the insn requiring delay slots or,
   in the case of a branch, for insns that are after the point at
   which the branch merges into the fallthrough code, if such a point
   exists.  When such insns are found, the branch penalty decreases
   and no code expansion takes place.
   (2) `fill_eager_delay_slots' is more complicated: it is used for
   scheduling conditional jumps, or for scheduling jumps which cannot
   be filled using (1).  A machine need not have annulled jumps to use
   this strategy, but it helps (by keeping more options open).
   `fill_eager_delay_slots' tries to guess the direction the branch
   will go; if it guesses right 100% of the time, it can reduce the
   branch penalty as much as `fill_simple_delay_slots' does.  If it
   guesses wrong 100% of the time, it might as well schedule nops (or
   on the m88k, unexpose the branch slot).  When
   `fill_eager_delay_slots' takes insns from the fall-through path of
   the jump, usually there is no code expansion; when it takes insns
   from the branch target, there is code expansion if it is not the
   only way to reach that target.
   (3) `relax_delay_slots' uses a set of rules to simplify code that
   has been reorganized by (1) and (2).  It finds cases where a
   conditional test can be eliminated, jumps can be threaded, extra
   insns can be eliminated, etc.  It is the job of (1) and (2) to do a
   good job of scheduling locally; `relax_delay_slots' takes care of
   making the various individual schedules work well together.  It is
   especially tuned to handle the control flow interactions of branch
   insns.  It does nothing for insns with delay slots that do not
   branch.
   On machines that use CC0, we are very conservative.  We will not make
   a copy of an insn involving CC0 since we want to maintain a 1-1
   correspondence between the insn that sets and uses CC0.  The insns are
   allowed to be separated by placing an insn that sets CC0 (but not an insn
   that uses CC0; we could do this, but it doesn't seem worthwhile) in a
   delay slot.  In that case, we point each insn at the other with REG_CC_USER
   and REG_CC_SETTER notes.  Note that these restrictions affect very few
   machines because most RISC machines with delay slots will not use CC0
   (the RT is the only known exception at this point).
   Not yet implemented:

   The Acorn Risc Machine can conditionally execute most insns, so
   it is profitable to move single insns into a position to execute
   based on the condition code of the previous insn.

   The HP-PA can conditionally nullify insns, providing a similar
   effect to the ARM, differing mostly in which insn is "in charge".  */
#include "config.h"
#include "rtl.h"
#include "insn-config.h"
#include "conditions.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "regs.h"
#include "insn-flags.h"
#include "recog.h"
#include "flags.h"
#include "output.h"
#include "obstack.h"
#include "insn-attr.h"
#define obstack_chunk_alloc xmalloc
#define obstack_chunk_free free

#ifndef ANNUL_IFTRUE_SLOTS
#define eligible_for_annul_true(INSN, SLOTS, TRIAL, FLAGS) 0
#endif

#ifndef ANNUL_IFFALSE_SLOTS
#define eligible_for_annul_false(INSN, SLOTS, TRIAL, FLAGS) 0
#endif
/* Insns which have delay slots that have not yet been filled.  */

static struct obstack unfilled_slots_obstack;
static rtx *unfilled_firstobj;

/* Define macros to refer to the first and last slot containing unfilled
   insns.  These are used because the list may move and its address
   should be recomputed at each use.  */

#define unfilled_slots_base	\
  ((rtx *) obstack_base (&unfilled_slots_obstack))

#define unfilled_slots_next	\
  ((rtx *) obstack_next_free (&unfilled_slots_obstack))
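
/* A sketch of how the filling passes walk this list (illustrative; the
   real traversals live in the fill_* routines declared below):

     rtx *slot;
     for (slot = unfilled_slots_base; slot < unfilled_slots_next; slot++)
       ... try to fill the delay slots of *slot ...

   New entries are added with obstack_ptr_grow, as in find_end_label and
   delete_from_delay_slot below.  */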
/* This structure is used to indicate which hardware resources are set or
   needed by insns so far.  */

struct resources
{
  char memory;			/* Insn sets or needs a memory location.  */
  char volatil;			/* Insn sets or needs a volatile memory loc.  */
  char cc;			/* Insn sets or needs the condition codes.  */
  HARD_REG_SET regs;		/* Which registers are set or needed.  */
};

/* Macro to clear all resources.  */
#define CLEAR_RESOURCE(RES)	\
 do { (RES)->memory = (RES)->volatil = (RES)->cc = 0;	\
      CLEAR_HARD_REG_SET ((RES)->regs); } while (0)
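
/* Typical use, as in several routines below (sketch):

     struct resources needed;

     CLEAR_RESOURCE (&needed);
     mark_referenced_resources (insn, &needed, 1);  */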
/* Indicates what resources are required at the beginning of the epilogue.  */
static struct resources start_of_epilogue_needs;

/* Indicates what resources are required at function end.  */
static struct resources end_of_function_needs;

/* Points to the label before the end of the function.  */
static rtx end_of_function_label;
/* This structure is used to record liveness information at the targets or
   fallthrough insns of branches.  We will most likely need the information
   at targets again, so save them in a hash table rather than recomputing them
   each time.  */

struct target_info
{
  int uid;			/* INSN_UID of target.  */
  struct target_info *next;	/* Next info for same hash bucket.  */
  HARD_REG_SET live_regs;	/* Registers live at target.  */
  int block;			/* Basic block number containing target.  */
  int bb_tick;			/* Generation count of basic block info.  */
};

#define TARGET_HASH_PRIME 257
/* Define the hash table itself.  */
static struct target_info **target_hash_table;

/* For each basic block, we maintain a generation number of its basic
   block info, which is updated each time we move an insn from the
   target of a jump.  This is the generation number indexed by block
   number.  */

static int *bb_ticks;

/* Mapping between INSN_UID's and position in the code since INSN_UID's do
   not always monotonically increase.  */
static int *uid_to_ruid;

/* Highest valid index in `uid_to_ruid'.  */
static int max_uid;
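
/* The pass entry point builds this mapping with one walk of the insn
   chain, roughly (a sketch, not verbatim):

     for (i = 0, insn = first; insn; i++, insn = NEXT_INSN (insn))
       uid_to_ruid[INSN_UID (insn)] = i;  */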
static void mark_referenced_resources PROTO((rtx, struct resources *, int));
static void mark_set_resources PROTO((rtx, struct resources *, int, int));
static int stop_search_p PROTO((rtx, int));
static int resource_conflicts_p PROTO((struct resources *,
				       struct resources *));
static int insn_references_resource_p PROTO((rtx, struct resources *, int));
static int insn_sets_resource_p PROTO((rtx, struct resources *, int));
static rtx find_end_label PROTO((void));
static rtx emit_delay_sequence PROTO((rtx, rtx, int, int));
static rtx add_to_delay_list PROTO((rtx, rtx));
static void delete_from_delay_slot PROTO((rtx));
static void delete_scheduled_jump PROTO((rtx));
static void note_delay_statistics PROTO((int, int));
static rtx optimize_skip PROTO((rtx));
static int get_jump_flags PROTO((rtx, rtx));
static int rare_destination PROTO((rtx));
static int mostly_true_jump PROTO((rtx, rtx));
static rtx get_branch_condition PROTO((rtx, rtx));
static int condition_dominates_p PROTO((rtx, rtx));
static rtx steal_delay_list_from_target PROTO((rtx, rtx, rtx, rtx,
					       struct resources *,
					       struct resources *,
					       struct resources *,
					       int, int *, int *, rtx *));
static rtx steal_delay_list_from_fallthrough PROTO((rtx, rtx, rtx, rtx,
						    struct resources *,
						    struct resources *,
						    struct resources *,
						    int, int *, int *));
static void try_merge_delay_insns PROTO((rtx, rtx));
static int redundant_insn_p PROTO((rtx, rtx, rtx));
static int own_thread_p PROTO((rtx, rtx, int));
static int find_basic_block PROTO((rtx));
static void update_block PROTO((rtx, rtx));
static int reorg_redirect_jump PROTO((rtx, rtx));
static void update_reg_dead_notes PROTO((rtx, rtx));
static void update_live_status PROTO((rtx, rtx));
static rtx next_insn_no_annul PROTO((rtx));
static void mark_target_live_regs PROTO((rtx, struct resources *));
static void fill_simple_delay_slots PROTO((rtx, int));
static rtx fill_slots_from_thread PROTO((rtx, rtx, rtx, rtx, int, int,
					 int, int, int, int *));
static void fill_eager_delay_slots PROTO((rtx));
static void relax_delay_slots PROTO((rtx));
static void make_return_insns PROTO((rtx));
/* Given X, some rtl, and RES, a pointer to a `struct resource', mark
   which resources are referenced by the insn.  If INCLUDE_DELAYED_EFFECTS
   is TRUE, resources used by the called routine will be included for
   CALL_INSNs.  */

static void
mark_referenced_resources (x, res, include_delayed_effects)
     register rtx x;
     register struct resources *res;
     register int include_delayed_effects;
{
  register enum rtx_code code = GET_CODE (x);
  register int i, j;
  register char *format_ptr;
  /* Handle leaf items for which we set resource flags.  Also, special-case
     CALL, SET and CLOBBER operators.  */
  switch (code)
    {
    case CONST:
    case CONST_INT:
    case CONST_DOUBLE:
    case PC:
    case SYMBOL_REF:
    case LABEL_REF:
      return;

    case SUBREG:
      if (GET_CODE (SUBREG_REG (x)) != REG)
	mark_referenced_resources (SUBREG_REG (x), res, 0);
      else
	{
	  int regno = REGNO (SUBREG_REG (x)) + SUBREG_WORD (x);
	  int last_regno = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
	  for (i = regno; i < last_regno; i++)
	    SET_HARD_REG_BIT (res->regs, i);
	}
      return;

    case REG:
      for (i = 0; i < HARD_REGNO_NREGS (REGNO (x), GET_MODE (x)); i++)
	SET_HARD_REG_BIT (res->regs, REGNO (x) + i);
      return;
    case MEM:
      /* If this memory shouldn't change, it really isn't referencing
	 memory.  */
      if (! RTX_UNCHANGING_P (x))
	res->memory = 1;
      res->volatil = MEM_VOLATILE_P (x);

      /* Mark registers used to access memory.  */
      mark_referenced_resources (XEXP (x, 0), res, 0);
      return;

    case CC0:
      res->cc = 1;
      return;

    case UNSPEC_VOLATILE:
    case ASM_INPUT:
      /* Traditional asm's are always volatile.  */
      res->volatil = 1;
      return;

    case ASM_OPERANDS:
      res->volatil = MEM_VOLATILE_P (x);

      /* For all ASM_OPERANDS, we must traverse the vector of input operands.
	 We can not just fall through here since then we would be confused
	 by the ASM_INPUT rtx inside ASM_OPERANDS, which do not indicate
	 traditional asms unlike their normal usage.  */

      for (i = 0; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
	mark_referenced_resources (ASM_OPERANDS_INPUT (x, i), res, 0);
      return;
    case CALL:
      /* The first operand will be a (MEM (xxx)) but doesn't really reference
	 memory.  The second operand may be referenced, though.  */
      mark_referenced_resources (XEXP (XEXP (x, 0), 0), res, 0);
      mark_referenced_resources (XEXP (x, 1), res, 0);
      return;

    case SET:
      /* Usually, the first operand of SET is set, not referenced.  But
	 registers used to access memory are referenced.  SET_DEST is
	 also referenced if it is a ZERO_EXTRACT or SIGN_EXTRACT.  */

      mark_referenced_resources (SET_SRC (x), res, 0);

      x = SET_DEST (x);
      if (GET_CODE (x) == SIGN_EXTRACT || GET_CODE (x) == ZERO_EXTRACT)
	mark_referenced_resources (x, res, 0);
      else if (GET_CODE (x) == SUBREG)
	x = SUBREG_REG (x);
      if (GET_CODE (x) == MEM)
	mark_referenced_resources (XEXP (x, 0), res, 0);
      return;

    case CLOBBER:
      return;
    case CALL_INSN:
      if (include_delayed_effects)
	{
	  /* A CALL references memory, the frame pointer if it exists, the
	     stack pointer, any global registers and any registers given in
	     USE insns immediately in front of the CALL.

	     However, we may have moved some of the parameter loading insns
	     into the delay slot of this CALL.  If so, the USE's for them
	     don't count and should be skipped.  */
	  rtx insn = PREV_INSN (x);
	  rtx sequence = 0;
	  int seq_size = 0;

	  /* If we are part of a delay slot sequence, point at the SEQUENCE.  */
	  if (NEXT_INSN (insn) != x)
	    {
	      sequence = PATTERN (NEXT_INSN (insn));
	      seq_size = XVECLEN (sequence, 0);
	      if (GET_CODE (sequence) != SEQUENCE)
		abort ();
	    }

	  res->memory = 1;
	  SET_HARD_REG_BIT (res->regs, STACK_POINTER_REGNUM);
	  if (frame_pointer_needed)
	    {
	      SET_HARD_REG_BIT (res->regs, FRAME_POINTER_REGNUM);
#if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
	      SET_HARD_REG_BIT (res->regs, HARD_FRAME_POINTER_REGNUM);
#endif
	    }

	  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
	    if (global_regs[i])
	      SET_HARD_REG_BIT (res->regs, i);

	  /* Skip any labels between the CALL_INSN and possible USE insns.  */
	  while (GET_CODE (insn) == CODE_LABEL)
	    insn = PREV_INSN (insn);

	  for ( ; (insn && GET_CODE (insn) == INSN
		   && GET_CODE (PATTERN (insn)) == USE);
	       insn = PREV_INSN (insn))
	    {
	      for (i = 1; i < seq_size; i++)
		{
		  rtx slot_pat = PATTERN (XVECEXP (sequence, 0, i));
		  if (GET_CODE (slot_pat) == SET
		      && rtx_equal_p (SET_DEST (slot_pat),
				      XEXP (PATTERN (insn), 0)))
		    break;
		}

	      if (i >= seq_size)
		mark_referenced_resources (XEXP (PATTERN (insn), 0), res, 0);
	    }
	}

      /* ... fall through to other INSN processing ...  */
    case INSN:
    case JUMP_INSN:

#ifdef INSN_REFERENCES_ARE_DELAYED
      if (! include_delayed_effects
	  && INSN_REFERENCES_ARE_DELAYED (x))
	return;
#endif

      /* No special processing, just speed up.  */
      mark_referenced_resources (PATTERN (x), res, include_delayed_effects);
      return;
    }

  /* Process each sub-expression and flag what it needs.  */
  format_ptr = GET_RTX_FORMAT (code);
  for (i = 0; i < GET_RTX_LENGTH (code); i++)
    switch (*format_ptr++)
      {
      case 'e':
	mark_referenced_resources (XEXP (x, i), res, include_delayed_effects);
	break;

      case 'E':
	for (j = 0; j < XVECLEN (x, i); j++)
	  mark_referenced_resources (XVECEXP (x, i, j), res,
				     include_delayed_effects);
	break;
      }
}
/* Given X, a part of an insn, and a pointer to a `struct resource', RES,
   indicate which resources are modified by the insn.  If
   INCLUDE_DELAYED_EFFECTS is nonzero, also mark resources potentially
   set by the called routine.

   If IN_DEST is nonzero, it means we are inside a SET.  Otherwise,
   objects are being referenced instead of set.

   We never mark the insn as modifying the condition code unless it explicitly
   SETs CC0 even though this is not totally correct.  The reason for this is
   that we require a SET of CC0 to immediately precede the reference to CC0.
   So if some other insn sets CC0 as a side-effect, we know it cannot affect
   our computation and thus may be placed in a delay slot.  */

static void
mark_set_resources (x, res, in_dest, include_delayed_effects)
     register rtx x;
     register struct resources *res;
     int in_dest;
     int include_delayed_effects;
{
  register enum rtx_code code;
  register int i, j;
  register char *format_ptr;
 restart:

  code = GET_CODE (x);

  switch (code)
    {
    case NOTE:
    case BARRIER:
    case CODE_LABEL:
    case USE:
    case CONST_INT:
    case CONST_DOUBLE:
    case LABEL_REF:
    case SYMBOL_REF:
    case CONST:
    case PC:
      /* These don't set any resources.  */
      return;

    case CALL_INSN:
      /* Called routine modifies the condition code, memory, any registers
	 that aren't saved across calls, global registers and anything
	 explicitly CLOBBERed immediately after the CALL_INSN.  */

      if (include_delayed_effects)
	{
	  rtx next = NEXT_INSN (x);
	  rtx prev = PREV_INSN (x);

	  res->cc = res->memory = 1;
	  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
	    if (call_used_regs[i] || global_regs[i])
	      SET_HARD_REG_BIT (res->regs, i);

	  /* If X is part of a delay slot sequence, then NEXT should be
	     the first insn after the sequence.  */
	  if (NEXT_INSN (prev) != x)
	    next = NEXT_INSN (NEXT_INSN (prev));

	  /* Skip any possible labels between the CALL_INSN and CLOBBERs.  */
	  while (GET_CODE (next) == CODE_LABEL)
	    next = NEXT_INSN (next);

	  for (; (next && GET_CODE (next) == INSN
		  && GET_CODE (PATTERN (next)) == CLOBBER);
	       next = NEXT_INSN (next))
	    mark_set_resources (XEXP (PATTERN (next), 0), res, 1, 0);

	  /* Check for a NOTE_INSN_SETJMP.  If it exists, then we must
	     assume that this call can clobber any register.  */
	  if (next && GET_CODE (next) == NOTE
	      && NOTE_LINE_NUMBER (next) == NOTE_INSN_SETJMP)
	    SET_HARD_REG_SET (res->regs);
	}
      /* ... and also what its RTL says it modifies, if anything.  */

    case JUMP_INSN:
    case INSN:

      /* An insn consisting of just a CLOBBER (or USE) is just for flow
	 and doesn't actually do anything, so we ignore it.  */

#ifdef INSN_SETS_ARE_DELAYED
      if (! include_delayed_effects
	  && INSN_SETS_ARE_DELAYED (x))
	return;
#endif

      x = PATTERN (x);
      if (GET_CODE (x) != USE && GET_CODE (x) != CLOBBER)
	goto restart;
      return;

    case SET:
      /* If the source of a SET is a CALL, this is actually done by
	 the called routine.  So only include it if we are to include the
	 effects of the called routine.  */

      mark_set_resources (SET_DEST (x), res,
			  (include_delayed_effects
			   || GET_CODE (SET_SRC (x)) != CALL),
			  0);

      mark_set_resources (SET_SRC (x), res, 0, 0);
      return;

    case CLOBBER:
      mark_set_resources (XEXP (x, 0), res, 1, 0);
      return;

    case SEQUENCE:
      for (i = 0; i < XVECLEN (x, 0); i++)
	if (! (INSN_ANNULLED_BRANCH_P (XVECEXP (x, 0, 0))
	       && INSN_FROM_TARGET_P (XVECEXP (x, 0, i))))
	  mark_set_resources (XVECEXP (x, 0, i), res, 0,
			      include_delayed_effects);
      return;

    case POST_INC:
    case PRE_INC:
    case POST_DEC:
    case PRE_DEC:
      mark_set_resources (XEXP (x, 0), res, 1, 0);
      return;

    case ZERO_EXTRACT:
      mark_set_resources (XEXP (x, 0), res, in_dest, 0);
      mark_set_resources (XEXP (x, 1), res, 0, 0);
      mark_set_resources (XEXP (x, 2), res, 0, 0);
      return;
    case MEM:
      if (in_dest)
	{
	  res->memory = 1;
	  res->volatil = MEM_VOLATILE_P (x);
	}

      mark_set_resources (XEXP (x, 0), res, 0, 0);
      return;

    case REG:
      if (in_dest)
	for (i = 0; i < HARD_REGNO_NREGS (REGNO (x), GET_MODE (x)); i++)
	  SET_HARD_REG_BIT (res->regs, REGNO (x) + i);
      return;
    }

  /* Process each sub-expression and flag what it needs.  */
  format_ptr = GET_RTX_FORMAT (code);
  for (i = 0; i < GET_RTX_LENGTH (code); i++)
    switch (*format_ptr++)
      {
      case 'e':
	mark_set_resources (XEXP (x, i), res, in_dest, include_delayed_effects);
	break;

      case 'E':
	for (j = 0; j < XVECLEN (x, i); j++)
	  mark_set_resources (XVECEXP (x, i, j), res, in_dest,
			      include_delayed_effects);
	break;
      }
}
/* Return TRUE if this insn should stop the search for insns to fill delay
   slots.  LABELS_P indicates that labels should terminate the search.
   In all cases, jumps terminate the search.  */

static int
stop_search_p (insn, labels_p)
     rtx insn;
     int labels_p;
{
  if (insn == 0)
    return 1;

  switch (GET_CODE (insn))
    {
    case NOTE:
    case CALL_INSN:
      return 0;

    case CODE_LABEL:
      return labels_p;

    case JUMP_INSN:
    case BARRIER:
      return 1;

    case INSN:
      /* OK unless it contains a delay slot or is an `asm' insn of some type.
	 We don't know anything about these.  */
      return (GET_CODE (PATTERN (insn)) == SEQUENCE
	      || GET_CODE (PATTERN (insn)) == ASM_INPUT
	      || asm_noperands (PATTERN (insn)) >= 0);

    default:
      abort ();
    }
}
/* Return TRUE if any resources are marked in both RES1 and RES2 or if either
   resource set contains a volatile memory reference.  Otherwise, return
   FALSE.  */

static int
resource_conflicts_p (res1, res2)
     struct resources *res1, *res2;
{
  if ((res1->cc && res2->cc) || (res1->memory && res2->memory)
      || res1->volatil || res2->volatil)
    return 1;

#ifdef HARD_REG_SET
  return (res1->regs & res2->regs) != HARD_CONST (0);
#else
  {
    int i;

    for (i = 0; i < HARD_REG_SET_LONGS; i++)
      if ((res1->regs[i] & res2->regs[i]) != 0)
	return 1;
    return 0;
  }
#endif
}
/* Return TRUE if any resource marked in RES, a `struct resources', is
   referenced by INSN.  If INCLUDE_DELAYED_EFFECTS is set, return if the
   called routine is using those resources.

   We compute this by computing all the resources referenced by INSN and
   seeing if this conflicts with RES.  It might be faster to directly check
   ourselves, and this is the way it used to work, but it means duplicating
   a large block of complex code.  */

static int
insn_references_resource_p (insn, res, include_delayed_effects)
     register rtx insn;
     register struct resources *res;
     int include_delayed_effects;
{
  struct resources insn_res;

  CLEAR_RESOURCE (&insn_res);
  mark_referenced_resources (insn, &insn_res, include_delayed_effects);
  return resource_conflicts_p (&insn_res, res);
}
/* Return TRUE if INSN modifies resources that are marked in RES.
   INCLUDE_DELAYED_EFFECTS is set if the actions of that routine should be
   included.  CC0 is only modified if it is explicitly set; see comments
   in front of mark_set_resources for details.  */

static int
insn_sets_resource_p (insn, res, include_delayed_effects)
     register rtx insn;
     register struct resources *res;
     int include_delayed_effects;
{
  struct resources insn_sets;

  CLEAR_RESOURCE (&insn_sets);
  mark_set_resources (insn, &insn_sets, 0, include_delayed_effects);
  return resource_conflicts_p (&insn_sets, res);
}
/* Find a label at the end of the function or before a RETURN.  If there is
   none, make one.  */

static rtx
find_end_label ()
{
  rtx insn;

  /* If we found one previously, return it.  */
  if (end_of_function_label)
    return end_of_function_label;

  /* Otherwise, see if there is a label at the end of the function.  If there
     is, it must be that RETURN insns aren't needed, so that is our return
     label and we don't have to do anything else.  */

  insn = get_last_insn ();
  while (GET_CODE (insn) == NOTE
	 || (GET_CODE (insn) == INSN
	     && (GET_CODE (PATTERN (insn)) == USE
		 || GET_CODE (PATTERN (insn)) == CLOBBER)))
    insn = PREV_INSN (insn);

  /* When a target threads its epilogue we might already have a
     suitable return insn.  If so put a label before it for the
     end_of_function_label.  */
  if (GET_CODE (insn) == BARRIER
      && GET_CODE (PREV_INSN (insn)) == JUMP_INSN
      && GET_CODE (PATTERN (PREV_INSN (insn))) == RETURN)
    {
      rtx temp = PREV_INSN (PREV_INSN (insn));
      end_of_function_label = gen_label_rtx ();
      LABEL_NUSES (end_of_function_label) = 0;

      /* Put the label before any USE insns that may precede the RETURN insn.  */
      while (GET_CODE (temp) == USE)
	temp = PREV_INSN (temp);

      emit_label_after (end_of_function_label, temp);
    }
  else if (GET_CODE (insn) == CODE_LABEL)
    end_of_function_label = insn;
  else
    {
      /* Otherwise, make a new label and emit a RETURN and BARRIER,
	 if needed.  */
      end_of_function_label = gen_label_rtx ();
      LABEL_NUSES (end_of_function_label) = 0;
      emit_label (end_of_function_label);
#ifdef HAVE_return
      if (HAVE_return)
	{
	  /* The return we make may have delay slots too.  */
	  rtx insn = gen_return ();
	  insn = emit_jump_insn (insn);
	  emit_barrier ();

	  if (num_delay_slots (insn) > 0)
	    obstack_ptr_grow (&unfilled_slots_obstack, insn);
	}
#endif
    }

  /* Show one additional use for this label so it won't go away until
     we are done.  */
  ++LABEL_NUSES (end_of_function_label);

  return end_of_function_label;
}
/* Put INSN and LIST together in a SEQUENCE rtx of LENGTH, and replace
   the pattern of INSN with the SEQUENCE.

   Chain the insns so that NEXT_INSN of each insn in the sequence points to
   the next and NEXT_INSN of the last insn in the sequence points to
   the first insn after the sequence.  Similarly for PREV_INSN.  This makes
   it easier to scan all insns.

   Returns the SEQUENCE that replaces INSN.  */
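
/* Schematically (a sketch of the resulting links, not a verbatim dump),
   filling two slots of INSN yields

	prev -> SEQUENCE [ insn -> slot1 -> slot2 ] -> next

   where NEXT_INSN (slot2) is the first insn after the SEQUENCE and
   PREV_INSN (insn) is the insn before it.  */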
static rtx
emit_delay_sequence (insn, list, length, avail)
     rtx insn;
     rtx list;
     int length;
     int avail;
{
  register int i = 1;
  rtx li;
  int had_barrier = 0;

  /* Allocate the rtvec to hold the insns and the SEQUENCE.  */
  rtvec seqv = rtvec_alloc (length + 1);
  rtx seq = gen_rtx (SEQUENCE, VOIDmode, seqv);
  rtx seq_insn = make_insn_raw (seq);
  rtx first = get_insns ();
  rtx last = get_last_insn ();

  /* Make a copy of the insn having delay slots.  */
  rtx delay_insn = copy_rtx (insn);

  /* If INSN is followed by a BARRIER, delete the BARRIER since it will only
     confuse further processing.  Update LAST in case it was the last insn.
     We will put the BARRIER back in later.  */
  if (NEXT_INSN (insn) && GET_CODE (NEXT_INSN (insn)) == BARRIER)
    {
      delete_insn (NEXT_INSN (insn));
      last = get_last_insn ();
      had_barrier = 1;
    }
  /* Splice our SEQUENCE into the insn stream where INSN used to be.  */
  NEXT_INSN (seq_insn) = NEXT_INSN (insn);
  PREV_INSN (seq_insn) = PREV_INSN (insn);

  if (insn == last)
    set_new_first_and_last_insn (first, seq_insn);
  else
    PREV_INSN (NEXT_INSN (seq_insn)) = seq_insn;

  if (insn == first)
    set_new_first_and_last_insn (seq_insn, last);
  else
    NEXT_INSN (PREV_INSN (seq_insn)) = seq_insn;

  /* Build our SEQUENCE and rebuild the insn chain.  */
  XVECEXP (seq, 0, 0) = delay_insn;
  INSN_DELETED_P (delay_insn) = 0;
  PREV_INSN (delay_insn) = PREV_INSN (seq_insn);

  for (li = list; li; li = XEXP (li, 1), i++)
    {
      rtx tem = XEXP (li, 0);
      rtx note;

      /* Show that this copy of the insn isn't deleted.  */
      INSN_DELETED_P (tem) = 0;

      XVECEXP (seq, 0, i) = tem;
      PREV_INSN (tem) = XVECEXP (seq, 0, i - 1);
      NEXT_INSN (XVECEXP (seq, 0, i - 1)) = tem;

      /* Remove any REG_DEAD notes because we can't rely on them now
	 that the insn has been moved.  */
      for (note = REG_NOTES (tem); note; note = XEXP (note, 1))
	if (REG_NOTE_KIND (note) == REG_DEAD)
	  XEXP (note, 0) = const0_rtx;
    }

  NEXT_INSN (XVECEXP (seq, 0, length)) = NEXT_INSN (seq_insn);
  /* If the previous insn is a SEQUENCE, update the NEXT_INSN pointer on the
     last insn in that SEQUENCE to point to us.  Similarly for the first
     insn in the following insn if it is a SEQUENCE.  */

  if (PREV_INSN (seq_insn) && GET_CODE (PREV_INSN (seq_insn)) == INSN
      && GET_CODE (PATTERN (PREV_INSN (seq_insn))) == SEQUENCE)
    NEXT_INSN (XVECEXP (PATTERN (PREV_INSN (seq_insn)), 0,
			XVECLEN (PATTERN (PREV_INSN (seq_insn)), 0) - 1))
      = seq_insn;

  if (NEXT_INSN (seq_insn) && GET_CODE (NEXT_INSN (seq_insn)) == INSN
      && GET_CODE (PATTERN (NEXT_INSN (seq_insn))) == SEQUENCE)
    PREV_INSN (XVECEXP (PATTERN (NEXT_INSN (seq_insn)), 0, 0)) = seq_insn;

  /* If there used to be a BARRIER, put it back.  */
  if (had_barrier)
    emit_barrier_after (seq_insn);

  return seq_insn;
}
/* Add INSN to DELAY_LIST and return the head of the new list.  The list must
   be in the order in which the insns are to be executed.  */

static rtx
add_to_delay_list (insn, delay_list)
     rtx insn;
     rtx delay_list;
{
  /* If we have an empty list, just make a new list element.  If
     INSN has its block number recorded, clear it since we may
     be moving the insn to a new block.  */

  if (delay_list == 0)
    {
      struct target_info *tinfo;

      for (tinfo = target_hash_table[INSN_UID (insn) % TARGET_HASH_PRIME];
	   tinfo; tinfo = tinfo->next)
	if (tinfo->uid == INSN_UID (insn))
	  break;

      if (tinfo)
	tinfo->block = -1;

      return gen_rtx (INSN_LIST, VOIDmode, insn, NULL_RTX);
    }

  /* Otherwise this must be an INSN_LIST.  Add INSN to the end of the
     list.  */
  XEXP (delay_list, 1) = add_to_delay_list (insn, XEXP (delay_list, 1));

  return delay_list;
}
/* Delete INSN from the delay slot of the insn that it is in.  This may
   produce an insn without anything in its delay slots.  */

static void
delete_from_delay_slot (insn)
     rtx insn;
{
  rtx trial, seq_insn, seq, prev;
  rtx delay_list = 0;
  int i;

  /* We first must find the insn containing the SEQUENCE with INSN in its
     delay slot.  Do this by finding an insn, TRIAL, where
     PREV_INSN (NEXT_INSN (TRIAL)) != TRIAL.  */

  for (trial = insn;
       PREV_INSN (NEXT_INSN (trial)) == trial;
       trial = NEXT_INSN (trial))
    ;

  seq_insn = PREV_INSN (NEXT_INSN (trial));
  seq = PATTERN (seq_insn);

  /* Create a delay list consisting of all the insns other than the one
     we are deleting (unless we were the only one).  */
  if (XVECLEN (seq, 0) > 2)
    for (i = 1; i < XVECLEN (seq, 0); i++)
      if (XVECEXP (seq, 0, i) != insn)
	delay_list = add_to_delay_list (XVECEXP (seq, 0, i), delay_list);

  /* Delete the old SEQUENCE, re-emit the insn that used to have the delay
     list, and rebuild the delay list if non-empty.  */
  prev = PREV_INSN (seq_insn);
  trial = XVECEXP (seq, 0, 0);
  delete_insn (seq_insn);
  add_insn_after (trial, prev);

  if (GET_CODE (trial) == JUMP_INSN
      && (simplejump_p (trial) || GET_CODE (PATTERN (trial)) == RETURN))
    emit_barrier_after (trial);

  /* If there are any delay insns, re-emit them.  Otherwise clear the
     annul flag.  */
  if (delay_list)
    trial = emit_delay_sequence (trial, delay_list, XVECLEN (seq, 0) - 2, 0);
  else
    INSN_ANNULLED_BRANCH_P (trial) = 0;

  INSN_FROM_TARGET_P (insn) = 0;

  /* Show we need to fill this insn again.  */
  obstack_ptr_grow (&unfilled_slots_obstack, trial);
}
/* Delete INSN, a JUMP_INSN.  If it is a conditional jump, we must track down
   the insn that sets CC0 for it and delete it too.  */

static void
delete_scheduled_jump (insn)
     rtx insn;
{
  /* Delete the insn that sets cc0 for us.  On machines without cc0, we could
     delete the insn that sets the condition code, but it is hard to find it.
     Since this case is rare anyway, don't bother trying; there would likely
     be other insns that became dead anyway, which we wouldn't know to
     delete.  */

#ifdef HAVE_cc0
  if (reg_mentioned_p (cc0_rtx, insn))
    {
      rtx note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);

      /* If a reg-note was found, it points to an insn to set CC0.  This
	 insn is in the delay list of some other insn.  So delete it from
	 the delay list it was in.  */
      if (note)
	{
	  if (! FIND_REG_INC_NOTE (XEXP (note, 0), NULL_RTX)
	      && sets_cc0_p (PATTERN (XEXP (note, 0))) == 1)
	    delete_from_delay_slot (XEXP (note, 0));
	}
      else
	{
	  /* The insn setting CC0 is our previous insn, but it may be in
	     a delay slot.  It will be the last insn in the delay slot, if
	     it is.  */
	  rtx trial = previous_insn (insn);
	  if (GET_CODE (trial) == NOTE)
	    trial = prev_nonnote_insn (trial);
	  if (sets_cc0_p (PATTERN (trial)) != 1
	      || FIND_REG_INC_NOTE (trial, 0))
	    return;
	  if (PREV_INSN (NEXT_INSN (trial)) == trial)
	    delete_insn (trial);
	  else
	    delete_from_delay_slot (trial);
	}
    }
#endif

  delete_insn (insn);
}
/* Counters for delay-slot filling.  */

#define NUM_REORG_FUNCTIONS 2
#define MAX_DELAY_HISTOGRAM 3
#define MAX_REORG_PASSES 2

static int num_insns_needing_delays[NUM_REORG_FUNCTIONS][MAX_REORG_PASSES];

static int num_filled_delays[NUM_REORG_FUNCTIONS][MAX_DELAY_HISTOGRAM+1][MAX_REORG_PASSES];

static int reorg_pass_number;

static void
note_delay_statistics (slots_filled, index)
     int slots_filled, index;
{
  num_insns_needing_delays[index][reorg_pass_number]++;
  if (slots_filled > MAX_DELAY_HISTOGRAM)
    slots_filled = MAX_DELAY_HISTOGRAM;
  num_filled_delays[index][slots_filled][reorg_pass_number]++;
}
#if defined(ANNUL_IFFALSE_SLOTS) || defined(ANNUL_IFTRUE_SLOTS)

/* Optimize the following cases:

   1.  When a conditional branch skips over only one instruction,
       use an annulling branch and put that insn in the delay slot.
       Use either a branch that annuls when the condition is true or
       invert the test with a branch that annuls when the condition is
       false.  This saves insns, since otherwise we must copy an insn
       into the delay slot.

	(orig)		 (skip)		(otherwise)
	Bcc.n L1	Bcc',a L1	Bcc,a L1'
	insn		insn		insn2
      L1:	      L1:	      L1:
	insn2		insn2		insn2
	insn3		insn3	      L1':
					insn3

   2.  When a conditional branch skips over only one instruction,
       and after that, it unconditionally branches somewhere else,
       perform the similar optimization.  This saves executing the
       second branch in the case where the inverted condition is true.

   INSN is a JUMP_INSN.

   This should be expanded to skip over N insns, where N is the number
   of delay slots required.  */
static rtx
optimize_skip (insn)
     register rtx insn;
{
  register rtx trial = next_nonnote_insn (insn);
  rtx next_trial = next_active_insn (trial);
  rtx delay_list = 0;
  rtx target_label;
  int flags;

  flags = get_jump_flags (insn, JUMP_LABEL (insn));

  if (trial == 0
      || GET_CODE (trial) != INSN
      || GET_CODE (PATTERN (trial)) == SEQUENCE
      || recog_memoized (trial) < 0
      || (! eligible_for_annul_false (insn, 0, trial, flags)
	  && ! eligible_for_annul_true (insn, 0, trial, flags)))
    return 0;

  /* There are two cases where we are just executing one insn (we assume
     here that a branch requires only one insn; this should be generalized
     at some point):  Where the branch goes around a single insn or where
     we have one insn followed by a branch to the same label we branch to.
     In both of these cases, inverting the jump and annulling the delay
     slot give the same effect in fewer insns.  */
  if ((next_trial == next_active_insn (JUMP_LABEL (insn)))
      || (next_trial != 0
	  && GET_CODE (next_trial) == JUMP_INSN
	  && JUMP_LABEL (insn) == JUMP_LABEL (next_trial)
	  && (simplejump_p (next_trial)
	      || GET_CODE (PATTERN (next_trial)) == RETURN)))
    {
      if (eligible_for_annul_false (insn, 0, trial, flags))
	{
	  if (invert_jump (insn, JUMP_LABEL (insn)))
	    INSN_FROM_TARGET_P (trial) = 1;
	  else if (! eligible_for_annul_true (insn, 0, trial, flags))
	    return 0;
	}

      delay_list = add_to_delay_list (trial, NULL_RTX);
      next_trial = next_active_insn (trial);
      update_block (trial, trial);
      delete_insn (trial);
      /* Also, if we are targeting an unconditional
	 branch, thread our jump to the target of that branch.  Don't
	 change this into a RETURN here, because it may not accept what
	 we have in the delay slot.  We'll fix this up later.  */
      if (next_trial && GET_CODE (next_trial) == JUMP_INSN
	  && (simplejump_p (next_trial)
	      || GET_CODE (PATTERN (next_trial)) == RETURN))
	{
	  target_label = JUMP_LABEL (next_trial);
	  if (target_label == 0)
	    target_label = find_end_label ();

	  /* Recompute the flags based on TARGET_LABEL since threading
	     the jump to TARGET_LABEL may change the direction of the
	     jump (which may change the circumstances in which the
	     delay slot is nullified).  */
	  flags = get_jump_flags (insn, target_label);
	  if (eligible_for_annul_true (insn, 0, trial, flags))
	    reorg_redirect_jump (insn, target_label);
	}

      INSN_ANNULLED_BRANCH_P (insn) = 1;
    }

  return delay_list;
}
#endif
/* Encode and return branch direction and prediction information for
   INSN assuming it will jump to LABEL.

   Unconditional branches return no direction information and
   are predicted as very likely taken.  */

static int
get_jump_flags (insn, label)
     rtx insn, label;
{
  int flags;

  /* get_jump_flags can be passed any insn with delay slots, these may
     be INSNs, CALL_INSNs, or JUMP_INSNs.  Only JUMP_INSNs have branch
     direction information, and only if they are conditional jumps.

     If LABEL is zero, then there is no way to determine the branch
     direction.  */
  if (GET_CODE (insn) == JUMP_INSN
      && condjump_p (insn)
      && INSN_UID (insn) <= max_uid
      && label != 0
      && INSN_UID (label) <= max_uid)
    flags
      = (uid_to_ruid[INSN_UID (label)] > uid_to_ruid[INSN_UID (insn)])
	? ATTR_FLAG_forward : ATTR_FLAG_backward;
  /* No valid direction information.  */
  else
    flags = 0;
  /* If insn is a conditional branch, call mostly_true_jump to
     determine the branch prediction.

     Unconditional branches are predicted as very likely taken.  */
  if (GET_CODE (insn) == JUMP_INSN
      && condjump_p (insn))
    {
      int prediction;

      prediction = mostly_true_jump (insn, get_branch_condition (insn, label));
      switch (prediction)
	{
	case 2:
	  flags |= (ATTR_FLAG_very_likely | ATTR_FLAG_likely);
	  break;
	case 1:
	  flags |= ATTR_FLAG_likely;
	  break;
	case 0:
	  flags |= ATTR_FLAG_unlikely;
	  break;
	case -1:
	  flags |= (ATTR_FLAG_very_unlikely | ATTR_FLAG_unlikely);
	  break;

	default:
	  abort ();
	}
    }
  else
    flags |= (ATTR_FLAG_very_likely | ATTR_FLAG_likely);

  return flags;
}
/* Return 1 if INSN is a destination that will be branched to rarely (the
   return point of a function); return 2 if DEST will be branched to very
   rarely (a call to a function that doesn't return).  Otherwise,
   return 0.  */

static int
rare_destination (insn)
     rtx insn;
{
  int jump_count = 0;
  rtx next;

  for (; insn; insn = next)
    {
      if (GET_CODE (insn) == INSN && GET_CODE (PATTERN (insn)) == SEQUENCE)
	insn = XVECEXP (PATTERN (insn), 0, 0);

      next = NEXT_INSN (insn);

      switch (GET_CODE (insn))
	{
	case CODE_LABEL:
	  return 0;

	case BARRIER:
	  /* A BARRIER can either be after a JUMP_INSN or a CALL_INSN.  We
	     don't scan past JUMP_INSNs, so any barrier we find here must
	     have been after a CALL_INSN and hence mean the call doesn't
	     return.  */
	  return 2;

	case JUMP_INSN:
	  if (GET_CODE (PATTERN (insn)) == RETURN)
	    return 1;
	  else if (simplejump_p (insn)
		   && jump_count++ < 10)
	    next = JUMP_LABEL (insn);
	  else
	    return 0;
	}
    }

  /* If we got here it means we hit the end of the function.  So this
     is an unlikely destination.  */

  return 1;
}
/* Return truth value of the statement that this branch
   is mostly taken.  If we think that the branch is extremely likely
   to be taken, we return 2.  If the branch is slightly more likely to be
   taken, return 1.  If the branch is slightly less likely to be taken,
   return 0 and if the branch is highly unlikely to be taken, return -1.

   CONDITION, if non-zero, is the condition that JUMP_INSN is testing.  */

static int
mostly_true_jump (jump_insn, condition)
     rtx jump_insn, condition;
{
  rtx target_label = JUMP_LABEL (jump_insn);
  rtx insn;
  int rare_dest = rare_destination (target_label);
  int rare_fallthrough = rare_destination (NEXT_INSN (jump_insn));
  /* If this is a branch outside a loop, it is highly unlikely.  */
  if (GET_CODE (PATTERN (jump_insn)) == SET
      && GET_CODE (SET_SRC (PATTERN (jump_insn))) == IF_THEN_ELSE
      && ((GET_CODE (XEXP (SET_SRC (PATTERN (jump_insn)), 1)) == LABEL_REF
	   && LABEL_OUTSIDE_LOOP_P (XEXP (SET_SRC (PATTERN (jump_insn)), 1)))
	  || (GET_CODE (XEXP (SET_SRC (PATTERN (jump_insn)), 2)) == LABEL_REF
	      && LABEL_OUTSIDE_LOOP_P (XEXP (SET_SRC (PATTERN (jump_insn)), 2)))))
    return -1;

  if (target_label)
    {
      /* If this is the test of a loop, it is very likely true.  We scan
	 backwards from the target label.  If we find a NOTE_INSN_LOOP_BEG
	 before the next real insn, we assume the branch is to the top of
	 the loop.  */
      for (insn = PREV_INSN (target_label);
	   insn && GET_CODE (insn) == NOTE;
	   insn = PREV_INSN (insn))
	if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG)
	  return 2;

      /* If this is a jump to the test of a loop, it is likely true.  We scan
	 forwards from the target label.  If we find a NOTE_INSN_LOOP_VTOP
	 before the next real insn, we assume the branch is to the loop branch
	 test.  */
      for (insn = NEXT_INSN (target_label);
	   insn && GET_CODE (insn) == NOTE;
	   insn = NEXT_INSN (insn))
	if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_VTOP)
	  return 1;
    }
  /* Look at the relative rarities of the fallthrough and destination.  If
     they differ, we can predict the branch that way.  */

  switch (rare_fallthrough - rare_dest)
    {
    case -2:
      return -1;
    case -1:
      return 0;
    case 0:
      break;
    case 1:
      return 1;
    case 2:
      return 2;
    }

  /* If we couldn't figure out what this jump was, assume it won't be
     taken.  This should be rare.  */
  if (condition == 0)
    return 0;

  /* EQ tests are usually false and NE tests are usually true.  Also,
     most quantities are positive, so we can make the appropriate guesses
     about signed comparisons against zero.  */
  switch (GET_CODE (condition))
    {
    case CONST_INT:
      /* Unconditional branch.  */
      return 1;
    case EQ:
      return 0;
    case NE:
      return 1;
    case LE:
    case LT:
      if (XEXP (condition, 1) == const0_rtx)
	return 0;
      break;
    case GE:
    case GT:
      if (XEXP (condition, 1) == const0_rtx)
	return 1;
      break;
    }

  /* Predict backward branches usually take, forward branches usually not.
     If we don't know whether this is forward or backward, assume the branch
     will be taken, since most are.  */
  return (target_label == 0 || INSN_UID (jump_insn) > max_uid
	  || INSN_UID (target_label) > max_uid
	  || (uid_to_ruid[INSN_UID (jump_insn)]
	      > uid_to_ruid[INSN_UID (target_label)]));
}
/* Return the condition under which INSN will branch to TARGET.  If TARGET
   is zero, return the condition under which INSN will return.  If INSN is
   an unconditional branch, return const_true_rtx.  If INSN isn't a simple
   type of jump, or it doesn't go to TARGET, return 0.  */
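
/* The jumps recognized below have one of these standard RTL shapes
   (shown for orientation):

     (set (pc) (label_ref TARGET))			unconditional
     (set (pc) (if_then_else (COND)
			     (label_ref TARGET)
			     (pc)))			branch if COND
     (set (pc) (if_then_else (COND)
			     (pc)
			     (label_ref TARGET)))	branch if !COND

   In the last form the condition returned is COND reversed.  */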
static rtx
get_branch_condition (insn, target)
     rtx insn;
     rtx target;
{
  rtx pat = PATTERN (insn);
  rtx src;

  if (GET_CODE (pat) == RETURN)
    return target == 0 ? const_true_rtx : 0;

  else if (GET_CODE (pat) != SET || SET_DEST (pat) != pc_rtx)
    return 0;

  src = SET_SRC (pat);
  if (GET_CODE (src) == LABEL_REF && XEXP (src, 0) == target)
    return const_true_rtx;

  else if (GET_CODE (src) == IF_THEN_ELSE
	   && ((target == 0 && GET_CODE (XEXP (src, 1)) == RETURN)
	       || (GET_CODE (XEXP (src, 1)) == LABEL_REF
		   && XEXP (XEXP (src, 1), 0) == target))
	   && XEXP (src, 2) == pc_rtx)
    return XEXP (src, 0);

  else if (GET_CODE (src) == IF_THEN_ELSE
	   && ((target == 0 && GET_CODE (XEXP (src, 2)) == RETURN)
	       || (GET_CODE (XEXP (src, 2)) == LABEL_REF
		   && XEXP (XEXP (src, 2), 0) == target))
	   && XEXP (src, 1) == pc_rtx)
    return gen_rtx (reverse_condition (GET_CODE (XEXP (src, 0))),
		    GET_MODE (XEXP (src, 0)),
		    XEXP (XEXP (src, 0), 0), XEXP (XEXP (src, 0), 1));

  return 0;
}
/* Return non-zero if CONDITION is more strict than the condition of
   INSN, i.e., if INSN will always branch if CONDITION is true.  */
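
/* For example, with identical operands, (eq x 0) dominates (le x 0):
   whenever the EQ branch would be taken, the LE branch is taken too.
   After checking that the operands match, the final test is delegated
   to comparison_dominates_p on the two comparison codes.  */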
static int
condition_dominates_p (condition, insn)
     rtx condition;
     rtx insn;
{
  rtx other_condition = get_branch_condition (insn, JUMP_LABEL (insn));
  enum rtx_code code = GET_CODE (condition);
  enum rtx_code other_code;

  if (rtx_equal_p (condition, other_condition)
      || other_condition == const_true_rtx)
    return 1;

  else if (condition == const_true_rtx || other_condition == 0)
    return 0;

  other_code = GET_CODE (other_condition);
  if (GET_RTX_LENGTH (code) != 2 || GET_RTX_LENGTH (other_code) != 2
      || ! rtx_equal_p (XEXP (condition, 0), XEXP (other_condition, 0))
      || ! rtx_equal_p (XEXP (condition, 1), XEXP (other_condition, 1)))
    return 0;

  return comparison_dominates_p (code, other_code);
}
/* Return non-zero if redirecting JUMP to NEWLABEL does not invalidate
   any insns already in the delay slot of JUMP.  */

static int
redirect_with_delay_slots_safe_p (jump, newlabel, seq)
     rtx jump, newlabel, seq;
{
  int flags, i;
  rtx pat = PATTERN (seq);

  /* Make sure all the delay slots of this jump would still
     be valid after threading the jump.  If they are still
     valid, then return non-zero.  */

  flags = get_jump_flags (jump, newlabel);
  for (i = 1; i < XVECLEN (pat, 0); i++)
    if (! (
#ifdef ANNUL_IFFALSE_SLOTS
	   (INSN_ANNULLED_BRANCH_P (jump)
	    && INSN_FROM_TARGET_P (XVECEXP (pat, 0, i)))
	   ? eligible_for_annul_false (jump, i - 1,
				       XVECEXP (pat, 0, i), flags) :
#endif
#ifdef ANNUL_IFTRUE_SLOTS
	   (INSN_ANNULLED_BRANCH_P (jump)
	    && ! INSN_FROM_TARGET_P (XVECEXP (pat, 0, i)))
	   ? eligible_for_annul_true (jump, i - 1,
				      XVECEXP (pat, 0, i), flags) :
#endif
	   eligible_for_delay (jump, i - 1, XVECEXP (pat, 0, i), flags)))
      break;

  return (i == XVECLEN (pat, 0));
}
/* INSN branches to an insn whose pattern SEQ is a SEQUENCE.  Given that
   the condition tested by INSN is CONDITION and the resources shown in
   OTHER_NEEDED are needed after INSN, see whether INSN can take all the insns
   from SEQ's delay list, in addition to whatever insns it may execute
   (in DELAY_LIST).  SETS and NEEDED denote resources already set and
   needed while searching for delay slot insns.  Return the concatenated
   delay list if possible, otherwise, return 0.

   SLOTS_TO_FILL is the total number of slots required by INSN, and
   PSLOTS_FILLED points to the number filled so far (also the number of
   insns in DELAY_LIST).  It is updated with the number that have been
   filled from the SEQUENCE, if any.

   PANNUL_P points to a non-zero value if we already know that we need
   to annul INSN.  If this routine determines that annulling is needed,
   it may set that value non-zero.

   PNEW_THREAD points to a location that is to receive the place at which
   execution should continue.  */
static rtx
steal_delay_list_from_target (insn, condition, seq, delay_list,
			      sets, needed, other_needed,
			      slots_to_fill, pslots_filled, pannul_p,
			      pnew_thread)
     rtx insn, condition;
     rtx seq;
     rtx delay_list;
     struct resources *sets, *needed, *other_needed;
     int slots_to_fill;
     int *pslots_filled;
     int *pannul_p;
     rtx *pnew_thread;
{
  rtx temp;
  int slots_remaining = slots_to_fill - *pslots_filled;
  int total_slots_filled = *pslots_filled;
  rtx new_delay_list = 0;
  int must_annul = *pannul_p;
  int i;

  /* We can't do anything if there are more delay slots in SEQ than we
     can handle, or if we don't know that it will be a taken branch.

     We know that it will be a taken branch if it is either an unconditional
     branch or a conditional branch with a stricter branch condition.  */

  if (XVECLEN (seq, 0) - 1 > slots_remaining
      || ! condition_dominates_p (condition, XVECEXP (seq, 0, 0)))
    return delay_list;
  for (i = 1; i < XVECLEN (seq, 0); i++)
    {
      rtx trial = XVECEXP (seq, 0, i);
      int flags;

      if (insn_references_resource_p (trial, sets, 0)
	  || insn_sets_resource_p (trial, needed, 0)
	  || insn_sets_resource_p (trial, sets, 0)
#ifdef HAVE_cc0
	  /* If TRIAL sets CC0, we can't copy it, so we can't steal this
	     delay slot.  */
	  || find_reg_note (trial, REG_CC_USER, NULL_RTX)
#endif
	  /* If TRIAL is from the fallthrough code of an annulled branch insn
	     in SEQ, we cannot use it.  */
	  || (INSN_ANNULLED_BRANCH_P (XVECEXP (seq, 0, 0))
	      && ! INSN_FROM_TARGET_P (trial)))
	return delay_list;

      /* If this insn was already done (usually in a previous delay slot),
	 pretend we put it in our delay slot.  */
      if (redundant_insn_p (trial, insn, new_delay_list))
	continue;

      /* We will end up re-vectoring this branch, so compute flags
	 based on jumping to the new label.  */
      flags = get_jump_flags (insn, JUMP_LABEL (XVECEXP (seq, 0, 0)));

      if (! must_annul
	  && ((condition == const_true_rtx
	       || (! insn_sets_resource_p (trial, other_needed, 0)
		   && ! may_trap_p (PATTERN (trial)))))
	  ? eligible_for_delay (insn, total_slots_filled, trial, flags)
	  : (must_annul = 1,
	     eligible_for_annul_false (insn, total_slots_filled, trial, flags)))
	{
	  temp = copy_rtx (trial);
	  INSN_FROM_TARGET_P (temp) = 1;
	  new_delay_list = add_to_delay_list (temp, new_delay_list);
	  total_slots_filled++;

	  if (--slots_remaining == 0)
	    break;
	}
      else
	return delay_list;
    }
  /* Show the place to which we will be branching.  */
  *pnew_thread = next_active_insn (JUMP_LABEL (XVECEXP (seq, 0, 0)));

  /* Add any new insns to the delay list and update the count of the
     number of slots filled.  */
  *pslots_filled = total_slots_filled;
  *pannul_p = must_annul;

  if (delay_list == 0)
    return new_delay_list;

  for (temp = new_delay_list; temp; temp = XEXP (temp, 1))
    delay_list = add_to_delay_list (XEXP (temp, 0), delay_list);

  return delay_list;
}
/* Similar to steal_delay_list_from_target except that SEQ is on the
   fallthrough path of INSN.  Here we only do something if the delay insn
   of SEQ is an unconditional branch.  In that case we steal its delay slot
   for INSN since unconditional branches are much easier to fill.  */

static rtx
steal_delay_list_from_fallthrough (insn, condition, seq,
				   delay_list, sets, needed, other_needed,
				   slots_to_fill, pslots_filled, pannul_p)
     rtx insn, condition;
     rtx seq;
     rtx delay_list;
     struct resources *sets, *needed, *other_needed;
     int slots_to_fill;
     int *pslots_filled;
     int *pannul_p;
{
  int i;
  int flags;

  flags = get_jump_flags (insn, JUMP_LABEL (insn));

  /* We can't do anything if SEQ's delay insn isn't an
     unconditional branch.  */

  if (! simplejump_p (XVECEXP (seq, 0, 0))
      && GET_CODE (PATTERN (XVECEXP (seq, 0, 0))) != RETURN)
    return delay_list;
  for (i = 1; i < XVECLEN (seq, 0); i++)
    {
      rtx trial = XVECEXP (seq, 0, i);

      /* If TRIAL sets CC0, stealing it will move it too far from the use
	 of CC0.  */
      if (insn_references_resource_p (trial, sets, 0)
	  || insn_sets_resource_p (trial, needed, 0)
	  || insn_sets_resource_p (trial, sets, 0)
#ifdef HAVE_cc0
	  || sets_cc0_p (PATTERN (trial))
#endif
	  )
	break;

      /* If this insn was already done, we don't need it.  */
      if (redundant_insn_p (trial, insn, delay_list))
	{
	  delete_from_delay_slot (trial);
	  continue;
	}

      if (! *pannul_p
	  && ((condition == const_true_rtx
	       || (! insn_sets_resource_p (trial, other_needed, 0)
		   && ! may_trap_p (PATTERN (trial)))))
	  ? eligible_for_delay (insn, *pslots_filled, trial, flags)
	  : (*pannul_p = 1,
	     eligible_for_annul_true (insn, *pslots_filled, trial, flags)))
	{
	  delete_from_delay_slot (trial);
	  delay_list = add_to_delay_list (trial, delay_list);

	  if (++(*pslots_filled) == slots_to_fill)
	    break;
	}
      else
	break;
    }

  return delay_list;
}
/* Try merging insns starting at THREAD which match exactly the insns in
   INSN's delay list.

   If all insns were matched and the insn was previously annulling, the
   annul bit will be cleared.

   For each insn that is merged, if the branch is or will be non-annulling,
   we delete the merged insn.  */

static void
try_merge_delay_insns (insn, thread)
     rtx insn, thread;
{
  rtx trial, next_trial;
  rtx delay_insn = XVECEXP (PATTERN (insn), 0, 0);
  int annul_p = INSN_ANNULLED_BRANCH_P (delay_insn);
  int slot_number = 1;
  int num_slots = XVECLEN (PATTERN (insn), 0);
  rtx next_to_match = XVECEXP (PATTERN (insn), 0, slot_number);
  struct resources set, needed;
  rtx merged_insns = 0;
  int i;
  int flags;

  flags = get_jump_flags (delay_insn, JUMP_LABEL (delay_insn));
  CLEAR_RESOURCE (&needed);
  CLEAR_RESOURCE (&set);

  /* If this is not an annulling branch, take into account anything needed in
     NEXT_TO_MATCH.  This prevents two increments from being incorrectly
     folded into one.  If we are annulling, this would be the correct
     thing to do.  (The alternative, looking at things set in NEXT_TO_MATCH
     will essentially disable this optimization.  This method is somewhat of
     a kludge, but I don't see a better way.)  */
  if (! annul_p)
    mark_referenced_resources (next_to_match, &needed, 1);

  for (trial = thread; !stop_search_p (trial, 1); trial = next_trial)
    {
      rtx pat = PATTERN (trial);

      next_trial = next_nonnote_insn (trial);

      /* TRIAL must be a CALL_INSN or INSN.  Skip USE and CLOBBER.  */
      if (GET_CODE (trial) == INSN
	  && (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER))
	continue;

      if (GET_CODE (next_to_match) == GET_CODE (trial)
#ifdef HAVE_cc0
	  /* We can't share an insn that sets cc0.  */
	  && ! sets_cc0_p (pat)
#endif
	  && ! insn_references_resource_p (trial, &set, 1)
	  && ! insn_sets_resource_p (trial, &set, 1)
	  && ! insn_sets_resource_p (trial, &needed, 1)
	  && (trial = try_split (pat, trial, 0)) != 0
	  && rtx_equal_p (PATTERN (next_to_match), PATTERN (trial))
	  /* Have to test this condition if annul condition is different
	     from (and less restrictive than) non-annulling one.  */
	  && eligible_for_delay (delay_insn, slot_number - 1, trial, flags))
	{
	  next_trial = next_nonnote_insn (trial);

	  if (! annul_p)
	    {
	      update_block (trial, thread);
	      delete_insn (trial);
	      INSN_FROM_TARGET_P (next_to_match) = 0;
	    }
	  else
	    merged_insns = gen_rtx (INSN_LIST, VOIDmode, trial, merged_insns);

	  if (++slot_number == num_slots)
	    break;

	  next_to_match = XVECEXP (PATTERN (insn), 0, slot_number);
	  if (! annul_p)
	    mark_referenced_resources (next_to_match, &needed, 1);
	}

      mark_set_resources (trial, &set, 0, 1);
      mark_referenced_resources (trial, &needed, 1);
    }
  /* See if we stopped on a filled insn.  If we did, try to see if its
     delay slots match.  */
  if (slot_number != num_slots
      && trial && GET_CODE (trial) == INSN
      && GET_CODE (PATTERN (trial)) == SEQUENCE
      && ! INSN_ANNULLED_BRANCH_P (XVECEXP (PATTERN (trial), 0, 0)))
    {
      rtx pat = PATTERN (trial);

      for (i = 1; i < XVECLEN (pat, 0); i++)
	{
	  rtx dtrial = XVECEXP (pat, 0, i);

	  if (! insn_references_resource_p (dtrial, &set, 1)
	      && ! insn_sets_resource_p (dtrial, &set, 1)
	      && ! insn_sets_resource_p (dtrial, &needed, 1)
#ifdef HAVE_cc0
	      && ! sets_cc0_p (PATTERN (dtrial))
#endif
	      && rtx_equal_p (PATTERN (next_to_match), PATTERN (dtrial))
	      && eligible_for_delay (delay_insn, slot_number - 1, dtrial, flags))
	    {
	      if (! annul_p)
		{
		  update_block (dtrial, thread);
		  delete_from_delay_slot (dtrial);
		  INSN_FROM_TARGET_P (next_to_match) = 0;
		}
	      else
		merged_insns = gen_rtx (INSN_LIST, SImode, dtrial,
					merged_insns);

	      if (++slot_number == num_slots)
		break;

	      next_to_match = XVECEXP (PATTERN (insn), 0, slot_number);
	    }
	}
    }
  /* If all insns in the delay slot have been matched and we were previously
     annulling the branch, we need not annul it any more.  In that case
     delete all the merged insns.  Also clear the INSN_FROM_TARGET_P bit of
     each insn in the delay list so that we know that it isn't only being
     used at the target.  */
  if (next_to_match == 0 && annul_p)
    {
      for (; merged_insns; merged_insns = XEXP (merged_insns, 1))
	{
	  if (GET_MODE (merged_insns) == SImode)
	    {
	      update_block (XEXP (merged_insns, 0), thread);
	      delete_from_delay_slot (XEXP (merged_insns, 0));
	    }
	  else
	    {
	      update_block (XEXP (merged_insns, 0), thread);
	      delete_insn (XEXP (merged_insns, 0));
	    }
	}

      INSN_ANNULLED_BRANCH_P (delay_insn) = 0;

      for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
	INSN_FROM_TARGET_P (XVECEXP (PATTERN (insn), 0, i)) = 0;
    }
}
/* See if INSN is redundant with an insn in front of TARGET.  Often this
   is called when INSN is a candidate for a delay slot of TARGET.
   DELAY_LIST are insns that will be placed in delay slots of TARGET in front
   of INSN.  Often INSN will be redundant with an insn in a delay slot of
   some previous insn.  This happens when we have a series of branches to the
   same label; in that case the first insn at the target might want to go
   into each of the delay slots.

   If we are not careful, this routine can take up a significant fraction
   of the total compilation time (4%), but only wins rarely.  Hence we
   speed this routine up by making two passes.  The first pass goes back
   until it hits a label and sees if it finds an insn with an identical
   pattern.  Only in this (relatively rare) event does it check for
   data conflicts.

   We do not split insns we encounter.  This could cause us not to find a
   redundant insn, but the cost of splitting seems greater than the possible
   gain in rare cases.  */
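
/* For example (schematic): several branches may jump to the same label,
   each having had the first insn at that label copied into its delay
   slot.  When control reaches the label through one of those branches,
   that insn has already been executed, so a candidate identical to it
   is redundant and need not occupy another slot.  */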
static int
redundant_insn_p (insn, target, delay_list)
     rtx insn;
     rtx target;
     rtx delay_list;
{
  rtx target_main = target;
  rtx ipat = PATTERN (insn);
  rtx trial, pat;
  struct resources needed, set;
  int i;

  /* Scan backwards looking for a match.  */
  for (trial = PREV_INSN (target); trial; trial = PREV_INSN (trial))
    {
      if (GET_CODE (trial) == CODE_LABEL)
	return 0;

      if (GET_RTX_CLASS (GET_CODE (trial)) != 'i')
	continue;

      pat = PATTERN (trial);
      if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
	continue;

      if (GET_CODE (pat) == SEQUENCE)
	{
	  /* Stop for a CALL and its delay slots because it is difficult to
	     track its resource needs correctly.  */
	  if (GET_CODE (XVECEXP (pat, 0, 0)) == CALL_INSN)
	    return 0;

	  /* Stop for an INSN or JUMP_INSN with delayed effects and its delay
	     slots because it is difficult to track its resource needs
	     correctly.  */

#ifdef INSN_SETS_ARE_DELAYED
	  if (INSN_SETS_ARE_DELAYED (XVECEXP (pat, 0, 0)))
	    return 0;
#endif

#ifdef INSN_REFERENCES_ARE_DELAYED
	  if (INSN_REFERENCES_ARE_DELAYED (XVECEXP (pat, 0, 0)))
	    return 0;
#endif

	  /* See if any of the insns in the delay slot match, updating
	     resource requirements as we go.  */
	  for (i = XVECLEN (pat, 0) - 1; i > 0; i--)
	    if (GET_CODE (XVECEXP (pat, 0, i)) == GET_CODE (insn)
		&& rtx_equal_p (PATTERN (XVECEXP (pat, 0, i)), ipat))
	      break;

	  /* If found a match, exit this loop early.  */
	  if (i > 0)
	    break;
	}

      else if (GET_CODE (trial) == GET_CODE (insn) && rtx_equal_p (pat, ipat))
	break;
    }
  /* If we didn't find an insn that matches, return 0.  */
  if (trial == 0)
    return 0;

  /* See what resources this insn sets and needs.  If they overlap, or
     if this insn references CC0, it can't be redundant.  */

  CLEAR_RESOURCE (&needed);
  CLEAR_RESOURCE (&set);
  mark_set_resources (insn, &set, 0, 1);
  mark_referenced_resources (insn, &needed, 1);

  /* If TARGET is a SEQUENCE, get the main insn.  */
  if (GET_CODE (target) == INSN && GET_CODE (PATTERN (target)) == SEQUENCE)
    target_main = XVECEXP (PATTERN (target), 0, 0);

  if (resource_conflicts_p (&needed, &set)
#ifdef HAVE_cc0
      || reg_mentioned_p (cc0_rtx, ipat)
#endif
      /* The insn requiring the delay may not set anything needed or set by
	 INSN.  */
      || insn_sets_resource_p (target_main, &needed, 1)
      || insn_sets_resource_p (target_main, &set, 1))
    return 0;

  /* Insns we pass may not set either NEEDED or SET, so merge them for
     simpler tests.  */
  needed.memory |= set.memory;
  IOR_HARD_REG_SET (needed.regs, set.regs);

  /* This insn isn't redundant if it conflicts with an insn that either is
     or will be in a delay slot of TARGET.  */

  while (delay_list)
    {
      if (insn_sets_resource_p (XEXP (delay_list, 0), &needed, 1))
	return 0;
      delay_list = XEXP (delay_list, 1);
    }

  if (GET_CODE (target) == INSN && GET_CODE (PATTERN (target)) == SEQUENCE)
    for (i = 1; i < XVECLEN (PATTERN (target), 0); i++)
      if (insn_sets_resource_p (XVECEXP (PATTERN (target), 0, i), &needed, 1))
	return 0;
1971 for (trial = PREV_INSN (target);
1972 trial && GET_CODE (trial) != CODE_LABEL;
1973 trial = PREV_INSN (trial))
1975 if (GET_CODE (trial) != INSN && GET_CODE (trial) != CALL_INSN
1976 && GET_CODE (trial) != JUMP_INSN)
1979 pat = PATTERN (trial);
1980 if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
1983 if (GET_CODE (pat) == SEQUENCE)
1985 /* If this is a CALL_INSN and its delay slots, it is hard to track
1986 the resource needs properly, so give up. */
1987 if (GET_CODE (XVECEXP (pat, 0, 0)) == CALL_INSN)
1990 /* If this this is an INSN or JUMP_INSN with delayed effects, it
1991 is hard to track the resource needs properly, so give up. */
1993 #ifdef INSN_SETS_ARE_DELAYED
1994 if (INSN_SETS_ARE_DELAYED (XVECEXP (pat, 0, 0)))
1998 #ifdef INSN_REFERENCES_ARE_DELAYED
1999 if (INSN_REFERENCES_ARE_DELAYED (XVECEXP (pat, 0, 0)))
2003 /* See if any of the insns in the delay slot match, updating
2004 resource requirements as we go. */
2005 for (i = XVECLEN (pat, 0) - 1; i > 0; i--)
2007 rtx candidate = XVECEXP (pat, 0, i);
2009 /* If an insn will be annulled if the branch is false, it isn't
2010 considered as a possible duplicate insn. */
2011 if (rtx_equal_p (PATTERN (candidate), ipat)
2012 && ! (INSN_ANNULLED_BRANCH_P (XVECEXP (pat, 0, 0))
2013 && INSN_FROM_TARGET_P (candidate)))
2015 /* Show that this insn will be used in the sequel. */
2016 INSN_FROM_TARGET_P (candidate) = 0;
2020 /* Unless this is an annulled insn from the target of a branch,
2021 we must stop if it sets anything needed or set by INSN. */
2022 if ((! INSN_ANNULLED_BRANCH_P (XVECEXP (pat, 0, 0))
2023 || ! INSN_FROM_TARGET_P (candidate))
2024 && insn_sets_resource_p (candidate, &needed, 1))
2029 /* If the insn requiring the delay slot conflicts with INSN, we
2031 if (insn_sets_resource_p (XVECEXP (pat, 0, 0), &needed, 1))
2036 /* See if TRIAL is the same as INSN. */
2037 pat = PATTERN (trial);
2038 if (rtx_equal_p (pat, ipat))
2041 /* Can't go any further if TRIAL conflicts with INSN. */
2042 if (insn_sets_resource_p (trial, &needed, 1))
/* Return 1 if THREAD can only be executed in one way.  If LABEL is non-zero,
   it is the target of the branch insn being scanned.  If ALLOW_FALLTHROUGH
   is non-zero, we are allowed to fall into this thread; otherwise, we are
   not.

   If LABEL is used more than once or we pass a label other than LABEL before
   finding an active insn, we do not own this thread.  */
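
/* A small illustration (mine, not from the original sources):

	bne	L2
	insn A		<- fallthrough thread; owned unless a label that
	...		   someone else jumps to precedes insn A
   L2:	insn B		<- target thread; owned only if L2 has exactly one
	...		   user (this branch) and cannot be fallen into

   Owning a thread matters because insns moved out of an owned thread can
   be deleted from it (see fill_slots_from_thread below).  */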
static int
own_thread_p (thread, label, allow_fallthrough)
     rtx thread;
     rtx label;
     int allow_fallthrough;
{
  rtx active_insn;
  rtx insn;

  /* We don't own the function end.  */
  if (thread == 0)
    return 0;

  /* Get the first active insn, or THREAD, if it is an active insn.  */
  active_insn = next_active_insn (PREV_INSN (thread));

  for (insn = thread; insn != active_insn; insn = NEXT_INSN (insn))
    if (GET_CODE (insn) == CODE_LABEL
	&& (insn != label || LABEL_NUSES (insn) != 1))
      return 0;

  if (allow_fallthrough)
    return 1;

  /* Ensure that we reach a BARRIER before any insn or label.  */
  for (insn = prev_nonnote_insn (thread);
       insn == 0 || GET_CODE (insn) != BARRIER;
       insn = prev_nonnote_insn (insn))
    if (insn == 0
	|| GET_CODE (insn) == CODE_LABEL
	|| (GET_CODE (insn) == INSN
	    && GET_CODE (PATTERN (insn)) != USE
	    && GET_CODE (PATTERN (insn)) != CLOBBER))
      return 0;

  return 1;
}

/* Find the number of the basic block that starts closest to INSN.  Return -1
   if we couldn't find such a basic block.  */

static int
find_basic_block (insn)
     rtx insn;
{
  int i;

  /* Scan backwards to the previous BARRIER.  Then see if we can find a
     label that starts a basic block.  Return the basic block number.  */

  for (insn = prev_nonnote_insn (insn);
       insn && GET_CODE (insn) != BARRIER;
       insn = prev_nonnote_insn (insn))
    ;

  /* The start of the function is basic block zero.  */
  if (insn == 0)
    return 0;

  /* See if any of the upcoming CODE_LABELs start a basic block.  If we reach
     anything other than a CODE_LABEL or note, we can't find this code.  */
  for (insn = next_nonnote_insn (insn);
       insn && GET_CODE (insn) == CODE_LABEL;
       insn = next_nonnote_insn (insn))
    {
      for (i = 0; i < n_basic_blocks; i++)
	if (insn == basic_block_head[i])
	  return i;
    }

  return -1;
}
/* Called when INSN is being moved from a location near the target of a jump.
   We leave a marker of the form (use (INSN)) immediately in front
   of WHERE for mark_target_live_regs.  These markers will be deleted when
   reorg finishes.

   We used to try to update the live status of registers if WHERE is at
   the start of a basic block, but that can't work since we may remove a
   BARRIER in relax_delay_slots.  */
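
/* For concreteness (an illustrative shape, not a literal compiler dump):
   if the moved insn was (set (reg:SI 2) (reg:SI 3)), the marker emitted
   before WHERE looks like

	(use (insn ... (set (reg:SI 2) (reg:SI 3)) ...))

   i.e. a USE whose operand is the insn itself, which the liveness scan in
   mark_target_live_regs recognizes and unwraps.  */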
static void
update_block (insn, where)
     rtx insn;
     rtx where;
{
  int b;

  /* Ignore if this was in a delay slot and it came from the target of
     a branch.  */
  if (INSN_FROM_TARGET_P (insn))
    return;

  emit_insn_before (gen_rtx (USE, VOIDmode, insn), where);

  /* INSN might be making a value live in a block where it didn't use to
     be.  So recompute liveness information for this block.  */

  b = find_basic_block (insn);
  if (b != -1)
    bb_ticks[b]++;
}

/* Similar to REDIRECT_JUMP except that we update the BB_TICKS entry for
   the basic block containing the jump.  */

static int
reorg_redirect_jump (jump, nlabel)
     rtx jump;
     rtx nlabel;
{
  int b = find_basic_block (jump);

  if (b != -1)
    bb_ticks[b]++;

  return redirect_jump (jump, nlabel);
}

/* Called when INSN is being moved forward into a delay slot of DELAYED_INSN.
   We check every instruction between INSN and DELAYED_INSN for REG_DEAD notes
   that reference values used in INSN.  If we find one, then we move the
   REG_DEAD note to INSN.

   This is needed to handle the case where a later insn (after INSN) has a
   REG_DEAD note for a register used by INSN, and this later insn subsequently
   gets moved before a CODE_LABEL because it is a redundant insn.  In this
   case, mark_target_live_regs may be confused into thinking the register
   is dead because it sees a REG_DEAD note immediately before a CODE_LABEL.  */
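
/* A schematic of the hazard (my example, not from the original sources):

	insn I:  use r5			<- moved into a delay slot
	insn J:  ... {REG_DEAD r5}	<- found redundant, moved up
   L:	...

   After the moves, a liveness scan at L sees J's REG_DEAD note right before
   the label and may wrongly consider r5 dead, although I still needs it.
   Moving the note from J to I keeps the death point with the last real
   use.  */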
static void
update_reg_dead_notes (insn, delayed_insn)
     rtx insn, delayed_insn;
{
  rtx p, link, next;

  for (p = next_nonnote_insn (insn); p != delayed_insn;
       p = next_nonnote_insn (p))
    for (link = REG_NOTES (p); link; link = next)
      {
	next = XEXP (link, 1);

	if (REG_NOTE_KIND (link) != REG_DEAD
	    || GET_CODE (XEXP (link, 0)) != REG)
	  continue;

	if (reg_referenced_p (XEXP (link, 0), PATTERN (insn)))
	  {
	    /* Move the REG_DEAD note from P to INSN.  */
	    remove_note (p, link);
	    XEXP (link, 1) = REG_NOTES (insn);
	    REG_NOTES (insn) = link;
	  }
      }
}

/* Marks registers possibly live at the current place being scanned by
   mark_target_live_regs.  Used only by the next two functions.  */

static HARD_REG_SET current_live_regs;

/* Marks registers for which we have seen a REG_DEAD note but no assignment.
   Also only used by the next two functions.  */

static HARD_REG_SET pending_dead_regs;

/* Utility function called from mark_target_live_regs via note_stores.
   It deadens any CLOBBERed registers and livens any SET registers.  */

static void
update_live_status (dest, x)
     rtx dest;
     rtx x;
{
  int first_regno, last_regno;
  int i;

  if (GET_CODE (dest) != REG
      && (GET_CODE (dest) != SUBREG || GET_CODE (SUBREG_REG (dest)) != REG))
    return;

  if (GET_CODE (dest) == SUBREG)
    first_regno = REGNO (SUBREG_REG (dest)) + SUBREG_WORD (dest);
  else
    first_regno = REGNO (dest);

  last_regno = first_regno + HARD_REGNO_NREGS (first_regno, GET_MODE (dest));

  if (GET_CODE (x) == CLOBBER)
    for (i = first_regno; i < last_regno; i++)
      CLEAR_HARD_REG_BIT (current_live_regs, i);
  else
    for (i = first_regno; i < last_regno; i++)
      {
	SET_HARD_REG_BIT (current_live_regs, i);
	CLEAR_HARD_REG_BIT (pending_dead_regs, i);
      }
}

/* Similar to next_insn, but ignores insns in the delay slots of
   an annulled branch.  */

static rtx
next_insn_no_annul (insn)
     rtx insn;
{
  if (insn)
    {
      /* If INSN is an annulled branch, skip any insns from the target
	 of the branch.  */
      if (INSN_ANNULLED_BRANCH_P (insn)
	  && NEXT_INSN (PREV_INSN (insn)) != insn)
	while (INSN_FROM_TARGET_P (NEXT_INSN (insn)))
	  insn = NEXT_INSN (insn);

      insn = NEXT_INSN (insn);
      if (insn && GET_CODE (insn) == INSN
	  && GET_CODE (PATTERN (insn)) == SEQUENCE)
	insn = XVECEXP (PATTERN (insn), 0, 0);
    }

  return insn;
}
/* Set the resources that are live at TARGET.

   If TARGET is zero, we refer to the end of the current function and can
   return our precomputed value.

   Otherwise, we try to find out what is live by consulting the basic block
   information.  This is tricky, because we must consider the actions of
   reload and jump optimization, which occur after the basic block information
   is computed.

   Accordingly, we proceed as follows:

   We find the previous BARRIER and look at all immediately following labels
   (with no intervening active insns) to see if any of them start a basic
   block.  If we hit the start of the function first, we use block 0.

   Once we have found a basic block and a corresponding first insn, we can
   accurately compute the live status from basic_block_live_regs and
   reg_renumber.  (By starting at a label following a BARRIER, we are immune
   to actions taken by reload and jump.)  Then we scan all insns between
   that point and our target.  For each CLOBBER (or for call-clobbered regs
   when we pass a CALL_INSN), mark the appropriate registers as dead.  For
   a SET, mark them as live.

   We have to be careful when using REG_DEAD notes because they are not
   updated by such things as find_equiv_reg.  So keep track of registers
   marked as dead that haven't been assigned to, and mark them dead at the
   next CODE_LABEL since reload and jump won't propagate values across labels.

   If we cannot find the start of a basic block (should be a very rare
   case, if it can happen at all), mark everything as potentially live.

   Next, scan forward from TARGET looking for things set or clobbered
   before they are used.  These are not live.

   Because we can be called many times on the same target, save our results
   in a hash table indexed by INSN_UID.  */
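
/* A tiny worked example of the forward scan (mine, not from the original
   sources): suppose the backward computation says { r1, r2 } are live at
   TARGET, and

	TARGET:	r1 = r3 + 1	; r1 is set before any use
		...  = r2	; r2 is used first

   The scan then removes r1 from the live set and keeps r2: anything set
   before it is used cannot be live on entry to TARGET.  */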
static void
mark_target_live_regs (target, res)
     rtx target;
     struct resources *res;
{
  int b = -1;
  int i;
  struct target_info *tinfo;
  rtx insn, next;
  rtx jump_insn = 0;
  rtx jump_target;
  HARD_REG_SET scratch;
  struct resources set, needed;
  int jump_count = 0;

  /* Handle end of function.  */
  if (target == 0)
    {
      *res = end_of_function_needs;
      return;
    }

  /* We have to assume memory is needed, but the CC isn't.  */
  res->memory = 1;
  res->cc = 0;

  /* See if we have computed this value already.  */
  for (tinfo = target_hash_table[INSN_UID (target) % TARGET_HASH_PRIME];
       tinfo; tinfo = tinfo->next)
    if (tinfo->uid == INSN_UID (target))
      break;

  /* Start by getting the basic block number.  If we have saved information,
     we can get it from there unless the insn at the start of the basic block
     has been deleted.  */
  if (tinfo && tinfo->block != -1
      && ! INSN_DELETED_P (basic_block_head[tinfo->block]))
    b = tinfo->block;

  if (b == -1)
    b = find_basic_block (target);

  if (tinfo)
    {
      /* If the information is up-to-date, use it.  Otherwise, we will
	 update it below.  */
      if (b == tinfo->block && b != -1 && tinfo->bb_tick == bb_ticks[b])
	{
	  COPY_HARD_REG_SET (res->regs, tinfo->live_regs);
	  return;
	}
    }
  else
    {
      /* Allocate a place to put our results and chain it into the
	 hash table.  */
      tinfo = (struct target_info *) oballoc (sizeof (struct target_info));
      tinfo->uid = INSN_UID (target);
      tinfo->block = b;
      tinfo->next = target_hash_table[INSN_UID (target) % TARGET_HASH_PRIME];
      target_hash_table[INSN_UID (target) % TARGET_HASH_PRIME] = tinfo;
    }

  CLEAR_HARD_REG_SET (pending_dead_regs);

  /* If we found a basic block, get the live registers from it and update
     them with anything set or killed between its start and the insn before
     TARGET.  Otherwise, we must assume everything is live.  */
  if (b != -1)
    {
      regset regs_live = basic_block_live_at_start[b];
      int offset, j;
      REGSET_ELT_TYPE bit;
      int regno;
      rtx start_insn, stop_insn;

      /* Compute hard regs live at start of block -- this is the real hard regs
	 marked live, plus live pseudo regs that have been renumbered to
	 hard regs.  */

#ifdef HARD_REG_SET
      current_live_regs = *regs_live;
#else
      COPY_HARD_REG_SET (current_live_regs, regs_live);
#endif

      for (offset = 0, i = 0; offset < regset_size; offset++)
	{
	  if (regs_live[offset] == 0)
	    i += REGSET_ELT_BITS;
	  else
	    for (bit = 1; bit && i < max_regno; bit <<= 1, i++)
	      if ((regs_live[offset] & bit)
		  && (regno = reg_renumber[i]) >= 0)
		for (j = regno;
		     j < regno + HARD_REGNO_NREGS (regno,
						   PSEUDO_REGNO_MODE (i));
		     j++)
		  SET_HARD_REG_BIT (current_live_regs, j);
	}

      /* Get starting and ending insn, handling the case where each might
	 be a SEQUENCE.  */
      start_insn = (b == 0 ? get_insns () : basic_block_head[b]);
      stop_insn = target;

      if (GET_CODE (start_insn) == INSN
	  && GET_CODE (PATTERN (start_insn)) == SEQUENCE)
	start_insn = XVECEXP (PATTERN (start_insn), 0, 0);

      if (GET_CODE (stop_insn) == INSN
	  && GET_CODE (PATTERN (stop_insn)) == SEQUENCE)
	stop_insn = next_insn (PREV_INSN (stop_insn));

      for (insn = start_insn; insn != stop_insn;
	   insn = next_insn_no_annul (insn))
	{
	  rtx link;
	  rtx real_insn = insn;

	  /* If this insn is from the target of a branch, it isn't going to
	     be used in the sequel.  If it is used in both cases, this
	     test will not be true.  */
	  if (INSN_FROM_TARGET_P (insn))
	    continue;

	  /* If this insn is a USE made by update_block, we care about the
	     underlying insn.  */
	  if (GET_CODE (insn) == INSN && GET_CODE (PATTERN (insn)) == USE
	      && GET_RTX_CLASS (GET_CODE (XEXP (PATTERN (insn), 0))) == 'i')
	    real_insn = XEXP (PATTERN (insn), 0);

	  if (GET_CODE (real_insn) == CALL_INSN)
	    {
	      /* CALL clobbers all call-used regs that aren't fixed except
		 sp, ap, and fp.  Do this before setting the result of the
		 call live.  */
	      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
		if (call_used_regs[i]
		    && i != STACK_POINTER_REGNUM && i != FRAME_POINTER_REGNUM
		    && i != ARG_POINTER_REGNUM
#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
		    && i != HARD_FRAME_POINTER_REGNUM
#endif
#if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
		    && ! (i == ARG_POINTER_REGNUM && fixed_regs[i])
#endif
#ifdef PIC_OFFSET_TABLE_REGNUM
		    && ! (i == PIC_OFFSET_TABLE_REGNUM && flag_pic)
#endif
		    )
		  CLEAR_HARD_REG_BIT (current_live_regs, i);

	      /* A CALL_INSN sets any global register live, since it may
		 have been modified by the call.  */
	      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
		if (global_regs[i])
		  SET_HARD_REG_BIT (current_live_regs, i);
	    }

	  /* Mark anything killed in an insn to be deadened at the next
	     label.  Ignore USE insns; the only REG_DEAD notes will be for
	     parameters.  But they might be early.  A CALL_INSN will usually
	     clobber registers used for parameters.  It isn't worth bothering
	     with the unlikely case when it won't.  */
	  if ((GET_CODE (real_insn) == INSN
	       && GET_CODE (PATTERN (real_insn)) != USE
	       && GET_CODE (PATTERN (real_insn)) != CLOBBER)
	      || GET_CODE (real_insn) == JUMP_INSN
	      || GET_CODE (real_insn) == CALL_INSN)
	    {
	      for (link = REG_NOTES (real_insn); link; link = XEXP (link, 1))
		if (REG_NOTE_KIND (link) == REG_DEAD
		    && GET_CODE (XEXP (link, 0)) == REG
		    && REGNO (XEXP (link, 0)) < FIRST_PSEUDO_REGISTER)
		  {
		    int first_regno = REGNO (XEXP (link, 0));
		    int last_regno
		      = (first_regno
			 + HARD_REGNO_NREGS (first_regno,
					     GET_MODE (XEXP (link, 0))));

		    for (i = first_regno; i < last_regno; i++)
		      SET_HARD_REG_BIT (pending_dead_regs, i);
		  }

	      note_stores (PATTERN (real_insn), update_live_status);

	      /* If any registers were unused after this insn, kill them.
		 These notes will always be accurate.  */
	      for (link = REG_NOTES (real_insn); link; link = XEXP (link, 1))
		if (REG_NOTE_KIND (link) == REG_UNUSED
		    && GET_CODE (XEXP (link, 0)) == REG
		    && REGNO (XEXP (link, 0)) < FIRST_PSEUDO_REGISTER)
		  {
		    int first_regno = REGNO (XEXP (link, 0));
		    int last_regno
		      = (first_regno
			 + HARD_REGNO_NREGS (first_regno,
					     GET_MODE (XEXP (link, 0))));

		    for (i = first_regno; i < last_regno; i++)
		      CLEAR_HARD_REG_BIT (current_live_regs, i);
		  }
	    }

	  else if (GET_CODE (real_insn) == CODE_LABEL)
	    {
	      /* A label clobbers the pending dead registers since neither
		 reload nor jump will propagate a value across a label.  */
	      AND_COMPL_HARD_REG_SET (current_live_regs, pending_dead_regs);
	      CLEAR_HARD_REG_SET (pending_dead_regs);
	    }

	  /* The beginning of the epilogue corresponds to the end of the
	     RTL chain when there are no epilogue insns.  Certain resources
	     are implicitly required at that point.  */
	  else if (GET_CODE (real_insn) == NOTE
		   && NOTE_LINE_NUMBER (real_insn) == NOTE_INSN_EPILOGUE_BEG)
	    IOR_HARD_REG_SET (current_live_regs, start_of_epilogue_needs.regs);
	}

      COPY_HARD_REG_SET (res->regs, current_live_regs);
      tinfo->block = b;
      tinfo->bb_tick = bb_ticks[b];
    }
  else
    /* We didn't find the start of a basic block.  Assume everything
       in use.  This should happen only extremely rarely.  */
    SET_HARD_REG_SET (res->regs);

  /* Now step forward from TARGET looking for registers that are set before
     they are used.  These are dead.  If we pass a label, any pending dead
     registers that weren't yet used can be made dead.  Stop when we pass a
     conditional JUMP_INSN; follow the first few unconditional branches.  */

  CLEAR_RESOURCE (&set);
  CLEAR_RESOURCE (&needed);

  for (insn = target; insn; insn = next)
    {
      rtx this_jump_insn = insn;

      next = NEXT_INSN (insn);
      switch (GET_CODE (insn))
	{
	case CODE_LABEL:
	  AND_COMPL_HARD_REG_SET (pending_dead_regs, needed.regs);
	  AND_COMPL_HARD_REG_SET (res->regs, pending_dead_regs);
	  CLEAR_HARD_REG_SET (pending_dead_regs);
	  continue;

	case BARRIER:
	case NOTE:
	  continue;

	case INSN:
	case CALL_INSN:
	case JUMP_INSN:
	  if (GET_CODE (PATTERN (insn)) == USE)
	    {
	      /* If INSN is a USE made by update_block, we care about the
		 underlying insn.  Any registers set by the underlying insn
		 are live since the insn is being done somewhere else.  */
	      if (GET_RTX_CLASS (GET_CODE (XEXP (PATTERN (insn), 0))) == 'i')
		mark_set_resources (XEXP (PATTERN (insn), 0), res, 0, 1);

	      /* All other USE insns are to be ignored.  */
	      continue;
	    }
	  else if (GET_CODE (PATTERN (insn)) == CLOBBER)
	    continue;
	  else if (GET_CODE (PATTERN (insn)) == SEQUENCE)
	    {
	      /* An unconditional jump can be used to fill the delay slot
		 of a call, so search for a JUMP_INSN in any position.  */
	      for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
		{
		  this_jump_insn = XVECEXP (PATTERN (insn), 0, i);
		  if (GET_CODE (this_jump_insn) == JUMP_INSN)
		    break;
		}
	    }
	}

      if (GET_CODE (this_jump_insn) == JUMP_INSN)
	{
	  if (jump_count++ < 10
	      && (simplejump_p (this_jump_insn)
		  || GET_CODE (PATTERN (this_jump_insn)) == RETURN))
	    {
	      next = next_active_insn (JUMP_LABEL (this_jump_insn));
	      if (jump_insn == 0)
		{
		  jump_insn = insn;
		  jump_target = JUMP_LABEL (this_jump_insn);
		}
	    }
	  else
	    break;
	}

      mark_referenced_resources (insn, &needed, 1);
      mark_set_resources (insn, &set, 0, 1);

      COPY_HARD_REG_SET (scratch, set.regs);
      AND_COMPL_HARD_REG_SET (scratch, needed.regs);
      AND_COMPL_HARD_REG_SET (res->regs, scratch);
    }

  /* If we hit an unconditional branch, we have another way of finding out
     what is live: we can see what is live at the branch target and include
     anything used but not set before the branch.  The only things that are
     live are those that are live using the above test and the test below.

     Don't try this if we expired our jump count above, since that would
     mean there may be an infinite loop in the function being compiled.  */

  if (jump_insn && jump_count < 10)
    {
      struct resources new_resources;
      rtx stop_insn = next_active_insn (jump_insn);

      mark_target_live_regs (next_active_insn (jump_target), &new_resources);
      CLEAR_RESOURCE (&set);
      CLEAR_RESOURCE (&needed);

      /* Include JUMP_INSN in the needed registers.  */
      for (insn = target; insn != stop_insn; insn = next_active_insn (insn))
	{
	  mark_referenced_resources (insn, &needed, 1);

	  COPY_HARD_REG_SET (scratch, needed.regs);
	  AND_COMPL_HARD_REG_SET (scratch, set.regs);
	  IOR_HARD_REG_SET (new_resources.regs, scratch);

	  mark_set_resources (insn, &set, 0, 1);
	}

      AND_HARD_REG_SET (res->regs, new_resources.regs);
    }

  COPY_HARD_REG_SET (tinfo->live_regs, res->regs);
}
/* Scan a function looking for insns that need a delay slot and find insns to
   put into the delay slot.

   NON_JUMPS_P is non-zero if we are only to try to fill non-jump insns (such
   as calls).  We do these first since we don't want jump insns (that are
   easier to fill) to get the only insns that could be used for non-jump insns.
   When it is zero, only try to fill JUMP_INSNs.

   When slots are filled in this manner, the insns (including the
   delay_insn) are put together in a SEQUENCE rtx.  In this fashion,
   it is possible to tell whether a delay slot has really been filled
   or not.  `final' knows how to deal with this, by communicating
   through FINAL_SEQUENCE.  */
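
/* For illustration (schematic, not a literal compiler dump): a call with
   one filled delay slot ends up wrapped as

	(insn (sequence [ (call_insn ...)		; the delay insn
			  (insn ... filler ...) ]))	; slot 1

   so element 0 of a SEQUENCE is always the insn needing the slots and the
   remaining elements are the insns filling them.  */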
static void
fill_simple_delay_slots (first, non_jumps_p)
     rtx first;
     int non_jumps_p;
{
  register rtx insn, pat, trial, next_trial;
  register int i;
  int num_unfilled_slots = unfilled_slots_next - unfilled_slots_base;
  struct resources needed, set;
  register int slots_to_fill, slots_filled;
  rtx delay_list;

  for (i = 0; i < num_unfilled_slots; i++)
    {
      int flags;
      /* Get the next insn to fill.  If it has already had any slots assigned,
	 we can't do anything with it.  Maybe we'll improve this later.  */

      insn = unfilled_slots_base[i];
      if (insn == 0
	  || INSN_DELETED_P (insn)
	  || (GET_CODE (insn) == INSN
	      && GET_CODE (PATTERN (insn)) == SEQUENCE)
	  || (GET_CODE (insn) == JUMP_INSN && non_jumps_p)
	  || (GET_CODE (insn) != JUMP_INSN && ! non_jumps_p))
	continue;

      if (GET_CODE (insn) == JUMP_INSN)
	flags = get_jump_flags (insn, JUMP_LABEL (insn));
      else
	flags = get_jump_flags (insn, NULL_RTX);
      slots_to_fill = num_delay_slots (insn);
      if (slots_to_fill == 0)
	abort ();

      /* This insn needs, or can use, some delay slots.  SLOTS_TO_FILL
	 says how many.  After initialization, first try optimizing

	 call _foo		call _foo
	 nop			add %o7,.-L1,%o7
	 b,a L1
	 nop

	 If this case applies, the delay slot of the call is filled with
	 the unconditional jump.  This is done first to avoid having the
	 delay slot of the call filled in the backward scan.  Also, since
	 the unconditional jump is likely to also have a delay slot, that
	 insn must exist when it is subsequently scanned.

	 This is tried on each insn with delay slots as some machines
	 have insns which perform calls, but are not represented as
	 CALL_INSNs.  */

      slots_filled = 0;
      delay_list = 0;

      if ((trial = next_active_insn (insn))
	  && GET_CODE (trial) == JUMP_INSN
	  && simplejump_p (trial)
	  && eligible_for_delay (insn, slots_filled, trial, flags)
	  && no_labels_between_p (insn, trial))
	{
	  slots_filled++;
	  delay_list = add_to_delay_list (trial, delay_list);
	  /* Remove the unconditional jump from consideration for delay slot
	     filling and unthread it.  */
	  if (unfilled_slots_base[i + 1] == trial)
	    unfilled_slots_base[i + 1] = 0;
	  {
	    rtx next = NEXT_INSN (trial);
	    rtx prev = PREV_INSN (trial);
	    if (prev)
	      NEXT_INSN (prev) = next;
	    if (next)
	      PREV_INSN (next) = prev;
	  }
	}

      /* Now, scan backwards from the insn to search for a potential
	 delay-slot candidate.  Stop searching when a label or jump is hit.

	 For each candidate, if it is to go into the delay slot (moved
	 forward in execution sequence), it must not need or set any resources
	 that were set by later insns and must not set any resources that
	 are needed for those insns.

	 The delay slot insn itself sets resources unless it is a call
	 (in which case the called routine, not the insn itself, is doing
	 the setting).  */

      if (slots_filled < slots_to_fill)
	{
	  CLEAR_RESOURCE (&needed);
	  CLEAR_RESOURCE (&set);
	  mark_set_resources (insn, &set, 0, 0);
	  mark_referenced_resources (insn, &needed, 0);

	  for (trial = prev_nonnote_insn (insn); ! stop_search_p (trial, 1);
	       trial = next_trial)
	    {
	      next_trial = prev_nonnote_insn (trial);

	      /* This must be an INSN or CALL_INSN.  */
	      pat = PATTERN (trial);

	      /* USE and CLOBBER at this level was just for flow; ignore it.  */
	      if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
		continue;

	      /* Check for resource conflict first, to avoid unnecessary
		 splitting.  */
	      if (! insn_references_resource_p (trial, &set, 1)
		  && ! insn_sets_resource_p (trial, &set, 1)
		  && ! insn_sets_resource_p (trial, &needed, 1)
#ifdef HAVE_cc0
		  /* Can't separate set of cc0 from its use.  */
		  && ! (reg_mentioned_p (cc0_rtx, pat)
			&& ! sets_cc0_p (cc0_rtx, pat))
#endif
		  )
		{
		  trial = try_split (pat, trial, 1);
		  next_trial = prev_nonnote_insn (trial);
		  if (eligible_for_delay (insn, slots_filled, trial, flags))
		    {
		      /* In this case, we are searching backward, so if we
			 find insns to put on the delay list, we want
			 to put them at the head, rather than the
			 tail, of the list.  */

		      update_reg_dead_notes (trial, insn);
		      delay_list = gen_rtx (INSN_LIST, VOIDmode,
					    trial, delay_list);
		      update_block (trial, trial);
		      delete_insn (trial);
		      if (slots_to_fill == ++slots_filled)
			break;
		      continue;
		    }
		}

	      mark_set_resources (trial, &set, 0, 1);
	      mark_referenced_resources (trial, &needed, 1);
	    }
	}

      /* If all needed slots haven't been filled, we come here.  */

      /* Try to optimize case of jumping around a single insn.  */
#if defined(ANNUL_IFFALSE_SLOTS) || defined(ANNUL_IFTRUE_SLOTS)
      if (slots_filled != slots_to_fill
	  && delay_list == 0
	  && GET_CODE (insn) == JUMP_INSN && condjump_p (insn))
	{
	  delay_list = optimize_skip (insn);
	  if (delay_list)
	    slots_filled += 1;
	}
#endif

      /* Try to get insns from beyond the insn needing the delay slot.
	 These insns can neither set nor reference resources set in insns being
	 skipped, cannot set resources in the insn being skipped, and, if this
	 is a CALL_INSN (or a CALL_INSN is passed), cannot trap (because the
	 call might not return).

	 If this is a conditional jump, see if it merges back to us early
	 enough for us to pick up insns from the merge point.  Don't do
	 this if there is another branch to our label unless we pass all of
	 them.

	 Another similar merge is if we jump to the same place that a
	 later unconditional jump branches to.  In that case, we don't
	 care about the number of uses of our label.  */

      if (slots_filled != slots_to_fill
	  && (GET_CODE (insn) != JUMP_INSN
	      || (condjump_p (insn) && ! simplejump_p (insn)
		  && JUMP_LABEL (insn) != 0)))
	{
	  rtx target = 0;
	  int maybe_never = 0;
	  int passed_label = 0;
	  int target_uses = 0;
	  struct resources needed_at_jump;

	  CLEAR_RESOURCE (&needed);
	  CLEAR_RESOURCE (&set);

	  if (GET_CODE (insn) == CALL_INSN)
	    {
	      mark_set_resources (insn, &set, 0, 1);
	      mark_referenced_resources (insn, &needed, 1);
	      maybe_never = 1;
	    }
	  else
	    {
	      mark_set_resources (insn, &set, 0, 1);
	      mark_referenced_resources (insn, &needed, 1);
	      if (GET_CODE (insn) == JUMP_INSN)
		{
		  /* Get our target and show how many more uses we want to
		     see before we hit the label.  */
		  target = JUMP_LABEL (insn);
		  target_uses = LABEL_NUSES (target) - 1;
		}
	    }

	  for (trial = next_nonnote_insn (insn); trial; trial = next_trial)
	    {
	      rtx pat, trial_delay;

	      next_trial = next_nonnote_insn (trial);

	      if (GET_CODE (trial) == CODE_LABEL)
		{
		  passed_label = 1;

		  /* If this is our target, see if we have seen all its uses.
		     If so, indicate we have passed our target and ignore it.
		     All other labels cause us to stop our search.  */
		  if (trial == target && target_uses == 0)
		    {
		      target = 0;
		      continue;
		    }
		  else
		    break;
		}
	      else if (GET_CODE (trial) == BARRIER)
		break;

	      /* We must have an INSN, JUMP_INSN, or CALL_INSN.  */
	      pat = PATTERN (trial);

	      /* Stand-alone USE and CLOBBER are just for flow.  */
	      if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
		continue;

	      /* If this already has filled delay slots, get the insn needing
		 the delay slots.  */
	      if (GET_CODE (pat) == SEQUENCE)
		trial_delay = XVECEXP (pat, 0, 0);
	      else
		trial_delay = trial;

	      /* If this is a jump insn to our target, indicate that we have
		 seen another jump to it.  If we aren't handling a conditional
		 jump, stop our search.  Otherwise, compute the needs at its
		 target and add them to NEEDED.  */
	      if (GET_CODE (trial_delay) == JUMP_INSN)
		{
		  if (target == 0)
		    break;
		  else if (JUMP_LABEL (trial_delay) == target)
		    target_uses--;
		  else
		    {
		      mark_target_live_regs
			(next_active_insn (JUMP_LABEL (trial_delay)),
			 &needed_at_jump);
		      needed.memory |= needed_at_jump.memory;
		      IOR_HARD_REG_SET (needed.regs, needed_at_jump.regs);
		    }
		}

	      /* See if we have a resource problem before we try to
		 split.  */
	      if (GET_CODE (trial) == INSN
		  && GET_CODE (pat) != SEQUENCE
		  && ! insn_references_resource_p (trial, &set, 1)
		  && ! insn_sets_resource_p (trial, &set, 1)
		  && ! insn_sets_resource_p (trial, &needed, 1)
#ifdef HAVE_cc0
		  && ! (reg_mentioned_p (cc0_rtx, pat) && ! sets_cc0_p (pat))
#endif
		  && ! (maybe_never && may_trap_p (pat))
		  && (trial = try_split (pat, trial, 0))
		  && eligible_for_delay (insn, slots_filled, trial, flags))
		{
		  next_trial = next_nonnote_insn (trial);
		  delay_list = add_to_delay_list (trial, delay_list);

#ifdef HAVE_cc0
		  if (reg_mentioned_p (cc0_rtx, pat))
		    link_cc0_insns (trial);
#endif

		  if (passed_label)
		    update_block (trial, trial);
		  delete_insn (trial);
		  if (slots_to_fill == ++slots_filled)
		    break;
		  continue;
		}

	      mark_set_resources (trial, &set, 0, 1);
	      mark_referenced_resources (trial, &needed, 1);

	      /* Ensure we don't put insns between the setting of cc and the
		 comparison by moving a setting of cc into an earlier delay
		 slot since these insns could clobber the condition code.  */
	      set.cc = 1;

	      /* If this is a call or jump, we might not get here.  */
	      if (GET_CODE (trial) == CALL_INSN
		  || GET_CODE (trial) == JUMP_INSN)
		maybe_never = 1;
	    }

	  /* If there are slots left to fill and our search was stopped by an
	     unconditional branch, try the insn at the branch target.  We can
	     redirect the branch if it works.  */
	  if (slots_to_fill != slots_filled
	      && trial
	      && GET_CODE (trial) == JUMP_INSN
	      && simplejump_p (trial)
	      && (target == 0 || JUMP_LABEL (trial) == target)
	      && (next_trial = next_active_insn (JUMP_LABEL (trial))) != 0
	      && ! (GET_CODE (next_trial) == INSN
		    && GET_CODE (PATTERN (next_trial)) == SEQUENCE)
	      && ! insn_references_resource_p (next_trial, &set, 1)
	      && ! insn_sets_resource_p (next_trial, &set, 1)
	      && ! insn_sets_resource_p (next_trial, &needed, 1)
#ifdef HAVE_cc0
	      && ! reg_mentioned_p (cc0_rtx, PATTERN (next_trial))
#endif
	      && ! (maybe_never && may_trap_p (PATTERN (next_trial)))
	      && (next_trial = try_split (PATTERN (next_trial), next_trial, 0))
	      && eligible_for_delay (insn, slots_filled, next_trial, flags))
	    {
	      rtx new_label = next_active_insn (next_trial);

	      if (new_label != 0)
		new_label = get_label_before (new_label);
	      else
		new_label = find_end_label ();

	      delay_list
		= add_to_delay_list (copy_rtx (next_trial), delay_list);
	      slots_filled++;
	      reorg_redirect_jump (trial, new_label);

	      /* If we merged because we both jumped to the same place,
		 redirect the original insn also.  */
	      if (target)
		reorg_redirect_jump (insn, new_label);
	    }
	}

      if (delay_list)
	unfilled_slots_base[i]
	  = emit_delay_sequence (insn, delay_list,
				 slots_filled, slots_to_fill);

      if (slots_to_fill == slots_filled)
	unfilled_slots_base[i] = 0;

      note_delay_statistics (slots_filled, 0);
    }

#ifdef DELAY_SLOTS_FOR_EPILOGUE
  /* See if the epilogue needs any delay slots.  Try to fill them if so.
     The only thing we can do is scan backwards from the end of the
     function.  If we did this in a previous pass, it is incorrect to do it
     again.  */
  if (current_function_epilogue_delay_list)
    return;

  slots_to_fill = DELAY_SLOTS_FOR_EPILOGUE;
  if (slots_to_fill == 0)
    return;

  slots_filled = 0;
  CLEAR_RESOURCE (&needed);
  CLEAR_RESOURCE (&set);

  for (trial = get_last_insn (); ! stop_search_p (trial, 1);
       trial = PREV_INSN (trial))
    {
      if (GET_CODE (trial) == NOTE)
	continue;
      pat = PATTERN (trial);
      if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
	continue;

      if (! insn_references_resource_p (trial, &set, 1)
	  && ! insn_sets_resource_p (trial, &needed, 1)
#ifdef HAVE_cc0
	  /* Don't want to mess with cc0 here.  */
	  && ! reg_mentioned_p (cc0_rtx, pat)
#endif
	  && ! insn_sets_resource_p (trial, &set, 1))
	{
	  trial = try_split (pat, trial, 1);
	  if (ELIGIBLE_FOR_EPILOGUE_DELAY (trial, slots_filled))
	    {
	      /* Here as well we are searching backward, so put the
		 insns we find on the head of the list.  */

	      current_function_epilogue_delay_list
		= gen_rtx (INSN_LIST, VOIDmode, trial,
			   current_function_epilogue_delay_list);
	      mark_referenced_resources (trial, &end_of_function_needs, 1);
	      update_block (trial, trial);
	      delete_insn (trial);

	      /* Clear deleted bit so final.c will output the insn.  */
	      INSN_DELETED_P (trial) = 0;

	      if (slots_to_fill == ++slots_filled)
		break;
	      continue;
	    }
	}

      mark_set_resources (trial, &set, 0, 1);
      mark_referenced_resources (trial, &needed, 1);
    }

  note_delay_statistics (slots_filled, 0);
#endif
}
/* Try to find insns to place in delay slots.

   INSN is the jump needing SLOTS_TO_FILL delay slots.  It tests CONDITION
   or is an unconditional branch if CONDITION is const_true_rtx.
   *PSLOTS_FILLED is updated with the number of slots that we have filled.

   THREAD is a flow-of-control thread: either the insns to be executed if
   the branch is true or those to be executed if it is false; THREAD_IF_TRUE
   says which.

   OPPOSITE_THREAD is the thread in the opposite direction.  It is used
   to see if any potential delay slot insns set things needed there.

   LIKELY is non-zero if it is extremely likely that the branch will be
   taken and THREAD_IF_TRUE is set.  This is used for the branch at the
   end of a loop back up to the top.

   OWN_THREAD and OWN_OPPOSITE_THREAD are true if we are the only user of the
   thread.  I.e., it is the fallthrough code of our jump or the target of the
   jump when we are the only jump going there.

   If OWN_THREAD is false, it must be the "true" thread of a jump.  In that
   case, we can only take insns from the head of the thread for our delay
   slot.  We then adjust the jump to point after the insns we have taken.  */
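
/* Schematically (my illustration, not from the original sources), for

	beq	L2		; the insn being filled
	insn F			; fallthrough thread
	...
   L2:	insn T			; target thread
	...

   one call fills from the target thread (THREAD = T, OPPOSITE_THREAD = F)
   and another from the fallthrough thread (THREAD = F, OPPOSITE_THREAD = T).
   An insn moved from THREAD must not set anything OPPOSITE_THREAD needs
   unless it can be annulled in the untaken direction.  */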
static rtx
fill_slots_from_thread (insn, condition, thread, opposite_thread, likely,
			thread_if_true, own_thread, own_opposite_thread,
			slots_to_fill, pslots_filled)
     rtx insn;
     rtx condition;
     rtx thread, opposite_thread;
     int likely;
     int thread_if_true;
     int own_thread, own_opposite_thread;
     int slots_to_fill, *pslots_filled;
{
  rtx new_thread;
  rtx delay_list = 0;
  struct resources opposite_needed, set, needed;
  rtx trial;
  int lose = 0;
  int must_annul = 0;
  int flags;

  /* Validate our arguments.  */
  if ((condition == const_true_rtx && ! thread_if_true)
      || (! own_thread && ! thread_if_true))
    abort ();

  flags = get_jump_flags (insn, JUMP_LABEL (insn));

  /* If our thread is the end of subroutine, we can't get any delay
     insns from that.  */
  if (thread == 0)
    return 0;

  /* If this is an unconditional branch, nothing is needed at the
     opposite thread.  Otherwise, compute what is needed there.  */
  if (condition == const_true_rtx)
    CLEAR_RESOURCE (&opposite_needed);
  else
    mark_target_live_regs (opposite_thread, &opposite_needed);

  /* If the insn at THREAD can be split, do it here to avoid having to
     update THREAD and NEW_THREAD if it is done in the loop below.  Also
     initialize NEW_THREAD.  */

  new_thread = thread = try_split (PATTERN (thread), thread, 0);

  /* Scan insns at THREAD.  We are looking for an insn that can be removed
     from THREAD (it neither sets nor references resources that were set
     ahead of it and it doesn't set anything needed by the insns ahead of
     it) and that either can be placed in an annulling insn or isn't
     needed at OPPOSITE_THREAD.  */

  CLEAR_RESOURCE (&needed);
  CLEAR_RESOURCE (&set);

  /* If we do not own this thread, we must stop as soon as we find
     something that we can't put in a delay slot, since all we can do
     is branch into THREAD at a later point.  Therefore, labels stop
     the search if this is not the `true' thread.  */

  for (trial = thread;
       ! stop_search_p (trial, ! thread_if_true) && (! lose || own_thread);
       trial = next_nonnote_insn (trial))
    {
      rtx pat, old_trial;

      /* If we have passed a label, we no longer own this thread.  */
      if (GET_CODE (trial) == CODE_LABEL)
	{
	  own_thread = 0;
	  continue;
	}

      pat = PATTERN (trial);
      if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
	continue;

      /* If TRIAL conflicts with the insns ahead of it, we lose.  Also,
	 don't separate or copy insns that set and use CC0.  */
      if (! insn_references_resource_p (trial, &set, 1)
	  && ! insn_sets_resource_p (trial, &set, 1)
	  && ! insn_sets_resource_p (trial, &needed, 1)
#ifdef HAVE_cc0
	  && ! (reg_mentioned_p (cc0_rtx, pat)
		&& (! own_thread || ! sets_cc0_p (pat)))
#endif
	  )
	{
	  /* If TRIAL is redundant with some insn before INSN, we don't
	     actually need to add it to the delay list; we can merely pretend
	     we did.  */
	  if (redundant_insn_p (trial, insn, delay_list))
	    {
	      if (own_thread)
		{
		  update_block (trial, thread);
		  delete_insn (trial);
		}
	      else
		new_thread = next_active_insn (trial);

	      continue;
	    }

	  /* There are two ways we can win:  If TRIAL doesn't set anything
	     needed at the opposite thread and can't trap, or if it can
	     go into an annulled delay slot.  */
	  if (condition == const_true_rtx
	      || (! insn_sets_resource_p (trial, &opposite_needed, 1)
		  && ! may_trap_p (pat)))
	    {
	      old_trial = trial;
	      trial = try_split (pat, trial, 0);
	      if (new_thread == old_trial)
		new_thread = trial;
	      pat = PATTERN (trial);
	      if (eligible_for_delay (insn, *pslots_filled, trial, flags))
		goto winner;
	    }
	  else if (0
#ifdef ANNUL_IFTRUE_SLOTS
		   || ! thread_if_true
#endif
#ifdef ANNUL_IFFALSE_SLOTS
		   || thread_if_true
#endif
		   )
	    {
	      old_trial = trial;
	      trial = try_split (pat, trial, 0);
	      if (new_thread == old_trial)
		new_thread = trial;
	      pat = PATTERN (trial);
	      if ((thread_if_true
		   ? eligible_for_annul_false (insn, *pslots_filled, trial, flags)
		   : eligible_for_annul_true (insn, *pslots_filled, trial, flags)))
		{
		  rtx temp;

		  must_annul = 1;
		winner:

#ifdef HAVE_cc0
		  if (reg_mentioned_p (cc0_rtx, pat))
		    link_cc0_insns (trial);
#endif

		  /* If we own this thread, delete the insn.  If this is the
		     destination of a branch, show that a basic block status
		     may have been updated.  In any case, mark the new
		     starting point of this thread.  */
		  if (own_thread)
		    {
		      update_block (trial, thread);
		      delete_insn (trial);
		    }
		  else
		    new_thread = next_active_insn (trial);

		  temp = own_thread ? trial : copy_rtx (trial);
		  if (thread_if_true)
		    INSN_FROM_TARGET_P (temp) = 1;

		  delay_list = add_to_delay_list (temp, delay_list);

		  if (slots_to_fill == ++(*pslots_filled))
		    {
		      /* Even though we have filled all the slots, we
			 may be branching to a location that has a
			 redundant insn.  Skip any if so.  */
		      while (new_thread && ! own_thread
			     && ! insn_sets_resource_p (new_thread, &set, 1)
			     && ! insn_sets_resource_p (new_thread, &needed, 1)
			     && ! insn_references_resource_p (new_thread,
							      &set, 1)
			     && redundant_insn_p (new_thread, insn,
						  delay_list))
			new_thread = next_active_insn (new_thread);
		      break;
		    }

		  continue;
		}
	    }
	}

      /* This insn can't go into a delay slot.  */
      lose = 1;
      mark_set_resources (trial, &set, 0, 1);
      mark_referenced_resources (trial, &needed, 1);

      /* Ensure we don't put insns between the setting of cc and the comparison
	 by moving a setting of cc into an earlier delay slot since these insns
	 could clobber the condition code.  */
      set.cc = 1;

      /* If this insn is a register-register copy and the next insn has
	 a use of our destination, change it to use our source.  That way,
	 it will become a candidate for our delay slot the next time
	 through this loop.  This case occurs commonly in loops that
	 scan a list.

	 We could check for more complex cases than those tested below,
	 but it doesn't seem worth it.  It might also be a good idea to try
	 to swap the two insns.  That might do better.

	 We can't do this if the next insn modifies our destination, because
	 that would make the replacement into the insn invalid.  We also can't
	 do this if it modifies our source, because it might be an earlyclobber
	 operand.  This latter test also prevents updating the contents of
	 a PRE_INC.  */

      if (GET_CODE (trial) == INSN && GET_CODE (pat) == SET
	  && GET_CODE (SET_SRC (pat)) == REG
	  && GET_CODE (SET_DEST (pat)) == REG)
	{
	  rtx next = next_nonnote_insn (trial);

	  if (next && GET_CODE (next) == INSN
	      && GET_CODE (PATTERN (next)) != USE
	      && ! reg_set_p (SET_DEST (pat), next)
	      && ! reg_set_p (SET_SRC (pat), next)
	      && reg_referenced_p (SET_DEST (pat), PATTERN (next)))
	    validate_replace_rtx (SET_DEST (pat), SET_SRC (pat), next);
	}
    }

  /* If we stopped on a branch insn that has delay slots, see if we can
     steal some of the insns in those slots.  */
  if (trial && GET_CODE (trial) == INSN
      && GET_CODE (PATTERN (trial)) == SEQUENCE
      && GET_CODE (XVECEXP (PATTERN (trial), 0, 0)) == JUMP_INSN)
    {
      /* If this is the `true' thread, we will want to follow the jump,
	 so we can only do this if we have taken everything up to here.  */
      if (thread_if_true && trial == new_thread)
	delay_list
	  = steal_delay_list_from_target (insn, condition, PATTERN (trial),
					  delay_list, &set, &needed,
					  &opposite_needed, slots_to_fill,
					  pslots_filled, &must_annul,
					  &new_thread);
      else if (! thread_if_true)
	delay_list
	  = steal_delay_list_from_fallthrough (insn, condition,
					       PATTERN (trial),
					       delay_list, &set, &needed,
					       &opposite_needed, slots_to_fill,
					       pslots_filled, &must_annul);
    }

  /* If we haven't found anything for this delay slot and it is very
     likely that the branch will be taken, see if the insn at our target
     increments or decrements a register with an increment that does not
     depend on the destination register.  If so, try to place the opposite
     arithmetic insn after the jump insn and put the arithmetic insn in the
     delay slot.  If we can't do this, return.  */
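
  /* Schematically (my illustration, not from the original sources): if the
     branch is almost always taken and its target begins with an increment,

	before:				after:
		beq	L		beq	L'	; slot: r1 = r1 + 4
		...			r1 = r1 - 4	; undo on fallthrough
	L:	r1 = r1 + 4		...
		...		L':	...

     the increment executes in the delay slot on every path, the branch is
     redirected past it, and the rarely executed fallthrough path compensates
     with the opposite operation.  */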
  if (delay_list == 0 && likely && new_thread && GET_CODE (new_thread) == INSN)
    {
      rtx pat = PATTERN (new_thread);
      rtx dest;
      rtx src;

      trial = new_thread;
      pat = PATTERN (trial);

      if (GET_CODE (trial) != INSN || GET_CODE (pat) != SET
	  || ! eligible_for_delay (insn, 0, trial, flags))
	return 0;

      dest = SET_DEST (pat), src = SET_SRC (pat);
      if ((GET_CODE (src) == PLUS || GET_CODE (src) == MINUS)
	  && rtx_equal_p (XEXP (src, 0), dest)
	  && ! reg_overlap_mentioned_p (dest, XEXP (src, 1)))
	{
	  rtx other = XEXP (src, 1);
	  rtx new_arith;
	  rtx ninsn;

	  /* If this is a constant adjustment, use the same code with
	     the negated constant.  Otherwise, reverse the sense of the
	     arithmetic.  */
	  if (GET_CODE (other) == CONST_INT)
	    new_arith = gen_rtx (GET_CODE (src), GET_MODE (src), dest,
				 negate_rtx (GET_MODE (src), other));
	  else
	    new_arith = gen_rtx (GET_CODE (src) == PLUS ? MINUS : PLUS,
				 GET_MODE (src), dest, other);

	  ninsn = emit_insn_after (gen_rtx (SET, VOIDmode, dest, new_arith),
				   insn);

	  if (recog_memoized (ninsn) < 0
	      || (insn_extract (ninsn),
		  ! constrain_operands (INSN_CODE (ninsn), 1)))
	    {
	      delete_insn (ninsn);
	      return 0;
	    }

	  if (own_thread)
	    {
	      update_block (trial, thread);
	      delete_insn (trial);
	    }
	  else
	    new_thread = next_active_insn (trial);

	  ninsn = own_thread ? trial : copy_rtx (trial);
	  if (thread_if_true)
	    INSN_FROM_TARGET_P (ninsn) = 1;

	  delay_list = add_to_delay_list (ninsn, NULL_RTX);
	  (*pslots_filled)++;
	}
    }

  if (delay_list && must_annul)
    INSN_ANNULLED_BRANCH_P (insn) = 1;

  /* If we are to branch into the middle of this thread, find an appropriate
     label or make a new one if none, and redirect INSN to it.  If we hit the
     end of the function, use the end-of-function label.  */
  if (new_thread != thread)
    {
      rtx label;

      if (! thread_if_true)
	abort ();

      if (new_thread && GET_CODE (new_thread) == JUMP_INSN
	  && (simplejump_p (new_thread)
	      || GET_CODE (PATTERN (new_thread)) == RETURN))
	new_thread = follow_jumps (JUMP_LABEL (new_thread));

      if (new_thread == 0)
	label = find_end_label ();
      else if (GET_CODE (new_thread) == CODE_LABEL)
	label = new_thread;
      else
	label = get_label_before (new_thread);

      reorg_redirect_jump (insn, label);
    }

  return delay_list;
}
/* Make another attempt to find insns to place in delay slots.

   We previously looked for insns located in front of the delay insn
   and, for non-jump delay insns, located behind the delay insn.

   Here only try to schedule jump insns and try to move insns from either
   the target or the following insns into the delay slot.  If annulling is
   supported, we will be likely to do this.  Otherwise, we can do this only
   if safe.  */

static void
fill_eager_delay_slots (first)
     rtx first;
{
  register rtx insn;
  register int i;
  int num_unfilled_slots = unfilled_slots_next - unfilled_slots_base;

  for (i = 0; i < num_unfilled_slots; i++)
    {
      rtx condition;
      rtx target_label, insn_at_target, fallthrough_insn;
      rtx delay_list = 0;
      int own_target;
      int own_fallthrough;
      int prediction, slots_to_fill, slots_filled;

      insn = unfilled_slots_base[i];
      if (insn == 0
	  || INSN_DELETED_P (insn)
	  || GET_CODE (insn) != JUMP_INSN
	  || ! condjump_p (insn))
	continue;

      slots_to_fill = num_delay_slots (insn);
      if (slots_to_fill == 0)
	abort ();

      slots_filled = 0;
      target_label = JUMP_LABEL (insn);
      condition = get_branch_condition (insn, target_label);

      if (condition == 0)
	continue;

      /* Get the next active fallthrough and target insns and see if we own
	 them.  Then see whether the branch is likely true.  We don't need
	 to do a lot of this for unconditional branches.  */

      insn_at_target = next_active_insn (target_label);
      own_target = own_thread_p (target_label, target_label, 0);

      if (condition == const_true_rtx)
	{
	  own_fallthrough = 0;
	  fallthrough_insn = 0;
	  prediction = 2;
	}
      else
	{
	  fallthrough_insn = next_active_insn (insn);
	  own_fallthrough = own_thread_p (NEXT_INSN (insn), NULL_RTX, 1);
	  prediction = mostly_true_jump (insn, condition);
	}

      /* If this insn is expected to branch, first try to get insns from our
	 target, then our fallthrough insns.  If it is not expected to branch,
	 try the other order.  */

      if (prediction > 0)
	{
	  delay_list
	    = fill_slots_from_thread (insn, condition, insn_at_target,
				      fallthrough_insn, prediction == 2, 1,
				      own_target, own_fallthrough,
				      slots_to_fill, &slots_filled);

	  if (delay_list == 0 && own_fallthrough)
	    {
	      /* Even though we didn't find anything for delay slots,
		 we might have found a redundant insn which we deleted
		 from the thread that was filled.  So we have to recompute
		 the next insn at the target.  */
	      target_label = JUMP_LABEL (insn);
	      insn_at_target = next_active_insn (target_label);

	      delay_list
		= fill_slots_from_thread (insn, condition, fallthrough_insn,
					  insn_at_target, 0, 0,
					  own_fallthrough, own_target,
					  slots_to_fill, &slots_filled);
	    }
	}
      else
	{
	  if (own_fallthrough)
	    delay_list
	      = fill_slots_from_thread (insn, condition, fallthrough_insn,
					insn_at_target, 0, 0,
					own_fallthrough, own_target,
					slots_to_fill, &slots_filled);

	  if (delay_list == 0)
	    delay_list
	      = fill_slots_from_thread (insn, condition, insn_at_target,
					next_active_insn (insn), 0, 1,
					own_target, own_fallthrough,
					slots_to_fill, &slots_filled);
	}

      if (delay_list)
	unfilled_slots_base[i]
	  = emit_delay_sequence (insn, delay_list,
				 slots_filled, slots_to_fill);

      if (slots_to_fill == slots_filled)
	unfilled_slots_base[i] = 0;

      note_delay_statistics (slots_filled, 1);
    }
}
/* Once we have tried two ways to fill a delay slot, make a pass over the
   code to try to improve the results and to do such things as more jump
   threading.  */

static void
relax_delay_slots (first)
     rtx first;
{
  register rtx insn, next, pat;
  register rtx trial, delay_insn, target_label;

  /* Look at every JUMP_INSN and see if we can improve it.  */
  for (insn = first; insn; insn = next)
    {
      rtx other;

      next = next_active_insn (insn);

      /* If this is a jump insn, see if it now jumps to a jump, jumps to
	 the next insn, or jumps to a label that is not the last of a
	 group of consecutive labels.  */
      if (GET_CODE (insn) == JUMP_INSN
	  && condjump_p (insn)
	  && (target_label = JUMP_LABEL (insn)) != 0)
	{
	  target_label = follow_jumps (target_label);
	  target_label = prev_label (next_active_insn (target_label));

	  if (target_label == 0)
	    target_label = find_end_label ();

	  if (next_active_insn (target_label) == next)
	    {
	      delete_jump (insn);
	      continue;
	    }

	  if (target_label != JUMP_LABEL (insn))
	    reorg_redirect_jump (insn, target_label);

	  /* See if this jump branches around an unconditional jump.
	     If so, invert this jump and point it to the target of the
	     second jump.  */
	  if (next && GET_CODE (next) == JUMP_INSN
	      && (simplejump_p (next) || GET_CODE (PATTERN (next)) == RETURN)
	      && next_active_insn (target_label) == next_active_insn (next)
	      && no_labels_between_p (insn, next))
	    {
	      rtx label = JUMP_LABEL (next);

	      /* Be careful how we do this to avoid deleting code or
		 labels that are momentarily dead.  See similar optimization
		 in jump.c.

		 We also need to ensure we properly handle the case when
		 invert_jump fails.  */

	      ++LABEL_NUSES (target_label);
	      if (label)
		++LABEL_NUSES (label);

	      if (invert_jump (insn, label))
		{
		  delete_insn (next);
		  next = insn;
		}

	      if (label)
		--LABEL_NUSES (label);

	      if (--LABEL_NUSES (target_label) == 0)
		delete_insn (target_label);

	      continue;
	    }
	}

      /* If this is an unconditional jump and the previous insn is a
	 conditional jump, try reversing the condition of the previous
	 insn and swapping our targets.  The next pass might be able to
	 fill the slots.

	 Don't do this if we expect the conditional branch to be true, because
	 we would then be making the more common case longer.  */

      if (GET_CODE (insn) == JUMP_INSN
	  && (simplejump_p (insn) || GET_CODE (PATTERN (insn)) == RETURN)
	  && (other = prev_active_insn (insn)) != 0
	  && condjump_p (other)
	  && no_labels_between_p (other, insn)
	  && 0 < mostly_true_jump (other,
				   get_branch_condition (other,
							 JUMP_LABEL (other))))
	{
	  rtx other_target = JUMP_LABEL (other);
	  target_label = JUMP_LABEL (insn);

	  /* Increment the count of OTHER_TARGET, so it doesn't get deleted
	     as we move the label.  */
	  if (other_target)
	    ++LABEL_NUSES (other_target);

	  if (invert_jump (other, target_label))
	    reorg_redirect_jump (insn, other_target);

	  if (other_target)
	    --LABEL_NUSES (other_target);
	}

      /* Now look only at cases where we have filled a delay slot.  */
      if (GET_CODE (insn) != INSN
	  || GET_CODE (PATTERN (insn)) != SEQUENCE)
	continue;

      pat = PATTERN (insn);
      delay_insn = XVECEXP (pat, 0, 0);

      /* See if the first insn in the delay slot is redundant with some
	 previous insn.  Remove it from the delay slot if so; then set up
	 to reprocess this insn.  */
      if (redundant_insn_p (XVECEXP (pat, 0, 1), delay_insn, 0))
	{
	  delete_from_delay_slot (XVECEXP (pat, 0, 1));
	  next = prev_active_insn (next);
	  continue;
	}

      /* Now look only at the cases where we have a filled JUMP_INSN.  */
      if (GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) != JUMP_INSN
	  || ! condjump_p (XVECEXP (PATTERN (insn), 0, 0)))
	continue;

      target_label = JUMP_LABEL (delay_insn);

      if (target_label)
	{
	  /* If this jump goes to another unconditional jump, thread it, but
	     don't convert a jump into a RETURN here.  */
	  trial = follow_jumps (target_label);
	  trial = prev_label (next_active_insn (trial));
	  if (trial == 0 && target_label != 0)
	    trial = find_end_label ();

	  if (trial != target_label
	      && redirect_with_delay_slots_safe_p (delay_insn, trial, insn))
	    {
	      reorg_redirect_jump (delay_insn, trial);
	      target_label = trial;
	    }

	  /* If the first insn at TARGET_LABEL is redundant with a previous
	     insn, redirect the jump to the following insn and process again.  */
	  trial = next_active_insn (target_label);
	  if (trial && GET_CODE (PATTERN (trial)) != SEQUENCE
	      && redundant_insn_p (trial, insn, 0))
	    {
	      trial = next_active_insn (trial);
	      if (trial == 0)
		target_label = find_end_label ();
	      else
		target_label = get_label_before (trial);
	      reorg_redirect_jump (delay_insn, target_label);
	      next = insn;
	      continue;
	    }

	  /* Similarly, if it is an unconditional jump with one insn in its
	     delay list and that insn is redundant, thread the jump.  */
	  if (trial && GET_CODE (PATTERN (trial)) == SEQUENCE
	      && XVECLEN (PATTERN (trial), 0) == 2
	      && GET_CODE (XVECEXP (PATTERN (trial), 0, 0)) == JUMP_INSN
	      && (simplejump_p (XVECEXP (PATTERN (trial), 0, 0))
		  || GET_CODE (PATTERN (XVECEXP (PATTERN (trial), 0, 0))) == RETURN)
	      && redundant_insn_p (XVECEXP (PATTERN (trial), 0, 1), insn, 0))
	    {
	      target_label = JUMP_LABEL (XVECEXP (PATTERN (trial), 0, 0));
	      if (target_label == 0)
		target_label = find_end_label ();

	      if (redirect_with_delay_slots_safe_p (delay_insn, target_label,
						    insn))
		{
		  reorg_redirect_jump (delay_insn, target_label);
		  next = insn;
		  continue;
		}
	    }
	}

      if (! INSN_ANNULLED_BRANCH_P (delay_insn)
	  && prev_active_insn (target_label) == insn
#ifdef HAVE_cc0
	  /* If the last insn in the delay slot sets CC0 for some insn,
	     various code assumes that it is in a delay slot.  We could
	     put it back where it belonged and delete the register notes,
	     but it doesn't seem worthwhile in this uncommon case.  */
	  && ! find_reg_note (XVECEXP (pat, 0, XVECLEN (pat, 0) - 1),
			      REG_CC_USER, NULL_RTX)
#endif
	  )
	{
	  int i;

	  /* All this insn does is execute its delay list and jump to the
	     following insn.  So delete the jump and just execute the delay
	     list insns.

	     We do this by deleting the INSN containing the SEQUENCE, then
	     re-emitting the insns separately, and then deleting the jump.
	     This allows the count of the jump target to be properly
	     decremented.  */

	  /* Clear the from target bit, since these insns are no longer
	     in delay slots.  */
	  for (i = 0; i < XVECLEN (pat, 0); i++)
	    INSN_FROM_TARGET_P (XVECEXP (pat, 0, i)) = 0;

	  trial = PREV_INSN (insn);
	  delete_insn (insn);
	  emit_insn_after (pat, trial);
	  delete_scheduled_jump (delay_insn);
	  continue;
	}

      /* See if this is an unconditional jump around a single insn which is
	 identical to the one in its delay slot.  In this case, we can just
	 delete the branch and the insn in its delay slot.  */
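
      /* Schematically (my illustration, not from the original sources):

		b	L	; delay slot holds insn X
		insn X		; identical copy, jumped around
	   L:	...

	 The branch always executes X from its slot and skips the inline
	 copy; deleting the branch and its slot simply lets the inline copy
	 execute instead, making the code smaller and one branch cheaper.  */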
      if (next && GET_CODE (next) == INSN
	  && prev_label (next_active_insn (next)) == target_label
	  && simplejump_p (insn)
	  && XVECLEN (pat, 0) == 2
	  && rtx_equal_p (PATTERN (next), PATTERN (XVECEXP (pat, 0, 1))))
	{
	  delete_insn (insn);
	  continue;
	}

      /* See if this jump (with its delay slots) branches around another
	 jump (without delay slots).  If so, invert this jump and point
	 it to the target of the second jump.  We cannot do this for
	 annulled jumps, though.  Again, don't convert a jump to a RETURN
	 here.  */
      if (! INSN_ANNULLED_BRANCH_P (delay_insn)
	  && next && GET_CODE (next) == JUMP_INSN
	  && (simplejump_p (next) || GET_CODE (PATTERN (next)) == RETURN)
	  && next_active_insn (target_label) == next_active_insn (next)
	  && no_labels_between_p (insn, next))
	{
	  rtx label = JUMP_LABEL (next);
	  rtx old_label = JUMP_LABEL (delay_insn);

	  if (label == 0)
	    label = find_end_label ();

	  if (redirect_with_delay_slots_safe_p (delay_insn, label, insn))
	    {
	      /* Be careful how we do this to avoid deleting code or labels
		 that are momentarily dead.  See similar optimization in
		 jump.c  */
	      if (old_label)
		++LABEL_NUSES (old_label);

	      if (invert_jump (delay_insn, label))
		{
		  delete_insn (next);
		  next = insn;
		}

	      if (old_label && --LABEL_NUSES (old_label) == 0)
		delete_insn (old_label);
	      continue;
	    }
	}

      /* If we own the thread opposite the way this insn branches, see if we
	 can merge its delay slots with following insns.  */
      if (INSN_FROM_TARGET_P (XVECEXP (pat, 0, 1))
	  && own_thread_p (NEXT_INSN (insn), 0, 1))
	try_merge_delay_insns (insn, next);
      else if (! INSN_FROM_TARGET_P (XVECEXP (pat, 0, 1))
	       && own_thread_p (target_label, target_label, 0))
	try_merge_delay_insns (insn, next_active_insn (target_label));

      /* If we get here, we haven't deleted INSN.  But we may have deleted
	 NEXT, so recompute it.  */
      next = next_active_insn (insn);
    }
}
3888 /* Look for filled jumps to the end of function label. We can try to convert
3889 them into RETURN insns if the insns in the delay slot are valid for the
3893 make_return_insns (first)
3896 rtx insn, jump_insn, pat;
3897 rtx real_return_label = end_of_function_label;
3900 /* See if there is a RETURN insn in the function other than the one we
3901 made for END_OF_FUNCTION_LABEL. If so, set up anything we can't change
3902 into a RETURN to jump to it. */
3903 for (insn = first; insn; insn = NEXT_INSN (insn))
3904 if (GET_CODE (insn) == JUMP_INSN && GET_CODE (PATTERN (insn)) == RETURN)
3906 real_return_label = get_label_before (insn);
3910 /* Show an extra usage of REAL_RETURN_LABEL so it won't go away if it
3911 was equal to END_OF_FUNCTION_LABEL. */
3912 LABEL_NUSES (real_return_label)++;
3914 /* Clear the list of insns to fill so we can use it. */
3915 obstack_free (&unfilled_slots_obstack, unfilled_firstobj);
3917 for (insn = first; insn; insn = NEXT_INSN (insn))
3921 /* Only look at filled JUMP_INSNs that go to the end of function
3923 if (GET_CODE (insn) != INSN
3924 || GET_CODE (PATTERN (insn)) != SEQUENCE
3925 || GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) != JUMP_INSN
3926 || JUMP_LABEL (XVECEXP (PATTERN (insn), 0, 0)) != end_of_function_label)
3929 pat = PATTERN (insn);
3930 jump_insn = XVECEXP (pat, 0, 0);
3932 /* If we can't make the jump into a RETURN, redirect it to the best
3933 RETURN and go on to the next insn. */
3934 if (! reorg_redirect_jump (jump_insn, NULL_RTX))
3936 reorg_redirect_jump (jump_insn, real_return_label);
3940 /* See if this RETURN can accept the insns current in its delay slot.
3941 It can if it has more or an equal number of slots and the contents
3942 of each is valid. */
3944 flags = get_jump_flags (jump_insn, JUMP_LABEL (jump_insn));
3945 slots = num_delay_slots (jump_insn);
3946 if (slots >= XVECLEN (pat, 0) - 1)
3948 for (i = 1; i < XVECLEN (pat, 0); i++)
3950 #ifdef ANNUL_IFFALSE_SLOTS
3951 (INSN_ANNULLED_BRANCH_P (jump_insn)
3952 && INSN_FROM_TARGET_P (XVECEXP (pat, 0, i)))
3953 ? eligible_for_annul_false (jump_insn, i - 1,
3954 XVECEXP (pat, 0, i), flags) :
3956 #ifdef ANNUL_IFTRUE_SLOTS
3957 (INSN_ANNULLED_BRANCH_P (jump_insn)
3958 && ! INSN_FROM_TARGET_P (XVECEXP (pat, 0, i)))
3959 ? eligible_for_annul_true (jump_insn, i - 1,
3960 XVECEXP (pat, 0, i), flags) :
3962 eligible_for_delay (jump_insn, i -1, XVECEXP (pat, 0, i), flags)))
3968 if (i == XVECLEN (pat, 0))
3971 /* We have to do something with this insn. If it is an unconditional
3972 RETURN, delete the SEQUENCE and output the individual insns,
3973 followed by the RETURN. Then set things up so we try to find
3974 insns for its delay slots, if it needs some. */
      if (GET_CODE (PATTERN (jump_insn)) == RETURN)
	{
	  rtx prev = PREV_INSN (insn);

	  delete_insn (insn);
	  for (i = 1; i < XVECLEN (pat, 0); i++)
	    prev = emit_insn_after (PATTERN (XVECEXP (pat, 0, i)), prev);

	  insn = emit_jump_insn_after (PATTERN (jump_insn), prev);
	  emit_barrier_after (insn);

	  if (slots)
	    obstack_ptr_grow (&unfilled_slots_obstack, insn);
	}
      else
	/* It is probably more efficient to keep this with its current
	   delay slot as a branch to a RETURN.  */
	reorg_redirect_jump (jump_insn, real_return_label);
    }
  /* Now delete REAL_RETURN_LABEL if we never used it.  Then try to fill any
     new delay slots we have created.  */
  if (--LABEL_NUSES (real_return_label) == 0)
    delete_insn (real_return_label);
  fill_simple_delay_slots (first, 1);
  fill_simple_delay_slots (first, 0);
}
#endif /* HAVE_return */

/* Try to find insns to place in delay slots.  */

void
dbr_schedule (first, file)
     rtx first;
     FILE *file;
{
  rtx insn, next, epilogue_insn = 0;
  int i;
  int old_flag_no_peephole = flag_no_peephole;
  /* Execute `final' once in prescan mode to delete any insns that won't be
     used.  Don't let final try to do any peephole optimization--it will
     ruin dataflow information for this pass.  */
  flag_no_peephole = 1;
  final (first, 0, NO_DEBUG, 1, 1);
  flag_no_peephole = old_flag_no_peephole;
  /* If the current function has no insns other than the prologue and
     epilogue, then do not try to fill any delay slots.  */
  if (n_basic_blocks == 0)
    return;
  /* Find the highest INSN_UID and allocate and initialize our map from
     INSN_UID's to position in code.  */
  for (max_uid = 0, insn = first; insn; insn = NEXT_INSN (insn))
    {
      if (INSN_UID (insn) > max_uid)
	max_uid = INSN_UID (insn);
      if (GET_CODE (insn) == NOTE
	  && NOTE_LINE_NUMBER (insn) == NOTE_INSN_EPILOGUE_BEG)
	epilogue_insn = insn;
    }
  uid_to_ruid = (int *) alloca ((max_uid + 1) * sizeof (int));
  for (i = 0, insn = first; insn; i++, insn = NEXT_INSN (insn))
    uid_to_ruid[INSN_UID (insn)] = i;
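  /* For example (illustrative values), if the chain contains insns with
     UIDs 5, 2 and 9 in that order, uid_to_ruid maps 5 -> 0, 2 -> 1 and
     9 -> 2, so comparing uid_to_ruid entries gives a cheap "comes before"
     test on insns.  */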
  /* Initialize the list of insns that need filling.  */
  if (unfilled_firstobj == 0)
    {
      gcc_obstack_init (&unfilled_slots_obstack);
      unfilled_firstobj = (rtx *) obstack_alloc (&unfilled_slots_obstack, 0);
    }
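  /* The obstack serves as a growable array of insn pointers:
     obstack_ptr_grow appends an entry, and freeing back to
     UNFILLED_FIRSTOBJ empties the list while keeping the obstack
     itself available for reuse.  */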
  for (insn = next_active_insn (first); insn; insn = next_active_insn (insn))
    {
      rtx target;

      INSN_ANNULLED_BRANCH_P (insn) = 0;
      INSN_FROM_TARGET_P (insn) = 0;
      /* Skip vector tables.  We can't get attributes for them.  */
      if (GET_CODE (insn) == JUMP_INSN
	  && (GET_CODE (PATTERN (insn)) == ADDR_VEC
	      || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC))
	continue;
      if (num_delay_slots (insn) > 0)
	obstack_ptr_grow (&unfilled_slots_obstack, insn);
      /* Ensure all jumps go to the last of a set of consecutive labels.  */
      if (GET_CODE (insn) == JUMP_INSN && condjump_p (insn)
	  && JUMP_LABEL (insn) != 0
	  && ((target = prev_label (next_active_insn (JUMP_LABEL (insn))))
	      != JUMP_LABEL (insn)))
	redirect_jump (insn, target);
    }
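  /* For instance (an illustrative fragment), given the consecutive labels

	 L1:
	 L2:	insn ...

     the loop above redirects a conditional jump to L1 so that it targets
     L2, letting jumps to any label of the group compare equal when their
     targets are compared.  */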
  /* Indicate what resources are required to be valid at the end of the
     current function.  The condition code never is and memory always is.
     If the frame pointer is needed, it is and so is the stack pointer unless
     EXIT_IGNORE_STACK is non-zero.  If the frame pointer is not needed, the
     stack pointer is.  Registers used to return the function value are
     needed.  Registers holding global variables are needed.  */

  end_of_function_needs.cc = 0;
  end_of_function_needs.memory = 1;
  CLEAR_HARD_REG_SET (end_of_function_needs.regs);
  if (frame_pointer_needed)
    {
      SET_HARD_REG_BIT (end_of_function_needs.regs, FRAME_POINTER_REGNUM);
#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
      SET_HARD_REG_BIT (end_of_function_needs.regs, HARD_FRAME_POINTER_REGNUM);
#endif
#ifdef EXIT_IGNORE_STACK
      if (! EXIT_IGNORE_STACK)
#endif
	SET_HARD_REG_BIT (end_of_function_needs.regs, STACK_POINTER_REGNUM);
    }
  else
    SET_HARD_REG_BIT (end_of_function_needs.regs, STACK_POINTER_REGNUM);
  if (current_function_return_rtx != 0
      && GET_CODE (current_function_return_rtx) == REG)
    mark_referenced_resources (current_function_return_rtx,
			       &end_of_function_needs, 1);
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    if (global_regs[i])
      SET_HARD_REG_BIT (end_of_function_needs.regs, i);
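  /* global_regs[I] is non-zero for hard registers the user has dedicated
     with global register variables; such registers must be assumed live
     on exit from the function.  */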
  /* The registers required to be live at the end of the function are
     represented in the flow information as being dead just prior to
     reaching the end of the function.  For example, the return of a value
     might be represented by a USE of the return register immediately
     followed by an unconditional jump to the return label where the
     return label is the end of the RTL chain.  The end of the RTL chain
     is then taken to mean that the return register is live.

     This sequence is no longer maintained when epilogue instructions are
     added to the RTL chain.  To reconstruct the original meaning, the
     start of the epilogue (NOTE_INSN_EPILOGUE_BEG) is regarded as the
     point where these registers become live (start_of_epilogue_needs).
     If epilogue instructions are present, the registers set by those
     instructions won't have been processed by flow.  Thus, those
     registers are additionally required at the end of the RTL chain
     (end_of_function_needs).  */
  start_of_epilogue_needs = end_of_function_needs;

  while ((epilogue_insn = next_nonnote_insn (epilogue_insn)) != 0)
    mark_set_resources (epilogue_insn, &end_of_function_needs, 0, 1);
  /* Show we haven't computed an end-of-function label yet.  */
  end_of_function_label = 0;
  /* Allocate and initialize the tables used by mark_target_live_regs.  */
  target_hash_table
    = (struct target_info **) alloca ((TARGET_HASH_PRIME
				       * sizeof (struct target_info *)));
  bzero (target_hash_table, TARGET_HASH_PRIME * sizeof (struct target_info *));
  bb_ticks = (int *) alloca (n_basic_blocks * sizeof (int));
  bzero (bb_ticks, n_basic_blocks * sizeof (int));
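  /* bb_ticks[B] is bumped whenever basic block B is modified; cached
     liveness entries in target_hash_table record the tick at which they
     were computed, so stale entries can be detected and recomputed
     (this describes the intended use; see mark_target_live_regs).  */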
  /* Initialize the statistics for this function.  */
  bzero (num_insns_needing_delays, sizeof num_insns_needing_delays);
  bzero (num_filled_delays, sizeof num_filled_delays);
  /* Now do the delay slot filling.  Try everything twice in case earlier
     changes make more slots fillable.  */

  for (reorg_pass_number = 0;
       reorg_pass_number < MAX_REORG_PASSES;
       reorg_pass_number++)
    {
      fill_simple_delay_slots (first, 1);
      fill_simple_delay_slots (first, 0);
      fill_eager_delay_slots (first);
      relax_delay_slots (first);
    }
  /* Delete any USE insns made by update_block; subsequent passes don't need
     them or know how to deal with them.  */
  for (insn = first; insn; insn = next)
    {
      next = NEXT_INSN (insn);

      if (GET_CODE (insn) == INSN && GET_CODE (PATTERN (insn)) == USE
	  && GET_RTX_CLASS (GET_CODE (XEXP (PATTERN (insn), 0))) == 'i')
	next = delete_insn (insn);
    }
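  /* The GET_RTX_CLASS test distinguishes the USEs made by update_block,
     which wrap an entire insn as a dataflow placeholder, from ordinary
     USEs of registers, which must be kept.  */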
  /* If we made an end of function label, indicate that it is now
     safe to delete it by undoing our prior adjustment to LABEL_NUSES.
     If it is now unused, delete it.  */
  if (end_of_function_label && --LABEL_NUSES (end_of_function_label) == 0)
    delete_insn (end_of_function_label);
#ifdef HAVE_return
  if (HAVE_return && end_of_function_label != 0)
    make_return_insns (first);
#endif
  obstack_free (&unfilled_slots_obstack, unfilled_firstobj);

  /* It is not clear why the line below is needed, but it does seem to be.  */
  unfilled_firstobj = (rtx *) obstack_alloc (&unfilled_slots_obstack, 0);
  /* Reposition the prologue and epilogue notes in case we moved the
     prologue/epilogue insns.  */
  reposition_prologue_and_epilogue_notes (first);
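  /* When a dump file is given, the loop below emits per-pass statistics.
     The output might look like (illustrative values only):

	 ;; Reorg pass #1:
	 ;; Reorg function #0
	 ;; 12 insns needing delay slots
	 ;; 9 got 1 delays, 2 got 2 delays

     i.e. for each slot count J with a non-zero histogram entry, how many
     insns had J delay slots filled.  */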
  if (file)
    {
      register int i, j, need_comma;

      for (reorg_pass_number = 0;
	   reorg_pass_number < MAX_REORG_PASSES;
	   reorg_pass_number++)
	{
	  fprintf (file, ";; Reorg pass #%d:\n", reorg_pass_number + 1);
	  for (i = 0; i < NUM_REORG_FUNCTIONS; i++)
	    {
	      need_comma = 0;
	      fprintf (file, ";; Reorg function #%d\n", i);

	      fprintf (file, ";; %d insns needing delay slots\n;; ",
		       num_insns_needing_delays[i][reorg_pass_number]);

	      for (j = 0; j < MAX_DELAY_HISTOGRAM; j++)
		if (num_filled_delays[i][j][reorg_pass_number])
		  {
		    if (need_comma)
		      fprintf (file, ", ");
		    need_comma = 1;
		    fprintf (file, "%d got %d delays",
			     num_filled_delays[i][j][reorg_pass_number], j);
		  }
	      fprintf (file, "\n");
	    }
	}
    }
}
#endif /* DELAY_SLOTS */