1 /* Perform instruction reorganizations for delay slot filling.
2 Copyright (C) 1992, 93, 94, 95, 96, 1997 Free Software Foundation, Inc.
3 Contributed by Richard Kenner (kenner@vlsi1.ultra.nyu.edu).
4 Hacked by Michael Tiemann (tiemann@cygnus.com).
6 This file is part of GNU CC.
8 GNU CC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 2, or (at your option)
11 any later version.
13 GNU CC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GNU CC; see the file COPYING. If not, write to
20 the Free Software Foundation, 59 Temple Place - Suite 330,
21 Boston, MA 02111-1307, USA. */
23 /* Instruction reorganization pass.
25 This pass runs after register allocation and final jump
26 optimization. It should be the last pass to run before peephole.
27 It serves primarily to fill delay slots of insns, typically branch
28 and call insns. Other insns typically involve more complicated
29 interactions of data dependencies and resource constraints, and
30 are better handled by scheduling before register allocation (by the
31 function `schedule_insns').
33 The Branch Penalty is the number of extra cycles that are needed to
34 execute a branch insn. On an ideal machine, branches take a single
35 cycle, and the Branch Penalty is 0. Several RISC machines approach
36 branch delays differently:
38 The MIPS and AMD 29000 have a single branch delay slot. Most insns
39 (except other branches) can be used to fill this slot. When the
40 slot is filled, two insns execute in two cycles, reducing the
41 branch penalty to zero.
43 The Motorola 88000 conditionally exposes its branch delay slot,
44 so code is shorter when it is turned off, but will run faster
45 when useful insns are scheduled there.
47 The IBM ROMP has two forms of branch and call insns, both with and
48 without a delay slot. Much like the 88k, insns not using the delay
49 slot can be shorter (2 bytes vs. 4 bytes), but will run slower.
51 The SPARC always has a branch delay slot, but its effects can be
52 annulled when the branch is not taken. This means that failing to
53 find other sources of insns, we can hoist an insn from the branch
54 target that would only be safe to execute knowing that the branch
55 is taken.
57 The HP-PA always has a branch delay slot. For unconditional branches
58 its effects can be annulled when the branch is taken. The effects
59 of the delay slot in a conditional branch can be nullified for forward
60 taken branches, or for untaken backward branches. This means
61 we can hoist insns from the fall-through path for forward branches or
62 steal insns from the target of backward branches.
64 Three techniques for filling delay slots have been implemented so far:
66 (1) `fill_simple_delay_slots' is the simplest, most efficient way
67 to fill delay slots. This pass first looks for insns which come
68 from before the branch and which are safe to execute after the
69 branch. Then it searches after the insn requiring delay slots or,
70 in the case of a branch, for insns that are after the point at
71 which the branch merges into the fallthrough code, if such a point
72 exists. When such insns are found, the branch penalty decreases
73 and no code expansion takes place.
75 (2) `fill_eager_delay_slots' is more complicated: it is used for
76 scheduling conditional jumps, or for scheduling jumps which cannot
77 be filled using (1). A machine need not have annulled jumps to use
78 this strategy, but it helps (by keeping more options open).
79 `fill_eager_delay_slots' tries to guess the direction the branch
80 will go; if it guesses right 100% of the time, it can reduce the
81 branch penalty as much as `fill_simple_delay_slots' does. If it
82 guesses wrong 100% of the time, it might as well schedule nops (or
83 on the m88k, unexpose the branch slot). When
84 `fill_eager_delay_slots' takes insns from the fall-through path of
85 the jump, usually there is no code expansion; when it takes insns
86 from the branch target, there is code expansion if it is not the
87 only way to reach that target.
89 (3) `relax_delay_slots' uses a set of rules to simplify code that
90 has been reorganized by (1) and (2). It finds cases where
91 a conditional test can be eliminated, jumps can be threaded, extra
92 insns can be eliminated, etc. It is the job of (1) and (2) to do a
93 good job of scheduling locally; `relax_delay_slots' takes care of
94 making the various individual schedules work well together. It is
95 especially tuned to handle the control flow interactions of branch
96 insns. It does nothing for insns with delay slots that do not
97 branch.
99 On machines that use CC0, we are very conservative. We will not make
100 a copy of an insn involving CC0 since we want to maintain a 1-1
101 correspondence between the insn that sets and uses CC0. The insns are
102 allowed to be separated by placing an insn that sets CC0 (but not an insn
103 that uses CC0; we could do this, but it doesn't seem worthwhile) in a
104 delay slot. In that case, we point each insn at the other with REG_CC_USER
105 and REG_CC_SETTER notes. Note that these restrictions affect very few
106 machines because most RISC machines with delay slots will not use CC0
107 (the RT is the only known exception at this point).
111 The Acorn RISC Machine can conditionally execute most insns, so
112 it is profitable to move single insns into a position to execute
113 based on the condition code of the previous insn.
115 The HP-PA can conditionally nullify insns, providing a similar
116 effect to the ARM, differing mostly in which insn is "in charge". */
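/* For example, on a machine with a single delay slot in the MIPS style,
   filling transforms the code roughly as follows (hypothetical assembly,
   for illustration only):

	addu $4,$4,1			b L1
	b L1			=>	addu $4,$4,1	; now in the slot
	nop

   The add executes in the slot and the explicit nop disappears, so the
   branch penalty is hidden.  */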
121 #include "insn-config.h"
122 #include "conditions.h"
123 #include "hard-reg-set.h"
124 #include "basic-block.h"
126 #include "insn-flags.h"
131 #include "insn-attr.h"
133 /* Import list of registers used as spill regs from reload. */
134 extern HARD_REG_SET used_spill_regs;
136 /* Import highest label used in function at end of reload. */
137 extern int max_label_num_after_reload;
142 #define obstack_chunk_alloc xmalloc
143 #define obstack_chunk_free free
145 #ifndef ANNUL_IFTRUE_SLOTS
146 #define eligible_for_annul_true(INSN, SLOTS, TRIAL, FLAGS) 0
148 #ifndef ANNUL_IFFALSE_SLOTS
149 #define eligible_for_annul_false(INSN, SLOTS, TRIAL, FLAGS) 0
152 /* Insns which have delay slots that have not yet been filled. */
154 static struct obstack unfilled_slots_obstack;
155 static rtx *unfilled_firstobj;
157 /* Define macros to refer to the first and last slot containing unfilled
158 insns. These are used because the list may move and its address
159 should be recomputed at each use. */
161 #define unfilled_slots_base \
162 ((rtx *) obstack_base (&unfilled_slots_obstack))
164 #define unfilled_slots_next \
165 ((rtx *) obstack_next_free (&unfilled_slots_obstack))
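/* As a sketch (not part of the original file), the filling passes below
   walk this list with the following idiom.  */
#if 0
  register rtx *pslot;

  for (pslot = unfilled_slots_base; pslot < unfilled_slots_next; pslot++)
    {
      rtx insn = *pslot;

      if (insn == 0 || INSN_DELETED_P (insn))
	continue;
      /* ... try to fill the delay slots of INSN ... */
    }
#endif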
167 /* This structure is used to indicate which hardware resources are set or
168 needed by insns so far. */
172 char memory; /* Insn sets or needs a memory location. */
173 char unch_memory; /* Insn sets or needs an "unchanging" MEM. */
174 char volatil; /* Insn sets or needs a volatile memory loc. */
175 char cc; /* Insn sets or needs the condition codes. */
176 HARD_REG_SET regs; /* Which registers are set or needed. */
179 /* Macro to clear all resources. */
180 #define CLEAR_RESOURCE(RES) \
181 do { (RES)->memory = (RES)->unch_memory = (RES)->volatil = (RES)->cc = 0; \
182 CLEAR_HARD_REG_SET ((RES)->regs); } while (0)
184 /* Indicates what resources are required at the beginning of the epilogue. */
185 static struct resources start_of_epilogue_needs;
187 /* Indicates what resources are required at function end. */
188 static struct resources end_of_function_needs;
190 /* Points to the label before the end of the function. */
191 static rtx end_of_function_label;
193 /* This structure is used to record liveness information at the targets or
194 fallthrough insns of branches. We will most likely need the information
195 at targets again, so save them in a hash table rather than recomputing them
200 int uid; /* INSN_UID of target. */
201 struct target_info *next; /* Next info for same hash bucket. */
202 HARD_REG_SET live_regs; /* Registers live at target. */
203 int block; /* Basic block number containing target. */
204 int bb_tick; /* Generation count of basic block info. */
207 #define TARGET_HASH_PRIME 257
209 /* Define the hash table itself. */
210 static struct target_info **target_hash_table;
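/* Lookup sketch (illustrative; mark_target_live_regs chains buckets
   through `next' in exactly this way):  */
#if 0
  struct target_info *tinfo;

  for (tinfo = target_hash_table[INSN_UID (target) % TARGET_HASH_PRIME];
       tinfo; tinfo = tinfo->next)
    if (tinfo->uid == INSN_UID (target))
      break;
#endif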
212 /* For each basic block, we maintain a generation number of its basic
213 block info, which is updated each time we move an insn from the
214 target of a jump. This is the generation number indexed by block
215 number. */
217 static int *bb_ticks;
219 /* Mapping between INSN_UID's and position in the code since INSN_UID's do
220 not always monotonically increase. */
221 static int *uid_to_ruid;
223 /* Highest valid index in `uid_to_ruid'. */
224 static int max_uid;
226 static void mark_referenced_resources PROTO((rtx, struct resources *, int));
227 static void mark_set_resources PROTO((rtx, struct resources *, int, int));
228 static int stop_search_p PROTO((rtx, int));
229 static int resource_conflicts_p PROTO((struct resources *,
230 struct resources *));
231 static int insn_references_resource_p PROTO((rtx, struct resources *, int));
232 static int insn_sets_resource_p PROTO((rtx, struct resources *, int));
233 static rtx find_end_label PROTO((void));
234 static rtx emit_delay_sequence PROTO((rtx, rtx, int, int));
235 static rtx add_to_delay_list PROTO((rtx, rtx));
236 static void delete_from_delay_slot PROTO((rtx));
237 static void delete_scheduled_jump PROTO((rtx));
238 static void note_delay_statistics PROTO((int, int));
239 static rtx optimize_skip PROTO((rtx));
240 static int get_jump_flags PROTO((rtx, rtx));
241 static int rare_destination PROTO((rtx));
242 static int mostly_true_jump PROTO((rtx, rtx));
243 static rtx get_branch_condition PROTO((rtx, rtx));
244 static int condition_dominates_p PROTO((rtx, rtx));
245 static rtx steal_delay_list_from_target PROTO((rtx, rtx, rtx, rtx,
249 int, int *, int *, rtx *));
250 static rtx steal_delay_list_from_fallthrough PROTO((rtx, rtx, rtx, rtx,
255 static void try_merge_delay_insns PROTO((rtx, rtx));
256 static rtx redundant_insn PROTO((rtx, rtx, rtx));
257 static int own_thread_p PROTO((rtx, rtx, int));
258 static int find_basic_block PROTO((rtx));
259 static void update_block PROTO((rtx, rtx));
260 static int reorg_redirect_jump PROTO((rtx, rtx));
261 static void update_reg_dead_notes PROTO((rtx, rtx));
262 static void fix_reg_dead_note PROTO((rtx, rtx));
263 static void update_reg_unused_notes PROTO((rtx, rtx));
264 static void update_live_status PROTO((rtx, rtx));
265 static rtx next_insn_no_annul PROTO((rtx));
266 static void mark_target_live_regs PROTO((rtx, struct resources *));
267 static void fill_simple_delay_slots PROTO((rtx, int));
268 static rtx fill_slots_from_thread PROTO((rtx, rtx, rtx, rtx, int, int,
269 int, int, int, int *));
270 static void fill_eager_delay_slots PROTO((rtx));
271 static void relax_delay_slots PROTO((rtx));
272 static void make_return_insns PROTO((rtx));
273 static int redirect_with_delay_slots_safe_p PROTO ((rtx, rtx, rtx));
274 static int redirect_with_delay_list_safe_p PROTO ((rtx, rtx, rtx));
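/* To illustrate how the resource primitives below combine, here is a
   hypothetical helper (not part of this file): two insns conflict if
   what one sets overlaps what the other sets or references.  */
#if 0
static int
insns_conflict_p (insn1, insn2)
     rtx insn1, insn2;
{
  struct resources set, needed;

  CLEAR_RESOURCE (&set);
  CLEAR_RESOURCE (&needed);
  mark_set_resources (insn1, &set, 0, 1);
  mark_referenced_resources (insn2, &needed, 1);
  return resource_conflicts_p (&set, &needed);
}
#endif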
276 /* Given X, some rtl, and RES, a pointer to a `struct resources', mark
277 which resources are referenced by the insn. If INCLUDE_DELAYED_EFFECTS
278 is TRUE, resources used by the called routine will be included for
279 CALL_INSNs. */
281 static void
282 mark_referenced_resources (x, res, include_delayed_effects)
284 register struct resources *res;
285 register int include_delayed_effects;
287 register enum rtx_code code = GET_CODE (x);
289 register char *format_ptr;
291 /* Handle leaf items for which we set resource flags. Also, special-case
292 CALL, SET and CLOBBER operators. */
304 if (GET_CODE (SUBREG_REG (x)) != REG)
305 mark_referenced_resources (SUBREG_REG (x), res, 0);
308 int regno = REGNO (SUBREG_REG (x)) + SUBREG_WORD (x);
309 int last_regno = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
310 for (i = regno; i < last_regno; i++)
311 SET_HARD_REG_BIT (res->regs, i);
316 for (i = 0; i < HARD_REGNO_NREGS (REGNO (x), GET_MODE (x)); i++)
317 SET_HARD_REG_BIT (res->regs, REGNO (x) + i);
321 /* If this memory shouldn't change, it really isn't referencing
322 anything. */
323 if (RTX_UNCHANGING_P (x))
324 res->unch_memory = 1;
325 else
326 res->memory = 1;
327 res->volatil = MEM_VOLATILE_P (x);
329 /* Mark registers used to access memory. */
330 mark_referenced_resources (XEXP (x, 0), res, 0);
337 case UNSPEC_VOLATILE:
340 /* Traditional asm's are always volatile. */
345 res->volatil = MEM_VOLATILE_P (x);
347 /* For all ASM_OPERANDS, we must traverse the vector of input operands.
348 We cannot just fall through here since then we would be confused
349 by the ASM_INPUT rtx inside ASM_OPERANDS, which does not indicate
350 a traditional asm, unlike its normal usage. */
352 for (i = 0; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
353 mark_referenced_resources (ASM_OPERANDS_INPUT (x, i), res, 0);
357 /* The first operand will be a (MEM (xxx)) but doesn't really reference
358 memory. The second operand may be referenced, though. */
359 mark_referenced_resources (XEXP (XEXP (x, 0), 0), res, 0);
360 mark_referenced_resources (XEXP (x, 1), res, 0);
364 /* Usually, the first operand of SET is set, not referenced. But
365 registers used to access memory are referenced. SET_DEST is
366 also referenced if it is a ZERO_EXTRACT or SIGN_EXTRACT. */
368 mark_referenced_resources (SET_SRC (x), res, 0);
371 if (GET_CODE (x) == SIGN_EXTRACT || GET_CODE (x) == ZERO_EXTRACT)
372 mark_referenced_resources (x, res, 0);
373 else if (GET_CODE (x) == SUBREG)
375 if (GET_CODE (x) == MEM)
376 mark_referenced_resources (XEXP (x, 0), res, 0);
383 if (include_delayed_effects)
385 /* A CALL references memory, the frame pointer if it exists, the
386 stack pointer, any global registers and any registers given in
387 USE insns immediately in front of the CALL.
389 However, we may have moved some of the parameter loading insns
390 into the delay slot of this CALL. If so, the USE's for them
391 don't count and should be skipped. */
392 rtx insn = PREV_INSN (x);
395 rtx next = NEXT_INSN (x);
398 /* If we are part of a delay slot sequence, point at the SEQUENCE. */
399 if (NEXT_INSN (insn) != x)
401 next = NEXT_INSN (NEXT_INSN (insn));
402 sequence = PATTERN (NEXT_INSN (insn));
403 seq_size = XVECLEN (sequence, 0);
404 if (GET_CODE (sequence) != SEQUENCE)
409 SET_HARD_REG_BIT (res->regs, STACK_POINTER_REGNUM);
410 if (frame_pointer_needed)
412 SET_HARD_REG_BIT (res->regs, FRAME_POINTER_REGNUM);
413 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
414 SET_HARD_REG_BIT (res->regs, HARD_FRAME_POINTER_REGNUM);
418 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
419 if (global_regs[i])
420 SET_HARD_REG_BIT (res->regs, i);
422 /* Check for a NOTE_INSN_SETJMP. If it exists, then we must
423 assume that this call can need any register.
425 This is done to be more conservative about how we handle setjmp.
426 We assume that they both use and set all registers. Using all
427 registers ensures that a register will not be considered dead
428 just because it crosses a setjmp call. A register should be
429 considered dead only if the setjmp call returns non-zero. */
430 if (next && GET_CODE (next) == NOTE
431 && NOTE_LINE_NUMBER (next) == NOTE_INSN_SETJMP)
432 SET_HARD_REG_SET (res->regs);
437 for (link = CALL_INSN_FUNCTION_USAGE (x);
439 link = XEXP (link, 1))
440 if (GET_CODE (XEXP (link, 0)) == USE)
442 for (i = 1; i < seq_size; i++)
444 rtx slot_pat = PATTERN (XVECEXP (sequence, 0, i));
445 if (GET_CODE (slot_pat) == SET
446 && rtx_equal_p (SET_DEST (slot_pat),
447 SET_DEST (XEXP (link, 0))))
451 mark_referenced_resources (SET_DEST (XEXP (link, 0)),
457 /* ... fall through to other INSN processing ... */
462 #ifdef INSN_REFERENCES_ARE_DELAYED
463 if (! include_delayed_effects
464 && INSN_REFERENCES_ARE_DELAYED (x))
468 /* No special processing, just speed up. */
469 mark_referenced_resources (PATTERN (x), res, include_delayed_effects);
473 /* Process each sub-expression and flag what it needs. */
474 format_ptr = GET_RTX_FORMAT (code);
475 for (i = 0; i < GET_RTX_LENGTH (code); i++)
476 switch (*format_ptr++)
479 mark_referenced_resources (XEXP (x, i), res, include_delayed_effects);
483 for (j = 0; j < XVECLEN (x, i); j++)
484 mark_referenced_resources (XVECEXP (x, i, j), res,
485 include_delayed_effects);
490 /* Given X, a part of an insn, and a pointer to a `struct resources', RES,
491 indicate which resources are modified by the insn. If INCLUDE_DELAYED_EFFECTS
492 is nonzero, also mark resources potentially set by the called routine.
494 If IN_DEST is nonzero, it means we are inside a SET. Otherwise,
495 objects are being referenced instead of set.
497 We never mark the insn as modifying the condition code unless it explicitly
498 SETs CC0 even though this is not totally correct. The reason for this is
499 that we require a SET of CC0 to immediately precede the reference to CC0.
500 So if some other insn sets CC0 as a side-effect, we know it cannot affect
501 our computation and thus may be placed in a delay slot. */
504 mark_set_resources (x, res, in_dest, include_delayed_effects)
506 register struct resources *res;
508 int include_delayed_effects;
510 register enum rtx_code code;
512 register char *format_ptr;
530 /* These don't set any resources. */
539 /* Called routine modifies the condition code, memory, any registers
540 that aren't saved across calls, global registers and anything
541 explicitly CLOBBERed immediately after the CALL_INSN. */
543 if (include_delayed_effects)
545 rtx next = NEXT_INSN (x);
546 rtx prev = PREV_INSN (x);
549 res->cc = res->memory = 1;
550 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
551 if (call_used_regs[i] || global_regs[i])
552 SET_HARD_REG_BIT (res->regs, i);
554 /* If X is part of a delay slot sequence, then NEXT should be
555 the first insn after the sequence. */
556 if (NEXT_INSN (prev) != x)
557 next = NEXT_INSN (NEXT_INSN (prev));
559 for (link = CALL_INSN_FUNCTION_USAGE (x);
560 link; link = XEXP (link, 1))
561 if (GET_CODE (XEXP (link, 0)) == CLOBBER)
562 mark_set_resources (SET_DEST (XEXP (link, 0)), res, 1, 0);
564 /* Check for a NOTE_INSN_SETJMP. If it exists, then we must
565 assume that this call can clobber any register. */
566 if (next && GET_CODE (next) == NOTE
567 && NOTE_LINE_NUMBER (next) == NOTE_INSN_SETJMP)
568 SET_HARD_REG_SET (res->regs);
571 /* ... and also what its RTL says it modifies, if anything. */
576 /* An insn consisting of just a CLOBBER (or USE) is just for flow
577 and doesn't actually do anything, so we ignore it. */
579 #ifdef INSN_SETS_ARE_DELAYED
580 if (! include_delayed_effects
581 && INSN_SETS_ARE_DELAYED (x))
586 if (GET_CODE (x) != USE && GET_CODE (x) != CLOBBER)
591 /* If the source of a SET is a CALL, this is actually done by
592 the called routine. So only include it if we are to include the
593 effects of the called routine.
595 mark_set_resources (SET_DEST (x), res,
596 (include_delayed_effects
597 || GET_CODE (SET_SRC (x)) != CALL),
600 mark_set_resources (SET_SRC (x), res, 0, 0);
604 mark_set_resources (XEXP (x, 0), res, 1, 0);
608 for (i = 0; i < XVECLEN (x, 0); i++)
609 if (! (INSN_ANNULLED_BRANCH_P (XVECEXP (x, 0, 0))
610 && INSN_FROM_TARGET_P (XVECEXP (x, 0, i))))
611 mark_set_resources (XVECEXP (x, 0, i), res, 0,
612 include_delayed_effects);
619 mark_set_resources (XEXP (x, 0), res, 1, 0);
623 mark_set_resources (XEXP (x, 0), res, in_dest, 0);
624 mark_set_resources (XEXP (x, 1), res, 0, 0);
625 mark_set_resources (XEXP (x, 2), res, 0, 0);
632 res->unch_memory = RTX_UNCHANGING_P (x);
633 res->volatil = MEM_VOLATILE_P (x);
636 mark_set_resources (XEXP (x, 0), res, 0, 0);
642 if (GET_CODE (SUBREG_REG (x)) != REG)
643 mark_set_resources (SUBREG_REG (x), res,
644 in_dest, include_delayed_effects);
647 int regno = REGNO (SUBREG_REG (x)) + SUBREG_WORD (x);
648 int last_regno = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
649 for (i = regno; i < last_regno; i++)
650 SET_HARD_REG_BIT (res->regs, i);
657 for (i = 0; i < HARD_REGNO_NREGS (REGNO (x), GET_MODE (x)); i++)
658 SET_HARD_REG_BIT (res->regs, REGNO (x) + i);
662 /* Process each sub-expression and flag what it needs. */
663 format_ptr = GET_RTX_FORMAT (code);
664 for (i = 0; i < GET_RTX_LENGTH (code); i++)
665 switch (*format_ptr++)
668 mark_set_resources (XEXP (x, i), res, in_dest, include_delayed_effects);
672 for (j = 0; j < XVECLEN (x, i); j++)
673 mark_set_resources (XVECEXP (x, i, j), res, in_dest,
674 include_delayed_effects);
679 /* Return TRUE if this insn should stop the search for insns to fill delay
680 slots. LABELS_P indicates that labels should terminate the search.
681 In all cases, jumps terminate the search. */
684 stop_search_p (insn, labels_p)
691 switch (GET_CODE (insn))
705 /* OK unless it contains a delay slot or is an `asm' insn of some type.
706 We don't know anything about these. */
707 return (GET_CODE (PATTERN (insn)) == SEQUENCE
708 || GET_CODE (PATTERN (insn)) == ASM_INPUT
709 || asm_noperands (PATTERN (insn)) >= 0);
716 /* Return TRUE if any resources are marked in both RES1 and RES2 or if either
717 resource set contains a volatile memory reference. Otherwise, return FALSE. */
720 resource_conflicts_p (res1, res2)
721 struct resources *res1, *res2;
723 if ((res1->cc && res2->cc) || (res1->memory && res2->memory)
724 || (res1->unch_memory && res2->unch_memory)
725 || res1->volatil || res2->volatil)
729 return (res1->regs & res2->regs) != HARD_CONST (0);
734 for (i = 0; i < HARD_REG_SET_LONGS; i++)
735 if ((res1->regs[i] & res2->regs[i]) != 0)
742 /* Return TRUE if any resource marked in RES, a `struct resources', is
743 referenced by INSN. If INCLUDE_DELAYED_EFFECTS is set, resources used
744 by the called routine are counted as well.
746 We compute this by computing all the resources referenced by INSN and
747 seeing if this conflicts with RES. It might be faster to directly check
748 ourselves, and this is the way it used to work, but it means duplicating
749 a large block of complex code. */
752 insn_references_resource_p (insn, res, include_delayed_effects)
754 register struct resources *res;
755 int include_delayed_effects;
757 struct resources insn_res;
759 CLEAR_RESOURCE (&insn_res);
760 mark_referenced_resources (insn, &insn_res, include_delayed_effects);
761 return resource_conflicts_p (&insn_res, res);
764 /* Return TRUE if INSN modifies resources that are marked in RES.
765 INCLUDE_DELAYED_EFFECTS is set if the actions of the called routine should be
766 included. CC0 is only modified if it is explicitly set; see comments
767 in front of mark_set_resources for details. */
770 insn_sets_resource_p (insn, res, include_delayed_effects)
772 register struct resources *res;
773 int include_delayed_effects;
775 struct resources insn_sets;
777 CLEAR_RESOURCE (&insn_sets);
778 mark_set_resources (insn, &insn_sets, 0, include_delayed_effects);
779 return resource_conflicts_p (&insn_sets, res);
782 /* Find a label at the end of the function or before a RETURN. If there is
790 /* If we found one previously, return it. */
791 if (end_of_function_label)
792 return end_of_function_label;
794 /* Otherwise, see if there is a label at the end of the function. If there
795 is, it must be that RETURN insns aren't needed, so that is our return
796 label and we don't have to do anything else. */
798 insn = get_last_insn ();
799 while (GET_CODE (insn) == NOTE
800 || (GET_CODE (insn) == INSN
801 && (GET_CODE (PATTERN (insn)) == USE
802 || GET_CODE (PATTERN (insn)) == CLOBBER)))
803 insn = PREV_INSN (insn);
805 /* When a target threads its epilogue we might already have a
806 suitable return insn. If so put a label before it for the
807 end_of_function_label. */
808 if (GET_CODE (insn) == BARRIER
809 && GET_CODE (PREV_INSN (insn)) == JUMP_INSN
810 && GET_CODE (PATTERN (PREV_INSN (insn))) == RETURN)
812 rtx temp = PREV_INSN (PREV_INSN (insn));
813 end_of_function_label = gen_label_rtx ();
814 LABEL_NUSES (end_of_function_label) = 0;
816 /* Put the label before any USE insns that may precede the RETURN insn. */
817 while (GET_CODE (temp) == USE)
818 temp = PREV_INSN (temp);
820 emit_label_after (end_of_function_label, temp);
823 else if (GET_CODE (insn) == CODE_LABEL)
824 end_of_function_label = insn;
827 /* Otherwise, make a new label and emit a RETURN and BARRIER,
828 if needed. */
829 end_of_function_label = gen_label_rtx ();
830 LABEL_NUSES (end_of_function_label) = 0;
831 emit_label (end_of_function_label);
835 /* The return we make may have delay slots too. */
836 rtx insn = gen_return ();
837 insn = emit_jump_insn (insn);
839 if (num_delay_slots (insn) > 0)
840 obstack_ptr_grow (&unfilled_slots_obstack, insn);
845 /* Show one additional use for this label so it won't go away until
846 we are done. */
847 ++LABEL_NUSES (end_of_function_label);
849 return end_of_function_label;
852 /* Put INSN and LIST together in a SEQUENCE rtx of LENGTH, and replace
853 the pattern of INSN with the SEQUENCE.
855 Chain the insns so that NEXT_INSN of each insn in the sequence points to
856 the next and NEXT_INSN of the last insn in the sequence points to
857 the first insn after the sequence. Similarly for PREV_INSN. This makes
858 it easier to scan all insns.
860 Returns the SEQUENCE that replaces INSN. */
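/* For instance, a branch with two filled slots ends up wrapped as
   (sketch of the resulting rtl):

	(insn (sequence [(jump_insn ...)	; the branch itself
			 (insn ...)		; delay slot 1
			 (insn ...)]))		; delay slot 2

   with NEXT_INSN and PREV_INSN threaded through the inner insns as
   described above.  */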
863 emit_delay_sequence (insn, list, length, avail)
873 /* Allocate the rtvec to hold the insns and the SEQUENCE. */
874 rtvec seqv = rtvec_alloc (length + 1);
875 rtx seq = gen_rtx_SEQUENCE (VOIDmode, seqv);
876 rtx seq_insn = make_insn_raw (seq);
877 rtx first = get_insns ();
878 rtx last = get_last_insn ();
880 /* Make a copy of the insn having delay slots. */
881 rtx delay_insn = copy_rtx (insn);
883 /* If INSN is followed by a BARRIER, delete the BARRIER since it will only
884 confuse further processing. Update LAST in case it was the last insn.
885 We will put the BARRIER back in later. */
886 if (NEXT_INSN (insn) && GET_CODE (NEXT_INSN (insn)) == BARRIER)
888 delete_insn (NEXT_INSN (insn));
889 last = get_last_insn ();
893 /* Splice our SEQUENCE into the insn stream where INSN used to be. */
894 NEXT_INSN (seq_insn) = NEXT_INSN (insn);
895 PREV_INSN (seq_insn) = PREV_INSN (insn);
898 PREV_INSN (NEXT_INSN (seq_insn)) = seq_insn;
901 NEXT_INSN (PREV_INSN (seq_insn)) = seq_insn;
903 /* Note the calls to set_new_first_and_last_insn must occur after
904 SEQ_INSN has been completely spliced into the insn stream.
906 Otherwise CUR_INSN_UID will get set to an incorrect value because
907 set_new_first_and_last_insn will not find SEQ_INSN in the chain. */
909 set_new_first_and_last_insn (first, seq_insn);
912 set_new_first_and_last_insn (seq_insn, last);
914 /* Build our SEQUENCE and rebuild the insn chain. */
915 XVECEXP (seq, 0, 0) = delay_insn;
916 INSN_DELETED_P (delay_insn) = 0;
917 PREV_INSN (delay_insn) = PREV_INSN (seq_insn);
919 for (li = list; li; li = XEXP (li, 1), i++)
921 rtx tem = XEXP (li, 0);
924 /* Show that this copy of the insn isn't deleted. */
925 INSN_DELETED_P (tem) = 0;
927 XVECEXP (seq, 0, i) = tem;
928 PREV_INSN (tem) = XVECEXP (seq, 0, i - 1);
929 NEXT_INSN (XVECEXP (seq, 0, i - 1)) = tem;
931 /* Remove any REG_DEAD notes because we can't rely on them now
932 that the insn has been moved. */
933 for (note = REG_NOTES (tem); note; note = XEXP (note, 1))
934 if (REG_NOTE_KIND (note) == REG_DEAD)
935 XEXP (note, 0) = const0_rtx;
938 NEXT_INSN (XVECEXP (seq, 0, length)) = NEXT_INSN (seq_insn);
940 /* If the previous insn is a SEQUENCE, update the NEXT_INSN pointer on the
941 last insn in that SEQUENCE to point to us. Similarly for the first
942 insn in the following insn if it is a SEQUENCE. */
944 if (PREV_INSN (seq_insn) && GET_CODE (PREV_INSN (seq_insn)) == INSN
945 && GET_CODE (PATTERN (PREV_INSN (seq_insn))) == SEQUENCE)
946 NEXT_INSN (XVECEXP (PATTERN (PREV_INSN (seq_insn)), 0,
947 XVECLEN (PATTERN (PREV_INSN (seq_insn)), 0) - 1))
950 if (NEXT_INSN (seq_insn) && GET_CODE (NEXT_INSN (seq_insn)) == INSN
951 && GET_CODE (PATTERN (NEXT_INSN (seq_insn))) == SEQUENCE)
952 PREV_INSN (XVECEXP (PATTERN (NEXT_INSN (seq_insn)), 0, 0)) = seq_insn;
954 /* If there used to be a BARRIER, put it back. */
956 emit_barrier_after (seq_insn);
964 /* Add INSN to DELAY_LIST and return the head of the new list. The list must
965 be in the order in which the insns are to be executed. */
968 add_to_delay_list (insn, delay_list)
972 /* If we have an empty list, just make a new list element. If
973 INSN has its block number recorded, clear it since we may
974 be moving the insn to a new block. */
978 struct target_info *tinfo;
980 for (tinfo = target_hash_table[INSN_UID (insn) % TARGET_HASH_PRIME];
981 tinfo; tinfo = tinfo->next)
982 if (tinfo->uid == INSN_UID (insn))
988 return gen_rtx_INSN_LIST (VOIDmode, insn, NULL_RTX);
991 /* Otherwise this must be an INSN_LIST. Add INSN to the end of the
993 XEXP (delay_list, 1) = add_to_delay_list (insn, XEXP (delay_list, 1));
998 /* Delete INSN from the delay slot of the insn that it is in. This may
999 produce an insn without anything in its delay slots. */
1002 delete_from_delay_slot (insn)
1005 rtx trial, seq_insn, seq, prev;
1009 /* We first must find the insn containing the SEQUENCE with INSN in its
1010 delay slot. Do this by finding an insn, TRIAL, where
1011 PREV_INSN (NEXT_INSN (TRIAL)) != TRIAL. */
1014 PREV_INSN (NEXT_INSN (trial)) == trial;
1015 trial = NEXT_INSN (trial))
1018 seq_insn = PREV_INSN (NEXT_INSN (trial));
1019 seq = PATTERN (seq_insn);
1021 /* Create a delay list consisting of all the insns other than the one
1022 we are deleting (unless we were the only one). */
1023 if (XVECLEN (seq, 0) > 2)
1024 for (i = 1; i < XVECLEN (seq, 0); i++)
1025 if (XVECEXP (seq, 0, i) != insn)
1026 delay_list = add_to_delay_list (XVECEXP (seq, 0, i), delay_list);
1028 /* Delete the old SEQUENCE, re-emit the insn that used to have the delay
1029 list, and rebuild the delay list if non-empty. */
1030 prev = PREV_INSN (seq_insn);
1031 trial = XVECEXP (seq, 0, 0);
1032 delete_insn (seq_insn);
1033 add_insn_after (trial, prev);
1035 if (GET_CODE (trial) == JUMP_INSN
1036 && (simplejump_p (trial) || GET_CODE (PATTERN (trial)) == RETURN))
1037 emit_barrier_after (trial);
1039 /* If there are any delay insns, re-emit them. Otherwise clear the
1040 annul flag. */
1041 if (delay_list)
1042 trial = emit_delay_sequence (trial, delay_list, XVECLEN (seq, 0) - 2, 0);
1043 else
1044 INSN_ANNULLED_BRANCH_P (trial) = 0;
1046 INSN_FROM_TARGET_P (insn) = 0;
1048 /* Show we need to fill this insn again. */
1049 obstack_ptr_grow (&unfilled_slots_obstack, trial);
1052 /* Delete INSN, a JUMP_INSN. If it is a conditional jump, we must track down
1053 the insn that sets CC0 for it and delete it too. */
1056 delete_scheduled_jump (insn)
1059 /* Delete the insn that sets cc0 for us. On machines without cc0, we could
1060 delete the insn that sets the condition code, but it is hard to find it.
1061 Since this case is rare anyway, don't bother trying; there would likely
1062 be other insns that became dead anyway, which we wouldn't know to
1063 delete. */
1066 if (reg_mentioned_p (cc0_rtx, insn))
1068 rtx note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
1070 /* If a reg-note was found, it points to an insn to set CC0. This
1071 insn is in the delay list of some other insn. So delete it from
1072 the delay list it was in. */
1075 if (! FIND_REG_INC_NOTE (XEXP (note, 0), NULL_RTX)
1076 && sets_cc0_p (PATTERN (XEXP (note, 0))) == 1)
1077 delete_from_delay_slot (XEXP (note, 0));
1081 /* The insn setting CC0 is our previous insn, but it may be in
1082 a delay slot. It will be the last insn in the delay slot, if
1083 it is. */
1084 rtx trial = previous_insn (insn);
1085 if (GET_CODE (trial) == NOTE)
1086 trial = prev_nonnote_insn (trial);
1087 if (sets_cc0_p (PATTERN (trial)) != 1
1088 || FIND_REG_INC_NOTE (trial, 0))
1090 if (PREV_INSN (NEXT_INSN (trial)) == trial)
1091 delete_insn (trial);
1093 delete_from_delay_slot (trial);
1101 /* Counters for delay-slot filling. */
1103 #define NUM_REORG_FUNCTIONS 2
1104 #define MAX_DELAY_HISTOGRAM 3
1105 #define MAX_REORG_PASSES 2
1107 static int num_insns_needing_delays[NUM_REORG_FUNCTIONS][MAX_REORG_PASSES];
1109 static int num_filled_delays[NUM_REORG_FUNCTIONS][MAX_DELAY_HISTOGRAM+1][MAX_REORG_PASSES];
1111 static int reorg_pass_number;
1114 note_delay_statistics (slots_filled, index)
1115 int slots_filled, index;
1117 num_insns_needing_delays[index][reorg_pass_number]++;
1118 if (slots_filled > MAX_DELAY_HISTOGRAM)
1119 slots_filled = MAX_DELAY_HISTOGRAM;
1120 num_filled_delays[index][slots_filled][reorg_pass_number]++;
1123 #if defined(ANNUL_IFFALSE_SLOTS) || defined(ANNUL_IFTRUE_SLOTS)
1125 /* Optimize the following cases:
1127 1. When a conditional branch skips over only one instruction,
1128 use an annulling branch and put that insn in the delay slot.
1129 Use either a branch that annuls when the condition is true or
1130 invert the test with a branch that annuls when the condition is
1131 false. This saves insns, since otherwise we must copy an insn
1132 into the delay slot.
1134 (orig)           (skip)          (otherwise)
1135 Bcc.n L1         Bcc',a L1       Bcc,a L1'
1136 insn             insn            insn2
1137 L1:              L1:             L1:
1138 insn2            insn2           insn2
1139 L2:              L2:             L2:
1140 insn3            insn3           L1':
1141                                  insn3
1142 2. When a conditional branch skips over only one instruction,
1143 and after that, it unconditionally branches somewhere else,
1144 perform the similar optimization. This saves executing the
1145 second branch in the case where the inverted condition is true.
1152 INSN is a JUMP_INSN.
1154 This should be expanded to skip over N insns, where N is the number
1155 of delay slots required. */
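/* Concretely, case 1 looks like this in SPARC-flavored assembly
   (hypothetical, for illustration only):

	beq L1				bne,a L1
	add %o0,1,%o0		=>	add %o0,1,%o0	! annulled slot
   L1:				   L1:

   The add now sits in the delay slot and is annulled when the inverted
   branch is not taken, so it executes exactly when the original
   fall-through path would have executed it.  */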
1158 optimize_skip (insn)
1161 register rtx trial = next_nonnote_insn (insn);
1162 rtx next_trial = next_active_insn (trial);
1167 flags = get_jump_flags (insn, JUMP_LABEL (insn));
1170 || GET_CODE (trial) != INSN
1171 || GET_CODE (PATTERN (trial)) == SEQUENCE
1172 || recog_memoized (trial) < 0
1173 || (! eligible_for_annul_false (insn, 0, trial, flags)
1174 && ! eligible_for_annul_true (insn, 0, trial, flags)))
1177 /* There are two cases where we are just executing one insn (we assume
1178 here that a branch requires only one insn; this should be generalized
1179 at some point): Where the branch goes around a single insn or where
1180 we have one insn followed by a branch to the same label we branch to.
1181 In both of these cases, inverting the jump and annulling the delay
1182 slot give the same effect in fewer insns. */
1183 if ((next_trial == next_active_insn (JUMP_LABEL (insn)))
1185 && GET_CODE (next_trial) == JUMP_INSN
1186 && JUMP_LABEL (insn) == JUMP_LABEL (next_trial)
1187 && (simplejump_p (next_trial)
1188 || GET_CODE (PATTERN (next_trial)) == RETURN)))
1190 if (eligible_for_annul_false (insn, 0, trial, flags))
1192 if (invert_jump (insn, JUMP_LABEL (insn)))
1193 INSN_FROM_TARGET_P (trial) = 1;
1194 else if (! eligible_for_annul_true (insn, 0, trial, flags))
1198 delay_list = add_to_delay_list (trial, NULL_RTX);
1199 next_trial = next_active_insn (trial);
1200 update_block (trial, trial);
1201 delete_insn (trial);
1203 /* Also, if we are targeting an unconditional
1204 branch, thread our jump to the target of that branch. Don't
1205 change this into a RETURN here, because it may not accept what
1206 we have in the delay slot. We'll fix this up later. */
1207 if (next_trial && GET_CODE (next_trial) == JUMP_INSN
1208 && (simplejump_p (next_trial)
1209 || GET_CODE (PATTERN (next_trial)) == RETURN))
1211 target_label = JUMP_LABEL (next_trial);
1212 if (target_label == 0)
1213 target_label = find_end_label ();
1215 /* Recompute the flags based on TARGET_LABEL since threading
1216 the jump to TARGET_LABEL may change the direction of the
1217 jump (which may change the circumstances in which the
1218 delay slot is nullified). */
1219 flags = get_jump_flags (insn, target_label);
1220 if (eligible_for_annul_true (insn, 0, trial, flags))
1221 reorg_redirect_jump (insn, target_label);
1224 INSN_ANNULLED_BRANCH_P (insn) = 1;
1232 /* Encode and return branch direction and prediction information for
1233 INSN assuming it will jump to LABEL.
1235 Unconditional branches return no direction information and
1236 are predicted as very likely taken. */
1239 get_jump_flags (insn, label)
1244 /* get_jump_flags can be passed any insn with delay slots, these may
1245 be INSNs, CALL_INSNs, or JUMP_INSNs. Only JUMP_INSNs have branch
1246 direction information, and only if they are conditional jumps.
1248 If LABEL is zero, then there is no way to determine the branch
1249 direction. */
1250 if (GET_CODE (insn) == JUMP_INSN
1251 && (condjump_p (insn) || condjump_in_parallel_p (insn))
1252 && INSN_UID (insn) <= max_uid
1254 && INSN_UID (label) <= max_uid)
1256 = (uid_to_ruid[INSN_UID (label)] > uid_to_ruid[INSN_UID (insn)])
1257 ? ATTR_FLAG_forward : ATTR_FLAG_backward;
1258 /* No valid direction information. */
1262 /* If INSN is a conditional branch, call mostly_true_jump to
1263 determine the branch prediction.
1265 Unconditional branches are predicted as very likely taken. */
1266 if (GET_CODE (insn) == JUMP_INSN
1267 && (condjump_p (insn) || condjump_in_parallel_p (insn)))
1271 prediction = mostly_true_jump (insn, get_branch_condition (insn, label));
1275 flags |= (ATTR_FLAG_very_likely | ATTR_FLAG_likely);
1278 flags |= ATTR_FLAG_likely;
1281 flags |= ATTR_FLAG_unlikely;
1284 flags |= (ATTR_FLAG_very_unlikely | ATTR_FLAG_unlikely);
1292 flags |= (ATTR_FLAG_very_likely | ATTR_FLAG_likely);
1297 /* Return 1 if INSN is a destination that will be branched to rarely (the
1298 return point of a function); return 2 if INSN will be branched to very
1299 rarely (a call to a function that doesn't return). Otherwise,
1300 return 0. */
1302 static int
1303 rare_destination (insn)
1309 for (; insn; insn = next)
1311 if (GET_CODE (insn) == INSN && GET_CODE (PATTERN (insn)) == SEQUENCE)
1312 insn = XVECEXP (PATTERN (insn), 0, 0);
1314 next = NEXT_INSN (insn);
1316 switch (GET_CODE (insn))
1321 /* A BARRIER can either be after a JUMP_INSN or a CALL_INSN. We
1322 don't scan past JUMP_INSNs, so any barrier we find here must
1323 have been after a CALL_INSN and hence mean the call doesn't
1327 if (GET_CODE (PATTERN (insn)) == RETURN)
1329 else if (simplejump_p (insn)
1330 && jump_count++ < 10)
1331 next = JUMP_LABEL (insn);
1337 /* If we got here it means we hit the end of the function. So this
1338 is an unlikely destination. */
1343 /* Return truth value of the statement that this branch
1344 is mostly taken. If we think that the branch is extremely likely
1345 to be taken, we return 2. If the branch is slightly more likely to be
1346 taken, return 1. If the branch is slightly less likely to be taken,
1347 return 0 and if the branch is highly unlikely to be taken, return -1.
1349 CONDITION, if non-zero, is the condition that JUMP_INSN is testing. */
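/* For example, a conditional branch whose target is immediately preceded
   by a NOTE_INSN_LOOP_BEG is treated as a loop-back branch and yields 2,
   while a branch whose target is rarer than its fallthrough (see
   rare_destination above) yields 0 or -1.  */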
1352 mostly_true_jump (jump_insn, condition)
1353 rtx jump_insn, condition;
1355 rtx target_label = JUMP_LABEL (jump_insn);
1357 int rare_dest = rare_destination (target_label);
1358 int rare_fallthrough = rare_destination (NEXT_INSN (jump_insn));
1360 /* If branch probabilities are available, then use that number since it
1361 always gives a correct answer. */
1362 if (flag_branch_probabilities)
1364 rtx note = find_reg_note (jump_insn, REG_BR_PROB, 0);
1367 int prob = XINT (note, 0);
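/* Assuming REG_BR_PROB_BASE is 10000 (its usual value), the cutoffs
   below are 9000 (very likely taken), 5000 (likely) and 1000
   (unlikely); anything smaller is considered highly unlikely.  */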
1369 if (prob >= REG_BR_PROB_BASE * 9 / 10)
1371 else if (prob >= REG_BR_PROB_BASE / 2)
1373 else if (prob >= REG_BR_PROB_BASE / 10)
1380 /* If this is a branch outside a loop, it is highly unlikely. */
1381 if (GET_CODE (PATTERN (jump_insn)) == SET
1382 && GET_CODE (SET_SRC (PATTERN (jump_insn))) == IF_THEN_ELSE
1383 && ((GET_CODE (XEXP (SET_SRC (PATTERN (jump_insn)), 1)) == LABEL_REF
1384 && LABEL_OUTSIDE_LOOP_P (XEXP (SET_SRC (PATTERN (jump_insn)), 1)))
1385 || (GET_CODE (XEXP (SET_SRC (PATTERN (jump_insn)), 2)) == LABEL_REF
1386 && LABEL_OUTSIDE_LOOP_P (XEXP (SET_SRC (PATTERN (jump_insn)), 2)))))
1391 /* If this is the test of a loop, it is very likely true. We scan
1392 backwards from the target label. If we find a NOTE_INSN_LOOP_BEG
1393 before the next real insn, we assume the branch is to the top of
1394 the loop. */
1395 for (insn = PREV_INSN (target_label);
1396 insn && GET_CODE (insn) == NOTE;
1397 insn = PREV_INSN (insn))
1398 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG)
1401 /* If this is a jump to the test of a loop, it is likely true. We scan
1402 forwards from the target label. If we find a NOTE_INSN_LOOP_VTOP
1403 before the next real insn, we assume the branch is to the loop branch
1404 test. */
1405 for (insn = NEXT_INSN (target_label);
1406 insn && GET_CODE (insn) == NOTE;
1407 insn = NEXT_INSN (insn))
1408 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_VTOP)
1412 /* Look at the relative rarities of the fallthrough and destination. If
1413 they differ, we can predict the branch that way. */
1415 switch (rare_fallthrough - rare_dest)
1429 /* If we couldn't figure out what this jump was, assume it won't be
1430 taken. This should be rare. */
1434 /* EQ tests are usually false and NE tests are usually true. Also,
1435 most quantities are positive, so we can make the appropriate guesses
1436 about signed comparisons against zero. */
1437 switch (GET_CODE (condition))
1440 /* Unconditional branch. */
1448 if (XEXP (condition, 1) == const0_rtx)
1453 if (XEXP (condition, 1) == const0_rtx)
1458 /* Predict that backward branches are usually taken and forward branches
1459 usually are not. If we don't know whether this is forward or backward,
1460 assume the branch will be taken, since most are. */
1461 return (target_label == 0 || INSN_UID (jump_insn) > max_uid
1462 || INSN_UID (target_label) > max_uid
1463 || (uid_to_ruid[INSN_UID (jump_insn)]
1464 > uid_to_ruid[INSN_UID (target_label)]));
1467 /* Return the condition under which INSN will branch to TARGET. If TARGET
1468 is zero, return the condition under which INSN will return. If INSN is
1469 an unconditional branch, return const_true_rtx. If INSN isn't a simple
1470 type of jump, or it doesn't go to TARGET, return 0. */
1473 get_branch_condition (insn, target)
1477 rtx pat = PATTERN (insn);
1480 if (condjump_in_parallel_p (insn))
1481 pat = XVECEXP (pat, 0, 0);
1483 if (GET_CODE (pat) == RETURN)
1484 return target == 0 ? const_true_rtx : 0;
1486 else if (GET_CODE (pat) != SET || SET_DEST (pat) != pc_rtx)
1489 src = SET_SRC (pat);
1490 if (GET_CODE (src) == LABEL_REF && XEXP (src, 0) == target)
1491 return const_true_rtx;
1493 else if (GET_CODE (src) == IF_THEN_ELSE
1494 && ((target == 0 && GET_CODE (XEXP (src, 1)) == RETURN)
1495 || (GET_CODE (XEXP (src, 1)) == LABEL_REF
1496 && XEXP (XEXP (src, 1), 0) == target))
1497 && XEXP (src, 2) == pc_rtx)
1498 return XEXP (src, 0);
1500 else if (GET_CODE (src) == IF_THEN_ELSE
1501 && ((target == 0 && GET_CODE (XEXP (src, 2)) == RETURN)
1502 || (GET_CODE (XEXP (src, 2)) == LABEL_REF
1503 && XEXP (XEXP (src, 2), 0) == target))
1504 && XEXP (src, 1) == pc_rtx)
1505 return gen_rtx_fmt_ee (reverse_condition (GET_CODE (XEXP (src, 0))),
1506 GET_MODE (XEXP (src, 0)),
1507 XEXP (XEXP (src, 0), 0), XEXP (XEXP (src, 0), 1));
1512 /* Return non-zero if CONDITION is more strict than the condition of
1513 INSN, i.e., if INSN will always branch if CONDITION is true. */
1516 condition_dominates_p (condition, insn)
1520 rtx other_condition = get_branch_condition (insn, JUMP_LABEL (insn));
1521 enum rtx_code code = GET_CODE (condition);
1522 enum rtx_code other_code;
1524 if (rtx_equal_p (condition, other_condition)
1525 || other_condition == const_true_rtx)
1528 else if (condition == const_true_rtx || other_condition == 0)
1531 other_code = GET_CODE (other_condition);
1532 if (GET_RTX_LENGTH (code) != 2 || GET_RTX_LENGTH (other_code) != 2
1533 || ! rtx_equal_p (XEXP (condition, 0), XEXP (other_condition, 0))
1534 || ! rtx_equal_p (XEXP (condition, 1), XEXP (other_condition, 1)))
1537 return comparison_dominates_p (code, other_code);
1540 /* Return non-zero if redirecting JUMP to NEWLABEL does not invalidate
1541 any insns already in the delay slot of JUMP. */
1544 redirect_with_delay_slots_safe_p (jump, newlabel, seq)
1545 rtx jump, newlabel, seq;
1547 int flags, slots, i;
1548 rtx pat = PATTERN (seq);
1550 /* Make sure all the delay slots of this jump would still
1551 be valid after threading the jump. If they are still
1552 valid, then return non-zero. */
1554 flags = get_jump_flags (jump, newlabel);
1555 for (i = 1; i < XVECLEN (pat, 0); i++)
1557 #ifdef ANNUL_IFFALSE_SLOTS
1558 (INSN_ANNULLED_BRANCH_P (jump)
1559 && INSN_FROM_TARGET_P (XVECEXP (pat, 0, i)))
1560 ? eligible_for_annul_false (jump, i - 1,
1561 XVECEXP (pat, 0, i), flags) :
1563 #ifdef ANNUL_IFTRUE_SLOTS
1564 (INSN_ANNULLED_BRANCH_P (jump)
1565 && ! INSN_FROM_TARGET_P (XVECEXP (pat, 0, i)))
1566 ? eligible_for_annul_true (jump, i - 1,
1567 XVECEXP (pat, 0, i), flags) :
1569 eligible_for_delay (jump, i - 1, XVECEXP (pat, 0, i), flags)))
1572 return (i == XVECLEN (pat, 0));
1575 /* Return non-zero if redirecting JUMP to NEWLABEL does not invalidate
1576 any insns we wish to place in the delay slot of JUMP. */
1579 redirect_with_delay_list_safe_p (jump, newlabel, delay_list)
1580 rtx jump, newlabel, delay_list;
1585 /* Make sure all the insns in DELAY_LIST would still be
1586 valid after threading the jump. If they are still
1587 valid, then return non-zero. */
1589 flags = get_jump_flags (jump, newlabel);
1590 for (li = delay_list, i = 0; li; li = XEXP (li, 1), i++)
1592 #ifdef ANNUL_IFFALSE_SLOTS
1593 (INSN_ANNULLED_BRANCH_P (jump)
1594 && INSN_FROM_TARGET_P (XEXP (li, 0)))
1595 ? eligible_for_annul_false (jump, i, XEXP (li, 0), flags) :
1597 #ifdef ANNUL_IFTRUE_SLOTS
1598 (INSN_ANNULLED_BRANCH_P (jump)
1599 && ! INSN_FROM_TARGET_P (XEXP (li, 0)))
1600 ? eligible_for_annul_true (jump, i, XEXP (li, 0), flags) :
1602 eligible_for_delay (jump, i, XEXP (li, 0), flags)))
1605 return (li == NULL);
1609 /* INSN branches to an insn whose pattern SEQ is a SEQUENCE. Given that
1610 the condition tested by INSN is CONDITION and the resources shown in
1611 OTHER_NEEDED are needed after INSN, see whether INSN can take all the insns
1612 from SEQ's delay list, in addition to whatever insns it may execute
1613 (in DELAY_LIST). SETS and NEEDED denote resources already set and
1614 needed while searching for delay slot insns. Return the concatenated
1615 delay list if possible, otherwise, return 0.
1617 SLOTS_TO_FILL is the total number of slots required by INSN, and
1618 PSLOTS_FILLED points to the number filled so far (also the number of
1619 insns in DELAY_LIST). It is updated with the number that have been
1620 filled from the SEQUENCE, if any.
1622 PANNUL_P points to a non-zero value if we already know that we need
1623 to annul INSN. If this routine determines that annulling is needed,
1624 it may set that value non-zero.
1626 PNEW_THREAD points to a location that is to receive the place at which
1627 execution should continue. */
1630 steal_delay_list_from_target (insn, condition, seq, delay_list,
1631 sets, needed, other_needed,
1632 slots_to_fill, pslots_filled, pannul_p,
1634 rtx insn, condition;
1637 struct resources *sets, *needed, *other_needed;
1644 int slots_remaining = slots_to_fill - *pslots_filled;
1645 int total_slots_filled = *pslots_filled;
1646 rtx new_delay_list = 0;
1647 int must_annul = *pannul_p;
1650 /* We can't do anything if there are more delay slots in SEQ than we
1651 can handle, or if we don't know that it will be a taken branch.
1652 We know that it will be a taken branch if it is either an unconditional
1653 branch or a conditional branch with a stricter branch condition.
1655 Also, exit if the branch has more than one set, since then it is computing
1656 other results that can't be ignored, e.g. the HPPA mov&branch instruction.
1657 ??? It may be possible to move other sets into INSN in addition to
1658 moving the instructions in the delay slots. */
1660 if (XVECLEN (seq, 0) - 1 > slots_remaining
1661 || ! condition_dominates_p (condition, XVECEXP (seq, 0, 0))
1662 || ! single_set (XVECEXP (seq, 0, 0)))
1665 for (i = 1; i < XVECLEN (seq, 0); i++)
1667 rtx trial = XVECEXP (seq, 0, i);
1670 if (insn_references_resource_p (trial, sets, 0)
1671 || insn_sets_resource_p (trial, needed, 0)
1672 || insn_sets_resource_p (trial, sets, 0)
1674 /* If TRIAL sets CC0, we can't copy it, so we can't steal this
1675 delay slot. */
1676 || find_reg_note (trial, REG_CC_USER, NULL_RTX)
1678 /* If TRIAL is from the fallthrough code of an annulled branch insn
1679 in SEQ, we cannot use it. */
1680 || (INSN_ANNULLED_BRANCH_P (XVECEXP (seq, 0, 0))
1681 && ! INSN_FROM_TARGET_P (trial)))
1684 /* If this insn was already done (usually in a previous delay slot),
1685 pretend we put it in our delay slot. */
1686 if (redundant_insn (trial, insn, new_delay_list))
1689 /* We will end up re-vectoring this branch, so compute flags
1690 based on jumping to the new label. */
1691 flags = get_jump_flags (insn, JUMP_LABEL (XVECEXP (seq, 0, 0)));
1694 && ((condition == const_true_rtx
1695 || (! insn_sets_resource_p (trial, other_needed, 0)
1696 && ! may_trap_p (PATTERN (trial)))))
1697 ? eligible_for_delay (insn, total_slots_filled, trial, flags)
1699 eligible_for_annul_false (insn, total_slots_filled, trial, flags)))
1701 temp = copy_rtx (trial);
1702 INSN_FROM_TARGET_P (temp) = 1;
1703 new_delay_list = add_to_delay_list (temp, new_delay_list);
1704 total_slots_filled++;
1706 if (--slots_remaining == 0)
1713 /* Show the place to which we will be branching. */
1714 *pnew_thread = next_active_insn (JUMP_LABEL (XVECEXP (seq, 0, 0)));
1716 /* Add any new insns to the delay list and update the count of the
1717 number of slots filled. */
1718 *pslots_filled = total_slots_filled;
1719 *pannul_p = must_annul;
1721 if (delay_list == 0)
1722 return new_delay_list;
1724 for (temp = new_delay_list; temp; temp = XEXP (temp, 1))
1725 delay_list = add_to_delay_list (XEXP (temp, 0), delay_list);
1730 /* Similar to steal_delay_list_from_target except that SEQ is on the
1731 fallthrough path of INSN. Here we only do something if the delay insn
1732 of SEQ is an unconditional branch. In that case we steal its delay slot
1733 for INSN since unconditional branches are much easier to fill. */
1736 steal_delay_list_from_fallthrough (insn, condition, seq,
1737 delay_list, sets, needed, other_needed,
1738 slots_to_fill, pslots_filled, pannul_p)
1739 rtx insn, condition;
1742 struct resources *sets, *needed, *other_needed;
1750 flags = get_jump_flags (insn, JUMP_LABEL (insn));
1752 /* We can't do anything if SEQ's delay insn isn't an
1753 unconditional branch. */
1755 if (! simplejump_p (XVECEXP (seq, 0, 0))
1756 && GET_CODE (PATTERN (XVECEXP (seq, 0, 0))) != RETURN)
1759 for (i = 1; i < XVECLEN (seq, 0); i++)
1761 rtx trial = XVECEXP (seq, 0, i);
1763 /* If TRIAL sets CC0, stealing it will move it too far from the use
1764 of CC0. */
1765 if (insn_references_resource_p (trial, sets, 0)
1766 || insn_sets_resource_p (trial, needed, 0)
1767 || insn_sets_resource_p (trial, sets, 0)
1769 || sets_cc0_p (PATTERN (trial))
1775 /* If this insn was already done, we don't need it. */
1776 if (redundant_insn (trial, insn, delay_list))
1778 delete_from_delay_slot (trial);
1783 && ((condition == const_true_rtx
1784 || (! insn_sets_resource_p (trial, other_needed, 0)
1785 && ! may_trap_p (PATTERN (trial)))))
1786 ? eligible_for_delay (insn, *pslots_filled, trial, flags)
1788 eligible_for_annul_true (insn, *pslots_filled, trial, flags)))
1790 delete_from_delay_slot (trial);
1791 delay_list = add_to_delay_list (trial, delay_list);
1793 if (++(*pslots_filled) == slots_to_fill)
1803 /* Try merging insns starting at THREAD which match exactly the insns in
1804 INSN's delay list.
1806 If all insns were matched and the insn was previously annulling, the
1807 annul bit will be cleared.
1809 For each insn that is merged, if the branch is or will be non-annulling,
1810 we delete the merged insn. */
1813 try_merge_delay_insns (insn, thread)
1816 rtx trial, next_trial;
1817 rtx delay_insn = XVECEXP (PATTERN (insn), 0, 0);
1818 int annul_p = INSN_ANNULLED_BRANCH_P (delay_insn);
1819 int slot_number = 1;
1820 int num_slots = XVECLEN (PATTERN (insn), 0);
1821 rtx next_to_match = XVECEXP (PATTERN (insn), 0, slot_number);
1822 struct resources set, needed;
1823 rtx merged_insns = 0;
1827 flags = get_jump_flags (delay_insn, JUMP_LABEL (delay_insn));
1829 CLEAR_RESOURCE (&needed);
1830 CLEAR_RESOURCE (&set);
1832 /* If this is not an annulling branch, take into account anything needed in
1833 NEXT_TO_MATCH. This prevents two increments from being incorrectly
1834 folded into one. If we are annulling, this would be the correct
1835 thing to do. (The alternative, looking at things set in NEXT_TO_MATCH
1836 will essentially disable this optimization. This method is somewhat of
1837 a kludge, but I don't see a better way.) */
1838 if (! annul_p)
1839 mark_referenced_resources (next_to_match, &needed, 1);
1841 for (trial = thread; !stop_search_p (trial, 1); trial = next_trial)
1843 rtx pat = PATTERN (trial);
1844 rtx oldtrial = trial;
1846 next_trial = next_nonnote_insn (trial);
1848 /* TRIAL must be a CALL_INSN or INSN. Skip USE and CLOBBER. */
1849 if (GET_CODE (trial) == INSN
1850 && (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER))
1853 if (GET_CODE (next_to_match) == GET_CODE (trial)
1855 /* We can't share an insn that sets cc0. */
1856 && ! sets_cc0_p (pat)
1858 && ! insn_references_resource_p (trial, &set, 1)
1859 && ! insn_sets_resource_p (trial, &set, 1)
1860 && ! insn_sets_resource_p (trial, &needed, 1)
1861 && (trial = try_split (pat, trial, 0)) != 0
1862 /* Update next_trial, in case try_split succeeded. */
1863 && (next_trial = next_nonnote_insn (trial))
1864 /* Likewise THREAD. */
1865 && (thread = oldtrial == thread ? trial : thread)
1866 && rtx_equal_p (PATTERN (next_to_match), PATTERN (trial))
1867 /* Have to test this condition if the annul condition is different
1868 from (and less restrictive than) the non-annulling one. */
1869 && eligible_for_delay (delay_insn, slot_number - 1, trial, flags))
1874 update_block (trial, thread);
1875 if (trial == thread)
1876 thread = next_active_insn (thread);
1878 delete_insn (trial);
1879 INSN_FROM_TARGET_P (next_to_match) = 0;
1882 merged_insns = gen_rtx_INSN_LIST (VOIDmode, trial, merged_insns);
1884 if (++slot_number == num_slots)
1887 next_to_match = XVECEXP (PATTERN (insn), 0, slot_number);
1889 mark_referenced_resources (next_to_match, &needed, 1);
1892 mark_set_resources (trial, &set, 0, 1);
1893 mark_referenced_resources (trial, &needed, 1);
1896 /* See if we stopped on a filled insn. If we did, try to see if its
1897 delay slots match. */
1898 if (slot_number != num_slots
1899 && trial && GET_CODE (trial) == INSN
1900 && GET_CODE (PATTERN (trial)) == SEQUENCE
1901 && ! INSN_ANNULLED_BRANCH_P (XVECEXP (PATTERN (trial), 0, 0)))
1903 rtx pat = PATTERN (trial);
1904 rtx filled_insn = XVECEXP (pat, 0, 0);
1906 /* Account for resources set/needed by the filled insn. */
1907 mark_set_resources (filled_insn, &set, 0, 1);
1908 mark_referenced_resources (filled_insn, &needed, 1);
1910 for (i = 1; i < XVECLEN (pat, 0); i++)
1912 rtx dtrial = XVECEXP (pat, 0, i);
1914 if (! insn_references_resource_p (dtrial, &set, 1)
1915 && ! insn_sets_resource_p (dtrial, &set, 1)
1916 && ! insn_sets_resource_p (dtrial, &needed, 1)
1918 && ! sets_cc0_p (PATTERN (dtrial))
1920 && rtx_equal_p (PATTERN (next_to_match), PATTERN (dtrial))
1921 && eligible_for_delay (delay_insn, slot_number - 1, dtrial, flags))
1925 update_block (dtrial, thread);
1926 delete_from_delay_slot (dtrial);
1927 INSN_FROM_TARGET_P (next_to_match) = 0;
1930 merged_insns = gen_rtx_INSN_LIST (SImode, dtrial,
1933 if (++slot_number == num_slots)
1936 next_to_match = XVECEXP (PATTERN (insn), 0, slot_number);
1941 /* If all insns in the delay slot have been matched and we were previously
1942 annulling the branch, we need not do so any more. In that case delete all
1943 the merged insns. Also clear the INSN_FROM_TARGET_P bit of each insn in
1944 the delay list so that we know that it isn't only being used at the
1945 target. */
1946 if (slot_number == num_slots && annul_p)
1948 for (; merged_insns; merged_insns = XEXP (merged_insns, 1))
1950 if (GET_MODE (merged_insns) == SImode)
1952 update_block (XEXP (merged_insns, 0), thread);
1953 delete_from_delay_slot (XEXP (merged_insns, 0));
1957 update_block (XEXP (merged_insns, 0), thread);
1958 delete_insn (XEXP (merged_insns, 0));
1962 INSN_ANNULLED_BRANCH_P (delay_insn) = 0;
1964 for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
1965 INSN_FROM_TARGET_P (XVECEXP (PATTERN (insn), 0, i)) = 0;
1969 /* See if INSN is redundant with an insn in front of TARGET. Often this
1970 is called when INSN is a candidate for a delay slot of TARGET.
1971 DELAY_LIST are insns that will be placed in delay slots of TARGET in front
1972 of INSN. Often INSN will be redundant with an insn in a delay slot of
1973 some previous insn. This happens when we have a series of branches to the
1974 same label; in that case the first insn at the target might want to go
1975 into each of the delay slots.
1977 If we are not careful, this routine can take up a significant fraction
1978 of the total compilation time (4%), but only wins rarely. Hence we
1979 speed this routine up by making two passes. The first pass goes back
1980 until it hits a label and sees if it finds an insn with an identical
1981 pattern. Only in this (relatively rare) event does it check for data conflicts.
1984 We do not split insns we encounter. This could cause us not to find a
1985 redundant insn, but the cost of splitting seems greater than the possible
1986 gain in rare cases. */
1989 redundant_insn (insn, target, delay_list)
1994 rtx target_main = target;
1995 rtx ipat = PATTERN (insn);
1997 struct resources needed, set;
2000 /* If INSN has any REG_UNUSED notes, it can't match anything since we
2001 are allowed to not actually assign to such a register. */
2002 if (find_reg_note (insn, REG_UNUSED, NULL_RTX) != 0)
2005 /* Scan backwards looking for a match. */
2006 for (trial = PREV_INSN (target); trial; trial = PREV_INSN (trial))
2008 if (GET_CODE (trial) == CODE_LABEL)
2011 if (GET_RTX_CLASS (GET_CODE (trial)) != 'i')
2014 pat = PATTERN (trial);
2015 if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
2018 if (GET_CODE (pat) == SEQUENCE)
2020 /* Stop for a CALL and its delay slots because it is difficult to
2021 track its resource needs correctly. */
2022 if (GET_CODE (XVECEXP (pat, 0, 0)) == CALL_INSN)
2025 /* Stop for an INSN or JUMP_INSN with delayed effects and its delay
2026 slots because it is difficult to track its resource needs correctly. */
2029 #ifdef INSN_SETS_ARE_DELAYED
2030 if (INSN_SETS_ARE_DELAYED (XVECEXP (pat, 0, 0)))
2034 #ifdef INSN_REFERENCES_ARE_DELAYED
2035 if (INSN_REFERENCES_ARE_DELAYED (XVECEXP (pat, 0, 0)))
2039 /* See if any of the insns in the delay slot match, updating
2040 resource requirements as we go. */
2041 for (i = XVECLEN (pat, 0) - 1; i > 0; i--)
2042 if (GET_CODE (XVECEXP (pat, 0, i)) == GET_CODE (insn)
2043 && rtx_equal_p (PATTERN (XVECEXP (pat, 0, i)), ipat)
2044 && ! find_reg_note (XVECEXP (pat, 0, i), REG_UNUSED, NULL_RTX))
2047 /* If found a match, exit this loop early. */
2052 else if (GET_CODE (trial) == GET_CODE (insn) && rtx_equal_p (pat, ipat)
2053 && ! find_reg_note (trial, REG_UNUSED, NULL_RTX))
2057 /* If we didn't find an insn that matches, return 0. */
2061 /* See what resources this insn sets and needs. If they overlap, or
2062 if this insn references CC0, it can't be redundant. */
2064 CLEAR_RESOURCE (&needed);
2065 CLEAR_RESOURCE (&set);
2066 mark_set_resources (insn, &set, 0, 1);
2067 mark_referenced_resources (insn, &needed, 1);
2069 /* If TARGET is a SEQUENCE, get the main insn. */
2070 if (GET_CODE (target) == INSN && GET_CODE (PATTERN (target)) == SEQUENCE)
2071 target_main = XVECEXP (PATTERN (target), 0, 0);
2073 if (resource_conflicts_p (&needed, &set)
2075 || reg_mentioned_p (cc0_rtx, ipat)
2077 /* The insn requiring the delay may not set anything needed or set by INSN. */
2079 || insn_sets_resource_p (target_main, &needed, 1)
2080 || insn_sets_resource_p (target_main, &set, 1))
2083 /* Insns we pass may not set either NEEDED or SET, so merge them for simpler tests. */
2085 needed.memory |= set.memory;
2086 needed.unch_memory |= set.unch_memory;
2087 IOR_HARD_REG_SET (needed.regs, set.regs);
2089 /* This insn isn't redundant if it conflicts with an insn that either is
2090 or will be in a delay slot of TARGET. */
2094 if (insn_sets_resource_p (XEXP (delay_list, 0), &needed, 1))
2096 delay_list = XEXP (delay_list, 1);
2099 if (GET_CODE (target) == INSN && GET_CODE (PATTERN (target)) == SEQUENCE)
2100 for (i = 1; i < XVECLEN (PATTERN (target), 0); i++)
2101 if (insn_sets_resource_p (XVECEXP (PATTERN (target), 0, i), &needed, 1))
2104 /* Scan backwards until we reach a label or an insn that uses something
2105 INSN sets or sets something INSN uses or sets. */
2107 for (trial = PREV_INSN (target);
2108 trial && GET_CODE (trial) != CODE_LABEL;
2109 trial = PREV_INSN (trial))
2111 if (GET_CODE (trial) != INSN && GET_CODE (trial) != CALL_INSN
2112 && GET_CODE (trial) != JUMP_INSN)
2115 pat = PATTERN (trial);
2116 if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
2119 if (GET_CODE (pat) == SEQUENCE)
2121 /* If this is a CALL_INSN and its delay slots, it is hard to track
2122 the resource needs properly, so give up. */
2123 if (GET_CODE (XVECEXP (pat, 0, 0)) == CALL_INSN)
2126 /* If this is an INSN or JUMP_INSN with delayed effects, it
2127 is hard to track the resource needs properly, so give up. */
2129 #ifdef INSN_SETS_ARE_DELAYED
2130 if (INSN_SETS_ARE_DELAYED (XVECEXP (pat, 0, 0)))
2134 #ifdef INSN_REFERENCES_ARE_DELAYED
2135 if (INSN_REFERENCES_ARE_DELAYED (XVECEXP (pat, 0, 0)))
2139 /* See if any of the insns in the delay slot match, updating
2140 resource requirements as we go. */
2141 for (i = XVECLEN (pat, 0) - 1; i > 0; i--)
2143 rtx candidate = XVECEXP (pat, 0, i);
2145 /* If an insn will be annulled if the branch is false, it isn't
2146 considered as a possible duplicate insn. */
2147 if (rtx_equal_p (PATTERN (candidate), ipat)
2148 && ! (INSN_ANNULLED_BRANCH_P (XVECEXP (pat, 0, 0))
2149 && INSN_FROM_TARGET_P (candidate)))
2151 /* Show that this insn will be used in the sequel. */
2152 INSN_FROM_TARGET_P (candidate) = 0;
2156 /* Unless this is an annulled insn from the target of a branch,
2157 we must stop if it sets anything needed or set by INSN. */
2158 if ((! INSN_ANNULLED_BRANCH_P (XVECEXP (pat, 0, 0))
2159 || ! INSN_FROM_TARGET_P (candidate))
2160 && insn_sets_resource_p (candidate, &needed, 1))
2165 /* If the insn requiring the delay slot conflicts with INSN, we must stop. */
2167 if (insn_sets_resource_p (XVECEXP (pat, 0, 0), &needed, 1))
2172 /* See if TRIAL is the same as INSN. */
2173 pat = PATTERN (trial);
2174 if (rtx_equal_p (pat, ipat))
2177 /* Can't go any further if TRIAL conflicts with INSN. */
2178 if (insn_sets_resource_p (trial, &needed, 1))
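/* [Editor's note: a minimal standalone C sketch, not part of reorg.c,
   illustrating the two-pass strategy described above redundant_insn:
   the cheap first pass only compares patterns, and the expensive
   conflict check runs only when an identical pattern was actually
   found.  All names here (toy_insn, toy_conflicts, ...) are
   hypothetical.  */

#include <string.h>

struct toy_insn
{
  const char *pattern;		/* stand-in for PATTERN (insn) */
  struct toy_insn *prev;	/* backward chain, like PREV_INSN */
  int is_label;			/* stand-in for the CODE_LABEL test */
};

/* Placeholder for the expensive resource-conflict analysis; the toy
   always answers "no conflict".  */
static int
toy_conflicts (struct toy_insn *trial, struct toy_insn *insn)
{
  (void) trial; (void) insn;
  return 0;
}

/* Return the matching earlier insn, or 0.  Pass 1: walk back to the
   nearest label looking for an identical pattern.  Pass 2 (rare):
   verify that no intervening insn conflicts.  */
static struct toy_insn *
toy_redundant_insn (struct toy_insn *insn, struct toy_insn *target)
{
  struct toy_insn *trial, *found = 0;

  for (trial = target->prev; trial && ! trial->is_label;
       trial = trial->prev)
    if (strcmp (trial->pattern, insn->pattern) == 0)
      {
	found = trial;
	break;
      }

  if (found == 0)
    return 0;

  for (trial = target->prev; trial != found; trial = trial->prev)
    if (toy_conflicts (trial, insn))
      return 0;

  return found;
}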
2186 /* Return 1 if THREAD can only be executed in one way. If LABEL is non-zero,
2187 it is the target of the branch insn being scanned. If ALLOW_FALLTHROUGH
2188 is non-zero, we are allowed to fall into this thread; otherwise, we are not.
2191 If LABEL is used more than once or we pass a label other than LABEL before
2192 finding an active insn, we do not own this thread. */
2195 own_thread_p (thread, label, allow_fallthrough)
2198 int allow_fallthrough;
2203 /* We don't own the function end. */
2207 /* Get the first active insn, or THREAD, if it is an active insn. */
2208 active_insn = next_active_insn (PREV_INSN (thread));
2210 for (insn = thread; insn != active_insn; insn = NEXT_INSN (insn))
2211 if (GET_CODE (insn) == CODE_LABEL
2212 && (insn != label || LABEL_NUSES (insn) != 1))
2215 if (allow_fallthrough)
2218 /* Ensure that we reach a BARRIER before any insn or label. */
2219 for (insn = prev_nonnote_insn (thread);
2220 insn == 0 || GET_CODE (insn) != BARRIER;
2221 insn = prev_nonnote_insn (insn))
2223 || GET_CODE (insn) == CODE_LABEL
2224 || (GET_CODE (insn) == INSN
2225 && GET_CODE (PATTERN (insn)) != USE
2226 && GET_CODE (PATTERN (insn)) != CLOBBER))
2232 /* Find the number of the basic block that starts closest to INSN. Return -1
2233 if we couldn't find such a basic block. */
2236 find_basic_block (insn)
2241 /* Scan backwards to the previous BARRIER. Then see if we can find a
2242 label that starts a basic block. Return the basic block number. */
2244 for (insn = prev_nonnote_insn (insn);
2245 insn && GET_CODE (insn) != BARRIER;
2246 insn = prev_nonnote_insn (insn))
2249 /* The start of the function is basic block zero. */
2253 /* See if any of the upcoming CODE_LABELs start a basic block. If we reach
2254 anything other than a CODE_LABEL or note, we can't find this code. */
2255 for (insn = next_nonnote_insn (insn);
2256 insn && GET_CODE (insn) == CODE_LABEL;
2257 insn = next_nonnote_insn (insn))
2259 for (i = 0; i < n_basic_blocks; i++)
2260 if (insn == basic_block_head[i])
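/* [Editor's note: a standalone C sketch, not part of reorg.c, of the
   barrier-then-label scan find_basic_block performs, on a toy insn
   chain.  For brevity the toy does not skip notes the way
   prev_nonnote_insn/next_nonnote_insn do; every name is
   hypothetical.  */

enum toy_kind { TOY_BARRIER, TOY_LABEL, TOY_OTHER };

struct toy_bb_insn
{
  enum toy_kind kind;
  int block;			/* >= 0 if this label heads a block */
  struct toy_bb_insn *prev, *next;
};

/* Scan backward to a BARRIER (or the function start), then forward
   over consecutive labels looking for one that heads a basic block;
   return the block number, 0 at the function start, or -1.  */
static int
toy_find_basic_block (struct toy_bb_insn *insn)
{
  while (insn && insn->kind != TOY_BARRIER)
    insn = insn->prev;

  if (insn == 0)
    return 0;			/* the function start is block zero */

  for (insn = insn->next; insn && insn->kind == TOY_LABEL;
       insn = insn->next)
    if (insn->block >= 0)
      return insn->block;

  return -1;
}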
2267 /* Called when INSN is being moved from a location near the target of a jump.
2268 We leave a marker of the form (use (INSN)) immediately in front
2269 of WHERE for mark_target_live_regs. These markers will be deleted when reorg finishes.
2272 We used to try to update the live status of registers if WHERE is at
2273 the start of a basic block, but that can't work since we may remove a
2274 BARRIER in relax_delay_slots. */
2277 update_block (insn, where)
2283 /* Ignore if this was in a delay slot and it came from the target of a branch. */
2285 if (INSN_FROM_TARGET_P (insn))
2288 emit_insn_before (gen_rtx_USE (VOIDmode, insn), where);
2290 /* INSN might be making a value live in a block where it didn't use to
2291 be. So recompute liveness information for this block. */
2293 b = find_basic_block (insn);
2298 /* Similar to REDIRECT_JUMP except that we update the BB_TICKS entry for
2299 the basic block containing the jump. */
2302 reorg_redirect_jump (jump, nlabel)
2306 int b = find_basic_block (jump);
2311 return redirect_jump (jump, nlabel);
2314 /* Called when INSN is being moved forward into a delay slot of DELAYED_INSN.
2315 We check every instruction between INSN and DELAYED_INSN for REG_DEAD notes
2316 that reference values used in INSN. If we find one, then we move the
2317 REG_DEAD note to INSN.
2319 This is needed to handle the case where a later insn (after INSN) has a
2320 REG_DEAD note for a register used by INSN, and this later insn subsequently
2321 gets moved before a CODE_LABEL because it is a redundant insn. In this
2322 case, mark_target_live_regs may be confused into thinking the register
2323 is dead because it sees a REG_DEAD note immediately before a CODE_LABEL. */
2326 update_reg_dead_notes (insn, delayed_insn)
2327 rtx insn, delayed_insn;
2331 for (p = next_nonnote_insn (insn); p != delayed_insn;
2332 p = next_nonnote_insn (p))
2333 for (link = REG_NOTES (p); link; link = next)
2335 next = XEXP (link, 1);
2337 if (REG_NOTE_KIND (link) != REG_DEAD
2338 || GET_CODE (XEXP (link, 0)) != REG)
2341 if (reg_referenced_p (XEXP (link, 0), PATTERN (insn)))
2343 /* Move the REG_DEAD note from P to INSN. */
2344 remove_note (p, link);
2345 XEXP (link, 1) = REG_NOTES (insn);
2346 REG_NOTES (insn) = link;
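/* [Editor's note: a standalone C sketch, not part of reorg.c, showing
   the list surgery update_reg_dead_notes performs above: unlink a note
   from one insn's note list and push it onto another's, as the
   remove_note call followed by the two XEXP/REG_NOTES assignments
   does.  toy_note and toy_move_note are hypothetical.  */

struct toy_note
{
  int regno;			/* register a REG_DEAD note names */
  struct toy_note *next;
};

/* Unlink NOTE from *FROM and push it onto *TO; return 0 if NOTE was
   not on *FROM.  */
static int
toy_move_note (struct toy_note **from, struct toy_note **to,
	       struct toy_note *note)
{
  struct toy_note **p;

  for (p = from; *p; p = &(*p)->next)
    if (*p == note)
      {
	*p = note->next;	/* remove_note (p, link) */
	note->next = *to;	/* XEXP (link, 1) = REG_NOTES (insn) */
	*to = note;		/* REG_NOTES (insn) = link */
	return 1;
      }
  return 0;
}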
2351 /* Called when an insn redundant with start_insn is deleted. If there
2352 is a REG_DEAD note for the target of start_insn between start_insn
2353 and stop_insn, then the REG_DEAD note needs to be deleted since the
2354 value no longer dies there.
2356 If the REG_DEAD note isn't deleted, then mark_target_live_regs may be
2357 confused into thinking the register is dead. */
2360 fix_reg_dead_note (start_insn, stop_insn)
2361 rtx start_insn, stop_insn;
2365 for (p = next_nonnote_insn (start_insn); p != stop_insn;
2366 p = next_nonnote_insn (p))
2367 for (link = REG_NOTES (p); link; link = next)
2369 next = XEXP (link, 1);
2371 if (REG_NOTE_KIND (link) != REG_DEAD
2372 || GET_CODE (XEXP (link, 0)) != REG)
2375 if (reg_set_p (XEXP (link, 0), PATTERN (start_insn)))
2377 remove_note (p, link);
2383 /* Delete any REG_UNUSED notes that exist on INSN but not on REDUNDANT_INSN.
2385 This handles the case of udivmodXi4 instructions which optimize their
2386 output depending on whether any REG_UNUSED notes are present.
2387 We must make sure that INSN calculates as many results as REDUNDANT_INSN does. */
2391 update_reg_unused_notes (insn, redundant_insn)
2392 rtx insn, redundant_insn;
2396 for (link = REG_NOTES (insn); link; link = next)
2398 next = XEXP (link, 1);
2400 if (REG_NOTE_KIND (link) != REG_UNUSED
2401 || GET_CODE (XEXP (link, 0)) != REG)
2404 if (! find_regno_note (redundant_insn, REG_UNUSED,
2405 REGNO (XEXP (link, 0))))
2406 remove_note (insn, link);
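/* [Editor's note: a standalone C sketch, not part of reorg.c, of the
   pruning update_reg_unused_notes performs above: drop every note on
   one list whose register has no counterpart on the other.  It reuses
   the hypothetical toy_note type from the earlier sketch; the
   unlinked node is simply leaked here to keep the toy short.  */

static int
toy_has_note (struct toy_note *list, int regno)
{
  for (; list; list = list->next)
    if (list->regno == regno)
      return 1;
  return 0;
}

/* Remove from *NOTES each note whose regno is absent from REDUNDANT,
   mirroring the find_regno_note/remove_note pair above.  */
static void
toy_prune_unused_notes (struct toy_note **notes,
			struct toy_note *redundant)
{
  while (*notes)
    if (! toy_has_note (redundant, (*notes)->regno))
      *notes = (*notes)->next;	/* remove_note; node leaked in the toy */
    else
      notes = &(*notes)->next;
}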
2410 /* Marks registers possibly live at the current place being scanned by
2411 mark_target_live_regs. Used only by the next two functions. */
2413 static HARD_REG_SET current_live_regs;
2415 /* Marks registers for which we have seen a REG_DEAD note but no assignment.
2416 Also only used by the next two functions. */
2418 static HARD_REG_SET pending_dead_regs;
2420 /* Utility function called from mark_target_live_regs via note_stores.
2421 It deadens any CLOBBERed registers and livens any SET registers. */
2424 update_live_status (dest, x)
2428 int first_regno, last_regno;
2431 if (GET_CODE (dest) != REG
2432 && (GET_CODE (dest) != SUBREG || GET_CODE (SUBREG_REG (dest)) != REG))
2435 if (GET_CODE (dest) == SUBREG)
2436 first_regno = REGNO (SUBREG_REG (dest)) + SUBREG_WORD (dest);
2438 first_regno = REGNO (dest);
2440 last_regno = first_regno + HARD_REGNO_NREGS (first_regno, GET_MODE (dest));
2442 if (GET_CODE (x) == CLOBBER)
2443 for (i = first_regno; i < last_regno; i++)
2444 CLEAR_HARD_REG_BIT (current_live_regs, i);
2446 for (i = first_regno; i < last_regno; i++)
2448 SET_HARD_REG_BIT (current_live_regs, i);
2449 CLEAR_HARD_REG_BIT (pending_dead_regs, i);
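/* [Editor's note: a standalone C sketch, not part of reorg.c, of
   update_live_status on a plain 64-bit mask instead of HARD_REG_SET
   (so it assumes fewer than 64 hard registers).  A CLOBBER deadens
   the register range; a SET livens it and cancels any pending death.
   All names are hypothetical.  */

typedef unsigned long long toy_regset;

static toy_regset toy_live, toy_pending_dead;

static void
toy_update_live (int first_regno, int last_regno, int is_clobber)
{
  int i;

  for (i = first_regno; i < last_regno; i++)
    if (is_clobber)
      toy_live &= ~(1ULL << i);		/* CLEAR_HARD_REG_BIT */
    else
      {
	toy_live |= 1ULL << i;		/* SET_HARD_REG_BIT */
	toy_pending_dead &= ~(1ULL << i);
      }
}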
2453 /* Similar to next_insn, but ignores insns in the delay slots of
2454 an annulled branch. */
2457 next_insn_no_annul (insn)
2462 /* If INSN is an annulled branch, skip any insns from the target of the branch. */
2464 if (INSN_ANNULLED_BRANCH_P (insn)
2465 && NEXT_INSN (PREV_INSN (insn)) != insn)
2466 while (INSN_FROM_TARGET_P (NEXT_INSN (insn)))
2467 insn = NEXT_INSN (insn);
2469 insn = NEXT_INSN (insn);
2470 if (insn && GET_CODE (insn) == INSN
2471 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2472 insn = XVECEXP (PATTERN (insn), 0, 0);
2478 /* A subroutine of mark_target_live_regs. Search forward from TARGET
2479 looking for registers that are set before they are used. These are dead.
2480 Stop after passing a few conditional jumps, and/or a small
2481 number of unconditional branches. */
2484 find_dead_or_set_registers (target, res, jump_target, jump_count, set, needed)
2486 struct resources *res;
2489 struct resources set, needed;
2491 HARD_REG_SET scratch;
2496 for (insn = target; insn; insn = next)
2498 rtx this_jump_insn = insn;
2500 next = NEXT_INSN (insn);
2501 switch (GET_CODE (insn))
2504 /* After a label, any pending dead registers that weren't yet
2505 used can be made dead. */
2506 AND_COMPL_HARD_REG_SET (pending_dead_regs, needed.regs);
2507 AND_COMPL_HARD_REG_SET (res->regs, pending_dead_regs);
2508 CLEAR_HARD_REG_SET (pending_dead_regs);
2510 if (CODE_LABEL_NUMBER (insn) < max_label_num_after_reload)
2512 /* All spill registers are dead at a label, so kill all of the
2513 ones that aren't needed also. */
2514 COPY_HARD_REG_SET (scratch, used_spill_regs);
2515 AND_COMPL_HARD_REG_SET (scratch, needed.regs);
2516 AND_COMPL_HARD_REG_SET (res->regs, scratch);
2525 if (GET_CODE (PATTERN (insn)) == USE)
2527 /* If INSN is a USE made by update_block, we care about the
2528 underlying insn. Any registers set by the underlying insn
2529 are live since the insn is being done somewhere else. */
2530 if (GET_RTX_CLASS (GET_CODE (XEXP (PATTERN (insn), 0))) == 'i')
2531 mark_set_resources (XEXP (PATTERN (insn), 0), res, 0, 1);
2533 /* All other USE insns are to be ignored. */
2536 else if (GET_CODE (PATTERN (insn)) == CLOBBER)
2538 else if (GET_CODE (PATTERN (insn)) == SEQUENCE)
2540 /* An unconditional jump can be used to fill the delay slot
2541 of a call, so search for a JUMP_INSN in any position. */
2542 for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
2544 this_jump_insn = XVECEXP (PATTERN (insn), 0, i);
2545 if (GET_CODE (this_jump_insn) == JUMP_INSN)
2551 if (GET_CODE (this_jump_insn) == JUMP_INSN)
2553 if (jump_count++ < 10)
2555 if (simplejump_p (this_jump_insn)
2556 || GET_CODE (PATTERN (this_jump_insn)) == RETURN)
2558 next = JUMP_LABEL (this_jump_insn);
2563 *jump_target = JUMP_LABEL (this_jump_insn);
2566 else if (condjump_p (this_jump_insn)
2567 || condjump_in_parallel_p (this_jump_insn))
2569 struct resources target_set, target_res;
2570 struct resources fallthrough_res;
2572 /* We can handle conditional branches here by following
2573 both paths, and then IOR the results of the two paths
2574 together, which will give us registers that are dead
2575 on both paths. Since this is expensive, we give it
2576 a much higher cost than unconditional branches. The
2577 cost was chosen so that we will follow at most 1
2578 conditional branch. */
2581 if (jump_count >= 10)
2584 mark_referenced_resources (insn, &needed, 1);
2586 /* For an annulled branch, mark_set_resources ignores slots
2587 filled by instructions from the target. This is correct
2588 if the branch is not taken. Since we are following both
2589 paths from the branch, we must also compute correct info
2590 if the branch is taken. We do this by inverting all of
2591 the INSN_FROM_TARGET_P bits, calling mark_set_resources,
2592 and then inverting the INSN_FROM_TARGET_P bits again. */
2594 if (GET_CODE (PATTERN (insn)) == SEQUENCE
2595 && INSN_ANNULLED_BRANCH_P (this_jump_insn))
2597 for (i = 1; i < XVECLEN (PATTERN (insn), 0); i++)
2598 INSN_FROM_TARGET_P (XVECEXP (PATTERN (insn), 0, i))
2599 = ! INSN_FROM_TARGET_P (XVECEXP (PATTERN (insn), 0, i));
2602 mark_set_resources (insn, &target_set, 0, 1);
2604 for (i = 1; i < XVECLEN (PATTERN (insn), 0); i++)
2605 INSN_FROM_TARGET_P (XVECEXP (PATTERN (insn), 0, i))
2606 = ! INSN_FROM_TARGET_P (XVECEXP (PATTERN (insn), 0, i));
2608 mark_set_resources (insn, &set, 0, 1);
2612 mark_set_resources (insn, &set, 0, 1);
2617 COPY_HARD_REG_SET (scratch, target_set.regs);
2618 AND_COMPL_HARD_REG_SET (scratch, needed.regs);
2619 AND_COMPL_HARD_REG_SET (target_res.regs, scratch);
2621 fallthrough_res = *res;
2622 COPY_HARD_REG_SET (scratch, set.regs);
2623 AND_COMPL_HARD_REG_SET (scratch, needed.regs);
2624 AND_COMPL_HARD_REG_SET (fallthrough_res.regs, scratch);
2626 find_dead_or_set_registers (JUMP_LABEL (this_jump_insn),
2627 &target_res, 0, jump_count,
2628 target_set, needed);
2629 find_dead_or_set_registers (next,
2630 &fallthrough_res, 0, jump_count,
2632 IOR_HARD_REG_SET (fallthrough_res.regs, target_res.regs);
2633 AND_HARD_REG_SET (res->regs, fallthrough_res.regs);
2641 /* Don't try this optimization if we expired our jump count
2642 above, since that would mean there may be an infinite loop
2643 in the function being compiled. */
2649 mark_referenced_resources (insn, &needed, 1);
2650 mark_set_resources (insn, &set, 0, 1);
2652 COPY_HARD_REG_SET (scratch, set.regs);
2653 AND_COMPL_HARD_REG_SET (scratch, needed.regs);
2654 AND_COMPL_HARD_REG_SET (res->regs, scratch);
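/* [Editor's note: a standalone C sketch, not part of reorg.c, of the
   conditional-branch case above: follow both arms, then IOR the two
   live sets so that a register counts as dead only if it is dead on
   both paths, matching the IOR_HARD_REG_SET/AND_HARD_REG_SET pair in
   the code.  toy_regset is the hypothetical 64-bit mask from the
   earlier sketch.  */

static toy_regset
toy_live_after_condbranch (toy_regset live_before,
			   toy_regset dead_on_target_path,
			   toy_regset dead_on_fallthrough_path)
{
  toy_regset target_res = live_before & ~dead_on_target_path;
  toy_regset fall_res = live_before & ~dead_on_fallthrough_path;

  /* Live on either path => still live; dead on both => dead.  */
  return live_before & (target_res | fall_res);
}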
2660 /* Set the resources that are live at TARGET.
2662 If TARGET is zero, we refer to the end of the current function and can
2663 return our precomputed value.
2665 Otherwise, we try to find out what is live by consulting the basic block
2666 information. This is tricky, because we must consider the actions of
2667 reload and jump optimization, which occur after the basic block information
2668 has been computed.
2670 Accordingly, we proceed as follows:
2672 We find the previous BARRIER and look at all immediately following labels
2673 (with no intervening active insns) to see if any of them start a basic
2674 block. If we hit the start of the function first, we use block 0.
2676 Once we have found a basic block and a corresponding first insn, we can
2677 accurately compute the live status from basic_block_live_regs and
2678 reg_renumber. (By starting at a label following a BARRIER, we are immune
2679 to actions taken by reload and jump.) Then we scan all insns between
2680 that point and our target. For each CLOBBER (or for call-clobbered regs
2681 when we pass a CALL_INSN), mark the appropriate registers as dead. For
2682 a SET, mark them as live.
2684 We have to be careful when using REG_DEAD notes because they are not
2685 updated by such things as find_equiv_reg. So keep track of registers
2686 marked as dead that haven't been assigned to, and mark them dead at the
2687 next CODE_LABEL since reload and jump won't propagate values across labels.
2689 If we cannot find the start of a basic block (should be a very rare
2690 case, if it can happen at all), mark everything as potentially live.
2692 Next, scan forward from TARGET looking for things set or clobbered
2693 before they are used. These are not live.
2695 Because we can be called many times on the same target, save our results
2696 in a hash table indexed by INSN_UID. */
2699 mark_target_live_regs (target, res)
2701 struct resources *res;
2705 struct target_info *tinfo;
2709 HARD_REG_SET scratch;
2710 struct resources set, needed;
2713 /* Handle end of function. */
2716 *res = end_of_function_needs;
2720 /* We have to assume memory is needed, but the CC isn't. */
2722 res->volatil = res->unch_memory = 0;
2725 /* See if we have computed this value already. */
2726 for (tinfo = target_hash_table[INSN_UID (target) % TARGET_HASH_PRIME];
2727 tinfo; tinfo = tinfo->next)
2728 if (tinfo->uid == INSN_UID (target))
2731 /* Start by getting the basic block number. If we have saved information,
2732 we can get it from there unless the insn at the start of the basic block
2733 has been deleted. */
2734 if (tinfo && tinfo->block != -1
2735 && ! INSN_DELETED_P (basic_block_head[tinfo->block]))
2739 b = find_basic_block (target);
2743 /* If the information is up-to-date, use it. Otherwise, we will update it below. */
2745 if (b == tinfo->block && b != -1 && tinfo->bb_tick == bb_ticks[b])
2747 COPY_HARD_REG_SET (res->regs, tinfo->live_regs);
2753 /* Allocate a place to put our results and chain it into the hash table. */
2755 tinfo = (struct target_info *) oballoc (sizeof (struct target_info));
2756 tinfo->uid = INSN_UID (target);
2758 tinfo->next = target_hash_table[INSN_UID (target) % TARGET_HASH_PRIME];
2759 target_hash_table[INSN_UID (target) % TARGET_HASH_PRIME] = tinfo;
2762 CLEAR_HARD_REG_SET (pending_dead_regs);
2764 /* If we found a basic block, get the live registers from it and update
2765 them with anything set or killed between its start and the insn before
2766 TARGET. Otherwise, we must assume everything is live. */
2769 regset regs_live = basic_block_live_at_start[b];
2772 rtx start_insn, stop_insn;
2774 /* Compute hard regs live at start of block -- this is the real hard regs
2775 marked live, plus live pseudo regs that have been renumbered to hard regs. */
2778 REG_SET_TO_HARD_REG_SET (current_live_regs, regs_live);
2780 EXECUTE_IF_SET_IN_REG_SET
2781 (regs_live, FIRST_PSEUDO_REGISTER, i,
2783 if ((regno = reg_renumber[i]) >= 0)
2784 for (j = regno;
2785 j < regno + HARD_REGNO_NREGS (regno,
2786 PSEUDO_REGNO_MODE (i));
2788 SET_HARD_REG_BIT (current_live_regs, j);
2791 /* Get starting and ending insn, handling the case where each might be a SEQUENCE. */
2793 start_insn = (b == 0 ? get_insns () : basic_block_head[b]);
2796 if (GET_CODE (start_insn) == INSN
2797 && GET_CODE (PATTERN (start_insn)) == SEQUENCE)
2798 start_insn = XVECEXP (PATTERN (start_insn), 0, 0);
2800 if (GET_CODE (stop_insn) == INSN
2801 && GET_CODE (PATTERN (stop_insn)) == SEQUENCE)
2802 stop_insn = next_insn (PREV_INSN (stop_insn));
2804 for (insn = start_insn; insn != stop_insn;
2805 insn = next_insn_no_annul (insn))
2808 rtx real_insn = insn;
2810 /* If this insn is from the target of a branch, it isn't going to
2811 be used in the sequel. If it is used in both cases, this
2812 test will not be true. */
2813 if (INSN_FROM_TARGET_P (insn))
2816 /* If this insn is a USE made by update_block, we care about the underlying insn. */
2818 if (GET_CODE (insn) == INSN && GET_CODE (PATTERN (insn)) == USE
2819 && GET_RTX_CLASS (GET_CODE (XEXP (PATTERN (insn), 0))) == 'i')
2820 real_insn = XEXP (PATTERN (insn), 0);
2822 if (GET_CODE (real_insn) == CALL_INSN)
2824 /* CALL clobbers all call-used regs that aren't fixed except
2825 sp, ap, and fp. Do this before setting the result of the call live. */
2827 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2828 if (call_used_regs[i]
2829 && i != STACK_POINTER_REGNUM && i != FRAME_POINTER_REGNUM
2830 && i != ARG_POINTER_REGNUM
2831 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
2832 && i != HARD_FRAME_POINTER_REGNUM
2834 #if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
2835 && ! (i == ARG_POINTER_REGNUM && fixed_regs[i])
2837 #ifdef PIC_OFFSET_TABLE_REGNUM
2838 && ! (i == PIC_OFFSET_TABLE_REGNUM && flag_pic)
2841 CLEAR_HARD_REG_BIT (current_live_regs, i);
2843 /* A CALL_INSN sets any global register live, since it may
2844 have been modified by the call. */
2845 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2847 SET_HARD_REG_BIT (current_live_regs, i);
2850 /* Mark anything killed in an insn to be deadened at the next
2851 label. Ignore USE insns; the only REG_DEAD notes will be for
2852 parameters. But they might be early. A CALL_INSN will usually
2853 clobber registers used for parameters. It isn't worth bothering
2854 with the unlikely case when it won't. */
2855 if ((GET_CODE (real_insn) == INSN
2856 && GET_CODE (PATTERN (real_insn)) != USE
2857 && GET_CODE (PATTERN (real_insn)) != CLOBBER)
2858 || GET_CODE (real_insn) == JUMP_INSN
2859 || GET_CODE (real_insn) == CALL_INSN)
2861 for (link = REG_NOTES (real_insn); link; link = XEXP (link, 1))
2862 if (REG_NOTE_KIND (link) == REG_DEAD
2863 && GET_CODE (XEXP (link, 0)) == REG
2864 && REGNO (XEXP (link, 0)) < FIRST_PSEUDO_REGISTER)
2866 int first_regno = REGNO (XEXP (link, 0));
2868 int last_regno = (first_regno
2869 + HARD_REGNO_NREGS (first_regno,
2870 GET_MODE (XEXP (link, 0))));
2872 for (i = first_regno; i < last_regno; i++)
2873 SET_HARD_REG_BIT (pending_dead_regs, i);
2876 note_stores (PATTERN (real_insn), update_live_status);
2878 /* If any registers were unused after this insn, kill them.
2879 These notes will always be accurate. */
2880 for (link = REG_NOTES (real_insn); link; link = XEXP (link, 1))
2881 if (REG_NOTE_KIND (link) == REG_UNUSED
2882 && GET_CODE (XEXP (link, 0)) == REG
2883 && REGNO (XEXP (link, 0)) < FIRST_PSEUDO_REGISTER)
2885 int first_regno = REGNO (XEXP (link, 0));
2887 int last_regno = (first_regno
2888 + HARD_REGNO_NREGS (first_regno,
2889 GET_MODE (XEXP (link, 0))));
2891 for (i = first_regno; i < last_regno; i++)
2892 CLEAR_HARD_REG_BIT (current_live_regs, i);
2896 else if (GET_CODE (real_insn) == CODE_LABEL)
2898 /* A label clobbers the pending dead registers since neither
2899 reload nor jump will propagate a value across a label. */
2900 AND_COMPL_HARD_REG_SET (current_live_regs, pending_dead_regs);
2901 CLEAR_HARD_REG_SET (pending_dead_regs);
2904 /* The beginning of the epilogue corresponds to the end of the
2905 RTL chain when there are no epilogue insns. Certain resources
2906 are implicitly required at that point. */
2907 else if (GET_CODE (real_insn) == NOTE
2908 && NOTE_LINE_NUMBER (real_insn) == NOTE_INSN_EPILOGUE_BEG)
2909 IOR_HARD_REG_SET (current_live_regs, start_of_epilogue_needs.regs);
2912 COPY_HARD_REG_SET (res->regs, current_live_regs);
2914 tinfo->bb_tick = bb_ticks[b];
2917 /* We didn't find the start of a basic block. Assume everything
2918 in use. This should happen only extremely rarely. */
2919 SET_HARD_REG_SET (res->regs);
2921 CLEAR_RESOURCE (&set);
2922 CLEAR_RESOURCE (&needed);
2924 jump_insn = find_dead_or_set_registers (target, res, &jump_target, 0,
2927 /* If we hit an unconditional branch, we have another way of finding out
2928 what is live: we can see what is live at the branch target and include
2929 anything used but not set before the branch. The only things that are
2930 live are those that are live using the above test and the test below. */
2934 struct resources new_resources;
2935 rtx stop_insn = next_active_insn (jump_insn);
2937 mark_target_live_regs (next_active_insn (jump_target), &new_resources);
2938 CLEAR_RESOURCE (&set);
2939 CLEAR_RESOURCE (&needed);
2941 /* Include JUMP_INSN in the needed registers. */
2942 for (insn = target; insn != stop_insn; insn = next_active_insn (insn))
2944 mark_referenced_resources (insn, &needed, 1);
2946 COPY_HARD_REG_SET (scratch, needed.regs);
2947 AND_COMPL_HARD_REG_SET (scratch, set.regs);
2948 IOR_HARD_REG_SET (new_resources.regs, scratch);
2950 mark_set_resources (insn, &set, 0, 1);
2953 AND_HARD_REG_SET (res->regs, new_resources.regs);
2956 COPY_HARD_REG_SET (tinfo->live_regs, res->regs);
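/* [Editor's note: a standalone C sketch, not part of reorg.c, of the
   memoization scheme described above: per-insn results cached in a
   chained hash table keyed by a uid, the way mark_target_live_regs
   uses target_hash_table and TARGET_HASH_PRIME.  The names and the
   prime below are hypothetical.  */

#include <stdlib.h>

#define TOY_HASH_PRIME 257

struct toy_tinfo
{
  int uid;
  unsigned long long live_regs;	/* cached result */
  struct toy_tinfo *next;	/* chain for colliding uids */
};

static struct toy_tinfo *toy_hash[TOY_HASH_PRIME];

/* Return the cache entry for UID, creating an empty one on a miss.  */
static struct toy_tinfo *
toy_lookup_or_add (int uid)
{
  struct toy_tinfo *t;

  for (t = toy_hash[uid % TOY_HASH_PRIME]; t; t = t->next)
    if (t->uid == uid)
      return t;

  t = (struct toy_tinfo *) malloc (sizeof *t);
  if (t == 0)
    abort ();
  t->uid = uid;
  t->live_regs = 0;
  t->next = toy_hash[uid % TOY_HASH_PRIME];
  toy_hash[uid % TOY_HASH_PRIME] = t;
  return t;
}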
2959 /* Scan a function looking for insns that need a delay slot and find insns to
2960 put into the delay slot.
2962 NON_JUMPS_P is non-zero if we are to only try to fill non-jump insns (such
2963 as calls). We do these first since we don't want jump insns (that are
2964 easier to fill) to get the only insns that could be used for non-jump insns.
2965 When it is zero, only try to fill JUMP_INSNs.
2967 When slots are filled in this manner, the insns (including the
2968 delay_insn) are put together in a SEQUENCE rtx. In this fashion,
2969 it is possible to tell whether a delay slot has really been filled
2970 or not. `final' knows how to deal with this, by communicating
2971 through FINAL_SEQUENCE. */
2974 fill_simple_delay_slots (first, non_jumps_p)
2978 register rtx insn, pat, trial, next_trial;
2980 int num_unfilled_slots = unfilled_slots_next - unfilled_slots_base;
2981 struct resources needed, set;
2982 int slots_to_fill, slots_filled;
2985 for (i = 0; i < num_unfilled_slots; i++)
2988 /* Get the next insn to fill. If it has already had any slots assigned,
2989 we can't do anything with it. Maybe we'll improve this later. */
2991 insn = unfilled_slots_base[i];
2993 || INSN_DELETED_P (insn)
2994 || (GET_CODE (insn) == INSN
2995 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2996 || (GET_CODE (insn) == JUMP_INSN && non_jumps_p)
2997 || (GET_CODE (insn) != JUMP_INSN && ! non_jumps_p))
3000 if (GET_CODE (insn) == JUMP_INSN)
3001 flags = get_jump_flags (insn, JUMP_LABEL (insn));
3003 flags = get_jump_flags (insn, NULL_RTX);
3004 slots_to_fill = num_delay_slots (insn);
3005 if (slots_to_fill == 0)
3008 /* This insn needs, or can use, some delay slots. SLOTS_TO_FILL
3009 says how many. After initialization, first try optimizing
3011 call _foo          call _foo
3012 nop                add %o7,.-L1,%o7
3013 b,a L1
3014 nop
3016 If this case applies, the delay slot of the call is filled with
3017 the unconditional jump. This is done first to avoid having the
3018 delay slot of the call filled in the backward scan. Also, since
3019 the unconditional jump is likely to also have a delay slot, that
3020 insn must exist when it is subsequently scanned.
3022 This is tried on each insn with delay slots as some machines
3023 have insns which perform calls, but are not represented as CALL_INSNs. */
3029 if ((trial = next_active_insn (insn))
3030 && GET_CODE (trial) == JUMP_INSN
3031 && simplejump_p (trial)
3032 && eligible_for_delay (insn, slots_filled, trial, flags)
3033 && no_labels_between_p (insn, trial))
3037 delay_list = add_to_delay_list (trial, delay_list);
3039 /* TRIAL may have had its delay slot filled, then unfilled. When
3040 the delay slot is unfilled, TRIAL is placed back on the unfilled
3041 slots obstack. Unfortunately, it is placed on the end of the
3042 obstack, not in its original location. Therefore, we must search
3043 from entry i + 1 to the end of the unfilled slots obstack to
3044 try and find TRIAL. */
3045 tmp = &unfilled_slots_base[i + 1];
3046 while (*tmp != trial && tmp != unfilled_slots_next)
3049 /* Remove the unconditional jump from consideration for delay slot
3050 filling and unthread it. */
3054 rtx next = NEXT_INSN (trial);
3055 rtx prev = PREV_INSN (trial);
3057 NEXT_INSN (prev) = next;
3059 PREV_INSN (next) = prev;
3063 /* Now, scan backwards from the insn to search for a potential
3064 delay-slot candidate. Stop searching when a label or jump is hit.
3066 For each candidate, if it is to go into the delay slot (moved
3067 forward in execution sequence), it must not need or set any resources
3068 that were set by later insns and must not set any resources that
3069 are needed for those insns.
3071 The delay slot insn itself sets resources unless it is a call
3072 (in which case the called routine, not the insn itself, is doing the set). */
3075 if (slots_filled < slots_to_fill)
3077 CLEAR_RESOURCE (&needed);
3078 CLEAR_RESOURCE (&set);
3079 mark_set_resources (insn, &set, 0, 0);
3080 mark_referenced_resources (insn, &needed, 0);
3082 for (trial = prev_nonnote_insn (insn); ! stop_search_p (trial, 1);
3085 next_trial = prev_nonnote_insn (trial);
3087 /* This must be an INSN or CALL_INSN. */
3088 pat = PATTERN (trial);
3090 /* USE and CLOBBER at this level are just for flow; ignore them. */
3091 if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
3094 /* Check for resource conflict first, to avoid unnecessary splitting. */
3096 if (! insn_references_resource_p (trial, &set, 1)
3097 && ! insn_sets_resource_p (trial, &set, 1)
3098 && ! insn_sets_resource_p (trial, &needed, 1)
3100 /* Can't separate set of cc0 from its use. */
3101 && ! (reg_mentioned_p (cc0_rtx, pat)
3102 && ! sets_cc0_p (pat))
3106 trial = try_split (pat, trial, 1);
3107 next_trial = prev_nonnote_insn (trial);
3108 if (eligible_for_delay (insn, slots_filled, trial, flags))
3110 /* In this case, we are searching backward, so if we
3111 find insns to put on the delay list, we want
3112 to put them at the head, rather than the
3113 tail, of the list. */
3115 update_reg_dead_notes (trial, insn);
3116 delay_list = gen_rtx_INSN_LIST (VOIDmode, trial, delay_list);
3118 update_block (trial, trial);
3119 delete_insn (trial);
3120 if (slots_to_fill == ++slots_filled)
3126 mark_set_resources (trial, &set, 0, 1);
3127 mark_referenced_resources (trial, &needed, 1);
3131 /* If all needed slots haven't been filled, we come here. */
3133 /* Try to optimize case of jumping around a single insn. */
3134 #if defined(ANNUL_IFFALSE_SLOTS) || defined(ANNUL_IFTRUE_SLOTS)
3135 if (slots_filled != slots_to_fill
3137 && GET_CODE (insn) == JUMP_INSN
3138 && (condjump_p (insn) || condjump_in_parallel_p (insn)))
3140 delay_list = optimize_skip (insn);
3146 /* Try to get insns from beyond the insn needing the delay slot.
3147 These insns can neither set nor reference resources set in insns being
3148 skipped, cannot set resources in the insn being skipped, and, if this
3149 is a CALL_INSN (or a CALL_INSN is passed), cannot trap (because the
3150 call might not return).
3152 There used to be code which continued past the target label if
3153 we saw all uses of the target label. This code did not work,
3154 because it failed to account for some instructions which were
3155 both annulled and marked as from the target. This can happen as a
3156 result of optimize_skip. Since this code was redundant with
3157 fill_eager_delay_slots anyways, it was just deleted. */
3159 if (slots_filled != slots_to_fill
3160 && (GET_CODE (insn) != JUMP_INSN
3161 || ((condjump_p (insn) || condjump_in_parallel_p (insn))
3162 && ! simplejump_p (insn)
3163 && JUMP_LABEL (insn) != 0)))
3166 int maybe_never = 0;
3167 struct resources needed_at_jump;
3169 CLEAR_RESOURCE (&needed);
3170 CLEAR_RESOURCE (&set);
3172 if (GET_CODE (insn) == CALL_INSN)
3174 mark_set_resources (insn, &set, 0, 1);
3175 mark_referenced_resources (insn, &needed, 1);
3180 mark_set_resources (insn, &set, 0, 1);
3181 mark_referenced_resources (insn, &needed, 1);
3182 if (GET_CODE (insn) == JUMP_INSN)
3183 target = JUMP_LABEL (insn);
3186 for (trial = next_nonnote_insn (insn); trial; trial = next_trial)
3188 rtx pat, trial_delay;
3190 next_trial = next_nonnote_insn (trial);
3192 if (GET_CODE (trial) == CODE_LABEL
3193 || GET_CODE (trial) == BARRIER)
3196 /* We must have an INSN, JUMP_INSN, or CALL_INSN. */
3197 pat = PATTERN (trial);
3199 /* Stand-alone USE and CLOBBER are just for flow. */
3200 if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
3203 /* If this already has filled delay slots, get the insn needing the delay slots. */
3205 if (GET_CODE (pat) == SEQUENCE)
3206 trial_delay = XVECEXP (pat, 0, 0);
3208 trial_delay = trial;
3210 /* If this is a jump insn to our target, indicate that we have
3211 seen another jump to it. If we aren't handling a conditional
3212 jump, stop our search. Otherwise, compute the needs at its
3213 target and add them to NEEDED. */
3214 if (GET_CODE (trial_delay) == JUMP_INSN)
3216 if (target == 0)
3217 break;
3218 else if (JUMP_LABEL (trial_delay) != target)
3220 mark_target_live_regs
3221 (next_active_insn (JUMP_LABEL (trial_delay)), &needed_at_jump);
3223 needed.memory |= needed_at_jump.memory;
3224 needed.unch_memory |= needed_at_jump.unch_memory;
3225 IOR_HARD_REG_SET (needed.regs, needed_at_jump.regs);
3229 /* See if we have a resource problem before we try to split. */
3232 && GET_CODE (pat) != SEQUENCE
3233 && ! insn_references_resource_p (trial, &set, 1)
3234 && ! insn_sets_resource_p (trial, &set, 1)
3235 && ! insn_sets_resource_p (trial, &needed, 1)
3237 && ! (reg_mentioned_p (cc0_rtx, pat) && ! sets_cc0_p (pat))
3239 && ! (maybe_never && may_trap_p (pat))
3240 && (trial = try_split (pat, trial, 0))
3241 && eligible_for_delay (insn, slots_filled, trial, flags))
3243 next_trial = next_nonnote_insn (trial);
3244 delay_list = add_to_delay_list (trial, delay_list);
3247 if (reg_mentioned_p (cc0_rtx, pat))
3248 link_cc0_insns (trial);
3251 delete_insn (trial);
3252 if (slots_to_fill == ++slots_filled)
3257 mark_set_resources (trial, &set, 0, 1);
3258 mark_referenced_resources (trial, &needed, 1);
3260 /* Ensure we don't put insns between the setting of cc and the
3261 comparison by moving a setting of cc into an earlier delay
3262 slot since these insns could clobber the condition code. */
3265 /* If this is a call or jump, we might not get here. */
3266 if (GET_CODE (trial_delay) == CALL_INSN
3267 || GET_CODE (trial_delay) == JUMP_INSN)
3271 /* If there are slots left to fill and our search was stopped by an
3272 unconditional branch, try the insn at the branch target. We can
3273 redirect the branch if it works.
3275 Don't do this if the insn at the branch target is a branch. */
3276 if (slots_to_fill != slots_filled
3278 && GET_CODE (trial) == JUMP_INSN
3279 && simplejump_p (trial)
3280 && (target == 0 || JUMP_LABEL (trial) == target)
3281 && (next_trial = next_active_insn (JUMP_LABEL (trial))) != 0
3282 && ! (GET_CODE (next_trial) == INSN
3283 && GET_CODE (PATTERN (next_trial)) == SEQUENCE)
3284 && GET_CODE (next_trial) != JUMP_INSN
3285 && ! insn_references_resource_p (next_trial, &set, 1)
3286 && ! insn_sets_resource_p (next_trial, &set, 1)
3287 && ! insn_sets_resource_p (next_trial, &needed, 1)
3289 && ! reg_mentioned_p (cc0_rtx, PATTERN (next_trial))
3291 && ! (maybe_never && may_trap_p (PATTERN (next_trial)))
3292 && (next_trial = try_split (PATTERN (next_trial), next_trial, 0))
3293 && eligible_for_delay (insn, slots_filled, next_trial, flags))
3295 rtx new_label = next_active_insn (next_trial);
3298 new_label = get_label_before (new_label);
3300 new_label = find_end_label ();
3302 delay_list
3303 = add_to_delay_list (copy_rtx (next_trial), delay_list);
3305 reorg_redirect_jump (trial, new_label);
3307 /* If we merged because we both jumped to the same place,
3308 redirect the original insn also. */
3310 reorg_redirect_jump (insn, new_label);
3314 /* If this is an unconditional jump, then try to get insns from the
3315 target of the jump. */
3316 if (GET_CODE (insn) == JUMP_INSN
3317 && simplejump_p (insn)
3318 && slots_filled != slots_to_fill)
3319 delay_list
3320 = fill_slots_from_thread (insn, const_true_rtx,
3321 next_active_insn (JUMP_LABEL (insn)),
3323 own_thread_p (JUMP_LABEL (insn),
3324 JUMP_LABEL (insn), 0),
3325 0, slots_to_fill, &slots_filled);
3328 unfilled_slots_base[i]
3329 = emit_delay_sequence (insn, delay_list,
3330 slots_filled, slots_to_fill);
3332 if (slots_to_fill == slots_filled)
3333 unfilled_slots_base[i] = 0;
3335 note_delay_statistics (slots_filled, 0);
3338 #ifdef DELAY_SLOTS_FOR_EPILOGUE
3339 /* See if the epilogue needs any delay slots. Try to fill them if so.
3340 The only thing we can do is scan backwards from the end of the
3341 function. If we did this in a previous pass, it is incorrect to do it again. */
3343 if (current_function_epilogue_delay_list)
3346 slots_to_fill = DELAY_SLOTS_FOR_EPILOGUE;
3347 if (slots_to_fill == 0)
3351 CLEAR_RESOURCE (&set);
3353 /* The frame pointer and stack pointer are needed at the beginning of
3354 the epilogue, so instructions setting them can not be put in the
3355 epilogue delay slot. However, everything else needed at function
3356 end is safe, so we don't want to use end_of_function_needs here. */
3357 CLEAR_RESOURCE (&needed);
3358 if (frame_pointer_needed)
3360 SET_HARD_REG_BIT (needed.regs, FRAME_POINTER_REGNUM);
3361 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
3362 SET_HARD_REG_BIT (needed.regs, HARD_FRAME_POINTER_REGNUM);
3364 #ifdef EXIT_IGNORE_STACK
3365 if (! EXIT_IGNORE_STACK)
3367 SET_HARD_REG_BIT (needed.regs, STACK_POINTER_REGNUM);
3370 SET_HARD_REG_BIT (needed.regs, STACK_POINTER_REGNUM);
3372 #ifdef EPILOGUE_USES
3373 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3375 if (EPILOGUE_USES (i))
3376 SET_HARD_REG_BIT (needed.regs, i);
3380 for (trial = get_last_insn (); ! stop_search_p (trial, 1);
3381 trial = PREV_INSN (trial))
3383 if (GET_CODE (trial) == NOTE)
3385 pat = PATTERN (trial);
3386 if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
3389 if (! insn_references_resource_p (trial, &set, 1)
3390 && ! insn_sets_resource_p (trial, &needed, 1)
3391 && ! insn_sets_resource_p (trial, &set, 1)
3393 /* Don't want to mess with cc0 here. */
3394 && ! reg_mentioned_p (cc0_rtx, pat)
3398 trial = try_split (pat, trial, 1);
3399 if (ELIGIBLE_FOR_EPILOGUE_DELAY (trial, slots_filled))
3401 /* Here as well we are searching backward, so put the
3402 insns we find on the head of the list. */
3404 current_function_epilogue_delay_list
3405 = gen_rtx_INSN_LIST (VOIDmode, trial,
3406 current_function_epilogue_delay_list);
3407 mark_referenced_resources (trial, &end_of_function_needs, 1);
3408 update_block (trial, trial);
3409 delete_insn (trial);
3411 /* Clear deleted bit so final.c will output the insn. */
3412 INSN_DELETED_P (trial) = 0;
3414 if (slots_to_fill == ++slots_filled)
3420 mark_set_resources (trial, &set, 0, 1);
3421 mark_referenced_resources (trial, &needed, 1);
3424 note_delay_statistics (slots_filled, 0);
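/* [Editor's note: a standalone C sketch, not part of reorg.c, of the
   shape of the backward scan used above by fill_simple_delay_slots
   and the epilogue filler: walk back from the slot insn, skip
   USE/CLOBBER, stop at anything that ends the search, and take the
   first insn with no resource conflict, accumulating SET and NEEDED
   as we go.  Resources are a single hypothetical bit mask here.  */

struct toy_slot_insn
{
  unsigned long long sets, uses;  /* resources set / referenced */
  int stops_search;		  /* label, jump, barrier, ...  */
  int skip;			  /* USE or CLOBBER */
  struct toy_slot_insn *prev;
};

static struct toy_slot_insn *
toy_fill_backward (struct toy_slot_insn *slot_insn)
{
  unsigned long long set = slot_insn->sets;
  unsigned long long needed = slot_insn->uses;
  struct toy_slot_insn *trial;

  for (trial = slot_insn->prev; trial && ! trial->stops_search;
       trial = trial->prev)
    {
      if (trial->skip)
	continue;

      /* A candidate may not use or set anything set afterward, nor
	 set anything needed afterward.  */
      if (! (trial->uses & set) && ! (trial->sets & set)
	  && ! (trial->sets & needed))
	return trial;

      set |= trial->sets;
      needed |= trial->uses;
    }
  return 0;
}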
3428 /* Try to find insns to place in delay slots.
3430 INSN is the jump needing SLOTS_TO_FILL delay slots. It tests CONDITION
3431 or is an unconditional branch if CONDITION is const_true_rtx.
3432 *PSLOTS_FILLED is updated with the number of slots that we have filled.
3434 THREAD is a flow of control, either the insns to be executed if the
3435 branch is true or those if it is false; THREAD_IF_TRUE says which.
3437 OPPOSITE_THREAD is the thread in the opposite direction. It is used
3438 to see if any potential delay slot insns set things needed there.
3440 LIKELY is non-zero if it is extremely likely that the branch will be
3441 taken and THREAD_IF_TRUE is set. This is used for the branch at the
3442 end of a loop back up to the top.
3444 OWN_THREAD and OWN_OPPOSITE_THREAD are true if we are the only user of the
3445 thread. I.e., it is the fallthrough code of our jump or the target of the
3446 jump when we are the only jump going there.
3448 If OWN_THREAD is false, it must be the "true" thread of a jump. In that
3449 case, we can only take insns from the head of the thread for our delay
3450 slot. We then adjust the jump to point after the insns we have taken. */
3453 fill_slots_from_thread (insn, condition, thread, opposite_thread, likely,
3454 thread_if_true, own_thread, own_opposite_thread,
3455 slots_to_fill, pslots_filled)
3458 rtx thread, opposite_thread;
3461 int own_thread, own_opposite_thread;
3462 int slots_to_fill, *pslots_filled;
3466 struct resources opposite_needed, set, needed;
3472 /* Validate our arguments. */
3473 if ((condition == const_true_rtx && ! thread_if_true)
3474 || (! own_thread && ! thread_if_true))
3477 flags = get_jump_flags (insn, JUMP_LABEL (insn));
3479 /* If our thread is the end of subroutine, we can't get any delay insns from that. */
3484 /* If this is an unconditional branch, nothing is needed at the
3485 opposite thread. Otherwise, compute what is needed there. */
3486 if (condition == const_true_rtx)
3487 CLEAR_RESOURCE (&opposite_needed);
3489 mark_target_live_regs (opposite_thread, &opposite_needed);
3491 /* If the insn at THREAD can be split, do it here to avoid having to
3492 update THREAD and NEW_THREAD if it is done in the loop below. Also
3493 initialize NEW_THREAD. */
3495 new_thread = thread = try_split (PATTERN (thread), thread, 0);
3497 /* Scan insns at THREAD. We are looking for an insn that can be removed
3498 from THREAD (it neither sets nor references resources that were set
3499 ahead of it and it doesn't set anything needed by the insns ahead of
3500 it) and that either can be placed in an annulling insn or isn't
3501 needed at OPPOSITE_THREAD. */
3503 CLEAR_RESOURCE (&needed);
3504 CLEAR_RESOURCE (&set);
3506 /* If we do not own this thread, we must stop as soon as we find
3507 something that we can't put in a delay slot, since all we can do
3508 is branch into THREAD at a later point. Therefore, labels stop
3509 the search if this is not the `true' thread. */
3511 for (trial = thread;
3512 ! stop_search_p (trial, ! thread_if_true) && (! lose || own_thread);
3513 trial = next_nonnote_insn (trial))
3517 /* If we have passed a label, we no longer own this thread. */
3518 if (GET_CODE (trial) == CODE_LABEL)
3524 pat = PATTERN (trial);
3525 if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
3528 /* If TRIAL conflicts with the insns ahead of it, we lose. Also,
3529 don't separate or copy insns that set and use CC0. */
3530 if (! insn_references_resource_p (trial, &set, 1)
3531 && ! insn_sets_resource_p (trial, &set, 1)
3532 && ! insn_sets_resource_p (trial, &needed, 1)
3534 && ! (reg_mentioned_p (cc0_rtx, pat)
3535 && (! own_thread || ! sets_cc0_p (pat)))
3541 /* If TRIAL is redundant with some insn before INSN, we don't
3542 actually need to add it to the delay list; we can merely pretend we did. */
3544 if (prior_insn = redundant_insn (trial, insn, delay_list))
3546 fix_reg_dead_note (prior_insn, insn);
3549 update_block (trial, thread);
3550 if (trial == thread)
3552 thread = next_active_insn (thread);
3553 if (new_thread == trial)
3554 new_thread = thread;
3557 delete_insn (trial);
3561 update_reg_unused_notes (prior_insn, trial);
3562 new_thread = next_active_insn (trial);
3568 /* There are two ways we can win: If TRIAL doesn't set anything
3569 needed at the opposite thread and can't trap, or if it can
3570 go into an annulled delay slot. */
3571 if (condition == const_true_rtx
3572 || (! insn_sets_resource_p (trial, &opposite_needed, 1)
3573 && ! may_trap_p (pat)))
3576 trial = try_split (pat, trial, 0);
3577 if (new_thread == old_trial)
3579 if (thread == old_trial)
3581 pat = PATTERN (trial);
3582 if (eligible_for_delay (insn, *pslots_filled, trial, flags))
3586 #ifdef ANNUL_IFTRUE_SLOTS
3589 #ifdef ANNUL_IFFALSE_SLOTS
3595 trial = try_split (pat, trial, 0);
3596 if (new_thread == old_trial)
3598 if (thread == old_trial)
3600 pat = PATTERN (trial);
3602 ? eligible_for_annul_false (insn, *pslots_filled, trial, flags)
3603 : eligible_for_annul_true (insn, *pslots_filled, trial, flags)))
3611 if (reg_mentioned_p (cc0_rtx, pat))
3612 link_cc0_insns (trial);
3615 /* If we own this thread, delete the insn. If this is the
3616 destination of a branch, show that a basic block status
3617 may have been updated. In any case, mark the new
3618 starting point of this thread. */
3621 update_block (trial, thread);
3622 if (trial == thread)
3624 thread = next_active_insn (thread);
3625 if (new_thread == trial)
3626 new_thread = thread;
3628 delete_insn (trial);
3631 new_thread = next_active_insn (trial);
3633 temp = own_thread ? trial : copy_rtx (trial);
3635 INSN_FROM_TARGET_P (temp) = 1;
3637 delay_list = add_to_delay_list (temp, delay_list);
3639 if (slots_to_fill == ++(*pslots_filled))
3641 /* Even though we have filled all the slots, we
3642 may be branching to a location that has a
3643 redundant insn. Skip any if so. */
3644 while (new_thread && ! own_thread
3645 && ! insn_sets_resource_p (new_thread, &set, 1)
3646 && ! insn_sets_resource_p (new_thread, &needed, 1)
3647 && ! insn_references_resource_p (new_thread,
3649 && redundant_insn (new_thread, insn, delay_list))
3650 new_thread = next_active_insn (new_thread);
3659 /* This insn can't go into a delay slot. */
3661 mark_set_resources (trial, &set, 0, 1);
3662 mark_referenced_resources (trial, &needed, 1);
3664 /* Ensure we don't put insns between the setting of cc and the comparison
3665 by moving a setting of cc into an earlier delay slot since these insns
3666 could clobber the condition code. */
3669 /* If this insn is a register-register copy and the next insn has
3670 a use of our destination, change it to use our source. That way,
3671 it will become a candidate for our delay slot the next time
3672 through this loop. This case occurs commonly in loops that scan a list.
3675 We could check for more complex cases than those tested below,
3676 but it doesn't seem worth it. It might also be a good idea to try
3677 to swap the two insns. That might do better.
3679 We can't do this if the next insn modifies our destination, because
3680 that would make the replacement into the insn invalid. We also can't
3681 do this if it modifies our source, because it might be an earlyclobber
3682 operand. This latter test also prevents updating the contents of a PRE_INC. */
3685 if (GET_CODE (trial) == INSN && GET_CODE (pat) == SET
3686 && GET_CODE (SET_SRC (pat)) == REG
3687 && GET_CODE (SET_DEST (pat)) == REG)
3689 rtx next = next_nonnote_insn (trial);
3691 if (next && GET_CODE (next) == INSN
3692 && GET_CODE (PATTERN (next)) != USE
3693 && ! reg_set_p (SET_DEST (pat), next)
3694 && ! reg_set_p (SET_SRC (pat), next)
3695 && reg_referenced_p (SET_DEST (pat), PATTERN (next)))
3696 validate_replace_rtx (SET_DEST (pat), SET_SRC (pat), next);
3700 /* If we stopped on a branch insn that has delay slots, see if we can
3701 steal some of the insns in those slots. */
3702 if (trial && GET_CODE (trial) == INSN
3703 && GET_CODE (PATTERN (trial)) == SEQUENCE
3704 && GET_CODE (XVECEXP (PATTERN (trial), 0, 0)) == JUMP_INSN)
3706 /* If this is the `true' thread, we will want to follow the jump,
3707 so we can only do this if we have taken everything up to here. */
3708 if (thread_if_true && trial == new_thread)
3709 delay_list
3710 = steal_delay_list_from_target (insn, condition, PATTERN (trial),
3711 delay_list, &set, &needed,
3712 &opposite_needed, slots_to_fill,
3713 pslots_filled, &must_annul,
3715 else if (! thread_if_true)
3716 delay_list
3717 = steal_delay_list_from_fallthrough (insn, condition,
3719 delay_list, &set, &needed,
3720 &opposite_needed, slots_to_fill,
3721 pslots_filled, &must_annul);
3724 /* If we haven't found anything for this delay slot and it is very
3725 likely that the branch will be taken, see if the insn at our target
3726 increments or decrements a register with an increment that does not
3727 depend on the destination register. If so, try to place the opposite
3728 arithmetic insn after the jump insn and put the arithmetic insn in the
3729 delay slot. If we can't do this, return. */
3730 if (delay_list == 0 && likely && new_thread
3731 && GET_CODE (new_thread) == INSN
3732 && GET_CODE (PATTERN (new_thread)) != ASM_INPUT
3733 && asm_noperands (PATTERN (new_thread)) < 0)
3735 rtx pat = PATTERN (new_thread);
3740 pat = PATTERN (trial);
3742 if (GET_CODE (trial) != INSN || GET_CODE (pat) != SET
3743 || ! eligible_for_delay (insn, 0, trial, flags))
3746 dest = SET_DEST (pat), src = SET_SRC (pat);
3747 if ((GET_CODE (src) == PLUS || GET_CODE (src) == MINUS)
3748 && rtx_equal_p (XEXP (src, 0), dest)
3749 && ! reg_overlap_mentioned_p (dest, XEXP (src, 1)))
3751 rtx other = XEXP (src, 1);
3755 /* If this is a constant adjustment, use the same code with
3756 the negated constant. Otherwise, reverse the sense of the arithmetic. */
3758 if (GET_CODE (other) == CONST_INT)
3759 new_arith = gen_rtx_fmt_ee (GET_CODE (src), GET_MODE (src), dest,
3760 negate_rtx (GET_MODE (src), other));
3762 new_arith = gen_rtx_fmt_ee (GET_CODE (src) == PLUS ? MINUS : PLUS,
3763 GET_MODE (src), dest, other);
3765 ninsn = emit_insn_after (gen_rtx_SET (VOIDmode, dest, new_arith), trial);
3768 if (recog_memoized (ninsn) < 0
3769 || (insn_extract (ninsn),
3770 ! constrain_operands (INSN_CODE (ninsn), 1)))
3772 delete_insn (ninsn);
3778 update_block (trial, thread);
3779 if (trial == thread)
3781 thread = next_active_insn (thread);
3782 if (new_thread == trial)
3783 new_thread = thread;
3785 delete_insn (trial);
3788 new_thread = next_active_insn (trial);
3790 ninsn = own_thread ? trial : copy_rtx (trial);
3792 INSN_FROM_TARGET_P (ninsn) = 1;
3794 delay_list = add_to_delay_list (ninsn, NULL_RTX);
3799 if (delay_list && must_annul)
3800 INSN_ANNULLED_BRANCH_P (insn) = 1;
3802 /* If we are to branch into the middle of this thread, find an appropriate
3803 label or make a new one if none, and redirect INSN to it. If we hit the
3804 end of the function, use the end-of-function label. */
3805 if (new_thread != thread)
3809 if (! thread_if_true)
3812 if (new_thread && GET_CODE (new_thread) == JUMP_INSN
3813 && (simplejump_p (new_thread)
3814 || GET_CODE (PATTERN (new_thread)) == RETURN)
3815 && redirect_with_delay_list_safe_p (insn,
3816 JUMP_LABEL (new_thread),
3818 new_thread = follow_jumps (JUMP_LABEL (new_thread));
3820 if (new_thread == 0)
3821 label = find_end_label ();
3822 else if (GET_CODE (new_thread) == CODE_LABEL)
3825 label = get_label_before (new_thread);
3827 reorg_redirect_jump (insn, label);
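/* [Editor's note: a standalone C sketch, not part of reorg.c, of the
   increment/decrement trick described above in scalar form: hoisting
   "dest = dest + k" from the branch target into the delay slot is
   compensated by planting "dest = dest - k" after the jump, so the
   fall-through path still sees the old value.  Everything here is
   hypothetical.  */

static void
toy_demo_reverse_arith (void)
{
  long dest = 10;
  long k = 4;
  int branch_taken = 0;

  dest += k;			/* delay slot: executed either way */

  if (! branch_taken)
    dest -= k;			/* compensating insn after the jump */

  /* dest is 14 on the taken path, back to 10 on fall-through.  */
  (void) dest;
}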
3833 /* Make another attempt to find insns to place in delay slots.
3835 We previously looked for insns located in front of the delay insn
3836 and, for non-jump delay insns, located behind the delay insn.
3838 Here only try to schedule jump insns and try to move insns from either
3839 the target or the following insns into the delay slot. If annulling is
3840 supported, we will be likely to do this. Otherwise, we can do this only if safe. */
3844 fill_eager_delay_slots (first)
3849 int num_unfilled_slots = unfilled_slots_next - unfilled_slots_base;
3851 for (i = 0; i < num_unfilled_slots; i++)
3854 rtx target_label, insn_at_target, fallthrough_insn;
3857 int own_fallthrough;
3858 int prediction, slots_to_fill, slots_filled;
3860 insn = unfilled_slots_base[i];
3862 || INSN_DELETED_P (insn)
3863 || GET_CODE (insn) != JUMP_INSN
3864 || ! (condjump_p (insn) || condjump_in_parallel_p (insn)))
3867 slots_to_fill = num_delay_slots (insn);
3868 if (slots_to_fill == 0)
3872 target_label = JUMP_LABEL (insn);
3873 condition = get_branch_condition (insn, target_label);
3878 /* Get the next active fallthrough and target insns and see if we own
3879 them. Then see whether the branch is likely true. We don't need
3880 to do a lot of this for unconditional branches. */
3882 insn_at_target = next_active_insn (target_label);
3883 own_target = own_thread_p (target_label, target_label, 0);
3885 if (condition == const_true_rtx)
3887 own_fallthrough = 0;
3888 fallthrough_insn = 0;
3893 fallthrough_insn = next_active_insn (insn);
3894 own_fallthrough = own_thread_p (NEXT_INSN (insn), NULL_RTX, 1);
3895 prediction = mostly_true_jump (insn, condition);
3898 /* If this insn is expected to branch, first try to get insns from our
3899 target, then our fallthrough insns. If it is not expected to branch,
3900 try the other order. */
3904 delay_list
3905 = fill_slots_from_thread (insn, condition, insn_at_target,
3906 fallthrough_insn, prediction == 2, 1,
3907 own_target, own_fallthrough,
3908 slots_to_fill, &slots_filled);
3910 if (delay_list == 0 && own_fallthrough)
3912 /* Even though we didn't find anything for delay slots,
3913 we might have found a redundant insn which we deleted
3914 from the thread that was filled. So we have to recompute
3915 the next insn at the target. */
3916 target_label = JUMP_LABEL (insn);
3917 insn_at_target = next_active_insn (target_label);
3919 delay_list
3920 = fill_slots_from_thread (insn, condition, fallthrough_insn,
3921 insn_at_target, 0, 0,
3922 own_fallthrough, own_target,
3923 slots_to_fill, &slots_filled);
3928 if (own_fallthrough)
3929 delay_list
3930 = fill_slots_from_thread (insn, condition, fallthrough_insn,
3931 insn_at_target, 0, 0,
3932 own_fallthrough, own_target,
3933 slots_to_fill, &slots_filled);
3935 if (delay_list == 0)
3936 delay_list
3937 = fill_slots_from_thread (insn, condition, insn_at_target,
3938 next_active_insn (insn), 0, 1,
3939 own_target, own_fallthrough,
3940 slots_to_fill, &slots_filled);
3944 unfilled_slots_base[i]
3945 = emit_delay_sequence (insn, delay_list,
3946 slots_filled, slots_to_fill);
3948 if (slots_to_fill == slots_filled)
3949 unfilled_slots_base[i] = 0;
3951 note_delay_statistics (slots_filled, 1);
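/* [Editor's note: a standalone C sketch, not part of reorg.c, of the
   ordering fill_eager_delay_slots applies above: try the thread the
   branch is predicted to reach first, and fall back to the other
   thread only when that yields nothing.  toy_try_thread stands in for
   fill_slots_from_thread and always fails; all names are
   hypothetical.  */

static int
toy_try_thread (int from_target)
{
  (void) from_target;
  return 0;			/* placeholder: found no insns */
}

static int
toy_fill_eager (int predicted_taken)
{
  /* Predicted taken: target thread (1) first, then fall-through (0);
     otherwise the reverse.  */
  int filled = toy_try_thread (predicted_taken != 0);

  if (! filled)
    filled = toy_try_thread (predicted_taken == 0);
  return filled;
}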
3955 /* Once we have tried two ways to fill a delay slot, make a pass over the
3956 code to try to improve the results and to do such things as more jump optimizations.
3960 relax_delay_slots (first)
3963 register rtx insn, next, pat;
3964 register rtx trial, delay_insn, target_label;
3966 /* Look at every JUMP_INSN and see if we can improve it. */
3967 for (insn = first; insn; insn = next)
3971 next = next_active_insn (insn);
3973 /* If this is a jump insn, see if it now jumps to a jump, jumps to
3974 the next insn, or jumps to a label that is not the last of a
3975 group of consecutive labels. */
      if (GET_CODE (insn) == JUMP_INSN
	  && (condjump_p (insn) || condjump_in_parallel_p (insn))
	  && (target_label = JUMP_LABEL (insn)) != 0)
	{
	  target_label = follow_jumps (target_label);
	  target_label = prev_label (next_active_insn (target_label));

	  if (target_label == 0)
	    target_label = find_end_label ();

	  if (next_active_insn (target_label) == next
	      && ! condjump_in_parallel_p (insn))
	    {
	      delete_jump (insn);
	      continue;
	    }

	  if (target_label != JUMP_LABEL (insn))
	    reorg_redirect_jump (insn, target_label);

	  /* See if this jump branches around an unconditional jump.
	     If so, invert this jump and point it to the target of the
	     second jump.  */
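	  /* For example (illustrative only):

		beq  L1			bne  L2
		jmp  L2		==>
	   L1:	...		   L1:	...

	     assuming invert_jump succeeds; the unconditional jump is then
	     deleted.  */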
	  if (next && GET_CODE (next) == JUMP_INSN
	      && (simplejump_p (next) || GET_CODE (PATTERN (next)) == RETURN)
	      && next_active_insn (target_label) == next_active_insn (next)
	      && no_labels_between_p (insn, next))
	    {
	      rtx label = JUMP_LABEL (next);

	      /* Be careful how we do this to avoid deleting code or
		 labels that are momentarily dead.  See similar optimization
		 in jump.c.

		 We also need to ensure we properly handle the case when
		 invert_jump fails.  */

	      ++LABEL_NUSES (target_label);
	      if (label)
		++LABEL_NUSES (label);

	      if (invert_jump (insn, label))
		{
		  delete_insn (next);
		  next = insn;
		}

	      if (label)
		--LABEL_NUSES (label);

	      if (--LABEL_NUSES (target_label) == 0)
		delete_insn (target_label);

	      continue;
	    }
	}

      /* If this is an unconditional jump and the previous insn is a
	 conditional jump, try reversing the condition of the previous
	 insn and swapping our targets.  The next pass might be able to
	 fill the slots.

	 Don't do this if we expect the conditional branch to be true, because
	 we would then be making the more common case longer.  */
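      /* For example (illustrative only), when the conditional branch is
	 mostly not taken:

		beq  L1			bne  L2
		jmp  L2		==>	jmp  L1

	 The common (not-taken) case now reaches L2 with a single branch,
	 and the next pass may be able to fill the slots of the remaining
	 unconditional jump.  */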
      if (GET_CODE (insn) == JUMP_INSN
	  && (simplejump_p (insn) || GET_CODE (PATTERN (insn)) == RETURN)
	  && (other = prev_active_insn (insn)) != 0
	  && (condjump_p (other) || condjump_in_parallel_p (other))
	  && no_labels_between_p (other, insn)
	  && 0 > mostly_true_jump (other,
				   get_branch_condition (other,
							 JUMP_LABEL (other))))
	{
	  rtx other_target = JUMP_LABEL (other);
	  target_label = JUMP_LABEL (insn);

	  /* Increment the count of OTHER_TARGET, so it doesn't get deleted
	     as we move the label.  */
	  if (other_target)
	    ++LABEL_NUSES (other_target);

	  if (invert_jump (other, target_label))
	    reorg_redirect_jump (insn, other_target);

	  if (other_target)
	    --LABEL_NUSES (other_target);
	}

      /* Now look only at cases where we have filled a delay slot.  */
      if (GET_CODE (insn) != INSN
	  || GET_CODE (PATTERN (insn)) != SEQUENCE)
	continue;

      pat = PATTERN (insn);
      delay_insn = XVECEXP (pat, 0, 0);

      /* See if the first insn in the delay slot is redundant with some
	 previous insn.  Remove it from the delay slot if so; then set up
	 to reprocess this insn.  */
      if (redundant_insn (XVECEXP (pat, 0, 1), delay_insn, 0))
	{
	  delete_from_delay_slot (XVECEXP (pat, 0, 1));
	  next = prev_active_insn (next);
	  continue;
	}

      /* Now look only at the cases where we have a filled JUMP_INSN.  */
      if (GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) != JUMP_INSN
	  || ! (condjump_p (XVECEXP (PATTERN (insn), 0, 0))
		|| condjump_in_parallel_p (XVECEXP (PATTERN (insn), 0, 0))))
	continue;

      target_label = JUMP_LABEL (delay_insn);
      if (target_label)
	{
	  /* If this jump goes to another unconditional jump, thread it, but
	     don't convert a jump into a RETURN here.  */
	  trial = follow_jumps (target_label);
	  /* We use next_real_insn instead of next_active_insn, so that
	     the special USE insns emitted by reorg won't be ignored.
	     If they are ignored, then they will get deleted if target_label
	     is now unreachable, and that would cause mark_target_live_regs
	     to get incorrect results.  */
	  trial = prev_label (next_real_insn (trial));
	  if (trial == 0 && target_label != 0)
	    trial = find_end_label ();

	  if (trial != target_label
	      && redirect_with_delay_slots_safe_p (delay_insn, trial, insn))
	    {
	      reorg_redirect_jump (delay_insn, trial);
	      target_label = trial;
	    }
	}

      /* If the first insn at TARGET_LABEL is redundant with a previous
	 insn, redirect the jump to the following insn and process it
	 again.  */
      trial = next_active_insn (target_label);
      if (trial && GET_CODE (PATTERN (trial)) != SEQUENCE
	  && redundant_insn (trial, insn, 0))
	{
	  rtx tmp;

	  /* Figure out where to emit the special USE insn so we don't
	     later incorrectly compute register live/death info.  */
	  tmp = next_active_insn (trial);
	  if (tmp == 0)
	    tmp = find_end_label ();

	  /* Insert the special USE insn and update dataflow info.  */
	  update_block (trial, tmp);

	  /* Now emit a label before the special USE insn, and
	     redirect our jump to the new label.  */
	  target_label = get_label_before (PREV_INSN (tmp));
	  reorg_redirect_jump (delay_insn, target_label);
	  next = insn;
	  continue;
	}
      /* Similarly, if it is an unconditional jump with one insn in its
	 delay list and that insn is redundant, thread the jump.  */
      if (trial && GET_CODE (PATTERN (trial)) == SEQUENCE
	  && XVECLEN (PATTERN (trial), 0) == 2
	  && GET_CODE (XVECEXP (PATTERN (trial), 0, 0)) == JUMP_INSN
	  && (simplejump_p (XVECEXP (PATTERN (trial), 0, 0))
	      || GET_CODE (PATTERN (XVECEXP (PATTERN (trial), 0, 0))) == RETURN)
	  && redundant_insn (XVECEXP (PATTERN (trial), 0, 1), insn, 0))
	{
	  target_label = JUMP_LABEL (XVECEXP (PATTERN (trial), 0, 0));
	  if (target_label == 0)
	    target_label = find_end_label ();

	  if (redirect_with_delay_slots_safe_p (delay_insn, target_label,
						insn))
	    {
	      reorg_redirect_jump (delay_insn, target_label);
	      next = insn;
	      continue;
	    }
	}

      if (! INSN_ANNULLED_BRANCH_P (delay_insn)
	  && prev_active_insn (target_label) == insn
	  && ! condjump_in_parallel_p (delay_insn)
#ifdef HAVE_cc0
	  /* If the last insn in the delay slot sets CC0 for some insn,
	     various code assumes that it is in a delay slot.  We could
	     put it back where it belonged and delete the register notes,
	     but it doesn't seem worthwhile in this uncommon case.  */
	  && ! find_reg_note (XVECEXP (pat, 0, XVECLEN (pat, 0) - 1),
			      REG_CC_USER, NULL_RTX)
#endif
	  )
	{
	  int i;

	  /* All this insn does is execute its delay list and jump to the
	     following insn.  So delete the jump and just execute the delay
	     list insns.

	     We do this by deleting the INSN containing the SEQUENCE, then
	     re-emitting the insns separately, and then deleting the jump.
	     This allows the count of the jump target to be properly
	     decremented.  */

	  /* Clear the from target bit, since these insns are no longer
	     in delay slots.  */
	  for (i = 0; i < XVECLEN (pat, 0); i++)
	    INSN_FROM_TARGET_P (XVECEXP (pat, 0, i)) = 0;

	  trial = PREV_INSN (insn);
	  delete_insn (insn);
	  emit_insn_after (pat, trial);
	  delete_scheduled_jump (delay_insn);
	  continue;
	}

      /* See if this is an unconditional jump around a single insn which is
	 identical to the one in its delay slot.  In this case, we can just
	 delete the branch and the insn in its delay slot.  */
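      /* For example (illustrative only):

	   before:	jmp  L1		after:		add  r1,r2,r3
			 add r1,r2,r3		   L1:	...
			add  r1,r2,r3
		   L1:	...

	 Either path executed exactly one copy of the add before reaching
	 L1, so the jump and its delay slot insn can both be deleted.  */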
      if (next && GET_CODE (next) == INSN
	  && prev_label (next_active_insn (next)) == target_label
	  && simplejump_p (insn)
	  && XVECLEN (pat, 0) == 2
	  && rtx_equal_p (PATTERN (next), PATTERN (XVECEXP (pat, 0, 1))))
	{
	  delete_insn (insn);
	  continue;
	}

      /* See if this jump (with its delay slots) branches around another
	 jump (without delay slots).  If so, invert this jump and point
	 it to the target of the second jump.  We cannot do this for
	 annulled jumps, though.  Again, don't convert a jump to a RETURN
	 here.  */
      if (! INSN_ANNULLED_BRANCH_P (delay_insn)
	  && next && GET_CODE (next) == JUMP_INSN
	  && (simplejump_p (next) || GET_CODE (PATTERN (next)) == RETURN)
	  && next_active_insn (target_label) == next_active_insn (next)
	  && no_labels_between_p (insn, next))
	{
	  rtx label = JUMP_LABEL (next);
	  rtx old_label = JUMP_LABEL (delay_insn);

	  if (label == 0)
	    label = find_end_label ();

	  if (redirect_with_delay_slots_safe_p (delay_insn, label, insn))
	    {
	      /* Be careful how we do this to avoid deleting code or labels
		 that are momentarily dead.  See similar optimization in
		 jump.c.  */
	      if (old_label)
		++LABEL_NUSES (old_label);

	      if (invert_jump (delay_insn, label))
		{
		  int i;

		  /* Must update the INSN_FROM_TARGET_P bits now that
		     the branch is reversed, so that mark_target_live_regs
		     will handle the delay slot insn correctly.  */
		  for (i = 1; i < XVECLEN (PATTERN (insn), 0); i++)
		    {
		      rtx slot = XVECEXP (PATTERN (insn), 0, i);
		      INSN_FROM_TARGET_P (slot) = ! INSN_FROM_TARGET_P (slot);
		    }

		  delete_insn (next);
		  next = insn;
		}

	      if (old_label && --LABEL_NUSES (old_label) == 0)
		delete_insn (old_label);
	      continue;
	    }
	}

      /* If we own the thread opposite the way this insn branches, see if we
	 can merge its delay slots with following insns.  */
      if (INSN_FROM_TARGET_P (XVECEXP (pat, 0, 1))
	  && own_thread_p (NEXT_INSN (insn), 0, 1))
	try_merge_delay_insns (insn, next);
      else if (! INSN_FROM_TARGET_P (XVECEXP (pat, 0, 1))
	       && own_thread_p (target_label, target_label, 0))
	try_merge_delay_insns (insn, next_active_insn (target_label));

      /* If we get here, we haven't deleted INSN.  But we may have deleted
	 NEXT, so recompute it.  */
      next = next_active_insn (insn);
    }
}
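/* To make the label-chasing idea above concrete, here is a minimal,
   self-contained sketch (toy types and names, not the real rtl accessors;
   the real follow_jumps lives in jump.c):

	struct toy_insn
	{
	  int simple_jump_p;		   nonzero for "jmp LABEL"
	  struct toy_insn *jump_target;	   first active insn at LABEL
	};

	static struct toy_insn *
	toy_follow_jumps (struct toy_insn *insn)
	{
	  int depth;

	  for (depth = 0; depth < 10 && insn && insn->simple_jump_p; depth++)
	    insn = insn->jump_target;
	  return insn;
	}

   The depth bound mirrors follow_jumps' guard against a cycle of
   unconditional jumps looping forever.  */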
#ifdef HAVE_return

/* Look for filled jumps to the end of function label.  We can try to convert
   them into RETURN insns if the insns in the delay slot are valid for the
   RETURN as well.  */

static void
make_return_insns (first)
     rtx first;
{
  rtx insn, jump_insn, pat;
  rtx real_return_label = end_of_function_label;
  int slots, i;

  /* See if there is a RETURN insn in the function other than the one we
     made for END_OF_FUNCTION_LABEL.  If so, set up anything we can't change
     into a RETURN to jump to it.  */
  for (insn = first; insn; insn = NEXT_INSN (insn))
    if (GET_CODE (insn) == JUMP_INSN && GET_CODE (PATTERN (insn)) == RETURN)
      {
	real_return_label = get_label_before (insn);
	break;
      }

  /* Show an extra usage of REAL_RETURN_LABEL so it won't go away if it
     was equal to END_OF_FUNCTION_LABEL.  */
  LABEL_NUSES (real_return_label)++;

  /* Clear the list of insns to fill so we can use it.  */
  obstack_free (&unfilled_slots_obstack, unfilled_firstobj);

  for (insn = first; insn; insn = NEXT_INSN (insn))
    {
      int flags;

      /* Only look at filled JUMP_INSNs that go to the end of function
	 label.  */
      if (GET_CODE (insn) != INSN
	  || GET_CODE (PATTERN (insn)) != SEQUENCE
	  || GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) != JUMP_INSN
	  || JUMP_LABEL (XVECEXP (PATTERN (insn), 0, 0)) != end_of_function_label)
	continue;

      pat = PATTERN (insn);
      jump_insn = XVECEXP (pat, 0, 0);

      /* If we can't make the jump into a RETURN, try to redirect it to the best
	 RETURN and go on to the next insn.  */
      if (! reorg_redirect_jump (jump_insn, NULL_RTX))
	{
	  /* Make sure redirecting the jump will not invalidate the delay
	     slot insns.  */
	  if (redirect_with_delay_slots_safe_p (jump_insn,
						real_return_label,
						insn))
	    reorg_redirect_jump (jump_insn, real_return_label);
	  continue;
	}

      /* See if this RETURN can accept the insns currently in its delay slot.
	 It can if it has more or an equal number of slots and the contents
	 of each is valid.  */
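      /* E.g. (illustrative): a SEQUENCE carrying one delay insn can become
	 a RETURN only if the RETURN pattern provides at least one slot and
	 that insn is an eligible occupant of it, possibly under the
	 annulling rules checked below.  */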

      flags = get_jump_flags (jump_insn, JUMP_LABEL (jump_insn));
      slots = num_delay_slots (jump_insn);
      if (slots >= XVECLEN (pat, 0) - 1)
	{
	  for (i = 1; i < XVECLEN (pat, 0); i++)
	    if (! (
#ifdef ANNUL_IFFALSE_SLOTS
		   (INSN_ANNULLED_BRANCH_P (jump_insn)
		    && INSN_FROM_TARGET_P (XVECEXP (pat, 0, i)))
		   ? eligible_for_annul_false (jump_insn, i - 1,
					       XVECEXP (pat, 0, i), flags) :
#endif
#ifdef ANNUL_IFTRUE_SLOTS
		   (INSN_ANNULLED_BRANCH_P (jump_insn)
		    && ! INSN_FROM_TARGET_P (XVECEXP (pat, 0, i)))
		   ? eligible_for_annul_true (jump_insn, i - 1,
					      XVECEXP (pat, 0, i), flags) :
#endif
		   eligible_for_delay (jump_insn, i - 1, XVECEXP (pat, 0, i), flags)))
	      break;
	}
      else
	i = 0;

      if (i == XVECLEN (pat, 0))
	continue;

      /* We have to do something with this insn.  If it is an unconditional
	 RETURN, delete the SEQUENCE and output the individual insns,
	 followed by the RETURN.  Then set things up so we try to find
	 insns for its delay slots, if it needs some.  */
      if (GET_CODE (PATTERN (jump_insn)) == RETURN)
	{
	  rtx prev = PREV_INSN (insn);

	  delete_insn (insn);
	  for (i = 1; i < XVECLEN (pat, 0); i++)
	    prev = emit_insn_after (PATTERN (XVECEXP (pat, 0, i)), prev);

	  insn = emit_jump_insn_after (PATTERN (jump_insn), prev);
	  emit_barrier_after (insn);

	  if (slots)
	    obstack_ptr_grow (&unfilled_slots_obstack, insn);
	}
      else
	/* It is probably more efficient to keep this with its current
	   delay slot as a branch to a RETURN.  */
	reorg_redirect_jump (jump_insn, real_return_label);
    }

  /* Now delete REAL_RETURN_LABEL if we never used it.  Then try to fill any
     new delay slots we have created.  */
  if (--LABEL_NUSES (real_return_label) == 0)
    delete_insn (real_return_label);
  fill_simple_delay_slots (first, 1);
  fill_simple_delay_slots (first, 0);
}
#endif

/* Try to find insns to place in delay slots.  */

void
dbr_schedule (first, file)
     rtx first;
     FILE *file;
{
  rtx insn, next, epilogue_insn = 0;
  int i;

  int old_flag_no_peephole = flag_no_peephole;

  /* Execute `final' once in prescan mode to delete any insns that won't be
     used.  Don't let final try to do any peephole optimization--it will
     ruin dataflow information for this pass.  */

  flag_no_peephole = 1;
  final (first, 0, NO_DEBUG, 1, 1);
  flag_no_peephole = old_flag_no_peephole;

  /* If the current function has no insns other than the prologue and
     epilogue, then do not try to fill any delay slots.  */
  if (n_basic_blocks == 0)
    return;

  /* Find the highest INSN_UID and allocate and initialize our map from
     INSN_UID's to position in code.  */
  for (max_uid = 0, insn = first; insn; insn = NEXT_INSN (insn))
    {
      if (INSN_UID (insn) > max_uid)
	max_uid = INSN_UID (insn);
      if (GET_CODE (insn) == NOTE
	  && NOTE_LINE_NUMBER (insn) == NOTE_INSN_EPILOGUE_BEG)
	epilogue_insn = insn;
    }

  uid_to_ruid = (int *) alloca ((max_uid + 1) * sizeof (int));
  for (i = 0, insn = first; insn; i++, insn = NEXT_INSN (insn))
    uid_to_ruid[INSN_UID (insn)] = i;
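  /* For instance (illustrative): if the chain holds insns with UIDs 7, 3
     and 9, in that order, the loop above yields uid_to_ruid[7] == 0,
     uid_to_ruid[3] == 1 and uid_to_ruid[9] == 2, so the relative position
     of two insns can be compared with a single subtraction.  */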
  /* Initialize the list of insns that need filling.  */
  if (unfilled_firstobj == 0)
    {
      gcc_obstack_init (&unfilled_slots_obstack);
      unfilled_firstobj = (rtx *) obstack_alloc (&unfilled_slots_obstack, 0);
    }

  for (insn = next_active_insn (first); insn; insn = next_active_insn (insn))
    {
      rtx target;

      INSN_ANNULLED_BRANCH_P (insn) = 0;
      INSN_FROM_TARGET_P (insn) = 0;

      /* Skip vector tables.  We can't get attributes for them.  */
      if (GET_CODE (insn) == JUMP_INSN
	  && (GET_CODE (PATTERN (insn)) == ADDR_VEC
	      || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC))
	continue;

      if (num_delay_slots (insn) > 0)
	obstack_ptr_grow (&unfilled_slots_obstack, insn);

      /* Ensure all jumps go to the last of a set of consecutive labels.  */
      if (GET_CODE (insn) == JUMP_INSN
	  && (condjump_p (insn) || condjump_in_parallel_p (insn))
	  && JUMP_LABEL (insn) != 0
	  && ((target = prev_label (next_active_insn (JUMP_LABEL (insn))))
	      != JUMP_LABEL (insn)))
	redirect_jump (insn, target);
    }

  /* Indicate what resources are required to be valid at the end of the
     current function.  The condition code never is and memory always is.
     If the frame pointer is needed, it is and so is the stack pointer
     unless EXIT_IGNORE_STACK is non-zero.  If the frame pointer is not
     needed, the stack pointer is.  Registers used to return the function
     value are needed.  Registers holding global variables are needed.  */

  end_of_function_needs.cc = 0;
  end_of_function_needs.memory = 1;
  end_of_function_needs.unch_memory = 0;
  CLEAR_HARD_REG_SET (end_of_function_needs.regs);

  if (frame_pointer_needed)
    {
      SET_HARD_REG_BIT (end_of_function_needs.regs, FRAME_POINTER_REGNUM);
#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
      SET_HARD_REG_BIT (end_of_function_needs.regs, HARD_FRAME_POINTER_REGNUM);
#endif
#ifdef EXIT_IGNORE_STACK
      if (! EXIT_IGNORE_STACK)
#endif
	SET_HARD_REG_BIT (end_of_function_needs.regs, STACK_POINTER_REGNUM);
    }
  else
    SET_HARD_REG_BIT (end_of_function_needs.regs, STACK_POINTER_REGNUM);

  if (current_function_return_rtx != 0)
    mark_referenced_resources (current_function_return_rtx,
			       &end_of_function_needs, 1);

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    if (global_regs[i]
#ifdef EPILOGUE_USES
	|| EPILOGUE_USES (i)
#endif
	)
      SET_HARD_REG_BIT (end_of_function_needs.regs, i);

  /* The registers required to be live at the end of the function are
     represented in the flow information as being dead just prior to
     reaching the end of the function.  For example, the return of a value
     might be represented by a USE of the return register immediately
     followed by an unconditional jump to the return label where the
     return label is the end of the RTL chain.  The end of the RTL chain
     is then taken to mean that the return register is live.

     This sequence is no longer maintained when epilogue instructions are
     added to the RTL chain.  To reconstruct the original meaning, the
     start of the epilogue (NOTE_INSN_EPILOGUE_BEG) is regarded as the
     point where these registers become live (start_of_epilogue_needs).
     If epilogue instructions are present, the registers set by those
     instructions won't have been processed by flow.  Thus, those
     registers are additionally required at the end of the RTL chain
     (end_of_function_needs).  */
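  /* For example (illustrative rtl), flow might represent `return x' as

	(use (reg:SI <return-value reg>))
	(jump_insn ...)		jump to the final return label

     with nothing after that label, so the return register was only live
     "off the end" of the chain.  Once an epilogue is emitted there, the
     NOTE_INSN_EPILOGUE_BEG note is what marks where these registers come
     back to life.  */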
  start_of_epilogue_needs = end_of_function_needs;

  while ((epilogue_insn = next_nonnote_insn (epilogue_insn)))
    mark_set_resources (epilogue_insn, &end_of_function_needs, 0, 1);

  /* Show we haven't computed an end-of-function label yet.  */
  end_of_function_label = 0;

  /* Allocate and initialize the tables used by mark_target_live_regs.  */
  target_hash_table
    = (struct target_info **) alloca ((TARGET_HASH_PRIME
				       * sizeof (struct target_info *)));
  bzero ((char *) target_hash_table,
	 TARGET_HASH_PRIME * sizeof (struct target_info *));

  bb_ticks = (int *) alloca (n_basic_blocks * sizeof (int));
  bzero ((char *) bb_ticks, n_basic_blocks * sizeof (int));

  /* Initialize the statistics for this function.  */
  bzero ((char *) num_insns_needing_delays, sizeof num_insns_needing_delays);
  bzero ((char *) num_filled_delays, sizeof num_filled_delays);

  /* Now do the delay slot filling.  Try everything twice in case earlier
     changes make more slots fillable.  */
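  /* For example, a slot filled by fill_eager_delay_slots can make the insn
     at a branch target redundant; relax_delay_slots may then delete or
     rethread the branch, which in turn can expose a slot that the earlier
     passes could not fill.  */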
  for (reorg_pass_number = 0;
       reorg_pass_number < MAX_REORG_PASSES;
       reorg_pass_number++)
    {
      fill_simple_delay_slots (first, 1);
      fill_simple_delay_slots (first, 0);
      fill_eager_delay_slots (first);
      relax_delay_slots (first);
    }

  /* Delete any USE insns made by update_block; subsequent passes don't need
     them or know how to deal with them.  */
  for (insn = first; insn; insn = next)
    {
      next = NEXT_INSN (insn);

      if (GET_CODE (insn) == INSN && GET_CODE (PATTERN (insn)) == USE
	  && GET_RTX_CLASS (GET_CODE (XEXP (PATTERN (insn), 0))) == 'i')
	next = delete_insn (insn);
    }

  /* If we made an end of function label, indicate that it is now
     safe to delete it by undoing our prior adjustment to LABEL_NUSES.
     If it is now unused, delete it.  */
  if (end_of_function_label && --LABEL_NUSES (end_of_function_label) == 0)
    delete_insn (end_of_function_label);

#ifdef HAVE_return
  if (HAVE_return && end_of_function_label != 0)
    make_return_insns (first);
#endif

  obstack_free (&unfilled_slots_obstack, unfilled_firstobj);

  /* It is not clear why the line below is needed, but it does seem to be.  */
  unfilled_firstobj = (rtx *) obstack_alloc (&unfilled_slots_obstack, 0);

  /* Reposition the prologue and epilogue notes in case we moved the
     prologue/epilogue insns.  */
  reposition_prologue_and_epilogue_notes (first);

  if (file)
    {
      register int i, j, need_comma;

      for (reorg_pass_number = 0;
	   reorg_pass_number < MAX_REORG_PASSES;
	   reorg_pass_number++)
	{
	  fprintf (file, ";; Reorg pass #%d:\n", reorg_pass_number + 1);
	  for (i = 0; i < NUM_REORG_FUNCTIONS; i++)
	    {
	      need_comma = 0;
	      fprintf (file, ";; Reorg function #%d\n", i);

	      fprintf (file, ";; %d insns needing delay slots\n;; ",
		       num_insns_needing_delays[i][reorg_pass_number]);

	      for (j = 0; j < MAX_DELAY_HISTOGRAM; j++)
		if (num_filled_delays[i][j][reorg_pass_number])
		  {
		    if (need_comma)
		      fprintf (file, ", ");
		    need_comma = 1;
		    fprintf (file, "%d got %d delays",
			     num_filled_delays[i][j][reorg_pass_number], j);
		  }
	      fprintf (file, "\n");
	    }
	}
    }

  /* For all JUMP insns, fill in branch prediction notes, so that during
     assembler output a target can set branch prediction bits in the code.
     We have to do this now, as up until this point the destinations of
     JUMPS can be moved around and changed, but past right here that cannot
     happen.  */
  for (insn = first; insn; insn = NEXT_INSN (insn))
    {
      int pred_flags;

      if (GET_CODE (insn) != JUMP_INSN)
	continue;

      pred_flags = get_jump_flags (insn, JUMP_LABEL (insn));
      REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_BR_PRED,
					    GEN_INT (pred_flags),
					    REG_NOTES (insn));
    }
}
#endif /* DELAY_SLOTS */