/* Perform instruction reorganizations for delay slot filling.
   Copyright (C) 1992, 93-98, 1999 Free Software Foundation, Inc.
   Contributed by Richard Kenner (kenner@vlsi1.ultra.nyu.edu).
   Hacked by Michael Tiemann (tiemann@cygnus.com).

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */
/* Instruction reorganization pass.

   This pass runs after register allocation and final jump
   optimization.  It should be the last pass to run before peephole.
   It serves primarily to fill delay slots of insns, typically branch
   and call insns.  Other insns typically involve more complicated
   interactions of data dependencies and resource constraints, and
   are better handled by scheduling before register allocation (by the
   function `schedule_insns').

   The Branch Penalty is the number of extra cycles that are needed to
   execute a branch insn.  On an ideal machine, branches take a single
   cycle, and the Branch Penalty is 0.  Several RISC machines approach
   branch delays differently:

   The MIPS and AMD 29000 have a single branch delay slot.  Most insns
   (except other branches) can be used to fill this slot.  When the
   slot is filled, two insns execute in two cycles, reducing the
   branch penalty to zero.
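
   For instance, a single-slot machine of this kind might fill a slot
   like this (a sketch in generic pseudo-assembly; the mnemonics and
   registers are illustrative only):

	(unfilled)		(filled)
	add  r4,r4,1		b    L2
	b    L2			add  r4,r4,1	<- runs in the delay slot
	nop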

   The Motorola 88000 conditionally exposes its branch delay slot,
   so code is shorter when it is turned off, but will run faster
   when useful insns are scheduled there.

   The IBM ROMP has two forms of branch and call insns, both with and
   without a delay slot.  Much like the 88k, insns not using the delay
   slot can be shortened (2 bytes vs. 4 bytes), but will run slower.

   The SPARC always has a branch delay slot, but its effects can be
   annulled when the branch is not taken.  This means that failing to
   find other sources of insns, we can hoist an insn from the branch
   target that would only be safe to execute knowing that the branch
   is taken.
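
   For instance, a hoisted target insn might look like this (a sketch
   in SPARC-style pseudo-assembly; mnemonics and registers are
   illustrative only):

	cmp  %o0,0
	be,a L1			! ",a" annuls the slot when not taken
	 add %o1,1,%o1		! hoisted from the code at L1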

   The HP-PA always has a branch delay slot.  For unconditional branches
   its effects can be annulled when the branch is taken.  The effects
   of the delay slot in a conditional branch can be nullified for forward
   taken branches, or for untaken backward branches.  This means
   we can hoist insns from the fall-through path for forward branches or
   steal insns from the target of backward branches.

   The TMS320C3x and C4x have three branch delay slots.  When the three
   slots are filled, the branch penalty is zero.  Most insns can fill the
   delay slots except jump insns.

   Three techniques for filling delay slots have been implemented so far:

   (1) `fill_simple_delay_slots' is the simplest, most efficient way
   to fill delay slots.  This pass first looks for insns which come
   from before the branch and which are safe to execute after the
   branch.  Then it searches after the insn requiring delay slots or,
   in the case of a branch, for insns that are after the point at
   which the branch merges into the fallthrough code, if such a point
   exists.  When such insns are found, the branch penalty decreases
   and no code expansion takes place.

   (2) `fill_eager_delay_slots' is more complicated: it is used for
   scheduling conditional jumps, or for scheduling jumps which cannot
   be filled using (1).  A machine need not have annulled jumps to use
   this strategy, but it helps (by keeping more options open).
   `fill_eager_delay_slots' tries to guess the direction the branch
   will go; if it guesses right 100% of the time, it can reduce the
   branch penalty as much as `fill_simple_delay_slots' does.  If it
   guesses wrong 100% of the time, it might as well schedule nops (or
   on the m88k, unexpose the branch slot).  When
   `fill_eager_delay_slots' takes insns from the fall-through path of
   the jump, usually there is no code expansion; when it takes insns
   from the branch target, there is code expansion if it is not the
   only way to reach that target.

   (3) `relax_delay_slots' uses a set of rules to simplify code that
   has been reorganized by (1) and (2).  It finds cases where a
   conditional test can be eliminated, jumps can be threaded, extra
   insns can be eliminated, etc.  It is the job of (1) and (2) to do a
   good job of scheduling locally; `relax_delay_slots' takes care of
   making the various individual schedules work well together.  It is
   especially tuned to handle the control flow interactions of branch
   insns.  It does nothing for insns with delay slots that do not
   branch.

   On machines that use CC0, we are very conservative.  We will not make
   a copy of an insn involving CC0 since we want to maintain a 1-1
   correspondence between the insn that sets and uses CC0.  The insns are
   allowed to be separated by placing an insn that sets CC0 (but not an insn
   that uses CC0; we could do this, but it doesn't seem worthwhile) in a
   delay slot.  In that case, we point each insn at the other with REG_CC_USER
   and REG_CC_SETTER notes.  Note that these restrictions affect very few
   machines because most RISC machines with delay slots will not use CC0
   (the RT is the only known exception at this point).

   Not yet implemented:

   The Acorn Risc Machine can conditionally execute most insns, so
   it is profitable to move single insns into a position to execute
   based on the condition code of the previous insn.

   The HP-PA can conditionally nullify insns, providing a similar
   effect to the ARM, differing mostly in which insn is "in charge".  */
128 #include "function.h"
129 #include "insn-config.h"
130 #include "conditions.h"
131 #include "hard-reg-set.h"
132 #include "basic-block.h"
134 #include "insn-flags.h"
139 #include "insn-attr.h"
140 #include "resource.h"
145 #define obstack_chunk_alloc xmalloc
146 #define obstack_chunk_free free
#ifndef ANNUL_IFTRUE_SLOTS
#define eligible_for_annul_true(INSN, SLOTS, TRIAL, FLAGS) 0
#endif
#ifndef ANNUL_IFFALSE_SLOTS
#define eligible_for_annul_false(INSN, SLOTS, TRIAL, FLAGS) 0
#endif

/* Insns which have delay slots that have not yet been filled.  */

static struct obstack unfilled_slots_obstack;
static rtx *unfilled_firstobj;

/* Define macros to refer to the first and last slot containing unfilled
   insns.  These are used because the list may move and its address
   should be recomputed at each use.  */

#define unfilled_slots_base	\
  ((rtx *) obstack_base (&unfilled_slots_obstack))

#define unfilled_slots_next	\
  ((rtx *) obstack_next_free (&unfilled_slots_obstack))
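
/* For instance, fill_simple_delay_slots (below) walks the list this way
   (a sketch; the macros are re-expanded on every use because growing the
   obstack may move the list):

       int n = unfilled_slots_next - unfilled_slots_base;
       for (i = 0; i < n; i++)
	 insn = unfilled_slots_base[i];
*/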

/* Points to the label before the end of the function.  */
static rtx end_of_function_label;

/* Mapping between INSN_UID's and position in the code since INSN_UID's do
   not always monotonically increase.  */
static int *uid_to_ruid;

/* Highest valid index in `uid_to_ruid'.  */
static int max_uid;
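
/* For instance, get_jump_flags (below) classifies a branch as forward by
   comparing positions in this mapping rather than raw INSN_UID's:

       uid_to_ruid[INSN_UID (label)] > uid_to_ruid[INSN_UID (insn)]
*/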

static int stop_search_p		PROTO((rtx, int));
static int resource_conflicts_p		PROTO((struct resources *,
					       struct resources *));
static int insn_references_resource_p	PROTO((rtx, struct resources *, int));
static int insn_sets_resource_p		PROTO((rtx, struct resources *, int));
static rtx find_end_label		PROTO((void));
static rtx emit_delay_sequence		PROTO((rtx, rtx, int));
static rtx add_to_delay_list		PROTO((rtx, rtx));
static rtx delete_from_delay_slot	PROTO((rtx));
static void delete_scheduled_jump	PROTO((rtx));
static void note_delay_statistics	PROTO((int, int));
#if defined(ANNUL_IFFALSE_SLOTS) || defined(ANNUL_IFTRUE_SLOTS)
static rtx optimize_skip		PROTO((rtx));
#endif
static int get_jump_flags		PROTO((rtx, rtx));
static int rare_destination		PROTO((rtx));
static int mostly_true_jump		PROTO((rtx, rtx));
static rtx get_branch_condition		PROTO((rtx, rtx));
static int condition_dominates_p	PROTO((rtx, rtx));
static int redirect_with_delay_slots_safe_p PROTO ((rtx, rtx, rtx));
static int redirect_with_delay_list_safe_p PROTO ((rtx, rtx, rtx));
static int check_annul_list_true_false	PROTO ((int, rtx));
static rtx steal_delay_list_from_target PROTO((rtx, rtx, rtx, rtx,
					       struct resources *,
					       struct resources *,
					       struct resources *,
					       int, int *, int *, rtx *));
static rtx steal_delay_list_from_fallthrough PROTO((rtx, rtx, rtx, rtx,
						    struct resources *,
						    struct resources *,
						    struct resources *,
						    int, int *, int *));
static void try_merge_delay_insns	PROTO((rtx, rtx));
static rtx redundant_insn		PROTO((rtx, rtx, rtx));
static int own_thread_p			PROTO((rtx, rtx, int));
static void update_block		PROTO((rtx, rtx));
static int reorg_redirect_jump		PROTO((rtx, rtx));
static void update_reg_dead_notes	PROTO((rtx, rtx));
static void fix_reg_dead_note		PROTO((rtx, rtx));
static void update_reg_unused_notes	PROTO((rtx, rtx));
static void fill_simple_delay_slots	PROTO((int));
static rtx fill_slots_from_thread	PROTO((rtx, rtx, rtx, rtx, int, int,
					       int, int, int *, rtx));
static void fill_eager_delay_slots	PROTO((void));
static void relax_delay_slots		PROTO((rtx));
static void make_return_insns		PROTO((rtx));

/* Return TRUE if this insn should stop the search for insn to fill delay
   slots.  LABELS_P indicates that labels should terminate the search.
   In all cases, jumps terminate the search.  */

static int
stop_search_p (insn, labels_p)
     rtx insn;
     int labels_p;
{
  if (insn == 0)
    return 1;

  switch (GET_CODE (insn))
    {
    case NOTE:
    case CALL_INSN:
      return 0;

    case CODE_LABEL:
      return labels_p;

    case JUMP_INSN:
    case BARRIER:
      return 1;

    case INSN:
      /* OK unless it contains a delay slot or is an `asm' insn of some type.
	 We don't know anything about these.  */
      return (GET_CODE (PATTERN (insn)) == SEQUENCE
	      || GET_CODE (PATTERN (insn)) == ASM_INPUT
	      || asm_noperands (PATTERN (insn)) >= 0);

    default:
      abort ();
    }
}

/* Return TRUE if any resources are marked in both RES1 and RES2 or if either
   resource set contains a volatile memory reference.  Otherwise, return FALSE.  */

static int
resource_conflicts_p (res1, res2)
     struct resources *res1, *res2;
{
  if ((res1->cc && res2->cc) || (res1->memory && res2->memory)
      || (res1->unch_memory && res2->unch_memory)
      || res1->volatil || res2->volatil)
    return 1;

#ifdef HARD_REG_SET
  return (res1->regs & res2->regs) != HARD_CONST (0);
#else
  {
    int i;

    for (i = 0; i < HARD_REG_SET_LONGS; i++)
      if ((res1->regs[i] & res2->regs[i]) != 0)
	return 1;
    return 0;
  }
#endif
}

/* Return TRUE if any resource marked in RES, a `struct resources', is
   referenced by INSN.  If INCLUDE_DELAYED_EFFECTS is set, return if the called
   routine is using those resources.

   We compute this by computing all the resources referenced by INSN and
   seeing if this conflicts with RES.  It might be faster to directly check
   ourselves, and this is the way it used to work, but it means duplicating
   a large block of complex code.  */

static int
insn_references_resource_p (insn, res, include_delayed_effects)
     register rtx insn;
     register struct resources *res;
     int include_delayed_effects;
{
  struct resources insn_res;

  CLEAR_RESOURCE (&insn_res);
  mark_referenced_resources (insn, &insn_res, include_delayed_effects);
  return resource_conflicts_p (&insn_res, res);
}

/* Return TRUE if INSN modifies resources that are marked in RES.
   INCLUDE_DELAYED_EFFECTS is set if the actions of that routine should be
   included.  CC0 is only modified if it is explicitly set; see comments
   in front of mark_set_resources for details.  */

static int
insn_sets_resource_p (insn, res, include_delayed_effects)
     register rtx insn;
     register struct resources *res;
     int include_delayed_effects;
{
  struct resources insn_sets;

  CLEAR_RESOURCE (&insn_sets);
  mark_set_resources (insn, &insn_sets, 0, include_delayed_effects);
  return resource_conflicts_p (&insn_sets, res);
}
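
/* A sketch of how the two predicates above are typically combined: a
   candidate TRIAL is rejected for a delay slot if it reads anything the
   insns already chosen set, or writes anything still needed or already
   set (compare steal_delay_list_from_target below):

       if (insn_references_resource_p (trial, sets, 0)
	   || insn_sets_resource_p (trial, needed, 0)
	   || insn_sets_resource_p (trial, sets, 0))
	 ...reject TRIAL...
*/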

/* Find a label at the end of the function or before a RETURN.  If there is
   none, make one.  */

static rtx
find_end_label ()
{
  rtx insn;

  /* If we found one previously, return it.  */
  if (end_of_function_label)
    return end_of_function_label;

  /* Otherwise, see if there is a label at the end of the function.  If there
     is, it must be that RETURN insns aren't needed, so that is our return
     label and we don't have to do anything else.  */

  insn = get_last_insn ();
  while (GET_CODE (insn) == NOTE
	 || (GET_CODE (insn) == INSN
	     && (GET_CODE (PATTERN (insn)) == USE
		 || GET_CODE (PATTERN (insn)) == CLOBBER)))
    insn = PREV_INSN (insn);

  /* When a target threads its epilogue we might already have a
     suitable return insn.  If so put a label before it for the
     end_of_function_label.  */
  if (GET_CODE (insn) == BARRIER
      && GET_CODE (PREV_INSN (insn)) == JUMP_INSN
      && GET_CODE (PATTERN (PREV_INSN (insn))) == RETURN)
    {
      rtx temp = PREV_INSN (PREV_INSN (insn));
      end_of_function_label = gen_label_rtx ();
      LABEL_NUSES (end_of_function_label) = 0;

      /* Put the label before any USE insns that may precede the RETURN insn.  */
      while (GET_CODE (temp) == USE)
	temp = PREV_INSN (temp);

      emit_label_after (end_of_function_label, temp);
    }

  else if (GET_CODE (insn) == CODE_LABEL)
    end_of_function_label = insn;
  else
    {
      /* Otherwise, make a new label and emit a RETURN and BARRIER,
	 if needed.  */
      end_of_function_label = gen_label_rtx ();
      LABEL_NUSES (end_of_function_label) = 0;
      emit_label (end_of_function_label);
#ifdef HAVE_return
      if (HAVE_return)
	{
	  /* The return we make may have delay slots too.  */
	  rtx insn = gen_return ();
	  insn = emit_jump_insn (insn);
	  emit_barrier ();
	  if (num_delay_slots (insn) > 0)
	    obstack_ptr_grow (&unfilled_slots_obstack, insn);
	}
#endif
    }

  /* Show one additional use for this label so it won't go away until
     we are done.  */
  ++LABEL_NUSES (end_of_function_label);

  return end_of_function_label;
}

/* Put INSN and LIST together in a SEQUENCE rtx of LENGTH, and replace
   the pattern of INSN with the SEQUENCE.

   Chain the insns so that NEXT_INSN of each insn in the sequence points to
   the next and NEXT_INSN of the last insn in the sequence points to
   the first insn after the sequence.  Similarly for PREV_INSN.  This makes
   it easier to scan all insns.

   Returns the SEQUENCE that replaces INSN.  */
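
/* The insn needing the slots becomes element 0 of the vector, followed
   by the insns that fill them; a sketch of the resulting rtl:

       (insn (sequence [(jump_insn ...)	;; the insn needing delay slots
			(insn ...)	;; delay slot 1
			...]))
*/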
static rtx
emit_delay_sequence (insn, list, length)
     rtx insn;
     rtx list;
     int length;
{
  register int i = 1;
  register rtx li;
  int had_barrier = 0;

  /* Allocate the rtvec to hold the insns and the SEQUENCE.  */
  rtvec seqv = rtvec_alloc (length + 1);
  rtx seq = gen_rtx_SEQUENCE (VOIDmode, seqv);
  rtx seq_insn = make_insn_raw (seq);
  rtx first = get_insns ();
  rtx last = get_last_insn ();

  /* Make a copy of the insn having delay slots.  */
  rtx delay_insn = copy_rtx (insn);

  /* If INSN is followed by a BARRIER, delete the BARRIER since it will only
     confuse further processing.  Update LAST in case it was the last insn.
     We will put the BARRIER back in later.  */
  if (NEXT_INSN (insn) && GET_CODE (NEXT_INSN (insn)) == BARRIER)
    {
      delete_insn (NEXT_INSN (insn));
      last = get_last_insn ();
      had_barrier = 1;
    }

  /* Splice our SEQUENCE into the insn stream where INSN used to be.  */
  NEXT_INSN (seq_insn) = NEXT_INSN (insn);
  PREV_INSN (seq_insn) = PREV_INSN (insn);

  if (NEXT_INSN (seq_insn))
    PREV_INSN (NEXT_INSN (seq_insn)) = seq_insn;

  if (PREV_INSN (seq_insn))
    NEXT_INSN (PREV_INSN (seq_insn)) = seq_insn;

  /* Note the calls to set_new_first_and_last_insn must occur after
     SEQ_INSN has been completely spliced into the insn stream.

     Otherwise CUR_INSN_UID will get set to an incorrect value because
     set_new_first_and_last_insn will not find SEQ_INSN in the chain.  */
  if (insn == last)
    set_new_first_and_last_insn (first, seq_insn);

  if (insn == first)
    set_new_first_and_last_insn (seq_insn, last);

  /* Build our SEQUENCE and rebuild the insn chain.  */
  XVECEXP (seq, 0, 0) = delay_insn;
  INSN_DELETED_P (delay_insn) = 0;
  PREV_INSN (delay_insn) = PREV_INSN (seq_insn);

  for (li = list; li; li = XEXP (li, 1), i++)
    {
      rtx tem = XEXP (li, 0);
      rtx note;

      /* Show that this copy of the insn isn't deleted.  */
      INSN_DELETED_P (tem) = 0;

      XVECEXP (seq, 0, i) = tem;
      PREV_INSN (tem) = XVECEXP (seq, 0, i - 1);
      NEXT_INSN (XVECEXP (seq, 0, i - 1)) = tem;

      /* Remove any REG_DEAD notes because we can't rely on them now
	 that the insn has been moved.  */
      for (note = REG_NOTES (tem); note; note = XEXP (note, 1))
	if (REG_NOTE_KIND (note) == REG_DEAD)
	  XEXP (note, 0) = const0_rtx;
    }

  NEXT_INSN (XVECEXP (seq, 0, length)) = NEXT_INSN (seq_insn);

  /* If the previous insn is a SEQUENCE, update the NEXT_INSN pointer on the
     last insn in that SEQUENCE to point to us.  Similarly for the first
     insn in the following insn if it is a SEQUENCE.  */

  if (PREV_INSN (seq_insn) && GET_CODE (PREV_INSN (seq_insn)) == INSN
      && GET_CODE (PATTERN (PREV_INSN (seq_insn))) == SEQUENCE)
    NEXT_INSN (XVECEXP (PATTERN (PREV_INSN (seq_insn)), 0,
			XVECLEN (PATTERN (PREV_INSN (seq_insn)), 0) - 1))
      = seq_insn;

  if (NEXT_INSN (seq_insn) && GET_CODE (NEXT_INSN (seq_insn)) == INSN
      && GET_CODE (PATTERN (NEXT_INSN (seq_insn))) == SEQUENCE)
    PREV_INSN (XVECEXP (PATTERN (NEXT_INSN (seq_insn)), 0, 0)) = seq_insn;

  /* If there used to be a BARRIER, put it back.  */
  if (had_barrier)
    emit_barrier_after (seq_insn);

  if (i != length + 1)
    abort ();

  return seq_insn;
}

/* Add INSN to DELAY_LIST and return the head of the new list.  The list must
   be in the order in which the insns are to be executed.  */

static rtx
add_to_delay_list (insn, delay_list)
     rtx insn;
     rtx delay_list;
{
  /* If we have an empty list, just make a new list element.  If
     INSN has its block number recorded, clear it since we may
     be moving the insn to a new block.  */

  if (delay_list == 0)
    {
      clear_hashed_info_for_insn (insn);
      return gen_rtx_INSN_LIST (VOIDmode, insn, NULL_RTX);
    }

  /* Otherwise this must be an INSN_LIST.  Add INSN to the end of the
     list.  */
  XEXP (delay_list, 1) = add_to_delay_list (insn, XEXP (delay_list, 1));

  return delay_list;
}
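
/* For instance, optimize_skip (below) starts a fresh one-element list
   with:

       delay_list = add_to_delay_list (trial, NULL_RTX);
*/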

/* Delete INSN from the delay slot of the insn that it is in, which may
   produce an insn with no delay slots.  Return the new insn.  */

static rtx
delete_from_delay_slot (insn)
     rtx insn;
{
  rtx trial, seq_insn, seq, prev;
  rtx delay_list = 0;
  int i;

  /* We first must find the insn containing the SEQUENCE with INSN in its
     delay slot.  Do this by finding an insn, TRIAL, where
     PREV_INSN (NEXT_INSN (TRIAL)) != TRIAL.  */

  for (trial = insn;
       PREV_INSN (NEXT_INSN (trial)) == trial;
       trial = NEXT_INSN (trial))
    ;

  seq_insn = PREV_INSN (NEXT_INSN (trial));
  seq = PATTERN (seq_insn);

  /* Create a delay list consisting of all the insns other than the one
     we are deleting (unless we were the only one).  */
  if (XVECLEN (seq, 0) > 2)
    for (i = 1; i < XVECLEN (seq, 0); i++)
      if (XVECEXP (seq, 0, i) != insn)
	delay_list = add_to_delay_list (XVECEXP (seq, 0, i), delay_list);

  /* Delete the old SEQUENCE, re-emit the insn that used to have the delay
     list, and rebuild the delay list if non-empty.  */
  prev = PREV_INSN (seq_insn);
  trial = XVECEXP (seq, 0, 0);
  delete_insn (seq_insn);
  add_insn_after (trial, prev);

  if (GET_CODE (trial) == JUMP_INSN
      && (simplejump_p (trial) || GET_CODE (PATTERN (trial)) == RETURN))
    emit_barrier_after (trial);

  /* If there are any delay insns, re-emit them.  Otherwise clear the
     annul flag.  */
  if (delay_list)
    trial = emit_delay_sequence (trial, delay_list, XVECLEN (seq, 0) - 2);
  else
    INSN_ANNULLED_BRANCH_P (trial) = 0;

  INSN_FROM_TARGET_P (insn) = 0;

  /* Show we need to fill this insn again.  */
  obstack_ptr_grow (&unfilled_slots_obstack, trial);

  return trial;
}

/* Delete INSN, a JUMP_INSN.  If it is a conditional jump, we must track down
   the insn that sets CC0 for it and delete it too.  */

static void
delete_scheduled_jump (insn)
     rtx insn;
{
  /* Delete the insn that sets cc0 for us.  On machines without cc0, we could
     delete the insn that sets the condition code, but it is hard to find it.
     Since this case is rare anyway, don't bother trying; there would likely
     be other insns that became dead anyway, which we wouldn't know to
     delete.  */

#ifdef HAVE_cc0
  if (reg_mentioned_p (cc0_rtx, insn))
    {
      rtx note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);

      /* If a reg-note was found, it points to an insn to set CC0.  This
	 insn is in the delay list of some other insn.  So delete it from
	 the delay list it was in.  */
      if (note)
	{
	  if (! FIND_REG_INC_NOTE (XEXP (note, 0), NULL_RTX)
	      && sets_cc0_p (PATTERN (XEXP (note, 0))) == 1)
	    delete_from_delay_slot (XEXP (note, 0));
	}
      else
	{
	  /* The insn setting CC0 is our previous insn, but it may be in
	     a delay slot.  It will be the last insn in the delay slot, if
	     it is.  */
	  rtx trial = previous_insn (insn);
	  if (GET_CODE (trial) == NOTE)
	    trial = prev_nonnote_insn (trial);
	  if (sets_cc0_p (PATTERN (trial)) != 1
	      || FIND_REG_INC_NOTE (trial, 0))
	    return;
	  if (PREV_INSN (NEXT_INSN (trial)) == trial)
	    delete_insn (trial);
	  else
	    delete_from_delay_slot (trial);
	}
    }
#endif

  delete_insn (insn);
}

/* Counters for delay-slot filling.  */

#define NUM_REORG_FUNCTIONS 2
#define MAX_DELAY_HISTOGRAM 3
#define MAX_REORG_PASSES 2

static int num_insns_needing_delays[NUM_REORG_FUNCTIONS][MAX_REORG_PASSES];

static int num_filled_delays[NUM_REORG_FUNCTIONS][MAX_DELAY_HISTOGRAM+1][MAX_REORG_PASSES];

static int reorg_pass_number;

static void
note_delay_statistics (slots_filled, index)
     int slots_filled, index;
{
  num_insns_needing_delays[index][reorg_pass_number]++;
  if (slots_filled > MAX_DELAY_HISTOGRAM)
    slots_filled = MAX_DELAY_HISTOGRAM;
  num_filled_delays[index][slots_filled][reorg_pass_number]++;
}

#if defined(ANNUL_IFFALSE_SLOTS) || defined(ANNUL_IFTRUE_SLOTS)

/*  Optimize the following cases:

    1.  When a conditional branch skips over only one instruction,
	use an annulling branch and put that insn in the delay slot.
	Use either a branch that annuls when the condition is true or
	invert the test with a branch that annuls when the condition is
	false.  This saves insns, since otherwise we must copy an insn
	from the L1 target.

	(orig)		 (skip)		(otherwise)
	Bcc.n L1	Bcc',a L1	Bcc,a L1'
	insn		insn		insn2
      L1:	      L1:	      L1:
	insn2		insn2		insn2
	insn3		insn3	      L1':
					insn3

    2.  When a conditional branch skips over only one instruction,
	and after that, it unconditionally branches somewhere else,
	perform the similar optimization.  This saves executing the
	second branch in the case where the inverted condition is true.

	Bcc.n L1	Bcc',a L2
	insn		insn
      L1:	      L1:
	Bra L2		Bra L2

   This should be expanded to skip over N insns, where N is the number
   of delay slots required.  */
static rtx
optimize_skip (insn)
     register rtx insn;
{
  register rtx trial = next_nonnote_insn (insn);
  rtx next_trial = next_active_insn (trial);
  rtx delay_list = 0;
  rtx target_label;
  int flags;

  flags = get_jump_flags (insn, JUMP_LABEL (insn));

  if (trial == 0
      || GET_CODE (trial) != INSN
      || GET_CODE (PATTERN (trial)) == SEQUENCE
      || recog_memoized (trial) < 0
      || (! eligible_for_annul_false (insn, 0, trial, flags)
	  && ! eligible_for_annul_true (insn, 0, trial, flags)))
    return 0;

  /* There are two cases where we are just executing one insn (we assume
     here that a branch requires only one insn; this should be generalized
     at some point):  Where the branch goes around a single insn or where
     we have one insn followed by a branch to the same label we branch to.
     In both of these cases, inverting the jump and annulling the delay
     slot give the same effect in fewer insns.  */
  if ((next_trial == next_active_insn (JUMP_LABEL (insn))
       && ! (next_trial == 0 && current_function_epilogue_delay_list != 0))
      || (next_trial != 0
	  && GET_CODE (next_trial) == JUMP_INSN
	  && JUMP_LABEL (insn) == JUMP_LABEL (next_trial)
	  && (simplejump_p (next_trial)
	      || GET_CODE (PATTERN (next_trial)) == RETURN)))
    {
      if (eligible_for_annul_false (insn, 0, trial, flags))
	{
	  if (invert_jump (insn, JUMP_LABEL (insn)))
	    INSN_FROM_TARGET_P (trial) = 1;
	  else if (! eligible_for_annul_true (insn, 0, trial, flags))
	    return 0;
	}

      delay_list = add_to_delay_list (trial, NULL_RTX);
      next_trial = next_active_insn (trial);
      update_block (trial, trial);
      delete_insn (trial);

      /* Also, if we are targeting an unconditional
	 branch, thread our jump to the target of that branch.  Don't
	 change this into a RETURN here, because it may not accept what
	 we have in the delay slot.  We'll fix this up later.  */
      if (next_trial && GET_CODE (next_trial) == JUMP_INSN
	  && (simplejump_p (next_trial)
	      || GET_CODE (PATTERN (next_trial)) == RETURN))
	{
	  target_label = JUMP_LABEL (next_trial);
	  if (target_label == 0)
	    target_label = find_end_label ();

	  /* Recompute the flags based on TARGET_LABEL since threading
	     the jump to TARGET_LABEL may change the direction of the
	     jump (which may change the circumstances in which the
	     delay slot is nullified).  */
	  flags = get_jump_flags (insn, target_label);
	  if (eligible_for_annul_true (insn, 0, trial, flags))
	    reorg_redirect_jump (insn, target_label);
	}

      INSN_ANNULLED_BRANCH_P (insn) = 1;
    }

  return delay_list;
}
#endif

/* Encode and return branch direction and prediction information for
   INSN assuming it will jump to LABEL.

   Non conditional branches return no direction information and
   are predicted as very likely taken.  */

static int
get_jump_flags (insn, label)
     rtx insn, label;
{
  int flags;

  /* get_jump_flags can be passed any insn with delay slots, these may
     be INSNs, CALL_INSNs, or JUMP_INSNs.  Only JUMP_INSNs have branch
     direction information, and only if they are conditional jumps.

     If LABEL is zero, then there is no way to determine the branch
     direction.  */
  if (GET_CODE (insn) == JUMP_INSN
      && (condjump_p (insn) || condjump_in_parallel_p (insn))
      && INSN_UID (insn) <= max_uid
      && label != 0
      && INSN_UID (label) <= max_uid)
    flags
      = (uid_to_ruid[INSN_UID (label)] > uid_to_ruid[INSN_UID (insn)])
	? ATTR_FLAG_forward : ATTR_FLAG_backward;
  /* No valid direction information.  */
  else
    flags = 0;

  /* If INSN is a conditional branch, call mostly_true_jump to
     determine the branch prediction.

     Non conditional branches are predicted as very likely taken.  */
  if (GET_CODE (insn) == JUMP_INSN
      && (condjump_p (insn) || condjump_in_parallel_p (insn)))
    {
      int prediction;

      prediction = mostly_true_jump (insn, get_branch_condition (insn, label));
      switch (prediction)
	{
	case 2:
	  flags |= (ATTR_FLAG_very_likely | ATTR_FLAG_likely);
	  break;
	case 1:
	  flags |= ATTR_FLAG_likely;
	  break;
	case 0:
	  flags |= ATTR_FLAG_unlikely;
	  break;
	case -1:
	  flags |= (ATTR_FLAG_very_unlikely | ATTR_FLAG_unlikely);
	  break;

	default:
	  abort ();
	}
    }
  else
    flags |= (ATTR_FLAG_very_likely | ATTR_FLAG_likely);

  return flags;
}

/* Return 1 if INSN is a destination that will be branched to rarely (the
   return point of a function); return 2 if DEST will be branched to very
   rarely (a call to a function that doesn't return).  Otherwise,
   return 0.  */

static int
rare_destination (insn)
     rtx insn;
{
  int jump_count = 0;
  rtx next;

  for (; insn; insn = next)
    {
      if (GET_CODE (insn) == INSN && GET_CODE (PATTERN (insn)) == SEQUENCE)
	insn = XVECEXP (PATTERN (insn), 0, 0);

      next = NEXT_INSN (insn);

      switch (GET_CODE (insn))
	{
	case CODE_LABEL:
	  return 0;

	case BARRIER:
	  /* A BARRIER can either be after a JUMP_INSN or a CALL_INSN.  We
	     don't scan past JUMP_INSNs, so any barrier we find here must
	     have been after a CALL_INSN and hence mean the call doesn't
	     return.  */
	  return 2;

	case JUMP_INSN:
	  if (GET_CODE (PATTERN (insn)) == RETURN)
	    return 1;
	  else if (simplejump_p (insn)
		   && jump_count++ < 10)
	    next = JUMP_LABEL (insn);
	  else
	    return 0;

	default:
	  break;
	}
    }

  /* If we got here it means we hit the end of the function.  So this
     is an unlikely destination.  */

  return 1;
}

/* Return truth value of the statement that this branch
   is mostly taken.  If we think that the branch is extremely likely
   to be taken, we return 2.  If the branch is slightly more likely to be
   taken, return 1.  If the branch is slightly less likely to be taken,
   return 0 and if the branch is highly unlikely to be taken, return -1.

   CONDITION, if non-zero, is the condition that JUMP_INSN is testing.  */

static int
mostly_true_jump (jump_insn, condition)
     rtx jump_insn, condition;
{
  rtx target_label = JUMP_LABEL (jump_insn);
  rtx insn;
  int rare_dest = rare_destination (target_label);
  int rare_fallthrough = rare_destination (NEXT_INSN (jump_insn));

  /* If branch probabilities are available, then use that number since it
     always gives a correct answer.  */
  if (flag_branch_probabilities)
    {
      rtx note = find_reg_note (jump_insn, REG_BR_PROB, 0);
      if (note)
	{
	  int prob = XINT (note, 0);

	  if (prob >= REG_BR_PROB_BASE * 9 / 10)
	    return 2;
	  else if (prob >= REG_BR_PROB_BASE / 2)
	    return 1;
	  else if (prob >= REG_BR_PROB_BASE / 10)
	    return 0;
	  else
	    return -1;
	}
    }

  /* If this is a branch outside a loop, it is highly unlikely.  */
  if (GET_CODE (PATTERN (jump_insn)) == SET
      && GET_CODE (SET_SRC (PATTERN (jump_insn))) == IF_THEN_ELSE
      && ((GET_CODE (XEXP (SET_SRC (PATTERN (jump_insn)), 1)) == LABEL_REF
	   && LABEL_OUTSIDE_LOOP_P (XEXP (SET_SRC (PATTERN (jump_insn)), 1)))
	  || (GET_CODE (XEXP (SET_SRC (PATTERN (jump_insn)), 2)) == LABEL_REF
	      && LABEL_OUTSIDE_LOOP_P (XEXP (SET_SRC (PATTERN (jump_insn)), 2)))))
    return -1;

  if (target_label)
    {
      /* If this is the test of a loop, it is very likely true.  We scan
	 backwards from the target label.  If we find a NOTE_INSN_LOOP_BEG
	 before the next real insn, we assume the branch is to the top of
	 the loop.  */
      for (insn = PREV_INSN (target_label);
	   insn && GET_CODE (insn) == NOTE;
	   insn = PREV_INSN (insn))
	if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG)
	  return 2;

      /* If this is a jump to the test of a loop, it is likely true.  We scan
	 forwards from the target label.  If we find a NOTE_INSN_LOOP_VTOP
	 before the next real insn, we assume the branch is to the loop branch
	 test.  */
      for (insn = NEXT_INSN (target_label);
	   insn && GET_CODE (insn) == NOTE;
	   insn = NEXT_INSN (insn))
	if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_VTOP)
	  return 1;
    }

  /* Look at the relative rarities of the fallthrough and destination.  If
     they differ, we can predict the branch that way.  */

  switch (rare_fallthrough - rare_dest)
    {
    case -2:
      return -1;
    case -1:
      return 0;
    case 0:
      break;
    case 1:
      return 1;
    case 2:
      return 2;
    }

  /* If we couldn't figure out what this jump was, assume it won't be
     taken.  This should be rare.  */
  if (condition == 0)
    return 0;

  /* EQ tests are usually false and NE tests are usually true.  Also,
     most quantities are positive, so we can make the appropriate guesses
     about signed comparisons against zero.  */
  switch (GET_CODE (condition))
    {
    case CONST_INT:
      /* Unconditional branch.  */
      return 1;
    case EQ:
      return 0;
    case NE:
      return 2;
    case LE:
    case LT:
      if (XEXP (condition, 1) == const0_rtx)
	return 0;
      break;
    case GE:
    case GT:
      if (XEXP (condition, 1) == const0_rtx)
	return 2;
      break;

    default:
      break;
    }

  /* Predict backward branches usually take, forward branches usually not.  If
     we don't know whether this is forward or backward, assume the branch
     will be taken, since most are.  */
  return (target_label == 0 || INSN_UID (jump_insn) > max_uid
	  || INSN_UID (target_label) > max_uid
	  || (uid_to_ruid[INSN_UID (jump_insn)]
	      > uid_to_ruid[INSN_UID (target_label)]));
}

/* Return the condition under which INSN will branch to TARGET.  If TARGET
   is zero, return the condition under which INSN will return.  If INSN is
   an unconditional branch, return const_true_rtx.  If INSN isn't a simple
   type of jump, or it doesn't go to TARGET, return 0.  */

static rtx
get_branch_condition (insn, target)
     rtx insn;
     rtx target;
{
  rtx pat = PATTERN (insn);
  rtx src;

  if (condjump_in_parallel_p (insn))
    pat = XVECEXP (pat, 0, 0);

  if (GET_CODE (pat) == RETURN)
    return target == 0 ? const_true_rtx : 0;

  else if (GET_CODE (pat) != SET || SET_DEST (pat) != pc_rtx)
    return 0;

  src = SET_SRC (pat);
  if (GET_CODE (src) == LABEL_REF && XEXP (src, 0) == target)
    return const_true_rtx;

  else if (GET_CODE (src) == IF_THEN_ELSE
	   && ((target == 0 && GET_CODE (XEXP (src, 1)) == RETURN)
	       || (GET_CODE (XEXP (src, 1)) == LABEL_REF
		   && XEXP (XEXP (src, 1), 0) == target))
	   && XEXP (src, 2) == pc_rtx)
    return XEXP (src, 0);

  else if (GET_CODE (src) == IF_THEN_ELSE
	   && ((target == 0 && GET_CODE (XEXP (src, 2)) == RETURN)
	       || (GET_CODE (XEXP (src, 2)) == LABEL_REF
		   && XEXP (XEXP (src, 2), 0) == target))
	   && XEXP (src, 1) == pc_rtx)
    return gen_rtx_fmt_ee (reverse_condition (GET_CODE (XEXP (src, 0))),
			   GET_MODE (XEXP (src, 0)),
			   XEXP (XEXP (src, 0), 0), XEXP (XEXP (src, 0), 1));

  return 0;
}
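
/* For reference, the canonical conditional jump recognized above has a
   pattern of the form (a sketch):

       (set (pc) (if_then_else (COND ...)
			       (label_ref TARGET)
			       (pc)))
*/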

/* Return non-zero if CONDITION is more strict than the condition of
   INSN, i.e., if INSN will always branch if CONDITION is true.  */

static int
condition_dominates_p (condition, insn)
     rtx condition;
     rtx insn;
{
  rtx other_condition = get_branch_condition (insn, JUMP_LABEL (insn));
  enum rtx_code code = GET_CODE (condition);
  enum rtx_code other_code;

  if (rtx_equal_p (condition, other_condition)
      || other_condition == const_true_rtx)
    return 1;

  else if (condition == const_true_rtx || other_condition == 0)
    return 0;

  other_code = GET_CODE (other_condition);
  if (GET_RTX_LENGTH (code) != 2 || GET_RTX_LENGTH (other_code) != 2
      || ! rtx_equal_p (XEXP (condition, 0), XEXP (other_condition, 0))
      || ! rtx_equal_p (XEXP (condition, 1), XEXP (other_condition, 1)))
    return 0;

  return comparison_dominates_p (code, other_code);
}
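
/* For instance, with identical operands, an EQ condition dominates an
   insn that branches on LE: whenever EQ holds, LE holds as well, so the
   insn's branch is certainly taken (this is what comparison_dominates_p
   decides).  */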

/* Return non-zero if redirecting JUMP to NEWLABEL does not invalidate
   any insns already in the delay slot of JUMP.  */

static int
redirect_with_delay_slots_safe_p (jump, newlabel, seq)
     rtx jump, newlabel, seq;
{
  int flags, i;
  rtx pat = PATTERN (seq);

  /* Make sure all the delay slots of this jump would still
     be valid after threading the jump.  If they are still
     valid, then return non-zero.  */

  flags = get_jump_flags (jump, newlabel);
  for (i = 1; i < XVECLEN (pat, 0); i++)
    if (! (
#ifdef ANNUL_IFFALSE_SLOTS
	   (INSN_ANNULLED_BRANCH_P (jump)
	    && INSN_FROM_TARGET_P (XVECEXP (pat, 0, i)))
	   ? eligible_for_annul_false (jump, i - 1,
				       XVECEXP (pat, 0, i), flags) :
#endif
#ifdef ANNUL_IFTRUE_SLOTS
	   (INSN_ANNULLED_BRANCH_P (jump)
	    && ! INSN_FROM_TARGET_P (XVECEXP (pat, 0, i)))
	   ? eligible_for_annul_true (jump, i - 1,
				      XVECEXP (pat, 0, i), flags) :
#endif
	   eligible_for_delay (jump, i - 1, XVECEXP (pat, 0, i), flags)))
      break;

  return (i == XVECLEN (pat, 0));
}

/* Return non-zero if redirecting JUMP to NEWLABEL does not invalidate
   any insns we wish to place in the delay slot of JUMP.  */

static int
redirect_with_delay_list_safe_p (jump, newlabel, delay_list)
     rtx jump, newlabel, delay_list;
{
  int flags, i;
  rtx li;

  /* Make sure all the insns in DELAY_LIST would still be
     valid after threading the jump.  If they are still
     valid, then return non-zero.  */

  flags = get_jump_flags (jump, newlabel);
  for (li = delay_list, i = 0; li; li = XEXP (li, 1), i++)
    if (! (
#ifdef ANNUL_IFFALSE_SLOTS
	   (INSN_ANNULLED_BRANCH_P (jump)
	    && INSN_FROM_TARGET_P (XEXP (li, 0)))
	   ? eligible_for_annul_false (jump, i, XEXP (li, 0), flags) :
#endif
#ifdef ANNUL_IFTRUE_SLOTS
	   (INSN_ANNULLED_BRANCH_P (jump)
	    && ! INSN_FROM_TARGET_P (XEXP (li, 0)))
	   ? eligible_for_annul_true (jump, i, XEXP (li, 0), flags) :
#endif
	   eligible_for_delay (jump, i, XEXP (li, 0), flags)))
      break;

  return (li == NULL);
}

/* DELAY_LIST is a list of insns that have already been placed into delay
   slots.  See if all of them have the same annulling status as ANNUL_TRUE_P.
   If not, return 0; otherwise return 1.  */

static int
check_annul_list_true_false (annul_true_p, delay_list)
     int annul_true_p;
     rtx delay_list;
{
  rtx temp;

  if (delay_list)
    {
      for (temp = delay_list; temp; temp = XEXP (temp, 1))
	{
	  rtx trial = XEXP (temp, 0);

	  if ((annul_true_p && INSN_FROM_TARGET_P (trial))
	      || (!annul_true_p && !INSN_FROM_TARGET_P (trial)))
	    return 0;
	}
    }

  return 1;
}

/* INSN branches to an insn whose pattern SEQ is a SEQUENCE.  Given that
   the condition tested by INSN is CONDITION and the resources shown in
   OTHER_NEEDED are needed after INSN, see whether INSN can take all the insns
   from SEQ's delay list, in addition to whatever insns it may execute
   (in DELAY_LIST).  SETS and NEEDED denote resources already set and
   needed while searching for delay slot insns.  Return the concatenated
   delay list if possible, otherwise, return 0.

   SLOTS_TO_FILL is the total number of slots required by INSN, and
   PSLOTS_FILLED points to the number filled so far (also the number of
   insns in DELAY_LIST).  It is updated with the number that have been
   filled from the SEQUENCE, if any.

   PANNUL_P points to a non-zero value if we already know that we need
   to annul INSN.  If this routine determines that annulling is needed,
   it may set that value non-zero.

   PNEW_THREAD points to a location that is to receive the place at which
   execution should continue.  */

static rtx
steal_delay_list_from_target (insn, condition, seq, delay_list,
			      sets, needed, other_needed,
			      slots_to_fill, pslots_filled, pannul_p,
			      pnew_thread)
     rtx insn, condition;
     rtx seq;
     rtx delay_list;
     struct resources *sets, *needed, *other_needed;
     int slots_to_fill;
     int *pslots_filled;
     int *pannul_p;
     rtx *pnew_thread;
{
  rtx temp;
  int slots_remaining = slots_to_fill - *pslots_filled;
  int total_slots_filled = *pslots_filled;
  rtx new_delay_list = 0;
  int must_annul = *pannul_p;
  int used_annul = 0;
  int i;
  struct resources cc_set;

  /* We can't do anything if there are more delay slots in SEQ than we
     can handle, or if we don't know that it will be a taken branch.
     We know that it will be a taken branch if it is either an unconditional
     branch or a conditional branch with a stricter branch condition.

     Also, exit if the branch has more than one set, since then it is computing
     other results that can't be ignored, e.g. the HPPA mov&branch instruction.
     ??? It may be possible to move other sets into INSN in addition to
     moving the instructions in the delay slots.

     We can not steal the delay list if one of the instructions in the
     current delay_list modifies the condition codes and the jump in the
     sequence is a conditional jump.  We can not do this because we can
     not change the direction of the jump because the condition codes
     will affect the direction of the jump in the sequence.  */

  CLEAR_RESOURCE (&cc_set);
  for (temp = delay_list; temp; temp = XEXP (temp, 1))
    {
      rtx trial = XEXP (temp, 0);

      mark_set_resources (trial, &cc_set, 0, 1);
      if (insn_references_resource_p (XVECEXP (seq, 0, 0), &cc_set, 0))
	return delay_list;
    }

  if (XVECLEN (seq, 0) - 1 > slots_remaining
      || ! condition_dominates_p (condition, XVECEXP (seq, 0, 0))
      || ! single_set (XVECEXP (seq, 0, 0)))
    return delay_list;

  for (i = 1; i < XVECLEN (seq, 0); i++)
    {
      rtx trial = XVECEXP (seq, 0, i);
      int flags;

      if (insn_references_resource_p (trial, sets, 0)
	  || insn_sets_resource_p (trial, needed, 0)
	  || insn_sets_resource_p (trial, sets, 0)
#ifdef HAVE_cc0
	  /* If TRIAL sets CC0, we can't copy it, so we can't steal this
	     delay list.  */
	  || find_reg_note (trial, REG_CC_USER, NULL_RTX)
#endif
	  /* If TRIAL is from the fallthrough code of an annulled branch insn
	     in SEQ, we cannot use it.  */
	  || (INSN_ANNULLED_BRANCH_P (XVECEXP (seq, 0, 0))
	      && ! INSN_FROM_TARGET_P (trial)))
	return delay_list;

      /* If this insn was already done (usually in a previous delay slot),
	 pretend we put it in our delay slot.  */
      if (redundant_insn (trial, insn, new_delay_list))
	continue;

      /* We will end up re-vectoring this branch, so compute flags
	 based on jumping to the new label.  */
      flags = get_jump_flags (insn, JUMP_LABEL (XVECEXP (seq, 0, 0)));

      if (! must_annul
	  && ((condition == const_true_rtx
	       || (! insn_sets_resource_p (trial, other_needed, 0)
		   && ! may_trap_p (PATTERN (trial)))))
	  ? eligible_for_delay (insn, total_slots_filled, trial, flags)
	  : (must_annul || (delay_list == NULL && new_delay_list == NULL))
	     && (must_annul = 1,
		 check_annul_list_true_false (0, delay_list)
		 && check_annul_list_true_false (0, new_delay_list)
		 && eligible_for_annul_false (insn, total_slots_filled,
					      trial, flags)))
	{
	  if (must_annul)
	    used_annul = 1;
	  temp = copy_rtx (trial);
	  INSN_FROM_TARGET_P (temp) = 1;
	  new_delay_list = add_to_delay_list (temp, new_delay_list);
	  total_slots_filled++;

	  if (--slots_remaining == 0)
	    break;
	}
      else
	return delay_list;
    }

  /* Show the place to which we will be branching.  */
  *pnew_thread = next_active_insn (JUMP_LABEL (XVECEXP (seq, 0, 0)));

  /* Add any new insns to the delay list and update the count of the
     number of slots filled.  */
  *pslots_filled = total_slots_filled;
  if (used_annul)
    *pannul_p = 1;

  if (delay_list == 0)
    return new_delay_list;

  for (temp = new_delay_list; temp; temp = XEXP (temp, 1))
    delay_list = add_to_delay_list (XEXP (temp, 0), delay_list);

  return delay_list;
}

/* Similar to steal_delay_list_from_target except that SEQ is on the
   fallthrough path of INSN.  Here we only do something if the delay insn
   of SEQ is an unconditional branch.  In that case we steal its delay slot
   for INSN since unconditional branches are much easier to fill.  */

static rtx
steal_delay_list_from_fallthrough (insn, condition, seq,
				   delay_list, sets, needed, other_needed,
				   slots_to_fill, pslots_filled, pannul_p)
     rtx insn, condition;
     rtx seq;
     rtx delay_list;
     struct resources *sets, *needed, *other_needed;
     int slots_to_fill;
     int *pslots_filled;
     int *pannul_p;
{
  int i;
  int flags;
  int must_annul = *pannul_p;
  int used_annul = 0;

  flags = get_jump_flags (insn, JUMP_LABEL (insn));

  /* We can't do anything if SEQ's delay insn isn't an
     unconditional branch.  */

  if (! simplejump_p (XVECEXP (seq, 0, 0))
      && GET_CODE (PATTERN (XVECEXP (seq, 0, 0))) != RETURN)
    return delay_list;

  for (i = 1; i < XVECLEN (seq, 0); i++)
    {
      rtx trial = XVECEXP (seq, 0, i);

      /* If TRIAL sets CC0, stealing it will move it too far from the use
	 of CC0.  */
      if (insn_references_resource_p (trial, sets, 0)
	  || insn_sets_resource_p (trial, needed, 0)
	  || insn_sets_resource_p (trial, sets, 0)
#ifdef HAVE_cc0
	  || sets_cc0_p (PATTERN (trial))
#endif
	  )
	break;

      /* If this insn was already done, we don't need it.  */
      if (redundant_insn (trial, insn, delay_list))
	{
	  delete_from_delay_slot (trial);
	  continue;
	}

      if (! must_annul
	  && ((condition == const_true_rtx
	       || (! insn_sets_resource_p (trial, other_needed, 0)
		   && ! may_trap_p (PATTERN (trial)))))
	  ? eligible_for_delay (insn, *pslots_filled, trial, flags)
	  : (must_annul || delay_list == NULL) && (must_annul = 1,
	     check_annul_list_true_false (1, delay_list)
	     && eligible_for_annul_true (insn, *pslots_filled, trial, flags)))
	{
	  if (must_annul)
	    used_annul = 1;
	  delete_from_delay_slot (trial);
	  delay_list = add_to_delay_list (trial, delay_list);

	  if (++(*pslots_filled) == slots_to_fill)
	    break;
	}
      else
	break;
    }

  if (used_annul)
    *pannul_p = 1;
  return delay_list;
}

/* Try merging insns starting at THREAD which match exactly the insns in
   INSN's delay list.

   If all insns were matched and the insn was previously annulling, the
   annul bit will be cleared.

   For each insn that is merged, if the branch is or will be non-annulling,
   we delete the merged insn.  */

static void
try_merge_delay_insns (insn, thread)
     rtx insn, thread;
{
  rtx trial, next_trial;
  rtx delay_insn = XVECEXP (PATTERN (insn), 0, 0);
  int annul_p = INSN_ANNULLED_BRANCH_P (delay_insn);
  int slot_number = 1;
  int num_slots = XVECLEN (PATTERN (insn), 0);
  rtx next_to_match = XVECEXP (PATTERN (insn), 0, slot_number);
  struct resources set, needed;
  rtx merged_insns = 0;
  int i;
  int flags;

  flags = get_jump_flags (delay_insn, JUMP_LABEL (delay_insn));

  CLEAR_RESOURCE (&needed);
  CLEAR_RESOURCE (&set);

  /* If this is not an annulling branch, take into account anything needed in
     INSN's delay slot.  This prevents two increments from being incorrectly
     folded into one.  If we are annulling, this would be the correct
     thing to do.  (The alternative, looking at things set in NEXT_TO_MATCH
     will essentially disable this optimization.  This method is somewhat of
     a kludge, but I don't see a better way.)  */
  if (! annul_p)
    for (i = 1; i < num_slots; i++)
      if (XVECEXP (PATTERN (insn), 0, i))
	mark_referenced_resources (XVECEXP (PATTERN (insn), 0, i), &needed, 1);

  for (trial = thread; !stop_search_p (trial, 1); trial = next_trial)
    {
      rtx pat = PATTERN (trial);
      rtx oldtrial = trial;

      next_trial = next_nonnote_insn (trial);

      /* TRIAL must be a CALL_INSN or INSN.  Skip USE and CLOBBER.  */
      if (GET_CODE (trial) == INSN
	  && (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER))
	continue;

      if (GET_CODE (next_to_match) == GET_CODE (trial)
#ifdef HAVE_cc0
	  /* We can't share an insn that sets cc0.  */
	  && ! sets_cc0_p (pat)
#endif
	  && ! insn_references_resource_p (trial, &set, 1)
	  && ! insn_sets_resource_p (trial, &set, 1)
	  && ! insn_sets_resource_p (trial, &needed, 1)
	  && (trial = try_split (pat, trial, 0)) != 0
	  /* Update next_trial, in case try_split succeeded.  */
	  && (next_trial = next_nonnote_insn (trial))
	  /* Likewise THREAD.  */
	  && (thread = oldtrial == thread ? trial : thread)
	  && rtx_equal_p (PATTERN (next_to_match), PATTERN (trial))
	  /* Have to test this condition if annul condition is different
	     from (and less restrictive than) non-annulling one.  */
	  && eligible_for_delay (delay_insn, slot_number - 1, trial, flags))
	{
	  if (! annul_p)
	    {
	      update_block (trial, thread);
	      if (trial == thread)
		thread = next_active_insn (thread);

	      delete_insn (trial);
	      INSN_FROM_TARGET_P (next_to_match) = 0;
	    }
	  else
	    merged_insns = gen_rtx_INSN_LIST (VOIDmode, trial, merged_insns);

	  if (++slot_number == num_slots)
	    break;

	  next_to_match = XVECEXP (PATTERN (insn), 0, slot_number);
	}

      mark_set_resources (trial, &set, 0, 1);
      mark_referenced_resources (trial, &needed, 1);
    }

  /* See if we stopped on a filled insn.  If we did, try to see if its
     delay slots match.  */
  if (slot_number != num_slots
      && trial && GET_CODE (trial) == INSN
      && GET_CODE (PATTERN (trial)) == SEQUENCE
      && ! INSN_ANNULLED_BRANCH_P (XVECEXP (PATTERN (trial), 0, 0)))
    {
      rtx pat = PATTERN (trial);
      rtx filled_insn = XVECEXP (pat, 0, 0);

      /* Account for resources set/needed by the filled insn.  */
      mark_set_resources (filled_insn, &set, 0, 1);
      mark_referenced_resources (filled_insn, &needed, 1);

      for (i = 1; i < XVECLEN (pat, 0); i++)
	{
	  rtx dtrial = XVECEXP (pat, 0, i);

	  if (! insn_references_resource_p (dtrial, &set, 1)
	      && ! insn_sets_resource_p (dtrial, &set, 1)
	      && ! insn_sets_resource_p (dtrial, &needed, 1)
#ifdef HAVE_cc0
	      && ! sets_cc0_p (PATTERN (dtrial))
#endif
	      && rtx_equal_p (PATTERN (next_to_match), PATTERN (dtrial))
	      && eligible_for_delay (delay_insn, slot_number - 1, dtrial, flags))
	    {
	      if (! annul_p)
		{
		  rtx new;

		  update_block (dtrial, thread);
		  new = delete_from_delay_slot (dtrial);
		  if (INSN_DELETED_P (thread))
		    thread = new;
		  INSN_FROM_TARGET_P (next_to_match) = 0;
		}
	      else
		merged_insns = gen_rtx_INSN_LIST (SImode, dtrial,
						  merged_insns);

	      if (++slot_number == num_slots)
		break;

	      next_to_match = XVECEXP (PATTERN (insn), 0, slot_number);
	    }
	  else
	    {
	      /* Keep track of the set/referenced resources for the delay
		 slots of any trial insns we encounter.  */
	      mark_set_resources (dtrial, &set, 0, 1);
	      mark_referenced_resources (dtrial, &needed, 1);
	    }
	}
    }

  /* If all insns in the delay slot have been matched and we were previously
     annulling the branch, we need not annul it any more.  In that case delete
   all the merged insns.  Also clear the INSN_FROM_TARGET_P bit of each insn in
     the delay list so that we know that it isn't only being used at the
     target.  */
  if (slot_number == num_slots && annul_p)
    {
      for (; merged_insns; merged_insns = XEXP (merged_insns, 1))
	{
	  if (GET_MODE (merged_insns) == SImode)
	    {
	      rtx new;

	      update_block (XEXP (merged_insns, 0), thread);
	      new = delete_from_delay_slot (XEXP (merged_insns, 0));
	      if (INSN_DELETED_P (thread))
		thread = new;
	    }
	  else
	    {
	      update_block (XEXP (merged_insns, 0), thread);
	      delete_insn (XEXP (merged_insns, 0));
	    }
	}

      INSN_ANNULLED_BRANCH_P (delay_insn) = 0;

      for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
	INSN_FROM_TARGET_P (XVECEXP (PATTERN (insn), 0, i)) = 0;
    }
}

/* See if INSN is redundant with an insn in front of TARGET.  Often this
   is called when INSN is a candidate for a delay slot of TARGET.
   DELAY_LIST are insns that will be placed in delay slots of TARGET in front
   of INSN.  Often INSN will be redundant with an insn in a delay slot of
   some previous insn.  This happens when we have a series of branches to the
   same label; in that case the first insn at the target might want to go
   into each of the delay slots.

   If we are not careful, this routine can take up a significant fraction
   of the total compilation time (4%), but only wins rarely.  Hence we
   speed this routine up by making two passes.  The first pass goes back
   until it hits a label and sees if it finds an insn with an identical
   pattern.  Only in this (relatively rare) event does it check for
   data conflicts.

   We do not split insns we encounter.  This could cause us not to find a
   redundant insn, but the cost of splitting seems greater than the possible
   gain in rare cases.  */

static rtx
redundant_insn (insn, target, delay_list)
     rtx insn;
     rtx target;
     rtx delay_list;
{
  rtx target_main = target;
  rtx ipat = PATTERN (insn);
  rtx trial, pat;
  struct resources needed, set;
  int i;

  /* If INSN has any REG_UNUSED notes, it can't match anything since we
     are allowed to not actually assign to such a register.  */
  if (find_reg_note (insn, REG_UNUSED, NULL_RTX) != 0)
    return 0;

  /* Scan backwards looking for a match.  */
  for (trial = PREV_INSN (target); trial; trial = PREV_INSN (trial))
    {
      if (GET_CODE (trial) == CODE_LABEL)
	return 0;

      if (GET_RTX_CLASS (GET_CODE (trial)) != 'i')
	continue;

      pat = PATTERN (trial);
      if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
	continue;

      if (GET_CODE (pat) == SEQUENCE)
	{
	  /* Stop for a CALL and its delay slots because it is difficult to
	     track its resource needs correctly.  */
	  if (GET_CODE (XVECEXP (pat, 0, 0)) == CALL_INSN)
	    return 0;

	  /* Stop for an INSN or JUMP_INSN with delayed effects and its delay
	     slots because it is difficult to track its resource needs
	     correctly.  */

#ifdef INSN_SETS_ARE_DELAYED
	  if (INSN_SETS_ARE_DELAYED (XVECEXP (pat, 0, 0)))
	    return 0;
#endif

#ifdef INSN_REFERENCES_ARE_DELAYED
	  if (INSN_REFERENCES_ARE_DELAYED (XVECEXP (pat, 0, 0)))
	    return 0;
#endif

	  /* See if any of the insns in the delay slot match, updating
	     resource requirements as we go.  */
	  for (i = XVECLEN (pat, 0) - 1; i > 0; i--)
	    if (GET_CODE (XVECEXP (pat, 0, i)) == GET_CODE (insn)
		&& rtx_equal_p (PATTERN (XVECEXP (pat, 0, i)), ipat)
		&& ! find_reg_note (XVECEXP (pat, 0, i), REG_UNUSED, NULL_RTX))
	      break;

	  /* If found a match, exit this loop early.  */
	  if (i > 0)
	    break;
	}

      else if (GET_CODE (trial) == GET_CODE (insn) && rtx_equal_p (pat, ipat)
	       && ! find_reg_note (trial, REG_UNUSED, NULL_RTX))
	break;
    }

  /* If we didn't find an insn that matches, return 0.  */
  if (trial == 0)
    return 0;

  /* See what resources this insn sets and needs.  If they overlap, or
     if this insn references CC0, it can't be redundant.  */

  CLEAR_RESOURCE (&needed);
  CLEAR_RESOURCE (&set);
  mark_set_resources (insn, &set, 0, 1);
  mark_referenced_resources (insn, &needed, 1);

  /* If TARGET is a SEQUENCE, get the main insn.  */
  if (GET_CODE (target) == INSN && GET_CODE (PATTERN (target)) == SEQUENCE)
    target_main = XVECEXP (PATTERN (target), 0, 0);

  if (resource_conflicts_p (&needed, &set)
#ifdef HAVE_cc0
      || reg_mentioned_p (cc0_rtx, ipat)
#endif
      /* The insn requiring the delay may not set anything needed or set by
	 INSN.  */
      || insn_sets_resource_p (target_main, &needed, 1)
      || insn_sets_resource_p (target_main, &set, 1))
    return 0;

  /* Insns we pass may not set either NEEDED or SET, so merge them for
     simpler tests.  */
  needed.memory |= set.memory;
  needed.unch_memory |= set.unch_memory;
  IOR_HARD_REG_SET (needed.regs, set.regs);

  /* This insn isn't redundant if it conflicts with an insn that either is
     or will be in a delay slot of TARGET.  */

  while (delay_list)
    {
      if (insn_sets_resource_p (XEXP (delay_list, 0), &needed, 1))
	return 0;
      delay_list = XEXP (delay_list, 1);
    }

  if (GET_CODE (target) == INSN && GET_CODE (PATTERN (target)) == SEQUENCE)
    for (i = 1; i < XVECLEN (PATTERN (target), 0); i++)
      if (insn_sets_resource_p (XVECEXP (PATTERN (target), 0, i), &needed, 1))
	return 0;

  /* Scan backwards until we reach a label or an insn that uses something
     INSN sets or sets something insn uses or sets.  */

  for (trial = PREV_INSN (target);
       trial && GET_CODE (trial) != CODE_LABEL;
       trial = PREV_INSN (trial))
    {
      if (GET_CODE (trial) != INSN && GET_CODE (trial) != CALL_INSN
	  && GET_CODE (trial) != JUMP_INSN)
	continue;

      pat = PATTERN (trial);
      if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
	continue;

      if (GET_CODE (pat) == SEQUENCE)
	{
	  /* If this is a CALL_INSN and its delay slots, it is hard to track
	     the resource needs properly, so give up.  */
	  if (GET_CODE (XVECEXP (pat, 0, 0)) == CALL_INSN)
	    return 0;

	  /* If this is an INSN or JUMP_INSN with delayed effects, it
	     is hard to track the resource needs properly, so give up.  */

#ifdef INSN_SETS_ARE_DELAYED
	  if (INSN_SETS_ARE_DELAYED (XVECEXP (pat, 0, 0)))
	    return 0;
#endif

#ifdef INSN_REFERENCES_ARE_DELAYED
	  if (INSN_REFERENCES_ARE_DELAYED (XVECEXP (pat, 0, 0)))
	    return 0;
#endif

	  /* See if any of the insns in the delay slot match, updating
	     resource requirements as we go.  */
	  for (i = XVECLEN (pat, 0) - 1; i > 0; i--)
	    {
	      rtx candidate = XVECEXP (pat, 0, i);

	      /* If an insn will be annulled if the branch is false, it isn't
		 considered as a possible duplicate insn.  */
	      if (rtx_equal_p (PATTERN (candidate), ipat)
		  && ! (INSN_ANNULLED_BRANCH_P (XVECEXP (pat, 0, 0))
			&& INSN_FROM_TARGET_P (candidate)))
		{
		  /* Show that this insn will be used in the sequel.  */
		  INSN_FROM_TARGET_P (candidate) = 0;
		  return candidate;
		}

	      /* Unless this is an annulled insn from the target of a branch,
		 we must stop if it sets anything needed or set by INSN.  */
	      if ((! INSN_ANNULLED_BRANCH_P (XVECEXP (pat, 0, 0))
		   || ! INSN_FROM_TARGET_P (candidate))
		  && insn_sets_resource_p (candidate, &needed, 1))
		return 0;
	    }

	  /* If the insn requiring the delay slot conflicts with INSN, we
	     must stop.  */
	  if (insn_sets_resource_p (XVECEXP (pat, 0, 0), &needed, 1))
	    return 0;
	}
      else
	{
	  /* See if TRIAL is the same as INSN.  */
	  pat = PATTERN (trial);
	  if (rtx_equal_p (pat, ipat))
	    return trial;

	  /* Can't go any further if TRIAL conflicts with INSN.  */
	  if (insn_sets_resource_p (trial, &needed, 1))
	    return 0;
	}
    }

  return 0;
}

/* Return 1 if THREAD can only be executed in one way.  If LABEL is non-zero,
   it is the target of the branch insn being scanned.  If ALLOW_FALLTHROUGH
   is non-zero, we are allowed to fall into this thread; otherwise, we are
   not.

   If LABEL is used more than once or we pass a label other than LABEL before
   finding an active insn, we do not own this thread.  */

static int
own_thread_p (thread, label, allow_fallthrough)
     rtx thread;
     rtx label;
     int allow_fallthrough;
{
  rtx active_insn;
  rtx insn;

  /* We don't own the function end.  */
  if (thread == 0)
    return 0;

  /* Get the first active insn, or THREAD, if it is an active insn.  */
  active_insn = next_active_insn (PREV_INSN (thread));

  for (insn = thread; insn != active_insn; insn = NEXT_INSN (insn))
    if (GET_CODE (insn) == CODE_LABEL
	&& (insn != label || LABEL_NUSES (insn) != 1))
      return 0;

  if (allow_fallthrough)
    return 1;

  /* Ensure that we reach a BARRIER before any insn or label.  */
  for (insn = prev_nonnote_insn (thread);
       insn == 0 || GET_CODE (insn) != BARRIER;
       insn = prev_nonnote_insn (insn))
    if (insn == 0
	|| GET_CODE (insn) == CODE_LABEL
	|| (GET_CODE (insn) == INSN
	    && GET_CODE (PATTERN (insn)) != USE
	    && GET_CODE (PATTERN (insn)) != CLOBBER))
      return 0;

  return 1;
}

/* Called when INSN is being moved from a location near the target of a jump.
   We leave a marker of the form (use (INSN)) immediately in front
   of WHERE for mark_target_live_regs.  These markers will be deleted when
   reorg finishes.

   We used to try to update the live status of registers if WHERE is at
   the start of a basic block, but that can't work since we may remove a
   BARRIER in relax_delay_slots.  */

static void
update_block (insn, where)
     rtx insn;
     rtx where;
{
  /* Ignore if this was in a delay slot and it came from the target of
     a branch.  */
  if (INSN_FROM_TARGET_P (insn))
    return;

  emit_insn_before (gen_rtx_USE (VOIDmode, insn), where);

  /* INSN might be making a value live in a block where it didn't use to
     be.  So recompute liveness information for this block.  */

  incr_ticks_for_insn (insn);
}

/* Similar to REDIRECT_JUMP except that we update the BB_TICKS entry for
   the basic block containing the jump.  */

static int
reorg_redirect_jump (jump, nlabel)
     rtx jump;
     rtx nlabel;
{
  incr_ticks_for_insn (jump);
  return redirect_jump (jump, nlabel);
}

/* Called when INSN is being moved forward into a delay slot of DELAYED_INSN.
   We check every instruction between INSN and DELAYED_INSN for REG_DEAD notes
   that reference values used in INSN.  If we find one, then we move the
   REG_DEAD note to INSN.

   This is needed to handle the case where a later insn (after INSN) has a
   REG_DEAD note for a register used by INSN, and this later insn subsequently
   gets moved before a CODE_LABEL because it is a redundant insn.  In this
   case, mark_target_live_regs may be confused into thinking the register
   is dead because it sees a REG_DEAD note immediately before a CODE_LABEL.  */

static void
update_reg_dead_notes (insn, delayed_insn)
     rtx insn, delayed_insn;
{
  rtx p, link, next;

  for (p = next_nonnote_insn (insn); p != delayed_insn;
       p = next_nonnote_insn (p))
    for (link = REG_NOTES (p); link; link = next)
      {
	next = XEXP (link, 1);

	if (REG_NOTE_KIND (link) != REG_DEAD
	    || GET_CODE (XEXP (link, 0)) != REG)
	  continue;

	if (reg_referenced_p (XEXP (link, 0), PATTERN (insn)))
	  {
	    /* Move the REG_DEAD note from P to INSN.  */
	    remove_note (p, link);
	    XEXP (link, 1) = REG_NOTES (insn);
	    REG_NOTES (insn) = link;
	  }
      }
}

/* Called when an insn redundant with start_insn is deleted.  If there
   is a REG_DEAD note for the target of start_insn between start_insn
   and stop_insn, then the REG_DEAD note needs to be deleted since the
   value no longer dies there.

   If the REG_DEAD note isn't deleted, then mark_target_live_regs may be
   confused into thinking the register is dead.  */

static void
fix_reg_dead_note (start_insn, stop_insn)
     rtx start_insn, stop_insn;
{
  rtx p, link, next;

  for (p = next_nonnote_insn (start_insn); p != stop_insn;
       p = next_nonnote_insn (p))
    for (link = REG_NOTES (p); link; link = next)
      {
	next = XEXP (link, 1);

	if (REG_NOTE_KIND (link) != REG_DEAD
	    || GET_CODE (XEXP (link, 0)) != REG)
	  continue;

	if (reg_set_p (XEXP (link, 0), PATTERN (start_insn)))
	  {
	    remove_note (p, link);
	    return;
	  }
      }
}
1961 /* Delete any REG_UNUSED notes that exist on INSN but not on REDUNDANT_INSN.
1963 This handles the case of udivmodXi4 instructions which optimize their
1964 output depending on whether any REG_UNUSED notes are present.
1965 We must make sure that INSN calculates as many results as REDUNDANT_INSN does. */
1969 update_reg_unused_notes (insn, redundant_insn)
1970 rtx insn, redundant_insn;
1974 for (link = REG_NOTES (insn); link; link = next)
1976 next = XEXP (link, 1);
1978 if (REG_NOTE_KIND (link) != REG_UNUSED
1979 || GET_CODE (XEXP (link, 0)) != REG)
1982 if (! find_regno_note (redundant_insn, REG_UNUSED,
1983 REGNO (XEXP (link, 0))))
1984 remove_note (insn, link);
1988 /* Scan a function looking for insns that need a delay slot and find insns to
1989 put into the delay slot.
1991 NON_JUMPS_P is non-zero if we are to only try to fill non-jump insns (such
1992 as calls). We do these first since we don't want jump insns (that are
1993 easier to fill) to get the only insns that could be used for non-jump insns.
1994 When it is zero, only try to fill JUMP_INSNs.
1996 When slots are filled in this manner, the insns (including the
1997 delay_insn) are put together in a SEQUENCE rtx. In this fashion,
1998 it is possible to tell whether a delay slot has really been filled
1999 or not. `final' knows how to deal with this, by communicating
2000 through FINAL_SEQUENCE. */
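/* For illustration only (this sketch is not part of the original source):
   a jump with one filled slot ends up looking roughly like

	(insn (sequence [
	   (jump_insn ...)	;; the insn needing the delay slot
	   (insn ...)		;; the insn filling the slot
	 ]))

   so the presence of a SEQUENCE pattern is itself the sign that the
   slot was really filled.  */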
2003 fill_simple_delay_slots (non_jumps_p)
2006 register rtx insn, pat, trial, next_trial;
2008 int num_unfilled_slots = unfilled_slots_next - unfilled_slots_base;
2009 struct resources needed, set;
2010 int slots_to_fill, slots_filled;
2013 for (i = 0; i < num_unfilled_slots; i++)
2016 /* Get the next insn to fill. If it has already had any slots assigned,
2017 we can't do anything with it. Maybe we'll improve this later. */
2019 insn = unfilled_slots_base[i];
2021 || INSN_DELETED_P (insn)
2022 || (GET_CODE (insn) == INSN
2023 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2024 || (GET_CODE (insn) == JUMP_INSN && non_jumps_p)
2025 || (GET_CODE (insn) != JUMP_INSN && ! non_jumps_p))
2028 /* It may have been that this insn used to need delay slots, but
2029 now doesn't; ignore in that case. This can happen, for example,
2030 on the HP PA RISC, where the number of delay slots depends on
2031 what insns are nearby. */
2032 slots_to_fill = num_delay_slots (insn);
2034 /* Some machine descriptions have defined instructions to have
2035 delay slots only in certain circumstances which may depend on
2036 nearby insns (which change due to reorg's actions).
2038 For example, the PA port normally has delay slots for unconditional jumps.
2041 However, the PA port claims such jumps do not have a delay slot
2042 if they are immediate successors of certain CALL_INSNs. This
2043 allows the port to favor filling the delay slot of the call with
2044 the unconditional jump. */
2045 if (slots_to_fill == 0)
2048 /* This insn needs, or can use, some delay slots. SLOTS_TO_FILL
2049 says how many. After initialization, first try optimizing
call _foo		call _foo
2052 nop		add %o7,.-L1,%o7
b,a L1
nop
2056 If this case applies, the delay slot of the call is filled with
2057 the unconditional jump. This is done first to avoid having the
2058 delay slot of the call filled in the backward scan. Also, since
2059 the unconditional jump is likely to also have a delay slot, that
2060 insn must exist when it is subsequently scanned.
2062 This is tried on each insn with delay slots as some machines
2063 have insns which perform calls, but are not represented as CALL_INSNs. */
2069 if (GET_CODE (insn) == JUMP_INSN)
2070 flags = get_jump_flags (insn, JUMP_LABEL (insn));
2072 flags = get_jump_flags (insn, NULL_RTX);
2074 if ((trial = next_active_insn (insn))
2075 && GET_CODE (trial) == JUMP_INSN
2076 && simplejump_p (trial)
2077 && eligible_for_delay (insn, slots_filled, trial, flags)
2078 && no_labels_between_p (insn, trial))
2082 delay_list = add_to_delay_list (trial, delay_list);
2084 /* TRIAL may have had its delay slot filled, then unfilled. When
2085 the delay slot is unfilled, TRIAL is placed back on the unfilled
2086 slots obstack. Unfortunately, it is placed on the end of the
2087 obstack, not in its original location. Therefore, we must search
2088 from entry i + 1 to the end of the unfilled slots obstack to
2089 try and find TRIAL. */
2090 tmp = &unfilled_slots_base[i + 1];
2091 while (*tmp != trial && tmp != unfilled_slots_next)
2094 /* Remove the unconditional jump from consideration for delay slot
2095 filling and unthread it. */
2099 rtx next = NEXT_INSN (trial);
2100 rtx prev = PREV_INSN (trial);
2102 NEXT_INSN (prev) = next;
2104 PREV_INSN (next) = prev;
2108 /* Now, scan backwards from the insn to search for a potential
2109 delay-slot candidate. Stop searching when a label or jump is hit.
2111 For each candidate, if it is to go into the delay slot (moved
2112 forward in execution sequence), it must not need or set any resources
2113 that were set by later insns and must not set any resources that
2114 are needed for those insns.
2116 The delay slot insn itself sets resources unless it is a call
2117 (in which case the called routine, not the insn itself, is doing the setting). */
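/* A hedged illustration (hypothetical registers and opcodes, not from the
   original source): on a machine with one delay slot,

	add	r1,r2,r3
	b	L5		(needs one delay slot)

   can become

	b	L5
	add	r1,r2,r3	(moved into the slot)

   provided nothing scanned between the add and the branch uses or
   sets r3.  */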
2120 if (slots_filled < slots_to_fill)
2122 CLEAR_RESOURCE (&needed);
2123 CLEAR_RESOURCE (&set);
2124 mark_set_resources (insn, &set, 0, 0);
2125 mark_referenced_resources (insn, &needed, 0);
2127 for (trial = prev_nonnote_insn (insn); ! stop_search_p (trial, 1);
2130 next_trial = prev_nonnote_insn (trial);
2132 /* This must be an INSN or CALL_INSN. */
2133 pat = PATTERN (trial);
2135 /* USE and CLOBBER at this level are just for flow; ignore them. */
2136 if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
2139 /* Check for resource conflict first, to avoid unnecessary splitting. */
2141 if (! insn_references_resource_p (trial, &set, 1)
2142 && ! insn_sets_resource_p (trial, &set, 1)
2143 && ! insn_sets_resource_p (trial, &needed, 1)
2145 /* Can't separate set of cc0 from its use. */
2146 && ! (reg_mentioned_p (cc0_rtx, pat) && ! sets_cc0_p (pat))
2150 trial = try_split (pat, trial, 1);
2151 next_trial = prev_nonnote_insn (trial);
2152 if (eligible_for_delay (insn, slots_filled, trial, flags))
2154 /* In this case, we are searching backward, so if we
2155 find insns to put on the delay list, we want
2156 to put them at the head, rather than the
2157 tail, of the list. */
2159 update_reg_dead_notes (trial, insn);
2160 delay_list = gen_rtx_INSN_LIST (VOIDmode,
2162 update_block (trial, trial);
2163 delete_insn (trial);
2164 if (slots_to_fill == ++slots_filled)
2170 mark_set_resources (trial, &set, 0, 1);
2171 mark_referenced_resources (trial, &needed, 1);
2175 /* If we haven't filled all the needed slots, we come here. */
2177 /* Try to optimize case of jumping around a single insn. */
2178 #if defined(ANNUL_IFFALSE_SLOTS) || defined(ANNUL_IFTRUE_SLOTS)
2179 if (slots_filled != slots_to_fill
2181 && GET_CODE (insn) == JUMP_INSN
2182 && (condjump_p (insn) || condjump_in_parallel_p (insn)))
2184 delay_list = optimize_skip (insn);
2190 /* Try to get insns from beyond the insn needing the delay slot.
2191 These insns can neither set nor reference resources set in insns being
2192 skipped, cannot set resources in the insn being skipped, and, if this
2193 is a CALL_INSN (or a CALL_INSN is passed), cannot trap (because the
2194 call might not return).
2196 There used to be code which continued past the target label if
2197 we saw all uses of the target label. This code did not work,
2198 because it failed to account for some instructions which were
2199 both annulled and marked as from the target. This can happen as a
2200 result of optimize_skip. Since this code was redundant with
2201 fill_eager_delay_slots anyway, it was just deleted. */
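/* For example (an illustrative case, not from the original source): a
   memory load that may fault must not be moved into the delay slot of a
   call, since if the call never returns the load would never have been
   reached in the original code.  */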
2203 if (slots_filled != slots_to_fill
2204 && (GET_CODE (insn) != JUMP_INSN
2205 || ((condjump_p (insn) || condjump_in_parallel_p (insn))
2206 && ! simplejump_p (insn)
2207 && JUMP_LABEL (insn) != 0)))
2210 int maybe_never = 0;
2211 struct resources needed_at_jump;
2213 CLEAR_RESOURCE (&needed);
2214 CLEAR_RESOURCE (&set);
2216 if (GET_CODE (insn) == CALL_INSN)
2218 mark_set_resources (insn, &set, 0, 1);
2219 mark_referenced_resources (insn, &needed, 1);
2224 mark_set_resources (insn, &set, 0, 1);
2225 mark_referenced_resources (insn, &needed, 1);
2226 if (GET_CODE (insn) == JUMP_INSN)
2227 target = JUMP_LABEL (insn);
2230 for (trial = next_nonnote_insn (insn); trial; trial = next_trial)
2232 rtx pat, trial_delay;
2234 next_trial = next_nonnote_insn (trial);
2236 if (GET_CODE (trial) == CODE_LABEL
2237 || GET_CODE (trial) == BARRIER)
2240 /* We must have an INSN, JUMP_INSN, or CALL_INSN. */
2241 pat = PATTERN (trial);
2243 /* Stand-alone USE and CLOBBER are just for flow. */
2244 if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
2247 /* If this already has filled delay slots, get the insn needing the delay slots. */
2249 if (GET_CODE (pat) == SEQUENCE)
2250 trial_delay = XVECEXP (pat, 0, 0);
2252 trial_delay = trial;
2254 /* If this is a jump insn to our target, indicate that we have
2255 seen another jump to it. If we aren't handling a conditional
2256 jump, stop our search. Otherwise, compute the needs at its
2257 target and add them to NEEDED. */
2258 if (GET_CODE (trial_delay) == JUMP_INSN)
2262 else if (JUMP_LABEL (trial_delay) != target)
2265 next_active_insn (JUMP_LABEL (trial_delay));
2267 mark_target_live_regs (get_insns (), ninsn,
2269 needed.memory |= needed_at_jump.memory;
2270 needed.unch_memory |= needed_at_jump.unch_memory;
2271 IOR_HARD_REG_SET (needed.regs, needed_at_jump.regs);
2275 /* See if we have a resource problem before we try to split. */
2278 && GET_CODE (pat) != SEQUENCE
2279 && ! insn_references_resource_p (trial, &set, 1)
2280 && ! insn_sets_resource_p (trial, &set, 1)
2281 && ! insn_sets_resource_p (trial, &needed, 1)
2283 && ! (reg_mentioned_p (cc0_rtx, pat) && ! sets_cc0_p (pat))
2285 && ! (maybe_never && may_trap_p (pat))
2286 && (trial = try_split (pat, trial, 0))
2287 && eligible_for_delay (insn, slots_filled, trial, flags))
2289 next_trial = next_nonnote_insn (trial);
2290 delay_list = add_to_delay_list (trial, delay_list);
2293 if (reg_mentioned_p (cc0_rtx, pat))
2294 link_cc0_insns (trial);
2297 delete_insn (trial);
2298 if (slots_to_fill == ++slots_filled)
2303 mark_set_resources (trial, &set, 0, 1);
2304 mark_referenced_resources (trial, &needed, 1);
2306 /* Ensure we don't put insns between the setting of cc and the
2307 comparison by moving a setting of cc into an earlier delay
2308 slot since these insns could clobber the condition code. */
2311 /* If this is a call or jump, we might not get here. */
2312 if (GET_CODE (trial_delay) == CALL_INSN
2313 || GET_CODE (trial_delay) == JUMP_INSN)
2317 /* If there are slots left to fill and our search was stopped by an
2318 unconditional branch, try the insn at the branch target. We can
2319 redirect the branch if it works.
2321 Don't do this if the insn at the branch target is a branch. */
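/* An illustrative sketch (hypothetical labels and registers, not from the
   original source):

	b	L1
	...
     L1: add	r1,r2,r3

   The add can be copied into the delay slot and the branch redirected to
   a new label placed just after the add.  */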
2322 if (slots_to_fill != slots_filled
2324 && GET_CODE (trial) == JUMP_INSN
2325 && simplejump_p (trial)
2326 && (target == 0 || JUMP_LABEL (trial) == target)
2327 && (next_trial = next_active_insn (JUMP_LABEL (trial))) != 0
2328 && ! (GET_CODE (next_trial) == INSN
2329 && GET_CODE (PATTERN (next_trial)) == SEQUENCE)
2330 && GET_CODE (next_trial) != JUMP_INSN
2331 && ! insn_references_resource_p (next_trial, &set, 1)
2332 && ! insn_sets_resource_p (next_trial, &set, 1)
2333 && ! insn_sets_resource_p (next_trial, &needed, 1)
2335 && ! reg_mentioned_p (cc0_rtx, PATTERN (next_trial))
2337 && ! (maybe_never && may_trap_p (PATTERN (next_trial)))
2338 && (next_trial = try_split (PATTERN (next_trial), next_trial, 0))
2339 && eligible_for_delay (insn, slots_filled, next_trial, flags))
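/* Choose the label the branch will be redirected to: the insn after the
   one we are taking, or the end-of-function label if no active insn
   follows it.  */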
2341 rtx new_label = next_active_insn (next_trial);
2344 new_label = get_label_before (new_label);
2346 new_label = find_end_label ();
2349 = add_to_delay_list (copy_rtx (next_trial), delay_list);
2351 reorg_redirect_jump (trial, new_label);
2353 /* If we merged because we both jumped to the same place,
2354 redirect the original insn also. */
2356 reorg_redirect_jump (insn, new_label);
2360 /* If this is an unconditional jump, then try to get insns from the
2361 target of the jump. */
2362 if (GET_CODE (insn) == JUMP_INSN
2363 && simplejump_p (insn)
2364 && slots_filled != slots_to_fill)
2366 = fill_slots_from_thread (insn, const_true_rtx,
2367 next_active_insn (JUMP_LABEL (insn)),
2369 own_thread_p (JUMP_LABEL (insn),
2370 JUMP_LABEL (insn), 0),
2371 slots_to_fill, &slots_filled,
2375 unfilled_slots_base[i]
2376 = emit_delay_sequence (insn, delay_list, slots_filled);
2378 if (slots_to_fill == slots_filled)
2379 unfilled_slots_base[i] = 0;
2381 note_delay_statistics (slots_filled, 0);
2384 #ifdef DELAY_SLOTS_FOR_EPILOGUE
2385 /* See if the epilogue needs any delay slots. Try to fill them if so.
2386 The only thing we can do is scan backwards from the end of the
2387 function. If we did this in a previous pass, it is incorrect to do it again. */
2389 if (current_function_epilogue_delay_list)
2392 slots_to_fill = DELAY_SLOTS_FOR_EPILOGUE;
2393 if (slots_to_fill == 0)
2397 CLEAR_RESOURCE (&set);
2399 /* The frame pointer and stack pointer are needed at the beginning of
2400 the epilogue, so instructions setting them cannot be put in the
2401 epilogue delay slot. However, everything else needed at function
2402 end is safe, so we don't want to use end_of_function_needs here. */
2403 CLEAR_RESOURCE (&needed);
2404 if (frame_pointer_needed)
2406 SET_HARD_REG_BIT (needed.regs, FRAME_POINTER_REGNUM);
2407 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
2408 SET_HARD_REG_BIT (needed.regs, HARD_FRAME_POINTER_REGNUM);
2410 #ifdef EXIT_IGNORE_STACK
2411 if (! EXIT_IGNORE_STACK
2412 || current_function_sp_is_unchanging)
2414 SET_HARD_REG_BIT (needed.regs, STACK_POINTER_REGNUM);
2417 SET_HARD_REG_BIT (needed.regs, STACK_POINTER_REGNUM);
2419 #ifdef EPILOGUE_USES
2420 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2422 if (EPILOGUE_USES (i))
2423 SET_HARD_REG_BIT (needed.regs, i);
2427 for (trial = get_last_insn (); ! stop_search_p (trial, 1);
2428 trial = PREV_INSN (trial))
2430 if (GET_CODE (trial) == NOTE)
2432 pat = PATTERN (trial);
2433 if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
2436 if (! insn_references_resource_p (trial, &set, 1)
2437 && ! insn_sets_resource_p (trial, &needed, 1)
2438 && ! insn_sets_resource_p (trial, &set, 1)
2440 /* Don't want to mess with cc0 here. */
2441 && ! reg_mentioned_p (cc0_rtx, pat)
2445 trial = try_split (pat, trial, 1);
2446 if (ELIGIBLE_FOR_EPILOGUE_DELAY (trial, slots_filled))
2448 /* Here as well we are searching backward, so put the
2449 insns we find on the head of the list. */
2451 current_function_epilogue_delay_list
2452 = gen_rtx_INSN_LIST (VOIDmode, trial,
2453 current_function_epilogue_delay_list);
2454 mark_end_of_function_resources (trial, 1);
2455 update_block (trial, trial);
2456 delete_insn (trial);
2458 /* Clear deleted bit so final.c will output the insn. */
2459 INSN_DELETED_P (trial) = 0;
2461 if (slots_to_fill == ++slots_filled)
2467 mark_set_resources (trial, &set, 0, 1);
2468 mark_referenced_resources (trial, &needed, 1);
2471 note_delay_statistics (slots_filled, 0);
2475 /* Try to find insns to place in delay slots.
2477 INSN is the jump needing SLOTS_TO_FILL delay slots. It tests CONDITION
2478 or is an unconditional branch if CONDITION is const_true_rtx.
2479 *PSLOTS_FILLED is updated with the number of slots that we have filled.
2481 THREAD is a flow-of-control thread: the insns to be executed if the
2482 branch is true or if it is false; THREAD_IF_TRUE says which.
2484 OPPOSITE_THREAD is the thread in the opposite direction. It is used
2485 to see if any potential delay slot insns set things needed there.
2487 LIKELY is non-zero if it is extremely likely that the branch will be
2488 taken and THREAD_IF_TRUE is set. This is used for the branch at the
2489 end of a loop back up to the top.
2491 OWN_THREAD is true if we are the only user of the thread;
2492 i.e., it is the fallthrough code of our jump or the target of the
2493 jump when we are the only jump going there.
2495 If OWN_THREAD is false, it must be the "true" thread of a jump. In that
2496 case, we can only take insns from the head of the thread for our delay
2497 slot. We then adjust the jump to point after the insns we have taken. */
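/* As an illustration (not part of the original source): for a conditional
   branch such as `beq L2', the true thread is the code at L2 and the
   false thread is the fall-through code; for an unconditional branch only
   the target thread is of interest.  */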
2500 fill_slots_from_thread (insn, condition, thread, opposite_thread, likely,
2501 thread_if_true, own_thread,
2502 slots_to_fill, pslots_filled, delay_list)
2505 rtx thread, opposite_thread;
2509 int slots_to_fill, *pslots_filled;
2513 struct resources opposite_needed, set, needed;
2519 /* Validate our arguments. */
2520 if ((condition == const_true_rtx && ! thread_if_true)
2521 || (! own_thread && ! thread_if_true))
2524 flags = get_jump_flags (insn, JUMP_LABEL (insn));
2526 /* If our thread is the end of the subroutine, we can't get any delay insns from that. */
2531 /* If this is an unconditional branch, nothing is needed at the
2532 opposite thread. Otherwise, compute what is needed there. */
2533 if (condition == const_true_rtx)
2534 CLEAR_RESOURCE (&opposite_needed);
2536 mark_target_live_regs (get_insns (), opposite_thread, &opposite_needed);
2538 /* If the insn at THREAD can be split, do it here to avoid having to
2539 update THREAD and NEW_THREAD if it is done in the loop below. Also
2540 initialize NEW_THREAD. */
2542 new_thread = thread = try_split (PATTERN (thread), thread, 0);
2544 /* Scan insns at THREAD. We are looking for an insn that can be removed
2545 from THREAD (it neither sets nor references resources that were set
2546 ahead of it and it doesn't set anything needed by the insns ahead of
2547 it) and that either can be placed in an annulling insn or isn't
2548 needed at OPPOSITE_THREAD. */
2550 CLEAR_RESOURCE (&needed);
2551 CLEAR_RESOURCE (&set);
2553 /* If we do not own this thread, we must stop as soon as we find
2554 something that we can't put in a delay slot, since all we can do
2555 is branch into THREAD at a later point. Therefore, labels stop
2556 the search if this is not the `true' thread. */
2558 for (trial = thread;
2559 ! stop_search_p (trial, ! thread_if_true) && (! lose || own_thread);
2560 trial = next_nonnote_insn (trial))
2564 /* If we have passed a label, we no longer own this thread. */
2565 if (GET_CODE (trial) == CODE_LABEL)
2571 pat = PATTERN (trial);
2572 if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
2575 /* If TRIAL conflicts with the insns ahead of it, we lose. Also,
2576 don't separate or copy insns that set and use CC0. */
2577 if (! insn_references_resource_p (trial, &set, 1)
2578 && ! insn_sets_resource_p (trial, &set, 1)
2579 && ! insn_sets_resource_p (trial, &needed, 1)
2581 && ! (reg_mentioned_p (cc0_rtx, pat)
2582 && (! own_thread || ! sets_cc0_p (pat)))
2588 /* If TRIAL is redundant with some insn before INSN, we don't
2589 actually need to add it to the delay list; we can merely pretend we did. */
2591 if ((prior_insn = redundant_insn (trial, insn, delay_list)))
2593 fix_reg_dead_note (prior_insn, insn);
2596 update_block (trial, thread);
2597 if (trial == thread)
2599 thread = next_active_insn (thread);
2600 if (new_thread == trial)
2601 new_thread = thread;
2604 delete_insn (trial);
2608 update_reg_unused_notes (prior_insn, trial);
2609 new_thread = next_active_insn (trial);
2615 /* There are two ways we can win: If TRIAL doesn't set anything
2616 needed at the opposite thread and can't trap, or if it can
2617 go into an annulled delay slot. */
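/* (Descriptive note: an annulled delay slot is executed only when the
   branch goes the expected way, so an insn placed there need not be safe
   on the opposite thread.)  */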
2619 && (condition == const_true_rtx
2620 || (! insn_sets_resource_p (trial, &opposite_needed, 1)
2621 && ! may_trap_p (pat))))
2624 trial = try_split (pat, trial, 0);
2625 if (new_thread == old_trial)
2627 if (thread == old_trial)
2629 pat = PATTERN (trial);
2630 if (eligible_for_delay (insn, *pslots_filled, trial, flags))
2634 #ifdef ANNUL_IFTRUE_SLOTS
2637 #ifdef ANNUL_IFFALSE_SLOTS
2643 trial = try_split (pat, trial, 0);
2644 if (new_thread == old_trial)
2646 if (thread == old_trial)
2648 pat = PATTERN (trial);
2649 if ((must_annul || delay_list == NULL) && (thread_if_true
2650 ? check_annul_list_true_false (0, delay_list)
2651 && eligible_for_annul_false (insn, *pslots_filled, trial, flags)
2652 : check_annul_list_true_false (1, delay_list)
2653 && eligible_for_annul_true (insn, *pslots_filled, trial, flags)))
2661 if (reg_mentioned_p (cc0_rtx, pat))
2662 link_cc0_insns (trial);
2665 /* If we own this thread, delete the insn. If this is the
2666 destination of a branch, show that a basic block status
2667 may have been updated. In any case, mark the new
2668 starting point of this thread. */
2671 update_block (trial, thread);
2672 if (trial == thread)
2674 thread = next_active_insn (thread);
2675 if (new_thread == trial)
2676 new_thread = thread;
2678 delete_insn (trial);
2681 new_thread = next_active_insn (trial);
2683 temp = own_thread ? trial : copy_rtx (trial);
2685 INSN_FROM_TARGET_P (temp) = 1;
2687 delay_list = add_to_delay_list (temp, delay_list);
2689 if (slots_to_fill == ++(*pslots_filled))
2691 /* Even though we have filled all the slots, we
2692 may be branching to a location that has a
2693 redundant insn. Skip any if so. */
2694 while (new_thread && ! own_thread
2695 && ! insn_sets_resource_p (new_thread, &set, 1)
2696 && ! insn_sets_resource_p (new_thread, &needed, 1)
2697 && ! insn_references_resource_p (new_thread,
2700 = redundant_insn (new_thread, insn,
2703 /* We know we do not own the thread, so no need
2704 to call update_block and delete_insn. */
2705 fix_reg_dead_note (prior_insn, insn);
2706 update_reg_unused_notes (prior_insn, new_thread);
2707 new_thread = next_active_insn (new_thread);
2717 /* This insn can't go into a delay slot. */
2719 mark_set_resources (trial, &set, 0, 1);
2720 mark_referenced_resources (trial, &needed, 1);
2722 /* Ensure we don't put insns between the setting of cc and the comparison
2723 by moving a setting of cc into an earlier delay slot since these insns
2724 could clobber the condition code. */
2727 /* If this insn is a register-register copy and the next insn has
2728 a use of our destination, change it to use our source. That way,
2729 it will become a candidate for our delay slot the next time
2730 through this loop. This case occurs commonly in loops that branch back to the top of the loop.
2733 We could check for more complex cases than those tested below,
2734 but it doesn't seem worth it. It might also be a good idea to try
2735 to swap the two insns. That might do better.
2737 We can't do this if the next insn modifies our destination, because
2738 that would make the replacement into the insn invalid. We also can't
2739 do this if it modifies our source, because it might be an earlyclobber
2740 operand. This latter test also prevents updating the contents of a PRE_INC. */
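/* An illustrative case (hypothetical registers, not from the original
   source):

	mov	r1,r2		copy that could not go in the slot
	add	r2,4,r3		next insn uses r2, which the copy set

   Rewriting the add to use r1 removes its dependence on the copy, so the
   add itself may fit the delay slot on a later iteration.  */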
2743 if (GET_CODE (trial) == INSN && GET_CODE (pat) == SET
2744 && GET_CODE (SET_SRC (pat)) == REG
2745 && GET_CODE (SET_DEST (pat)) == REG)
2747 rtx next = next_nonnote_insn (trial);
2749 if (next && GET_CODE (next) == INSN
2750 && GET_CODE (PATTERN (next)) != USE
2751 && ! reg_set_p (SET_DEST (pat), next)
2752 && ! reg_set_p (SET_SRC (pat), next)
2753 && reg_referenced_p (SET_DEST (pat), PATTERN (next))
2754 && ! modified_in_p (SET_DEST (pat), next))
2755 validate_replace_rtx (SET_DEST (pat), SET_SRC (pat), next);
2759 /* If we stopped on a branch insn that has delay slots, see if we can
2760 steal some of the insns in those slots. */
2761 if (trial && GET_CODE (trial) == INSN
2762 && GET_CODE (PATTERN (trial)) == SEQUENCE
2763 && GET_CODE (XVECEXP (PATTERN (trial), 0, 0)) == JUMP_INSN)
2765 /* If this is the `true' thread, we will want to follow the jump,
2766 so we can only do this if we have taken everything up to here. */
2767 if (thread_if_true && trial == new_thread)
2769 = steal_delay_list_from_target (insn, condition, PATTERN (trial),
2770 delay_list, &set, &needed,
2771 &opposite_needed, slots_to_fill,
2772 pslots_filled, &must_annul,
2774 else if (! thread_if_true)
2776 = steal_delay_list_from_fallthrough (insn, condition,
2778 delay_list, &set, &needed,
2779 &opposite_needed, slots_to_fill,
2780 pslots_filled, &must_annul);
2783 /* If we haven't found anything for this delay slot and it is very
2784 likely that the branch will be taken, see if the insn at our target
2785 increments or decrements a register with an increment that does not
2786 depend on the destination register. If so, try to place the opposite
2787 arithmetic insn after the jump insn and put the arithmetic insn in the
2788 delay slot. If we can't do this, return. */
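/* An illustrative sketch (hypothetical registers, not from the original
   source): for a likely-taken branch to `L: add r1,r1,8', the add is
   placed in the always-executed delay slot and a compensating
   `sub r1,r1,8' is emitted after the branch, so the fall-through path
   undoes the adjustment.  */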
2789 if (delay_list == 0 && likely && new_thread
2790 && GET_CODE (new_thread) == INSN
2791 && GET_CODE (PATTERN (new_thread)) != ASM_INPUT
2792 && asm_noperands (PATTERN (new_thread)) < 0)
2794 rtx pat = PATTERN (new_thread);
2799 pat = PATTERN (trial);
2801 if (GET_CODE (trial) != INSN || GET_CODE (pat) != SET
2802 || ! eligible_for_delay (insn, 0, trial, flags))
2805 dest = SET_DEST (pat), src = SET_SRC (pat);
2806 if ((GET_CODE (src) == PLUS || GET_CODE (src) == MINUS)
2807 && rtx_equal_p (XEXP (src, 0), dest)
2808 && ! reg_overlap_mentioned_p (dest, XEXP (src, 1)))
2810 rtx other = XEXP (src, 1);
2814 /* If this is a constant adjustment, use the same code with
2815 the negated constant. Otherwise, reverse the sense of the arithmetic. */
2817 if (GET_CODE (other) == CONST_INT)
2818 new_arith = gen_rtx_fmt_ee (GET_CODE (src), GET_MODE (src), dest,
2819 negate_rtx (GET_MODE (src), other));
2821 new_arith = gen_rtx_fmt_ee (GET_CODE (src) == PLUS ? MINUS : PLUS,
2822 GET_MODE (src), dest, other);
2824 ninsn = emit_insn_after (gen_rtx_SET (VOIDmode, dest, new_arith),
2827 if (recog_memoized (ninsn) < 0
2828 || (extract_insn (ninsn), ! constrain_operands (1)))
2830 delete_insn (ninsn);
2836 update_block (trial, thread);
2837 if (trial == thread)
2839 thread = next_active_insn (thread);
2840 if (new_thread == trial)
2841 new_thread = thread;
2843 delete_insn (trial);
2846 new_thread = next_active_insn (trial);
2848 ninsn = own_thread ? trial : copy_rtx (trial);
2850 INSN_FROM_TARGET_P (ninsn) = 1;
2852 delay_list = add_to_delay_list (ninsn, NULL_RTX);
2857 if (delay_list && must_annul)
2858 INSN_ANNULLED_BRANCH_P (insn) = 1;
2860 /* If we are to branch into the middle of this thread, find an appropriate
2861 label or make a new one if none, and redirect INSN to it. If we hit the
2862 end of the function, use the end-of-function label. */
2863 if (new_thread != thread)
2867 if (! thread_if_true)
2870 if (new_thread && GET_CODE (new_thread) == JUMP_INSN
2871 && (simplejump_p (new_thread)
2872 || GET_CODE (PATTERN (new_thread)) == RETURN)
2873 && redirect_with_delay_list_safe_p (insn,
2874 JUMP_LABEL (new_thread),
2876 new_thread = follow_jumps (JUMP_LABEL (new_thread));
2878 if (new_thread == 0)
2879 label = find_end_label ();
2880 else if (GET_CODE (new_thread) == CODE_LABEL)
2883 label = get_label_before (new_thread);
2885 reorg_redirect_jump (insn, label);
2891 /* Make another attempt to find insns to place in delay slots.
2893 We previously looked for insns located in front of the delay insn
2894 and, for non-jump delay insns, located behind the delay insn.
2896 Here only try to schedule jump insns and try to move insns from either
2897 the target or the following insns into the delay slot. If annulling is
2898 supported, we will likely do this. Otherwise, we can do this only if safe. */
2902 fill_eager_delay_slots ()
2906 int num_unfilled_slots = unfilled_slots_next - unfilled_slots_base;
2908 for (i = 0; i < num_unfilled_slots; i++)
2911 rtx target_label, insn_at_target, fallthrough_insn;
2914 int own_fallthrough;
2915 int prediction, slots_to_fill, slots_filled;
2917 insn = unfilled_slots_base[i];
2919 || INSN_DELETED_P (insn)
2920 || GET_CODE (insn) != JUMP_INSN
2921 || ! (condjump_p (insn) || condjump_in_parallel_p (insn)))
2924 slots_to_fill = num_delay_slots (insn);
2925 /* Some machine descriptions have defined instructions to have
2926 delay slots only in certain circumstances which may depend on
2927 nearby insns (which change due to reorg's actions).
2929 For example, the PA port normally has delay slots for unconditional jumps.
2932 However, the PA port claims such jumps do not have a delay slot
2933 if they are immediate successors of certain CALL_INSNs. This
2934 allows the port to favor filling the delay slot of the call with
2935 the unconditional jump. */
2936 if (slots_to_fill == 0)
2940 target_label = JUMP_LABEL (insn);
2941 condition = get_branch_condition (insn, target_label);
2946 /* Get the next active fallthrough and target insns and see if we own
2947 them. Then see whether the branch is likely true. We don't need
2948 to do a lot of this for unconditional branches. */
2950 insn_at_target = next_active_insn (target_label);
2951 own_target = own_thread_p (target_label, target_label, 0);
2953 if (condition == const_true_rtx)
2955 own_fallthrough = 0;
2956 fallthrough_insn = 0;
2961 fallthrough_insn = next_active_insn (insn);
2962 own_fallthrough = own_thread_p (NEXT_INSN (insn), NULL_RTX, 1);
2963 prediction = mostly_true_jump (insn, condition);
2966 /* If this insn is expected to branch, first try to get insns from our
2967 target, then our fallthrough insns. If it is not expected to branch,
2968 try the other order. */
2973 = fill_slots_from_thread (insn, condition, insn_at_target,
2974 fallthrough_insn, prediction == 2, 1,
2976 slots_to_fill, &slots_filled, delay_list);
2978 if (delay_list == 0 && own_fallthrough)
2980 /* Even though we didn't find anything for delay slots,
2981 we might have found a redundant insn which we deleted
2982 from the thread that was filled. So we have to recompute
2983 the next insn at the target. */
2984 target_label = JUMP_LABEL (insn);
2985 insn_at_target = next_active_insn (target_label);
2988 = fill_slots_from_thread (insn, condition, fallthrough_insn,
2989 insn_at_target, 0, 0,
2991 slots_to_fill, &slots_filled,
2997 if (own_fallthrough)
2999 = fill_slots_from_thread (insn, condition, fallthrough_insn,
3000 insn_at_target, 0, 0,
3002 slots_to_fill, &slots_filled,
3005 if (delay_list == 0)
3007 = fill_slots_from_thread (insn, condition, insn_at_target,
3008 next_active_insn (insn), 0, 1,
3010 slots_to_fill, &slots_filled,
3015 unfilled_slots_base[i]
3016 = emit_delay_sequence (insn, delay_list, slots_filled);
3018 if (slots_to_fill == slots_filled)
3019 unfilled_slots_base[i] = 0;
3021 note_delay_statistics (slots_filled, 1);
3025 /* Once we have tried two ways to fill a delay slot, make a pass over the
3026 code to try to improve the results and to do such things as more jump threading. */
3030 relax_delay_slots (first)
3033 register rtx insn, next, pat;
3034 register rtx trial, delay_insn, target_label;
3036 /* Look at every JUMP_INSN and see if we can improve it. */
3037 for (insn = first; insn; insn = next)
3041 next = next_active_insn (insn);
3043 /* If this is a jump insn, see if it now jumps to a jump, jumps to
3044 the next insn, or jumps to a label that is not the last of a
3045 group of consecutive labels. */
3046 if (GET_CODE (insn) == JUMP_INSN
3047 && (condjump_p (insn) || condjump_in_parallel_p (insn))
3048 && (target_label = JUMP_LABEL (insn)) != 0)
3050 target_label = follow_jumps (target_label);
3051 target_label = prev_label (next_active_insn (target_label));
3053 if (target_label == 0)
3054 target_label = find_end_label ();
3056 if (next_active_insn (target_label) == next
3057 && ! condjump_in_parallel_p (insn))
3063 if (target_label != JUMP_LABEL (insn))
3064 reorg_redirect_jump (insn, target_label);
3066 /* See if this jump branches around an unconditional jump.
3067 If so, invert this jump and point it to the target of the
3069 if (next && GET_CODE (next) == JUMP_INSN
3070 && (simplejump_p (next) || GET_CODE (PATTERN (next)) == RETURN)
3071 && next_active_insn (target_label) == next_active_insn (next)
3072 && no_labels_between_p (insn, next))
3074 rtx label = JUMP_LABEL (next);
3076 /* Be careful how we do this to avoid deleting code or
3077 labels that are momentarily dead. See similar optimization
3080 We also need to ensure we properly handle the case when
3081 invert_jump fails. */
3083 ++LABEL_NUSES (target_label);
3085 ++LABEL_NUSES (label);
3087 if (invert_jump (insn, label))
3094 --LABEL_NUSES (label);
3096 if (--LABEL_NUSES (target_label) == 0)
3097 delete_insn (target_label);
3103 /* If this is an unconditional jump and the previous insn is a
3104 conditional jump, try reversing the condition of the previous
3105 insn and swapping our targets. The next pass might be able to fill the slots.
3108 Don't do this if we expect the conditional branch to be true, because
3109 we would then be making the more common case longer. */
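/* An illustrative sketch (hypothetical labels, not from the original
   source):

	beq	L1		conditional, mostly not taken
	b	L2		unconditional

   becomes

	bne	L2
	b	L1

   so the common path executes a single branch instead of two.  */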
3111 if (GET_CODE (insn) == JUMP_INSN
3112 && (simplejump_p (insn) || GET_CODE (PATTERN (insn)) == RETURN)
3113 && (other = prev_active_insn (insn)) != 0
3114 && (condjump_p (other) || condjump_in_parallel_p (other))
3115 && no_labels_between_p (other, insn)
3116 && 0 > mostly_true_jump (other,
3117 get_branch_condition (other,
3118 JUMP_LABEL (other))))
3120 rtx other_target = JUMP_LABEL (other);
3121 target_label = JUMP_LABEL (insn);
3123 /* Increment the count of OTHER_TARGET, so it doesn't get deleted
3124 as we move the label. */
3126 ++LABEL_NUSES (other_target);
3128 if (invert_jump (other, target_label))
3129 reorg_redirect_jump (insn, other_target);
3132 --LABEL_NUSES (other_target);
3135 /* Now look only at cases where we have filled a delay slot. */
3136 if (GET_CODE (insn) != INSN
3137 || GET_CODE (PATTERN (insn)) != SEQUENCE)
3140 pat = PATTERN (insn);
3141 delay_insn = XVECEXP (pat, 0, 0);
3143 /* See if the first insn in the delay slot is redundant with some
3144 previous insn. Remove it from the delay slot if so; then set up
3145 to reprocess this insn. */
3146 if (redundant_insn (XVECEXP (pat, 0, 1), delay_insn, 0))
3148 delete_from_delay_slot (XVECEXP (pat, 0, 1));
3149 next = prev_active_insn (next);
3153 /* See if we have a RETURN insn with a filled delay slot followed
3154 by a RETURN insn with an unfilled delay slot. If so, we can delete
3155 the first RETURN (but not its delay insn). This gives the same
3156 effect in fewer instructions.
3158 Only do so if optimizing for size since this results in slower, but smaller code. */
3161 && GET_CODE (PATTERN (delay_insn)) == RETURN
3163 && GET_CODE (next) == JUMP_INSN
3164 && GET_CODE (PATTERN (next)) == RETURN)
3168 /* Delete the RETURN and just execute the delay list insns.
3170 We do this by deleting the INSN containing the SEQUENCE, then
3171 re-emitting the insns separately, and then deleting the RETURN.
3172 This allows the count of the jump target to be properly decremented. */
3175 /* Clear the from target bit, since these insns are no longer in delay slots. */
3177 for (i = 0; i < XVECLEN (pat, 0); i++)
3178 INSN_FROM_TARGET_P (XVECEXP (pat, 0, i)) = 0;
3180 trial = PREV_INSN (insn);
3182 emit_insn_after (pat, trial);
3183 delete_scheduled_jump (delay_insn);
3187 /* Now look only at the cases where we have a filled JUMP_INSN. */
3188 if (GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) != JUMP_INSN
3189 || ! (condjump_p (XVECEXP (PATTERN (insn), 0, 0))
3190 || condjump_in_parallel_p (XVECEXP (PATTERN (insn), 0, 0))))
3193 target_label = JUMP_LABEL (delay_insn);
3197 /* If this jump goes to another unconditional jump, thread it, but
3198 don't convert a jump into a RETURN here. */
3199 trial = follow_jumps (target_label);
3200 /* We use next_real_insn instead of next_active_insn, so that
3201 the special USE insns emitted by reorg won't be ignored.
3202 If they are ignored, then they will get deleted if target_label
3203 is now unreachable, and that would cause mark_target_live_regs to fail. */
3205 trial = prev_label (next_real_insn (trial));
3206 if (trial == 0 && target_label != 0)
3207 trial = find_end_label ();
3209 if (trial != target_label
3210 && redirect_with_delay_slots_safe_p (delay_insn, trial, insn))
3212 reorg_redirect_jump (delay_insn, trial);
3213 target_label = trial;
3216 /* If the first insn at TARGET_LABEL is redundant with a previous
3217 insn, redirect the jump to the following insn and process it again. */
3218 trial = next_active_insn (target_label);
3219 if (trial && GET_CODE (PATTERN (trial)) != SEQUENCE
3220 && redundant_insn (trial, insn, 0))
3224 /* Figure out where to emit the special USE insn so we don't
3225 later incorrectly compute register live/death info. */
3226 tmp = next_active_insn (trial);
3228 tmp = find_end_label ();
3230 /* Insert the special USE insn and update dataflow info. */
3231 update_block (trial, tmp);
3233 /* Now emit a label before the special USE insn, and
3234 redirect our jump to the new label. */
3235 target_label = get_label_before (PREV_INSN (tmp));
3236 reorg_redirect_jump (delay_insn, target_label);
3241 /* Similarly, if it is an unconditional jump with one insn in its
3242 delay list and that insn is redundant, thread the jump. */
3243 if (trial && GET_CODE (PATTERN (trial)) == SEQUENCE
3244 && XVECLEN (PATTERN (trial), 0) == 2
3245 && GET_CODE (XVECEXP (PATTERN (trial), 0, 0)) == JUMP_INSN
3246 && (simplejump_p (XVECEXP (PATTERN (trial), 0, 0))
3247 || GET_CODE (PATTERN (XVECEXP (PATTERN (trial), 0, 0))) == RETURN)
3248 && redundant_insn (XVECEXP (PATTERN (trial), 0, 1), insn, 0))
3250 target_label = JUMP_LABEL (XVECEXP (PATTERN (trial), 0, 0));
3251 if (target_label == 0)
3252 target_label = find_end_label ();
3254 if (redirect_with_delay_slots_safe_p (delay_insn, target_label,
3257 reorg_redirect_jump (delay_insn, target_label);
3264 if (! INSN_ANNULLED_BRANCH_P (delay_insn)
3265 && prev_active_insn (target_label) == insn
3266 && ! condjump_in_parallel_p (delay_insn)
3268 /* If the last insn in the delay slot sets CC0 for some insn,
3269 various code assumes that it is in a delay slot. We could
3270 put it back where it belonged and delete the register notes,
3271 but it doesn't seem worthwhile in this uncommon case. */
3272 && ! find_reg_note (XVECEXP (pat, 0, XVECLEN (pat, 0) - 1),
3273 REG_CC_USER, NULL_RTX)
3279 /* All this insn does is execute its delay list and jump to the
3280 following insn. So delete the jump and just execute the delay
3283 We do this by deleting the INSN containing the SEQUENCE, then
3284 re-emitting the insns separately, and then deleting the jump.
3285 This allows the count of the jump target to be properly decremented. */
3288 /* Clear the from target bit, since these insns are no longer in delay slots. */
3290 for (i = 0; i < XVECLEN (pat, 0); i++)
3291 INSN_FROM_TARGET_P (XVECEXP (pat, 0, i)) = 0;
3293 trial = PREV_INSN (insn);
3295 emit_insn_after (pat, trial);
3296 delete_scheduled_jump (delay_insn);
3300 /* See if this is an unconditional jump around a single insn which is
3301 identical to the one in its delay slot. In this case, we can just
3302 delete the branch and the insn in its delay slot. */
3303 if (next && GET_CODE (next) == INSN
3304 && prev_label (next_active_insn (next)) == target_label
3305 && simplejump_p (insn)
3306 && XVECLEN (pat, 0) == 2
3307 && rtx_equal_p (PATTERN (next), PATTERN (XVECEXP (pat, 0, 1))))
3313 /* See if this jump (with its delay slots) branches around another
3314 jump (without delay slots). If so, invert this jump and point
3315 it to the target of the second jump. We cannot do this for
3316 annulled jumps, though. Again, don't convert a jump to a RETURN here. */
3318 if (! INSN_ANNULLED_BRANCH_P (delay_insn)
3319 && next && GET_CODE (next) == JUMP_INSN
3320 && (simplejump_p (next) || GET_CODE (PATTERN (next)) == RETURN)
3321 && next_active_insn (target_label) == next_active_insn (next)
3322 && no_labels_between_p (insn, next))
3324 rtx label = JUMP_LABEL (next);
3325 rtx old_label = JUMP_LABEL (delay_insn);
3328 label = find_end_label ();
3330 if (redirect_with_delay_slots_safe_p (delay_insn, label, insn))
3332 /* Be careful how we do this to avoid deleting code or labels
3333 that are momentarily dead. See similar optimization in
3336 ++LABEL_NUSES (old_label);
3338 if (invert_jump (delay_insn, label))
3342 /* Must update the INSN_FROM_TARGET_P bits now that
3343 the branch is reversed, so that mark_target_live_regs
3344 will handle the delay slot insn correctly. */
3345 for (i = 1; i < XVECLEN (PATTERN (insn), 0); i++)
3347 rtx slot = XVECEXP (PATTERN (insn), 0, i);
3348 INSN_FROM_TARGET_P (slot) = ! INSN_FROM_TARGET_P (slot);
3355 if (old_label && --LABEL_NUSES (old_label) == 0)
3356 delete_insn (old_label);
3361 /* If we own the thread opposite the way this insn branches, see if we
3362 can merge its delay slots with following insns. */
3363 if (INSN_FROM_TARGET_P (XVECEXP (pat, 0, 1))
3364 && own_thread_p (NEXT_INSN (insn), 0, 1))
3365 try_merge_delay_insns (insn, next);
3366 else if (! INSN_FROM_TARGET_P (XVECEXP (pat, 0, 1))
3367 && own_thread_p (target_label, target_label, 0))
3368 try_merge_delay_insns (insn, next_active_insn (target_label));
3370 /* If we get here, we haven't deleted INSN. But we may have deleted
3371 NEXT, so recompute it. */
3372 next = next_active_insn (insn);
3378 /* Look for filled jumps to the end of function label. We can try to convert
3379 them into RETURN insns if the insns in the delay slot are valid for the RETURN as well. */
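/* For illustration (hypothetical label, not from the original source):
   a filled `b END' whose slot insn is also valid in a RETURN's delay slot
   can be rewritten as a RETURN carrying the same slot insn, when END is
   the end-of-function label.  */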
3383 make_return_insns (first)
3386 rtx insn, jump_insn, pat;
3387 rtx real_return_label = end_of_function_label;
3390 /* See if there is a RETURN insn in the function other than the one we
3391 made for END_OF_FUNCTION_LABEL. If so, set up anything we can't change
3392 into a RETURN to jump to it. */
3393 for (insn = first; insn; insn = NEXT_INSN (insn))
3394 if (GET_CODE (insn) == JUMP_INSN && GET_CODE (PATTERN (insn)) == RETURN)
3396 real_return_label = get_label_before (insn);
3400 /* Show an extra usage of REAL_RETURN_LABEL so it won't go away if it
3401 was equal to END_OF_FUNCTION_LABEL. */
3402 LABEL_NUSES (real_return_label)++;
3404 /* Clear the list of insns to fill so we can use it. */
3405 obstack_free (&unfilled_slots_obstack, unfilled_firstobj);
3407 for (insn = first; insn; insn = NEXT_INSN (insn))
3411 /* Only look at filled JUMP_INSNs that go to the end of function label. */
3413 if (GET_CODE (insn) != INSN
3414 || GET_CODE (PATTERN (insn)) != SEQUENCE
3415 || GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) != JUMP_INSN
3416 || JUMP_LABEL (XVECEXP (PATTERN (insn), 0, 0)) != end_of_function_label)
3419 pat = PATTERN (insn);
3420 jump_insn = XVECEXP (pat, 0, 0);
3422 /* If we can't make the jump into a RETURN, try to redirect it to the best
3423 RETURN and go on to the next insn. */
3424 if (! reorg_redirect_jump (jump_insn, NULL_RTX))
3426 /* Make sure redirecting the jump will not invalidate the delay slot insns. */
3428 if (redirect_with_delay_slots_safe_p (jump_insn,
3431 reorg_redirect_jump (jump_insn, real_return_label);
3435 /* See if this RETURN can accept the insns currently in its delay slot.
3436 It can if it has more or an equal number of slots and the contents
3437 of each is valid. */
3439 flags = get_jump_flags (jump_insn, JUMP_LABEL (jump_insn));
3440 slots = num_delay_slots (jump_insn);
3441 if (slots >= XVECLEN (pat, 0) - 1)
3443 for (i = 1; i < XVECLEN (pat, 0); i++)
3445 #ifdef ANNUL_IFFALSE_SLOTS
3446 (INSN_ANNULLED_BRANCH_P (jump_insn)
3447 && INSN_FROM_TARGET_P (XVECEXP (pat, 0, i)))
3448 ? eligible_for_annul_false (jump_insn, i - 1,
3449 XVECEXP (pat, 0, i), flags) :
3451 #ifdef ANNUL_IFTRUE_SLOTS
3452 (INSN_ANNULLED_BRANCH_P (jump_insn)
3453 && ! INSN_FROM_TARGET_P (XVECEXP (pat, 0, i)))
3454 ? eligible_for_annul_true (jump_insn, i - 1,
3455 XVECEXP (pat, 0, i), flags) :
3457 eligible_for_delay (jump_insn, i - 1, XVECEXP (pat, 0, i), flags)))
3463 if (i == XVECLEN (pat, 0))
3466 /* We have to do something with this insn. If it is an unconditional
3467 RETURN, delete the SEQUENCE and output the individual insns,
3468 followed by the RETURN. Then set things up so we try to find
3469 insns for its delay slots, if it needs some. */
3470 if (GET_CODE (PATTERN (jump_insn)) == RETURN)
3472 rtx prev = PREV_INSN (insn);
3475 for (i = 1; i < XVECLEN (pat, 0); i++)
3476 prev = emit_insn_after (PATTERN (XVECEXP (pat, 0, i)), prev);
3478 insn = emit_jump_insn_after (PATTERN (jump_insn), prev);
3479 emit_barrier_after (insn);
3482 obstack_ptr_grow (&unfilled_slots_obstack, insn);
3485 /* It is probably more efficient to keep this with its current
3486 delay slot as a branch to a RETURN. */
3487 reorg_redirect_jump (jump_insn, real_return_label);
3490 /* Now delete REAL_RETURN_LABEL if we never used it. Then try to fill any
3491 new delay slots we have created. */
3492 if (--LABEL_NUSES (real_return_label) == 0)
3493 delete_insn (real_return_label);
3495 fill_simple_delay_slots (1);
3496 fill_simple_delay_slots (0);
3500 /* Try to find insns to place in delay slots. */
3503 dbr_schedule (first, file)
3507 rtx insn, next, epilogue_insn = 0;
3510 int old_flag_no_peephole = flag_no_peephole;
3512 /* Execute `final' once in prescan mode to delete any insns that won't be
3513 used. Don't let final try to do any peephole optimization--it will
3514 ruin dataflow information for this pass. */
3516 flag_no_peephole = 1;
3517 final (first, 0, NO_DEBUG, 1, 1);
3518 flag_no_peephole = old_flag_no_peephole;
3521 /* If the current function has no insns other than the prologue and
3522 epilogue, then do not try to fill any delay slots. */
3523 if (n_basic_blocks == 0)
3526 /* Find the highest INSN_UID and allocate and initialize our map from
3527 INSN_UID's to position in code. */
3528 for (max_uid = 0, insn = first; insn; insn = NEXT_INSN (insn))
3530 if (INSN_UID (insn) > max_uid)
3531 max_uid = INSN_UID (insn);
3532 if (GET_CODE (insn) == NOTE
3533 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_EPILOGUE_BEG)
3534 epilogue_insn = insn;
3537 uid_to_ruid = (int *) xmalloc ((max_uid + 1) * sizeof (int));
3538 for (i = 0, insn = first; insn; i++, insn = NEXT_INSN (insn))
3539 uid_to_ruid[INSN_UID (insn)] = i;
3541 /* Initialize the list of insns that need filling. */
3542 if (unfilled_firstobj == 0)
3544 gcc_obstack_init (&unfilled_slots_obstack);
3545 unfilled_firstobj = (rtx *) obstack_alloc (&unfilled_slots_obstack, 0);
3548 for (insn = next_active_insn (first); insn; insn = next_active_insn (insn))
3552 INSN_ANNULLED_BRANCH_P (insn) = 0;
3553 INSN_FROM_TARGET_P (insn) = 0;
3555 /* Skip vector tables. We can't get attributes for them. */
3556 if (GET_CODE (insn) == JUMP_INSN
3557 && (GET_CODE (PATTERN (insn)) == ADDR_VEC
3558 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC))
3561 if (num_delay_slots (insn) > 0)
3562 obstack_ptr_grow (&unfilled_slots_obstack, insn);
3564 /* Ensure all jumps go to the last of a set of consecutive labels. */
3565 if (GET_CODE (insn) == JUMP_INSN
3566 && (condjump_p (insn) || condjump_in_parallel_p (insn))
3567 && JUMP_LABEL (insn) != 0
3568 && ((target = prev_label (next_active_insn (JUMP_LABEL (insn))))
3569 != JUMP_LABEL (insn)))
3570 redirect_jump (insn, target);
3573 init_resource_info (epilogue_insn);
3575 /* Show we haven't computed an end-of-function label yet. */
3576 end_of_function_label = 0;
3578 /* Initialize the statistics for this function. */
3579 bzero ((char *) num_insns_needing_delays, sizeof num_insns_needing_delays);
3580 bzero ((char *) num_filled_delays, sizeof num_filled_delays);
3582 /* Now do the delay slot filling. Try everything twice in case earlier
3583 changes make more slots fillable. */
3585 for (reorg_pass_number = 0;
3586 reorg_pass_number < MAX_REORG_PASSES;
3587 reorg_pass_number++)
3589 fill_simple_delay_slots (1);
3590 fill_simple_delay_slots (0);
3591 fill_eager_delay_slots ();
3592 relax_delay_slots (first);
3595 /* Delete any USE insns made by update_block; subsequent passes don't need
3596 them or know how to deal with them. */
3597 for (insn = first; insn; insn = next)
3599 next = NEXT_INSN (insn);
3601 if (GET_CODE (insn) == INSN && GET_CODE (PATTERN (insn)) == USE
3602 && GET_RTX_CLASS (GET_CODE (XEXP (PATTERN (insn), 0))) == 'i')
3603 next = delete_insn (insn);
3606 /* If we made an end of function label, indicate that it is now
3607 safe to delete it by undoing our prior adjustment to LABEL_NUSES.
3608 If it is now unused, delete it. */
3609 if (end_of_function_label && --LABEL_NUSES (end_of_function_label) == 0)
3610 delete_insn (end_of_function_label);
3613 if (HAVE_return && end_of_function_label != 0)
3614 make_return_insns (first);
3617 obstack_free (&unfilled_slots_obstack, unfilled_firstobj);
3619 /* It is not clear why the line below is needed, but it does seem to be. */
3620 unfilled_firstobj = (rtx *) obstack_alloc (&unfilled_slots_obstack, 0);
3622 /* Reposition the prologue and epilogue notes in case we moved the
3623 prologue/epilogue insns. */
3624 reposition_prologue_and_epilogue_notes (first);
3628 register int i, j, need_comma;
3629 int total_delay_slots[MAX_DELAY_HISTOGRAM + 1];
3630 int total_annul_slots[MAX_DELAY_HISTOGRAM + 1];
3632 for (reorg_pass_number = 0;
3633 reorg_pass_number < MAX_REORG_PASSES;
3634 reorg_pass_number++)
3636 fprintf (file, ";; Reorg pass #%d:\n", reorg_pass_number + 1);
3637 for (i = 0; i < NUM_REORG_FUNCTIONS; i++)
3640 fprintf (file, ";; Reorg function #%d\n", i);
3642 fprintf (file, ";; %d insns needing delay slots\n;; ",
3643 num_insns_needing_delays[i][reorg_pass_number]);
3645 for (j = 0; j < MAX_DELAY_HISTOGRAM + 1; j++)
3646 if (num_filled_delays[i][j][reorg_pass_number])
3649 fprintf (file, ", ");
3651 fprintf (file, "%d got %d delays",
3652 num_filled_delays[i][j][reorg_pass_number], j);
3654 fprintf (file, "\n");
3657 bzero ((char *) total_delay_slots, sizeof total_delay_slots);
3658 bzero ((char *) total_annul_slots, sizeof total_annul_slots);
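/* Walk the whole function, tallying per slot count how many filled (and
   annulled) delay sequences were produced; candidates that remained
   unfilled land in bucket zero.  */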
3659 for (insn = first; insn; insn = NEXT_INSN (insn))
3661 if (! INSN_DELETED_P (insn)
3662 && GET_CODE (insn) == INSN
3663 && GET_CODE (PATTERN (insn)) != USE
3664 && GET_CODE (PATTERN (insn)) != CLOBBER)
3666 if (GET_CODE (PATTERN (insn)) == SEQUENCE)
3668 j = XVECLEN (PATTERN (insn), 0) - 1;
3669 if (j > MAX_DELAY_HISTOGRAM)
3670 j = MAX_DELAY_HISTOGRAM;
3671 if (INSN_ANNULLED_BRANCH_P (XVECEXP (PATTERN (insn), 0, 0)))
3672 total_annul_slots[j]++;
3674 total_delay_slots[j]++;
3676 else if (num_delay_slots (insn) > 0)
3677 total_delay_slots[0]++;
3680 fprintf (file, ";; Reorg totals: ");
3682 for (j = 0; j < MAX_DELAY_HISTOGRAM + 1; j++)
3684 if (total_delay_slots[j])
3687 fprintf (file, ", ");
3689 fprintf (file, "%d got %d delays", total_delay_slots[j], j);
3692 fprintf (file, "\n");
3693 #if defined (ANNUL_IFTRUE_SLOTS) || defined (ANNUL_IFFALSE_SLOTS)
3694 fprintf (file, ";; Reorg annuls: ");
3696 for (j = 0; j < MAX_DELAY_HISTOGRAM + 1; j++)
3698 if (total_annul_slots[j])
3701 fprintf (file, ", ");
3703 fprintf (file, "%d got %d delays", total_annul_slots[j], j);
3706 fprintf (file, "\n");
3708 fprintf (file, "\n");
3711 /* For all JUMP insns, fill in branch prediction notes, so that during
3712 assembler output a target can set branch prediction bits in the code.
3713 We have to do this now, as up until this point the destinations of
3714 JUMPS can be moved around and changed, but past right here that cannot happen. */
3716 for (insn = first; insn; insn = NEXT_INSN (insn))
3720 if (GET_CODE (insn) == INSN)
3722 rtx pat = PATTERN (insn);
3724 if (GET_CODE (pat) == SEQUENCE)
3725 insn = XVECEXP (pat, 0, 0);
3727 if (GET_CODE (insn) != JUMP_INSN)
3730 pred_flags = get_jump_flags (insn, JUMP_LABEL (insn));
3731 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_BR_PRED,
3732 GEN_INT (pred_flags),
3735 free_resource_info ();
3738 #endif /* DELAY_SLOTS */