/* Perform instruction reorganizations for delay slot filling.
   Copyright (C) 1992, 93-98, 1999 Free Software Foundation, Inc.
   Contributed by Richard Kenner (kenner@vlsi1.ultra.nyu.edu).
   Hacked by Michael Tiemann (tiemann@cygnus.com).

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */
/* Instruction reorganization pass.

   This pass runs after register allocation and final jump
   optimization.  It should be the last pass to run before peephole.
   It serves primarily to fill delay slots of insns, typically branch
   and call insns.  Other insns typically involve more complicated
   interactions of data dependencies and resource constraints, and
   are better handled by scheduling before register allocation (by the
   function `schedule_insns').

   The Branch Penalty is the number of extra cycles that are needed to
   execute a branch insn.  On an ideal machine, branches take a single
   cycle, and the Branch Penalty is 0.  Several RISC machines approach
   branch delays differently:

   The MIPS and AMD 29000 have a single branch delay slot.  Most insns
   (except other branches) can be used to fill this slot.  When the
   slot is filled, two insns execute in two cycles, reducing the
   branch penalty to zero.

   The Motorola 88000 conditionally exposes its branch delay slot,
   so code is shorter when it is turned off, but will run faster
   when useful insns are scheduled there.
   The IBM ROMP has two forms of branch and call insns, both with and
   without a delay slot.  Much like the 88k, insns not using the delay
   slot can be shorter (2 bytes vs. 4 bytes), but will run slower.
   The SPARC always has a branch delay slot, but its effects can be
   annulled when the branch is not taken.  This means that failing to
   find other sources of insns, we can hoist an insn from the branch
   target that would only be safe to execute knowing that the branch
   is taken.
   The HP-PA always has a branch delay slot.  For unconditional branches
   its effects can be annulled when the branch is taken.  The effects
   of the delay slot in a conditional branch can be nullified for forward
   taken branches, or for untaken backward branches.  This means
   we can hoist insns from the fall-through path for forward branches or
   steal insns from the target of backward branches.

   The TMS320C3x and C4x have three branch delay slots.  When the three
   slots are filled, the branch penalty is zero.  Most insns can fill the
   delay slots except jump insns.
   Three techniques for filling delay slots have been implemented so far:

   (1) `fill_simple_delay_slots' is the simplest, most efficient way
   to fill delay slots.  This pass first looks for insns which come
   from before the branch and which are safe to execute after the
   branch.  Then it searches after the insn requiring delay slots or,
   in the case of a branch, for insns that are after the point at
   which the branch merges into the fallthrough code, if such a point
   exists.  When such insns are found, the branch penalty decreases
   and no code expansion takes place.
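
   As an illustrative sketch (generic one-slot RISC assembly, not taken
   from any particular port), technique (1) might transform

	add	r3,r1,r2		cmp	r4,r5
	cmp	r4,r5		==>	beq	L1
	beq	L1			add	r3,r1,r2  ; delay slot
	nop

   since the `add' neither feeds the comparison nor is unsafe to
   execute when the branch is taken.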
   (2) `fill_eager_delay_slots' is more complicated: it is used for
   scheduling conditional jumps, or for scheduling jumps which cannot
   be filled using (1).  A machine need not have annulled jumps to use
   this strategy, but it helps (by keeping more options open).
   `fill_eager_delay_slots' tries to guess the direction the branch
   will go; if it guesses right 100% of the time, it can reduce the
   branch penalty as much as `fill_simple_delay_slots' does.  If it
   guesses wrong 100% of the time, it might as well schedule nops (or
   on the m88k, unexpose the branch slot).  When
   `fill_eager_delay_slots' takes insns from the fall-through path of
   the jump, usually there is no code expansion; when it takes insns
   from the branch target, there is code expansion if it is not the
   only way to reach that target.
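
   Continuing the illustrative sketch above (SPARC-flavored `,a' annul
   syntax, hypothetical labels): when the branch is predicted taken,
   technique (2) may copy the first insn from the target into an
   annulled slot and re-vector the branch past the original:

	beq	L1			beq,a	L1'
	...			==>	ld	r6,[r7]	; runs only if taken
     L1:ld	r6,[r7]		     L1:ld	r6,[r7]
	...			     L1':...

   If L1 is reachable only through this branch, the copy at L1 becomes
   dead; otherwise it remains, which is the code expansion mentioned
   above.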
   (3) `relax_delay_slots' uses a set of rules to simplify code that
   has been reorganized by (1) and (2).  It finds cases where a
   conditional test can be eliminated, jumps can be threaded, extra
   insns can be eliminated, etc.  It is the job of (1) and (2) to do a
   good job of scheduling locally; `relax_delay_slots' takes care of
   making the various individual schedules work well together.  It is
   especially tuned to handle the control flow interactions of branch
   insns.  It does nothing for insns with delay slots that do not
   branch.
   On machines that use CC0, we are very conservative.  We will not make
   a copy of an insn involving CC0 since we want to maintain a 1-1
   correspondence between the insn that sets and uses CC0.  The insns are
   allowed to be separated by placing an insn that sets CC0 (but not an insn
   that uses CC0; we could do this, but it doesn't seem worthwhile) in a
   delay slot.  In that case, we point each insn at the other with REG_CC_USER
   and REG_CC_SETTER notes.  Note that these restrictions affect very few
   machines because most RISC machines with delay slots will not use CC0
   (the RT is the only known exception at this point).
   Not yet implemented:

   The Acorn Risc Machine can conditionally execute most insns, so
   it is profitable to move single insns into a position to execute
   based on the condition code of the previous insn.

   The HP-PA can conditionally nullify insns, providing a similar
   effect to the ARM, differing mostly in which insn is "in charge".  */
#include "function.h"
#include "insn-config.h"
#include "conditions.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "insn-flags.h"
#include "insn-attr.h"
#include "resource.h"

#define obstack_chunk_alloc xmalloc
#define obstack_chunk_free free
#ifndef ANNUL_IFTRUE_SLOTS
#define eligible_for_annul_true(INSN, SLOTS, TRIAL, FLAGS) 0
#endif

#ifndef ANNUL_IFFALSE_SLOTS
#define eligible_for_annul_false(INSN, SLOTS, TRIAL, FLAGS) 0
#endif
/* Insns which have delay slots that have not yet been filled.  */

static struct obstack unfilled_slots_obstack;
static rtx *unfilled_firstobj;

/* Define macros to refer to the first and last slot containing unfilled
   insns.  These are used because the list may move and its address
   should be recomputed at each use.  */

#define unfilled_slots_base	\
  ((rtx *) obstack_base (&unfilled_slots_obstack))

#define unfilled_slots_next	\
  ((rtx *) obstack_next_free (&unfilled_slots_obstack))
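
/* A minimal sketch (hypothetical loop, shown only to illustrate why these
   are macros rather than cached pointers) of how the unfilled-slot list
   is walked; the passes below do the equivalent via unfilled_slots_base[i]:

	rtx *slot;
	for (slot = unfilled_slots_base; slot < unfilled_slots_next; slot++)
	  if (*slot != 0 && ! INSN_DELETED_P (*slot))
	    ;  (... try to fill the delay slots of *slot ...)

   Growing the obstack may move the underlying vector, so the base must
   be recomputed on every use rather than stored once.  */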
/* Points to the label before the end of the function.  */
static rtx end_of_function_label;

/* Mapping between INSN_UID's and position in the code since INSN_UID's do
   not always monotonically increase.  */
static int *uid_to_ruid;

/* Highest valid index in `uid_to_ruid'.  */
static int max_uid;
static int stop_search_p		PROTO((rtx, int));
static int resource_conflicts_p		PROTO((struct resources *,
					       struct resources *));
static int insn_references_resource_p	PROTO((rtx, struct resources *, int));
static int insn_sets_resource_p		PROTO((rtx, struct resources *, int));
static rtx find_end_label		PROTO((void));
static rtx emit_delay_sequence		PROTO((rtx, rtx, int));
static rtx add_to_delay_list		PROTO((rtx, rtx));
static rtx delete_from_delay_slot	PROTO((rtx));
static void delete_scheduled_jump	PROTO((rtx));
static void note_delay_statistics	PROTO((int, int));
static rtx optimize_skip		PROTO((rtx));
static int get_jump_flags		PROTO((rtx, rtx));
static int rare_destination		PROTO((rtx));
static int mostly_true_jump		PROTO((rtx, rtx));
static rtx get_branch_condition		PROTO((rtx, rtx));
static int condition_dominates_p	PROTO((rtx, rtx));
static int redirect_with_delay_slots_safe_p PROTO ((rtx, rtx, rtx));
static int redirect_with_delay_list_safe_p PROTO ((rtx, rtx, rtx));
static int check_annul_list_true_false	PROTO ((int, rtx));
static rtx steal_delay_list_from_target PROTO((rtx, rtx, rtx, rtx,
					       struct resources *,
					       struct resources *,
					       struct resources *,
					       int, int *, int *, rtx *));
static rtx steal_delay_list_from_fallthrough PROTO((rtx, rtx, rtx, rtx,
						    struct resources *,
						    struct resources *,
						    struct resources *,
						    int, int *, int *));
static void try_merge_delay_insns	PROTO((rtx, rtx));
static rtx redundant_insn		PROTO((rtx, rtx, rtx));
static int own_thread_p			PROTO((rtx, rtx, int));
static void update_block		PROTO((rtx, rtx));
static int reorg_redirect_jump		PROTO((rtx, rtx));
static void update_reg_dead_notes	PROTO((rtx, rtx));
static void fix_reg_dead_note		PROTO((rtx, rtx));
static void update_reg_unused_notes	PROTO((rtx, rtx));
static void fill_simple_delay_slots	PROTO((int));
static rtx fill_slots_from_thread	PROTO((rtx, rtx, rtx, rtx, int, int,
					       int, int, int *, rtx));
static void fill_eager_delay_slots	PROTO((void));
static void relax_delay_slots		PROTO((rtx));
static void make_return_insns		PROTO((rtx));
/* Return TRUE if this insn should stop the search for insn to fill delay
   slots.  LABELS_P indicates that labels should terminate the search.
   In all cases, jumps terminate the search.  */

static int
stop_search_p (insn, labels_p)
     rtx insn;
     int labels_p;
{
  switch (GET_CODE (insn))
    {
    case INSN:
    case CALL_INSN:
      /* OK unless it contains a delay slot or is an `asm' insn of some type.
	 We don't know anything about these.  */
      return (GET_CODE (PATTERN (insn)) == SEQUENCE
	      || GET_CODE (PATTERN (insn)) == ASM_INPUT
	      || asm_noperands (PATTERN (insn)) >= 0);
/* Return TRUE if any resources are marked in both RES1 and RES2 or if either
   resource set contains a volatile memory reference.  Otherwise, return
   FALSE.  */

static int
resource_conflicts_p (res1, res2)
     struct resources *res1, *res2;
{
  if ((res1->cc && res2->cc) || (res1->memory && res2->memory)
      || (res1->unch_memory && res2->unch_memory)
      || res1->volatil || res2->volatil)
    return 1;

#ifdef HARD_REG_SET
  return (res1->regs & res2->regs) != HARD_CONST (0);
#else
  for (i = 0; i < HARD_REG_SET_LONGS; i++)
    if ((res1->regs[i] & res2->regs[i]) != 0)
      return 1;
#endif
/* Return TRUE if any resource marked in RES, a `struct resources', is
   referenced by INSN.  If INCLUDE_DELAYED_EFFECTS is set, return if the
   called routine is using those resources.

   We compute this by computing all the resources referenced by INSN and
   seeing if this conflicts with RES.  It might be faster to directly check
   ourselves, and this is the way it used to work, but it means duplicating
   a large block of complex code.  */

static int
insn_references_resource_p (insn, res, include_delayed_effects)
     register rtx insn;
     register struct resources *res;
     int include_delayed_effects;
{
  struct resources insn_res;

  CLEAR_RESOURCE (&insn_res);
  mark_referenced_resources (insn, &insn_res, include_delayed_effects);
  return resource_conflicts_p (&insn_res, res);
}
/* Return TRUE if INSN modifies resources that are marked in RES.
   INCLUDE_DELAYED_EFFECTS is set if the actions of that routine should be
   included.  CC0 is only modified if it is explicitly set; see comments
   in front of mark_set_resources for details.  */

static int
insn_sets_resource_p (insn, res, include_delayed_effects)
     register rtx insn;
     register struct resources *res;
     int include_delayed_effects;
{
  struct resources insn_sets;

  CLEAR_RESOURCE (&insn_sets);
  mark_set_resources (insn, &insn_sets, 0, include_delayed_effects);
  return resource_conflicts_p (&insn_sets, res);
}
/* Find a label at the end of the function or before a RETURN.  If there
   is none, make one.  */

static rtx
find_end_label ()
{
  rtx insn;

  /* If we found one previously, return it.  */
  if (end_of_function_label)
    return end_of_function_label;

  /* Otherwise, see if there is a label at the end of the function.  If there
     is, it must be that RETURN insns aren't needed, so that is our return
     label and we don't have to do anything else.  */

  insn = get_last_insn ();
  while (GET_CODE (insn) == NOTE
	 || (GET_CODE (insn) == INSN
	     && (GET_CODE (PATTERN (insn)) == USE
		 || GET_CODE (PATTERN (insn)) == CLOBBER)))
    insn = PREV_INSN (insn);

  /* When a target threads its epilogue we might already have a
     suitable return insn.  If so put a label before it for the
     end_of_function_label.  */
  if (GET_CODE (insn) == BARRIER
      && GET_CODE (PREV_INSN (insn)) == JUMP_INSN
      && GET_CODE (PATTERN (PREV_INSN (insn))) == RETURN)
    {
      rtx temp = PREV_INSN (PREV_INSN (insn));
      end_of_function_label = gen_label_rtx ();
      LABEL_NUSES (end_of_function_label) = 0;

      /* Put the label before any USE insns that may precede the
	 RETURN insn.  */
      while (GET_CODE (temp) == USE)
	temp = PREV_INSN (temp);

      emit_label_after (end_of_function_label, temp);
    }
  else if (GET_CODE (insn) == CODE_LABEL)
    end_of_function_label = insn;
  else
    {
      /* Otherwise, make a new label and emit a RETURN and BARRIER,
	 if needed.  */
      end_of_function_label = gen_label_rtx ();
      LABEL_NUSES (end_of_function_label) = 0;
      emit_label (end_of_function_label);
#ifdef HAVE_return
      if (HAVE_return)
	{
	  /* The return we make may have delay slots too.  */
	  rtx insn = gen_return ();
	  insn = emit_jump_insn (insn);
	  emit_barrier ();
	  if (num_delay_slots (insn) > 0)
	    obstack_ptr_grow (&unfilled_slots_obstack, insn);
	}
#endif
    }

  /* Show one additional use for this label so it won't go away until
     we are done.  */
  ++LABEL_NUSES (end_of_function_label);

  return end_of_function_label;
}
/* Put INSN and LIST together in a SEQUENCE rtx of LENGTH, and replace
   the pattern of INSN with the SEQUENCE.

   Chain the insns so that NEXT_INSN of each insn in the sequence points to
   the next and NEXT_INSN of the last insn in the sequence points to
   the first insn after the sequence.  Similarly for PREV_INSN.  This makes
   it easier to scan all insns.

   Returns the SEQUENCE that replaces INSN.  */
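
/* Sketch of the resulting chain for a two-slot sequence, where S is the
   new SEQUENCE insn, D the copied delay insn, and A/B the slot insns
   (an illustration of the comment above, not code):

	prev <-> S <-> next		outer NEXT_INSN/PREV_INSN chain
	prev <-  D <-> A <-> B -> next	chain threaded through the SEQUENCE

   so walking NEXT_INSN from inside the sequence falls off its end into
   the ordinary insn stream.  */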
static rtx
emit_delay_sequence (insn, list, length)
     rtx insn;
     rtx list;
     int length;
{
  register int i = 1;
  rtx li;
  int had_barrier = 0;

  /* Allocate the rtvec to hold the insns and the SEQUENCE.  */
  rtvec seqv = rtvec_alloc (length + 1);
  rtx seq = gen_rtx_SEQUENCE (VOIDmode, seqv);
  rtx seq_insn = make_insn_raw (seq);
  rtx first = get_insns ();
  rtx last = get_last_insn ();

  /* Make a copy of the insn having delay slots.  */
  rtx delay_insn = copy_rtx (insn);

  /* If INSN is followed by a BARRIER, delete the BARRIER since it will only
     confuse further processing.  Update LAST in case it was the last insn.
     We will put the BARRIER back in later.  */
  if (NEXT_INSN (insn) && GET_CODE (NEXT_INSN (insn)) == BARRIER)
    {
      delete_insn (NEXT_INSN (insn));
      last = get_last_insn ();
      had_barrier = 1;
    }

  /* Splice our SEQUENCE into the insn stream where INSN used to be.  */
  NEXT_INSN (seq_insn) = NEXT_INSN (insn);
  PREV_INSN (seq_insn) = PREV_INSN (insn);

  if (NEXT_INSN (seq_insn))
    PREV_INSN (NEXT_INSN (seq_insn)) = seq_insn;

  if (PREV_INSN (seq_insn))
    NEXT_INSN (PREV_INSN (seq_insn)) = seq_insn;

  /* Note the calls to set_new_first_and_last_insn must occur after
     SEQ_INSN has been completely spliced into the insn stream.

     Otherwise CUR_INSN_UID will get set to an incorrect value because
     set_new_first_and_last_insn will not find SEQ_INSN in the chain.  */
  if (insn == last)
    set_new_first_and_last_insn (first, seq_insn);

  if (insn == first)
    set_new_first_and_last_insn (seq_insn, last);

  /* Build our SEQUENCE and rebuild the insn chain.  */
  XVECEXP (seq, 0, 0) = delay_insn;
  INSN_DELETED_P (delay_insn) = 0;
  PREV_INSN (delay_insn) = PREV_INSN (seq_insn);

  for (li = list; li; li = XEXP (li, 1), i++)
    {
      rtx tem = XEXP (li, 0);
      rtx note;

      /* Show that this copy of the insn isn't deleted.  */
      INSN_DELETED_P (tem) = 0;

      XVECEXP (seq, 0, i) = tem;
      PREV_INSN (tem) = XVECEXP (seq, 0, i - 1);
      NEXT_INSN (XVECEXP (seq, 0, i - 1)) = tem;

      /* Remove any REG_DEAD notes because we can't rely on them now
	 that the insn has been moved.  */
      for (note = REG_NOTES (tem); note; note = XEXP (note, 1))
	if (REG_NOTE_KIND (note) == REG_DEAD)
	  XEXP (note, 0) = const0_rtx;
    }

  NEXT_INSN (XVECEXP (seq, 0, length)) = NEXT_INSN (seq_insn);

  /* If the previous insn is a SEQUENCE, update the NEXT_INSN pointer on the
     last insn in that SEQUENCE to point to us.  Similarly for the first
     insn in the following insn if it is a SEQUENCE.  */

  if (PREV_INSN (seq_insn) && GET_CODE (PREV_INSN (seq_insn)) == INSN
      && GET_CODE (PATTERN (PREV_INSN (seq_insn))) == SEQUENCE)
    NEXT_INSN (XVECEXP (PATTERN (PREV_INSN (seq_insn)), 0,
			XVECLEN (PATTERN (PREV_INSN (seq_insn)), 0) - 1))
      = seq_insn;

  if (NEXT_INSN (seq_insn) && GET_CODE (NEXT_INSN (seq_insn)) == INSN
      && GET_CODE (PATTERN (NEXT_INSN (seq_insn))) == SEQUENCE)
    PREV_INSN (XVECEXP (PATTERN (NEXT_INSN (seq_insn)), 0, 0)) = seq_insn;

  /* If there used to be a BARRIER, put it back.  */
  if (had_barrier)
    emit_barrier_after (seq_insn);

  return seq_insn;
}
/* Add INSN to DELAY_LIST and return the head of the new list.  The list must
   be in the order in which the insns are to be executed.  */

static rtx
add_to_delay_list (insn, delay_list)
     rtx insn;
     rtx delay_list;
{
  /* If we have an empty list, just make a new list element.  If
     INSN has its block number recorded, clear it since we may
     be moving the insn to a new block.  */

  if (delay_list == 0)
    {
      clear_hashed_info_for_insn (insn);
      return gen_rtx_INSN_LIST (VOIDmode, insn, NULL_RTX);
    }

  /* Otherwise this must be an INSN_LIST.  Add INSN to the end of the
     list.  */
  XEXP (delay_list, 1) = add_to_delay_list (insn, XEXP (delay_list, 1));

  return delay_list;
}
/* Delete INSN from the delay slot of the insn that it is in, which may
   produce an insn with no delay slots.  Return the new insn.  */

static rtx
delete_from_delay_slot (insn)
     rtx insn;
{
  rtx trial, seq_insn, seq, prev;
  rtx delay_list = 0;
  int i;

  /* We first must find the insn containing the SEQUENCE with INSN in its
     delay slot.  Do this by finding an insn, TRIAL, where
     PREV_INSN (NEXT_INSN (TRIAL)) != TRIAL.  (Insns inside a SEQUENCE are
     chained among themselves, so this equality first fails at the last
     insn of the SEQUENCE, whose NEXT_INSN is the insn following the
     SEQUENCE as a whole.)  */

  for (trial = insn;
       PREV_INSN (NEXT_INSN (trial)) == trial;
       trial = NEXT_INSN (trial))
    ;

  seq_insn = PREV_INSN (NEXT_INSN (trial));
  seq = PATTERN (seq_insn);

  /* Create a delay list consisting of all the insns other than the one
     we are deleting (unless we were the only one).  */
  if (XVECLEN (seq, 0) > 2)
    for (i = 1; i < XVECLEN (seq, 0); i++)
      if (XVECEXP (seq, 0, i) != insn)
	delay_list = add_to_delay_list (XVECEXP (seq, 0, i), delay_list);

  /* Delete the old SEQUENCE, re-emit the insn that used to have the delay
     list, and rebuild the delay list if non-empty.  */
  prev = PREV_INSN (seq_insn);
  trial = XVECEXP (seq, 0, 0);
  delete_insn (seq_insn);
  add_insn_after (trial, prev);

  if (GET_CODE (trial) == JUMP_INSN
      && (simplejump_p (trial) || GET_CODE (PATTERN (trial)) == RETURN))
    emit_barrier_after (trial);

  /* If there are any delay insns, re-emit them.  Otherwise clear the
     annul flag.  */
  if (delay_list)
    trial = emit_delay_sequence (trial, delay_list, XVECLEN (seq, 0) - 2);
  else
    INSN_ANNULLED_BRANCH_P (trial) = 0;

  INSN_FROM_TARGET_P (insn) = 0;

  /* Show we need to fill this insn again.  */
  obstack_ptr_grow (&unfilled_slots_obstack, trial);

  return trial;
}
/* Delete INSN, a JUMP_INSN.  If it is a conditional jump, we must track down
   the insn that sets CC0 for it and delete it too.  */

static void
delete_scheduled_jump (insn)
     rtx insn;
{
  /* Delete the insn that sets cc0 for us.  On machines without cc0, we could
     delete the insn that sets the condition code, but it is hard to find it.
     Since this case is rare anyway, don't bother trying; there would likely
     be other insns that became dead anyway, which we wouldn't know to
     delete.  */

#ifdef HAVE_cc0
  if (reg_mentioned_p (cc0_rtx, insn))
    {
      rtx note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);

      /* If a reg-note was found, it points to an insn to set CC0.  This
	 insn is in the delay list of some other insn.  So delete it from
	 the delay list it was in.  */
      if (note)
	{
	  if (! FIND_REG_INC_NOTE (XEXP (note, 0), NULL_RTX)
	      && sets_cc0_p (PATTERN (XEXP (note, 0))) == 1)
	    delete_from_delay_slot (XEXP (note, 0));
	}
      else
	{
	  /* The insn setting CC0 is our previous insn, but it may be in
	     a delay slot.  It will be the last insn in the delay slot, if
	     it is.  */
	  rtx trial = previous_insn (insn);
	  if (GET_CODE (trial) == NOTE)
	    trial = prev_nonnote_insn (trial);
	  if (sets_cc0_p (PATTERN (trial)) != 1
	      || FIND_REG_INC_NOTE (trial, 0))
	    return;
	  if (PREV_INSN (NEXT_INSN (trial)) == trial)
	    delete_insn (trial);
	  else
	    delete_from_delay_slot (trial);
	}
    }
#endif
/* Counters for delay-slot filling.  */

#define NUM_REORG_FUNCTIONS 2
#define MAX_DELAY_HISTOGRAM 3
#define MAX_REORG_PASSES 2

static int num_insns_needing_delays[NUM_REORG_FUNCTIONS][MAX_REORG_PASSES];

static int num_filled_delays[NUM_REORG_FUNCTIONS][MAX_DELAY_HISTOGRAM+1][MAX_REORG_PASSES];

static int reorg_pass_number;

static void
note_delay_statistics (slots_filled, index)
     int slots_filled, index;
{
  num_insns_needing_delays[index][reorg_pass_number]++;
  if (slots_filled > MAX_DELAY_HISTOGRAM)
    slots_filled = MAX_DELAY_HISTOGRAM;
  num_filled_delays[index][slots_filled][reorg_pass_number]++;
}
#if defined(ANNUL_IFFALSE_SLOTS) || defined(ANNUL_IFTRUE_SLOTS)

/* Optimize the following cases:

   1.  When a conditional branch skips over only one instruction,
       use an annulling branch and put that insn in the delay slot.
       Use either a branch that annuls when the condition is true or
       invert the test with a branch that annuls when the condition is
       false.  This saves insns, since otherwise we must copy an insn
       from the L1 target.

	(orig)		 (skip)		(otherwise)
	Bcc.n L1	Bcc',a L1	Bcc,a L1'
	insn		insn		insn2
      L1:	      L1:	      L1:
	insn2		insn2
      L1':		    L1':
	insn3		insn3		insn3

   2.  When a conditional branch skips over only one instruction,
       and after that, it unconditionally branches somewhere else,
       perform the similar optimization.  This saves executing the
       second branch in the case where the inverted condition is true.

   This should be expanded to skip over N insns, where N is the number
   of delay slots required.  */
static rtx
optimize_skip (insn)
     register rtx insn;
{
  register rtx trial = next_nonnote_insn (insn);
  rtx next_trial = next_active_insn (trial);
  rtx delay_list = 0;
  rtx target_label;
  int flags;

  flags = get_jump_flags (insn, JUMP_LABEL (insn));

  if (trial == 0
      || GET_CODE (trial) != INSN
      || GET_CODE (PATTERN (trial)) == SEQUENCE
      || recog_memoized (trial) < 0
      || (! eligible_for_annul_false (insn, 0, trial, flags)
	  && ! eligible_for_annul_true (insn, 0, trial, flags)))
    return 0;

  /* There are two cases where we are just executing one insn (we assume
     here that a branch requires only one insn; this should be generalized
     at some point):  Where the branch goes around a single insn or where
     we have one insn followed by a branch to the same label we branch to.
     In both of these cases, inverting the jump and annulling the delay
     slot give the same effect in fewer insns.  */
  if ((next_trial == next_active_insn (JUMP_LABEL (insn)))
      || (next_trial != 0
	  && GET_CODE (next_trial) == JUMP_INSN
	  && JUMP_LABEL (insn) == JUMP_LABEL (next_trial)
	  && (simplejump_p (next_trial)
	      || GET_CODE (PATTERN (next_trial)) == RETURN)))
    {
      if (eligible_for_annul_false (insn, 0, trial, flags))
	{
	  if (invert_jump (insn, JUMP_LABEL (insn)))
	    INSN_FROM_TARGET_P (trial) = 1;
	  else if (! eligible_for_annul_true (insn, 0, trial, flags))
	    return 0;
	}

      delay_list = add_to_delay_list (trial, NULL_RTX);
      next_trial = next_active_insn (trial);
      update_block (trial, trial);
      delete_insn (trial);

      /* Also, if we are targeting an unconditional
	 branch, thread our jump to the target of that branch.  Don't
	 change this into a RETURN here, because it may not accept what
	 we have in the delay slot.  We'll fix this up later.  */
      if (next_trial && GET_CODE (next_trial) == JUMP_INSN
	  && (simplejump_p (next_trial)
	      || GET_CODE (PATTERN (next_trial)) == RETURN))
	{
	  target_label = JUMP_LABEL (next_trial);
	  if (target_label == 0)
	    target_label = find_end_label ();

	  /* Recompute the flags based on TARGET_LABEL since threading
	     the jump to TARGET_LABEL may change the direction of the
	     jump (which may change the circumstances in which the
	     delay slot is nullified).  */
	  flags = get_jump_flags (insn, target_label);
	  if (eligible_for_annul_true (insn, 0, trial, flags))
	    reorg_redirect_jump (insn, target_label);
	}

      INSN_ANNULLED_BRANCH_P (insn) = 1;
    }

  return delay_list;
}
#endif
/* Encode and return branch direction and prediction information for
   INSN assuming it will jump to LABEL.

   Non-conditional branches return no direction information and
   are predicted as very likely taken.  */

static int
get_jump_flags (insn, label)
     rtx insn, label;
{
  int flags;

  /* get_jump_flags can be passed any insn with delay slots, these may
     be INSNs, CALL_INSNs, or JUMP_INSNs.  Only JUMP_INSNs have branch
     direction information, and only if they are conditional jumps.

     If LABEL is zero, then there is no way to determine the branch
     direction.  */
  if (GET_CODE (insn) == JUMP_INSN
      && (condjump_p (insn) || condjump_in_parallel_p (insn))
      && INSN_UID (insn) <= max_uid
      && label != 0
      && INSN_UID (label) <= max_uid)
    flags
      = (uid_to_ruid[INSN_UID (label)] > uid_to_ruid[INSN_UID (insn)])
	? ATTR_FLAG_forward : ATTR_FLAG_backward;
  /* No valid direction information.  */
  else
    flags = 0;

  /* If INSN is a conditional branch, call mostly_true_jump to
     determine the branch prediction.

     Non-conditional branches are predicted as very likely taken.  */
  if (GET_CODE (insn) == JUMP_INSN
      && (condjump_p (insn) || condjump_in_parallel_p (insn)))
    {
      int prediction;

      prediction = mostly_true_jump (insn, get_branch_condition (insn, label));
      switch (prediction)
	{
	case 2:
	  flags |= (ATTR_FLAG_very_likely | ATTR_FLAG_likely);
	  break;
	case 1:
	  flags |= ATTR_FLAG_likely;
	  break;
	case 0:
	  flags |= ATTR_FLAG_unlikely;
	  break;
	case -1:
	  flags |= (ATTR_FLAG_very_unlikely | ATTR_FLAG_unlikely);
	  break;
	}
    }
  else
    flags |= (ATTR_FLAG_very_likely | ATTR_FLAG_likely);

  return flags;
}
/* Return 1 if INSN is a destination that will be branched to rarely (the
   return point of a function); return 2 if DEST will be branched to very
   rarely (a call to a function that doesn't return).  Otherwise,
   return 0.  */

static int
rare_destination (insn)
     rtx insn;
{
  int jump_count = 0;
  rtx next;

  for (; insn; insn = next)
    {
      if (GET_CODE (insn) == INSN && GET_CODE (PATTERN (insn)) == SEQUENCE)
	insn = XVECEXP (PATTERN (insn), 0, 0);

      next = NEXT_INSN (insn);

      switch (GET_CODE (insn))
	{
	case BARRIER:
	  /* A BARRIER can either be after a JUMP_INSN or a CALL_INSN.  We
	     don't scan past JUMP_INSNs, so any barrier we find here must
	     have been after a CALL_INSN and hence mean the call doesn't
	     return.  */
	  return 2;

	case JUMP_INSN:
	  if (GET_CODE (PATTERN (insn)) == RETURN)
	    return 1;
	  else if (simplejump_p (insn)
		   && jump_count++ < 10)
	    next = JUMP_LABEL (insn);
	  else
	    return 0;
	}
    }

  /* If we got here it means we hit the end of the function.  So this
     is an unlikely destination.  */

  return 1;
}
/* Return truth value of the statement that this branch
   is mostly taken.  If we think that the branch is extremely likely
   to be taken, we return 2.  If the branch is slightly more likely to be
   taken, return 1.  If the branch is slightly less likely to be taken,
   return 0 and if the branch is highly unlikely to be taken, return -1.

   CONDITION, if non-zero, is the condition that JUMP_INSN is testing.  */

static int
mostly_true_jump (jump_insn, condition)
     rtx jump_insn, condition;
{
  rtx target_label = JUMP_LABEL (jump_insn);
  rtx insn;
  int rare_dest = rare_destination (target_label);
  int rare_fallthrough = rare_destination (NEXT_INSN (jump_insn));

  /* If branch probabilities are available, then use that number since it
     always gives a correct answer.  */
  if (flag_branch_probabilities)
    {
      rtx note = find_reg_note (jump_insn, REG_BR_PROB, 0);
      if (note)
	{
	  int prob = XINT (note, 0);
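
	  /* REG_BR_PROB values are scaled by REG_BR_PROB_BASE, so the
	     cutoffs below correspond to taken probabilities of roughly
	     90%, 50%, and 10%.  */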
	  if (prob >= REG_BR_PROB_BASE * 9 / 10)
	    return 2;
	  else if (prob >= REG_BR_PROB_BASE / 2)
	    return 1;
	  else if (prob >= REG_BR_PROB_BASE / 10)
	    return 0;
	  else
	    return -1;
	}
    }
#ifdef LABEL_OUTSIDE_LOOP_P
  /* If this is a branch outside a loop, it is highly unlikely.  */
  if (GET_CODE (PATTERN (jump_insn)) == SET
      && GET_CODE (SET_SRC (PATTERN (jump_insn))) == IF_THEN_ELSE
      && ((GET_CODE (XEXP (SET_SRC (PATTERN (jump_insn)), 1)) == LABEL_REF
	   && LABEL_OUTSIDE_LOOP_P (XEXP (SET_SRC (PATTERN (jump_insn)), 1)))
	  || (GET_CODE (XEXP (SET_SRC (PATTERN (jump_insn)), 2)) == LABEL_REF
	      && LABEL_OUTSIDE_LOOP_P (XEXP (SET_SRC (PATTERN (jump_insn)), 2)))))
    return -1;
#endif
  if (target_label)
    {
      /* If this is the test of a loop, it is very likely true.  We scan
	 backwards from the target label.  If we find a NOTE_INSN_LOOP_BEG
	 before the next real insn, we assume the branch is to the top of
	 the loop.  */
      for (insn = PREV_INSN (target_label);
	   insn && GET_CODE (insn) == NOTE;
	   insn = PREV_INSN (insn))
	if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG)
	  return 2;

      /* If this is a jump to the test of a loop, it is likely true.  We scan
	 forwards from the target label.  If we find a NOTE_INSN_LOOP_VTOP
	 before the next real insn, we assume the branch is to the loop branch
	 test.  */
      for (insn = NEXT_INSN (target_label);
	   insn && GET_CODE (insn) == NOTE;
	   insn = NEXT_INSN (insn))
	if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_VTOP)
	  return 1;
    }

  /* Look at the relative rarities of the fallthrough and destination.  If
     they differ, we can predict the branch that way.  */

  switch (rare_fallthrough - rare_dest)
    {
    case -2:
      return -1;
    case -1:
      return 0;
    case 1:
      return 1;
    case 2:
      return 2;
    }
  /* If we couldn't figure out what this jump was, assume it won't be
     taken.  This should be rare.  */
  if (condition == 0)
    return 0;

  /* EQ tests are usually false and NE tests are usually true.  Also,
     most quantities are positive, so we can make the appropriate guesses
     about signed comparisons against zero.  */
  switch (GET_CODE (condition))
    {
    case CONST_INT:
      /* Unconditional branch.  */
      return 1;
    case EQ:
      return 0;
    case NE:
      return 1;
    case LE:
    case LT:
      if (XEXP (condition, 1) == const0_rtx)
	return 0;
      break;
    case GE:
    case GT:
      if (XEXP (condition, 1) == const0_rtx)
	return 1;
      break;

    default:
      break;
    }

  /* Predict that backward branches are usually taken and forward branches
     usually are not.  If we don't know whether this is forward or backward,
     assume the branch will be taken, since most are.  */
  return (target_label == 0 || INSN_UID (jump_insn) > max_uid
	  || INSN_UID (target_label) > max_uid
	  || (uid_to_ruid[INSN_UID (jump_insn)]
	      > uid_to_ruid[INSN_UID (target_label)]));
}
/* Return the condition under which INSN will branch to TARGET.  If TARGET
   is zero, return the condition under which INSN will return.  If INSN is
   an unconditional branch, return const_true_rtx.  If INSN isn't a simple
   type of jump, or it doesn't go to TARGET, return 0.  */

static rtx
get_branch_condition (insn, target)
     rtx insn;
     rtx target;
{
  rtx pat = PATTERN (insn);
  rtx src;

  if (condjump_in_parallel_p (insn))
    pat = XVECEXP (pat, 0, 0);

  if (GET_CODE (pat) == RETURN)
    return target == 0 ? const_true_rtx : 0;

  else if (GET_CODE (pat) != SET || SET_DEST (pat) != pc_rtx)
    return 0;

  src = SET_SRC (pat);
  if (GET_CODE (src) == LABEL_REF && XEXP (src, 0) == target)
    return const_true_rtx;

  else if (GET_CODE (src) == IF_THEN_ELSE
	   && ((target == 0 && GET_CODE (XEXP (src, 1)) == RETURN)
	       || (GET_CODE (XEXP (src, 1)) == LABEL_REF
		   && XEXP (XEXP (src, 1), 0) == target))
	   && XEXP (src, 2) == pc_rtx)
    return XEXP (src, 0);

  else if (GET_CODE (src) == IF_THEN_ELSE
	   && ((target == 0 && GET_CODE (XEXP (src, 2)) == RETURN)
	       || (GET_CODE (XEXP (src, 2)) == LABEL_REF
		   && XEXP (XEXP (src, 2), 0) == target))
	   && XEXP (src, 1) == pc_rtx)
    return gen_rtx_fmt_ee (reverse_condition (GET_CODE (XEXP (src, 0))),
			   GET_MODE (XEXP (src, 0)),
			   XEXP (XEXP (src, 0), 0), XEXP (XEXP (src, 0), 1));

  return 0;
}
/* Return non-zero if CONDITION is more strict than the condition of
   INSN, i.e., if INSN will always branch if CONDITION is true.  */
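
/* Illustrative example (not from the original sources): if CONDITION is
   (eq x y) and INSN branches on (le x y), then EQ being true guarantees
   LE, so the dominance test below succeeds and INSN is known to branch
   whenever CONDITION holds.  The dominance relation between comparison
   codes themselves is computed by comparison_dominates_p.  */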
static int
condition_dominates_p (condition, insn)
     rtx condition;
     rtx insn;
{
  rtx other_condition = get_branch_condition (insn, JUMP_LABEL (insn));
  enum rtx_code code = GET_CODE (condition);
  enum rtx_code other_code;

  if (rtx_equal_p (condition, other_condition)
      || other_condition == const_true_rtx)
    return 1;

  else if (condition == const_true_rtx || other_condition == 0)
    return 0;

  other_code = GET_CODE (other_condition);
  if (GET_RTX_LENGTH (code) != 2 || GET_RTX_LENGTH (other_code) != 2
      || ! rtx_equal_p (XEXP (condition, 0), XEXP (other_condition, 0))
      || ! rtx_equal_p (XEXP (condition, 1), XEXP (other_condition, 1)))
    return 0;

  return comparison_dominates_p (code, other_code);
}
/* Return non-zero if redirecting JUMP to NEWLABEL does not invalidate
   any insns already in the delay slot of JUMP.  */

static int
redirect_with_delay_slots_safe_p (jump, newlabel, seq)
     rtx jump, newlabel, seq;
{
  int flags, i;
  rtx pat = PATTERN (seq);

  /* Make sure all the delay slots of this jump would still
     be valid after threading the jump.  If they are still
     valid, then return non-zero.  */

  flags = get_jump_flags (jump, newlabel);
  for (i = 1; i < XVECLEN (pat, 0); i++)
    if (! (
#ifdef ANNUL_IFFALSE_SLOTS
	   (INSN_ANNULLED_BRANCH_P (jump)
	    && INSN_FROM_TARGET_P (XVECEXP (pat, 0, i)))
	   ? eligible_for_annul_false (jump, i - 1,
				       XVECEXP (pat, 0, i), flags) :
#endif
#ifdef ANNUL_IFTRUE_SLOTS
	   (INSN_ANNULLED_BRANCH_P (jump)
	    && ! INSN_FROM_TARGET_P (XVECEXP (pat, 0, i)))
	   ? eligible_for_annul_true (jump, i - 1,
				      XVECEXP (pat, 0, i), flags) :
#endif
	   eligible_for_delay (jump, i - 1, XVECEXP (pat, 0, i), flags)))
      break;

  return (i == XVECLEN (pat, 0));
}
/* Return non-zero if redirecting JUMP to NEWLABEL does not invalidate
   any insns we wish to place in the delay slot of JUMP.  */

static int
redirect_with_delay_list_safe_p (jump, newlabel, delay_list)
     rtx jump, newlabel, delay_list;
{
  int flags, i;
  rtx li;

  /* Make sure all the insns in DELAY_LIST would still be
     valid after threading the jump.  If they are still
     valid, then return non-zero.  */

  flags = get_jump_flags (jump, newlabel);
  for (li = delay_list, i = 0; li; li = XEXP (li, 1), i++)
    if (! (
#ifdef ANNUL_IFFALSE_SLOTS
	   (INSN_ANNULLED_BRANCH_P (jump)
	    && INSN_FROM_TARGET_P (XEXP (li, 0)))
	   ? eligible_for_annul_false (jump, i, XEXP (li, 0), flags) :
#endif
#ifdef ANNUL_IFTRUE_SLOTS
	   (INSN_ANNULLED_BRANCH_P (jump)
	    && ! INSN_FROM_TARGET_P (XEXP (li, 0)))
	   ? eligible_for_annul_true (jump, i, XEXP (li, 0), flags) :
#endif
	   eligible_for_delay (jump, i, XEXP (li, 0), flags)))
      break;

  return (li == NULL);
}
/* DELAY_LIST is a list of insns that have already been placed into delay
   slots.  See if all of them have the same annulling status as ANNUL_TRUE_P.
   If not, return 0; otherwise return 1.  */

static int
check_annul_list_true_false (annul_true_p, delay_list)
     int annul_true_p;
     rtx delay_list;
{
  rtx temp;

  if (delay_list)
    {
      for (temp = delay_list; temp; temp = XEXP (temp, 1))
	{
	  rtx trial = XEXP (temp, 0);

	  if ((annul_true_p && INSN_FROM_TARGET_P (trial))
	      || (!annul_true_p && !INSN_FROM_TARGET_P (trial)))
	    return 0;
	}
    }

  return 1;
}
/* INSN branches to an insn whose pattern SEQ is a SEQUENCE.  Given that
   the condition tested by INSN is CONDITION and the resources shown in
   OTHER_NEEDED are needed after INSN, see whether INSN can take all the insns
   from SEQ's delay list, in addition to whatever insns it may execute
   (in DELAY_LIST).  SETS and NEEDED denote resources already set and
   needed while searching for delay slot insns.  Return the concatenated
   delay list if possible, otherwise, return 0.

   SLOTS_TO_FILL is the total number of slots required by INSN, and
   PSLOTS_FILLED points to the number filled so far (also the number of
   insns in DELAY_LIST).  It is updated with the number that have been
   filled from the SEQUENCE, if any.

   PANNUL_P points to a non-zero value if we already know that we need
   to annul INSN.  If this routine determines that annulling is needed,
   it may set that value non-zero.

   PNEW_THREAD points to a location that is to receive the place at which
   execution should continue.  */
static rtx
steal_delay_list_from_target (insn, condition, seq, delay_list,
			      sets, needed, other_needed,
			      slots_to_fill, pslots_filled, pannul_p,
			      pnew_thread)
     rtx insn, condition;
     rtx seq;
     rtx delay_list;
     struct resources *sets, *needed, *other_needed;
     int slots_to_fill;
     int *pslots_filled;
     int *pannul_p;
     rtx *pnew_thread;
{
  rtx temp;
  int slots_remaining = slots_to_fill - *pslots_filled;
  int total_slots_filled = *pslots_filled;
  rtx new_delay_list = 0;
  int must_annul = *pannul_p;
  int used_annul = 0;
  int i;
  struct resources cc_set;

  /* We can't do anything if there are more delay slots in SEQ than we
     can handle, or if we don't know that it will be a taken branch.
     We know that it will be a taken branch if it is either an unconditional
     branch or a conditional branch with a stricter branch condition.

     Also, exit if the branch has more than one set, since then it is computing
     other results that can't be ignored, e.g. the HPPA mov&branch instruction.
     ??? It may be possible to move other sets into INSN in addition to
     moving the instructions in the delay slots.

     We cannot steal the delay list if one of the instructions in the
     current delay_list modifies the condition codes and the jump in the
     sequence is a conditional jump.  We cannot do this because we cannot
     change the direction of the jump because the condition codes
     will affect the direction of the jump in the sequence.  */

  CLEAR_RESOURCE (&cc_set);
  for (temp = delay_list; temp; temp = XEXP (temp, 1))
    {
      rtx trial = XEXP (temp, 0);

      mark_set_resources (trial, &cc_set, 0, 1);
      if (insn_references_resource_p (XVECEXP (seq , 0, 0), &cc_set, 0))
	return delay_list;
    }

  if (XVECLEN (seq, 0) - 1 > slots_remaining
      || ! condition_dominates_p (condition, XVECEXP (seq, 0, 0))
      || ! single_set (XVECEXP (seq, 0, 0)))
    return delay_list;

  for (i = 1; i < XVECLEN (seq, 0); i++)
    {
      rtx trial = XVECEXP (seq, 0, i);
      int flags;

      if (insn_references_resource_p (trial, sets, 0)
	  || insn_sets_resource_p (trial, needed, 0)
	  || insn_sets_resource_p (trial, sets, 0)
#ifdef HAVE_cc0
	  /* If TRIAL sets CC0, we can't copy it, so we can't steal this
	     delay list.  */
	  || find_reg_note (trial, REG_CC_USER, NULL_RTX)
#endif
	  /* If TRIAL is from the fallthrough code of an annulled branch insn
	     in SEQ, we cannot use it.  */
	  || (INSN_ANNULLED_BRANCH_P (XVECEXP (seq, 0, 0))
	      && ! INSN_FROM_TARGET_P (trial)))
	return delay_list;

      /* If this insn was already done (usually in a previous delay slot),
	 pretend we put it in our delay slot.  */
      if (redundant_insn (trial, insn, new_delay_list))
	continue;

      /* We will end up re-vectoring this branch, so compute flags
	 based on jumping to the new label.  */
      flags = get_jump_flags (insn, JUMP_LABEL (XVECEXP (seq, 0, 0)));

      if (! must_annul
	  && ((condition == const_true_rtx
	       || (! insn_sets_resource_p (trial, other_needed, 0)
		   && ! may_trap_p (PATTERN (trial)))))
	  ? eligible_for_delay (insn, total_slots_filled, trial, flags)
	  : (must_annul || (delay_list == NULL && new_delay_list == NULL))
	    && (must_annul = 1,
	        check_annul_list_true_false (0, delay_list)
	        && check_annul_list_true_false (0, new_delay_list)
	        && eligible_for_annul_false (insn, total_slots_filled,
					     trial, flags)))
	{
	  if (must_annul)
	    used_annul = 1;
	  temp = copy_rtx (trial);
	  INSN_FROM_TARGET_P (temp) = 1;
	  new_delay_list = add_to_delay_list (temp, new_delay_list);
	  total_slots_filled++;

	  if (--slots_remaining == 0)
	    break;
	}
      else
	return delay_list;
    }

  /* Show the place to which we will be branching.  */
  *pnew_thread = next_active_insn (JUMP_LABEL (XVECEXP (seq, 0, 0)));

  /* Add any new insns to the delay list and update the count of the
     number of slots filled.  */
  *pslots_filled = total_slots_filled;
  if (used_annul)
    *pannul_p = 1;

  if (delay_list == 0)
    return new_delay_list;

  for (temp = new_delay_list; temp; temp = XEXP (temp, 1))
    delay_list = add_to_delay_list (XEXP (temp, 0), delay_list);

  return delay_list;
}
/* Similar to steal_delay_list_from_target except that SEQ is on the
   fallthrough path of INSN.  Here we only do something if the delay insn
   of SEQ is an unconditional branch.  In that case we steal its delay slot
   for INSN since unconditional branches are much easier to fill.  */

static rtx
steal_delay_list_from_fallthrough (insn, condition, seq,
				   delay_list, sets, needed, other_needed,
				   slots_to_fill, pslots_filled, pannul_p)
     rtx insn, condition;
     rtx seq;
     rtx delay_list;
     struct resources *sets, *needed, *other_needed;
     int slots_to_fill;
     int *pslots_filled;
     int *pannul_p;
{
  int i;
  int flags;
  int must_annul = *pannul_p;
  int used_annul = 0;

  flags = get_jump_flags (insn, JUMP_LABEL (insn));

  /* We can't do anything if SEQ's delay insn isn't an
     unconditional branch.  */

  if (! simplejump_p (XVECEXP (seq, 0, 0))
      && GET_CODE (PATTERN (XVECEXP (seq, 0, 0))) != RETURN)
    return delay_list;

  for (i = 1; i < XVECLEN (seq, 0); i++)
    {
      rtx trial = XVECEXP (seq, 0, i);

      /* If TRIAL sets CC0, stealing it will move it too far from the use
	 of CC0.  */
      if (insn_references_resource_p (trial, sets, 0)
	  || insn_sets_resource_p (trial, needed, 0)
	  || insn_sets_resource_p (trial, sets, 0)
#ifdef HAVE_cc0
	  || sets_cc0_p (PATTERN (trial))
#endif
	  )
	break;

      /* If this insn was already done, we don't need it.  */
      if (redundant_insn (trial, insn, delay_list))
	{
	  delete_from_delay_slot (trial);
	  continue;
	}

      if (! must_annul
	  && ((condition == const_true_rtx
	       || (! insn_sets_resource_p (trial, other_needed, 0)
		   && ! may_trap_p (PATTERN (trial)))))
	  ? eligible_for_delay (insn, *pslots_filled, trial, flags)
	  : (must_annul || delay_list == NULL) && (must_annul = 1,
	     check_annul_list_true_false (1, delay_list)
	     && eligible_for_annul_true (insn, *pslots_filled, trial, flags)))
	{
	  if (must_annul)
	    used_annul = 1;
	  delete_from_delay_slot (trial);
	  delay_list = add_to_delay_list (trial, delay_list);

	  if (++(*pslots_filled) == slots_to_fill)
	    break;
	}
      else
	break;
    }

  if (used_annul)
    *pannul_p = 1;
  return delay_list;
}
/* Try merging insns starting at THREAD which match exactly the insns in
   INSN's delay list.

   If all insns were matched and the insn was previously annulling, the
   annul bit will be cleared.

   For each insn that is merged, if the branch is or will be non-annulling,
   we delete the merged insn.  */
static void
try_merge_delay_insns (insn, thread)
     rtx insn, thread;
{
  rtx trial, next_trial;
  rtx delay_insn = XVECEXP (PATTERN (insn), 0, 0);
  int annul_p = INSN_ANNULLED_BRANCH_P (delay_insn);
  int slot_number = 1;
  int num_slots = XVECLEN (PATTERN (insn), 0);
  rtx next_to_match = XVECEXP (PATTERN (insn), 0, slot_number);
  struct resources set, needed;
  rtx merged_insns = 0;
  int i;
  int flags;

  flags = get_jump_flags (delay_insn, JUMP_LABEL (delay_insn));

  CLEAR_RESOURCE (&needed);
  CLEAR_RESOURCE (&set);

  /* If this is not an annulling branch, take into account anything needed in
     INSN's delay slot.  This prevents two increments from being incorrectly
     folded into one.  If we are annulling, this would be the correct
     thing to do.  (The alternative, looking at things set in NEXT_TO_MATCH
     will essentially disable this optimization.  This method is somewhat of
     a kludge, but I don't see a better way.)  */
  if (! annul_p)
    for (i = 1 ; i < num_slots ; i++)
      if (XVECEXP (PATTERN (insn), 0, i))
	mark_referenced_resources (XVECEXP (PATTERN (insn), 0, i), &needed, 1);

  for (trial = thread; !stop_search_p (trial, 1); trial = next_trial)
    {
      rtx pat = PATTERN (trial);
      rtx oldtrial = trial;

      next_trial = next_nonnote_insn (trial);

      /* TRIAL must be a CALL_INSN or INSN.  Skip USE and CLOBBER.  */
      if (GET_CODE (trial) == INSN
	  && (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER))
	continue;

      if (GET_CODE (next_to_match) == GET_CODE (trial)
#ifdef HAVE_cc0
	  /* We can't share an insn that sets cc0.  */
	  && ! sets_cc0_p (pat)
#endif
	  && ! insn_references_resource_p (trial, &set, 1)
	  && ! insn_sets_resource_p (trial, &set, 1)
	  && ! insn_sets_resource_p (trial, &needed, 1)
	  && (trial = try_split (pat, trial, 0)) != 0
	  /* Update next_trial, in case try_split succeeded.  */
	  && (next_trial = next_nonnote_insn (trial))
	  /* Likewise THREAD.  */
	  && (thread = oldtrial == thread ? trial : thread)
	  && rtx_equal_p (PATTERN (next_to_match), PATTERN (trial))
	  /* Have to test this condition if annul condition is different
	     from (and less restrictive than) non-annulling one.  */
	  && eligible_for_delay (delay_insn, slot_number - 1, trial, flags))
	{
	  if (! annul_p)
	    {
	      update_block (trial, thread);
	      if (trial == thread)
		thread = next_active_insn (thread);

	      delete_insn (trial);
	      INSN_FROM_TARGET_P (next_to_match) = 0;
	    }
	  else
	    merged_insns = gen_rtx_INSN_LIST (VOIDmode, trial, merged_insns);

	  if (++slot_number == num_slots)
	    break;

	  next_to_match = XVECEXP (PATTERN (insn), 0, slot_number);
	}

      mark_set_resources (trial, &set, 0, 1);
      mark_referenced_resources (trial, &needed, 1);
    }

  /* See if we stopped on a filled insn.  If we did, try to see if its
     delay slots match.  */
  if (slot_number != num_slots
      && trial && GET_CODE (trial) == INSN
      && GET_CODE (PATTERN (trial)) == SEQUENCE
      && ! INSN_ANNULLED_BRANCH_P (XVECEXP (PATTERN (trial), 0, 0)))
    {
      rtx pat = PATTERN (trial);
      rtx filled_insn = XVECEXP (pat, 0, 0);

      /* Account for resources set/needed by the filled insn.  */
      mark_set_resources (filled_insn, &set, 0, 1);
      mark_referenced_resources (filled_insn, &needed, 1);

      for (i = 1; i < XVECLEN (pat, 0); i++)
	{
	  rtx dtrial = XVECEXP (pat, 0, i);

	  if (! insn_references_resource_p (dtrial, &set, 1)
	      && ! insn_sets_resource_p (dtrial, &set, 1)
	      && ! insn_sets_resource_p (dtrial, &needed, 1)
#ifdef HAVE_cc0
	      && ! sets_cc0_p (PATTERN (dtrial))
#endif
	      && rtx_equal_p (PATTERN (next_to_match), PATTERN (dtrial))
	      && eligible_for_delay (delay_insn, slot_number - 1, dtrial, flags))
	    {
	      if (! annul_p)
		{
		  rtx new;

		  update_block (dtrial, thread);
		  new = delete_from_delay_slot (dtrial);
		  if (INSN_DELETED_P (thread))
		    thread = new;
		  INSN_FROM_TARGET_P (next_to_match) = 0;
		}
	      else
		merged_insns = gen_rtx_INSN_LIST (SImode, dtrial,
						  merged_insns);

	      if (++slot_number == num_slots)
		break;

	      next_to_match = XVECEXP (PATTERN (insn), 0, slot_number);
	    }
	  else
	    {
	      /* Keep track of the set/referenced resources for the delay
		 slots of any trial insns we encounter.  */
	      mark_set_resources (dtrial, &set, 0, 1);
	      mark_referenced_resources (dtrial, &needed, 1);
	    }
	}
    }

  /* If all insns in the delay slot have been matched and we were previously
     annulling the branch, we no longer need to.  In that case delete all the
     merged insns.  Also clear the INSN_FROM_TARGET_P bit of each insn in
     the delay list so that we know that it isn't only being used at the
     target.  */
  if (slot_number == num_slots && annul_p)
    {
      for (; merged_insns; merged_insns = XEXP (merged_insns, 1))
	{
	  if (GET_MODE (merged_insns) == SImode)
	    {
	      rtx new;

	      update_block (XEXP (merged_insns, 0), thread);
	      new = delete_from_delay_slot (XEXP (merged_insns, 0));
	      if (INSN_DELETED_P (thread))
		thread = new;
	    }
	  else
	    {
	      update_block (XEXP (merged_insns, 0), thread);
	      delete_insn (XEXP (merged_insns, 0));
	    }
	}

      INSN_ANNULLED_BRANCH_P (delay_insn) = 0;

      for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
	INSN_FROM_TARGET_P (XVECEXP (PATTERN (insn), 0, i)) = 0;
    }
}
/* See if INSN is redundant with an insn in front of TARGET.  Often this
   is called when INSN is a candidate for a delay slot of TARGET.
   DELAY_LIST are insns that will be placed in delay slots of TARGET in front
   of INSN.  Often INSN will be redundant with an insn in a delay slot of
   some previous insn.  This happens when we have a series of branches to the
   same label; in that case the first insn at the target might want to go
   into each of the delay slots.
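
   As an illustrative sketch (hypothetical one-slot assembly): if two
   branches both target L1 and an earlier branch on this path already
   carries a copy of the `ld' from L1 in its delay slot,

	b,d	L1		; delay: ld r1,[r2]
	...
     L1:ld	r1,[r2]

   then when that same `ld' is considered again for another slot on the
   same path, this routine can recognize it as already performed instead
   of emitting a second copy.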
   If we are not careful, this routine can take up a significant fraction
   of the total compilation time (4%), but only wins rarely.  Hence we
   speed this routine up by making two passes.  The first pass goes back
   until it hits a label and sees if it finds an insn with an identical
   pattern.  Only in this (relatively rare) event does it check for
   data conflicts.

   We do not split insns we encounter.  This could cause us not to find a
   redundant insn, but the cost of splitting seems greater than the possible
   gain in rare cases.  */
static rtx
redundant_insn (insn, target, delay_list)
     rtx insn;
     rtx target;
     rtx delay_list;
{
  rtx target_main = target;
  rtx ipat = PATTERN (insn);
  rtx trial, pat;
  struct resources needed, set;
  int i;

  /* If INSN has any REG_UNUSED notes, it can't match anything since we
     are allowed to not actually assign to such a register.  */
  if (find_reg_note (insn, REG_UNUSED, NULL_RTX) != 0)
    return 0;

  /* Scan backwards looking for a match.  */
  for (trial = PREV_INSN (target); trial; trial = PREV_INSN (trial))
    {
      if (GET_CODE (trial) == CODE_LABEL)
	return 0;

      if (GET_RTX_CLASS (GET_CODE (trial)) != 'i')
	continue;

      pat = PATTERN (trial);
      if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
	continue;

      if (GET_CODE (pat) == SEQUENCE)
	{
	  /* Stop for a CALL and its delay slots because it is difficult to
	     track its resource needs correctly.  */
	  if (GET_CODE (XVECEXP (pat, 0, 0)) == CALL_INSN)
	    return 0;

	  /* Stop for an INSN or JUMP_INSN with delayed effects and its delay
	     slots because it is difficult to track its resource needs
	     correctly.  */

#ifdef INSN_SETS_ARE_DELAYED
	  if (INSN_SETS_ARE_DELAYED (XVECEXP (pat, 0, 0)))
	    return 0;
#endif

#ifdef INSN_REFERENCES_ARE_DELAYED
	  if (INSN_REFERENCES_ARE_DELAYED (XVECEXP (pat, 0, 0)))
	    return 0;
#endif

	  /* See if any of the insns in the delay slot match, updating
	     resource requirements as we go.  */
	  for (i = XVECLEN (pat, 0) - 1; i > 0; i--)
	    if (GET_CODE (XVECEXP (pat, 0, i)) == GET_CODE (insn)
		&& rtx_equal_p (PATTERN (XVECEXP (pat, 0, i)), ipat)
		&& ! find_reg_note (XVECEXP (pat, 0, i), REG_UNUSED, NULL_RTX))
	      break;

	  /* If found a match, exit this loop early.  */
	  if (i > 0)
	    break;
	}

      else if (GET_CODE (trial) == GET_CODE (insn) && rtx_equal_p (pat, ipat)
	       && ! find_reg_note (trial, REG_UNUSED, NULL_RTX))
	break;
    }

  /* If we didn't find an insn that matches, return 0.  */
  if (trial == 0)
    return 0;

  /* See what resources this insn sets and needs.  If they overlap, or
     if this insn references CC0, it can't be redundant.  */

  CLEAR_RESOURCE (&needed);
  CLEAR_RESOURCE (&set);
  mark_set_resources (insn, &set, 0, 1);
  mark_referenced_resources (insn, &needed, 1);

  /* If TARGET is a SEQUENCE, get the main insn.  */
  if (GET_CODE (target) == INSN && GET_CODE (PATTERN (target)) == SEQUENCE)
    target_main = XVECEXP (PATTERN (target), 0, 0);

  if (resource_conflicts_p (&needed, &set)
#ifdef HAVE_cc0
      || reg_mentioned_p (cc0_rtx, ipat)
#endif
      /* The insn requiring the delay may not set anything needed or set by
	 INSN.  */
      || insn_sets_resource_p (target_main, &needed, 1)
      || insn_sets_resource_p (target_main, &set, 1))
    return 0;

  /* Insns we pass may not set either NEEDED or SET, so merge them for
     simpler tests.  */
  needed.memory |= set.memory;
  needed.unch_memory |= set.unch_memory;
  IOR_HARD_REG_SET (needed.regs, set.regs);
  /* This insn isn't redundant if it conflicts with an insn that either is
     or will be in a delay slot of TARGET.  */

  while (delay_list)
    {
      if (insn_sets_resource_p (XEXP (delay_list, 0), &needed, 1))
	return 0;
      delay_list = XEXP (delay_list, 1);
    }

  if (GET_CODE (target) == INSN && GET_CODE (PATTERN (target)) == SEQUENCE)
    for (i = 1; i < XVECLEN (PATTERN (target), 0); i++)
      if (insn_sets_resource_p (XVECEXP (PATTERN (target), 0, i), &needed, 1))
	return 0;

  /* Scan backwards until we reach a label or an insn that uses something
     INSN sets or sets something INSN uses or sets.  */

  for (trial = PREV_INSN (target);
       trial && GET_CODE (trial) != CODE_LABEL;
       trial = PREV_INSN (trial))
    {
      if (GET_CODE (trial) != INSN && GET_CODE (trial) != CALL_INSN
	  && GET_CODE (trial) != JUMP_INSN)
	continue;

      pat = PATTERN (trial);
      if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
	continue;

      if (GET_CODE (pat) == SEQUENCE)
	{
	  /* If this is a CALL_INSN and its delay slots, it is hard to track
	     the resource needs properly, so give up.  */
	  if (GET_CODE (XVECEXP (pat, 0, 0)) == CALL_INSN)
	    return 0;

	  /* If this is an INSN or JUMP_INSN with delayed effects, it
	     is hard to track the resource needs properly, so give up.  */

#ifdef INSN_SETS_ARE_DELAYED
	  if (INSN_SETS_ARE_DELAYED (XVECEXP (pat, 0, 0)))
	    return 0;
#endif

#ifdef INSN_REFERENCES_ARE_DELAYED
	  if (INSN_REFERENCES_ARE_DELAYED (XVECEXP (pat, 0, 0)))
	    return 0;
#endif

	  /* See if any of the insns in the delay slot match, updating
	     resource requirements as we go.  */
	  for (i = XVECLEN (pat, 0) - 1; i > 0; i--)
	    {
	      rtx candidate = XVECEXP (pat, 0, i);

	      /* If an insn will be annulled if the branch is false, it isn't
		 considered as a possible duplicate insn.  */
	      if (rtx_equal_p (PATTERN (candidate), ipat)
		  && ! (INSN_ANNULLED_BRANCH_P (XVECEXP (pat, 0, 0))
			&& INSN_FROM_TARGET_P (candidate)))
		{
		  /* Show that this insn will be used in the sequel.  */
		  INSN_FROM_TARGET_P (candidate) = 0;
		  return candidate;
		}

	      /* Unless this is an annulled insn from the target of a branch,
		 we must stop if it sets anything needed or set by INSN.  */
	      if ((! INSN_ANNULLED_BRANCH_P (XVECEXP (pat, 0, 0))
		   || ! INSN_FROM_TARGET_P (candidate))
		  && insn_sets_resource_p (candidate, &needed, 1))
		return 0;
	    }

	  /* If the insn requiring the delay slot conflicts with INSN, we
	     must stop.  */
	  if (insn_sets_resource_p (XVECEXP (pat, 0, 0), &needed, 1))
	    return 0;
	}
      else
	{
	  /* See if TRIAL is the same as INSN.  */
	  pat = PATTERN (trial);
	  if (rtx_equal_p (pat, ipat))
	    return trial;

	  /* Can't go any further if TRIAL conflicts with INSN.  */
	  if (insn_sets_resource_p (trial, &needed, 1))
	    return 0;
	}
    }

  return 0;
}
/* Return 1 if THREAD can only be executed in one way.  If LABEL is non-zero,
   it is the target of the branch insn being scanned.  If ALLOW_FALLTHROUGH
   is non-zero, we are allowed to fall into this thread; otherwise, we are
   not.

   If LABEL is used more than once or we pass a label other than LABEL before
   finding an active insn, we do not own this thread.  */

static int
own_thread_p (thread, label, allow_fallthrough)
     rtx thread;
     rtx label;
     int allow_fallthrough;
{
  rtx active_insn;
  rtx insn;

  /* We don't own the function end.  */
  if (thread == 0)
    return 0;

  /* Get the first active insn, or THREAD, if it is an active insn.  */
  active_insn = next_active_insn (PREV_INSN (thread));

  for (insn = thread; insn != active_insn; insn = NEXT_INSN (insn))
    if (GET_CODE (insn) == CODE_LABEL
	&& (insn != label || LABEL_NUSES (insn) != 1))
      return 0;

  if (allow_fallthrough)
    return 1;

  /* Ensure that we reach a BARRIER before any insn or label.  */
  for (insn = prev_nonnote_insn (thread);
       insn == 0 || GET_CODE (insn) != BARRIER;
       insn = prev_nonnote_insn (insn))
    if (insn == 0
	|| GET_CODE (insn) == CODE_LABEL
	|| (GET_CODE (insn) == INSN
	    && GET_CODE (PATTERN (insn)) != USE
	    && GET_CODE (PATTERN (insn)) != CLOBBER))
      return 0;

  return 1;
}
/* Called when INSN is being moved from a location near the target of a jump.
   We leave a marker of the form (use (INSN)) immediately in front
   of WHERE for mark_target_live_regs.  These markers will be deleted when
   reorg finishes.

   We used to try to update the live status of registers if WHERE is at
   the start of a basic block, but that can't work since we may remove a
   BARRIER in relax_delay_slots.  */

static void
update_block (insn, where)
     rtx insn;
     rtx where;
{
  /* Ignore if this was in a delay slot and it came from the target of
     a branch.  */
  if (INSN_FROM_TARGET_P (insn))
    return;

  emit_insn_before (gen_rtx_USE (VOIDmode, insn), where);

  /* INSN might be making a value live in a block where it didn't use to
     be.  So recompute liveness information for this block.  */

  incr_ticks_for_insn (insn);
}
/* Similar to REDIRECT_JUMP except that we update the BB_TICKS entry for
   the basic block containing the jump.  */

static int
reorg_redirect_jump (jump, nlabel)
     rtx jump;
     rtx nlabel;
{
  incr_ticks_for_insn (jump);
  return redirect_jump (jump, nlabel);
}
/* Called when INSN is being moved forward into a delay slot of DELAYED_INSN.
   We check every instruction between INSN and DELAYED_INSN for REG_DEAD notes
   that reference values used in INSN.  If we find one, then we move the
   REG_DEAD note to INSN.

   This is needed to handle the case where a later insn (after INSN) has a
   REG_DEAD note for a register used by INSN, and this later insn subsequently
   gets moved before a CODE_LABEL because it is a redundant insn.  In this
   case, mark_target_live_regs may be confused into thinking the register
   is dead because it sees a REG_DEAD note immediately before a CODE_LABEL.  */

static void
update_reg_dead_notes (insn, delayed_insn)
     rtx insn, delayed_insn;
{
  rtx p, link, next;

  for (p = next_nonnote_insn (insn); p != delayed_insn;
       p = next_nonnote_insn (p))
    for (link = REG_NOTES (p); link; link = next)
      {
	next = XEXP (link, 1);

	if (REG_NOTE_KIND (link) != REG_DEAD
	    || GET_CODE (XEXP (link, 0)) != REG)
	  continue;

	if (reg_referenced_p (XEXP (link, 0), PATTERN (insn)))
	  {
	    /* Move the REG_DEAD note from P to INSN.  */
	    remove_note (p, link);
	    XEXP (link, 1) = REG_NOTES (insn);
	    REG_NOTES (insn) = link;
	  }
      }
}
/* Called when an insn redundant with start_insn is deleted.  If there
   is a REG_DEAD note for the target of start_insn between start_insn
   and stop_insn, then the REG_DEAD note needs to be deleted since the
   value no longer dies there.

   If the REG_DEAD note isn't deleted, then mark_target_live_regs may be
   confused into thinking the register is dead.  */

static void
fix_reg_dead_note (start_insn, stop_insn)
     rtx start_insn, stop_insn;
{
  rtx p, link, next;

  for (p = next_nonnote_insn (start_insn); p != stop_insn;
       p = next_nonnote_insn (p))
    for (link = REG_NOTES (p); link; link = next)
      {
	next = XEXP (link, 1);

	if (REG_NOTE_KIND (link) != REG_DEAD
	    || GET_CODE (XEXP (link, 0)) != REG)
	  continue;

	if (reg_set_p (XEXP (link, 0), PATTERN (start_insn)))
	  {
	    remove_note (p, link);
	    return;
	  }
      }
}
1957 /* Delete any REG_UNUSED notes that exist on INSN but not on REDUNDANT_INSN.
1959 This handles the case of udivmodXi4 instructions which optimize their
1960 output depending on whether any REG_UNUSED notes are present.
1961 We must make sure that INSN calculates as many results as REDUNDANT_INSN does. */
1965 update_reg_unused_notes (insn, redundant_insn)
1966 rtx insn, redundant_insn;
1970 for (link = REG_NOTES (insn); link; link = next)
1972 next = XEXP (link, 1);
1974 if (REG_NOTE_KIND (link) != REG_UNUSED
1975 || GET_CODE (XEXP (link, 0)) != REG)
1978 if (! find_regno_note (redundant_insn, REG_UNUSED,
1979 REGNO (XEXP (link, 0))))
1980 remove_note (insn, link);
1984 /* Scan a function looking for insns that need a delay slot and find insns to
1985 put into the delay slot.
1987 NON_JUMPS_P is non-zero if we are to only try to fill non-jump insns (such
1988 as calls). We do these first since we don't want jump insns (that are
1989 easier to fill) to get the only insns that could be used for non-jump insns.
1990 When it is zero, only try to fill JUMP_INSNs.
1992 When slots are filled in this manner, the insns (including the
1993 delay_insn) are put together in a SEQUENCE rtx. In this fashion,
1994 it is possible to tell whether a delay slot has really been filled
1995 or not. `final' knows how to deal with this, by communicating
1996 through FINAL_SEQUENCE. */
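/* For example (a sketch only, not the output of any particular port), a
   call with one filled slot is repackaged by emit_delay_sequence as

	(insn (sequence [
	    (call_insn ...)		;; the insn needing the slot
	    (insn ...)			;; the insn chosen for the slot
	]))

   and `final' recognizes the SEQUENCE through FINAL_SEQUENCE.  */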
1999 fill_simple_delay_slots (non_jumps_p)
2002 register rtx insn, pat, trial, next_trial;
2004 int num_unfilled_slots = unfilled_slots_next - unfilled_slots_base;
2005 struct resources needed, set;
2006 int slots_to_fill, slots_filled;
2009 for (i = 0; i < num_unfilled_slots; i++)
2012 /* Get the next insn to fill. If it has already had any slots assigned,
2013 we can't do anything with it. Maybe we'll improve this later. */
2015 insn = unfilled_slots_base[i];
2017 || INSN_DELETED_P (insn)
2018 || (GET_CODE (insn) == INSN
2019 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2020 || (GET_CODE (insn) == JUMP_INSN && non_jumps_p)
2021 || (GET_CODE (insn) != JUMP_INSN && ! non_jumps_p))
2024 if (GET_CODE (insn) == JUMP_INSN)
2025 flags = get_jump_flags (insn, JUMP_LABEL (insn));
2027 flags = get_jump_flags (insn, NULL_RTX);
2028 slots_to_fill = num_delay_slots (insn);
2030 /* Some machine descriptions have defined instructions to have
2031 delay slots only in certain circumstances which may depend on
2032 nearby insns (which change due to reorg's actions).
2034 For example, the PA port normally has delay slots for unconditional jumps.
2037 However, the PA port claims such jumps do not have a delay slot
2038 if they are immediate successors of certain CALL_INSNs. This
2039 allows the port to favor filling the delay slot of the call with
2040 the unconditional jump. */
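      /* E.g. (hypothetical sketch):

		call _bar
		b _baz

	 such a port reports zero slots for the `b' so that the jump itself
	 can instead be placed in the delay slot of the call.  */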
2041 if (slots_to_fill == 0)
2044 /* This insn needs, or can use, some delay slots. SLOTS_TO_FILL
2045 says how many. After initialization, first try optimizing
2047 call _foo		call _foo
2048 nop			add %o7,.-L1,%o7
2049 b,a L1
2050 nop
2052 If this case applies, the delay slot of the call is filled with
2053 the unconditional jump. This is done first to avoid having the
2054 delay slot of the call filled in the backward scan. Also, since
2055 the unconditional jump is likely to also have a delay slot, that
2056 insn must exist when it is subsequently scanned.
2058 This is tried on each insn with delay slots as some machines
2059 have insns which perform calls, but are not represented as CALL_INSNs. */
2065 if ((trial = next_active_insn (insn))
2066 && GET_CODE (trial) == JUMP_INSN
2067 && simplejump_p (trial)
2068 && eligible_for_delay (insn, slots_filled, trial, flags)
2069 && no_labels_between_p (insn, trial))
2073 delay_list = add_to_delay_list (trial, delay_list);
2075 /* TRIAL may have had its delay slot filled, then unfilled. When
2076 the delay slot is unfilled, TRIAL is placed back on the unfilled
2077 slots obstack. Unfortunately, it is placed on the end of the
2078 obstack, not in its original location. Therefore, we must search
2079 from entry i + 1 to the end of the unfilled slots obstack to
2080 try and find TRIAL. */
2081 tmp = &unfilled_slots_base[i + 1];
2082 while (*tmp != trial && tmp != unfilled_slots_next)
2085 /* Remove the unconditional jump from consideration for delay slot
2086 filling and unthread it. */
2090 rtx next = NEXT_INSN (trial);
2091 rtx prev = PREV_INSN (trial);
2093 NEXT_INSN (prev) = next;
2095 PREV_INSN (next) = prev;
2099 /* Now, scan backwards from the insn to search for a potential
2100 delay-slot candidate. Stop searching when a label or jump is hit.
2102 For each candidate, if it is to go into the delay slot (moved
2103 forward in execution sequence), it must not need or set any resources
2104 that were set by later insns and must not set any resources that
2105 are needed for those insns.
2107 The delay slot insn itself sets resources unless it is a call
2108 (in which case the called routine, not the insn itself, is doing the setting). */
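      /* For instance (a hypothetical sketch):

		lw r4,0(r5)		;; found by the backward scan
		call _foo		;; INSN needing a slot

	 can become

		call _foo
		lw r4,0(r5)		;; now in the delay slot

	 provided the load neither references nor sets anything set or
	 needed by the insns it moves across.  */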
2111 if (slots_filled < slots_to_fill)
2113 CLEAR_RESOURCE (&needed);
2114 CLEAR_RESOURCE (&set);
2115 mark_set_resources (insn, &set, 0, 0);
2116 mark_referenced_resources (insn, &needed, 0);
2118 for (trial = prev_nonnote_insn (insn); ! stop_search_p (trial, 1);
2121 next_trial = prev_nonnote_insn (trial);
2123 /* This must be an INSN or CALL_INSN. */
2124 pat = PATTERN (trial);
2126 /* USE and CLOBBER at this level are just for flow; ignore them. */
2127 if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
2130 /* Check for resource conflict first, to avoid unnecessary splitting. */
2132 if (! insn_references_resource_p (trial, &set, 1)
2133 && ! insn_sets_resource_p (trial, &set, 1)
2134 && ! insn_sets_resource_p (trial, &needed, 1)
2136 /* Can't separate set of cc0 from its use. */
2137 && ! (reg_mentioned_p (cc0_rtx, pat)
2138 && ! sets_cc0_p (pat))
2142 trial = try_split (pat, trial, 1);
2143 next_trial = prev_nonnote_insn (trial);
2144 if (eligible_for_delay (insn, slots_filled, trial, flags))
2146 /* In this case, we are searching backward, so if we
2147 find insns to put on the delay list, we want
2148 to put them at the head, rather than the
2149 tail, of the list. */
2151 update_reg_dead_notes (trial, insn);
2152 delay_list = gen_rtx_INSN_LIST (VOIDmode,
2154 update_block (trial, trial);
2155 delete_insn (trial);
2156 if (slots_to_fill == ++slots_filled)
2162 mark_set_resources (trial, &set, 0, 1);
2163 mark_referenced_resources (trial, &needed, 1);
2167 /* If all needed slots haven't been filled, we come here. */
2169 /* Try to optimize the case of jumping around a single insn. */
2170 #if defined(ANNUL_IFFALSE_SLOTS) || defined(ANNUL_IFTRUE_SLOTS)
2171 if (slots_filled != slots_to_fill
2173 && GET_CODE (insn) == JUMP_INSN
2174 && (condjump_p (insn) || condjump_in_parallel_p (insn)))
2176 delay_list = optimize_skip (insn);
2182 /* Try to get insns from beyond the insn needing the delay slot.
2183 These insns can neither set nor reference resources set in insns being
2184 skipped, cannot set resources in the insn being skipped, and, if this
2185 is a CALL_INSN (or a CALL_INSN is passed), cannot trap (because the
2186 call might not return).
2188 There used to be code which continued past the target label if
2189 we saw all uses of the target label. This code did not work,
2190 because it failed to account for some instructions which were
2191 both annulled and marked as from the target. This can happen as a
2192 result of optimize_skip. Since this code was redundant with
2193 fill_eager_delay_slots anyway, it was just deleted. */
2195 if (slots_filled != slots_to_fill
2196 && (GET_CODE (insn) != JUMP_INSN
2197 || ((condjump_p (insn) || condjump_in_parallel_p (insn))
2198 && ! simplejump_p (insn)
2199 && JUMP_LABEL (insn) != 0)))
2202 int maybe_never = 0;
2203 struct resources needed_at_jump;
2205 CLEAR_RESOURCE (&needed);
2206 CLEAR_RESOURCE (&set);
2208 if (GET_CODE (insn) == CALL_INSN)
2210 mark_set_resources (insn, &set, 0, 1);
2211 mark_referenced_resources (insn, &needed, 1);
2216 mark_set_resources (insn, &set, 0, 1);
2217 mark_referenced_resources (insn, &needed, 1);
2218 if (GET_CODE (insn) == JUMP_INSN)
2219 target = JUMP_LABEL (insn);
2222 for (trial = next_nonnote_insn (insn); trial; trial = next_trial)
2224 rtx pat, trial_delay;
2226 next_trial = next_nonnote_insn (trial);
2228 if (GET_CODE (trial) == CODE_LABEL
2229 || GET_CODE (trial) == BARRIER)
2232 /* We must have an INSN, JUMP_INSN, or CALL_INSN. */
2233 pat = PATTERN (trial);
2235 /* Stand-alone USE and CLOBBER are just for flow. */
2236 if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
2239 /* If this already has filled delay slots, get the insn needing the delay slots. */
2241 if (GET_CODE (pat) == SEQUENCE)
2242 trial_delay = XVECEXP (pat, 0, 0);
2244 trial_delay = trial;
2246 /* If this is a jump insn to our target, indicate that we have
2247 seen another jump to it. If we aren't handling a conditional
2248 jump, stop our search. Otherwise, compute the needs at its
2249 target and add them to NEEDED. */
2250 if (GET_CODE (trial_delay) == JUMP_INSN)
2254 else if (JUMP_LABEL (trial_delay) != target)
2257 next_active_insn (JUMP_LABEL (trial_delay));
2259 mark_target_live_regs (get_insns (), ninsn,
2261 needed.memory |= needed_at_jump.memory;
2262 needed.unch_memory |= needed_at_jump.unch_memory;
2263 IOR_HARD_REG_SET (needed.regs, needed_at_jump.regs);
2267 /* See if we have a resource problem before we try to split. */
2270 && GET_CODE (pat) != SEQUENCE
2271 && ! insn_references_resource_p (trial, &set, 1)
2272 && ! insn_sets_resource_p (trial, &set, 1)
2273 && ! insn_sets_resource_p (trial, &needed, 1)
2275 && ! (reg_mentioned_p (cc0_rtx, pat) && ! sets_cc0_p (pat))
2277 && ! (maybe_never && may_trap_p (pat))
2278 && (trial = try_split (pat, trial, 0))
2279 && eligible_for_delay (insn, slots_filled, trial, flags))
2281 next_trial = next_nonnote_insn (trial);
2282 delay_list = add_to_delay_list (trial, delay_list);
2285 if (reg_mentioned_p (cc0_rtx, pat))
2286 link_cc0_insns (trial);
2289 delete_insn (trial);
2290 if (slots_to_fill == ++slots_filled)
2295 mark_set_resources (trial, &set, 0, 1);
2296 mark_referenced_resources (trial, &needed, 1);
2298 /* Ensure we don't put insns between the setting of cc and the
2299 comparison by moving a setting of cc into an earlier delay
2300 slot since these insns could clobber the condition code. */
2303 /* If this is a call or jump, we might not get here. */
2304 if (GET_CODE (trial_delay) == CALL_INSN
2305 || GET_CODE (trial_delay) == JUMP_INSN)
2309 /* If there are slots left to fill and our search was stopped by an
2310 unconditional branch, try the insn at the branch target. We can
2311 redirect the branch if it works.
2313 Don't do this if the insn at the branch target is a branch. */
2314 if (slots_to_fill != slots_filled
2316 && GET_CODE (trial) == JUMP_INSN
2317 && simplejump_p (trial)
2318 && (target == 0 || JUMP_LABEL (trial) == target)
2319 && (next_trial = next_active_insn (JUMP_LABEL (trial))) != 0
2320 && ! (GET_CODE (next_trial) == INSN
2321 && GET_CODE (PATTERN (next_trial)) == SEQUENCE)
2322 && GET_CODE (next_trial) != JUMP_INSN
2323 && ! insn_references_resource_p (next_trial, &set, 1)
2324 && ! insn_sets_resource_p (next_trial, &set, 1)
2325 && ! insn_sets_resource_p (next_trial, &needed, 1)
2327 && ! reg_mentioned_p (cc0_rtx, PATTERN (next_trial))
2329 && ! (maybe_never && may_trap_p (PATTERN (next_trial)))
2330 && (next_trial = try_split (PATTERN (next_trial), next_trial, 0))
2331 && eligible_for_delay (insn, slots_filled, next_trial, flags))
2333 rtx new_label = next_active_insn (next_trial);
2336 new_label = get_label_before (new_label);
2338 new_label = find_end_label ();
2341 = add_to_delay_list (copy_rtx (next_trial), delay_list);
2343 reorg_redirect_jump (trial, new_label);
2345 /* If we merged because we both jumped to the same place,
2346 redirect the original insn also. */
2348 reorg_redirect_jump (insn, new_label);
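	  /* Sketch of the redirection above (hypothetical labels):

		insn			;; wants a delay slot
		...
		b L1			;; TRIAL stopped the forward scan
	     L1: X
		Y

	     A copy of X goes into the slot of `insn', and TRIAL is
	     redirected to a new label before Y so X is not run twice.  */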
2352 /* If this is an unconditional jump, then try to get insns from the
2353 target of the jump. */
2354 if (GET_CODE (insn) == JUMP_INSN
2355 && simplejump_p (insn)
2356 && slots_filled != slots_to_fill)
2358 = fill_slots_from_thread (insn, const_true_rtx,
2359 next_active_insn (JUMP_LABEL (insn)),
2361 own_thread_p (JUMP_LABEL (insn),
2362 JUMP_LABEL (insn), 0),
2363 slots_to_fill, &slots_filled,
2367 unfilled_slots_base[i]
2368 = emit_delay_sequence (insn, delay_list, slots_filled);
2370 if (slots_to_fill == slots_filled)
2371 unfilled_slots_base[i] = 0;
2373 note_delay_statistics (slots_filled, 0);
2376 #ifdef DELAY_SLOTS_FOR_EPILOGUE
2377 /* See if the epilogue needs any delay slots. Try to fill them if so.
2378 The only thing we can do is scan backwards from the end of the
2379 function. If we did this in a previous pass, it is incorrect to do it again. */
2381 if (current_function_epilogue_delay_list)
2384 slots_to_fill = DELAY_SLOTS_FOR_EPILOGUE;
2385 if (slots_to_fill == 0)
2389 CLEAR_RESOURCE (&set);
2391 /* The frame pointer and stack pointer are needed at the beginning of
2392 the epilogue, so instructions setting them cannot be put in the
2393 epilogue delay slot. However, everything else needed at function
2394 end is safe, so we don't want to use end_of_function_needs here. */
2395 CLEAR_RESOURCE (&needed);
2396 if (frame_pointer_needed)
2398 SET_HARD_REG_BIT (needed.regs, FRAME_POINTER_REGNUM);
2399 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
2400 SET_HARD_REG_BIT (needed.regs, HARD_FRAME_POINTER_REGNUM);
2402 #ifdef EXIT_IGNORE_STACK
2403 if (! EXIT_IGNORE_STACK
2404 || current_function_sp_is_unchanging)
2406 SET_HARD_REG_BIT (needed.regs, STACK_POINTER_REGNUM);
2409 SET_HARD_REG_BIT (needed.regs, STACK_POINTER_REGNUM);
2411 #ifdef EPILOGUE_USES
2412 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2414 if (EPILOGUE_USES (i))
2415 SET_HARD_REG_BIT (needed.regs, i);
2419 for (trial = get_last_insn (); ! stop_search_p (trial, 1);
2420 trial = PREV_INSN (trial))
2422 if (GET_CODE (trial) == NOTE)
2424 pat = PATTERN (trial);
2425 if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
2428 if (! insn_references_resource_p (trial, &set, 1)
2429 && ! insn_sets_resource_p (trial, &needed, 1)
2430 && ! insn_sets_resource_p (trial, &set, 1)
2432 /* Don't want to mess with cc0 here. */
2433 && ! reg_mentioned_p (cc0_rtx, pat)
2437 trial = try_split (pat, trial, 1);
2438 if (ELIGIBLE_FOR_EPILOGUE_DELAY (trial, slots_filled))
2440 /* Here as well we are searching backward, so put the
2441 insns we find on the head of the list. */
2443 current_function_epilogue_delay_list
2444 = gen_rtx_INSN_LIST (VOIDmode, trial,
2445 current_function_epilogue_delay_list);
2446 mark_end_of_function_resources (trial, 1);
2447 update_block (trial, trial);
2448 delete_insn (trial);
2450 /* Clear deleted bit so final.c will output the insn. */
2451 INSN_DELETED_P (trial) = 0;
2453 if (slots_to_fill == ++slots_filled)
2459 mark_set_resources (trial, &set, 0, 1);
2460 mark_referenced_resources (trial, &needed, 1);
2463 note_delay_statistics (slots_filled, 0);
2467 /* Try to find insns to place in delay slots.
2469 INSN is the jump needing SLOTS_TO_FILL delay slots. It tests CONDITION
2470 or is an unconditional branch if CONDITION is const_true_rtx.
2471 *PSLOTS_FILLED is updated with the number of slots that we have filled.
2473 THREAD is a flow-of-control thread: either the insns to be executed if the
2474 branch is true or those executed if it is false; THREAD_IF_TRUE says which.
2476 OPPOSITE_THREAD is the thread in the opposite direction. It is used
2477 to see if any potential delay slot insns set things needed there.
2479 LIKELY is non-zero if it is extremely likely that the branch will be
2480 taken and THREAD_IF_TRUE is set. This is used for the branch at the
2481 end of a loop back up to the top.
2483 OWN_THREAD and OWN_OPPOSITE_THREAD are true if we are the only user of the
2484 thread. I.e., it is the fallthrough code of our jump or the target of the
2485 jump when we are the only jump going there.
2487 If OWN_THREAD is false, it must be the "true" thread of a jump. In that
2488 case, we can only take insns from the head of the thread for our delay
2489 slot. We then adjust the jump to point after the insns we have taken. */
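/* As a concrete sketch (hypothetical code), for

	beq r1,r0,L2		;; INSN
	...			;; fallthrough thread
   L2:	...			;; target thread

   filling from the target thread with THREAD_IF_TRUE set generally needs
   an annulling branch or insns that are also safe on the fallthrough
   path; the fallthrough code is then OPPOSITE_THREAD.  */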
2492 fill_slots_from_thread (insn, condition, thread, opposite_thread, likely,
2493 thread_if_true, own_thread,
2494 slots_to_fill, pslots_filled, delay_list)
2497 rtx thread, opposite_thread;
2501 int slots_to_fill, *pslots_filled;
2505 struct resources opposite_needed, set, needed;
2511 /* Validate our arguments. */
2512 if ((condition == const_true_rtx && ! thread_if_true)
2513 || (! own_thread && ! thread_if_true))
2516 flags = get_jump_flags (insn, JUMP_LABEL (insn));
2518 /* If our thread is the end of subroutine, we can't get any delay insns from that. */
2523 /* If this is an unconditional branch, nothing is needed at the
2524 opposite thread. Otherwise, compute what is needed there. */
2525 if (condition == const_true_rtx)
2526 CLEAR_RESOURCE (&opposite_needed);
2528 mark_target_live_regs (get_insns (), opposite_thread, &opposite_needed);
2530 /* If the insn at THREAD can be split, do it here to avoid having to
2531 update THREAD and NEW_THREAD if it is done in the loop below. Also
2532 initialize NEW_THREAD. */
2534 new_thread = thread = try_split (PATTERN (thread), thread, 0);
2536 /* Scan insns at THREAD. We are looking for an insn that can be removed
2537 from THREAD (it neither sets nor references resources that were set
2538 ahead of it and it doesn't set anything needed by the insns ahead of
2539 it) and that either can be placed in an annulling insn or isn't
2540 needed at OPPOSITE_THREAD. */
2542 CLEAR_RESOURCE (&needed);
2543 CLEAR_RESOURCE (&set);
2545 /* If we do not own this thread, we must stop as soon as we find
2546 something that we can't put in a delay slot, since all we can do
2547 is branch into THREAD at a later point. Therefore, labels stop
2548 the search if this is not the `true' thread. */
2550 for (trial = thread;
2551 ! stop_search_p (trial, ! thread_if_true) && (! lose || own_thread);
2552 trial = next_nonnote_insn (trial))
2556 /* If we have passed a label, we no longer own this thread. */
2557 if (GET_CODE (trial) == CODE_LABEL)
2563 pat = PATTERN (trial);
2564 if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
2567 /* If TRIAL conflicts with the insns ahead of it, we lose. Also,
2568 don't separate or copy insns that set and use CC0. */
2569 if (! insn_references_resource_p (trial, &set, 1)
2570 && ! insn_sets_resource_p (trial, &set, 1)
2571 && ! insn_sets_resource_p (trial, &needed, 1)
2573 && ! (reg_mentioned_p (cc0_rtx, pat)
2574 && (! own_thread || ! sets_cc0_p (pat)))
2580 /* If TRIAL is redundant with some insn before INSN, we don't
2581 actually need to add it to the delay list; we can merely pretend we did.
2583 if ((prior_insn = redundant_insn (trial, insn, delay_list)))
2585 fix_reg_dead_note (prior_insn, insn);
2588 update_block (trial, thread);
2589 if (trial == thread)
2591 thread = next_active_insn (thread);
2592 if (new_thread == trial)
2593 new_thread = thread;
2596 delete_insn (trial);
2600 update_reg_unused_notes (prior_insn, trial);
2601 new_thread = next_active_insn (trial);
2607 /* There are two ways we can win: If TRIAL doesn't set anything
2608 needed at the opposite thread and can't trap, or if it can
2609 go into an annulled delay slot. */
2611 && (condition == const_true_rtx
2612 || (! insn_sets_resource_p (trial, &opposite_needed, 1)
2613 && ! may_trap_p (pat))))
2616 trial = try_split (pat, trial, 0);
2617 if (new_thread == old_trial)
2619 if (thread == old_trial)
2621 pat = PATTERN (trial);
2622 if (eligible_for_delay (insn, *pslots_filled, trial, flags))
2626 #ifdef ANNUL_IFTRUE_SLOTS
2629 #ifdef ANNUL_IFFALSE_SLOTS
2635 trial = try_split (pat, trial, 0);
2636 if (new_thread == old_trial)
2638 if (thread == old_trial)
2640 pat = PATTERN (trial);
2641 if ((must_annul || delay_list == NULL) && (thread_if_true
2642 ? check_annul_list_true_false (0, delay_list)
2643 && eligible_for_annul_false (insn, *pslots_filled, trial, flags)
2644 : check_annul_list_true_false (1, delay_list)
2645 && eligible_for_annul_true (insn, *pslots_filled, trial, flags)))
2653 if (reg_mentioned_p (cc0_rtx, pat))
2654 link_cc0_insns (trial);
2657 /* If we own this thread, delete the insn. If this is the
2658 destination of a branch, show that a basic block status
2659 may have been updated. In any case, mark the new
2660 starting point of this thread. */
2663 update_block (trial, thread);
2664 if (trial == thread)
2666 thread = next_active_insn (thread);
2667 if (new_thread == trial)
2668 new_thread = thread;
2670 delete_insn (trial);
2673 new_thread = next_active_insn (trial);
2675 temp = own_thread ? trial : copy_rtx (trial);
2677 INSN_FROM_TARGET_P (temp) = 1;
2679 delay_list = add_to_delay_list (temp, delay_list);
2681 if (slots_to_fill == ++(*pslots_filled))
2683 /* Even though we have filled all the slots, we
2684 may be branching to a location that has a
2685 redundant insn. Skip any if so. */
2686 while (new_thread && ! own_thread
2687 && ! insn_sets_resource_p (new_thread, &set, 1)
2688 && ! insn_sets_resource_p (new_thread, &needed, 1)
2689 && ! insn_references_resource_p (new_thread,
2692 = redundant_insn (new_thread, insn,
2695 /* We know we do not own the thread, so no need
2696 to call update_block and delete_insn. */
2697 fix_reg_dead_note (prior_insn, insn);
2698 update_reg_unused_notes (prior_insn, new_thread);
2699 new_thread = next_active_insn (new_thread);
2709 /* This insn can't go into a delay slot. */
2711 mark_set_resources (trial, &set, 0, 1);
2712 mark_referenced_resources (trial, &needed, 1);
2714 /* Ensure we don't put insns between the setting of cc and the comparison
2715 by moving a setting of cc into an earlier delay slot since these insns
2716 could clobber the condition code. */
2719 /* If this insn is a register-register copy and the next insn has
2720 a use of our destination, change it to use our source. That way,
2721 it will become a candidate for our delay slot the next time
2722 through this loop. This case occurs commonly in loops that branch back to the head of the loop.
2725 We could check for more complex cases than those tested below,
2726 but it doesn't seem worth it. It might also be a good idea to try
2727 to swap the two insns. That might do better.
2729 We can't do this if the next insn modifies our destination, because
2730 that would make the replacement into the insn invalid. We also can't
2731 do this if it modifies our source, because it might be an earlyclobber
2732 operand. This latter test also prevents updating the contents of a PRE_INC. */
2735 if (GET_CODE (trial) == INSN && GET_CODE (pat) == SET
2736 && GET_CODE (SET_SRC (pat)) == REG
2737 && GET_CODE (SET_DEST (pat)) == REG)
2739 rtx next = next_nonnote_insn (trial);
2741 if (next && GET_CODE (next) == INSN
2742 && GET_CODE (PATTERN (next)) != USE
2743 && ! reg_set_p (SET_DEST (pat), next)
2744 && ! reg_set_p (SET_SRC (pat), next)
2745 && reg_referenced_p (SET_DEST (pat), PATTERN (next)))
2746 validate_replace_rtx (SET_DEST (pat), SET_SRC (pat), next);
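	  /* E.g. (hypothetical rtl):

		TRIAL:	(set (reg 1) (reg 2))
		NEXT:	(set (reg 3) (plus (reg 1) (const_int 4)))

	     NEXT is rewritten to use (reg 2), so TRIAL can become a
	     delay-slot candidate on a later iteration.  */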
2750 /* If we stopped on a branch insn that has delay slots, see if we can
2751 steal some of the insns in those slots. */
2752 if (trial && GET_CODE (trial) == INSN
2753 && GET_CODE (PATTERN (trial)) == SEQUENCE
2754 && GET_CODE (XVECEXP (PATTERN (trial), 0, 0)) == JUMP_INSN)
2756 /* If this is the `true' thread, we will want to follow the jump,
2757 so we can only do this if we have taken everything up to here. */
2758 if (thread_if_true && trial == new_thread)
2760 = steal_delay_list_from_target (insn, condition, PATTERN (trial),
2761 delay_list, &set, &needed,
2762 &opposite_needed, slots_to_fill,
2763 pslots_filled, &must_annul,
2765 else if (! thread_if_true)
2767 = steal_delay_list_from_fallthrough (insn, condition,
2769 delay_list, &set, &needed,
2770 &opposite_needed, slots_to_fill,
2771 pslots_filled, &must_annul);
2774 /* If we haven't found anything for this delay slot and it is very
2775 likely that the branch will be taken, see if the insn at our target
2776 increments or decrements a register with an increment that does not
2777 depend on the destination register. If so, try to place the opposite
2778 arithmetic insn after the jump insn and put the arithmetic insn in the
2779 delay slot. If we can't do this, return. */
2780 if (delay_list == 0 && likely && new_thread
2781 && GET_CODE (new_thread) == INSN
2782 && GET_CODE (PATTERN (new_thread)) != ASM_INPUT
2783 && asm_noperands (PATTERN (new_thread)) < 0)
2785 rtx pat = PATTERN (new_thread);
2790 pat = PATTERN (trial);
2792 if (GET_CODE (trial) != INSN || GET_CODE (pat) != SET
2793 || ! eligible_for_delay (insn, 0, trial, flags))
2796 dest = SET_DEST (pat), src = SET_SRC (pat);
2797 if ((GET_CODE (src) == PLUS || GET_CODE (src) == MINUS)
2798 && rtx_equal_p (XEXP (src, 0), dest)
2799 && ! reg_overlap_mentioned_p (dest, XEXP (src, 1)))
2801 rtx other = XEXP (src, 1);
2805 /* If this is a constant adjustment, use the same code with
2806 the negated constant. Otherwise, reverse the sense of the arithmetic. */
2808 if (GET_CODE (other) == CONST_INT)
2809 new_arith = gen_rtx_fmt_ee (GET_CODE (src), GET_MODE (src), dest,
2810 negate_rtx (GET_MODE (src), other));
2812 new_arith = gen_rtx_fmt_ee (GET_CODE (src) == PLUS ? MINUS : PLUS,
2813 GET_MODE (src), dest, other);
2815 ninsn = emit_insn_after (gen_rtx_SET (VOIDmode, dest, new_arith),
2818 if (recog_memoized (ninsn) < 0
2819 || (extract_insn (ninsn), ! constrain_operands (1)))
2821 delete_insn (ninsn);
2827 update_block (trial, thread);
2828 if (trial == thread)
2830 thread = next_active_insn (thread);
2831 if (new_thread == trial)
2832 new_thread = thread;
2834 delete_insn (trial);
2837 new_thread = next_active_insn (trial);
2839 ninsn = own_thread ? trial : copy_rtx (trial);
2841 INSN_FROM_TARGET_P (ninsn) = 1;
2843 delay_list = add_to_delay_list (ninsn, NULL_RTX);
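      /* Sketch of the transformation above (hypothetical code):

	 L1:	add r4,r4,1		;; insn at the likely-taken target
		...
		bne r9,L1		;; slot still empty

	 becomes

	 L1':	...
		bne r9,L1'
		add r4,r4,1		;; delay slot: runs on both paths
		sub r4,r4,1		;; after the jump: undoes the add
					;; on the fallthrough path  */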
2848 if (delay_list && must_annul)
2849 INSN_ANNULLED_BRANCH_P (insn) = 1;
2851 /* If we are to branch into the middle of this thread, find an appropriate
2852 label or make a new one if none, and redirect INSN to it. If we hit the
2853 end of the function, use the end-of-function label. */
2854 if (new_thread != thread)
2858 if (! thread_if_true)
2861 if (new_thread && GET_CODE (new_thread) == JUMP_INSN
2862 && (simplejump_p (new_thread)
2863 || GET_CODE (PATTERN (new_thread)) == RETURN)
2864 && redirect_with_delay_list_safe_p (insn,
2865 JUMP_LABEL (new_thread),
2867 new_thread = follow_jumps (JUMP_LABEL (new_thread));
2869 if (new_thread == 0)
2870 label = find_end_label ();
2871 else if (GET_CODE (new_thread) == CODE_LABEL)
2874 label = get_label_before (new_thread);
2876 reorg_redirect_jump (insn, label);
2882 /* Make another attempt to find insns to place in delay slots.
2884 We previously looked for insns located in front of the delay insn
2885 and, for non-jump delay insns, located behind the delay insn.
2887 Here only try to schedule jump insns and try to move insns from either
2888 the target or the following insns into the delay slot. If annulling is
2889 supported, we will be likely to do this. Otherwise, we can do this only if safe. */
2893 fill_eager_delay_slots ()
2897 int num_unfilled_slots = unfilled_slots_next - unfilled_slots_base;
2899 for (i = 0; i < num_unfilled_slots; i++)
2902 rtx target_label, insn_at_target, fallthrough_insn;
2905 int own_fallthrough;
2906 int prediction, slots_to_fill, slots_filled;
2908 insn = unfilled_slots_base[i];
2910 || INSN_DELETED_P (insn)
2911 || GET_CODE (insn) != JUMP_INSN
2912 || ! (condjump_p (insn) || condjump_in_parallel_p (insn)))
2915 slots_to_fill = num_delay_slots (insn);
2916 /* Some machine descriptions have defined instructions to have
2917 delay slots only in certain circumstances which may depend on
2918 nearby insns (which change due to reorg's actions).
2920 For example, the PA port normally has delay slots for unconditional jumps.
2923 However, the PA port claims such jumps do not have a delay slot
2924 if they are immediate successors of certain CALL_INSNs. This
2925 allows the port to favor filling the delay slot of the call with
2926 the unconditional jump. */
2927 if (slots_to_fill == 0)
2931 target_label = JUMP_LABEL (insn);
2932 condition = get_branch_condition (insn, target_label);
2937 /* Get the next active fallthrough and target insns and see if we own
2938 them. Then see whether the branch is likely true. We don't need
2939 to do a lot of this for unconditional branches. */
2941 insn_at_target = next_active_insn (target_label);
2942 own_target = own_thread_p (target_label, target_label, 0);
2944 if (condition == const_true_rtx)
2946 own_fallthrough = 0;
2947 fallthrough_insn = 0;
2952 fallthrough_insn = next_active_insn (insn);
2953 own_fallthrough = own_thread_p (NEXT_INSN (insn), NULL_RTX, 1);
2954 prediction = mostly_true_jump (insn, condition);
2957 /* If this insn is expected to branch, first try to get insns from our
2958 target, then our fallthrough insns. If it is not expected to branch,
2959 try the other order. */
2964 = fill_slots_from_thread (insn, condition, insn_at_target,
2965 fallthrough_insn, prediction == 2, 1,
2967 slots_to_fill, &slots_filled, delay_list);
2969 if (delay_list == 0 && own_fallthrough)
2971 /* Even though we didn't find anything for delay slots,
2972 we might have found a redundant insn which we deleted
2973 from the thread that was filled. So we have to recompute
2974 the next insn at the target. */
2975 target_label = JUMP_LABEL (insn);
2976 insn_at_target = next_active_insn (target_label);
2979 = fill_slots_from_thread (insn, condition, fallthrough_insn,
2980 insn_at_target, 0, 0,
2982 slots_to_fill, &slots_filled,
2988 if (own_fallthrough)
2990 = fill_slots_from_thread (insn, condition, fallthrough_insn,
2991 insn_at_target, 0, 0,
2993 slots_to_fill, &slots_filled,
2996 if (delay_list == 0)
2998 = fill_slots_from_thread (insn, condition, insn_at_target,
2999 next_active_insn (insn), 0, 1,
3001 slots_to_fill, &slots_filled,
3006 unfilled_slots_base[i]
3007 = emit_delay_sequence (insn, delay_list, slots_filled);
3009 if (slots_to_fill == slots_filled)
3010 unfilled_slots_base[i] = 0;
3012 note_delay_statistics (slots_filled, 1);
3016 /* Once we have tried two ways to fill a delay slot, make a pass over the
3017 code to try to improve the results and to do such things as more jump threading. */
3021 relax_delay_slots (first)
3024 register rtx insn, next, pat;
3025 register rtx trial, delay_insn, target_label;
3027 /* Look at every JUMP_INSN and see if we can improve it. */
3028 for (insn = first; insn; insn = next)
3032 next = next_active_insn (insn);
3034 /* If this is a jump insn, see if it now jumps to a jump, jumps to
3035 the next insn, or jumps to a label that is not the last of a
3036 group of consecutive labels. */
3037 if (GET_CODE (insn) == JUMP_INSN
3038 && (condjump_p (insn) || condjump_in_parallel_p (insn))
3039 && (target_label = JUMP_LABEL (insn)) != 0)
3041 target_label = follow_jumps (target_label);
3042 target_label = prev_label (next_active_insn (target_label));
3044 if (target_label == 0)
3045 target_label = find_end_label ();
3047 if (next_active_insn (target_label) == next
3048 && ! condjump_in_parallel_p (insn))
3054 if (target_label != JUMP_LABEL (insn))
3055 reorg_redirect_jump (insn, target_label);
3057 /* See if this jump branches around an unconditional jump.
3058 If so, invert this jump and point it to the target of the
3060 if (next && GET_CODE (next) == JUMP_INSN
3061 && (simplejump_p (next) || GET_CODE (PATTERN (next)) == RETURN)
3062 && next_active_insn (target_label) == next_active_insn (next)
3063 && no_labels_between_p (insn, next))
3065 rtx label = JUMP_LABEL (next);
3067 /* Be careful how we do this to avoid deleting code or
3068 labels that are momentarily dead. See similar optimization in jump.c.
3071 We also need to ensure we properly handle the case when
3072 invert_jump fails. */
3074 ++LABEL_NUSES (target_label);
3076 ++LABEL_NUSES (label);
3078 if (invert_jump (insn, label))
3085 --LABEL_NUSES (label);
3087 if (--LABEL_NUSES (target_label) == 0)
3088 delete_insn (target_label);
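	      /* I.e. (hypothetical sketch):

			beq r1,L1	;; INSN
			b L2		;; NEXT
		   L1:	...

		 is inverted to

			bne r1,L2
		   L1:	...

		 after which NEXT, and possibly L1, can be deleted.  */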
3094 /* If this is an unconditional jump and the previous insn is a
3095 conditional jump, try reversing the condition of the previous
3096 insn and swapping our targets. The next pass might be able to fill the slots.
3099 Don't do this if we expect the conditional branch to be true, because
3100 we would then be making the more common case longer. */
3102 if (GET_CODE (insn) == JUMP_INSN
3103 && (simplejump_p (insn) || GET_CODE (PATTERN (insn)) == RETURN)
3104 && (other = prev_active_insn (insn)) != 0
3105 && (condjump_p (other) || condjump_in_parallel_p (other))
3106 && no_labels_between_p (other, insn)
3107 && 0 > mostly_true_jump (other,
3108 get_branch_condition (other,
3109 JUMP_LABEL (other))))
3111 rtx other_target = JUMP_LABEL (other);
3112 target_label = JUMP_LABEL (insn);
3114 /* Increment the count of OTHER_TARGET, so it doesn't get deleted
3115 as we move the label. */
3117 ++LABEL_NUSES (other_target);
3119 if (invert_jump (other, target_label))
3120 reorg_redirect_jump (insn, other_target);
3123 --LABEL_NUSES (other_target);
3126 /* Now look only at cases where we have filled a delay slot. */
3127 if (GET_CODE (insn) != INSN
3128 || GET_CODE (PATTERN (insn)) != SEQUENCE)
3131 pat = PATTERN (insn);
3132 delay_insn = XVECEXP (pat, 0, 0);
3134 /* See if the first insn in the delay slot is redundant with some
3135 previous insn. Remove it from the delay slot if so; then set up
3136 to reprocess this insn. */
3137 if (redundant_insn (XVECEXP (pat, 0, 1), delay_insn, 0))
3139 delete_from_delay_slot (XVECEXP (pat, 0, 1));
3140 next = prev_active_insn (next);
3144 /* See if we have a RETURN insn with a filled delay slot followed
3145 by a RETURN insn with an unfilled delay slot. If so, we can delete
3146 the first RETURN (but not its delay insn). This gives the same
3147 effect in fewer instructions.
3149 Only do so if optimizing for size since this results in slower, but smaller code. */
3152 && GET_CODE (PATTERN (delay_insn)) == RETURN
3154 && GET_CODE (next) == JUMP_INSN
3155 && GET_CODE (PATTERN (next)) == RETURN)
3159 /* Delete the RETURN and just execute the delay list insns.
3161 We do this by deleting the INSN containing the SEQUENCE, then
3162 re-emitting the insns separately, and then deleting the RETURN.
3163 This allows the count of the jump target to be properly decremented. */
3166 /* Clear the from target bit, since these insns are no longer in delay slots. */
3168 for (i = 0; i < XVECLEN (pat, 0); i++)
3169 INSN_FROM_TARGET_P (XVECEXP (pat, 0, i)) = 0;
3171 trial = PREV_INSN (insn);
3173 emit_insn_after (pat, trial);
3174 delete_scheduled_jump (delay_insn);
3178 /* Now look only at the cases where we have a filled JUMP_INSN. */
3179 if (GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) != JUMP_INSN
3180 || ! (condjump_p (XVECEXP (PATTERN (insn), 0, 0))
3181 || condjump_in_parallel_p (XVECEXP (PATTERN (insn), 0, 0))))
3184 target_label = JUMP_LABEL (delay_insn);
3188 /* If this jump goes to another unconditional jump, thread it, but
3189 don't convert a jump into a RETURN here. */
3190 trial = follow_jumps (target_label);
3191 /* We use next_real_insn instead of next_active_insn, so that
3192 the special USE insns emitted by reorg won't be ignored.
3193 If they are ignored, then they will get deleted if target_label
3194 is now unreachable, and that would cause mark_target_live_regs to fail. */
3196 trial = prev_label (next_real_insn (trial));
3197 if (trial == 0 && target_label != 0)
3198 trial = find_end_label ();
3200 if (trial != target_label
3201 && redirect_with_delay_slots_safe_p (delay_insn, trial, insn))
3203 reorg_redirect_jump (delay_insn, trial);
3204 target_label = trial;
3207 /* If the first insn at TARGET_LABEL is redundant with a previous
3208 insn, redirect the jump to the following insn and process again. */
3209 trial = next_active_insn (target_label);
3210 if (trial && GET_CODE (PATTERN (trial)) != SEQUENCE
3211 && redundant_insn (trial, insn, 0))
3215 /* Figure out where to emit the special USE insn so we don't
3216 later incorrectly compute register live/death info. */
3217 tmp = next_active_insn (trial);
3219 tmp = find_end_label ();
3221 /* Insert the special USE insn and update dataflow info. */
3222 update_block (trial, tmp);
3224 /* Now emit a label before the special USE insn, and
3225 redirect our jump to the new label. */
3226 target_label = get_label_before (PREV_INSN (tmp));
3227 reorg_redirect_jump (delay_insn, target_label);
3232 /* Similarly, if it is an unconditional jump with one insn in its
3233 delay list and that insn is redundant, thread the jump. */
3234 if (trial && GET_CODE (PATTERN (trial)) == SEQUENCE
3235 && XVECLEN (PATTERN (trial), 0) == 2
3236 && GET_CODE (XVECEXP (PATTERN (trial), 0, 0)) == JUMP_INSN
3237 && (simplejump_p (XVECEXP (PATTERN (trial), 0, 0))
3238 || GET_CODE (PATTERN (XVECEXP (PATTERN (trial), 0, 0))) == RETURN)
3239 && redundant_insn (XVECEXP (PATTERN (trial), 0, 1), insn, 0))
3241 target_label = JUMP_LABEL (XVECEXP (PATTERN (trial), 0, 0));
3242 if (target_label == 0)
3243 target_label = find_end_label ();
3245 if (redirect_with_delay_slots_safe_p (delay_insn, target_label,
3248 reorg_redirect_jump (delay_insn, target_label);
3255 if (! INSN_ANNULLED_BRANCH_P (delay_insn)
3256 && prev_active_insn (target_label) == insn
3257 && ! condjump_in_parallel_p (delay_insn)
3259 /* If the last insn in the delay slot sets CC0 for some insn,
3260 various code assumes that it is in a delay slot. We could
3261 put it back where it belonged and delete the register notes,
3262 but it doesn't seem worthwhile in this uncommon case. */
3263 && ! find_reg_note (XVECEXP (pat, 0, XVECLEN (pat, 0) - 1),
3264 REG_CC_USER, NULL_RTX)
3270 /* All this insn does is execute its delay list and jump to the
3271 following insn. So delete the jump and just execute the delay list insns.
3274 We do this by deleting the INSN containing the SEQUENCE, then
3275 re-emitting the insns separately, and then deleting the jump.
3276 This allows the count of the jump target to be properly decremented. */
3279 /* Clear the from target bit, since these insns are no longer in delay slots. */
3281 for (i = 0; i < XVECLEN (pat, 0); i++)
3282 INSN_FROM_TARGET_P (XVECEXP (pat, 0, i)) = 0;
3284 trial = PREV_INSN (insn);
3286 emit_insn_after (pat, trial);
3287 delete_scheduled_jump (delay_insn);
3291 /* See if this is an unconditional jump around a single insn which is
3292 identical to the one in its delay slot. In this case, we can just
3293 delete the branch and the insn in its delay slot. */
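   /* I.e. (hypothetical sketch):

	    b L1			;; SEQUENCE: branch with X in its slot
	    X				;; NEXT, identical to the slot insn
       L1:  ...

      Taking the branch executes the slot copy of X and lands at L1;
      deleting the branch (and its slot) instead falls through NEXT to L1,
      with the same effect in less space.  */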
3294 if (next && GET_CODE (next) == INSN
3295 && prev_label (next_active_insn (next)) == target_label
3296 && simplejump_p (insn)
3297 && XVECLEN (pat, 0) == 2
3298 && rtx_equal_p (PATTERN (next), PATTERN (XVECEXP (pat, 0, 1))))
3304 /* See if this jump (with its delay slots) branches around another
3305 jump (without delay slots). If so, invert this jump and point
3306 it to the target of the second jump. We cannot do this for
3307 annulled jumps, though. Again, don't convert a jump to a RETURN here. */
3309 if (! INSN_ANNULLED_BRANCH_P (delay_insn)
3310 && next && GET_CODE (next) == JUMP_INSN
3311 && (simplejump_p (next) || GET_CODE (PATTERN (next)) == RETURN)
3312 && next_active_insn (target_label) == next_active_insn (next)
3313 && no_labels_between_p (insn, next))
3315 rtx label = JUMP_LABEL (next);
3316 rtx old_label = JUMP_LABEL (delay_insn);
3319 label = find_end_label ();
3321 if (redirect_with_delay_slots_safe_p (delay_insn, label, insn))
3323 /* Be careful how we do this to avoid deleting code or labels
3324 that are momentarily dead. See similar optimization in jump.c. */
3327 ++LABEL_NUSES (old_label);
3329 if (invert_jump (delay_insn, label))
3333 /* Must update the INSN_FROM_TARGET_P bits now that
3334 the branch is reversed, so that mark_target_live_regs
3335 will handle the delay slot insn correctly. */
3336 for (i = 1; i < XVECLEN (PATTERN (insn), 0); i++)
3338 rtx slot = XVECEXP (PATTERN (insn), 0, i);
3339 INSN_FROM_TARGET_P (slot) = ! INSN_FROM_TARGET_P (slot);
3346 if (old_label && --LABEL_NUSES (old_label) == 0)
3347 delete_insn (old_label);
3352 /* If we own the thread opposite the way this insn branches, see if we
3353 can merge its delay slots with following insns. */
3354 if (INSN_FROM_TARGET_P (XVECEXP (pat, 0, 1))
3355 && own_thread_p (NEXT_INSN (insn), 0, 1))
3356 try_merge_delay_insns (insn, next);
3357 else if (! INSN_FROM_TARGET_P (XVECEXP (pat, 0, 1))
3358 && own_thread_p (target_label, target_label, 0))
3359 try_merge_delay_insns (insn, next_active_insn (target_label));
3361 /* If we get here, we haven't deleted INSN. But we may have deleted
3362 NEXT, so recompute it. */
3363 next = next_active_insn (insn);
3369 /* Look for filled jumps to the end of function label. We can try to convert
3370 them into RETURN insns if the insns in the delay slot are valid for the RETURN as well. */
3374 make_return_insns (first)
3377 rtx insn, jump_insn, pat;
3378 rtx real_return_label = end_of_function_label;
3381 /* See if there is a RETURN insn in the function other than the one we
3382 made for END_OF_FUNCTION_LABEL. If so, set up anything we can't change
3383 into a RETURN to jump to it. */
3384 for (insn = first; insn; insn = NEXT_INSN (insn))
3385 if (GET_CODE (insn) == JUMP_INSN && GET_CODE (PATTERN (insn)) == RETURN)
3387 real_return_label = get_label_before (insn);
3391 /* Show an extra usage of REAL_RETURN_LABEL so it won't go away if it
3392 was equal to END_OF_FUNCTION_LABEL. */
3393 LABEL_NUSES (real_return_label)++;
3395 /* Clear the list of insns to fill so we can use it. */
3396 obstack_free (&unfilled_slots_obstack, unfilled_firstobj);
3398 for (insn = first; insn; insn = NEXT_INSN (insn))
3402 /* Only look at filled JUMP_INSNs that go to the end of function label. */
3404 if (GET_CODE (insn) != INSN
3405 || GET_CODE (PATTERN (insn)) != SEQUENCE
3406 || GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) != JUMP_INSN
3407 || JUMP_LABEL (XVECEXP (PATTERN (insn), 0, 0)) != end_of_function_label)
3410 pat = PATTERN (insn);
3411 jump_insn = XVECEXP (pat, 0, 0);
3413 /* If we can't make the jump into a RETURN, try to redirect it to the best
3414 RETURN and go on to the next insn. */
3415 if (! reorg_redirect_jump (jump_insn, NULL_RTX))
3417 /* Make sure redirecting the jump will not invalidate the delay slot insns. */
3419 if (redirect_with_delay_slots_safe_p (jump_insn,
3422 reorg_redirect_jump (jump_insn, real_return_label);
3426 /* See if this RETURN can accept the insns currently in its delay slot.
3427 It can if it has more or an equal number of slots and the contents
3428 of each is valid. */
3430 flags = get_jump_flags (jump_insn, JUMP_LABEL (jump_insn));
3431 slots = num_delay_slots (jump_insn);
3432 if (slots >= XVECLEN (pat, 0) - 1)
3434 for (i = 1; i < XVECLEN (pat, 0); i++)
3436 #ifdef ANNUL_IFFALSE_SLOTS
3437 (INSN_ANNULLED_BRANCH_P (jump_insn)
3438 && INSN_FROM_TARGET_P (XVECEXP (pat, 0, i)))
3439 ? eligible_for_annul_false (jump_insn, i - 1,
3440 XVECEXP (pat, 0, i), flags) :
3442 #ifdef ANNUL_IFTRUE_SLOTS
3443 (INSN_ANNULLED_BRANCH_P (jump_insn)
3444 && ! INSN_FROM_TARGET_P (XVECEXP (pat, 0, i)))
3445 ? eligible_for_annul_true (jump_insn, i - 1,
3446 XVECEXP (pat, 0, i), flags) :
3448 eligible_for_delay (jump_insn, i - 1, XVECEXP (pat, 0, i), flags)))
3454 if (i == XVECLEN (pat, 0))
3457 /* We have to do something with this insn. If it is an unconditional
3458 RETURN, delete the SEQUENCE and output the individual insns,
3459 followed by the RETURN. Then set things up so we try to find
3460 insns for its delay slots, if it needs some. */
3461 if (GET_CODE (PATTERN (jump_insn)) == RETURN)
3463 rtx prev = PREV_INSN (insn);
3466 for (i = 1; i < XVECLEN (pat, 0); i++)
3467 prev = emit_insn_after (PATTERN (XVECEXP (pat, 0, i)), prev);
3469 insn = emit_jump_insn_after (PATTERN (jump_insn), prev);
3470 emit_barrier_after (insn);
3473 obstack_ptr_grow (&unfilled_slots_obstack, insn);
3476 /* It is probably more efficient to keep this with its current
3477 delay slot as a branch to a RETURN. */
3478 reorg_redirect_jump (jump_insn, real_return_label);
3481 /* Now delete REAL_RETURN_LABEL if we never used it. Then try to fill any
3482 new delay slots we have created. */
3483 if (--LABEL_NUSES (real_return_label) == 0)
3484 delete_insn (real_return_label);
3486 fill_simple_delay_slots (1);
3487 fill_simple_delay_slots (0);
3491 /* Try to find insns to place in delay slots. */
3494 dbr_schedule (first, file)
3498 rtx insn, next, epilogue_insn = 0;
3501 int old_flag_no_peephole = flag_no_peephole;
3503 /* Execute `final' once in prescan mode to delete any insns that won't be
3504 used. Don't let final try to do any peephole optimization--it will
3505 ruin dataflow information for this pass. */
3507 flag_no_peephole = 1;
3508 final (first, 0, NO_DEBUG, 1, 1);
3509 flag_no_peephole = old_flag_no_peephole;
3512 /* If the current function has no insns other than the prologue and
3513 epilogue, then do not try to fill any delay slots. */
3514 if (n_basic_blocks == 0)
3517 /* Find the highest INSN_UID and allocate and initialize our map from
3518 INSN_UID's to position in code. */
3519 for (max_uid = 0, insn = first; insn; insn = NEXT_INSN (insn))
3521 if (INSN_UID (insn) > max_uid)
3522 max_uid = INSN_UID (insn);
3523 if (GET_CODE (insn) == NOTE
3524 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_EPILOGUE_BEG)
3525 epilogue_insn = insn;
3528 uid_to_ruid = (int *) alloca ((max_uid + 1) * sizeof (int));
3529 for (i = 0, insn = first; insn; i++, insn = NEXT_INSN (insn))
3530 uid_to_ruid[INSN_UID (insn)] = i;
3532 /* Initialize the list of insns that need filling. */
3533 if (unfilled_firstobj == 0)
3535 gcc_obstack_init (&unfilled_slots_obstack);
3536 unfilled_firstobj = (rtx *) obstack_alloc (&unfilled_slots_obstack, 0);
3539 for (insn = next_active_insn (first); insn; insn = next_active_insn (insn))
3543 INSN_ANNULLED_BRANCH_P (insn) = 0;
3544 INSN_FROM_TARGET_P (insn) = 0;
3546 /* Skip vector tables. We can't get attributes for them. */
3547 if (GET_CODE (insn) == JUMP_INSN
3548 && (GET_CODE (PATTERN (insn)) == ADDR_VEC
3549 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC))
3552 if (num_delay_slots (insn) > 0)
3553 obstack_ptr_grow (&unfilled_slots_obstack, insn);
3555 /* Ensure all jumps go to the last of a set of consecutive labels. */
3556 if (GET_CODE (insn) == JUMP_INSN
3557 && (condjump_p (insn) || condjump_in_parallel_p (insn))
3558 && JUMP_LABEL (insn) != 0
3559 && ((target = prev_label (next_active_insn (JUMP_LABEL (insn))))
3560 != JUMP_LABEL (insn)))
3561 redirect_jump (insn, target);
3564 init_resource_info (epilogue_insn);
3566 /* Show we haven't computed an end-of-function label yet. */
3567 end_of_function_label = 0;
3569 /* Initialize the statistics for this function. */
3570 bzero ((char *) num_insns_needing_delays, sizeof num_insns_needing_delays);
3571 bzero ((char *) num_filled_delays, sizeof num_filled_delays);
3573 /* Now do the delay slot filling. Try everything twice in case earlier
3574 changes make more slots fillable. */
3576 for (reorg_pass_number = 0;
3577 reorg_pass_number < MAX_REORG_PASSES;
3578 reorg_pass_number++)
3580 fill_simple_delay_slots (1);
3581 fill_simple_delay_slots (0);
3582 fill_eager_delay_slots ();
3583 relax_delay_slots (first);
3586 /* Delete any USE insns made by update_block; subsequent passes don't need
3587 them or know how to deal with them. */
3588 for (insn = first; insn; insn = next)
3590 next = NEXT_INSN (insn);
3592 if (GET_CODE (insn) == INSN && GET_CODE (PATTERN (insn)) == USE
3593 && GET_RTX_CLASS (GET_CODE (XEXP (PATTERN (insn), 0))) == 'i')
3594 next = delete_insn (insn);
3597 /* If we made an end of function label, indicate that it is now
3598 safe to delete it by undoing our prior adjustment to LABEL_NUSES.
3599 If it is now unused, delete it. */
3600 if (end_of_function_label && --LABEL_NUSES (end_of_function_label) == 0)
3601 delete_insn (end_of_function_label);
3604 if (HAVE_return && end_of_function_label != 0)
3605 make_return_insns (first);
3608 obstack_free (&unfilled_slots_obstack, unfilled_firstobj);
3610 /* It is not clear why the line below is needed, but it does seem to be. */
3611 unfilled_firstobj = (rtx *) obstack_alloc (&unfilled_slots_obstack, 0);
3613 /* Reposition the prologue and epilogue notes in case we moved the
3614 prologue/epilogue insns. */
3615 reposition_prologue_and_epilogue_notes (first);
3619 register int i, j, need_comma;
3621 for (reorg_pass_number = 0;
3622 reorg_pass_number < MAX_REORG_PASSES;
3623 reorg_pass_number++)
3625 fprintf (file, ";; Reorg pass #%d:\n", reorg_pass_number + 1);
3626 for (i = 0; i < NUM_REORG_FUNCTIONS; i++)
3629 fprintf (file, ";; Reorg function #%d\n", i);
3631 fprintf (file, ";; %d insns needing delay slots\n;; ",
3632 num_insns_needing_delays[i][reorg_pass_number]);
3634 for (j = 0; j < MAX_DELAY_HISTOGRAM; j++)
3635 if (num_filled_delays[i][j][reorg_pass_number])
3638 fprintf (file, ", ");
3640 fprintf (file, "%d got %d delays",
3641 num_filled_delays[i][j][reorg_pass_number], j);
3643 fprintf (file, "\n");
3648 /* For all JUMP insns, fill in branch prediction notes, so that during
3649 assembler output a target can set branch prediction bits in the code.
3650 We have to do this now, as up until this point the destinations of
3651 JUMPS can be moved around and changed, but past right here that cannot happen. */
3653 for (insn = first; insn; insn = NEXT_INSN (insn))
3657 if (GET_CODE (insn) == INSN)
3659 rtx pat = PATTERN (insn);
3661 if (GET_CODE (pat) == SEQUENCE)
3662 insn = XVECEXP (pat, 0, 0);
3664 if (GET_CODE (insn) != JUMP_INSN)
3667 pred_flags = get_jump_flags (insn, JUMP_LABEL (insn));
3668 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_BR_PRED,
3669 GEN_INT (pred_flags),
3672 free_resource_info ();
3674 #endif /* DELAY_SLOTS */