/* Perform instruction reorganizations for delay slot filling.
   Copyright (C) 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999, 2000,
   2001, 2002, 2003, 2004, 2005, 2006, 2007
   Free Software Foundation, Inc.
   Contributed by Richard Kenner (kenner@vlsi1.ultra.nyu.edu).
   Hacked by Michael Tiemann (tiemann@cygnus.com).

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/* Instruction reorganization pass.

   This pass runs after register allocation and final jump
   optimization.  It should be the last pass to run before peephole.
   It serves primarily to fill delay slots of insns, typically branch
   and call insns.  Other insns typically involve more complicated
   interactions of data dependencies and resource constraints, and
   are better handled by scheduling before register allocation (by the
   function `schedule_insns').

   The Branch Penalty is the number of extra cycles that are needed to
   execute a branch insn.  On an ideal machine, branches take a single
   cycle, and the Branch Penalty is 0.  Several RISC machines approach
   branch delays differently:

   The MIPS has a single branch delay slot.  Most insns
   (except other branches) can be used to fill this slot.  When the
   slot is filled, two insns execute in two cycles, reducing the
   branch penalty to zero.

   The SPARC always has a branch delay slot, but its effects can be
   annulled when the branch is not taken.  This means that failing to
   find other sources of insns, we can hoist an insn from the branch
   target that would only be safe to execute knowing that the branch
   is taken.

   The HP-PA always has a branch delay slot.  For unconditional branches
   its effects can be annulled when the branch is taken.  The effects
   of the delay slot in a conditional branch can be nullified for forward
   taken branches, or for untaken backward branches.  This means
   we can hoist insns from the fall-through path for forward branches or
   steal insns from the target of backward branches.

   The TMS320C3x and C4x have three branch delay slots.  When the three
   slots are filled, the branch penalty is zero.  Most insns can fill the
   delay slots except jump insns.
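
   As a concrete illustration (an editorial sketch in MIPS-like notation,
   not literal output of this pass), filling a single slot moves an
   independent insn from before the branch into the slot that would
   otherwise hold a nop:

	(before)		(after)
	add $1,$2,$3		b L1
	b L1			add $1,$2,$3	; executes in the delay slot
	nop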
   Three techniques for filling delay slots have been implemented so far:

   (1) `fill_simple_delay_slots' is the simplest, most efficient way
   to fill delay slots.  This pass first looks for insns which come
   from before the branch and which are safe to execute after the
   branch.  Then it searches after the insn requiring delay slots or,
   in the case of a branch, for insns that are after the point at
   which the branch merges into the fallthrough code, if such a point
   exists.  When such insns are found, the branch penalty decreases
   and no code expansion takes place.

   (2) `fill_eager_delay_slots' is more complicated: it is used for
   scheduling conditional jumps, or for scheduling jumps which cannot
   be filled using (1).  A machine need not have annulled jumps to use
   this strategy, but it helps (by keeping more options open).
   `fill_eager_delay_slots' tries to guess the direction the branch
   will go; if it guesses right 100% of the time, it can reduce the
   branch penalty as much as `fill_simple_delay_slots' does.  If it
   guesses wrong 100% of the time, it might as well schedule nops.  When
   `fill_eager_delay_slots' takes insns from the fall-through path of
   the jump, usually there is no code expansion; when it takes insns
   from the branch target, there is code expansion if it is not the
   only way to reach that target.
   (3) `relax_delay_slots' uses a set of rules to simplify code that
   has been reorganized by (1) and (2).  It finds cases where a
   conditional test can be eliminated, jumps can be threaded, extra
   insns can be eliminated, etc.  It is the job of (1) and (2) to do a
   good job of scheduling locally; `relax_delay_slots' takes care of
   making the various individual schedules work well together.  It is
   especially tuned to handle the control flow interactions of branch
   insns.  It does nothing for insns with delay slots that do not
   branch.
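
   As a further illustrative sketch (again editorial, SPARC-style), eager
   filling of a branch predicted taken may copy the first insn at the
   target into an annulled slot, so it takes effect only when the guess
   was right:

	(before)		(after)
	bcc L1			bcc,a L1'	! L1' = insn after the copied one
	nop			insn-from-L1	! annulled when not taken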
   On machines that use CC0, we are very conservative.  We will not make
   a copy of an insn involving CC0 since we want to maintain a 1-1
   correspondence between the insn that sets and uses CC0.  The insns are
   allowed to be separated by placing an insn that sets CC0 (but not an insn
   that uses CC0; we could do this, but it doesn't seem worthwhile) in a
   delay slot.  In that case, we point each insn at the other with REG_CC_USER
   and REG_CC_SETTER notes.  Note that these restrictions affect very few
   machines because most RISC machines with delay slots will not use CC0
   (the RT is the only known exception at this point).

   Not yet implemented:

   The Acorn Risc Machine can conditionally execute most insns, so
   it is profitable to move single insns into a position to execute
   based on the condition code of the previous insn.

   The HP-PA can conditionally nullify insns, providing a similar
   effect to the ARM, differing mostly in which insn is "in charge".  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "toplev.h"
#include "rtl.h"
#include "tm_p.h"
#include "expr.h"
#include "function.h"
#include "insn-config.h"
#include "conditions.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "regs.h"
#include "recog.h"
#include "flags.h"
#include "output.h"
#include "obstack.h"
#include "insn-attr.h"
#include "resource.h"
#include "except.h"
#include "params.h"
#include "timevar.h"
#include "target.h"
#include "tree-pass.h"
#ifndef ANNUL_IFTRUE_SLOTS
#define eligible_for_annul_true(INSN, SLOTS, TRIAL, FLAGS) 0
#endif
#ifndef ANNUL_IFFALSE_SLOTS
#define eligible_for_annul_false(INSN, SLOTS, TRIAL, FLAGS) 0
#endif
/* Insns which have delay slots that have not yet been filled.  */

static struct obstack unfilled_slots_obstack;
static rtx *unfilled_firstobj;

/* Define macros to refer to the first and last slot containing unfilled
   insns.  These are used because the list may move and its address
   should be recomputed at each use.  */

#define unfilled_slots_base \
  ((rtx *) obstack_base (&unfilled_slots_obstack))

#define unfilled_slots_next \
  ((rtx *) obstack_next_free (&unfilled_slots_obstack))
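
/* For orientation (a summary of existing usage in this file, not a new
   interface): insns are queued with
   obstack_ptr_grow (&unfilled_slots_obstack, insn), and the filling
   passes walk the live range with

     for (rtx *p = unfilled_slots_base; p < unfilled_slots_next; p++)
       ... examine *p ...

   recomputing both macros at each use because the obstack may move.  */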
/* Points to the label before the end of the function.  */
static rtx end_of_function_label;

/* Mapping between INSN_UID's and position in the code since INSN_UID's do
   not always monotonically increase.  */
static int *uid_to_ruid;

/* Highest valid index in `uid_to_ruid'.  */
static int max_uid;
static int stop_search_p (rtx, int);
static int resource_conflicts_p (struct resources *, struct resources *);
static int insn_references_resource_p (rtx, struct resources *, int);
static int insn_sets_resource_p (rtx, struct resources *, int);
static rtx find_end_label (void);
static rtx emit_delay_sequence (rtx, rtx, int);
static rtx add_to_delay_list (rtx, rtx);
static rtx delete_from_delay_slot (rtx);
static void delete_scheduled_jump (rtx);
static void note_delay_statistics (int, int);
#if defined(ANNUL_IFFALSE_SLOTS) || defined(ANNUL_IFTRUE_SLOTS)
static rtx optimize_skip (rtx);
#endif
static int get_jump_flags (rtx, rtx);
static int rare_destination (rtx);
static int mostly_true_jump (rtx, rtx);
static rtx get_branch_condition (rtx, rtx);
static int condition_dominates_p (rtx, rtx);
static int redirect_with_delay_slots_safe_p (rtx, rtx, rtx);
static int redirect_with_delay_list_safe_p (rtx, rtx, rtx);
static int check_annul_list_true_false (int, rtx);
static rtx steal_delay_list_from_target (rtx, rtx, rtx, rtx,
					 struct resources *,
					 struct resources *,
					 struct resources *,
					 int, int *, int *, rtx *);
static rtx steal_delay_list_from_fallthrough (rtx, rtx, rtx, rtx,
					      struct resources *,
					      struct resources *,
					      struct resources *,
					      int, int *, int *);
static void try_merge_delay_insns (rtx, rtx);
static rtx redundant_insn (rtx, rtx, rtx);
static int own_thread_p (rtx, rtx, int);
static void update_block (rtx, rtx);
static int reorg_redirect_jump (rtx, rtx);
static void update_reg_dead_notes (rtx, rtx);
static void fix_reg_dead_note (rtx, rtx);
static void update_reg_unused_notes (rtx, rtx);
static void fill_simple_delay_slots (int);
static rtx fill_slots_from_thread (rtx, rtx, rtx, rtx,
				   int, int, int, int,
				   int *, rtx);
static void fill_eager_delay_slots (void);
static void relax_delay_slots (rtx);
#ifdef HAVE_return
static void make_return_insns (rtx);
#endif
/* Return TRUE if this insn should stop the search for insn to fill delay
   slots.  LABELS_P indicates that labels should terminate the search.
   In all cases, jumps terminate the search.  */

static int
stop_search_p (rtx insn, int labels_p)
{
  if (insn == 0)
    return 1;

  /* If the insn can throw an exception that is caught within the function,
     it may effectively perform a jump from the viewpoint of the function.
     Therefore act like for a jump.  */
  if (can_throw_internal (insn))
    return 1;

  switch (GET_CODE (insn))
    {
    case NOTE:
    case CALL_INSN:
      return 0;

    case CODE_LABEL:
      return labels_p;

    case JUMP_INSN:
    case BARRIER:
      return 1;

    case INSN:
      /* OK unless it contains a delay slot or is an `asm' insn of some type.
	 We don't know anything about these.  */
      return (GET_CODE (PATTERN (insn)) == SEQUENCE
	      || GET_CODE (PATTERN (insn)) == ASM_INPUT
	      || asm_noperands (PATTERN (insn)) >= 0);

    default:
      gcc_unreachable ();
    }
}
/* Return TRUE if any resources are marked in both RES1 and RES2 or if either
   resource set contains a volatile memory reference.  Otherwise, return FALSE.  */

static int
resource_conflicts_p (struct resources *res1, struct resources *res2)
{
  if ((res1->cc && res2->cc) || (res1->memory && res2->memory)
      || (res1->unch_memory && res2->unch_memory)
      || res1->volatil || res2->volatil)
    return 1;

#ifdef HARD_REG_SET
  return (res1->regs & res2->regs) != HARD_CONST (0);
#else
  {
    int i;

    for (i = 0; i < HARD_REG_SET_LONGS; i++)
      if ((res1->regs[i] & res2->regs[i]) != 0)
	return 1;
    return 0;
  }
#endif
}
/* Return TRUE if any resource marked in RES, a `struct resources', is
   referenced by INSN.  If INCLUDE_DELAYED_EFFECTS is set, return if the called
   routine is using those resources.

   We compute this by computing all the resources referenced by INSN and
   seeing if this conflicts with RES.  It might be faster to directly check
   ourselves, and this is the way it used to work, but it means duplicating
   a large block of complex code.  */

static int
insn_references_resource_p (rtx insn, struct resources *res,
			    int include_delayed_effects)
{
  struct resources insn_res;

  CLEAR_RESOURCE (&insn_res);
  mark_referenced_resources (insn, &insn_res, include_delayed_effects);
  return resource_conflicts_p (&insn_res, res);
}
/* Return TRUE if INSN modifies resources that are marked in RES.
   INCLUDE_DELAYED_EFFECTS is set if the actions of that routine should be
   included.  CC0 is only modified if it is explicitly set; see comments
   in front of mark_set_resources for details.  */

static int
insn_sets_resource_p (rtx insn, struct resources *res,
		      int include_delayed_effects)
{
  struct resources insn_sets;

  CLEAR_RESOURCE (&insn_sets);
  mark_set_resources (insn, &insn_sets, 0, include_delayed_effects);
  return resource_conflicts_p (&insn_sets, res);
}
/* Find a label at the end of the function or before a RETURN.  If there
   is none, try to make one.  If that fails, returns 0.

   The property of such a label is that it is placed just before the
   epilogue or a bare RETURN insn, so that another bare RETURN can be
   turned into a jump to the label unconditionally.  In particular, the
   label cannot be placed before a RETURN insn with a filled delay slot.

   ??? There may be a problem with the current implementation.  Suppose
   we start with a bare RETURN insn and call find_end_label.  It may set
   end_of_function_label just before the RETURN.  Suppose the machinery
   is able to fill the delay slot of the RETURN insn afterwards.  Then
   end_of_function_label is no longer valid according to the property
   described above and find_end_label will still return it unmodified.
   Note that this is probably mitigated by the following observation:
   once end_of_function_label is made, it is very likely the target of
   a jump, so filling the delay slot of the RETURN will be much more
   likely to happen.  */

static rtx
find_end_label (void)
{
  rtx insn;

  /* If we found one previously, return it.  */
  if (end_of_function_label)
    return end_of_function_label;

  /* Otherwise, see if there is a label at the end of the function.  If there
     is, it must be that RETURN insns aren't needed, so that is our return
     label and we don't have to do anything else.  */

  insn = get_last_insn ();
  while (NOTE_P (insn)
	 || (NONJUMP_INSN_P (insn)
	     && (GET_CODE (PATTERN (insn)) == USE
		 || GET_CODE (PATTERN (insn)) == CLOBBER)))
    insn = PREV_INSN (insn);

  /* When a target threads its epilogue we might already have a
     suitable return insn.  If so put a label before it for the
     end_of_function_label.  */
  if (BARRIER_P (insn)
      && JUMP_P (PREV_INSN (insn))
      && GET_CODE (PATTERN (PREV_INSN (insn))) == RETURN)
    {
      rtx temp = PREV_INSN (PREV_INSN (insn));
      end_of_function_label = gen_label_rtx ();
      LABEL_NUSES (end_of_function_label) = 0;

      /* Put the label before any USE insns that may precede the RETURN insn.  */
      while (GET_CODE (temp) == USE)
	temp = PREV_INSN (temp);

      emit_label_after (end_of_function_label, temp);
    }

  else if (LABEL_P (insn))
    end_of_function_label = insn;
  else
    {
      end_of_function_label = gen_label_rtx ();
      LABEL_NUSES (end_of_function_label) = 0;
      /* If the basic block reorder pass moves the return insn to
	 some other place try to locate it again and put our
	 end_of_function_label there.  */
      while (insn && ! (JUMP_P (insn)
			&& (GET_CODE (PATTERN (insn)) == RETURN)))
	insn = PREV_INSN (insn);
      if (insn)
	{
	  insn = PREV_INSN (insn);

	  /* Put the label before any USE insns that may precede the
	     RETURN insn.  */
	  while (GET_CODE (insn) == USE)
	    insn = PREV_INSN (insn);

	  emit_label_after (end_of_function_label, insn);
	}
      else
	{
#ifdef HAVE_epilogue
	  if (HAVE_epilogue
#ifdef HAVE_return
	      && ! HAVE_return
#endif
	      )
	    {
	      /* The RETURN insn has its delay slot filled so we cannot
		 emit the label just before it.  Since we already have
		 an epilogue and cannot emit a new RETURN, we cannot
		 emit the label at all.  */
	      end_of_function_label = NULL_RTX;
	      return end_of_function_label;
	    }
#endif /* HAVE_epilogue */

	  /* Otherwise, make a new label and emit a RETURN and BARRIER,
	     if needed.  */
	  emit_label (end_of_function_label);
#ifdef HAVE_return
	  /* We don't bother trying to create a return insn if the
	     epilogue has filled delay-slots; we would have to try and
	     move the delay-slot fillers to the delay-slots for the new
	     return insn or in front of the new return insn.  */
	  if (current_function_epilogue_delay_list == NULL
	      && HAVE_return)
	    {
	      /* The return we make may have delay slots too.  */
	      rtx insn = gen_return ();
	      insn = emit_jump_insn (insn);
	      emit_barrier ();
	      if (num_delay_slots (insn) > 0)
		obstack_ptr_grow (&unfilled_slots_obstack, insn);
	    }
#endif
	}
    }

  /* Show one additional use for this label so it won't go away until
     we are done.  */
  ++LABEL_NUSES (end_of_function_label);

  return end_of_function_label;
}
/* Put INSN and LIST together in a SEQUENCE rtx of LENGTH, and replace
   the pattern of INSN with the SEQUENCE.

   Chain the insns so that NEXT_INSN of each insn in the sequence points to
   the next and NEXT_INSN of the last insn in the sequence points to
   the first insn after the sequence.  Similarly for PREV_INSN.  This makes
   it easier to scan all insns.

   Returns the SEQUENCE that replaces INSN.  */
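
/* A sketch of the resulting rtl, for reference: after filling, the stream
   contains a single insn whose pattern is

     (sequence [insn-needing-slots  delay-insn-1 ... delay-insn-N])

   so element 0 is the branch, call, or other insn that required the slots
   and the remaining elements are the insns that fill them.  */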
static rtx
emit_delay_sequence (rtx insn, rtx list, int length)
{
  int i = 1;
  rtx li;
  int had_barrier = 0;

  /* Allocate the rtvec to hold the insns and the SEQUENCE.  */
  rtvec seqv = rtvec_alloc (length + 1);
  rtx seq = gen_rtx_SEQUENCE (VOIDmode, seqv);
  rtx seq_insn = make_insn_raw (seq);
  rtx first = get_insns ();
  rtx last = get_last_insn ();

  /* Make a copy of the insn having delay slots.  */
  rtx delay_insn = copy_rtx (insn);

  /* If INSN is followed by a BARRIER, delete the BARRIER since it will only
     confuse further processing.  Update LAST in case it was the last insn.
     We will put the BARRIER back in later.  */
  if (NEXT_INSN (insn) && BARRIER_P (NEXT_INSN (insn)))
    {
      delete_related_insns (NEXT_INSN (insn));
      last = get_last_insn ();
      had_barrier = 1;
    }

  /* Splice our SEQUENCE into the insn stream where INSN used to be.  */
  NEXT_INSN (seq_insn) = NEXT_INSN (insn);
  PREV_INSN (seq_insn) = PREV_INSN (insn);

  if (NEXT_INSN (seq_insn))
    PREV_INSN (NEXT_INSN (seq_insn)) = seq_insn;

  if (PREV_INSN (seq_insn))
    NEXT_INSN (PREV_INSN (seq_insn)) = seq_insn;

  /* Note the calls to set_new_first_and_last_insn must occur after
     SEQ_INSN has been completely spliced into the insn stream.

     Otherwise CUR_INSN_UID will get set to an incorrect value because
     set_new_first_and_last_insn will not find SEQ_INSN in the chain.  */
  if (insn == last)
    set_new_first_and_last_insn (first, seq_insn);

  if (insn == first)
    set_new_first_and_last_insn (seq_insn, last);

  /* Build our SEQUENCE and rebuild the insn chain.  */
  XVECEXP (seq, 0, 0) = delay_insn;
  INSN_DELETED_P (delay_insn) = 0;
  PREV_INSN (delay_insn) = PREV_INSN (seq_insn);

  for (li = list; li; li = XEXP (li, 1), i++)
    {
      rtx tem = XEXP (li, 0);
      rtx note, next;

      /* Show that this copy of the insn isn't deleted.  */
      INSN_DELETED_P (tem) = 0;

      XVECEXP (seq, 0, i) = tem;
      PREV_INSN (tem) = XVECEXP (seq, 0, i - 1);
      NEXT_INSN (XVECEXP (seq, 0, i - 1)) = tem;

      /* The SPARC assembler, for instance, emits a warning when debug info
	 is output into the delay slot.  */
      if (INSN_LOCATOR (tem) && !INSN_LOCATOR (seq_insn))
	INSN_LOCATOR (seq_insn) = INSN_LOCATOR (tem);
      INSN_LOCATOR (tem) = 0;

      for (note = REG_NOTES (tem); note; note = next)
	{
	  next = XEXP (note, 1);
	  switch (REG_NOTE_KIND (note))
	    {
	    case REG_DEAD:
	      /* Remove any REG_DEAD notes because we can't rely on them now
		 that the insn has been moved.  */
	      remove_note (tem, note);
	      break;

	    case REG_LABEL:
	      /* Keep the label reference count up to date.  */
	      if (LABEL_P (XEXP (note, 0)))
		LABEL_NUSES (XEXP (note, 0)) ++;
	      break;

	    default:
	      break;
	    }
	}
    }

  NEXT_INSN (XVECEXP (seq, 0, length)) = NEXT_INSN (seq_insn);

  /* If the previous insn is a SEQUENCE, update the NEXT_INSN pointer on the
     last insn in that SEQUENCE to point to us.  Similarly for the first
     insn in the following insn if it is a SEQUENCE.  */

  if (PREV_INSN (seq_insn) && NONJUMP_INSN_P (PREV_INSN (seq_insn))
      && GET_CODE (PATTERN (PREV_INSN (seq_insn))) == SEQUENCE)
    NEXT_INSN (XVECEXP (PATTERN (PREV_INSN (seq_insn)), 0,
			XVECLEN (PATTERN (PREV_INSN (seq_insn)), 0) - 1))
      = seq_insn;

  if (NEXT_INSN (seq_insn) && NONJUMP_INSN_P (NEXT_INSN (seq_insn))
      && GET_CODE (PATTERN (NEXT_INSN (seq_insn))) == SEQUENCE)
    PREV_INSN (XVECEXP (PATTERN (NEXT_INSN (seq_insn)), 0, 0)) = seq_insn;

  /* If there used to be a BARRIER, put it back.  */
  if (had_barrier)
    emit_barrier_after (seq_insn);

  gcc_assert (i == length + 1);

  return seq_insn;
}
/* Add INSN to DELAY_LIST and return the head of the new list.  The list must
   be in the order in which the insns are to be executed.  */

static rtx
add_to_delay_list (rtx insn, rtx delay_list)
{
  /* If we have an empty list, just make a new list element.  If
     INSN has its block number recorded, clear it since we may
     be moving the insn to a new block.  */

  if (delay_list == 0)
    {
      clear_hashed_info_for_insn (insn);
      return gen_rtx_INSN_LIST (VOIDmode, insn, NULL_RTX);
    }

  /* Otherwise this must be an INSN_LIST.  Add INSN to the end of the
     list.  */
  XEXP (delay_list, 1) = add_to_delay_list (insn, XEXP (delay_list, 1));

  return delay_list;
}
/* Delete INSN from the delay slot of the insn that it is in, which may
   produce an insn with no delay slots.  Return the new insn.  */

static rtx
delete_from_delay_slot (rtx insn)
{
  rtx trial, seq_insn, seq, prev;
  rtx delay_list = 0;
  int i;
  int had_barrier = 0;

  /* We first must find the insn containing the SEQUENCE with INSN in its
     delay slot.  Do this by finding an insn, TRIAL, where
     PREV_INSN (NEXT_INSN (TRIAL)) != TRIAL.  */

  for (trial = insn;
       PREV_INSN (NEXT_INSN (trial)) == trial;
       trial = NEXT_INSN (trial))
    ;

  seq_insn = PREV_INSN (NEXT_INSN (trial));
  seq = PATTERN (seq_insn);

  if (NEXT_INSN (seq_insn) && BARRIER_P (NEXT_INSN (seq_insn)))
    had_barrier = 1;

  /* Create a delay list consisting of all the insns other than the one
     we are deleting (unless we were the only one).  */
  if (XVECLEN (seq, 0) > 2)
    for (i = 1; i < XVECLEN (seq, 0); i++)
      if (XVECEXP (seq, 0, i) != insn)
	delay_list = add_to_delay_list (XVECEXP (seq, 0, i), delay_list);

  /* Delete the old SEQUENCE, re-emit the insn that used to have the delay
     list, and rebuild the delay list if non-empty.  */
  prev = PREV_INSN (seq_insn);
  trial = XVECEXP (seq, 0, 0);
  delete_related_insns (seq_insn);
  add_insn_after (trial, prev, NULL);

  /* If there was a barrier after the old SEQUENCE, re-emit it.  */
  if (had_barrier)
    emit_barrier_after (trial);

  /* If there are any delay insns, re-emit them.  Otherwise clear the
     annul flag.  */
  if (delay_list)
    trial = emit_delay_sequence (trial, delay_list, XVECLEN (seq, 0) - 2);
  else if (INSN_P (trial))
    INSN_ANNULLED_BRANCH_P (trial) = 0;

  INSN_FROM_TARGET_P (insn) = 0;

  /* Show we need to fill this insn again.  */
  obstack_ptr_grow (&unfilled_slots_obstack, trial);

  return trial;
}
/* Delete INSN, a JUMP_INSN.  If it is a conditional jump, we must track down
   the insn that sets CC0 for it and delete it too.  */

static void
delete_scheduled_jump (rtx insn)
{
  /* Delete the insn that sets cc0 for us.  On machines without cc0, we could
     delete the insn that sets the condition code, but it is hard to find it.
     Since this case is rare anyway, don't bother trying; there would likely
     be other insns that became dead anyway, which we wouldn't know to
     delete.  */

#ifdef HAVE_cc0
  if (reg_mentioned_p (cc0_rtx, insn))
    {
      rtx note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);

      /* If a reg-note was found, it points to an insn to set CC0.  This
	 insn is in the delay list of some other insn.  So delete it from
	 the delay list it was in.  */
      if (note)
	{
	  if (! FIND_REG_INC_NOTE (XEXP (note, 0), NULL_RTX)
	      && sets_cc0_p (PATTERN (XEXP (note, 0))) == 1)
	    delete_from_delay_slot (XEXP (note, 0));
	}
      else
	{
	  /* The insn setting CC0 is our previous insn, but it may be in
	     a delay slot.  It will be the last insn in the delay slot, if
	     it is.  */
	  rtx trial = previous_insn (insn);
	  if (NOTE_P (trial))
	    trial = prev_nonnote_insn (trial);
	  if (sets_cc0_p (PATTERN (trial)) != 1
	      || FIND_REG_INC_NOTE (trial, NULL_RTX))
	    return;
	  if (PREV_INSN (NEXT_INSN (trial)) == trial)
	    delete_related_insns (trial);
	  else
	    delete_from_delay_slot (trial);
	}
    }
#endif

  delete_related_insns (insn);
}
/* Counters for delay-slot filling.  */

#define NUM_REORG_FUNCTIONS 2
#define MAX_DELAY_HISTOGRAM 3
#define MAX_REORG_PASSES 2

static int num_insns_needing_delays[NUM_REORG_FUNCTIONS][MAX_REORG_PASSES];

static int num_filled_delays[NUM_REORG_FUNCTIONS][MAX_DELAY_HISTOGRAM+1][MAX_REORG_PASSES];

static int reorg_pass_number;

static void
note_delay_statistics (int slots_filled, int index)
{
  num_insns_needing_delays[index][reorg_pass_number]++;
  if (slots_filled > MAX_DELAY_HISTOGRAM)
    slots_filled = MAX_DELAY_HISTOGRAM;
  num_filled_delays[index][slots_filled][reorg_pass_number]++;
}
#if defined(ANNUL_IFFALSE_SLOTS) || defined(ANNUL_IFTRUE_SLOTS)

/*  Optimize the following cases:

    1.  When a conditional branch skips over only one instruction,
	use an annulling branch and put that insn in the delay slot.
	Use either a branch that annuls when the condition is true or
	invert the test with a branch that annuls when the condition is
	false.  This saves insns, since otherwise we must copy an insn
	into the delay slot.

	(orig)		 (skip)		(otherwise)
	Bcc.n L1	Bcc',a L1	Bcc,a L1'
	insn		insn		insn2
      L1:	      L1:	      L1:
	insn2		insn2		insn2
	insn3		insn3	      L1':
					insn3

    2.  When a conditional branch skips over only one instruction,
	and after that, it unconditionally branches somewhere else,
	perform the similar optimization.  This saves executing the
	second branch in the case where the inverted condition is true.

    This should be expanded to skip over N insns, where N is the number
    of delay slots required.  */
static rtx
optimize_skip (rtx insn)
{
  rtx trial = next_nonnote_insn (insn);
  rtx next_trial = next_active_insn (trial);
  rtx delay_list = 0;
  int flags;

  flags = get_jump_flags (insn, JUMP_LABEL (insn));

  if (trial == 0
      || !NONJUMP_INSN_P (trial)
      || GET_CODE (PATTERN (trial)) == SEQUENCE
      || recog_memoized (trial) < 0
      || (! eligible_for_annul_false (insn, 0, trial, flags)
	  && ! eligible_for_annul_true (insn, 0, trial, flags))
      || can_throw_internal (trial))
    return 0;

  /* There are two cases where we are just executing one insn (we assume
     here that a branch requires only one insn; this should be generalized
     at some point):  Where the branch goes around a single insn or where
     we have one insn followed by a branch to the same label we branch to.
     In both of these cases, inverting the jump and annulling the delay
     slot give the same effect in fewer insns.  */
  if ((next_trial == next_active_insn (JUMP_LABEL (insn))
       && ! (next_trial == 0 && current_function_epilogue_delay_list != 0))
      || (next_trial != 0
	  && JUMP_P (next_trial)
	  && JUMP_LABEL (insn) == JUMP_LABEL (next_trial)
	  && (simplejump_p (next_trial)
	      || GET_CODE (PATTERN (next_trial)) == RETURN)))
    {
      if (eligible_for_annul_false (insn, 0, trial, flags))
	{
	  if (invert_jump (insn, JUMP_LABEL (insn), 1))
	    INSN_FROM_TARGET_P (trial) = 1;
	  else if (! eligible_for_annul_true (insn, 0, trial, flags))
	    return 0;
	}

      delay_list = add_to_delay_list (trial, NULL_RTX);
      next_trial = next_active_insn (trial);
      update_block (trial, trial);
      delete_related_insns (trial);

      /* Also, if we are targeting an unconditional
	 branch, thread our jump to the target of that branch.  Don't
	 change this into a RETURN here, because it may not accept what
	 we have in the delay slot.  We'll fix this up later.  */
      if (next_trial && JUMP_P (next_trial)
	  && (simplejump_p (next_trial)
	      || GET_CODE (PATTERN (next_trial)) == RETURN))
	{
	  rtx target_label = JUMP_LABEL (next_trial);
	  if (target_label == 0)
	    target_label = find_end_label ();

	  if (target_label)
	    {
	      /* Recompute the flags based on TARGET_LABEL since threading
		 the jump to TARGET_LABEL may change the direction of the
		 jump (which may change the circumstances in which the
		 delay slot is nullified).  */
	      flags = get_jump_flags (insn, target_label);
	      if (eligible_for_annul_true (insn, 0, trial, flags))
		reorg_redirect_jump (insn, target_label);
	    }
	}

      INSN_ANNULLED_BRANCH_P (insn) = 1;
    }

  return delay_list;
}
#endif
/* Encode and return branch direction and prediction information for
   INSN assuming it will jump to LABEL.

   Non conditional branches return no direction information and
   are predicted as very likely taken.  */
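
/* For example (illustrative only): a conditional jump whose target label
   lies later in the insn stream and which mostly_true_jump rates as 1
   yields ATTR_FLAG_forward | ATTR_FLAG_likely; these are the flag bits
   the eligible_for_* routines test.  */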
static int
get_jump_flags (rtx insn, rtx label)
{
  int flags;

  /* get_jump_flags can be passed any insn with delay slots, these may
     be INSNs, CALL_INSNs, or JUMP_INSNs.  Only JUMP_INSNs have branch
     direction information, and only if they are conditional jumps.

     If LABEL is zero, then there is no way to determine the branch
     direction.  */
  if (JUMP_P (insn)
      && (condjump_p (insn) || condjump_in_parallel_p (insn))
      && INSN_UID (insn) <= max_uid
      && label != 0
      && INSN_UID (label) <= max_uid)
    flags
      = (uid_to_ruid[INSN_UID (label)] > uid_to_ruid[INSN_UID (insn)])
	? ATTR_FLAG_forward : ATTR_FLAG_backward;
  /* No valid direction information.  */
  else
    flags = 0;

  /* If insn is a conditional branch, call mostly_true_jump to
     determine the branch prediction.

     Non conditional branches are predicted as very likely taken.  */
  if (JUMP_P (insn)
      && (condjump_p (insn) || condjump_in_parallel_p (insn)))
    {
      int prediction;

      prediction = mostly_true_jump (insn, get_branch_condition (insn, label));
      switch (prediction)
	{
	case 2:
	  flags |= (ATTR_FLAG_very_likely | ATTR_FLAG_likely);
	  break;
	case 1:
	  flags |= ATTR_FLAG_likely;
	  break;
	case 0:
	  flags |= ATTR_FLAG_unlikely;
	  break;
	case -1:
	  flags |= (ATTR_FLAG_very_unlikely | ATTR_FLAG_unlikely);
	  break;

	default:
	  gcc_unreachable ();
	}
    }
  else
    flags |= (ATTR_FLAG_very_likely | ATTR_FLAG_likely);

  return flags;
}
/* Return 1 if INSN is a destination that will be branched to rarely (the
   return point of a function); return 2 if DEST will be branched to very
   rarely (a call to a function that doesn't return).  Otherwise,
   return 0.  */

static int
rare_destination (rtx insn)
{
  int jump_count = 0;
  rtx next;

  for (; insn; insn = next)
    {
      if (NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
	insn = XVECEXP (PATTERN (insn), 0, 0);

      next = NEXT_INSN (insn);

      switch (GET_CODE (insn))
	{
	case CODE_LABEL:
	  return 0;

	case BARRIER:
	  /* A BARRIER can either be after a JUMP_INSN or a CALL_INSN.  We
	     don't scan past JUMP_INSNs, so any barrier we find here must
	     have been after a CALL_INSN and hence mean the call doesn't
	     return.  */
	  return 2;

	case JUMP_INSN:
	  if (GET_CODE (PATTERN (insn)) == RETURN)
	    return 1;
	  else if (simplejump_p (insn)
		   && jump_count++ < 10)
	    next = JUMP_LABEL (insn);
	  else
	    return 0;

	default:
	  break;
	}
    }

  /* If we got here it means we hit the end of the function.  So this
     is an unlikely destination.  */

  return 1;
}
/* Return truth value of the statement that this branch
   is mostly taken.  If we think that the branch is extremely likely
   to be taken, we return 2.  If the branch is slightly more likely to be
   taken, return 1.  If the branch is slightly less likely to be taken,
   return 0 and if the branch is highly unlikely to be taken, return -1.

   CONDITION, if nonzero, is the condition that JUMP_INSN is testing.  */

static int
mostly_true_jump (rtx jump_insn, rtx condition)
{
  rtx target_label = JUMP_LABEL (jump_insn);
  rtx note;
  int rare_dest, rare_fallthrough;

  /* If branch probabilities are available, then use that number since it
     always gives a correct answer.  */
  note = find_reg_note (jump_insn, REG_BR_PROB, 0);
  if (note)
    {
      int prob = INTVAL (XEXP (note, 0));

      if (prob >= REG_BR_PROB_BASE * 9 / 10)
	return 2;
      else if (prob >= REG_BR_PROB_BASE / 2)
	return 1;
      else if (prob >= REG_BR_PROB_BASE / 10)
	return 0;
      else
	return -1;
    }

  /* Look at the relative rarities of the fallthrough and destination.  If
     they differ, we can predict the branch that way.  */
  rare_dest = rare_destination (target_label);
  rare_fallthrough = rare_destination (NEXT_INSN (jump_insn));

  switch (rare_fallthrough - rare_dest)
    {
    case -2:
      return -1;
    case -1:
      return 0;
    case 0:
      break;
    case 1:
      return 1;
    case 2:
      return 2;
    }

  /* If we couldn't figure out what this jump was, assume it won't be
     taken.  This should be rare.  */
  if (condition == 0)
    return 0;

  /* Predict backward branches as usually taken, forward branches as usually
     not taken.  If we don't know whether this is forward or backward, assume
     the branch will be taken, since most are.  */
  return (target_label == 0 || INSN_UID (jump_insn) > max_uid
	  || INSN_UID (target_label) > max_uid
	  || (uid_to_ruid[INSN_UID (jump_insn)]
	      > uid_to_ruid[INSN_UID (target_label)]));
}
/* Return the condition under which INSN will branch to TARGET.  If TARGET
   is zero, return the condition under which INSN will return.  If INSN is
   an unconditional branch, return const_true_rtx.  If INSN isn't a simple
   type of jump, or it doesn't go to TARGET, return 0.  */
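
/* For example (an rtl sketch of the shapes handled below), a conditional
   jump to TARGET typically has the pattern

     (set (pc) (if_then_else (eq (reg) (const_int 0))
			     (label_ref TARGET)
			     (pc)))

   while the reversed form puts (pc) in the first arm and the label in the
   second, in which case the condition must be reversed.  */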
static rtx
get_branch_condition (rtx insn, rtx target)
{
  rtx pat = PATTERN (insn);
  rtx src;

  if (condjump_in_parallel_p (insn))
    pat = XVECEXP (pat, 0, 0);

  if (GET_CODE (pat) == RETURN)
    return target == 0 ? const_true_rtx : 0;

  else if (GET_CODE (pat) != SET || SET_DEST (pat) != pc_rtx)
    return 0;

  src = SET_SRC (pat);
  if (GET_CODE (src) == LABEL_REF && XEXP (src, 0) == target)
    return const_true_rtx;

  else if (GET_CODE (src) == IF_THEN_ELSE
	   && ((target == 0 && GET_CODE (XEXP (src, 1)) == RETURN)
	       || (GET_CODE (XEXP (src, 1)) == LABEL_REF
		   && XEXP (XEXP (src, 1), 0) == target))
	   && XEXP (src, 2) == pc_rtx)
    return XEXP (src, 0);

  else if (GET_CODE (src) == IF_THEN_ELSE
	   && ((target == 0 && GET_CODE (XEXP (src, 2)) == RETURN)
	       || (GET_CODE (XEXP (src, 2)) == LABEL_REF
		   && XEXP (XEXP (src, 2), 0) == target))
	   && XEXP (src, 1) == pc_rtx)
    {
      enum rtx_code rev;
      rev = reversed_comparison_code (XEXP (src, 0), insn);
      if (rev != UNKNOWN)
	return gen_rtx_fmt_ee (rev, GET_MODE (XEXP (src, 0)),
			       XEXP (XEXP (src, 0), 0),
			       XEXP (XEXP (src, 0), 1));
    }

  return 0;
}
/* Return nonzero if CONDITION is more strict than the condition of
   INSN, i.e., if INSN will always branch if CONDITION is true.  */

static int
condition_dominates_p (rtx condition, rtx insn)
{
  rtx other_condition = get_branch_condition (insn, JUMP_LABEL (insn));
  enum rtx_code code = GET_CODE (condition);
  enum rtx_code other_code;

  if (rtx_equal_p (condition, other_condition)
      || other_condition == const_true_rtx)
    return 1;

  else if (condition == const_true_rtx || other_condition == 0)
    return 0;

  other_code = GET_CODE (other_condition);
  if (GET_RTX_LENGTH (code) != 2 || GET_RTX_LENGTH (other_code) != 2
      || ! rtx_equal_p (XEXP (condition, 0), XEXP (other_condition, 0))
      || ! rtx_equal_p (XEXP (condition, 1), XEXP (other_condition, 1)))
    return 0;

  return comparison_dominates_p (code, other_code);
}
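
/* For instance (illustrative): if CONDITION is (gt (reg) (const_int 0))
   and INSN branches on (ge (reg) (const_int 0)) with the same operands,
   the dominance holds, since x > 0 implies x >= 0; comparison_dominates_p
   encodes exactly that relation between the two codes.  */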
/* Return nonzero if redirecting JUMP to NEWLABEL does not invalidate
   any insns already in the delay slot of JUMP.  */

static int
redirect_with_delay_slots_safe_p (rtx jump, rtx newlabel, rtx seq)
{
  int flags, i;
  rtx pat = PATTERN (seq);

  /* Make sure all the delay slots of this jump would still
     be valid after threading the jump.  If they are still
     valid, then return nonzero.  */

  flags = get_jump_flags (jump, newlabel);
  for (i = 1; i < XVECLEN (pat, 0); i++)
    if (! (
#ifdef ANNUL_IFFALSE_SLOTS
	   (INSN_ANNULLED_BRANCH_P (jump)
	    && INSN_FROM_TARGET_P (XVECEXP (pat, 0, i)))
	   ? eligible_for_annul_false (jump, i - 1,
				       XVECEXP (pat, 0, i), flags) :
#endif
#ifdef ANNUL_IFTRUE_SLOTS
	   (INSN_ANNULLED_BRANCH_P (jump)
	    && ! INSN_FROM_TARGET_P (XVECEXP (pat, 0, i)))
	   ? eligible_for_annul_true (jump, i - 1,
				      XVECEXP (pat, 0, i), flags) :
#endif
	   eligible_for_delay (jump, i - 1, XVECEXP (pat, 0, i), flags)))
      break;

  return (i == XVECLEN (pat, 0));
}
/* Return nonzero if redirecting JUMP to NEWLABEL does not invalidate
   any insns we wish to place in the delay slot of JUMP.  */

static int
redirect_with_delay_list_safe_p (rtx jump, rtx newlabel, rtx delay_list)
{
  int flags, i;
  rtx li;

  /* Make sure all the insns in DELAY_LIST would still be
     valid after threading the jump.  If they are still
     valid, then return nonzero.  */

  flags = get_jump_flags (jump, newlabel);
  for (li = delay_list, i = 0; li; li = XEXP (li, 1), i++)
    if (! (
#ifdef ANNUL_IFFALSE_SLOTS
	   (INSN_ANNULLED_BRANCH_P (jump)
	    && INSN_FROM_TARGET_P (XEXP (li, 0)))
	   ? eligible_for_annul_false (jump, i, XEXP (li, 0), flags) :
#endif
#ifdef ANNUL_IFTRUE_SLOTS
	   (INSN_ANNULLED_BRANCH_P (jump)
	    && ! INSN_FROM_TARGET_P (XEXP (li, 0)))
	   ? eligible_for_annul_true (jump, i, XEXP (li, 0), flags) :
#endif
	   eligible_for_delay (jump, i, XEXP (li, 0), flags)))
      break;

  return (li == NULL);
}
/* DELAY_LIST is a list of insns that have already been placed into delay
   slots.  See if all of them have the same annulling status as ANNUL_TRUE_P.
   If not, return 0; otherwise return 1.  */

static int
check_annul_list_true_false (int annul_true_p, rtx delay_list)
{
  rtx temp;

  for (temp = delay_list; temp; temp = XEXP (temp, 1))
    {
      rtx trial = XEXP (temp, 0);

      if ((annul_true_p && INSN_FROM_TARGET_P (trial))
	  || (!annul_true_p && !INSN_FROM_TARGET_P (trial)))
	return 0;
    }

  return 1;
}
/* INSN branches to an insn whose pattern SEQ is a SEQUENCE.  Given that
   the condition tested by INSN is CONDITION and the resources shown in
   OTHER_NEEDED are needed after INSN, see whether INSN can take all the insns
   from SEQ's delay list, in addition to whatever insns it may execute
   (in DELAY_LIST).  SETS and NEEDED denote resources already set and
   needed while searching for delay slot insns.  Return the concatenated
   delay list if possible, otherwise, return 0.

   SLOTS_TO_FILL is the total number of slots required by INSN, and
   PSLOTS_FILLED points to the number filled so far (also the number of
   insns in DELAY_LIST).  It is updated with the number that have been
   filled from the SEQUENCE, if any.

   PANNUL_P points to a nonzero value if we already know that we need
   to annul INSN.  If this routine determines that annulling is needed,
   it may set that value nonzero.

   PNEW_THREAD points to a location that is to receive the place at which
   execution should continue.  */
static rtx
steal_delay_list_from_target (rtx insn, rtx condition, rtx seq,
			      rtx delay_list, struct resources *sets,
			      struct resources *needed,
			      struct resources *other_needed,
			      int slots_to_fill, int *pslots_filled,
			      int *pannul_p, rtx *pnew_thread)
{
  rtx temp;
  int slots_remaining = slots_to_fill - *pslots_filled;
  int total_slots_filled = *pslots_filled;
  rtx new_delay_list = 0;
  int must_annul = *pannul_p;
  int used_annul = 0;
  int i;
  struct resources cc_set;

  /* We can't do anything if there are more delay slots in SEQ than we
     can handle, or if we don't know that it will be a taken branch.
     We know that it will be a taken branch if it is either an unconditional
     branch or a conditional branch with a stricter branch condition.

     Also, exit if the branch has more than one set, since then it is computing
     other results that can't be ignored, e.g. the HPPA mov&branch instruction.
     ??? It may be possible to move other sets into INSN in addition to
     moving the instructions in the delay slots.

     We can not steal the delay list if one of the instructions in the
     current delay_list modifies the condition codes and the jump in the
     sequence is a conditional jump.  We can not do this because we can
     not change the direction of the jump because the condition codes
     will effect the direction of the jump in the sequence.  */

  CLEAR_RESOURCE (&cc_set);
  for (temp = delay_list; temp; temp = XEXP (temp, 1))
    {
      rtx trial = XEXP (temp, 0);

      mark_set_resources (trial, &cc_set, 0, MARK_SRC_DEST_CALL);
      if (insn_references_resource_p (XVECEXP (seq , 0, 0), &cc_set, 0))
	return delay_list;
    }

  if (XVECLEN (seq, 0) - 1 > slots_remaining
      || ! condition_dominates_p (condition, XVECEXP (seq, 0, 0))
      || ! single_set (XVECEXP (seq, 0, 0)))
    return delay_list;

#ifdef MD_CAN_REDIRECT_BRANCH
  /* On some targets, branches with delay slots can have a limited
     displacement.  Give the back end a chance to tell us we can't do
     this.  */
  if (! MD_CAN_REDIRECT_BRANCH (insn, XVECEXP (seq, 0, 0)))
    return delay_list;
#endif

  for (i = 1; i < XVECLEN (seq, 0); i++)
    {
      rtx trial = XVECEXP (seq, 0, i);
      int flags;

      if (insn_references_resource_p (trial, sets, 0)
	  || insn_sets_resource_p (trial, needed, 0)
	  || insn_sets_resource_p (trial, sets, 0)
#ifdef HAVE_cc0
	  /* If TRIAL sets CC0, we can't copy it, so we can't steal this
	     delay list.  */
	  || find_reg_note (trial, REG_CC_USER, NULL_RTX)
#endif
	  /* If TRIAL is from the fallthrough code of an annulled branch insn
	     in SEQ, we cannot use it.  */
	  || (INSN_ANNULLED_BRANCH_P (XVECEXP (seq, 0, 0))
	      && ! INSN_FROM_TARGET_P (trial)))
	return delay_list;

      /* If this insn was already done (usually in a previous delay slot),
	 pretend we put it in our delay slot.  */
      if (redundant_insn (trial, insn, new_delay_list))
	continue;

      /* We will end up re-vectoring this branch, so compute flags
	 based on jumping to the new label.  */
      flags = get_jump_flags (insn, JUMP_LABEL (XVECEXP (seq, 0, 0)));

      if (! must_annul
	  && ((condition == const_true_rtx
	       || (! insn_sets_resource_p (trial, other_needed, 0)
		   && ! may_trap_or_fault_p (PATTERN (trial)))))
	  ? eligible_for_delay (insn, total_slots_filled, trial, flags)
	  : (must_annul || (delay_list == NULL && new_delay_list == NULL))
	    && (must_annul = 1,
		check_annul_list_true_false (0, delay_list)
		&& check_annul_list_true_false (0, new_delay_list)
		&& eligible_for_annul_false (insn, total_slots_filled,
					     trial, flags)))
	{
	  if (must_annul)
	    used_annul = 1;
	  temp = copy_rtx (trial);
	  INSN_FROM_TARGET_P (temp) = 1;
	  new_delay_list = add_to_delay_list (temp, new_delay_list);
	  total_slots_filled++;

	  if (--slots_remaining == 0)
	    break;
	}
      else
	return delay_list;
    }

  /* Show the place to which we will be branching.  */
  *pnew_thread = next_active_insn (JUMP_LABEL (XVECEXP (seq, 0, 0)));

  /* Add any new insns to the delay list and update the count of the
     number of slots filled.  */
  *pslots_filled = total_slots_filled;
  if (used_annul)
    *pannul_p = 1;

  if (delay_list == 0)
    return new_delay_list;

  for (temp = new_delay_list; temp; temp = XEXP (temp, 1))
    delay_list = add_to_delay_list (XEXP (temp, 0), delay_list);

  return delay_list;
}
/* Similar to steal_delay_list_from_target except that SEQ is on the
   fallthrough path of INSN.  Here we only do something if the delay insn
   of SEQ is an unconditional branch.  In that case we steal its delay slot
   for INSN since unconditional branches are much easier to fill.  */

static rtx
steal_delay_list_from_fallthrough (rtx insn, rtx condition, rtx seq,
				   rtx delay_list, struct resources *sets,
				   struct resources *needed,
				   struct resources *other_needed,
				   int slots_to_fill, int *pslots_filled,
				   int *pannul_p)
{
  int i;
  int flags;
  int must_annul = *pannul_p;
  int used_annul = 0;

  flags = get_jump_flags (insn, JUMP_LABEL (insn));

  /* We can't do anything if SEQ's delay insn isn't an
     unconditional branch.  */

  if (! simplejump_p (XVECEXP (seq, 0, 0))
      && GET_CODE (PATTERN (XVECEXP (seq, 0, 0))) != RETURN)
    return delay_list;

  for (i = 1; i < XVECLEN (seq, 0); i++)
    {
      rtx trial = XVECEXP (seq, 0, i);

      /* If TRIAL sets CC0, stealing it will move it too far from the use
	 of CC0.  */
      if (insn_references_resource_p (trial, sets, 0)
	  || insn_sets_resource_p (trial, needed, 0)
	  || insn_sets_resource_p (trial, sets, 0)
#ifdef HAVE_cc0
	  || sets_cc0_p (PATTERN (trial))
#endif
	  )

	break;

      /* If this insn was already done, we don't need it.  */
      if (redundant_insn (trial, insn, delay_list))
	{
	  delete_from_delay_slot (trial);
	  continue;
	}

      if (! must_annul
	  && ((condition == const_true_rtx
	       || (! insn_sets_resource_p (trial, other_needed, 0)
		   && ! may_trap_or_fault_p (PATTERN (trial)))))
	  ? eligible_for_delay (insn, *pslots_filled, trial, flags)
	  : (must_annul || delay_list == NULL) && (must_annul = 1,
	     check_annul_list_true_false (1, delay_list)
	     && eligible_for_annul_true (insn, *pslots_filled, trial, flags)))
	{
	  if (must_annul)
	    used_annul = 1;
	  delete_from_delay_slot (trial);
	  delay_list = add_to_delay_list (trial, delay_list);

	  if (++(*pslots_filled) == slots_to_fill)
	    break;
	}
      else
	break;
    }

  if (used_annul)
    *pannul_p = 1;
  return delay_list;
}
/* Try merging insns starting at THREAD which match exactly the insns in
   INSN's delay list.

   If all insns were matched and the insn was previously annulling, the
   annul bit will be cleared.

   For each insn that is merged, if the branch is or will be non-annulling,
   we delete the merged insn.  */

static void
try_merge_delay_insns (rtx insn, rtx thread)
{
  rtx trial, next_trial;
  rtx delay_insn = XVECEXP (PATTERN (insn), 0, 0);
  int annul_p = INSN_ANNULLED_BRANCH_P (delay_insn);
  int slot_number = 1;
  int num_slots = XVECLEN (PATTERN (insn), 0);
  rtx next_to_match = XVECEXP (PATTERN (insn), 0, slot_number);
  struct resources set, needed;
  rtx merged_insns = 0;
  int i;
  int flags;

  flags = get_jump_flags (delay_insn, JUMP_LABEL (delay_insn));

  CLEAR_RESOURCE (&needed);
  CLEAR_RESOURCE (&set);

  /* If this is not an annulling branch, take into account anything needed in
     INSN's delay slot.  This prevents two increments from being incorrectly
     folded into one.  If we are annulling, this would be the correct
     thing to do.  (The alternative, looking at things set in NEXT_TO_MATCH
     will essentially disable this optimization.  This method is somewhat of
     a kludge, but I don't see a better way.)  */
  if (! annul_p)
    for (i = 1 ; i < num_slots; i++)
      if (XVECEXP (PATTERN (insn), 0, i))
	mark_referenced_resources (XVECEXP (PATTERN (insn), 0, i), &needed, 1);

  for (trial = thread; !stop_search_p (trial, 1); trial = next_trial)
    {
      rtx pat = PATTERN (trial);
      rtx oldtrial = trial;

      next_trial = next_nonnote_insn (trial);

      /* TRIAL must be a CALL_INSN or INSN.  Skip USE and CLOBBER.  */
      if (NONJUMP_INSN_P (trial)
	  && (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER))
	continue;

      if (GET_CODE (next_to_match) == GET_CODE (trial)
#ifdef HAVE_cc0
	  /* We can't share an insn that sets cc0.  */
	  && ! sets_cc0_p (pat)
#endif
	  && ! insn_references_resource_p (trial, &set, 1)
	  && ! insn_sets_resource_p (trial, &set, 1)
	  && ! insn_sets_resource_p (trial, &needed, 1)
	  && (trial = try_split (pat, trial, 0)) != 0
	  /* Update next_trial, in case try_split succeeded.  */
	  && (next_trial = next_nonnote_insn (trial))
	  /* Likewise THREAD.  */
	  && (thread = oldtrial == thread ? trial : thread)
	  && rtx_equal_p (PATTERN (next_to_match), PATTERN (trial))
	  /* Have to test this condition if annul condition is different
	     from (and less restrictive than) non-annulling one.  */
	  && eligible_for_delay (delay_insn, slot_number - 1, trial, flags))
	{
	  if (! annul_p)
	    {
	      update_block (trial, thread);
	      if (trial == thread)
		thread = next_active_insn (thread);

	      delete_related_insns (trial);
	      INSN_FROM_TARGET_P (next_to_match) = 0;
	    }
	  else
	    merged_insns = gen_rtx_INSN_LIST (VOIDmode, trial, merged_insns);

	  if (++slot_number == num_slots)
	    break;

	  next_to_match = XVECEXP (PATTERN (insn), 0, slot_number);
	}

      mark_set_resources (trial, &set, 0, MARK_SRC_DEST_CALL);
      mark_referenced_resources (trial, &needed, 1);
    }

  /* See if we stopped on a filled insn.  If we did, try to see if its
     delay slots match.  */
  if (slot_number != num_slots
      && trial && NONJUMP_INSN_P (trial)
      && GET_CODE (PATTERN (trial)) == SEQUENCE
      && ! INSN_ANNULLED_BRANCH_P (XVECEXP (PATTERN (trial), 0, 0)))
    {
      rtx pat = PATTERN (trial);
      rtx filled_insn = XVECEXP (pat, 0, 0);

      /* Account for resources set/needed by the filled insn.  */
      mark_set_resources (filled_insn, &set, 0, MARK_SRC_DEST_CALL);
      mark_referenced_resources (filled_insn, &needed, 1);

      for (i = 1; i < XVECLEN (pat, 0); i++)
	{
	  rtx dtrial = XVECEXP (pat, 0, i);

	  if (! insn_references_resource_p (dtrial, &set, 1)
	      && ! insn_sets_resource_p (dtrial, &set, 1)
	      && ! insn_sets_resource_p (dtrial, &needed, 1)
#ifdef HAVE_cc0
	      && ! sets_cc0_p (PATTERN (dtrial))
#endif
	      && rtx_equal_p (PATTERN (next_to_match), PATTERN (dtrial))
	      && eligible_for_delay (delay_insn, slot_number - 1, dtrial, flags))
	    {
	      if (! annul_p)
		{
		  rtx new;

		  update_block (dtrial, thread);
		  new = delete_from_delay_slot (dtrial);
		  if (INSN_DELETED_P (thread))
		    thread = new;
		  INSN_FROM_TARGET_P (next_to_match) = 0;
		}
	      else
		merged_insns = gen_rtx_INSN_LIST (SImode, dtrial,
						  merged_insns);

	      if (++slot_number == num_slots)
		break;

	      next_to_match = XVECEXP (PATTERN (insn), 0, slot_number);
	    }
	  else
	    {
	      /* Keep track of the set/referenced resources for the delay
		 slots of any trial insns we encounter.  */
	      mark_set_resources (dtrial, &set, 0, MARK_SRC_DEST_CALL);
	      mark_referenced_resources (dtrial, &needed, 1);
	    }
	}
    }

  /* If all insns in the delay slot have been matched and we were previously
     annulling the branch, we need not any more.  In that case delete all the
     merged insns.  Also clear the INSN_FROM_TARGET_P bit of each insn in
     the delay list so that we know that it isn't only being used at the
     target.  */
  if (slot_number == num_slots && annul_p)
    {
      for (; merged_insns; merged_insns = XEXP (merged_insns, 1))
	{
	  if (GET_MODE (merged_insns) == SImode)
	    {
	      rtx new;

	      update_block (XEXP (merged_insns, 0), thread);
	      new = delete_from_delay_slot (XEXP (merged_insns, 0));
	      if (INSN_DELETED_P (thread))
		thread = new;
	    }
	  else
	    {
	      update_block (XEXP (merged_insns, 0), thread);
	      delete_related_insns (XEXP (merged_insns, 0));
	    }
	}

      INSN_ANNULLED_BRANCH_P (delay_insn) = 0;

      for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
	INSN_FROM_TARGET_P (XVECEXP (PATTERN (insn), 0, i)) = 0;
    }
}
/* See if INSN is redundant with an insn in front of TARGET.  Often this
   is called when INSN is a candidate for a delay slot of TARGET.
   DELAY_LIST are insns that will be placed in delay slots of TARGET in front
   of INSN.  Often INSN will be redundant with an insn in a delay slot of
   some previous insn.  This happens when we have a series of branches to the
   same label; in that case the first insn at the target might want to go
   into each of the delay slots.

   If we are not careful, this routine can take up a significant fraction
   of the total compilation time (4%), but only wins rarely.  Hence we
   speed this routine up by making two passes.  The first pass goes back
   until it hits a label and sees if it finds an insn with an identical
   pattern.  Only in this (relatively rare) event does it check for
   data conflicts.

   We do not split insns we encounter.  This could cause us not to find a
   redundant insn, but the cost of splitting seems greater than the possible
   gain in rare cases.  */
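
/* A small illustration (sketch only): if two branches both target label L
   and the first insn at L was already copied into the delay slot of the
   first branch, that same insn is redundant when filling the second
   branch's slot and can be treated as already placed.  */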
static rtx
redundant_insn (rtx insn, rtx target, rtx delay_list)
{
  rtx target_main = target;
  rtx ipat = PATTERN (insn);
  rtx trial, pat;
  struct resources needed, set;
  int i;
  unsigned insns_to_search;

  /* If INSN has any REG_UNUSED notes, it can't match anything since we
     are allowed to not actually assign to such a register.  */
  if (find_reg_note (insn, REG_UNUSED, NULL_RTX) != 0)
    return 0;

  /* Scan backwards looking for a match.  */
  for (trial = PREV_INSN (target),
	 insns_to_search = MAX_DELAY_SLOT_INSN_SEARCH;
       trial && insns_to_search > 0;
       trial = PREV_INSN (trial), --insns_to_search)
    {
      if (LABEL_P (trial))
	return 0;

      if (! INSN_P (trial))
	continue;

      pat = PATTERN (trial);
      if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
	continue;

      if (GET_CODE (pat) == SEQUENCE)
	{
	  /* Stop for a CALL and its delay slots because it is difficult to
	     track its resource needs correctly.  */
	  if (CALL_P (XVECEXP (pat, 0, 0)))
	    return 0;

	  /* Stop for an INSN or JUMP_INSN with delayed effects and its delay
	     slots because it is difficult to track its resource needs
	     correctly.  */

#ifdef INSN_SETS_ARE_DELAYED
	  if (INSN_SETS_ARE_DELAYED (XVECEXP (pat, 0, 0)))
	    return 0;
#endif

#ifdef INSN_REFERENCES_ARE_DELAYED
	  if (INSN_REFERENCES_ARE_DELAYED (XVECEXP (pat, 0, 0)))
	    return 0;
#endif

	  /* See if any of the insns in the delay slot match, updating
	     resource requirements as we go.  */
	  for (i = XVECLEN (pat, 0) - 1; i > 0; i--)
	    if (GET_CODE (XVECEXP (pat, 0, i)) == GET_CODE (insn)
		&& rtx_equal_p (PATTERN (XVECEXP (pat, 0, i)), ipat)
		&& ! find_reg_note (XVECEXP (pat, 0, i), REG_UNUSED, NULL_RTX))
	      break;

	  /* If found a match, exit this loop early.  */
	  if (i > 0)
	    break;
	}

      else if (GET_CODE (trial) == GET_CODE (insn) && rtx_equal_p (pat, ipat)
	       && ! find_reg_note (trial, REG_UNUSED, NULL_RTX))
	break;
    }

  /* If we didn't find an insn that matches, return 0.  */
  if (trial == 0)
    return 0;

  /* See what resources this insn sets and needs.  If they overlap, or
     if this insn references CC0, it can't be redundant.  */

  CLEAR_RESOURCE (&needed);
  CLEAR_RESOURCE (&set);
  mark_set_resources (insn, &set, 0, MARK_SRC_DEST_CALL);
  mark_referenced_resources (insn, &needed, 1);

  /* If TARGET is a SEQUENCE, get the main insn.  */
  if (NONJUMP_INSN_P (target) && GET_CODE (PATTERN (target)) == SEQUENCE)
    target_main = XVECEXP (PATTERN (target), 0, 0);

  if (resource_conflicts_p (&needed, &set)
#ifdef HAVE_cc0
      || reg_mentioned_p (cc0_rtx, ipat)
#endif
      /* The insn requiring the delay may not set anything needed or set by
	 INSN.  */
      || insn_sets_resource_p (target_main, &needed, 1)
      || insn_sets_resource_p (target_main, &set, 1))
    return 0;

  /* Insns we pass may not set either NEEDED or SET, so merge them for
     simpler tests.  */
  needed.memory |= set.memory;
  needed.unch_memory |= set.unch_memory;
  IOR_HARD_REG_SET (needed.regs, set.regs);

  /* This insn isn't redundant if it conflicts with an insn that either is
     or will be in a delay slot of TARGET.  */

  while (delay_list)
    {
      if (insn_sets_resource_p (XEXP (delay_list, 0), &needed, 1))
	return 0;
      delay_list = XEXP (delay_list, 1);
    }

  if (NONJUMP_INSN_P (target) && GET_CODE (PATTERN (target)) == SEQUENCE)
    for (i = 1; i < XVECLEN (PATTERN (target), 0); i++)
      if (insn_sets_resource_p (XVECEXP (PATTERN (target), 0, i), &needed, 1))
	return 0;

  /* Scan backwards until we reach a label or an insn that uses something
     INSN sets or sets something insn uses or sets.  */

  for (trial = PREV_INSN (target),
	 insns_to_search = MAX_DELAY_SLOT_INSN_SEARCH;
       trial && !LABEL_P (trial) && insns_to_search > 0;
       trial = PREV_INSN (trial), --insns_to_search)
    {
      if (!INSN_P (trial))
	continue;

      pat = PATTERN (trial);
      if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
	continue;

      if (GET_CODE (pat) == SEQUENCE)
	{
	  /* If this is a CALL_INSN and its delay slots, it is hard to track
	     the resource needs properly, so give up.  */
	  if (CALL_P (XVECEXP (pat, 0, 0)))
	    return 0;

	  /* If this is an INSN or JUMP_INSN with delayed effects, it
	     is hard to track the resource needs properly, so give up.  */

#ifdef INSN_SETS_ARE_DELAYED
	  if (INSN_SETS_ARE_DELAYED (XVECEXP (pat, 0, 0)))
	    return 0;
#endif

#ifdef INSN_REFERENCES_ARE_DELAYED
	  if (INSN_REFERENCES_ARE_DELAYED (XVECEXP (pat, 0, 0)))
	    return 0;
#endif

	  /* See if any of the insns in the delay slot match, updating
	     resource requirements as we go.  */
	  for (i = XVECLEN (pat, 0) - 1; i > 0; i--)
	    {
	      rtx candidate = XVECEXP (pat, 0, i);

	      /* If an insn will be annulled if the branch is false, it isn't
		 considered as a possible duplicate insn.  */
	      if (rtx_equal_p (PATTERN (candidate), ipat)
		  && ! (INSN_ANNULLED_BRANCH_P (XVECEXP (pat, 0, 0))
			&& INSN_FROM_TARGET_P (candidate)))
		{
		  /* Show that this insn will be used in the sequel.  */
		  INSN_FROM_TARGET_P (candidate) = 0;
		  return candidate;
		}

	      /* Unless this is an annulled insn from the target of a branch,
		 we must stop if it sets anything needed or set by INSN.  */
	      if ((! INSN_ANNULLED_BRANCH_P (XVECEXP (pat, 0, 0))
		   || ! INSN_FROM_TARGET_P (candidate))
		  && insn_sets_resource_p (candidate, &needed, 1))
		return 0;
	    }

	  /* If the insn requiring the delay slot conflicts with INSN, we
	     must stop.  */
	  if (insn_sets_resource_p (XVECEXP (pat, 0, 0), &needed, 1))
	    return 0;
	}
      else
	{
	  /* See if TRIAL is the same as INSN.  */
	  pat = PATTERN (trial);
	  if (rtx_equal_p (pat, ipat))
	    return trial;

	  /* Can't go any further if TRIAL conflicts with INSN.  */
	  if (insn_sets_resource_p (trial, &needed, 1))
	    return 0;
	}
    }

  return 0;
}
/* Return 1 if THREAD can only be executed in one way.  If LABEL is nonzero,
   it is the target of the branch insn being scanned.  If ALLOW_FALLTHROUGH
   is nonzero, we are allowed to fall into this thread; otherwise, we are
   not.

   If LABEL is used more than once or we pass a label other than LABEL before
   finding an active insn, we do not own this thread.  */

static int
own_thread_p (rtx thread, rtx label, int allow_fallthrough)
{
  rtx active_insn;
  rtx insn;

  /* We don't own the function end.  */
  if (thread == 0)
    return 0;

  /* Get the first active insn, or THREAD, if it is an active insn.  */
  active_insn = next_active_insn (PREV_INSN (thread));

  for (insn = thread; insn != active_insn; insn = NEXT_INSN (insn))
    if (LABEL_P (insn)
	&& (insn != label || LABEL_NUSES (insn) != 1))
      return 0;

  if (allow_fallthrough)
    return 1;

  /* Ensure that we reach a BARRIER before any insn or label.  */
  for (insn = prev_nonnote_insn (thread);
       insn == 0 || !BARRIER_P (insn);
       insn = prev_nonnote_insn (insn))
    if (insn == 0
	|| LABEL_P (insn)
	|| (NONJUMP_INSN_P (insn)
	    && GET_CODE (PATTERN (insn)) != USE
	    && GET_CODE (PATTERN (insn)) != CLOBBER))
      return 0;

  return 1;
}
/* Called when INSN is being moved from a location near the target of a jump.
   We leave a marker of the form (use (INSN)) immediately in front
   of WHERE for mark_target_live_regs.  These markers will be deleted when
   reorg finishes.

   We used to try to update the live status of registers if WHERE is at
   the start of a basic block, but that can't work since we may remove a
   BARRIER in relax_delay_slots.  */

static void
update_block (rtx insn, rtx where)
{
  /* Ignore if this was in a delay slot and it came from the target of
     a branch.  */
  if (INSN_FROM_TARGET_P (insn))
    return;

  emit_insn_before (gen_rtx_USE (VOIDmode, insn), where);

  /* INSN might be making a value live in a block where it didn't use to
     be.  So recompute liveness information for this block.  */

  incr_ticks_for_insn (insn);
}
/* Similar to REDIRECT_JUMP except that we update the BB_TICKS entry for
   the basic block containing the jump.  */

static int
reorg_redirect_jump (rtx jump, rtx nlabel)
{
  incr_ticks_for_insn (jump);
  return redirect_jump (jump, nlabel, 1);
}
/* Called when INSN is being moved forward into a delay slot of DELAYED_INSN.
   We check every instruction between INSN and DELAYED_INSN for REG_DEAD notes
   that reference values used in INSN.  If we find one, then we move the
   REG_DEAD note to INSN.

   This is needed to handle the case where a later insn (after INSN) has a
   REG_DEAD note for a register used by INSN, and this later insn subsequently
   gets moved before a CODE_LABEL because it is a redundant insn.  In this
   case, mark_target_live_regs may be confused into thinking the register
   is dead because it sees a REG_DEAD note immediately before a CODE_LABEL.  */

static void
update_reg_dead_notes (rtx insn, rtx delayed_insn)
{
  rtx p, link, next;

  for (p = next_nonnote_insn (insn); p != delayed_insn;
       p = next_nonnote_insn (p))
    for (link = REG_NOTES (p); link; link = next)
      {
	next = XEXP (link, 1);

	if (REG_NOTE_KIND (link) != REG_DEAD
	    || !REG_P (XEXP (link, 0)))
	  continue;

	if (reg_referenced_p (XEXP (link, 0), PATTERN (insn)))
	  {
	    /* Move the REG_DEAD note from P to INSN.  */
	    remove_note (p, link);
	    XEXP (link, 1) = REG_NOTES (insn);
	    REG_NOTES (insn) = link;
	  }
      }
}
/* Called when an insn redundant with start_insn is deleted.  If there
   is a REG_DEAD note for the target of start_insn between start_insn
   and stop_insn, then the REG_DEAD note needs to be deleted since the
   value no longer dies there.

   If the REG_DEAD note isn't deleted, then mark_target_live_regs may be
   confused into thinking the register is dead.  */

static void
fix_reg_dead_note (rtx start_insn, rtx stop_insn)
{
  rtx p, link, next;

  for (p = next_nonnote_insn (start_insn); p != stop_insn;
       p = next_nonnote_insn (p))
    for (link = REG_NOTES (p); link; link = next)
      {
	next = XEXP (link, 1);

	if (REG_NOTE_KIND (link) != REG_DEAD
	    || !REG_P (XEXP (link, 0)))
	  continue;

	if (reg_set_p (XEXP (link, 0), PATTERN (start_insn)))
	  {
	    remove_note (p, link);
	    return;
	  }
      }
}
1950 /* Delete any REG_UNUSED notes that exist on INSN but not on REDUNDANT_INSN.
1952 This handles the case of udivmodXi4 instructions which optimize their
1953 output depending on whether any REG_UNUSED notes are present.
1954 We must make sure that INSN calculates as many results as REDUNDANT_INSN
1955 does.  */
1958 update_reg_unused_notes (rtx insn, rtx redundant_insn)
1962 for (link = REG_NOTES (insn); link; link = next)
1964 next = XEXP (link, 1);
1966 if (REG_NOTE_KIND (link) != REG_UNUSED
1967 || !REG_P (XEXP (link, 0)))
1970 if (! find_regno_note (redundant_insn, REG_UNUSED,
1971 REGNO (XEXP (link, 0))))
1972 remove_note (insn, link);
1976 /* Return the label before INSN, or put a new label there. */
1979 get_label_before (rtx insn)
1983 /* Find an existing label at this point
1984 or make a new one if there is none. */
1985 label = prev_nonnote_insn (insn);
1987 if (label == 0 || !LABEL_P (label))
1989 rtx prev = PREV_INSN (insn);
1991 label = gen_label_rtx ();
1992 emit_label_after (label, prev);
1993 LABEL_NUSES (label) = 0;
1998 /* Scan a function looking for insns that need a delay slot and find insns to
1999 put into the delay slot.
2001 NON_JUMPS_P is nonzero if we are to only try to fill non-jump insns (such
2002 as calls). We do these first since we don't want jump insns (that are
2003 easier to fill) to get the only insns that could be used for non-jump insns.
2004 When it is zero, only try to fill JUMP_INSNs.
2006 When slots are filled in this manner, the insns (including the
2007 delay_insn) are put together in a SEQUENCE rtx. In this fashion,
2008 it is possible to tell whether a delay slot has really been filled
2009 or not. `final' knows how to deal with this, by communicating
2010 through FINAL_SEQUENCE. */
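/* For illustration, the assumed shape of a filled slot (cf.
   emit_delay_sequence): a jump with one filled delay slot becomes

     (insn (sequence [
        (jump_insn ...)         ;; the insn that needed the slot
        (insn ...)              ;; the insn filling the slot
      ]))

   while an insn whose slots stayed empty is left unwrapped; that is how
   `final' tells the two cases apart.  */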
2013 fill_simple_delay_slots (int non_jumps_p)
2015 rtx insn, pat, trial, next_trial;
2017 int num_unfilled_slots = unfilled_slots_next - unfilled_slots_base;
2018 struct resources needed, set;
2019 int slots_to_fill, slots_filled;
2022 for (i = 0; i < num_unfilled_slots; i++)
2025 /* Get the next insn to fill. If it has already had any slots assigned,
2026 we can't do anything with it. Maybe we'll improve this later. */
2028 insn = unfilled_slots_base[i];
2030 || INSN_DELETED_P (insn)
2031 || (NONJUMP_INSN_P (insn)
2032 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2033 || (JUMP_P (insn) && non_jumps_p)
2034 || (!JUMP_P (insn) && ! non_jumps_p))
2037 /* It may have been that this insn used to need delay slots, but
2038 now doesn't; ignore in that case. This can happen, for example,
2039 on the HP PA RISC, where the number of delay slots depends on
2040 what insns are nearby. */
2041 slots_to_fill = num_delay_slots (insn);
2043 /* Some machine descriptions define instructions to have
2044 delay slots only in certain circumstances which may depend on
2045 nearby insns (which change due to reorg's actions).
2047 For example, the PA port normally has delay slots for unconditional
2048 jumps.
2050 However, the PA port claims such jumps do not have a delay slot
2051 if they are immediate successors of certain CALL_INSNs. This
2052 allows the port to favor filling the delay slot of the call with
2053 the unconditional jump. */
2054 if (slots_to_fill == 0)
2057 /* This insn needs, or can use, some delay slots. SLOTS_TO_FILL
2058 says how many.  After initialization, first try optimizing
2059 
2060 call _foo		call _foo
2061 nop			add %o7,.-L1,%o7
2062 b,a L1
2063 L1:
2065 If this case applies, the delay slot of the call is filled with
2066 the unconditional jump. This is done first to avoid having the
2067 delay slot of the call filled in the backward scan. Also, since
2068 the unconditional jump is likely to also have a delay slot, that
2069 insn must exist when it is subsequently scanned.
2071 This is tried on each insn with delay slots as some machines
2072 have insns which perform calls, but are not represented as
2073 CALL_INSNs.  */
2079 flags = get_jump_flags (insn, JUMP_LABEL (insn));
2081 flags = get_jump_flags (insn, NULL_RTX);
2083 if ((trial = next_active_insn (insn))
2085 && simplejump_p (trial)
2086 && eligible_for_delay (insn, slots_filled, trial, flags)
2087 && no_labels_between_p (insn, trial)
2088 && ! can_throw_internal (trial))
2092 delay_list = add_to_delay_list (trial, delay_list);
2094 /* TRIAL may have had its delay slot filled, then unfilled. When
2095 the delay slot is unfilled, TRIAL is placed back on the unfilled
2096 slots obstack. Unfortunately, it is placed on the end of the
2097 obstack, not in its original location. Therefore, we must search
2098 from entry i + 1 to the end of the unfilled slots obstack to
2099 try to find TRIAL.  */
2100 tmp = &unfilled_slots_base[i + 1];
2101 while (*tmp != trial && tmp != unfilled_slots_next)
2104 /* Remove the unconditional jump from consideration for delay slot
2105 filling and unthread it. */
2109 rtx next = NEXT_INSN (trial);
2110 rtx prev = PREV_INSN (trial);
2112 NEXT_INSN (prev) = next;
2114 PREV_INSN (next) = prev;
2118 /* Now, scan backwards from the insn to search for a potential
2119 delay-slot candidate. Stop searching when a label or jump is hit.
2121 For each candidate, if it is to go into the delay slot (moved
2122 forward in execution sequence), it must not need or set any resources
2123 that were set by later insns and must not set any resources that
2124 are needed for those insns.
2126 The delay slot insn itself sets resources unless it is a call
2127 (in which case the called routine, not the insn itself, is doing
2128 the set).  */
2130 if (slots_filled < slots_to_fill)
2132 CLEAR_RESOURCE (&needed);
2133 CLEAR_RESOURCE (&set);
2134 mark_set_resources (insn, &set, 0, MARK_SRC_DEST);
2135 mark_referenced_resources (insn, &needed, 0);
2137 for (trial = prev_nonnote_insn (insn); ! stop_search_p (trial, 1);
2140 next_trial = prev_nonnote_insn (trial);
2142 /* This must be an INSN or CALL_INSN. */
2143 pat = PATTERN (trial);
2145 /* USE and CLOBBER at this level are just for flow; ignore them.  */
2146 if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
2149 /* Check for resource conflict first, to avoid unnecessary
2150 splitting.  */
2151 if (! insn_references_resource_p (trial, &set, 1)
2152 && ! insn_sets_resource_p (trial, &set, 1)
2153 && ! insn_sets_resource_p (trial, &needed, 1)
2155 /* Can't separate set of cc0 from its use. */
2156 && ! (reg_mentioned_p (cc0_rtx, pat) && ! sets_cc0_p (pat))
2158 && ! can_throw_internal (trial))
2160 trial = try_split (pat, trial, 1);
2161 next_trial = prev_nonnote_insn (trial);
2162 if (eligible_for_delay (insn, slots_filled, trial, flags))
2164 /* In this case, we are searching backward, so if we
2165 find insns to put on the delay list, we want
2166 to put them at the head, rather than the
2167 tail, of the list. */
2169 update_reg_dead_notes (trial, insn);
2170 delay_list = gen_rtx_INSN_LIST (VOIDmode,
2172 update_block (trial, trial);
2173 delete_related_insns (trial);
2174 if (slots_to_fill == ++slots_filled)
2180 mark_set_resources (trial, &set, 0, MARK_SRC_DEST_CALL);
2181 mark_referenced_resources (trial, &needed, 1);
2185 /* If all needed slots haven't been filled, we come here. */
2187 /* Try to optimize case of jumping around a single insn. */
2188 #if defined(ANNUL_IFFALSE_SLOTS) || defined(ANNUL_IFTRUE_SLOTS)
2189 if (slots_filled != slots_to_fill
2192 && (condjump_p (insn) || condjump_in_parallel_p (insn)))
2194 delay_list = optimize_skip (insn);
2200 /* Try to get insns from beyond the insn needing the delay slot.
2201 These insns can neither set nor reference resources set in insns being
2202 skipped, cannot set resources in the insn being skipped, and, if this
2203 is a CALL_INSN (or a CALL_INSN is passed), cannot trap (because the
2204 call might not return).
2206 There used to be code which continued past the target label if
2207 we saw all uses of the target label. This code did not work,
2208 because it failed to account for some instructions which were
2209 both annulled and marked as from the target. This can happen as a
2210 result of optimize_skip. Since this code was redundant with
2211 fill_eager_delay_slots anyway, it was just deleted.  */
2213 if (slots_filled != slots_to_fill
2214 /* If this instruction could throw an exception which is
2215 caught in the same function, then it's not safe to fill
2216 the delay slot with an instruction from beyond this
2217 point.  For example, consider:
2218 
2219 int i = 2;
2220 
2221 try {
2222 f();
2223 i = 3;
2224 } catch (...) {}
2225 
2226 return i;
2227 
2228 Even though `i' is a local variable, we must be sure not
2229 to put `i = 3' in the delay slot if `f' might throw an
2230 exception.
2232 Presumably, we should also check to see if we could get
2233 back to this function via `setjmp'. */
2234 && ! can_throw_internal (insn)
2236 || ((condjump_p (insn) || condjump_in_parallel_p (insn))
2237 && ! simplejump_p (insn)
2238 && JUMP_LABEL (insn) != 0)))
2240 /* Invariant: TARGET is the insn's jump label if INSN is a
2241 JUMP_INSN; otherwise, zero.  */
2242 rtx target = 0;
2243 int maybe_never = 0;
2244 rtx pat, trial_delay;
2246 CLEAR_RESOURCE (&needed);
2247 CLEAR_RESOURCE (&set);
2251 mark_set_resources (insn, &set, 0, MARK_SRC_DEST_CALL);
2252 mark_referenced_resources (insn, &needed, 1);
2257 mark_set_resources (insn, &set, 0, MARK_SRC_DEST_CALL);
2258 mark_referenced_resources (insn, &needed, 1);
2260 target = JUMP_LABEL (insn);
2264 for (trial = next_nonnote_insn (insn); trial; trial = next_trial)
2266 next_trial = next_nonnote_insn (trial);
2269 || BARRIER_P (trial))
2272 /* We must have an INSN, JUMP_INSN, or CALL_INSN. */
2273 pat = PATTERN (trial);
2275 /* Stand-alone USE and CLOBBER are just for flow. */
2276 if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
2279 /* If this already has filled delay slots, get the insn needing
2280 the delay slots.  */
2281 if (GET_CODE (pat) == SEQUENCE)
2282 trial_delay = XVECEXP (pat, 0, 0);
2284 trial_delay = trial;
2286 /* Stop our search when seeing an unconditional jump. */
2287 if (JUMP_P (trial_delay))
2290 /* See if we have a resource problem before we try to
2291 split.  */
2292 if (GET_CODE (pat) != SEQUENCE
2293 && ! insn_references_resource_p (trial, &set, 1)
2294 && ! insn_sets_resource_p (trial, &set, 1)
2295 && ! insn_sets_resource_p (trial, &needed, 1)
2297 && ! (reg_mentioned_p (cc0_rtx, pat) && ! sets_cc0_p (pat))
2299 && ! (maybe_never && may_trap_or_fault_p (pat))
2300 && (trial = try_split (pat, trial, 0))
2301 && eligible_for_delay (insn, slots_filled, trial, flags)
2302 && ! can_throw_internal(trial))
2304 next_trial = next_nonnote_insn (trial);
2305 delay_list = add_to_delay_list (trial, delay_list);
2308 if (reg_mentioned_p (cc0_rtx, pat))
2309 link_cc0_insns (trial);
2312 delete_related_insns (trial);
2313 if (slots_to_fill == ++slots_filled)
2318 mark_set_resources (trial, &set, 0, MARK_SRC_DEST_CALL);
2319 mark_referenced_resources (trial, &needed, 1);
2321 /* Ensure we don't put insns between the setting of cc and the
2322 comparison by moving a setting of cc into an earlier delay
2323 slot since these insns could clobber the condition code. */
2326 /* If this is a call or jump, we might not get here. */
2327 if (CALL_P (trial_delay)
2328 || JUMP_P (trial_delay))
2332 /* If there are slots left to fill and our search was stopped by an
2333 unconditional branch, try the insn at the branch target. We can
2334 redirect the branch if it works.
2336 Don't do this if the insn at the branch target is a branch. */
2337 if (slots_to_fill != slots_filled
2340 && simplejump_p (trial)
2341 && (target == 0 || JUMP_LABEL (trial) == target)
2342 && (next_trial = next_active_insn (JUMP_LABEL (trial))) != 0
2343 && ! (NONJUMP_INSN_P (next_trial)
2344 && GET_CODE (PATTERN (next_trial)) == SEQUENCE)
2345 && !JUMP_P (next_trial)
2346 && ! insn_references_resource_p (next_trial, &set, 1)
2347 && ! insn_sets_resource_p (next_trial, &set, 1)
2348 && ! insn_sets_resource_p (next_trial, &needed, 1)
2350 && ! reg_mentioned_p (cc0_rtx, PATTERN (next_trial))
2352 && ! (maybe_never && may_trap_or_fault_p (PATTERN (next_trial)))
2353 && (next_trial = try_split (PATTERN (next_trial), next_trial, 0))
2354 && eligible_for_delay (insn, slots_filled, next_trial, flags)
2355 && ! can_throw_internal (trial))
2357 /* See comment in relax_delay_slots about necessity of using
2358 next_real_insn here. */
2359 rtx new_label = next_real_insn (next_trial);
2362 new_label = get_label_before (new_label);
2364 new_label = find_end_label ();
2369 = add_to_delay_list (copy_rtx (next_trial), delay_list);
2371 reorg_redirect_jump (trial, new_label);
2373 /* If we merged because we both jumped to the same place,
2374 redirect the original insn also. */
2376 reorg_redirect_jump (insn, new_label);
2381 /* If this is an unconditional jump, then try to get insns from the
2382 target of the jump. */
2384 && simplejump_p (insn)
2385 && slots_filled != slots_to_fill)
2387 = fill_slots_from_thread (insn, const_true_rtx,
2388 next_active_insn (JUMP_LABEL (insn)),
2390 own_thread_p (JUMP_LABEL (insn),
2391 JUMP_LABEL (insn), 0),
2392 slots_to_fill, &slots_filled,
2396 unfilled_slots_base[i]
2397 = emit_delay_sequence (insn, delay_list, slots_filled);
2399 if (slots_to_fill == slots_filled)
2400 unfilled_slots_base[i] = 0;
2402 note_delay_statistics (slots_filled, 0);
2405 #ifdef DELAY_SLOTS_FOR_EPILOGUE
2406 /* See if the epilogue needs any delay slots. Try to fill them if so.
2407 The only thing we can do is scan backwards from the end of the
2408 function.  If we did this in a previous pass, it is incorrect to do it
2409 again.  */
2410 if (current_function_epilogue_delay_list)
2413 slots_to_fill = DELAY_SLOTS_FOR_EPILOGUE;
2414 if (slots_to_fill == 0)
2418 CLEAR_RESOURCE (&set);
2420 /* The frame pointer and stack pointer are needed at the beginning of
2421 the epilogue, so instructions setting them cannot be put in the
2422 epilogue delay slot. However, everything else needed at function
2423 end is safe, so we don't want to use end_of_function_needs here. */
2424 CLEAR_RESOURCE (&needed);
2425 if (frame_pointer_needed)
2427 SET_HARD_REG_BIT (needed.regs, FRAME_POINTER_REGNUM);
2428 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
2429 SET_HARD_REG_BIT (needed.regs, HARD_FRAME_POINTER_REGNUM);
2431 if (! EXIT_IGNORE_STACK
2432 || current_function_sp_is_unchanging)
2433 SET_HARD_REG_BIT (needed.regs, STACK_POINTER_REGNUM);
2436 SET_HARD_REG_BIT (needed.regs, STACK_POINTER_REGNUM);
2438 #ifdef EPILOGUE_USES
2439 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2441 if (EPILOGUE_USES (i))
2442 SET_HARD_REG_BIT (needed.regs, i);
2446 for (trial = get_last_insn (); ! stop_search_p (trial, 1);
2447 trial = PREV_INSN (trial))
2451 pat = PATTERN (trial);
2452 if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
2455 if (! insn_references_resource_p (trial, &set, 1)
2456 && ! insn_sets_resource_p (trial, &needed, 1)
2457 && ! insn_sets_resource_p (trial, &set, 1)
2459 /* Don't want to mess with cc0 here. */
2460 && ! reg_mentioned_p (cc0_rtx, pat)
2462 && ! can_throw_internal (trial))
2464 trial = try_split (pat, trial, 1);
2465 if (ELIGIBLE_FOR_EPILOGUE_DELAY (trial, slots_filled))
2467 /* Here as well we are searching backward, so put the
2468 insns we find on the head of the list. */
2470 current_function_epilogue_delay_list
2471 = gen_rtx_INSN_LIST (VOIDmode, trial,
2472 current_function_epilogue_delay_list);
2473 mark_end_of_function_resources (trial, 1);
2474 update_block (trial, trial);
2475 delete_related_insns (trial);
2477 /* Clear deleted bit so final.c will output the insn. */
2478 INSN_DELETED_P (trial) = 0;
2480 if (slots_to_fill == ++slots_filled)
2486 mark_set_resources (trial, &set, 0, MARK_SRC_DEST_CALL);
2487 mark_referenced_resources (trial, &needed, 1);
2490 note_delay_statistics (slots_filled, 0);
2494 /* Follow any unconditional jump at LABEL;
2495 return the ultimate label reached by any such chain of jumps.
2496 Return null if the chain ultimately leads to a return instruction.
2497 If LABEL is not followed by a jump, return LABEL.
2498 If the chain loops or we can't find the end, return LABEL,
2499 since that tells caller to avoid changing the insn. */
2502 follow_jumps (rtx label)
2511 && (insn = next_active_insn (value)) != 0
2513 && ((JUMP_LABEL (insn) != 0 && any_uncondjump_p (insn)
2514 && onlyjump_p (insn))
2515 || GET_CODE (PATTERN (insn)) == RETURN)
2516 && (next = NEXT_INSN (insn))
2517 && BARRIER_P (next));
2522 /* If we have found a cycle, make the insn jump to itself. */
2523 if (JUMP_LABEL (insn) == label)
2526 tem = next_active_insn (JUMP_LABEL (insn));
2527 if (tem && (GET_CODE (PATTERN (tem)) == ADDR_VEC
2528 || GET_CODE (PATTERN (tem)) == ADDR_DIFF_VEC))
2531 value = JUMP_LABEL (insn);
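/* Example for follow_jumps (hypothetical labels):

     L1: jmp L2
     L2: jmp L3
     L3: add ...               ;; follow_jumps (L1) returns L3

   The chain is not followed into a dispatch table (ADDR_VEC or
   ADDR_DIFF_VEC), and a chain that loops back to LABEL returns LABEL
   itself.  */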
2538 /* Try to find insns to place in delay slots.
2540 INSN is the jump needing SLOTS_TO_FILL delay slots. It tests CONDITION
2541 or is an unconditional branch if CONDITION is const_true_rtx.
2542 *PSLOTS_FILLED is updated with the number of slots that we have filled.
2544 THREAD is a flow-of-control thread: either the insns to be executed if
2545 the branch is true or those if it is false; THREAD_IF_TRUE says which.
2547 OPPOSITE_THREAD is the thread in the opposite direction. It is used
2548 to see if any potential delay slot insns set things needed there.
2550 LIKELY is nonzero if it is extremely likely that the branch will be
2551 taken and THREAD_IF_TRUE is set. This is used for the branch at the
2552 end of a loop back up to the top.
2554 OWN_THREAD is true if we are the only user of the
2555 thread, i.e., it is the fallthrough code of our jump or the target of the
2556 jump when we are the only jump going there.
2558 If OWN_THREAD is false, it must be the "true" thread of a jump. In that
2559 case, we can only take insns from the head of the thread for our delay
2560 slot. We then adjust the jump to point after the insns we have taken. */
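/* For illustration (hypothetical code):

        beq L           ;; INSN, the branch being filled
        insn_f          ;; fall-through thread starts here
        ...
     L: insn_t          ;; target thread starts here

   When filling from the target, THREAD starts at insn_t, OPPOSITE_THREAD
   at insn_f, and THREAD_IF_TRUE is nonzero; when filling from the
   fall-through path the roles are swapped.  */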
2563 fill_slots_from_thread (rtx insn, rtx condition, rtx thread,
2564 rtx opposite_thread, int likely, int thread_if_true,
2565 int own_thread, int slots_to_fill,
2566 int *pslots_filled, rtx delay_list)
2569 struct resources opposite_needed, set, needed;
2575 /* Validate our arguments. */
2576 gcc_assert(condition != const_true_rtx || thread_if_true);
2577 gcc_assert(own_thread || thread_if_true);
2579 flags = get_jump_flags (insn, JUMP_LABEL (insn));
2581 /* If our thread is the end of subroutine, we can't get any delay
2582 insns from that.  */
2586 /* If this is an unconditional branch, nothing is needed at the
2587 opposite thread. Otherwise, compute what is needed there. */
2588 if (condition == const_true_rtx)
2589 CLEAR_RESOURCE (&opposite_needed);
2591 mark_target_live_regs (get_insns (), opposite_thread, &opposite_needed);
2593 /* If the insn at THREAD can be split, do it here to avoid having to
2594 update THREAD and NEW_THREAD if it is done in the loop below. Also
2595 initialize NEW_THREAD. */
2597 new_thread = thread = try_split (PATTERN (thread), thread, 0);
2599 /* Scan insns at THREAD.  We are looking for an insn that can be removed
2600 from THREAD (it neither sets nor references resources that were set
2601 ahead of it and it doesn't set anything needed by the insns ahead of
2602 it) and that either can be placed in an annulling insn or isn't
2603 needed at OPPOSITE_THREAD.  */
2605 CLEAR_RESOURCE (&needed);
2606 CLEAR_RESOURCE (&set);
2608 /* If we do not own this thread, we must stop as soon as we find
2609 something that we can't put in a delay slot, since all we can do
2610 is branch into THREAD at a later point. Therefore, labels stop
2611 the search if this is not the `true' thread. */
2613 for (trial = thread;
2614 ! stop_search_p (trial, ! thread_if_true) && (! lose || own_thread);
2615 trial = next_nonnote_insn (trial))
2619 /* If we have passed a label, we no longer own this thread. */
2620 if (LABEL_P (trial))
2626 pat = PATTERN (trial);
2627 if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
2630 /* If TRIAL conflicts with the insns ahead of it, we lose. Also,
2631 don't separate or copy insns that set and use CC0. */
2632 if (! insn_references_resource_p (trial, &set, 1)
2633 && ! insn_sets_resource_p (trial, &set, 1)
2634 && ! insn_sets_resource_p (trial, &needed, 1)
2636 && ! (reg_mentioned_p (cc0_rtx, pat)
2637 && (! own_thread || ! sets_cc0_p (pat)))
2639 && ! can_throw_internal (trial))
2643 /* If TRIAL is redundant with some insn before INSN, we don't
2644 actually need to add it to the delay list; we can merely pretend
2645 we did.  */
2646 if ((prior_insn = redundant_insn (trial, insn, delay_list)))
2648 fix_reg_dead_note (prior_insn, insn);
2651 update_block (trial, thread);
2652 if (trial == thread)
2654 thread = next_active_insn (thread);
2655 if (new_thread == trial)
2656 new_thread = thread;
2659 delete_related_insns (trial);
2663 update_reg_unused_notes (prior_insn, trial);
2664 new_thread = next_active_insn (trial);
2670 /* There are two ways we can win: If TRIAL doesn't set anything
2671 needed at the opposite thread and can't trap, or if it can
2672 go into an annulled delay slot. */
2674 && (condition == const_true_rtx
2675 || (! insn_sets_resource_p (trial, &opposite_needed, 1)
2676 && ! may_trap_or_fault_p (pat))))
2679 trial = try_split (pat, trial, 0);
2680 if (new_thread == old_trial)
2682 if (thread == old_trial)
2684 pat = PATTERN (trial);
2685 if (eligible_for_delay (insn, *pslots_filled, trial, flags))
2689 #ifdef ANNUL_IFTRUE_SLOTS
2692 #ifdef ANNUL_IFFALSE_SLOTS
2698 trial = try_split (pat, trial, 0);
2699 if (new_thread == old_trial)
2701 if (thread == old_trial)
2703 pat = PATTERN (trial);
2704 if ((must_annul || delay_list == NULL) && (thread_if_true
2705 ? check_annul_list_true_false (0, delay_list)
2706 && eligible_for_annul_false (insn, *pslots_filled, trial, flags)
2707 : check_annul_list_true_false (1, delay_list)
2708 && eligible_for_annul_true (insn, *pslots_filled, trial, flags)))
2716 if (reg_mentioned_p (cc0_rtx, pat))
2717 link_cc0_insns (trial);
2720 /* If we own this thread, delete the insn. If this is the
2721 destination of a branch, show that a basic block status
2722 may have been updated. In any case, mark the new
2723 starting point of this thread. */
2728 update_block (trial, thread);
2729 if (trial == thread)
2731 thread = next_active_insn (thread);
2732 if (new_thread == trial)
2733 new_thread = thread;
2736 /* We are moving this insn, not deleting it. We must
2737 temporarily increment the use count on any referenced
2738 label lest it be deleted by delete_related_insns. */
2739 note = find_reg_note (trial, REG_LABEL, 0);
2740 /* REG_LABEL could be NOTE_INSN_DELETED_LABEL too. */
2741 if (note && LABEL_P (XEXP (note, 0)))
2742 LABEL_NUSES (XEXP (note, 0))++;
2744 delete_related_insns (trial);
2746 if (note && LABEL_P (XEXP (note, 0)))
2747 LABEL_NUSES (XEXP (note, 0))--;
2750 new_thread = next_active_insn (trial);
2752 temp = own_thread ? trial : copy_rtx (trial);
2754 INSN_FROM_TARGET_P (temp) = 1;
2756 delay_list = add_to_delay_list (temp, delay_list);
2758 if (slots_to_fill == ++(*pslots_filled))
2760 /* Even though we have filled all the slots, we
2761 may be branching to a location that has a
2762 redundant insn. Skip any if so. */
2763 while (new_thread && ! own_thread
2764 && ! insn_sets_resource_p (new_thread, &set, 1)
2765 && ! insn_sets_resource_p (new_thread, &needed, 1)
2766 && ! insn_references_resource_p (new_thread,
2769 = redundant_insn (new_thread, insn,
2772 /* We know we do not own the thread, so no need
2773 to call update_block and delete_insn. */
2774 fix_reg_dead_note (prior_insn, insn);
2775 update_reg_unused_notes (prior_insn, new_thread);
2776 new_thread = next_active_insn (new_thread);
2786 /* This insn can't go into a delay slot. */
2788 mark_set_resources (trial, &set, 0, MARK_SRC_DEST_CALL);
2789 mark_referenced_resources (trial, &needed, 1);
2791 /* Ensure we don't put insns between the setting of cc and the comparison
2792 by moving a setting of cc into an earlier delay slot since these insns
2793 could clobber the condition code. */
2796 /* If this insn is a register-register copy and the next insn has
2797 a use of our destination, change it to use our source. That way,
2798 it will become a candidate for our delay slot the next time
2799 through this loop.  This case occurs commonly in loops that
2800 branch back to the top of the loop.
2802 We could check for more complex cases than those tested below,
2803 but it doesn't seem worth it. It might also be a good idea to try
2804 to swap the two insns. That might do better.
2806 We can't do this if the next insn modifies our destination, because
2807 that would make the replacement into the insn invalid. We also can't
2808 do this if it modifies our source, because it might be an earlyclobber
2809 operand. This latter test also prevents updating the contents of
2810 a PRE_INC. We also can't do this if there's overlap of source and
2811 destination. Overlap may happen for larger-than-register-size modes. */
2813 if (NONJUMP_INSN_P (trial) && GET_CODE (pat) == SET
2814 && REG_P (SET_SRC (pat))
2815 && REG_P (SET_DEST (pat))
2816 && !reg_overlap_mentioned_p (SET_DEST (pat), SET_SRC (pat)))
2818 rtx next = next_nonnote_insn (trial);
2820 if (next && NONJUMP_INSN_P (next)
2821 && GET_CODE (PATTERN (next)) != USE
2822 && ! reg_set_p (SET_DEST (pat), next)
2823 && ! reg_set_p (SET_SRC (pat), next)
2824 && reg_referenced_p (SET_DEST (pat), PATTERN (next))
2825 && ! modified_in_p (SET_DEST (pat), next))
2826 validate_replace_rtx (SET_DEST (pat), SET_SRC (pat), next);
2830 /* If we stopped on a branch insn that has delay slots, see if we can
2831 steal some of the insns in those slots. */
2832 if (trial && NONJUMP_INSN_P (trial)
2833 && GET_CODE (PATTERN (trial)) == SEQUENCE
2834 && JUMP_P (XVECEXP (PATTERN (trial), 0, 0)))
2836 /* If this is the `true' thread, we will want to follow the jump,
2837 so we can only do this if we have taken everything up to here. */
2838 if (thread_if_true && trial == new_thread)
2841 = steal_delay_list_from_target (insn, condition, PATTERN (trial),
2842 delay_list, &set, &needed,
2843 &opposite_needed, slots_to_fill,
2844 pslots_filled, &must_annul,
2846 /* If we owned the thread and are told that it branched
2847 elsewhere, make sure we own the thread at the new location. */
2848 if (own_thread && trial != new_thread)
2849 own_thread = own_thread_p (new_thread, new_thread, 0);
2851 else if (! thread_if_true)
2853 = steal_delay_list_from_fallthrough (insn, condition,
2855 delay_list, &set, &needed,
2856 &opposite_needed, slots_to_fill,
2857 pslots_filled, &must_annul);
2860 /* If we haven't found anything for this delay slot and it is very
2861 likely that the branch will be taken, see if the insn at our target
2862 increments or decrements a register with an increment that does not
2863 depend on the destination register. If so, try to place the opposite
2864 arithmetic insn after the jump insn and put the arithmetic insn in the
2865 delay slot. If we can't do this, return. */
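  /* For example (hypothetical RISC assembly; register and constant
     are made up):

         before:                     after:
           blikely L                   blikely L2
            nop   ; empty slot          add r1,r1,4   ; slot, from target
           ...                         sub r1,r1,4    ; undoes the slot insn
        L: add r1,r1,4              L2: ...

     The add executes on both paths; the branch is redirected past it,
     and the compensating sub runs only on the unlikely fall-through.  */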
2866 if (delay_list == 0 && likely && new_thread
2867 && NONJUMP_INSN_P (new_thread)
2868 && GET_CODE (PATTERN (new_thread)) != ASM_INPUT
2869 && asm_noperands (PATTERN (new_thread)) < 0)
2871 rtx pat = PATTERN (new_thread);
2876 pat = PATTERN (trial);
2878 if (!NONJUMP_INSN_P (trial)
2879 || GET_CODE (pat) != SET
2880 || ! eligible_for_delay (insn, 0, trial, flags)
2881 || can_throw_internal (trial))
2884 dest = SET_DEST (pat), src = SET_SRC (pat);
2885 if ((GET_CODE (src) == PLUS || GET_CODE (src) == MINUS)
2886 && rtx_equal_p (XEXP (src, 0), dest)
2887 && (!FLOAT_MODE_P (GET_MODE (src))
2888 || flag_unsafe_math_optimizations)
2889 && ! reg_overlap_mentioned_p (dest, XEXP (src, 1))
2890 && ! side_effects_p (pat))
2892 rtx other = XEXP (src, 1);
2896 /* If this is a constant adjustment, use the same code with
2897 the negated constant.  Otherwise, reverse the sense of the
2898 arithmetic.  */
2899 if (GET_CODE (other) == CONST_INT)
2900 new_arith = gen_rtx_fmt_ee (GET_CODE (src), GET_MODE (src), dest,
2901 negate_rtx (GET_MODE (src), other));
2903 new_arith = gen_rtx_fmt_ee (GET_CODE (src) == PLUS ? MINUS : PLUS,
2904 GET_MODE (src), dest, other);
2906 ninsn = emit_insn_after (gen_rtx_SET (VOIDmode, dest, new_arith),
2909 if (recog_memoized (ninsn) < 0
2910 || (extract_insn (ninsn), ! constrain_operands (1)))
2912 delete_related_insns (ninsn);
2918 update_block (trial, thread);
2919 if (trial == thread)
2921 thread = next_active_insn (thread);
2922 if (new_thread == trial)
2923 new_thread = thread;
2925 delete_related_insns (trial);
2928 new_thread = next_active_insn (trial);
2930 ninsn = own_thread ? trial : copy_rtx (trial);
2932 INSN_FROM_TARGET_P (ninsn) = 1;
2934 delay_list = add_to_delay_list (ninsn, NULL_RTX);
2939 if (delay_list && must_annul)
2940 INSN_ANNULLED_BRANCH_P (insn) = 1;
2942 /* If we are to branch into the middle of this thread, find an appropriate
2943 label or make a new one if none, and redirect INSN to it. If we hit the
2944 end of the function, use the end-of-function label. */
2945 if (new_thread != thread)
2949 gcc_assert (thread_if_true);
2951 if (new_thread && JUMP_P (new_thread)
2952 && (simplejump_p (new_thread)
2953 || GET_CODE (PATTERN (new_thread)) == RETURN)
2954 && redirect_with_delay_list_safe_p (insn,
2955 JUMP_LABEL (new_thread),
2957 new_thread = follow_jumps (JUMP_LABEL (new_thread));
2959 if (new_thread == 0)
2960 label = find_end_label ();
2961 else if (LABEL_P (new_thread))
2964 label = get_label_before (new_thread);
2967 reorg_redirect_jump (insn, label);
2973 /* Make another attempt to find insns to place in delay slots.
2975 We previously looked for insns located in front of the delay insn
2976 and, for non-jump delay insns, located behind the delay insn.
2978 Here only try to schedule jump insns and try to move insns from either
2979 the target or the following insns into the delay slot. If annulling is
2980 supported, we will be likely to do this.  Otherwise, we can do this only
2981 if safe.  */
2984 fill_eager_delay_slots (void)
2988 int num_unfilled_slots = unfilled_slots_next - unfilled_slots_base;
2990 for (i = 0; i < num_unfilled_slots; i++)
2993 rtx target_label, insn_at_target, fallthrough_insn;
2996 int own_fallthrough;
2997 int prediction, slots_to_fill, slots_filled;
2999 insn = unfilled_slots_base[i];
3001 || INSN_DELETED_P (insn)
3003 || ! (condjump_p (insn) || condjump_in_parallel_p (insn)))
3006 slots_to_fill = num_delay_slots (insn);
3007 /* Some machine descriptions define instructions to have
3008 delay slots only in certain circumstances which may depend on
3009 nearby insns (which change due to reorg's actions).
3011 For example, the PA port normally has delay slots for unconditional
3012 jumps.
3014 However, the PA port claims such jumps do not have a delay slot
3015 if they are immediate successors of certain CALL_INSNs. This
3016 allows the port to favor filling the delay slot of the call with
3017 the unconditional jump. */
3018 if (slots_to_fill == 0)
3022 target_label = JUMP_LABEL (insn);
3023 condition = get_branch_condition (insn, target_label);
3028 /* Get the next active fallthrough and target insns and see if we own
3029 them. Then see whether the branch is likely true. We don't need
3030 to do a lot of this for unconditional branches. */
3032 insn_at_target = next_active_insn (target_label);
3033 own_target = own_thread_p (target_label, target_label, 0);
3035 if (condition == const_true_rtx)
3037 own_fallthrough = 0;
3038 fallthrough_insn = 0;
3043 fallthrough_insn = next_active_insn (insn);
3044 own_fallthrough = own_thread_p (NEXT_INSN (insn), NULL_RTX, 1);
3045 prediction = mostly_true_jump (insn, condition);
3048 /* If this insn is expected to branch, first try to get insns from our
3049 target, then our fallthrough insns. If it is not expected to branch,
3050 try the other order. */
3055 = fill_slots_from_thread (insn, condition, insn_at_target,
3056 fallthrough_insn, prediction == 2, 1,
3058 slots_to_fill, &slots_filled, delay_list);
3060 if (delay_list == 0 && own_fallthrough)
3062 /* Even though we didn't find anything for delay slots,
3063 we might have found a redundant insn which we deleted
3064 from the thread that was filled. So we have to recompute
3065 the next insn at the target. */
3066 target_label = JUMP_LABEL (insn);
3067 insn_at_target = next_active_insn (target_label);
3070 = fill_slots_from_thread (insn, condition, fallthrough_insn,
3071 insn_at_target, 0, 0,
3073 slots_to_fill, &slots_filled,
3079 if (own_fallthrough)
3081 = fill_slots_from_thread (insn, condition, fallthrough_insn,
3082 insn_at_target, 0, 0,
3084 slots_to_fill, &slots_filled,
3087 if (delay_list == 0)
3089 = fill_slots_from_thread (insn, condition, insn_at_target,
3090 next_active_insn (insn), 0, 1,
3092 slots_to_fill, &slots_filled,
3097 unfilled_slots_base[i]
3098 = emit_delay_sequence (insn, delay_list, slots_filled);
3100 if (slots_to_fill == slots_filled)
3101 unfilled_slots_base[i] = 0;
3103 note_delay_statistics (slots_filled, 1);
3107 static void delete_computation (rtx insn);
3109 /* Recursively delete prior insns that compute the value (used only by INSN
3110 which the caller is deleting) stored in the register mentioned by NOTE
3111 which is a REG_DEAD note associated with INSN. */
3114 delete_prior_computation (rtx note, rtx insn)
3117 rtx reg = XEXP (note, 0);
3119 for (our_prev = prev_nonnote_insn (insn);
3120 our_prev && (NONJUMP_INSN_P (our_prev)
3121 || CALL_P (our_prev));
3122 our_prev = prev_nonnote_insn (our_prev))
3124 rtx pat = PATTERN (our_prev);
3126 /* If we reach a CALL which is not calling a const function
3127 or the callee pops the arguments, then give up. */
3128 if (CALL_P (our_prev)
3129 && (! CONST_OR_PURE_CALL_P (our_prev)
3130 || GET_CODE (pat) != SET || GET_CODE (SET_SRC (pat)) != CALL))
3133 /* If we reach a SEQUENCE, it is too complex to try to
3134 do anything with it, so give up. We can be run during
3135 and after reorg, so SEQUENCE rtl can legitimately show
3136 up here.  */
3137 if (GET_CODE (pat) == SEQUENCE)
3140 if (GET_CODE (pat) == USE
3141 && NONJUMP_INSN_P (XEXP (pat, 0)))
3142 /* reorg creates USEs that look like this. We leave them
3143 alone because reorg needs them for its own purposes. */
3146 if (reg_set_p (reg, pat))
3148 if (side_effects_p (pat) && !CALL_P (our_prev))
3151 if (GET_CODE (pat) == PARALLEL)
3153 /* If we find a SET of something else, we can't
3154 delete the insn.  */
3158 for (i = 0; i < XVECLEN (pat, 0); i++)
3160 rtx part = XVECEXP (pat, 0, i);
3162 if (GET_CODE (part) == SET
3163 && SET_DEST (part) != reg)
3167 if (i == XVECLEN (pat, 0))
3168 delete_computation (our_prev);
3170 else if (GET_CODE (pat) == SET
3171 && REG_P (SET_DEST (pat)))
3173 int dest_regno = REGNO (SET_DEST (pat));
3174 int dest_endregno = END_REGNO (SET_DEST (pat));
3175 int regno = REGNO (reg);
3176 int endregno = END_REGNO (reg);
3178 if (dest_regno >= regno
3179 && dest_endregno <= endregno)
3180 delete_computation (our_prev);
3182 /* We may have a multi-word hard register and some, but not
3183 all, of the words of the register are needed in subsequent
3184 insns.  Write REG_UNUSED notes for those parts that were not
3185 needed.  */
3186 else if (dest_regno <= regno
3187 && dest_endregno >= endregno)
3191 REG_NOTES (our_prev)
3192 = gen_rtx_EXPR_LIST (REG_UNUSED, reg,
3193 REG_NOTES (our_prev));
3195 for (i = dest_regno; i < dest_endregno; i++)
3196 if (! find_regno_note (our_prev, REG_UNUSED, i))
3199 if (i == dest_endregno)
3200 delete_computation (our_prev);
3207 /* If PAT references the register that dies here, it is an
3208 additional use. Hence any prior SET isn't dead. However, this
3209 insn becomes the new place for the REG_DEAD note. */
3210 if (reg_overlap_mentioned_p (reg, pat))
3212 XEXP (note, 1) = REG_NOTES (our_prev);
3213 REG_NOTES (our_prev) = note;
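/* Example for delete_prior_computation (hypothetical registers):

     A:  r5 = r6 * r7
     B:  r8 = r5 + 1    (REG_DEAD r5)   ;; B is being deleted

   With B gone nothing uses r5, so A computes a dead value and is deleted
   recursively via delete_computation.  Had some insn between A and B still
   mentioned r5, the REG_DEAD note would instead have moved to that insn
   and A would have been kept.  */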
3219 /* Delete INSN and recursively delete insns that compute values used only
3220 by INSN. This uses the REG_DEAD notes computed during flow analysis.
3221 If we are running before flow.c, we need do nothing since flow.c will
3222 delete dead code. We also can't know if the registers being used are
3223 dead or not at this point.
3225 Otherwise, look at all our REG_DEAD notes. If a previous insn does
3226 nothing other than set a register that dies in this insn, we can delete
3229 On machines with CC0, if CC0 is used in this insn, we may be able to
3230 delete the insn that set it. */
3233 delete_computation (rtx insn)
3238 if (reg_referenced_p (cc0_rtx, PATTERN (insn)))
3240 rtx prev = prev_nonnote_insn (insn);
3241 /* We assume that at this stage
3242 CC's are always set explicitly
3243 and always immediately before the jump that
3244 will use them. So if the previous insn
3245 exists to set the CC's, delete it
3246 (unless it performs auto-increments, etc.). */
3247 if (prev && NONJUMP_INSN_P (prev)
3248 && sets_cc0_p (PATTERN (prev)))
3250 if (sets_cc0_p (PATTERN (prev)) > 0
3251 && ! side_effects_p (PATTERN (prev)))
3252 delete_computation (prev);
3254 /* Otherwise, show that cc0 won't be used. */
3255 REG_NOTES (prev) = gen_rtx_EXPR_LIST (REG_UNUSED,
3256 cc0_rtx, REG_NOTES (prev));
3261 for (note = REG_NOTES (insn); note; note = next)
3263 next = XEXP (note, 1);
3265 if (REG_NOTE_KIND (note) != REG_DEAD
3266 /* Verify that the REG_NOTE is legitimate. */
3267 || !REG_P (XEXP (note, 0)))
3270 delete_prior_computation (note, insn);
3273 delete_related_insns (insn);
3276 /* If all INSN does is set the pc, delete it,
3277 and delete the insn that set the condition codes for it
3278 if that's what the previous thing was. */
3281 delete_jump (rtx insn)
3283 rtx set = single_set (insn);
3285 if (set && GET_CODE (SET_DEST (set)) == PC)
3286 delete_computation (insn);
3290 /* Once we have tried two ways to fill a delay slot, make a pass over the
3291 code to try to improve the results and to do such things as more jump
3292 threading.  */
3295 relax_delay_slots (rtx first)
3297 rtx insn, next, pat;
3298 rtx trial, delay_insn, target_label;
3300 /* Look at every JUMP_INSN and see if we can improve it. */
3301 for (insn = first; insn; insn = next)
3305 next = next_active_insn (insn);
3307 /* If this is a jump insn, see if it now jumps to a jump, jumps to
3308 the next insn, or jumps to a label that is not the last of a
3309 group of consecutive labels. */
3311 && (condjump_p (insn) || condjump_in_parallel_p (insn))
3312 && (target_label = JUMP_LABEL (insn)) != 0)
3314 target_label = skip_consecutive_labels (follow_jumps (target_label));
3315 if (target_label == 0)
3316 target_label = find_end_label ();
3318 if (target_label && next_active_insn (target_label) == next
3319 && ! condjump_in_parallel_p (insn))
3325 if (target_label && target_label != JUMP_LABEL (insn))
3326 reorg_redirect_jump (insn, target_label);
3328 /* See if this jump conditionally branches around an unconditional
3329 jump.  If so, invert this jump and point it to the target of the
3330 second jump.  */
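      /* For example (hypothetical labels), assuming nothing else
         jumps to the unconditional jump:

           beq L1                     bne L2
           jmp L2             =>      ;; jmp deleted
         L1: ...                    L1: ...  */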
3331 if (next && JUMP_P (next)
3332 && any_condjump_p (insn)
3333 && (simplejump_p (next) || GET_CODE (PATTERN (next)) == RETURN)
3335 && next_active_insn (target_label) == next_active_insn (next)
3336 && no_labels_between_p (insn, next))
3338 rtx label = JUMP_LABEL (next);
3340 /* Be careful how we do this to avoid deleting code or
3341 labels that are momentarily dead.  See similar optimization
3342 in jump.c.
3344 We also need to ensure we properly handle the case when
3345 invert_jump fails. */
3347 ++LABEL_NUSES (target_label);
3349 ++LABEL_NUSES (label);
3351 if (invert_jump (insn, label, 1))
3353 delete_related_insns (next);
3358 --LABEL_NUSES (label);
3360 if (--LABEL_NUSES (target_label) == 0)
3361 delete_related_insns (target_label);
3367 /* If this is an unconditional jump and the previous insn is a
3368 conditional jump, try reversing the condition of the previous
3369 insn and swapping our targets.  The next pass might be able to
3370 fill the slots.
3372 Don't do this if we expect the conditional branch to be true, because
3373 we would then be making the more common case longer. */
3376 && (simplejump_p (insn) || GET_CODE (PATTERN (insn)) == RETURN)
3377 && (other = prev_active_insn (insn)) != 0
3378 && any_condjump_p (other)
3379 && no_labels_between_p (other, insn)
3380 && 0 > mostly_true_jump (other,
3381 get_branch_condition (other,
3382 JUMP_LABEL (other))))
3384 rtx other_target = JUMP_LABEL (other);
3385 target_label = JUMP_LABEL (insn);
3387 if (invert_jump (other, target_label, 0))
3388 reorg_redirect_jump (insn, other_target);
3391 /* Now look only at cases where we have filled a delay slot. */
3392 if (!NONJUMP_INSN_P (insn)
3393 || GET_CODE (PATTERN (insn)) != SEQUENCE)
3396 pat = PATTERN (insn);
3397 delay_insn = XVECEXP (pat, 0, 0);
3399 /* See if the first insn in the delay slot is redundant with some
3400 previous insn. Remove it from the delay slot if so; then set up
3401 to reprocess this insn. */
3402 if (redundant_insn (XVECEXP (pat, 0, 1), delay_insn, 0))
3404 delete_from_delay_slot (XVECEXP (pat, 0, 1));
3405 next = prev_active_insn (next);
3409 /* See if we have a RETURN insn with a filled delay slot followed
3410 by a RETURN insn with an unfilled delay slot.  If so, we can delete
3411 the first RETURN (but not its delay insn). This gives the same
3412 effect in fewer instructions.
3414 Only do so if optimizing for size since this results in slower, but
3415 smaller code.  */
3417 && GET_CODE (PATTERN (delay_insn)) == RETURN
3420 && GET_CODE (PATTERN (next)) == RETURN)
3425 /* Delete the RETURN and just execute the delay list insns.
3427 We do this by deleting the INSN containing the SEQUENCE, then
3428 re-emitting the insns separately, and then deleting the RETURN.
3429 This allows the count of the jump target to be properly
3430 decremented.  */
3432 /* Clear the from target bit, since these insns are no longer
3433 in delay slots.  */
3434 for (i = 0; i < XVECLEN (pat, 0); i++)
3435 INSN_FROM_TARGET_P (XVECEXP (pat, 0, i)) = 0;
3437 trial = PREV_INSN (insn);
3438 delete_related_insns (insn);
3439 gcc_assert (GET_CODE (pat) == SEQUENCE);
3440 after = trial;
3441 for (i = 0; i < XVECLEN (pat, 0); i++)
3443 rtx this_insn = XVECEXP (pat, 0, i);
3444 add_insn_after (this_insn, after, NULL);
3445 after = this_insn;
3447 delete_scheduled_jump (delay_insn);
3451 /* Now look only at the cases where we have a filled JUMP_INSN. */
3452 if (!JUMP_P (XVECEXP (PATTERN (insn), 0, 0))
3453 || ! (condjump_p (XVECEXP (PATTERN (insn), 0, 0))
3454 || condjump_in_parallel_p (XVECEXP (PATTERN (insn), 0, 0))))
3457 target_label = JUMP_LABEL (delay_insn);
3461 /* If this jump goes to another unconditional jump, thread it, but
3462 don't convert a jump into a RETURN here. */
3463 trial = skip_consecutive_labels (follow_jumps (target_label));
3465 trial = find_end_label ();
3467 if (trial && trial != target_label
3468 && redirect_with_delay_slots_safe_p (delay_insn, trial, insn))
3470 reorg_redirect_jump (delay_insn, trial);
3471 target_label = trial;
3474 /* If the first insn at TARGET_LABEL is redundant with a previous
3475 insn, redirect the jump to the following insn and process it again.  */
3476 trial = next_active_insn (target_label);
3477 if (trial && GET_CODE (PATTERN (trial)) != SEQUENCE
3478 && redundant_insn (trial, insn, 0)
3479 && ! can_throw_internal (trial))
3481 /* Figure out where to emit the special USE insn so we don't
3482 later incorrectly compute register live/death info. */
3483 rtx tmp = next_active_insn (trial);
3485 tmp = find_end_label ();
3489 /* Insert the special USE insn and update dataflow info. */
3490 update_block (trial, tmp);
3492 /* Now emit a label before the special USE insn, and
3493 redirect our jump to the new label. */
3494 target_label = get_label_before (PREV_INSN (tmp));
3495 reorg_redirect_jump (delay_insn, target_label);
3501 /* Similarly, if it is an unconditional jump with one insn in its
3502 delay list and that insn is redundant, thread the jump. */
3503 if (trial && GET_CODE (PATTERN (trial)) == SEQUENCE
3504 && XVECLEN (PATTERN (trial), 0) == 2
3505 && JUMP_P (XVECEXP (PATTERN (trial), 0, 0))
3506 && (simplejump_p (XVECEXP (PATTERN (trial), 0, 0))
3507 || GET_CODE (PATTERN (XVECEXP (PATTERN (trial), 0, 0))) == RETURN)
3508 && redundant_insn (XVECEXP (PATTERN (trial), 0, 1), insn, 0))
3510 target_label = JUMP_LABEL (XVECEXP (PATTERN (trial), 0, 0));
3511 if (target_label == 0)
3512 target_label = find_end_label ();
3515 && redirect_with_delay_slots_safe_p (delay_insn, target_label,
3518 reorg_redirect_jump (delay_insn, target_label);
3525 if (! INSN_ANNULLED_BRANCH_P (delay_insn)
3526 && prev_active_insn (target_label) == insn
3527 && ! condjump_in_parallel_p (delay_insn)
3529 /* If the last insn in the delay slot sets CC0 for some insn,
3530 various code assumes that it is in a delay slot. We could
3531 put it back where it belonged and delete the register notes,
3532 but it doesn't seem worthwhile in this uncommon case. */
3533 && ! find_reg_note (XVECEXP (pat, 0, XVECLEN (pat, 0) - 1),
3534 REG_CC_USER, NULL_RTX)
3541 /* All this insn does is execute its delay list and jump to the
3542 following insn.  So delete the jump and just execute the delay
3543 list insns.
3545 We do this by deleting the INSN containing the SEQUENCE, then
3546 re-emitting the insns separately, and then deleting the jump.
3547 This allows the count of the jump target to be properly
3548 decremented.  */
3550 /* Clear the from target bit, since these insns are no longer
3551 in delay slots.  */
3552 for (i = 0; i < XVECLEN (pat, 0); i++)
3553 INSN_FROM_TARGET_P (XVECEXP (pat, 0, i)) = 0;
3555 trial = PREV_INSN (insn);
3556 delete_related_insns (insn);
3557 gcc_assert (GET_CODE (pat) == SEQUENCE);
3558 after = trial;
3559 for (i = 0; i < XVECLEN (pat, 0); i++)
3561 rtx this_insn = XVECEXP (pat, 0, i);
3562 add_insn_after (this_insn, after, NULL);
3563 after = this_insn;
3565 delete_scheduled_jump (delay_insn);
3569 /* See if this is an unconditional jump around a single insn which is
3570 identical to the one in its delay slot. In this case, we can just
3571 delete the branch and the insn in its delay slot. */
3572 if (next && NONJUMP_INSN_P (next)
3573 && prev_label (next_active_insn (next)) == target_label
3574 && simplejump_p (insn)
3575 && XVECLEN (pat, 0) == 2
3576 && rtx_equal_p (PATTERN (next), PATTERN (XVECEXP (pat, 0, 1))))
3578 delete_related_insns (insn);
3582 /* See if this jump (with its delay slots) conditionally branches
3583 around an unconditional jump (without delay slots). If so, invert
3584 this jump and point it to the target of the second jump. We cannot
3585 do this for annulled jumps, though.  Again, don't convert a jump to
3586 a RETURN here.  */
3587 if (! INSN_ANNULLED_BRANCH_P (delay_insn)
3588 && any_condjump_p (delay_insn)
3589 && next && JUMP_P (next)
3590 && (simplejump_p (next) || GET_CODE (PATTERN (next)) == RETURN)
3591 && next_active_insn (target_label) == next_active_insn (next)
3592 && no_labels_between_p (insn, next))
3594 rtx label = JUMP_LABEL (next);
3595 rtx old_label = JUMP_LABEL (delay_insn);
3598 label = find_end_label ();
3600 /* find_end_label can generate a new label. Check this first. */
3602 && no_labels_between_p (insn, next)
3603 && redirect_with_delay_slots_safe_p (delay_insn, label, insn))
3605 /* Be careful how we do this to avoid deleting code or labels
3606 that are momentarily dead.  See similar optimization in
3607 jump.c.  */
3609 ++LABEL_NUSES (old_label);
3611 if (invert_jump (delay_insn, label, 1))
3615 /* Must update the INSN_FROM_TARGET_P bits now that
3616 the branch is reversed, so that mark_target_live_regs
3617 will handle the delay slot insn correctly. */
3618 for (i = 1; i < XVECLEN (PATTERN (insn), 0); i++)
3620 rtx slot = XVECEXP (PATTERN (insn), 0, i);
3621 INSN_FROM_TARGET_P (slot) = ! INSN_FROM_TARGET_P (slot);
3624 delete_related_insns (next);
3628 if (old_label && --LABEL_NUSES (old_label) == 0)
3629 delete_related_insns (old_label);
3634 /* If we own the thread opposite the way this insn branches, see if we
3635 can merge its delay slots with following insns. */
3636 if (INSN_FROM_TARGET_P (XVECEXP (pat, 0, 1))
3637 && own_thread_p (NEXT_INSN (insn), 0, 1))
3638 try_merge_delay_insns (insn, next);
3639 else if (! INSN_FROM_TARGET_P (XVECEXP (pat, 0, 1))
3640 && own_thread_p (target_label, target_label, 0))
3641 try_merge_delay_insns (insn, next_active_insn (target_label));
3643 /* If we get here, we haven't deleted INSN. But we may have deleted
3644 NEXT, so recompute it. */
3645 next = next_active_insn (insn);
3651 /* Look for filled jumps to the end of function label. We can try to convert
3652 them into RETURN insns if the insns in the delay slot are valid for the
3653 RETURN as well.  */
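/* For illustration, the assumed before/after shapes:

     (sequence [(jump_insn ... END_OF_FUNCTION_LABEL) (insn ...)])

   becomes, when the slot insn is also valid for a RETURN,

     (sequence [(jump_insn (return)) (insn ...)])

   and otherwise the jump is redirected to the best real RETURN insn
   found in the function.  */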
3656 make_return_insns (rtx first)
3658 rtx insn, jump_insn, pat;
3659 rtx real_return_label = end_of_function_label;
3662 #ifdef DELAY_SLOTS_FOR_EPILOGUE
3663 /* If a previous pass filled delay slots in the epilogue, things get a
3664 bit more complicated, as those filler insns would generally (without
3665 data flow analysis) have to be executed after any existing branch
3666 delay slot filler insns. It is also unknown whether such a
3667 transformation would actually be profitable. Note that the existing
3668 code only cares for branches with (some) filled delay slots. */
3669 if (current_function_epilogue_delay_list != NULL)
3673 /* See if there is a RETURN insn in the function other than the one we
3674 made for END_OF_FUNCTION_LABEL. If so, set up anything we can't change
3675 into a RETURN to jump to it. */
3676 for (insn = first; insn; insn = NEXT_INSN (insn))
3677 if (JUMP_P (insn) && GET_CODE (PATTERN (insn)) == RETURN)
3679 real_return_label = get_label_before (insn);
3683 /* Show an extra usage of REAL_RETURN_LABEL so it won't go away if it
3684 was equal to END_OF_FUNCTION_LABEL. */
3685 LABEL_NUSES (real_return_label)++;
3687 /* Clear the list of insns to fill so we can use it. */
3688 obstack_free (&unfilled_slots_obstack, unfilled_firstobj);
3690 for (insn = first; insn; insn = NEXT_INSN (insn))
3694 /* Only look at filled JUMP_INSNs that go to the end of function
3695 label.  */
3696 if (!NONJUMP_INSN_P (insn)
3697 || GET_CODE (PATTERN (insn)) != SEQUENCE
3698 || !JUMP_P (XVECEXP (PATTERN (insn), 0, 0))
3699 || JUMP_LABEL (XVECEXP (PATTERN (insn), 0, 0)) != end_of_function_label)
3702 pat = PATTERN (insn);
3703 jump_insn = XVECEXP (pat, 0, 0);
3705 /* If we can't make the jump into a RETURN, try to redirect it to the best
3706 RETURN and go on to the next insn. */
3707 if (! reorg_redirect_jump (jump_insn, NULL_RTX))
3709 /* Make sure redirecting the jump will not invalidate the delay
3710 slot insns.  */
3711 if (redirect_with_delay_slots_safe_p (jump_insn,
3714 reorg_redirect_jump (jump_insn, real_return_label);
3718 /* See if this RETURN can accept the insns currently in its delay slot.
3719 It can if it has more or an equal number of slots and the contents
3720 of each is valid. */
3722 flags = get_jump_flags (jump_insn, JUMP_LABEL (jump_insn));
3723 slots = num_delay_slots (jump_insn);
3724 if (slots >= XVECLEN (pat, 0) - 1)
3726 for (i = 1; i < XVECLEN (pat, 0); i++)
3728 #ifdef ANNUL_IFFALSE_SLOTS
3729 (INSN_ANNULLED_BRANCH_P (jump_insn)
3730 && INSN_FROM_TARGET_P (XVECEXP (pat, 0, i)))
3731 ? eligible_for_annul_false (jump_insn, i - 1,
3732 XVECEXP (pat, 0, i), flags) :
3734 #ifdef ANNUL_IFTRUE_SLOTS
3735 (INSN_ANNULLED_BRANCH_P (jump_insn)
3736 && ! INSN_FROM_TARGET_P (XVECEXP (pat, 0, i)))
3737 ? eligible_for_annul_true (jump_insn, i - 1,
3738 XVECEXP (pat, 0, i), flags) :
3740 eligible_for_delay (jump_insn, i - 1,
3741 XVECEXP (pat, 0, i), flags)))
3747 if (i == XVECLEN (pat, 0))
3750 /* We have to do something with this insn. If it is an unconditional
3751 RETURN, delete the SEQUENCE and output the individual insns,
3752 followed by the RETURN. Then set things up so we try to find
3753 insns for its delay slots, if it needs some. */
3754 if (GET_CODE (PATTERN (jump_insn)) == RETURN)
3756 rtx prev = PREV_INSN (insn);
3758 delete_related_insns (insn);
3759 for (i = 1; i < XVECLEN (pat, 0); i++)
3760 prev = emit_insn_after (PATTERN (XVECEXP (pat, 0, i)), prev);
3762 insn = emit_jump_insn_after (PATTERN (jump_insn), prev);
3763 emit_barrier_after (insn);
3766 obstack_ptr_grow (&unfilled_slots_obstack, insn);
3769 /* It is probably more efficient to keep this with its current
3770 delay slot as a branch to a RETURN. */
3771 reorg_redirect_jump (jump_insn, real_return_label);
3774 /* Now delete REAL_RETURN_LABEL if we never used it. Then try to fill any
3775 new delay slots we have created. */
3776 if (--LABEL_NUSES (real_return_label) == 0)
3777 delete_related_insns (real_return_label);
3779 fill_simple_delay_slots (1);
3780 fill_simple_delay_slots (0);
3784 /* Try to find insns to place in delay slots. */
3787 dbr_schedule (rtx first)
3789 rtx insn, next, epilogue_insn = 0;
3792 /* If the current function has no insns other than the prologue and
3793 epilogue, then do not try to fill any delay slots. */
3794 if (n_basic_blocks == NUM_FIXED_BLOCKS)
3797 /* Find the highest INSN_UID and allocate and initialize our map from
3798 INSN_UID's to position in code. */
3799 for (max_uid = 0, insn = first; insn; insn = NEXT_INSN (insn))
3801 if (INSN_UID (insn) > max_uid)
3802 max_uid = INSN_UID (insn);
3804 && NOTE_KIND (insn) == NOTE_INSN_EPILOGUE_BEG)
3805 epilogue_insn = insn;
3808 uid_to_ruid = xmalloc ((max_uid + 1) * sizeof (int));
3809 for (i = 0, insn = first; insn; i++, insn = NEXT_INSN (insn))
3810 uid_to_ruid[INSN_UID (insn)] = i;
3812 /* Initialize the list of insns that need filling. */
3813 if (unfilled_firstobj == 0)
3815 gcc_obstack_init (&unfilled_slots_obstack);
3816 unfilled_firstobj = obstack_alloc (&unfilled_slots_obstack, 0);
3819 for (insn = next_active_insn (first); insn; insn = next_active_insn (insn))
3823 INSN_ANNULLED_BRANCH_P (insn) = 0;
3824 INSN_FROM_TARGET_P (insn) = 0;
3826 /* Skip vector tables. We can't get attributes for them. */
3828 && (GET_CODE (PATTERN (insn)) == ADDR_VEC
3829 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC))
3832 if (num_delay_slots (insn) > 0)
3833 obstack_ptr_grow (&unfilled_slots_obstack, insn);
3835 /* Ensure all jumps go to the last of a set of consecutive labels. */
3837 && (condjump_p (insn) || condjump_in_parallel_p (insn))
3838 && JUMP_LABEL (insn) != 0
3839 && ((target = skip_consecutive_labels (JUMP_LABEL (insn)))
3840 != JUMP_LABEL (insn)))
3841 redirect_jump (insn, target, 1);
3844 init_resource_info (epilogue_insn);
3846 /* Show we haven't computed an end-of-function label yet. */
3847 end_of_function_label = 0;
3849 /* Initialize the statistics for this function. */
3850 memset (num_insns_needing_delays, 0, sizeof num_insns_needing_delays);
3851 memset (num_filled_delays, 0, sizeof num_filled_delays);
3853 /* Now do the delay slot filling. Try everything twice in case earlier
3854 changes make more slots fillable. */
3856 for (reorg_pass_number = 0;
3857 reorg_pass_number < MAX_REORG_PASSES;
3858 reorg_pass_number++)
3860 fill_simple_delay_slots (1);
3861 fill_simple_delay_slots (0);
3862 fill_eager_delay_slots ();
3863 relax_delay_slots (first);
3866 /* Delete any USE insns made by update_block; subsequent passes don't need
3867 them or know how to deal with them. */
3868 for (insn = first; insn; insn = next)
3870 next = NEXT_INSN (insn);
3872 if (NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == USE
3873 && INSN_P (XEXP (PATTERN (insn), 0)))
3874 next = delete_related_insns (insn);
3877 /* If we made an end of function label, indicate that it is now
3878 safe to delete it by undoing our prior adjustment to LABEL_NUSES.
3879 If it is now unused, delete it. */
3880 if (end_of_function_label && --LABEL_NUSES (end_of_function_label) == 0)
3881 delete_related_insns (end_of_function_label);
3884 if (HAVE_return && end_of_function_label != 0)
3885 make_return_insns (first);
3888 obstack_free (&unfilled_slots_obstack, unfilled_firstobj);
3890 /* It is not clear why the line below is needed, but it does seem to be. */
3891 unfilled_firstobj = obstack_alloc (&unfilled_slots_obstack, 0);
3895 int i, j, need_comma;
3896 int total_delay_slots[MAX_DELAY_HISTOGRAM + 1];
3897 int total_annul_slots[MAX_DELAY_HISTOGRAM + 1];
3899 for (reorg_pass_number = 0;
3900 reorg_pass_number < MAX_REORG_PASSES;
3901 reorg_pass_number++)
3903 fprintf (dump_file, ";; Reorg pass #%d:\n", reorg_pass_number + 1);
3904 for (i = 0; i < NUM_REORG_FUNCTIONS; i++)
3907 fprintf (dump_file, ";; Reorg function #%d\n", i);
3909 fprintf (dump_file, ";; %d insns needing delay slots\n;; ",
3910 num_insns_needing_delays[i][reorg_pass_number]);
3912 for (j = 0; j < MAX_DELAY_HISTOGRAM + 1; j++)
3913 if (num_filled_delays[i][j][reorg_pass_number])
3916 fprintf (dump_file, ", ");
3918 fprintf (dump_file, "%d got %d delays",
3919 num_filled_delays[i][j][reorg_pass_number], j);
3921 fprintf (dump_file, "\n");
3924 memset (total_delay_slots, 0, sizeof total_delay_slots);
3925 memset (total_annul_slots, 0, sizeof total_annul_slots);
3926 for (insn = first; insn; insn = NEXT_INSN (insn))
3928 if (! INSN_DELETED_P (insn)
3929 && NONJUMP_INSN_P (insn)
3930 && GET_CODE (PATTERN (insn)) != USE
3931 && GET_CODE (PATTERN (insn)) != CLOBBER)
3933 if (GET_CODE (PATTERN (insn)) == SEQUENCE)
3935 j = XVECLEN (PATTERN (insn), 0) - 1;
3936 if (j > MAX_DELAY_HISTOGRAM)
3937 j = MAX_DELAY_HISTOGRAM;
3938 if (INSN_ANNULLED_BRANCH_P (XVECEXP (PATTERN (insn), 0, 0)))
3939 total_annul_slots[j]++;
3941 total_delay_slots[j]++;
3943 else if (num_delay_slots (insn) > 0)
3944 total_delay_slots[0]++;
3947 fprintf (dump_file, ";; Reorg totals: ");
3949 for (j = 0; j < MAX_DELAY_HISTOGRAM + 1; j++)
3951 if (total_delay_slots[j])
3954 fprintf (dump_file, ", ");
3956 fprintf (dump_file, "%d got %d delays", total_delay_slots[j], j);
3959 fprintf (dump_file, "\n");
3960 #if defined (ANNUL_IFTRUE_SLOTS) || defined (ANNUL_IFFALSE_SLOTS)
3961 fprintf (dump_file, ";; Reorg annuls: ");
3963 for (j = 0; j < MAX_DELAY_HISTOGRAM + 1; j++)
3965 if (total_annul_slots[j])
3968 fprintf (dump_file, ", ");
3970 fprintf (dump_file, "%d got %d delays", total_annul_slots[j], j);
3973 fprintf (dump_file, "\n");
3975 fprintf (dump_file, "\n");
3978 /* For all JUMP insns, fill in branch prediction notes, so that during
3979 assembler output a target can set branch prediction bits in the code.
3980 We have to do this now, as up until this point the destinations of
3981 JUMPS can be moved around and changed, but past right here that cannot
3982 happen.  */
3983 for (insn = first; insn; insn = NEXT_INSN (insn))
3987 if (NONJUMP_INSN_P (insn))
3989 rtx pat = PATTERN (insn);
3991 if (GET_CODE (pat) == SEQUENCE)
3992 insn = XVECEXP (pat, 0, 0);
3997 pred_flags = get_jump_flags (insn, JUMP_LABEL (insn));
3998 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_BR_PRED,
3999 GEN_INT (pred_flags),
4002 free_resource_info ();
4004 #ifdef DELAY_SLOTS_FOR_EPILOGUE
4005 /* The SPARC assembler, for instance, emits a warning when debug info is
4006 output into the delay slot.  */
4010 for (link = current_function_epilogue_delay_list;
4012 link = XEXP (link, 1))
4013 INSN_LOCATOR (XEXP (link, 0)) = 0;
4018 #endif /* DELAY_SLOTS */
4021 gate_handle_delay_slots (void)
4024 return flag_delayed_branch;
4030 /* Run delay slot optimization. */
4032 rest_of_handle_delay_slots (void)
4035 dbr_schedule (get_insns ());
4040 struct tree_opt_pass pass_delay_slots =
4043 gate_handle_delay_slots, /* gate */
4044 rest_of_handle_delay_slots, /* execute */
4047 0, /* static_pass_number */
4048 TV_DBR_SCHED, /* tv_id */
4049 0, /* properties_required */
4050 0, /* properties_provided */
4051 0, /* properties_destroyed */
4052 0, /* todo_flags_start */
4054 TODO_ggc_collect, /* todo_flags_finish */
4058 /* Machine dependent reorg pass. */
4060 gate_handle_machine_reorg (void)
4062 return targetm.machine_dependent_reorg != 0;
4067 rest_of_handle_machine_reorg (void)
4069 targetm.machine_dependent_reorg ();
4073 struct tree_opt_pass pass_machine_reorg =
4076 gate_handle_machine_reorg, /* gate */
4077 rest_of_handle_machine_reorg, /* execute */
4080 0, /* static_pass_number */
4081 TV_MACH_DEP, /* tv_id */
4082 0, /* properties_required */
4083 0, /* properties_provided */
4084 0, /* properties_destroyed */
4085 0, /* todo_flags_start */
4087 TODO_ggc_collect, /* todo_flags_finish */