/* Perform instruction reorganizations for delay slot filling.
   Copyright (C) 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999, 2000,
   2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
   Free Software Foundation, Inc.
   Contributed by Richard Kenner (kenner@vlsi1.ultra.nyu.edu).
   Hacked by Michael Tiemann (tiemann@cygnus.com).

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* Instruction reorganization pass.

   This pass runs after register allocation and final jump
   optimization.  It should be the last pass to run before peephole.
   It serves primarily to fill delay slots of insns, typically branch
   and call insns.  Other insns typically involve more complicated
   interactions of data dependencies and resource constraints, and
   are better handled by scheduling before register allocation (by the
   function `schedule_insns').

   The Branch Penalty is the number of extra cycles that are needed to
   execute a branch insn.  On an ideal machine, branches take a single
   cycle, and the Branch Penalty is 0.  Several RISC machines approach
   branch delays differently:

   The MIPS has a single branch delay slot.  Most insns
   (except other branches) can be used to fill this slot.  When the
   slot is filled, two insns execute in two cycles, reducing the
   branch penalty to zero.
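
   For example (an illustrative sketch, not the output of any particular
   target), filling the single slot turns

	insn1			br,filled L1
	br   L1		==>	insn1		; executes in the delay slot
	nop

   so both instructions issue back to back and no cycle is wasted.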

   The SPARC always has a branch delay slot, but its effects can be
   annulled when the branch is not taken.  This means that failing to
   find other sources of insns, we can hoist an insn from the branch
   target that would only be safe to execute knowing that the branch
   is taken.

   The HP-PA always has a branch delay slot.  For unconditional branches
   its effects can be annulled when the branch is taken.  The effects
   of the delay slot in a conditional branch can be nullified for forward
   taken branches, or for untaken backward branches.  This means
   we can hoist insns from the fall-through path for forward branches or
   steal insns from the target of backward branches.

   The TMS320C3x and C4x have three branch delay slots.  When the three
   slots are filled, the branch penalty is zero.  Most insns can fill the
   delay slots except jump insns.

   Three techniques for filling delay slots have been implemented so far:

   (1) `fill_simple_delay_slots' is the simplest, most efficient way
   to fill delay slots.  This pass first looks for insns which come
   from before the branch and which are safe to execute after the
   branch.  Then it searches after the insn requiring delay slots or,
   in the case of a branch, for insns that are after the point at
   which the branch merges into the fallthrough code, if such a point
   exists.  When such insns are found, the branch penalty decreases
   and no code expansion takes place.

   (2) `fill_eager_delay_slots' is more complicated: it is used for
   scheduling conditional jumps, or for scheduling jumps which cannot
   be filled using (1).  A machine need not have annulled jumps to use
   this strategy, but it helps (by keeping more options open).
   `fill_eager_delay_slots' tries to guess the direction the branch
   will go; if it guesses right 100% of the time, it can reduce the
   branch penalty as much as `fill_simple_delay_slots' does.  If it
   guesses wrong 100% of the time, it might as well schedule nops.  When
   `fill_eager_delay_slots' takes insns from the fall-through path of
   the jump, usually there is no code expansion; when it takes insns
   from the branch target, there is code expansion if it is not the
   only way to reach that target.
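
   For instance (a schematic sketch, not tied to any target), when a
   backward branch is guessed taken, (2) may copy the first insn at the
   target into the delay slot and retarget the branch to the insn just
   after it:

	br L1  [nop]   ==>   br,filled L1+1  [copy of insn at L1]

   If other paths also reach L1, the original insn must stay there as
   well, and the copy is code expansion.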

   (3) `relax_delay_slots' uses a set of rules to simplify code that
   has been reorganized by (1) and (2).  It finds cases where a
   conditional test can be eliminated, jumps can be threaded, extra
   insns can be eliminated, etc.  It is the job of (1) and (2) to do a
   good job of scheduling locally; `relax_delay_slots' takes care of
   making the various individual schedules work well together.  It is
   especially tuned to handle the control flow interactions of branch
   insns.  It does nothing for insns with delay slots that do not
   branch.

   On machines that use CC0, we are very conservative.  We will not make
   a copy of an insn involving CC0 since we want to maintain a 1-1
   correspondence between the insn that sets and uses CC0.  The insns are
   allowed to be separated by placing an insn that sets CC0 (but not an insn
   that uses CC0; we could do this, but it doesn't seem worthwhile) in a
   delay slot.  In that case, we point each insn at the other with REG_CC_USER
   and REG_CC_SETTER notes.  Note that these restrictions affect very few
   machines because most RISC machines with delay slots will not use CC0
   (the RT is the only known exception at this point).

   Not yet implemented:

   The Acorn Risc Machine can conditionally execute most insns, so
   it is profitable to move single insns into a position to execute
   based on the condition code of the previous insn.

   The HP-PA can conditionally nullify insns, providing a similar
   effect to the ARM, differing mostly in which insn is "in charge".  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "diagnostic-core.h"
#include "rtl.h"
#include "tm_p.h"
#include "expr.h"
#include "function.h"
#include "insn-config.h"
#include "conditions.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "regs.h"
#include "recog.h"
#include "flags.h"
#include "output.h"
#include "obstack.h"
#include "insn-attr.h"
#include "resource.h"
#include "except.h"
#include "params.h"
#include "target.h"
#include "tree-pass.h"

#ifndef ANNUL_IFTRUE_SLOTS
#define eligible_for_annul_true(INSN, SLOTS, TRIAL, FLAGS) 0
#endif
#ifndef ANNUL_IFFALSE_SLOTS
#define eligible_for_annul_false(INSN, SLOTS, TRIAL, FLAGS) 0
#endif

/* Insns which have delay slots that have not yet been filled.  */

static struct obstack unfilled_slots_obstack;
static rtx *unfilled_firstobj;

/* Define macros to refer to the first and last slot containing unfilled
   insns.  These are used because the list may move and its address
   should be recomputed at each use.  */

#define unfilled_slots_base \
  ((rtx *) obstack_base (&unfilled_slots_obstack))

#define unfilled_slots_next \
  ((rtx *) obstack_next_free (&unfilled_slots_obstack))
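
/* For example (an illustrative sketch of how these macros are meant to
   be used; see fill_simple_delay_slots below), a consumer walks the
   current contents as

     rtx *p;
     for (p = unfilled_slots_base; p < unfilled_slots_next; p++)
       ... examine *p ...

   re-evaluating both macros at each use, since growing the obstack may
   move the whole array.  */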

/* Points to the label before the end of the function.  */
static rtx end_of_function_label;

/* Mapping between INSN_UID's and position in the code since INSN_UID's do
   not always monotonically increase.  */
static int *uid_to_ruid;

/* Highest valid index in `uid_to_ruid'.  */
static int max_uid;

static int stop_search_p (rtx, int);
static int resource_conflicts_p (struct resources *, struct resources *);
static int insn_references_resource_p (rtx, struct resources *, bool);
static int insn_sets_resource_p (rtx, struct resources *, bool);
static rtx find_end_label (void);
static rtx emit_delay_sequence (rtx, rtx, int);
static rtx add_to_delay_list (rtx, rtx);
static rtx delete_from_delay_slot (rtx);
static void delete_scheduled_jump (rtx);
static void note_delay_statistics (int, int);
#if defined(ANNUL_IFFALSE_SLOTS) || defined(ANNUL_IFTRUE_SLOTS)
static rtx optimize_skip (rtx);
#endif
static int get_jump_flags (rtx, rtx);
static int rare_destination (rtx);
static int mostly_true_jump (rtx, rtx);
static rtx get_branch_condition (rtx, rtx);
static int condition_dominates_p (rtx, rtx);
static int redirect_with_delay_slots_safe_p (rtx, rtx, rtx);
static int redirect_with_delay_list_safe_p (rtx, rtx, rtx);
static int check_annul_list_true_false (int, rtx);
static rtx steal_delay_list_from_target (rtx, rtx, rtx, rtx,
					 struct resources *,
					 struct resources *,
					 struct resources *,
					 int, int *, int *, rtx *);
static rtx steal_delay_list_from_fallthrough (rtx, rtx, rtx, rtx,
					      struct resources *,
					      struct resources *,
					      struct resources *,
					      int, int *, int *);
static void try_merge_delay_insns (rtx, rtx);
static rtx redundant_insn (rtx, rtx, rtx);
static int own_thread_p (rtx, rtx, int);
static void update_block (rtx, rtx);
static int reorg_redirect_jump (rtx, rtx);
static void update_reg_dead_notes (rtx, rtx);
static void fix_reg_dead_note (rtx, rtx);
static void update_reg_unused_notes (rtx, rtx);
static void fill_simple_delay_slots (int);
static rtx fill_slots_from_thread (rtx, rtx, rtx, rtx,
				   int, int, int, int,
				   int *, rtx);
static void fill_eager_delay_slots (void);
static void relax_delay_slots (rtx);
#ifdef HAVE_return
static void make_return_insns (rtx);
#endif

/* A wrapper around next_active_insn which takes care to return ret_rtx
   unchanged.  */

static rtx
first_active_target_insn (rtx insn)
{
  if (ANY_RETURN_P (insn))
    return insn;
  return next_active_insn (insn);
}

/* Return TRUE if this insn should stop the search for insn to fill delay
   slots.  LABELS_P indicates that labels should terminate the search.
   In all cases, jumps terminate the search.  */

static int
stop_search_p (rtx insn, int labels_p)
{
  if (insn == 0)
    return 1;

  /* If the insn can throw an exception that is caught within the function,
     it may effectively perform a jump from the viewpoint of the function.
     Therefore act like for a jump.  */
  if (can_throw_internal (insn))
    return 1;

  switch (GET_CODE (insn))
    {
    case NOTE:
    case CALL_INSN:
      return 0;

    case CODE_LABEL:
      return labels_p;

    case JUMP_INSN:
    case BARRIER:
      return 1;

    case INSN:
      /* OK unless it contains a delay slot or is an `asm' insn of some type.
	 We don't know anything about these.  */
      return (GET_CODE (PATTERN (insn)) == SEQUENCE
	      || GET_CODE (PATTERN (insn)) == ASM_INPUT
	      || asm_noperands (PATTERN (insn)) >= 0);

    default:
      gcc_unreachable ();
    }
}

/* Return TRUE if any resources are marked in both RES1 and RES2 or if either
   resource set contains a volatile memory reference.  Otherwise, return FALSE.  */

static int
resource_conflicts_p (struct resources *res1, struct resources *res2)
{
  if ((res1->cc && res2->cc) || (res1->memory && res2->memory)
      || (res1->unch_memory && res2->unch_memory)
      || res1->volatil || res2->volatil)
    return 1;

#ifdef HARD_REG_SET
  return (res1->regs & res2->regs) != HARD_CONST (0);
#else
  {
    int i;

    for (i = 0; i < HARD_REG_SET_LONGS; i++)
      if ((res1->regs[i] & res2->regs[i]) != 0)
	return 1;
    return 0;
  }
#endif
}

/* Return TRUE if any resource marked in RES, a `struct resources', is
   referenced by INSN.  If INCLUDE_DELAYED_EFFECTS is set, return if the called
   routine is using those resources.

   We compute this by computing all the resources referenced by INSN and
   seeing if this conflicts with RES.  It might be faster to directly check
   ourselves, and this is the way it used to work, but it means duplicating
   a large block of complex code.  */

static int
insn_references_resource_p (rtx insn, struct resources *res,
			    bool include_delayed_effects)
{
  struct resources insn_res;

  CLEAR_RESOURCE (&insn_res);
  mark_referenced_resources (insn, &insn_res, include_delayed_effects);
  return resource_conflicts_p (&insn_res, res);
}

/* Return TRUE if INSN modifies resources that are marked in RES.
   INCLUDE_DELAYED_EFFECTS is set if the actions of that routine should be
   included.  CC0 is only modified if it is explicitly set; see comments
   in front of mark_set_resources for details.  */

static int
insn_sets_resource_p (rtx insn, struct resources *res,
		      bool include_delayed_effects)
{
  struct resources insn_sets;

  CLEAR_RESOURCE (&insn_sets);
  mark_set_resources (insn, &insn_sets, 0,
		      (include_delayed_effects
		       ? MARK_SRC_DEST_CALL
		       : MARK_SRC_DEST));
  return resource_conflicts_p (&insn_sets, res);
}
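
/* A minimal usage sketch (illustrative only): the delay-slot fillers
   below pair these two predicates to screen a candidate insn TRIAL
   against the resources SET and NEEDED accumulated so far, e.g.

     if (! insn_references_resource_p (trial, &set, true)
	 && ! insn_sets_resource_p (trial, &set, true)
	 && ! insn_sets_resource_p (trial, &needed, true))
       ... TRIAL remains a candidate for a delay slot ...  */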

/* Find a label at the end of the function or before a RETURN.  If there
   is none, try to make one.  If that fails, returns 0.

   The property of such a label is that it is placed just before the
   epilogue or a bare RETURN insn, so that another bare RETURN can be
   turned into a jump to the label unconditionally.  In particular, the
   label cannot be placed before a RETURN insn with a filled delay slot.

   ??? There may be a problem with the current implementation.  Suppose
   we start with a bare RETURN insn and call find_end_label.  It may set
   end_of_function_label just before the RETURN.  Suppose the machinery
   is able to fill the delay slot of the RETURN insn afterwards.  Then
   end_of_function_label is no longer valid according to the property
   described above and find_end_label will still return it unmodified.
   Note that this is probably mitigated by the following observation:
   once end_of_function_label is made, it is very likely the target of
   a jump, so filling the delay slot of the RETURN will be much more
   difficult.  */

static rtx
find_end_label (void)
{
  rtx insn;

  /* If we found one previously, return it.  */
  if (end_of_function_label)
    return end_of_function_label;

  /* Otherwise, see if there is a label at the end of the function.  If there
     is, it must be that RETURN insns aren't needed, so that is our return
     label and we don't have to do anything else.  */

  insn = get_last_insn ();
  while (NOTE_P (insn)
	 || (NONJUMP_INSN_P (insn)
	     && (GET_CODE (PATTERN (insn)) == USE
		 || GET_CODE (PATTERN (insn)) == CLOBBER)))
    insn = PREV_INSN (insn);

  /* When a target threads its epilogue we might already have a
     suitable return insn.  If so put a label before it for the
     end_of_function_label.  */
  if (BARRIER_P (insn)
      && JUMP_P (PREV_INSN (insn))
      && GET_CODE (PATTERN (PREV_INSN (insn))) == RETURN)
    {
      rtx temp = PREV_INSN (PREV_INSN (insn));
      end_of_function_label = gen_label_rtx ();
      LABEL_NUSES (end_of_function_label) = 0;

      /* Put the label before any USE insns that may precede the RETURN insn.  */
      while (GET_CODE (temp) == USE)
	temp = PREV_INSN (temp);

      emit_label_after (end_of_function_label, temp);
    }

  else if (LABEL_P (insn))
    end_of_function_label = insn;
  else
    {
      end_of_function_label = gen_label_rtx ();
      LABEL_NUSES (end_of_function_label) = 0;
      /* If the basic block reorder pass moves the return insn to
	 some other place try to locate it again and put our
	 end_of_function_label there.  */
      while (insn && ! (JUMP_P (insn)
			&& (GET_CODE (PATTERN (insn)) == RETURN)))
	insn = PREV_INSN (insn);
      if (insn)
	{
	  insn = PREV_INSN (insn);

	  /* Put the label before any USE insns that may precede the
	     RETURN insn.  */
	  while (GET_CODE (insn) == USE)
	    insn = PREV_INSN (insn);

	  emit_label_after (end_of_function_label, insn);
	}
      else
	{
#ifdef HAVE_epilogue
	  if (HAVE_epilogue
#ifdef HAVE_return
	      && ! HAVE_return
#endif
	      )
	    {
	      /* The RETURN insn has its delay slot filled so we cannot
		 emit the label just before it.  Since we already have
		 an epilogue and cannot emit a new RETURN, we cannot
		 emit the label at all.  */
	      end_of_function_label = NULL_RTX;
	      return end_of_function_label;
	    }
#endif /* HAVE_epilogue */

	  /* Otherwise, make a new label and emit a RETURN and BARRIER,
	     if needed.  */
	  emit_label (end_of_function_label);
#ifdef HAVE_return
	  /* We don't bother trying to create a return insn if the
	     epilogue has filled delay-slots; we would have to try and
	     move the delay-slot fillers to the delay-slots for the new
	     return insn or in front of the new return insn.  */
	  if (crtl->epilogue_delay_list == NULL
	      && HAVE_return)
	    {
	      /* The return we make may have delay slots too.  */
	      rtx insn = gen_return ();
	      insn = emit_jump_insn (insn);
	      JUMP_LABEL (insn) = ret_rtx;
	      emit_barrier ();
	      if (num_delay_slots (insn) > 0)
		obstack_ptr_grow (&unfilled_slots_obstack, insn);
	    }
#endif
	}
    }

  /* Show one additional use for this label so it won't go away until
     we are done.  */
  ++LABEL_NUSES (end_of_function_label);

  return end_of_function_label;
}

/* Put INSN and LIST together in a SEQUENCE rtx of LENGTH, and replace
   the pattern of INSN with the SEQUENCE.

   Chain the insns so that NEXT_INSN of each insn in the sequence points to
   the next and NEXT_INSN of the last insn in the sequence points to
   the first insn after the sequence.  Similarly for PREV_INSN.  This makes
   it easier to scan all insns.

   Returns the SEQUENCE that replaces INSN.  */

static rtx
emit_delay_sequence (rtx insn, rtx list, int length)
{
  int i = 1;
  rtx li;
  int had_barrier = 0;

  /* Allocate the rtvec to hold the insns and the SEQUENCE.  */
  rtvec seqv = rtvec_alloc (length + 1);
  rtx seq = gen_rtx_SEQUENCE (VOIDmode, seqv);
  rtx seq_insn = make_insn_raw (seq);
  rtx first = get_insns ();
  rtx last = get_last_insn ();

  /* Make a copy of the insn having delay slots.  */
  rtx delay_insn = copy_rtx (insn);

  /* If INSN is followed by a BARRIER, delete the BARRIER since it will only
     confuse further processing.  Update LAST in case it was the last insn.
     We will put the BARRIER back in later.  */
  if (NEXT_INSN (insn) && BARRIER_P (NEXT_INSN (insn)))
    {
      delete_related_insns (NEXT_INSN (insn));
      last = get_last_insn ();
      had_barrier = 1;
    }

  /* Splice our SEQUENCE into the insn stream where INSN used to be.  */
  NEXT_INSN (seq_insn) = NEXT_INSN (insn);
  PREV_INSN (seq_insn) = PREV_INSN (insn);

  if (NEXT_INSN (seq_insn))
    PREV_INSN (NEXT_INSN (seq_insn)) = seq_insn;

  if (PREV_INSN (seq_insn))
    NEXT_INSN (PREV_INSN (seq_insn)) = seq_insn;

  /* Note the calls to set_new_first_and_last_insn must occur after
     SEQ_INSN has been completely spliced into the insn stream.

     Otherwise CUR_INSN_UID will get set to an incorrect value because
     set_new_first_and_last_insn will not find SEQ_INSN in the chain.  */
  if (insn == last)
    set_new_first_and_last_insn (first, seq_insn);

  if (insn == first)
    set_new_first_and_last_insn (seq_insn, last);

  /* Build our SEQUENCE and rebuild the insn chain.  */
  XVECEXP (seq, 0, 0) = delay_insn;
  INSN_DELETED_P (delay_insn) = 0;
  PREV_INSN (delay_insn) = PREV_INSN (seq_insn);

  INSN_LOCATOR (seq_insn) = INSN_LOCATOR (delay_insn);

  for (li = list; li; li = XEXP (li, 1), i++)
    {
      rtx tem = XEXP (li, 0);
      rtx note, next;

      /* Show that this copy of the insn isn't deleted.  */
      INSN_DELETED_P (tem) = 0;

      XVECEXP (seq, 0, i) = tem;
      PREV_INSN (tem) = XVECEXP (seq, 0, i - 1);
      NEXT_INSN (XVECEXP (seq, 0, i - 1)) = tem;

      /* The SPARC assembler, for instance, emits a warning when debug info
	 is output into the delay slot.  */
      if (INSN_LOCATOR (tem) && !INSN_LOCATOR (seq_insn))
	INSN_LOCATOR (seq_insn) = INSN_LOCATOR (tem);
      INSN_LOCATOR (tem) = 0;

      for (note = REG_NOTES (tem); note; note = next)
	{
	  next = XEXP (note, 1);
	  switch (REG_NOTE_KIND (note))
	    {
	    case REG_DEAD:
	      /* Remove any REG_DEAD notes because we can't rely on them now
		 that the insn has been moved.  */
	      remove_note (tem, note);
	      break;

	    case REG_LABEL_OPERAND:
	    case REG_LABEL_TARGET:
	      /* Keep the label reference count up to date.  */
	      if (LABEL_P (XEXP (note, 0)))
		LABEL_NUSES (XEXP (note, 0)) ++;
	      break;

	    default:
	      break;
	    }
	}
    }

  NEXT_INSN (XVECEXP (seq, 0, length)) = NEXT_INSN (seq_insn);

  /* If the previous insn is a SEQUENCE, update the NEXT_INSN pointer on the
     last insn in that SEQUENCE to point to us.  Similarly for the first
     insn in the following insn if it is a SEQUENCE.  */

  if (PREV_INSN (seq_insn) && NONJUMP_INSN_P (PREV_INSN (seq_insn))
      && GET_CODE (PATTERN (PREV_INSN (seq_insn))) == SEQUENCE)
    NEXT_INSN (XVECEXP (PATTERN (PREV_INSN (seq_insn)), 0,
			XVECLEN (PATTERN (PREV_INSN (seq_insn)), 0) - 1))
      = seq_insn;

  if (NEXT_INSN (seq_insn) && NONJUMP_INSN_P (NEXT_INSN (seq_insn))
      && GET_CODE (PATTERN (NEXT_INSN (seq_insn))) == SEQUENCE)
    PREV_INSN (XVECEXP (PATTERN (NEXT_INSN (seq_insn)), 0, 0)) = seq_insn;

  /* If there used to be a BARRIER, put it back.  */
  if (had_barrier)
    emit_barrier_after (seq_insn);

  gcc_assert (i == length + 1);

  return seq_insn;
}
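
/* Illustration (schematic, simplified RTL): after one slot of a branch
   is filled, the stream contains a single insn whose pattern is

     (sequence [(jump_insn ...branch...)
		(insn ...filler...)])

   and the NEXT_INSN/PREV_INSN chain threads straight through the inner
   insns as described above.  */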

/* Add INSN to DELAY_LIST and return the head of the new list.  The list must
   be in the order in which the insns are to be executed.  */

static rtx
add_to_delay_list (rtx insn, rtx delay_list)
{
  /* If we have an empty list, just make a new list element.  If
     INSN has its block number recorded, clear it since we may
     be moving the insn to a new block.  */

  if (delay_list == 0)
    {
      clear_hashed_info_for_insn (insn);
      return gen_rtx_INSN_LIST (VOIDmode, insn, NULL_RTX);
    }

  /* Otherwise this must be an INSN_LIST.  Add INSN to the end of the
     list.  */
  XEXP (delay_list, 1) = add_to_delay_list (insn, XEXP (delay_list, 1));

  return delay_list;
}
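
/* For example, building a two-insn list in execution order:

     delay_list = add_to_delay_list (first, NULL_RTX);
     delay_list = add_to_delay_list (second, delay_list);

   schematically yields (insn_list first (insn_list second nil)), with
   FIRST executing before SECOND.  */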

/* Delete INSN from the delay slot of the insn that it is in, which may
   produce an insn with no delay slots.  Return the new insn.  */

static rtx
delete_from_delay_slot (rtx insn)
{
  rtx trial, seq_insn, seq, prev;
  rtx delay_list = 0;
  int i;
  int had_barrier = 0;

  /* We first must find the insn containing the SEQUENCE with INSN in its
     delay slot.  Do this by finding an insn, TRIAL, where
     PREV_INSN (NEXT_INSN (TRIAL)) != TRIAL.  */

  for (trial = insn;
       PREV_INSN (NEXT_INSN (trial)) == trial;
       trial = NEXT_INSN (trial))
    ;

  seq_insn = PREV_INSN (NEXT_INSN (trial));
  seq = PATTERN (seq_insn);

  if (NEXT_INSN (seq_insn) && BARRIER_P (NEXT_INSN (seq_insn)))
    had_barrier = 1;

  /* Create a delay list consisting of all the insns other than the one
     we are deleting (unless we were the only one).  */
  if (XVECLEN (seq, 0) > 2)
    for (i = 1; i < XVECLEN (seq, 0); i++)
      if (XVECEXP (seq, 0, i) != insn)
	delay_list = add_to_delay_list (XVECEXP (seq, 0, i), delay_list);

  /* Delete the old SEQUENCE, re-emit the insn that used to have the delay
     list, and rebuild the delay list if non-empty.  */
  prev = PREV_INSN (seq_insn);
  trial = XVECEXP (seq, 0, 0);
  delete_related_insns (seq_insn);
  add_insn_after (trial, prev, NULL);

  /* If there was a barrier after the old SEQUENCE, re-emit it.  */
  if (had_barrier)
    emit_barrier_after (trial);

  /* If there are any delay insns, re-emit them.  Otherwise clear the
     annul flag.  */
  if (delay_list)
    trial = emit_delay_sequence (trial, delay_list, XVECLEN (seq, 0) - 2);
  else if (JUMP_P (trial))
    INSN_ANNULLED_BRANCH_P (trial) = 0;

  INSN_FROM_TARGET_P (insn) = 0;

  /* Show we need to fill this insn again.  */
  obstack_ptr_grow (&unfilled_slots_obstack, trial);

  return trial;
}

/* Delete INSN, a JUMP_INSN.  If it is a conditional jump, we must track down
   the insn that sets CC0 for it and delete it too.  */

static void
delete_scheduled_jump (rtx insn)
{
  /* Delete the insn that sets cc0 for us.  On machines without cc0, we could
     delete the insn that sets the condition code, but it is hard to find it.
     Since this case is rare anyway, don't bother trying; there would likely
     be other insns that became dead anyway, which we wouldn't know to
     delete.  */

#ifdef HAVE_cc0
  if (reg_mentioned_p (cc0_rtx, insn))
    {
      rtx note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);

      /* If a reg-note was found, it points to an insn to set CC0.  This
	 insn is in the delay list of some other insn.  So delete it from
	 the delay list it was in.  */
      if (note)
	{
	  if (! FIND_REG_INC_NOTE (XEXP (note, 0), NULL_RTX)
	      && sets_cc0_p (PATTERN (XEXP (note, 0))) == 1)
	    delete_from_delay_slot (XEXP (note, 0));
	}
      else
	{
	  /* The insn setting CC0 is our previous insn, but it may be in
	     a delay slot.  It will be the last insn in the delay slot, if
	     it is.  */
	  rtx trial = previous_insn (insn);
	  if (NOTE_P (trial))
	    trial = prev_nonnote_insn (trial);
	  if (sets_cc0_p (PATTERN (trial)) != 1
	      || FIND_REG_INC_NOTE (trial, NULL_RTX))
	    return;
	  if (PREV_INSN (NEXT_INSN (trial)) == trial)
	    delete_related_insns (trial);
	  else
	    delete_from_delay_slot (trial);
	}
    }
#endif

  delete_related_insns (insn);
}

/* Counters for delay-slot filling.  */

#define NUM_REORG_FUNCTIONS 2
#define MAX_DELAY_HISTOGRAM 3
#define MAX_REORG_PASSES 2

static int num_insns_needing_delays[NUM_REORG_FUNCTIONS][MAX_REORG_PASSES];

static int num_filled_delays[NUM_REORG_FUNCTIONS][MAX_DELAY_HISTOGRAM+1][MAX_REORG_PASSES];

static int reorg_pass_number;

static void
note_delay_statistics (int slots_filled, int index)
{
  num_insns_needing_delays[index][reorg_pass_number]++;
  if (slots_filled > MAX_DELAY_HISTOGRAM)
    slots_filled = MAX_DELAY_HISTOGRAM;
  num_filled_delays[index][slots_filled][reorg_pass_number]++;
}

#if defined(ANNUL_IFFALSE_SLOTS) || defined(ANNUL_IFTRUE_SLOTS)

/* Optimize the following cases:

   1.  When a conditional branch skips over only one instruction,
       use an annulling branch and put that insn in the delay slot.
       Use either a branch that annuls when the condition is true or
       invert the test with a branch that annuls when the condition is
       false.  This saves insns, since otherwise we must copy an insn
       into the delay slot.

	(orig)		 (skip)		(otherwise)
	Bcc.n L1	Bcc',a L1	Bcc,a L1'
	insn		insn		insn2
      L1:	      L1:	      L1:
	insn2		insn2		insn2
      L1':	      L1':	      L1':

   2.  When a conditional branch skips over only one instruction,
       and after that, it unconditionally branches somewhere else,
       perform the similar optimization.  This saves executing the
       second branch in the case where the inverted condition is true.

	(orig)		 (skip)		(otherwise)
	Bcc.n L1	Bcc',a L2	Bcc,a L1'
	insn		insn		insn2
      L1:	      L1:	      L1:
	Bra L2		Bra L2		Bra L2
      L1':	      L1':	      L1':

   This should be expanded to skip over N insns, where N is the number
   of delay slots required.  */

static rtx
optimize_skip (rtx insn)
{
  rtx trial = next_nonnote_insn (insn);
  rtx next_trial = next_active_insn (trial);
  rtx delay_list = 0;
  int flags;

  flags = get_jump_flags (insn, JUMP_LABEL (insn));

  if (trial == 0
      || !NONJUMP_INSN_P (trial)
      || GET_CODE (PATTERN (trial)) == SEQUENCE
      || recog_memoized (trial) < 0
      || (! eligible_for_annul_false (insn, 0, trial, flags)
	  && ! eligible_for_annul_true (insn, 0, trial, flags))
      || can_throw_internal (trial))
    return 0;

  /* There are two cases where we are just executing one insn (we assume
     here that a branch requires only one insn; this should be generalized
     at some point):  Where the branch goes around a single insn or where
     we have one insn followed by a branch to the same label we branch to.
     In both of these cases, inverting the jump and annulling the delay
     slot give the same effect in fewer insns.  */
  if ((next_trial == next_active_insn (JUMP_LABEL (insn))
       && ! (next_trial == 0 && crtl->epilogue_delay_list != 0))
      || (next_trial != 0
	  && JUMP_P (next_trial)
	  && JUMP_LABEL (insn) == JUMP_LABEL (next_trial)
	  && (simplejump_p (next_trial)
	      || GET_CODE (PATTERN (next_trial)) == RETURN)))
    {
      if (eligible_for_annul_false (insn, 0, trial, flags))
	{
	  if (invert_jump (insn, JUMP_LABEL (insn), 1))
	    INSN_FROM_TARGET_P (trial) = 1;
	  else if (! eligible_for_annul_true (insn, 0, trial, flags))
	    return 0;
	}

      delay_list = add_to_delay_list (trial, NULL_RTX);
      next_trial = next_active_insn (trial);
      update_block (trial, trial);
      delete_related_insns (trial);

      /* Also, if we are targeting an unconditional
	 branch, thread our jump to the target of that branch.  Don't
	 change this into a RETURN here, because it may not accept what
	 we have in the delay slot.  We'll fix this up later.  */
      if (next_trial && JUMP_P (next_trial)
	  && (simplejump_p (next_trial)
	      || GET_CODE (PATTERN (next_trial)) == RETURN))
	{
	  rtx target_label = JUMP_LABEL (next_trial);
	  if (ANY_RETURN_P (target_label))
	    target_label = find_end_label ();

	  if (target_label)
	    {
	      /* Recompute the flags based on TARGET_LABEL since threading
		 the jump to TARGET_LABEL may change the direction of the
		 jump (which may change the circumstances in which the
		 delay slot is nullified).  */
	      flags = get_jump_flags (insn, target_label);
	      if (eligible_for_annul_true (insn, 0, trial, flags))
		reorg_redirect_jump (insn, target_label);
	    }
	}

      INSN_ANNULLED_BRANCH_P (insn) = 1;
    }

  return delay_list;
}
#endif

/* Encode and return branch direction and prediction information for
   INSN assuming it will jump to LABEL.

   Non conditional branches return no direction information and
   are predicted as very likely taken.  */

static int
get_jump_flags (rtx insn, rtx label)
{
  int flags;

  /* get_jump_flags can be passed any insn with delay slots, these may
     be INSNs, CALL_INSNs, or JUMP_INSNs.  Only JUMP_INSNs have branch
     direction information, and only if they are conditional jumps.

     If LABEL is a return, then there is no way to determine the branch
     direction.  */
  if (JUMP_P (insn)
      && (condjump_p (insn) || condjump_in_parallel_p (insn))
      && !ANY_RETURN_P (label)
      && INSN_UID (insn) <= max_uid
      && INSN_UID (label) <= max_uid)
    flags
      = (uid_to_ruid[INSN_UID (label)] > uid_to_ruid[INSN_UID (insn)])
	? ATTR_FLAG_forward : ATTR_FLAG_backward;
  /* No valid direction information.  */
  else
    flags = 0;

  /* If insn is a conditional branch, call mostly_true_jump to determine
     the branch prediction.

     Non conditional branches are predicted as very likely taken.  */
  if (JUMP_P (insn)
      && (condjump_p (insn) || condjump_in_parallel_p (insn)))
    {
      int prediction;

      prediction = mostly_true_jump (insn, get_branch_condition (insn, label));
      switch (prediction)
	{
	case 2:
	  flags |= (ATTR_FLAG_very_likely | ATTR_FLAG_likely);
	  break;
	case 1:
	  flags |= ATTR_FLAG_likely;
	  break;
	case 0:
	  flags |= ATTR_FLAG_unlikely;
	  break;
	case -1:
	  flags |= (ATTR_FLAG_very_unlikely | ATTR_FLAG_unlikely);
	  break;

	default:
	  gcc_unreachable ();
	}
    }
  else
    flags |= (ATTR_FLAG_very_likely | ATTR_FLAG_likely);

  return flags;
}

/* Return 1 if INSN is a destination that will be branched to rarely (the
   return point of a function); return 2 if DEST will be branched to very
   rarely (a call to a function that doesn't return).  Otherwise,
   return 0.  */

static int
rare_destination (rtx insn)
{
  int jump_count = 0;
  rtx next;

  for (; insn && !ANY_RETURN_P (insn); insn = next)
    {
      if (NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
	insn = XVECEXP (PATTERN (insn), 0, 0);

      next = NEXT_INSN (insn);

      switch (GET_CODE (insn))
	{
	case CODE_LABEL:
	  return 0;
	case BARRIER:
	  /* A BARRIER can either be after a JUMP_INSN or a CALL_INSN.  We
	     don't scan past JUMP_INSNs, so any barrier we find here must
	     have been after a CALL_INSN and hence mean the call doesn't
	     return.  */
	  return 2;
	case JUMP_INSN:
	  if (GET_CODE (PATTERN (insn)) == RETURN)
	    return 1;
	  else if (simplejump_p (insn)
		   && jump_count++ < 10)
	    next = JUMP_LABEL (insn);
	  else
	    return 0;

	default:
	  break;
	}
    }

  /* If we got here it means we hit the end of the function.  So this
     is an unlikely destination.  */

  return 1;
}

/* Return truth value of the statement that this branch
   is mostly taken.  If we think that the branch is extremely likely
   to be taken, we return 2.  If the branch is slightly more likely to be
   taken, return 1.  If the branch is slightly less likely to be taken,
   return 0 and if the branch is highly unlikely to be taken, return -1.

   CONDITION, if nonzero, is the condition that JUMP_INSN is testing.  */

static int
mostly_true_jump (rtx jump_insn, rtx condition)
{
  rtx target_label = JUMP_LABEL (jump_insn);
  rtx note;
  int rare_dest, rare_fallthrough;

  /* If branch probabilities are available, then use that number since it
     always gives a correct answer.  */
  note = find_reg_note (jump_insn, REG_BR_PROB, 0);
  if (note)
    {
      int prob = INTVAL (XEXP (note, 0));

      if (prob >= REG_BR_PROB_BASE * 9 / 10)
	return 2;
      else if (prob >= REG_BR_PROB_BASE / 2)
	return 1;
      else if (prob >= REG_BR_PROB_BASE / 10)
	return 0;
      else
	return -1;
    }

  /* Look at the relative rarities of the fallthrough and destination.  If
     they differ, we can predict the branch that way.  */
  rare_dest = rare_destination (target_label);
  rare_fallthrough = rare_destination (NEXT_INSN (jump_insn));

  switch (rare_fallthrough - rare_dest)
    {
    case -2:
      return -1;
    case -1:
      return 0;
    case 1:
      return 1;
    case 2:
      return 2;
    default:
      break;
    }

  /* If we couldn't figure out what this jump was, assume it won't be
     taken.  This should be rare.  */
  if (condition == 0)
    return 0;

  /* Predict that backward branches are usually taken and forward branches
     usually are not.  If we don't know whether this is forward or backward,
     assume the branch will be taken, since most are.  */
  return (ANY_RETURN_P (target_label) || INSN_UID (jump_insn) > max_uid
	  || INSN_UID (target_label) > max_uid
	  || (uid_to_ruid[INSN_UID (jump_insn)]
	      > uid_to_ruid[INSN_UID (target_label)]));
}
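
/* For example, with the usual REG_BR_PROB_BASE of 10000, a REG_BR_PROB
   note of 9000 or more yields 2 (very likely), 5000-8999 yields 1,
   1000-4999 yields 0, and anything below 1000 yields -1.  */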

/* Return the condition under which INSN will branch to TARGET.  If TARGET
   is zero, return the condition under which INSN will return.  If INSN is
   an unconditional branch, return const_true_rtx.  If INSN isn't a simple
   type of jump, or it doesn't go to TARGET, return 0.  */

static rtx
get_branch_condition (rtx insn, rtx target)
{
  rtx pat = PATTERN (insn);
  rtx src;

  if (condjump_in_parallel_p (insn))
    pat = XVECEXP (pat, 0, 0);

  if (ANY_RETURN_P (pat))
    return pat == target ? const_true_rtx : 0;

  if (GET_CODE (pat) != SET || SET_DEST (pat) != pc_rtx)
    return 0;

  src = SET_SRC (pat);
  if (GET_CODE (src) == LABEL_REF && XEXP (src, 0) == target)
    return const_true_rtx;

  else if (GET_CODE (src) == IF_THEN_ELSE
	   && XEXP (src, 2) == pc_rtx
	   && GET_CODE (XEXP (src, 1)) == LABEL_REF
	   && XEXP (XEXP (src, 1), 0) == target)
    return XEXP (src, 0);

  else if (GET_CODE (src) == IF_THEN_ELSE
	   && XEXP (src, 1) == pc_rtx
	   && GET_CODE (XEXP (src, 2)) == LABEL_REF
	   && XEXP (XEXP (src, 2), 0) == target)
    {
      enum rtx_code rev;
      rev = reversed_comparison_code (XEXP (src, 0), insn);
      if (rev != UNKNOWN)
	return gen_rtx_fmt_ee (rev, GET_MODE (XEXP (src, 0)),
			       XEXP (XEXP (src, 0), 0),
			       XEXP (XEXP (src, 0), 1));
    }

  return 0;
}

/* Return nonzero if CONDITION is more strict than the condition of
   INSN, i.e., if INSN will always branch if CONDITION is true.  */

static int
condition_dominates_p (rtx condition, rtx insn)
{
  rtx other_condition = get_branch_condition (insn, JUMP_LABEL (insn));
  enum rtx_code code = GET_CODE (condition);
  enum rtx_code other_code;

  if (rtx_equal_p (condition, other_condition)
      || other_condition == const_true_rtx)
    return 1;

  else if (condition == const_true_rtx || other_condition == 0)
    return 0;

  other_code = GET_CODE (other_condition);
  if (GET_RTX_LENGTH (code) != 2 || GET_RTX_LENGTH (other_code) != 2
      || ! rtx_equal_p (XEXP (condition, 0), XEXP (other_condition, 0))
      || ! rtx_equal_p (XEXP (condition, 1), XEXP (other_condition, 1)))
    return 0;

  return comparison_dominates_p (code, other_code);
}
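
/* For instance, if CONDITION is (eq x y) and INSN branches on (le x y)
   with the same operands, comparison_dominates_p (EQ, LE) holds, so the
   branch is certain to be taken whenever CONDITION is true.  */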

/* Return nonzero if redirecting JUMP to NEWLABEL does not invalidate
   any insns already in the delay slot of JUMP.  */

static int
redirect_with_delay_slots_safe_p (rtx jump, rtx newlabel, rtx seq)
{
  int flags, i;
  rtx pat = PATTERN (seq);

  /* Make sure all the delay slots of this jump would still
     be valid after threading the jump.  If they are still
     valid, then return nonzero.  */

  flags = get_jump_flags (jump, newlabel);
  for (i = 1; i < XVECLEN (pat, 0); i++)
    if (! (
#ifdef ANNUL_IFFALSE_SLOTS
	   (INSN_ANNULLED_BRANCH_P (jump)
	    && INSN_FROM_TARGET_P (XVECEXP (pat, 0, i)))
	   ? eligible_for_annul_false (jump, i - 1,
				       XVECEXP (pat, 0, i), flags) :
#endif
#ifdef ANNUL_IFTRUE_SLOTS
	   (INSN_ANNULLED_BRANCH_P (jump)
	    && ! INSN_FROM_TARGET_P (XVECEXP (pat, 0, i)))
	   ? eligible_for_annul_true (jump, i - 1,
				      XVECEXP (pat, 0, i), flags) :
#endif
	   eligible_for_delay (jump, i - 1, XVECEXP (pat, 0, i), flags)))
      break;

  return (i == XVECLEN (pat, 0));
}

/* Return nonzero if redirecting JUMP to NEWLABEL does not invalidate
   any insns we wish to place in the delay slot of JUMP.  */

static int
redirect_with_delay_list_safe_p (rtx jump, rtx newlabel, rtx delay_list)
{
  int flags, i;
  rtx li;

  /* Make sure all the insns in DELAY_LIST would still be
     valid after threading the jump.  If they are still
     valid, then return nonzero.  */

  flags = get_jump_flags (jump, newlabel);
  for (li = delay_list, i = 0; li; li = XEXP (li, 1), i++)
    if (! (
#ifdef ANNUL_IFFALSE_SLOTS
	   (INSN_ANNULLED_BRANCH_P (jump)
	    && INSN_FROM_TARGET_P (XEXP (li, 0)))
	   ? eligible_for_annul_false (jump, i, XEXP (li, 0), flags) :
#endif
#ifdef ANNUL_IFTRUE_SLOTS
	   (INSN_ANNULLED_BRANCH_P (jump)
	    && ! INSN_FROM_TARGET_P (XEXP (li, 0)))
	   ? eligible_for_annul_true (jump, i, XEXP (li, 0), flags) :
#endif
	   eligible_for_delay (jump, i, XEXP (li, 0), flags)))
      break;

  return (li == NULL);
}

/* DELAY_LIST is a list of insns that have already been placed into delay
   slots.  See if all of them have the same annulling status as ANNUL_TRUE_P.
   If not, return 0; otherwise return 1.  */

static int
check_annul_list_true_false (int annul_true_p, rtx delay_list)
{
  rtx temp;

  if (delay_list)
    {
      for (temp = delay_list; temp; temp = XEXP (temp, 1))
	{
	  rtx trial = XEXP (temp, 0);

	  if ((annul_true_p && INSN_FROM_TARGET_P (trial))
	      || (!annul_true_p && !INSN_FROM_TARGET_P (trial)))
	    return 0;
	}
    }

  return 1;
}

/* INSN branches to an insn whose pattern SEQ is a SEQUENCE.  Given that
   the condition tested by INSN is CONDITION and the resources shown in
   OTHER_NEEDED are needed after INSN, see whether INSN can take all the insns
   from SEQ's delay list, in addition to whatever insns it may execute
   (in DELAY_LIST).  SETS and NEEDED denote resources already set and
   needed while searching for delay slot insns.  Return the concatenated
   delay list if possible, otherwise, return 0.

   SLOTS_TO_FILL is the total number of slots required by INSN, and
   PSLOTS_FILLED points to the number filled so far (also the number of
   insns in DELAY_LIST).  It is updated with the number that have been
   filled from the SEQUENCE, if any.

   PANNUL_P points to a nonzero value if we already know that we need
   to annul INSN.  If this routine determines that annulling is needed,
   it may set that value nonzero.

   PNEW_THREAD points to a location that is to receive the place at which
   execution should continue.  */

static rtx
steal_delay_list_from_target (rtx insn, rtx condition, rtx seq,
			      rtx delay_list, struct resources *sets,
			      struct resources *needed,
			      struct resources *other_needed,
			      int slots_to_fill, int *pslots_filled,
			      int *pannul_p, rtx *pnew_thread)
{
  rtx temp;
  int slots_remaining = slots_to_fill - *pslots_filled;
  int total_slots_filled = *pslots_filled;
  rtx new_delay_list = 0;
  int must_annul = *pannul_p;
  int used_annul = 0;
  int i;
  struct resources cc_set;

  /* We can't do anything if there are more delay slots in SEQ than we
     can handle, or if we don't know that it will be a taken branch.
     We know that it will be a taken branch if it is either an unconditional
     branch or a conditional branch with a stricter branch condition.

     Also, exit if the branch has more than one set, since then it is computing
     other results that can't be ignored, e.g. the HPPA mov&branch instruction.
     ??? It may be possible to move other sets into INSN in addition to
     moving the instructions in the delay slots.

     We cannot steal the delay list if one of the instructions in the
     current delay_list modifies the condition codes and the jump in the
     sequence is a conditional jump.  We cannot do this because we cannot
     change the direction of the jump because the condition codes
     will affect the direction of the jump in the sequence.  */

  CLEAR_RESOURCE (&cc_set);
  for (temp = delay_list; temp; temp = XEXP (temp, 1))
    {
      rtx trial = XEXP (temp, 0);

      mark_set_resources (trial, &cc_set, 0, MARK_SRC_DEST_CALL);
      if (insn_references_resource_p (XVECEXP (seq , 0, 0), &cc_set, false))
	return delay_list;
    }

  if (XVECLEN (seq, 0) - 1 > slots_remaining
      || ! condition_dominates_p (condition, XVECEXP (seq, 0, 0))
      || ! single_set (XVECEXP (seq, 0, 0)))
    return delay_list;

#ifdef MD_CAN_REDIRECT_BRANCH
  /* On some targets, branches with delay slots can have a limited
     displacement.  Give the back end a chance to tell us we can't do
     this.  */
  if (! MD_CAN_REDIRECT_BRANCH (insn, XVECEXP (seq, 0, 0)))
    return delay_list;
#endif

  for (i = 1; i < XVECLEN (seq, 0); i++)
    {
      rtx trial = XVECEXP (seq, 0, i);
      int flags;

      if (insn_references_resource_p (trial, sets, false)
	  || insn_sets_resource_p (trial, needed, false)
	  || insn_sets_resource_p (trial, sets, false)
#ifdef HAVE_cc0
	  /* If TRIAL sets CC0, we can't copy it, so we can't steal this
	     delay slot.  */
	  || find_reg_note (trial, REG_CC_USER, NULL_RTX)
#endif
	  /* If TRIAL is from the fallthrough code of an annulled branch insn
	     in SEQ, we cannot use it.  */
	  || (INSN_ANNULLED_BRANCH_P (XVECEXP (seq, 0, 0))
	      && ! INSN_FROM_TARGET_P (trial)))
	return delay_list;

      /* If this insn was already done (usually in a previous delay slot),
	 pretend we put it in our delay slot.  */
      if (redundant_insn (trial, insn, new_delay_list))
	continue;

      /* We will end up re-vectoring this branch, so compute flags
	 based on jumping to the new label.  */
      flags = get_jump_flags (insn, JUMP_LABEL (XVECEXP (seq, 0, 0)));

      if (! must_annul
	  && ((condition == const_true_rtx
	       || (! insn_sets_resource_p (trial, other_needed, false)
		   && ! may_trap_or_fault_p (PATTERN (trial)))))
	  ? eligible_for_delay (insn, total_slots_filled, trial, flags)
	  : (must_annul || (delay_list == NULL && new_delay_list == NULL))
	     && (must_annul = 1,
		 check_annul_list_true_false (0, delay_list)
		 && check_annul_list_true_false (0, new_delay_list)
		 && eligible_for_annul_false (insn, total_slots_filled,
					      trial, flags)))
	{
	  if (must_annul)
	    used_annul = 1;
	  temp = copy_rtx (trial);
	  INSN_FROM_TARGET_P (temp) = 1;
	  new_delay_list = add_to_delay_list (temp, new_delay_list);
	  total_slots_filled++;

	  if (--slots_remaining == 0)
	    break;
	}
      else
	return delay_list;
    }

  /* Show the place to which we will be branching.  */
  *pnew_thread = first_active_target_insn (JUMP_LABEL (XVECEXP (seq, 0, 0)));

  /* Add any new insns to the delay list and update the count of the
     number of slots filled.  */
  *pslots_filled = total_slots_filled;
  if (used_annul)
    *pannul_p = 1;

  if (delay_list == 0)
    return new_delay_list;

  for (temp = new_delay_list; temp; temp = XEXP (temp, 1))
    delay_list = add_to_delay_list (XEXP (temp, 0), delay_list);

  return delay_list;
}
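
/* Note on ordering (illustrative): if DELAY_LIST held (a) and SEQ
   contributed copies (b c), the result is (a b c); the stolen insns
   execute after the ones INSN already had, in SEQ order.  */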

/* Similar to steal_delay_list_from_target except that SEQ is on the
   fallthrough path of INSN.  Here we only do something if the delay insn
   of SEQ is an unconditional branch.  In that case we steal its delay slot
   for INSN since unconditional branches are much easier to fill.  */

static rtx
steal_delay_list_from_fallthrough (rtx insn, rtx condition, rtx seq,
				   rtx delay_list, struct resources *sets,
				   struct resources *needed,
				   struct resources *other_needed,
				   int slots_to_fill, int *pslots_filled,
				   int *pannul_p)
{
  int i;
  int flags;
  int must_annul = *pannul_p;
  int used_annul = 0;

  flags = get_jump_flags (insn, JUMP_LABEL (insn));

  /* We can't do anything if SEQ's delay insn isn't an
     unconditional branch.  */

  if (! simplejump_p (XVECEXP (seq, 0, 0))
      && GET_CODE (PATTERN (XVECEXP (seq, 0, 0))) != RETURN)
    return delay_list;

  for (i = 1; i < XVECLEN (seq, 0); i++)
    {
      rtx trial = XVECEXP (seq, 0, i);

      /* If TRIAL sets CC0, stealing it will move it too far from the use
	 of CC0.  */
      if (insn_references_resource_p (trial, sets, false)
	  || insn_sets_resource_p (trial, needed, false)
	  || insn_sets_resource_p (trial, sets, false)
#ifdef HAVE_cc0
	  || sets_cc0_p (PATTERN (trial))
#endif
	  )
	break;

      /* If this insn was already done, we don't need it.  */
      if (redundant_insn (trial, insn, delay_list))
	{
	  delete_from_delay_slot (trial);
	  continue;
	}

      if (! must_annul
	  && ((condition == const_true_rtx
	       || (! insn_sets_resource_p (trial, other_needed, false)
		   && ! may_trap_or_fault_p (PATTERN (trial)))))
	  ? eligible_for_delay (insn, *pslots_filled, trial, flags)
	  : (must_annul || delay_list == NULL) && (must_annul = 1,
	     check_annul_list_true_false (1, delay_list)
	     && eligible_for_annul_true (insn, *pslots_filled, trial, flags)))
	{
	  if (must_annul)
	    used_annul = 1;
	  delete_from_delay_slot (trial);
	  delay_list = add_to_delay_list (trial, delay_list);

	  if (++(*pslots_filled) == slots_to_fill)
	    break;
	}
      else
	break;
    }

  if (used_annul)
    *pannul_p = 1;
  return delay_list;
}

/* Try merging insns starting at THREAD which match exactly the insns in
   INSN's delay list.

   If all insns were matched and the insn was previously annulling, the
   annul bit will be cleared.

   For each insn that is merged, if the branch is or will be non-annulling,
   we delete the merged insn.  */

static void
try_merge_delay_insns (rtx insn, rtx thread)
{
  rtx trial, next_trial;
  rtx delay_insn = XVECEXP (PATTERN (insn), 0, 0);
  int annul_p = JUMP_P (delay_insn) && INSN_ANNULLED_BRANCH_P (delay_insn);
  int slot_number = 1;
  int num_slots = XVECLEN (PATTERN (insn), 0);
  rtx next_to_match = XVECEXP (PATTERN (insn), 0, slot_number);
  struct resources set, needed;
  rtx merged_insns = 0;
  int i;
  int flags;

  flags = get_jump_flags (delay_insn, JUMP_LABEL (delay_insn));

  CLEAR_RESOURCE (&needed);
  CLEAR_RESOURCE (&set);

  /* If this is not an annulling branch, take into account anything needed in
     INSN's delay slot.  This prevents two increments from being incorrectly
     folded into one.  If we are annulling, this would be the correct
     thing to do.  (The alternative, looking at things set in NEXT_TO_MATCH
     will essentially disable this optimization.  This method is somewhat of
     a kludge, but I don't see a better way.)  */
  if (! annul_p)
    for (i = 1 ; i < num_slots; i++)
      if (XVECEXP (PATTERN (insn), 0, i))
	mark_referenced_resources (XVECEXP (PATTERN (insn), 0, i), &needed,
				   true);

  for (trial = thread; !stop_search_p (trial, 1); trial = next_trial)
    {
      rtx pat = PATTERN (trial);
      rtx oldtrial = trial;

      next_trial = next_nonnote_insn (trial);

      /* TRIAL must be a CALL_INSN or INSN.  Skip USE and CLOBBER.  */
      if (NONJUMP_INSN_P (trial)
	  && (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER))
	continue;

      if (GET_CODE (next_to_match) == GET_CODE (trial)
#ifdef HAVE_cc0
	  /* We can't share an insn that sets cc0.  */
	  && ! sets_cc0_p (pat)
#endif
	  && ! insn_references_resource_p (trial, &set, true)
	  && ! insn_sets_resource_p (trial, &set, true)
	  && ! insn_sets_resource_p (trial, &needed, true)
	  && (trial = try_split (pat, trial, 0)) != 0
	  /* Update next_trial, in case try_split succeeded.  */
	  && (next_trial = next_nonnote_insn (trial))
	  /* Likewise THREAD.  */
	  && (thread = oldtrial == thread ? trial : thread)
	  && rtx_equal_p (PATTERN (next_to_match), PATTERN (trial))
	  /* Have to test this condition if annul condition is different
	     from (and less restrictive than) non-annulling one.  */
	  && eligible_for_delay (delay_insn, slot_number - 1, trial, flags))
	{
	  if (! annul_p)
	    {
	      update_block (trial, thread);
	      if (trial == thread)
		thread = next_active_insn (thread);

	      delete_related_insns (trial);
	      INSN_FROM_TARGET_P (next_to_match) = 0;
	    }
	  else
	    merged_insns = gen_rtx_INSN_LIST (VOIDmode, trial, merged_insns);

	  if (++slot_number == num_slots)
	    break;

	  next_to_match = XVECEXP (PATTERN (insn), 0, slot_number);
	}

      mark_set_resources (trial, &set, 0, MARK_SRC_DEST_CALL);
      mark_referenced_resources (trial, &needed, true);
    }

  /* See if we stopped on a filled insn.  If we did, try to see if its
     delay slots match.  */
  if (slot_number != num_slots
      && trial && NONJUMP_INSN_P (trial)
      && GET_CODE (PATTERN (trial)) == SEQUENCE
      && !(JUMP_P (XVECEXP (PATTERN (trial), 0, 0))
	   && INSN_ANNULLED_BRANCH_P (XVECEXP (PATTERN (trial), 0, 0))))
    {
      rtx pat = PATTERN (trial);
      rtx filled_insn = XVECEXP (pat, 0, 0);

      /* Account for resources set/needed by the filled insn.  */
      mark_set_resources (filled_insn, &set, 0, MARK_SRC_DEST_CALL);
      mark_referenced_resources (filled_insn, &needed, true);

      for (i = 1; i < XVECLEN (pat, 0); i++)
	{
	  rtx dtrial = XVECEXP (pat, 0, i);

	  if (! insn_references_resource_p (dtrial, &set, true)
	      && ! insn_sets_resource_p (dtrial, &set, true)
	      && ! insn_sets_resource_p (dtrial, &needed, true)
#ifdef HAVE_cc0
	      && ! sets_cc0_p (PATTERN (dtrial))
#endif
	      && rtx_equal_p (PATTERN (next_to_match), PATTERN (dtrial))
	      && eligible_for_delay (delay_insn, slot_number - 1, dtrial, flags))
	    {
	      if (! annul_p)
		{
		  rtx new_rtx;

		  update_block (dtrial, thread);
		  new_rtx = delete_from_delay_slot (dtrial);
		  if (INSN_DELETED_P (thread))
		    thread = new_rtx;
		  INSN_FROM_TARGET_P (next_to_match) = 0;
		}
	      else
		merged_insns = gen_rtx_INSN_LIST (SImode, dtrial,
						  merged_insns);

	      if (++slot_number == num_slots)
		break;

	      next_to_match = XVECEXP (PATTERN (insn), 0, slot_number);
	    }
	  else
	    {
	      /* Keep track of the set/referenced resources for the delay
		 slots of any trial insns we encounter.  */
	      mark_set_resources (dtrial, &set, 0, MARK_SRC_DEST_CALL);
	      mark_referenced_resources (dtrial, &needed, true);
	    }
	}
    }

  /* If all insns in the delay slot have been matched and we were previously
     annulling the branch, we need not annul it any more.  In that case
     delete all the merged insns.  Also clear the INSN_FROM_TARGET_P bit of
     each insn in the delay list so that we know that it isn't only being
     used at the target.  */
  if (slot_number == num_slots && annul_p)
    {
      for (; merged_insns; merged_insns = XEXP (merged_insns, 1))
	{
	  if (GET_MODE (merged_insns) == SImode)
	    {
	      rtx new_rtx;

	      update_block (XEXP (merged_insns, 0), thread);
	      new_rtx = delete_from_delay_slot (XEXP (merged_insns, 0));
	      if (INSN_DELETED_P (thread))
		thread = new_rtx;
	    }
	  else
	    {
	      update_block (XEXP (merged_insns, 0), thread);
	      delete_related_insns (XEXP (merged_insns, 0));
	    }
	}

      INSN_ANNULLED_BRANCH_P (delay_insn) = 0;

      for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
	INSN_FROM_TARGET_P (XVECEXP (PATTERN (insn), 0, i)) = 0;
    }
}

/* See if INSN is redundant with an insn in front of TARGET.  Often this
   is called when INSN is a candidate for a delay slot of TARGET.
   DELAY_LIST are insns that will be placed in delay slots of TARGET in front
   of INSN.  Often INSN will be redundant with an insn in a delay slot of
   some previous insn.  This happens when we have a series of branches to the
   same label; in that case the first insn at the target might want to go
   into each of the delay slots.

   If we are not careful, this routine can take up a significant fraction
   of the total compilation time (4%), but only wins rarely.  Hence we
   speed this routine up by making two passes.  The first pass goes back
   until it hits a label and sees if it finds an insn with an identical
   pattern.  Only in this (relatively rare) event does it check for
   data conflicts.

   We do not split insns we encounter.  This could cause us not to find a
   redundant insn, but the cost of splitting seems greater than the possible
   gain in rare cases.  */

static rtx
redundant_insn (rtx insn, rtx target, rtx delay_list)
{
  rtx target_main = target;
  rtx ipat = PATTERN (insn);
  rtx trial, pat;
  struct resources needed, set;
  int i;
  unsigned insns_to_search;

  /* If INSN has any REG_UNUSED notes, it can't match anything since we
     are allowed to not actually assign to such a register.  */
  if (find_reg_note (insn, REG_UNUSED, NULL_RTX) != 0)
    return 0;

  /* Scan backwards looking for a match.  */
  for (trial = PREV_INSN (target),
	 insns_to_search = MAX_DELAY_SLOT_INSN_SEARCH;
       trial && insns_to_search > 0;
       trial = PREV_INSN (trial))
    {
      if (LABEL_P (trial))
	return 0;

      if (!NONDEBUG_INSN_P (trial))
	continue;
      --insns_to_search;

      pat = PATTERN (trial);
      if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
	continue;

      if (GET_CODE (pat) == SEQUENCE)
	{
	  /* Stop for a CALL and its delay slots because it is difficult to
	     track its resource needs correctly.  */
	  if (CALL_P (XVECEXP (pat, 0, 0)))
	    return 0;

	  /* Stop for an INSN or JUMP_INSN with delayed effects and its delay
	     slots because it is difficult to track its resource needs
	     correctly.  */

#ifdef INSN_SETS_ARE_DELAYED
	  if (INSN_SETS_ARE_DELAYED (XVECEXP (pat, 0, 0)))
	    return 0;
#endif

#ifdef INSN_REFERENCES_ARE_DELAYED
	  if (INSN_REFERENCES_ARE_DELAYED (XVECEXP (pat, 0, 0)))
	    return 0;
#endif

	  /* See if any of the insns in the delay slot match, updating
	     resource requirements as we go.  */
	  for (i = XVECLEN (pat, 0) - 1; i > 0; i--)
	    if (GET_CODE (XVECEXP (pat, 0, i)) == GET_CODE (insn)
		&& rtx_equal_p (PATTERN (XVECEXP (pat, 0, i)), ipat)
		&& ! find_reg_note (XVECEXP (pat, 0, i), REG_UNUSED, NULL_RTX))
	      break;

	  /* If found a match, exit this loop early.  */
	  if (i > 0)
	    break;
	}

      else if (GET_CODE (trial) == GET_CODE (insn) && rtx_equal_p (pat, ipat)
	       && ! find_reg_note (trial, REG_UNUSED, NULL_RTX))
	break;
    }

  /* If we didn't find an insn that matches, return 0.  */
  if (trial == 0)
    return 0;

  /* See what resources this insn sets and needs.  If they overlap, or
     if this insn references CC0, it can't be redundant.  */

  CLEAR_RESOURCE (&needed);
  CLEAR_RESOURCE (&set);
  mark_set_resources (insn, &set, 0, MARK_SRC_DEST_CALL);
  mark_referenced_resources (insn, &needed, true);

  /* If TARGET is a SEQUENCE, get the main insn.  */
  if (NONJUMP_INSN_P (target) && GET_CODE (PATTERN (target)) == SEQUENCE)
    target_main = XVECEXP (PATTERN (target), 0, 0);

  if (resource_conflicts_p (&needed, &set)
#ifdef HAVE_cc0
      || reg_mentioned_p (cc0_rtx, ipat)
#endif
      /* The insn requiring the delay may not set anything needed or set by
	 INSN.  */
      || insn_sets_resource_p (target_main, &needed, true)
      || insn_sets_resource_p (target_main, &set, true))
    return 0;

  /* Insns we pass may not set either NEEDED or SET, so merge them for
     simpler tests.  */
  needed.memory |= set.memory;
  needed.unch_memory |= set.unch_memory;
  IOR_HARD_REG_SET (needed.regs, set.regs);

  /* This insn isn't redundant if it conflicts with an insn that either is
     or will be in a delay slot of TARGET.  */

  while (delay_list)
    {
      if (insn_sets_resource_p (XEXP (delay_list, 0), &needed, true))
	return 0;
      delay_list = XEXP (delay_list, 1);
    }

  if (NONJUMP_INSN_P (target) && GET_CODE (PATTERN (target)) == SEQUENCE)
    for (i = 1; i < XVECLEN (PATTERN (target), 0); i++)
      if (insn_sets_resource_p (XVECEXP (PATTERN (target), 0, i), &needed,
				true))
	return 0;

  /* Scan backwards until we reach a label or an insn that uses something
     INSN sets or sets something insn uses or sets.  */

  for (trial = PREV_INSN (target),
	 insns_to_search = MAX_DELAY_SLOT_INSN_SEARCH;
       trial && !LABEL_P (trial) && insns_to_search > 0;
       trial = PREV_INSN (trial))
    {
      if (!NONDEBUG_INSN_P (trial))
	continue;
      --insns_to_search;

      pat = PATTERN (trial);
      if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
	continue;

      if (GET_CODE (pat) == SEQUENCE)
	{
	  bool annul_p = false;
	  rtx control = XVECEXP (pat, 0, 0);

	  /* If this is a CALL_INSN and its delay slots, it is hard to track
	     the resource needs properly, so give up.  */
	  if (CALL_P (control))
	    return 0;

	  /* If this is an INSN or JUMP_INSN with delayed effects, it
	     is hard to track the resource needs properly, so give up.  */

#ifdef INSN_SETS_ARE_DELAYED
	  if (INSN_SETS_ARE_DELAYED (control))
	    return 0;
#endif

#ifdef INSN_REFERENCES_ARE_DELAYED
	  if (INSN_REFERENCES_ARE_DELAYED (control))
	    return 0;
#endif

	  if (JUMP_P (control))
	    annul_p = INSN_ANNULLED_BRANCH_P (control);

	  /* See if any of the insns in the delay slot match, updating
	     resource requirements as we go.  */
	  for (i = XVECLEN (pat, 0) - 1; i > 0; i--)
	    {
	      rtx candidate = XVECEXP (pat, 0, i);

	      /* If an insn will be annulled if the branch is false, it isn't
		 considered as a possible duplicate insn.  */
	      if (rtx_equal_p (PATTERN (candidate), ipat)
		  && ! (annul_p && INSN_FROM_TARGET_P (candidate)))
		{
		  /* Show that this insn will be used in the sequel.  */
		  INSN_FROM_TARGET_P (candidate) = 0;
		  return candidate;
		}

	      /* Unless this is an annulled insn from the target of a branch,
		 we must stop if it sets anything needed or set by INSN.  */
	      if ((!annul_p || !INSN_FROM_TARGET_P (candidate))
		  && insn_sets_resource_p (candidate, &needed, true))
		return 0;
	    }

	  /* If the insn requiring the delay slot conflicts with INSN, we
	     must stop.  */
	  if (insn_sets_resource_p (control, &needed, true))
	    return 0;
	}
      else
	{
	  /* See if TRIAL is the same as INSN.  */
	  pat = PATTERN (trial);
	  if (rtx_equal_p (pat, ipat))
	    return trial;

	  /* Can't go any further if TRIAL conflicts with INSN.  */
	  if (insn_sets_resource_p (trial, &needed, true))
	    return 0;
	}
    }

  return 0;
}

/* Return 1 if THREAD can only be executed in one way.  If LABEL is nonzero,
   it is the target of the branch insn being scanned.  If ALLOW_FALLTHROUGH
   is nonzero, we are allowed to fall into this thread; otherwise, we are
   not.

   If LABEL is used more than once or we pass a label other than LABEL before
   finding an active insn, we do not own this thread.  */

static int
own_thread_p (rtx thread, rtx label, int allow_fallthrough)
{
  rtx active_insn;
  rtx insn;

  /* We don't own the function end.  */
  if (thread == 0 || ANY_RETURN_P (thread))
    return 0;

  /* Get the first active insn, or THREAD, if it is an active insn.  */
  active_insn = next_active_insn (PREV_INSN (thread));

  for (insn = thread; insn != active_insn; insn = NEXT_INSN (insn))
    if (LABEL_P (insn)
	&& (insn != label || LABEL_NUSES (insn) != 1))
      return 0;

  if (allow_fallthrough)
    return 1;

  /* Ensure that we reach a BARRIER before any insn or label.  */
  for (insn = prev_nonnote_insn (thread);
       insn == 0 || !BARRIER_P (insn);
       insn = prev_nonnote_insn (insn))
    if (insn == 0
	|| LABEL_P (insn)
	|| (NONJUMP_INSN_P (insn)
	    && GET_CODE (PATTERN (insn)) != USE
	    && GET_CODE (PATTERN (insn)) != CLOBBER))
      return 0;

  return 1;
}
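
/* Example (schematic): for "if (cond) goto L" where L is reached only by
   this branch, i.e. LABEL_NUSES (L) == 1 and no other path falls into it,
   own_thread_p returns nonzero and the insns at L may be modified or
   stolen freely.  */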

/* Called when INSN is being moved from a location near the target of a jump.
   We leave a marker of the form (use (INSN)) immediately in front
   of WHERE for mark_target_live_regs.  These markers will be deleted when
   reorg finishes.

   We used to try to update the live status of registers if WHERE is at
   the start of a basic block, but that can't work since we may remove a
   BARRIER in relax_delay_slots.  */

static void
update_block (rtx insn, rtx where)
{
  /* Ignore if this was in a delay slot and it came from the target of
     a jump.  */
  if (INSN_FROM_TARGET_P (insn))
    return;

  emit_insn_before (gen_rtx_USE (VOIDmode, insn), where);

  /* INSN might be making a value live in a block where it didn't use to
     be.  So recompute liveness information for this block.  */

  incr_ticks_for_insn (insn);
}
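
/* Schematically, the marker emitted above is a USE whose sole operand is
   the moved insn itself,

     (use (insn ...))

   an RTL form that generates no code but lets mark_target_live_regs see
   which values the moved insn made live.  */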

/* Similar to REDIRECT_JUMP except that we update the BB_TICKS entry for
   the basic block containing the jump.  */

static int
reorg_redirect_jump (rtx jump, rtx nlabel)
{
  incr_ticks_for_insn (jump);
  return redirect_jump (jump, nlabel, 1);
}
1908 /* Called when INSN is being moved forward into a delay slot of DELAYED_INSN.
1909 We check every instruction between INSN and DELAYED_INSN for REG_DEAD notes
1910 that reference values used in INSN. If we find one, then we move the
1911 REG_DEAD note to INSN.
1913 This is needed to handle the case where a later insn (after INSN) has a
1914 REG_DEAD note for a register used by INSN, and this later insn subsequently
1915 gets moved before a CODE_LABEL because it is a redundant insn. In this
1916 case, mark_target_live_regs may be confused into thinking the register
1917 is dead because it sees a REG_DEAD note immediately before a CODE_LABEL. */
1920 update_reg_dead_notes (rtx insn, rtx delayed_insn)
1924 for (p = next_nonnote_insn (insn); p != delayed_insn;
1925 p = next_nonnote_insn (p))
1926 for (link = REG_NOTES (p); link; link = next)
1928 next = XEXP (link, 1);
1930 if (REG_NOTE_KIND (link) != REG_DEAD
1931 || !REG_P (XEXP (link, 0)))
1932 continue;
1934 if (reg_referenced_p (XEXP (link, 0), PATTERN (insn)))
1935 {
1936 /* Move the REG_DEAD note from P to INSN. */
1937 remove_note (p, link);
1938 XEXP (link, 1) = REG_NOTES (insn);
1939 REG_NOTES (insn) = link;
1940 }
1941 }
1942 }
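/* For illustration (editorial sketch): with

	INSN:		addu $2, $1, $1
	P:		...		; carries REG_DEAD $1
	DELAYED_INSN:	beq ...

   moving INSN into DELAYED_INSN's delay slot moves the REG_DEAD note
   for $1 from P onto INSN, keeping the recorded death at the last real
   use of the register.  */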
1944 /* Called when an insn redundant with start_insn is deleted. If there
1945 is a REG_DEAD note for the target of start_insn between start_insn
1946 and stop_insn, then the REG_DEAD note needs to be deleted since the
1947 value no longer dies there.
1949 If the REG_DEAD note isn't deleted, then mark_target_live_regs may be
1950 confused into thinking the register is dead. */
1952 static void
1953 fix_reg_dead_note (rtx start_insn, rtx stop_insn)
1954 {
1955 rtx p, link, next;
1957 for (p = next_nonnote_insn (start_insn); p != stop_insn;
1958 p = next_nonnote_insn (p))
1959 for (link = REG_NOTES (p); link; link = next)
1961 next = XEXP (link, 1);
1963 if (REG_NOTE_KIND (link) != REG_DEAD
1964 || !REG_P (XEXP (link, 0)))
1965 continue;
1967 if (reg_set_p (XEXP (link, 0), PATTERN (start_insn)))
1968 {
1969 remove_note (p, link);
1970 break;
1971 }
1972 }
1973 }
1975 /* Delete any REG_UNUSED notes that exist on INSN but not on REDUNDANT_INSN.
1977 This handles the case of udivmodXi4 instructions which optimize their
1978 output depending on whether any REG_UNUSED notes are present.
1979 We must make sure that INSN calculates as many results as REDUNDANT_INSN
1980 does. */
1982 static void
1983 update_reg_unused_notes (rtx insn, rtx redundant_insn)
1984 {
1985 rtx link, next;
1987 for (link = REG_NOTES (insn); link; link = next)
1989 next = XEXP (link, 1);
1991 if (REG_NOTE_KIND (link) != REG_UNUSED
1992 || !REG_P (XEXP (link, 0)))
1993 continue;
1995 if (! find_regno_note (redundant_insn, REG_UNUSED,
1996 REGNO (XEXP (link, 0))))
1997 remove_note (insn, link);
1998 }
1999 }
2001 /* Return the label before INSN, or put a new label there. */
2003 static rtx
2004 get_label_before (rtx insn)
2005 {
2006 rtx label;
2008 /* Find an existing label at this point
2009 or make a new one if there is none. */
2010 label = prev_nonnote_insn (insn);
2012 if (label == 0 || !LABEL_P (label))
2013 {
2014 rtx prev = PREV_INSN (insn);

2016 label = gen_label_rtx ();
2017 emit_label_after (label, prev);
2018 LABEL_NUSES (label) = 0;
2019 }

2021 return label;
2022 }
2023 /* Scan a function looking for insns that need a delay slot and find insns to
2024 put into the delay slot.
2026 NON_JUMPS_P is nonzero if we are to only try to fill non-jump insns (such
2027 as calls). We do these first since we don't want jump insns (that are
2028 easier to fill) to get the only insns that could be used for non-jump insns.
2029 When it is zero, only try to fill JUMP_INSNs.
2031 When slots are filled in this manner, the insns (including the
2032 delay_insn) are put together in a SEQUENCE rtx. In this fashion,
2033 it is possible to tell whether a delay slot has really been filled
2034 or not. `final' knows how to deal with this, by communicating
2035 through FINAL_SEQUENCE. */
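/* For illustration (editorial sketch): a filled delay slot looks like

	(insn (sequence [
	    (call_insn ...)	;; the insn that needed a delay slot
	    (insn ...)		;; the insn filling the slot
	]))

   while an unfilled slot has no SEQUENCE wrapper; `final' distinguishes
   the two cases through FINAL_SEQUENCE.  */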
2037 static void
2038 fill_simple_delay_slots (int non_jumps_p)
2039 {
2040 rtx insn, pat, trial, next_trial;
2041 int i;
2042 int num_unfilled_slots = unfilled_slots_next - unfilled_slots_base;
2043 struct resources needed, set;
2044 int slots_to_fill, slots_filled;
2045 rtx delay_list;
2047 for (i = 0; i < num_unfilled_slots; i++)
2048 {
2050 /* Get the next insn to fill. If it has already had any slots assigned,
2051 we can't do anything with it. Maybe we'll improve this later. */
2053 insn = unfilled_slots_base[i];
2054 if (insn == 0
2055 || INSN_DELETED_P (insn)
2056 || (NONJUMP_INSN_P (insn)
2057 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2058 || (JUMP_P (insn) && non_jumps_p)
2059 || (!JUMP_P (insn) && ! non_jumps_p))
2060 continue;
2062 /* It may have been that this insn used to need delay slots, but
2063 now doesn't; ignore in that case. This can happen, for example,
2064 on the HP PA RISC, where the number of delay slots depends on
2065 what insns are nearby. */
2066 slots_to_fill = num_delay_slots (insn);
2068 /* Some machine descriptions have defined instructions to have
2069 delay slots only in certain circumstances which may depend on
2070 nearby insns (which change due to reorg's actions).
2072 For example, the PA port normally has delay slots for unconditional
2073 jumps.
2075 However, the PA port claims such jumps do not have a delay slot
2076 if they are immediate successors of certain CALL_INSNs. This
2077 allows the port to favor filling the delay slot of the call with
2078 the unconditional jump. */
2079 if (slots_to_fill == 0)
2080 continue;
2082 /* This insn needs, or can use, some delay slots. SLOTS_TO_FILL
2083 says how many. After initialization, first try optimizing

2085 call _foo      call _foo
2086 nop            add %o7,.-L1,%o7
2087 b,a L1
2088 nop
2090 If this case applies, the delay slot of the call is filled with
2091 the unconditional jump. This is done first to avoid having the
2092 delay slot of the call filled in the backward scan. Also, since
2093 the unconditional jump is likely to also have a delay slot, that
2094 insn must exist when it is subsequently scanned.
2096 This is tried on each insn with delay slots as some machines
2097 have insns which perform calls, but are not represented as
2098 CALL_INSNs. */

2100 slots_filled = 0;
2101 delay_list = 0;

2103 if (JUMP_P (insn))
2104 flags = get_jump_flags (insn, JUMP_LABEL (insn));
2105 else
2106 flags = get_jump_flags (insn, NULL_RTX);
2108 if ((trial = next_active_insn (insn))
2109 && JUMP_P (trial)
2110 && simplejump_p (trial)
2111 && eligible_for_delay (insn, slots_filled, trial, flags)
2112 && no_labels_between_p (insn, trial)
2113 && ! can_throw_internal (trial))
2114 {
2115 rtx *tmp;
2116 slots_filled++;
2117 delay_list = add_to_delay_list (trial, delay_list);
2119 /* TRIAL may have had its delay slot filled, then unfilled. When
2120 the delay slot is unfilled, TRIAL is placed back on the unfilled
2121 slots obstack. Unfortunately, it is placed on the end of the
2122 obstack, not in its original location. Therefore, we must search
2123 from entry i + 1 to the end of the unfilled slots obstack to
2124 try and find TRIAL. */
2125 tmp = &unfilled_slots_base[i + 1];
2126 while (*tmp != trial && tmp != unfilled_slots_next)
2127 tmp++;
2129 /* Remove the unconditional jump from consideration for delay slot
2130 filling and unthread it. */
2134 rtx next = NEXT_INSN (trial);
2135 rtx prev = PREV_INSN (trial);
2136 if (prev)
2137 NEXT_INSN (prev) = next;
2138 if (next)
2139 PREV_INSN (next) = prev;
2140 }
2143 /* Now, scan backwards from the insn to search for a potential
2144 delay-slot candidate. Stop searching when a label or jump is hit.
2146 For each candidate, if it is to go into the delay slot (moved
2147 forward in execution sequence), it must not need or set any resources
2148 that were set by later insns and must not set any resources that
2149 are needed for those insns.
2151 The delay slot insn itself sets resources unless it is a call
2152 (in which case the called routine, not the insn itself, is doing
2153 the store). */
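/* For illustration (editorial sketch, MIPS-like syntax assumed): in

	addu $8, $9, $10
	beq  $11, $0, L2
	 nop

   the addu neither feeds nor is clobbered by the branch, so the
   backward scan may move it into the slot:

	beq  $11, $0, L2
	 addu $8, $9, $10

   Had the branch tested $8, the addu would set a needed resource and
   would be rejected.  */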
2155 if (slots_filled < slots_to_fill)
2156 {
2157 CLEAR_RESOURCE (&needed);
2158 CLEAR_RESOURCE (&set);
2159 mark_set_resources (insn, &set, 0, MARK_SRC_DEST);
2160 mark_referenced_resources (insn, &needed, false);
2162 for (trial = prev_nonnote_insn (insn); ! stop_search_p (trial, 1);
2163 trial = next_trial)
2164 {
2165 next_trial = prev_nonnote_insn (trial);
2167 /* This must be an INSN or CALL_INSN. */
2168 pat = PATTERN (trial);
2170 /* Stand-alone USE and CLOBBER are just for flow. */
2171 if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
2172 continue;
2174 /* Check for resource conflict first, to avoid unnecessary
2175 splitting. */
2176 if (! insn_references_resource_p (trial, &set, true)
2177 && ! insn_sets_resource_p (trial, &set, true)
2178 && ! insn_sets_resource_p (trial, &needed, true)
2179 #ifdef HAVE_cc0
2180 /* Can't separate set of cc0 from its use. */
2181 && ! (reg_mentioned_p (cc0_rtx, pat) && ! sets_cc0_p (pat))
2182 #endif
2183 && ! can_throw_internal (trial))
2184 {
2185 trial = try_split (pat, trial, 1);
2186 next_trial = prev_nonnote_insn (trial);
2187 if (eligible_for_delay (insn, slots_filled, trial, flags))
2188 {
2189 /* In this case, we are searching backward, so if we
2190 find insns to put on the delay list, we want
2191 to put them at the head, rather than the
2192 tail, of the list. */
2194 update_reg_dead_notes (trial, insn);
2195 delay_list = gen_rtx_INSN_LIST (VOIDmode,
2196 trial, delay_list);
2197 update_block (trial, trial);
2198 delete_related_insns (trial);
2199 if (slots_to_fill == ++slots_filled)
2200 break;
2201 continue;
2202 }
2205 mark_set_resources (trial, &set, 0, MARK_SRC_DEST_CALL);
2206 mark_referenced_resources (trial, &needed, true);
2210 /* If all needed slots haven't been filled, we come here. */
2212 /* Try to optimize case of jumping around a single insn. */
2213 #if defined(ANNUL_IFFALSE_SLOTS) || defined(ANNUL_IFTRUE_SLOTS)
2214 if (slots_filled != slots_to_fill
2215 && delay_list == 0
2216 && JUMP_P (insn)
2217 && (condjump_p (insn) || condjump_in_parallel_p (insn)))
2218 {
2219 delay_list = optimize_skip (insn);
2220 if (delay_list)
2221 slots_filled += 1;
2222 }
2223 #endif
2225 /* Try to get insns from beyond the insn needing the delay slot.
2226 These insns can neither set nor reference resources set in insns being
2227 skipped, cannot set resources in the insn being skipped, and, if this
2228 is a CALL_INSN (or a CALL_INSN is passed), cannot trap (because the
2229 call might not return).
2231 There used to be code which continued past the target label if
2232 we saw all uses of the target label. This code did not work,
2233 because it failed to account for some instructions which were
2234 both annulled and marked as from the target. This can happen as a
2235 result of optimize_skip. Since this code was redundant with
2236 fill_eager_delay_slots anyways, it was just deleted. */
2238 if (slots_filled != slots_to_fill
2239 /* If this instruction could throw an exception which is
2240 caught in the same function, then it's not safe to fill
2241 the delay slot with an instruction from beyond this
2242 point. For example, consider:

2244 int i = 2;

2246 try {
2247 f ();
2248 i = 3;
2249 } catch (...) {}

2251 return i;

2253 Even though `i' is a local variable, we must be sure not
2254 to put `i = 3' in the delay slot if `f' might throw an
2255 exception.
2257 Presumably, we should also check to see if we could get
2258 back to this function via `setjmp'. */
2259 && ! can_throw_internal (insn)
2260 && (!JUMP_P (insn)
2261 || ((condjump_p (insn) || condjump_in_parallel_p (insn))
2262 && ! simplejump_p (insn)
2263 && !ANY_RETURN_P (JUMP_LABEL (insn)))))
2264 {
2265 /* Invariant: If insn is a JUMP_INSN, the insn's jump
2266 label. Otherwise, zero. */
2267 rtx target = 0;
2268 int maybe_never = 0;
2269 rtx pat, trial_delay;
2271 CLEAR_RESOURCE (&needed);
2272 CLEAR_RESOURCE (&set);

2274 if (CALL_P (insn))
2275 {
2276 mark_set_resources (insn, &set, 0, MARK_SRC_DEST_CALL);
2277 mark_referenced_resources (insn, &needed, true);
2278 maybe_never = 1;
2279 }
2280 else
2281 {
2282 mark_set_resources (insn, &set, 0, MARK_SRC_DEST_CALL);
2283 mark_referenced_resources (insn, &needed, true);
2284 if (JUMP_P (insn))
2285 target = JUMP_LABEL (insn);
2286 }
2288 if (target == 0 || ANY_RETURN_P (target))
2289 for (trial = next_nonnote_insn (insn); !stop_search_p (trial, 1);
2290 trial = next_trial)
2291 {
2292 next_trial = next_nonnote_insn (trial);
2294 /* This must be an INSN or CALL_INSN. */
2295 pat = PATTERN (trial);
2297 /* Stand-alone USE and CLOBBER are just for flow. */
2298 if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
2299 continue;
2301 /* If this already has filled delay slots, get the insn needing
2302 the delay slots. */
2303 if (GET_CODE (pat) == SEQUENCE)
2304 trial_delay = XVECEXP (pat, 0, 0);
2305 else
2306 trial_delay = trial;
2308 /* Stop our search when seeing a jump. */
2309 if (JUMP_P (trial_delay))
2310 break;
2312 /* See if we have a resource problem before we try to
2313 split. */
2314 if (GET_CODE (pat) != SEQUENCE
2315 && ! insn_references_resource_p (trial, &set, true)
2316 && ! insn_sets_resource_p (trial, &set, true)
2317 && ! insn_sets_resource_p (trial, &needed, true)
2318 #ifdef HAVE_cc0
2319 && ! (reg_mentioned_p (cc0_rtx, pat) && ! sets_cc0_p (pat))
2320 #endif
2321 && ! (maybe_never && may_trap_or_fault_p (pat))
2322 && (trial = try_split (pat, trial, 0))
2323 && eligible_for_delay (insn, slots_filled, trial, flags)
2324 && ! can_throw_internal (trial))
2325 {
2326 next_trial = next_nonnote_insn (trial);
2327 delay_list = add_to_delay_list (trial, delay_list);
2329 #ifdef HAVE_cc0
2330 if (reg_mentioned_p (cc0_rtx, pat))
2331 link_cc0_insns (trial);
2332 #endif
2334 delete_related_insns (trial);
2335 if (slots_to_fill == ++slots_filled)
2336 break;
2337 continue;
2338 }
2340 mark_set_resources (trial, &set, 0, MARK_SRC_DEST_CALL);
2341 mark_referenced_resources (trial, &needed, true);
2343 /* Ensure we don't put insns between the setting of cc and the
2344 comparison by moving a setting of cc into an earlier delay
2345 slot since these insns could clobber the condition code. */
2346 set.cc = 1;

2348 /* If this is a call or jump, we might not get here. */
2349 if (CALL_P (trial_delay)
2350 || JUMP_P (trial_delay))
2351 maybe_never = 1;
2352 }
2354 /* If there are slots left to fill and our search was stopped by an
2355 unconditional branch, try the insn at the branch target. We can
2356 redirect the branch if it works.
2358 Don't do this if the insn at the branch target is a branch. */
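/* For illustration (editorial sketch): if the forward search stopped at

	insn:	call foo	<- delay slot still empty
	trial:	b L1
	...
   L1:	move $4, $5
	...

   the insn at L1 can be copied into foo's delay slot and the branch
   redirected to a label just past the copied insn:

	call foo
	 move $4, $5
	b L1'			<- L1' labels the insn after the move
   */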
2359 if (slots_to_fill != slots_filled
2360 && trial
2361 && jump_to_label_p (trial)
2362 && simplejump_p (trial)
2363 && (target == 0 || JUMP_LABEL (trial) == target)
2364 && (next_trial = next_active_insn (JUMP_LABEL (trial))) != 0
2365 && ! (NONJUMP_INSN_P (next_trial)
2366 && GET_CODE (PATTERN (next_trial)) == SEQUENCE)
2367 && !JUMP_P (next_trial)
2368 && ! insn_references_resource_p (next_trial, &set, true)
2369 && ! insn_sets_resource_p (next_trial, &set, true)
2370 && ! insn_sets_resource_p (next_trial, &needed, true)
2371 #ifdef HAVE_cc0
2372 && ! reg_mentioned_p (cc0_rtx, PATTERN (next_trial))
2373 #endif
2374 && ! (maybe_never && may_trap_or_fault_p (PATTERN (next_trial)))
2375 && (next_trial = try_split (PATTERN (next_trial), next_trial, 0))
2376 && eligible_for_delay (insn, slots_filled, next_trial, flags)
2377 && ! can_throw_internal (trial))
2378 {
2379 /* See comment in relax_delay_slots about necessity of using
2380 next_real_insn here. */
2381 rtx new_label = next_real_insn (next_trial);
2383 if (new_label != 0)
2384 new_label = get_label_before (new_label);
2385 else
2386 new_label = find_end_label ();
2388 if (new_label)
2389 {
2390 delay_list
2391 = add_to_delay_list (copy_rtx (next_trial), delay_list);
2392 slots_filled++;
2393 reorg_redirect_jump (trial, new_label);
2395 /* If we merged because we both jumped to the same place,
2396 redirect the original insn also. */
2397 if (target)
2398 reorg_redirect_jump (insn, new_label);
2399 }
2400 }
2403 /* If this is an unconditional jump, then try to get insns from the
2404 target of the jump. */
2405 if (JUMP_P (insn)
2406 && simplejump_p (insn)
2407 && slots_filled != slots_to_fill)
2408 delay_list
2409 = fill_slots_from_thread (insn, const_true_rtx,
2410 next_active_insn (JUMP_LABEL (insn)),
2411 NULL, 1, 1,
2412 own_thread_p (JUMP_LABEL (insn),
2413 JUMP_LABEL (insn), 0),
2414 slots_to_fill, &slots_filled,
2415 delay_list);

2417 if (delay_list)
2418 unfilled_slots_base[i]
2419 = emit_delay_sequence (insn, delay_list, slots_filled);
2421 if (slots_to_fill == slots_filled)
2422 unfilled_slots_base[i] = 0;
2424 note_delay_statistics (slots_filled, 0);
2425 }
2427 #ifdef DELAY_SLOTS_FOR_EPILOGUE
2428 /* See if the epilogue needs any delay slots. Try to fill them if so.
2429 The only thing we can do is scan backwards from the end of the
2430 function. If we did this in a previous pass, it is incorrect to do it
2431 again. */
2432 if (crtl->epilogue_delay_list)
2433 return;
2435 slots_to_fill = DELAY_SLOTS_FOR_EPILOGUE;
2436 if (slots_to_fill == 0)
2437 return;

2439 slots_filled = 0;
2440 CLEAR_RESOURCE (&set);
2442 /* The frame pointer and stack pointer are needed at the beginning of
2443 the epilogue, so instructions setting them can not be put in the
2444 epilogue delay slot. However, everything else needed at function
2445 end is safe, so we don't want to use end_of_function_needs here. */
2446 CLEAR_RESOURCE (&needed);
2447 if (frame_pointer_needed)
2448 {
2449 SET_HARD_REG_BIT (needed.regs, FRAME_POINTER_REGNUM);
2450 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
2451 SET_HARD_REG_BIT (needed.regs, HARD_FRAME_POINTER_REGNUM);
2452 #endif
2453 if (! EXIT_IGNORE_STACK
2454 || current_function_sp_is_unchanging)
2455 SET_HARD_REG_BIT (needed.regs, STACK_POINTER_REGNUM);
2456 }
2457 else
2458 SET_HARD_REG_BIT (needed.regs, STACK_POINTER_REGNUM);
2460 #ifdef EPILOGUE_USES
2461 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2463 if (EPILOGUE_USES (i))
2464 SET_HARD_REG_BIT (needed.regs, i);
2465 #endif
2468 for (trial = get_last_insn (); ! stop_search_p (trial, 1);
2469 trial = PREV_INSN (trial))
2470 {
2471 if (NOTE_P (trial))
2472 continue;
2473 pat = PATTERN (trial);
2474 if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
2475 continue;
2477 if (! insn_references_resource_p (trial, &set, true)
2478 && ! insn_sets_resource_p (trial, &needed, true)
2479 && ! insn_sets_resource_p (trial, &set, true)
2480 #ifdef HAVE_cc0
2481 /* Don't want to mess with cc0 here. */
2482 && ! reg_mentioned_p (cc0_rtx, pat)
2483 #endif
2484 && ! can_throw_internal (trial))
2485 {
2486 trial = try_split (pat, trial, 1);
2487 if (ELIGIBLE_FOR_EPILOGUE_DELAY (trial, slots_filled))
2488 {
2489 /* Here as well we are searching backward, so put the
2490 insns we find on the head of the list. */
2492 crtl->epilogue_delay_list
2493 = gen_rtx_INSN_LIST (VOIDmode, trial,
2494 crtl->epilogue_delay_list);
2495 mark_end_of_function_resources (trial, true);
2496 update_block (trial, trial);
2497 delete_related_insns (trial);
2499 /* Clear deleted bit so final.c will output the insn. */
2500 INSN_DELETED_P (trial) = 0;
2502 if (slots_to_fill == ++slots_filled)
2503 break;
2504 continue;
2505 }
2508 mark_set_resources (trial, &set, 0, MARK_SRC_DEST_CALL);
2509 mark_referenced_resources (trial, &needed, true);
2512 note_delay_statistics (slots_filled, 0);
2513 }
2514 #endif
2515 }
2516 /* Follow any unconditional jump at LABEL;
2517 return the ultimate label reached by any such chain of jumps.
2518 Return ret_rtx if the chain ultimately leads to a return instruction.
2519 If LABEL is not followed by a jump, return LABEL.
2520 If the chain loops or we can't find end, return LABEL,
2521 since that tells caller to avoid changing the insn. */
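/* For illustration (editorial sketch): given

	b L1
	...
   L1:	b L2
	...
   L2:	addu $2, $3, $4

   follow_jumps (L1) returns L2, so the caller can aim the original
   branch straight at L2.  A chain that cycles back to LABEL, or one
   too deep to follow safely, yields LABEL unchanged.  */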
2523 static rtx
2524 follow_jumps (rtx label)
2525 {
2526 rtx insn;
2527 rtx next;
2528 rtx value = label;
2529 int depth;
2531 if (ANY_RETURN_P (label))
2532 return label;
2533 for (depth = 0;
2534 (depth < 10
2535 && (insn = next_active_insn (value)) != 0
2536 && JUMP_P (insn)
2537 && JUMP_LABEL (insn) != NULL_RTX
2538 && ((any_uncondjump_p (insn) && onlyjump_p (insn))
2539 || GET_CODE (PATTERN (insn)) == RETURN)
2540 && (next = NEXT_INSN (insn))
2541 && BARRIER_P (next));
2542 depth++)
2543 {
2544 rtx this_label = JUMP_LABEL (insn);
2545 rtx tem;
2547 /* If we have found a cycle, make the insn jump to itself. */
2548 if (this_label == label)
2549 return label;
2550 if (ANY_RETURN_P (this_label))
2551 return this_label;
2552 tem = next_active_insn (this_label);
2553 if (tem
2554 && (GET_CODE (PATTERN (tem)) == ADDR_VEC
2555 || GET_CODE (PATTERN (tem)) == ADDR_DIFF_VEC))
2556 break;

2558 value = this_label;
2559 }
2560 if (depth == 10)
2561 return label;
2562 return value;
2563 }
2565 /* Try to find insns to place in delay slots.
2567 INSN is the jump needing SLOTS_TO_FILL delay slots. It tests CONDITION
2568 or is an unconditional branch if CONDITION is const_true_rtx.
2569 *PSLOTS_FILLED is updated with the number of slots that we have filled.
2571 THREAD is a flow-of-control, either the insns to be executed if the
2572 branch is true or if the branch is false, THREAD_IF_TRUE says which.
2574 OPPOSITE_THREAD is the thread in the opposite direction. It is used
2575 to see if any potential delay slot insns set things needed there.
2577 LIKELY is nonzero if it is extremely likely that the branch will be
2578 taken and THREAD_IF_TRUE is set. This is used for the branch at the
2579 end of a loop back up to the top.
2581 OWN_THREAD and OWN_OPPOSITE_THREAD are true if we are the only user of the
2582 thread. I.e., it is the fallthrough code of our jump or the target of the
2583 jump when we are the only jump going there.
2585 If OWN_THREAD is false, it must be the "true" thread of a jump. In that
2586 case, we can only take insns from the head of the thread for our delay
2587 slot. We then adjust the jump to point after the insns we have taken. */
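/* For illustration (editorial sketch, SPARC-like annulling assumed):
   when the branch does not own its target thread, the head insn of the
   thread can still be copied into an annulled slot, with the branch
   redirected past it:

	bne %g1, L1			bne,a %g1, L2
	 nop			=>	 add %g2, %g3, %g4
	...				...
   L1:	add %g2, %g3, %g4	   L1:	add %g2, %g3, %g4
   L2:	...			   L2:	...

   The ",a" annuls the slot when the branch falls through, and the
   original add stays in place for L1's other users.  */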
2589 static rtx
2590 fill_slots_from_thread (rtx insn, rtx condition, rtx thread,
2591 rtx opposite_thread, int likely, int thread_if_true,
2592 int own_thread, int slots_to_fill,
2593 int *pslots_filled, rtx delay_list)
2594 {
2595 rtx new_thread;
2596 struct resources opposite_needed, set, needed;
2597 rtx trial;
2598 int lose = 0;
2599 int must_annul = 0;
2600 int flags;
2602 /* Validate our arguments. */
2603 gcc_assert(condition != const_true_rtx || thread_if_true);
2604 gcc_assert(own_thread || thread_if_true);
2606 flags = get_jump_flags (insn, JUMP_LABEL (insn));
2608 /* If our thread is the end of subroutine, we can't get any delay
2609 insns from that. */
2610 if (thread == NULL_RTX || ANY_RETURN_P (thread))
2611 return delay_list;
2613 /* If this is an unconditional branch, nothing is needed at the
2614 opposite thread. Otherwise, compute what is needed there. */
2615 if (condition == const_true_rtx)
2616 CLEAR_RESOURCE (&opposite_needed);
2618 mark_target_live_regs (get_insns (), opposite_thread, &opposite_needed);
2620 /* If the insn at THREAD can be split, do it here to avoid having to
2621 update THREAD and NEW_THREAD if it is done in the loop below. Also
2622 initialize NEW_THREAD. */
2624 new_thread = thread = try_split (PATTERN (thread), thread, 0);
2626 /* Scan insns at THREAD. We are looking for an insn that can be removed
2627 from THREAD (it neither sets nor references resources that were set
2628 ahead of it and it doesn't set anything needed by the insns ahead of
2629 it) and that either can be placed in an annulling insn or isn't
2630 needed at OPPOSITE_THREAD. */
2632 CLEAR_RESOURCE (&needed);
2633 CLEAR_RESOURCE (&set);
2635 /* If we do not own this thread, we must stop as soon as we find
2636 something that we can't put in a delay slot, since all we can do
2637 is branch into THREAD at a later point. Therefore, labels stop
2638 the search if this is not the `true' thread. */
2640 for (trial = thread;
2641 ! stop_search_p (trial, ! thread_if_true) && (! lose || own_thread);
2642 trial = next_nonnote_insn (trial))
2643 {
2644 rtx pat, old_trial;
2646 /* If we have passed a label, we no longer own this thread. */
2647 if (LABEL_P (trial))
2648 {
2649 own_thread = 0;
2650 continue;
2651 }
2653 pat = PATTERN (trial);
2654 if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
2655 continue;
2657 /* If TRIAL conflicts with the insns ahead of it, we lose. Also,
2658 don't separate or copy insns that set and use CC0. */
2659 if (! insn_references_resource_p (trial, &set, true)
2660 && ! insn_sets_resource_p (trial, &set, true)
2661 && ! insn_sets_resource_p (trial, &needed, true)
2662 #ifdef HAVE_cc0
2663 && ! (reg_mentioned_p (cc0_rtx, pat)
2664 && (! own_thread || ! sets_cc0_p (pat)))
2665 #endif
2666 && ! can_throw_internal (trial))
2667 {
2668 rtx prior_insn;
2670 /* If TRIAL is redundant with some insn before INSN, we don't
2671 actually need to add it to the delay list; we can merely pretend
2672 we did. */
2673 if ((prior_insn = redundant_insn (trial, insn, delay_list)))
2674 {
2675 fix_reg_dead_note (prior_insn, insn);
2676 if (own_thread)
2677 {
2678 update_block (trial, thread);
2679 if (trial == thread)
2680 {
2681 thread = next_active_insn (thread);
2682 if (new_thread == trial)
2683 new_thread = thread;
2684 }
2686 delete_related_insns (trial);
2687 }
2688 else
2689 {
2690 update_reg_unused_notes (prior_insn, trial);
2691 new_thread = next_active_insn (trial);
2692 }
2694 continue;
2695 }
2697 /* There are two ways we can win: If TRIAL doesn't set anything
2698 needed at the opposite thread and can't trap, or if it can
2699 go into an annulled delay slot. */
2700 if (!must_annul
2701 && (condition == const_true_rtx
2702 || (! insn_sets_resource_p (trial, &opposite_needed, true)
2703 && ! may_trap_or_fault_p (pat))))
2704 {
2705 old_trial = trial;
2706 trial = try_split (pat, trial, 0);
2707 if (new_thread == old_trial)
2708 new_thread = trial;
2709 if (thread == old_trial)
2710 thread = trial;
2711 pat = PATTERN (trial);
2712 if (eligible_for_delay (insn, *pslots_filled, trial, flags))
2713 goto winner;
2714 }
2715 else if (0
2716 #ifdef ANNUL_IFTRUE_SLOTS
2717 || ! thread_if_true
2718 #endif
2719 #ifdef ANNUL_IFFALSE_SLOTS
2720 || thread_if_true
2721 #endif
2722 )
2723 {
2724 old_trial = trial;
2725 trial = try_split (pat, trial, 0);
2726 if (new_thread == old_trial)
2727 new_thread = trial;
2728 if (thread == old_trial)
2729 thread = trial;
2730 pat = PATTERN (trial);
2731 if ((must_annul || delay_list == NULL) && (thread_if_true
2732 ? check_annul_list_true_false (0, delay_list)
2733 && eligible_for_annul_false (insn, *pslots_filled, trial, flags)
2734 : check_annul_list_true_false (1, delay_list)
2735 && eligible_for_annul_true (insn, *pslots_filled, trial, flags)))
2736 {
2737 rtx temp;

2739 must_annul = 1;
2740 winner:

2742 #ifdef HAVE_cc0
2743 if (reg_mentioned_p (cc0_rtx, pat))
2744 link_cc0_insns (trial);
2745 #endif
2747 /* If we own this thread, delete the insn. If this is the
2748 destination of a branch, show that a basic block status
2749 may have been updated. In any case, mark the new
2750 starting point of this thread. */
2753 if (own_thread)
2754 {
2755 update_block (trial, thread);
2756 if (trial == thread)
2757 {
2758 thread = next_active_insn (thread);
2759 if (new_thread == trial)
2760 new_thread = thread;
2761 }
2763 /* We are moving this insn, not deleting it. We must
2764 temporarily increment the use count on any referenced
2765 label lest it be deleted by delete_related_insns. */
2766 for (note = REG_NOTES (trial);
2767 note;
2768 note = XEXP (note, 1))
2769 if (REG_NOTE_KIND (note) == REG_LABEL_OPERAND
2770 || REG_NOTE_KIND (note) == REG_LABEL_TARGET)
2771 {
2772 /* REG_LABEL_OPERAND could be
2773 NOTE_INSN_DELETED_LABEL too. */
2774 if (LABEL_P (XEXP (note, 0)))
2775 LABEL_NUSES (XEXP (note, 0))++;
2776 else
2777 gcc_assert (REG_NOTE_KIND (note)
2778 == REG_LABEL_OPERAND);
2779 }
2780 if (jump_to_label_p (trial))
2781 LABEL_NUSES (JUMP_LABEL (trial))++;
2783 delete_related_insns (trial);
2785 for (note = REG_NOTES (trial);
2786 note;
2787 note = XEXP (note, 1))
2788 if (REG_NOTE_KIND (note) == REG_LABEL_OPERAND
2789 || REG_NOTE_KIND (note) == REG_LABEL_TARGET)
2790 {
2791 /* REG_LABEL_OPERAND could be
2792 NOTE_INSN_DELETED_LABEL too. */
2793 if (LABEL_P (XEXP (note, 0)))
2794 LABEL_NUSES (XEXP (note, 0))--;
2795 else
2796 gcc_assert (REG_NOTE_KIND (note)
2797 == REG_LABEL_OPERAND);
2798 }
2799 if (jump_to_label_p (trial))
2800 LABEL_NUSES (JUMP_LABEL (trial))--;
2801 }
2802 else
2803 new_thread = next_active_insn (trial);

2805 temp = own_thread ? trial : copy_rtx (trial);
2806 if (thread_if_true)
2807 INSN_FROM_TARGET_P (temp) = 1;
2809 delay_list = add_to_delay_list (temp, delay_list);
2811 if (slots_to_fill == ++(*pslots_filled))
2812 {
2813 /* Even though we have filled all the slots, we
2814 may be branching to a location that has a
2815 redundant insn. Skip any if so. */
2816 while (new_thread && ! own_thread
2817 && ! insn_sets_resource_p (new_thread, &set, true)
2818 && ! insn_sets_resource_p (new_thread, &needed,
2819 true)
2820 && ! insn_references_resource_p (new_thread,
2821 &set, true)
2822 && (prior_insn
2823 = redundant_insn (new_thread, insn,
2824 delay_list)))
2825 {
2826 /* We know we do not own the thread, so no need
2827 to call update_block and delete_insn. */
2828 fix_reg_dead_note (prior_insn, insn);
2829 update_reg_unused_notes (prior_insn, new_thread);
2830 new_thread = next_active_insn (new_thread);
2831 }

2833 break;
2834 }
2835 }
2836 }

2840 /* This insn can't go into a delay slot. */
2841 lose = 1;
2842 mark_set_resources (trial, &set, 0, MARK_SRC_DEST_CALL);
2843 mark_referenced_resources (trial, &needed, true);
2845 /* Ensure we don't put insns between the setting of cc and the comparison
2846 by moving a setting of cc into an earlier delay slot since these insns
2847 could clobber the condition code. */
2848 set.cc = 1;
2850 /* If this insn is a register-register copy and the next insn has
2851 a use of our destination, change it to use our source. That way,
2852 it will become a candidate for our delay slot the next time
2853 through this loop. This case occurs commonly in loops that
2854 copy a value to a new register before using it.
2856 We could check for more complex cases than those tested below,
2857 but it doesn't seem worth it. It might also be a good idea to try
2858 to swap the two insns. That might do better.
2860 We can't do this if the next insn modifies our destination, because
2861 that would make the replacement into the insn invalid. We also can't
2862 do this if it modifies our source, because it might be an earlyclobber
2863 operand. This latter test also prevents updating the contents of
2864 a PRE_INC. We also can't do this if there's overlap of source and
2865 destination. Overlap may happen for larger-than-register-size modes. */
2867 if (NONJUMP_INSN_P (trial) && GET_CODE (pat) == SET
2868 && REG_P (SET_SRC (pat))
2869 && REG_P (SET_DEST (pat))
2870 && !reg_overlap_mentioned_p (SET_DEST (pat), SET_SRC (pat)))
2872 rtx next = next_nonnote_insn (trial);
2874 if (next && NONJUMP_INSN_P (next)
2875 && GET_CODE (PATTERN (next)) != USE
2876 && ! reg_set_p (SET_DEST (pat), next)
2877 && ! reg_set_p (SET_SRC (pat), next)
2878 && reg_referenced_p (SET_DEST (pat), PATTERN (next))
2879 && ! modified_in_p (SET_DEST (pat), next))
2880 validate_replace_rtx (SET_DEST (pat), SET_SRC (pat), next);
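/* For illustration (editorial sketch): given

	trial:	move $8, $9		<- register-register copy
	next:	addu $10, $8, $11

   rewriting NEXT as "addu $10, $9, $11" removes its dependence on the
   copy, so a later pass through this loop can pull the copy itself
   into the delay slot.  */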
2884 /* If we stopped on a branch insn that has delay slots, see if we can
2885 steal some of the insns in those slots. */
2886 if (trial && NONJUMP_INSN_P (trial)
2887 && GET_CODE (PATTERN (trial)) == SEQUENCE
2888 && JUMP_P (XVECEXP (PATTERN (trial), 0, 0)))
2889 {
2890 /* If this is the `true' thread, we will want to follow the jump,
2891 so we can only do this if we have taken everything up to here. */
2892 if (thread_if_true && trial == new_thread)
2893 {
2894 delay_list
2895 = steal_delay_list_from_target (insn, condition, PATTERN (trial),
2896 delay_list, &set, &needed,
2897 &opposite_needed, slots_to_fill,
2898 pslots_filled, &must_annul,
2899 &new_thread);
2900 /* If we owned the thread and are told that it branched
2901 elsewhere, make sure we own the thread at the new location. */
2902 if (own_thread && trial != new_thread)
2903 own_thread = own_thread_p (new_thread, new_thread, 0);
2904 }
2905 else if (! thread_if_true)
2906 delay_list
2907 = steal_delay_list_from_fallthrough (insn, condition,
2908 PATTERN (trial),
2909 delay_list, &set, &needed,
2910 &opposite_needed, slots_to_fill,
2911 pslots_filled, &must_annul);
2912 }
2914 /* If we haven't found anything for this delay slot and it is very
2915 likely that the branch will be taken, see if the insn at our target
2916 increments or decrements a register with an increment that does not
2917 depend on the destination register. If so, try to place the opposite
2918 arithmetic insn after the jump insn and put the arithmetic insn in the
2919 delay slot. If we can't do this, return. */
2920 if (delay_list == 0 && likely
2921 && new_thread && !ANY_RETURN_P (new_thread)
2922 && NONJUMP_INSN_P (new_thread)
2923 && GET_CODE (PATTERN (new_thread)) != ASM_INPUT
2924 && asm_noperands (PATTERN (new_thread)) < 0)
2925 {
2926 rtx pat = PATTERN (new_thread);
2927 rtx dest;
2928 rtx src;

2930 trial = new_thread;
2931 pat = PATTERN (trial);
2933 if (!NONJUMP_INSN_P (trial)
2934 || GET_CODE (pat) != SET
2935 || ! eligible_for_delay (insn, 0, trial, flags)
2936 || can_throw_internal (trial))
2937 return 0;
2939 dest = SET_DEST (pat), src = SET_SRC (pat);
2940 if ((GET_CODE (src) == PLUS || GET_CODE (src) == MINUS)
2941 && rtx_equal_p (XEXP (src, 0), dest)
2942 && (!FLOAT_MODE_P (GET_MODE (src))
2943 || flag_unsafe_math_optimizations)
2944 && ! reg_overlap_mentioned_p (dest, XEXP (src, 1))
2945 && ! side_effects_p (pat))
2947 rtx other = XEXP (src, 1);
2948 rtx new_arith;
2949 rtx ninsn;
2951 /* If this is a constant adjustment, use the same code with
2952 the negated constant. Otherwise, reverse the sense of the
2953 arithmetic. */
2954 if (CONST_INT_P (other))
2955 new_arith = gen_rtx_fmt_ee (GET_CODE (src), GET_MODE (src), dest,
2956 negate_rtx (GET_MODE (src), other));
2958 new_arith = gen_rtx_fmt_ee (GET_CODE (src) == PLUS ? MINUS : PLUS,
2959 GET_MODE (src), dest, other);
2961 ninsn = emit_insn_after (gen_rtx_SET (VOIDmode, dest, new_arith),
2962 insn);
2964 if (recog_memoized (ninsn) < 0
2965 || (extract_insn (ninsn), ! constrain_operands (1)))
2966 {
2967 delete_related_insns (ninsn);
2968 return 0;
2969 }

2971 if (own_thread)
2972 {
2973 update_block (trial, thread);
2974 if (trial == thread)
2975 {
2976 thread = next_active_insn (thread);
2977 if (new_thread == trial)
2978 new_thread = thread;
2979 }
2980 delete_related_insns (trial);
2981 }
2982 else
2983 new_thread = next_active_insn (trial);
2985 ninsn = own_thread ? trial : copy_rtx (trial);
2986 if (thread_if_true)
2987 INSN_FROM_TARGET_P (ninsn) = 1;

2989 delay_list = add_to_delay_list (ninsn, NULL_RTX);
2990 (*pslots_filled)++;
2991 }
2992 }
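/* For illustration (editorial sketch): for a likely-taken loop branch

	bne $8, $0, LOOP		...
	 nop			LOOP:	addu $9, $9, 4

   the addu at the target may be placed in the slot, provided its
   inverse is emitted after the branch and the branch is redirected
   past the original insn:

	bne $8, $0, LOOP'
	 addu $9, $9, 4
	subu $9, $9, 4		<- cancels the addu on fall through
   */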
2994 if (delay_list && must_annul)
2995 INSN_ANNULLED_BRANCH_P (insn) = 1;
2997 /* If we are to branch into the middle of this thread, find an appropriate
2998 label or make a new one if none, and redirect INSN to it. If we hit the
2999 end of the function, use the end-of-function label. */
3000 if (new_thread != thread)
3001 {
3002 rtx label;
3004 gcc_assert (thread_if_true);
3006 if (new_thread && JUMP_P (new_thread)
3007 && (simplejump_p (new_thread)
3008 || GET_CODE (PATTERN (new_thread)) == RETURN)
3009 && redirect_with_delay_list_safe_p (insn,
3010 JUMP_LABEL (new_thread),
3011 delay_list))
3012 new_thread = follow_jumps (JUMP_LABEL (new_thread));
3014 if (ANY_RETURN_P (new_thread))
3015 label = find_end_label ();
3016 else if (LABEL_P (new_thread))
3017 label = new_thread;
3018 else
3019 label = get_label_before (new_thread);

3021 if (label)
3022 reorg_redirect_jump (insn, label);
3023 }

3025 return delay_list;
3026 }
3028 /* Make another attempt to find insns to place in delay slots.
3030 We previously looked for insns located in front of the delay insn
3031 and, for non-jump delay insns, located behind the delay insn.
3033 Here only try to schedule jump insns and try to move insns from either
3034 the target or the following insns into the delay slot. If annulling is
3035 supported, we will be likely to do this. Otherwise, we can do this only
3036 if safe. */
3038 static void
3039 fill_eager_delay_slots (void)
3040 {
3041 rtx insn;
3042 int i;
3043 int num_unfilled_slots = unfilled_slots_next - unfilled_slots_base;
3045 for (i = 0; i < num_unfilled_slots; i++)
3046 {
3047 rtx condition;
3048 rtx target_label, insn_at_target, fallthrough_insn;
3049 rtx delay_list = 0;
3050 int own_target;
3051 int own_fallthrough;
3052 int prediction, slots_to_fill, slots_filled;
3054 insn = unfilled_slots_base[i];
3055 if (insn == 0
3056 || INSN_DELETED_P (insn)
3057 || !JUMP_P (insn)
3058 || ! (condjump_p (insn) || condjump_in_parallel_p (insn)))
3059 continue;
3061 slots_to_fill = num_delay_slots (insn);
3062 /* Some machine description have defined instructions to have
3063 delay slots only in certain circumstances which may depend on
3064 nearby insns (which change due to reorg's actions).
3066 For example, the PA port normally has delay slots for unconditional
3069 However, the PA port claims such jumps do not have a delay slot
3070 if they are immediate successors of certain CALL_INSNs. This
3071 allows the port to favor filling the delay slot of the call with
3072 the unconditional jump. */
3073 if (slots_to_fill == 0)
3074 continue;

3076 slots_filled = 0;
3077 target_label = JUMP_LABEL (insn);
3078 condition = get_branch_condition (insn, target_label);

3080 if (condition == 0)
3081 continue;
3083 /* Get the next active fallthrough and target insns and see if we own
3084 them. Then see whether the branch is likely true. We don't need
3085 to do a lot of this for unconditional branches. */
3087 insn_at_target = first_active_target_insn (target_label);
3088 own_target = own_thread_p (target_label, target_label, 0);
3090 if (condition == const_true_rtx)
3091 {
3092 own_fallthrough = 0;
3093 fallthrough_insn = 0;
3094 prediction = 2;
3095 }
3096 else
3097 {
3098 fallthrough_insn = next_active_insn (insn);
3099 own_fallthrough = own_thread_p (NEXT_INSN (insn), NULL_RTX, 1);
3100 prediction = mostly_true_jump (insn, condition);
3101 }
3103 /* If this insn is expected to branch, first try to get insns from our
3104 target, then our fallthrough insns. If it is not expected to branch,
3105 try the other order. */

3107 if (prediction > 0)
3108 {
3109 delay_list
3110 = fill_slots_from_thread (insn, condition, insn_at_target,
3111 fallthrough_insn, prediction == 2, 1,
3112 own_target,
3113 slots_to_fill, &slots_filled, delay_list);
3115 if (delay_list == 0 && own_fallthrough)
3116 {
3117 /* Even though we didn't find anything for delay slots,
3118 we might have found a redundant insn which we deleted
3119 from the thread that was filled. So we have to recompute
3120 the next insn at the target. */
3121 target_label = JUMP_LABEL (insn);
3122 insn_at_target = first_active_target_insn (target_label);

3124 delay_list
3125 = fill_slots_from_thread (insn, condition, fallthrough_insn,
3126 insn_at_target, 0, 0,
3127 own_fallthrough,
3128 slots_to_fill, &slots_filled,
3129 delay_list);
3130 }
3131 }
3132 else
3133 {
3134 if (own_fallthrough)
3135 delay_list
3136 = fill_slots_from_thread (insn, condition, fallthrough_insn,
3137 insn_at_target, 0, 0,
3138 own_fallthrough,
3139 slots_to_fill, &slots_filled,
3140 delay_list);

3142 if (delay_list == 0)
3143 delay_list
3144 = fill_slots_from_thread (insn, condition, insn_at_target,
3145 next_active_insn (insn), 0, 1,
3146 own_target,
3147 slots_to_fill, &slots_filled,
3148 delay_list);
3149 }

3151 if (delay_list)
3152 unfilled_slots_base[i]
3153 = emit_delay_sequence (insn, delay_list, slots_filled);
3155 if (slots_to_fill == slots_filled)
3156 unfilled_slots_base[i] = 0;
3158 note_delay_statistics (slots_filled, 1);
3159 }
3160 }
3162 static void delete_computation (rtx insn);
3164 /* Recursively delete prior insns that compute the value (used only by INSN
3165 which the caller is deleting) stored in the register mentioned by NOTE
3166 which is a REG_DEAD note associated with INSN. */
3168 static void
3169 delete_prior_computation (rtx note, rtx insn)
3170 {
3171 rtx our_prev;
3172 rtx reg = XEXP (note, 0);
3174 for (our_prev = prev_nonnote_insn (insn);
3175 our_prev && (NONJUMP_INSN_P (our_prev)
3176 || CALL_P (our_prev));
3177 our_prev = prev_nonnote_insn (our_prev))
3179 rtx pat = PATTERN (our_prev);
3181 /* If we reach a CALL which is not calling a const function
3182 or the callee pops the arguments, then give up. */
3183 if (CALL_P (our_prev)
3184 && (! RTL_CONST_CALL_P (our_prev)
3185 || GET_CODE (pat) != SET || GET_CODE (SET_SRC (pat)) != CALL))
3186 break;
3188 /* If we reach a SEQUENCE, it is too complex to try to
3189 do anything with it, so give up. We can be run during
3190 and after reorg, so SEQUENCE rtl can legitimately show
3191 up here. */
3192 if (GET_CODE (pat) == SEQUENCE)
3193 break;
3195 if (GET_CODE (pat) == USE
3196 && NONJUMP_INSN_P (XEXP (pat, 0)))
3197 /* reorg creates USEs that look like this. We leave them
3198 alone because reorg needs them for its own purposes. */
3199 continue;
3201 if (reg_set_p (reg, pat))
3202 {
3203 if (side_effects_p (pat) && !CALL_P (our_prev))
3204 break;
3206 if (GET_CODE (pat) == PARALLEL)
3207 {
3208 /* If we find a SET of something else, we can't
3209 delete the insn. */

3211 int i;

3213 for (i = 0; i < XVECLEN (pat, 0); i++)
3214 {
3215 rtx part = XVECEXP (pat, 0, i);

3217 if (GET_CODE (part) == SET
3218 && SET_DEST (part) != reg)
3219 break;
3220 }

3222 if (i == XVECLEN (pat, 0))
3223 delete_computation (our_prev);
3225 else if (GET_CODE (pat) == SET
3226 && REG_P (SET_DEST (pat)))
3227 {
3228 int dest_regno = REGNO (SET_DEST (pat));
3229 int dest_endregno = END_REGNO (SET_DEST (pat));
3230 int regno = REGNO (reg);
3231 int endregno = END_REGNO (reg);
3233 if (dest_regno >= regno
3234 && dest_endregno <= endregno)
3235 delete_computation (our_prev);
3237 /* We may have a multi-word hard register and some, but not
3238 all, of the words of the register are needed in subsequent
3239 insns. Write REG_UNUSED notes for those parts that were not
3240 needed. */
3241 else if (dest_regno <= regno
3242 && dest_endregno >= endregno)
3243 {
3244 int i;

3246 add_reg_note (our_prev, REG_UNUSED, reg);
3248 for (i = dest_regno; i < dest_endregno; i++)
3249 if (! find_regno_note (our_prev, REG_UNUSED, i))
3250 break;
3252 if (i == dest_endregno)
3253 delete_computation (our_prev);
3254 }
3255 }

3257 break;
3258 }
3260 /* If PAT references the register that dies here, it is an
3261 additional use. Hence any prior SET isn't dead. However, this
3262 insn becomes the new place for the REG_DEAD note. */
3263 if (reg_overlap_mentioned_p (reg, pat))
3264 {
3265 XEXP (note, 1) = REG_NOTES (our_prev);
3266 REG_NOTES (our_prev) = note;
3267 break;
3268 }
3269 }
3270 }
3272 /* Delete INSN and recursively delete insns that compute values used only
3273 by INSN. This uses the REG_DEAD notes computed during flow analysis.
3275 Look at all our REG_DEAD notes. If a previous insn does nothing other
3276 than set a register that dies in this insn, we can delete that insn
3277 as well.
3279 On machines with CC0, if CC0 is used in this insn, we may be able to
3280 delete the insn that set it. */
3282 static void
3283 delete_computation (rtx insn)
3284 {
3285 rtx note, next;

3287 #ifdef HAVE_cc0
3288 if (reg_referenced_p (cc0_rtx, PATTERN (insn)))
3289 {
3290 rtx prev = prev_nonnote_insn (insn);
3291 /* We assume that at this stage
3292 CC's are always set explicitly
3293 and always immediately before the jump that
3294 will use them. So if the previous insn
3295 exists to set the CC's, delete it
3296 (unless it performs auto-increments, etc.). */
3297 if (prev && NONJUMP_INSN_P (prev)
3298 && sets_cc0_p (PATTERN (prev)))
3299 {
3300 if (sets_cc0_p (PATTERN (prev)) > 0
3301 && ! side_effects_p (PATTERN (prev)))
3302 delete_computation (prev);
3303 else
3304 /* Otherwise, show that cc0 won't be used. */
3305 add_reg_note (prev, REG_UNUSED, cc0_rtx);
3306 }
3307 }
3308 #endif
3310 for (note = REG_NOTES (insn); note; note = next)
3312 next = XEXP (note, 1);
3314 if (REG_NOTE_KIND (note) != REG_DEAD
3315 /* Verify that the REG_NOTE is legitimate. */
3316 || !REG_P (XEXP (note, 0)))
3317 continue;

3319 delete_prior_computation (note, insn);
3320 }

3322 delete_related_insns (insn);
3323 }
3325 /* If all INSN does is set the pc, delete it,
3326 and delete the insn that set the condition codes for it
3327 if that's what the previous thing was. */
3329 static void
3330 delete_jump (rtx insn)
3331 {
3332 rtx set = single_set (insn);

3334 if (set && GET_CODE (SET_DEST (set)) == PC)
3335 delete_computation (insn);
3336 }
3339 /* Once we have tried two ways to fill a delay slot, make a pass over the
3340 code to try to improve the results and to do such things as more jump
3341 threading. */
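/* For illustration (editorial sketch): one such improvement inverts a
   conditional branch around an unconditional one,

	beq $8, $0, L1		bne $8, $0, L2
	b L2		=>	...
   L1:	...		   L1:	...

   after which the unconditional jump can be deleted.  */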
3343 static void
3344 relax_delay_slots (rtx first)
3345 {
3346 rtx insn, next, pat;
3347 rtx trial, delay_insn, target_label;
3349 /* Look at every JUMP_INSN and see if we can improve it. */
3350 for (insn = first; insn; insn = next)
3351 {
3352 rtx other;

3354 next = next_active_insn (insn);
3356 /* If this is a jump insn, see if it now jumps to a jump, jumps to
3357 the next insn, or jumps to a label that is not the last of a
3358 group of consecutive labels. */
3359 if (JUMP_P (insn)
3360 && (condjump_p (insn) || condjump_in_parallel_p (insn))
3361 && !ANY_RETURN_P (target_label = JUMP_LABEL (insn)))
3362 {
3363 target_label = skip_consecutive_labels (follow_jumps (target_label));
3364 if (ANY_RETURN_P (target_label))
3365 target_label = find_end_label ();
3367 if (target_label && next_active_insn (target_label) == next
3368 && ! condjump_in_parallel_p (insn))
3369 {
3370 delete_jump (insn);
3371 continue;
3372 }
3374 if (target_label && target_label != JUMP_LABEL (insn))
3375 reorg_redirect_jump (insn, target_label);
3377 /* See if this jump conditionally branches around an unconditional
3378 jump. If so, invert this jump and point it to the target of the
3379 second jump. */
3380 if (next && JUMP_P (next)
3381 && any_condjump_p (insn)
3382 && (simplejump_p (next) || GET_CODE (PATTERN (next)) == RETURN)
3383 && target_label
3384 && next_active_insn (target_label) == next_active_insn (next)
3385 && no_labels_between_p (insn, next))
3386 {
3387 rtx label = JUMP_LABEL (next);
3389 /* Be careful how we do this to avoid deleting code or
3390 labels that are momentarily dead. See similar optimization
3391 in jump.c.
3393 We also need to ensure we properly handle the case when
3394 invert_jump fails. */
3396 ++LABEL_NUSES (target_label);
3397 if (!ANY_RETURN_P (label))
3398 ++LABEL_NUSES (label);
3400 if (invert_jump (insn, label, 1))
3401 {
3402 delete_related_insns (next);
3403 next = insn;
3404 }
3406 if (!ANY_RETURN_P (label))
3407 --LABEL_NUSES (label);
3409 if (--LABEL_NUSES (target_label) == 0)
3410 delete_related_insns (target_label);
3411 continue;
3412 }
3416 /* If this is an unconditional jump and the previous insn is a
3417 conditional jump, try reversing the condition of the previous
3418 insn and swapping our targets. The next pass might be able to
3419 fill the slots.
3421 Don't do this if we expect the conditional branch to be true, because
3422 we would then be making the more common case longer. */
3424 if (JUMP_P (insn)
3425 && (simplejump_p (insn) || GET_CODE (PATTERN (insn)) == RETURN)
3426 && (other = prev_active_insn (insn)) != 0
3427 && any_condjump_p (other)
3428 && no_labels_between_p (other, insn)
3429 && 0 > mostly_true_jump (other,
3430 get_branch_condition (other,
3431 JUMP_LABEL (other))))
3432 {
3433 rtx other_target = JUMP_LABEL (other);
3434 target_label = JUMP_LABEL (insn);
3436 if (invert_jump (other, target_label, 0))
3437 reorg_redirect_jump (insn, other_target);
3438 }
3440 /* Now look only at cases where we have filled a delay slot. */
3441 if (!NONJUMP_INSN_P (insn)
3442 || GET_CODE (PATTERN (insn)) != SEQUENCE)
3443 continue;
3445 pat = PATTERN (insn);
3446 delay_insn = XVECEXP (pat, 0, 0);
3448 /* See if the first insn in the delay slot is redundant with some
3449 previous insn. Remove it from the delay slot if so; then set up
3450 to reprocess this insn. */
3451 if (redundant_insn (XVECEXP (pat, 0, 1), delay_insn, 0))
3452 {
3453 delete_from_delay_slot (XVECEXP (pat, 0, 1));
3454 next = prev_active_insn (next);
3455 continue;
3456 }
3458 /* See if we have a RETURN insn with a filled delay slot followed
3459 by a RETURN insn with an unfilled delay slot. If so, we can delete
3460 the first RETURN (but not its delay insn). This gives the same
3461 effect in fewer instructions.
3463 Only do so if optimizing for size since this results in slower, but
3464 smaller code. */
3465 if (optimize_function_for_size_p (cfun)
3466 && GET_CODE (PATTERN (delay_insn)) == RETURN
3467 && next
3468 && JUMP_P (next)
3469 && GET_CODE (PATTERN (next)) == RETURN)
3470 {
3471 rtx after;
3472 int i;
3474 /* Delete the RETURN and just execute the delay list insns.
3476 We do this by deleting the INSN containing the SEQUENCE, then
3477 re-emitting the insns separately, and then deleting the RETURN.
3478 This allows the count of the jump target to be properly
3481 Note that we need to change the INSN_UID of the re-emitted insns
3482 since it is used to hash the insns for mark_target_live_regs and
3483 the re-emitted insns will no longer be wrapped up in a SEQUENCE.
3485 Clear the from target bit, since these insns are no longer
3486 in delay slots. */
3487 for (i = 0; i < XVECLEN (pat, 0); i++)
3488 INSN_FROM_TARGET_P (XVECEXP (pat, 0, i)) = 0;
3490 trial = PREV_INSN (insn);
3491 delete_related_insns (insn);
3492 gcc_assert (GET_CODE (pat) == SEQUENCE);
3493 add_insn_after (delay_insn, trial, NULL);
3494 after = delay_insn;
3495 for (i = 1; i < XVECLEN (pat, 0); i++)
3496 after = emit_copy_of_insn_after (XVECEXP (pat, 0, i), after);
3497 delete_scheduled_jump (delay_insn);
3498 continue;
3499 }
3501 /* Now look only at the cases where we have a filled JUMP_INSN. */
3502 if (!JUMP_P (XVECEXP (PATTERN (insn), 0, 0))
3503 || ! (condjump_p (XVECEXP (PATTERN (insn), 0, 0))
3504 || condjump_in_parallel_p (XVECEXP (PATTERN (insn), 0, 0))))
3505 continue;
3507 target_label = JUMP_LABEL (delay_insn);
3509 if (!ANY_RETURN_P (target_label))
3510 {
3511 /* If this jump goes to another unconditional jump, thread it, but
3512 don't convert a jump into a RETURN here. */
3513 trial = skip_consecutive_labels (follow_jumps (target_label));
3514 if (ANY_RETURN_P (trial))
3515 trial = find_end_label ();
3517 if (trial && trial != target_label
3518 && redirect_with_delay_slots_safe_p (delay_insn, trial, insn))
3520 reorg_redirect_jump (delay_insn, trial);
3521 target_label = trial;
3524 /* If the first insn at TARGET_LABEL is redundant with a previous
3525 insn, redirect the jump to the following insn and process again.
3526 We use next_real_insn instead of next_active_insn so we
3527 don't skip USE-markers, or we'll end up with incorrect
3529 trial = next_real_insn (target_label);
3530 if (trial && GET_CODE (PATTERN (trial)) != SEQUENCE
3531 && redundant_insn (trial, insn, 0)
3532 && ! can_throw_internal (trial))
3533 {
3534 /* Figure out where to emit the special USE insn so we don't
3535 later incorrectly compute register live/death info. */
3536 rtx tmp = next_active_insn (trial);
3537 if (tmp == 0)
3538 tmp = find_end_label ();

3540 if (tmp)
3541 {
3542 /* Insert the special USE insn and update dataflow info. */
3543 update_block (trial, tmp);
3545 /* Now emit a label before the special USE insn, and
3546 redirect our jump to the new label. */
3547 target_label = get_label_before (PREV_INSN (tmp));
3548 reorg_redirect_jump (delay_insn, target_label);
3549 next = insn;
3550 continue;
3551 }
3552 }
3554 /* Similarly, if it is an unconditional jump with one insn in its
3555 delay list and that insn is redundant, thread the jump. */
3556 if (trial && GET_CODE (PATTERN (trial)) == SEQUENCE
3557 && XVECLEN (PATTERN (trial), 0) == 2
3558 && JUMP_P (XVECEXP (PATTERN (trial), 0, 0))
3559 && (simplejump_p (XVECEXP (PATTERN (trial), 0, 0))
3560 || GET_CODE (PATTERN (XVECEXP (PATTERN (trial), 0, 0))) == RETURN)
3561 && redundant_insn (XVECEXP (PATTERN (trial), 0, 1), insn, 0))
3562 {
3563 target_label = JUMP_LABEL (XVECEXP (PATTERN (trial), 0, 0));
3564 if (ANY_RETURN_P (target_label))
3565 target_label = find_end_label ();

3567 if (target_label
3568 && redirect_with_delay_slots_safe_p (delay_insn, target_label,
3569 insn))
3570 {
3571 reorg_redirect_jump (delay_insn, target_label);
3572 next = insn;
3573 continue;
3574 }
3575 }
3578 if (! INSN_ANNULLED_BRANCH_P (delay_insn)
3579 && prev_active_insn (target_label) == insn
3580 && ! condjump_in_parallel_p (delay_insn)
3581 #ifdef HAVE_cc0
3582 /* If the last insn in the delay slot sets CC0 for some insn,
3583 various code assumes that it is in a delay slot. We could
3584 put it back where it belonged and delete the register notes,
3585 but it doesn't seem worthwhile in this uncommon case. */
3586 && ! find_reg_note (XVECEXP (pat, 0, XVECLEN (pat, 0) - 1),
3587 REG_CC_USER, NULL_RTX)
3588 #endif
3589 )
3590 {
3591 rtx after;
3592 int i;
3594 /* All this insn does is execute its delay list and jump to the
3595 following insn. So delete the jump and just execute the delay
3598 We do this by deleting the INSN containing the SEQUENCE, then
3599 re-emitting the insns separately, and then deleting the jump.
3600 This allows the count of the jump target to be properly
3603 Note that we need to change the INSN_UID of the re-emitted insns
3604 since it is used to hash the insns for mark_target_live_regs and
3605 the re-emitted insns will no longer be wrapped up in a SEQUENCE.
3607 Clear the from target bit, since these insns are no longer
3608 in delay slots. */
3609 for (i = 0; i < XVECLEN (pat, 0); i++)
3610 INSN_FROM_TARGET_P (XVECEXP (pat, 0, i)) = 0;
3612 trial = PREV_INSN (insn);
3613 delete_related_insns (insn);
3614 gcc_assert (GET_CODE (pat) == SEQUENCE);
3615 add_insn_after (delay_insn, trial, NULL);
3616 after = delay_insn;
3617 for (i = 1; i < XVECLEN (pat, 0); i++)
3618 after = emit_copy_of_insn_after (XVECEXP (pat, 0, i), after);
3619 delete_scheduled_jump (delay_insn);
3620 continue;
3621 }
3623 /* See if this is an unconditional jump around a single insn which is
3624 identical to the one in its delay slot. In this case, we can just
3625 delete the branch and the insn in its delay slot. */
3626 if (next && NONJUMP_INSN_P (next)
3627 && prev_label (next_active_insn (next)) == target_label
3628 && simplejump_p (insn)
3629 && XVECLEN (pat, 0) == 2
3630 && rtx_equal_p (PATTERN (next), PATTERN (XVECEXP (pat, 0, 1))))
3631 {
3632 delete_related_insns (insn);
3633 continue;
3634 }
3636 /* See if this jump (with its delay slots) conditionally branches
3637 around an unconditional jump (without delay slots). If so, invert
3638 this jump and point it to the target of the second jump. We cannot
3639 do this for annulled jumps, though. Again, don't convert a jump to
3640 a RETURN here. */
3641 if (! INSN_ANNULLED_BRANCH_P (delay_insn)
3642 && any_condjump_p (delay_insn)
3643 && next && JUMP_P (next)
3644 && (simplejump_p (next) || GET_CODE (PATTERN (next)) == RETURN)
3645 && next_active_insn (target_label) == next_active_insn (next)
3646 && no_labels_between_p (insn, next))
3647 {
3648 rtx label = JUMP_LABEL (next);
3649 rtx old_label = JUMP_LABEL (delay_insn);
3651 if (ANY_RETURN_P (label))
3652 label = find_end_label ();
3654 /* find_end_label can generate a new label. Check this first. */
3655 if (label
3656 && no_labels_between_p (insn, next)
3657 && redirect_with_delay_slots_safe_p (delay_insn, label, insn))
3658 {
3659 /* Be careful how we do this to avoid deleting code or labels
3660 that are momentarily dead. See similar optimization in
3661 jump.c. */
3663 ++LABEL_NUSES (old_label);
3665 if (invert_jump (delay_insn, label, 1))
3666 {
3667 int i;
3669 /* Must update the INSN_FROM_TARGET_P bits now that
3670 the branch is reversed, so that mark_target_live_regs
3671 will handle the delay slot insn correctly. */
3672 for (i = 1; i < XVECLEN (PATTERN (insn), 0); i++)
3673 {
3674 rtx slot = XVECEXP (PATTERN (insn), 0, i);
3675 INSN_FROM_TARGET_P (slot) = ! INSN_FROM_TARGET_P (slot);
3676 }
3678 delete_related_insns (next);
3679 next = insn;
3680 }
3682 if (old_label && --LABEL_NUSES (old_label) == 0)
3683 delete_related_insns (old_label);
3684 continue;
3685 }
3686 }
3688 /* If we own the thread opposite the way this insn branches, see if we
3689 can merge its delay slots with following insns. */
3690 if (INSN_FROM_TARGET_P (XVECEXP (pat, 0, 1))
3691 && own_thread_p (NEXT_INSN (insn), 0, 1))
3692 try_merge_delay_insns (insn, next);
3693 else if (! INSN_FROM_TARGET_P (XVECEXP (pat, 0, 1))
3694 && own_thread_p (target_label, target_label, 0))
3695 try_merge_delay_insns (insn, next_active_insn (target_label));
3697 /* If we get here, we haven't deleted INSN. But we may have deleted
3698 NEXT, so recompute it. */
3699 next = next_active_insn (insn);
3700 }
3701 }
3705 /* Look for filled jumps to the end of function label. We can try to convert
3706 them into RETURN insns if the insns in the delay slot are valid for the
3707 RETURN as well. */
3709 static void
3710 make_return_insns (rtx first)
3711 {
3712 rtx insn, jump_insn, pat;
3713 rtx real_return_label = end_of_function_label;
3714 int slots, i;
3716 #ifdef DELAY_SLOTS_FOR_EPILOGUE
3717 /* If a previous pass filled delay slots in the epilogue, things get a
3718 bit more complicated, as those filler insns would generally (without
3719 data flow analysis) have to be executed after any existing branch
3720 delay slot filler insns. It is also unknown whether such a
3721 transformation would actually be profitable. Note that the existing
3722 code only cares for branches with (some) filled delay slots. */
3723 if (crtl->epilogue_delay_list != NULL)
3724 return;
3725 #endif
3727 /* See if there is a RETURN insn in the function other than the one we
3728 made for END_OF_FUNCTION_LABEL. If so, set up anything we can't change
3729 into a RETURN to jump to it. */
3730 for (insn = first; insn; insn = NEXT_INSN (insn))
3731 if (JUMP_P (insn) && GET_CODE (PATTERN (insn)) == RETURN)
3732 {
3733 real_return_label = get_label_before (insn);
3734 break;
3735 }
3737 /* Show an extra usage of REAL_RETURN_LABEL so it won't go away if it
3738 was equal to END_OF_FUNCTION_LABEL. */
3739 LABEL_NUSES (real_return_label)++;
3741 /* Clear the list of insns to fill so we can use it. */
3742 obstack_free (&unfilled_slots_obstack, unfilled_firstobj);
3744 for (insn = first; insn; insn = NEXT_INSN (insn))
3745 {
3746 int flags;

3748 /* Only look at filled JUMP_INSNs that go to the end of function
3749 label. */
3750 if (!NONJUMP_INSN_P (insn)
3751 || GET_CODE (PATTERN (insn)) != SEQUENCE
3752 || !JUMP_P (XVECEXP (PATTERN (insn), 0, 0))
3753 || JUMP_LABEL (XVECEXP (PATTERN (insn), 0, 0)) != end_of_function_label)
3754 continue;
3756 pat = PATTERN (insn);
3757 jump_insn = XVECEXP (pat, 0, 0);
3759 /* If we can't make the jump into a RETURN, try to redirect it to the best
3760 RETURN and go on to the next insn. */
3761 if (! reorg_redirect_jump (jump_insn, ret_rtx))
3762 {
3763 /* Make sure redirecting the jump will not invalidate the delay
3764 slots. */
3765 if (redirect_with_delay_slots_safe_p (jump_insn,
3766 real_return_label,
3767 insn))
3768 reorg_redirect_jump (jump_insn, real_return_label);
3769 continue;
3770 }
3772 /* See if this RETURN can accept the insns current in its delay slot.
3773 It can if it has more or an equal number of slots and the contents
3774 of each is valid. */
3776 flags = get_jump_flags (jump_insn, JUMP_LABEL (jump_insn));
3777 slots = num_delay_slots (jump_insn);
3778 if (slots >= XVECLEN (pat, 0) - 1)
3779 {
3780 for (i = 1; i < XVECLEN (pat, 0); i++)
3781 if (! (
3782 #ifdef ANNUL_IFFALSE_SLOTS
3783 (INSN_ANNULLED_BRANCH_P (jump_insn)
3784 && INSN_FROM_TARGET_P (XVECEXP (pat, 0, i)))
3785 ? eligible_for_annul_false (jump_insn, i - 1,
3786 XVECEXP (pat, 0, i), flags) :
3787 #endif
3788 #ifdef ANNUL_IFTRUE_SLOTS
3789 (INSN_ANNULLED_BRANCH_P (jump_insn)
3790 && ! INSN_FROM_TARGET_P (XVECEXP (pat, 0, i)))
3791 ? eligible_for_annul_true (jump_insn, i - 1,
3792 XVECEXP (pat, 0, i), flags) :
3793 #endif
3794 eligible_for_delay (jump_insn, i - 1,
3795 XVECEXP (pat, 0, i), flags)))
3796 break;
3797 }
3798 else
3799 i = 0;
3801 if (i == XVECLEN (pat, 0))
3802 continue;
3804 /* We have to do something with this insn. If it is an unconditional
3805 RETURN, delete the SEQUENCE and output the individual insns,
3806 followed by the RETURN. Then set things up so we try to find
3807 insns for its delay slots, if it needs some. */
3808 if (GET_CODE (PATTERN (jump_insn)) == RETURN)
3809 {
3810 rtx prev = PREV_INSN (insn);
3812 delete_related_insns (insn);
3813 for (i = 1; i < XVECLEN (pat, 0); i++)
3814 prev = emit_insn_after (PATTERN (XVECEXP (pat, 0, i)), prev);
3816 insn = emit_jump_insn_after (PATTERN (jump_insn), prev);
3817 emit_barrier_after (insn);

3819 if (slots)
3820 obstack_ptr_grow (&unfilled_slots_obstack, insn);
3821 }
3822 else
3823 /* It is probably more efficient to keep this with its current
3824 delay slot as a branch to a RETURN. */
3825 reorg_redirect_jump (jump_insn, real_return_label);
3826 }
3828 /* Now delete REAL_RETURN_LABEL if we never used it. Then try to fill any
3829 new delay slots we have created. */
3830 if (--LABEL_NUSES (real_return_label) == 0)
3831 delete_related_insns (real_return_label);
3833 fill_simple_delay_slots (1);
3834 fill_simple_delay_slots (0);
3835 }
3838 /* Try to find insns to place in delay slots. */
3840 static void
3841 dbr_schedule (rtx first)
3842 {
3843 rtx insn, next, epilogue_insn = 0;
3844 int i, max_uid;
3846 /* If the current function has no insns other than the prologue and
3847 epilogue, then do not try to fill any delay slots. */
3848 if (n_basic_blocks == NUM_FIXED_BLOCKS)
3849 return;
3851 /* Find the highest INSN_UID and allocate and initialize our map from
3852 INSN_UID's to position in code. */
3853 for (max_uid = 0, insn = first; insn; insn = NEXT_INSN (insn))
3854 {
3855 if (INSN_UID (insn) > max_uid)
3856 max_uid = INSN_UID (insn);
3857 if (NOTE_P (insn)
3858 && NOTE_KIND (insn) == NOTE_INSN_EPILOGUE_BEG)
3859 epilogue_insn = insn;
3860 }
3862 uid_to_ruid = XNEWVEC (int, max_uid + 1);
3863 for (i = 0, insn = first; insn; i++, insn = NEXT_INSN (insn))
3864 uid_to_ruid[INSN_UID (insn)] = i;
3866 /* Initialize the list of insns that need filling. */
3867 if (unfilled_firstobj == 0)
3868 {
3869 gcc_obstack_init (&unfilled_slots_obstack);
3870 unfilled_firstobj = XOBNEWVAR (&unfilled_slots_obstack, rtx, 0);
3871 }
3873 for (insn = next_active_insn (first); insn; insn = next_active_insn (insn))
3874 {
3875 rtx target;
3878 INSN_ANNULLED_BRANCH_P (insn) = 0;
3879 INSN_FROM_TARGET_P (insn) = 0;
3881 /* Skip vector tables. We can't get attributes for them. */
3882 if (JUMP_TABLE_DATA_P (insn))
3883 continue;
3885 if (num_delay_slots (insn) > 0)
3886 obstack_ptr_grow (&unfilled_slots_obstack, insn);
3888 /* Ensure all jumps go to the last of a set of consecutive labels. */
3889 if (JUMP_P (insn)
3890 && (condjump_p (insn) || condjump_in_parallel_p (insn))
3891 && !ANY_RETURN_P (JUMP_LABEL (insn))
3892 && ((target = skip_consecutive_labels (JUMP_LABEL (insn)))
3893 != JUMP_LABEL (insn)))
3894 redirect_jump (insn, target, 1);
3895 }
3897 init_resource_info (epilogue_insn);
3899 /* Show we haven't computed an end-of-function label yet. */
3900 end_of_function_label = 0;
3902 /* Initialize the statistics for this function. */
3903 memset (num_insns_needing_delays, 0, sizeof num_insns_needing_delays);
3904 memset (num_filled_delays, 0, sizeof num_filled_delays);
3906 /* Now do the delay slot filling. Try everything twice in case earlier
3907 changes make more slots fillable. */
3909 for (reorg_pass_number = 0;
3910 reorg_pass_number < MAX_REORG_PASSES;
3911 reorg_pass_number++)
3912 {
3913 fill_simple_delay_slots (1);
3914 fill_simple_delay_slots (0);
3915 fill_eager_delay_slots ();
3916 relax_delay_slots (first);
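  /* Editorial note (not in the original source): MAX_REORG_PASSES is
     expected to be 2 here, matching the "try everything twice" comment
     above; a slot filled on the first pass can expose fresh opportunities
     for the second.  */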
  /* If we made an end of function label, indicate that it is now
     safe to delete it by undoing our prior adjustment to LABEL_NUSES.
     If it is now unused, delete it.  */
  if (end_of_function_label && --LABEL_NUSES (end_of_function_label) == 0)
    delete_related_insns (end_of_function_label);

#ifdef HAVE_return
  if (HAVE_return && end_of_function_label != 0)
    make_return_insns (first);
#endif
  /* Delete any USE insns made by update_block; subsequent passes don't need
     them or know how to deal with them.  */
  for (insn = first; insn; insn = next)
    {
      next = NEXT_INSN (insn);

      if (NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == USE
	  && INSN_P (XEXP (PATTERN (insn), 0)))
	next = delete_related_insns (insn);
    }

  obstack_free (&unfilled_slots_obstack, unfilled_firstobj);

  /* It is not clear why the line below is needed, but it does seem to be.  */
  unfilled_firstobj = XOBNEWVAR (&unfilled_slots_obstack, rtx, 0);
  if (dump_file)
    {
      int i, j, need_comma;
      int total_delay_slots[MAX_DELAY_HISTOGRAM + 1];
      int total_annul_slots[MAX_DELAY_HISTOGRAM + 1];

      for (reorg_pass_number = 0;
	   reorg_pass_number < MAX_REORG_PASSES;
	   reorg_pass_number++)
	{
	  fprintf (dump_file, ";; Reorg pass #%d:\n", reorg_pass_number + 1);
	  for (i = 0; i < NUM_REORG_FUNCTIONS; i++)
	    {
	      need_comma = 0;
	      fprintf (dump_file, ";; Reorg function #%d\n", i);

	      fprintf (dump_file, ";; %d insns needing delay slots\n;; ",
		       num_insns_needing_delays[i][reorg_pass_number]);

	      for (j = 0; j < MAX_DELAY_HISTOGRAM + 1; j++)
		if (num_filled_delays[i][j][reorg_pass_number])
		  {
		    if (need_comma)
		      fprintf (dump_file, ", ");
		    need_comma = 1;
		    fprintf (dump_file, "%d got %d delays",
			     num_filled_delays[i][j][reorg_pass_number], j);
		  }
	      fprintf (dump_file, "\n");
	    }
	}
      memset (total_delay_slots, 0, sizeof total_delay_slots);
      memset (total_annul_slots, 0, sizeof total_annul_slots);
      for (insn = first; insn; insn = NEXT_INSN (insn))
	{
	  if (! INSN_DELETED_P (insn)
	      && NONJUMP_INSN_P (insn)
	      && GET_CODE (PATTERN (insn)) != USE
	      && GET_CODE (PATTERN (insn)) != CLOBBER)
	    {
	      if (GET_CODE (PATTERN (insn)) == SEQUENCE)
		{
		  rtx control;
		  j = XVECLEN (PATTERN (insn), 0) - 1;
		  if (j > MAX_DELAY_HISTOGRAM)
		    j = MAX_DELAY_HISTOGRAM;
		  control = XVECEXP (PATTERN (insn), 0, 0);
		  if (JUMP_P (control) && INSN_ANNULLED_BRANCH_P (control))
		    total_annul_slots[j]++;
		  else
		    total_delay_slots[j]++;
		}
	      else if (num_delay_slots (insn) > 0)
		total_delay_slots[0]++;
	    }
	}
4002 fprintf (dump_file, ";; Reorg totals: ");
4004 for (j = 0; j < MAX_DELAY_HISTOGRAM + 1; j++)
4006 if (total_delay_slots[j])
4009 fprintf (dump_file, ", ");
4011 fprintf (dump_file, "%d got %d delays", total_delay_slots[j], j);
4014 fprintf (dump_file, "\n");
4015 #if defined (ANNUL_IFTRUE_SLOTS) || defined (ANNUL_IFFALSE_SLOTS)
4016 fprintf (dump_file, ";; Reorg annuls: ");
4018 for (j = 0; j < MAX_DELAY_HISTOGRAM + 1; j++)
4020 if (total_annul_slots[j])
4023 fprintf (dump_file, ", ");
4025 fprintf (dump_file, "%d got %d delays", total_annul_slots[j], j);
4028 fprintf (dump_file, "\n");
4030 fprintf (dump_file, "\n");
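  /* Editorial note (not in the original source): per the format strings
     above, the totals line prints one histogram bucket per entry, e.g.

	 ;; Reorg totals: 4 got 0 delays, 11 got 1 delays

     meaning four insns kept an unfilled slot while eleven had one slot
     filled.  */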
  /* For all JUMP insns, fill in branch prediction notes, so that during
     assembler output a target can set branch prediction bits in the code.
     We have to do this now, as up until this point the destinations of
     JUMPS can be moved around and changed, but past right here that cannot
     happen.  */
  for (insn = first; insn; insn = NEXT_INSN (insn))
    {
      int pred_flags;

      if (NONJUMP_INSN_P (insn))
	{
	  rtx pat = PATTERN (insn);

	  if (GET_CODE (pat) == SEQUENCE)
	    insn = XVECEXP (pat, 0, 0);
	}
      if (!JUMP_P (insn))
	continue;

      pred_flags = get_jump_flags (insn, JUMP_LABEL (insn));
      add_reg_note (insn, REG_BR_PRED, GEN_INT (pred_flags));
    }
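  /* Editorial note (not in the original source): for a filled branch the
     SEQUENCE wrapper carries no jump attributes of its own, which is
     presumably why the loop above descends to element 0, the controlling
     jump, before attaching the REG_BR_PRED note.  */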
  free_resource_info ();
  free (uid_to_ruid);
#ifdef DELAY_SLOTS_FOR_EPILOGUE
  /* The SPARC assembler, for instance, emits a warning when debug info is
     output into the delay slot.  */
  {
    rtx link;

    for (link = crtl->epilogue_delay_list;
	 link;
	 link = XEXP (link, 1))
      INSN_LOCATOR (XEXP (link, 0)) = 0;
  }
#endif

  crtl->dbr_scheduled_p = true;
}
#endif /* DELAY_SLOTS */
static bool
gate_handle_delay_slots (void)
{
#ifdef DELAY_SLOTS
  /* At -O0 dataflow info isn't updated after RA.  */
  return optimize > 0 && flag_delayed_branch && !crtl->dbr_scheduled_p;
#else
  return 0;
#endif
}

/* Run delay slot optimization.  */
static unsigned int
rest_of_handle_delay_slots (void)
{
#ifdef DELAY_SLOTS
  dbr_schedule (get_insns ());
#endif
  return 0;
}
struct rtl_opt_pass pass_delay_slots =
{
 {
  RTL_PASS,
  "dbr",                                /* name */
  gate_handle_delay_slots,              /* gate */
  rest_of_handle_delay_slots,           /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_DBR_SCHED,                         /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_ggc_collect                      /* todo_flags_finish */
 }
};
/* Machine dependent reorg pass.  */
static bool
gate_handle_machine_reorg (void)
{
  return targetm.machine_dependent_reorg != 0;
}

static unsigned int
rest_of_handle_machine_reorg (void)
{
  targetm.machine_dependent_reorg ();
  return 0;
}
struct rtl_opt_pass pass_machine_reorg =
{
 {
  RTL_PASS,
  "mach",                               /* name */
  gate_handle_machine_reorg,            /* gate */
  rest_of_handle_machine_reorg,         /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_MACH_DEP,                          /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_ggc_collect                      /* todo_flags_finish */
 }
};