/* Perform instruction reorganizations for delay slot filling.
   Copyright (C) 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999, 2000,
   2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
   Free Software Foundation, Inc.
   Contributed by Richard Kenner (kenner@vlsi1.ultra.nyu.edu).
   Hacked by Michael Tiemann (tiemann@cygnus.com).

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not, see
<http://www.gnu.org/licenses/>.  */
/* Instruction reorganization pass.

   This pass runs after register allocation and final jump
   optimization.  It should be the last pass to run before peephole.
   It serves primarily to fill delay slots of insns, typically branch
   and call insns.  Other insns typically involve more complicated
   interactions of data dependencies and resource constraints, and
   are better handled by scheduling before register allocation (by the
   function `schedule_insns').

   The Branch Penalty is the number of extra cycles that are needed to
   execute a branch insn.  On an ideal machine, branches take a single
   cycle, and the Branch Penalty is 0.  Several RISC machines approach
   branch delays differently:

   The MIPS has a single branch delay slot.  Most insns
   (except other branches) can be used to fill this slot.  When the
   slot is filled, two insns execute in two cycles, reducing the
   branch penalty to zero.

   The SPARC always has a branch delay slot, but its effects can be
   annulled when the branch is not taken.  This means that failing to
   find other sources of insns, we can hoist an insn from the branch
   target that would only be safe to execute knowing that the branch
   has been taken.

   The HP-PA always has a branch delay slot.  For unconditional branches
   its effects can be annulled when the branch is taken.  The effects
   of the delay slot in a conditional branch can be nullified for forward
   taken branches, or for untaken backward branches.  This means
   we can hoist insns from the fall-through path for forward branches or
   steal insns from the target of backward branches.

   The TMS320C3x and C4x have three branch delay slots.  When the three
   slots are filled, the branch penalty is zero.  Most insns can fill the
   delay slots except jump insns.

   Three techniques for filling delay slots have been implemented so far:

   (1) `fill_simple_delay_slots' is the simplest, most efficient way
   to fill delay slots.  This pass first looks for insns which come
   from before the branch and which are safe to execute after the
   branch.  Then it searches after the insn requiring delay slots or,
   in the case of a branch, for insns that are after the point at
   which the branch merges into the fallthrough code, if such a point
   exists.  When such insns are found, the branch penalty decreases
   and no code expansion takes place.

   (2) `fill_eager_delay_slots' is more complicated: it is used for
   scheduling conditional jumps, or for scheduling jumps which cannot
   be filled using (1).  A machine need not have annulled jumps to use
   this strategy, but it helps (by keeping more options open).
   `fill_eager_delay_slots' tries to guess the direction the branch
   will go; if it guesses right 100% of the time, it can reduce the
   branch penalty as much as `fill_simple_delay_slots' does.  If it
   guesses wrong 100% of the time, it might as well schedule nops.  When
   `fill_eager_delay_slots' takes insns from the fall-through path of
   the jump, usually there is no code expansion; when it takes insns
   from the branch target, there is code expansion if it is not the
   only way to reach that target.

   (3) `relax_delay_slots' uses a set of rules to simplify code that
   has been reorganized by (1) and (2).  It finds cases where a
   conditional test can be eliminated, jumps can be threaded, extra
   insns can be eliminated, etc.  It is the job of (1) and (2) to do a
   good job of scheduling locally; `relax_delay_slots' takes care of
   making the various individual schedules work well together.  It is
   especially tuned to handle the control flow interactions of branch
   insns.  It does nothing for insns with delay slots that do not
   branch.

   On machines that use CC0, we are very conservative.  We will not make
   a copy of an insn involving CC0 since we want to maintain a 1-1
   correspondence between the insn that sets CC0 and the insn that uses
   it.  The insns are allowed to be separated by placing an insn that
   sets CC0 (but not an insn that uses CC0; we could do this, but it
   doesn't seem worthwhile) in a delay slot.  In that case, we point each
   insn at the other with REG_CC_USER and REG_CC_SETTER notes.  Note
   that these restrictions affect very few machines because most RISC
   machines with delay slots will not use CC0 (the RT is the only known
   exception at this point).

   Not yet implemented:

   The Acorn Risc Machine can conditionally execute most insns, so
   it is profitable to move single insns into a position to execute
   based on the condition code of the previous insn.

   The HP-PA can conditionally nullify insns, providing a similar
   effect to the ARM, differing mostly in which insn is "in charge".  */
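/* Illustration (added for exposition; not from the original sources):
   on a MIPS-like target, filling the delay slot of a branch from the
   code before it turns

       addiu $4,$4,1                beq   $5,$0,L2
       beq   $5,$0,L2      into     addiu $4,$4,1    ; now in the slot
       nop
     L2:                          L2:

   so the add executes during the branch latency and the nop is gone.  */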
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "diagnostic-core.h"
#include "rtl.h"
#include "tm_p.h"
#include "expr.h"
#include "function.h"
#include "insn-config.h"
#include "conditions.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "regs.h"
#include "recog.h"
#include "flags.h"
#include "output.h"
#include "obstack.h"
#include "insn-attr.h"
#include "resource.h"
#include "except.h"
#include "params.h"
#include "target.h"
#include "tree-pass.h"
#ifndef ANNUL_IFTRUE_SLOTS
#define eligible_for_annul_true(INSN, SLOTS, TRIAL, FLAGS) 0
#endif
#ifndef ANNUL_IFFALSE_SLOTS
#define eligible_for_annul_false(INSN, SLOTS, TRIAL, FLAGS) 0
#endif
/* Insns which have delay slots that have not yet been filled.  */

static struct obstack unfilled_slots_obstack;
static rtx *unfilled_firstobj;

/* Define macros to refer to the first and last slot containing unfilled
   insns.  These are used because the list may move and its address
   should be recomputed at each use.  */

#define unfilled_slots_base \
  ((rtx *) obstack_base (&unfilled_slots_obstack))

#define unfilled_slots_next \
  ((rtx *) obstack_next_free (&unfilled_slots_obstack))
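/* A minimal usage sketch (added for exposition; hypothetical helper,
   not part of the original file): the filling passes walk this list
   through the macros above because the obstack may be reallocated,
   moving the array, as new unfilled insns are recorded.  */

static int ATTRIBUTE_UNUSED
count_unfilled_slots_sketch (void)
{
  rtx *slot;
  int n = 0;

  /* Recompute the base and end pointers through the macros on each use.  */
  for (slot = unfilled_slots_base; slot < unfilled_slots_next; slot++)
    if (*slot != 0)
      n++;
  return n;
}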
/* Points to the label before the end of the function.  */
static rtx end_of_function_label;

/* Mapping between INSN_UID's and position in the code since INSN_UID's do
   not always monotonically increase.  */
static int *uid_to_ruid;

/* Highest valid index in `uid_to_ruid'.  */
static int max_uid;
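/* A minimal sketch (assumed, not from this file) of how the mapping is
   set up: number insns by a single walk over the insn stream, so that
   relative order can be compared even when UIDs are out of order.  */

static void ATTRIBUTE_UNUSED
build_uid_to_ruid_sketch (void)
{
  rtx insn;
  int i;

  max_uid = get_max_uid ();
  uid_to_ruid = XNEWVEC (int, max_uid + 1);
  for (i = 0, insn = get_insns (); insn; i++, insn = NEXT_INSN (insn))
    uid_to_ruid[INSN_UID (insn)] = i;
}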
static int stop_search_p (rtx, int);
static int resource_conflicts_p (struct resources *, struct resources *);
static int insn_references_resource_p (rtx, struct resources *, bool);
static int insn_sets_resource_p (rtx, struct resources *, bool);
static rtx find_end_label (void);
static rtx emit_delay_sequence (rtx, rtx, int);
static rtx add_to_delay_list (rtx, rtx);
static rtx delete_from_delay_slot (rtx);
static void delete_scheduled_jump (rtx);
static void note_delay_statistics (int, int);
#if defined(ANNUL_IFFALSE_SLOTS) || defined(ANNUL_IFTRUE_SLOTS)
static rtx optimize_skip (rtx);
#endif
static int get_jump_flags (rtx, rtx);
static int rare_destination (rtx);
static int mostly_true_jump (rtx, rtx);
static rtx get_branch_condition (rtx, rtx);
static int condition_dominates_p (rtx, rtx);
static int redirect_with_delay_slots_safe_p (rtx, rtx, rtx);
static int redirect_with_delay_list_safe_p (rtx, rtx, rtx);
static int check_annul_list_true_false (int, rtx);
static rtx steal_delay_list_from_target (rtx, rtx, rtx, rtx,
                                         struct resources *,
                                         struct resources *,
                                         struct resources *,
                                         int, int *, int *, rtx *);
static rtx steal_delay_list_from_fallthrough (rtx, rtx, rtx, rtx,
                                              struct resources *,
                                              struct resources *,
                                              struct resources *,
                                              int, int *, int *);
static void try_merge_delay_insns (rtx, rtx);
static rtx redundant_insn (rtx, rtx, rtx);
static int own_thread_p (rtx, rtx, int);
static void update_block (rtx, rtx);
static int reorg_redirect_jump (rtx, rtx);
static void update_reg_dead_notes (rtx, rtx);
static void fix_reg_dead_note (rtx, rtx);
static void update_reg_unused_notes (rtx, rtx);
static void fill_simple_delay_slots (int);
static rtx fill_slots_from_thread (rtx, rtx, rtx, rtx,
                                   int, int, int, int,
                                   int *, rtx);
static void fill_eager_delay_slots (void);
static void relax_delay_slots (rtx);
#ifdef HAVE_return
static void make_return_insns (rtx);
#endif
/* A wrapper around next_active_insn which takes care to return ret_rtx
   unchanged.  */

static rtx
first_active_target_insn (rtx insn)
{
  if (ANY_RETURN_P (insn))
    return insn;
  return next_active_insn (insn);
}
/* Return TRUE if this insn should stop the search for insn to fill delay
   slots.  LABELS_P indicates that labels should terminate the search.
   In all cases, jumps terminate the search.  */

static int
stop_search_p (rtx insn, int labels_p)
{
  if (insn == 0)
    return 1;

  /* If the insn can throw an exception that is caught within the function,
     it may effectively perform a jump from the viewpoint of the function.
     Therefore act like for a jump.  */
  if (can_throw_internal (insn))
    return 1;

  switch (GET_CODE (insn))
    {
    case NOTE:
    case CALL_INSN:
      return 0;

    case CODE_LABEL:
      return labels_p;

    case JUMP_INSN:
    case BARRIER:
      return 1;

    case INSN:
      /* OK unless it contains a delay slot or is an `asm' insn of some type.
         We don't know anything about these.  */
      return (GET_CODE (PATTERN (insn)) == SEQUENCE
              || GET_CODE (PATTERN (insn)) == ASM_INPUT
              || asm_noperands (PATTERN (insn)) >= 0);

    default:
      gcc_unreachable ();
    }
}
/* Return TRUE if any resources are marked in both RES1 and RES2 or if either
   resource set contains a volatile memory reference.  Otherwise, return
   FALSE.  */

static int
resource_conflicts_p (struct resources *res1, struct resources *res2)
{
  if ((res1->cc && res2->cc) || (res1->memory && res2->memory)
      || (res1->unch_memory && res2->unch_memory)
      || res1->volatil || res2->volatil)
    return 1;

#ifdef HARD_REG_SET
  return (res1->regs & res2->regs) != HARD_CONST (0);
#else
  {
    int i;

    for (i = 0; i < HARD_REG_SET_LONGS; i++)
      if ((res1->regs[i] & res2->regs[i]) != 0)
        return 1;
    return 0;
  }
#endif
}
/* Return TRUE if any resource marked in RES, a `struct resources', is
   referenced by INSN.  If INCLUDE_DELAYED_EFFECTS is set, return if the called
   routine is using those resources.

   We compute this by computing all the resources referenced by INSN and
   seeing if this conflicts with RES.  It might be faster to directly check
   ourselves, and this is the way it used to work, but it means duplicating
   a large block of complex code.  */

static int
insn_references_resource_p (rtx insn, struct resources *res,
                            bool include_delayed_effects)
{
  struct resources insn_res;

  CLEAR_RESOURCE (&insn_res);
  mark_referenced_resources (insn, &insn_res, include_delayed_effects);
  return resource_conflicts_p (&insn_res, res);
}
/* Return TRUE if INSN modifies resources that are marked in RES.
   INCLUDE_DELAYED_EFFECTS is set if the actions of that routine should be
   included.  CC0 is only modified if it is explicitly set; see comments
   in front of mark_set_resources for details.  */

static int
insn_sets_resource_p (rtx insn, struct resources *res,
                      bool include_delayed_effects)
{
  struct resources insn_sets;

  CLEAR_RESOURCE (&insn_sets);
  mark_set_resources (insn, &insn_sets, 0,
                      (include_delayed_effects
                       ? MARK_SRC_DEST_CALL
                       : MARK_SRC_DEST));
  return resource_conflicts_p (&insn_sets, res);
}
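/* Usage sketch (added for exposition; hypothetical helper, not part of
   the original file): the filling loops below combine the two
   predicates above to decide whether a candidate TRIAL insn can be
   moved across INSN without changing what either computes.  */

static int ATTRIBUTE_UNUSED
trial_conflicts_with_insn_sketch (rtx trial, rtx insn)
{
  struct resources set, needed;

  CLEAR_RESOURCE (&set);
  CLEAR_RESOURCE (&needed);
  mark_set_resources (insn, &set, 0, MARK_SRC_DEST_CALL);
  mark_referenced_resources (insn, &needed, true);

  /* TRIAL conflicts if it reads something INSN sets, or writes
     something INSN sets or needs.  */
  return (insn_references_resource_p (trial, &set, true)
          || insn_sets_resource_p (trial, &set, true)
          || insn_sets_resource_p (trial, &needed, true));
}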
/* Find a label at the end of the function or before a RETURN.  If there
   is none, try to make one.  If that fails, returns 0.

   The property of such a label is that it is placed just before the
   epilogue or a bare RETURN insn, so that another bare RETURN can be
   turned into a jump to the label unconditionally.  In particular, the
   label cannot be placed before a RETURN insn with a filled delay slot.

   ??? There may be a problem with the current implementation.  Suppose
   we start with a bare RETURN insn and call find_end_label.  It may set
   end_of_function_label just before the RETURN.  Suppose the machinery
   is able to fill the delay slot of the RETURN insn afterwards.  Then
   end_of_function_label is no longer valid according to the property
   described above and find_end_label will still return it unmodified.
   Note that this is probably mitigated by the following observation:
   once end_of_function_label is made, it is very likely the target of
   a jump, so filling the delay slot of the RETURN will be much more
   difficult.  */

static rtx
find_end_label (void)
{
  rtx insn;

  /* If we found one previously, return it.  */
  if (end_of_function_label)
    return end_of_function_label;

  /* Otherwise, see if there is a label at the end of the function.  If there
     is, it must be that RETURN insns aren't needed, so that is our return
     label and we don't have to do anything else.  */

  insn = get_last_insn ();
  while (NOTE_P (insn)
         || (NONJUMP_INSN_P (insn)
             && (GET_CODE (PATTERN (insn)) == USE
                 || GET_CODE (PATTERN (insn)) == CLOBBER)))
    insn = PREV_INSN (insn);

  /* When a target threads its epilogue we might already have a
     suitable return insn.  If so put a label before it for the
     end_of_function_label.  */
  if (BARRIER_P (insn)
      && JUMP_P (PREV_INSN (insn))
      && GET_CODE (PATTERN (PREV_INSN (insn))) == RETURN)
    {
      rtx temp = PREV_INSN (PREV_INSN (insn));
      end_of_function_label = gen_label_rtx ();
      LABEL_NUSES (end_of_function_label) = 0;

      /* Put the label before any USE insns that may precede the
         RETURN insn.  */
      while (GET_CODE (temp) == USE)
        temp = PREV_INSN (temp);

      emit_label_after (end_of_function_label, temp);
    }

  else if (LABEL_P (insn))
    end_of_function_label = insn;
  else
    {
      end_of_function_label = gen_label_rtx ();
      LABEL_NUSES (end_of_function_label) = 0;
      /* If the basic block reorder pass moves the return insn to
         some other place try to locate it again and put our
         end_of_function_label there.  */
      while (insn && ! (JUMP_P (insn)
                        && (GET_CODE (PATTERN (insn)) == RETURN)))
        insn = PREV_INSN (insn);
      if (insn)
        {
          insn = PREV_INSN (insn);

          /* Put the label before any USE insns that may precede the
             RETURN insn.  */
          while (GET_CODE (insn) == USE)
            insn = PREV_INSN (insn);

          emit_label_after (end_of_function_label, insn);
        }
      else
        {
#ifdef HAVE_epilogue
          if (HAVE_epilogue
#ifdef HAVE_return
              && ! HAVE_return
#endif
              )
            {
              /* The RETURN insn has its delay slot filled so we cannot
                 emit the label just before it.  Since we already have
                 an epilogue and cannot emit a new RETURN, we cannot
                 emit the label at all.  */
              end_of_function_label = NULL_RTX;
              return end_of_function_label;
            }
#endif /* HAVE_epilogue */

          /* Otherwise, make a new label and emit a RETURN and BARRIER,
             if needed.  */
          emit_label (end_of_function_label);
#ifdef HAVE_return
          /* We don't bother trying to create a return insn if the
             epilogue has filled delay-slots; we would have to try and
             move the delay-slot fillers to the delay-slots for the new
             return insn or in front of the new return insn.  */
          if (crtl->epilogue_delay_list == NULL
              && HAVE_return)
            {
              /* The return we make may have delay slots too.  */
              rtx insn = gen_return ();
              insn = emit_jump_insn (insn);
              JUMP_LABEL (insn) = ret_rtx;
              emit_barrier ();
              if (num_delay_slots (insn) > 0)
                obstack_ptr_grow (&unfilled_slots_obstack, insn);
            }
#endif
        }
    }

  /* Show one additional use for this label so it won't go away until
     we are done.  */
  ++LABEL_NUSES (end_of_function_label);

  return end_of_function_label;
}
/* Put INSN and LIST together in a SEQUENCE rtx of LENGTH, and replace
   the pattern of INSN with the SEQUENCE.

   Chain the insns so that NEXT_INSN of each insn in the sequence points to
   the next and NEXT_INSN of the last insn in the sequence points to
   the first insn after the sequence.  Similarly for PREV_INSN.  This makes
   it easier to scan all insns.

   Returns the SEQUENCE that replaces INSN.  */

static rtx
emit_delay_sequence (rtx insn, rtx list, int length)
{
  int i = 1;
  rtx li;
  int had_barrier = 0;

  /* Allocate the rtvec to hold the insns and the SEQUENCE.  */
  rtvec seqv = rtvec_alloc (length + 1);
  rtx seq = gen_rtx_SEQUENCE (VOIDmode, seqv);
  rtx seq_insn = make_insn_raw (seq);
  rtx first = get_insns ();
  rtx last = get_last_insn ();

  /* Make a copy of the insn having delay slots.  */
  rtx delay_insn = copy_rtx (insn);

  /* If INSN is followed by a BARRIER, delete the BARRIER since it will only
     confuse further processing.  Update LAST in case it was the last insn.
     We will put the BARRIER back in later.  */
  if (NEXT_INSN (insn) && BARRIER_P (NEXT_INSN (insn)))
    {
      delete_related_insns (NEXT_INSN (insn));
      last = get_last_insn ();
      had_barrier = 1;
    }

  /* Splice our SEQUENCE into the insn stream where INSN used to be.  */
  NEXT_INSN (seq_insn) = NEXT_INSN (insn);
  PREV_INSN (seq_insn) = PREV_INSN (insn);

  if (NEXT_INSN (seq_insn))
    PREV_INSN (NEXT_INSN (seq_insn)) = seq_insn;

  if (PREV_INSN (seq_insn))
    NEXT_INSN (PREV_INSN (seq_insn)) = seq_insn;

  /* Note the calls to set_new_first_and_last_insn must occur after
     SEQ_INSN has been completely spliced into the insn stream.

     Otherwise CUR_INSN_UID will get set to an incorrect value because
     set_new_first_and_last_insn will not find SEQ_INSN in the chain.  */
  if (insn == last)
    set_new_first_and_last_insn (first, seq_insn);

  if (insn == first)
    set_new_first_and_last_insn (seq_insn, last);

  /* Build our SEQUENCE and rebuild the insn chain.  */
  XVECEXP (seq, 0, 0) = delay_insn;
  INSN_DELETED_P (delay_insn) = 0;
  PREV_INSN (delay_insn) = PREV_INSN (seq_insn);

  INSN_LOCATOR (seq_insn) = INSN_LOCATOR (delay_insn);

  for (li = list; li; li = XEXP (li, 1), i++)
    {
      rtx tem = XEXP (li, 0);
      rtx note, next;

      /* Show that this copy of the insn isn't deleted.  */
      INSN_DELETED_P (tem) = 0;

      XVECEXP (seq, 0, i) = tem;
      PREV_INSN (tem) = XVECEXP (seq, 0, i - 1);
      NEXT_INSN (XVECEXP (seq, 0, i - 1)) = tem;

      /* The SPARC assembler, for instance, emits a warning when debug info
         is output into the delay slot.  */
      if (INSN_LOCATOR (tem) && !INSN_LOCATOR (seq_insn))
        INSN_LOCATOR (seq_insn) = INSN_LOCATOR (tem);
      INSN_LOCATOR (tem) = 0;

      for (note = REG_NOTES (tem); note; note = next)
        {
          next = XEXP (note, 1);
          switch (REG_NOTE_KIND (note))
            {
            case REG_DEAD:
              /* Remove any REG_DEAD notes because we can't rely on them now
                 that the insn has been moved.  */
              remove_note (tem, note);
              break;

            case REG_LABEL_OPERAND:
            case REG_LABEL_TARGET:
              /* Keep the label reference count up to date.  */
              if (LABEL_P (XEXP (note, 0)))
                LABEL_NUSES (XEXP (note, 0)) ++;
              break;

            default:
              break;
            }
        }
    }

  NEXT_INSN (XVECEXP (seq, 0, length)) = NEXT_INSN (seq_insn);

  /* If the previous insn is a SEQUENCE, update the NEXT_INSN pointer on the
     last insn in that SEQUENCE to point to us.  Similarly for the first
     insn in the following insn if it is a SEQUENCE.  */

  if (PREV_INSN (seq_insn) && NONJUMP_INSN_P (PREV_INSN (seq_insn))
      && GET_CODE (PATTERN (PREV_INSN (seq_insn))) == SEQUENCE)
    NEXT_INSN (XVECEXP (PATTERN (PREV_INSN (seq_insn)), 0,
                        XVECLEN (PATTERN (PREV_INSN (seq_insn)), 0) - 1))
      = seq_insn;

  if (NEXT_INSN (seq_insn) && NONJUMP_INSN_P (NEXT_INSN (seq_insn))
      && GET_CODE (PATTERN (NEXT_INSN (seq_insn))) == SEQUENCE)
    PREV_INSN (XVECEXP (PATTERN (NEXT_INSN (seq_insn)), 0, 0)) = seq_insn;

  /* If there used to be a BARRIER, put it back.  */
  if (had_barrier)
    emit_barrier_after (seq_insn);

  gcc_assert (i == length + 1);

  return seq_insn;
}
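/* Sketch of the resulting rtl (assumed shape, added for exposition):
   after one slot of a branch is filled, the stream contains a single
   insn whose pattern is

     (sequence [(jump_insn ...)    ;; the insn needing the slot
                (insn ...)])       ;; the delay-slot filler

   and `final' emits the filler in the slot via FINAL_SEQUENCE.  */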
/* Add INSN to DELAY_LIST and return the head of the new list.  The list must
   be in the order in which the insns are to be executed.  */

static rtx
add_to_delay_list (rtx insn, rtx delay_list)
{
  /* If we have an empty list, just make a new list element.  If
     INSN has its block number recorded, clear it since we may
     be moving the insn to a new block.  */

  if (delay_list == 0)
    {
      clear_hashed_info_for_insn (insn);
      return gen_rtx_INSN_LIST (VOIDmode, insn, NULL_RTX);
    }

  /* Otherwise this must be an INSN_LIST.  Add INSN to the end of the
     list.  */
  XEXP (delay_list, 1) = add_to_delay_list (insn, XEXP (delay_list, 1));

  return delay_list;
}
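/* Usage sketch (added for exposition; FIRST and SECOND are hypothetical
   insns): because the list is kept in execution order, each new filler
   is appended at the tail.  */

static rtx ATTRIBUTE_UNUSED
build_two_slot_list_sketch (rtx first, rtx second)
{
  rtx delay_list = NULL_RTX;

  delay_list = add_to_delay_list (first, delay_list);   /* becomes head */
  delay_list = add_to_delay_list (second, delay_list);  /* appended */
  return delay_list;
}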
/* Delete INSN from the delay slot of the insn that it is in, which may
   produce an insn with no delay slots.  Return the new insn.  */

static rtx
delete_from_delay_slot (rtx insn)
{
  rtx trial, seq_insn, seq, prev;
  rtx delay_list = 0;
  int i;
  int had_barrier = 0;

  /* We first must find the insn containing the SEQUENCE with INSN in its
     delay slot.  Do this by finding an insn, TRIAL, where
     PREV_INSN (NEXT_INSN (TRIAL)) != TRIAL.  */

  for (trial = insn;
       PREV_INSN (NEXT_INSN (trial)) == trial;
       trial = NEXT_INSN (trial))
    ;

  seq_insn = PREV_INSN (NEXT_INSN (trial));
  seq = PATTERN (seq_insn);

  if (NEXT_INSN (seq_insn) && BARRIER_P (NEXT_INSN (seq_insn)))
    had_barrier = 1;

  /* Create a delay list consisting of all the insns other than the one
     we are deleting (unless we were the only one).  */
  if (XVECLEN (seq, 0) > 2)
    for (i = 1; i < XVECLEN (seq, 0); i++)
      if (XVECEXP (seq, 0, i) != insn)
        delay_list = add_to_delay_list (XVECEXP (seq, 0, i), delay_list);

  /* Delete the old SEQUENCE, re-emit the insn that used to have the delay
     list, and rebuild the delay list if non-empty.  */
  prev = PREV_INSN (seq_insn);
  trial = XVECEXP (seq, 0, 0);
  delete_related_insns (seq_insn);
  add_insn_after (trial, prev, NULL);

  /* If there was a barrier after the old SEQUENCE, re-emit it.  */
  if (had_barrier)
    emit_barrier_after (trial);

  /* If there are any delay insns, re-emit them.  Otherwise clear the
     annul flag.  */
  if (delay_list)
    trial = emit_delay_sequence (trial, delay_list, XVECLEN (seq, 0) - 2);
  else if (INSN_P (trial))
    INSN_ANNULLED_BRANCH_P (trial) = 0;

  INSN_FROM_TARGET_P (insn) = 0;

  /* Show we need to fill this insn again.  */
  obstack_ptr_grow (&unfilled_slots_obstack, trial);

  return trial;
}
/* Delete INSN, a JUMP_INSN.  If it is a conditional jump, we must track down
   the insn that sets CC0 for it and delete it too.  */

static void
delete_scheduled_jump (rtx insn)
{
  /* Delete the insn that sets cc0 for us.  On machines without cc0, we could
     delete the insn that sets the condition code, but it is hard to find it.
     Since this case is rare anyway, don't bother trying; there would likely
     be other insns that became dead anyway, which we wouldn't know to
     delete.  */

#ifdef HAVE_cc0
  if (reg_mentioned_p (cc0_rtx, insn))
    {
      rtx note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);

      /* If a reg-note was found, it points to an insn to set CC0.  This
         insn is in the delay list of some other insn.  So delete it from
         the delay list it was in.  */
      if (note)
        {
          if (! FIND_REG_INC_NOTE (XEXP (note, 0), NULL_RTX)
              && sets_cc0_p (PATTERN (XEXP (note, 0))) == 1)
            delete_from_delay_slot (XEXP (note, 0));
        }
      else
        {
          /* The insn setting CC0 is our previous insn, but it may be in
             a delay slot.  It will be the last insn in the delay slot, if
             it is.  */
          rtx trial = previous_insn (insn);
          if (NOTE_P (trial))
            trial = prev_nonnote_insn (trial);
          if (sets_cc0_p (PATTERN (trial)) != 1
              || FIND_REG_INC_NOTE (trial, NULL_RTX))
            return;
          if (PREV_INSN (NEXT_INSN (trial)) == trial)
            delete_related_insns (trial);
          else
            delete_from_delay_slot (trial);
        }
    }
#endif

  delete_related_insns (insn);
}
/* Counters for delay-slot filling.  */

#define NUM_REORG_FUNCTIONS 2
#define MAX_DELAY_HISTOGRAM 3
#define MAX_REORG_PASSES 2

static int num_insns_needing_delays[NUM_REORG_FUNCTIONS][MAX_REORG_PASSES];

static int num_filled_delays[NUM_REORG_FUNCTIONS][MAX_DELAY_HISTOGRAM+1][MAX_REORG_PASSES];

static int reorg_pass_number;

static void
note_delay_statistics (int slots_filled, int index)
{
  num_insns_needing_delays[index][reorg_pass_number]++;
  if (slots_filled > MAX_DELAY_HISTOGRAM)
    slots_filled = MAX_DELAY_HISTOGRAM;
  num_filled_delays[index][slots_filled][reorg_pass_number]++;
}
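/* Usage sketch (assumption, not from the original file): INDEX selects
   the filling function, e.g. 0 for the simple pass and 1 for the eager
   pass, so a call site looks like

     note_delay_statistics (slots_filled, 0);

   Fills of more than MAX_DELAY_HISTOGRAM slots all land in the last
   histogram bucket.  */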
#if defined(ANNUL_IFFALSE_SLOTS) || defined(ANNUL_IFTRUE_SLOTS)

/* Optimize the following cases:

   1.  When a conditional branch skips over only one instruction,
       use an annulling branch and put that insn in the delay slot.
       Use either a branch that annuls when the condition is true or
       invert the test with a branch that annuls when the condition is
       false.  This saves insns, since otherwise we must copy an insn
       from the L1 target.

        (orig)          (skip)          (otherwise)
        Bcc.n L1        Bcc',a L1       Bcc,a L1'
        insn            insn            insn2
      L1:             L1:             L1:
        insn2           insn2           insn2
        insn3           insn3         L1':
                                        insn3

   2.  When a conditional branch skips over only one instruction,
       and after that, it unconditionally branches somewhere else,
       perform a similar optimization.  This saves executing the
       second branch in the case where the inverted condition is true.

        Bcc.n L1        Bcc',a L4
        insn            insn
      L1:             L1:
        Bra L4          Bra L4

   INSN is a JUMP_INSN.

   This should be expanded to skip over N insns, where N is the number
   of delay slots required.  */
static rtx
optimize_skip (rtx insn)
{
  rtx trial = next_nonnote_insn (insn);
  rtx next_trial = next_active_insn (trial);
  rtx delay_list = 0;
  int flags;

  flags = get_jump_flags (insn, JUMP_LABEL (insn));

  if (trial == 0
      || !NONJUMP_INSN_P (trial)
      || GET_CODE (PATTERN (trial)) == SEQUENCE
      || recog_memoized (trial) < 0
      || (! eligible_for_annul_false (insn, 0, trial, flags)
          && ! eligible_for_annul_true (insn, 0, trial, flags))
      || can_throw_internal (trial))
    return 0;

  /* There are two cases where we are just executing one insn (we assume
     here that a branch requires only one insn; this should be generalized
     at some point): Where the branch goes around a single insn or where
     we have one insn followed by a branch to the same label we branch to.
     In both of these cases, inverting the jump and annulling the delay
     slot give the same effect in fewer insns.  */
  if ((next_trial == next_active_insn (JUMP_LABEL (insn))
       && ! (next_trial == 0 && crtl->epilogue_delay_list != 0))
      || (next_trial != 0
          && JUMP_P (next_trial)
          && JUMP_LABEL (insn) == JUMP_LABEL (next_trial)
          && (simplejump_p (next_trial)
              || GET_CODE (PATTERN (next_trial)) == RETURN)))
    {
      if (eligible_for_annul_false (insn, 0, trial, flags))
        {
          if (invert_jump (insn, JUMP_LABEL (insn), 1))
            INSN_FROM_TARGET_P (trial) = 1;
          else if (! eligible_for_annul_true (insn, 0, trial, flags))
            return 0;
        }

      delay_list = add_to_delay_list (trial, NULL_RTX);
      next_trial = next_active_insn (trial);
      update_block (trial, trial);
      delete_related_insns (trial);

      /* Also, if we are targeting an unconditional
         branch, thread our jump to the target of that branch.  Don't
         change this into a RETURN here, because it may not accept what
         we have in the delay slot.  We'll fix this up later.  */
      if (next_trial && JUMP_P (next_trial)
          && (simplejump_p (next_trial)
              || GET_CODE (PATTERN (next_trial)) == RETURN))
        {
          rtx target_label = JUMP_LABEL (next_trial);
          if (ANY_RETURN_P (target_label))
            target_label = find_end_label ();

          if (target_label)
            {
              /* Recompute the flags based on TARGET_LABEL since threading
                 the jump to TARGET_LABEL may change the direction of the
                 jump (which may change the circumstances in which the
                 delay slot is nullified).  */
              flags = get_jump_flags (insn, target_label);
              if (eligible_for_annul_true (insn, 0, trial, flags))
                reorg_redirect_jump (insn, target_label);
            }
        }

      INSN_ANNULLED_BRANCH_P (insn) = 1;
    }

  return delay_list;
}
#endif
/* Encode and return branch direction and prediction information for
   INSN assuming it will jump to LABEL.

   Unconditional branches return no direction information and
   are predicted as very likely taken.  */

static int
get_jump_flags (rtx insn, rtx label)
{
  int flags;

  /* get_jump_flags can be passed any insn with delay slots, these may
     be INSNs, CALL_INSNs, or JUMP_INSNs.  Only JUMP_INSNs have branch
     direction information, and only if they are conditional jumps.

     If LABEL is a return, then there is no way to determine the branch
     direction.  */
  if (JUMP_P (insn)
      && (condjump_p (insn) || condjump_in_parallel_p (insn))
      && !ANY_RETURN_P (label)
      && INSN_UID (insn) <= max_uid
      && INSN_UID (label) <= max_uid)
    flags
      = (uid_to_ruid[INSN_UID (label)] > uid_to_ruid[INSN_UID (insn)])
        ? ATTR_FLAG_forward : ATTR_FLAG_backward;
  /* No valid direction information.  */
  else
    flags = 0;

  /* If INSN is a conditional branch, call mostly_true_jump to determine
     the branch prediction.

     Unconditional branches are predicted as very likely taken.  */
  if (JUMP_P (insn)
      && (condjump_p (insn) || condjump_in_parallel_p (insn)))
    {
      int prediction;

      prediction = mostly_true_jump (insn, get_branch_condition (insn, label));
      switch (prediction)
        {
        case 2:
          flags |= (ATTR_FLAG_very_likely | ATTR_FLAG_likely);
          break;
        case 1:
          flags |= ATTR_FLAG_likely;
          break;
        case 0:
          flags |= ATTR_FLAG_unlikely;
          break;
        case -1:
          flags |= (ATTR_FLAG_very_unlikely | ATTR_FLAG_unlikely);
          break;

        default:
          gcc_unreachable ();
        }
    }
  else
    flags |= (ATTR_FLAG_very_likely | ATTR_FLAG_likely);

  return flags;
}
/* Return 1 if INSN is a destination that will be branched to rarely (the
   return point of a function); return 2 if DEST will be branched to very
   rarely (a call to a function that doesn't return).  Otherwise,
   return 0.  */

static int
rare_destination (rtx insn)
{
  int jump_count = 0;
  rtx next;

  for (; insn && !ANY_RETURN_P (insn); insn = next)
    {
      if (NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
        insn = XVECEXP (PATTERN (insn), 0, 0);

      next = NEXT_INSN (insn);

      switch (GET_CODE (insn))
        {
        case CODE_LABEL:
          return 0;
        case BARRIER:
          /* A BARRIER can either be after a JUMP_INSN or a CALL_INSN.  We
             don't scan past JUMP_INSNs, so any barrier we find here must
             have been after a CALL_INSN and hence mean the call doesn't
             return.  */
          return 2;
        case JUMP_INSN:
          if (GET_CODE (PATTERN (insn)) == RETURN)
            return 1;
          else if (simplejump_p (insn)
                   && jump_count++ < 10)
            next = JUMP_LABEL (insn);
          else
            return 0;
          break;

        default:
          break;
        }
    }

  /* If we got here it means we hit the end of the function.  So this
     is an unlikely destination.  */

  return 1;
}
/* Return truth value of the statement that this branch
   is mostly taken.  If we think that the branch is extremely likely
   to be taken, we return 2.  If the branch is slightly more likely to be
   taken, return 1.  If the branch is slightly less likely to be taken,
   return 0 and if the branch is highly unlikely to be taken, return -1.

   CONDITION, if nonzero, is the condition that JUMP_INSN is testing.  */

static int
mostly_true_jump (rtx jump_insn, rtx condition)
{
  rtx target_label = JUMP_LABEL (jump_insn);
  rtx note;
  int rare_dest, rare_fallthrough;

  /* If branch probabilities are available, then use that number since it
     always gives a correct answer.  */
  note = find_reg_note (jump_insn, REG_BR_PROB, 0);
  if (note)
    {
      int prob = INTVAL (XEXP (note, 0));

      if (prob >= REG_BR_PROB_BASE * 9 / 10)
        return 2;
      else if (prob >= REG_BR_PROB_BASE / 2)
        return 1;
      else if (prob >= REG_BR_PROB_BASE / 10)
        return 0;
      else
        return -1;
    }
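  /* Worked example (added for exposition): REG_BR_PROB_BASE is 10000,
     so a REG_BR_PROB note of 9000 or more yields 2 (very likely),
     5000..8999 yields 1, 1000..4999 yields 0, and anything below 1000
     yields -1 (very unlikely).  */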
  /* Look at the relative rarities of the fallthrough and destination.  If
     they differ, we can predict the branch that way.  */
  rare_dest = rare_destination (target_label);
  rare_fallthrough = rare_destination (NEXT_INSN (jump_insn));

  switch (rare_fallthrough - rare_dest)
    {
    case -2:
      return -1;
    case -1:
      return 0;
    case 0:
      break;
    case 1:
      return 1;
    case 2:
      return 2;
    }

  /* If we couldn't figure out what this jump was, assume it won't be
     taken.  This should be rare.  */
  if (condition == 0)
    return 0;

  /* Predict that backward branches are usually taken and forward branches
     usually are not.  If we don't know whether this is forward or backward,
     assume the branch will be taken, since most are.  */
  return (ANY_RETURN_P (target_label) || INSN_UID (jump_insn) > max_uid
          || INSN_UID (target_label) > max_uid
          || (uid_to_ruid[INSN_UID (jump_insn)]
              > uid_to_ruid[INSN_UID (target_label)]));
}
/* Return the condition under which INSN will branch to TARGET.  If TARGET
   is zero, return the condition under which INSN will return.  If INSN is
   an unconditional branch, return const_true_rtx.  If INSN isn't a simple
   type of jump, or it doesn't go to TARGET, return 0.  */

static rtx
get_branch_condition (rtx insn, rtx target)
{
  rtx pat = PATTERN (insn);
  rtx src;

  if (condjump_in_parallel_p (insn))
    pat = XVECEXP (pat, 0, 0);

  if (ANY_RETURN_P (pat))
    return pat == target ? const_true_rtx : 0;

  if (GET_CODE (pat) != SET || SET_DEST (pat) != pc_rtx)
    return 0;

  src = SET_SRC (pat);
  if (GET_CODE (src) == LABEL_REF && XEXP (src, 0) == target)
    return const_true_rtx;

  else if (GET_CODE (src) == IF_THEN_ELSE
           && XEXP (XEXP (src, 1), 0) == target
           && XEXP (src, 2) == pc_rtx)
    return XEXP (src, 0);

  else if (GET_CODE (src) == IF_THEN_ELSE
           && XEXP (XEXP (src, 2), 0) == target
           && XEXP (src, 1) == pc_rtx)
    {
      enum rtx_code rev;
      rev = reversed_comparison_code (XEXP (src, 0), insn);
      if (rev != UNKNOWN)
        return gen_rtx_fmt_ee (rev, GET_MODE (XEXP (src, 0)),
                               XEXP (XEXP (src, 0), 0),
                               XEXP (XEXP (src, 0), 1));
    }

  return 0;
}
/* Return nonzero if CONDITION is more strict than the condition of
   INSN, i.e., if INSN will always branch if CONDITION is true.  */

static int
condition_dominates_p (rtx condition, rtx insn)
{
  rtx other_condition = get_branch_condition (insn, JUMP_LABEL (insn));
  enum rtx_code code = GET_CODE (condition);
  enum rtx_code other_code;

  if (rtx_equal_p (condition, other_condition)
      || other_condition == const_true_rtx)
    return 1;

  else if (condition == const_true_rtx || other_condition == 0)
    return 0;

  other_code = GET_CODE (other_condition);
  if (GET_RTX_LENGTH (code) != 2 || GET_RTX_LENGTH (other_code) != 2
      || ! rtx_equal_p (XEXP (condition, 0), XEXP (other_condition, 0))
      || ! rtx_equal_p (XEXP (condition, 1), XEXP (other_condition, 1)))
    return 0;

  return comparison_dominates_p (code, other_code);
}
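/* Worked example (added for exposition): if CONDITION is (eq x y) and
   INSN branches on (le x y) with the same operands, then
   comparison_dominates_p (EQ, LE) holds -- whenever the EQ branch is
   taken, the LE branch is taken too -- so insns from INSN's delay
   slots may safely be executed on the EQ path.  */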
/* Return nonzero if redirecting JUMP to NEWLABEL does not invalidate
   any insns already in the delay slot of JUMP.  */

static int
redirect_with_delay_slots_safe_p (rtx jump, rtx newlabel, rtx seq)
{
  int flags, i;
  rtx pat = PATTERN (seq);

  /* Make sure all the delay slots of this jump would still
     be valid after threading the jump.  If they are still
     valid, then return nonzero.  */

  flags = get_jump_flags (jump, newlabel);
  for (i = 1; i < XVECLEN (pat, 0); i++)
    if (! (
#ifdef ANNUL_IFFALSE_SLOTS
           (INSN_ANNULLED_BRANCH_P (jump)
            && INSN_FROM_TARGET_P (XVECEXP (pat, 0, i)))
           ? eligible_for_annul_false (jump, i - 1,
                                       XVECEXP (pat, 0, i), flags) :
#endif
#ifdef ANNUL_IFTRUE_SLOTS
           (INSN_ANNULLED_BRANCH_P (jump)
            && ! INSN_FROM_TARGET_P (XVECEXP (pat, 0, i)))
           ? eligible_for_annul_true (jump, i - 1,
                                      XVECEXP (pat, 0, i), flags) :
#endif
           eligible_for_delay (jump, i - 1, XVECEXP (pat, 0, i), flags)))
      break;

  return (i == XVECLEN (pat, 0));
}
/* Return nonzero if redirecting JUMP to NEWLABEL does not invalidate
   any insns we wish to place in the delay slot of JUMP.  */

static int
redirect_with_delay_list_safe_p (rtx jump, rtx newlabel, rtx delay_list)
{
  int flags, i;
  rtx li;

  /* Make sure all the insns in DELAY_LIST would still be
     valid after threading the jump.  If they are still
     valid, then return nonzero.  */

  flags = get_jump_flags (jump, newlabel);
  for (li = delay_list, i = 0; li; li = XEXP (li, 1), i++)
    if (! (
#ifdef ANNUL_IFFALSE_SLOTS
           (INSN_ANNULLED_BRANCH_P (jump)
            && INSN_FROM_TARGET_P (XEXP (li, 0)))
           ? eligible_for_annul_false (jump, i, XEXP (li, 0), flags) :
#endif
#ifdef ANNUL_IFTRUE_SLOTS
           (INSN_ANNULLED_BRANCH_P (jump)
            && ! INSN_FROM_TARGET_P (XEXP (li, 0)))
           ? eligible_for_annul_true (jump, i, XEXP (li, 0), flags) :
#endif
           eligible_for_delay (jump, i, XEXP (li, 0), flags)))
      break;

  return (li == NULL);
}
/* DELAY_LIST is a list of insns that have already been placed into delay
   slots.  See if all of them have the same annulling status as ANNUL_TRUE_P.
   If not, return 0; otherwise return 1.  */

static int
check_annul_list_true_false (int annul_true_p, rtx delay_list)
{
  rtx temp;

  for (temp = delay_list; temp; temp = XEXP (temp, 1))
    {
      rtx trial = XEXP (temp, 0);

      if ((annul_true_p && INSN_FROM_TARGET_P (trial))
          || (!annul_true_p && !INSN_FROM_TARGET_P (trial)))
        return 0;
    }

  return 1;
}
/* INSN branches to an insn whose pattern SEQ is a SEQUENCE.  Given that
   the condition tested by INSN is CONDITION and the resources shown in
   OTHER_NEEDED are needed after INSN, see whether INSN can take all the insns
   from SEQ's delay list, in addition to whatever insns it may execute
   (in DELAY_LIST).  SETS and NEEDED denote resources already set and
   needed while searching for delay slot insns.  Return the concatenated
   delay list if possible, otherwise, return 0.

   SLOTS_TO_FILL is the total number of slots required by INSN, and
   PSLOTS_FILLED points to the number filled so far (also the number of
   insns in DELAY_LIST).  It is updated with the number that have been
   filled from the SEQUENCE, if any.

   PANNUL_P points to a nonzero value if we already know that we need
   to annul INSN.  If this routine determines that annulling is needed,
   it may set that value nonzero.

   PNEW_THREAD points to a location that is to receive the place at which
   execution should continue.  */
static rtx
steal_delay_list_from_target (rtx insn, rtx condition, rtx seq,
                              rtx delay_list, struct resources *sets,
                              struct resources *needed,
                              struct resources *other_needed,
                              int slots_to_fill, int *pslots_filled,
                              int *pannul_p, rtx *pnew_thread)
{
  rtx temp;
  int slots_remaining = slots_to_fill - *pslots_filled;
  int total_slots_filled = *pslots_filled;
  rtx new_delay_list = 0;
  int must_annul = *pannul_p;
  int used_annul = 0;
  int i;
  struct resources cc_set;

  /* We can't do anything if there are more delay slots in SEQ than we
     can handle, or if we don't know that it will be a taken branch.
     We know that it will be a taken branch if it is either an unconditional
     branch or a conditional branch with a stricter branch condition.

     Also, exit if the branch has more than one set, since then it is computing
     other results that can't be ignored, e.g. the HPPA mov&branch instruction.
     ??? It may be possible to move other sets into INSN in addition to
     moving the instructions in the delay slots.

     We cannot steal the delay list if one of the instructions in the
     current delay_list modifies the condition codes and the jump in the
     sequence is a conditional jump.  We cannot do this because we cannot
     change the direction of the jump because the condition codes
     will affect the direction of the jump in the sequence.  */

  CLEAR_RESOURCE (&cc_set);
  for (temp = delay_list; temp; temp = XEXP (temp, 1))
    {
      rtx trial = XEXP (temp, 0);

      mark_set_resources (trial, &cc_set, 0, MARK_SRC_DEST_CALL);
      if (insn_references_resource_p (XVECEXP (seq, 0, 0), &cc_set, false))
        return delay_list;
    }

  if (XVECLEN (seq, 0) - 1 > slots_remaining
      || ! condition_dominates_p (condition, XVECEXP (seq, 0, 0))
      || ! single_set (XVECEXP (seq, 0, 0)))
    return delay_list;

#ifdef MD_CAN_REDIRECT_BRANCH
  /* On some targets, branches with delay slots can have a limited
     displacement.  Give the back end a chance to tell us we can't do
     this.  */
  if (! MD_CAN_REDIRECT_BRANCH (insn, XVECEXP (seq, 0, 0)))
    return delay_list;
#endif

  for (i = 1; i < XVECLEN (seq, 0); i++)
    {
      rtx trial = XVECEXP (seq, 0, i);
      int flags;

      if (insn_references_resource_p (trial, sets, false)
          || insn_sets_resource_p (trial, needed, false)
          || insn_sets_resource_p (trial, sets, false)
#ifdef HAVE_cc0
          /* If TRIAL sets CC0, we can't copy it, so we can't steal this
             delay list.  */
          || find_reg_note (trial, REG_CC_USER, NULL_RTX)
#endif
          /* If TRIAL is from the fallthrough code of an annulled branch insn
             in SEQ, we cannot use it.  */
          || (INSN_ANNULLED_BRANCH_P (XVECEXP (seq, 0, 0))
              && ! INSN_FROM_TARGET_P (trial)))
        return delay_list;

      /* If this insn was already done (usually in a previous delay slot),
         pretend we put it in our delay slot.  */
      if (redundant_insn (trial, insn, new_delay_list))
        continue;

      /* We will end up re-vectoring this branch, so compute flags
         based on jumping to the new label.  */
      flags = get_jump_flags (insn, JUMP_LABEL (XVECEXP (seq, 0, 0)));

      if (! must_annul
          && ((condition == const_true_rtx
               || (! insn_sets_resource_p (trial, other_needed, false)
                   && ! may_trap_or_fault_p (PATTERN (trial)))))
          ? eligible_for_delay (insn, total_slots_filled, trial, flags)
          : (must_annul || (delay_list == NULL && new_delay_list == NULL))
             && (must_annul = 1,
                 check_annul_list_true_false (0, delay_list)
                 && check_annul_list_true_false (0, new_delay_list)
                 && eligible_for_annul_false (insn, total_slots_filled,
                                              trial, flags)))
        {
          if (must_annul)
            used_annul = 1;
          temp = copy_rtx (trial);
          INSN_FROM_TARGET_P (temp) = 1;
          new_delay_list = add_to_delay_list (temp, new_delay_list);
          total_slots_filled++;

          if (--slots_remaining == 0)
            break;
        }
      else
        return delay_list;
    }

  /* Show the place to which we will be branching.  */
  *pnew_thread = first_active_target_insn (JUMP_LABEL (XVECEXP (seq, 0, 0)));

  /* Add any new insns to the delay list and update the count of the
     number of slots filled.  */
  *pslots_filled = total_slots_filled;
  if (used_annul)
    *pannul_p = 1;

  if (delay_list == 0)
    return new_delay_list;

  for (temp = new_delay_list; temp; temp = XEXP (temp, 1))
    delay_list = add_to_delay_list (XEXP (temp, 0), delay_list);

  return delay_list;
}
/* Similar to steal_delay_list_from_target except that SEQ is on the
   fallthrough path of INSN.  Here we only do something if the delay insn
   of SEQ is an unconditional branch.  In that case we steal its delay slot
   for INSN since unconditional branches are much easier to fill.  */

static rtx
steal_delay_list_from_fallthrough (rtx insn, rtx condition, rtx seq,
                                   rtx delay_list, struct resources *sets,
                                   struct resources *needed,
                                   struct resources *other_needed,
                                   int slots_to_fill, int *pslots_filled,
                                   int *pannul_p)
{
  int i;
  int flags;
  int must_annul = *pannul_p;
  int used_annul = 0;

  flags = get_jump_flags (insn, JUMP_LABEL (insn));

  /* We can't do anything if SEQ's delay insn isn't an
     unconditional branch.  */

  if (! simplejump_p (XVECEXP (seq, 0, 0))
      && GET_CODE (PATTERN (XVECEXP (seq, 0, 0))) != RETURN)
    return delay_list;

  for (i = 1; i < XVECLEN (seq, 0); i++)
    {
      rtx trial = XVECEXP (seq, 0, i);

      /* If TRIAL sets CC0, stealing it will move it too far from the use
         of CC0.  */
      if (insn_references_resource_p (trial, sets, false)
          || insn_sets_resource_p (trial, needed, false)
          || insn_sets_resource_p (trial, sets, false)
#ifdef HAVE_cc0
          || sets_cc0_p (PATTERN (trial))
#endif
          )

        break;

      /* If this insn was already done, we don't need it.  */
      if (redundant_insn (trial, insn, delay_list))
        {
          delete_from_delay_slot (trial);
          continue;
        }

      if (! must_annul
          && ((condition == const_true_rtx
               || (! insn_sets_resource_p (trial, other_needed, false)
                   && ! may_trap_or_fault_p (PATTERN (trial)))))
          ? eligible_for_delay (insn, *pslots_filled, trial, flags)
          : (must_annul || delay_list == NULL) && (must_annul = 1,
             check_annul_list_true_false (1, delay_list)
             && eligible_for_annul_true (insn, *pslots_filled, trial, flags)))
        {
          if (must_annul)
            used_annul = 1;
          delete_from_delay_slot (trial);
          delay_list = add_to_delay_list (trial, delay_list);

          if (++(*pslots_filled) == slots_to_fill)
            break;
        }
      else
        break;
    }

  if (used_annul)
    *pannul_p = 1;
  return delay_list;
}
/* Try merging insns starting at THREAD which match exactly the insns in
   INSN's delay list.

   If all insns were matched and the insn was previously annulling, the
   annul bit will be cleared.

   For each insn that is merged, if the branch is or will be non-annulling,
   we delete the merged insn.  */

static void
try_merge_delay_insns (rtx insn, rtx thread)
{
  rtx trial, next_trial;
  rtx delay_insn = XVECEXP (PATTERN (insn), 0, 0);
  int annul_p = INSN_ANNULLED_BRANCH_P (delay_insn);
  int slot_number = 1;
  int num_slots = XVECLEN (PATTERN (insn), 0);
  rtx next_to_match = XVECEXP (PATTERN (insn), 0, slot_number);
  struct resources set, needed;
  rtx merged_insns = 0;
  int i;
  int flags;

  flags = get_jump_flags (delay_insn, JUMP_LABEL (delay_insn));

  CLEAR_RESOURCE (&needed);
  CLEAR_RESOURCE (&set);

  /* If this is not an annulling branch, take into account anything needed in
     INSN's delay slot.  This prevents two increments from being incorrectly
     folded into one.  If we are annulling, this would be the correct
     thing to do.  (The alternative, looking at things set in NEXT_TO_MATCH
     will essentially disable this optimization.  This method is somewhat of
     a kludge, but I don't see a better way.)  */
  if (! annul_p)
    for (i = 1; i < num_slots; i++)
      if (XVECEXP (PATTERN (insn), 0, i))
        mark_referenced_resources (XVECEXP (PATTERN (insn), 0, i), &needed,
                                   true);

  for (trial = thread; !stop_search_p (trial, 1); trial = next_trial)
    {
      rtx pat = PATTERN (trial);
      rtx oldtrial = trial;

      next_trial = next_nonnote_insn (trial);

      /* TRIAL must be a CALL_INSN or INSN.  Skip USE and CLOBBER.  */
      if (NONJUMP_INSN_P (trial)
          && (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER))
        continue;

      if (GET_CODE (next_to_match) == GET_CODE (trial)
#ifdef HAVE_cc0
          /* We can't share an insn that sets cc0.  */
          && ! sets_cc0_p (pat)
#endif
          && ! insn_references_resource_p (trial, &set, true)
          && ! insn_sets_resource_p (trial, &set, true)
          && ! insn_sets_resource_p (trial, &needed, true)
          && (trial = try_split (pat, trial, 0)) != 0
          /* Update next_trial, in case try_split succeeded.  */
          && (next_trial = next_nonnote_insn (trial))
          /* Likewise THREAD.  */
          && (thread = oldtrial == thread ? trial : thread)
          && rtx_equal_p (PATTERN (next_to_match), PATTERN (trial))
          /* Have to test this condition if annul condition is different
             from (and less restrictive than) non-annulling one.  */
          && eligible_for_delay (delay_insn, slot_number - 1, trial, flags))
        {
          if (! annul_p)
            {
              update_block (trial, thread);
              if (trial == thread)
                thread = next_active_insn (thread);

              delete_related_insns (trial);
              INSN_FROM_TARGET_P (next_to_match) = 0;
            }
          else
            merged_insns = gen_rtx_INSN_LIST (VOIDmode, trial, merged_insns);

          if (++slot_number == num_slots)
            break;

          next_to_match = XVECEXP (PATTERN (insn), 0, slot_number);
        }

      mark_set_resources (trial, &set, 0, MARK_SRC_DEST_CALL);
      mark_referenced_resources (trial, &needed, true);
    }

  /* See if we stopped on a filled insn.  If we did, try to see if its
     delay slots match.  */
  if (slot_number != num_slots
      && trial && NONJUMP_INSN_P (trial)
      && GET_CODE (PATTERN (trial)) == SEQUENCE
      && ! INSN_ANNULLED_BRANCH_P (XVECEXP (PATTERN (trial), 0, 0)))
    {
      rtx pat = PATTERN (trial);
      rtx filled_insn = XVECEXP (pat, 0, 0);

      /* Account for resources set/needed by the filled insn.  */
      mark_set_resources (filled_insn, &set, 0, MARK_SRC_DEST_CALL);
      mark_referenced_resources (filled_insn, &needed, true);

      for (i = 1; i < XVECLEN (pat, 0); i++)
        {
          rtx dtrial = XVECEXP (pat, 0, i);

          if (! insn_references_resource_p (dtrial, &set, true)
              && ! insn_sets_resource_p (dtrial, &set, true)
              && ! insn_sets_resource_p (dtrial, &needed, true)
#ifdef HAVE_cc0
              && ! sets_cc0_p (PATTERN (dtrial))
#endif
              && rtx_equal_p (PATTERN (next_to_match), PATTERN (dtrial))
              && eligible_for_delay (delay_insn, slot_number - 1, dtrial, flags))
            {
              if (! annul_p)
                {
                  rtx new_rtx;

                  update_block (dtrial, thread);
                  new_rtx = delete_from_delay_slot (dtrial);
                  if (INSN_DELETED_P (thread))
                    thread = new_rtx;
                  INSN_FROM_TARGET_P (next_to_match) = 0;
                }
              else
                merged_insns = gen_rtx_INSN_LIST (SImode, dtrial,
                                                  merged_insns);

              if (++slot_number == num_slots)
                break;

              next_to_match = XVECEXP (PATTERN (insn), 0, slot_number);
            }
          else
            {
              /* Keep track of the set/referenced resources for the delay
                 slots of any trial insns we encounter.  */
              mark_set_resources (dtrial, &set, 0, MARK_SRC_DEST_CALL);
              mark_referenced_resources (dtrial, &needed, true);
            }
        }
    }

  /* If all insns in the delay slot have been matched and we were previously
     annulling the branch, we need not annul it any more.  In that case
     delete all the merged insns.  Also clear the INSN_FROM_TARGET_P bit of
     each insn in the delay list so that we know that it isn't only being
     used at the target.  */
  if (slot_number == num_slots && annul_p)
    {
      for (; merged_insns; merged_insns = XEXP (merged_insns, 1))
        {
          if (GET_MODE (merged_insns) == SImode)
            {
              rtx new_rtx;

              update_block (XEXP (merged_insns, 0), thread);
              new_rtx = delete_from_delay_slot (XEXP (merged_insns, 0));
              if (INSN_DELETED_P (thread))
                thread = new_rtx;
            }
          else
            {
              update_block (XEXP (merged_insns, 0), thread);
              delete_related_insns (XEXP (merged_insns, 0));
            }
        }

      INSN_ANNULLED_BRANCH_P (delay_insn) = 0;

      for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
        INSN_FROM_TARGET_P (XVECEXP (PATTERN (insn), 0, i)) = 0;
    }
}
/* See if INSN is redundant with an insn in front of TARGET.  Often this
   is called when INSN is a candidate for a delay slot of TARGET.
   DELAY_LIST are insns that will be placed in delay slots of TARGET in front
   of INSN.  Often INSN will be redundant with an insn in a delay slot of
   some previous insn.  This happens when we have a series of branches to the
   same label; in that case the first insn at the target might want to go
   into each of the delay slots.

   If we are not careful, this routine can take up a significant fraction
   of the total compilation time (4%), but only wins rarely.  Hence we
   speed this routine up by making two passes.  The first pass goes back
   until it hits a label and sees if it finds an insn with an identical
   pattern.  Only in this (relatively rare) event does it check for
   data conflicts.

   We do not split insns we encounter.  This could cause us not to find a
   redundant insn, but the cost of splitting seems greater than the possible
   gain in rare cases.  */
static rtx
redundant_insn (rtx insn, rtx target, rtx delay_list)
{
  rtx target_main = target;
  rtx ipat = PATTERN (insn);
  rtx trial, pat;
  struct resources needed, set;
  int i;
  unsigned insns_to_search;

  /* If INSN has any REG_UNUSED notes, it can't match anything since we
     are allowed to not actually assign to such a register.  */
  if (find_reg_note (insn, REG_UNUSED, NULL_RTX) != 0)
    return 0;

  /* Scan backwards looking for a match.  */
  for (trial = PREV_INSN (target),
         insns_to_search = MAX_DELAY_SLOT_INSN_SEARCH;
       trial && insns_to_search > 0;
       trial = PREV_INSN (trial))
    {
      if (LABEL_P (trial))
        return 0;

      if (!NONDEBUG_INSN_P (trial))
        continue;
      --insns_to_search;

      pat = PATTERN (trial);
      if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
        continue;

      if (GET_CODE (pat) == SEQUENCE)
        {
          /* Stop for a CALL and its delay slots because it is difficult to
             track its resource needs correctly.  */
          if (CALL_P (XVECEXP (pat, 0, 0)))
            return 0;

          /* Stop for an INSN or JUMP_INSN with delayed effects and its delay
             slots because it is difficult to track its resource needs
             correctly.  */

#ifdef INSN_SETS_ARE_DELAYED
          if (INSN_SETS_ARE_DELAYED (XVECEXP (pat, 0, 0)))
            return 0;
#endif

#ifdef INSN_REFERENCES_ARE_DELAYED
          if (INSN_REFERENCES_ARE_DELAYED (XVECEXP (pat, 0, 0)))
            return 0;
#endif

          /* See if any of the insns in the delay slot match, updating
             resource requirements as we go.  */
          for (i = XVECLEN (pat, 0) - 1; i > 0; i--)
            if (GET_CODE (XVECEXP (pat, 0, i)) == GET_CODE (insn)
                && rtx_equal_p (PATTERN (XVECEXP (pat, 0, i)), ipat)
                && ! find_reg_note (XVECEXP (pat, 0, i), REG_UNUSED, NULL_RTX))
              break;

          /* If found a match, exit this loop early.  */
          if (i > 0)
            break;
        }

      else if (GET_CODE (trial) == GET_CODE (insn) && rtx_equal_p (pat, ipat)
               && ! find_reg_note (trial, REG_UNUSED, NULL_RTX))
        break;
    }

  /* If we didn't find an insn that matches, return 0.  */
  if (trial == 0)
    return 0;

  /* See what resources this insn sets and needs.  If they overlap, or
     if this insn references CC0, it can't be redundant.  */

  CLEAR_RESOURCE (&needed);
  CLEAR_RESOURCE (&set);
  mark_set_resources (insn, &set, 0, MARK_SRC_DEST_CALL);
  mark_referenced_resources (insn, &needed, true);

  /* If TARGET is a SEQUENCE, get the main insn.  */
  if (NONJUMP_INSN_P (target) && GET_CODE (PATTERN (target)) == SEQUENCE)
    target_main = XVECEXP (PATTERN (target), 0, 0);

  if (resource_conflicts_p (&needed, &set)
#ifdef HAVE_cc0
      || reg_mentioned_p (cc0_rtx, ipat)
#endif
      /* The insn requiring the delay may not set anything needed or set by
         INSN.  */
      || insn_sets_resource_p (target_main, &needed, true)
      || insn_sets_resource_p (target_main, &set, true))
    return 0;

  /* Insns we pass may not set either NEEDED or SET, so merge them for
     simpler tests.  */
  needed.memory |= set.memory;
  needed.unch_memory |= set.unch_memory;
  IOR_HARD_REG_SET (needed.regs, set.regs);

  /* This insn isn't redundant if it conflicts with an insn that either is
     or will be in a delay slot of TARGET.  */

  while (delay_list)
    {
      if (insn_sets_resource_p (XEXP (delay_list, 0), &needed, true))
        return 0;
      delay_list = XEXP (delay_list, 1);
    }

  if (NONJUMP_INSN_P (target) && GET_CODE (PATTERN (target)) == SEQUENCE)
    for (i = 1; i < XVECLEN (PATTERN (target), 0); i++)
      if (insn_sets_resource_p (XVECEXP (PATTERN (target), 0, i), &needed,
                                true))
        return 0;

  /* Scan backwards until we reach a label or an insn that uses something
     INSN sets or sets something INSN uses or sets.  */

  for (trial = PREV_INSN (target),
         insns_to_search = MAX_DELAY_SLOT_INSN_SEARCH;
       trial && !LABEL_P (trial) && insns_to_search > 0;
       trial = PREV_INSN (trial))
    {
      if (!NONDEBUG_INSN_P (trial))
        continue;
      --insns_to_search;

      pat = PATTERN (trial);
      if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
        continue;

      if (GET_CODE (pat) == SEQUENCE)
        {
          /* If this is a CALL_INSN and its delay slots, it is hard to track
             the resource needs properly, so give up.  */
          if (CALL_P (XVECEXP (pat, 0, 0)))
            return 0;

          /* If this is an INSN or JUMP_INSN with delayed effects, it
             is hard to track the resource needs properly, so give up.  */

#ifdef INSN_SETS_ARE_DELAYED
          if (INSN_SETS_ARE_DELAYED (XVECEXP (pat, 0, 0)))
            return 0;
#endif

#ifdef INSN_REFERENCES_ARE_DELAYED
          if (INSN_REFERENCES_ARE_DELAYED (XVECEXP (pat, 0, 0)))
            return 0;
#endif

          /* See if any of the insns in the delay slot match, updating
             resource requirements as we go.  */
          for (i = XVECLEN (pat, 0) - 1; i > 0; i--)
            {
              rtx candidate = XVECEXP (pat, 0, i);

              /* If an insn will be annulled if the branch is false, it isn't
                 considered as a possible duplicate insn.  */
              if (rtx_equal_p (PATTERN (candidate), ipat)
                  && ! (INSN_ANNULLED_BRANCH_P (XVECEXP (pat, 0, 0))
                        && INSN_FROM_TARGET_P (candidate)))
                {
                  /* Show that this insn will be used in the sequel.  */
                  INSN_FROM_TARGET_P (candidate) = 0;
                  return candidate;
                }

              /* Unless this is an annulled insn from the target of a branch,
                 we must stop if it sets anything needed or set by INSN.  */
              if ((! INSN_ANNULLED_BRANCH_P (XVECEXP (pat, 0, 0))
                   || ! INSN_FROM_TARGET_P (candidate))
                  && insn_sets_resource_p (candidate, &needed, true))
                return 0;
            }

          /* If the insn requiring the delay slot conflicts with INSN, we
             must stop.  */
          if (insn_sets_resource_p (XVECEXP (pat, 0, 0), &needed, true))
            return 0;
        }
      else
        {
          /* See if TRIAL is the same as INSN.  */
          pat = PATTERN (trial);
          if (rtx_equal_p (pat, ipat))
            return trial;

          /* Can't go any further if TRIAL conflicts with INSN.  */
          if (insn_sets_resource_p (trial, &needed, true))
            return 0;
        }
    }

  return 0;
}
/* Return 1 if THREAD can only be executed in one way.  If LABEL is nonzero,
   it is the target of the branch insn being scanned.  If ALLOW_FALLTHROUGH
   is nonzero, we are allowed to fall into this thread; otherwise, we are
   not.

   If LABEL is used more than once, or we pass a label other than LABEL
   before finding an active insn, we do not own this thread.  */

static int
own_thread_p (rtx thread, rtx label, int allow_fallthrough)
{
  rtx active_insn;
  rtx insn;

  /* We don't own the function end.  */
  if (thread == 0 || ANY_RETURN_P (thread))
    return 0;

  /* Get the first active insn, or THREAD, if it is an active insn.  */
  active_insn = next_active_insn (PREV_INSN (thread));

  for (insn = thread; insn != active_insn; insn = NEXT_INSN (insn))
    if (LABEL_P (insn)
        && (insn != label || LABEL_NUSES (insn) != 1))
      return 0;

  if (allow_fallthrough)
    return 1;

  /* Ensure that we reach a BARRIER before any insn or label.  */
  for (insn = prev_nonnote_insn (thread);
       insn == 0 || !BARRIER_P (insn);
       insn = prev_nonnote_insn (insn))
    if (insn == 0
        || LABEL_P (insn)
        || (NONJUMP_INSN_P (insn)
            && GET_CODE (PATTERN (insn)) != USE
            && GET_CODE (PATTERN (insn)) != CLOBBER))
      return 0;

  return 1;
}
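/* Example (added for exposition): a thread is "owned" when it can only
   be reached through the branch being filled, e.g. a label with
   LABEL_NUSES == 1 and no fallthrough into it.  Owned threads may have
   insns removed or modified when they are stolen into delay slots;
   threads reachable some other way may only be copied from.  */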
/* Called when INSN is being moved from a location near the target of a jump.
   We leave a marker of the form (use (INSN)) immediately in front
   of WHERE for mark_target_live_regs.  These markers will be deleted when
   reorg finishes.

   We used to try to update the live status of registers if WHERE is at
   the start of a basic block, but that can't work since we may remove a
   BARRIER in relax_delay_slots.  */

static void
update_block (rtx insn, rtx where)
{
  /* Ignore if this was in a delay slot and it came from the target of
     a branch.  */
  if (INSN_FROM_TARGET_P (insn))
    return;

  emit_insn_before (gen_rtx_USE (VOIDmode, insn), where);

  /* INSN might be making a value live in a block where it didn't use to
     be.  So recompute liveness information for this block.  */

  incr_ticks_for_insn (insn);
}
/* Similar to REDIRECT_JUMP except that we update the BB_TICKS entry for
   the basic block containing the jump.  */

static int
reorg_redirect_jump (rtx jump, rtx nlabel)
{
  incr_ticks_for_insn (jump);
  return redirect_jump (jump, nlabel, 1);
}
/* Called when INSN is being moved forward into a delay slot of DELAYED_INSN.
   We check every instruction between INSN and DELAYED_INSN for REG_DEAD notes
   that reference values used in INSN.  If we find one, then we move the
   REG_DEAD note to INSN.

   This is needed to handle the case where a later insn (after INSN) has a
   REG_DEAD note for a register used by INSN, and this later insn subsequently
   gets moved before a CODE_LABEL because it is a redundant insn.  In this
   case, mark_target_live_regs may be confused into thinking the register
   is dead because it sees a REG_DEAD note immediately before a CODE_LABEL.  */

static void
update_reg_dead_notes (rtx insn, rtx delayed_insn)
{
  rtx p, link, next;

  for (p = next_nonnote_insn (insn); p != delayed_insn;
       p = next_nonnote_insn (p))
    for (link = REG_NOTES (p); link; link = next)
      {
        next = XEXP (link, 1);

        if (REG_NOTE_KIND (link) != REG_DEAD
            || !REG_P (XEXP (link, 0)))
          continue;

        if (reg_referenced_p (XEXP (link, 0), PATTERN (insn)))
          {
            /* Move the REG_DEAD note from P to INSN.  */
            remove_note (p, link);
            XEXP (link, 1) = REG_NOTES (insn);
            REG_NOTES (insn) = link;
          }
      }
}
1937 /* Called when an insn redundant with start_insn is deleted. If there
1938 is a REG_DEAD note for the target of start_insn between start_insn
1939 and stop_insn, then the REG_DEAD note needs to be deleted since the
1940 value no longer dies there.
1942 If the REG_DEAD note isn't deleted, then mark_target_live_regs may be
1943 confused into thinking the register is dead. */
1946 fix_reg_dead_note (rtx start_insn, rtx stop_insn)
1950 for (p = next_nonnote_insn (start_insn); p != stop_insn;
1951 p = next_nonnote_insn (p))
1952 for (link = REG_NOTES (p); link; link = next)
1954 next = XEXP (link, 1);
1956 if (REG_NOTE_KIND (link) != REG_DEAD
1957 || !REG_P (XEXP (link, 0)))
1960 if (reg_set_p (XEXP (link, 0), PATTERN (start_insn)))
1962 remove_note (p, link);
1968 /* Delete any REG_UNUSED notes that exist on INSN but not on REDUNDANT_INSN.
1970 This handles the case of udivmodXi4 instructions which optimize their
1971 output depending on whether any REG_UNUSED notes are present.
1972 We must make sure that INSN calculates as many results as REDUNDANT_INSN. */
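/* For example, on a port whose udivmodsi4 pattern produces both results
   (hypothetical registers):

	r3 = r1 / r2   with note (REG_UNUSED r4)
	r4 = r1 % r2

   may be emitted as a divide alone.  An insn carrying such a note can
   therefore compute fewer results than a note-free insn with the same
   pattern, so their REG_UNUSED notes must agree before one may stand in
   for the other.  */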
1976 update_reg_unused_notes (rtx insn, rtx redundant_insn)
1980 for (link = REG_NOTES (insn); link; link = next)
1982 next = XEXP (link, 1);
1984 if (REG_NOTE_KIND (link) != REG_UNUSED
1985 || !REG_P (XEXP (link, 0)))
1988 if (! find_regno_note (redundant_insn, REG_UNUSED,
1989 REGNO (XEXP (link, 0))))
1990 remove_note (insn, link);
1994 /* Return the label before INSN, or put a new label there. */
1997 get_label_before (rtx insn)
2001 /* Find an existing label at this point
2002 or make a new one if there is none. */
2003 label = prev_nonnote_insn (insn);
2005 if (label == 0 || !LABEL_P (label))
2007 rtx prev = PREV_INSN (insn);
2009 label = gen_label_rtx ();
2010 emit_label_after (label, prev);
2011 LABEL_NUSES (label) = 0;
2016 /* Scan a function looking for insns that need a delay slot and find insns to
2017 put into the delay slot.
2019 NON_JUMPS_P is nonzero if we are to only try to fill non-jump insns (such
2020 as calls). We do these first since we don't want jump insns (that are
2021 easier to fill) to get the only insns that could be used for non-jump insns.
2022 When it is zero, only try to fill JUMP_INSNs.
2024 When slots are filled in this manner, the insns (including the
2025 delay_insn) are put together in a SEQUENCE rtx. In this fashion,
2026 it is possible to tell whether a delay slot has really been filled
2027 or not. `final' knows how to deal with this, by communicating
2028 through FINAL_SEQUENCE. */
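/* Schematically, a branch with two filled slots becomes a single insn
   whose pattern is a SEQUENCE:

	(insn (sequence [(jump_insn ...)	; the insn needing the slots
			 (insn ...)		; delay slot 1
			 (insn ...)]))		; delay slot 2

   so an insn whose pattern is not a SEQUENCE is known to have empty
   slots.  */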
2031 fill_simple_delay_slots (int non_jumps_p)
2033 rtx insn, pat, trial, next_trial;
2035 int num_unfilled_slots = unfilled_slots_next - unfilled_slots_base;
2036 struct resources needed, set;
2037 int slots_to_fill, slots_filled;
2040 for (i = 0; i < num_unfilled_slots; i++)
2043 /* Get the next insn to fill. If it has already had any slots assigned,
2044 we can't do anything with it. Maybe we'll improve this later. */
2046 insn = unfilled_slots_base[i];
2048 || INSN_DELETED_P (insn)
2049 || (NONJUMP_INSN_P (insn)
2050 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2051 || (JUMP_P (insn) && non_jumps_p)
2052 || (!JUMP_P (insn) && ! non_jumps_p))
2055 /* It may have been that this insn used to need delay slots, but
2056 now doesn't; ignore in that case. This can happen, for example,
2057 on the HP PA RISC, where the number of delay slots depends on
2058 what insns are nearby. */
2059 slots_to_fill = num_delay_slots (insn);
2061 /* Some machine descriptions define instructions to have
2062 delay slots only in certain circumstances, which may depend on
2063 nearby insns (which change due to reorg's actions).
2065 For example, the PA port normally has delay slots for unconditional jumps.
2068 However, the PA port claims such jumps do not have a delay slot
2069 if they are immediate successors of certain CALL_INSNs. This
2070 allows the port to favor filling the delay slot of the call with
2071 the unconditional jump. */
2072 if (slots_to_fill == 0)
2075 /* This insn needs, or can use, some delay slots. SLOTS_TO_FILL
2076 says how many. After initialization, first try optimizing
2079 call _foo		call _foo
     nop		add %o7,.-L1,%o7
     b,a L1		b,a L1
2083 If this case applies, the delay slot of the call is filled with
2084 the unconditional jump. This is done first to avoid having the
2085 delay slot of the call filled in the backward scan. Also, since
2086 the unconditional jump is likely to also have a delay slot, that
2087 insn must exist when it is subsequently scanned.
2089 This is tried on each insn with delay slots as some machines
2090 have insns which perform calls, but are not represented as
2097 flags = get_jump_flags (insn, JUMP_LABEL (insn));
2099 flags = get_jump_flags (insn, NULL_RTX);
2101 if ((trial = next_active_insn (insn))
2103 && simplejump_p (trial)
2104 && eligible_for_delay (insn, slots_filled, trial, flags)
2105 && no_labels_between_p (insn, trial)
2106 && ! can_throw_internal (trial))
2110 delay_list = add_to_delay_list (trial, delay_list);
2112 /* TRIAL may have had its delay slot filled, then unfilled. When
2113 the delay slot is unfilled, TRIAL is placed back on the unfilled
2114 slots obstack. Unfortunately, it is placed on the end of the
2115 obstack, not in its original location. Therefore, we must search
2116 from entry i + 1 to the end of the unfilled slots obstack to
2117 try and find TRIAL. */
2118 tmp = &unfilled_slots_base[i + 1];
2119 while (*tmp != trial && tmp != unfilled_slots_next)
2122 /* Remove the unconditional jump from consideration for delay slot
2123 filling and unthread it. */
2127 rtx next = NEXT_INSN (trial);
2128 rtx prev = PREV_INSN (trial);
2130 NEXT_INSN (prev) = next;
2132 PREV_INSN (next) = prev;
2136 /* Now, scan backwards from the insn to search for a potential
2137 delay-slot candidate. Stop searching when a label or jump is hit.
2139 For each candidate, if it is to go into the delay slot (moved
2140 forward in execution sequence), it must not need or set any resources
2141 that were set by later insns and must not set any resources that
2142 are needed for those insns.
2144 The delay slot insn itself sets resources unless it is a call
2145 (in which case the called routine, not the insn itself, is doing the setting). */
2148 if (slots_filled < slots_to_fill)
2150 CLEAR_RESOURCE (&needed);
2151 CLEAR_RESOURCE (&set);
2152 mark_set_resources (insn, &set, 0, MARK_SRC_DEST);
2153 mark_referenced_resources (insn, &needed, false);
2155 for (trial = prev_nonnote_insn (insn); ! stop_search_p (trial, 1);
2158 next_trial = prev_nonnote_insn (trial);
2160 /* This must be an INSN or CALL_INSN. */
2161 pat = PATTERN (trial);
2163 /* Stand-alone USE and CLOBBER are just for flow. */
2164 if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
2167 /* Check for resource conflict first, to avoid unnecessary splitting. */
2169 if (! insn_references_resource_p (trial, &set, true)
2170 && ! insn_sets_resource_p (trial, &set, true)
2171 && ! insn_sets_resource_p (trial, &needed, true)
2173 /* Can't separate set of cc0 from its use. */
2174 && ! (reg_mentioned_p (cc0_rtx, pat) && ! sets_cc0_p (pat))
2176 && ! can_throw_internal (trial))
2178 trial = try_split (pat, trial, 1);
2179 next_trial = prev_nonnote_insn (trial);
2180 if (eligible_for_delay (insn, slots_filled, trial, flags))
2182 /* In this case, we are searching backward, so if we
2183 find insns to put on the delay list, we want
2184 to put them at the head, rather than the
2185 tail, of the list. */
2187 update_reg_dead_notes (trial, insn);
2188 delay_list = gen_rtx_INSN_LIST (VOIDmode,
2190 update_block (trial, trial);
2191 delete_related_insns (trial);
2192 if (slots_to_fill == ++slots_filled)
2198 mark_set_resources (trial, &set, 0, MARK_SRC_DEST_CALL);
2199 mark_referenced_resources (trial, &needed, true);
2203 /* If all needed slots haven't been filled, we come here. */
2205 /* Try to optimize case of jumping around a single insn. */
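/* An illustrative sketch of the skip case, with made-up mnemonics on a
   machine with annulling branches:

	b.cond L1			b.cond,annul L1
	insn_a		becomes		insn_a		; annulled slot
     L1: ...			     L1: ...

   INSN_A still takes effect only on the path that used to fall through
   to it, but the delay slot is no longer wasted on a nop.  */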
2206 #if defined(ANNUL_IFFALSE_SLOTS) || defined(ANNUL_IFTRUE_SLOTS)
2207 if (slots_filled != slots_to_fill
2210 && (condjump_p (insn) || condjump_in_parallel_p (insn)))
2212 delay_list = optimize_skip (insn);
2218 /* Try to get insns from beyond the insn needing the delay slot.
2219 These insns can neither set nor reference resources set in insns being
2220 skipped, cannot set resources in the insn being skipped, and, if this
2221 is a CALL_INSN (or a CALL_INSN is passed), cannot trap (because the
2222 call might not return).
2224 There used to be code which continued past the target label if
2225 we saw all uses of the target label. This code did not work,
2226 because it failed to account for some instructions which were
2227 both annulled and marked as from the target. This can happen as a
2228 result of optimize_skip. Since this code was redundant with
2229 fill_eager_delay_slots anyway, it was just deleted. */
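/* As a sketch of why it matters that a call might not return
   (hypothetical code):

	call _bar
	ld [r2],r3	; may fault

   The load must not move into the call's delay slot: the slot executes
   before _bar does, so if _bar never returns (exit, longjmp), a fault
   in the load would appear on a path where the load originally never
   executed.  */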
2231 if (slots_filled != slots_to_fill
2232 /* If this instruction could throw an exception which is
2233 caught in the same function, then it's not safe to fill
2234 the delay slot with an instruction from beyond this
2235 point. For example, consider:
2246 Even though `i' is a local variable, we must be sure not
2247 to put `i = 3' in the delay slot if `f' might throw an
2250 Presumably, we should also check to see if we could get
2251 back to this function via `setjmp'. */
2252 && ! can_throw_internal (insn)
2254 || ((condjump_p (insn) || condjump_in_parallel_p (insn))
2255 && ! simplejump_p (insn)
2256 && !ANY_RETURN_P (JUMP_LABEL (insn)))))
2258 /* Invariant: if INSN is a JUMP_INSN, TARGET is the insn's jump
2259 label; otherwise, zero. */
2261 int maybe_never = 0;
2262 rtx pat, trial_delay;
2264 CLEAR_RESOURCE (&needed);
2265 CLEAR_RESOURCE (&set);
2269 mark_set_resources (insn, &set, 0, MARK_SRC_DEST_CALL);
2270 mark_referenced_resources (insn, &needed, true);
2275 mark_set_resources (insn, &set, 0, MARK_SRC_DEST_CALL);
2276 mark_referenced_resources (insn, &needed, true);
2278 target = JUMP_LABEL (insn);
2281 if (target == 0 || ANY_RETURN_P (target))
2282 for (trial = next_nonnote_insn (insn); !stop_search_p (trial, 1);
2285 next_trial = next_nonnote_insn (trial);
2287 /* This must be an INSN or CALL_INSN. */
2288 pat = PATTERN (trial);
2290 /* Stand-alone USE and CLOBBER are just for flow. */
2291 if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
2294 /* If this already has filled delay slots, get the insn needing the delay slots. */
2296 if (GET_CODE (pat) == SEQUENCE)
2297 trial_delay = XVECEXP (pat, 0, 0);
2299 trial_delay = trial;
2301 /* Stop our search when seeing a jump. */
2302 if (JUMP_P (trial_delay))
2305 /* See if we have a resource problem before we try to split. */
2307 if (GET_CODE (pat) != SEQUENCE
2308 && ! insn_references_resource_p (trial, &set, true)
2309 && ! insn_sets_resource_p (trial, &set, true)
2310 && ! insn_sets_resource_p (trial, &needed, true)
2312 && ! (reg_mentioned_p (cc0_rtx, pat) && ! sets_cc0_p (pat))
2314 && ! (maybe_never && may_trap_or_fault_p (pat))
2315 && (trial = try_split (pat, trial, 0))
2316 && eligible_for_delay (insn, slots_filled, trial, flags)
2317 && ! can_throw_internal (trial))
2319 next_trial = next_nonnote_insn (trial);
2320 delay_list = add_to_delay_list (trial, delay_list);
2323 if (reg_mentioned_p (cc0_rtx, pat))
2324 link_cc0_insns (trial);
2327 delete_related_insns (trial);
2328 if (slots_to_fill == ++slots_filled)
2333 mark_set_resources (trial, &set, 0, MARK_SRC_DEST_CALL);
2334 mark_referenced_resources (trial, &needed, true);
2336 /* Ensure we don't put insns between the setting of cc and the
2337 comparison by moving a setting of cc into an earlier delay
2338 slot since these insns could clobber the condition code. */
2341 /* If this is a call or jump, we might not get here. */
2342 if (CALL_P (trial_delay)
2343 || JUMP_P (trial_delay))
2347 /* If there are slots left to fill and our search was stopped by an
2348 unconditional branch, try the insn at the branch target. We can
2349 redirect the branch if it works.
2351 Don't do this if the insn at the branch target is a branch. */
2352 if (slots_to_fill != slots_filled
2354 && jump_to_label_p (trial)
2355 && simplejump_p (trial)
2356 && (target == 0 || JUMP_LABEL (trial) == target)
2357 && (next_trial = next_active_insn (JUMP_LABEL (trial))) != 0
2358 && ! (NONJUMP_INSN_P (next_trial)
2359 && GET_CODE (PATTERN (next_trial)) == SEQUENCE)
2360 && !JUMP_P (next_trial)
2361 && ! insn_references_resource_p (next_trial, &set, true)
2362 && ! insn_sets_resource_p (next_trial, &set, true)
2363 && ! insn_sets_resource_p (next_trial, &needed, true)
2365 && ! reg_mentioned_p (cc0_rtx, PATTERN (next_trial))
2367 && ! (maybe_never && may_trap_or_fault_p (PATTERN (next_trial)))
2368 && (next_trial = try_split (PATTERN (next_trial), next_trial, 0))
2369 && eligible_for_delay (insn, slots_filled, next_trial, flags)
2370 && ! can_throw_internal (trial))
2372 /* See comment in relax_delay_slots about necessity of using
2373 next_real_insn here. */
2374 rtx new_label = next_real_insn (next_trial);
2377 new_label = get_label_before (new_label);
2379 new_label = find_end_label ();
2384 = add_to_delay_list (copy_rtx (next_trial), delay_list);
2386 reorg_redirect_jump (trial, new_label);
2388 /* If we merged because we both jumped to the same place,
2389 redirect the original insn also. */
2391 reorg_redirect_jump (insn, new_label);
2396 /* If this is an unconditional jump, then try to get insns from the
2397 target of the jump. */
2399 && simplejump_p (insn)
2400 && slots_filled != slots_to_fill)
2402 = fill_slots_from_thread (insn, const_true_rtx,
2403 next_active_insn (JUMP_LABEL (insn)),
2405 own_thread_p (JUMP_LABEL (insn),
2406 JUMP_LABEL (insn), 0),
2407 slots_to_fill, &slots_filled,
2411 unfilled_slots_base[i]
2412 = emit_delay_sequence (insn, delay_list, slots_filled);
2414 if (slots_to_fill == slots_filled)
2415 unfilled_slots_base[i] = 0;
2417 note_delay_statistics (slots_filled, 0);
2420 #ifdef DELAY_SLOTS_FOR_EPILOGUE
2421 /* See if the epilogue needs any delay slots. Try to fill them if so.
2422 The only thing we can do is scan backwards from the end of the
2423 function. If we did this in a previous pass, it is incorrect to do it again.
2425 if (crtl->epilogue_delay_list)
2428 slots_to_fill = DELAY_SLOTS_FOR_EPILOGUE;
2429 if (slots_to_fill == 0)
2433 CLEAR_RESOURCE (&set);
2435 /* The frame pointer and stack pointer are needed at the beginning of
2436 the epilogue, so instructions setting them cannot be put in the
2437 epilogue delay slot. However, everything else needed at function
2438 end is safe, so we don't want to use end_of_function_needs here. */
2439 CLEAR_RESOURCE (&needed);
2440 if (frame_pointer_needed)
2442 SET_HARD_REG_BIT (needed.regs, FRAME_POINTER_REGNUM);
2443 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
2444 SET_HARD_REG_BIT (needed.regs, HARD_FRAME_POINTER_REGNUM);
2446 if (! EXIT_IGNORE_STACK
2447 || current_function_sp_is_unchanging)
2448 SET_HARD_REG_BIT (needed.regs, STACK_POINTER_REGNUM);
2451 SET_HARD_REG_BIT (needed.regs, STACK_POINTER_REGNUM);
2453 #ifdef EPILOGUE_USES
2454 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2456 if (EPILOGUE_USES (i))
2457 SET_HARD_REG_BIT (needed.regs, i);
2461 for (trial = get_last_insn (); ! stop_search_p (trial, 1);
2462 trial = PREV_INSN (trial))
2466 pat = PATTERN (trial);
2467 if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
2470 if (! insn_references_resource_p (trial, &set, true)
2471 && ! insn_sets_resource_p (trial, &needed, true)
2472 && ! insn_sets_resource_p (trial, &set, true)
2474 /* Don't want to mess with cc0 here. */
2475 && ! reg_mentioned_p (cc0_rtx, pat)
2477 && ! can_throw_internal (trial))
2479 trial = try_split (pat, trial, 1);
2480 if (ELIGIBLE_FOR_EPILOGUE_DELAY (trial, slots_filled))
2482 /* Here as well we are searching backward, so put the
2483 insns we find on the head of the list. */
2485 crtl->epilogue_delay_list
2486 = gen_rtx_INSN_LIST (VOIDmode, trial,
2487 crtl->epilogue_delay_list);
2488 mark_end_of_function_resources (trial, true);
2489 update_block (trial, trial);
2490 delete_related_insns (trial);
2492 /* Clear deleted bit so final.c will output the insn. */
2493 INSN_DELETED_P (trial) = 0;
2495 if (slots_to_fill == ++slots_filled)
2501 mark_set_resources (trial, &set, 0, MARK_SRC_DEST_CALL);
2502 mark_referenced_resources (trial, &needed, true);
2505 note_delay_statistics (slots_filled, 0);
2509 /* Follow any unconditional jump at LABEL;
2510 return the ultimate label reached by any such chain of jumps.
2511 Return ret_rtx if the chain ultimately leads to a return instruction.
2512 If LABEL is not followed by a jump, return LABEL.
2513 If the chain loops or we can't find the end, return LABEL,
2514 since that tells the caller to avoid changing the insn. */
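/* For example (labels are illustrative):

     L1: jmp L2
     L2: jmp L3		follow_jumps (L1) == L3
     L3: insn_a

   while a cycle such as L1: jmp L2 / L2: jmp L1 returns L1 unchanged,
   telling the caller to leave the insn alone.  */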
2517 follow_jumps (rtx label)
2524 if (ANY_RETURN_P (label))
2528 && (insn = next_active_insn (value)) != 0
2530 && JUMP_LABEL (insn) != NULL_RTX
2531 && ((any_uncondjump_p (insn) && onlyjump_p (insn))
2532 || GET_CODE (PATTERN (insn)) == RETURN)
2533 && (next = NEXT_INSN (insn))
2534 && BARRIER_P (next));
2537 rtx this_label = JUMP_LABEL (insn);
2540 /* If we have found a cycle, make the insn jump to itself. */
2541 if (this_label == label)
2543 if (ANY_RETURN_P (this_label))
2545 tem = next_active_insn (this_label);
2547 && (GET_CODE (PATTERN (tem)) == ADDR_VEC
2548 || GET_CODE (PATTERN (tem)) == ADDR_DIFF_VEC))
2558 /* Try to find insns to place in delay slots.
2560 INSN is the jump needing SLOTS_TO_FILL delay slots. It tests CONDITION
2561 or is an unconditional branch if CONDITION is const_true_rtx.
2562 *PSLOTS_FILLED is updated with the number of slots that we have filled.
2564 THREAD is a flow-of-control, either the insns to be executed if the
2565 branch is true or if the branch is false, THREAD_IF_TRUE says which.
2567 OPPOSITE_THREAD is the thread in the opposite direction. It is used
2568 to see if any potential delay slot insns set things needed there.
2570 LIKELY is nonzero if it is extremely likely that the branch will be
2571 taken and THREAD_IF_TRUE is set. This is used for the branch at the
2572 end of a loop back up to the top.
2574 OWN_THREAD and OWN_OPPOSITE_THREAD are true if we are the only user of the
2575 thread. I.e., it is the fallthrough code of our jump or the target of the
2576 jump when we are the only jump going there.
2578 If OWN_THREAD is false, it must be the "true" thread of a jump. In that
2579 case, we can only take insns from the head of the thread for our delay
2580 slot. We then adjust the jump to point after the insns we have taken. */
2583 fill_slots_from_thread (rtx insn, rtx condition, rtx thread,
2584 rtx opposite_thread, int likely, int thread_if_true,
2585 int own_thread, int slots_to_fill,
2586 int *pslots_filled, rtx delay_list)
2589 struct resources opposite_needed, set, needed;
2595 /* Validate our arguments. */
2596 gcc_assert (condition != const_true_rtx || thread_if_true);
2597 gcc_assert (own_thread || thread_if_true);
2599 flags = get_jump_flags (insn, JUMP_LABEL (insn));
2601 /* If our thread is the end of the subroutine, we can't get any delay slot insns from it. */
2603 if (thread == NULL_RTX || ANY_RETURN_P (thread))
2606 /* If this is an unconditional branch, nothing is needed at the
2607 opposite thread. Otherwise, compute what is needed there. */
2608 if (condition == const_true_rtx)
2609 CLEAR_RESOURCE (&opposite_needed);
2611 mark_target_live_regs (get_insns (), opposite_thread, &opposite_needed);
2613 /* If the insn at THREAD can be split, do it here to avoid having to
2614 update THREAD and NEW_THREAD if it is done in the loop below. Also
2615 initialize NEW_THREAD. */
2617 new_thread = thread = try_split (PATTERN (thread), thread, 0);
2619 /* Scan insns at THREAD. We are looking for an insn that can be removed
2620 from THREAD (it neither sets nor references resources that were set
2621 ahead of it and it doesn't set anything needed by the insns ahead of
2622 it) and that either can be placed in an annulling insn or isn't
2623 needed at OPPOSITE_THREAD. */
2625 CLEAR_RESOURCE (&needed);
2626 CLEAR_RESOURCE (&set);
2628 /* If we do not own this thread, we must stop as soon as we find
2629 something that we can't put in a delay slot, since all we can do
2630 is branch into THREAD at a later point. Therefore, labels stop
2631 the search if this is not the `true' thread. */
2633 for (trial = thread;
2634 ! stop_search_p (trial, ! thread_if_true) && (! lose || own_thread);
2635 trial = next_nonnote_insn (trial))
2639 /* If we have passed a label, we no longer own this thread. */
2640 if (LABEL_P (trial))
2646 pat = PATTERN (trial);
2647 if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
2650 /* If TRIAL conflicts with the insns ahead of it, we lose. Also,
2651 don't separate or copy insns that set and use CC0. */
2652 if (! insn_references_resource_p (trial, &set, true)
2653 && ! insn_sets_resource_p (trial, &set, true)
2654 && ! insn_sets_resource_p (trial, &needed, true)
2656 && ! (reg_mentioned_p (cc0_rtx, pat)
2657 && (! own_thread || ! sets_cc0_p (pat)))
2659 && ! can_throw_internal (trial))
2663 /* If TRIAL is redundant with some insn before INSN, we don't
2664 actually need to add it to the delay list; we can merely pretend we did. */
2666 if ((prior_insn = redundant_insn (trial, insn, delay_list)))
2668 fix_reg_dead_note (prior_insn, insn);
2671 update_block (trial, thread);
2672 if (trial == thread)
2674 thread = next_active_insn (thread);
2675 if (new_thread == trial)
2676 new_thread = thread;
2679 delete_related_insns (trial);
2683 update_reg_unused_notes (prior_insn, trial);
2684 new_thread = next_active_insn (trial);
2690 /* There are two ways we can win: If TRIAL doesn't set anything
2691 needed at the opposite thread and can't trap, or if it can
2692 go into an annulled delay slot. */
2694 && (condition == const_true_rtx
2695 || (! insn_sets_resource_p (trial, &opposite_needed, true)
2696 && ! may_trap_or_fault_p (pat))))
2699 trial = try_split (pat, trial, 0);
2700 if (new_thread == old_trial)
2702 if (thread == old_trial)
2704 pat = PATTERN (trial);
2705 if (eligible_for_delay (insn, *pslots_filled, trial, flags))
2709 #ifdef ANNUL_IFTRUE_SLOTS
2712 #ifdef ANNUL_IFFALSE_SLOTS
2718 trial = try_split (pat, trial, 0);
2719 if (new_thread == old_trial)
2721 if (thread == old_trial)
2723 pat = PATTERN (trial);
2724 if ((must_annul || delay_list == NULL) && (thread_if_true
2725 ? check_annul_list_true_false (0, delay_list)
2726 && eligible_for_annul_false (insn, *pslots_filled, trial, flags)
2727 : check_annul_list_true_false (1, delay_list)
2728 && eligible_for_annul_true (insn, *pslots_filled, trial, flags)))
2736 if (reg_mentioned_p (cc0_rtx, pat))
2737 link_cc0_insns (trial);
2740 /* If we own this thread, delete the insn. If this is the
2741 destination of a branch, show that a basic block status
2742 may have been updated. In any case, mark the new
2743 starting point of this thread. */
2748 update_block (trial, thread);
2749 if (trial == thread)
2751 thread = next_active_insn (thread);
2752 if (new_thread == trial)
2753 new_thread = thread;
2756 /* We are moving this insn, not deleting it. We must
2757 temporarily increment the use count on any referenced
2758 label lest it be deleted by delete_related_insns. */
2759 for (note = REG_NOTES (trial);
2761 note = XEXP (note, 1))
2762 if (REG_NOTE_KIND (note) == REG_LABEL_OPERAND
2763 || REG_NOTE_KIND (note) == REG_LABEL_TARGET)
2765 /* REG_LABEL_OPERAND could be
2766 NOTE_INSN_DELETED_LABEL too. */
2767 if (LABEL_P (XEXP (note, 0)))
2768 LABEL_NUSES (XEXP (note, 0))++;
2770 gcc_assert (REG_NOTE_KIND (note)
2771 == REG_LABEL_OPERAND);
2773 if (jump_to_label_p (trial))
2774 LABEL_NUSES (JUMP_LABEL (trial))++;
2776 delete_related_insns (trial);
2778 for (note = REG_NOTES (trial);
2780 note = XEXP (note, 1))
2781 if (REG_NOTE_KIND (note) == REG_LABEL_OPERAND
2782 || REG_NOTE_KIND (note) == REG_LABEL_TARGET)
2784 /* REG_LABEL_OPERAND could be
2785 NOTE_INSN_DELETED_LABEL too. */
2786 if (LABEL_P (XEXP (note, 0)))
2787 LABEL_NUSES (XEXP (note, 0))--;
2789 gcc_assert (REG_NOTE_KIND (note)
2790 == REG_LABEL_OPERAND);
2792 if (jump_to_label_p (trial))
2793 LABEL_NUSES (JUMP_LABEL (trial))--;
2796 new_thread = next_active_insn (trial);
2798 temp = own_thread ? trial : copy_rtx (trial);
2800 INSN_FROM_TARGET_P (temp) = 1;
2802 delay_list = add_to_delay_list (temp, delay_list);
2804 if (slots_to_fill == ++(*pslots_filled))
2806 /* Even though we have filled all the slots, we
2807 may be branching to a location that has a
2808 redundant insn. Skip any if so. */
2809 while (new_thread && ! own_thread
2810 && ! insn_sets_resource_p (new_thread, &set, true)
2811 && ! insn_sets_resource_p (new_thread, &needed,
2813 && ! insn_references_resource_p (new_thread,
2816 = redundant_insn (new_thread, insn,
2819 /* We know we do not own the thread, so no need
2820 to call update_block and delete_insn. */
2821 fix_reg_dead_note (prior_insn, insn);
2822 update_reg_unused_notes (prior_insn, new_thread);
2823 new_thread = next_active_insn (new_thread);
2833 /* This insn can't go into a delay slot. */
2835 mark_set_resources (trial, &set, 0, MARK_SRC_DEST_CALL);
2836 mark_referenced_resources (trial, &needed, true);
2838 /* Ensure we don't put insns between the setting of cc and the comparison
2839 by moving a setting of cc into an earlier delay slot since these insns
2840 could clobber the condition code. */
2843 /* If this insn is a register-register copy and the next insn has
2844 a use of our destination, change it to use our source. That way,
2845 it will become a candidate for our delay slot the next time
2846 through this loop. This case occurs commonly in loops that
2849 We could check for more complex cases than those tested below,
2850 but it doesn't seem worth it. It might also be a good idea to try
2851 to swap the two insns. That might do better.
2853 We can't do this if the next insn modifies our destination, because
2854 that would make the replacement into the insn invalid. We also can't
2855 do this if it modifies our source, because it might be an earlyclobber
2856 operand. This latter test also prevents updating the contents of
2857 a PRE_INC. We also can't do this if there's overlap of source and
2858 destination. Overlap may happen for larger-than-register-size modes. */
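/* A sketch with hypothetical registers:

	r2 = r1				r2 = r1
	r4 = r2 + r3	becomes		r4 = r1 + r3

   After the replacement the copy no longer feeds the following insn,
   so the next iteration of this loop may move the copy into the delay
   slot.  */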
2860 if (NONJUMP_INSN_P (trial) && GET_CODE (pat) == SET
2861 && REG_P (SET_SRC (pat))
2862 && REG_P (SET_DEST (pat))
2863 && !reg_overlap_mentioned_p (SET_DEST (pat), SET_SRC (pat)))
2865 rtx next = next_nonnote_insn (trial);
2867 if (next && NONJUMP_INSN_P (next)
2868 && GET_CODE (PATTERN (next)) != USE
2869 && ! reg_set_p (SET_DEST (pat), next)
2870 && ! reg_set_p (SET_SRC (pat), next)
2871 && reg_referenced_p (SET_DEST (pat), PATTERN (next))
2872 && ! modified_in_p (SET_DEST (pat), next))
2873 validate_replace_rtx (SET_DEST (pat), SET_SRC (pat), next);
2877 /* If we stopped on a branch insn that has delay slots, see if we can
2878 steal some of the insns in those slots. */
2879 if (trial && NONJUMP_INSN_P (trial)
2880 && GET_CODE (PATTERN (trial)) == SEQUENCE
2881 && JUMP_P (XVECEXP (PATTERN (trial), 0, 0)))
2883 /* If this is the `true' thread, we will want to follow the jump,
2884 so we can only do this if we have taken everything up to here. */
2885 if (thread_if_true && trial == new_thread)
2888 = steal_delay_list_from_target (insn, condition, PATTERN (trial),
2889 delay_list, &set, &needed,
2890 &opposite_needed, slots_to_fill,
2891 pslots_filled, &must_annul,
2893 /* If we owned the thread and are told that it branched
2894 elsewhere, make sure we own the thread at the new location. */
2895 if (own_thread && trial != new_thread)
2896 own_thread = own_thread_p (new_thread, new_thread, 0);
2898 else if (! thread_if_true)
2900 = steal_delay_list_from_fallthrough (insn, condition,
2902 delay_list, &set, &needed,
2903 &opposite_needed, slots_to_fill,
2904 pslots_filled, &must_annul);
2907 /* If we haven't found anything for this delay slot and it is very
2908 likely that the branch will be taken, see if the insn at our target
2909 increments or decrements a register with an increment that does not
2910 depend on the destination register. If so, try to place the opposite
2911 arithmetic insn after the jump insn and put the arithmetic insn in the
2912 delay slot. If we can't do this, return. */
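/* A sketch with hypothetical registers, for a likely-taken loop branch
   whose target begins with `r2 = r2 + 4':

	br L1		becomes		br L1'		; slot: r2 = r2 + 4
     L1: r2 = r2 + 4			r2 = r2 - 4
	...			     L1': ...

   The increment now executes in the delay slot on every pass; the
   compensating decrement after the branch keeps the unlikely
   fall-through path correct, and the branch is redirected past the
   original increment.  */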
2913 if (delay_list == 0 && likely
2914 && new_thread && !ANY_RETURN_P (new_thread)
2915 && NONJUMP_INSN_P (new_thread)
2916 && GET_CODE (PATTERN (new_thread)) != ASM_INPUT
2917 && asm_noperands (PATTERN (new_thread)) < 0)
2919 rtx pat = PATTERN (new_thread);
2924 pat = PATTERN (trial);
2926 if (!NONJUMP_INSN_P (trial)
2927 || GET_CODE (pat) != SET
2928 || ! eligible_for_delay (insn, 0, trial, flags)
2929 || can_throw_internal (trial))
2932 dest = SET_DEST (pat), src = SET_SRC (pat);
2933 if ((GET_CODE (src) == PLUS || GET_CODE (src) == MINUS)
2934 && rtx_equal_p (XEXP (src, 0), dest)
2935 && (!FLOAT_MODE_P (GET_MODE (src))
2936 || flag_unsafe_math_optimizations)
2937 && ! reg_overlap_mentioned_p (dest, XEXP (src, 1))
2938 && ! side_effects_p (pat))
2940 rtx other = XEXP (src, 1);
2944 /* If this is a constant adjustment, use the same code with
2945 the negated constant. Otherwise, reverse the sense of the arithmetic. */
2947 if (CONST_INT_P (other))
2948 new_arith = gen_rtx_fmt_ee (GET_CODE (src), GET_MODE (src), dest,
2949 negate_rtx (GET_MODE (src), other));
2951 new_arith = gen_rtx_fmt_ee (GET_CODE (src) == PLUS ? MINUS : PLUS,
2952 GET_MODE (src), dest, other);
2954 ninsn = emit_insn_after (gen_rtx_SET (VOIDmode, dest, new_arith),
2957 if (recog_memoized (ninsn) < 0
2958 || (extract_insn (ninsn), ! constrain_operands (1)))
2960 delete_related_insns (ninsn);
2966 update_block (trial, thread);
2967 if (trial == thread)
2969 thread = next_active_insn (thread);
2970 if (new_thread == trial)
2971 new_thread = thread;
2973 delete_related_insns (trial);
2976 new_thread = next_active_insn (trial);
2978 ninsn = own_thread ? trial : copy_rtx (trial);
2980 INSN_FROM_TARGET_P (ninsn) = 1;
2982 delay_list = add_to_delay_list (ninsn, NULL_RTX);
2987 if (delay_list && must_annul)
2988 INSN_ANNULLED_BRANCH_P (insn) = 1;
2990 /* If we are to branch into the middle of this thread, find an appropriate
2991 label or make a new one if none, and redirect INSN to it. If we hit the
2992 end of the function, use the end-of-function label. */
2993 if (new_thread != thread)
2997 gcc_assert (thread_if_true);
2999 if (new_thread && JUMP_P (new_thread)
3000 && (simplejump_p (new_thread)
3001 || GET_CODE (PATTERN (new_thread)) == RETURN)
3002 && redirect_with_delay_list_safe_p (insn,
3003 JUMP_LABEL (new_thread),
3005 new_thread = follow_jumps (JUMP_LABEL (new_thread));
3007 if (ANY_RETURN_P (new_thread))
3008 label = find_end_label ();
3009 else if (LABEL_P (new_thread))
3012 label = get_label_before (new_thread);
3015 reorg_redirect_jump (insn, label);
3021 /* Make another attempt to find insns to place in delay slots.
3023 We previously looked for insns located in front of the delay insn
3024 and, for non-jump delay insns, located behind the delay insn.
3026 Here only try to schedule jump insns and try to move insns from either
3027 the target or the following insns into the delay slot. If annulling is
3028 supported, we are likely to do this. Otherwise, we can do this only if safe. */
3032 fill_eager_delay_slots (void)
3036 int num_unfilled_slots = unfilled_slots_next - unfilled_slots_base;
3038 for (i = 0; i < num_unfilled_slots; i++)
3041 rtx target_label, insn_at_target, fallthrough_insn;
3044 int own_fallthrough;
3045 int prediction, slots_to_fill, slots_filled;
3047 insn = unfilled_slots_base[i];
3049 || INSN_DELETED_P (insn)
3051 || ! (condjump_p (insn) || condjump_in_parallel_p (insn)))
3054 slots_to_fill = num_delay_slots (insn);
3055 /* Some machine descriptions define instructions to have
3056 delay slots only in certain circumstances, which may depend on
3057 nearby insns (which change due to reorg's actions).
3059 For example, the PA port normally has delay slots for unconditional jumps.
3062 However, the PA port claims such jumps do not have a delay slot
3063 if they are immediate successors of certain CALL_INSNs. This
3064 allows the port to favor filling the delay slot of the call with
3065 the unconditional jump. */
3066 if (slots_to_fill == 0)
3070 target_label = JUMP_LABEL (insn);
3071 condition = get_branch_condition (insn, target_label);
3076 /* Get the next active fallthrough and target insns and see if we own
3077 them. Then see whether the branch is likely true. We don't need
3078 to do a lot of this for unconditional branches. */
3080 insn_at_target = first_active_target_insn (target_label);
3081 own_target = own_thread_p (target_label, target_label, 0);
3083 if (condition == const_true_rtx)
3085 own_fallthrough = 0;
3086 fallthrough_insn = 0;
3091 fallthrough_insn = next_active_insn (insn);
3092 own_fallthrough = own_thread_p (NEXT_INSN (insn), NULL_RTX, 1);
3093 prediction = mostly_true_jump (insn, condition);
3096 /* If this insn is expected to branch, first try to get insns from our
3097 target, then our fallthrough insns. If it is not expected to branch,
3098 try the other order. */
3103 = fill_slots_from_thread (insn, condition, insn_at_target,
3104 fallthrough_insn, prediction == 2, 1,
3106 slots_to_fill, &slots_filled, delay_list);
3108 if (delay_list == 0 && own_fallthrough)
3110 /* Even though we didn't find anything for delay slots,
3111 we might have found a redundant insn which we deleted
3112 from the thread that was filled. So we have to recompute
3113 the next insn at the target. */
3114 target_label = JUMP_LABEL (insn);
3115 insn_at_target = first_active_target_insn (target_label);
3118 = fill_slots_from_thread (insn, condition, fallthrough_insn,
3119 insn_at_target, 0, 0,
3121 slots_to_fill, &slots_filled,
3127 if (own_fallthrough)
3129 = fill_slots_from_thread (insn, condition, fallthrough_insn,
3130 insn_at_target, 0, 0,
3132 slots_to_fill, &slots_filled,
3135 if (delay_list == 0)
3137 = fill_slots_from_thread (insn, condition, insn_at_target,
3138 next_active_insn (insn), 0, 1,
3140 slots_to_fill, &slots_filled,
3145 unfilled_slots_base[i]
3146 = emit_delay_sequence (insn, delay_list, slots_filled);
3148 if (slots_to_fill == slots_filled)
3149 unfilled_slots_base[i] = 0;
3151 note_delay_statistics (slots_filled, 1);
3155 static void delete_computation (rtx insn);
3157 /* Recursively delete prior insns that compute the value (used only by INSN
3158 which the caller is deleting) stored in the register mentioned by NOTE
3159 which is a REG_DEAD note associated with INSN. */
3162 delete_prior_computation (rtx note, rtx insn)
3165 rtx reg = XEXP (note, 0);
3167 for (our_prev = prev_nonnote_insn (insn);
3168 our_prev && (NONJUMP_INSN_P (our_prev)
3169 || CALL_P (our_prev));
3170 our_prev = prev_nonnote_insn (our_prev))
3172 rtx pat = PATTERN (our_prev);
3174 /* If we reach a CALL which is not calling a const function
3175 or the callee pops the arguments, then give up. */
3176 if (CALL_P (our_prev)
3177 && (! RTL_CONST_CALL_P (our_prev)
3178 || GET_CODE (pat) != SET || GET_CODE (SET_SRC (pat)) != CALL))
3181 /* If we reach a SEQUENCE, it is too complex to try to
3182 do anything with it, so give up. We can be run during
3183 and after reorg, so SEQUENCE rtl can legitimately show up here. */
3185 if (GET_CODE (pat) == SEQUENCE)
3188 if (GET_CODE (pat) == USE
3189 && NONJUMP_INSN_P (XEXP (pat, 0)))
3190 /* reorg creates USEs that look like this. We leave them
3191 alone because reorg needs them for its own purposes. */
3194 if (reg_set_p (reg, pat))
3196 if (side_effects_p (pat) && !CALL_P (our_prev))
3199 if (GET_CODE (pat) == PARALLEL)
3201 /* If we find a SET of something else, we can't delete the insn. */
3206 for (i = 0; i < XVECLEN (pat, 0); i++)
3208 rtx part = XVECEXP (pat, 0, i);
3210 if (GET_CODE (part) == SET
3211 && SET_DEST (part) != reg)
3215 if (i == XVECLEN (pat, 0))
3216 delete_computation (our_prev);
3218 else if (GET_CODE (pat) == SET
3219 && REG_P (SET_DEST (pat)))
3221 int dest_regno = REGNO (SET_DEST (pat));
3222 int dest_endregno = END_REGNO (SET_DEST (pat));
3223 int regno = REGNO (reg);
3224 int endregno = END_REGNO (reg);
3226 if (dest_regno >= regno
3227 && dest_endregno <= endregno)
3228 delete_computation (our_prev);
3230 /* We may have a multi-word hard register and some, but not
3231 all, of the words of the register are needed in subsequent
3232 insns. Write REG_UNUSED notes for those parts that were not needed. */
3234 else if (dest_regno <= regno
3235 && dest_endregno >= endregno)
3239 add_reg_note (our_prev, REG_UNUSED, reg);
3241 for (i = dest_regno; i < dest_endregno; i++)
3242 if (! find_regno_note (our_prev, REG_UNUSED, i))
3245 if (i == dest_endregno)
3246 delete_computation (our_prev);
3253 /* If PAT references the register that dies here, it is an
3254 additional use. Hence any prior SET isn't dead. However, this
3255 insn becomes the new place for the REG_DEAD note. */
3256 if (reg_overlap_mentioned_p (reg, pat))
3258 XEXP (note, 1) = REG_NOTES (our_prev);
3259 REG_NOTES (our_prev) = note;
3265 /* Delete INSN and recursively delete insns that compute values used only
3266 by INSN. This uses the REG_DEAD notes computed during flow analysis.
3268 Look at all our REG_DEAD notes. If a previous insn does nothing other
3269 than set a register that dies in this insn, we can delete that insn as well.
3272 On machines with CC0, if CC0 is used in this insn, we may be able to
3273 delete the insn that set it. */
3276 delete_computation (rtx insn)
3281 if (reg_referenced_p (cc0_rtx, PATTERN (insn)))
3283 rtx prev = prev_nonnote_insn (insn);
3284 /* We assume that at this stage
3285 CC's are always set explicitly
3286 and always immediately before the jump that
3287 will use them. So if the previous insn
3288 exists to set the CC's, delete it
3289 (unless it performs auto-increments, etc.). */
3290 if (prev && NONJUMP_INSN_P (prev)
3291 && sets_cc0_p (PATTERN (prev)))
3293 if (sets_cc0_p (PATTERN (prev)) > 0
3294 && ! side_effects_p (PATTERN (prev)))
3295 delete_computation (prev);
3297 /* Otherwise, show that cc0 won't be used. */
3298 add_reg_note (prev, REG_UNUSED, cc0_rtx);
3303 for (note = REG_NOTES (insn); note; note = next)
3305 next = XEXP (note, 1);
3307 if (REG_NOTE_KIND (note) != REG_DEAD
3308 /* Verify that the REG_NOTE is legitimate. */
3309 || !REG_P (XEXP (note, 0)))
3312 delete_prior_computation (note, insn);
3315 delete_related_insns (insn);
3318 /* If all INSN does is set the pc, delete it,
3319 and delete the insn that set the condition codes for it
3320 if that's what the previous thing was. */
3323 delete_jump (rtx insn)
3325 rtx set = single_set (insn);
3327 if (set && GET_CODE (SET_DEST (set)) == PC)
3328 delete_computation (insn);
3332 /* Once we have tried two ways to fill a delay slot, make a pass over the
3333 code to try to improve the results and to do such things as more jump threading. */
3337 relax_delay_slots (rtx first)
3339 rtx insn, next, pat;
3340 rtx trial, delay_insn, target_label;
3342 /* Look at every JUMP_INSN and see if we can improve it. */
3343 for (insn = first; insn; insn = next)
3347 next = next_active_insn (insn);
3349 /* If this is a jump insn, see if it now jumps to a jump, jumps to
3350 the next insn, or jumps to a label that is not the last of a
3351 group of consecutive labels. */
3353 && (condjump_p (insn) || condjump_in_parallel_p (insn))
3354 && !ANY_RETURN_P (target_label = JUMP_LABEL (insn)))
3356 target_label = skip_consecutive_labels (follow_jumps (target_label));
3357 if (ANY_RETURN_P (target_label))
3358 target_label = find_end_label ();
3360 if (target_label && next_active_insn (target_label) == next
3361 && ! condjump_in_parallel_p (insn))
3367 if (target_label && target_label != JUMP_LABEL (insn))
3368 reorg_redirect_jump (insn, target_label);
3370 /* See if this jump conditionally branches around an unconditional
3371 jump. If so, invert this jump and point it to the target of the second jump. */
3373 if (next && JUMP_P (next)
3374 && any_condjump_p (insn)
3375 && (simplejump_p (next) || GET_CODE (PATTERN (next)) == RETURN)
3377 && next_active_insn (target_label) == next_active_insn (next)
3378 && no_labels_between_p (insn, next))
3380 rtx label = JUMP_LABEL (next);
3382 /* Be careful how we do this to avoid deleting code or
3383 labels that are momentarily dead. See similar optimization in jump.c.
3386 We also need to ensure we properly handle the case when
3387 invert_jump fails. */
3389 ++LABEL_NUSES (target_label);
3390 if (!ANY_RETURN_P (label))
3391 ++LABEL_NUSES (label);
3393 if (invert_jump (insn, label, 1))
3395 delete_related_insns (next);
3399 if (!ANY_RETURN_P (label))
3400 --LABEL_NUSES (label);
3402 if (--LABEL_NUSES (target_label) == 0)
3403 delete_related_insns (target_label);
3409 /* If this is an unconditional jump and the previous insn is a
3410 conditional jump, try reversing the condition of the previous
3411 insn and swapping our targets. The next pass might be able to fill the slots.
3414 Don't do this if we expect the conditional branch to be true, because
3415 we would then be making the more common case longer. */
3418 && (simplejump_p (insn) || GET_CODE (PATTERN (insn)) == RETURN)
3419 && (other = prev_active_insn (insn)) != 0
3420 && any_condjump_p (other)
3421 && no_labels_between_p (other, insn)
3422 && 0 > mostly_true_jump (other,
3423 get_branch_condition (other,
3424 JUMP_LABEL (other))))
3426 rtx other_target = JUMP_LABEL (other);
3427 target_label = JUMP_LABEL (insn);
3429 if (invert_jump (other, target_label, 0))
3430 reorg_redirect_jump (insn, other_target);
3433 /* Now look only at cases where we have filled a delay slot. */
3434 if (!NONJUMP_INSN_P (insn)
3435 || GET_CODE (PATTERN (insn)) != SEQUENCE)
3438 pat = PATTERN (insn);
3439 delay_insn = XVECEXP (pat, 0, 0);
3441 /* See if the first insn in the delay slot is redundant with some
3442 previous insn. Remove it from the delay slot if so; then set up
3443 to reprocess this insn. */
3444 if (redundant_insn (XVECEXP (pat, 0, 1), delay_insn, 0))
3446 delete_from_delay_slot (XVECEXP (pat, 0, 1));
3447 next = prev_active_insn (next);
3451 /* See if we have a RETURN insn with a filled delay slot followed
3452 by a RETURN insn with an unfilled delay slot. If so, we can delete
3453 the first RETURN (but not its delay insn). This gives the same
3454 effect in fewer instructions.
3456 Only do so if optimizing for size since this results in slower, but smaller code. */
3458 if (optimize_function_for_size_p (cfun)
3459 && GET_CODE (PATTERN (delay_insn)) == RETURN
3462 && GET_CODE (PATTERN (next)) == RETURN)
3467 /* Delete the RETURN and just execute the delay list insns.
3469 We do this by deleting the INSN containing the SEQUENCE, then
3470 re-emitting the insns separately, and then deleting the RETURN.
3471 This allows the count of the jump target to be properly decremented.
3474 Note that we need to change the INSN_UID of the re-emitted insns
3475 since it is used to hash the insns for mark_target_live_regs and
3476 the re-emitted insns will no longer be wrapped up in a SEQUENCE.
3478 Clear the from target bit, since these insns are no longer in delay slots. */
3480 for (i = 0; i < XVECLEN (pat, 0); i++)
3481 INSN_FROM_TARGET_P (XVECEXP (pat, 0, i)) = 0;
3483 trial = PREV_INSN (insn);
3484 delete_related_insns (insn);
3485 gcc_assert (GET_CODE (pat) == SEQUENCE);
3486 add_insn_after (delay_insn, trial, NULL);
3488 for (i = 1; i < XVECLEN (pat, 0); i++)
3489 after = emit_copy_of_insn_after (XVECEXP (pat, 0, i), after);
3490 delete_scheduled_jump (delay_insn);
3494 /* Now look only at the cases where we have a filled JUMP_INSN. */
3495 if (!JUMP_P (XVECEXP (PATTERN (insn), 0, 0))
3496 || ! (condjump_p (XVECEXP (PATTERN (insn), 0, 0))
3497 || condjump_in_parallel_p (XVECEXP (PATTERN (insn), 0, 0))))
3500 target_label = JUMP_LABEL (delay_insn);
3502 if (!ANY_RETURN_P (target_label))
3504 /* If this jump goes to another unconditional jump, thread it, but
3505 don't convert a jump into a RETURN here. */
3506 trial = skip_consecutive_labels (follow_jumps (target_label));
3507 if (ANY_RETURN_P (trial))
3508 trial = find_end_label ();
3510 if (trial && trial != target_label
3511 && redirect_with_delay_slots_safe_p (delay_insn, trial, insn))
3513 reorg_redirect_jump (delay_insn, trial);
3514 target_label = trial;
3517 /* If the first insn at TARGET_LABEL is redundant with a previous
3518 insn, redirect the jump to the following insn and process again.
3519 We use next_real_insn instead of next_active_insn so we
3520 don't skip USE-markers, or we'll end up with incorrect liveness info. */
3522 trial = next_real_insn (target_label);
3523 if (trial && GET_CODE (PATTERN (trial)) != SEQUENCE
3524 && redundant_insn (trial, insn, 0)
3525 && ! can_throw_internal (trial))
3527 /* Figure out where to emit the special USE insn so we don't
3528 later incorrectly compute register live/death info. */
3529 rtx tmp = next_active_insn (trial);
3531 tmp = find_end_label ();
3535 /* Insert the special USE insn and update dataflow info. */
3536 update_block (trial, tmp);
3538 /* Now emit a label before the special USE insn, and
3539 redirect our jump to the new label. */
3540 target_label = get_label_before (PREV_INSN (tmp));
3541 reorg_redirect_jump (delay_insn, target_label);
3547 /* Similarly, if it is an unconditional jump with one insn in its
3548 delay list and that insn is redundant, thread the jump. */
3549 if (trial && GET_CODE (PATTERN (trial)) == SEQUENCE
3550 && XVECLEN (PATTERN (trial), 0) == 2
3551 && JUMP_P (XVECEXP (PATTERN (trial), 0, 0))
3552 && (simplejump_p (XVECEXP (PATTERN (trial), 0, 0))
3553 || GET_CODE (PATTERN (XVECEXP (PATTERN (trial), 0, 0))) == RETURN)
3554 && redundant_insn (XVECEXP (PATTERN (trial), 0, 1), insn, 0))
3556 target_label = JUMP_LABEL (XVECEXP (PATTERN (trial), 0, 0));
3557 if (ANY_RETURN_P (target_label))
3558 target_label = find_end_label ();
3561 && redirect_with_delay_slots_safe_p (delay_insn, target_label,
3564 reorg_redirect_jump (delay_insn, target_label);
3571 if (! INSN_ANNULLED_BRANCH_P (delay_insn)
3572 && prev_active_insn (target_label) == insn
3573 && ! condjump_in_parallel_p (delay_insn)
3575 /* If the last insn in the delay slot sets CC0 for some insn,
3576 various code assumes that it is in a delay slot. We could
3577 put it back where it belonged and delete the register notes,
3578 but it doesn't seem worthwhile in this uncommon case. */
3579 && ! find_reg_note (XVECEXP (pat, 0, XVECLEN (pat, 0) - 1),
3580 REG_CC_USER, NULL_RTX)
3587 /* All this insn does is execute its delay list and jump to the
3588 following insn. So delete the jump and just execute the delay list insns.
3591 We do this by deleting the INSN containing the SEQUENCE, then
3592 re-emitting the insns separately, and then deleting the jump.
3593 This allows the count of the jump target to be properly decremented.
3596 Note that we need to change the INSN_UID of the re-emitted insns
3597 since it is used to hash the insns for mark_target_live_regs and
3598 the re-emitted insns will no longer be wrapped up in a SEQUENCE.
3600 Clear the from target bit, since these insns are no longer in delay slots. */
3602 for (i = 0; i < XVECLEN (pat, 0); i++)
3603 INSN_FROM_TARGET_P (XVECEXP (pat, 0, i)) = 0;
3605 trial = PREV_INSN (insn);
3606 delete_related_insns (insn);
3607 gcc_assert (GET_CODE (pat) == SEQUENCE);
3608 add_insn_after (delay_insn, trial, NULL);
3610 for (i = 1; i < XVECLEN (pat, 0); i++)
3611 after = emit_copy_of_insn_after (XVECEXP (pat, 0, i), after);
3612 delete_scheduled_jump (delay_insn);
3616 /* See if this is an unconditional jump around a single insn which is
3617 identical to the one in its delay slot. In this case, we can just
3618 delete the branch and the insn in its delay slot. */
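/* Sketch (illustrative):

	br L1	; slot: insn_a
	insn_a			becomes		insn_a
     L1: ...				     L1: ...

   Whether or not the branch was taken, INSN_A executed exactly once and
   control ended up at L1, so the branch and its delay slot copy can be
   removed.  */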
3619 if (next && NONJUMP_INSN_P (next)
3620 && prev_label (next_active_insn (next)) == target_label
3621 && simplejump_p (insn)
3622 && XVECLEN (pat, 0) == 2
3623 && rtx_equal_p (PATTERN (next), PATTERN (XVECEXP (pat, 0, 1))))
3625 delete_related_insns (insn);
3629 /* See if this jump (with its delay slots) conditionally branches
3630 around an unconditional jump (without delay slots). If so, invert
3631 this jump and point it to the target of the second jump. We cannot
3632 do this for annulled jumps, though. Again, don't convert a jump to a RETURN here. */
3634 if (! INSN_ANNULLED_BRANCH_P (delay_insn)
3635 && any_condjump_p (delay_insn)
3636 && next && JUMP_P (next)
3637 && (simplejump_p (next) || GET_CODE (PATTERN (next)) == RETURN)
3638 && next_active_insn (target_label) == next_active_insn (next)
3639 && no_labels_between_p (insn, next))
3641 rtx label = JUMP_LABEL (next);
3642 rtx old_label = JUMP_LABEL (delay_insn);
3644 if (ANY_RETURN_P (label))
3645 label = find_end_label ();
3647 /* find_end_label can generate a new label. Check this first. */
3649 && no_labels_between_p (insn, next)
3650 && redirect_with_delay_slots_safe_p (delay_insn, label, insn))
3652 /* Be careful how we do this to avoid deleting code or labels
3653 that are momentarily dead. See similar optimization in jump.c. */
3656 ++LABEL_NUSES (old_label);
3658 if (invert_jump (delay_insn, label, 1))
3662 /* Must update the INSN_FROM_TARGET_P bits now that
3663 the branch is reversed, so that mark_target_live_regs
3664 will handle the delay slot insn correctly. */
3665 for (i = 1; i < XVECLEN (PATTERN (insn), 0); i++)
3667 rtx slot = XVECEXP (PATTERN (insn), 0, i);
3668 INSN_FROM_TARGET_P (slot) = ! INSN_FROM_TARGET_P (slot);
3671 delete_related_insns (next);
3675 if (old_label && --LABEL_NUSES (old_label) == 0)
3676 delete_related_insns (old_label);
3681 /* If we own the thread opposite the way this insn branches, see if we
3682 can merge its delay slots with following insns. */
3683 if (INSN_FROM_TARGET_P (XVECEXP (pat, 0, 1))
3684 && own_thread_p (NEXT_INSN (insn), 0, 1))
3685 try_merge_delay_insns (insn, next);
3686 else if (! INSN_FROM_TARGET_P (XVECEXP (pat, 0, 1))
3687 && own_thread_p (target_label, target_label, 0))
3688 try_merge_delay_insns (insn, next_active_insn (target_label));
3690 /* If we get here, we haven't deleted INSN. But we may have deleted
3691 NEXT, so recompute it. */
3692 next = next_active_insn (insn);
3698 /* Look for filled jumps to the end of function label. We can try to convert
3699 them into RETURN insns if the insns in the delay slot are valid for the RETURN as well. */
3703 make_return_insns (rtx first)
3705 rtx insn, jump_insn, pat;
3706 rtx real_return_label = end_of_function_label;
3709 #ifdef DELAY_SLOTS_FOR_EPILOGUE
3710 /* If a previous pass filled delay slots in the epilogue, things get a
3711 bit more complicated, as those filler insns would generally (without
3712 data flow analysis) have to be executed after any existing branch
3713 delay slot filler insns. It is also unknown whether such a
3714 transformation would actually be profitable. Note that the existing
3715 code only cares for branches with (some) filled delay slots. */
3716 if (crtl->epilogue_delay_list != NULL)
3720 /* See if there is a RETURN insn in the function other than the one we
3721 made for END_OF_FUNCTION_LABEL. If so, set up anything we can't change
3722 into a RETURN to jump to it. */
3723 for (insn = first; insn; insn = NEXT_INSN (insn))
3724 if (JUMP_P (insn) && GET_CODE (PATTERN (insn)) == RETURN)
3726 real_return_label = get_label_before (insn);
3730 /* Show an extra usage of REAL_RETURN_LABEL so it won't go away if it
3731 was equal to END_OF_FUNCTION_LABEL. */
3732 LABEL_NUSES (real_return_label)++;
3734 /* Clear the list of insns to fill so we can use it. */
3735 obstack_free (&unfilled_slots_obstack, unfilled_firstobj);
3737 for (insn = first; insn; insn = NEXT_INSN (insn))
3741 /* Only look at filled JUMP_INSNs that go to the end of function label. */
3743 if (!NONJUMP_INSN_P (insn)
3744 || GET_CODE (PATTERN (insn)) != SEQUENCE
3745 || !JUMP_P (XVECEXP (PATTERN (insn), 0, 0))
3746 || JUMP_LABEL (XVECEXP (PATTERN (insn), 0, 0)) != end_of_function_label)
3749 pat = PATTERN (insn);
3750 jump_insn = XVECEXP (pat, 0, 0);
3752 /* If we can't make the jump into a RETURN, try to redirect it to the best
3753 RETURN and go on to the next insn. */
3754 if (! reorg_redirect_jump (jump_insn, ret_rtx))
3756 /* Make sure redirecting the jump will not invalidate the delay slots. */
3758 if (redirect_with_delay_slots_safe_p (jump_insn,
3761 reorg_redirect_jump (jump_insn, real_return_label);
3765 /* See if this RETURN can accept the insns currently in its delay slot.
3766 It can if it has more or an equal number of slots and the contents
3767 of each is valid. */
3769 flags = get_jump_flags (jump_insn, JUMP_LABEL (jump_insn));
3770 slots = num_delay_slots (jump_insn);
3771 if (slots >= XVECLEN (pat, 0) - 1)
3773 for (i = 1; i < XVECLEN (pat, 0); i++)
3775 #ifdef ANNUL_IFFALSE_SLOTS
3776 (INSN_ANNULLED_BRANCH_P (jump_insn)
3777 && INSN_FROM_TARGET_P (XVECEXP (pat, 0, i)))
3778 ? eligible_for_annul_false (jump_insn, i - 1,
3779 XVECEXP (pat, 0, i), flags) :
3781 #ifdef ANNUL_IFTRUE_SLOTS
3782 (INSN_ANNULLED_BRANCH_P (jump_insn)
3783 && ! INSN_FROM_TARGET_P (XVECEXP (pat, 0, i)))
3784 ? eligible_for_annul_true (jump_insn, i - 1,
3785 XVECEXP (pat, 0, i), flags) :
3787 eligible_for_delay (jump_insn, i - 1,
3788 XVECEXP (pat, 0, i), flags)))
3794 if (i == XVECLEN (pat, 0))
3797 /* We have to do something with this insn. If it is an unconditional
3798 RETURN, delete the SEQUENCE and output the individual insns,
3799 followed by the RETURN. Then set things up so we try to find
3800 insns for its delay slots, if it needs some. */
3801 if (GET_CODE (PATTERN (jump_insn)) == RETURN)
3803 rtx prev = PREV_INSN (insn);
3805 delete_related_insns (insn);
3806 for (i = 1; i < XVECLEN (pat, 0); i++)
3807 prev = emit_insn_after (PATTERN (XVECEXP (pat, 0, i)), prev);
3809 insn = emit_jump_insn_after (PATTERN (jump_insn), prev);
3810 emit_barrier_after (insn);
3813 obstack_ptr_grow (&unfilled_slots_obstack, insn);
3816 /* It is probably more efficient to keep this with its current
3817 delay slot as a branch to a RETURN. */
3818 reorg_redirect_jump (jump_insn, real_return_label);
3821 /* Now delete REAL_RETURN_LABEL if we never used it. Then try to fill any
3822 new delay slots we have created. */
3823 if (--LABEL_NUSES (real_return_label) == 0)
3824 delete_related_insns (real_return_label);
3826 fill_simple_delay_slots (1);
3827 fill_simple_delay_slots (0);
3831 /* Try to find insns to place in delay slots. */
3834 dbr_schedule (rtx first)
3836 rtx insn, next, epilogue_insn = 0;
3839 /* If the current function has no insns other than the prologue and
3840 epilogue, then do not try to fill any delay slots. */
3841 if (n_basic_blocks == NUM_FIXED_BLOCKS)
3844 /* Find the highest INSN_UID and allocate and initialize our map from
3845 INSN_UID's to position in code. */
3846 for (max_uid = 0, insn = first; insn; insn = NEXT_INSN (insn))
3848 if (INSN_UID (insn) > max_uid)
3849 max_uid = INSN_UID (insn);
3851 && NOTE_KIND (insn) == NOTE_INSN_EPILOGUE_BEG)
3852 epilogue_insn = insn;
3855 uid_to_ruid = XNEWVEC (int, max_uid + 1);
3856 for (i = 0, insn = first; insn; i++, insn = NEXT_INSN (insn))
3857 uid_to_ruid[INSN_UID (insn)] = i;
3859 /* Initialize the list of insns that need filling. */
3860 if (unfilled_firstobj == 0)
3862 gcc_obstack_init (&unfilled_slots_obstack);
3863 unfilled_firstobj = XOBNEWVAR (&unfilled_slots_obstack, rtx, 0);
3866 for (insn = next_active_insn (first); insn; insn = next_active_insn (insn))
3870 INSN_ANNULLED_BRANCH_P (insn) = 0;
3871 INSN_FROM_TARGET_P (insn) = 0;
3873 /* Skip vector tables. We can't get attributes for them. */
3874 if (JUMP_TABLE_DATA_P (insn))
3877 if (num_delay_slots (insn) > 0)
3878 obstack_ptr_grow (&unfilled_slots_obstack, insn);
3880 /* Ensure all jumps go to the last of a set of consecutive labels. */
3882 && (condjump_p (insn) || condjump_in_parallel_p (insn))
3883 && !ANY_RETURN_P (JUMP_LABEL (insn))
3884 && ((target = skip_consecutive_labels (JUMP_LABEL (insn)))
3885 != JUMP_LABEL (insn)))
3886 redirect_jump (insn, target, 1);
3889 init_resource_info (epilogue_insn);
3891 /* Show we haven't computed an end-of-function label yet. */
3892 end_of_function_label = 0;
3894 /* Initialize the statistics for this function. */
3895 memset (num_insns_needing_delays, 0, sizeof num_insns_needing_delays);
3896 memset (num_filled_delays, 0, sizeof num_filled_delays);
3898 /* Now do the delay slot filling. Try everything twice in case earlier
3899 changes make more slots fillable. */
3901 for (reorg_pass_number = 0;
3902 reorg_pass_number < MAX_REORG_PASSES;
3903 reorg_pass_number++)
3905 fill_simple_delay_slots (1);
3906 fill_simple_delay_slots (0);
3907 fill_eager_delay_slots ();
3908 relax_delay_slots (first);
  /* If we made an end of function label, indicate that it is now
     safe to delete it by undoing our prior adjustment to LABEL_NUSES.
     If it is now unused, delete it.  */
  if (end_of_function_label && --LABEL_NUSES (end_of_function_label) == 0)
    delete_related_insns (end_of_function_label);
#ifdef HAVE_return
  if (HAVE_return && end_of_function_label != 0)
    make_return_insns (first);
#endif
  /* Delete any USE insns made by update_block; subsequent passes don't need
     them or know how to deal with them.  */
  for (insn = first; insn; insn = next)
    {
      next = NEXT_INSN (insn);

      if (NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == USE
	  && INSN_P (XEXP (PATTERN (insn), 0)))
	next = delete_related_insns (insn);
    }
  obstack_free (&unfilled_slots_obstack, unfilled_firstobj);

  /* It is not clear why the line below is needed, but it does seem to be.  */
  unfilled_firstobj = XOBNEWVAR (&unfilled_slots_obstack, rtx, 0);
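  /* A plausible explanation (not verified here): obstack_free releases the
     named object and everything allocated after it, so unfilled_firstobj
     itself becomes a dangling pointer; growing a fresh zero-length object
     re-establishes a valid base for the next function.  */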
  if (dump_file)
    {
      int i, j, need_comma;
      int total_delay_slots[MAX_DELAY_HISTOGRAM + 1];
      int total_annul_slots[MAX_DELAY_HISTOGRAM + 1];

      for (reorg_pass_number = 0;
	   reorg_pass_number < MAX_REORG_PASSES;
	   reorg_pass_number++)
	{
	  fprintf (dump_file, ";; Reorg pass #%d:\n", reorg_pass_number + 1);
	  for (i = 0; i < NUM_REORG_FUNCTIONS; i++)
	    {
	      need_comma = 0;
	      fprintf (dump_file, ";; Reorg function #%d\n", i);

	      fprintf (dump_file, ";; %d insns needing delay slots\n;; ",
		       num_insns_needing_delays[i][reorg_pass_number]);

	      for (j = 0; j < MAX_DELAY_HISTOGRAM + 1; j++)
		if (num_filled_delays[i][j][reorg_pass_number])
		  {
		    if (need_comma)
		      fprintf (dump_file, ", ");
		    need_comma = 1;
		    fprintf (dump_file, "%d got %d delays",
			     num_filled_delays[i][j][reorg_pass_number], j);
		  }
	      fprintf (dump_file, "\n");
	    }
	}
      memset (total_delay_slots, 0, sizeof total_delay_slots);
      memset (total_annul_slots, 0, sizeof total_annul_slots);
      for (insn = first; insn; insn = NEXT_INSN (insn))
	{
	  if (! INSN_DELETED_P (insn)
	      && NONJUMP_INSN_P (insn)
	      && GET_CODE (PATTERN (insn)) != USE
	      && GET_CODE (PATTERN (insn)) != CLOBBER)
	    {
	      if (GET_CODE (PATTERN (insn)) == SEQUENCE)
		{
		  j = XVECLEN (PATTERN (insn), 0) - 1;
		  if (j > MAX_DELAY_HISTOGRAM)
		    j = MAX_DELAY_HISTOGRAM;
		  if (INSN_ANNULLED_BRANCH_P (XVECEXP (PATTERN (insn), 0, 0)))
		    total_annul_slots[j]++;
		  else
		    total_delay_slots[j]++;
		}
	      else if (num_delay_slots (insn) > 0)
		total_delay_slots[0]++;
	    }
	}
      fprintf (dump_file, ";; Reorg totals: ");
      need_comma = 0;
      for (j = 0; j < MAX_DELAY_HISTOGRAM + 1; j++)
	{
	  if (total_delay_slots[j])
	    {
	      if (need_comma)
		fprintf (dump_file, ", ");
	      need_comma = 1;
	      fprintf (dump_file, "%d got %d delays", total_delay_slots[j], j);
	    }
	}
      fprintf (dump_file, "\n");
#if defined (ANNUL_IFTRUE_SLOTS) || defined (ANNUL_IFFALSE_SLOTS)
      fprintf (dump_file, ";; Reorg annuls: ");
      need_comma = 0;
      for (j = 0; j < MAX_DELAY_HISTOGRAM + 1; j++)
	{
	  if (total_annul_slots[j])
	    {
	      if (need_comma)
		fprintf (dump_file, ", ");
	      need_comma = 1;
	      fprintf (dump_file, "%d got %d delays", total_annul_slots[j], j);
	    }
	}
      fprintf (dump_file, "\n");
#endif

      fprintf (dump_file, "\n");
    }
  /* For all JUMP insns, fill in branch prediction notes, so that during
     assembler output a target can set branch prediction bits in the code.
     We have to do this now, as up until this point the destinations of
     JUMPS can be moved around and changed, but past right here that cannot
     happen.  */
  for (insn = first; insn; insn = NEXT_INSN (insn))
    {
      int pred_flags;

      if (NONJUMP_INSN_P (insn))
	{
	  rtx pat = PATTERN (insn);

	  if (GET_CODE (pat) == SEQUENCE)
	    insn = XVECEXP (pat, 0, 0);
	}
      if (!JUMP_P (insn))
	continue;

      pred_flags = get_jump_flags (insn, JUMP_LABEL (insn));
      add_reg_note (insn, REG_BR_PRED, GEN_INT (pred_flags));
    }
  free_resource_info ();
  free (uid_to_ruid);
#ifdef DELAY_SLOTS_FOR_EPILOGUE
  /* The SPARC assembler, for instance, emits a warning when debug info is
     output into the delay slot.  */
  {
    rtx link;

    for (link = crtl->epilogue_delay_list;
	 link;
	 link = XEXP (link, 1))
      INSN_LOCATOR (XEXP (link, 0)) = 0;
  }
#endif

  crtl->dbr_scheduled_p = true;
}
#endif /* DELAY_SLOTS */
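/* A note for porters (an illustrative, hypothetical fragment): DELAY_SLOTS
   is defined by the insn-attr machinery when the target's machine
   description contains at least one define_delay, e.g.

     (define_delay (eq_attr "type" "branch")
       [(eq_attr "in_delay_slot" "yes") (nil) (nil)])

   where the vector gives, per slot, the condition an insn must satisfy to
   fill it, the annul-if-true condition, and the annul-if-false condition.
   The attribute names above are made up for the example.  */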
static bool
gate_handle_delay_slots (void)
{
#ifdef DELAY_SLOTS
  /* At -O0 dataflow info isn't updated after RA.  */
  return optimize > 0 && flag_delayed_branch && !crtl->dbr_scheduled_p;
#else
  return 0;
#endif
}

/* Run delay slot optimization.  */
static unsigned int
rest_of_handle_delay_slots (void)
{
#ifdef DELAY_SLOTS
  dbr_schedule (get_insns ());
#endif
  return 0;
}
struct rtl_opt_pass pass_delay_slots =
{
 {
  RTL_PASS,
  "dbr",                                /* name */
  gate_handle_delay_slots,              /* gate */
  rest_of_handle_delay_slots,           /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_DBR_SCHED,                         /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_ggc_collect                      /* todo_flags_finish */
 }
};
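/* pass_delay_slots is linked into the pass pipeline from passes.c; it runs
   only when its gate holds, i.e. when the target has delay slots,
   -fdelayed-branch is in effect, we are optimizing, and the pass has not
   already been run for this function.  */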
/* Machine dependent reorg pass.  */
static bool
gate_handle_machine_reorg (void)
{
  return targetm.machine_dependent_reorg != 0;
}

static unsigned int
rest_of_handle_machine_reorg (void)
{
  targetm.machine_dependent_reorg ();
  return 0;
}
struct rtl_opt_pass pass_machine_reorg =
{
 {
  RTL_PASS,
  "mach",                               /* name */
  gate_handle_machine_reorg,            /* gate */
  rest_of_handle_machine_reorg,         /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_MACH_DEP,                          /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_ggc_collect                      /* todo_flags_finish */
 }
};
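/* A note for porters (illustrative): a target enables pass_machine_reorg by
   setting the hook in its target .c file, for example

     #undef TARGET_MACHINE_DEPENDENT_REORG
     #define TARGET_MACHINE_DEPENDENT_REORG mips_reorg

   (mips_reorg being the MIPS back end's implementation); the gate above
   simply checks that the hook is non-null.  */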