1 /* Optimize jump instructions, for GNU compiler.
2 Copyright (C) 1987, 1988, 1989, 1991, 1992, 1993, 1994, 1995, 1996, 1997
3 1998, 1999, 2000, 2001, 2002, 2003 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
22 /* This is the pathetic reminder of old fame of the jump-optimization pass
23 of the compiler. Now it contains basically set of utility function to
26 Each CODE_LABEL has a count of the times it is used
27 stored in the LABEL_NUSES internal field, and each JUMP_INSN
28 has one label that it refers to stored in the
29 JUMP_LABEL internal field. With this we can detect labels that
30 become unused because of the deletion of all the jumps that
31 formerly used them. The JUMP_LABEL info is sometimes looked
34 The subroutines delete_insn, redirect_jump, and invert_jump are used
35 from other passes as well. */
39 #include "coretypes.h"
44 #include "hard-reg-set.h"
46 #include "insn-config.h"
47 #include "insn-attr.h"
53 #include "diagnostic.h"
59 /* Optimize jump y; x: ... y: jumpif... x?
60 Don't know if it is worth bothering with. */
61 /* Optimize two cases of conditional jump to conditional jump?
62 This can never delete any instruction or make anything dead,
63 or even change what is live at any point.
64 So perhaps let combiner do it. */
/* Forward declarations for the file-static helpers defined below
   (old-style PARAMS macro, pre-prototype GCC convention).  */
66 static rtx next_nonnote_insn_in_loop PARAMS ((rtx));
67 static void init_label_info PARAMS ((rtx));
68 static void mark_all_labels PARAMS ((rtx));
69 static int duplicate_loop_exit_test PARAMS ((rtx));
70 static void delete_computation PARAMS ((rtx));
71 static void redirect_exp_1 PARAMS ((rtx *, rtx, rtx, rtx));
72 static int redirect_exp PARAMS ((rtx, rtx, rtx));
73 static void invert_exp_1 PARAMS ((rtx));
74 static int invert_exp PARAMS ((rtx));
75 static int returnjump_p_1 PARAMS ((rtx *, void *));
76 static void delete_prior_computation PARAMS ((rtx, rtx));
/* NOTE(review): truncated fragment -- the embedded original line numbers
   jump (82 -> 87, 97 -> 98 ...), so the declaration list, braces and other
   statements are missing here.  Verify against the full source.  */
78 /* Alternate entry into the jump optimizer. This entry point only rebuilds
79 the JUMP_LABEL field in jumping insns and REG_LABEL notes in non-jumping
82 rebuild_jump_labels (f)
87 timevar_push (TV_REBUILD_JUMP);
91 /* Keep track of labels used from static data; we don't track them
92 closely enough to delete them here, so make sure their reference
93 count doesn't drop to zero. */
/* Bump LABEL_NUSES for every label on the forced_labels chain so later
   passes cannot delete them as apparently unused.  */
95 for (insn = forced_labels; insn; insn = XEXP (insn, 1))
96 if (GET_CODE (XEXP (insn, 0)) == CODE_LABEL)
97 LABEL_NUSES (XEXP (insn, 0))++;
98 timevar_pop (TV_REBUILD_JUMP);
/* NOTE(review): the function header is missing from this fragment (line
   numbering skips 106 -> 112); the body below presumably belongs to the
   barrier-cleanup entry point -- confirm name and signature against the
   full source.  */
101 /* Some old code expects exactly one BARRIER as the NEXT_INSN of a
102 non-fallthru insn. This is not generally true, as multiple barriers
103 may have crept in, or the BARRIER may be separated from the last
104 real insn by one or more NOTEs.
106 This simple pass moves barriers and removes duplicates so that the
112 rtx insn, next, prev;
113 for (insn = get_insns (); insn; insn = next)
115 next = NEXT_INSN (insn);
116 if (GET_CODE (insn) == BARRIER)
/* A BARRIER immediately preceded (ignoring notes) by another BARRIER is
   redundant; otherwise move it up next to the last real insn.  */
118 prev = prev_nonnote_insn (insn);
119 if (GET_CODE (prev) == BARRIER)
120 delete_barrier (insn);
121 else if (prev != PREV_INSN (insn))
122 reorder_insns (insn, insn, prev);
/* NOTE(review): truncated fragment -- declaration, loop construct and
   return statements are missing (line numbers skip).  */
127 /* Return the next insn after INSN that is not a NOTE and is in the loop,
128 i.e. when there is no such INSN before NOTE_INSN_LOOP_END return NULL_RTX.
129 This routine does not look inside SEQUENCEs. */
132 next_nonnote_insn_in_loop (insn)
137 insn = NEXT_INSN (insn);
138 if (insn == 0 || GET_CODE (insn) != NOTE)
/* Stop the scan at the end of the loop body.  */
140 if (GET_CODE (insn) == NOTE
141 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END)
/* Walk all insns looking for loop-begin notes followed by an unconditional
   jump and try to duplicate the loop exit test before the loop.
   NOTE(review): truncated fragment -- declarations, braces, and the
   function's leading comment are missing (line numbers skip).  */
149 copy_loop_headers (f)
153 /* Now iterate optimizing jumps until nothing changes over one pass. */
154 for (insn = f; insn; insn = next)
158 next = NEXT_INSN (insn);
160 /* See if this is a NOTE_INSN_LOOP_BEG followed by an unconditional
161 jump. Try to optimize by duplicating the loop exit test if so.
162 This is only safe immediately after regscan, because it uses
163 the values of regno_first_uid and regno_last_uid. */
164 if (GET_CODE (insn) == NOTE
165 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG
166 && (temp1 = next_nonnote_insn_in_loop (insn)) != 0
167 && any_uncondjump_p (temp1) && onlyjump_p (temp1))
/* Remember the insn before the note so we can resume scanning after the
   transformation rewrote this region.  */
169 temp = PREV_INSN (insn);
170 if (duplicate_loop_exit_test (insn))
172 next = NEXT_INSN (temp);
/* Remove duplicate consecutive line-number notes from the insn chain F.
   NOTE(review): truncated fragment -- declarations (including last_note)
   and part of the comparison condition are missing (line numbers skip,
   e.g. 198 -> 200).  */
179 purge_line_number_notes (f)
184 /* Delete extraneous line number notes.
185 Note that two consecutive notes for different lines are not really
186 extraneous. There should be some indication where that line belonged,
187 even if it became empty. */
189 for (insn = f; insn; insn = NEXT_INSN (insn))
190 if (GET_CODE (insn) == NOTE)
192 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_FUNCTION_BEG)
193 /* Any previous line note was for the prologue; gdb wants a new
194 note after the prologue even if it is for the same line. */
195 last_note = NULL_RTX;
196 else if (NOTE_LINE_NUMBER (insn) >= 0)
198 /* Delete this note if it is identical to previous note. */
200 && NOTE_SOURCE_FILE (insn) == NOTE_SOURCE_FILE (last_note)
201 && NOTE_LINE_NUMBER (insn) == NOTE_LINE_NUMBER (last_note))
203 delete_related_insns (insn);
/* NOTE(review): truncated fragment -- the function header and local
   declarations are missing (line numbers skip 214 -> 221).  The comment
   says a UID is returned, but no return statement is visible here.  */
212 /* Initialize LABEL_NUSES and JUMP_LABEL fields. Delete any REG_LABEL
213 notes whose labels don't occur in the insn any more. Returns the
214 largest INSN_UID found. */
221 for (insn = f; insn; insn = NEXT_INSN (insn))
222 if (GET_CODE (insn) == CODE_LABEL)
/* Preserved labels start with one phantom use so they are never freed.  */
223 LABEL_NUSES (insn) = (LABEL_PRESERVE_P (insn) != 0);
224 else if (GET_CODE (insn) == JUMP_INSN)
225 JUMP_LABEL (insn) = 0;
226 else if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN)
230 for (note = REG_NOTES (insn); note; note = next)
232 next = XEXP (note, 1);
233 if (REG_NOTE_KIND (note) == REG_LABEL
234 && ! reg_mentioned_p (XEXP (note, 0), PATTERN (insn)))
235 remove_note (insn, note);
/* NOTE(review): truncated fragment -- the function header, INSN_P guard,
   and several closing braces are missing (line numbers skip).  */
240 /* Mark the label each jump jumps to.
241 Combine consecutive labels, and count uses of labels. */
249 for (insn = f; insn; insn = NEXT_INSN (insn))
/* CALL_PLACEHOLDERs carry three alternative insn sequences plus an
   optional tail-recursion label; each must be walked separately.  */
252 if (GET_CODE (insn) == CALL_INSN
253 && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
255 mark_all_labels (XEXP (PATTERN (insn), 0));
256 mark_all_labels (XEXP (PATTERN (insn), 1));
257 mark_all_labels (XEXP (PATTERN (insn), 2));
259 /* Canonicalize the tail recursion label attached to the
260 CALL_PLACEHOLDER insn. */
261 if (XEXP (PATTERN (insn), 3))
/* Wrap the label in a LABEL_REF so mark_jump_label can canonicalize it,
   then store the (possibly replaced) label back.  */
263 rtx label_ref = gen_rtx_LABEL_REF (VOIDmode,
264 XEXP (PATTERN (insn), 3));
265 mark_jump_label (label_ref, insn, 0);
266 XEXP (PATTERN (insn), 3) = XEXP (label_ref, 0);
272 mark_jump_label (PATTERN (insn), insn, 0);
273 if (! INSN_DELETED_P (insn) && GET_CODE (insn) == JUMP_INSN)
275 /* When we know the LABEL_REF contained in a REG used in
276 an indirect jump, we'll have a REG_LABEL note so that
277 flow can tell where it's going. */
278 if (JUMP_LABEL (insn) == 0)
280 rtx label_note = find_reg_note (insn, REG_LABEL, NULL_RTX);
283 /* But a LABEL_REF around the REG_LABEL note, so
284 that we can canonicalize it. */
285 rtx label_ref = gen_rtx_LABEL_REF (VOIDmode,
286 XEXP (label_note, 0));
288 mark_jump_label (label_ref, insn, 0);
289 XEXP (label_note, 0) = XEXP (label_ref, 0);
290 JUMP_LABEL (insn) = XEXP (label_note, 0);
/* NOTE(review): heavily truncated fragment -- declarations, case labels,
   braces, several conditions, and the final return are missing throughout
   (the embedded line numbers jump repeatedly).  Do not assume the visible
   text is complete.  */
297 /* LOOP_START is a NOTE_INSN_LOOP_BEG note that is followed by an unconditional
298 jump. Assume that this unconditional jump is to the exit test code. If
299 the code is sufficiently simple, make a copy of it before INSN,
300 followed by a jump to the exit of the loop. Then delete the unconditional
303 Return 1 if we made the change, else 0.
305 This is only safe immediately after a regscan pass because it uses the
306 values of regno_first_uid and regno_last_uid. */
309 duplicate_loop_exit_test (loop_start)
312 rtx insn, set, reg, p, link;
313 rtx copy = 0, first_copy = 0;
/* The exit code starts right after the label the loop-entry jump targets.  */
316 = NEXT_INSN (JUMP_LABEL (next_nonnote_insn_in_loop (loop_start)));
318 int max_reg = max_reg_num ();
320 rtx loop_pre_header_label;
322 /* Scan the exit code. We do not perform this optimization if any insn:
326 has a REG_RETVAL or REG_LIBCALL note (hard to adjust)
327 is a NOTE_INSN_LOOP_BEG because this means we have a nested loop
329 We also do not do this if we find an insn with ASM_OPERANDS. While
330 this restriction should not be necessary, copying an insn with
331 ASM_OPERANDS can confuse asm_noperands in some cases.
333 Also, don't do this if the exit code is more than 20 insns. */
335 for (insn = exitcode;
337 && ! (GET_CODE (insn) == NOTE
338 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END);
339 insn = NEXT_INSN (insn))
341 switch (GET_CODE (insn))
349 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
350 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END))
351 /* If we were to duplicate this code, we would not move
352 the BLOCK notes, and so debugging the moved code would
353 be difficult. Thus, we only move the code with -O2 or
361 || find_reg_note (insn, REG_RETVAL, NULL_RTX)
362 || find_reg_note (insn, REG_LIBCALL, NULL_RTX))
370 /* Unless INSN is zero, we can do the optimization. */
376 /* See if any insn sets a register only used in the loop exit code and
377 not a user variable. If so, replace it with a new register. */
378 for (insn = exitcode; insn != lastexit; insn = NEXT_INSN (insn))
379 if (GET_CODE (insn) == INSN
380 && (set = single_set (insn)) != 0
381 && ((reg = SET_DEST (set), GET_CODE (reg) == REG)
382 || (GET_CODE (reg) == SUBREG
383 && (reg = SUBREG_REG (reg), GET_CODE (reg) == REG)))
384 && REGNO (reg) >= FIRST_PSEUDO_REGISTER
385 && REGNO_FIRST_UID (REGNO (reg)) == INSN_UID (insn))
/* Make sure the register's last use also falls inside the exit code.  */
387 for (p = NEXT_INSN (insn); p != lastexit; p = NEXT_INSN (p))
388 if (REGNO_LAST_UID (REGNO (reg)) == INSN_UID (p))
393 /* We can do the replacement. Allocate reg_map if this is the
394 first replacement we found. */
396 reg_map = (rtx *) xcalloc (max_reg, sizeof (rtx));
398 REG_LOOP_TEST_P (reg) = 1;
400 reg_map[REGNO (reg)] = gen_reg_rtx (GET_MODE (reg));
403 loop_pre_header_label = gen_label_rtx ();
405 /* Now copy each insn. */
406 for (insn = exitcode; insn != lastexit; insn = NEXT_INSN (insn))
408 switch (GET_CODE (insn))
411 copy = emit_barrier_before (loop_start);
414 /* Only copy line-number notes. */
415 if (NOTE_LINE_NUMBER (insn) >= 0)
417 copy = emit_note_before (NOTE_LINE_NUMBER (insn), loop_start);
418 NOTE_SOURCE_FILE (copy) = NOTE_SOURCE_FILE (insn);
423 copy = emit_insn_before (copy_insn (PATTERN (insn)), loop_start);
425 replace_regs (PATTERN (copy), reg_map, max_reg, 1);
427 mark_jump_label (PATTERN (copy), copy, 0);
428 INSN_LOCATOR (copy) = INSN_LOCATOR (insn);
430 /* Copy all REG_NOTES except REG_LABEL since mark_jump_label will
432 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
433 if (REG_NOTE_KIND (link) != REG_LABEL)
435 if (GET_CODE (link) == EXPR_LIST)
437 = copy_insn_1 (gen_rtx_EXPR_LIST (REG_NOTE_KIND (link),
442 = copy_insn_1 (gen_rtx_INSN_LIST (REG_NOTE_KIND (link),
447 if (reg_map && REG_NOTES (copy))
448 replace_regs (REG_NOTES (copy), reg_map, max_reg, 1);
452 copy = emit_jump_insn_before (copy_insn (PATTERN (insn)),
454 INSN_LOCATOR (copy) = INSN_LOCATOR (insn);
456 replace_regs (PATTERN (copy), reg_map, max_reg, 1);
457 mark_jump_label (PATTERN (copy), copy, 0);
458 if (REG_NOTES (insn))
460 REG_NOTES (copy) = copy_insn_1 (REG_NOTES (insn));
462 replace_regs (REG_NOTES (copy), reg_map, max_reg, 1);
465 /* Predict conditional jump that do make loop looping as taken.
466 Other jumps are probably exit conditions, so predict
468 if (any_condjump_p (copy))
470 rtx label = JUMP_LABEL (copy);
473 /* The jump_insn after loop_start should be followed
474 by barrier and loopback label. */
475 if (prev_nonnote_insn (label)
476 && (prev_nonnote_insn (prev_nonnote_insn (label))
477 == next_nonnote_insn (loop_start)))
479 predict_insn_def (copy, PRED_LOOP_HEADER, TAKEN);
480 /* To keep pre-header, we need to redirect all loop
481 entrances before the LOOP_BEG note. */
482 redirect_jump (copy, loop_pre_header_label, 0);
485 predict_insn_def (copy, PRED_LOOP_HEADER, NOT_TAKEN);
494 /* Record the first insn we copied. We need it so that we can
495 scan the copied insns for new pseudo registers. */
500 /* Now clean up by emitting a jump to the end label and deleting the jump
501 at the start of the loop. */
502 if (! copy || GET_CODE (copy) != BARRIER)
504 copy = emit_jump_insn_before (gen_jump (get_label_after (insn)),
507 /* Record the first insn we copied. We need it so that we can
508 scan the copied insns for new pseudo registers. This may not
509 be strictly necessary since we should have copied at least one
510 insn above. But I am going to be safe. */
514 mark_jump_label (PATTERN (copy), copy, 0);
515 emit_barrier_before (loop_start);
518 emit_label_before (loop_pre_header_label, loop_start);
520 /* Now scan from the first insn we copied to the last insn we copied
521 (copy) for new pseudo registers. Do this after the code to jump to
522 the end label since that might create a new pseudo too. */
523 reg_scan_update (first_copy, copy, max_reg);
525 /* Mark the exit code as the virtual top of the converted loop. */
526 emit_note_before (NOTE_INSN_LOOP_VTOP, exitcode);
528 delete_related_insns (next_nonnote_insn (loop_start));
/* NOTE(review): truncated fragment -- parameter declarations, locals
   (start/end/insn/next), braces, and the final return are missing
   (line numbers skip 544 -> 554, 577 -> 584 ...).  */
537 /* Move all block-beg, block-end, loop-beg, loop-cont, loop-vtop, loop-end,
538 notes between START and END out before START. START and END may be such
539 notes. Returns the values of the new starting and ending insns, which
540 may be different if the original ones were such notes.
541 Return true if there were only such notes and no real instructions. */
544 squeeze_notes (startp, endp)
554 rtx past_end = NEXT_INSN (end);
556 for (insn = start; insn != past_end; insn = next)
558 next = NEXT_INSN (insn);
559 if (GET_CODE (insn) == NOTE
560 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END
561 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
562 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG
563 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END
564 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_CONT
565 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_VTOP))
/* Unlink the note from its current position and splice it in directly
   before START by hand-editing the doubly linked insn chain.  */
571 rtx prev = PREV_INSN (insn);
572 PREV_INSN (insn) = PREV_INSN (start);
573 NEXT_INSN (insn) = start;
574 NEXT_INSN (PREV_INSN (insn)) = insn;
575 PREV_INSN (NEXT_INSN (insn)) = insn;
576 NEXT_INSN (prev) = next;
577 PREV_INSN (next) = prev;
584 /* There were no real instructions. */
585 if (start == past_end)
/* NOTE(review): truncated fragment -- locals, braces, and the return of
   LABEL are missing (line numbers skip).  */
595 /* Return the label before INSN, or put a new label there. */
598 get_label_before (insn)
603 /* Find an existing label at this point
604 or make a new one if there is none. */
605 label = prev_nonnote_insn (insn);
607 if (label == 0 || GET_CODE (label) != CODE_LABEL)
/* No usable label: synthesize one and emit it just before INSN.  */
609 rtx prev = PREV_INSN (insn);
611 label = gen_label_rtx ();
612 emit_label_after (label, prev);
613 LABEL_NUSES (label) = 0;
/* NOTE(review): truncated fragment -- locals, braces, and the return of
   LABEL are missing (line numbers skip).  */
618 /* Return the label after INSN, or put a new label there. */
621 get_label_after (insn)
626 /* Find an existing label at this point
627 or make a new one if there is none. */
628 label = next_nonnote_insn (insn);
630 if (label == 0 || GET_CODE (label) != CODE_LABEL)
/* No usable label: synthesize one and emit it right after INSN.  */
632 label = gen_label_rtx ();
633 emit_label_after (label, insn);
634 LABEL_NUSES (label) = 0;
/* NOTE(review): heavily truncated fragment -- case labels, UNKNOWN
   returns, #endif lines and braces are missing throughout (the embedded
   line numbers jump repeatedly).  */
639 /* Given a comparison (CODE ARG0 ARG1), inside an insn, INSN, return a code
640 of reversed comparison if it is possible to do so. Otherwise return UNKNOWN.
641 UNKNOWN may be returned in case we are having CC_MODE compare and we don't
642 know whether it's source is floating point or integer comparison. Machine
643 description should define REVERSIBLE_CC_MODE and REVERSE_CONDITION macros
644 to help this function avoid overhead in these cases. */
646 reversed_comparison_code_parts (code, arg0, arg1, insn)
647 rtx insn, arg0, arg1;
650 enum machine_mode mode;
652 /* If this is not actually a comparison, we can't reverse it. */
653 if (GET_RTX_CLASS (code) != '<')
/* Comparisons of VOIDmode constants take their mode from the other arm.  */
656 mode = GET_MODE (arg0);
657 if (mode == VOIDmode)
658 mode = GET_MODE (arg1);
660 /* First see if machine description supply us way to reverse the comparison.
661 Give it priority over everything else to allow machine description to do
663 #ifdef REVERSIBLE_CC_MODE
664 if (GET_MODE_CLASS (mode) == MODE_CC
665 && REVERSIBLE_CC_MODE (mode))
667 #ifdef REVERSE_CONDITION
668 return REVERSE_CONDITION (code, mode);
670 return reverse_condition (code);
674 /* Try a few special cases based on the comparison code. */
683 /* It is always safe to reverse EQ and NE, even for the floating
684 point. Similarly the unsigned comparisons are never used for
685 floating point so we can reverse them in the default way. */
686 return reverse_condition (code);
691 /* In case we already see unordered comparison, we can be sure to
692 be dealing with floating point so we don't need any more tests. */
693 return reverse_condition_maybe_unordered (code);
698 /* We don't have safe way to reverse these yet. */
704 if (GET_MODE_CLASS (mode) == MODE_CC || CC0_P (arg0))
707 /* Try to search for the comparison to determine the real mode.
708 This code is expensive, but with sane machine description it
709 will be never used, since REVERSIBLE_CC_MODE will return true
/* Walk backwards within the basic block (stop at a CODE_LABEL) looking
   for the insn that set the flags register ARG0.  */
714 for (prev = prev_nonnote_insn (insn);
715 prev != 0 && GET_CODE (prev) != CODE_LABEL;
716 prev = prev_nonnote_insn (prev))
718 rtx set = set_of (arg0, prev);
719 if (set && GET_CODE (set) == SET
720 && rtx_equal_p (SET_DEST (set), arg0))
722 rtx src = SET_SRC (set);
724 if (GET_CODE (src) == COMPARE)
726 rtx comparison = src;
727 arg0 = XEXP (src, 0);
728 mode = GET_MODE (arg0);
729 if (mode == VOIDmode)
730 mode = GET_MODE (XEXP (comparison, 1));
733 /* We can get past reg-reg moves. This may be useful for model
734 of i387 comparisons that first move flag registers around. */
741 /* If register is clobbered in some ununderstandable way,
748 /* Test for an integer condition, or a floating-point comparison
749 in which NaNs can be ignored. */
750 if (GET_CODE (arg0) == CONST_INT
751 || (GET_MODE (arg0) != VOIDmode
752 && GET_MODE_CLASS (mode) != MODE_CC
753 && !HONOR_NANS (mode)))
754 return reverse_condition (code);
/* NOTE(review): truncated fragment -- the return type line, braces, and
   the early-exit return value are missing (line numbers skip).  */
759 /* A wrapper around the previous function to take COMPARISON as rtx
760 expression. This simplifies many callers. */
762 reversed_comparison_code (comparison, insn)
763 rtx comparison, insn;
/* Non-comparison rtx cannot be reversed; delegate real work to
   reversed_comparison_code_parts.  */
765 if (GET_RTX_CLASS (GET_CODE (comparison)) != '<')
767 return reversed_comparison_code_parts (GET_CODE (comparison),
768 XEXP (comparison, 0),
769 XEXP (comparison, 1), insn);
/* NOTE(review): only the header of this function survives in the
   fragment -- the entire switch body (original lines ~782-823) is
   missing.  */
772 /* Given an rtx-code for a comparison, return the code for the negated
773 comparison. If no such code exists, return UNKNOWN.
775 WATCH OUT! reverse_condition is not safe to use on a jump that might
776 be acting on the results of an IEEE floating point comparison, because
777 of the special treatment of non-signaling nans in comparisons.
778 Use reversed_comparison_code instead. */
781 reverse_condition (code)
/* NOTE(review): only the header survives -- the function body (original
   lines ~830-867) is missing from this fragment.  */
824 /* Similar, but we're allowed to generate unordered comparisons, which
825 makes it safe for IEEE floating-point. Of course, we have to recognize
826 that the target will support them too... */
829 reverse_condition_maybe_unordered (code)
/* NOTE(review): only the header survives -- the function body (original
   lines ~873-914) is missing from this fragment.  */
868 /* Similar, but return the code when two operands of a comparison are swapped.
869 This IS safe for IEEE floating-point. */
872 swap_condition (code)
/* NOTE(review): only the header survives -- the function body (original
   lines ~921-946) is missing from this fragment.  */
915 /* Given a comparison CODE, return the corresponding unsigned comparison.
916 If CODE is an equality comparison or already an unsigned comparison,
920 unsigned_condition (code)
/* NOTE(review): only the header survives -- the function body (original
   lines ~951-976) is missing from this fragment.  */
947 /* Similarly, return the signed version of a comparison. */
950 signed_condition (code)
/* NOTE(review): truncated fragment -- the switch statement, its case
   labels, and the return statements are missing (line numbers skip);
   only scattered condition lines survive.  */
977 /* Return nonzero if CODE1 is more strict than CODE2, i.e., if the
978 truth of CODE1 implies the truth of CODE2. */
981 comparison_dominates_p (code1, code2)
982 enum rtx_code code1, code2;
984 /* UNKNOWN comparison codes can happen as a result of trying to revert
986 They can't match anything, so we have to reject them here. */
987 if (code1 == UNKNOWN || code2 == UNKNOWN)
996 if (code2 == UNLE || code2 == UNGE)
1001 if (code2 == LE || code2 == LEU || code2 == GE || code2 == GEU
1002 || code2 == ORDERED)
1007 if (code2 == UNLE || code2 == NE)
1012 if (code2 == LE || code2 == NE || code2 == ORDERED || code2 == LTGT)
1017 if (code2 == UNGE || code2 == NE)
1022 if (code2 == GE || code2 == NE || code2 == ORDERED || code2 == LTGT)
1028 if (code2 == ORDERED)
1033 if (code2 == NE || code2 == ORDERED)
1038 if (code2 == LEU || code2 == NE)
1043 if (code2 == GEU || code2 == NE)
1048 if (code2 == NE || code2 == UNEQ || code2 == UNLE || code2 == UNLT
1049 || code2 == UNGE || code2 == UNGT)
/* NOTE(review): the function header is missing from this fragment
   (line numbering skips 1060 -> 1066); presumably this is the simple-jump
   predicate -- confirm name and signature against the full source.  */
1060 /* Return 1 if INSN is an unconditional jump and nothing else. */
1066 return (GET_CODE (insn) == JUMP_INSN
1067 && GET_CODE (PATTERN (insn)) == SET
1068 && GET_CODE (SET_DEST (PATTERN (insn))) == PC
1069 && GET_CODE (SET_SRC (PATTERN (insn))) == LABEL_REF);
/* NOTE(review): the function header is missing from this fragment; the
   body below presumably belongs to the deprecated condjump predicate --
   confirm name and signature against the full source.  */
1072 /* Return nonzero if INSN is a (possibly) conditional jump
1075 Use this function is deprecated, since we need to support combined
1076 branch and compare insns. Use any_condjump_p instead whenever possible. */
1082 rtx x = PATTERN (insn);
1084 if (GET_CODE (x) != SET
1085 || GET_CODE (SET_DEST (x)) != PC)
/* An unconditional (SET (PC) (LABEL_REF ...)) counts too.  */
1089 if (GET_CODE (x) == LABEL_REF)
/* Otherwise require IF_THEN_ELSE with one arm falling through (PC) and
   the other a label or RETURN.  */
1092 return (GET_CODE (x) == IF_THEN_ELSE
1093 && ((GET_CODE (XEXP (x, 2)) == PC
1094 && (GET_CODE (XEXP (x, 1)) == LABEL_REF
1095 || GET_CODE (XEXP (x, 1)) == RETURN))
1096 || (GET_CODE (XEXP (x, 1)) == PC
1097 && (GET_CODE (XEXP (x, 2)) == LABEL_REF
1098 || GET_CODE (XEXP (x, 2)) == RETURN))));
/* NOTE(review): truncated fragment -- intermediate return statements and
   braces are missing (line numbers skip); only the condition lines
   survive.  */
1103 /* Return nonzero if INSN is a (possibly) conditional jump inside a
1106 Use this function is deprecated, since we need to support combined
1107 branch and compare insns. Use any_condjump_p instead whenever possible. */
1110 condjump_in_parallel_p (insn)
1113 rtx x = PATTERN (insn);
1115 if (GET_CODE (x) != PARALLEL)
/* Only the first element of the PARALLEL is inspected.  */
1118 x = XVECEXP (x, 0, 0);
1120 if (GET_CODE (x) != SET)
1122 if (GET_CODE (SET_DEST (x)) != PC)
1124 if (GET_CODE (SET_SRC (x)) == LABEL_REF)
1126 if (GET_CODE (SET_SRC (x)) != IF_THEN_ELSE)
1128 if (XEXP (SET_SRC (x), 2) == pc_rtx
1129 && (GET_CODE (XEXP (SET_SRC (x), 1)) == LABEL_REF
1130 || GET_CODE (XEXP (SET_SRC (x), 1)) == RETURN))
1132 if (XEXP (SET_SRC (x), 1) == pc_rtx
1133 && (GET_CODE (XEXP (SET_SRC (x), 2)) == LABEL_REF
1134 || GET_CODE (XEXP (SET_SRC (x), 2)) == RETURN))
/* NOTE(review): the function header is missing from this fragment
   (line numbering skips 1139 -> 1146); presumably this is the pc_set
   accessor -- confirm name and signature against the full source.  */
1139 /* Return set of PC, otherwise NULL. */
1146 if (GET_CODE (insn) != JUMP_INSN)
1148 pat = PATTERN (insn);
1150 /* The set is allowed to appear either as the insn pattern or
1151 the first set in a PARALLEL. */
1152 if (GET_CODE (pat) == PARALLEL)
1153 pat = XVECEXP (pat, 0, 0);
1154 if (GET_CODE (pat) == SET && GET_CODE (SET_DEST (pat)) == PC)
/* NOTE(review): truncated fragment -- the early-out when pc_set fails and
   the final return are missing (line numbers skip).  */
1160 /* Return true when insn is an unconditional direct jump,
1161 possibly bundled inside a PARALLEL. */
1164 any_uncondjump_p (insn)
1167 rtx x = pc_set (insn);
/* Unconditional means the PC is set straight to a LABEL_REF.  */
1170 if (GET_CODE (SET_SRC (x)) != LABEL_REF)
/* NOTE(review): truncated fragment -- declarations of a/b, braces, and
   the null-check on the pc_set result are missing (line numbers skip).  */
1175 /* Return true when insn is a conditional jump. This function works for
1176 instructions containing PC sets in PARALLELs. The instruction may have
1177 various other effects so before removing the jump you must verify
1180 Note that unlike condjump_p it returns false for unconditional jumps. */
1183 any_condjump_p (insn)
1186 rtx x = pc_set (insn);
1191 if (GET_CODE (SET_SRC (x)) != IF_THEN_ELSE)
1194 a = GET_CODE (XEXP (SET_SRC (x), 1));
1195 b = GET_CODE (XEXP (SET_SRC (x), 2));
/* Conditional: one arm falls through (PC), the other jumps or returns.  */
1197 return ((b == PC && (a == LABEL_REF || a == RETURN))
1198 || (a == PC && (b == LABEL_REF || b == RETURN)));
/* NOTE(review): truncated fragment -- the SET_SRC extraction, the return
   expressions (presumably XEXP of the taken arm), and the fallback
   return are missing (line numbers skip).  */
1201 /* Return the label of a conditional jump. */
1204 condjump_label (insn)
1207 rtx x = pc_set (insn);
1212 if (GET_CODE (x) == LABEL_REF)
1214 if (GET_CODE (x) != IF_THEN_ELSE)
1216 if (XEXP (x, 2) == pc_rtx && GET_CODE (XEXP (x, 1)) == LABEL_REF)
1218 if (XEXP (x, 1) == pc_rtx && GET_CODE (XEXP (x, 2)) == LABEL_REF)
/* NOTE(review): truncated fragment -- the loc parameter declaration and
   the dereference `rtx x = *loc;` are missing (line numbers skip).
   This is a for_each_rtx callback.  */
1223 /* Return true if INSN is a (possibly conditional) return insn. */
1226 returnjump_p_1 (loc, data)
1228 void *data ATTRIBUTE_UNUSED;
1232 return x && (GET_CODE (x) == RETURN
1233 || (GET_CODE (x) == SET && SET_IS_RETURN_P (x)));
/* NOTE(review): the function header is missing from this fragment;
   judging by the returnjump_p_1 callback this is presumably returnjump_p
   -- confirm against the full source.  Walks the whole pattern so it
   also catches conditional returns.  */
1240 if (GET_CODE (insn) != JUMP_INSN)
1242 return for_each_rtx (&PATTERN (insn), returnjump_p_1, NULL);
/* NOTE(review): the function header, the set == 0 check, and the final
   return are missing from this fragment (line numbers skip); presumably
   this is onlyjump_p -- confirm against the full source.  */
1245 /* Return true if INSN is a jump that only transfers control and
1254 if (GET_CODE (insn) != JUMP_INSN)
1257 set = single_set (insn);
1260 if (GET_CODE (SET_DEST (set)) != PC)
1262 if (side_effects_p (SET_SRC (set)))
/* NOTE(review): the function header is missing from this fragment
   (line numbering skips 1271 -> 1283); presumably the only-sets-cc0
   predicate guarded by #ifdef HAVE_cc0 -- confirm against the full
   source.  */
1270 /* Return nonzero if X is an RTX that only sets the condition codes
1271 and has no side effects. */
1283 return sets_cc0_p (x) == 1 && ! side_effects_p (x);
/* NOTE(review): truncated fragment -- the function header, the sets_cc0
   local, loop-body assignments, and a fallthrough return are missing
   (line numbers skip); presumably this is sets_cc0_p -- confirm against
   the full source.  */
1286 /* Return 1 if X is an RTX that does nothing but set the condition codes
1287 and CLOBBER or USE registers.
1288 Return -1 if X does explicitly set the condition codes,
1289 but also does other things. */
1301 if (GET_CODE (x) == SET && SET_DEST (x) == cc0_rtx)
1303 if (GET_CODE (x) == PARALLEL)
1307 int other_things = 0;
1308 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
1310 if (GET_CODE (XVECEXP (x, 0, i)) == SET
1311 && SET_DEST (XVECEXP (x, 0, i)) == cc0_rtx)
1313 else if (GET_CODE (XVECEXP (x, 0, i)) == SET)
/* 0: no cc0 set; -1: cc0 set plus other sets; 1: only cc0 set.  */
1316 return ! sets_cc0 ? 0 : other_things ? -1 : 1;
/* NOTE(review): truncated fragment -- declarations (value, insn, next,
   tem, depth), the bounded-depth for-loop header, several breaks/returns,
   and closing braces are missing (line numbers skip).  */
1322 /* Follow any unconditional jump at LABEL;
1323 return the ultimate label reached by any such chain of jumps.
1324 If LABEL is not followed by a jump, return LABEL.
1325 If the chain loops or we can't find end, return LABEL,
1326 since that tells caller to avoid changing the insn.
1328 If RELOAD_COMPLETED is 0, we do not chain across a NOTE_INSN_LOOP_BEG or
1329 a USE or CLOBBER. */
1332 follow_jumps (label)
/* Chain step: the active insn after VALUE must be a lone unconditional
   jump (or RETURN) immediately followed by a BARRIER.  */
1342 && (insn = next_active_insn (value)) != 0
1343 && GET_CODE (insn) == JUMP_INSN
1344 && ((JUMP_LABEL (insn) != 0 && any_uncondjump_p (insn)
1345 && onlyjump_p (insn))
1346 || GET_CODE (PATTERN (insn)) == RETURN)
1347 && (next = NEXT_INSN (insn))
1348 && GET_CODE (next) == BARRIER);
1351 /* Don't chain through the insn that jumps into a loop
1352 from outside the loop,
1353 since that would create multiple loop entry jumps
1354 and prevent loop optimization. */
1356 if (!reload_completed)
1357 for (tem = value; tem != insn; tem = NEXT_INSN (tem))
1358 if (GET_CODE (tem) == NOTE
1359 && (NOTE_LINE_NUMBER (tem) == NOTE_INSN_LOOP_BEG
1360 /* ??? Optional. Disables some optimizations, but makes
1361 gcov output more accurate with -O. */
1362 || (flag_test_coverage && NOTE_LINE_NUMBER (tem) > 0)))
1365 /* If we have found a cycle, make the insn jump to itself. */
1366 if (JUMP_LABEL (insn) == label)
/* Don't chain into a dispatch table (ADDR_VEC / ADDR_DIFF_VEC).  */
1369 tem = next_active_insn (JUMP_LABEL (insn));
1370 if (tem && (GET_CODE (PATTERN (tem)) == ADDR_VEC
1371 || GET_CODE (PATTERN (tem)) == ADDR_DIFF_VEC))
1374 value = JUMP_LABEL (insn);
/* NOTE(review): truncated fragment -- the switch on CODE, its case
   labels, returns, label-chain canonicalization (skipping to the last of
   consecutive labels), and braces are missing (line numbers skip
   repeatedly).  */
1382 /* Find all CODE_LABELs referred to in X, and increment their use counts.
1383 If INSN is a JUMP_INSN and there is at least one CODE_LABEL referenced
1384 in INSN, then store one of them in JUMP_LABEL (INSN).
1385 If INSN is an INSN or a CALL_INSN and there is at least one CODE_LABEL
1386 referenced in INSN, add a REG_LABEL note containing that label to INSN.
1387 Also, when there are consecutive labels, canonicalize on the last of them.
1389 Note that two labels separated by a loop-beginning note
1390 must be kept distinct if we have not yet done loop-optimization,
1391 because the gap between them is where loop-optimize
1392 will want to move invariant code to. CROSS_JUMP tells us
1393 that loop-optimization is done with. */
1396 mark_jump_label (x, insn, in_mem)
1401 RTX_CODE code = GET_CODE (x);
1424 /* If this is a constant-pool reference, see if it is a label. */
1425 if (CONSTANT_POOL_ADDRESS_P (x))
1426 mark_jump_label (get_pool_constant (x), insn, in_mem);
/* LABEL_REF handling: resolve the referenced label and record the use.  */
1431 rtx label = XEXP (x, 0);
1433 /* Ignore remaining references to unreachable labels that
1434 have been deleted. */
1435 if (GET_CODE (label) == NOTE
1436 && NOTE_LINE_NUMBER (label) == NOTE_INSN_DELETED_LABEL)
1439 if (GET_CODE (label) != CODE_LABEL)
1442 /* Ignore references to labels of containing functions. */
1443 if (LABEL_REF_NONLOCAL_P (x))
1446 XEXP (x, 0) = label;
1447 if (! insn || ! INSN_DELETED_P (insn))
1448 ++LABEL_NUSES (label);
1452 if (GET_CODE (insn) == JUMP_INSN)
1453 JUMP_LABEL (insn) = label;
1456 /* Add a REG_LABEL note for LABEL unless there already
1457 is one. All uses of a label, except for labels
1458 that are the targets of jumps, must have a
1460 if (! find_reg_note (insn, REG_LABEL, label))
1461 REG_NOTES (insn) = gen_rtx_INSN_LIST (REG_LABEL, label,
1468 /* Do walk the labels in a vector, but not the first operand of an
1469 ADDR_DIFF_VEC. Don't set the JUMP_LABEL of a vector. */
1472 if (! INSN_DELETED_P (insn))
1474 int eltnum = code == ADDR_DIFF_VEC ? 1 : 0;
1476 for (i = 0; i < XVECLEN (x, eltnum); i++)
1477 mark_jump_label (XVECEXP (x, eltnum, i), NULL_RTX, in_mem);
/* Generic recursion over the remaining sub-rtxes of X.  */
1485 fmt = GET_RTX_FORMAT (code);
1486 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1489 mark_jump_label (XEXP (x, i), insn, in_mem);
1490 else if (fmt[i] == 'E')
1493 for (j = 0; j < XVECLEN (x, i); j++)
1494 mark_jump_label (XVECEXP (x, i, j), insn, in_mem);
/* NOTE(review): the function header is missing from this fragment
   (line numbering skips 1501 -> 1507); presumably this is delete_jump --
   confirm against the full source.  */
1499 /* If all INSN does is set the pc, delete it,
1500 and delete the insn that set the condition codes for it
1501 if that's what the previous thing was. */
1507 rtx set = single_set (insn);
1509 if (set && GET_CODE (SET_DEST (set)) == PC)
1510 delete_computation (insn);
/* NOTE(review): truncated fragment -- the function's parameter line, the
   abort on non-BARRIER, and the delete_insn call are missing (line
   numbers skip 1519 -> 1525).  */
1513 /* Verify INSN is a BARRIER and delete it. */
1516 delete_barrier (insn)
1519 if (GET_CODE (insn) != BARRIER)
/* NOTE(review): truncated fragment -- declarations, braces, break
   statements, loop-exit conditions, and parts of the endregno
   computations are missing throughout (line numbers skip repeatedly).  */
1525 /* Recursively delete prior insns that compute the value (used only by INSN
1526 which the caller is deleting) stored in the register mentioned by NOTE
1527 which is a REG_DEAD note associated with INSN. */
1530 delete_prior_computation (note, insn)
1535 rtx reg = XEXP (note, 0);
/* Walk backwards over plain insns and calls (stopping at jumps, labels,
   etc.) looking for the insn that last set REG.  */
1537 for (our_prev = prev_nonnote_insn (insn);
1538 our_prev && (GET_CODE (our_prev) == INSN
1539 || GET_CODE (our_prev) == CALL_INSN);
1540 our_prev = prev_nonnote_insn (our_prev))
1542 rtx pat = PATTERN (our_prev);
1544 /* If we reach a CALL which is not calling a const function
1545 or the callee pops the arguments, then give up. */
1546 if (GET_CODE (our_prev) == CALL_INSN
1547 && (! CONST_OR_PURE_CALL_P (our_prev)
1548 || GET_CODE (pat) != SET || GET_CODE (SET_SRC (pat)) != CALL))
1551 /* If we reach a SEQUENCE, it is too complex to try to
1552 do anything with it, so give up. We can be run during
1553 and after reorg, so SEQUENCE rtl can legitimately show
1555 if (GET_CODE (pat) == SEQUENCE)
1558 if (GET_CODE (pat) == USE
1559 && GET_CODE (XEXP (pat, 0)) == INSN)
1560 /* reorg creates USEs that look like this. We leave them
1561 alone because reorg needs them for its own purposes. */
1564 if (reg_set_p (reg, pat))
1566 if (side_effects_p (pat) && GET_CODE (our_prev) != CALL_INSN)
1569 if (GET_CODE (pat) == PARALLEL)
1571 /* If we find a SET of something else, we can't
1576 for (i = 0; i < XVECLEN (pat, 0); i++)
1578 rtx part = XVECEXP (pat, 0, i);
1580 if (GET_CODE (part) == SET
1581 && SET_DEST (part) != reg)
/* Every SET in the PARALLEL targets REG: the whole insn is dead.  */
1585 if (i == XVECLEN (pat, 0))
1586 delete_computation (our_prev);
1588 else if (GET_CODE (pat) == SET
1589 && GET_CODE (SET_DEST (pat)) == REG)
1591 int dest_regno = REGNO (SET_DEST (pat));
1594 + (dest_regno < FIRST_PSEUDO_REGISTER
1595 ? HARD_REGNO_NREGS (dest_regno,
1596 GET_MODE (SET_DEST (pat))) : 1));
1597 int regno = REGNO (reg);
1600 + (regno < FIRST_PSEUDO_REGISTER
1601 ? HARD_REGNO_NREGS (regno, GET_MODE (reg)) : 1));
/* The set destination is entirely covered by the dead register: the
   setter is dead too.  */
1603 if (dest_regno >= regno
1604 && dest_endregno <= endregno)
1605 delete_computation (our_prev);
1607 /* We may have a multi-word hard register and some, but not
1608 all, of the words of the register are needed in subsequent
1609 insns. Write REG_UNUSED notes for those parts that were not
1611 else if (dest_regno <= regno
1612 && dest_endregno >= endregno)
1616 REG_NOTES (our_prev)
1617 = gen_rtx_EXPR_LIST (REG_UNUSED, reg,
1618 REG_NOTES (our_prev));
/* If every word of the destination now carries a REG_UNUSED note, the
   whole setter can go.  */
1620 for (i = dest_regno; i < dest_endregno; i++)
1621 if (! find_regno_note (our_prev, REG_UNUSED, i))
1624 if (i == dest_endregno)
1625 delete_computation (our_prev);
1632 /* If PAT references the register that dies here, it is an
1633 additional use. Hence any prior SET isn't dead. However, this
1634 insn becomes the new place for the REG_DEAD note. */
1635 if (reg_overlap_mentioned_p (reg, pat))
1637 XEXP (note, 1) = REG_NOTES (our_prev);
1638 REG_NOTES (our_prev) = note;
1644 /* Delete INSN and recursively delete insns that compute values used only
1645 by INSN. This uses the REG_DEAD notes computed during flow analysis.
1646 If we are running before flow.c, we need do nothing since flow.c will
1647 delete dead code. We also can't know if the registers being used are
1648 dead or not at this point.
1650 Otherwise, look at all our REG_DEAD notes. If a previous insn does
1651 nothing other than set a register that dies in this insn, we can delete
1654 On machines with CC0, if CC0 is used in this insn, we may be able to
1655 delete the insn that set it. */
/* NOTE(review): this is a numbered excerpt; lines elided from the listing
   (function header boilerplate, braces, local declarations such as the
   `note'/`next' variables and early returns) are not shown here.  */
1658 delete_computation (insn)
/* CC0 machines only: if this insn consumes cc0, try to delete (or mark
   unused) the insn that set it.  */
1664 if (reg_referenced_p (cc0_rtx, PATTERN (insn)))
1666 rtx prev = prev_nonnote_insn (insn);
1667 /* We assume that at this stage
1668 CC's are always set explicitly
1669 and always immediately before the jump that
1670 will use them. So if the previous insn
1671 exists to set the CC's, delete it
1672 (unless it performs auto-increments, etc.). */
1673 if (prev && GET_CODE (prev) == INSN
1674 && sets_cc0_p (PATTERN (prev)))
/* sets_cc0_p > 0 means PREV sets ONLY cc0, so it is entirely dead once
   this insn goes away -- but only if it has no other side effects.  */
1676 if (sets_cc0_p (PATTERN (prev)) > 0
1677 && ! side_effects_p (PATTERN (prev)))
1678 delete_computation (prev);
1680 /* Otherwise, show that cc0 won't be used. */
1681 REG_NOTES (prev) = gen_rtx_EXPR_LIST (REG_UNUSED,
1682 cc0_rtx, REG_NOTES (prev));
/* Walk the REG_DEAD notes; each names a register whose last use is this
   insn, so its prior computation may now be deletable.  NEXT is captured
   first because delete_prior_computation can splice notes.  */
1687 for (note = REG_NOTES (insn); note; note = next)
1689 next = XEXP (note, 1);
1691 if (REG_NOTE_KIND (note) != REG_DEAD
1692 /* Verify that the REG_NOTE is legitimate. */
1693 || GET_CODE (XEXP (note, 0)) != REG)
1696 delete_prior_computation (note, insn);
/* Finally remove INSN itself (and anything made unreachable by that).  */
1699 delete_related_insns (insn);
1702 /* Delete insn INSN from the chain of insns and update label ref counts
1703 and delete insns now unreachable.
1705 Returns the first insn after INSN that was not deleted.
1707 Usage of this instruction is deprecated. Use delete_insn instead and
1708 subsequent cfg_cleanup pass to delete unreachable code if needed. */
/* NOTE(review): numbered excerpt -- braces, some declarations and returns
   are elided from this listing; code lines are kept byte-identical.  */
1711 delete_related_insns (insn)
1714 int was_code_label = (GET_CODE (insn) == CODE_LABEL);
1716 rtx next = NEXT_INSN (insn), prev = PREV_INSN (insn);
/* Skip forward over already-deleted insns to find the live successor.  */
1718 while (next && INSN_DELETED_P (next))
1719 next = NEXT_INSN (next);
1721 /* This insn is already deleted => return first following nondeleted. */
1722 if (INSN_DELETED_P (insn))
1727 /* If instruction is followed by a barrier,
1728 delete the barrier too. */
1730 if (next != 0 && GET_CODE (next) == BARRIER)
1733 /* If deleting a jump, decrement the count of the label,
1734 and delete the label if it is now unused. */
1736 if (GET_CODE (insn) == JUMP_INSN && JUMP_LABEL (insn))
1738 rtx lab = JUMP_LABEL (insn), lab_next;
1740 if (LABEL_NUSES (lab) == 0)
1742 /* This can delete NEXT or PREV,
1743 either directly if NEXT is JUMP_LABEL (INSN),
1744 or indirectly through more levels of jumps. */
1745 delete_related_insns (lab);
1747 /* I feel a little doubtful about this loop,
1748 but I see no clean and sure alternative way
1749 to find the first insn after INSN that is not now deleted.
1750 I hope this works. */
1751 while (next && INSN_DELETED_P (next))
1752 next = NEXT_INSN (next);
1755 else if (tablejump_p (insn, NULL, &lab_next))
1757 /* If we're deleting the tablejump, delete the dispatch table.
1758 We may not be able to kill the label immediately preceding
1759 just yet, as it might be referenced in code leading up to
1761 delete_related_insns (lab_next);
1765 /* Likewise if we're deleting a dispatch table. */
1767 if (GET_CODE (insn) == JUMP_INSN
1768 && (GET_CODE (PATTERN (insn)) == ADDR_VEC
1769 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC))
1771 rtx pat = PATTERN (insn);
1772 int i, diff_vec_p = GET_CODE (pat) == ADDR_DIFF_VEC;
1773 int len = XVECLEN (pat, diff_vec_p);
/* Drop each label whose only reference was this table.  */
1775 for (i = 0; i < len; i++)
1776 if (LABEL_NUSES (XEXP (XVECEXP (pat, diff_vec_p, i), 0)) == 0)
1777 delete_related_insns (XEXP (XVECEXP (pat, diff_vec_p, i), 0));
1778 while (next && INSN_DELETED_P (next))
1779 next = NEXT_INSN (next);
1783 /* Likewise for an ordinary INSN / CALL_INSN with a REG_LABEL note. */
1784 if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN)
1785 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
1786 if (REG_NOTE_KIND (note) == REG_LABEL
1787 /* This could also be a NOTE_INSN_DELETED_LABEL note. */
1788 && GET_CODE (XEXP (note, 0)) == CODE_LABEL)
1789 if (LABEL_NUSES (XEXP (note, 0)) == 0)
1790 delete_related_insns (XEXP (note, 0));
/* Back up over deleted insns and notes to find the real predecessor,
   used below to detect a preceding BARRIER.  */
1792 while (prev && (INSN_DELETED_P (prev) || GET_CODE (prev) == NOTE))
1793 prev = PREV_INSN (prev);
1795 /* If INSN was a label and a dispatch table follows it,
1796 delete the dispatch table. The tablejump must have gone already.
1797 It isn't useful to fall through into a table. */
1800 && NEXT_INSN (insn) != 0
1801 && GET_CODE (NEXT_INSN (insn)) == JUMP_INSN
1802 && (GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_VEC
1803 || GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_DIFF_VEC))
1804 next = delete_related_insns (NEXT_INSN (insn));
1806 /* If INSN was a label, delete insns following it if now unreachable. */
1808 if (was_code_label && prev && GET_CODE (prev) == BARRIER)
1812 && (GET_RTX_CLASS (code = GET_CODE (next)) == 'i'
1813 || code == NOTE || code == BARRIER
1814 || (code == CODE_LABEL && INSN_DELETED_P (next))))
/* Preserve the FUNCTION_END note even inside unreachable code; other
   notes are simply stepped over.  */
1817 && NOTE_LINE_NUMBER (next) != NOTE_INSN_FUNCTION_END)
1818 next = NEXT_INSN (next);
1819 /* Keep going past other deleted labels to delete what follows. */
1820 else if (code == CODE_LABEL && INSN_DELETED_P (next))
1821 next = NEXT_INSN (next);
1823 /* Note: if this deletes a jump, it can cause more
1824 deletion of unreachable code, after a different label.
1825 As long as the value from this recursive call is correct,
1826 this invocation functions correctly. */
1827 next = delete_related_insns (next);
1834 /* Delete a range of insns from FROM to TO, inclusive.
1835 This is for the sake of peephole optimization, so assume
1836 that whatever these insns do will still be done by a new
1837 peephole insn that will replace them. */
/* NOTE(review): numbered excerpt -- the function header, loop framing and
   closing braces are elided from this listing; comments only added.  */
1840 delete_for_peephole (from, to)
1847 rtx next = NEXT_INSN (insn);
1848 rtx prev = PREV_INSN (insn);
/* NOTEs are deliberately preserved; only real insns are unlinked.  */
1850 if (GET_CODE (insn) != NOTE)
1852 INSN_DELETED_P (insn) = 1;
1854 /* Patch this insn out of the chain. */
1855 /* We don't do this all at once, because we
1856 must preserve all NOTEs. */
1858 NEXT_INSN (prev) = next;
1861 PREV_INSN (next) = prev;
1869 /* Note that if TO is an unconditional jump
1870 we *do not* delete the BARRIER that follows,
1871 since the peephole that replaces this sequence
1872 is also an unconditional jump in that case. */
1875 /* We have determined that AVOIDED_INSN is never reached, and are
1876 about to delete it. If the insn chain between AVOIDED_INSN and
1877 FINISH contains more than one line from the current function, and
1878 contains at least one operation, print a warning if the user asked
1879 for it. If FINISH is NULL, look between AVOIDED_INSN and a LABEL.
1881 CSE and inlining can duplicate insns, so it's possible to get
1882 spurious warnings from this. */
/* NOTE(review): numbered excerpt -- some declarations, loop framing and
   the locus declaration are elided from this listing.  */
1885 never_reached_warning (avoided_insn, finish)
1886 rtx avoided_insn, finish;
1889 rtx a_line_note = NULL;
1890 int two_avoided_lines = 0, contains_insn = 0, reached_end = 0;
/* Nothing to do unless -Wunreachable-code was given.  */
1892 if (!warn_notreached)
1895 /* Back up to the first of any NOTEs preceding avoided_insn; flow passes
1896 us the head of a block, a NOTE_INSN_BASIC_BLOCK, which often follows
1898 insn = avoided_insn;
1901 rtx prev = PREV_INSN (insn);
1902 if (prev == NULL_RTX
1903 || GET_CODE (prev) != NOTE)
1908 /* Scan forwards, looking at LINE_NUMBER notes, until we hit a LABEL
1909 in case FINISH is NULL, otherwise until we run out of insns. */
1911 for (; insn != NULL; insn = NEXT_INSN (insn))
1913 if ((finish == NULL && GET_CODE (insn) == CODE_LABEL)
1914 || GET_CODE (insn) == BARRIER)
/* Non-negative NOTE_LINE_NUMBER marks a source line-number note.  */
1917 if (GET_CODE (insn) == NOTE /* A line number note? */
1918 && NOTE_LINE_NUMBER (insn) >= 0)
1920 if (a_line_note == NULL)
1923 two_avoided_lines |= (NOTE_LINE_NUMBER (a_line_note)
1924 != NOTE_LINE_NUMBER (insn));
1926 else if (INSN_P (insn))
/* Warn only when the dead range spans more than one source line AND
   contains a real operation -- avoids noise on empty ranges.  */
1936 if (two_avoided_lines && contains_insn)
1939 locus.file = NOTE_SOURCE_FILE (a_line_note);
1940 locus.line = NOTE_LINE_NUMBER (a_line_note);
1941 warning ("%Hwill never be executed", &locus);
1945 /* Throughout LOC, redirect OLABEL to NLABEL. Treat null OLABEL or
1946 NLABEL as a return. Accrue modifications into the change group. */
/* NOTE(review): numbered excerpt -- parameter declarations, local
   declarations and some braces are elided from this listing.  */
1949 redirect_exp_1 (loc, olabel, nlabel, insn)
1955 RTX_CODE code = GET_CODE (x);
/* A LABEL_REF to OLABEL becomes a ref to NLABEL, or RETURN if NLABEL
   is null.  Changes are queued via validate_change, not applied.  */
1959 if (code == LABEL_REF)
1961 if (XEXP (x, 0) == olabel)
1965 n = gen_rtx_LABEL_REF (VOIDmode, nlabel);
1967 n = gen_rtx_RETURN (VOIDmode);
1969 validate_change (insn, loc, n, 1);
/* The inverse direction: a RETURN with null OLABEL becomes a jump to
   NLABEL; wrap in a SET of pc when replacing the whole pattern.  */
1973 else if (code == RETURN && olabel == 0)
1975 x = gen_rtx_LABEL_REF (VOIDmode, nlabel);
1976 if (loc == &PATTERN (insn))
1977 x = gen_rtx_SET (VOIDmode, pc_rtx, x);
1978 validate_change (insn, loc, x, 1);
1982 if (code == SET && nlabel == 0 && SET_DEST (x) == pc_rtx
1983 && GET_CODE (SET_SRC (x)) == LABEL_REF
1984 && XEXP (SET_SRC (x), 0) == olabel)
1986 validate_change (insn, loc, gen_rtx_RETURN (VOIDmode), 1);
/* Recurse over all sub-rtxes ('e') and rtx vectors ('E').  */
1990 fmt = GET_RTX_FORMAT (code);
1991 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1994 redirect_exp_1 (&XEXP (x, i), olabel, nlabel, insn);
1995 else if (fmt[i] == 'E')
1998 for (j = 0; j < XVECLEN (x, i); j++)
1999 redirect_exp_1 (&XVECEXP (x, i, j), olabel, nlabel, insn);
2004 /* Similar, but apply the change group and report success or failure. */
/* NOTE(review): numbered excerpt -- parameter and local declarations are
   elided from this listing.  */
2007 redirect_exp (olabel, nlabel, insn)
/* For a PARALLEL, only the first element holds the jump SET.  */
2013 if (GET_CODE (PATTERN (insn)) == PARALLEL)
2014 loc = &XVECEXP (PATTERN (insn), 0, 0);
2016 loc = &PATTERN (insn);
2018 redirect_exp_1 (loc, olabel, nlabel, insn);
/* No queued changes means nothing matched OLABEL.  */
2019 if (num_validated_changes () == 0)
2022 return apply_change_group ();
2025 /* Make JUMP go to NLABEL instead of where it jumps now. Accrue
2026 the modifications into the change group. Return false if we did
2027 not see how to do that. */
/* NOTE(review): numbered excerpt -- parameter declarations are elided
   from this listing.  */
2030 redirect_jump_1 (jump, nlabel)
2033 int ochanges = num_validated_changes ();
2036 if (GET_CODE (PATTERN (jump)) == PARALLEL)
2037 loc = &XVECEXP (PATTERN (jump), 0, 0);
2039 loc = &PATTERN (jump);
2041 redirect_exp_1 (loc, JUMP_LABEL (jump), nlabel, jump);
/* Success iff redirect_exp_1 queued at least one new change; the group
   is left pending for the caller to apply or cancel.  */
2042 return num_validated_changes () > ochanges;
2045 /* Make JUMP go to NLABEL instead of where it jumps now. If the old
2046 jump target label is unused as a result, it and the code following
2049 If NLABEL is zero, we are to turn the jump into a (possibly conditional)
2052 The return value will be 1 if the change was made, 0 if it wasn't
2053 (this can only occur for NLABEL == 0). */
/* NOTE(review): numbered excerpt -- parameter declarations, some braces
   and return statements are elided from this listing.  */
2056 redirect_jump (jump, nlabel, delete_unused)
2060 rtx olabel = JUMP_LABEL (jump);
/* Redirecting to the current target is trivially done.  */
2063 if (nlabel == olabel)
2066 if (! redirect_exp (olabel, nlabel, jump))
/* Keep JUMP_LABEL and the label use counts in sync with the new RTL.  */
2069 JUMP_LABEL (jump) = nlabel;
2071 ++LABEL_NUSES (nlabel);
2073 /* Update labels in any REG_EQUAL note. */
2074 if ((note = find_reg_note (jump, REG_EQUAL, NULL_RTX)) != NULL_RTX)
2076 if (nlabel && olabel)
2078 rtx dest = XEXP (note, 0);
2080 if (GET_CODE (dest) == IF_THEN_ELSE)
2082 if (GET_CODE (XEXP (dest, 1)) == LABEL_REF
2083 && XEXP (XEXP (dest, 1), 0) == olabel)
2084 XEXP (XEXP (dest, 1), 0) = nlabel;
2085 if (GET_CODE (XEXP (dest, 2)) == LABEL_REF
2086 && XEXP (XEXP (dest, 2), 0) == olabel)
2087 XEXP (XEXP (dest, 2), 0) = nlabel;
/* A note we can't rewrite (not IF_THEN_ELSE, or turning into/out of a
   RETURN) is simply dropped rather than left stale.  */
2090 remove_note (jump, note);
2093 remove_note (jump, note);
2096 /* If we're eliding the jump over exception cleanups at the end of a
2097 function, move the function end note so that -Wreturn-type works. */
2098 if (olabel && nlabel
2099 && NEXT_INSN (olabel)
2100 && GET_CODE (NEXT_INSN (olabel)) == NOTE
2101 && NOTE_LINE_NUMBER (NEXT_INSN (olabel)) == NOTE_INSN_FUNCTION_END)
2102 emit_note_after (NOTE_INSN_FUNCTION_END, nlabel);
2104 if (olabel && --LABEL_NUSES (olabel) == 0 && delete_unused
2105 /* Undefined labels will remain outside the insn stream. */
2106 && INSN_UID (olabel))
2107 delete_related_insns (olabel);
2112 /* Invert the jump condition of rtx X contained in jump insn, INSN.
2113 Accrue the modifications into the change group. */
/* NOTE(review): numbered excerpt -- the function definition line (presumably
   invert_exp_1) and several declarations are elided from this listing.  */
2120 rtx x = pc_set (insn);
/* Operate on the condition of the pc SET's IF_THEN_ELSE source.  */
2126 code = GET_CODE (x);
2128 if (code == IF_THEN_ELSE)
2130 rtx comp = XEXP (x, 0);
2132 enum rtx_code reversed_code;
2134 /* We can do this in two ways: The preferable way, which can only
2135 be done if this is not an integer comparison, is to reverse
2136 the comparison code. Otherwise, swap the THEN-part and ELSE-part
2137 of the IF_THEN_ELSE. If we can't do either, fail. */
2139 reversed_code = reversed_comparison_code (comp, insn);
2141 if (reversed_code != UNKNOWN)
2143 validate_change (insn, &XEXP (x, 0),
2144 gen_rtx_fmt_ee (reversed_code,
2145 GET_MODE (comp), XEXP (comp, 0),
/* Fallback: swap the THEN and ELSE arms instead of reversing the
   comparison (TEM presumably holds the saved THEN arm).  */
2152 validate_change (insn, &XEXP (x, 1), XEXP (x, 2), 1);
2153 validate_change (insn, &XEXP (x, 2), tem, 1);
2159 /* Invert the jump condition of conditional jump insn, INSN.
2161 Return 1 if we can do so, 0 if we cannot find a way to do so that
2162 matches a pattern. */
/* NOTE(review): numbered excerpt -- the function definition line (presumably
   invert_exp) is elided from this listing.  */
2168 invert_exp_1 (insn);
/* Nothing queued means invert_exp_1 found no way to invert.  */
2169 if (num_validated_changes () == 0)
2172 return apply_change_group ();
2175 /* Invert the condition of the jump JUMP, and make it jump to label
2176 NLABEL instead of where it jumps now. Accrue changes into the
2177 change group. Return false if we didn't see how to perform the
2178 inversion and redirection. */
/* NOTE(review): numbered excerpt -- parameter and local declarations are
   elided from this listing.  */
2181 invert_jump_1 (jump, nlabel)
2186 ochanges = num_validated_changes ();
2187 invert_exp_1 (jump);
/* Inversion must queue at least one change before we try redirecting.  */
2188 if (num_validated_changes () == ochanges)
2191 return redirect_jump_1 (jump, nlabel);
2194 /* Invert the condition of the jump JUMP, and make it jump to label
2195 NLABEL instead of where it jumps now. Return true if successful. */
/* NOTE(review): numbered excerpt -- parameter declarations, some braces
   and return statements are elided from this listing.  */
2198 invert_jump (jump, nlabel, delete_unused)
2202 /* We have to either invert the condition and change the label or
2203 do neither. Either operation could fail. We first try to invert
2204 the jump. If that succeeds, we try changing the label. If that fails,
2205 we invert the jump back to what it was. */
2207 if (! invert_exp (jump))
2210 if (redirect_jump (jump, nlabel, delete_unused))
2212 /* Remove REG_EQUAL note if we have one. */
2213 rtx note = find_reg_note (jump, REG_EQUAL, NULL_RTX);
2215 remove_note (jump, note);
/* Condition was inverted, so swap the taken/not-taken probabilities.  */
2217 invert_br_probabilities (jump);
/* Redirection failed: undo the inversion to restore the original jump.  */
2222 if (! invert_exp (jump))
2223 /* This should just be putting it back the way it was. */
2230 /* Like rtx_equal_p except that it considers two REGs as equal
2231 if they renumber to the same value and considers two commutative
2232 operations to be the same if the order of the operands has been
2235 ??? Addition is not commutative on the PA due to the weird implicit
2236 space register selection rules for memory addresses. Therefore, we
2237 don't consider a + b == b + a.
2239 We could/should make this test a little tighter. Possibly only
2240 disabling it on the PA via some backend macro or only disabling this
2241 case when the PLUS is inside a MEM. */
/* NOTE(review): numbered excerpt -- parameter declarations, some braces,
   switch framing and return statements are elided from this listing.  */
2244 rtx_renumbered_equal_p (x, y)
2248 RTX_CODE code = GET_CODE (x);
/* REG/SUBREG-of-REG pairs: resolve both sides through reg_renumber to
   hard register numbers (plus subreg byte offset) and compare those.  */
2254 if ((code == REG || (code == SUBREG && GET_CODE (SUBREG_REG (x)) == REG))
2255 && (GET_CODE (y) == REG || (GET_CODE (y) == SUBREG
2256 && GET_CODE (SUBREG_REG (y)) == REG)))
2258 int reg_x = -1, reg_y = -1;
2259 int byte_x = 0, byte_y = 0;
2261 if (GET_MODE (x) != GET_MODE (y))
2264 /* If we haven't done any renumbering, don't
2265 make any assumptions. */
2266 if (reg_renumber == 0)
2267 return rtx_equal_p (x, y);
2271 reg_x = REGNO (SUBREG_REG (x));
2272 byte_x = SUBREG_BYTE (x);
2274 if (reg_renumber[reg_x] >= 0)
2276 reg_x = subreg_regno_offset (reg_renumber[reg_x],
2277 GET_MODE (SUBREG_REG (x)),
2286 if (reg_renumber[reg_x] >= 0)
2287 reg_x = reg_renumber[reg_x];
2290 if (GET_CODE (y) == SUBREG)
2292 reg_y = REGNO (SUBREG_REG (y));
2293 byte_y = SUBREG_BYTE (y);
2295 if (reg_renumber[reg_y] >= 0)
2297 reg_y = subreg_regno_offset (reg_renumber[reg_y],
2298 GET_MODE (SUBREG_REG (y)),
2307 if (reg_renumber[reg_y] >= 0)
2308 reg_y = reg_renumber[reg_y];
2311 return reg_x >= 0 && reg_x == reg_y && byte_x == byte_y;
2314 /* Now we have disposed of all the cases
2315 in which different rtx codes can match. */
2316 if (code != GET_CODE (y))
2329 /* We can't assume nonlocal labels have their following insns yet. */
2330 if (LABEL_REF_NONLOCAL_P (x) || LABEL_REF_NONLOCAL_P (y))
2331 return XEXP (x, 0) == XEXP (y, 0);
2333 /* Two label-refs are equivalent if they point at labels
2334 in the same position in the instruction stream. */
2335 return (next_real_insn (XEXP (x, 0))
2336 == next_real_insn (XEXP (y, 0)));
2339 return XSTR (x, 0) == XSTR (y, 0);
2342 /* If we didn't match EQ equality above, they aren't the same. */
2349 /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent. */
2351 if (GET_MODE (x) != GET_MODE (y))
2354 /* For commutative operations, the RTX match if the operand match in any
2355 order. Also handle the simple binary and unary cases without a loop.
2357 ??? Don't consider PLUS a commutative operator; see comments above. */
2358 if ((code == EQ || code == NE || GET_RTX_CLASS (code) == 'c')
2360 return ((rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0))
2361 && rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 1)))
2362 || (rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 1))
2363 && rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 0))));
2364 else if (GET_RTX_CLASS (code) == '<' || GET_RTX_CLASS (code) == '2')
2365 return (rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0))
2366 && rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 1)));
2367 else if (GET_RTX_CLASS (code) == '1')
2368 return rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0));
2370 /* Compare the elements. If any pair of corresponding elements
2371 fail to match, return 0 for the whole things. */
/* Generic fallback: walk the rtx format string and compare each slot
   by its kind (wide int, int, tree, string, rtx, vector).  */
2373 fmt = GET_RTX_FORMAT (code);
2374 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2380 if (XWINT (x, i) != XWINT (y, i))
2385 if (XINT (x, i) != XINT (y, i))
2390 if (XTREE (x, i) != XTREE (y, i))
2395 if (strcmp (XSTR (x, i), XSTR (y, i)))
2400 if (! rtx_renumbered_equal_p (XEXP (x, i), XEXP (y, i)))
2405 if (XEXP (x, i) != XEXP (y, i))
2412 if (XVECLEN (x, i) != XVECLEN (y, i))
2414 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2415 if (!rtx_renumbered_equal_p (XVECEXP (x, i, j), XVECEXP (y, i, j)))
2426 /* If X is a hard register or equivalent to one or a subregister of one,
2427 return the hard register number. If X is a pseudo register that was not
2428 assigned a hard register, return the pseudo register number. Otherwise,
2429 return -1. Any rtx is valid for X. */
/* NOTE(review): numbered excerpt -- the function definition line (presumably
   true_regnum) and some return statements are elided from this listing.  */
2435 if (GET_CODE (x) == REG)
/* Pseudo with a hard-register assignment: return the renumbered regno.  */
2437 if (REGNO (x) >= FIRST_PSEUDO_REGISTER && reg_renumber[REGNO (x)] >= 0)
2438 return reg_renumber[REGNO (x)];
2441 if (GET_CODE (x) == SUBREG)
/* Resolve the inner reg first; only hard registers get an offset
   adjustment for the subreg byte.  */
2443 int base = true_regnum (SUBREG_REG (x));
2444 if (base >= 0 && base < FIRST_PSEUDO_REGISTER)
2445 return base + subreg_regno_offset (REGNO (SUBREG_REG (x)),
2446 GET_MODE (SUBREG_REG (x)),
2447 SUBREG_BYTE (x), GET_MODE (x));
2452 /* Return regno of the register REG and handle subregs too. */
2454 reg_or_subregno (reg)
2459 if (GET_CODE (reg) == SUBREG)
2460 return REGNO (SUBREG_REG (reg));