1 /* Optimize jump instructions, for GNU compiler.
2 Copyright (C) 1987, 1988, 1989, 1991, 1992, 1993, 1994, 1995, 1996, 1997
3 1998, 1999, 2000, 2001 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
22 /* This is the pathetic reminder of old fame of the jump-optimization pass
23 of the compiler. Now it contains basically set of utility function to
26 Each CODE_LABEL has a count of the times it is used
27 stored in the LABEL_NUSES internal field, and each JUMP_INSN
28 has one label that it refers to stored in the
29 JUMP_LABEL internal field. With this we can detect labels that
30 become unused because of the deletion of all the jumps that
31 formerly used them. The JUMP_LABEL info is sometimes looked
34 The subroutines delete_insn, redirect_jump, and invert_jump are used
35 from other passes as well. */
42 #include "hard-reg-set.h"
44 #include "insn-config.h"
45 #include "insn-attr.h"
55 /* Optimize jump y; x: ... y: jumpif... x?
56 Don't know if it is worth bothering with. */
57 /* Optimize two cases of conditional jump to conditional jump?
58 This can never delete any instruction or make anything dead,
59 or even change what is live at any point.
60 So perhaps let combiner do it. */
62 static int init_label_info PARAMS ((rtx));
63 static void mark_all_labels PARAMS ((rtx));
64 static int duplicate_loop_exit_test PARAMS ((rtx));
65 static void delete_computation PARAMS ((rtx));
66 static void redirect_exp_1 PARAMS ((rtx *, rtx, rtx, rtx));
67 static int redirect_exp PARAMS ((rtx, rtx, rtx));
68 static void invert_exp_1 PARAMS ((rtx));
69 static int invert_exp PARAMS ((rtx));
70 static int returnjump_p_1 PARAMS ((rtx *, void *));
71 static void delete_prior_computation PARAMS ((rtx, rtx));
72 static void mark_modified_reg PARAMS ((rtx, rtx, void *));
74 /* Alternate entry into the jump optimizer. This entry point only rebuilds
75 the JUMP_LABEL field in jumping insns and REG_LABEL notes in non-jumping
/* NOTE(review): the embedded original line numbers skip here (75 -> 78,
   78 -> 84, ...), so this fragment is missing interior lines (return type,
   local declarations, braces, and at least one call).  Documented as-is;
   do not assume the fragment is complete. */
78 rebuild_jump_labels (f)
/* Establish label use counts and clear stale JUMP_LABELs first; the
   returned largest uid plus one bounds any uid-indexed table. */
84 max_uid = init_label_info (f) + 1;
88 /* Keep track of labels used from static data; we don't track them
89 closely enough to delete them here, so make sure their reference
90 count doesn't drop to zero. */
92 for (insn = forced_labels; insn; insn = XEXP (insn, 1))
93 if (GET_CODE (XEXP (insn, 0)) == CODE_LABEL)
94 LABEL_NUSES (XEXP (insn, 0))++;
96 /* Keep track of labels used for marking handlers for exception
97 regions; they cannot usually be deleted. */
99 for (insn = exception_handler_labels; insn; insn = XEXP (insn, 1))
100 if (GET_CODE (XEXP (insn, 0)) == CODE_LABEL)
101 LABEL_NUSES (XEXP (insn, 0))++;
104 /* Some old code expects exactly one BARRIER as the NEXT_INSN of a
105 non-fallthru insn. This is not generally true, as multiple barriers
106 may have crept in, or the BARRIER may be separated from the last
107 real insn by one or more NOTEs.
109 This simple pass moves barriers and removes duplicates so that the
/* NOTE(review): the function signature is missing from this extraction
   (numbering skips 109 -> 115) -- presumably this is cleanup_barriers;
   confirm against the full source. */
115 rtx insn, next, prev;
116 for (insn = get_insns (); insn; insn = next)
/* Cache the successor before any deletion/reordering invalidates it. */
118 next = NEXT_INSN (insn);
119 if (GET_CODE (insn) == BARRIER)
121 prev = prev_nonnote_insn (insn);
/* Two barriers back to back: the second is redundant. */
122 if (GET_CODE (prev) == BARRIER)
123 delete_barrier (insn);
/* Otherwise pull the barrier up flush against the last real insn. */
124 else if (prev != PREV_INSN (insn))
125 reorder_insns (insn, insn, prev);
/* NOTE(review): fragment -- return type, braces and several statements are
   missing from this extraction (embedded numbering skips).  Duplicates the
   loop exit test ahead of each loop entry where profitable. */
131 copy_loop_headers (f)
135 /* Now iterate optimizing jumps until nothing changes over one pass. */
136 for (insn = f; insn; insn = next)
140 next = NEXT_INSN (insn);
142 /* See if this is a NOTE_INSN_LOOP_BEG followed by an unconditional
143 jump. Try to optimize by duplicating the loop exit test if so.
144 This is only safe immediately after regscan, because it uses
145 the values of regno_first_uid and regno_last_uid. */
146 if (GET_CODE (insn) == NOTE
147 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG
148 && (temp1 = next_nonnote_insn (insn)) != 0
149 && any_uncondjump_p (temp1) && onlyjump_p (temp1))
/* Remember the insn before the note so the rescan can resume there
   after duplicate_loop_exit_test rewrites this region. */
151 temp = PREV_INSN (insn);
152 if (duplicate_loop_exit_test (insn))
154 next = NEXT_INSN (temp);
/* NOTE(review): fragment -- declarations of `insn'/`last_note', braces and
   the note-recording statement are missing from this extraction. */
161 purge_line_number_notes (f)
166 /* Delete extraneous line number notes.
167 Note that two consecutive notes for different lines are not really
168 extraneous. There should be some indication where that line belonged,
169 even if it became empty. */
171 for (insn = f; insn; insn = NEXT_INSN (insn))
172 if (GET_CODE (insn) == NOTE)
174 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_FUNCTION_BEG)
175 /* Any previous line note was for the prologue; gdb wants a new
176 note after the prologue even if it is for the same line. */
177 last_note = NULL_RTX;
178 else if (NOTE_LINE_NUMBER (insn) >= 0)
180 /* Delete this note if it is identical to previous note. */
/* (The first conjunct of this condition is lost to the extraction;
   visibly it compares file and line against last_note.) */
182 && NOTE_SOURCE_FILE (insn) == NOTE_SOURCE_FILE (last_note)
183 && NOTE_LINE_NUMBER (insn) == NOTE_LINE_NUMBER (last_note))
185 delete_related_insns (insn);
194 /* Initialize LABEL_NUSES and JUMP_LABEL fields. Delete any REG_LABEL
195 notes whose labels don't occur in the insn any more. Returns the
196 largest INSN_UID found. */
/* NOTE(review): fragment -- the signature, declarations and `return
   largest_uid;' are missing from this extraction (numbering skips
   196 -> 204). */
204 for (insn = f; insn; insn = NEXT_INSN (insn))
/* Preserved labels start with a use count of 1 so they are never
   garbage-collected; all others start at 0. */
206 if (GET_CODE (insn) == CODE_LABEL)
207 LABEL_NUSES (insn) = (LABEL_PRESERVE_P (insn) != 0);
208 else if (GET_CODE (insn) == JUMP_INSN)
209 JUMP_LABEL (insn) = 0;
210 else if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN)
/* Drop REG_LABEL notes whose label no longer appears in the pattern. */
214 for (note = REG_NOTES (insn); note; note = next)
216 next = XEXP (note, 1);
217 if (REG_NOTE_KIND (note) == REG_LABEL
218 && ! reg_mentioned_p (XEXP (note, 0), PATTERN (insn)))
219 remove_note (insn, note);
222 if (INSN_UID (insn) > largest_uid)
223 largest_uid = INSN_UID (insn);
229 /* Mark the label each jump jumps to.
230 Combine consecutive labels, and count uses of labels. */
/* NOTE(review): fragment -- the signature, INSN_P guard, braces and
   several statements are missing from this extraction (numbering skips
   230 -> 238, 246 -> 248, 255 -> 261, ...). */
238 for (insn = f; insn; insn = NEXT_INSN (insn))
/* A CALL_PLACEHOLDER wraps three alternative insn sequences; recurse
   into each of them. */
241 if (GET_CODE (insn) == CALL_INSN
242 && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
244 mark_all_labels (XEXP (PATTERN (insn), 0));
245 mark_all_labels (XEXP (PATTERN (insn), 1));
246 mark_all_labels (XEXP (PATTERN (insn), 2));
248 /* Canonicalize the tail recursion label attached to the
249 CALL_PLACEHOLDER insn. */
250 if (XEXP (PATTERN (insn), 3))
252 rtx label_ref = gen_rtx_LABEL_REF (VOIDmode,
253 XEXP (PATTERN (insn), 3));
254 mark_jump_label (label_ref, insn, 0);
255 XEXP (PATTERN (insn), 3) = XEXP (label_ref, 0);
261 mark_jump_label (PATTERN (insn), insn, 0);
262 if (! INSN_DELETED_P (insn) && GET_CODE (insn) == JUMP_INSN)
264 /* When we know the LABEL_REF contained in a REG used in
265 an indirect jump, we'll have a REG_LABEL note so that
266 flow can tell where it's going. */
267 if (JUMP_LABEL (insn) == 0)
269 rtx label_note = find_reg_note (insn, REG_LABEL, NULL_RTX);
272 /* But a LABEL_REF around the REG_LABEL note, so
273 that we can canonicalize it. */
274 rtx label_ref = gen_rtx_LABEL_REF (VOIDmode,
275 XEXP (label_note, 0));
277 mark_jump_label (label_ref, insn, 0);
278 XEXP (label_note, 0) = XEXP (label_ref, 0);
279 JUMP_LABEL (insn) = XEXP (label_note, 0);
286 /* LOOP_START is a NOTE_INSN_LOOP_BEG note that is followed by an unconditional
287 jump. Assume that this unconditional jump is to the exit test code. If
288 the code is sufficiently simple, make a copy of it before INSN,
289 followed by a jump to the exit of the loop. Then delete the unconditional
292 Return 1 if we made the change, else 0.
294 This is only safe immediately after a regscan pass because it uses the
295 values of regno_first_uid and regno_last_uid. */
/* NOTE(review): large fragment -- roughly half the original lines are
   missing from this extraction (return type, the switch-case labels,
   several `break'/`return 0;' statements, `lastexit'/`reg_map'
   declarations, closing braces).  Documented as-is. */
298 duplicate_loop_exit_test (loop_start)
301 rtx insn, set, reg, p, link;
302 rtx copy = 0, first_copy = 0;
/* First real insn of the exit test: the target of the unconditional
   jump that follows the LOOP_BEG note. */
304 rtx exitcode = NEXT_INSN (JUMP_LABEL (next_nonnote_insn (loop_start)));
306 int max_reg = max_reg_num ();
308 rtx loop_pre_header_label;
310 /* Scan the exit code. We do not perform this optimization if any insn:
314 has a REG_RETVAL or REG_LIBCALL note (hard to adjust)
315 is a NOTE_INSN_LOOP_BEG because this means we have a nested loop
316 is a NOTE_INSN_BLOCK_{BEG,END} because duplicating these notes
319 We also do not do this if we find an insn with ASM_OPERANDS. While
320 this restriction should not be necessary, copying an insn with
321 ASM_OPERANDS can confuse asm_noperands in some cases.
323 Also, don't do this if the exit code is more than 20 insns. */
325 for (insn = exitcode;
327 && ! (GET_CODE (insn) == NOTE
328 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END);
329 insn = NEXT_INSN (insn))
331 switch (GET_CODE (insn))
337 /* We could be in front of the wrong NOTE_INSN_LOOP_END if there is
338 a jump immediately after the loop start that branches outside
339 the loop but within an outer loop, near the exit test.
340 If we copied this exit test and created a phony
341 NOTE_INSN_LOOP_VTOP, this could make instructions immediately
342 before the exit test look like these could be safely moved
343 out of the loop even if they actually may be never executed.
344 This can be avoided by checking here for NOTE_INSN_LOOP_CONT. */
346 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG
347 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_CONT)
351 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
352 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END))
353 /* If we were to duplicate this code, we would not move
354 the BLOCK notes, and so debugging the moved code would
355 be difficult. Thus, we only move the code with -O2 or
362 /* The code below would grossly mishandle REG_WAS_0 notes,
363 so get rid of them here. */
364 while ((p = find_reg_note (insn, REG_WAS_0, NULL_RTX)) != 0)
365 remove_note (insn, p);
367 || find_reg_note (insn, REG_RETVAL, NULL_RTX)
368 || find_reg_note (insn, REG_LIBCALL, NULL_RTX))
376 /* Unless INSN is zero, we can do the optimization. */
382 /* See if any insn sets a register only used in the loop exit code and
383 not a user variable. If so, replace it with a new register. */
384 for (insn = exitcode; insn != lastexit; insn = NEXT_INSN (insn))
385 if (GET_CODE (insn) == INSN
386 && (set = single_set (insn)) != 0
387 && ((reg = SET_DEST (set), GET_CODE (reg) == REG)
388 || (GET_CODE (reg) == SUBREG
389 && (reg = SUBREG_REG (reg), GET_CODE (reg) == REG)))
390 && REGNO (reg) >= FIRST_PSEUDO_REGISTER
391 && REGNO_FIRST_UID (REGNO (reg)) == INSN_UID (insn))
/* Confirm the register's last use is inside the exit code too. */
393 for (p = NEXT_INSN (insn); p != lastexit; p = NEXT_INSN (p))
394 if (REGNO_LAST_UID (REGNO (reg)) == INSN_UID (p))
399 /* We can do the replacement. Allocate reg_map if this is the
400 first replacement we found. */
402 reg_map = (rtx *) xcalloc (max_reg, sizeof (rtx));
404 REG_LOOP_TEST_P (reg) = 1;
406 reg_map[REGNO (reg)] = gen_reg_rtx (GET_MODE (reg));
409 loop_pre_header_label = gen_label_rtx ();
411 /* Now copy each insn. */
412 for (insn = exitcode; insn != lastexit; insn = NEXT_INSN (insn))
414 switch (GET_CODE (insn))
417 copy = emit_barrier_before (loop_start);
420 /* Only copy line-number notes. */
421 if (NOTE_LINE_NUMBER (insn) >= 0)
423 copy = emit_note_before (NOTE_LINE_NUMBER (insn), loop_start);
424 NOTE_SOURCE_FILE (copy) = NOTE_SOURCE_FILE (insn);
429 copy = emit_insn_before (copy_insn (PATTERN (insn)), loop_start);
431 replace_regs (PATTERN (copy), reg_map, max_reg, 1);
433 mark_jump_label (PATTERN (copy), copy, 0);
435 /* Copy all REG_NOTES except REG_LABEL since mark_jump_label will
437 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
438 if (REG_NOTE_KIND (link) != REG_LABEL)
440 if (GET_CODE (link) == EXPR_LIST)
442 = copy_insn_1 (gen_rtx_EXPR_LIST (REG_NOTE_KIND (link),
447 = copy_insn_1 (gen_rtx_INSN_LIST (REG_NOTE_KIND (link),
452 if (reg_map && REG_NOTES (copy))
453 replace_regs (REG_NOTES (copy), reg_map, max_reg, 1);
457 copy = emit_jump_insn_before (copy_insn (PATTERN (insn)),
460 replace_regs (PATTERN (copy), reg_map, max_reg, 1);
461 mark_jump_label (PATTERN (copy), copy, 0);
462 if (REG_NOTES (insn))
464 REG_NOTES (copy) = copy_insn_1 (REG_NOTES (insn));
466 replace_regs (REG_NOTES (copy), reg_map, max_reg, 1);
469 /* Predict conditional jump that do make loop looping as taken.
470 Other jumps are probably exit conditions, so predict
472 if (any_condjump_p (copy))
474 rtx label = JUMP_LABEL (copy);
477 /* The jump_insn after loop_start should be followed
478 by barrier and loopback label. */
479 if (prev_nonnote_insn (label)
480 && (prev_nonnote_insn (prev_nonnote_insn (label))
481 == next_nonnote_insn (loop_start)))
483 predict_insn_def (copy, PRED_LOOP_HEADER, TAKEN);
484 /* To keep pre-header, we need to redirect all loop
485 entrances before the LOOP_BEG note. */
486 redirect_jump (copy, loop_pre_header_label, 0);
489 predict_insn_def (copy, PRED_LOOP_HEADER, NOT_TAKEN);
498 /* Record the first insn we copied. We need it so that we can
499 scan the copied insns for new pseudo registers. */
504 /* Now clean up by emitting a jump to the end label and deleting the jump
505 at the start of the loop. */
506 if (! copy || GET_CODE (copy) != BARRIER)
508 copy = emit_jump_insn_before (gen_jump (get_label_after (insn)),
511 /* Record the first insn we copied. We need it so that we can
512 scan the copied insns for new pseudo registers. This may not
513 be strictly necessary since we should have copied at least one
514 insn above. But I am going to be safe. */
518 mark_jump_label (PATTERN (copy), copy, 0);
519 emit_barrier_before (loop_start);
522 emit_label_before (loop_pre_header_label, loop_start);
524 /* Now scan from the first insn we copied to the last insn we copied
525 (copy) for new pseudo registers. Do this after the code to jump to
526 the end label since that might create a new pseudo too. */
527 reg_scan_update (first_copy, copy, max_reg);
529 /* Mark the exit code as the virtual top of the converted loop. */
530 emit_note_before (NOTE_INSN_LOOP_VTOP, exitcode);
/* Finally remove the original unconditional jump to the exit test. */
532 delete_related_insns (next_nonnote_insn (loop_start));
541 /* Move all block-beg, block-end, loop-beg, loop-cont, loop-vtop, loop-end,
542 notes between START and END out before START. START and END may be such
543 notes. Returns the values of the new starting and ending insns, which
544 may be different if the original ones were such notes.
545 Return true if there were only such notes and no real instructions. */
/* NOTE(review): fragment -- the signature declarations, the start/end
   bookkeeping inside the loop, and the final stores through startp/endp
   are missing from this extraction. */
548 squeeze_notes (startp, endp)
558 rtx past_end = NEXT_INSN (end);
560 for (insn = start; insn != past_end; insn = next)
562 next = NEXT_INSN (insn);
563 if (GET_CODE (insn) == NOTE
564 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END
565 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
566 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG
567 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END
568 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_CONT
569 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_VTOP))
/* Unlink the note and splice it back in immediately before START,
   fixing up all four neighbouring prev/next pointers. */
575 rtx prev = PREV_INSN (insn);
576 PREV_INSN (insn) = PREV_INSN (start);
577 NEXT_INSN (insn) = start;
578 NEXT_INSN (PREV_INSN (insn)) = insn;
579 PREV_INSN (NEXT_INSN (insn)) = insn;
580 NEXT_INSN (prev) = next;
581 PREV_INSN (next) = prev;
588 /* There were no real instructions. */
589 if (start == past_end)
599 /* Return the label before INSN, or put a new label there. */
/* NOTE(review): fragment -- return type, parameter declaration, braces and
   the final `return label;' are missing from this extraction. */
602 get_label_before (insn)
607 /* Find an existing label at this point
608 or make a new one if there is none. */
609 label = prev_nonnote_insn (insn);
610 /* (below) No usable label: create one and emit it after INSN's
   predecessor, i.e. immediately before INSN. */
611 if (label == 0 || GET_CODE (label) != CODE_LABEL)
613 rtx prev = PREV_INSN (insn);
615 label = gen_label_rtx ();
616 emit_label_after (label, prev);
617 LABEL_NUSES (label) = 0;
622 /* Return the label after INSN, or put a new label there. */
/* NOTE(review): fragment -- return type, parameter declaration, braces and
   the final `return label;' are missing from this extraction. */
625 get_label_after (insn)
630 /* Find an existing label at this point
631 or make a new one if there is none. */
632 label = next_nonnote_insn (insn);
633 /* (below) No usable label: create one and emit it right after INSN. */
634 if (label == 0 || GET_CODE (label) != CODE_LABEL)
636 label = gen_label_rtx ();
637 emit_label_after (label, insn);
638 LABEL_NUSES (label) = 0;
643 /* Given a comparison (CODE ARG0 ARG1), inside an insn, INSN, return a code
644 of reversed comparison if it is possible to do so. Otherwise return UNKNOWN.
645 UNKNOWN may be returned in case we are having CC_MODE compare and we don't
646 know whether its source is floating point or integer comparison. Machine
647 description should define REVERSIBLE_CC_MODE and REVERSE_CONDITION macros
648 to help this function avoid overhead in these cases. */
/* NOTE(review): fragment -- the return type, the `enum rtx_code code'
   parameter declaration, the switch-case labels, several `return UNKNOWN;'
   statements, `#endif's and closing braces are missing from this
   extraction (embedded numbering skips throughout). */
650 reversed_comparison_code_parts (code, arg0, arg1, insn)
651 rtx insn, arg0, arg1;
654 enum machine_mode mode;
656 /* If this is not actually a comparison, we can't reverse it. */
657 if (GET_RTX_CLASS (code) != '<')
/* Prefer arg0's mode; a VOIDmode arg0 (e.g. a constant) falls back
   to arg1's mode. */
660 mode = GET_MODE (arg0);
661 if (mode == VOIDmode)
662 mode = GET_MODE (arg1);
664 /* First see if machine description supplies us a way to reverse the
665 comparison. Give it priority over everything else to allow machine
description to do
667 #ifdef REVERSIBLE_CC_MODE
668 if (GET_MODE_CLASS (mode) == MODE_CC
669 && REVERSIBLE_CC_MODE (mode))
671 #ifdef REVERSE_CONDITION
672 return REVERSE_CONDITION (code, mode);
674 return reverse_condition (code);
678 /* Try a few special cases based on the comparison code. */
687 /* It is always safe to reverse EQ and NE, even for the floating
688 point. Similarly the unsigned comparisons are never used for
689 floating point so we can reverse them in the default way. */
690 return reverse_condition (code);
695 /* In case we already see unordered comparison, we can be sure to
696 be dealing with floating point so we don't need any more tests. */
697 return reverse_condition_maybe_unordered (code);
702 /* We don't have safe way to reverse these yet. */
708 /* In case we give up IEEE compatibility, all comparisons are reversible. */
709 if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
710 || flag_unsafe_math_optimizations)
711 return reverse_condition (code);
713 if (GET_MODE_CLASS (mode) == MODE_CC
720 /* Try to search for the comparison to determine the real mode.
721 This code is expensive, but with sane machine description it
722 will be never used, since REVERSIBLE_CC_MODE will return true
/* Walk backwards within the basic block (stop at a CODE_LABEL) looking
   for the insn that set arg0, to recover the compared operands' mode. */
727 for (prev = prev_nonnote_insn (insn);
728 prev != 0 && GET_CODE (prev) != CODE_LABEL;
729 prev = prev_nonnote_insn (prev))
731 rtx set = set_of (arg0, prev);
732 if (set && GET_CODE (set) == SET
733 && rtx_equal_p (SET_DEST (set), arg0))
735 rtx src = SET_SRC (set);
737 if (GET_CODE (src) == COMPARE)
739 rtx comparison = src;
740 arg0 = XEXP (src, 0);
741 mode = GET_MODE (arg0);
742 if (mode == VOIDmode)
743 mode = GET_MODE (XEXP (comparison, 1));
746 /* We can get past reg-reg moves. This may be useful for model
747 of i387 comparisons that first move flag registers around. */
754 /* If the register is clobbered in some way we do not understand,
761 /* An integer condition. */
762 if (GET_CODE (arg0) == CONST_INT
763 || (GET_MODE (arg0) != VOIDmode
764 && GET_MODE_CLASS (mode) != MODE_CC
765 && ! FLOAT_MODE_P (mode)))
766 return reverse_condition (code);
771 /* A wrapper around the previous function to take COMPARISON as rtx
772 expression. This simplifies many callers. */
/* NOTE(review): fragment -- return type, braces and the `return UNKNOWN;'
   for the non-comparison case are missing from this extraction. */
774 reversed_comparison_code (comparison, insn)
775 rtx comparison, insn;
/* Only comparison rtx-class codes can be reversed. */
777 if (GET_RTX_CLASS (GET_CODE (comparison)) != '<')
779 return reversed_comparison_code_parts (GET_CODE (comparison),
780 XEXP (comparison, 0),
781 XEXP (comparison, 1), insn);
784 /* Given an rtx-code for a comparison, return the code for the negated
785 comparison. If no such code exists, return UNKNOWN.
787 WATCH OUT! reverse_condition is not safe to use on a jump that might
788 be acting on the results of an IEEE floating point comparison, because
789 of the special treatment of non-signaling nans in comparisons.
790 Use reversed_comparison_code instead. */
/* NOTE(review): the entire body (the switch over rtx codes) is missing
   from this extraction; only the old-style signature remains. */
793 reverse_condition (code)
836 /* Similar, but we're allowed to generate unordered comparisons, which
837 makes it safe for IEEE floating-point. Of course, we have to recognize
838 that the target will support them too... */
/* NOTE(review): fragment -- the switch over rtx codes that follows this
   guard is missing from this extraction. */
841 reverse_condition_maybe_unordered (code)
844 /* Non-IEEE formats don't have unordered conditions. */
845 if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT)
846 return reverse_condition (code);
884 /* Similar, but return the code when two operands of a comparison are swapped.
885 This IS safe for IEEE floating-point. */
/* NOTE(review): body missing from this extraction; signature only. */
888 swap_condition (code)
931 /* Given a comparison CODE, return the corresponding unsigned comparison.
932 If CODE is an equality comparison or already an unsigned comparison,
/* NOTE(review): body missing from this extraction; signature only. */
936 unsigned_condition (code)
963 /* Similarly, return the signed version of a comparison. */
/* NOTE(review): body missing from this extraction; signature only. */
966 signed_condition (code)
993 /* Return non-zero if CODE1 is more strict than CODE2, i.e., if the
994 truth of CODE1 implies the truth of CODE2. */
/* NOTE(review): fragment -- the switch-case labels on code1 (EQ, LT, GT,
   ...), the `return 1;'/`break;' statements and the closing `return 0;'
   are missing from this extraction; only the code2 tests survive.  Each
   surviving `if' belongs to a different (invisible) case of code1. */
997 comparison_dominates_p (code1, code2)
998 enum rtx_code code1, code2;
1000 /* UNKNOWN comparison codes can happen as a result of trying to revert
1002 They can't match anything, so we have to reject them here. */
1003 if (code1 == UNKNOWN || code2 == UNKNOWN)
1012 if (code2 == UNLE || code2 == UNGE)
1017 if (code2 == LE || code2 == LEU || code2 == GE || code2 == GEU
1018 || code2 == ORDERED)
1023 if (code2 == UNLE || code2 == NE)
1028 if (code2 == LE || code2 == NE || code2 == ORDERED || code2 == LTGT)
1033 if (code2 == UNGE || code2 == NE)
1038 if (code2 == GE || code2 == NE || code2 == ORDERED || code2 == LTGT)
1044 if (code2 == ORDERED)
1049 if (code2 == NE || code2 == ORDERED)
1054 if (code2 == LEU || code2 == NE)
1059 if (code2 == GEU || code2 == NE)
1064 if (code2 == NE || code2 == UNEQ || code2 == UNLE || code2 == UNLT
1065 || code2 == UNGE || code2 == UNGT)
1076 /* Return 1 if INSN is an unconditional jump and nothing else. */
/* NOTE(review): the signature is missing from this extraction; this is
   the body of simplejump_p per the surrounding file.  A simple jump is a
   bare `(set (pc) (label_ref ...))' with no other effects. */
1082 return (GET_CODE (insn) == JUMP_INSN
1083 && GET_CODE (PATTERN (insn)) == SET
1084 && GET_CODE (SET_DEST (PATTERN (insn))) == PC
1085 && GET_CODE (SET_SRC (PATTERN (insn))) == LABEL_REF);
1088 /* Return nonzero if INSN is a (possibly) conditional jump
1091 Use of this function is deprecated, since we need to support combined
1092 branch and compare insns. Use any_condjump_p instead whenever possible. */
/* NOTE(review): fragment of condjump_p -- the signature, the `return 0;'
   after the SET test, and the SET_SRC reassignment before the LABEL_REF
   test are missing from this extraction. */
1098 rtx x = PATTERN (insn);
1100 if (GET_CODE (x) != SET
1101 || GET_CODE (SET_DEST (x)) != PC)
/* An unconditional (set (pc) (label_ref ...)) also counts. */
1105 if (GET_CODE (x) == LABEL_REF)
/* Otherwise require if_then_else with PC on one arm and a label or
   RETURN on the other. */
1108 return (GET_CODE (x) == IF_THEN_ELSE
1109 && ((GET_CODE (XEXP (x, 2)) == PC
1110 && (GET_CODE (XEXP (x, 1)) == LABEL_REF
1111 || GET_CODE (XEXP (x, 1)) == RETURN))
1112 || (GET_CODE (XEXP (x, 1)) == PC
1113 && (GET_CODE (XEXP (x, 2)) == LABEL_REF
1114 || GET_CODE (XEXP (x, 2)) == RETURN))));
1119 /* Return nonzero if INSN is a (possibly) conditional jump inside a
1122 Use of this function is deprecated, since we need to support combined
1123 branch and compare insns. Use any_condjump_p instead whenever possible. */
/* NOTE(review): fragment -- return type, braces and the interleaved
   `return 0;'/`return 1;' statements are missing from this extraction;
   each surviving `if' originally ended in a return. */
1126 condjump_in_parallel_p (insn)
1129 rtx x = PATTERN (insn);
1131 if (GET_CODE (x) != PARALLEL)
/* Only the first element of the PARALLEL may be the PC set. */
1134 x = XVECEXP (x, 0, 0);
1136 if (GET_CODE (x) != SET)
1138 if (GET_CODE (SET_DEST (x)) != PC)
1140 if (GET_CODE (SET_SRC (x)) == LABEL_REF)
1142 if (GET_CODE (SET_SRC (x)) != IF_THEN_ELSE)
1144 if (XEXP (SET_SRC (x), 2) == pc_rtx
1145 && (GET_CODE (XEXP (SET_SRC (x), 1)) == LABEL_REF
1146 || GET_CODE (XEXP (SET_SRC (x), 1)) == RETURN))
1148 if (XEXP (SET_SRC (x), 1) == pc_rtx
1149 && (GET_CODE (XEXP (SET_SRC (x), 2)) == LABEL_REF
1150 || GET_CODE (XEXP (SET_SRC (x), 2)) == RETURN))
1155 /* Return set of PC, otherwise NULL. */
/* NOTE(review): fragment of pc_set -- the signature, `rtx pat;'
   declaration, `return pat;' and final `return NULL_RTX;' are missing
   from this extraction. */
1162 if (GET_CODE (insn) != JUMP_INSN)
1164 pat = PATTERN (insn);
1166 /* The set is allowed to appear either as the insn pattern or
1167 the first set in a PARALLEL. */
1168 if (GET_CODE (pat) == PARALLEL)
1169 pat = XVECEXP (pat, 0, 0);
1170 if (GET_CODE (pat) == SET && GET_CODE (SET_DEST (pat)) == PC)
1176 /* Return true when insn is an unconditional direct jump,
1177 possibly bundled inside a PARALLEL. */
/* NOTE(review): fragment -- return type, the null check on x, and the
   final `return 1;' are missing from this extraction. */
1180 any_uncondjump_p (insn)
1183 rtx x = pc_set (insn);
/* Unconditional means the PC is set straight to a LABEL_REF. */
1186 if (GET_CODE (SET_SRC (x)) != LABEL_REF)
1191 /* Return true when insn is a conditional jump. This function works for
1192 instructions containing PC sets in PARALLELs. The instruction may have
1193 various other effects so before removing the jump you must verify
1196 Note that unlike condjump_p it returns false for unconditional jumps. */
/* NOTE(review): fragment -- return type, the null check on x, and the
   declarations of `a'/`b' are missing from this extraction. */
1199 any_condjump_p (insn)
1202 rtx x = pc_set (insn);
1207 if (GET_CODE (SET_SRC (x)) != IF_THEN_ELSE)
/* Conditional: one if_then_else arm is PC (fallthrough), the other a
   LABEL_REF or RETURN. */
1210 a = GET_CODE (XEXP (SET_SRC (x), 1));
1211 b = GET_CODE (XEXP (SET_SRC (x), 2));
1213 return ((b == PC && (a == LABEL_REF || a == RETURN))
1214 || (a == PC && (b == LABEL_REF || b == RETURN)));
1217 /* Return the label of a conditional jump. */
/* NOTE(review): fragment -- return type, the reassignment of x to
   SET_SRC, and the `return ...'/`return NULL_RTX;' statements paired
   with these tests are missing from this extraction. */
1220 condjump_label (insn)
1223 rtx x = pc_set (insn);
1228 if (GET_CODE (x) == LABEL_REF)
1230 if (GET_CODE (x) != IF_THEN_ELSE)
1232 if (XEXP (x, 2) == pc_rtx && GET_CODE (XEXP (x, 1)) == LABEL_REF)
1234 if (XEXP (x, 1) == pc_rtx && GET_CODE (XEXP (x, 2)) == LABEL_REF)
1239 /* Return true if INSN is a (possibly conditional) return insn. */
/* for_each_rtx callback: nonzero when *LOC is a RETURN rtx.
   NOTE(review): the return type and the `rtx x = *loc;' line are missing
   from this extraction. */
1242 returnjump_p_1 (loc, data)
1244 void *data ATTRIBUTE_UNUSED;
1247 return x && GET_CODE (x) == RETURN;
/* NOTE(review): body fragment of the public predicate (presumably
   returnjump_p -- its signature is missing from this extraction).  Walks
   the whole pattern so conditional returns are found too. */
1254 if (GET_CODE (insn) != JUMP_INSN)
1256 return for_each_rtx (&PATTERN (insn), returnjump_p_1, NULL);
1259 /* Return true if INSN is a jump that only transfers control and
/* NOTE(review): fragment of onlyjump_p -- the signature, `rtx set;'
   declaration, null-set check and the final `return 1;' are missing
   from this extraction. */
1268 if (GET_CODE (insn) != JUMP_INSN)
1271 set = single_set (insn);
1274 if (GET_CODE (SET_DEST (set)) != PC)
/* The jump source itself must be side-effect free. */
1276 if (side_effects_p (SET_SRC (set)))
1284 /* Return non-zero if X is an RTX that only sets the condition codes
1285 and has no side effects. */
/* NOTE(review): signature missing from this extraction (presumably
   only_sets_cc0_p); sets_cc0_p == 1 means "sets cc0 and nothing else". */
1298 return sets_cc0_p (x) == 1 && ! side_effects_p (x);
1301 /* Return 1 if X is an RTX that does nothing but set the condition codes
1302 and CLOBBER or USE registers.
1303 Return -1 if X does explicitly set the condition codes,
1304 but also does other things. */
/* NOTE(review): fragment -- the signature, the non-SET/non-PARALLEL
   early returns, the `sets_cc0' declaration/updates and the `return 0;'
   fallthrough are missing from this extraction. */
1317 if (GET_CODE (x) == SET && SET_DEST (x) == cc0_rtx)
1319 if (GET_CODE (x) == PARALLEL)
1323 int other_things = 0;
/* Scan every element: count cc0-sets vs. any other SET. */
1324 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
1326 if (GET_CODE (XVECEXP (x, 0, i)) == SET
1327 && SET_DEST (XVECEXP (x, 0, i)) == cc0_rtx)
1329 else if (GET_CODE (XVECEXP (x, 0, i)) == SET)
1332 return ! sets_cc0 ? 0 : other_things ? -1 : 1;
1338 /* Follow any unconditional jump at LABEL;
1339 return the ultimate label reached by any such chain of jumps.
1340 If LABEL is not followed by a jump, return LABEL.
1341 If the chain loops or we can't find end, return LABEL,
1342 since that tells caller to avoid changing the insn.
1344 If RELOAD_COMPLETED is 0, we do not chain across a NOTE_INSN_LOOP_BEG or
1345 a USE or CLOBBER. */
/* NOTE(review): fragment -- the signature, declarations, the `for
   (depth = 0; depth < 10; ...)' chain-following loop header and
   several `break'/`return value;' statements are missing from this
   extraction (numbering skips 1348 -> 1358, 1364 -> 1367, ...). */
1348 follow_jumps (label)
/* (Loop-condition fragment) follow only a clean unconditional jump --
   or a RETURN pattern -- that is immediately followed by a BARRIER. */
1358 && (insn = next_active_insn (value)) != 0
1359 && GET_CODE (insn) == JUMP_INSN
1360 && ((JUMP_LABEL (insn) != 0 && any_uncondjump_p (insn)
1361 && onlyjump_p (insn))
1362 || GET_CODE (PATTERN (insn)) == RETURN)
1363 && (next = NEXT_INSN (insn))
1364 && GET_CODE (next) == BARRIER);
1367 /* Don't chain through the insn that jumps into a loop
1368 from outside the loop,
1369 since that would create multiple loop entry jumps
1370 and prevent loop optimization. */
1372 if (!reload_completed)
1373 for (tem = value; tem != insn; tem = NEXT_INSN (tem))
1374 if (GET_CODE (tem) == NOTE
1375 && (NOTE_LINE_NUMBER (tem) == NOTE_INSN_LOOP_BEG
1376 /* ??? Optional. Disables some optimizations, but makes
1377 gcov output more accurate with -O. */
1378 || (flag_test_coverage && NOTE_LINE_NUMBER (tem) > 0)))
1381 /* If we have found a cycle, make the insn jump to itself. */
1382 if (JUMP_LABEL (insn) == label)
/* Don't chain into a dispatch table; keep the jump to its label. */
1385 tem = next_active_insn (JUMP_LABEL (insn));
1386 if (tem && (GET_CODE (PATTERN (tem)) == ADDR_VEC
1387 || GET_CODE (PATTERN (tem)) == ADDR_DIFF_VEC))
1390 value = JUMP_LABEL (insn);
1398 /* Find all CODE_LABELs referred to in X, and increment their use counts.
1399 If INSN is a JUMP_INSN and there is at least one CODE_LABEL referenced
1400 in INSN, then store one of them in JUMP_LABEL (INSN).
1401 If INSN is an INSN or a CALL_INSN and there is at least one CODE_LABEL
1402 referenced in INSN, add a REG_LABEL note containing that label to INSN.
1403 Also, when there are consecutive labels, canonicalize on the last of them.
1405 Note that two labels separated by a loop-beginning note
1406 must be kept distinct if we have not yet done loop-optimization,
1407 because the gap between them is where loop-optimize
1408 will want to move invariant code to. CROSS_JUMP tells us
1409 that loop-optimization is done with. */
/* NOTE(review): fragment -- the signature's declarations, the top-level
   switch over GET_CODE (x) with its case labels and returns, and several
   braces are missing from this extraction (numbering skips 1417 -> 1441,
   1443 -> 1448, 1494 -> 1502). */
1412 mark_jump_label (x, insn, in_mem)
1417 RTX_CODE code = GET_CODE (x);
1441 /* If this is a constant-pool reference, see if it is a label. */
1442 if (CONSTANT_POOL_ADDRESS_P (x))
1443 mark_jump_label (get_pool_constant (x), insn, in_mem);
/* (LABEL_REF case) canonicalize and count the referenced label. */
1448 rtx label = XEXP (x, 0);
1450 /* Ignore remaining references to unreachable labels that
1451 have been deleted. */
1452 if (GET_CODE (label) == NOTE
1453 && NOTE_LINE_NUMBER (label) == NOTE_INSN_DELETED_LABEL)
1456 if (GET_CODE (label) != CODE_LABEL)
1459 /* Ignore references to labels of containing functions. */
1460 if (LABEL_REF_NONLOCAL_P (x))
1463 XEXP (x, 0) = label;
1464 if (! insn || ! INSN_DELETED_P (insn))
1465 ++LABEL_NUSES (label);
1469 if (GET_CODE (insn) == JUMP_INSN)
1470 JUMP_LABEL (insn) = label;
1473 /* Add a REG_LABEL note for LABEL unless there already
1474 is one. All uses of a label, except for labels
1475 that are the targets of jumps, must have a
1477 if (! find_reg_note (insn, REG_LABEL, label))
1478 REG_NOTES (insn) = gen_rtx_INSN_LIST (REG_LABEL, label,
1485 /* Do walk the labels in a vector, but not the first operand of an
1486 ADDR_DIFF_VEC. Don't set the JUMP_LABEL of a vector. */
1489 if (! INSN_DELETED_P (insn))
1491 int eltnum = code == ADDR_DIFF_VEC ? 1 : 0;
1493 for (i = 0; i < XVECLEN (x, eltnum); i++)
1494 mark_jump_label (XVECEXP (x, eltnum, i), NULL_RTX, in_mem);
/* Generic recursion over the remaining operands of X. */
1502 fmt = GET_RTX_FORMAT (code);
1503 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1506 mark_jump_label (XEXP (x, i), insn, in_mem);
1507 else if (fmt[i] == 'E')
1510 for (j = 0; j < XVECLEN (x, i); j++)
1511 mark_jump_label (XVECEXP (x, i, j), insn, in_mem);
1516 /* If all INSN does is set the pc, delete it,
1517 and delete the insn that set the condition codes for it
1518 if that's what the previous thing was. */
/* NOTE(review): the signature is missing from this extraction
   (presumably delete_jump); only the body survives. */
1524 rtx set = single_set (insn);
1526 if (set && GET_CODE (SET_DEST (set)) == PC)
1527 delete_computation (insn);
1530 /* Verify INSN is a BARRIER and delete it. */
/* NOTE(review): fragment -- the abort() on non-barrier and the actual
   delete_insn call are missing from this extraction. */
1533 delete_barrier (insn)
1536 if (GET_CODE (insn) != BARRIER)
1542 /* Recursively delete prior insns that compute the value (used only by INSN
1543 which the caller is deleting) stored in the register mentioned by NOTE
1544 which is a REG_DEAD note associated with INSN. */
/* NOTE(review): fragment -- the signature's parameter declarations, the
   `our_prev' declaration, several `break'/`continue' statements and
   closing braces are missing from this extraction. */
1547 delete_prior_computation (note, insn)
1552 rtx reg = XEXP (note, 0);
/* Walk backwards over ordinary insns and calls only; anything else
   (labels, jumps, barriers) terminates the scan. */
1554 for (our_prev = prev_nonnote_insn (insn);
1555 our_prev && (GET_CODE (our_prev) == INSN
1556 || GET_CODE (our_prev) == CALL_INSN);
1557 our_prev = prev_nonnote_insn (our_prev))
1559 rtx pat = PATTERN (our_prev);
1561 /* If we reach a CALL which is not calling a const function
1562 or the callee pops the arguments, then give up. */
1563 if (GET_CODE (our_prev) == CALL_INSN
1564 && (! CONST_OR_PURE_CALL_P (our_prev)
1565 || GET_CODE (pat) != SET || GET_CODE (SET_SRC (pat)) != CALL))
1568 /* If we reach a SEQUENCE, it is too complex to try to
1569 do anything with it, so give up. */
1570 if (GET_CODE (pat) == SEQUENCE)
1573 if (GET_CODE (pat) == USE
1574 && GET_CODE (XEXP (pat, 0)) == INSN)
1575 /* reorg creates USEs that look like this. We leave them
1576 alone because reorg needs them for its own purposes. */
1579 if (reg_set_p (reg, pat))
1581 if (side_effects_p (pat) && GET_CODE (our_prev) != CALL_INSN)
1584 if (GET_CODE (pat) == PARALLEL)
1586 /* If we find a SET of something else, we can't
/* Scan the PARALLEL: every SET must target REG for the whole insn
   to be deletable. */
1591 for (i = 0; i < XVECLEN (pat, 0); i++)
1593 rtx part = XVECEXP (pat, 0, i);
1595 if (GET_CODE (part) == SET
1596 && SET_DEST (part) != reg)
1600 if (i == XVECLEN (pat, 0))
1601 delete_computation (our_prev);
1603 else if (GET_CODE (pat) == SET
1604 && GET_CODE (SET_DEST (pat)) == REG)
1606 int dest_regno = REGNO (SET_DEST (pat));
/* (Fragment) compute the hard-register end regnos for both the set
   destination and REG; pseudos span a single regno. */
1609 + (dest_regno < FIRST_PSEUDO_REGISTER
1610 ? HARD_REGNO_NREGS (dest_regno,
1611 GET_MODE (SET_DEST (pat))) : 1));
1612 int regno = REGNO (reg);
1615 + (regno < FIRST_PSEUDO_REGISTER
1616 ? HARD_REGNO_NREGS (regno, GET_MODE (reg)) : 1));
1618 if (dest_regno >= regno
1619 && dest_endregno <= endregno)
1620 delete_computation (our_prev);
1622 /* We may have a multi-word hard register and some, but not
1623 all, of the words of the register are needed in subsequent
1624 insns. Write REG_UNUSED notes for those parts that were not
1626 else if (dest_regno <= regno
1627 && dest_endregno >= endregno)
1631 REG_NOTES (our_prev)
1632 = gen_rtx_EXPR_LIST (REG_UNUSED, reg,
1633 REG_NOTES (our_prev))
1635 for (i = dest_regno; i < dest_endregno; i++)
1636 if (! find_regno_note (our_prev, REG_UNUSED, i))
1639 if (i == dest_endregno)
1640 delete_computation (our_prev);
1647 /* If PAT references the register that dies here, it is an
1648 additional use. Hence any prior SET isn't dead. However, this
1649 insn becomes the new place for the REG_DEAD note. */
1650 if (reg_overlap_mentioned_p (reg, pat))
/* Move the REG_DEAD note from INSN onto this earlier user. */
1652 XEXP (note, 1) = REG_NOTES (our_prev);
1653 REG_NOTES (our_prev) = note;
1659 /* Delete INSN and recursively delete insns that compute values used only
1660 by INSN. This uses the REG_DEAD notes computed during flow analysis.
1661 If we are running before flow.c, we need do nothing since flow.c will
1662 delete dead code. We also can't know if the registers being used are
1663 dead or not at this point.
1665 Otherwise, look at all our REG_DEAD notes. If a previous insn does
1666 nothing other than set a register that dies in this insn, we can delete
1669 On machines with CC0, if CC0 is used in this insn, we may be able to
1670 delete the insn that set it. */
1673 delete_computation (insn)
/* cc0 handling (this branch is presumably guarded by #ifdef HAVE_cc0 in
   the elided lines -- confirm).  */
1679 if (reg_referenced_p (cc0_rtx, PATTERN (insn)))
1681 rtx prev = prev_nonnote_insn (insn);
1682 /* We assume that at this stage
1683 CC's are always set explicitly
1684 and always immediately before the jump that
1685 will use them. So if the previous insn
1686 exists to set the CC's, delete it
1687 (unless it performs auto-increments, etc.). */
1688 if (prev && GET_CODE (prev) == INSN
1689 && sets_cc0_p (PATTERN (prev)))
/* sets_cc0_p > 0 means PREV does nothing but set cc0, so it can go
   entirely (if it has no other side effects).  */
1691 if (sets_cc0_p (PATTERN (prev)) > 0
1692 && ! side_effects_p (PATTERN (prev)))
1693 delete_computation (prev);
1695 /* Otherwise, show that cc0 won't be used. */
1696 REG_NOTES (prev) = gen_rtx_EXPR_LIST (REG_UNUSED,
1697 cc0_rtx, REG_NOTES (prev));
/* For each legitimate REG_DEAD note, try to delete the prior insn that
   computed the dying register.  NEXT is cached because
   delete_prior_computation may detach NOTE from the list.  */
1702 for (note = REG_NOTES (insn); note; note = next)
1704 next = XEXP (note, 1);
1706 if (REG_NOTE_KIND (note) != REG_DEAD
1707 /* Verify that the REG_NOTE is legitimate. */
1708 || GET_CODE (XEXP (note, 0)) != REG)
1711 delete_prior_computation (note, insn);
/* Finally delete INSN itself along with anything it alone made live.  */
1714 delete_related_insns (insn);
1717 /* Delete insn INSN from the chain of insns and update label ref counts
1718 and delete insns now unreachable.
1720 Returns the first insn after INSN that was not deleted.
1722 Usage of this instruction is deprecated. Use delete_insn instead and
1723 subsequent cfg_cleanup pass to delete unreachable code if needed. */
1726 delete_related_insns (insn)
1729 int was_code_label = (GET_CODE (insn) == CODE_LABEL);
1731 rtx next = NEXT_INSN (insn), prev = PREV_INSN (insn);
/* Skip forward over already-deleted insns to find the return value.  */
1733 while (next && INSN_DELETED_P (next))
1734 next = NEXT_INSN (next);
1736 /* This insn is already deleted => return first following nondeleted. */
1737 if (INSN_DELETED_P (insn))
1742 /* If instruction is followed by a barrier,
1743 delete the barrier too. */
1745 if (next != 0 && GET_CODE (next) == BARRIER)
1748 /* If deleting a jump, decrement the count of the label,
1749 and delete the label if it is now unused. */
1751 if (GET_CODE (insn) == JUMP_INSN && JUMP_LABEL (insn))
1753 rtx lab = JUMP_LABEL (insn), lab_next;
/* NOTE(review): LABEL_NUSES was presumably decremented in elided lines
   before this test -- confirm against the full source.  */
1755 if (LABEL_NUSES (lab) == 0)
1757 /* This can delete NEXT or PREV,
1758 either directly if NEXT is JUMP_LABEL (INSN),
1759 or indirectly through more levels of jumps. */
1760 delete_related_insns (lab);
1762 /* I feel a little doubtful about this loop,
1763 but I see no clean and sure alternative way
1764 to find the first insn after INSN that is not now deleted.
1765 I hope this works. */
1766 while (next && INSN_DELETED_P (next))
1767 next = NEXT_INSN (next);
1770 else if ((lab_next = next_nonnote_insn (lab)) != NULL
1771 && GET_CODE (lab_next) == JUMP_INSN
1772 && (GET_CODE (PATTERN (lab_next)) == ADDR_VEC
1773 || GET_CODE (PATTERN (lab_next)) == ADDR_DIFF_VEC))
1775 /* If we're deleting the tablejump, delete the dispatch table.
1776 We may not be able to kill the label immediately preceding
1777 just yet, as it might be referenced in code leading up to
1779 delete_related_insns (lab_next);
1783 /* Likewise if we're deleting a dispatch table. */
1785 if (GET_CODE (insn) == JUMP_INSN
1786 && (GET_CODE (PATTERN (insn)) == ADDR_VEC
1787 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC))
1789 rtx pat = PATTERN (insn);
/* ADDR_DIFF_VEC stores its labels in vector element 1; ADDR_VEC in
   element 0 -- diff_vec_p selects the right one.  */
1790 int i, diff_vec_p = GET_CODE (pat) == ADDR_DIFF_VEC;
1791 int len = XVECLEN (pat, diff_vec_p);
/* Delete any label in the table whose use count has dropped to zero.  */
1793 for (i = 0; i < len; i++)
1794 if (LABEL_NUSES (XEXP (XVECEXP (pat, diff_vec_p, i), 0)) == 0)
1795 delete_related_insns (XEXP (XVECEXP (pat, diff_vec_p, i), 0));
1796 while (next && INSN_DELETED_P (next))
1797 next = NEXT_INSN (next);
1801 /* Likewise for an ordinary INSN / CALL_INSN with a REG_LABEL note. */
1802 if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN)
1803 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
1804 if (REG_NOTE_KIND (note) == REG_LABEL
1805 /* This could also be a NOTE_INSN_DELETED_LABEL note. */
1806 && GET_CODE (XEXP (note, 0)) == CODE_LABEL)
1807 if (LABEL_NUSES (XEXP (note, 0)) == 0)
1808 delete_related_insns (XEXP (note, 0));
/* Back up over deleted insns and NOTEs so PREV is a real predecessor.  */
1810 while (prev && (INSN_DELETED_P (prev) || GET_CODE (prev) == NOTE))
1811 prev = PREV_INSN (prev);
1813 /* If INSN was a label and a dispatch table follows it,
1814 delete the dispatch table. The tablejump must have gone already.
1815 It isn't useful to fall through into a table. */
1818 && NEXT_INSN (insn) != 0
1819 && GET_CODE (NEXT_INSN (insn)) == JUMP_INSN
1820 && (GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_VEC
1821 || GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_DIFF_VEC))
1822 next = delete_related_insns (NEXT_INSN (insn));
1824 /* If INSN was a label, delete insns following it if now unreachable. */
/* A BARRIER before the label means control can only have reached the
   following code through the label; with the label gone, it is dead.  */
1826 if (was_code_label && prev && GET_CODE (prev) == BARRIER)
1830 && (GET_RTX_CLASS (code = GET_CODE (next)) == 'i'
1831 || code == NOTE || code == BARRIER
1832 || (code == CODE_LABEL && INSN_DELETED_P (next))))
1835 && NOTE_LINE_NUMBER (next) != NOTE_INSN_FUNCTION_END)
1836 next = NEXT_INSN (next);
1837 /* Keep going past other deleted labels to delete what follows. */
1838 else if (code == CODE_LABEL && INSN_DELETED_P (next))
1839 next = NEXT_INSN (next);
1841 /* Note: if this deletes a jump, it can cause more
1842 deletion of unreachable code, after a different label.
1843 As long as the value from this recursive call is correct,
1844 this invocation functions correctly. */
1845 next = delete_related_insns (next);
1852 /* Advance from INSN till reaching something not deleted
1853 then return that. May return INSN itself. */
1856 next_nondeleted_insn (insn)
/* Assumes a non-deleted insn exists after INSN; otherwise NEXT_INSN
   would eventually yield NULL and INSN_DELETED_P would dereference it.  */
1859 while (INSN_DELETED_P (insn))
1860 insn = NEXT_INSN (insn);
1864 /* Delete a range of insns from FROM to TO, inclusive.
1865 This is for the sake of peephole optimization, so assume
1866 that whatever these insns do will still be done by a new
1867 peephole insn that will replace them. */
1870 delete_for_peephole (from, to)
1877 rtx next = NEXT_INSN (insn);
1878 rtx prev = PREV_INSN (insn);
/* NOTEs are deliberately kept in the chain; only real insns are
   marked deleted and unlinked.  */
1880 if (GET_CODE (insn) != NOTE)
1882 INSN_DELETED_P (insn) = 1;
1884 /* Patch this insn out of the chain. */
1885 /* We don't do this all at once, because we
1886 must preserve all NOTEs. */
1888 NEXT_INSN (prev) = next;
1891 PREV_INSN (next) = prev;
1899 /* Note that if TO is an unconditional jump
1900 we *do not* delete the BARRIER that follows,
1901 since the peephole that replaces this sequence
1902 is also an unconditional jump in that case. */
1905 /* We have determined that INSN is never reached, and are about to
1906 delete it. Print a warning if the user asked for one.
1908 To try to make this warning more useful, this should only be called
1909 once per basic block not reached, and it only warns when the basic
1910 block contains more than one line from the current function, and
1911 contains at least one operation. CSE and inlining can duplicate insns,
1912 so it's possible to get spurious warnings from this. */
1915 never_reached_warning (avoided_insn)
1919 rtx a_line_note = NULL;
1920 int two_avoided_lines = 0;
1921 int contains_insn = 0;
/* Honor -Wunreachable-code: bail out silently unless requested.  */
1923 if (! warn_notreached)
1926 /* Scan forwards, looking at LINE_NUMBER notes, until
1927 we hit a LABEL or we run out of insns. */
1929 for (insn = avoided_insn; insn != NULL; insn = NEXT_INSN (insn))
1931 if (GET_CODE (insn) == CODE_LABEL)
1933 else if (GET_CODE (insn) == NOTE /* A line number note? */
1934 && NOTE_LINE_NUMBER (insn) >= 0)
/* Remember the first line note; flag if any later note names a
   different source line.  */
1936 if (a_line_note == NULL)
1939 two_avoided_lines |= (NOTE_LINE_NUMBER (a_line_note)
1940 != NOTE_LINE_NUMBER (insn));
1942 else if (INSN_P (insn))
/* Only warn for a block with real work spanning multiple lines, to cut
   down on spurious warnings from duplicated insns.  */
1945 if (two_avoided_lines && contains_insn)
1946 warning_with_file_and_line (NOTE_SOURCE_FILE (a_line_note),
1947 NOTE_LINE_NUMBER (a_line_note),
1948 "will never be executed");
1951 /* Throughout LOC, redirect OLABEL to NLABEL. Treat null OLABEL or
1952 NLABEL as a return. Accrue modifications into the change group. */
1955 redirect_exp_1 (loc, olabel, nlabel, insn)
1961 RTX_CODE code = GET_CODE (x);
/* Case 1: a direct reference to OLABEL -- replace with a reference to
   NLABEL, or with (return) when NLABEL is null.  */
1965 if (code == LABEL_REF)
1967 if (XEXP (x, 0) == olabel)
1971 n = gen_rtx_LABEL_REF (VOIDmode, nlabel);
1973 n = gen_rtx_RETURN (VOIDmode);
/* Queue (don't apply) the change; caller decides via apply_change_group.  */
1975 validate_change (insn, loc, n, 1);
/* Case 2: turning a RETURN back into a jump to NLABEL.  */
1979 else if (code == RETURN && olabel == 0)
1981 x = gen_rtx_LABEL_REF (VOIDmode, nlabel);
1982 if (loc == &PATTERN (insn))
1983 x = gen_rtx_SET (VOIDmode, pc_rtx, x);
1984 validate_change (insn, loc, x, 1);
/* Case 3: an unconditional (set (pc) (label_ref OLABEL)) being turned
   into a plain RETURN pattern.  */
1988 if (code == SET && nlabel == 0 && SET_DEST (x) == pc_rtx
1989 && GET_CODE (SET_SRC (x)) == LABEL_REF
1990 && XEXP (SET_SRC (x), 0) == olabel)
1992 validate_change (insn, loc, gen_rtx_RETURN (VOIDmode), 1);
/* Otherwise recurse over all sub-rtxs and vectors.  */
1996 fmt = GET_RTX_FORMAT (code);
1997 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2000 redirect_exp_1 (&XEXP (x, i), olabel, nlabel, insn);
2001 else if (fmt[i] == 'E')
2004 for (j = 0; j < XVECLEN (x, i); j++)
2005 redirect_exp_1 (&XVECEXP (x, i, j), olabel, nlabel, insn);
2010 /* Similar, but apply the change group and report success or failure. */
2013 redirect_exp (olabel, nlabel, insn)
/* For a PARALLEL, only the first element holds the jump SET.  */
2019 if (GET_CODE (PATTERN (insn)) == PARALLEL)
2020 loc = &XVECEXP (PATTERN (insn), 0, 0);
2022 loc = &PATTERN (insn);
2024 redirect_exp_1 (loc, olabel, nlabel, insn);
/* No queued changes means nothing referenced OLABEL: fail.  */
2025 if (num_validated_changes () == 0)
2028 return apply_change_group ();
2031 /* Make JUMP go to NLABEL instead of where it jumps now. Accrue
2032 the modifications into the change group. Return false if we did
2033 not see how to do that. */
2036 redirect_jump_1 (jump, nlabel)
/* Record the queue depth so we can tell whether redirect_exp_1 queued
   anything new (changes are accrued, not applied, here).  */
2039 int ochanges = num_validated_changes ();
2042 if (GET_CODE (PATTERN (jump)) == PARALLEL)
2043 loc = &XVECEXP (PATTERN (jump), 0, 0);
2045 loc = &PATTERN (jump);
2047 redirect_exp_1 (loc, JUMP_LABEL (jump), nlabel, jump);
2048 return num_validated_changes () > ochanges;
2051 /* Make JUMP go to NLABEL instead of where it jumps now. If the old
2052 jump target label is unused as a result, it and the code following
2055 If NLABEL is zero, we are to turn the jump into a (possibly conditional)
2058 The return value will be 1 if the change was made, 0 if it wasn't
2059 (this can only occur for NLABEL == 0). */
2062 redirect_jump (jump, nlabel, delete_unused)
2066 rtx olabel = JUMP_LABEL (jump);
/* Redirecting to the current target is a trivial success.  */
2068 if (nlabel == olabel)
2071 if (! redirect_exp (olabel, nlabel, jump))
/* Keep JUMP_LABEL and the label use counts in sync with the new rtl.  */
2074 JUMP_LABEL (jump) = nlabel;
2076 ++LABEL_NUSES (nlabel);
2078 /* If we're eliding the jump over exception cleanups at the end of a
2079 function, move the function end note so that -Wreturn-type works. */
2080 if (olabel && nlabel
2081 && NEXT_INSN (olabel)
2082 && GET_CODE (NEXT_INSN (olabel)) == NOTE
2083 && NOTE_LINE_NUMBER (NEXT_INSN (olabel)) == NOTE_INSN_FUNCTION_END)
2084 emit_note_after (NOTE_INSN_FUNCTION_END, nlabel);
/* Drop the old label's use; delete it (and dead code after it) if the
   caller asked for that and it is now unreferenced.  */
2086 if (olabel && --LABEL_NUSES (olabel) == 0 && delete_unused)
2087 delete_related_insns (olabel);
2092 /* Invert the jump condition of rtx X contained in jump insn, INSN.
2093 Accrue the modifications into the change group. */
/* NOTE(review): the function header (invert_exp_1) is elided in this view;
   only the body fragment is documented.  */
2100 rtx x = pc_set (insn);
2106 code = GET_CODE (x);
2108 if (code == IF_THEN_ELSE)
2110 rtx comp = XEXP (x, 0);
2112 enum rtx_code reversed_code;
2114 /* We can do this in two ways: The preferable way, which can only
2115 be done if this is not an integer comparison, is to reverse
2116 the comparison code. Otherwise, swap the THEN-part and ELSE-part
2117 of the IF_THEN_ELSE. If we can't do either, fail. */
2119 reversed_code = reversed_comparison_code (comp, insn);
2121 if (reversed_code != UNKNOWN)
/* Preferred path: rebuild the comparison with the reversed code,
   queued via validate_change for a later apply_change_group.  */
2123 validate_change (insn, &XEXP (x, 0),
2124 gen_rtx_fmt_ee (reversed_code,
2125 GET_MODE (comp), XEXP (comp, 0),
/* Fallback path: leave the comparison alone and swap the two arms.  */
2132 validate_change (insn, &XEXP (x, 1), XEXP (x, 2), 1);
2133 validate_change (insn, &XEXP (x, 2), tem, 1);
2139 /* Invert the jump condition of conditional jump insn, INSN.
2141 Return 1 if we can do so, 0 if we cannot find a way to do so that
2142 matches a pattern. */
/* NOTE(review): the function header (invert_exp) is elided in this view.  */
2148 invert_exp_1 (insn);
/* If nothing was queued, the condition could not be inverted: fail.  */
2149 if (num_validated_changes () == 0)
2152 return apply_change_group ();
2155 /* Invert the condition of the jump JUMP, and make it jump to label
2156 NLABEL instead of where it jumps now. Accrue changes into the
2157 change group. Return false if we didn't see how to perform the
2158 inversion and redirection. */
2161 invert_jump_1 (jump, nlabel)
/* Inversion must queue at least one change; otherwise give up before
   attempting the redirection.  */
2166 ochanges = num_validated_changes ();
2167 invert_exp_1 (jump);
2168 if (num_validated_changes () == ochanges)
2171 return redirect_jump_1 (jump, nlabel);
2174 /* Invert the condition of the jump JUMP, and make it jump to label
2175 NLABEL instead of where it jumps now. Return true if successful. */
2178 invert_jump (jump, nlabel, delete_unused)
2182 /* We have to either invert the condition and change the label or
2183 do neither. Either operation could fail. We first try to invert
2184 the jump. If that succeeds, we try changing the label. If that fails,
2185 we invert the jump back to what it was. */
2187 if (! invert_exp (jump))
2190 if (redirect_jump (jump, nlabel, delete_unused))
/* The branch now goes the other way; update any REG_BR_PROB data.  */
2192 invert_br_probabilities (jump);
/* Redirection failed: undo the inversion to restore the original insn.  */
2197 if (! invert_exp (jump))
2198 /* This should just be putting it back the way it was. */
2205 /* Like rtx_equal_p except that it considers two REGs as equal
2206 if they renumber to the same value and considers two commutative
2207 operations to be the same if the order of the operands has been
2210 ??? Addition is not commutative on the PA due to the weird implicit
2211 space register selection rules for memory addresses. Therefore, we
2212 don't consider a + b == b + a.
2214 We could/should make this test a little tighter. Possibly only
2215 disabling it on the PA via some backend macro or only disabling this
2216 case when the PLUS is inside a MEM. */
2219 rtx_renumbered_equal_p (x, y)
2223 RTX_CODE code = GET_CODE (x);
/* Special-case REGs and SUBREGs of REGs: map both sides through
   reg_renumber to hard-register numbers and compare those.  */
2229 if ((code == REG || (code == SUBREG && GET_CODE (SUBREG_REG (x)) == REG))
2230 && (GET_CODE (y) == REG || (GET_CODE (y) == SUBREG
2231 && GET_CODE (SUBREG_REG (y)) == REG)))
2233 int reg_x = -1, reg_y = -1;
2234 int byte_x = 0, byte_y = 0;
2236 if (GET_MODE (x) != GET_MODE (y))
2239 /* If we haven't done any renumbering, don't
2240 make any assumptions. */
2241 if (reg_renumber == 0)
2242 return rtx_equal_p (x, y);
/* X is a SUBREG: fold the subreg byte offset into the renumbered
   hard register when possible.  */
2246 reg_x = REGNO (SUBREG_REG (x));
2247 byte_x = SUBREG_BYTE (x);
2249 if (reg_renumber[reg_x] >= 0)
2251 reg_x = subreg_regno_offset (reg_renumber[reg_x],
2252 GET_MODE (SUBREG_REG (x)),
2261 if (reg_renumber[reg_x] >= 0)
2262 reg_x = reg_renumber[reg_x];
/* Same treatment for Y.  */
2265 if (GET_CODE (y) == SUBREG)
2267 reg_y = REGNO (SUBREG_REG (y));
2268 byte_y = SUBREG_BYTE (y);
2270 if (reg_renumber[reg_y] >= 0)
2272 reg_y = subreg_regno_offset (reg_renumber[reg_y],
2273 GET_MODE (SUBREG_REG (y)),
2282 if (reg_renumber[reg_y] >= 0)
2283 reg_y = reg_renumber[reg_y];
/* Equal only if both resolved to the same register and byte offset.  */
2286 return reg_x >= 0 && reg_x == reg_y && byte_x == byte_y;
2289 /* Now we have disposed of all the cases
2290 in which different rtx codes can match. */
2291 if (code != GET_CODE (y))
2303 return INTVAL (x) == INTVAL (y);
2306 /* We can't assume nonlocal labels have their following insns yet. */
2307 if (LABEL_REF_NONLOCAL_P (x) || LABEL_REF_NONLOCAL_P (y))
2308 return XEXP (x, 0) == XEXP (y, 0);
2310 /* Two label-refs are equivalent if they point at labels
2311 in the same position in the instruction stream. */
2312 return (next_real_insn (XEXP (x, 0))
2313 == next_real_insn (XEXP (y, 0)));
/* SYMBOL_REF strings are shared, so pointer comparison suffices.  */
2316 return XSTR (x, 0) == XSTR (y, 0);
2319 /* If we didn't match EQ equality above, they aren't the same. */
2326 /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent. */
2328 if (GET_MODE (x) != GET_MODE (y))
2331 /* For commutative operations, the RTXs match if the operands match in
2332 either order. Also handle the simple binary and unary cases without
2333 a loop.
2334 ??? Don't consider PLUS a commutative operator; see comments above. */
2335 if ((code == EQ || code == NE || GET_RTX_CLASS (code) == 'c')
2337 return ((rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0))
2338 && rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 1)))
2339 || (rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 1))
2340 && rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 0))));
2341 else if (GET_RTX_CLASS (code) == '<' || GET_RTX_CLASS (code) == '2')
2342 return (rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0))
2343 && rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 1)));
2344 else if (GET_RTX_CLASS (code) == '1')
2345 return rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0));
2347 /* Compare the elements. If any pair of corresponding elements
2348 fail to match, return 0 for the whole things. */
2350 fmt = GET_RTX_FORMAT (code);
2351 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2357 if (XWINT (x, i) != XWINT (y, i))
2362 if (XINT (x, i) != XINT (y, i))
2367 if (XTREE (x, i) != XTREE (y, i))
2372 if (strcmp (XSTR (x, i), XSTR (y, i)))
2377 if (! rtx_renumbered_equal_p (XEXP (x, i), XEXP (y, i)))
2382 if (XEXP (x, i) != XEXP (y, i))
2389 if (XVECLEN (x, i) != XVECLEN (y, i))
2391 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2392 if (!rtx_renumbered_equal_p (XVECEXP (x, i, j), XVECEXP (y, i, j)))
2403 /* If X is a hard register or equivalent to one or a subregister of one,
2404 return the hard register number. If X is a pseudo register that was not
2405 assigned a hard register, return the pseudo register number. Otherwise,
2406 return -1. Any rtx is valid for X. */
2412 if (GET_CODE (x) == REG)
/* A renumbered pseudo resolves to its assigned hard register.  */
2414 if (REGNO (x) >= FIRST_PSEUDO_REGISTER && reg_renumber[REGNO (x)] >= 0)
2415 return reg_renumber[REGNO (x)];
2418 if (GET_CODE (x) == SUBREG)
/* Recurse on the inner register, then fold in the subreg offset when the
   base resolved to a hard register.  */
2420 int base = true_regnum (SUBREG_REG (x));
2421 if (base >= 0 && base < FIRST_PSEUDO_REGISTER)
2422 return base + subreg_regno_offset (REGNO (SUBREG_REG (x)),
2423 GET_MODE (SUBREG_REG (x)),
2424 SUBREG_BYTE (x), GET_MODE (x));
2429 /* Optimize code of the form:
2431 for (x = a[i]; x; ...)
2433 for (x = a[i]; x; ...)
2437 Loop optimize will change the above code into
2441 { ...; if (! (x = ...)) break; }
2444 { ...; if (! (x = ...)) break; }
2447 In general, if the first test fails, the program can branch
2448 directly to `foo' and skip the second try which is doomed to fail.
2449 We run this after loop optimization and before flow analysis. */
2451 /* When comparing the insn patterns, we track the fact that different
2452 pseudo-register numbers may have been used in each computation.
2453 The following array stores an equivalence -- same_regs[I] == J means
2454 that pseudo register I was used in the first set of tests in a context
2455 where J was used in the second set. We also count the number of such
2456 pending equivalences. If nonzero, the expressions really aren't the
2457 same. */
/* Pending pseudo-register equivalences; -1 means "no pairing yet".  */
2459 static int *same_regs;
/* Count of entries in same_regs that are currently paired.  */
2461 static int num_same_regs;
2463 /* Track any registers modified between the target of the first jump and
2464 the second jump. They never compare equal. */
/* One flag byte per register number, reset for each candidate branch.  */
2466 static char *modified_regs;
2468 /* Record if memory was modified. */
2470 static int modified_mem;
2472 /* Called via note_stores on each insn between the target of the first
2473 branch and the second branch. It marks any changed registers. */
2476 mark_modified_reg (dest, x, data)
2479 void *data ATTRIBUTE_UNUSED;
/* Stores through a SUBREG modify the underlying register.  */
2484 if (GET_CODE (dest) == SUBREG)
2485 dest = SUBREG_REG (dest);
/* Any store to memory invalidates all memory comparisons.  */
2487 if (GET_CODE (dest) == MEM)
2490 if (GET_CODE (dest) != REG)
2493 regno = REGNO (dest);
2494 if (regno >= FIRST_PSEUDO_REGISTER)
2495 modified_regs[regno] = 1;
2496 /* Don't consider a hard condition code register as modified,
2497 if it is only being set. thread_jumps will check if it is set
2498 to the same value. */
2499 else if (GET_MODE_CLASS (GET_MODE (dest)) != MODE_CC
2500 || GET_CODE (x) != SET
2501 || ! rtx_equal_p (dest, SET_DEST (x))
2502 || HARD_REGNO_NREGS (regno, GET_MODE (dest)) != 1)
/* Hard registers may span several words; mark every constituent regno.  */
2503 for (i = 0; i < HARD_REGNO_NREGS (regno, GET_MODE (dest)); i++)
2504 modified_regs[regno + i] = 1;
2507 /* F is the first insn in the chain of insns. */
2510 thread_jumps (f, max_reg, flag_before_loop)
2513 int flag_before_loop;
2515 /* Basic algorithm is to find a conditional branch,
2516 the label it may branch to, and the branch after
2517 that label. If the two branches test the same condition,
2518 walk back from both branch paths until the insn patterns
2519 differ, or code labels are hit. If we make it back to
2520 the target of the first branch, then we know that the first branch
2521 will either always succeed or always fail depending on the relative
2522 senses of the two branches. So adjust the first branch accordingly
2525 rtx label, b1, b2, t1, t2;
2526 enum rtx_code code1, code2;
2527 rtx b1op0, b1op1, b2op0, b2op1;
2531 enum rtx_code reversed_code1, reversed_code2;
2533 /* Allocate register tables and quick-reset table. */
/* all_reset holds max_reg copies of -1 (filled in the elided loop below)
   so same_regs can be reset with one memcpy per candidate.  */
2534 modified_regs = (char *) xmalloc (max_reg * sizeof (char));
2535 same_regs = (int *) xmalloc (max_reg * sizeof (int));
2536 all_reset = (int *) xmalloc (max_reg * sizeof (int));
2537 for (i = 0; i < max_reg; i++)
2544 for (b1 = f; b1; b1 = NEXT_INSN (b1))
2549 /* Get to a candidate branch insn. */
2550 if (GET_CODE (b1) != JUMP_INSN
2551 || ! any_condjump_p (b1) || JUMP_LABEL (b1) == 0)
/* Fresh tracking state for this candidate.  */
2554 memset (modified_regs, 0, max_reg * sizeof (char));
2557 memcpy (same_regs, all_reset, max_reg * sizeof (int));
2560 label = JUMP_LABEL (b1);
2562 /* Look for a branch after the target. Record any registers and
2563 memory modified between the target and the branch. Stop when we
2564 get to a label since we can't know what was changed there. */
2565 for (b2 = NEXT_INSN (label); b2; b2 = NEXT_INSN (b2))
2567 if (GET_CODE (b2) == CODE_LABEL)
2570 else if (GET_CODE (b2) == JUMP_INSN)
2572 /* If this is an unconditional jump and is the only use of
2573 its target label, we can follow it. */
2574 if (any_uncondjump_p (b2)
2576 && JUMP_LABEL (b2) != 0
2577 && LABEL_NUSES (JUMP_LABEL (b2)) == 1)
2579 b2 = JUMP_LABEL (b2);
2586 if (GET_CODE (b2) != CALL_INSN && GET_CODE (b2) != INSN)
/* A call clobbers every call-used, non-fixed hard register (the
   usual frame/stack/arg pointers are exempt).  */
2589 if (GET_CODE (b2) == CALL_INSN)
2592 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2593 if (call_used_regs[i] && ! fixed_regs[i]
2594 && i != STACK_POINTER_REGNUM
2595 && i != FRAME_POINTER_REGNUM
2596 && i != HARD_FRAME_POINTER_REGNUM
2597 && i != ARG_POINTER_REGNUM)
2598 modified_regs[i] = 1;
2601 note_stores (PATTERN (b2), mark_modified_reg, NULL);
2604 /* Check the next candidate branch insn from the label
2607 || GET_CODE (b2) != JUMP_INSN
2609 || !any_condjump_p (b2)
2610 || !onlyjump_p (b2))
2615 /* Get the comparison codes and operands, reversing the
2616 codes if appropriate. If we don't have comparison codes,
2617 we can't do anything. */
/* SET/SET2 are the pc-set patterns of B1/B2 (extracted in elided
   lines -- confirm).  A pc_rtx in arm 1 means the branch is taken on
   the *false* condition, hence the code reversal.  */
2618 b1op0 = XEXP (XEXP (SET_SRC (set), 0), 0);
2619 b1op1 = XEXP (XEXP (SET_SRC (set), 0), 1);
2620 code1 = GET_CODE (XEXP (SET_SRC (set), 0));
2621 reversed_code1 = code1;
2622 if (XEXP (SET_SRC (set), 1) == pc_rtx)
2623 code1 = reversed_comparison_code (XEXP (SET_SRC (set), 0), b1);
2625 reversed_code1 = reversed_comparison_code (XEXP (SET_SRC (set), 0), b1);
2627 b2op0 = XEXP (XEXP (SET_SRC (set2), 0), 0);
2628 b2op1 = XEXP (XEXP (SET_SRC (set2), 0), 1);
2629 code2 = GET_CODE (XEXP (SET_SRC (set2), 0));
2630 reversed_code2 = code2;
2631 if (XEXP (SET_SRC (set2), 1) == pc_rtx)
2632 code2 = reversed_comparison_code (XEXP (SET_SRC (set2), 0), b2);
2634 reversed_code2 = reversed_comparison_code (XEXP (SET_SRC (set2), 0), b2);
2636 /* If they test the same things and knowing that B1 branches
2637 tells us whether or not B2 branches, check if we
2638 can thread the branch. */
2639 if (rtx_equal_for_thread_p (b1op0, b2op0, b2)
2640 && rtx_equal_for_thread_p (b1op1, b2op1, b2)
2641 && (comparison_dominates_p (code1, code2)
2642 || comparison_dominates_p (code1, reversed_code2)))
/* Walk backwards in lockstep from both branches, requiring
   identical insns until we reach B1's own target.  */
2645 t1 = prev_nonnote_insn (b1);
2646 t2 = prev_nonnote_insn (b2);
2648 while (t1 != 0 && t2 != 0)
2652 /* We have reached the target of the first branch.
2653 If there are no pending register equivalents,
2654 we know that this branch will either always
2655 succeed (if the senses of the two branches are
2656 the same) or always fail (if not). */
2659 if (num_same_regs != 0)
/* Retarget B1: to B2's own target when B1 taken implies B2 taken,
   otherwise to the fall-through point just after B2.  */
2662 if (comparison_dominates_p (code1, code2))
2663 new_label = JUMP_LABEL (b2);
2665 new_label = get_label_after (b2);
2667 if (JUMP_LABEL (b1) != new_label)
2669 rtx prev = PREV_INSN (new_label);
2671 if (flag_before_loop
2672 && GET_CODE (prev) == NOTE
2673 && NOTE_LINE_NUMBER (prev) == NOTE_INSN_LOOP_BEG)
2675 /* Don't thread to the loop label. If a loop
2676 label is reused, loop optimization will
2677 be disabled for that loop. */
2678 new_label = gen_label_rtx ();
2679 emit_label_after (new_label, PREV_INSN (prev));
2681 changed |= redirect_jump (b1, new_label, 1);
2686 /* If either of these is not a normal insn (it might be
2687 a JUMP_INSN, CALL_INSN, or CODE_LABEL) we fail. (NOTEs
2688 have already been skipped above.) Similarly, fail
2689 if the insns are different. */
2690 if (GET_CODE (t1) != INSN || GET_CODE (t2) != INSN
2691 || recog_memoized (t1) != recog_memoized (t2)
2692 || ! rtx_equal_for_thread_p (PATTERN (t1),
2696 t1 = prev_nonnote_insn (t1);
2697 t2 = prev_nonnote_insn (t2);
/* Release the per-pass tables allocated above.  */
2704 free (modified_regs);
2709 /* This is like RTX_EQUAL_P except that it knows about our handling of
2710 possibly equivalent registers and knows to consider volatile and
2711 modified objects as not equal.
2713 YINSN is the insn containing Y. */
2716 rtx_equal_for_thread_p (x, y, yinsn)
2725 code = GET_CODE (x);
2726 /* Rtx's of different codes cannot be equal. */
2727 if (code != GET_CODE (y))
2730 /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent.
2731 (REG:SI x) and (REG:HI x) are NOT equivalent. */
2733 if (GET_MODE (x) != GET_MODE (y))
2736 /* For floating-point, consider everything unequal. This is a bit
2737 pessimistic, but this pass would only rarely do anything for FP
2739 if (TARGET_FLOAT_FORMAT == IEEE_FLOAT_FORMAT
2740 && FLOAT_MODE_P (GET_MODE (x)) && ! flag_unsafe_math_optimizations)
2743 /* For commutative operations, the RTX match if the operand match in any
2744 order. Also handle the simple binary and unary cases without a loop. */
2745 if (code == EQ || code == NE || GET_RTX_CLASS (code) == 'c')
2746 return ((rtx_equal_for_thread_p (XEXP (x, 0), XEXP (y, 0), yinsn)
2747 && rtx_equal_for_thread_p (XEXP (x, 1), XEXP (y, 1), yinsn))
2748 || (rtx_equal_for_thread_p (XEXP (x, 0), XEXP (y, 1), yinsn)
2749 && rtx_equal_for_thread_p (XEXP (x, 1), XEXP (y, 0), yinsn)));
2750 else if (GET_RTX_CLASS (code) == '<' || GET_RTX_CLASS (code) == '2')
2751 return (rtx_equal_for_thread_p (XEXP (x, 0), XEXP (y, 0), yinsn)
2752 && rtx_equal_for_thread_p (XEXP (x, 1), XEXP (y, 1), yinsn));
2753 else if (GET_RTX_CLASS (code) == '1')
2754 return rtx_equal_for_thread_p (XEXP (x, 0), XEXP (y, 0), yinsn);
2756 /* Handle special-cases first. */
2760 if (REGNO (x) == REGNO (y) && ! modified_regs[REGNO (x)])
2763 /* If neither is user variable or hard register, check for possible
2765 if (REG_USERVAR_P (x) || REG_USERVAR_P (y)
2766 || REGNO (x) < FIRST_PSEUDO_REGISTER
2767 || REGNO (y) < FIRST_PSEUDO_REGISTER)
2770 if (same_regs[REGNO (x)] == -1)
2772 same_regs[REGNO (x)] = REGNO (y);
2775 /* If this is the first time we are seeing a register on the `Y'
2776 side, see if it is the last use. If not, we can't thread the
2777 jump, so mark it as not equivalent. */
2778 if (REGNO_LAST_UID (REGNO (y)) != INSN_UID (yinsn))
2784 return (same_regs[REGNO (x)] == (int) REGNO (y));
2789 /* If memory modified or either volatile, not equivalent.
2790 Else, check address. */
2791 if (modified_mem || MEM_VOLATILE_P (x) || MEM_VOLATILE_P (y))
2794 return rtx_equal_for_thread_p (XEXP (x, 0), XEXP (y, 0), yinsn);
2797 if (MEM_VOLATILE_P (x) || MEM_VOLATILE_P (y))
2803 /* Cancel a pending `same_regs' if setting equivalenced registers.
2804 Then process source. */
2805 if (GET_CODE (SET_DEST (x)) == REG
2806 && GET_CODE (SET_DEST (y)) == REG)
2808 if (same_regs[REGNO (SET_DEST (x))] == (int) REGNO (SET_DEST (y)))
2810 same_regs[REGNO (SET_DEST (x))] = -1;
2813 else if (REGNO (SET_DEST (x)) != REGNO (SET_DEST (y)))
2818 if (rtx_equal_for_thread_p (SET_DEST (x), SET_DEST (y), yinsn) == 0)
2822 return rtx_equal_for_thread_p (SET_SRC (x), SET_SRC (y), yinsn);
2825 return XEXP (x, 0) == XEXP (y, 0);
2828 return XSTR (x, 0) == XSTR (y, 0);
2837 fmt = GET_RTX_FORMAT (code);
2838 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2843 if (XWINT (x, i) != XWINT (y, i))
2849 if (XINT (x, i) != XINT (y, i))
2855 /* Two vectors must have the same length. */
2856 if (XVECLEN (x, i) != XVECLEN (y, i))
2859 /* And the corresponding elements must match. */
2860 for (j = 0; j < XVECLEN (x, i); j++)
2861 if (rtx_equal_for_thread_p (XVECEXP (x, i, j),
2862 XVECEXP (y, i, j), yinsn) == 0)
2867 if (rtx_equal_for_thread_p (XEXP (x, i), XEXP (y, i), yinsn) == 0)
2873 if (strcmp (XSTR (x, i), XSTR (y, i)))
2878 /* These are just backpointers, so they don't matter. */
2885 /* It is believed that rtx's at this level will never
2886 contain anything but integers and other rtx's,
2887 except for within LABEL_REFs and SYMBOL_REFs. */