1 /* Analyze RTL for C-Compiler
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001, 2002 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
27 #include "hard-reg-set.h"
30 /* Forward declarations */
31 static void set_of_1 PARAMS ((rtx, rtx, void *));
32 static void insn_dependent_p_1 PARAMS ((rtx, rtx, void *));
33 static int computed_jump_p_1 PARAMS ((rtx));
34 static void parms_set PARAMS ((rtx, rtx, void *));
36 /* Bit flags that specify the machine subtype we are compiling for.
37 Bits are tested using macros TARGET_... defined in the tm.h file
38 and set by `-m...' switches. Must be defined in rtlanal.c. */
42 /* Return 1 if the value of X is unstable
43 (would be different at a different point in the program).
44 The frame pointer, arg pointer, etc. are considered stable
45 (within one function) and so is anything marked `unchanging'. */
/* NOTE(review): elided fragment of rtx_unstable_p -- the embedded source
   numbering skips lines (45 -> 51, 58 -> 72, ...), so the function header,
   switch labels, and return statements are missing from this view. */
51 RTX_CODE code = GET_CODE (x);
/* MEM case (presumably): unstable unless marked unchanging, and then only
   as stable as its address -- TODO confirm against full source. */
58 return ! RTX_UNCHANGING_P (x) || rtx_unstable_p (XEXP (x, 0));
72 /* As in rtx_varies_p, we have to use the actual rtx, not reg number. */
73 if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
74 /* The arg pointer varies if it is not a fixed register. */
75 || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM])
76 || RTX_UNCHANGING_P (x))
78 #ifndef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
79 /* ??? When call-clobbered, the value is stable modulo the restore
80 that must happen after a call. This currently screws up local-alloc
81 into believing that the restore is not needed. */
82 if (x == pic_offset_table_rtx)
88 if (MEM_VOLATILE_P (x))
/* Generic recursion: walk 'e' (single rtx) and 'E' (rtx vector) operands. */
97 fmt = GET_RTX_FORMAT (code);
98 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
101 if (rtx_unstable_p (XEXP (x, i)))
104 else if (fmt[i] == 'E')
107 for (j = 0; j < XVECLEN (x, i); j++)
108 if (rtx_unstable_p (XVECEXP (x, i, j)))
115 /* Return 1 if X has a value that can vary even between two
116 executions of the program. 0 means X can be compared reliably
117 against certain constants or near-constants.
118 FOR_ALIAS is nonzero if we are called from alias analysis; if it is
119 zero, we are slightly more conservative.
120 The frame pointer and the arg pointer are considered constant. */
/* NOTE(review): elided fragment -- return type, parameter declarations,
   switch labels and several returns are missing from this view. */
123 rtx_varies_p (x, for_alias)
127 RTX_CODE code = GET_CODE (x);
134 return ! RTX_UNCHANGING_P (x) || rtx_varies_p (XEXP (x, 0), for_alias);
147 /* Note that we have to test for the actual rtx used for the frame
148 and arg pointers and not just the register number in case we have
149 eliminated the frame and/or arg pointer and are using it
151 if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
152 /* The arg pointer varies if it is not a fixed register. */
153 || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
155 if (x == pic_offset_table_rtx
156 #ifdef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
157 /* ??? When call-clobbered, the value is stable modulo the restore
158 that must happen after a call. This currently screws up
159 local-alloc into believing that the restore is not needed, so we
160 must return 0 only if we are called from alias analysis. */
168 /* The operand 0 of a LO_SUM is considered constant
169 (in fact it is related specifically to operand 1)
170 during alias analysis. */
171 return (! for_alias && rtx_varies_p (XEXP (x, 0), for_alias))
172 || rtx_varies_p (XEXP (x, 1), for_alias);
175 if (MEM_VOLATILE_P (x))
/* Generic recursion over 'e'/'E' operands, threading FOR_ALIAS through. */
184 fmt = GET_RTX_FORMAT (code);
185 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
188 if (rtx_varies_p (XEXP (x, i), for_alias))
191 else if (fmt[i] == 'E')
194 for (j = 0; j < XVECLEN (x, i); j++)
195 if (rtx_varies_p (XVECEXP (x, i, j), for_alias))
202 /* Return 0 if the use of X as an address in a MEM can cause a trap. */
/* NOTE(review): elided fragment -- the switch statement and most case
   labels / returns are missing; only selected lines survive in this view. */
205 rtx_addr_can_trap_p (x)
208 enum rtx_code code = GET_CODE (x);
/* SYMBOL_REF case (presumably): weak symbols may be unresolved -> can trap. */
213 return SYMBOL_REF_WEAK (x);
219 /* As in rtx_varies_p, we have to use the actual rtx, not reg number. */
220 if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
221 || x == stack_pointer_rtx
222 /* The arg pointer varies if it is not a fixed register. */
223 || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
225 /* All of the virtual frame registers are stack references. */
226 if (REGNO (x) >= FIRST_VIRTUAL_REGISTER
227 && REGNO (x) <= LAST_VIRTUAL_REGISTER)
232 return rtx_addr_can_trap_p (XEXP (x, 0));
235 /* An address is assumed not to trap if it is an address that can't
236 trap plus a constant integer or it is the pic register plus a
238 return ! ((! rtx_addr_can_trap_p (XEXP (x, 0))
239 && GET_CODE (XEXP (x, 1)) == CONST_INT)
240 || (XEXP (x, 0) == pic_offset_table_rtx
241 && CONSTANT_P (XEXP (x, 1))));
245 return rtx_addr_can_trap_p (XEXP (x, 1));
252 return rtx_addr_can_trap_p (XEXP (x, 0));
258 /* If it isn't one of the case above, it can cause a trap. */
262 /* Return 1 if X refers to a memory location whose address
263 cannot be compared reliably with constant addresses,
264 or if X refers to a BLKmode memory object.
265 FOR_ALIAS is nonzero if we are called from alias analysis; if it is
266 zero, we are slightly more conservative. */
/* NOTE(review): elided fragment -- declarations and the MEM test that
   guards line 282 are missing from this view. */
269 rtx_addr_varies_p (x, for_alias)
282 return GET_MODE (x) == BLKmode || rtx_varies_p (XEXP (x, 0), for_alias);
284 fmt = GET_RTX_FORMAT (code);
285 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
288 if (rtx_addr_varies_p (XEXP (x, i), for_alias))
291 else if (fmt[i] == 'E')
294 for (j = 0; j < XVECLEN (x, i); j++)
295 if (rtx_addr_varies_p (XVECEXP (x, i, j), for_alias))
301 /* Return the value of the integer term in X, if one is apparent;
303 Only obvious integer terms are detected.
304 This is used in cse.c with the `related_value' field. */
/* NOTE(review): elided fragments of get_integer_term and get_related_value;
   function headers and fall-through returns are missing from this view. */
310 if (GET_CODE (x) == CONST)
313 if (GET_CODE (x) == MINUS
314 && GET_CODE (XEXP (x, 1)) == CONST_INT)
315 return - INTVAL (XEXP (x, 1));
316 if (GET_CODE (x) == PLUS
317 && GET_CODE (XEXP (x, 1)) == CONST_INT)
318 return INTVAL (XEXP (x, 1));
322 /* If X is a constant, return the value sans apparent integer term;
324 Only obvious integer terms are detected. */
327 get_related_value (x)
330 if (GET_CODE (x) != CONST)
333 if (GET_CODE (x) == PLUS
334 && GET_CODE (XEXP (x, 1)) == CONST_INT)
336 else if (GET_CODE (x) == MINUS
337 && GET_CODE (XEXP (x, 1)) == CONST_INT)
342 /* Given a tablejump insn INSN, return the RTL expression for the offset
343 into the jump table. If the offset cannot be determined, then return
346 If EARLIEST is non-zero, it is a pointer to a place where the earliest
347 insn used in locating the offset was found. */
/* NOTE(review): elided fragment -- declarations, early NULL returns, and
   several closing braces are missing from this view; control flow below is
   therefore approximate. */
350 get_jump_table_offset (insn, earliest)
/* Validate: INSN must be a JUMP_INSN whose label is followed by an
   ADDR_VEC / ADDR_DIFF_VEC dispatch table, and must be a single_set. */
364 if (GET_CODE (insn) != JUMP_INSN
365 || ! (label = JUMP_LABEL (insn))
366 || ! (table = NEXT_INSN (label))
367 || GET_CODE (table) != JUMP_INSN
368 || (GET_CODE (PATTERN (table)) != ADDR_VEC
369 && GET_CODE (PATTERN (table)) != ADDR_DIFF_VEC)
370 || ! (set = single_set (insn)))
375 /* Some targets (eg, ARM) emit a tablejump that also
376 contains the out-of-range target. */
377 if (GET_CODE (x) == IF_THEN_ELSE
378 && GET_CODE (XEXP (x, 2)) == LABEL_REF)
381 /* Search backwards and locate the expression stored in X. */
382 for (old_x = NULL_RTX; GET_CODE (x) == REG && x != old_x;
383 old_x = x, x = find_last_value (x, &insn, NULL_RTX, 0))
386 /* If X is an expression using a relative address then strip
387 off the addition / subtraction of PC, PIC_OFFSET_TABLE_REGNUM,
388 or the jump table label. */
389 if (GET_CODE (PATTERN (table)) == ADDR_DIFF_VEC
390 && (GET_CODE (x) == PLUS || GET_CODE (x) == MINUS))
392 for (i = 0; i < 2; i++)
397 if (y == pc_rtx || y == pic_offset_table_rtx)
400 for (old_y = NULL_RTX; GET_CODE (y) == REG && y != old_y;
401 old_y = y, y = find_last_value (y, &old_insn, NULL_RTX, 0))
404 if ((GET_CODE (y) == LABEL_REF && XEXP (y, 0) == label))
413 for (old_x = NULL_RTX; GET_CODE (x) == REG && x != old_x;
414 old_x = x, x = find_last_value (x, &insn, NULL_RTX, 0))
418 /* Strip off any sign or zero extension. */
419 if (GET_CODE (x) == SIGN_EXTEND || GET_CODE (x) == ZERO_EXTEND)
423 for (old_x = NULL_RTX; GET_CODE (x) == REG && x != old_x;
424 old_x = x, x = find_last_value (x, &insn, NULL_RTX, 0))
428 /* If X isn't a MEM then this isn't a tablejump we understand. */
429 if (GET_CODE (x) != MEM)
432 /* Strip off the MEM. */
435 for (old_x = NULL_RTX; GET_CODE (x) == REG && x != old_x;
436 old_x = x, x = find_last_value (x, &insn, NULL_RTX, 0))
439 /* If X isn't a PLUS than this isn't a tablejump we understand. */
440 if (GET_CODE (x) != PLUS)
443 /* At this point we should have an expression representing the jump table
444 plus an offset. Examine each operand in order to determine which one
445 represents the jump table. Knowing that tells us that the other operand
446 must represent the offset. */
447 for (i = 0; i < 2; i++)
452 for (old_y = NULL_RTX; GET_CODE (y) == REG && y != old_y;
453 old_y = y, y = find_last_value (y, &old_insn, NULL_RTX, 0))
456 if ((GET_CODE (y) == CONST || GET_CODE (y) == LABEL_REF)
457 && reg_mentioned_p (label, y))
466 /* Strip off the addition / subtraction of PIC_OFFSET_TABLE_REGNUM. */
467 if (GET_CODE (x) == PLUS || GET_CODE (x) == MINUS)
468 for (i = 0; i < 2; i++)
469 if (XEXP (x, i) == pic_offset_table_rtx)
478 /* Return the RTL expression representing the offset. */
482 /* Return the number of places FIND appears within X. If COUNT_DEST is
483 zero, we do not count occurrences inside the destination of a SET. */
/* NOTE(review): elided fragment -- the switch on 'code', the leaf-node
   cases, and the final return of 'count' are missing from this view. */
486 count_occurrences (x, find, count_dest)
492 const char *format_ptr;
512 if (GET_CODE (find) == MEM && rtx_equal_p (x, find))
517 if (SET_DEST (x) == find && ! count_dest)
518 return count_occurrences (SET_SRC (x), find, count_dest);
525 format_ptr = GET_RTX_FORMAT (code);
528 for (i = 0; i < GET_RTX_LENGTH (code); i++)
530 switch (*format_ptr++)
533 count += count_occurrences (XEXP (x, i), find, count_dest);
537 for (j = 0; j < XVECLEN (x, i); j++)
538 count += count_occurrences (XVECEXP (x, i, j), find, count_dest);
545 /* Nonzero if register REG appears somewhere within IN.
546 Also works if REG is not a register; in this case it checks
547 for a subexpression of IN that is Lisp "equal" to REG. */
/* NOTE(review): elided fragment -- case labels for the comparisons at
   lines 572/582 and the final 'return 0' are missing from this view. */
550 reg_mentioned_p (reg, in)
563 if (GET_CODE (in) == LABEL_REF)
564 return reg == XEXP (in, 0);
566 code = GET_CODE (in);
570 /* Compare registers by number. */
572 return GET_CODE (reg) == REG && REGNO (in) == REGNO (reg);
574 /* These codes have no constituent expressions
582 return GET_CODE (reg) == CONST_INT && INTVAL (in) == INTVAL (reg);
585 /* These are kept unique for a given value. */
592 if (GET_CODE (reg) == code && rtx_equal_p (reg, in))
595 fmt = GET_RTX_FORMAT (code);
597 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
602 for (j = XVECLEN (in, i) - 1; j >= 0; j--)
603 if (reg_mentioned_p (reg, XVECEXP (in, i, j)))
606 else if (fmt[i] == 'e'
607 && reg_mentioned_p (reg, XEXP (in, i)))
613 /* Return 1 if in between BEG and END, exclusive of BEG and END, there is
614 no CODE_LABEL insn. */
/* NOTE(review): elided fragments of three small scanners
   (no_labels_between_p, no_jumps_between_p, reg_used_between_p);
   their 'return 0;' / 'return 1;' tails are missing from this view. */
617 no_labels_between_p (beg, end)
623 for (p = NEXT_INSN (beg); p != end; p = NEXT_INSN (p))
624 if (GET_CODE (p) == CODE_LABEL)
629 /* Return 1 if in between BEG and END, exclusive of BEG and END, there is
630 no JUMP_INSN insn. */
633 no_jumps_between_p (beg, end)
637 for (p = NEXT_INSN (beg); p != end; p = NEXT_INSN (p))
638 if (GET_CODE (p) == JUMP_INSN)
643 /* Nonzero if register REG is used in an insn between
644 FROM_INSN and TO_INSN (exclusive of those two). */
647 reg_used_between_p (reg, from_insn, to_insn)
648 rtx reg, from_insn, to_insn;
652 if (from_insn == to_insn)
655 for (insn = NEXT_INSN (from_insn); insn != to_insn; insn = NEXT_INSN (insn))
/* Call insns may also use REG implicitly via CALL_INSN_FUNCTION_USAGE. */
657 && (reg_overlap_mentioned_p (reg, PATTERN (insn))
658 || (GET_CODE (insn) == CALL_INSN
659 && (find_reg_fusage (insn, USE, reg)
660 || find_reg_fusage (insn, CLOBBER, reg)))))
665 /* Nonzero if the old value of X, a register, is referenced in BODY. If X
666 is entirely replaced by a new value and the only use is as a SET_DEST,
667 we do not consider it a reference. */
/* NOTE(review): elided fragment -- most case labels and 'return 1' /
   'return 0' lines of the switch are missing from this view. */
670 reg_referenced_p (x, body)
676 switch (GET_CODE (body))
679 if (reg_overlap_mentioned_p (x, SET_SRC (body)))
682 /* If the destination is anything other than CC0, PC, a REG or a SUBREG
683 of a REG that occupies all of the REG, the insn references X if
684 it is mentioned in the destination. */
685 if (GET_CODE (SET_DEST (body)) != CC0
686 && GET_CODE (SET_DEST (body)) != PC
687 && GET_CODE (SET_DEST (body)) != REG
688 && ! (GET_CODE (SET_DEST (body)) == SUBREG
689 && GET_CODE (SUBREG_REG (SET_DEST (body))) == REG
/* Word-count comparison: the SUBREG covers the whole inner REG. */
690 && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (body))))
691 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
692 == ((GET_MODE_SIZE (GET_MODE (SET_DEST (body)))
693 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)))
694 && reg_overlap_mentioned_p (x, SET_DEST (body)))
699 for (i = ASM_OPERANDS_INPUT_LENGTH (body) - 1; i >= 0; i--)
700 if (reg_overlap_mentioned_p (x, ASM_OPERANDS_INPUT (body, i)))
707 return reg_overlap_mentioned_p (x, body);
710 return reg_overlap_mentioned_p (x, TRAP_CONDITION (body));
713 return reg_overlap_mentioned_p (x, XEXP (body, 0));
716 case UNSPEC_VOLATILE:
717 for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
718 if (reg_overlap_mentioned_p (x, XVECEXP (body, 0, i)))
723 for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
724 if (reg_referenced_p (x, XVECEXP (body, 0, i)))
729 if (GET_CODE (XEXP (body, 0)) == MEM)
730 if (reg_overlap_mentioned_p (x, XEXP (XEXP (body, 0), 0)))
735 if (reg_overlap_mentioned_p (x, COND_EXEC_TEST (body)))
737 return reg_referenced_p (x, COND_EXEC_CODE (body));
744 /* Nonzero if register REG is referenced in an insn between
745 FROM_INSN and TO_INSN (exclusive of those two). Sets of REG do
/* NOTE(review): elided fragments of reg_referenced_between_p,
   reg_set_between_p and reg_set_p; return statements and some braces are
   missing from this view. */
749 reg_referenced_between_p (reg, from_insn, to_insn)
750 rtx reg, from_insn, to_insn;
754 if (from_insn == to_insn)
757 for (insn = NEXT_INSN (from_insn); insn != to_insn; insn = NEXT_INSN (insn))
759 && (reg_referenced_p (reg, PATTERN (insn))
760 || (GET_CODE (insn) == CALL_INSN
761 && find_reg_fusage (insn, USE, reg))))
766 /* Nonzero if register REG is set or clobbered in an insn between
767 FROM_INSN and TO_INSN (exclusive of those two). */
770 reg_set_between_p (reg, from_insn, to_insn)
771 rtx reg, from_insn, to_insn;
775 if (from_insn == to_insn)
778 for (insn = NEXT_INSN (from_insn); insn != to_insn; insn = NEXT_INSN (insn))
779 if (INSN_P (insn) && reg_set_p (reg, insn))
784 /* Internals of reg_set_between_p. */
786 reg_set_p (reg, insn)
791 /* We can be passed an insn or part of one. If we are passed an insn,
792 check if a side-effect of the insn clobbers REG. */
795 if (FIND_REG_INC_NOTE (insn, reg)
796 || (GET_CODE (insn) == CALL_INSN
797 /* We'd like to test call_used_regs here, but rtlanal.c can't
798 reference that variable due to its use in genattrtab. So
799 we'll just be more conservative.
801 ??? Unless we could ensure that the CALL_INSN_FUNCTION_USAGE
802 information holds all clobbered registers. */
803 && ((GET_CODE (reg) == REG
804 && REGNO (reg) < FIRST_PSEUDO_REGISTER)
805 || GET_CODE (reg) == MEM
806 || find_reg_fusage (insn, CLOBBER, reg))))
809 body = PATTERN (insn);
/* Delegate the actual SET/CLOBBER search to set_of. */
812 return set_of (reg, insn) != NULL_RTX;
815 /* Similar to reg_set_between_p, but check all registers in X. Return 0
816 only if none of them are modified between START and END. Do not
817 consider non-registers one way or the other. */
/* NOTE(review): elided fragments of regs_set_between_p, modified_between_p
   and modified_in_p -- switch/case labels and returns are missing from this
   view; the three walkers share the same 'e'/'E' recursion shape. */
820 regs_set_between_p (x, start, end)
824 enum rtx_code code = GET_CODE (x);
840 return reg_set_between_p (x, start, end);
846 fmt = GET_RTX_FORMAT (code);
847 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
849 if (fmt[i] == 'e' && regs_set_between_p (XEXP (x, i), start, end))
852 else if (fmt[i] == 'E')
853 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
854 if (regs_set_between_p (XVECEXP (x, i, j), start, end))
861 /* Similar to reg_set_between_p, but check all registers in X. Return 0
862 only if none of them are modified between START and END. Return 1 if
863 X contains a MEM; this routine does not perform any memory aliasing. */
866 modified_between_p (x, start, end)
870 enum rtx_code code = GET_CODE (x);
888 /* If the memory is not constant, assume it is modified. If it is
889 constant, we still have to check the address. */
890 if (! RTX_UNCHANGING_P (x))
895 return reg_set_between_p (x, start, end);
901 fmt = GET_RTX_FORMAT (code);
902 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
904 if (fmt[i] == 'e' && modified_between_p (XEXP (x, i), start, end))
907 else if (fmt[i] == 'E')
908 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
909 if (modified_between_p (XVECEXP (x, i, j), start, end))
916 /* Similar to reg_set_p, but check all registers in X. Return 0 only if none
917 of them are modified in INSN. Return 1 if X contains a MEM; this routine
918 does not perform any memory aliasing. */
921 modified_in_p (x, insn)
925 enum rtx_code code = GET_CODE (x);
943 /* If the memory is not constant, assume it is modified. If it is
944 constant, we still have to check the address. */
945 if (! RTX_UNCHANGING_P (x))
950 return reg_set_p (x, insn);
956 fmt = GET_RTX_FORMAT (code);
957 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
959 if (fmt[i] == 'e' && modified_in_p (XEXP (x, i), insn))
962 else if (fmt[i] == 'E')
963 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
964 if (modified_in_p (XVECEXP (x, i, j), insn))
971 /* Return true if anything in insn X is (anti,output,true) dependent on
972 anything in insn Y. */
/* NOTE(review): elided fragments of insn_dependent_p, its note_stores
   callback insn_dependent_p_1, and the set_of/set_of_1 pair; the tmp
   setup, struct set_of_data definition, and returns are missing here. */
975 insn_dependent_p (x, y)
980 if (! INSN_P (x) || ! INSN_P (y))
984 note_stores (PATTERN (x), insn_dependent_p_1, &tmp);
989 note_stores (PATTERN (y), insn_dependent_p_1, &tmp);
996 /* A helper routine for insn_dependent_p called through note_stores. */
999 insn_dependent_p_1 (x, pat, data)
1001 rtx pat ATTRIBUTE_UNUSED;
1004 rtx * pinsn = (rtx *) data;
1006 if (*pinsn && reg_mentioned_p (x, *pinsn))
1010 /* Helper function for set_of. */
1018 set_of_1 (x, pat, data1)
1023 struct set_of_data *data = (struct set_of_data *) (data1);
1024 if (rtx_equal_p (x, data->pat)
1025 || (GET_CODE (x) != MEM && reg_overlap_mentioned_p (data->pat, x)))
1029 /* Give an INSN, return a SET or CLOBBER expression that does modify PAT
1030 (either directly or via STRICT_LOW_PART and similar modifiers). */
1035 struct set_of_data data;
1036 data.found = NULL_RTX;
/* Accept either a full insn or a bare pattern. */
1038 note_stores (INSN_P (insn) ? PATTERN (insn) : insn, set_of_1, &data);
1042 /* Given an INSN, return a SET expression if this insn has only a single SET.
1043 It may also have CLOBBERs, USEs, or SET whose output
1044 will not be used, which we ignore. */
/* NOTE(review): elided fragments of single_set_2 and multiple_sets;
   case labels, the 'set' declaration, and returns are missing here. */
1047 single_set_2 (insn, pat)
1051 int set_verified = 1;
1054 if (GET_CODE (pat) == PARALLEL)
1056 for (i = 0; i < XVECLEN (pat, 0); i++)
1058 rtx sub = XVECEXP (pat, 0, i);
1059 switch (GET_CODE (sub))
1066 /* We can consider insns having multiple sets, where all
1067 but one are dead as single set insns. In common case
1068 only single set is present in the pattern so we want
1069 to avoid checking for REG_UNUSED notes unless necessary.
1071 When we reach set first time, we just expect this is
1072 the single set we are looking for and only when more
1073 sets are found in the insn, we check them. */
1076 if (find_reg_note (insn, REG_UNUSED, SET_DEST (set))
1077 && !side_effects_p (set))
1083 set = sub, set_verified = 0;
1084 else if (!find_reg_note (insn, REG_UNUSED, SET_DEST (sub))
1085 || side_effects_p (sub))
1097 /* Given an INSN, return nonzero if it has more than one SET, else return
1101 multiple_sets (insn)
1107 /* INSN must be an insn. */
1108 if (! INSN_P (insn))
1111 /* Only a PARALLEL can have multiple SETs. */
1112 if (GET_CODE (PATTERN (insn)) == PARALLEL)
1114 for (i = 0, found = 0; i < XVECLEN (PATTERN (insn), 0); i++)
1115 if (GET_CODE (XVECEXP (PATTERN (insn), 0, i)) == SET)
1117 /* If we have already found a SET, then return now. */
1125 /* Either zero or one SET. */
1129 /* Return nonzero if the destination of SET equals the source
1130 and there are no side effects. */
/* NOTE(review): elided fragments of set_noop_p and noop_move_p; function
   headers and some returns are missing from this view. */
1136 rtx src = SET_SRC (set);
1137 rtx dst = SET_DEST (set);
1139 if (side_effects_p (src) || side_effects_p (dst))
1142 if (GET_CODE (dst) == MEM && GET_CODE (src) == MEM)
1143 return rtx_equal_p (dst, src);
1145 if (dst == pc_rtx && src == pc_rtx)
1148 if (GET_CODE (dst) == SIGN_EXTRACT
1149 || GET_CODE (dst) == ZERO_EXTRACT)
1150 return rtx_equal_p (XEXP (dst, 0), src)
1151 && ! BYTES_BIG_ENDIAN && XEXP (dst, 2) == const0_rtx;
1153 if (GET_CODE (dst) == STRICT_LOW_PART)
1154 dst = XEXP (dst, 0);
1156 if (GET_CODE (src) == SUBREG && GET_CODE (dst) == SUBREG)
1158 if (SUBREG_BYTE (src) != SUBREG_BYTE (dst))
1160 src = SUBREG_REG (src);
1161 dst = SUBREG_REG (dst);
/* A no-op move is ultimately REG -> same REG. */
1164 return (GET_CODE (src) == REG && GET_CODE (dst) == REG
1165 && REGNO (src) == REGNO (dst));
1168 /* Return nonzero if an insn consists only of SETs, each of which only sets a
1175 rtx pat = PATTERN (insn);
1177 if (INSN_CODE (insn) == NOOP_MOVE_INSN_CODE)
1180 /* Insns carrying these notes are useful later on. */
1181 if (find_reg_note (insn, REG_EQUAL, NULL_RTX))
1184 /* For now treat an insn with a REG_RETVAL note as a
1185 a special insn which should not be considered a no-op. */
1186 if (find_reg_note (insn, REG_RETVAL, NULL_RTX))
1189 if (GET_CODE (pat) == SET && set_noop_p (pat))
1192 if (GET_CODE (pat) == PARALLEL)
1195 /* If nothing but SETs of registers to themselves,
1196 this insn can also be deleted. */
1197 for (i = 0; i < XVECLEN (pat, 0); i++)
1199 rtx tem = XVECEXP (pat, 0, i);
1201 if (GET_CODE (tem) == USE
1202 || GET_CODE (tem) == CLOBBER)
1205 if (GET_CODE (tem) != SET || ! set_noop_p (tem))
1215 /* Return the last thing that X was assigned from before *PINSN. If VALID_TO
1216 is not NULL_RTX then verify that the object is not modified up to VALID_TO.
1217 If the object was modified, if we hit a partial assignment to X, or hit a
1218 CODE_LABEL first, return X. If we found an assignment, update *PINSN to
1219 point to it. ALLOW_HWREG is set to 1 if hardware registers are allowed to
/* NOTE(review): elided fragment -- the returns inside the loop and the
   final 'return x;' are missing from this view. */
1223 find_last_value (x, pinsn, valid_to, allow_hwreg)
/* Scan backwards from *PINSN until a CODE_LABEL stops the search. */
1231 for (p = PREV_INSN (*pinsn); p && GET_CODE (p) != CODE_LABEL;
1235 rtx set = single_set (p);
1236 rtx note = find_reg_note (p, REG_EQUAL, NULL_RTX);
1238 if (set && rtx_equal_p (x, SET_DEST (set)))
1240 rtx src = SET_SRC (set);
/* Prefer the REG_EQUAL value when it is a plain rtx, not a list. */
1242 if (note && GET_CODE (XEXP (note, 0)) != EXPR_LIST)
1243 src = XEXP (note, 0);
1245 if ((valid_to == NULL_RTX
1246 || ! modified_between_p (src, PREV_INSN (p), valid_to))
1247 /* Reject hard registers because we don't usually want
1248 to use them; we'd rather use a pseudo. */
1249 && (! (GET_CODE (src) == REG
1250 && REGNO (src) < FIRST_PSEUDO_REGISTER) || allow_hwreg))
1257 /* If set in non-simple way, we don't have a value. */
1258 if (reg_set_p (x, p))
1265 /* Return nonzero if register in range [REGNO, ENDREGNO)
1266 appears either explicitly or implicitly in X
1267 other than being stored into.
1269 References contained within the substructure at LOC do not count.
1270 LOC may be zero, meaning don't ignore anything. */
/* NOTE(review): elided fragment -- the 'repeat:' label, switch labels,
   tail-recursion gotos, and several returns are missing from this view. */
1273 refers_to_regno_p (regno, endregno, x, loc)
1274 unsigned int regno, endregno;
1279 unsigned int x_regno;
1284 /* The contents of a REG_NONNEG note is always zero, so we must come here
1285 upon repeat in case the last REG_NOTE is a REG_NONNEG note. */
1289 code = GET_CODE (x);
1294 x_regno = REGNO (x);
1296 /* If we modifying the stack, frame, or argument pointer, it will
1297 clobber a virtual register. In fact, we could be more precise,
1298 but it isn't worth it. */
1299 if ((x_regno == STACK_POINTER_REGNUM
1300 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
1301 || x_regno == ARG_POINTER_REGNUM
1303 || x_regno == FRAME_POINTER_REGNUM)
1304 && regno >= FIRST_VIRTUAL_REGISTER && regno <= LAST_VIRTUAL_REGISTER)
/* Overlap test: [regno, endregno) intersects the registers X occupies. */
1307 return (endregno > x_regno
1308 && regno < x_regno + (x_regno < FIRST_PSEUDO_REGISTER
1309 ? HARD_REGNO_NREGS (x_regno, GET_MODE (x))
1313 /* If this is a SUBREG of a hard reg, we can see exactly which
1314 registers are being modified. Otherwise, handle normally. */
1315 if (GET_CODE (SUBREG_REG (x)) == REG
1316 && REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER)
1318 unsigned int inner_regno = subreg_regno (x);
1319 unsigned int inner_endregno
/* NOTE(review): HARD_REGNO_NREGS is passed 'regno' here, not
   'inner_regno' -- looks suspicious, but cannot be confirmed or fixed
   from this elided view; verify against the full rtlanal.c. */
1320 = inner_regno + (inner_regno < FIRST_PSEUDO_REGISTER
1321 ? HARD_REGNO_NREGS (regno, GET_MODE (x)) : 1);
1323 return endregno > inner_regno && regno < inner_endregno;
1329 if (&SET_DEST (x) != loc
1330 /* Note setting a SUBREG counts as referring to the REG it is in for
1331 a pseudo but not for hard registers since we can
1332 treat each word individually. */
1333 && ((GET_CODE (SET_DEST (x)) == SUBREG
1334 && loc != &SUBREG_REG (SET_DEST (x))
1335 && GET_CODE (SUBREG_REG (SET_DEST (x))) == REG
1336 && REGNO (SUBREG_REG (SET_DEST (x))) >= FIRST_PSEUDO_REGISTER
1337 && refers_to_regno_p (regno, endregno,
1338 SUBREG_REG (SET_DEST (x)), loc))
1339 || (GET_CODE (SET_DEST (x)) != REG
1340 && refers_to_regno_p (regno, endregno, SET_DEST (x), loc))))
1343 if (code == CLOBBER || loc == &SET_SRC (x))
1352 /* X does not match, so try its subexpressions. */
1354 fmt = GET_RTX_FORMAT (code);
1355 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1357 if (fmt[i] == 'e' && loc != &XEXP (x, i))
1365 if (refers_to_regno_p (regno, endregno, XEXP (x, i), loc))
1368 else if (fmt[i] == 'E')
1371 for (j = XVECLEN (x, i) - 1; j >=0; j--)
1372 if (loc != &XVECEXP (x, i, j)
1373 && refers_to_regno_p (regno, endregno, XVECEXP (x, i, j), loc))
1380 /* Nonzero if modifying X will affect IN. If X is a register or a SUBREG,
1381 we check if any register number in X conflicts with the relevant register
1382 numbers. If X is a constant, return 0. If X is a MEM, return 1 iff IN
1383 contains a MEM (we don't bother checking for memory addresses that can't
1384 conflict because we expect this to be a rare case. */
/* NOTE(review): elided fragment -- switch labels (SUBREG/REG/MEM/...),
   local declarations, and several returns are missing from this view. */
1387 reg_overlap_mentioned_p (x, in)
1390 unsigned int regno, endregno;
1392 /* Overly conservative. */
1393 if (GET_CODE (x) == STRICT_LOW_PART)
1396 /* If either argument is a constant, then modifying X can not affect IN. */
1397 if (CONSTANT_P (x) || CONSTANT_P (in))
1400 switch (GET_CODE (x))
1403 regno = REGNO (SUBREG_REG (x));
1404 if (regno < FIRST_PSEUDO_REGISTER)
1405 regno = subreg_regno (x);
1411 endregno = regno + (regno < FIRST_PSEUDO_REGISTER
1412 ? HARD_REGNO_NREGS (regno, GET_MODE (x)) : 1);
1413 return refers_to_regno_p (regno, endregno, in, (rtx*) 0);
1420 if (GET_CODE (in) == MEM)
/* No MEM in IN directly: recurse looking for one in subexpressions. */
1423 fmt = GET_RTX_FORMAT (GET_CODE (in));
1424 for (i = GET_RTX_LENGTH (GET_CODE (in)) - 1; i >= 0; i--)
1425 if (fmt[i] == 'e' && reg_overlap_mentioned_p (x, XEXP (in, i)))
1434 return reg_mentioned_p (x, in);
1440 /* If any register in here refers to it we return true. */
1441 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
1442 if (XEXP (XVECEXP (x, 0, i), 0) != 0
1443 && reg_overlap_mentioned_p (XEXP (XVECEXP (x, 0, i), 0), in))
1455 /* Return the last value to which REG was set prior to INSN. If we can't
1456 find it easily, return 0.
1458 We only return a REG, SUBREG, or constant because it is too hard to
1459 check if a MEM remains unchanged. */
/* NOTE(review): elided fragment -- the loop's break/return logic is
   missing from this view. Line 1490 reads SET_SRC (x) where the set just
   extracted is 'set'; looks suspicious, but cannot be confirmed or fixed
   from this elided listing -- verify against the full rtlanal.c. */
1462 reg_set_last (x, insn)
1466 rtx orig_insn = insn;
1468 /* Scan backwards until reg_set_last_1 changed one of the above flags.
1469 Stop when we reach a label or X is a hard reg and we reach a
1470 CALL_INSN (if reg_set_last_last_regno is a hard reg).
1472 If we find a set of X, ensure that its SET_SRC remains unchanged. */
1474 /* We compare with <= here, because reg_set_last_last_regno
1475 is actually the number of the first reg *not* in X. */
1477 insn && GET_CODE (insn) != CODE_LABEL
1478 && ! (GET_CODE (insn) == CALL_INSN
1479 && REGNO (x) <= FIRST_PSEUDO_REGISTER;
1480 insn = PREV_INSN (insn))
1483 rtx set = set_of (x, insn);
1484 /* OK, this function modify our register. See if we understand it. */
1488 if (GET_CODE (set) != SET || SET_DEST (set) != x)
1490 last_value = SET_SRC (x);
1491 if (CONSTANT_P (last_value)
1492 || ((GET_CODE (last_value) == REG
1493 || GET_CODE (last_value) == SUBREG)
1494 && ! reg_set_between_p (last_value,
1505 /* Call FUN on each register or MEM that is stored into or clobbered by X.
1506 (X would be the pattern of an insn).
1507 FUN receives two arguments:
1508 the REG, MEM, CC0 or PC being stored in or clobbered,
1509 the SET or CLOBBER rtx that does the store.
1511 If the item being stored in or clobbered is a SUBREG of a hard register,
1512 the SUBREG will be passed. */
/* NOTE(review): elided fragment -- declarations and closing braces are
   missing from this view. */
1515 note_stores (x, fun, data)
1517 void (*fun) PARAMS ((rtx, rtx, void *));
/* Look through conditional execution to the guarded SET/CLOBBER. */
1522 if (GET_CODE (x) == COND_EXEC)
1523 x = COND_EXEC_CODE (x);
1525 if (GET_CODE (x) == SET || GET_CODE (x) == CLOBBER)
1527 rtx dest = SET_DEST (x);
/* Strip wrappers; SUBREGs of hard regs are kept and passed through. */
1529 while ((GET_CODE (dest) == SUBREG
1530 && (GET_CODE (SUBREG_REG (dest)) != REG
1531 || REGNO (SUBREG_REG (dest)) >= FIRST_PSEUDO_REGISTER))
1532 || GET_CODE (dest) == ZERO_EXTRACT
1533 || GET_CODE (dest) == SIGN_EXTRACT
1534 || GET_CODE (dest) == STRICT_LOW_PART)
1535 dest = XEXP (dest, 0);
1537 /* If we have a PARALLEL, SET_DEST is a list of EXPR_LIST expressions,
1538 each of whose first operand is a register. We can't know what
1539 precisely is being set in these cases, so make up a CLOBBER to pass
1541 if (GET_CODE (dest) == PARALLEL)
1543 for (i = XVECLEN (dest, 0) - 1; i >= 0; i--)
1544 if (XEXP (XVECEXP (dest, 0, i), 0) != 0)
1545 (*fun) (XEXP (XVECEXP (dest, 0, i), 0),
1546 gen_rtx_CLOBBER (VOIDmode,
1547 XEXP (XVECEXP (dest, 0, i), 0)),
1551 (*fun) (dest, x, data);
1554 else if (GET_CODE (x) == PARALLEL)
1555 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
1556 note_stores (XVECEXP (x, 0, i), fun, data);
1559 /* Like notes_stores, but call FUN for each expression that is being
1560 referenced in PBODY, a pointer to the PATTERN of an insn. We only call
1561 FUN for each expression, not any interior subexpressions. FUN receives a
1562 pointer to the expression and the DATA passed to this function.
1564 Note that this is not quite the same test as that done in reg_referenced_p
1565 since that considers something as being referenced if it is being
1566 partially set, while we do not. */
/* NOTE(review): elided fragment -- case labels (COND_EXEC, PARALLEL, USE,
   TRAP_IF, SET, ...) and 'return'/'break' lines are missing from this view;
   the comment "notes_stores" is a typo for note_stores in the original. */
1569 note_uses (pbody, fun, data)
1571 void (*fun) PARAMS ((rtx *, void *));
1577 switch (GET_CODE (body))
1580 (*fun) (&COND_EXEC_TEST (body), data);
1581 note_uses (&COND_EXEC_CODE (body), fun, data);
1585 for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
1586 note_uses (&XVECEXP (body, 0, i), fun, data);
1590 (*fun) (&XEXP (body, 0), data);
1594 for (i = ASM_OPERANDS_INPUT_LENGTH (body) - 1; i >= 0; i--)
1595 (*fun) (&ASM_OPERANDS_INPUT (body, i), data);
1599 (*fun) (&TRAP_CONDITION (body), data);
1603 (*fun) (&XEXP (body, 0), data);
1607 case UNSPEC_VOLATILE:
1608 for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
1609 (*fun) (&XVECEXP (body, 0, i), data);
1613 if (GET_CODE (XEXP (body, 0)) == MEM)
1614 (*fun) (&XEXP (XEXP (body, 0), 0), data);
1619 rtx dest = SET_DEST (body);
1621 /* For sets we replace everything in source plus registers in memory
1622 expression in store and operands of a ZERO_EXTRACT. */
1623 (*fun) (&SET_SRC (body), data);
1625 if (GET_CODE (dest) == ZERO_EXTRACT)
1627 (*fun) (&XEXP (dest, 1), data);
1628 (*fun) (&XEXP (dest, 2), data);
1631 while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART)
1632 dest = XEXP (dest, 0);
1634 if (GET_CODE (dest) == MEM)
1635 (*fun) (&XEXP (dest, 0), data);
1640 /* All the other possibilities never store. */
1641 (*fun) (pbody, data);
1646 /* Return nonzero if X's old contents don't survive after INSN.
1647 This will be true if X is (cc0) or if X is a register and
1648 X dies in INSN or because INSN entirely sets X.
1650 "Entirely set" means set directly and not through a SUBREG,
1651 ZERO_EXTRACT or SIGN_EXTRACT, so no trace of the old contents remains.
1652 Likewise, REG_INC does not count.
1654 REG may be a hard or pseudo reg. Renumbering is not taken into account,
1655 but for this use that makes no difference, since regs don't overlap
1656 during their lifetimes. Therefore, this function may be used
1657 at any time after deaths have been computed (in flow.c).
1659 If REG is a hard reg that occupies multiple machine registers, this
1660 function will only return 1 if each of those registers will be replaced
/* NOTE(review): elided fragment -- the abort/return lines for the CC0 and
   non-REG cases and the final 'return 1;' are missing from this view. */
1664 dead_or_set_p (insn, x)
1668 unsigned int regno, last_regno;
1671 /* Can't use cc0_rtx below since this file is used by genattrtab.c. */
1672 if (GET_CODE (x) == CC0)
1675 if (GET_CODE (x) != REG)
/* For a multi-reg hard register, every constituent reg must be dead/set. */
1679 last_regno = (regno >= FIRST_PSEUDO_REGISTER ? regno
1680 : regno + HARD_REGNO_NREGS (regno, GET_MODE (x)) - 1);
1682 for (i = regno; i <= last_regno; i++)
1683 if (! dead_or_set_regno_p (insn, i))
1689 /* Utility function for dead_or_set_p to check an individual register. Also
1690 called from flow.c. */
1693 dead_or_set_regno_p (insn, test_regno)
1695 unsigned int test_regno;
1697 unsigned int regno, endregno;
1700 /* See if there is a death note for something that includes TEST_REGNO. */
1701 if (find_regno_note (insn, REG_DEAD, test_regno))
1704 if (GET_CODE (insn) == CALL_INSN
1705 && find_regno_fusage (insn, CLOBBER, test_regno))
1708 pattern = PATTERN (insn);
1710 if (GET_CODE (pattern) == COND_EXEC)
1711 pattern = COND_EXEC_CODE (pattern);
1713 if (GET_CODE (pattern) == SET)
1715 rtx dest = SET_DEST (PATTERN (insn));
1717 /* A value is totally replaced if it is the destination or the
1718 destination is a SUBREG of REGNO that does not change the number of
1720 if (GET_CODE (dest) == SUBREG
1721 && (((GET_MODE_SIZE (GET_MODE (dest))
1722 + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
1723 == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
1724 + UNITS_PER_WORD - 1) / UNITS_PER_WORD)))
1725 dest = SUBREG_REG (dest);
1727 if (GET_CODE (dest) != REG)
1730 regno = REGNO (dest);
1731 endregno = (regno >= FIRST_PSEUDO_REGISTER ? regno + 1
1732 : regno + HARD_REGNO_NREGS (regno, GET_MODE (dest)));
1734 return (test_regno >= regno && test_regno < endregno);
1736 else if (GET_CODE (pattern) == PARALLEL)
1740 for (i = XVECLEN (pattern, 0) - 1; i >= 0; i--)
1742 rtx body = XVECEXP (pattern, 0, i);
1744 if (GET_CODE (body) == COND_EXEC)
1745 body = COND_EXEC_CODE (body);
1747 if (GET_CODE (body) == SET || GET_CODE (body) == CLOBBER)
1749 rtx dest = SET_DEST (body);
1751 if (GET_CODE (dest) == SUBREG
1752 && (((GET_MODE_SIZE (GET_MODE (dest))
1753 + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
1754 == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
1755 + UNITS_PER_WORD - 1) / UNITS_PER_WORD)))
1756 dest = SUBREG_REG (dest);
1758 if (GET_CODE (dest) != REG)
1761 regno = REGNO (dest);
1762 endregno = (regno >= FIRST_PSEUDO_REGISTER ? regno + 1
1763 : regno + HARD_REGNO_NREGS (regno, GET_MODE (dest)));
1765 if (test_regno >= regno && test_regno < endregno)
1774 /* Return the reg-note of kind KIND in insn INSN, if there is one.
1775 If DATUM is nonzero, look for one whose datum is DATUM. */
1778 find_reg_note (insn, kind, datum)
1785 /* Ignore anything that is not an INSN, JUMP_INSN or CALL_INSN. */
1786 if (! INSN_P (insn))
1789 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
1790 if (REG_NOTE_KIND (link) == kind
1791 && (datum == 0 || datum == XEXP (link, 0)))
1796 /* Return the reg-note of kind KIND in insn INSN which applies to register
1797 number REGNO, if any. Return 0 if there is no such reg-note. Note that
1798 the REGNO of this NOTE need not be REGNO if REGNO is a hard register;
1799 it might be the case that the note overlaps REGNO. */
1802 find_regno_note (insn, kind, regno)
1809 /* Ignore anything that is not an INSN, JUMP_INSN or CALL_INSN. */
1810 if (! INSN_P (insn))
1813 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
1814 if (REG_NOTE_KIND (link) == kind
1815 /* Verify that it is a register, so that scratch and MEM won't cause a
1817 && GET_CODE (XEXP (link, 0)) == REG
1818 && REGNO (XEXP (link, 0)) <= regno
1819 && ((REGNO (XEXP (link, 0))
1820 + (REGNO (XEXP (link, 0)) >= FIRST_PSEUDO_REGISTER ? 1
1821 : HARD_REGNO_NREGS (REGNO (XEXP (link, 0)),
1822 GET_MODE (XEXP (link, 0)))))
1828 /* Return a REG_EQUIV or REG_EQUAL note if insn has only a single set and
1832 find_reg_equal_equiv_note (insn)
1837 if (single_set (insn) == 0)
1839 else if ((note = find_reg_note (insn, REG_EQUIV, NULL_RTX)) != 0)
1842 return find_reg_note (insn, REG_EQUAL, NULL_RTX);
1845 /* Return true if DATUM, or any overlap of DATUM, of kind CODE is found
1846 in the CALL_INSN_FUNCTION_USAGE information of INSN. */
1849 find_reg_fusage (insn, code, datum)
1854 /* If it's not a CALL_INSN, it can't possibly have a
1855 CALL_INSN_FUNCTION_USAGE field, so don't bother checking. */
1856 if (GET_CODE (insn) != CALL_INSN)
1862 if (GET_CODE (datum) != REG)
1866 for (link = CALL_INSN_FUNCTION_USAGE (insn);
1868 link = XEXP (link, 1))
1869 if (GET_CODE (XEXP (link, 0)) == code
1870 && rtx_equal_p (datum, XEXP (XEXP (link, 0), 0)))
1875 unsigned int regno = REGNO (datum);
1877 /* CALL_INSN_FUNCTION_USAGE information cannot contain references
1878 to pseudo registers, so don't bother checking. */
1880 if (regno < FIRST_PSEUDO_REGISTER)
1882 unsigned int end_regno
1883 = regno + HARD_REGNO_NREGS (regno, GET_MODE (datum));
1886 for (i = regno; i < end_regno; i++)
1887 if (find_regno_fusage (insn, code, i))
1895 /* Return true if REGNO, or any overlap of REGNO, of kind CODE is found
1896 in the CALL_INSN_FUNCTION_USAGE information of INSN. */
1899 find_regno_fusage (insn, code, regno)
1906 /* CALL_INSN_FUNCTION_USAGE information cannot contain references
1907 to pseudo registers, so don't bother checking. */
1909 if (regno >= FIRST_PSEUDO_REGISTER
1910 || GET_CODE (insn) != CALL_INSN )
1913 for (link = CALL_INSN_FUNCTION_USAGE (insn); link; link = XEXP (link, 1))
1915 unsigned int regnote;
1918 if (GET_CODE (op = XEXP (link, 0)) == code
1919 && GET_CODE (reg = XEXP (op, 0)) == REG
1920 && (regnote = REGNO (reg)) <= regno
1921 && regnote + HARD_REGNO_NREGS (regnote, GET_MODE (reg)) > regno)
1928 /* Remove register note NOTE from the REG_NOTES of INSN. */
1931 remove_note (insn, note)
1937 if (note == NULL_RTX)
1940 if (REG_NOTES (insn) == note)
1942 REG_NOTES (insn) = XEXP (note, 1);
1946 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
1947 if (XEXP (link, 1) == note)
1949 XEXP (link, 1) = XEXP (note, 1);
1956 /* Search LISTP (an EXPR_LIST) for an entry whose first operand is NODE and
1957 return 1 if it is found. A simple equality test is used to determine if
1961 in_expr_list_p (listp, node)
1967 for (x = listp; x; x = XEXP (x, 1))
1968 if (node == XEXP (x, 0))
1974 /* Search LISTP (an EXPR_LIST) for an entry whose first operand is NODE and
1975 remove that entry from the list if it is found.
1977 A simple equality test is used to determine if NODE matches. */
1980 remove_node_from_expr_list (node, listp)
1985 rtx prev = NULL_RTX;
1989 if (node == XEXP (temp, 0))
1991 /* Splice the node out of the list. */
1993 XEXP (prev, 1) = XEXP (temp, 1);
1995 *listp = XEXP (temp, 1);
2001 temp = XEXP (temp, 1);
2005 /* Nonzero if X contains any volatile instructions. These are instructions
2006 which may cause unpredictable machine state instructions, and thus no
2007 instructions should be moved or combined across them. This includes
2008 only volatile asms and UNSPEC_VOLATILE instructions. */
2016 code = GET_CODE (x);
2036 case UNSPEC_VOLATILE:
2037 /* case TRAP_IF: This isn't clear yet. */
2041 if (MEM_VOLATILE_P (x))
2048 /* Recursively scan the operands of this expression. */
2051 const char *fmt = GET_RTX_FORMAT (code);
2054 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2058 if (volatile_insn_p (XEXP (x, i)))
2061 else if (fmt[i] == 'E')
2064 for (j = 0; j < XVECLEN (x, i); j++)
2065 if (volatile_insn_p (XVECEXP (x, i, j)))
2073 /* Nonzero if X contains any volatile memory references
2074 UNSPEC_VOLATILE operations or volatile ASM_OPERANDS expressions. */
2082 code = GET_CODE (x);
2101 case UNSPEC_VOLATILE:
2102 /* case TRAP_IF: This isn't clear yet. */
2107 if (MEM_VOLATILE_P (x))
2114 /* Recursively scan the operands of this expression. */
2117 const char *fmt = GET_RTX_FORMAT (code);
2120 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2124 if (volatile_refs_p (XEXP (x, i)))
2127 else if (fmt[i] == 'E')
2130 for (j = 0; j < XVECLEN (x, i); j++)
2131 if (volatile_refs_p (XVECEXP (x, i, j)))
2139 /* Similar to above, except that it also rejects register pre- and post-
2148 code = GET_CODE (x);
2166 /* Reject CLOBBER with a non-VOID mode. These are made by combine.c
2167 when some combination can't be done. If we see one, don't think
2168 that we can simplify the expression. */
2169 return (GET_MODE (x) != VOIDmode);
2178 case UNSPEC_VOLATILE:
2179 /* case TRAP_IF: This isn't clear yet. */
2184 if (MEM_VOLATILE_P (x))
2191 /* Recursively scan the operands of this expression. */
2194 const char *fmt = GET_RTX_FORMAT (code);
2197 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2201 if (side_effects_p (XEXP (x, i)))
2204 else if (fmt[i] == 'E')
2207 for (j = 0; j < XVECLEN (x, i); j++)
2208 if (side_effects_p (XVECEXP (x, i, j)))
2216 /* Return nonzero if evaluating rtx X might cause a trap. */
2228 code = GET_CODE (x);
2231 /* Handle these cases quickly. */
2244 case UNSPEC_VOLATILE:
2249 return MEM_VOLATILE_P (x);
2251 /* Memory ref can trap unless it's a static var or a stack slot. */
2253 return rtx_addr_can_trap_p (XEXP (x, 0));
2255 /* Division by a non-constant might trap. */
2260 if (! CONSTANT_P (XEXP (x, 1))
2261 || GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
2263 /* This was const0_rtx, but by not using that,
2264 we can link this file into other programs. */
2265 if (GET_CODE (XEXP (x, 1)) == CONST_INT && INTVAL (XEXP (x, 1)) == 0)
2270 /* An EXPR_LIST is used to represent a function call. This
2271 certainly may trap. */
2279 /* Some floating point comparisons may trap. */
2280 /* ??? There is no machine independent way to check for tests that trap
2281 when COMPARE is used, though many targets do make this distinction.
2282 For instance, sparc uses CCFPE for compares which generate exceptions
2283 and CCFP for compares which do not generate exceptions. */
2284 if (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
2286 /* But often the compare has some CC mode, so check operand
2288 if (GET_MODE_CLASS (GET_MODE (XEXP (x, 0))) == MODE_FLOAT
2289 || GET_MODE_CLASS (GET_MODE (XEXP (x, 1))) == MODE_FLOAT)
2295 /* These operations don't trap even with floating point. */
2299 /* Any floating arithmetic may trap. */
2300 if (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
2304 fmt = GET_RTX_FORMAT (code);
2305 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2309 if (may_trap_p (XEXP (x, i)))
2312 else if (fmt[i] == 'E')
2315 for (j = 0; j < XVECLEN (x, i); j++)
2316 if (may_trap_p (XVECEXP (x, i, j)))
2323 /* Return nonzero if X contains a comparison that is not either EQ or NE,
2324 i.e., an inequality. */
2327 inequality_comparisons_p (x)
2332 enum rtx_code code = GET_CODE (x);
2361 len = GET_RTX_LENGTH (code);
2362 fmt = GET_RTX_FORMAT (code);
2364 for (i = 0; i < len; i++)
2368 if (inequality_comparisons_p (XEXP (x, i)))
2371 else if (fmt[i] == 'E')
2374 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2375 if (inequality_comparisons_p (XVECEXP (x, i, j)))
2383 /* Replace any occurrence of FROM in X with TO. The function does
2384 not enter into CONST_DOUBLE for the replace.
2386 Note that copying is not done so X must not be shared unless all copies
2387 are to be modified. */
2390 replace_rtx (x, from, to)
2396 /* The following prevents loops occurrence when we change MEM in
2397 CONST_DOUBLE onto the same CONST_DOUBLE. */
2398 if (x != 0 && GET_CODE (x) == CONST_DOUBLE)
2404 /* Allow this function to make replacements in EXPR_LISTs. */
2408 fmt = GET_RTX_FORMAT (GET_CODE (x));
2409 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
2412 XEXP (x, i) = replace_rtx (XEXP (x, i), from, to);
2413 else if (fmt[i] == 'E')
2414 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2415 XVECEXP (x, i, j) = replace_rtx (XVECEXP (x, i, j), from, to);
2421 /* Throughout the rtx X, replace many registers according to REG_MAP.
2422 Return the replacement for X (which may be X with altered contents).
2423 REG_MAP[R] is the replacement for register R, or 0 for don't replace.
2424 NREGS is the length of REG_MAP; regs >= NREGS are not mapped.
2426 We only support REG_MAP entries of REG or SUBREG. Also, hard registers
2427 should not be mapped to pseudos or vice versa since validate_change
2430 If REPLACE_DEST is 1, replacements are also done in destinations;
2431 otherwise, only sources are replaced. */
2434 replace_regs (x, reg_map, nregs, replace_dest)
2447 code = GET_CODE (x);
2461 /* Verify that the register has an entry before trying to access it. */
2462 if (REGNO (x) < nregs && reg_map[REGNO (x)] != 0)
2464 /* SUBREGs can't be shared. Always return a copy to ensure that if
2465 this replacement occurs more than once then each instance will
2466 get distinct rtx. */
2467 if (GET_CODE (reg_map[REGNO (x)]) == SUBREG)
2468 return copy_rtx (reg_map[REGNO (x)]);
2469 return reg_map[REGNO (x)];
2474 /* Prevent making nested SUBREGs. */
2475 if (GET_CODE (SUBREG_REG (x)) == REG && REGNO (SUBREG_REG (x)) < nregs
2476 && reg_map[REGNO (SUBREG_REG (x))] != 0
2477 && GET_CODE (reg_map[REGNO (SUBREG_REG (x))]) == SUBREG)
2479 rtx map_val = reg_map[REGNO (SUBREG_REG (x))];
2480 return simplify_gen_subreg (GET_MODE (x), map_val,
2481 GET_MODE (SUBREG_REG (x)),
2488 SET_DEST (x) = replace_regs (SET_DEST (x), reg_map, nregs, 0);
2490 else if (GET_CODE (SET_DEST (x)) == MEM
2491 || GET_CODE (SET_DEST (x)) == STRICT_LOW_PART)
2492 /* Even if we are not to replace destinations, replace register if it
2493 is CONTAINED in destination (destination is memory or
2494 STRICT_LOW_PART). */
2495 XEXP (SET_DEST (x), 0) = replace_regs (XEXP (SET_DEST (x), 0),
2497 else if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT)
2498 /* Similarly, for ZERO_EXTRACT we replace all operands. */
2501 SET_SRC (x) = replace_regs (SET_SRC (x), reg_map, nregs, 0);
2508 fmt = GET_RTX_FORMAT (code);
2509 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2512 XEXP (x, i) = replace_regs (XEXP (x, i), reg_map, nregs, replace_dest);
2513 else if (fmt[i] == 'E')
2516 for (j = 0; j < XVECLEN (x, i); j++)
2517 XVECEXP (x, i, j) = replace_regs (XVECEXP (x, i, j), reg_map,
2518 nregs, replace_dest);
2524 /* A subroutine of computed_jump_p, return 1 if X contains a REG or MEM or
2525 constant that is not in the constant pool and not in the condition
2526 of an IF_THEN_ELSE. */
2529 computed_jump_p_1 (x)
2532 enum rtx_code code = GET_CODE (x);
2550 return ! (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
2551 && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)));
2554 return (computed_jump_p_1 (XEXP (x, 1))
2555 || computed_jump_p_1 (XEXP (x, 2)));
2561 fmt = GET_RTX_FORMAT (code);
2562 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2565 && computed_jump_p_1 (XEXP (x, i)))
2568 else if (fmt[i] == 'E')
2569 for (j = 0; j < XVECLEN (x, i); j++)
2570 if (computed_jump_p_1 (XVECEXP (x, i, j)))
2577 /* Return nonzero if INSN is an indirect jump (aka computed jump).
2579 Tablejumps and casesi insns are not considered indirect jumps;
2580 we can recognize them by a (use (label_ref)). */
2583 computed_jump_p (insn)
2587 if (GET_CODE (insn) == JUMP_INSN)
2589 rtx pat = PATTERN (insn);
2591 if (find_reg_note (insn, REG_LABEL, NULL_RTX))
2593 else if (GET_CODE (pat) == PARALLEL)
2595 int len = XVECLEN (pat, 0);
2596 int has_use_labelref = 0;
2598 for (i = len - 1; i >= 0; i--)
2599 if (GET_CODE (XVECEXP (pat, 0, i)) == USE
2600 && (GET_CODE (XEXP (XVECEXP (pat, 0, i), 0))
2602 has_use_labelref = 1;
2604 if (! has_use_labelref)
2605 for (i = len - 1; i >= 0; i--)
2606 if (GET_CODE (XVECEXP (pat, 0, i)) == SET
2607 && SET_DEST (XVECEXP (pat, 0, i)) == pc_rtx
2608 && computed_jump_p_1 (SET_SRC (XVECEXP (pat, 0, i))))
2611 else if (GET_CODE (pat) == SET
2612 && SET_DEST (pat) == pc_rtx
2613 && computed_jump_p_1 (SET_SRC (pat)))
2619 /* Traverse X via depth-first search, calling F for each
2620 sub-expression (including X itself). F is also passed the DATA.
2621 If F returns -1, do not traverse sub-expressions, but continue
2622 traversing the rest of the tree. If F ever returns any other
2623 non-zero value, stop the traversal, and return the value returned
2624 by F. Otherwise, return 0. This function does not traverse inside
2625 tree structure that contains RTX_EXPRs, or into sub-expressions
2626 whose format code is `0' since it is not known whether or not those
2627 codes are actually RTL.
2629 This routine is very general, and could (should?) be used to
2630 implement many of the other routines in this file. */
2633 for_each_rtx (x, f, data)
2644 result = (*f) (x, data);
2646 /* Do not traverse sub-expressions. */
2648 else if (result != 0)
2649 /* Stop the traversal. */
2653 /* There are no sub-expressions. */
2656 length = GET_RTX_LENGTH (GET_CODE (*x));
2657 format = GET_RTX_FORMAT (GET_CODE (*x));
2659 for (i = 0; i < length; ++i)
2664 result = for_each_rtx (&XEXP (*x, i), f, data);
2671 if (XVEC (*x, i) != 0)
2674 for (j = 0; j < XVECLEN (*x, i); ++j)
2676 result = for_each_rtx (&XVECEXP (*x, i, j), f, data);
2684 /* Nothing to do. */
2693 /* Searches X for any reference to REGNO, returning the rtx of the
2694 reference found if any. Otherwise, returns NULL_RTX. */
2697 regno_use_in (regno, x)
2705 if (GET_CODE (x) == REG && REGNO (x) == regno)
2708 fmt = GET_RTX_FORMAT (GET_CODE (x));
2709 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
2713 if ((tem = regno_use_in (regno, XEXP (x, i))))
2716 else if (fmt[i] == 'E')
2717 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2718 if ((tem = regno_use_in (regno , XVECEXP (x, i, j))))
2725 /* Return a value indicating whether OP, an operand of a commutative
2726 operation, is preferred as the first or second operand. The higher
2727 the value, the stronger the preference for being the first operand.
2728 We use negative values to indicate a preference for the first operand
2729 and positive values for the second operand. */
2732 commutative_operand_precedence (op)
2735 /* Constants always come the second operand. Prefer "nice" constants. */
2736 if (GET_CODE (op) == CONST_INT)
2738 if (GET_CODE (op) == CONST_DOUBLE)
2740 if (CONSTANT_P (op))
2743 /* SUBREGs of objects should come second. */
2744 if (GET_CODE (op) == SUBREG
2745 && GET_RTX_CLASS (GET_CODE (SUBREG_REG (op))) == 'o')
2748 /* If only one operand is a `neg', `not',
2749 `mult', `plus', or `minus' expression, it will be the first
2751 if (GET_CODE (op) == NEG || GET_CODE (op) == NOT
2752 || GET_CODE (op) == MULT || GET_CODE (op) == PLUS
2753 || GET_CODE (op) == MINUS)
2756 /* Complex expressions should be the first, so decrease priority
2758 if (GET_RTX_CLASS (GET_CODE (op)) == 'o')
2763 /* Return 1 iff it is necessary to swap operands of commutative operation
2764 in order to canonicalize expression. */
2767 swap_commutative_operands_p (x, y)
2770 return (commutative_operand_precedence (x)
2771 < commutative_operand_precedence (y));
2774 /* Return 1 if X is an autoincrement side effect and the register is
2775 not the stack pointer. */
2780 switch (GET_CODE (x))
2788 /* There are no REG_INC notes for SP. */
2789 if (XEXP (x, 0) != stack_pointer_rtx)
2797 /* Return 1 if the sequence of instructions beginning with FROM and up
2798 to and including TO is safe to move. If NEW_TO is non-NULL, and
2799 the sequence is not already safe to move, but can be easily
2800 extended to a sequence which is safe, then NEW_TO will point to the
2801 end of the extended sequence.
2803 For now, this function only checks that the region contains whole
2804 exception regions, but it could be extended to check additional
2805 conditions as well. */
2808 insns_safe_to_move_p (from, to, new_to)
2813 int eh_region_count = 0;
2817 /* By default, assume the end of the region will be what was
2824 if (GET_CODE (r) == NOTE)
2826 switch (NOTE_LINE_NUMBER (r))
2828 case NOTE_INSN_EH_REGION_BEG:
2832 case NOTE_INSN_EH_REGION_END:
2833 if (eh_region_count == 0)
2834 /* This sequence of instructions contains the end of
2835 an exception region, but not he beginning. Moving
2836 it will cause chaos. */
2847 /* If we've passed TO, and we see a non-note instruction, we
2848 can't extend the sequence to a movable sequence. */
2854 /* It's OK to move the sequence if there were matched sets of
2855 exception region notes. */
2856 return eh_region_count == 0;
2861 /* It's OK to move the sequence if there were matched sets of
2862 exception region notes. */
2863 if (past_to_p && eh_region_count == 0)
2869 /* Go to the next instruction. */
2876 /* Return non-zero if IN contains a piece of rtl that has the address LOC */
2878 loc_mentioned_in_p (loc, in)
2881 enum rtx_code code = GET_CODE (in);
2882 const char *fmt = GET_RTX_FORMAT (code);
2885 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2887 if (loc == &in->fld[i].rtx)
2891 if (loc_mentioned_in_p (loc, XEXP (in, i)))
2894 else if (fmt[i] == 'E')
2895 for (j = XVECLEN (in, i) - 1; j >= 0; j--)
2896 if (loc_mentioned_in_p (loc, XVECEXP (in, i, j)))
2902 /* Given a subreg X, return the bit offset where the subreg begins
2903 (counting from the least significant bit of the reg). */
2909 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (x));
2910 enum machine_mode mode = GET_MODE (x);
2911 unsigned int bitpos;
2915 /* A paradoxical subreg begins at bit position 0. */
2916 if (GET_MODE_BITSIZE (mode) > GET_MODE_BITSIZE (inner_mode))
2919 if (WORDS_BIG_ENDIAN != BYTES_BIG_ENDIAN)
2920 /* If the subreg crosses a word boundary ensure that
2921 it also begins and ends on a word boundary. */
2922 if ((SUBREG_BYTE (x) % UNITS_PER_WORD
2923 + GET_MODE_SIZE (mode)) > UNITS_PER_WORD
2924 && (SUBREG_BYTE (x) % UNITS_PER_WORD
2925 || GET_MODE_SIZE (mode) % UNITS_PER_WORD))
2928 if (WORDS_BIG_ENDIAN)
2929 word = (GET_MODE_SIZE (inner_mode)
2930 - (SUBREG_BYTE (x) + GET_MODE_SIZE (mode))) / UNITS_PER_WORD;
2932 word = SUBREG_BYTE (x) / UNITS_PER_WORD;
2933 bitpos = word * BITS_PER_WORD;
2935 if (BYTES_BIG_ENDIAN)
2936 byte = (GET_MODE_SIZE (inner_mode)
2937 - (SUBREG_BYTE (x) + GET_MODE_SIZE (mode))) % UNITS_PER_WORD;
2939 byte = SUBREG_BYTE (x) % UNITS_PER_WORD;
2940 bitpos += byte * BITS_PER_UNIT;
2945 /* This function returns the regno offset of a subreg expression.
2946 xregno - A regno of an inner hard subreg_reg (or what will become one).
2947 xmode - The mode of xregno.
2948 offset - The byte offset.
2949 ymode - The mode of a top level SUBREG (or what may become one).
2950 RETURN - The regno offset which would be used.
2951 This function can be overridden by defining SUBREG_REGNO_OFFSET,
2952 taking the same parameters. */
2954 subreg_regno_offset (xregno, xmode, offset, ymode)
2955 unsigned int xregno;
2956 enum machine_mode xmode;
2957 unsigned int offset;
2958 enum machine_mode ymode;
2961 int nregs_xmode, nregs_ymode;
2962 int mode_multiple, nregs_multiple;
2965 /* Check for an override, and use it instead. */
2966 #ifdef SUBREG_REGNO_OFFSET
2967 ret = SUBREG_REGNO_OFFSET (xregno, xmode, offset, ymode);
2969 if (xregno >= FIRST_PSEUDO_REGISTER)
2972 nregs_xmode = HARD_REGNO_NREGS (xregno, xmode);
2973 nregs_ymode = HARD_REGNO_NREGS (xregno, ymode);
2974 if (offset == 0 || nregs_xmode == nregs_ymode)
2977 /* size of ymode must not be greater than the size of xmode. */
2978 mode_multiple = GET_MODE_SIZE (xmode) / GET_MODE_SIZE (ymode);
2979 if (mode_multiple == 0)
2982 y_offset = offset / GET_MODE_SIZE (ymode);
2983 nregs_multiple = nregs_xmode / nregs_ymode;
2984 ret = (y_offset / (mode_multiple / nregs_multiple)) * nregs_ymode;
2990 /* Return the final regno that a subreg expression refers to. */
2996 rtx subreg = SUBREG_REG (x);
2997 int regno = REGNO (subreg);
2999 ret = regno + subreg_regno_offset (regno,
3006 struct parms_set_data
3012 /* Helper function for noticing stores to parameter registers. */
3014 parms_set (x, pat, data)
3015 rtx x, pat ATTRIBUTE_UNUSED;
3018 struct parms_set_data *d = data;
3019 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
3020 && TEST_HARD_REG_BIT (d->regs, REGNO (x)))
3022 CLEAR_HARD_REG_BIT (d->regs, REGNO (x));
3027 /* Look backward for first parameter to be loaded.
3028 Do not skip BOUNDARY. */
3030 find_first_parameter_load (call_insn, boundary)
3031 rtx call_insn, boundary;
3033 struct parms_set_data parm;
3036 /* Since different machines initialize their parameter registers
3037 in different orders, assume nothing. Collect the set of all
3038 parameter registers. */
3039 CLEAR_HARD_REG_SET (parm.regs);
3041 for (p = CALL_INSN_FUNCTION_USAGE (call_insn); p; p = XEXP (p, 1))
3042 if (GET_CODE (XEXP (p, 0)) == USE
3043 && GET_CODE (XEXP (XEXP (p, 0), 0)) == REG)
3045 if (REGNO (XEXP (XEXP (p, 0), 0)) >= FIRST_PSEUDO_REGISTER)
3048 /* We only care about registers which can hold function
3050 if (!FUNCTION_ARG_REGNO_P (REGNO (XEXP (XEXP (p, 0), 0))))
3053 SET_HARD_REG_BIT (parm.regs, REGNO (XEXP (XEXP (p, 0), 0)));
3058 /* Search backward for the first set of a register in this set. */
3059 while (parm.nregs && before != boundary)
3061 before = PREV_INSN (before);
3063 /* It is possible that some loads got CSEed from one call to
3064 another. Stop in that case. */
3065 if (GET_CODE (before) == CALL_INSN)
3068 /* Our caller needs either ensure that we will find all sets
3069 (in case code has not been optimized yet), or take care
3070 for possible labels in a way by setting boundary to preceding
3072 if (GET_CODE (before) == CODE_LABEL)
3074 if (before != boundary)
3079 if (INSN_P (before))
3080 note_stores (PATTERN (before), parms_set, &parm);