/* Analyze RTL for GNU compiler.
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007 Free Software
   Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "coretypes.h"
#include "hard-reg-set.h"
#include "insn-config.h"
/* Information about a subreg of a hard register.  */

  /* Offset of first hard register involved in the subreg.  */

  /* Number of hard registers involved in the subreg.  */

  /* Whether this subreg can be represented as a hard reg with the new
     mode.  */
/* Forward declarations */
static void set_of_1 (rtx, rtx, void *);
static bool covers_regno_p (const_rtx, unsigned int);
static bool covers_regno_no_parallel_p (const_rtx, unsigned int);
static int rtx_referenced_p_1 (rtx *, void *);
static int computed_jump_p_1 (const_rtx);
static void parms_set (rtx, rtx, void *);
static void subreg_get_info (unsigned int, enum machine_mode,
                             unsigned int, enum machine_mode,
                             struct subreg_info *);
static unsigned HOST_WIDE_INT cached_nonzero_bits (const_rtx, enum machine_mode,
                                                   const_rtx, enum machine_mode,
                                                   unsigned HOST_WIDE_INT);
static unsigned HOST_WIDE_INT nonzero_bits1 (const_rtx, enum machine_mode,
                                             const_rtx, enum machine_mode,
                                             unsigned HOST_WIDE_INT);
static unsigned int cached_num_sign_bit_copies (const_rtx, enum machine_mode, const_rtx,
                                                enum machine_mode, unsigned int);
static unsigned int num_sign_bit_copies1 (const_rtx, enum machine_mode, const_rtx,
                                          enum machine_mode, unsigned int);
/* Offset of the first 'e', 'E' or 'V' operand for each rtx code, or
   -1 if a code has no such operand.  */
static int non_rtx_starting_operands[NUM_RTX_CODE];

/* Bit flags that specify the machine subtype we are compiling for.
   Bits are tested using macros TARGET_... defined in the tm.h file
   and set by `-m...' switches.  Must be defined in rtlanal.c.  */
/* Truncation narrows the mode from SOURCE mode to DESTINATION mode.
   If TARGET_MODE_REP_EXTENDED (DESTINATION, DESTINATION_REP) is
   SIGN_EXTEND then while narrowing we also have to enforce the
   representation and sign-extend the value to mode DESTINATION_REP.

   If the value is already sign-extended to DESTINATION_REP mode we
   can just switch to DESTINATION mode on it.  For each pair of
   integral modes SOURCE and DESTINATION, when truncating from SOURCE
   to DESTINATION, NUM_SIGN_BIT_COPIES_IN_REP[SOURCE][DESTINATION]
   contains the number of high-order bits in SOURCE that have to be
   copies of the sign-bit so that we can do this mode-switch to
   DESTINATION.  */

static unsigned int
num_sign_bit_copies_in_rep[MAX_MODE_INT + 1][MAX_MODE_INT + 1];
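/* Illustrative example (not part of the original source): on a target
   where TARGET_MODE_REP_EXTENDED (SImode, DImode) == SIGN_EXTEND,
   truncating DImode to SImode must yield a value that is still
   sign-extended in the DImode representation.  That holds when the top
   64 - 32 + 1 = 33 bits of the DImode source are copies of the sign
   bit, so num_sign_bit_copies_in_rep[DImode][SImode] would be 33.  */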
/* Return 1 if the value of X is unstable
   (would be different at a different point in the program).
   The frame pointer, arg pointer, etc. are considered stable
   (within one function) and so is anything marked `unchanging'.  */

rtx_unstable_p (const_rtx x)

  const RTX_CODE code = GET_CODE (x);

      return !MEM_READONLY_P (x) || rtx_unstable_p (XEXP (x, 0));

      /* As in rtx_varies_p, we have to use the actual rtx, not reg number.  */
      if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
          /* The arg pointer varies if it is not a fixed register.  */
          || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))

#ifndef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
      /* ??? When call-clobbered, the value is stable modulo the restore
         that must happen after a call.  This currently screws up local-alloc
         into believing that the restore is not needed.  */
      if (x == pic_offset_table_rtx)
#endif

      if (MEM_VOLATILE_P (x))

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
      if (rtx_unstable_p (XEXP (x, i)))
    else if (fmt[i] == 'E')
        for (j = 0; j < XVECLEN (x, i); j++)
          if (rtx_unstable_p (XVECEXP (x, i, j)))
/* Return 1 if X has a value that can vary even between two
   executions of the program.  0 means X can be compared reliably
   against certain constants or near-constants.
   FOR_ALIAS is nonzero if we are called from alias analysis; if it is
   zero, we are slightly more conservative.
   The frame pointer and the arg pointer are considered constant.  */

rtx_varies_p (const_rtx x, bool for_alias)

      return !MEM_READONLY_P (x) || rtx_varies_p (XEXP (x, 0), for_alias);

      /* Note that we have to test for the actual rtx used for the frame
         and arg pointers and not just the register number in case we have
         eliminated the frame and/or arg pointer and are using it
         for pseudos.  */
      if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
          /* The arg pointer varies if it is not a fixed register.  */
          || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))

      if (x == pic_offset_table_rtx
#ifdef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
          /* ??? When call-clobbered, the value is stable modulo the restore
             that must happen after a call.  This currently screws up
             local-alloc into believing that the restore is not needed, so we
             must return 0 only if we are called from alias analysis.  */
#endif

      /* The operand 0 of a LO_SUM is considered constant
         (in fact it is related specifically to operand 1)
         during alias analysis.  */
      return (! for_alias && rtx_varies_p (XEXP (x, 0), for_alias))
             || rtx_varies_p (XEXP (x, 1), for_alias);

      if (MEM_VOLATILE_P (x))

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
      if (rtx_varies_p (XEXP (x, i), for_alias))
    else if (fmt[i] == 'E')
        for (j = 0; j < XVECLEN (x, i); j++)
          if (rtx_varies_p (XVECEXP (x, i, j), for_alias))
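/* Illustrative examples (not part of the original source): rtx_varies_p
   returns 0 for (const_int 4) and for frame_pointer_rtx, which are
   stable within a function; it returns 1 for a MEM whose contents are
   not MEM_READONLY_P, since the stored value may change between two
   executions of the program.  */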
/* Return nonzero if the use of X as an address in a MEM can cause a trap.
   MODE is the mode of the MEM (not that of X) and UNALIGNED_MEMS controls
   whether nonzero is returned for unaligned memory accesses on strict
   alignment machines.  */

rtx_addr_can_trap_p_1 (const_rtx x, enum machine_mode mode, bool unaligned_mems)

  enum rtx_code code = GET_CODE (x);

      return SYMBOL_REF_WEAK (x);

      /* As in rtx_varies_p, we have to use the actual rtx, not reg number.  */
      if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
          || x == stack_pointer_rtx
          /* The arg pointer varies if it is not a fixed register.  */
          || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))

      /* All of the virtual frame registers are stack references.  */
      if (REGNO (x) >= FIRST_VIRTUAL_REGISTER
          && REGNO (x) <= LAST_VIRTUAL_REGISTER)

      return rtx_addr_can_trap_p_1 (XEXP (x, 0), mode, unaligned_mems);

      /* An address is assumed not to trap if:
         - it is an address that can't trap plus a constant integer,
           with the proper remainder modulo the mode size if we are
           considering unaligned memory references.  */
      if (!rtx_addr_can_trap_p_1 (XEXP (x, 0), mode, unaligned_mems)
          && GET_CODE (XEXP (x, 1)) == CONST_INT)

          HOST_WIDE_INT offset;

          if (!STRICT_ALIGNMENT
              || GET_MODE_SIZE (mode) == 0)

          offset = INTVAL (XEXP (x, 1));

#ifdef SPARC_STACK_BOUNDARY_HACK
          /* ??? The SPARC port may claim a STACK_BOUNDARY higher than
             the real alignment of %sp.  However, when it does this, the
             alignment of %sp+STACK_POINTER_OFFSET is STACK_BOUNDARY.  */
          if (SPARC_STACK_BOUNDARY_HACK
              && (XEXP (x, 0) == stack_pointer_rtx
                  || XEXP (x, 0) == hard_frame_pointer_rtx))
            offset -= STACK_POINTER_OFFSET;
#endif

          return offset % GET_MODE_SIZE (mode) != 0;

      /* - or it is the pic register plus a constant.  */
      if (XEXP (x, 0) == pic_offset_table_rtx && CONSTANT_P (XEXP (x, 1)))

      return rtx_addr_can_trap_p_1 (XEXP (x, 1), mode, unaligned_mems);

      return rtx_addr_can_trap_p_1 (XEXP (x, 0), mode, unaligned_mems);

  /* If it isn't one of the cases above, it can cause a trap.  */
/* Return nonzero if the use of X as an address in a MEM can cause a trap.  */

rtx_addr_can_trap_p (const_rtx x)

  return rtx_addr_can_trap_p_1 (x, VOIDmode, false);
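/* Illustrative examples (not part of the original source): with VOIDmode
   and unaligned_mems false, (symbol_ref "x") can trap only if the symbol
   is weak; stack_pointer_rtx and (plus stack_pointer_rtx (const_int 8))
   do not trap; a pseudo register of unknown value is conservatively
   assumed to trap.  */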
/* Return true if X is an address that is known to not be zero.  */

nonzero_address_p (const_rtx x)

  const enum rtx_code code = GET_CODE (x);

      return !SYMBOL_REF_WEAK (x);

      /* As in rtx_varies_p, we have to use the actual rtx, not reg number.  */
      if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
          || x == stack_pointer_rtx
          || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))

      /* All of the virtual frame registers are stack references.  */
      if (REGNO (x) >= FIRST_VIRTUAL_REGISTER
          && REGNO (x) <= LAST_VIRTUAL_REGISTER)

      return nonzero_address_p (XEXP (x, 0));

      if (GET_CODE (XEXP (x, 1)) == CONST_INT)
        return nonzero_address_p (XEXP (x, 0));
      /* Handle PIC references.  */
      else if (XEXP (x, 0) == pic_offset_table_rtx
               && CONSTANT_P (XEXP (x, 1)))

      /* Similar to the above; allow positive offsets.  Further, since
         auto-inc is only allowed in memories, the register must be a
         pointer.  */
      if (GET_CODE (XEXP (x, 1)) == CONST_INT
          && INTVAL (XEXP (x, 1)) > 0)
        return nonzero_address_p (XEXP (x, 0));

      /* Similarly.  Further, the offset is always positive.  */

      return nonzero_address_p (XEXP (x, 0));

      return nonzero_address_p (XEXP (x, 1));

  /* If it isn't one of the cases above, might be zero.  */
/* Return 1 if X refers to a memory location whose address
   cannot be compared reliably with constant addresses,
   or if X refers to a BLKmode memory object.
   FOR_ALIAS is nonzero if we are called from alias analysis; if it is
   zero, we are slightly more conservative.  */

rtx_addr_varies_p (const_rtx x, bool for_alias)

    return GET_MODE (x) == BLKmode || rtx_varies_p (XEXP (x, 0), for_alias);

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
      if (rtx_addr_varies_p (XEXP (x, i), for_alias))
    else if (fmt[i] == 'E')
        for (j = 0; j < XVECLEN (x, i); j++)
          if (rtx_addr_varies_p (XVECEXP (x, i, j), for_alias))
/* Return the value of the integer term in X, if one is apparent;
   otherwise return 0.
   Only obvious integer terms are detected.
   This is used in cse.c with the `related_value' field.  */

get_integer_term (const_rtx x)

  if (GET_CODE (x) == CONST)

  if (GET_CODE (x) == MINUS
      && GET_CODE (XEXP (x, 1)) == CONST_INT)
    return - INTVAL (XEXP (x, 1));
  if (GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 1)) == CONST_INT)
    return INTVAL (XEXP (x, 1));
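/* For example (illustrative, not part of the original source):
     get_integer_term ((plus (reg) (const_int 4)))  == 4
     get_integer_term ((minus (reg) (const_int 4))) == -4
   and 0 when no integer term is apparent.  */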
/* If X is a constant, return the value sans apparent integer term;
   otherwise return 0.
   Only obvious integer terms are detected.  */

get_related_value (const_rtx x)

  if (GET_CODE (x) != CONST)

  if (GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 1)) == CONST_INT)
  else if (GET_CODE (x) == MINUS
           && GET_CODE (XEXP (x, 1)) == CONST_INT)
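/* For example (illustrative): given (const (plus (symbol_ref "x")
   (const_int 8))), get_related_value returns (symbol_ref "x"), i.e. the
   constant stripped of its integer term.  */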
/* Return true if SYMBOL is a SYMBOL_REF and OFFSET + SYMBOL points
   to somewhere in the same object or object_block as SYMBOL.  */

offset_within_block_p (const_rtx symbol, HOST_WIDE_INT offset)

  if (GET_CODE (symbol) != SYMBOL_REF)

  if (CONSTANT_POOL_ADDRESS_P (symbol)
      && offset < (int) GET_MODE_SIZE (get_pool_mode (symbol)))

  decl = SYMBOL_REF_DECL (symbol);
  if (decl && offset < int_size_in_bytes (TREE_TYPE (decl)))

  if (SYMBOL_REF_HAS_BLOCK_INFO_P (symbol)
      && SYMBOL_REF_BLOCK (symbol)
      && SYMBOL_REF_BLOCK_OFFSET (symbol) >= 0
      && ((unsigned HOST_WIDE_INT) offset + SYMBOL_REF_BLOCK_OFFSET (symbol)
          < (unsigned HOST_WIDE_INT) SYMBOL_REF_BLOCK (symbol)->size))
/* Split X into a base and a constant offset, storing them in *BASE_OUT
   and *OFFSET_OUT respectively.  */

split_const (rtx x, rtx *base_out, rtx *offset_out)

  if (GET_CODE (x) == CONST)

      if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == CONST_INT)
        {
          *base_out = XEXP (x, 0);
          *offset_out = XEXP (x, 1);

  *offset_out = const0_rtx;
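/* For example (illustrative): splitting (const (plus (symbol_ref "x")
   (const_int 12))) stores (symbol_ref "x") in *BASE_OUT and
   (const_int 12) in *OFFSET_OUT; for a plain REG, the base is the REG
   itself and the offset is const0_rtx.  */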
/* Return the number of places FIND appears within X.  If COUNT_DEST is
   zero, we do not count occurrences inside the destination of a SET.  */

count_occurrences (const_rtx x, const_rtx find, int count_dest)

  const char *format_ptr;

      count = count_occurrences (XEXP (x, 0), find, count_dest);
      count += count_occurrences (XEXP (x, 1), find, count_dest);

      if (MEM_P (find) && rtx_equal_p (x, find))

      if (SET_DEST (x) == find && ! count_dest)
        return count_occurrences (SET_SRC (x), find, count_dest);

  format_ptr = GET_RTX_FORMAT (code);

  for (i = 0; i < GET_RTX_LENGTH (code); i++)
      switch (*format_ptr++)
          count += count_occurrences (XEXP (x, i), find, count_dest);

          for (j = 0; j < XVECLEN (x, i); j++)
            count += count_occurrences (XVECEXP (x, i, j), find, count_dest);
/* Nonzero if register REG appears somewhere within IN.
   Also works if REG is not a register; in this case it checks
   for a subexpression of IN that is Lisp "equal" to REG.  */

reg_mentioned_p (const_rtx reg, const_rtx in)

  if (GET_CODE (in) == LABEL_REF)
    return reg == XEXP (in, 0);

  code = GET_CODE (in);

      /* Compare registers by number.  */
      return REG_P (reg) && REGNO (in) == REGNO (reg);

      /* These codes have no constituent expressions
         and are unique.  */

      /* These are kept unique for a given value.  */

  if (GET_CODE (reg) == code && rtx_equal_p (reg, in))

  fmt = GET_RTX_FORMAT (code);

  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
          for (j = XVECLEN (in, i) - 1; j >= 0; j--)
            if (reg_mentioned_p (reg, XVECEXP (in, i, j)))
      else if (fmt[i] == 'e'
               && reg_mentioned_p (reg, XEXP (in, i)))
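/* For example (illustrative): reg_mentioned_p returns nonzero for REG
   (reg:SI 100) in (set (reg:SI 101) (plus (reg:SI 100) (const_int 1))).
   Registers are matched by register number alone, ignoring mode.  */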
/* Return 1 if in between BEG and END, exclusive of BEG and END, there is
   no CODE_LABEL insn.  */

no_labels_between_p (const_rtx beg, const_rtx end)

  for (p = NEXT_INSN (beg); p != end; p = NEXT_INSN (p))

/* Nonzero if register REG is used in an insn between
   FROM_INSN and TO_INSN (exclusive of those two).  */

reg_used_between_p (const_rtx reg, const_rtx from_insn, const_rtx to_insn)

  if (from_insn == to_insn)

  for (insn = NEXT_INSN (from_insn); insn != to_insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn)
        && (reg_overlap_mentioned_p (reg, PATTERN (insn))
            || (CALL_P (insn) && find_reg_fusage (insn, USE, reg))))
/* Nonzero if the old value of X, a register, is referenced in BODY.  If X
   is entirely replaced by a new value and the only use is as a SET_DEST,
   we do not consider it a reference.  */

reg_referenced_p (const_rtx x, const_rtx body)

  switch (GET_CODE (body))

      if (reg_overlap_mentioned_p (x, SET_SRC (body)))

      /* If the destination is anything other than CC0, PC, a REG or a SUBREG
         of a REG that occupies all of the REG, the insn references X if
         it is mentioned in the destination.  */
      if (GET_CODE (SET_DEST (body)) != CC0
          && GET_CODE (SET_DEST (body)) != PC
          && !REG_P (SET_DEST (body))
          && ! (GET_CODE (SET_DEST (body)) == SUBREG
                && REG_P (SUBREG_REG (SET_DEST (body)))
                && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (body))))
                      + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
                    == ((GET_MODE_SIZE (GET_MODE (SET_DEST (body)))
                         + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)))
          && reg_overlap_mentioned_p (x, SET_DEST (body)))

      for (i = ASM_OPERANDS_INPUT_LENGTH (body) - 1; i >= 0; i--)
        if (reg_overlap_mentioned_p (x, ASM_OPERANDS_INPUT (body, i)))

      return reg_overlap_mentioned_p (x, body);

      return reg_overlap_mentioned_p (x, TRAP_CONDITION (body));

      return reg_overlap_mentioned_p (x, XEXP (body, 0));

    case UNSPEC_VOLATILE:
      for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
        if (reg_overlap_mentioned_p (x, XVECEXP (body, 0, i)))

      for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
        if (reg_referenced_p (x, XVECEXP (body, 0, i)))

      if (MEM_P (XEXP (body, 0)))
        if (reg_overlap_mentioned_p (x, XEXP (XEXP (body, 0), 0)))

      if (reg_overlap_mentioned_p (x, COND_EXEC_TEST (body)))
      return reg_referenced_p (x, COND_EXEC_CODE (body));
/* Nonzero if register REG is set or clobbered in an insn between
   FROM_INSN and TO_INSN (exclusive of those two).  */

reg_set_between_p (rtx reg, rtx from_insn, rtx to_insn)

  if (from_insn == to_insn)

  for (insn = NEXT_INSN (from_insn); insn != to_insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn) && reg_set_p (reg, insn))

/* Internals of reg_set_between_p.  */

reg_set_p (rtx reg, rtx insn)

  /* We can be passed an insn or part of one.  If we are passed an insn,
     check if a side-effect of the insn clobbers REG.  */
  if (INSN_P (insn)
      && (FIND_REG_INC_NOTE (insn, reg)
          || (CALL_P (insn)
              && ((REG_P (reg)
                   && REGNO (reg) < FIRST_PSEUDO_REGISTER
                   && overlaps_hard_reg_set_p (regs_invalidated_by_call,
                                               GET_MODE (reg), REGNO (reg)))
                  || MEM_P (reg)
                  || find_reg_fusage (insn, CLOBBER, reg)))))

  return set_of (reg, insn) != NULL_RTX;
/* Similar to reg_set_between_p, but check all registers in X.  Return 0
   only if none of them are modified between START and END.  Return 1 if
   X contains a MEM; this routine does use memory aliasing.  */

modified_between_p (rtx x, rtx start, rtx end)

  enum rtx_code code = GET_CODE (x);

      if (modified_between_p (XEXP (x, 0), start, end))
      if (MEM_READONLY_P (x))
      for (insn = NEXT_INSN (start); insn != end; insn = NEXT_INSN (insn))
        if (memory_modified_in_insn_p (x, insn))

      return reg_set_between_p (x, start, end);

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
      if (fmt[i] == 'e' && modified_between_p (XEXP (x, i), start, end))
      else if (fmt[i] == 'E')
        for (j = XVECLEN (x, i) - 1; j >= 0; j--)
          if (modified_between_p (XVECEXP (x, i, j), start, end))
/* Similar to reg_set_p, but check all registers in X.  Return 0 only if none
   of them are modified in INSN.  Return 1 if X contains a MEM; this routine
   does use memory aliasing.  */

modified_in_p (rtx x, rtx insn)

  enum rtx_code code = GET_CODE (x);

      if (modified_in_p (XEXP (x, 0), insn))
      if (MEM_READONLY_P (x))
      if (memory_modified_in_insn_p (x, insn))

      return reg_set_p (x, insn);

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
      if (fmt[i] == 'e' && modified_in_p (XEXP (x, i), insn))
      else if (fmt[i] == 'E')
        for (j = XVECLEN (x, i) - 1; j >= 0; j--)
          if (modified_in_p (XVECEXP (x, i, j), insn))
/* Helper function for set_of.  */

set_of_1 (rtx x, rtx pat, void *data1)

  struct set_of_data *data = (struct set_of_data *) (data1);
  if (rtx_equal_p (x, data->pat)
      || (!MEM_P (x) && reg_overlap_mentioned_p (data->pat, x)))

/* Given an INSN, return a SET or CLOBBER expression that does modify PAT
   (either directly or via STRICT_LOW_PART and similar modifiers).  */

set_of (rtx pat, rtx insn)

  struct set_of_data data;
  data.found = NULL_RTX;

  note_stores (INSN_P (insn) ? PATTERN (insn) : insn, set_of_1, &data);
/* Given an INSN, return a SET expression if this insn has only a single SET.
   It may also have CLOBBERs, USEs, or SETs whose output
   will not be used, which we ignore.  */

single_set_2 (const_rtx insn, const_rtx pat)

  int set_verified = 1;

  if (GET_CODE (pat) == PARALLEL)

      for (i = 0; i < XVECLEN (pat, 0); i++)
        {
          rtx sub = XVECEXP (pat, 0, i);
          switch (GET_CODE (sub))

              /* We can consider insns having multiple sets, where all
                 but one are dead, as single set insns.  In the common case
                 only a single set is present in the pattern, so we want
                 to avoid checking for REG_UNUSED notes unless necessary.

                 When we reach a set for the first time, we just expect it
                 to be the single set we are looking for; only when more
                 sets are found in the insn do we check them.  */

              if (find_reg_note (insn, REG_UNUSED, SET_DEST (set))
                  && !side_effects_p (set))

                set = sub, set_verified = 0;
              else if (!find_reg_note (insn, REG_UNUSED, SET_DEST (sub))
                       || side_effects_p (sub))
/* Given an INSN, return nonzero if it has more than one SET, else return
   zero.  */

multiple_sets (const_rtx insn)

  /* INSN must be an insn.  */
  if (! INSN_P (insn))

  /* Only a PARALLEL can have multiple SETs.  */
  if (GET_CODE (PATTERN (insn)) == PARALLEL)

      for (i = 0, found = 0; i < XVECLEN (PATTERN (insn), 0); i++)
        if (GET_CODE (XVECEXP (PATTERN (insn), 0, i)) == SET)
            /* If we have already found a SET, then return now.  */

  /* Either zero or one SET.  */
/* Return nonzero if the destination of SET equals the source
   and there are no side effects.  */

set_noop_p (const_rtx set)

  rtx src = SET_SRC (set);
  rtx dst = SET_DEST (set);

  if (dst == pc_rtx && src == pc_rtx)

  if (MEM_P (dst) && MEM_P (src))
    return rtx_equal_p (dst, src) && !side_effects_p (dst);

  if (GET_CODE (dst) == ZERO_EXTRACT)
    return rtx_equal_p (XEXP (dst, 0), src)
           && ! BYTES_BIG_ENDIAN && XEXP (dst, 2) == const0_rtx
           && !side_effects_p (src);

  if (GET_CODE (dst) == STRICT_LOW_PART)
    dst = XEXP (dst, 0);

  if (GET_CODE (src) == SUBREG && GET_CODE (dst) == SUBREG)
      if (SUBREG_BYTE (src) != SUBREG_BYTE (dst))
      src = SUBREG_REG (src);
      dst = SUBREG_REG (dst);

  return (REG_P (src) && REG_P (dst)
          && REGNO (src) == REGNO (dst));
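/* For example (illustrative): (set (reg:SI 1) (reg:SI 1)) is a no-op,
   as is a pair of SUBREGs of the same register with the same
   SUBREG_BYTE; a MEM-to-MEM copy is a no-op only if both MEMs are
   rtx_equal_p and free of side effects such as auto-increment
   addresses.  */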
/* Return nonzero if an insn consists only of SETs, each of which only sets a
   value to itself.  */

noop_move_p (const_rtx insn)

  rtx pat = PATTERN (insn);

  if (INSN_CODE (insn) == NOOP_MOVE_INSN_CODE)

  /* Insns carrying these notes are useful later on.  */
  if (find_reg_note (insn, REG_EQUAL, NULL_RTX))

  /* For now treat an insn with a REG_RETVAL note as a
     special insn which should not be considered a no-op.  */
  if (find_reg_note (insn, REG_RETVAL, NULL_RTX))

  if (GET_CODE (pat) == SET && set_noop_p (pat))

  if (GET_CODE (pat) == PARALLEL)

      /* If nothing but SETs of registers to themselves,
         this insn can also be deleted.  */
      for (i = 0; i < XVECLEN (pat, 0); i++)
        {
          rtx tem = XVECEXP (pat, 0, i);

          if (GET_CODE (tem) == USE
              || GET_CODE (tem) == CLOBBER)

          if (GET_CODE (tem) != SET || ! set_noop_p (tem))
/* Return the last thing that X was assigned from before *PINSN.  If VALID_TO
   is not NULL_RTX then verify that the object is not modified up to VALID_TO.
   If the object was modified, if we hit a partial assignment to X, or hit a
   CODE_LABEL first, return X.  If we found an assignment, update *PINSN to
   point to it.  ALLOW_HWREG is set to 1 if hardware registers are allowed to
   be the src.  */

find_last_value (rtx x, rtx *pinsn, rtx valid_to, int allow_hwreg)

  for (p = PREV_INSN (*pinsn); p && !LABEL_P (p);
       p = PREV_INSN (p))

        rtx set = single_set (p);
        rtx note = find_reg_note (p, REG_EQUAL, NULL_RTX);

        if (set && rtx_equal_p (x, SET_DEST (set)))
          {
            rtx src = SET_SRC (set);

            if (note && GET_CODE (XEXP (note, 0)) != EXPR_LIST)
              src = XEXP (note, 0);

            if ((valid_to == NULL_RTX
                 || ! modified_between_p (src, PREV_INSN (p), valid_to))
                /* Reject hard registers because we don't usually want
                   to use them; we'd rather use a pseudo.  */
                && (! (REG_P (src)
                       && REGNO (src) < FIRST_PSEUDO_REGISTER) || allow_hwreg))

        /* If set in non-simple way, we don't have a value.  */
        if (reg_set_p (x, p))
/* Return nonzero if register in range [REGNO, ENDREGNO)
   appears either explicitly or implicitly in X
   other than being stored into.

   References contained within the substructure at LOC do not count.
   LOC may be zero, meaning don't ignore anything.  */

refers_to_regno_p (unsigned int regno, unsigned int endregno, const_rtx x,
                   rtx *loc)

  unsigned int x_regno;

  /* The contents of a REG_NONNEG note is always zero, so we must come here
     upon repeat in case the last REG_NOTE is a REG_NONNEG note.  */

  code = GET_CODE (x);

      x_regno = REGNO (x);

      /* If we are modifying the stack, frame, or argument pointer, it will
         clobber a virtual register.  In fact, we could be more precise,
         but it isn't worth it.  */
      if ((x_regno == STACK_POINTER_REGNUM
#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
           || x_regno == ARG_POINTER_REGNUM
#endif
           || x_regno == FRAME_POINTER_REGNUM)
          && regno >= FIRST_VIRTUAL_REGISTER && regno <= LAST_VIRTUAL_REGISTER)

      return endregno > x_regno && regno < END_REGNO (x);

      /* If this is a SUBREG of a hard reg, we can see exactly which
         registers are being modified.  Otherwise, handle normally.  */
      if (REG_P (SUBREG_REG (x))
          && REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER)
        {
          unsigned int inner_regno = subreg_regno (x);
          unsigned int inner_endregno
            = inner_regno + (inner_regno < FIRST_PSEUDO_REGISTER
                             ? subreg_nregs (x) : 1);

          return endregno > inner_regno && regno < inner_endregno;

      if (&SET_DEST (x) != loc
          /* Note setting a SUBREG counts as referring to the REG it is in for
             a pseudo but not for hard registers since we can
             treat each word individually.  */
          && ((GET_CODE (SET_DEST (x)) == SUBREG
               && loc != &SUBREG_REG (SET_DEST (x))
               && REG_P (SUBREG_REG (SET_DEST (x)))
               && REGNO (SUBREG_REG (SET_DEST (x))) >= FIRST_PSEUDO_REGISTER
               && refers_to_regno_p (regno, endregno,
                                     SUBREG_REG (SET_DEST (x)), loc))
              || (!REG_P (SET_DEST (x))
                  && refers_to_regno_p (regno, endregno, SET_DEST (x), loc))))

      if (code == CLOBBER || loc == &SET_SRC (x))

  /* X does not match, so try its subexpressions.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
      if (fmt[i] == 'e' && loc != &XEXP (x, i))

          if (refers_to_regno_p (regno, endregno, XEXP (x, i), loc))

      else if (fmt[i] == 'E')

          for (j = XVECLEN (x, i) - 1; j >= 0; j--)
            if (loc != &XVECEXP (x, i, j)
                && refers_to_regno_p (regno, endregno, XVECEXP (x, i, j), loc))
/* Nonzero if modifying X will affect IN.  If X is a register or a SUBREG,
   we check if any register number in X conflicts with the relevant register
   numbers.  If X is a constant, return 0.  If X is a MEM, return 1 iff IN
   contains a MEM (we don't bother checking for memory addresses that can't
   conflict because we expect this to be a rare case).  */

reg_overlap_mentioned_p (const_rtx x, const_rtx in)

  unsigned int regno, endregno;

  /* If either argument is a constant, then modifying X can not
     affect IN.  Here we look at IN; we can profitably combine
     CONSTANT_P (x) with the switch statement below.  */
  if (CONSTANT_P (in))

  switch (GET_CODE (x))

    case STRICT_LOW_PART:
      /* Overly conservative.  */

      regno = REGNO (SUBREG_REG (x));
      if (regno < FIRST_PSEUDO_REGISTER)
        regno = subreg_regno (x);
      endregno = regno + (regno < FIRST_PSEUDO_REGISTER
                          ? subreg_nregs (x) : 1);

      endregno = END_REGNO (x);

      return refers_to_regno_p (regno, endregno, in, (rtx*) 0);

        fmt = GET_RTX_FORMAT (GET_CODE (in));
        for (i = GET_RTX_LENGTH (GET_CODE (in)) - 1; i >= 0; i--)
            if (reg_overlap_mentioned_p (x, XEXP (in, i)))
          else if (fmt[i] == 'E')
              for (j = XVECLEN (in, i) - 1; j >= 0; --j)
                if (reg_overlap_mentioned_p (x, XVECEXP (in, i, j)))

      return reg_mentioned_p (x, in);

      /* If any register in here refers to it we return true.  */
      for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
        if (XEXP (XVECEXP (x, 0, i), 0) != 0
            && reg_overlap_mentioned_p (XEXP (XVECEXP (x, 0, i), 0), in))

      gcc_assert (CONSTANT_P (x));
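/* For example (illustrative): reg_overlap_mentioned_p returns 1 for
   X = (reg:SI 1) and IN = (plus (reg:SI 1) (reg:SI 2)); for hard
   registers it also catches partial overlaps, e.g. a multi-register
   DImode value overlapping either of its SImode halves.  */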
/* Call FUN on each register or MEM that is stored into or clobbered by X.
   (X would be the pattern of an insn).  DATA is an arbitrary pointer,
   ignored by note_stores, but passed to FUN.

   FUN receives three arguments:
   1. the REG, MEM, CC0 or PC being stored in or clobbered,
   2. the SET or CLOBBER rtx that does the store,
   3. the pointer DATA provided to note_stores.

   If the item being stored in or clobbered is a SUBREG of a hard register,
   the SUBREG will be passed.  */

note_stores (rtx x, void (*fun) (rtx, rtx, void *), void *data)

  if (GET_CODE (x) == COND_EXEC)
    x = COND_EXEC_CODE (x);

  if (GET_CODE (x) == SET || GET_CODE (x) == CLOBBER)
    {
      rtx dest = SET_DEST (x);

      while ((GET_CODE (dest) == SUBREG
              && (!REG_P (SUBREG_REG (dest))
                  || REGNO (SUBREG_REG (dest)) >= FIRST_PSEUDO_REGISTER))
             || GET_CODE (dest) == ZERO_EXTRACT
             || GET_CODE (dest) == STRICT_LOW_PART)
        dest = XEXP (dest, 0);

      /* If we have a PARALLEL, SET_DEST is a list of EXPR_LIST expressions,
         each of whose first operand is a register.  */
      if (GET_CODE (dest) == PARALLEL)

          for (i = XVECLEN (dest, 0) - 1; i >= 0; i--)
            if (XEXP (XVECEXP (dest, 0, i), 0) != 0)
              (*fun) (XEXP (XVECEXP (dest, 0, i), 0), x, data);

        (*fun) (dest, x, data);

  else if (GET_CODE (x) == PARALLEL)
    for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
      note_stores (XVECEXP (x, 0, i), fun, data);
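/* Illustrative usage sketch (not part of the original file): a callback
   that counts how many hard registers an insn pattern stores into.
   count_hard_reg_stores_1 is a hypothetical helper, not a GCC function.

     static void
     count_hard_reg_stores_1 (rtx x, rtx pat ATTRIBUTE_UNUSED, void *data)
     {
       if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
         ++*(int *) data;
     }

   and at a call site:

     int n = 0;
     note_stores (PATTERN (insn), count_hard_reg_stores_1, &n);  */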
/* Like note_stores, but call FUN for each expression that is being
   referenced in PBODY, a pointer to the PATTERN of an insn.  We only call
   FUN for each expression, not any interior subexpressions.  FUN receives a
   pointer to the expression and the DATA passed to this function.

   Note that this is not quite the same test as that done in reg_referenced_p
   since that considers something as being referenced if it is being
   partially set, while we do not.  */

note_uses (rtx *pbody, void (*fun) (rtx *, void *), void *data)

  switch (GET_CODE (body))

      (*fun) (&COND_EXEC_TEST (body), data);
      note_uses (&COND_EXEC_CODE (body), fun, data);

      for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
        note_uses (&XVECEXP (body, 0, i), fun, data);

      for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
        note_uses (&PATTERN (XVECEXP (body, 0, i)), fun, data);

      (*fun) (&XEXP (body, 0), data);

      for (i = ASM_OPERANDS_INPUT_LENGTH (body) - 1; i >= 0; i--)
        (*fun) (&ASM_OPERANDS_INPUT (body, i), data);

      (*fun) (&TRAP_CONDITION (body), data);

      (*fun) (&XEXP (body, 0), data);

    case UNSPEC_VOLATILE:
      for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
        (*fun) (&XVECEXP (body, 0, i), data);

      if (MEM_P (XEXP (body, 0)))
        (*fun) (&XEXP (XEXP (body, 0), 0), data);

        rtx dest = SET_DEST (body);

        /* For SETs we process everything in the source, plus registers in
           the memory expression of a store, and the operands of a
           ZERO_EXTRACT.  */
        (*fun) (&SET_SRC (body), data);

        if (GET_CODE (dest) == ZERO_EXTRACT)
            (*fun) (&XEXP (dest, 1), data);
            (*fun) (&XEXP (dest, 2), data);

        while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART)
          dest = XEXP (dest, 0);

          (*fun) (&XEXP (dest, 0), data);

      /* All the other possibilities never store.  */
      (*fun) (pbody, data);
/* Return nonzero if X's old contents don't survive after INSN.
   This will be true if X is (cc0) or if X is a register and
   X dies in INSN or because INSN entirely sets X.

   "Entirely set" means set directly and not through a SUBREG, or
   ZERO_EXTRACT, so no trace of the old contents remains.
   Likewise, REG_INC does not count.

   REG may be a hard or pseudo reg.  Renumbering is not taken into account,
   but for this use that makes no difference, since regs don't overlap
   during their lifetimes.  Therefore, this function may be used
   at any time after deaths have been computed.

   If REG is a hard reg that occupies multiple machine registers, this
   function will only return 1 if each of those registers will be replaced
   by INSN.  */

dead_or_set_p (const_rtx insn, const_rtx x)

  unsigned int regno, end_regno;

  /* Can't use cc0_rtx below since this file is used by genattrtab.c.  */
  if (GET_CODE (x) == CC0)

  gcc_assert (REG_P (x));

  end_regno = END_REGNO (x);
  for (i = regno; i < end_regno; i++)
    if (! dead_or_set_regno_p (insn, i))
/* Return TRUE iff DEST is a register or subreg of a register and
   doesn't change the number of words of the inner register, and any
   part of the register is TEST_REGNO.  */

covers_regno_no_parallel_p (const_rtx dest, unsigned int test_regno)

  unsigned int regno, endregno;

  if (GET_CODE (dest) == SUBREG
      && (((GET_MODE_SIZE (GET_MODE (dest))
            + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
          == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
               + UNITS_PER_WORD - 1) / UNITS_PER_WORD)))
    dest = SUBREG_REG (dest);

  regno = REGNO (dest);
  endregno = END_REGNO (dest);
  return (test_regno >= regno && test_regno < endregno);

/* Like covers_regno_no_parallel_p, but also handles PARALLELs where
   any member matches the covers_regno_no_parallel_p criteria.  */

covers_regno_p (const_rtx dest, unsigned int test_regno)

  if (GET_CODE (dest) == PARALLEL)

      /* Some targets place small structures in registers for return
         values of functions, and those registers are wrapped in
         PARALLELs that we may see as the destination of a SET.  */

      for (i = XVECLEN (dest, 0) - 1; i >= 0; i--)
        {
          rtx inner = XEXP (XVECEXP (dest, 0, i), 0);
          if (inner != NULL_RTX
              && covers_regno_no_parallel_p (inner, test_regno))

  return covers_regno_no_parallel_p (dest, test_regno);
/* Utility function for dead_or_set_p to check an individual register.  */

dead_or_set_regno_p (const_rtx insn, unsigned int test_regno)

  /* See if there is a death note for something that includes TEST_REGNO.  */
  if (find_regno_note (insn, REG_DEAD, test_regno))

  if (CALL_P (insn)
      && find_regno_fusage (insn, CLOBBER, test_regno))

  pattern = PATTERN (insn);

  if (GET_CODE (pattern) == COND_EXEC)
    pattern = COND_EXEC_CODE (pattern);

  if (GET_CODE (pattern) == SET)
    return covers_regno_p (SET_DEST (pattern), test_regno);
  else if (GET_CODE (pattern) == PARALLEL)

      for (i = XVECLEN (pattern, 0) - 1; i >= 0; i--)
        {
          rtx body = XVECEXP (pattern, 0, i);

          if (GET_CODE (body) == COND_EXEC)
            body = COND_EXEC_CODE (body);

          if ((GET_CODE (body) == SET || GET_CODE (body) == CLOBBER)
              && covers_regno_p (SET_DEST (body), test_regno))
/* Return the reg-note of kind KIND in insn INSN, if there is one.
   If DATUM is nonzero, look for one whose datum is DATUM.  */

find_reg_note (const_rtx insn, enum reg_note kind, const_rtx datum)

  /* Ignore anything that is not an INSN, JUMP_INSN or CALL_INSN.  */
  if (! INSN_P (insn))

      for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
        if (REG_NOTE_KIND (link) == kind)

  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
    if (REG_NOTE_KIND (link) == kind && datum == XEXP (link, 0))

/* Return the reg-note of kind KIND in insn INSN which applies to register
   number REGNO, if any.  Return 0 if there is no such reg-note.  Note that
   the REGNO of this NOTE need not be REGNO if REGNO is a hard register;
   it might be the case that the note overlaps REGNO.  */

find_regno_note (const_rtx insn, enum reg_note kind, unsigned int regno)

  /* Ignore anything that is not an INSN, JUMP_INSN or CALL_INSN.  */
  if (! INSN_P (insn))

  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
    if (REG_NOTE_KIND (link) == kind
        /* Verify that it is a register, so that scratch and MEM won't cause a
           problem here.  */
        && REG_P (XEXP (link, 0))
        && REGNO (XEXP (link, 0)) <= regno
        && END_REGNO (XEXP (link, 0)) > regno)
/* Return a REG_EQUIV or REG_EQUAL note if insn has only a single set and
   has such a note.  */

find_reg_equal_equiv_note (const_rtx insn)

  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
    if (REG_NOTE_KIND (link) == REG_EQUAL
        || REG_NOTE_KIND (link) == REG_EQUIV)
      {
        /* FIXME: We should never have REG_EQUAL/REG_EQUIV notes on
           insns that have multiple sets.  Checking single_set to
           make sure of this is not the proper check, as explained
           in the comment in set_unique_reg_note.

           This should be changed into an assert.  */
        if (GET_CODE (PATTERN (insn)) == PARALLEL && multiple_sets (insn))

/* Check whether INSN is a single_set whose source is known to be
   equivalent to a constant.  Return that constant if so, otherwise
   return null.  */

find_constant_src (const_rtx insn)

  set = single_set (insn);

      x = avoid_constant_pool_reference (SET_SRC (set));

  note = find_reg_equal_equiv_note (insn);
  if (note && CONSTANT_P (XEXP (note, 0)))
    return XEXP (note, 0);
/* Return true if DATUM, or any overlap of DATUM, of kind CODE is found
   in the CALL_INSN_FUNCTION_USAGE information of INSN.  */

find_reg_fusage (const_rtx insn, enum rtx_code code, const_rtx datum)

  /* If it's not a CALL_INSN, it can't possibly have a
     CALL_INSN_FUNCTION_USAGE field, so don't bother checking.  */

      for (link = CALL_INSN_FUNCTION_USAGE (insn);
           link;
           link = XEXP (link, 1))
        if (GET_CODE (XEXP (link, 0)) == code
            && rtx_equal_p (datum, XEXP (XEXP (link, 0), 0)))

      unsigned int regno = REGNO (datum);

      /* CALL_INSN_FUNCTION_USAGE information cannot contain references
         to pseudo registers, so don't bother checking.  */

      if (regno < FIRST_PSEUDO_REGISTER)
        {
          unsigned int end_regno = END_HARD_REGNO (datum);

          for (i = regno; i < end_regno; i++)
            if (find_regno_fusage (insn, code, i))

/* Return true if REGNO, or any overlap of REGNO, of kind CODE is found
   in the CALL_INSN_FUNCTION_USAGE information of INSN.  */

find_regno_fusage (const_rtx insn, enum rtx_code code, unsigned int regno)

  /* CALL_INSN_FUNCTION_USAGE information cannot contain references
     to pseudo registers, so don't bother checking.  */

  if (regno >= FIRST_PSEUDO_REGISTER
      || !CALL_P (insn))

  for (link = CALL_INSN_FUNCTION_USAGE (insn); link; link = XEXP (link, 1))
      if (GET_CODE (op = XEXP (link, 0)) == code
          && REG_P (reg = XEXP (op, 0))
          && REGNO (reg) <= regno
          && END_HARD_REGNO (reg) > regno)
/* Return true if INSN is a call to a pure function.  */

pure_call_p (const_rtx insn)

  if (!CALL_P (insn) || ! CONST_OR_PURE_CALL_P (insn))

  /* Look for the note that differentiates const and pure functions.  */
  for (link = CALL_INSN_FUNCTION_USAGE (insn); link; link = XEXP (link, 1))

      if (GET_CODE (u = XEXP (link, 0)) == USE
          && MEM_P (m = XEXP (u, 0)) && GET_MODE (m) == BLKmode
          && GET_CODE (XEXP (m, 0)) == SCRATCH)
/* Remove register note NOTE from the REG_NOTES of INSN.  */

remove_note (rtx insn, const_rtx note)

  if (note == NULL_RTX)

  if (REG_NOTES (insn) == note)
    REG_NOTES (insn) = XEXP (note, 1);

  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
    if (XEXP (link, 1) == note)
      {
        XEXP (link, 1) = XEXP (note, 1);

  switch (REG_NOTE_KIND (note))
      df_notes_rescan (insn);
/* Remove REG_EQUAL and/or REG_EQUIV notes if INSN has such notes.  */

remove_reg_equal_equiv_notes (rtx insn)

  loc = &REG_NOTES (insn);

      enum reg_note kind = REG_NOTE_KIND (*loc);
      if (kind == REG_EQUAL || kind == REG_EQUIV)
        *loc = XEXP (*loc, 1);

        loc = &XEXP (*loc, 1);
/* Search LISTP (an EXPR_LIST) for an entry whose first operand is NODE and
   return 1 if it is found.  A simple equality test is used to determine if
   NODE matches.  */

in_expr_list_p (const_rtx listp, const_rtx node)

  for (x = listp; x; x = XEXP (x, 1))
    if (node == XEXP (x, 0))

/* Search LISTP (an EXPR_LIST) for an entry whose first operand is NODE and
   remove that entry from the list if it is found.

   A simple equality test is used to determine if NODE matches.  */

remove_node_from_expr_list (const_rtx node, rtx *listp)

  rtx prev = NULL_RTX;

      if (node == XEXP (temp, 0))
        {
          /* Splice the node out of the list.  */
          if (prev)
            XEXP (prev, 1) = XEXP (temp, 1);
          else
            *listp = XEXP (temp, 1);

      temp = XEXP (temp, 1);
/* Nonzero if X contains any volatile instructions.  These are instructions
   which may cause unpredictable machine state, and thus no other
   instructions should be moved or combined across them.  This includes
   only volatile asms and UNSPEC_VOLATILE instructions.  */

volatile_insn_p (const_rtx x)

  const RTX_CODE code = GET_CODE (x);

    case UNSPEC_VOLATILE:
      /* case TRAP_IF: This isn't clear yet.  */

      if (MEM_VOLATILE_P (x))

  /* Recursively scan the operands of this expression.  */

    const char *const fmt = GET_RTX_FORMAT (code);

    for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
        if (volatile_insn_p (XEXP (x, i)))
        else if (fmt[i] == 'E')
            for (j = 0; j < XVECLEN (x, i); j++)
              if (volatile_insn_p (XVECEXP (x, i, j)))
/* Nonzero if X contains any volatile memory references,
   UNSPEC_VOLATILE operations or volatile ASM_OPERANDS expressions.  */

volatile_refs_p (const_rtx x)

  const RTX_CODE code = GET_CODE (x);

    case UNSPEC_VOLATILE:

      if (MEM_VOLATILE_P (x))

  /* Recursively scan the operands of this expression.  */

    const char *const fmt = GET_RTX_FORMAT (code);

    for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
        if (volatile_refs_p (XEXP (x, i)))
        else if (fmt[i] == 'E')
            for (j = 0; j < XVECLEN (x, i); j++)
              if (volatile_refs_p (XVECEXP (x, i, j)))
/* Similar to above, except that it also rejects register pre- and post-
   incrementing.  */

side_effects_p (const_rtx x)

  const RTX_CODE code = GET_CODE (x);

      /* Reject CLOBBER with a non-VOID mode.  These are made by combine.c
         when some combination can't be done.  If we see one, don't think
         that we can simplify the expression.  */
      return (GET_MODE (x) != VOIDmode);

    case UNSPEC_VOLATILE:
      /* case TRAP_IF: This isn't clear yet.  */

      if (MEM_VOLATILE_P (x))

  /* Recursively scan the operands of this expression.  */

    const char *fmt = GET_RTX_FORMAT (code);

    for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
        if (side_effects_p (XEXP (x, i)))
        else if (fmt[i] == 'E')
            for (j = 0; j < XVECLEN (x, i); j++)
              if (side_effects_p (XVECEXP (x, i, j)))
enum may_trap_p_flags
{
  MTP_UNALIGNED_MEMS = 1,
  MTP_AFTER_MOVE = 2
};

/* Return nonzero if evaluating rtx X might cause a trap.
   (FLAGS & MTP_UNALIGNED_MEMS) controls whether nonzero is returned for
   unaligned memory accesses on strict alignment machines.  If
   (FLAGS & MTP_AFTER_MOVE) is true, returns nonzero even in case the
   expression cannot trap at its current location, but it might become
   trapping if moved elsewhere.  */

may_trap_p_1 (const_rtx x, unsigned flags)

  bool unaligned_mems = (flags & MTP_UNALIGNED_MEMS) != 0;

  code = GET_CODE (x);

    /* Handle these cases quickly.  */

    case UNSPEC_VOLATILE:

      return MEM_VOLATILE_P (x);

      /* Memory ref can trap unless it's a static var or a stack slot.  */
      if (/* MEM_NOTRAP_P only relates to the actual position of the memory
             reference; moving it out of condition might cause its address
             to become invalid.  */
          !(flags & MTP_AFTER_MOVE)
          && MEM_NOTRAP_P (x)
          && (!STRICT_ALIGNMENT || !unaligned_mems))
      return
        rtx_addr_can_trap_p_1 (XEXP (x, 0), GET_MODE (x), unaligned_mems);

      /* Division by a non-constant might trap.  */

      if (HONOR_SNANS (GET_MODE (x)))
      if (SCALAR_FLOAT_MODE_P (GET_MODE (x)))
        return flag_trapping_math;
      if (!CONSTANT_P (XEXP (x, 1)) || (XEXP (x, 1) == const0_rtx))

      /* An EXPR_LIST is used to represent a function call.  This
         certainly may trap.  */

      /* Some floating point comparisons may trap.  */
      if (!flag_trapping_math)
      /* ??? There is no machine independent way to check for tests that trap
         when COMPARE is used, though many targets do make this distinction.
         For instance, sparc uses CCFPE for compares which generate exceptions
         and CCFP for compares which do not generate exceptions.  */
      if (HONOR_NANS (GET_MODE (x)))
      /* But often the compare has some CC mode, so check operand
         modes as well.  */
      if (HONOR_NANS (GET_MODE (XEXP (x, 0)))
          || HONOR_NANS (GET_MODE (XEXP (x, 1))))

      if (HONOR_SNANS (GET_MODE (x)))
      /* Often comparison is CC mode, so check operand modes.  */
      if (HONOR_SNANS (GET_MODE (XEXP (x, 0)))
          || HONOR_SNANS (GET_MODE (XEXP (x, 1))))

      /* Conversion of floating point might trap.  */
      if (flag_trapping_math && HONOR_NANS (GET_MODE (XEXP (x, 0))))

      /* These operations don't trap even with floating point.  */

      /* Any floating arithmetic may trap.  */
      if (SCALAR_FLOAT_MODE_P (GET_MODE (x))
          && flag_trapping_math)

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
      if (may_trap_p_1 (XEXP (x, i), flags))
      else if (fmt[i] == 'E')
          for (j = 0; j < XVECLEN (x, i); j++)
            if (may_trap_p_1 (XVECEXP (x, i, j), flags))
/* Return nonzero if evaluating rtx X might cause a trap.  */

may_trap_p (const_rtx x)

  return may_trap_p_1 (x, 0);
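/* For example (illustrative): (div:SI (reg:SI 1) (const_int 0)) may
   trap, as may a MEM whose address is not known to be valid, while
   (plus:SI (reg:SI 1) (const_int 1)) may not.  */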
/* Return nonzero if evaluating rtx X might cause a trap, when the expression
   is moved from its current location by some optimization.  */

may_trap_after_code_motion_p (const_rtx x)

  return may_trap_p_1 (x, MTP_AFTER_MOVE);
/* Same as above, but additionally return nonzero if evaluating rtx X might
   cause a fault.  We define a fault for the purpose of this function as an
   erroneous execution condition that cannot be encountered during the normal
   execution of a valid program; the typical example is an unaligned memory
   access on a strict alignment machine.  The compiler guarantees that it
   doesn't generate code that will fault from a valid program, but this
   guarantee doesn't mean anything for individual instructions.  Consider
   the following example:

      struct S { int d; union { char *cp; int *ip; }; };

      int foo(struct S *s)

   on a strict alignment machine.  In a valid program, foo will never be
   invoked on a structure for which d is equal to 1 and the underlying
   unique field of the union not aligned on a 4-byte boundary, but the
   expression *s->ip might cause a fault if considered individually.

   At the RTL level, potentially problematic expressions will almost always
   verify may_trap_p; for example, the above dereference can be emitted as
   (mem:SI (reg:P)) and this expression is may_trap_p for a generic register.
   However, suppose that foo is inlined in a caller that causes s->cp to
   point to a local character variable and guarantees that s->d is not set
   to 1; foo may have been effectively translated into pseudo-RTL as:

      (set (reg:SI) (mem:SI (%fp - 7)))

      (set (reg:QI) (mem:QI (%fp - 7)))

   Now (mem:SI (%fp - 7)) is considered as not may_trap_p since it is a
   memory reference to a stack slot, but it will certainly cause a fault
   on a strict alignment machine.  */

may_trap_or_fault_p (const_rtx x)

  return may_trap_p_1 (x, MTP_UNALIGNED_MEMS);
/* Return nonzero if X contains a comparison that is not either EQ or NE,
   i.e., an inequality.  */

inequality_comparisons_p (const_rtx x)

  const enum rtx_code code = GET_CODE (x);

  len = GET_RTX_LENGTH (code);
  fmt = GET_RTX_FORMAT (code);

  for (i = 0; i < len; i++)
      if (inequality_comparisons_p (XEXP (x, i)))
      else if (fmt[i] == 'E')
          for (j = XVECLEN (x, i) - 1; j >= 0; j--)
            if (inequality_comparisons_p (XVECEXP (x, i, j)))
/* Replace any occurrence of FROM in X with TO.  The function does
   not enter into CONST_DOUBLE for the replace.

   Note that copying is not done so X must not be shared unless all copies
   are to be modified.  */

replace_rtx (rtx x, rtx from, rtx to)

  /* The following prevents loops from occurring when we change a MEM in
     a CONST_DOUBLE onto the same CONST_DOUBLE.  */
  if (x != 0 && GET_CODE (x) == CONST_DOUBLE)

  /* Allow this function to make replacements in EXPR_LISTs.  */

  if (GET_CODE (x) == SUBREG)
    {
      rtx new = replace_rtx (SUBREG_REG (x), from, to);

      if (GET_CODE (new) == CONST_INT)
        {
          x = simplify_subreg (GET_MODE (x), new,
                               GET_MODE (SUBREG_REG (x)),
                               SUBREG_BYTE (x));

        SUBREG_REG (x) = new;

  else if (GET_CODE (x) == ZERO_EXTEND)
    {
      rtx new = replace_rtx (XEXP (x, 0), from, to);

      if (GET_CODE (new) == CONST_INT)
        {
          x = simplify_unary_operation (ZERO_EXTEND, GET_MODE (x),
                                        new, GET_MODE (XEXP (x, 0)));

  fmt = GET_RTX_FORMAT (GET_CODE (x));
  for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
      if (fmt[i] == 'e')
        XEXP (x, i) = replace_rtx (XEXP (x, i), from, to);
      else if (fmt[i] == 'E')
        for (j = XVECLEN (x, i) - 1; j >= 0; j--)
          XVECEXP (x, i, j) = replace_rtx (XVECEXP (x, i, j), from, to);
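/* For example (illustrative): replace_rtx (pat, (reg:SI 100),
   (reg:SI 200)) rewrites every occurrence of pseudo 100 in PAT in
   place, simplifying SUBREGs and ZERO_EXTENDs when the replacement
   turns out to be a CONST_INT.  */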
/* Replace occurrences of the old label in *X with the new one.
   DATA is a REPLACE_LABEL_DATA containing the old and new labels.  */

replace_label (rtx *x, void *data)

  rtx old_label = ((replace_label_data *) data)->r1;
  rtx new_label = ((replace_label_data *) data)->r2;
  bool update_label_nuses = ((replace_label_data *) data)->update_label_nuses;

  if (GET_CODE (l) == SYMBOL_REF
      && CONSTANT_POOL_ADDRESS_P (l))
    {
      rtx c = get_pool_constant (l);
      if (rtx_referenced_p (old_label, c))
        {
          replace_label_data *d = (replace_label_data *) data;

          /* Create a copy of constant C; replace the label inside
             but do not update LABEL_NUSES because uses in constant pool
             are not counted.  */
          new_c = copy_rtx (c);
          d->update_label_nuses = false;
          for_each_rtx (&new_c, replace_label, data);
          d->update_label_nuses = update_label_nuses;

          /* Add the new constant NEW_C to constant pool and replace
             the old reference to constant by new reference.  */
          new_l = XEXP (force_const_mem (get_pool_mode (l), new_c), 0);
          *x = replace_rtx (l, l, new_l);

  /* If this is a JUMP_INSN, then we also need to fix the JUMP_LABEL
     field.  This is not handled by for_each_rtx because it doesn't
     handle unprinted ('0') fields.  */
  if (JUMP_P (l) && JUMP_LABEL (l) == old_label)
    JUMP_LABEL (l) = new_label;

  if ((GET_CODE (l) == LABEL_REF
       || GET_CODE (l) == INSN_LIST)
      && XEXP (l, 0) == old_label)
    {
      XEXP (l, 0) = new_label;
      if (update_label_nuses)
        {
          ++LABEL_NUSES (new_label);
          --LABEL_NUSES (old_label);
/* When *BODY is equal to X or X is directly referenced by *BODY
   return nonzero, thus FOR_EACH_RTX stops traversing and returns nonzero
   too, otherwise FOR_EACH_RTX continues traversing *BODY.  */

rtx_referenced_p_1 (rtx *body, void *x)

  if (*body == NULL_RTX)
    return y == NULL_RTX;

  /* Return true if a label_ref *BODY refers to label Y.  */
  if (GET_CODE (*body) == LABEL_REF && LABEL_P (y))
    return XEXP (*body, 0) == y;

  /* If *BODY is a reference to pool constant traverse the constant.  */
  if (GET_CODE (*body) == SYMBOL_REF
      && CONSTANT_POOL_ADDRESS_P (*body))
    return rtx_referenced_p (y, get_pool_constant (*body));

  /* By default, compare the RTL expressions.  */
  return rtx_equal_p (*body, y);

/* Return true if X is referenced in BODY.  */

rtx_referenced_p (rtx x, rtx body)

  return for_each_rtx (&body, rtx_referenced_p_1, x);
/* If INSN is a tablejump return true and store the label (before jump table) to
   *LABELP and the jump table to *TABLEP.  LABELP and TABLEP may be NULL.  */

tablejump_p (const_rtx insn, rtx *labelp, rtx *tablep)

  if (JUMP_P (insn)
      && (label = JUMP_LABEL (insn)) != NULL_RTX
      && (table = next_active_insn (label)) != NULL_RTX
      && JUMP_P (table)
      && (GET_CODE (PATTERN (table)) == ADDR_VEC
          || GET_CODE (PATTERN (table)) == ADDR_DIFF_VEC))
/* A subroutine of computed_jump_p, return 1 if X contains a REG or MEM or
   constant that is not in the constant pool and not in the condition
   of an IF_THEN_ELSE.  */

computed_jump_p_1 (const_rtx x)

  const enum rtx_code code = GET_CODE (x);

      return ! (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
                && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)));

      return (computed_jump_p_1 (XEXP (x, 1))
              || computed_jump_p_1 (XEXP (x, 2)));

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
      if (fmt[i] == 'e'
          && computed_jump_p_1 (XEXP (x, i)))
      else if (fmt[i] == 'E')
        for (j = 0; j < XVECLEN (x, i); j++)
          if (computed_jump_p_1 (XVECEXP (x, i, j)))
/* Return nonzero if INSN is an indirect jump (aka computed jump).

   Tablejumps and casesi insns are not considered indirect jumps;
   we can recognize them by a (use (label_ref)).  */

computed_jump_p (const_rtx insn)

      rtx pat = PATTERN (insn);

      if (find_reg_note (insn, REG_LABEL, NULL_RTX))
      else if (GET_CODE (pat) == PARALLEL)
        {
          int len = XVECLEN (pat, 0);
          int has_use_labelref = 0;

          for (i = len - 1; i >= 0; i--)
            if (GET_CODE (XVECEXP (pat, 0, i)) == USE
                && (GET_CODE (XEXP (XVECEXP (pat, 0, i), 0))
                    == LABEL_REF))
              has_use_labelref = 1;

          if (! has_use_labelref)
            for (i = len - 1; i >= 0; i--)
              if (GET_CODE (XVECEXP (pat, 0, i)) == SET
                  && SET_DEST (XVECEXP (pat, 0, i)) == pc_rtx
                  && computed_jump_p_1 (SET_SRC (XVECEXP (pat, 0, i))))

      else if (GET_CODE (pat) == SET
               && SET_DEST (pat) == pc_rtx
               && computed_jump_p_1 (SET_SRC (pat)))
/* Optimized loop of for_each_rtx, trying to avoid useless recursive
   calls.  Processes the subexpressions of EXP and passes them to F.  */

for_each_rtx_1 (rtx exp, int n, rtx_function f, void *data)

  const char *format = GET_RTX_FORMAT (GET_CODE (exp));

  for (; format[n] != '\0'; n++)

          result = (*f) (x, data);
          if (result == -1)
            /* Do not traverse sub-expressions.  */
            continue;
          else if (result != 0)
            /* Stop the traversal.  */
            return result;

            /* There are no sub-expressions.  */
            continue;

          i = non_rtx_starting_operands[GET_CODE (*x)];

              result = for_each_rtx_1 (*x, i, f, data);

          if (XVEC (exp, n) == 0)
          for (j = 0; j < XVECLEN (exp, n); ++j)
            {
              x = &XVECEXP (exp, n, j);
              result = (*f) (x, data);
              if (result == -1)
                /* Do not traverse sub-expressions.  */
                continue;
              else if (result != 0)
                /* Stop the traversal.  */
                return result;

                /* There are no sub-expressions.  */
                continue;

              i = non_rtx_starting_operands[GET_CODE (*x)];

                  result = for_each_rtx_1 (*x, i, f, data);

        /* Nothing to do.  */
        break;
/* Traverse X via depth-first search, calling F for each
   sub-expression (including X itself).  F is also passed the DATA.
   If F returns -1, do not traverse sub-expressions, but continue
   traversing the rest of the tree.  If F ever returns any other
   nonzero value, stop the traversal, and return the value returned
   by F.  Otherwise, return 0.  This function does not traverse inside
   tree structure that contains RTX_EXPRs, or into sub-expressions
   whose format code is `0' since it is not known whether or not those
   codes are actually RTL.

   This routine is very general, and could (should?) be used to
   implement many of the other routines in this file.  */

for_each_rtx (rtx *x, rtx_function f, void *data)

  result = (*f) (x, data);
  if (result == -1)
    /* Do not traverse sub-expressions.  */
    return 0;
  else if (result != 0)
    /* Stop the traversal.  */
    return result;

    /* There are no sub-expressions.  */
    return 0;

  i = non_rtx_starting_operands[GET_CODE (*x)];

  return for_each_rtx_1 (*x, i, f, data);
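/* Illustrative usage sketch (not part of the original file): a callback
   for for_each_rtx that counts REG expressions in an rtx; returning 0
   keeps the traversal going.  count_regs_1 is a hypothetical helper,
   not a GCC function.

     static int
     count_regs_1 (rtx *x, void *data)
     {
       if (*x != NULL_RTX && REG_P (*x))
         ++*(int *) data;
       return 0;
     }

   and at a call site:

     int n = 0;
     for_each_rtx (&PATTERN (insn), count_regs_1, &n);  */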
/* Searches X for any reference to REGNO, returning the rtx of the
   reference found if any.  Otherwise, returns NULL_RTX.  */

regno_use_in (unsigned int regno, rtx x)

  if (REG_P (x) && REGNO (x) == regno)

  fmt = GET_RTX_FORMAT (GET_CODE (x));
  for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
      if ((tem = regno_use_in (regno, XEXP (x, i))))
      else if (fmt[i] == 'E')
        for (j = XVECLEN (x, i) - 1; j >= 0; j--)
          if ((tem = regno_use_in (regno, XVECEXP (x, i, j))))
/* Return a value indicating whether OP, an operand of a commutative
   operation, is preferred as the first or second operand.  The higher
   the value, the stronger the preference for being the first operand.
   We use negative values to indicate a preference for the first operand
   and positive values for the second operand.  */

commutative_operand_precedence (rtx op)

  enum rtx_code code = GET_CODE (op);

  /* Constants always become the second operand.  Prefer "nice" constants.  */
  if (code == CONST_INT)
  if (code == CONST_DOUBLE)

  op = avoid_constant_pool_reference (op);
  code = GET_CODE (op);

  switch (GET_RTX_CLASS (code))
      if (code == CONST_INT)
      if (code == CONST_DOUBLE)

      /* SUBREGs of objects should come second.  */
      if (code == SUBREG && OBJECT_P (SUBREG_REG (op)))

      /* Complex expressions should be first, so decrease priority
         of objects.  Prefer pointer objects over non pointer objects.  */
      if ((REG_P (op) && REG_POINTER (op))
          || (MEM_P (op) && MEM_POINTER (op)))

    case RTX_COMM_ARITH:
      /* Prefer operands that are themselves commutative to be first.
         This helps to make things linear.  In particular,
         (and (and (reg) (reg)) (not (reg))) is canonical.  */

      /* If only one operand is a binary expression, it will be the first
         operand.  In particular, (plus (minus (reg) (reg)) (neg (reg)))
         is canonical, although it will usually be further simplified.  */

      /* Then prefer NEG and NOT.  */
      if (code == NEG || code == NOT)
2924 /* Return 1 iff it is necessary to swap the operands of a commutative
2925 operation in order to canonicalize the expression. */
2928 swap_commutative_operands_p (rtx x, rtx y)
2930 return (commutative_operand_precedence (x)
2931 < commutative_operand_precedence (y));
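/* An illustrative sketch of how the two functions above cooperate:
   given (plus (const_int 4) (reg)), the CONST_INT receives a lower
   precedence than the REG, so swap_commutative_operands_p returns
   nonzero and callers swap the operands, yielding the canonical
   (plus (reg) (const_int 4)) with the constant second.  */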
2934 /* Return 1 if X is an autoincrement side effect and the register is
2935 not the stack pointer. */
2937 auto_inc_p (const_rtx x)
2939 switch (GET_CODE (x))
2947 /* There are no REG_INC notes for SP. */
2948 if (XEXP (x, 0) != stack_pointer_rtx)
2956 /* Return nonzero if IN contains a piece of rtl that has the address LOC. */
2958 loc_mentioned_in_p (rtx *loc, const_rtx in)
2967 code = GET_CODE (in);
2968 fmt = GET_RTX_FORMAT (code);
2969 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2971 if (loc == &in->u.fld[i].rt_rtx)
2975 if (loc_mentioned_in_p (loc, XEXP (in, i)))
2978 else if (fmt[i] == 'E')
2979 for (j = XVECLEN (in, i) - 1; j >= 0; j--)
2980 if (loc_mentioned_in_p (loc, XVECEXP (in, i, j)))
2986 /* Helper function for subreg_lsb. Given a subreg's OUTER_MODE, INNER_MODE,
2987 and SUBREG_BYTE, return the bit offset where the subreg begins
2988 (counting from the least significant bit of the operand). */
2991 subreg_lsb_1 (enum machine_mode outer_mode,
2992 enum machine_mode inner_mode,
2993 unsigned int subreg_byte)
2995 unsigned int bitpos;
2999 /* A paradoxical subreg begins at bit position 0. */
3000 if (GET_MODE_BITSIZE (outer_mode) > GET_MODE_BITSIZE (inner_mode))
3003 if (WORDS_BIG_ENDIAN != BYTES_BIG_ENDIAN)
3004 /* If the subreg crosses a word boundary, ensure that
3005 it also begins and ends on a word boundary. */
3006 gcc_assert (!((subreg_byte % UNITS_PER_WORD
3007 + GET_MODE_SIZE (outer_mode)) > UNITS_PER_WORD
3008 && (subreg_byte % UNITS_PER_WORD
3009 || GET_MODE_SIZE (outer_mode) % UNITS_PER_WORD)));
3011 if (WORDS_BIG_ENDIAN)
3012 word = (GET_MODE_SIZE (inner_mode)
3013 - (subreg_byte + GET_MODE_SIZE (outer_mode))) / UNITS_PER_WORD;
3015 word = subreg_byte / UNITS_PER_WORD;
3016 bitpos = word * BITS_PER_WORD;
3018 if (BYTES_BIG_ENDIAN)
3019 byte = (GET_MODE_SIZE (inner_mode)
3020 - (subreg_byte + GET_MODE_SIZE (outer_mode))) % UNITS_PER_WORD;
3022 byte = subreg_byte % UNITS_PER_WORD;
3023 bitpos += byte * BITS_PER_UNIT;
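/* A worked example for subreg_lsb_1, assuming 32-bit words
   (UNITS_PER_WORD == 4) and an 8-byte DImode: for
   (subreg:SI (reg:DI) 4) on a little-endian target, word = 4 / 4 = 1
   and byte = 4 % 4 = 0, so the subreg starts at bit 32, the high half
   of the DImode value.  On a target that is big-endian for both words
   and bytes, the same SUBREG_BYTE selects the other half:
   word = (8 - (4 + 4)) / 4 = 0 and byte = 0, giving bit position 0.  */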
3028 /* Given a subreg X, return the bit offset where the subreg begins
3029 (counting from the least significant bit of the reg). */
3032 subreg_lsb (const_rtx x)
3034 return subreg_lsb_1 (GET_MODE (x), GET_MODE (SUBREG_REG (x)),
3038 /* Fill in information about a subreg of a hard register.
3039 xregno - A regno of an inner hard subreg_reg (or what will become one).
3040 xmode - The mode of xregno.
3041 offset - The byte offset.
3042 ymode - The mode of a top level SUBREG (or what may become one).
3043 info - Pointer to structure to fill in. */
3045 subreg_get_info (unsigned int xregno, enum machine_mode xmode,
3046 unsigned int offset, enum machine_mode ymode,
3047 struct subreg_info *info)
3049 int nregs_xmode, nregs_ymode;
3050 int mode_multiple, nregs_multiple;
3051 int offset_adj, y_offset, y_offset_adj;
3052 int regsize_xmode, regsize_ymode;
3055 gcc_assert (xregno < FIRST_PSEUDO_REGISTER);
3059 /* If there are holes in a non-scalar mode in registers, we expect
3060 that it is made up of its units concatenated together. */
3061 if (HARD_REGNO_NREGS_HAS_PADDING (xregno, xmode))
3063 enum machine_mode xmode_unit;
3065 nregs_xmode = HARD_REGNO_NREGS_WITH_PADDING (xregno, xmode);
3066 if (GET_MODE_INNER (xmode) == VOIDmode)
3069 xmode_unit = GET_MODE_INNER (xmode);
3070 gcc_assert (HARD_REGNO_NREGS_HAS_PADDING (xregno, xmode_unit));
3071 gcc_assert (nregs_xmode
3072 == (GET_MODE_NUNITS (xmode)
3073 * HARD_REGNO_NREGS_WITH_PADDING (xregno, xmode_unit)));
3074 gcc_assert (hard_regno_nregs[xregno][xmode]
3075 == (hard_regno_nregs[xregno][xmode_unit]
3076 * GET_MODE_NUNITS (xmode)));
3078 /* You can only ask for a SUBREG of a value with holes in the middle
3079 if you don't cross the holes. (Such a SUBREG should be done by
3080 picking a different register class, or doing it in memory if
3081 necessary.) An example of a value with holes is XCmode on 32-bit
3082 x86 with -m128bit-long-double; it's represented in 6 32-bit registers,
3083 3 for each part, but in memory it's two 128-bit parts.
3084 Padding is assumed to be at the end (not necessarily the 'high part') of each unit.  */
3086 if ((offset / GET_MODE_SIZE (xmode_unit) + 1
3087 < GET_MODE_NUNITS (xmode))
3088 && (offset / GET_MODE_SIZE (xmode_unit)
3089 != ((offset + GET_MODE_SIZE (ymode) - 1)
3090 / GET_MODE_SIZE (xmode_unit))))
3092 info->representable_p = false;
3097 nregs_xmode = hard_regno_nregs[xregno][xmode];
3099 nregs_ymode = hard_regno_nregs[xregno][ymode];
3101 /* Paradoxical subregs are otherwise valid. */
3104 && GET_MODE_SIZE (ymode) > GET_MODE_SIZE (xmode))
3106 info->representable_p = true;
3107 /* If this is a big endian paradoxical subreg, which uses more
3108 actual hard registers than the original register, we must
3109 return a negative offset so that we find the proper highpart of the register.  */
3111 if (GET_MODE_SIZE (ymode) > UNITS_PER_WORD
3112 ? WORDS_BIG_ENDIAN : BYTES_BIG_ENDIAN)
3113 info->offset = nregs_xmode - nregs_ymode;
3116 info->nregs = nregs_ymode;
3120 /* If registers store different numbers of bits in the different
3121 modes, we cannot generally form this subreg. */
3122 if (!HARD_REGNO_NREGS_HAS_PADDING (xregno, xmode)
3123 && !HARD_REGNO_NREGS_HAS_PADDING (xregno, ymode)
3124 && (GET_MODE_SIZE (xmode) % nregs_xmode) == 0
3125 && (GET_MODE_SIZE (ymode) % nregs_ymode) == 0)
3127 regsize_xmode = GET_MODE_SIZE (xmode) / nregs_xmode;
3128 regsize_ymode = GET_MODE_SIZE (ymode) / nregs_ymode;
3129 if (!rknown && regsize_xmode > regsize_ymode && nregs_ymode > 1)
3131 info->representable_p = false;
3133 = (GET_MODE_SIZE (ymode) + regsize_xmode - 1) / regsize_xmode;
3134 info->offset = offset / regsize_xmode;
3137 if (!rknown && regsize_ymode > regsize_xmode && nregs_xmode > 1)
3139 info->representable_p = false;
3141 = (GET_MODE_SIZE (ymode) + regsize_xmode - 1) / regsize_xmode;
3142 info->offset = offset / regsize_xmode;
3147 /* Lowpart subregs are otherwise valid. */
3148 if (!rknown && offset == subreg_lowpart_offset (ymode, xmode))
3150 info->representable_p = true;
3153 if (offset == 0 || nregs_xmode == nregs_ymode)
3156 info->nregs = nregs_ymode;
3161 /* This should always pass, otherwise we don't know how to verify
3162 the constraint. These conditions may be relaxed but
3163 subreg_regno_offset would need to be redesigned. */
3164 gcc_assert ((GET_MODE_SIZE (xmode) % GET_MODE_SIZE (ymode)) == 0);
3165 gcc_assert ((nregs_xmode % nregs_ymode) == 0);
3167 /* The XMODE value can be seen as a vector of NREGS_XMODE
3168 values.  The subreg must represent a lowpart of a given field.
3169 Compute what field it is. */
3170 offset_adj = offset;
3171 offset_adj -= subreg_lowpart_offset (ymode,
3172 mode_for_size (GET_MODE_BITSIZE (xmode)
3176 /* Size of ymode must not be greater than the size of xmode. */
3177 mode_multiple = GET_MODE_SIZE (xmode) / GET_MODE_SIZE (ymode);
3178 gcc_assert (mode_multiple != 0);
3180 y_offset = offset / GET_MODE_SIZE (ymode);
3181 y_offset_adj = offset_adj / GET_MODE_SIZE (ymode);
3182 nregs_multiple = nregs_xmode / nregs_ymode;
3184 gcc_assert ((offset_adj % GET_MODE_SIZE (ymode)) == 0);
3185 gcc_assert ((mode_multiple % nregs_multiple) == 0);
3189 info->representable_p = (!(y_offset_adj % (mode_multiple / nregs_multiple)));
3192 info->offset = (y_offset / (mode_multiple / nregs_multiple)) * nregs_ymode;
3193 info->nregs = nregs_ymode;
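/* A worked example of the general case above, assuming a
   little-endian target with 32-bit hard registers, so that
   hard_regno_nregs gives 2 for DImode and 1 for SImode: for
   xmode = DImode, offset = 4, ymode = SImode we get
   mode_multiple = 8 / 4 = 2, nregs_multiple = 2 / 1 = 2 and
   y_offset = 4 / 4 = 1, hence representable_p = true, offset = 1 and
   nregs = 1: the subreg occupies the single hard register
   xregno + 1.  */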
3196 /* This function returns the regno offset of a subreg expression.
3197 xregno - A regno of an inner hard subreg_reg (or what will become one).
3198 xmode - The mode of xregno.
3199 offset - The byte offset.
3200 ymode - The mode of a top level SUBREG (or what may become one).
3201 RETURN - The regno offset which would be used. */
3203 subreg_regno_offset (unsigned int xregno, enum machine_mode xmode,
3204 unsigned int offset, enum machine_mode ymode)
3206 struct subreg_info info;
3207 subreg_get_info (xregno, xmode, offset, ymode, &info);
3211 /* This function returns true when the offset is representable via
3212 subreg_offset in the given regno.
3213 xregno - A regno of an inner hard subreg_reg (or what will become one).
3214 xmode - The mode of xregno.
3215 offset - The byte offset.
3216 ymode - The mode of a top level SUBREG (or what may become one).
3217 RETURN - Whether the offset is representable. */
3219 subreg_offset_representable_p (unsigned int xregno, enum machine_mode xmode,
3220 unsigned int offset, enum machine_mode ymode)
3222 struct subreg_info info;
3223 subreg_get_info (xregno, xmode, offset, ymode, &info);
3224 return info.representable_p;
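/* A minimal usage sketch for the two wrappers above (the register
   number and modes are illustrative only): callers should check
   representability before trusting the computed offset, e.g.

     if (subreg_offset_representable_p (regno, DImode, 4, SImode))
       regno += subreg_regno_offset (regno, DImode, 4, SImode);

   which mirrors what subreg_regno below does for actual SUBREGs.  */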
3227 /* Return the final regno that a subreg expression refers to. */
3229 subreg_regno (const_rtx x)
3232 rtx subreg = SUBREG_REG (x);
3233 int regno = REGNO (subreg);
3235 ret = regno + subreg_regno_offset (regno,
3243 /* Return the number of registers that a subreg expression refers to.  */
3246 subreg_nregs (const_rtx x)
3248 struct subreg_info info;
3249 rtx subreg = SUBREG_REG (x);
3250 int regno = REGNO (subreg);
3252 subreg_get_info (regno, GET_MODE (subreg), SUBREG_BYTE (x), GET_MODE (x),
3257 struct parms_set_data
3263 /* Helper function for noticing stores to parameter registers. */
3265 parms_set (rtx x, rtx pat ATTRIBUTE_UNUSED, void *data)
3267 struct parms_set_data *d = data;
3268 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
3269 && TEST_HARD_REG_BIT (d->regs, REGNO (x)))
3271 CLEAR_HARD_REG_BIT (d->regs, REGNO (x));
3276 /* Look backward for the first parameter to be loaded.
3277 Note that loads of all parameters will not necessarily be
3278 found if CSE has eliminated some of them (e.g., an argument
3279 to the outer function is passed down as a parameter).
3280 Do not skip BOUNDARY. */
3282 find_first_parameter_load (rtx call_insn, rtx boundary)
3284 struct parms_set_data parm;
3285 rtx p, before, first_set;
3287 /* Since different machines initialize their parameter registers
3288 in different orders, assume nothing. Collect the set of all
3289 parameter registers. */
3290 CLEAR_HARD_REG_SET (parm.regs);
3292 for (p = CALL_INSN_FUNCTION_USAGE (call_insn); p; p = XEXP (p, 1))
3293 if (GET_CODE (XEXP (p, 0)) == USE
3294 && REG_P (XEXP (XEXP (p, 0), 0)))
3296 gcc_assert (REGNO (XEXP (XEXP (p, 0), 0)) < FIRST_PSEUDO_REGISTER);
3298 /* We only care about registers which can hold function arguments.  */
3300 if (!FUNCTION_ARG_REGNO_P (REGNO (XEXP (XEXP (p, 0), 0))))
3303 SET_HARD_REG_BIT (parm.regs, REGNO (XEXP (XEXP (p, 0), 0)));
3307 first_set = call_insn;
3309 /* Search backward for the first set of a register in this set. */
3310 while (parm.nregs && before != boundary)
3312 before = PREV_INSN (before);
3314 /* It is possible that some loads got CSEed from one call to
3315 another. Stop in that case. */
3316 if (CALL_P (before))
3319 /* Our caller must either ensure that we will find all sets
3320 (in case the code has not been optimized yet), or take care
3321 of possible labels by setting BOUNDARY to the preceding CODE_LABEL.  */
3323 if (LABEL_P (before))
3325 gcc_assert (before == boundary);
3329 if (INSN_P (before))
3331 int nregs_old = parm.nregs;
3332 note_stores (PATTERN (before), parms_set, &parm);
3333 /* If we found something that did not set a parameter reg,
3334 we're done. Do not keep going, as that might result
3335 in hoisting an insn before the setting of a pseudo
3336 that is used by the hoisted insn. */
3337 if (nregs_old != parm.nregs)
3346 /* Return true if we should avoid inserting code between INSN and the
3347 preceding call instruction. */
3350 keep_with_call_p (rtx insn)
3354 if (INSN_P (insn) && (set = single_set (insn)) != NULL)
3356 if (REG_P (SET_DEST (set))
3357 && REGNO (SET_DEST (set)) < FIRST_PSEUDO_REGISTER
3358 && fixed_regs[REGNO (SET_DEST (set))]
3359 && general_operand (SET_SRC (set), VOIDmode))
3361 if (REG_P (SET_SRC (set))
3362 && FUNCTION_VALUE_REGNO_P (REGNO (SET_SRC (set)))
3363 && REG_P (SET_DEST (set))
3364 && REGNO (SET_DEST (set)) >= FIRST_PSEUDO_REGISTER)
3366 /* There may be a stack pop just after the call and before the store
3367 of the return register. Search for the actual store when deciding
3368 if we can break or not. */
3369 if (SET_DEST (set) == stack_pointer_rtx)
3371 rtx i2 = next_nonnote_insn (insn);
3372 if (i2 && keep_with_call_p (i2))
3379 /* Return true if LABEL is a target of JUMP_INSN. This applies only
3380 to non-complex jumps. That is, direct unconditional, conditional,
3381 and tablejumps, but not computed jumps or returns. It also does
3382 not apply to the fallthru case of a conditional jump. */
3385 label_is_jump_target_p (const_rtx label, const_rtx jump_insn)
3387 rtx tmp = JUMP_LABEL (jump_insn);
3392 if (tablejump_p (jump_insn, NULL, &tmp))
3394 rtvec vec = XVEC (PATTERN (tmp),
3395 GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC);
3396 int i, veclen = GET_NUM_ELEM (vec);
3398 for (i = 0; i < veclen; ++i)
3399 if (XEXP (RTVEC_ELT (vec, i), 0) == label)
3407 /* Return an estimate of the cost of computing rtx X.
3408 One use is in cse, to decide which expression to keep in the hash table.
3409 Another is in rtl generation, to pick the cheapest way to multiply.
3410 Other uses like the latter are expected in the future. */
3413 rtx_cost (rtx x, enum rtx_code outer_code ATTRIBUTE_UNUSED)
3423 /* Compute the default costs of certain things.
3424 Note that targetm.rtx_costs can override the defaults. */
3426 code = GET_CODE (x);
3430 total = COSTS_N_INSNS (5);
3436 total = COSTS_N_INSNS (7);
3439 /* Used in combine.c as a marker. */
3443 total = COSTS_N_INSNS (1);
3453 /* If we can't tie these modes, make this expensive. The larger
3454 the mode, the more expensive it is. */
3455 if (! MODES_TIEABLE_P (GET_MODE (x), GET_MODE (SUBREG_REG (x))))
3456 return COSTS_N_INSNS (2
3457 + GET_MODE_SIZE (GET_MODE (x)) / UNITS_PER_WORD);
3461 if (targetm.rtx_costs (x, code, outer_code, &total))
3466 /* Sum the costs of the sub-rtx's, plus the cost of this operation,
3467 which is already in TOTAL. */
3469 fmt = GET_RTX_FORMAT (code);
3470 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3472 total += rtx_cost (XEXP (x, i), code);
3473 else if (fmt[i] == 'E')
3474 for (j = 0; j < XVECLEN (x, i); j++)
3475 total += rtx_cost (XVECEXP (x, i, j), code);
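/* For instance (assuming the target's rtx_costs hook declines to
   override the defaults): (mult:SI (reg:SI) (reg:SI)) starts from
   COSTS_N_INSNS (5) for the MULT, and the loop above then adds the
   cost of each REG operand, which is zero, so the total stays at
   COSTS_N_INSNS (5).  */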
3480 /* Return cost of address expression X.
3481 Expect that X is a properly formed address reference.  */
3484 address_cost (rtx x, enum machine_mode mode)
3486 /* We may be asked for the cost of various unusual addresses, such as the
3487 operands of a push instruction.  It is not worthwhile to complicate the
3488 target hook to handle such cases.  */
3490 if (!memory_address_p (mode, x))
3493 return targetm.address_cost (x);
3496 /* If the target doesn't override, compute the cost as with arithmetic. */
3499 default_address_cost (rtx x)
3501 return rtx_cost (x, MEM);
3505 unsigned HOST_WIDE_INT
3506 nonzero_bits (const_rtx x, enum machine_mode mode)
3508 return cached_nonzero_bits (x, mode, NULL_RTX, VOIDmode, 0);
3512 num_sign_bit_copies (const_rtx x, enum machine_mode mode)
3514 return cached_num_sign_bit_copies (x, mode, NULL_RTX, VOIDmode, 0);
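/* Two illustrative facts about the entry points above (assuming
   32-bit SImode): nonzero_bits of (and:SI (reg:SI) (const_int 255))
   is at most 255, because the AND case below intersects the operand
   masks; num_sign_bit_copies of (sign_extend:SI (reg:QI)) is at
   least 25, because the SIGN_EXTEND case adds the 24 widened bits to
   the one sign bit every QImode value is guaranteed to have.  */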
3517 /* The function cached_nonzero_bits is a wrapper around nonzero_bits1.
3518 It avoids exponential behavior in nonzero_bits1 when X has
3519 identical subexpressions on the first or the second level. */
3521 static unsigned HOST_WIDE_INT
3522 cached_nonzero_bits (const_rtx x, enum machine_mode mode, const_rtx known_x,
3523 enum machine_mode known_mode,
3524 unsigned HOST_WIDE_INT known_ret)
3526 if (x == known_x && mode == known_mode)
3529 /* Try to find identical subexpressions. If found call
3530 nonzero_bits1 on X with the subexpressions as KNOWN_X and the
3531 precomputed value for the subexpression as KNOWN_RET. */
3533 if (ARITHMETIC_P (x))
3535 rtx x0 = XEXP (x, 0);
3536 rtx x1 = XEXP (x, 1);
3538 /* Check the first level. */
3540 return nonzero_bits1 (x, mode, x0, mode,
3541 cached_nonzero_bits (x0, mode, known_x,
3542 known_mode, known_ret));
3544 /* Check the second level. */
3545 if (ARITHMETIC_P (x0)
3546 && (x1 == XEXP (x0, 0) || x1 == XEXP (x0, 1)))
3547 return nonzero_bits1 (x, mode, x1, mode,
3548 cached_nonzero_bits (x1, mode, known_x,
3549 known_mode, known_ret));
3551 if (ARITHMETIC_P (x1)
3552 && (x0 == XEXP (x1, 0) || x0 == XEXP (x1, 1)))
3553 return nonzero_bits1 (x, mode, x0, mode,
3554 cached_nonzero_bits (x0, mode, known_x,
3555 known_mode, known_ret));
3558 return nonzero_bits1 (x, mode, known_x, known_mode, known_ret);
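/* For example, given (plus:SI (reg A) (reg A)) where both operands
   are the same rtx (illustrative), x0 == x1, so the first-level check
   above computes the nonzero bits of the shared operand once and
   passes them down as KNOWN_X/KNOWN_RET instead of recursing into the
   identical subexpression twice.  */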
3561 /* We let num_sign_bit_copies recur into nonzero_bits as that is useful.
3562 We don't let nonzero_bits recur into num_sign_bit_copies, because that
3563 is less useful. We can't allow both, because that results in exponential
3564 run time recursion. There is a nullstone testcase that triggered
3565 this. This macro avoids accidental uses of num_sign_bit_copies. */
3566 #define cached_num_sign_bit_copies sorry_i_am_preventing_exponential_behavior
3568 /* Given an expression, X, compute which bits in X can be nonzero.
3569 We don't care about bits outside of those defined in MODE.
3571 For most X this is simply GET_MODE_MASK (MODE), but if X is
3572 an arithmetic operation, we can do better. */
3574 static unsigned HOST_WIDE_INT
3575 nonzero_bits1 (const_rtx x, enum machine_mode mode, const_rtx known_x,
3576 enum machine_mode known_mode,
3577 unsigned HOST_WIDE_INT known_ret)
3579 unsigned HOST_WIDE_INT nonzero = GET_MODE_MASK (mode);
3580 unsigned HOST_WIDE_INT inner_nz;
3582 unsigned int mode_width = GET_MODE_BITSIZE (mode);
3584 /* For floating-point values, assume all bits are needed. */
3585 if (FLOAT_MODE_P (GET_MODE (x)) || FLOAT_MODE_P (mode))
3588 /* If X is wider than MODE, use its mode instead. */
3589 if (GET_MODE_BITSIZE (GET_MODE (x)) > mode_width)
3591 mode = GET_MODE (x);
3592 nonzero = GET_MODE_MASK (mode);
3593 mode_width = GET_MODE_BITSIZE (mode);
3596 if (mode_width > HOST_BITS_PER_WIDE_INT)
3597 /* Our only callers in this case look for single-bit values.  So
3598 just return the mode mask. Those tests will then be false. */
3601 #ifndef WORD_REGISTER_OPERATIONS
3602 /* If MODE is wider than X, but both are a single word for both the host
3603 and target machines, we can compute this from which bits of the
3604 object might be nonzero in its own mode, taking into account the fact
3605 that on many CISC machines, accessing an object in a wider mode
3606 causes the high-order bits to become undefined. So they are
3607 not known to be zero. */
3609 if (GET_MODE (x) != VOIDmode && GET_MODE (x) != mode
3610 && GET_MODE_BITSIZE (GET_MODE (x)) <= BITS_PER_WORD
3611 && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
3612 && GET_MODE_BITSIZE (mode) > GET_MODE_BITSIZE (GET_MODE (x)))
3614 nonzero &= cached_nonzero_bits (x, GET_MODE (x),
3615 known_x, known_mode, known_ret);
3616 nonzero |= GET_MODE_MASK (mode) & ~GET_MODE_MASK (GET_MODE (x));
3621 code = GET_CODE (x);
3625 #if defined(POINTERS_EXTEND_UNSIGNED) && !defined(HAVE_ptr_extend)
3626 /* If pointers extend unsigned and this is a pointer in Pmode, say that
3627 all the bits above ptr_mode are known to be zero. */
3628 if (POINTERS_EXTEND_UNSIGNED && GET_MODE (x) == Pmode
3630 nonzero &= GET_MODE_MASK (ptr_mode);
3633 /* Include declared information about alignment of pointers. */
3634 /* ??? We don't properly preserve REG_POINTER changes across
3635 pointer-to-integer casts, so we can't trust it except for
3636 things that we know must be pointers. See execute/960116-1.c. */
3637 if ((x == stack_pointer_rtx
3638 || x == frame_pointer_rtx
3639 || x == arg_pointer_rtx)
3640 && REGNO_POINTER_ALIGN (REGNO (x)))
3642 unsigned HOST_WIDE_INT alignment
3643 = REGNO_POINTER_ALIGN (REGNO (x)) / BITS_PER_UNIT;
3645 #ifdef PUSH_ROUNDING
3646 /* If PUSH_ROUNDING is defined, it is possible for the
3647 stack to be momentarily aligned only to that amount,
3648 so we pick the least alignment. */
3649 if (x == stack_pointer_rtx && PUSH_ARGS)
3650 alignment = MIN ((unsigned HOST_WIDE_INT) PUSH_ROUNDING (1),
3654 nonzero &= ~(alignment - 1);
3658 unsigned HOST_WIDE_INT nonzero_for_hook = nonzero;
3659 rtx new = rtl_hooks.reg_nonzero_bits (x, mode, known_x,
3660 known_mode, known_ret,
3664 nonzero_for_hook &= cached_nonzero_bits (new, mode, known_x,
3665 known_mode, known_ret);
3667 return nonzero_for_hook;
3671 #ifdef SHORT_IMMEDIATES_SIGN_EXTEND
3672 /* If X is negative in MODE, sign-extend the value. */
3673 if (INTVAL (x) > 0 && mode_width < BITS_PER_WORD
3674 && 0 != (INTVAL (x) & ((HOST_WIDE_INT) 1 << (mode_width - 1))))
3675 return (INTVAL (x) | ((HOST_WIDE_INT) (-1) << mode_width));
3681 #ifdef LOAD_EXTEND_OP
3682 /* In many, if not most, RISC machines, reading a byte from memory
3683 zeros the rest of the register. Noticing that fact saves a lot
3684 of extra zero-extends. */
3685 if (LOAD_EXTEND_OP (GET_MODE (x)) == ZERO_EXTEND)
3686 nonzero &= GET_MODE_MASK (GET_MODE (x));
3691 case UNEQ: case LTGT:
3692 case GT: case GTU: case UNGT:
3693 case LT: case LTU: case UNLT:
3694 case GE: case GEU: case UNGE:
3695 case LE: case LEU: case UNLE:
3696 case UNORDERED: case ORDERED:
3697 /* If this produces an integer result, we know which bits are set.
3698 Code here used to clear bits outside the mode of X, but that is wrong.  */
3700 /* Mind that MODE is the mode the caller wants to look at this
3701 operation in, and not the actual operation mode. We can wind
3702 up with (subreg:DI (gt:V4HI x y)), and we don't have anything
3703 that describes the results of a vector compare. */
3704 if (GET_MODE_CLASS (GET_MODE (x)) == MODE_INT
3705 && mode_width <= HOST_BITS_PER_WIDE_INT)
3706 nonzero = STORE_FLAG_VALUE;
3711 /* Disabled to avoid exponential mutual recursion between nonzero_bits
3712 and num_sign_bit_copies. */
3713 if (num_sign_bit_copies (XEXP (x, 0), GET_MODE (x))
3714 == GET_MODE_BITSIZE (GET_MODE (x)))
3718 if (GET_MODE_SIZE (GET_MODE (x)) < mode_width)
3719 nonzero |= (GET_MODE_MASK (mode) & ~GET_MODE_MASK (GET_MODE (x)));
3724 /* Disabled to avoid exponential mutual recursion between nonzero_bits
3725 and num_sign_bit_copies. */
3726 if (num_sign_bit_copies (XEXP (x, 0), GET_MODE (x))
3727 == GET_MODE_BITSIZE (GET_MODE (x)))
3733 nonzero &= (cached_nonzero_bits (XEXP (x, 0), mode,
3734 known_x, known_mode, known_ret)
3735 & GET_MODE_MASK (mode));
3739 nonzero &= cached_nonzero_bits (XEXP (x, 0), mode,
3740 known_x, known_mode, known_ret);
3741 if (GET_MODE (XEXP (x, 0)) != VOIDmode)
3742 nonzero &= GET_MODE_MASK (GET_MODE (XEXP (x, 0)));
3746 /* If the sign bit is known clear, this is the same as ZERO_EXTEND.
3747 Otherwise, show all the bits in the outer mode but not the inner mode.  */
3749 inner_nz = cached_nonzero_bits (XEXP (x, 0), mode,
3750 known_x, known_mode, known_ret);
3751 if (GET_MODE (XEXP (x, 0)) != VOIDmode)
3753 inner_nz &= GET_MODE_MASK (GET_MODE (XEXP (x, 0)));
3755 & (((HOST_WIDE_INT) 1
3756 << (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) - 1))))
3757 inner_nz |= (GET_MODE_MASK (mode)
3758 & ~GET_MODE_MASK (GET_MODE (XEXP (x, 0))));
3761 nonzero &= inner_nz;
3765 nonzero &= cached_nonzero_bits (XEXP (x, 0), mode,
3766 known_x, known_mode, known_ret)
3767 & cached_nonzero_bits (XEXP (x, 1), mode,
3768 known_x, known_mode, known_ret);
3772 case UMIN: case UMAX: case SMIN: case SMAX:
3774 unsigned HOST_WIDE_INT nonzero0 =
3775 cached_nonzero_bits (XEXP (x, 0), mode,
3776 known_x, known_mode, known_ret);
3778 /* Don't call nonzero_bits for the second time if it cannot change anything.  */
3780 if ((nonzero & nonzero0) != nonzero)
3782 | cached_nonzero_bits (XEXP (x, 1), mode,
3783 known_x, known_mode, known_ret);
3787 case PLUS: case MINUS:
3789 case DIV: case UDIV:
3790 case MOD: case UMOD:
3791 /* We can apply the rules of arithmetic to compute the number of
3792 high- and low-order zero bits of these operations. We start by
3793 computing the width (position of the highest-order nonzero bit)
3794 and the number of low-order zero bits for each value. */
3796 unsigned HOST_WIDE_INT nz0 =
3797 cached_nonzero_bits (XEXP (x, 0), mode,
3798 known_x, known_mode, known_ret);
3799 unsigned HOST_WIDE_INT nz1 =
3800 cached_nonzero_bits (XEXP (x, 1), mode,
3801 known_x, known_mode, known_ret);
3802 int sign_index = GET_MODE_BITSIZE (GET_MODE (x)) - 1;
3803 int width0 = floor_log2 (nz0) + 1;
3804 int width1 = floor_log2 (nz1) + 1;
3805 int low0 = floor_log2 (nz0 & -nz0);
3806 int low1 = floor_log2 (nz1 & -nz1);
3807 HOST_WIDE_INT op0_maybe_minusp
3808 = (nz0 & ((HOST_WIDE_INT) 1 << sign_index));
3809 HOST_WIDE_INT op1_maybe_minusp
3810 = (nz1 & ((HOST_WIDE_INT) 1 << sign_index));
3811 unsigned int result_width = mode_width;
3817 result_width = MAX (width0, width1) + 1;
3818 result_low = MIN (low0, low1);
3821 result_low = MIN (low0, low1);
3824 result_width = width0 + width1;
3825 result_low = low0 + low1;
3830 if (! op0_maybe_minusp && ! op1_maybe_minusp)
3831 result_width = width0;
3836 result_width = width0;
3841 if (! op0_maybe_minusp && ! op1_maybe_minusp)
3842 result_width = MIN (width0, width1);
3843 result_low = MIN (low0, low1);
3848 result_width = MIN (width0, width1);
3849 result_low = MIN (low0, low1);
3855 if (result_width < mode_width)
3856 nonzero &= ((HOST_WIDE_INT) 1 << result_width) - 1;
3859 nonzero &= ~(((HOST_WIDE_INT) 1 << result_low) - 1);
3861 #ifdef POINTERS_EXTEND_UNSIGNED
3862 /* If pointers extend unsigned and this is an addition or subtraction
3863 to a pointer in Pmode, all the bits above ptr_mode are known to be zero.  */
3865 if (POINTERS_EXTEND_UNSIGNED > 0 && GET_MODE (x) == Pmode
3866 && (code == PLUS || code == MINUS)
3867 && REG_P (XEXP (x, 0)) && REG_POINTER (XEXP (x, 0)))
3868 nonzero &= GET_MODE_MASK (ptr_mode);
3874 if (GET_CODE (XEXP (x, 1)) == CONST_INT
3875 && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT)
3876 nonzero &= ((HOST_WIDE_INT) 1 << INTVAL (XEXP (x, 1))) - 1;
3880 /* If this is a SUBREG formed for a promoted variable that has
3881 been zero-extended, we know that at least the high-order bits
3882 are zero, though others might be too. */
3884 if (SUBREG_PROMOTED_VAR_P (x) && SUBREG_PROMOTED_UNSIGNED_P (x) > 0)
3885 nonzero = GET_MODE_MASK (GET_MODE (x))
3886 & cached_nonzero_bits (SUBREG_REG (x), GET_MODE (x),
3887 known_x, known_mode, known_ret);
3889 /* If the inner mode is a single word for both the host and target
3890 machines, we can compute this from which bits of the inner
3891 object might be nonzero. */
3892 if (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))) <= BITS_PER_WORD
3893 && (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))
3894 <= HOST_BITS_PER_WIDE_INT))
3896 nonzero &= cached_nonzero_bits (SUBREG_REG (x), mode,
3897 known_x, known_mode, known_ret);
3899 #if defined (WORD_REGISTER_OPERATIONS) && defined (LOAD_EXTEND_OP)
3900 /* If this is a typical RISC machine, we only have to worry
3901 about the way loads are extended. */
3902 if ((LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (x))) == SIGN_EXTEND
3904 & (((unsigned HOST_WIDE_INT) 1
3905 << (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))) - 1))))
3907 : LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (x))) != ZERO_EXTEND)
3908 || !MEM_P (SUBREG_REG (x)))
3911 /* On many CISC machines, accessing an object in a wider mode
3912 causes the high-order bits to become undefined. So they are
3913 not known to be zero. */
3914 if (GET_MODE_SIZE (GET_MODE (x))
3915 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
3916 nonzero |= (GET_MODE_MASK (GET_MODE (x))
3917 & ~GET_MODE_MASK (GET_MODE (SUBREG_REG (x))));
3926 /* The nonzero bits are in two classes: any bits within MODE
3927 that aren't in GET_MODE (x) are always significant. The rest of the
3928 nonzero bits are those that are significant in the operand of
3929 the shift when shifted the appropriate number of bits. This
3930 shows that high-order bits are cleared by the right shift and
3931 low-order bits by left shifts. */
3932 if (GET_CODE (XEXP (x, 1)) == CONST_INT
3933 && INTVAL (XEXP (x, 1)) >= 0
3934 && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT)
3936 enum machine_mode inner_mode = GET_MODE (x);
3937 unsigned int width = GET_MODE_BITSIZE (inner_mode);
3938 int count = INTVAL (XEXP (x, 1));
3939 unsigned HOST_WIDE_INT mode_mask = GET_MODE_MASK (inner_mode);
3940 unsigned HOST_WIDE_INT op_nonzero =
3941 cached_nonzero_bits (XEXP (x, 0), mode,
3942 known_x, known_mode, known_ret);
3943 unsigned HOST_WIDE_INT inner = op_nonzero & mode_mask;
3944 unsigned HOST_WIDE_INT outer = 0;
3946 if (mode_width > width)
3947 outer = (op_nonzero & nonzero & ~mode_mask);
3949 if (code == LSHIFTRT)
3951 else if (code == ASHIFTRT)
3955 /* If the sign bit may have been nonzero before the shift, we
3956 need to mark all the places it could have been copied to
3957 by the shift as possibly nonzero. */
3958 if (inner & ((HOST_WIDE_INT) 1 << (width - 1 - count)))
3959 inner |= (((HOST_WIDE_INT) 1 << count) - 1) << (width - count);
3961 else if (code == ASHIFT)
3964 inner = ((inner << (count % width)
3965 | (inner >> (width - (count % width)))) & mode_mask);
3967 nonzero &= (outer | inner);
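/* E.g. for (lshiftrt:SI (reg:SI) (const_int 24)) (illustrative),
   INNER is shifted right by 24, so at most the low 8 bits of the
   result can be nonzero -- the "high-order bits are cleared by the
   right shift" observation from the comment above.  */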
3973 /* This is at most the number of bits in the mode. */
3974 nonzero = ((HOST_WIDE_INT) 2 << (floor_log2 (mode_width))) - 1;
3978 /* If CLZ has a known value at zero, then the nonzero bits are
3979 that value, plus the number of bits in the mode minus one. */
3980 if (CLZ_DEFINED_VALUE_AT_ZERO (mode, nonzero))
3981 nonzero |= ((HOST_WIDE_INT) 1 << (floor_log2 (mode_width))) - 1;
3987 /* If CTZ has a known value at zero, then the nonzero bits are
3988 that value, plus the number of bits in the mode minus one. */
3989 if (CTZ_DEFINED_VALUE_AT_ZERO (mode, nonzero))
3990 nonzero |= ((HOST_WIDE_INT) 1 << (floor_log2 (mode_width))) - 1;
4001 unsigned HOST_WIDE_INT nonzero_true =
4002 cached_nonzero_bits (XEXP (x, 1), mode,
4003 known_x, known_mode, known_ret);
4005 /* Don't call nonzero_bits for the second time if it cannot change anything.  */
4007 if ((nonzero & nonzero_true) != nonzero)
4008 nonzero &= nonzero_true
4009 | cached_nonzero_bits (XEXP (x, 2), mode,
4010 known_x, known_mode, known_ret);
4021 /* See the macro definition above. */
4022 #undef cached_num_sign_bit_copies
4025 /* The function cached_num_sign_bit_copies is a wrapper around
4026 num_sign_bit_copies1. It avoids exponential behavior in
4027 num_sign_bit_copies1 when X has identical subexpressions on the
4028 first or the second level. */
4031 cached_num_sign_bit_copies (const_rtx x, enum machine_mode mode, const_rtx known_x,
4032 enum machine_mode known_mode,
4033 unsigned int known_ret)
4035 if (x == known_x && mode == known_mode)
4038 /* Try to find identical subexpressions. If found call
4039 num_sign_bit_copies1 on X with the subexpressions as KNOWN_X and
4040 the precomputed value for the subexpression as KNOWN_RET. */
4042 if (ARITHMETIC_P (x))
4044 rtx x0 = XEXP (x, 0);
4045 rtx x1 = XEXP (x, 1);
4047 /* Check the first level. */
4050 num_sign_bit_copies1 (x, mode, x0, mode,
4051 cached_num_sign_bit_copies (x0, mode, known_x,
4055 /* Check the second level. */
4056 if (ARITHMETIC_P (x0)
4057 && (x1 == XEXP (x0, 0) || x1 == XEXP (x0, 1)))
4059 num_sign_bit_copies1 (x, mode, x1, mode,
4060 cached_num_sign_bit_copies (x1, mode, known_x,
4064 if (ARITHMETIC_P (x1)
4065 && (x0 == XEXP (x1, 0) || x0 == XEXP (x1, 1)))
4067 num_sign_bit_copies1 (x, mode, x0, mode,
4068 cached_num_sign_bit_copies (x0, mode, known_x,
4073 return num_sign_bit_copies1 (x, mode, known_x, known_mode, known_ret);
4076 /* Return the number of bits at the high-order end of X that are known to
4077 be equal to the sign bit. X will be used in mode MODE; if MODE is
4078 VOIDmode, X will be used in its own mode. The returned value will always
4079 be between 1 and the number of bits in MODE. */
4082 num_sign_bit_copies1 (const_rtx x, enum machine_mode mode, const_rtx known_x,
4083 enum machine_mode known_mode,
4084 unsigned int known_ret)
4086 enum rtx_code code = GET_CODE (x);
4087 unsigned int bitwidth = GET_MODE_BITSIZE (mode);
4088 int num0, num1, result;
4089 unsigned HOST_WIDE_INT nonzero;
4091 /* If we weren't given a mode, use the mode of X. If the mode is still
4092 VOIDmode, we don't know anything.  Likewise if one of the modes is floating-point.  */
4095 if (mode == VOIDmode)
4096 mode = GET_MODE (x);
4098 if (mode == VOIDmode || FLOAT_MODE_P (mode) || FLOAT_MODE_P (GET_MODE (x)))
4101 /* For a smaller object, just ignore the high bits. */
4102 if (bitwidth < GET_MODE_BITSIZE (GET_MODE (x)))
4104 num0 = cached_num_sign_bit_copies (x, GET_MODE (x),
4105 known_x, known_mode, known_ret);
4107 num0 - (int) (GET_MODE_BITSIZE (GET_MODE (x)) - bitwidth));
4110 if (GET_MODE (x) != VOIDmode && bitwidth > GET_MODE_BITSIZE (GET_MODE (x)))
4112 #ifndef WORD_REGISTER_OPERATIONS
4113 /* If this machine does not do all register operations on the entire
4114 register and MODE is wider than the mode of X, we can say nothing
4115 at all about the high-order bits. */
4118 /* Likewise on machines that do, if the mode of the object is smaller
4119 than a word and loads of that size don't sign extend, we can say
4120 nothing about the high order bits. */
4121 if (GET_MODE_BITSIZE (GET_MODE (x)) < BITS_PER_WORD
4122 #ifdef LOAD_EXTEND_OP
4123 && LOAD_EXTEND_OP (GET_MODE (x)) != SIGN_EXTEND
4134 #if defined(POINTERS_EXTEND_UNSIGNED) && !defined(HAVE_ptr_extend)
4135 /* If pointers extend signed and this is a pointer in Pmode, say that
4136 all the bits above ptr_mode are known to be sign bit copies. */
4137 if (! POINTERS_EXTEND_UNSIGNED && GET_MODE (x) == Pmode && mode == Pmode
4139 return GET_MODE_BITSIZE (Pmode) - GET_MODE_BITSIZE (ptr_mode) + 1;
4143 unsigned int copies_for_hook = 1, copies = 1;
4144 rtx new = rtl_hooks.reg_num_sign_bit_copies (x, mode, known_x,
4145 known_mode, known_ret,
4149 copies = cached_num_sign_bit_copies (new, mode, known_x,
4150 known_mode, known_ret);
4152 if (copies > 1 || copies_for_hook > 1)
4153 return MAX (copies, copies_for_hook);
4155 /* Else, use nonzero_bits to guess num_sign_bit_copies (see below). */
4160 #ifdef LOAD_EXTEND_OP
4161 /* Some RISC machines sign-extend all loads smaller than a word.  */
4162 if (LOAD_EXTEND_OP (GET_MODE (x)) == SIGN_EXTEND)
4163 return MAX (1, ((int) bitwidth
4164 - (int) GET_MODE_BITSIZE (GET_MODE (x)) + 1));
4169 /* If the constant is negative, take its 1's complement and remask.
4170 Then see how many zero bits we have. */
4171 nonzero = INTVAL (x) & GET_MODE_MASK (mode);
4172 if (bitwidth <= HOST_BITS_PER_WIDE_INT
4173 && (nonzero & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
4174 nonzero = (~nonzero) & GET_MODE_MASK (mode);
4176 return (nonzero == 0 ? bitwidth : bitwidth - floor_log2 (nonzero) - 1);
4179 /* If this is a SUBREG for a promoted object that is sign-extended
4180 and we are looking at it in a wider mode, we know that at least the
4181 high-order bits are sign bit copies.  */
4183 if (SUBREG_PROMOTED_VAR_P (x) && ! SUBREG_PROMOTED_UNSIGNED_P (x))
4185 num0 = cached_num_sign_bit_copies (SUBREG_REG (x), mode,
4186 known_x, known_mode, known_ret);
4187 return MAX ((int) bitwidth
4188 - (int) GET_MODE_BITSIZE (GET_MODE (x)) + 1,
4192 /* For a smaller object, just ignore the high bits. */
4193 if (bitwidth <= GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))))
4195 num0 = cached_num_sign_bit_copies (SUBREG_REG (x), VOIDmode,
4196 known_x, known_mode, known_ret);
4197 return MAX (1, (num0
4198 - (int) (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))
4202 #ifdef WORD_REGISTER_OPERATIONS
4203 #ifdef LOAD_EXTEND_OP
4204 /* For paradoxical SUBREGs on machines where all register operations
4205 affect the entire register, just look inside. Note that we are
4206 passing MODE to the recursive call, so the number of sign bit copies
4207 will remain relative to that mode, not the inner mode. */
4209 /* This works only if loads sign extend. Otherwise, if we get a
4210 reload for the inner part, it may be loaded from the stack, and
4211 then we lose all sign bit copies that existed before the store to the stack.  */
4214 if ((GET_MODE_SIZE (GET_MODE (x))
4215 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
4216 && LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (x))) == SIGN_EXTEND
4217 && MEM_P (SUBREG_REG (x)))
4218 return cached_num_sign_bit_copies (SUBREG_REG (x), mode,
4219 known_x, known_mode, known_ret);
4225 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
4226 return MAX (1, (int) bitwidth - INTVAL (XEXP (x, 1)));
4230 return (bitwidth - GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
4231 + cached_num_sign_bit_copies (XEXP (x, 0), VOIDmode,
4232 known_x, known_mode, known_ret));
4235 /* For a smaller object, just ignore the high bits. */
4236 num0 = cached_num_sign_bit_copies (XEXP (x, 0), VOIDmode,
4237 known_x, known_mode, known_ret);
4238 return MAX (1, (num0 - (int) (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
4242 return cached_num_sign_bit_copies (XEXP (x, 0), mode,
4243 known_x, known_mode, known_ret);
4245 case ROTATE: case ROTATERT:
4246 /* If we are rotating left by a number of bits less than the number
4247 of sign bit copies, we can just subtract that amount from the number.  */
4249 if (GET_CODE (XEXP (x, 1)) == CONST_INT
4250 && INTVAL (XEXP (x, 1)) >= 0
4251 && INTVAL (XEXP (x, 1)) < (int) bitwidth)
4253 num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4254 known_x, known_mode, known_ret);
4255 return MAX (1, num0 - (code == ROTATE ? INTVAL (XEXP (x, 1))
4256 : (int) bitwidth - INTVAL (XEXP (x, 1))));
4261 /* In general, this subtracts one sign bit copy. But if the value
4262 is known to be positive, the number of sign bit copies is the
4263 same as that of the input. Finally, if the input has just one bit
4264 that might be nonzero, all the bits are copies of the sign bit. */
4265 num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4266 known_x, known_mode, known_ret);
4267 if (bitwidth > HOST_BITS_PER_WIDE_INT)
4268 return num0 > 1 ? num0 - 1 : 1;
4270 nonzero = nonzero_bits (XEXP (x, 0), mode);
4275 && (((HOST_WIDE_INT) 1 << (bitwidth - 1)) & nonzero))
4280 case IOR: case AND: case XOR:
4281 case SMIN: case SMAX: case UMIN: case UMAX:
4282 /* Logical operations will preserve the number of sign-bit copies.
4283 MIN and MAX operations always return one of the operands. */
4284 num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4285 known_x, known_mode, known_ret);
4286 num1 = cached_num_sign_bit_copies (XEXP (x, 1), mode,
4287 known_x, known_mode, known_ret);
4289 /* If num1 is clearing some of the top bits, then regardless of
4290 the other term, we are guaranteed to have at least that many
4291 high-order zero bits. */
4294 && bitwidth <= HOST_BITS_PER_WIDE_INT
4295 && GET_CODE (XEXP (x, 1)) == CONST_INT
4296 && !(INTVAL (XEXP (x, 1)) & ((HOST_WIDE_INT) 1 << (bitwidth - 1))))
4299 /* Similarly for IOR when setting high-order bits. */
4302 && bitwidth <= HOST_BITS_PER_WIDE_INT
4303 && GET_CODE (XEXP (x, 1)) == CONST_INT
4304 && (INTVAL (XEXP (x, 1)) & ((HOST_WIDE_INT) 1 << (bitwidth - 1))))
4307 return MIN (num0, num1);
4309 case PLUS: case MINUS:
4310 /* For addition and subtraction, we can have a 1-bit carry. However,
4311 if we are subtracting 1 from a positive number, there will not
4312 be such a carry. Furthermore, if the positive number is known to
4313 be 0 or 1, we know the result is either -1 or 0. */
4315 if (code == PLUS && XEXP (x, 1) == constm1_rtx
4316 && bitwidth <= HOST_BITS_PER_WIDE_INT)
4318 nonzero = nonzero_bits (XEXP (x, 0), mode);
4319 if ((((HOST_WIDE_INT) 1 << (bitwidth - 1)) & nonzero) == 0)
4320 return (nonzero == 1 || nonzero == 0 ? bitwidth
4321 : bitwidth - floor_log2 (nonzero) - 1);
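/* E.g. for (plus:SI (reg) (const_int -1)) where nonzero_bits of the
   reg is 1 (illustrative), the value is either -1 or 0, so every bit
   is a copy of the sign bit and the code above returns the full
   bitwidth.  */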
4324 num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4325 known_x, known_mode, known_ret);
4326 num1 = cached_num_sign_bit_copies (XEXP (x, 1), mode,
4327 known_x, known_mode, known_ret);
4328 result = MAX (1, MIN (num0, num1) - 1);
4330 #ifdef POINTERS_EXTEND_UNSIGNED
4331 /* If pointers extend signed and this is an addition or subtraction
4332 to a pointer in Pmode, all the bits above ptr_mode are known to be sign bit copies.  */
4334 if (! POINTERS_EXTEND_UNSIGNED && GET_MODE (x) == Pmode
4335 && (code == PLUS || code == MINUS)
4336 && REG_P (XEXP (x, 0)) && REG_POINTER (XEXP (x, 0)))
4337 result = MAX ((int) (GET_MODE_BITSIZE (Pmode)
4338 - GET_MODE_BITSIZE (ptr_mode) + 1),
4344 /* The number of bits of the product is the sum of the number of
4345 bits of both terms.  However, unless one of the terms is known
4346 to be positive, we must allow for an additional bit since negating
4347 a negative number can remove one sign bit copy. */
4349 num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4350 known_x, known_mode, known_ret);
4351 num1 = cached_num_sign_bit_copies (XEXP (x, 1), mode,
4352 known_x, known_mode, known_ret);
4354 result = bitwidth - (bitwidth - num0) - (bitwidth - num1);
4356 && (bitwidth > HOST_BITS_PER_WIDE_INT
4357 || (((nonzero_bits (XEXP (x, 0), mode)
4358 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
4359 && ((nonzero_bits (XEXP (x, 1), mode)
4360 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0))))
4363 return MAX (1, result);
4366 /* The result must be <= the first operand. If the first operand
4367 has the high bit set, we know nothing about the number of sign bit copies.  */
4369 if (bitwidth > HOST_BITS_PER_WIDE_INT)
4371 else if ((nonzero_bits (XEXP (x, 0), mode)
4372 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
4375 return cached_num_sign_bit_copies (XEXP (x, 0), mode,
4376 known_x, known_mode, known_ret);
4379 /* The result must be <= the second operand. */
4380 return cached_num_sign_bit_copies (XEXP (x, 1), mode,
4381 known_x, known_mode, known_ret);
4384 /* Similar to unsigned division, except that we have to worry about
4385 the case where the divisor is negative, in which case we have to add 1.  */
4387 result = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4388 known_x, known_mode, known_ret);
4390 && (bitwidth > HOST_BITS_PER_WIDE_INT
4391 || (nonzero_bits (XEXP (x, 1), mode)
4392 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0))
4398 result = cached_num_sign_bit_copies (XEXP (x, 1), mode,
4399 known_x, known_mode, known_ret);
4401 && (bitwidth > HOST_BITS_PER_WIDE_INT
4402 || (nonzero_bits (XEXP (x, 1), mode)
4403 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0))
4409 /* Shifts by a constant add to the number of bits equal to the sign bit.  */
4411 num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4412 known_x, known_mode, known_ret);
4413 if (GET_CODE (XEXP (x, 1)) == CONST_INT
4414 && INTVAL (XEXP (x, 1)) > 0)
4415 num0 = MIN ((int) bitwidth, num0 + INTVAL (XEXP (x, 1)));
4420 /* Left shifts destroy copies. */
4421 if (GET_CODE (XEXP (x, 1)) != CONST_INT
4422 || INTVAL (XEXP (x, 1)) < 0
4423 || INTVAL (XEXP (x, 1)) >= (int) bitwidth)
4426 num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4427 known_x, known_mode, known_ret);
4428 return MAX (1, num0 - INTVAL (XEXP (x, 1)));
4431 num0 = cached_num_sign_bit_copies (XEXP (x, 1), mode,
4432 known_x, known_mode, known_ret);
4433 num1 = cached_num_sign_bit_copies (XEXP (x, 2), mode,
4434 known_x, known_mode, known_ret);
4435 return MIN (num0, num1);
4437 case EQ: case NE: case GE: case GT: case LE: case LT:
4438 case UNEQ: case LTGT: case UNGE: case UNGT: case UNLE: case UNLT:
4439 case GEU: case GTU: case LEU: case LTU:
4440 case UNORDERED: case ORDERED:
4441 /* If the constant is negative, take its 1's complement and remask.
4442 Then see how many zero bits we have. */
4443 nonzero = STORE_FLAG_VALUE;
4444 if (bitwidth <= HOST_BITS_PER_WIDE_INT
4445 && (nonzero & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
4446 nonzero = (~nonzero) & GET_MODE_MASK (mode);
4448 return (nonzero == 0 ? bitwidth : bitwidth - floor_log2 (nonzero) - 1);
4454 /* If we haven't been able to figure it out by one of the above rules,
4455 see if some of the high-order bits are known to be zero. If so,
4456 count those bits and return one less than that amount. If we can't
4457 safely compute the mask for this mode, always return BITWIDTH. */
4459 bitwidth = GET_MODE_BITSIZE (mode);
4460 if (bitwidth > HOST_BITS_PER_WIDE_INT)
4463 nonzero = nonzero_bits (x, mode);
4464 return nonzero & ((HOST_WIDE_INT) 1 << (bitwidth - 1))
4465 ? 1 : bitwidth - floor_log2 (nonzero) - 1;
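/* E.g. if nonzero_bits reports that only the low four bits of a
   32-bit value can be set (illustrative), floor_log2 (nonzero) is at
   most 3, and the fallback above returns at least 32 - 3 - 1 = 28
   sign-bit copies.  */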
4468 /* Calculate the rtx_cost of a single instruction. A return value of
4469 zero indicates an instruction pattern without a known cost. */
4472 insn_rtx_cost (rtx pat)
4477 /* Extract the single set rtx from the instruction pattern.
4478 We can't use single_set since we only have the pattern. */
4479 if (GET_CODE (pat) == SET)
4481 else if (GET_CODE (pat) == PARALLEL)
4484 for (i = 0; i < XVECLEN (pat, 0); i++)
4486 rtx x = XVECEXP (pat, 0, i);
4487 if (GET_CODE (x) == SET)
4500 cost = rtx_cost (SET_SRC (set), SET);
4501 return cost > 0 ? cost : COSTS_N_INSNS (1);
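/* A minimal usage sketch (illustrative): because a zero return means
   the cost is unknown, callers compare against a cost threshold
   rather than against zero, e.g.

     if (insn_rtx_cost (PATTERN (insn)) <= COSTS_N_INSNS (1))
       ... treat the instruction as cheap ...  */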
4504 /* Given an insn INSN and condition COND, return the condition in a
4505 canonical form to simplify testing by callers. Specifically:
4507 (1) The code will always be a comparison operation (EQ, NE, GT, etc.).
4508 (2) Both operands will be machine operands; (cc0) will have been replaced.
4509 (3) If an operand is a constant, it will be the second operand.
4510 (4) (LE x const) will be replaced with (LT x <const+1>) and similarly
4511 for GE, GEU, and LEU.
4513 If the condition cannot be understood, or is an inequality floating-point
4514 comparison which needs to be reversed, 0 will be returned.
4516 If REVERSE is nonzero, then reverse the condition prior to canonicalizing it.
4518 If EARLIEST is nonzero, it is a pointer to a place where the earliest
4519 insn used in locating the condition was found. If a replacement test
4520 of the condition is desired, it should be placed in front of that
4521 insn and we will be sure that the inputs are still valid.
4523 If WANT_REG is nonzero, we wish the condition to be relative to that
4524 register, if possible. Therefore, do not canonicalize the condition
4525 further. If ALLOW_CC_MODE is nonzero, allow the condition returned
4526 to be a compare to a CC mode register.
4528 If VALID_AT_INSN_P, the condition must be valid at both *EARLIEST and at INSN.  */
4532 canonicalize_condition (rtx insn, rtx cond, int reverse, rtx *earliest,
4533 rtx want_reg, int allow_cc_mode, int valid_at_insn_p)
4540 int reverse_code = 0;
4541 enum machine_mode mode;
4542 basic_block bb = BLOCK_FOR_INSN (insn);
4544 code = GET_CODE (cond);
4545 mode = GET_MODE (cond);
4546 op0 = XEXP (cond, 0);
4547 op1 = XEXP (cond, 1);
4550 code = reversed_comparison_code (cond, insn);
4551 if (code == UNKNOWN)
4557 /* If we are comparing a register with zero, see if the register is set
4558 in the previous insn to a COMPARE or a comparison operation. Perform
4559 the same tests as a function of STORE_FLAG_VALUE as find_comparison_args in cse.c does.  */
4562 while ((GET_RTX_CLASS (code) == RTX_COMPARE
4563 || GET_RTX_CLASS (code) == RTX_COMM_COMPARE)
4564 && op1 == CONST0_RTX (GET_MODE (op0))
4567 /* Set nonzero when we find something of interest. */
4571 /* If this is a comparison with cc0, import the actual comparison from the compare insn.  */
4575 if ((prev = prev_nonnote_insn (prev)) == 0
4576 || !NONJUMP_INSN_P (prev)
4577 || (set = single_set (prev)) == 0
4578 || SET_DEST (set) != cc0_rtx)
4581 op0 = SET_SRC (set);
4582 op1 = CONST0_RTX (GET_MODE (op0));
4588 /* If this is a COMPARE, pick up the two things being compared. */
4589 if (GET_CODE (op0) == COMPARE)
4591 op1 = XEXP (op0, 1);
4592 op0 = XEXP (op0, 0);
4595 else if (!REG_P (op0))
4598 /* Go back to the previous insn. Stop if it is not an INSN. We also
4599 stop if it isn't a single set or if it has a REG_INC note because
4600 we don't want to bother dealing with it. */
4602 if ((prev = prev_nonnote_insn (prev)) == 0
4603 || !NONJUMP_INSN_P (prev)
4604 || FIND_REG_INC_NOTE (prev, NULL_RTX)
4605 /* In cfglayout mode, there do not have to be labels at the
4606 beginning of a block, or jumps at the end, so the previous
4607 conditions would not stop us when we reach the bb boundary.  */
4608 || BLOCK_FOR_INSN (prev) != bb)
4611 set = set_of (op0, prev);
4614 && (GET_CODE (set) != SET
4615 || !rtx_equal_p (SET_DEST (set), op0)))
4618 /* If this is setting OP0, get what it sets it to if it looks like a comparison.  */
4622 enum machine_mode inner_mode = GET_MODE (SET_DEST (set));
4623 #ifdef FLOAT_STORE_FLAG_VALUE
4624 REAL_VALUE_TYPE fsfv;
4627 /* ??? We may not combine comparisons done in a CCmode with
4628 comparisons not done in a CCmode. This is to aid targets
4629 like Alpha that have an IEEE compliant EQ instruction, and
4630 a non-IEEE compliant BEQ instruction. The use of CCmode is
4631 actually artificial, simply to prevent the combination, but
4632 should not affect other platforms.
4634 However, we must allow VOIDmode comparisons to match either
4635 CCmode or non-CCmode comparison, because some ports have
4636 modeless comparisons inside branch patterns.
4638 ??? This mode check should perhaps look more like the mode check
4639 in simplify_comparison in combine. */
4641 if ((GET_CODE (SET_SRC (set)) == COMPARE
4644 && GET_MODE_CLASS (inner_mode) == MODE_INT
4645 && (GET_MODE_BITSIZE (inner_mode)
4646 <= HOST_BITS_PER_WIDE_INT)
4647 && (STORE_FLAG_VALUE
4648 & ((HOST_WIDE_INT) 1
4649 << (GET_MODE_BITSIZE (inner_mode) - 1))))
4650 #ifdef FLOAT_STORE_FLAG_VALUE
4652 && SCALAR_FLOAT_MODE_P (inner_mode)
4653 && (fsfv = FLOAT_STORE_FLAG_VALUE (inner_mode),
4654 REAL_VALUE_NEGATIVE (fsfv)))
4657 && COMPARISON_P (SET_SRC (set))))
4658 && (((GET_MODE_CLASS (mode) == MODE_CC)
4659 == (GET_MODE_CLASS (inner_mode) == MODE_CC))
4660 || mode == VOIDmode || inner_mode == VOIDmode))
4662 else if (((code == EQ
4664 && (GET_MODE_BITSIZE (inner_mode)
4665 <= HOST_BITS_PER_WIDE_INT)
4666 && GET_MODE_CLASS (inner_mode) == MODE_INT
4667 && (STORE_FLAG_VALUE
4668 & ((HOST_WIDE_INT) 1
4669 << (GET_MODE_BITSIZE (inner_mode) - 1))))
4670 #ifdef FLOAT_STORE_FLAG_VALUE
4672 && SCALAR_FLOAT_MODE_P (inner_mode)
4673 && (fsfv = FLOAT_STORE_FLAG_VALUE (inner_mode),
4674 REAL_VALUE_NEGATIVE (fsfv)))
4677 && COMPARISON_P (SET_SRC (set))
4678 && (((GET_MODE_CLASS (mode) == MODE_CC)
4679 == (GET_MODE_CLASS (inner_mode) == MODE_CC))
4680 || mode == VOIDmode || inner_mode == VOIDmode))
4690 else if (reg_set_p (op0, prev))
4691 /* If this sets OP0, but not directly, we have to give up. */
4696 /* If the caller is expecting the condition to be valid at INSN,
4697 make sure X doesn't change before INSN. */
4698 if (valid_at_insn_p)
4699 if (modified_in_p (x, prev) || modified_between_p (x, prev, insn))
4701 if (COMPARISON_P (x))
4702 code = GET_CODE (x);
4705 code = reversed_comparison_code (x, prev);
4706 if (code == UNKNOWN)
4711 op0 = XEXP (x, 0), op1 = XEXP (x, 1);
4717 /* If the constant is first, put it last.  */
4718 if (CONSTANT_P (op0))
4719 code = swap_condition (code), tem = op0, op0 = op1, op1 = tem;
4721 /* If OP0 is the result of a comparison, we weren't able to find what
4722 was really being compared, so fail. */
4724 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_CC)
4727 /* Canonicalize any ordered comparison with integers involving equality
4728 if we can do computations in the relevant mode and we do not overflow.  */
4731 if (GET_MODE_CLASS (GET_MODE (op0)) != MODE_CC
4732 && GET_CODE (op1) == CONST_INT
4733 && GET_MODE (op0) != VOIDmode
4734 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
4736 HOST_WIDE_INT const_val = INTVAL (op1);
4737 unsigned HOST_WIDE_INT uconst_val = const_val;
4738 unsigned HOST_WIDE_INT max_val
4739 = (unsigned HOST_WIDE_INT) GET_MODE_MASK (GET_MODE (op0));
4744 if ((unsigned HOST_WIDE_INT) const_val != max_val >> 1)
4745 code = LT, op1 = gen_int_mode (const_val + 1, GET_MODE (op0));
4748 /* When cross-compiling, const_val might be sign-extended from
4749 BITS_PER_WORD to HOST_BITS_PER_WIDE_INT.  */
4751 if ((HOST_WIDE_INT) (const_val & max_val)
4752 != (((HOST_WIDE_INT) 1
4753 << (GET_MODE_BITSIZE (GET_MODE (op0)) - 1))))
4754 code = GT, op1 = gen_int_mode (const_val - 1, GET_MODE (op0));
4758 if (uconst_val < max_val)
4759 code = LTU, op1 = gen_int_mode (uconst_val + 1, GET_MODE (op0));
4763 if (uconst_val != 0)
4764 code = GTU, op1 = gen_int_mode (uconst_val - 1, GET_MODE (op0));
4772 /* Never return CC0; return zero instead. */
4776 return gen_rtx_fmt_ee (code, VOIDmode, op0, op1);
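/* A sketch of rule (4) above: (le (reg:SI) (const_int 4))
   canonicalizes to (lt (reg:SI) (const_int 5)), and
   (leu (reg:SI) (const_int 4)) to (ltu (reg:SI) (const_int 5)),
   provided the constant is not already the maximum value for the
   mode.  */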
4779 /* Given a jump insn JUMP, return the condition that will cause it to branch
4780 to its JUMP_LABEL. If the condition cannot be understood, or is an
4781 inequality floating-point comparison which needs to be reversed, 0 will be returned.
4784 If EARLIEST is nonzero, it is a pointer to a place where the earliest
4785 insn used in locating the condition was found. If a replacement test
4786 of the condition is desired, it should be placed in front of that
4787 insn and we will be sure that the inputs are still valid. If EARLIEST
4788 is null, the returned condition will be valid at INSN.
4790 If ALLOW_CC_MODE is nonzero, allow the condition returned to be a
4791 compare to a CC mode register.
4793 VALID_AT_INSN_P is the same as for canonicalize_condition. */
4796 get_condition (rtx jump, rtx *earliest, int allow_cc_mode, int valid_at_insn_p)
4802 /* If this is not a standard conditional jump, we can't parse it. */
4804 || ! any_condjump_p (jump))
4806 set = pc_set (jump);
4808 cond = XEXP (SET_SRC (set), 0);
4810 /* If this branches to JUMP_LABEL when the condition is false, reverse the condition.  */
4813 = GET_CODE (XEXP (SET_SRC (set), 2)) == LABEL_REF
4814 && XEXP (XEXP (SET_SRC (set), 2), 0) == JUMP_LABEL (jump);
4816 return canonicalize_condition (jump, cond, reverse, earliest, NULL_RTX,
4817 allow_cc_mode, valid_at_insn_p);
4820 /* Initialize the table NUM_SIGN_BIT_COPIES_IN_REP based on
4821 TARGET_MODE_REP_EXTENDED.
4823 Note that we assume that the property of
4824 TARGET_MODE_REP_EXTENDED(B, C) carries over to all integral modes
4825 narrower than mode B. I.e., if A is a mode narrower than B then in
4826 order to be able to operate on it in mode B, mode A needs to
4827 satisfy the requirements set by the representation of mode B. */
4830 init_num_sign_bit_copies_in_rep (void)
4832 enum machine_mode mode, in_mode;
4834 for (in_mode = GET_CLASS_NARROWEST_MODE (MODE_INT); in_mode != VOIDmode;
4835 in_mode = GET_MODE_WIDER_MODE (in_mode))
4836 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != in_mode;
4837 mode = GET_MODE_WIDER_MODE (mode))
4839 enum machine_mode i;
4841 /* Currently, it is assumed that TARGET_MODE_REP_EXTENDED
4842 extends to the next widest mode. */
4843 gcc_assert (targetm.mode_rep_extended (mode, in_mode) == UNKNOWN
4844 || GET_MODE_WIDER_MODE (mode) == in_mode);
4846 /* We are in in_mode. Count how many bits outside of mode
4847 have to be copies of the sign-bit. */
4848 for (i = mode; i != in_mode; i = GET_MODE_WIDER_MODE (i))
4850 enum machine_mode wider = GET_MODE_WIDER_MODE (i);
4852 if (targetm.mode_rep_extended (i, wider) == SIGN_EXTEND
4853 /* We can only check sign-bit copies starting from the
4854 top-bit. In order to be able to check the bits we
4855 have already seen we pretend that subsequent bits
4856 have to be sign-bit copies too. */
4857 || num_sign_bit_copies_in_rep [in_mode][mode])
4858 num_sign_bit_copies_in_rep [in_mode][mode]
4859 += GET_MODE_BITSIZE (wider) - GET_MODE_BITSIZE (i);
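/* For instance, if the target's mode_rep_extended hook returned
   SIGN_EXTEND for (QImode, HImode) and for (HImode, SImode) (purely
   hypothetical answers), the accumulation above would leave
   num_sign_bit_copies_in_rep[SImode][QImode] == 24: every bit above
   the low byte must be a copy of the QImode sign bit.  */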
4864 /* Suppose that truncation from the machine mode of X to MODE is not a
4865 no-op. See if there is anything special about X so that we can
4866 assume it already contains a truncated value of MODE. */
4869 truncated_to_mode (enum machine_mode mode, const_rtx x)
4871 /* This register has already been used in MODE without explicit truncation.  */
4873 if (REG_P (x) && rtl_hooks.reg_truncated_to_mode (mode, x))
4876 /* See if we already satisfy the requirements of MODE. If yes we
4877 can just switch to MODE. */
4878 if (num_sign_bit_copies_in_rep[GET_MODE (x)][mode]
4879 && (num_sign_bit_copies (x, GET_MODE (x))
4880 >= num_sign_bit_copies_in_rep[GET_MODE (x)][mode] + 1))
4886 /* Initialize non_rtx_starting_operands, which is used to speed up for_each_rtx.  */
4892 for (i = 0; i < NUM_RTX_CODE; i++)
4894 const char *format = GET_RTX_FORMAT (i);
4895 const char *first = strpbrk (format, "eEV");
4896 non_rtx_starting_operands[i] = first ? first - format : -1;
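/* E.g. a code whose format string is "iee" (hypothetical) gets 1
   here, since its first rtx operand sits at position 1, while a code
   whose format contains no 'e', 'E' or 'V' gets -1, letting
   for_each_rtx_1 skip it entirely.  */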
4899 init_num_sign_bit_copies_in_rep ();
4902 /* Check whether this is a constant pool constant. */
4904 constant_pool_constant_p (rtx x)
4906 x = avoid_constant_pool_reference (x);
4907 return GET_CODE (x) == CONST_DOUBLE;