fmt = GET_RTX_FORMAT (code);
for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
{
- int val;
+ int val, j;
switch (fmt[i])
{
case 'w':
case '0':
break;
+ case 'E':
+ if (XVECLEN (x, i) != XVECLEN (y, i))
+ return 0;
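+ /* Compare corresponding vector elements recursively.  */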
+ for (j = XVECLEN (x, i) - 1; j >= 0; --j)
+ {
+ val = operands_match_p (XVECEXP (x, i, j), XVECEXP (y, i, j));
+ if (val == 0)
+ return 0;
+ if (val == 2)
+ success_2 = 1;
+ }
+ break;
+
/* It is believed that rtx's at this level will never
contain anything but integers and other rtx's,
except for within LABEL_REFs and SYMBOL_REFs. */
substed_operand[i] = recog_operand[i]
= reg_equiv_mem[regno];
#endif
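+ /* Don't substitute the equivalent address when this operand
+ is the destination of the insn's SET.  */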
- if (reg_equiv_address[regno] != 0)
+ if (reg_equiv_address[regno] != 0
+ && (set == 0 || &SET_DEST (set) != recog_operand_loc[i]))
{
/* If reg_equiv_address is not a constant address, copy it,
since it may be shared. */
actually fail are extremely rare, so it turns out to be better to fix
the problem here by not generating cases that choose_reload_regs will
fail for. */
- /* There is a similar problem with RELAOD_FOR_INPUT_ADDRESS /
+ /* There is a similar problem with RELOAD_FOR_INPUT_ADDRESS /
RELOAD_FOR_OUTPUT_ADDRESS when there is more than one of a kind for
a single operand.
We can reduce the register pressure by exploiting that a
if (GET_RTX_CLASS (GET_CODE (p)) == 'i')
{
+ pat = PATTERN (p);
+
+ /* Watch out for unspec_volatile, and volatile asms. */
+ if (volatile_insn_p (pat))
+ return 0;
+
/* If this insn P stores in either GOAL or VALUE, return 0.
If GOAL is a memory ref and this insn writes memory, return 0.
If GOAL is a memory ref and its address is not constant,
and this insn P changes a register used in GOAL, return 0. */
- pat = PATTERN (p);
if (GET_CODE (pat) == SET || GET_CODE (pat) == CLOBBER)
{
register rtx dest = SET_DEST (pat);
&& reg_overlap_mentioned_for_reload_p (dest,
goal))
return 0;
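+ /* An insn that sets or clobbers the stack pointer cannot be
+ trusted when a stable stack pointer is required.  */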
+ if (xregno == STACK_POINTER_REGNUM && need_stable_sp)
+ return 0;
}
else if (goal_mem && GET_CODE (dest) == MEM
&& ! push_operand (dest, GET_MODE (dest)))