else
size = GET_MODE_SIZE (mode);
- /* Align all function arguments to start in even-numbered registers,
- for "movw" on the enhanced core (to keep call conventions the same
- on all devices, do it even if "movw" is not available). Odd-sized
- arguments leave holes above them - registers still available for
- other uses. Use -mpack-args for compatibility with old asm code
- (the new convention will still be used for libgcc calls). */
+ /* Align all function arguments to start in even-numbered registers.
+ Odd-sized arguments leave holes above them. */
- if (!(type && TARGET_PACK_ARGS))
- size += size & 1;
-
- return size;
+ return (size + 1) & ~1;
}
/* Controls whether a function argument is passed
return (AS1 (clr,%0) CR_TAB
AS1 (inc,%0));
}
- else if (src == const2_rtx)
- {
- if (reg_was_0 (insn, dest))
- {
- *l = 2;
- return (AS1 (inc,%0 ; reg_was_0) CR_TAB
- AS1 (inc,%0));
- }
-
- *l = 3;
- return (AS1 (clr,%0) CR_TAB
- AS1 (inc,%0) CR_TAB
- AS1 (inc,%0));
- }
else if (src == constm1_rtx)
{
/* Immediate constants -1 to any register */
return (AS1 (clr,%0) CR_TAB
AS1 (dec,%0));
}
-
+ else
+ {
+ int bit_nr = exact_log2 (INTVAL (src));
+
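+ /* Any constant with a single bit set can be loaded without an
+    upper register: "set" the T flag, then "bld" it into the right
+    bit of a cleared (or known-zero) destination.  */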
+ if (bit_nr >= 0)
+ {
+ if (reg_was_0 (insn, dest))
+ {
+ *l = 2;
+ if (!real_l)
+ output_asm_insn ("set ; reg_was_0", operands);
+ }
+ else
+ {
+ *l = 3;
+ if (!real_l)
+ output_asm_insn ((AS1 (clr,%0) CR_TAB
+ "set"), operands);
+ }
+ if (!real_l)
+ avr_output_bld (operands, bit_nr);
+
+ return "";
+ }
+ }
}
/* Last resort, larger than loading from memory. */
AS1 (clr,%B0) CR_TAB
AS1 (inc,%A0));
}
- else if (src == const2_rtx)
- {
- if (reg_was_0 (insn, dest))
- {
- *l = 2;
- return (AS1 (inc,%0 ; reg_was_0) CR_TAB
- AS1 (inc,%0));
- }
-
- *l = 4;
- return (AS1 (clr,%A0) CR_TAB
- AS1 (clr,%B0) CR_TAB
- AS1 (inc,%A0) CR_TAB
- AS1 (inc,%A0));
- }
else if (src == constm1_rtx)
{
/* Immediate constants -1 to any register */
AS1 (dec,%A0) CR_TAB
AS2 (mov,%B0,%A0));
}
+ else
+ {
+ int bit_nr = exact_log2 (INTVAL (src));
+
+ if (bit_nr >= 0)
+ {
+ if (reg_was_0 (insn, dest))
+ {
+ *l = 2;
+ if (!real_l)
+ output_asm_insn ("set ; reg_was_0", operands);
+ }
+ else
+ {
+ *l = 4;
+ if (!real_l)
+ output_asm_insn ((AS1 (clr,%A0) CR_TAB
+ AS1 (clr,%B0) CR_TAB
+ "set"), operands);
+ }
+ if (!real_l)
+ avr_output_bld (operands, bit_nr);
+
+ return "";
+ }
+ }
+
if ((INTVAL (src) & 0xff) == 0)
{
*l = 5;
if (GET_CODE (src) == CONST_INT)
{
+ const char *clr_op0 =
+ AVR_ENHANCED ? (AS1 (clr,%A0) CR_TAB
+ AS1 (clr,%B0) CR_TAB
+ AS2 (movw,%C0,%A0))
+ : (AS1 (clr,%A0) CR_TAB
+ AS1 (clr,%B0) CR_TAB
+ AS1 (clr,%C0) CR_TAB
+ AS1 (clr,%D0));
+
if (src == const0_rtx) /* mov r,L */
{
- if (AVR_ENHANCED)
- {
- *l = 3;
- return (AS1 (clr,%A0) CR_TAB
- AS1 (clr,%B0) CR_TAB
- AS2 (movw,%C0,%A0));
- }
- *l = 4;
- return (AS1 (clr,%A0) CR_TAB
- AS1 (clr,%B0) CR_TAB
- AS1 (clr,%C0) CR_TAB
- AS1 (clr,%D0));
+ *l = AVR_ENHANCED ? 3 : 4;
+ return clr_op0;
}
else if (src == const1_rtx)
{
*l = 1;
return AS1 (inc,%A0 ; reg_was_0);
}
- if (AVR_ENHANCED)
- {
- *l = 4;
- return (AS1 (clr,%A0) CR_TAB
- AS1 (clr,%B0) CR_TAB
- AS2 (movw,%C0,%A0) CR_TAB
- AS1 (inc,%A0));
- }
- *l = 5;
- return (AS1 (clr,%A0) CR_TAB
- AS1 (clr,%B0) CR_TAB
- AS1 (clr,%C0) CR_TAB
- AS1 (clr,%D0) CR_TAB
- AS1 (inc,%A0));
- }
- else if (src == const2_rtx)
- {
- if (reg_was_0 (insn, dest))
- {
- *l = 2;
- return (AS1 (inc,%A0 ; reg_was_0) CR_TAB
- AS1 (inc,%A0));
- }
-
- if (AVR_ENHANCED)
- {
- *l = 5;
- return (AS1 (clr,%D0) CR_TAB
- AS1 (clr,%C0) CR_TAB
- AS2 (movw,%A0,%C0) CR_TAB
- AS1 (inc,%A0) CR_TAB
- AS1 (inc,%A0));
- }
- *l = 6;
- return (AS1 (clr,%D0) CR_TAB
- AS1 (clr,%B0) CR_TAB
- AS1 (clr,%C0) CR_TAB
- AS1 (clr,%A0) CR_TAB
- AS1 (inc,%A0) CR_TAB
- AS1 (inc,%A0));
+ if (!real_l)
+ output_asm_insn (clr_op0, operands);
+ *l = AVR_ENHANCED ? 4 : 5;
+ return AS1 (inc,%A0);
}
else if (src == constm1_rtx)
{
AS2 (mov,%C0,%A0) CR_TAB
AS2 (mov,%D0,%A0));
}
+ else
+ {
+ int bit_nr = exact_log2 (INTVAL (src));
+
+ if (bit_nr >= 0)
+ {
+ if (reg_was_0 (insn, dest))
+ {
+ *l = 2;
+ if (!real_l)
+ output_asm_insn ("set ; reg_was_0", operands);
+ }
+ else
+ {
+ *l = AVR_ENHANCED ? 5 : 6;
+ if (!real_l)
+ {
+ output_asm_insn (clr_op0, operands);
+ output_asm_insn ("set", operands);
+ }
+ }
+ if (!real_l)
+ avr_output_bld (operands, bit_nr);
+
+ return "";
+ }
+ }
}
/* Last resort, better than loading from memory. */
}
-/* Generate asm equivalent for various shift's.
- Shift count are CONST_INT or REG. */
+/* Generate asm equivalent for various shifts.
+ Shift count is a CONST_INT, MEM or REG.
+ This only handles cases that are not already
+ carefully hand-optimized in ?sh??i3_out. */
void
-out_shift_with_cnt (template, insn, operands, len)
+out_shift_with_cnt (template, insn, operands, len, t_len)
const char *template;
rtx insn;
rtx operands[];
int *len;
+ int t_len; /* Length of template, in words. */
{
rtx op[10];
- char str[300];
+ char str[500];
int second_label = 1;
-
+ int saved_in_tmp = 0;
+ int use_zero_reg = 0;
+
op[0] = operands[0];
op[1] = operands[1];
op[2] = operands[2];
op[3] = operands[3];
str[0] = 0;
-
- if (CONSTANT_P (operands[2]))
+
+ if (len)
+ *len = 1;
+
+ if (GET_CODE (operands[2]) == CONST_INT)
{
- if (len)
- ++*len;
+ int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
+ int count = INTVAL (operands[2]);
+ int max_len = 10; /* If larger than this, always use a loop. */
+
+ if (count < 8 && !scratch)
+ use_zero_reg = 1;
+
+ if (optimize_size)
+ max_len = t_len + (scratch ? 3 : (use_zero_reg ? 4 : 5));
+
+ if (t_len * count <= max_len)
+ {
+ /* Output shifts inline with no loop - faster. */
+ if (len)
+ *len = t_len * count;
+ else
+ {
+ while (count-- > 0)
+ output_asm_insn (template, op);
+ }
+
+ return;
+ }
+
+ if (scratch)
+ {
+ if (!len)
+ strcat (str, AS2 (ldi,%3,%2));
+ }
+ else if (use_zero_reg)
+ {
+ /* Hack to save one word: use __zero_reg__ as loop counter.
+ Set one bit, then shift in a loop until it is 0 again. */
+
+ op[3] = zero_reg_rtx;
+ if (len)
+ *len = 2;
+ else
+ strcat (str, ("set" CR_TAB
+ AS2 (bld,%3,%2-1)));
+ }
else
- strcat (str, AS2 (ldi,%3,lo8((%2)-1)));
+ {
+ /* No scratch register available, use one from LD_REGS (saved in
+ __tmp_reg__) that doesn't overlap with registers to shift. */
+
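+ /* The LD_REGS register just below %0 (wrapping around within
+    r16..r31) can never be part of the value being shifted, so it
+    is safe to borrow after saving it in __tmp_reg__.  */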
+ op[3] = gen_rtx (REG, QImode,
+ ((true_regnum (operands[0]) - 1) & 15) + 16);
+ op[4] = tmp_reg_rtx;
+ saved_in_tmp = 1;
+
+ if (len)
+ *len = 3; /* Includes "mov %3,%4" after the loop. */
+ else
+ strcat (str, (AS2 (mov,%4,%3) CR_TAB
+ AS2 (ldi,%3,%2)));
+ }
+
second_label = 0;
}
else if (GET_CODE (operands[2]) == MEM)
{
- int mov_len;
rtx op_mov[10];
op[3] = op_mov[0] = tmp_reg_rtx;
op_mov[1] = op[2];
-
- if (!len)
- {
- output_asm_insn (out_movqi_r_mr (insn, op_mov, NULL), op_mov);
- strcat (str, AS1 (rjmp,2f));
- }
+
+ if (len)
+ out_movqi_r_mr (insn, op_mov, len);
else
- {
- out_movqi_r_mr (insn, op_mov, &mov_len);
- *len += mov_len + 1;
- }
+ output_asm_insn (out_movqi_r_mr (insn, op_mov, NULL), op_mov);
}
else if (register_operand (operands[2], QImode))
{
else
{
op[3] = tmp_reg_rtx;
- if (len)
- ++*len;
- else
- strcat (str, AS2 (mov,%3,%2) CR_TAB);
+ if (!len)
+ strcat (str, (AS2 (mov,%3,%2) CR_TAB));
}
-
+ }
+ else
+ fatal_insn ("Bad shift insn:", insn);
+
+ if (second_label)
+ {
if (len)
++*len;
else
strcat (str, AS1 (rjmp,2f));
-
}
- if (!len)
+
+ if (len)
+ *len += t_len + 2; /* template + dec + brXX */
+ else
{
strcat (str, "\n1:\t");
strcat (str, template);
strcat (str, second_label ? "\n2:\t" : "\n\t");
- strcat (str,
- AS1 (dec,%3) CR_TAB
- AS1 (brpl,1b));
+ strcat (str, use_zero_reg ? AS1 (lsr,%3) : AS1 (dec,%3));
+ strcat (str, CR_TAB);
+ strcat (str, second_label ? AS1 (brpl,1b) : AS1 (brne,1b));
+ if (saved_in_tmp)
+ strcat (str, (CR_TAB AS2 (mov,%3,%4)));
output_asm_insn (str, op);
}
}
else if (CONSTANT_P (operands[2]))
fatal_insn ("Internal compiler bug.\nIncorrect shift:", insn);
- if (len)
- *len = 3;
out_shift_with_cnt (AS1 (lsl,%0),
- insn, operands, len);
+ insn, operands, len, 1);
return "";
}
{
if (GET_CODE (operands[2]) == CONST_INT)
{
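+ /* "scratch" means the insn is a PARALLEL providing a clobbered
+    scratch register in %3; "ldi_ok" means %0 is in LD_REGS, so
+    immediates can be applied to it directly with ldi/andi.  */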
+ int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
+ int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
int k;
- int *t=len;
+ int *t = len;
if (!len)
len = &k;
switch (INTVAL (operands[2]))
{
- case 1:
- *len = 2;
- return (AS1 (lsl,%A0) CR_TAB
- AS1 (rol,%B0));
+ case 4:
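+ /* Shift by 4: swap the nibbles of both bytes, then use the
+    and/eor sequence to move the nibble crossing the byte boundary
+    into %B0 while clearing the vacated low nibble of %A0.  */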
+ if (optimize_size && scratch)
+ break; /* 5 */
+ if (ldi_ok)
+ {
+ *len = 6;
+ return (AS1 (swap,%A0) CR_TAB
+ AS1 (swap,%B0) CR_TAB
+ AS2 (andi,%B0,0xf0) CR_TAB
+ AS2 (eor,%B0,%A0) CR_TAB
+ AS2 (andi,%A0,0xf0) CR_TAB
+ AS2 (eor,%B0,%A0));
+ }
+ if (scratch)
+ {
+ *len = 7;
+ return (AS1 (swap,%A0) CR_TAB
+ AS1 (swap,%B0) CR_TAB
+ AS2 (ldi,%3,0xf0) CR_TAB
+ AS2 (and,%B0,%3) CR_TAB
+ AS2 (eor,%B0,%A0) CR_TAB
+ AS2 (and,%A0,%3) CR_TAB
+ AS2 (eor,%B0,%A0));
+ }
+ break; /* optimize_size ? 6 : 8 */
- case 2:
- *len = 4;
- return (AS1 (lsl,%A0) CR_TAB
- AS1 (rol,%B0) CR_TAB
- AS1 (lsl,%A0) CR_TAB
- AS1 (rol,%B0));
+ case 5:
+ if (optimize_size)
+ break; /* scratch ? 5 : 6 */
+ if (ldi_ok)
+ {
+ *len = 8;
+ return (AS1 (lsl,%A0) CR_TAB
+ AS1 (rol,%B0) CR_TAB
+ AS1 (swap,%A0) CR_TAB
+ AS1 (swap,%B0) CR_TAB
+ AS2 (andi,%B0,0xf0) CR_TAB
+ AS2 (eor,%B0,%A0) CR_TAB
+ AS2 (andi,%A0,0xf0) CR_TAB
+ AS2 (eor,%B0,%A0));
+ }
+ if (scratch)
+ {
+ *len = 9;
+ return (AS1 (lsl,%A0) CR_TAB
+ AS1 (rol,%B0) CR_TAB
+ AS1 (swap,%A0) CR_TAB
+ AS1 (swap,%B0) CR_TAB
+ AS2 (ldi,%3,0xf0) CR_TAB
+ AS2 (and,%B0,%3) CR_TAB
+ AS2 (eor,%B0,%A0) CR_TAB
+ AS2 (and,%A0,%3) CR_TAB
+ AS2 (eor,%B0,%A0));
+ }
+ break; /* 10 */
+
+ case 6:
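+ /* Shift by 6: shift {%B0,%A0,__tmp_reg__} right by 2 as one
+    24-bit value, then pick the result up one byte higher
+    (8 - 2 = 6 bits to the left in total).  */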
+ if (optimize_size)
+ break; /* scratch ? 5 : 6 */
+ *len = 9;
+ return (AS1 (clr,__tmp_reg__) CR_TAB
+ AS1 (lsr,%B0) CR_TAB
+ AS1 (ror,%A0) CR_TAB
+ AS1 (ror,__tmp_reg__) CR_TAB
+ AS1 (lsr,%B0) CR_TAB
+ AS1 (ror,%A0) CR_TAB
+ AS1 (ror,__tmp_reg__) CR_TAB
+ AS2 (mov,%B0,%A0) CR_TAB
+ AS2 (mov,%A0,__tmp_reg__));
case 7:
*len = 5;
AS1 (lsl,%B0));
case 12:
- if (test_hard_reg_class (LD_REGS, operands[0]))
+ if (ldi_ok)
{
*len = 4;
return (AS2 (mov,%B0,%A0) CR_TAB
AS1 (swap,%B0) CR_TAB
AS2 (andi,%B0,0xf0));
}
- /* %3 is a scratch register from class LD_REGS */
- *len = 5;
+ if (scratch)
+ {
+ *len = 5;
+ return (AS2 (mov,%B0,%A0) CR_TAB
+ AS1 (clr,%A0) CR_TAB
+ AS1 (swap,%B0) CR_TAB
+ AS2 (ldi,%3,0xf0) CR_TAB
+ AS2 (and,%B0,%3));
+ }
+ *len = 6;
return (AS2 (mov,%B0,%A0) CR_TAB
AS1 (clr,%A0) CR_TAB
- AS1 (swap,%B0) CR_TAB
- AS2 (ldi,%3,0xf0) CR_TAB
- AS2 (and,%B0,%3));
+ AS1 (lsl,%B0) CR_TAB
+ AS1 (lsl,%B0) CR_TAB
+ AS1 (lsl,%B0) CR_TAB
+ AS1 (lsl,%B0));
case 13:
- if (test_hard_reg_class (LD_REGS, operands[0]))
+ if (ldi_ok)
{
*len = 5;
return (AS2 (mov,%B0,%A0) CR_TAB
AS1 (lsl,%B0) CR_TAB
AS2 (andi,%B0,0xe0));
}
- if (AVR_ENHANCED)
+ if (AVR_ENHANCED && scratch)
{
*len = 5;
return (AS2 (ldi,%3,0x20) CR_TAB
AS1 (clr,%A0) CR_TAB
AS1 (clr,__zero_reg__));
}
- break;
+ if (optimize_size && scratch)
+ break; /* 5 */
+ if (scratch)
+ {
+ *len = 6;
+ return (AS2 (mov,%B0,%A0) CR_TAB
+ AS1 (clr,%A0) CR_TAB
+ AS1 (swap,%B0) CR_TAB
+ AS1 (lsl,%B0) CR_TAB
+ AS2 (ldi,%3,0xe0) CR_TAB
+ AS2 (and,%B0,%3));
+ }
+ if (AVR_ENHANCED)
+ {
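+ /* No free upper register: build the multiplier 0x20 in r1
+    (__zero_reg__) with set/bld, and clear r1 again afterwards.  */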
+ *len = 6;
+ return ("set" CR_TAB
+ AS2 (bld,r1,5) CR_TAB
+ AS2 (mul,%A0,r1) CR_TAB
+ AS2 (mov,%B0,r0) CR_TAB
+ AS1 (clr,%A0) CR_TAB
+ AS1 (clr,__zero_reg__));
+ }
+ *len = 7;
+ return (AS2 (mov,%B0,%A0) CR_TAB
+ AS1 (clr,%A0) CR_TAB
+ AS1 (lsl,%B0) CR_TAB
+ AS1 (lsl,%B0) CR_TAB
+ AS1 (lsl,%B0) CR_TAB
+ AS1 (lsl,%B0) CR_TAB
+ AS1 (lsl,%B0));
case 14:
- if (AVR_ENHANCED)
+ if (AVR_ENHANCED && ldi_ok)
+ {
+ *len = 5;
+ return (AS2 (ldi,%B0,0x40) CR_TAB
+ AS2 (mul,%A0,%B0) CR_TAB
+ AS2 (mov,%B0,r0) CR_TAB
+ AS1 (clr,%A0) CR_TAB
+ AS1 (clr,__zero_reg__));
+ }
+ if (AVR_ENHANCED && scratch)
{
*len = 5;
return (AS2 (ldi,%3,0x40) CR_TAB
AS1 (clr,%A0) CR_TAB
AS1 (clr,__zero_reg__));
}
- break;
+ if (optimize_size && ldi_ok)
+ {
+ *len = 5;
+ return (AS2 (mov,%B0,%A0) CR_TAB
+ AS2 (ldi,%A0,6) "\n1:\t"
+ AS1 (lsl,%B0) CR_TAB
+ AS1 (dec,%A0) CR_TAB
+ AS1 (brne,1b));
+ }
+ if (optimize_size && scratch)
+ break; /* 5 */
+ *len = 6;
+ return (AS1 (clr,%B0) CR_TAB
+ AS1 (lsr,%A0) CR_TAB
+ AS1 (ror,%B0) CR_TAB
+ AS1 (lsr,%A0) CR_TAB
+ AS1 (ror,%B0) CR_TAB
+ AS1 (clr,%A0));
case 15:
*len = 4;
}
len = t;
}
- if (len)
- *len = 4;
- out_shift_with_cnt (AS1 (lsl,%0) CR_TAB
- AS1 (rol,%B0),
- insn, operands, len);
+ out_shift_with_cnt ((AS1 (lsl,%A0) CR_TAB
+ AS1 (rol,%B0)),
+ insn, operands, len, 2);
return "";
}
if (GET_CODE (operands[2]) == CONST_INT)
{
int k;
- int *t=len;
+ int *t = len;
if (!len)
len = &k;
switch (INTVAL (operands[2]))
{
- case 1:
- *len = 4;
- return (AS1 (lsl,%A0) CR_TAB
- AS1 (rol,%B0) CR_TAB
- AS1 (rol,%C0) CR_TAB
- AS1 (rol,%D0));
-
- case 2:
- /* Loop is one word smaller, but slower and needs a register. */
- *len = 8;
- return (AS1 (lsl,%A0) CR_TAB
- AS1 (rol,%B0) CR_TAB
- AS1 (rol,%C0) CR_TAB
- AS1 (rol,%D0) CR_TAB
- AS1 (lsl,%A0) CR_TAB
- AS1 (rol,%B0) CR_TAB
- AS1 (rol,%C0) CR_TAB
- AS1 (rol,%D0));
-
case 8:
{
int reg0 = true_regnum (operands[0]);
}
len = t;
}
- if (len)
- *len = 6;
- out_shift_with_cnt (AS1 (lsl,%0) CR_TAB
- AS1 (rol,%B0) CR_TAB
- AS1 (rol,%C0) CR_TAB
- AS1 (rol,%D0),
- insn, operands, len);
+ out_shift_with_cnt ((AS1 (lsl,%A0) CR_TAB
+ AS1 (rol,%B0) CR_TAB
+ AS1 (rol,%C0) CR_TAB
+ AS1 (rol,%D0)),
+ insn, operands, len, 4);
return "";
}
else if (CONSTANT_P (operands[2]))
fatal_insn ("Internal compiler bug.\nIncorrect shift:", insn);
- if (len)
- *len = 3;
out_shift_with_cnt (AS1 (asr,%0),
- insn, operands, len);
+ insn, operands, len, 1);
return "";
}
{
if (GET_CODE (operands[2]) == CONST_INT)
{
+ int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
+ int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
int k;
int *t = len;
switch (INTVAL (operands[2]))
{
- case 1:
- *len=2;
- return (AS1 (asr,%B0) CR_TAB
- AS1 (ror,%A0));
+ case 4:
+ case 5:
+ /* XXX try to optimize this too? */
+ break;
- case 2:
- *len=4;
- return (AS1 (asr,%B0) CR_TAB
- AS1 (ror,%A0) CR_TAB
- AS1 (asr,%B0) CR_TAB
- AS1 (ror,%A0));
+ case 6:
+ if (optimize_size)
+ break; /* scratch ? 5 : 6 */
+ *len = 8;
+ return (AS2 (mov,__tmp_reg__,%A0) CR_TAB
+ AS2 (mov,%A0,%B0) CR_TAB
+ AS1 (lsl,__tmp_reg__) CR_TAB
+ AS1 (rol,%A0) CR_TAB
+ AS2 (sbc,%B0,%B0) CR_TAB
+ AS1 (lsl,__tmp_reg__) CR_TAB
+ AS1 (rol,%A0) CR_TAB
+ AS1 (rol,%B0));
case 7:
*len = 4;
AS2 (sbc,%B0,%B0));
case 8:
- if (true_regnum (operands[0]) != true_regnum (operands[1]) + 1)
+ {
+ int reg0 = true_regnum (operands[0]);
+ int reg1 = true_regnum (operands[1]);
+
+ if (reg0 == reg1)
+ return *len = 3, (AS2 (mov,%A0,%B0) CR_TAB
+ AS1 (lsl,%B0) CR_TAB
+ AS2 (sbc,%B0,%B0));
+ else if (reg0 == reg1 + 1)
+ return *len = 3, (AS1 (clr,%B0) CR_TAB
+ AS2 (sbrc,%A0,7) CR_TAB
+ AS1 (dec,%B0));
+
return *len = 4, (AS2 (mov,%A0,%B1) CR_TAB
AS1 (clr,%B0) CR_TAB
AS2 (sbrc,%A0,7) CR_TAB
AS1 (dec,%B0));
- else
- return *len = 3, (AS1 (clr,%B0) CR_TAB
- AS2 (sbrc,%A0,7) CR_TAB
- AS1 (dec,%B0));
+ }
case 9:
*len = 4;
AS1 (asr,%A0));
case 11:
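+ /* On the enhanced core, "muls" of %B0 by 0x20 leaves %B0
+    arithmetically shifted right by 3 in r1 and the sign of the
+    product in carry, which "sbc %B0,%B0" extends into the high byte. */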
- if (AVR_ENHANCED && test_hard_reg_class (LD_REGS, operands[0]))
+ if (AVR_ENHANCED && ldi_ok)
{
*len = 5;
- return (AS2 (ldi,%3,0x20) CR_TAB
- AS2 (muls,%B0,%3) CR_TAB
- AS2 (mov,%A0,r1) CR_TAB
- AS2 (sbc,%B0,%B0) CR_TAB
+ return (AS2 (ldi,%A0,0x20) CR_TAB
+ AS2 (muls,%B0,%A0) CR_TAB
+ AS2 (mov,%A0,r1) CR_TAB
+ AS2 (sbc,%B0,%B0) CR_TAB
AS1 (clr,__zero_reg__));
}
- break;
+ if (optimize_size && scratch)
+ break; /* 5 */
+ *len = 6;
+ return (AS2 (mov,%A0,%B0) CR_TAB
+ AS1 (lsl,%B0) CR_TAB
+ AS2 (sbc,%B0,%B0) CR_TAB
+ AS1 (asr,%A0) CR_TAB
+ AS1 (asr,%A0) CR_TAB
+ AS1 (asr,%A0));
case 12:
- if (AVR_ENHANCED && test_hard_reg_class (LD_REGS, operands[0]))
+ if (AVR_ENHANCED && ldi_ok)
{
*len = 5;
- return (AS2 (ldi,%3,0x10) CR_TAB
- AS2 (muls,%B0,%3) CR_TAB
- AS2 (mov,%A0,r1) CR_TAB
- AS2 (sbc,%B0,%B0) CR_TAB
+ return (AS2 (ldi,%A0,0x10) CR_TAB
+ AS2 (muls,%B0,%A0) CR_TAB
+ AS2 (mov,%A0,r1) CR_TAB
+ AS2 (sbc,%B0,%B0) CR_TAB
AS1 (clr,__zero_reg__));
}
- break;
+ if (optimize_size && scratch)
+ break; /* 5 */
+ *len = 7;
+ return (AS2 (mov,%A0,%B0) CR_TAB
+ AS1 (lsl,%B0) CR_TAB
+ AS2 (sbc,%B0,%B0) CR_TAB
+ AS1 (asr,%A0) CR_TAB
+ AS1 (asr,%A0) CR_TAB
+ AS1 (asr,%A0) CR_TAB
+ AS1 (asr,%A0));
case 13:
- if (AVR_ENHANCED && test_hard_reg_class (LD_REGS, operands[0]))
+ if (AVR_ENHANCED && ldi_ok)
{
*len = 5;
- return (AS2 (ldi,%3,0x08) CR_TAB
- AS2 (muls,%B0,%3) CR_TAB
- AS2 (mov,%A0,r1) CR_TAB
- AS2 (sbc,%B0,%B0) CR_TAB
+ return (AS2 (ldi,%A0,0x08) CR_TAB
+ AS2 (muls,%B0,%A0) CR_TAB
+ AS2 (mov,%A0,r1) CR_TAB
+ AS2 (sbc,%B0,%B0) CR_TAB
AS1 (clr,__zero_reg__));
}
- break;
+ if (optimize_size)
+ break; /* scratch ? 5 : 7 */
+ *len = 8;
+ return (AS2 (mov,%A0,%B0) CR_TAB
+ AS1 (lsl,%B0) CR_TAB
+ AS2 (sbc,%B0,%B0) CR_TAB
+ AS1 (asr,%A0) CR_TAB
+ AS1 (asr,%A0) CR_TAB
+ AS1 (asr,%A0) CR_TAB
+ AS1 (asr,%A0) CR_TAB
+ AS1 (asr,%A0));
case 14:
*len = 5;
}
len = t;
}
- if (len)
- *len = 4;
- out_shift_with_cnt (AS1 (asr,%B0) CR_TAB
- AS1 (ror,%A0),
- insn, operands, len);
+ out_shift_with_cnt ((AS1 (asr,%B0) CR_TAB
+ AS1 (ror,%A0)),
+ insn, operands, len, 2);
return "";
}
switch (INTVAL (operands[2]))
{
- case 1:
- *len=4;
- return (AS1 (asr,%D0) CR_TAB
- AS1 (ror,%C0) CR_TAB
- AS1 (ror,%B0) CR_TAB
- AS1 (ror,%A0));
-
- case 2:
- /* Loop is one word smaller, but slower and needs a register. */
- *len = 8;
- return (AS1 (asr,%D0) CR_TAB
- AS1 (ror,%C0) CR_TAB
- AS1 (ror,%B0) CR_TAB
- AS1 (ror,%A0) CR_TAB
- AS1 (asr,%D0) CR_TAB
- AS1 (ror,%C0) CR_TAB
- AS1 (ror,%B0) CR_TAB
- AS1 (ror,%A0));
-
case 8:
{
int reg0 = true_regnum (operands[0]);
}
len = t;
}
- if (len)
- *len = 6;
- out_shift_with_cnt (AS1 (asr,%D0) CR_TAB
- AS1 (ror,%C0) CR_TAB
- AS1 (ror,%B0) CR_TAB
- AS1 (ror,%A0),
- insn, operands, len);
+ out_shift_with_cnt ((AS1 (asr,%D0) CR_TAB
+ AS1 (ror,%C0) CR_TAB
+ AS1 (ror,%B0) CR_TAB
+ AS1 (ror,%A0)),
+ insn, operands, len, 4);
return "";
}
else if (CONSTANT_P (operands[2]))
fatal_insn ("Internal compiler bug.\nIncorrect shift:", insn);
- if (len)
- *len = 3;
out_shift_with_cnt (AS1 (lsr,%0),
- insn, operands, len);
+ insn, operands, len, 1);
return "";
}
{
if (GET_CODE (operands[2]) == CONST_INT)
{
+ int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
+ int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
int k;
int *t = len;
-
+
if (!len)
len = &k;
switch (INTVAL (operands[2]))
{
- case 1:
- *len = 2;
- return (AS1 (lsr,%B0) CR_TAB
- AS1 (ror,%A0));
-
- case 2:
- *len = 4;
- return (AS1 (lsr,%B0) CR_TAB
- AS1 (ror,%A0) CR_TAB
- AS1 (lsr,%B0) CR_TAB
- AS1 (ror,%A0));
+ case 4:
+ if (optimize_size && scratch)
+ break; /* 5 */
+ if (ldi_ok)
+ {
+ *len = 6;
+ return (AS1 (swap,%B0) CR_TAB
+ AS1 (swap,%A0) CR_TAB
+ AS2 (andi,%A0,0x0f) CR_TAB
+ AS2 (eor,%A0,%B0) CR_TAB
+ AS2 (andi,%B0,0x0f) CR_TAB
+ AS2 (eor,%A0,%B0));
+ }
+ if (scratch)
+ {
+ *len = 7;
+ return (AS1 (swap,%B0) CR_TAB
+ AS1 (swap,%A0) CR_TAB
+ AS2 (ldi,%3,0x0f) CR_TAB
+ AS2 (and,%A0,%3) CR_TAB
+ AS2 (eor,%A0,%B0) CR_TAB
+ AS2 (and,%B0,%3) CR_TAB
+ AS2 (eor,%A0,%B0));
+ }
+ break; /* optimize_size ? 6 : 8 */
+
+ case 5:
+ if (optimize_size)
+ break; /* scratch ? 5 : 6 */
+ if (ldi_ok)
+ {
+ *len = 8;
+ return (AS1 (lsr,%B0) CR_TAB
+ AS1 (ror,%A0) CR_TAB
+ AS1 (swap,%B0) CR_TAB
+ AS1 (swap,%A0) CR_TAB
+ AS2 (andi,%A0,0x0f) CR_TAB
+ AS2 (eor,%A0,%B0) CR_TAB
+ AS2 (andi,%B0,0x0f) CR_TAB
+ AS2 (eor,%A0,%B0));
+ }
+ if (scratch)
+ {
+ *len = 9;
+ return (AS1 (lsr,%B0) CR_TAB
+ AS1 (ror,%A0) CR_TAB
+ AS1 (swap,%B0) CR_TAB
+ AS1 (swap,%A0) CR_TAB
+ AS2 (ldi,%3,0x0f) CR_TAB
+ AS2 (and,%A0,%3) CR_TAB
+ AS2 (eor,%A0,%B0) CR_TAB
+ AS2 (and,%B0,%3) CR_TAB
+ AS2 (eor,%A0,%B0));
+ }
+ break; /* 10 */
+
+ case 6:
+ if (optimize_size)
+ break; /* scratch ? 5 : 6 */
+ *len = 9;
+ return (AS1 (clr,__tmp_reg__) CR_TAB
+ AS1 (lsl,%A0) CR_TAB
+ AS1 (rol,%B0) CR_TAB
+ AS1 (rol,__tmp_reg__) CR_TAB
+ AS1 (lsl,%A0) CR_TAB
+ AS1 (rol,%B0) CR_TAB
+ AS1 (rol,__tmp_reg__) CR_TAB
+ AS2 (mov,%A0,%B0) CR_TAB
+ AS2 (mov,%B0,__tmp_reg__));
case 7:
*len = 5;
AS1 (lsr,%A0));
case 12:
- if (test_hard_reg_class (LD_REGS, operands[0]))
+ if (ldi_ok)
{
*len = 4;
return (AS2 (mov,%A0,%B0) CR_TAB
AS1 (swap,%A0) CR_TAB
AS2 (andi,%A0,0x0f));
}
- /* %3 is a scratch register from class LD_REGS */
- *len = 5;
+ if (scratch)
+ {
+ *len = 5;
+ return (AS2 (mov,%A0,%B0) CR_TAB
+ AS1 (clr,%B0) CR_TAB
+ AS1 (swap,%A0) CR_TAB
+ AS2 (ldi,%3,0x0f) CR_TAB
+ AS2 (and,%A0,%3));
+ }
+ *len = 6;
return (AS2 (mov,%A0,%B0) CR_TAB
AS1 (clr,%B0) CR_TAB
- AS1 (swap,%A0) CR_TAB
- AS2 (ldi,%3,0x0f) CR_TAB
- AS2 (and,%A0,%3));
+ AS1 (lsr,%A0) CR_TAB
+ AS1 (lsr,%A0) CR_TAB
+ AS1 (lsr,%A0) CR_TAB
+ AS1 (lsr,%A0));
case 13:
- if (test_hard_reg_class (LD_REGS, operands[0]))
+ if (ldi_ok)
{
*len = 5;
return (AS2 (mov,%A0,%B0) CR_TAB
AS1 (lsr,%A0) CR_TAB
AS2 (andi,%A0,0x07));
}
- if (AVR_ENHANCED)
+ if (AVR_ENHANCED && scratch)
{
*len = 5;
return (AS2 (ldi,%3,0x08) CR_TAB
AS1 (clr,%B0) CR_TAB
AS1 (clr,__zero_reg__));
}
- break;
+ if (optimize_size && scratch)
+ break; /* 5 */
+ if (scratch)
+ {
+ *len = 6;
+ return (AS2 (mov,%A0,%B0) CR_TAB
+ AS1 (clr,%B0) CR_TAB
+ AS1 (swap,%A0) CR_TAB
+ AS1 (lsr,%A0) CR_TAB
+ AS2 (ldi,%3,0x07) CR_TAB
+ AS2 (and,%A0,%3));
+ }
+ if (AVR_ENHANCED)
+ {
+ *len = 6;
+ return ("set" CR_TAB
+ AS2 (bld,r1,3) CR_TAB
+ AS2 (mul,%B0,r1) CR_TAB
+ AS2 (mov,%A0,r1) CR_TAB
+ AS1 (clr,%B0) CR_TAB
+ AS1 (clr,__zero_reg__));
+ }
+ *len = 7;
+ return (AS2 (mov,%A0,%B0) CR_TAB
+ AS1 (clr,%B0) CR_TAB
+ AS1 (lsr,%A0) CR_TAB
+ AS1 (lsr,%A0) CR_TAB
+ AS1 (lsr,%A0) CR_TAB
+ AS1 (lsr,%A0) CR_TAB
+ AS1 (lsr,%A0));
case 14:
- if (AVR_ENHANCED)
+ if (AVR_ENHANCED && ldi_ok)
+ {
+ *len = 5;
+ return (AS2 (ldi,%A0,0x04) CR_TAB
+ AS2 (mul,%B0,%A0) CR_TAB
+ AS2 (mov,%A0,r1) CR_TAB
+ AS1 (clr,%B0) CR_TAB
+ AS1 (clr,__zero_reg__));
+ }
+ if (AVR_ENHANCED && scratch)
{
*len = 5;
return (AS2 (ldi,%3,0x04) CR_TAB
AS1 (clr,%B0) CR_TAB
AS1 (clr,__zero_reg__));
}
- break;
+ if (optimize_size && ldi_ok)
+ {
+ *len = 5;
+ return (AS2 (mov,%A0,%B0) CR_TAB
+ AS2 (ldi,%B0,6) "\n1:\t"
+ AS1 (lsr,%A0) CR_TAB
+ AS1 (dec,%B0) CR_TAB
+ AS1 (brne,1b));
+ }
+ if (optimize_size && scratch)
+ break; /* 5 */
+ *len = 6;
+ return (AS1 (clr,%A0) CR_TAB
+ AS1 (lsl,%B0) CR_TAB
+ AS1 (rol,%A0) CR_TAB
+ AS1 (lsl,%B0) CR_TAB
+ AS1 (rol,%A0) CR_TAB
+ AS1 (clr,%B0));
case 15:
*len = 4;
- return (AS1 (lsl,%B0) CR_TAB
- AS2 (sbc,%A0,%A0) CR_TAB
- AS1 (neg,%A0) CR_TAB
+ return (AS1 (clr,%A0) CR_TAB
+ AS1 (lsl,%B0) CR_TAB
+ AS1 (rol,%A0) CR_TAB
AS1 (clr,%B0));
}
len = t;
}
- if (len)
- *len = 4;
- out_shift_with_cnt (AS1 (lsr,%B0) CR_TAB
- AS1 (ror,%A0),
- insn, operands, len);
+ out_shift_with_cnt ((AS1 (lsr,%B0) CR_TAB
+ AS1 (ror,%A0)),
+ insn, operands, len, 2);
return "";
}
switch (INTVAL (operands[2]))
{
- case 1:
- *len = 4;
- return (AS1 (lsr,%D0) CR_TAB
- AS1 (ror,%C0) CR_TAB
- AS1 (ror,%B0) CR_TAB
- AS1 (ror,%A0));
-
- case 2:
- /* Loop is one word smaller, but slower and needs a register. */
- *len = 8;
- return (AS1 (lsr,%D0) CR_TAB
- AS1 (ror,%C0) CR_TAB
- AS1 (ror,%B0) CR_TAB
- AS1 (ror,%A0) CR_TAB
- AS1 (lsr,%D0) CR_TAB
- AS1 (ror,%C0) CR_TAB
- AS1 (ror,%B0) CR_TAB
- AS1 (ror,%A0));
-
case 8:
{
int reg0 = true_regnum (operands[0]);
}
len = t;
}
- if (len)
- *len = 6;
- out_shift_with_cnt (AS1 (lsr,%D0) CR_TAB
- AS1 (ror,%C0) CR_TAB
- AS1 (ror,%B0) CR_TAB
- AS1 (ror,%A0),
- insn, operands, len);
+ out_shift_with_cnt ((AS1 (lsr,%D0) CR_TAB
+ AS1 (ror,%C0) CR_TAB
+ AS1 (ror,%B0) CR_TAB
+ AS1 (ror,%A0)),
+ insn, operands, len, 4);
return "";
}
}
return "";
}
+
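+/* Output a single "bld" instruction for bit BIT_NR of a multi-byte
+   operand: bit_nr / 8 selects the byte letter (%A0 .. %D0) and
+   bit_nr % 8 the bit within that byte.  */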
+void
+avr_output_bld (operands, bit_nr)
+ rtx operands[];
+ int bit_nr;
+{
+ static char s[] = "bld %A0,0";
+
+ s[5] = 'A' + (bit_nr >> 3);
+ s[8] = '0' + (bit_nr & 7);
+ output_asm_insn (s, operands);
+}
+
[(set (match_operand:QI 0 "register_operand" "=l")
(match_operand:QI 1 "immediate_operand" "i"))
(clobber (match_operand:QI 2 "register_operand" "=&d"))]
- ""
+ "reload_completed"
"ldi %2,lo8(%1)
mov %0,%2"
[(set_attr "length" "2")
[(set (match_operand:HI 0 "register_operand" "=r")
(match_operand:HI 1 "immediate_operand" "i"))
(clobber (match_operand:QI 2 "register_operand" "=&d"))]
- ""
+ "reload_completed"
"* return output_reload_inhi (insn, operands, NULL);"
[(set_attr "length" "4")
(set_attr "cc" "none")])
[(set (match_operand:SI 0 "register_operand" "=r")
(match_operand:SI 1 "immediate_operand" "i"))
(clobber (match_operand:QI 2 "register_operand" "=&d"))]
- ""
+ "reload_completed"
"* return output_reload_insisf (insn, operands, NULL);"
[(set_attr "length" "8")
(set_attr "cc" "none")])
;; arithmetic shift left
(define_insn "ashlqi3"
- [(set (match_operand:QI 0 "register_operand" "=r,!d,r,r")
- (ashift:QI (match_operand:QI 1 "register_operand" "0,0,0,0")
- (match_operand:QI 2 "general_operand" "r,n,n,Qm")))]
+ [(set (match_operand:QI 0 "register_operand" "=r,r,r,!d,r,r")
+ (ashift:QI (match_operand:QI 1 "register_operand" "0,0,0,0,0,0")
+ (match_operand:QI 2 "general_operand" "r,P,K,n,n,Qm")))]
""
"* return ashlqi3_out (insn, operands, NULL);"
- [(set_attr "length" "5,4,6,7")
- (set_attr "cc" "clobber,set_czn,set_czn,clobber")])
+ [(set_attr "length" "5,1,2,4,6,9")
+ (set_attr "cc" "clobber,set_czn,set_czn,set_czn,set_czn,clobber")])
(define_insn "ashlhi3"
[(set (match_operand:HI 0 "register_operand" "=r,r,r,r,r,r")
(ashift:HI (match_operand:HI 1 "register_operand" "0,0,r,0,0,0")
- (match_operand:QI 2 "general_operand" "r,P,O,K,i,Qm")))
- (clobber (match_scratch:QI 3 "=X,X,X,X,&d,X"))]
+ (match_operand:QI 2 "general_operand" "r,P,O,K,n,Qm")))]
""
"* return ashlhi3_out (insn, operands, NULL);"
- [(set_attr "length" "7,2,2,4,5,8")
+ [(set_attr "length" "6,2,2,4,10,10")
(set_attr "cc" "clobber,set_n,clobber,set_n,clobber,clobber")])
(define_insn "ashlsi3"
[(set (match_operand:SI 0 "register_operand" "=r,r,r,r,r,r")
(ashift:SI (match_operand:SI 1 "register_operand" "0,0,r,0,0,0")
- (match_operand:QI 2 "general_operand" "r,P,O,K,i,Qm")))
- (clobber (match_scratch:QI 3 "=X,X,X,X,&d,X"))]
+ (match_operand:QI 2 "general_operand" "r,P,O,K,n,Qm")))]
""
"* return ashlsi3_out (insn, operands, NULL);"
- [(set_attr "length" "9,4,4,8,7,10")
+ [(set_attr "length" "8,4,4,8,10,12")
(set_attr "cc" "clobber,set_n,clobber,set_n,clobber,clobber")])
+;; Optimize if a scratch register from LD_REGS happens to be available.
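+;; The peephole2 below matches a constant shift after reload when a "d"
+;; register is free, and rewrites it as a parallel with a clobber so the
+;; "*...3_const" pattern can use %3 as a scratch register.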
+
+(define_peephole2
+ [(match_scratch:QI 3 "d")
+ (set (match_operand:HI 0 "register_operand" "")
+ (ashift:HI (match_operand:HI 1 "register_operand" "")
+ (match_operand:QI 2 "const_int_operand" "")))]
+ ""
+ [(parallel [(set (match_dup 0) (ashift:HI (match_dup 1) (match_dup 2)))
+ (clobber (match_dup 3))])]
+ "")
+
+(define_insn "*ashlhi3_const"
+ [(set (match_operand:HI 0 "register_operand" "=r,r,r,r")
+ (ashift:HI (match_operand:HI 1 "register_operand" "0,r,0,0")
+ (match_operand:QI 2 "const_int_operand" "P,O,K,n")))
+ (clobber (match_scratch:QI 3 "=X,X,X,&d"))]
+ "reload_completed"
+ "* return ashlhi3_out (insn, operands, NULL);"
+ [(set_attr "length" "2,2,4,10")
+ (set_attr "cc" "set_n,clobber,set_n,clobber")])
+
+(define_peephole2
+ [(match_scratch:QI 3 "d")
+ (set (match_operand:SI 0 "register_operand" "")
+ (ashift:SI (match_operand:SI 1 "register_operand" "")
+ (match_operand:QI 2 "const_int_operand" "")))]
+ ""
+ [(parallel [(set (match_dup 0) (ashift:SI (match_dup 1) (match_dup 2)))
+ (clobber (match_dup 3))])]
+ "")
+
+(define_insn "*ashlsi3_const"
+ [(set (match_operand:SI 0 "register_operand" "=r,r,r")
+ (ashift:SI (match_operand:SI 1 "register_operand" "0,r,0")
+ (match_operand:QI 2 "const_int_operand" "P,O,n")))
+ (clobber (match_scratch:QI 3 "=X,X,&d"))]
+ "reload_completed"
+ "* return ashlsi3_out (insn, operands, NULL);"
+ [(set_attr "length" "4,4,10")
+ (set_attr "cc" "set_n,clobber,clobber")])
+
;; >> >> >> >> >> >> >> >> >> >> >> >> >> >> >> >> >> >> >> >> >> >> >> >> >>
;; arithmetic shift right
(match_operand:QI 2 "general_operand" "r,P,K,n,Qm")))]
""
"* return ashrqi3_out (insn, operands, NULL);"
- [(set_attr "length" "5,1,2,5,7")
- (set_attr "cc" "clobber,set_zn,set_zn,clobber,clobber")])
+ [(set_attr "length" "5,1,2,5,9")
+ (set_attr "cc" "clobber,clobber,clobber,clobber,clobber")])
(define_insn "ashrhi3"
[(set (match_operand:HI 0 "register_operand" "=r,r,r,r,r,r")
- (ashiftrt:HI (match_operand:HI 1 "register_operand" "0,0,0,r,0,0")
- (match_operand:QI 2 "general_operand" "r,P,K,O,i,Qm")))
- (clobber (match_scratch:QI 3 "=X,X,X,X,&d,X"))]
+ (ashiftrt:HI (match_operand:HI 1 "register_operand" "0,0,r,0,0,0")
+ (match_operand:QI 2 "general_operand" "r,P,O,K,n,Qm")))]
""
"* return ashrhi3_out (insn, operands, NULL);"
- [(set_attr "length" "7,2,4,4,5,8")
- (set_attr "cc" "clobber,clobber,clobber,clobber,clobber,clobber")])
+ [(set_attr "length" "6,2,4,4,10,10")
+ (set_attr "cc" "clobber,clobber,set_n,clobber,clobber,clobber")])
(define_insn "ashrsi3"
[(set (match_operand:SI 0 "register_operand" "=r,r,r,r,r,r")
(ashiftrt:SI (match_operand:SI 1 "register_operand" "0,0,r,0,0,0")
- (match_operand:QI 2 "general_operand" "r,P,O,K,i,Qm")))
- (clobber (match_scratch:QI 3 "=X,X,X,X,&d,X"))]
+ (match_operand:QI 2 "general_operand" "r,P,O,K,n,Qm")))]
""
"* return ashrsi3_out (insn, operands, NULL);"
- [(set_attr "length" "9,4,6,8,7,10")
- (set_attr "cc" "clobber,clobber,clobber,clobber,clobber,clobber")])
+ [(set_attr "length" "8,4,6,8,10,12")
+ (set_attr "cc" "clobber,clobber,set_n,clobber,clobber,clobber")])
+
+;; Optimize if a scratch register from LD_REGS happens to be available.
+
+(define_peephole2
+ [(match_scratch:QI 3 "d")
+ (set (match_operand:HI 0 "register_operand" "")
+ (ashiftrt:HI (match_operand:HI 1 "register_operand" "")
+ (match_operand:QI 2 "const_int_operand" "")))]
+ ""
+ [(parallel [(set (match_dup 0) (ashiftrt:HI (match_dup 1) (match_dup 2)))
+ (clobber (match_dup 3))])]
+ "")
+
+(define_insn "*ashrhi3_const"
+ [(set (match_operand:HI 0 "register_operand" "=r,r,r,r")
+ (ashiftrt:HI (match_operand:HI 1 "register_operand" "0,r,0,0")
+ (match_operand:QI 2 "const_int_operand" "P,O,K,n")))
+ (clobber (match_scratch:QI 3 "=X,X,X,&d"))]
+ "reload_completed"
+ "* return ashrhi3_out (insn, operands, NULL);"
+ [(set_attr "length" "2,4,4,10")
+ (set_attr "cc" "clobber,set_n,clobber,clobber")])
+
+(define_peephole2
+ [(match_scratch:QI 3 "d")
+ (set (match_operand:SI 0 "register_operand" "")
+ (ashiftrt:SI (match_operand:SI 1 "register_operand" "")
+ (match_operand:QI 2 "const_int_operand" "")))]
+ ""
+ [(parallel [(set (match_dup 0) (ashiftrt:SI (match_dup 1) (match_dup 2)))
+ (clobber (match_dup 3))])]
+ "")
+
+(define_insn "*ashrsi3_const"
+ [(set (match_operand:SI 0 "register_operand" "=r,r,r")
+ (ashiftrt:SI (match_operand:SI 1 "register_operand" "0,r,0")
+ (match_operand:QI 2 "const_int_operand" "P,O,n")))
+ (clobber (match_scratch:QI 3 "=X,X,&d"))]
+ "reload_completed"
+ "* return ashrsi3_out (insn, operands, NULL);"
+ [(set_attr "length" "4,4,10")
+ (set_attr "cc" "clobber,set_n,clobber")])
;; >> >> >> >> >> >> >> >> >> >> >> >> >> >> >> >> >> >> >> >> >> >> >> >> >>
;; logical shift right
(define_insn "lshrqi3"
- [(set (match_operand:QI 0 "register_operand" "=r,d,r,r")
- (lshiftrt:QI (match_operand:QI 1 "register_operand" "0,0,0,0")
- (match_operand:QI 2 "general_operand" "r,n,n,Qm")))]
+ [(set (match_operand:QI 0 "register_operand" "=r,r,r,!d,r,r")
+ (lshiftrt:QI (match_operand:QI 1 "register_operand" "0,0,0,0,0,0")
+ (match_operand:QI 2 "general_operand" "r,P,K,n,n,Qm")))]
""
"* return lshrqi3_out (insn, operands, NULL);"
- [(set_attr "length" "6,4,6,7")
- (set_attr "cc" "clobber,set_czn,set_czn,clobber")])
+ [(set_attr "length" "5,1,2,4,6,9")
+ (set_attr "cc" "clobber,set_czn,set_czn,set_czn,set_czn,clobber")])
(define_insn "lshrhi3"
[(set (match_operand:HI 0 "register_operand" "=r,r,r,r,r,r")
- (lshiftrt:HI (match_operand:HI 1 "register_operand" "0,0,0,r,0,0")
- (match_operand:QI 2 "general_operand" "r,P,K,O,i,Qm")))
- (clobber (match_scratch:QI 3 "=X,X,X,X,&d,X"))]
+ (lshiftrt:HI (match_operand:HI 1 "register_operand" "0,0,r,0,0,0")
+ (match_operand:QI 2 "general_operand" "r,P,O,K,n,Qm")))]
""
"* return lshrhi3_out (insn, operands, NULL);"
- [(set_attr "length" "7,2,4,2,5,8")
+ [(set_attr "length" "6,2,2,4,10,10")
(set_attr "cc" "clobber,clobber,clobber,clobber,clobber,clobber")])
(define_insn "lshrsi3"
[(set (match_operand:SI 0 "register_operand" "=r,r,r,r,r,r")
(lshiftrt:SI (match_operand:SI 1 "register_operand" "0,0,r,0,0,0")
- (match_operand:QI 2 "general_operand" "r,P,O,K,i,Qm")))
- (clobber (match_scratch:QI 3 "=X,X,X,X,&d,X"))]
+ (match_operand:QI 2 "general_operand" "r,P,O,K,n,Qm")))]
""
"* return lshrsi3_out (insn, operands, NULL);"
- [(set_attr "length" "9,4,4,8,7,10")
+ [(set_attr "length" "8,4,4,8,10,12")
(set_attr "cc" "clobber,clobber,clobber,clobber,clobber,clobber")])
+;; Optimize if a scratch register from LD_REGS happens to be available.
+
+(define_peephole2
+ [(match_scratch:QI 3 "d")
+ (set (match_operand:HI 0 "register_operand" "")
+ (lshiftrt:HI (match_operand:HI 1 "register_operand" "")
+ (match_operand:QI 2 "const_int_operand" "")))]
+ ""
+ [(parallel [(set (match_dup 0) (lshiftrt:HI (match_dup 1) (match_dup 2)))
+ (clobber (match_dup 3))])]
+ "")
+
+(define_insn "*lshrhi3_const"
+ [(set (match_operand:HI 0 "register_operand" "=r,r,r,r")
+ (lshiftrt:HI (match_operand:HI 1 "register_operand" "0,r,0,0")
+ (match_operand:QI 2 "const_int_operand" "P,O,K,n")))
+ (clobber (match_scratch:QI 3 "=X,X,X,&d"))]
+ "reload_completed"
+ "* return lshrhi3_out (insn, operands, NULL);"
+ [(set_attr "length" "2,2,4,10")
+ (set_attr "cc" "clobber,clobber,clobber,clobber")])
+
+(define_peephole2
+ [(match_scratch:QI 3 "d")
+ (set (match_operand:SI 0 "register_operand" "")
+ (lshiftrt:SI (match_operand:SI 1 "register_operand" "")
+ (match_operand:QI 2 "const_int_operand" "")))]
+ ""
+ [(parallel [(set (match_dup 0) (lshiftrt:SI (match_dup 1) (match_dup 2)))
+ (clobber (match_dup 3))])]
+ "")
+
+(define_insn "*lshrsi3_const"
+ [(set (match_operand:SI 0 "register_operand" "=r,r,r")
+ (lshiftrt:SI (match_operand:SI 1 "register_operand" "0,r,0")
+ (match_operand:QI 2 "const_int_operand" "P,O,n")))
+ (clobber (match_scratch:QI 3 "=X,X,&d"))]
+ "reload_completed"
+ "* return lshrsi3_out (insn, operands, NULL);"
+ [(set_attr "length" "4,4,10")
+ (set_attr "cc" "clobber,clobber,clobber")])
+
;; abs(x) abs(x) abs(x) abs(x) abs(x) abs(x) abs(x) abs(x) abs(x) abs(x) abs(x)
;; abs