let Inst{7} = fc{0};
let Inst{4} = 0b0;
- let Defs = [VPR, P0];
+ let Defs = [VPR];
}
class MVE_VPTt1<string suffix, bits<2> size, dag iops>
// VPT, vector-vs-vector form, "plain" integer condition (pred_basic_i):
// encoded with Inst{12}=0 and Inst{0}=0. The $fc operand is moved after the
// register operands so the fold code can append it last when building the MI.
class MVE_VPTt1i<string suffix, bits<2> size>
: MVE_VPTt1<suffix, size,
- (ins vpt_mask:$Mk, pred_basic_i:$fc, MQPR:$Qn, MQPR:$Qm)> {
+ (ins vpt_mask:$Mk, MQPR:$Qn, MQPR:$Qm, pred_basic_i:$fc)> {
let Inst{12} = 0b0;
let Inst{0} = 0b0;
}
// VPT, vector-vs-vector form, unsigned condition (pred_basic_u):
// encoded with Inst{12}=0 and Inst{0}=1. $fc follows the register operands.
class MVE_VPTt1u<string suffix, bits<2> size>
: MVE_VPTt1<suffix, size,
- (ins vpt_mask:$Mk, pred_basic_u:$fc, MQPR:$Qn, MQPR:$Qm)> {
+ (ins vpt_mask:$Mk, MQPR:$Qn, MQPR:$Qm, pred_basic_u:$fc)> {
let Inst{12} = 0b0;
let Inst{0} = 0b1;
}
// VPT, vector-vs-vector form, signed condition (pred_basic_s):
// encoded with Inst{12}=1. $fc follows the register operands.
class MVE_VPTt1s<string suffix, bits<2> size>
: MVE_VPTt1<suffix, size,
- (ins vpt_mask:$Mk, pred_basic_s:$fc, MQPR:$Qn, MQPR:$Qm)> {
+ (ins vpt_mask:$Mk, MQPR:$Qn, MQPR:$Qm, pred_basic_s:$fc)> {
let Inst{12} = 0b1;
}
// VPT, vector-vs-scalar (GPRwithZR) form, "plain" integer condition:
// encoded with Inst{12}=0 and Inst{5}=0. $fc follows the register operands.
class MVE_VPTt2i<string suffix, bits<2> size>
: MVE_VPTt2<suffix, size,
- (ins vpt_mask:$Mk, pred_basic_i:$fc, MQPR:$Qn, GPRwithZR:$Rm)> {
+ (ins vpt_mask:$Mk, MQPR:$Qn, GPRwithZR:$Rm, pred_basic_i:$fc)> {
let Inst{12} = 0b0;
let Inst{5} = 0b0;
}
// VPT, vector-vs-scalar (GPRwithZR) form, unsigned condition:
// encoded with Inst{12}=0 and Inst{5}=1. $fc follows the register operands.
class MVE_VPTt2u<string suffix, bits<2> size>
: MVE_VPTt2<suffix, size,
- (ins vpt_mask:$Mk, pred_basic_u:$fc, MQPR:$Qn, GPRwithZR:$Rm)> {
+ (ins vpt_mask:$Mk, MQPR:$Qn, GPRwithZR:$Rm, pred_basic_u:$fc)> {
let Inst{12} = 0b0;
let Inst{5} = 0b1;
}
// VPT, vector-vs-scalar (GPRwithZR) form, signed condition:
// encoded with Inst{12}=1. $fc follows the register operands.
class MVE_VPTt2s<string suffix, bits<2> size>
: MVE_VPTt2<suffix, size,
- (ins vpt_mask:$Mk, pred_basic_s:$fc, MQPR:$Qn, GPRwithZR:$Rm)> {
+ (ins vpt_mask:$Mk, MQPR:$Qn, GPRwithZR:$Rm, pred_basic_s:$fc)> {
let Inst{12} = 0b1;
}
let Inst{7} = fc{0};
let Inst{4} = 0b0;
- let Defs = [P0];
+ let Defs = [VPR];
let Predicates = [HasMVEFloat];
}
class MVE_VPTft1<string suffix, bit size>
- : MVE_VPTf<suffix, size, (ins vpt_mask:$Mk, pred_basic_fp:$fc, MQPR:$Qn, MQPR:$Qm),
+ : MVE_VPTf<suffix, size, (ins vpt_mask:$Mk, MQPR:$Qn, MQPR:$Qm, pred_basic_fp:$fc),
"$fc, $Qn, $Qm"> {
bits<3> fc;
bits<4> Qm;
def MVE_VPTv8f16 : MVE_VPTft1<"f16", 0b1>;
class MVE_VPTft2<string suffix, bit size>
- : MVE_VPTf<suffix, size, (ins vpt_mask:$Mk, pred_basic_fp:$fc, MQPR:$Qn, GPRwithZR:$Rm),
+ : MVE_VPTf<suffix, size, (ins vpt_mask:$Mk, MQPR:$Qn, GPRwithZR:$Rm, pred_basic_fp:$fc),
"$fc, $Qn, $Rm"> {
bits<3> fc;
bits<4> Rm;
let Unpredictable{7} = 0b1;
let Unpredictable{5} = 0b1;
- let Defs = [P0];
+ let Defs = [VPR];
}
def MVE_VPSEL : MVE_p<(outs MQPR:$Qd), (ins MQPR:$Qn, MQPR:$Qm), NoItinerary,
TETE = 15 // 0b1111
};
+// Map an MVE VCMP opcode to the corresponding VPT opcode, so a VCMP whose
+// result feeds a VPST can be folded into a single VPT instruction.
+// Returns 0 when Opcode is not a VCMP with a known VPT equivalent.
+unsigned VCMPOpcodeToVPT(unsigned Opcode) {
+ switch (Opcode) {
+ case ARM::MVE_VCMPf32:
+ return ARM::MVE_VPTv4f32;
+ case ARM::MVE_VCMPf16:
+ return ARM::MVE_VPTv8f16;
+ case ARM::MVE_VCMPi8:
+ return ARM::MVE_VPTv16i8;
+ case ARM::MVE_VCMPi16:
+ return ARM::MVE_VPTv8i16;
+ case ARM::MVE_VCMPi32:
+ return ARM::MVE_VPTv4i32;
+ case ARM::MVE_VCMPu8:
+ return ARM::MVE_VPTv16u8;
+ case ARM::MVE_VCMPu16:
+ return ARM::MVE_VPTv8u16;
+ case ARM::MVE_VCMPu32:
+ return ARM::MVE_VPTv4u32;
+ case ARM::MVE_VCMPs8:
+ return ARM::MVE_VPTv16s8;
+ case ARM::MVE_VCMPs16:
+ return ARM::MVE_VPTv8s16;
+ case ARM::MVE_VCMPs32:
+ return ARM::MVE_VPTv4s32;
+
+ // The 'r' variants compare a vector against a scalar held in a GPR
+ // (the TableGen t2/ft2 classes use GPRwithZR:$Rm rather than MQPR:$Qm).
+ case ARM::MVE_VCMPf32r:
+ return ARM::MVE_VPTv4f32r;
+ case ARM::MVE_VCMPf16r:
+ return ARM::MVE_VPTv8f16r;
+ case ARM::MVE_VCMPi8r:
+ return ARM::MVE_VPTv16i8r;
+ case ARM::MVE_VCMPi16r:
+ return ARM::MVE_VPTv8i16r;
+ case ARM::MVE_VCMPi32r:
+ return ARM::MVE_VPTv4i32r;
+ case ARM::MVE_VCMPu8r:
+ return ARM::MVE_VPTv16u8r;
+ case ARM::MVE_VCMPu16r:
+ return ARM::MVE_VPTv8u16r;
+ case ARM::MVE_VCMPu32r:
+ return ARM::MVE_VPTv4u32r;
+ case ARM::MVE_VCMPs8r:
+ return ARM::MVE_VPTv16s8r;
+ case ARM::MVE_VCMPs16r:
+ return ARM::MVE_VPTv8s16r;
+ case ARM::MVE_VCMPs32r:
+ return ARM::MVE_VPTv4s32r;
+
+ default:
+ return 0;
+ }
+}
+
+// Walk backwards from MI (the position where a VPST would be inserted)
+// looking for a VCMP whose VPR result could instead be produced by a VPT.
+// On success returns the VCMP and sets NewOpcode to the matching VPT opcode;
+// returns nullptr when no safe fold exists.
+MachineInstr *findVCMPToFoldIntoVPST(MachineBasicBlock::iterator MI,
+ const TargetRegisterInfo *TRI,
+ unsigned &NewOpcode) {
+ // Search backwards to the instruction that defines VPR. This may or may not
+ // be a VCMP; we check that after this loop. If we first hit an instruction
+ // that merely reads VPR, the loop stops on it and the opcode check below
+ // rejects it (so we return nullptr rather than fold across a VPR use).
+ MachineBasicBlock::iterator CmpMI = MI;
+ while (CmpMI != MI->getParent()->begin()) {
+ --CmpMI;
+ if (CmpMI->modifiesRegister(ARM::VPR, TRI))
+ break;
+ if (CmpMI->readsRegister(ARM::VPR, TRI))
+ break;
+ }
+
+ // CmpMI == MI only when MI is the first instruction of the block, i.e.
+ // there is nothing before it to fold.
+ if (CmpMI == MI)
+ return nullptr;
+ // Only a VCMP with a known VPT equivalent can be folded.
+ NewOpcode = VCMPOpcodeToVPT(CmpMI->getOpcode());
+ if (NewOpcode == 0)
+ return nullptr;
+
+ // Search forward from CmpMI to MI, checking if either source register of
+ // the VCMP was def'd in between: moving the compare down to MI's position
+ // would then read a clobbered value.
+ if (registerDefinedBetween(CmpMI->getOperand(1).getReg(), std::next(CmpMI),
+ MI, TRI))
+ return nullptr;
+ if (registerDefinedBetween(CmpMI->getOperand(2).getReg(), std::next(CmpMI),
+ MI, TRI))
+ return nullptr;
+ return &*CmpMI;
+}
+
bool MVEVPTBlock::InsertVPTBlocks(MachineBasicBlock &Block) {
bool Modified = false;
MachineBasicBlock::instr_iterator MBIter = Block.instr_begin();
++MBIter;
};
- // Create the new VPST
- MachineInstrBuilder MIBuilder =
- BuildMI(Block, MI, dl, TII->get(ARM::MVE_VPST));
+ unsigned BlockMask = 0;
switch (VPTInstCnt) {
case 1:
- MIBuilder.addImm(VPTMaskValue::T);
+ BlockMask = VPTMaskValue::T;
break;
case 2:
- MIBuilder.addImm(VPTMaskValue::TT);
+ BlockMask = VPTMaskValue::TT;
break;
case 3:
- MIBuilder.addImm(VPTMaskValue::TTT);
+ BlockMask = VPTMaskValue::TTT;
break;
case 4:
- MIBuilder.addImm(VPTMaskValue::TTTT);
+ BlockMask = VPTMaskValue::TTTT;
break;
default:
llvm_unreachable("Unexpected number of instruction in a VPT block");
};
+ // Search back for a VCMP that can be folded to create a VPT, or else create
+ // a VPST directly
+ MachineInstrBuilder MIBuilder;
+ unsigned NewOpcode;
+ MachineInstr *VCMP = findVCMPToFoldIntoVPST(MI, TRI, NewOpcode);
+ if (VCMP) {
+ LLVM_DEBUG(dbgs() << " folding VCMP into VPST: "; VCMP->dump());
+ MIBuilder = BuildMI(Block, MI, dl, TII->get(NewOpcode));
+ MIBuilder.addImm(BlockMask);
+ MIBuilder.add(VCMP->getOperand(1));
+ MIBuilder.add(VCMP->getOperand(2));
+ MIBuilder.add(VCMP->getOperand(3));
+ VCMP->eraseFromParent();
+ } else {
+ MIBuilder = BuildMI(Block, MI, dl, TII->get(ARM::MVE_VPST));
+ MIBuilder.addImm(BlockMask);
+ }
+
finalizeBundle(
Block, MachineBasicBlock::instr_iterator(MIBuilder.getInstr()), MBIter);
; CHECK-LABEL: foo_v4i32_v4i32:
; CHECK: @ %bb.0: @ %entry
; CHECK-NEXT: vldrw.u32 q0, [r1]
-; CHECK-NEXT: vcmp.s32 gt, q0, zr
-; CHECK-NEXT: vpstt
+; CHECK-NEXT: vptt.s32 gt, q0, zr
; CHECK-NEXT: vldrwt.u32 q0, [r2]
; CHECK-NEXT: vstrwt.32 q0, [r0]
; CHECK-NEXT: bx lr
; CHECK-LABEL: foo_v8i16_v8i16:
; CHECK: @ %bb.0: @ %entry
; CHECK-NEXT: vldrh.u16 q0, [r1]
-; CHECK-NEXT: vcmp.s16 gt, q0, zr
-; CHECK-NEXT: vpstt
+; CHECK-NEXT: vptt.s16 gt, q0, zr
; CHECK-NEXT: vldrht.u16 q0, [r2]
; CHECK-NEXT: vstrht.16 q0, [r0]
; CHECK-NEXT: bx lr
; CHECK-LABEL: foo_v16i8_v16i8:
; CHECK: @ %bb.0: @ %entry
; CHECK-NEXT: vldrb.u8 q0, [r1]
-; CHECK-NEXT: vcmp.s8 gt, q0, zr
-; CHECK-NEXT: vpstt
+; CHECK-NEXT: vptt.s8 gt, q0, zr
; CHECK-NEXT: vldrbt.u8 q0, [r2]
; CHECK-NEXT: vstrbt.8 q0, [r0]
; CHECK-NEXT: bx lr
; CHECK-NEXT: sub sp, #8
; CHECK-NEXT: vldrh.u16 q0, [r1]
; CHECK-NEXT: movs r3, #0
-; CHECK-NEXT: vcmp.s16 gt, q0, zr
-; CHECK-NEXT: vpst
+; CHECK-NEXT: vpt.s16 gt, q0, zr
; CHECK-NEXT: vldrht.u16 q0, [r2]
; CHECK-NEXT: vmrs r1, p0
; CHECK-NEXT: and r2, r1, #1
; CHECK-NEXT: sub sp, #4
; CHECK-NEXT: vldrw.u32 q0, [r1]
; CHECK-NEXT: movs r3, #0
-; CHECK-NEXT: vcmp.s32 gt, q0, zr
-; CHECK-NEXT: vpst
+; CHECK-NEXT: vpt.s32 gt, q0, zr
; CHECK-NEXT: vldrwt.u32 q0, [r2]
; CHECK-NEXT: vmrs r1, p0
; CHECK-NEXT: and r2, r1, #1
; CHECK-NEXT: sub sp, #4
; CHECK-NEXT: vldrw.u32 q0, [r1]
; CHECK-NEXT: movs r3, #0
-; CHECK-NEXT: vcmp.s32 gt, q0, zr
-; CHECK-NEXT: vpst
+; CHECK-NEXT: vpt.s32 gt, q0, zr
; CHECK-NEXT: vldrwt.u32 q0, [r2]
; CHECK-NEXT: vmrs r1, p0
; CHECK-NEXT: and r2, r1, #1
; CHECK-LABEL: foo_v4f32_v4f32:
; CHECK: @ %bb.0: @ %entry
; CHECK-NEXT: vldrw.u32 q0, [r1]
-; CHECK-NEXT: vcmp.s32 gt, q0, zr
-; CHECK-NEXT: vpstt
+; CHECK-NEXT: vptt.s32 gt, q0, zr
; CHECK-NEXT: vldrwt.u32 q0, [r2]
; CHECK-NEXT: vstrwt.32 q0, [r0]
; CHECK-NEXT: bx lr
; CHECK-LABEL: foo_v8f16_v8f16:
; CHECK: @ %bb.0: @ %entry
; CHECK-NEXT: vldrh.u16 q0, [r1]
-; CHECK-NEXT: vcmp.s16 gt, q0, zr
-; CHECK-NEXT: vpstt
+; CHECK-NEXT: vptt.s16 gt, q0, zr
; CHECK-NEXT: vldrht.u16 q0, [r2]
; CHECK-NEXT: vstrht.16 q0, [r0]
; CHECK-NEXT: bx lr
define arm_aapcs_vfpcc <4 x i32> @masked_v4i32_align4_zero(<4 x i32> *%dest, <4 x i32> %a) {
; CHECK-LE-LABEL: masked_v4i32_align4_zero:
; CHECK-LE: @ %bb.0: @ %entry
-; CHECK-LE-NEXT: vcmp.s32 gt, q0, zr
-; CHECK-LE-NEXT: vpst
+; CHECK-LE-NEXT: vpt.s32 gt, q0, zr
; CHECK-LE-NEXT: vldrwt.u32 q0, [r0]
; CHECK-LE-NEXT: bx lr
;
; CHECK-BE-LABEL: masked_v4i32_align4_zero:
; CHECK-BE: @ %bb.0: @ %entry
; CHECK-BE-NEXT: vrev64.32 q1, q0
-; CHECK-BE-NEXT: vcmp.s32 gt, q1, zr
-; CHECK-BE-NEXT: vpst
+; CHECK-BE-NEXT: vpt.s32 gt, q1, zr
; CHECK-BE-NEXT: vldrwt.u32 q1, [r0]
; CHECK-BE-NEXT: vrev64.32 q0, q1
; CHECK-BE-NEXT: bx lr
define arm_aapcs_vfpcc <4 x i32> @masked_v4i32_align4_undef(<4 x i32> *%dest, <4 x i32> %a) {
; CHECK-LE-LABEL: masked_v4i32_align4_undef:
; CHECK-LE: @ %bb.0: @ %entry
-; CHECK-LE-NEXT: vcmp.s32 gt, q0, zr
-; CHECK-LE-NEXT: vpst
+; CHECK-LE-NEXT: vpt.s32 gt, q0, zr
; CHECK-LE-NEXT: vldrwt.u32 q0, [r0]
; CHECK-LE-NEXT: bx lr
;
; CHECK-BE-LABEL: masked_v4i32_align4_undef:
; CHECK-BE: @ %bb.0: @ %entry
; CHECK-BE-NEXT: vrev64.32 q1, q0
-; CHECK-BE-NEXT: vcmp.s32 gt, q1, zr
-; CHECK-BE-NEXT: vpst
+; CHECK-BE-NEXT: vpt.s32 gt, q1, zr
; CHECK-BE-NEXT: vldrwt.u32 q1, [r0]
; CHECK-BE-NEXT: vrev64.32 q0, q1
; CHECK-BE-NEXT: bx lr
define arm_aapcs_vfpcc <4 x i32> @masked_v4i32_align1_undef(<4 x i32> *%dest, <4 x i32> %a) {
; CHECK-LE-LABEL: masked_v4i32_align1_undef:
; CHECK-LE: @ %bb.0: @ %entry
-; CHECK-LE-NEXT: vcmp.s32 gt, q0, zr
-; CHECK-LE-NEXT: vpst
+; CHECK-LE-NEXT: vpt.s32 gt, q0, zr
; CHECK-LE-NEXT: vldrbt.u8 q0, [r0]
; CHECK-LE-NEXT: bx lr
;
; CHECK-BE-LABEL: masked_v4i32_align1_undef:
; CHECK-BE: @ %bb.0: @ %entry
; CHECK-BE-NEXT: vrev64.32 q1, q0
-; CHECK-BE-NEXT: vcmp.s32 gt, q1, zr
-; CHECK-BE-NEXT: vpst
+; CHECK-BE-NEXT: vpt.s32 gt, q1, zr
; CHECK-BE-NEXT: vldrbt.u8 q0, [r0]
; CHECK-BE-NEXT: vrev32.8 q1, q0
; CHECK-BE-NEXT: vrev64.32 q0, q1
define arm_aapcs_vfpcc <4 x i32> @masked_v4i32_align4_other(<4 x i32> *%dest, <4 x i32> %a) {
; CHECK-LE-LABEL: masked_v4i32_align4_other:
; CHECK-LE: @ %bb.0: @ %entry
-; CHECK-LE-NEXT: vcmp.s32 gt, q0, zr
-; CHECK-LE-NEXT: vpst
+; CHECK-LE-NEXT: vpt.s32 gt, q0, zr
; CHECK-LE-NEXT: vldrwt.u32 q1, [r0]
; CHECK-LE-NEXT: vpsel q0, q1, q0
; CHECK-LE-NEXT: bx lr
; CHECK-BE-LABEL: masked_v4i32_align4_other:
; CHECK-BE: @ %bb.0: @ %entry
; CHECK-BE-NEXT: vrev64.32 q1, q0
-; CHECK-BE-NEXT: vcmp.s32 gt, q1, zr
-; CHECK-BE-NEXT: vpst
+; CHECK-BE-NEXT: vpt.s32 gt, q1, zr
; CHECK-BE-NEXT: vldrwt.u32 q0, [r0]
; CHECK-BE-NEXT: vpsel q1, q0, q1
; CHECK-BE-NEXT: vrev64.32 q0, q1
define arm_aapcs_vfpcc i8* @masked_v4i32_preinc(i8* %x, i8* %y, <4 x i32> %a) {
; CHECK-LE-LABEL: masked_v4i32_preinc:
; CHECK-LE: @ %bb.0: @ %entry
-; CHECK-LE-NEXT: vcmp.s32 gt, q0, zr
-; CHECK-LE-NEXT: vpst
+; CHECK-LE-NEXT: vpt.s32 gt, q0, zr
; CHECK-LE-NEXT: vldrwt.u32 q0, [r0, #4]
; CHECK-LE-NEXT: adds r0, #4
; CHECK-LE-NEXT: vstrw.32 q0, [r1]
; CHECK-BE-LABEL: masked_v4i32_preinc:
; CHECK-BE: @ %bb.0: @ %entry
; CHECK-BE-NEXT: vrev64.32 q1, q0
-; CHECK-BE-NEXT: vcmp.s32 gt, q1, zr
-; CHECK-BE-NEXT: vpst
+; CHECK-BE-NEXT: vpt.s32 gt, q1, zr
; CHECK-BE-NEXT: vldrwt.u32 q0, [r0, #4]
; CHECK-BE-NEXT: adds r0, #4
; CHECK-BE-NEXT: vstrw.32 q0, [r1]
define arm_aapcs_vfpcc i8* @masked_v4i32_postinc(i8* %x, i8* %y, <4 x i32> %a) {
; CHECK-LE-LABEL: masked_v4i32_postinc:
; CHECK-LE: @ %bb.0: @ %entry
-; CHECK-LE-NEXT: vcmp.s32 gt, q0, zr
-; CHECK-LE-NEXT: vpst
+; CHECK-LE-NEXT: vpt.s32 gt, q0, zr
; CHECK-LE-NEXT: vldrwt.u32 q0, [r0]
; CHECK-LE-NEXT: adds r0, #4
; CHECK-LE-NEXT: vstrw.32 q0, [r1]
; CHECK-BE-LABEL: masked_v4i32_postinc:
; CHECK-BE: @ %bb.0: @ %entry
; CHECK-BE-NEXT: vrev64.32 q1, q0
-; CHECK-BE-NEXT: vcmp.s32 gt, q1, zr
-; CHECK-BE-NEXT: vpst
+; CHECK-BE-NEXT: vpt.s32 gt, q1, zr
; CHECK-BE-NEXT: vldrwt.u32 q0, [r0]
; CHECK-BE-NEXT: adds r0, #4
; CHECK-BE-NEXT: vstrw.32 q0, [r1]
define arm_aapcs_vfpcc <8 x i16> @masked_v8i16_align4_zero(<8 x i16> *%dest, <8 x i16> %a) {
; CHECK-LE-LABEL: masked_v8i16_align4_zero:
; CHECK-LE: @ %bb.0: @ %entry
-; CHECK-LE-NEXT: vcmp.s16 gt, q0, zr
; CHECK-LE-NEXT: vmov.i32 q1, #0x0
-; CHECK-LE-NEXT: vpst
+; CHECK-LE-NEXT: vpt.s16 gt, q0, zr
; CHECK-LE-NEXT: vldrht.u16 q0, [r0]
; CHECK-LE-NEXT: vpsel q0, q0, q1
; CHECK-LE-NEXT: bx lr
; CHECK-BE: @ %bb.0: @ %entry
; CHECK-BE-NEXT: vmov.i32 q1, #0x0
; CHECK-BE-NEXT: vrev64.16 q2, q0
-; CHECK-BE-NEXT: vcmp.s16 gt, q2, zr
; CHECK-BE-NEXT: vrev32.16 q1, q1
-; CHECK-BE-NEXT: vpst
+; CHECK-BE-NEXT: vpt.s16 gt, q2, zr
; CHECK-BE-NEXT: vldrht.u16 q0, [r0]
; CHECK-BE-NEXT: vpsel q1, q0, q1
; CHECK-BE-NEXT: vrev64.16 q0, q1
define arm_aapcs_vfpcc <8 x i16> @masked_v8i16_align4_undef(<8 x i16> *%dest, <8 x i16> %a) {
; CHECK-LE-LABEL: masked_v8i16_align4_undef:
; CHECK-LE: @ %bb.0: @ %entry
-; CHECK-LE-NEXT: vcmp.s16 gt, q0, zr
-; CHECK-LE-NEXT: vpst
+; CHECK-LE-NEXT: vpt.s16 gt, q0, zr
; CHECK-LE-NEXT: vldrht.u16 q0, [r0]
; CHECK-LE-NEXT: bx lr
;
; CHECK-BE-LABEL: masked_v8i16_align4_undef:
; CHECK-BE: @ %bb.0: @ %entry
; CHECK-BE-NEXT: vrev64.16 q1, q0
-; CHECK-BE-NEXT: vcmp.s16 gt, q1, zr
-; CHECK-BE-NEXT: vpst
+; CHECK-BE-NEXT: vpt.s16 gt, q1, zr
; CHECK-BE-NEXT: vldrht.u16 q1, [r0]
; CHECK-BE-NEXT: vrev64.16 q0, q1
; CHECK-BE-NEXT: bx lr
define arm_aapcs_vfpcc <8 x i16> @masked_v8i16_align1_undef(<8 x i16> *%dest, <8 x i16> %a) {
; CHECK-LE-LABEL: masked_v8i16_align1_undef:
; CHECK-LE: @ %bb.0: @ %entry
-; CHECK-LE-NEXT: vcmp.s16 gt, q0, zr
-; CHECK-LE-NEXT: vpst
+; CHECK-LE-NEXT: vpt.s16 gt, q0, zr
; CHECK-LE-NEXT: vldrbt.u8 q0, [r0]
; CHECK-LE-NEXT: bx lr
;
; CHECK-BE-LABEL: masked_v8i16_align1_undef:
; CHECK-BE: @ %bb.0: @ %entry
; CHECK-BE-NEXT: vrev64.16 q1, q0
-; CHECK-BE-NEXT: vcmp.s16 gt, q1, zr
-; CHECK-BE-NEXT: vpst
+; CHECK-BE-NEXT: vpt.s16 gt, q1, zr
; CHECK-BE-NEXT: vldrbt.u8 q0, [r0]
; CHECK-BE-NEXT: vrev16.8 q1, q0
; CHECK-BE-NEXT: vrev64.16 q0, q1
define arm_aapcs_vfpcc <8 x i16> @masked_v8i16_align4_other(<8 x i16> *%dest, <8 x i16> %a) {
; CHECK-LE-LABEL: masked_v8i16_align4_other:
; CHECK-LE: @ %bb.0: @ %entry
-; CHECK-LE-NEXT: vcmp.s16 gt, q0, zr
-; CHECK-LE-NEXT: vpst
+; CHECK-LE-NEXT: vpt.s16 gt, q0, zr
; CHECK-LE-NEXT: vldrht.u16 q1, [r0]
; CHECK-LE-NEXT: vpsel q0, q1, q0
; CHECK-LE-NEXT: bx lr
; CHECK-BE-LABEL: masked_v8i16_align4_other:
; CHECK-BE: @ %bb.0: @ %entry
; CHECK-BE-NEXT: vrev64.16 q1, q0
-; CHECK-BE-NEXT: vcmp.s16 gt, q1, zr
-; CHECK-BE-NEXT: vpst
+; CHECK-BE-NEXT: vpt.s16 gt, q1, zr
; CHECK-BE-NEXT: vldrht.u16 q0, [r0]
; CHECK-BE-NEXT: vpsel q1, q0, q1
; CHECK-BE-NEXT: vrev64.16 q0, q1
; CHECK-LE: @ %bb.0: @ %entry
; CHECK-LE-NEXT: vldr d1, [sp]
; CHECK-LE-NEXT: vmov d0, r2, r3
-; CHECK-LE-NEXT: vcmp.s16 gt, q0, zr
-; CHECK-LE-NEXT: vpst
+; CHECK-LE-NEXT: vpt.s16 gt, q0, zr
; CHECK-LE-NEXT: vldrht.u16 q0, [r0, #4]
; CHECK-LE-NEXT: adds r0, #4
; CHECK-LE-NEXT: vstrw.32 q0, [r1]
; CHECK-BE-NEXT: vldr d1, [sp]
; CHECK-BE-NEXT: vmov d0, r3, r2
; CHECK-BE-NEXT: vrev64.16 q1, q0
-; CHECK-BE-NEXT: vcmp.s16 gt, q1, zr
-; CHECK-BE-NEXT: vpst
+; CHECK-BE-NEXT: vpt.s16 gt, q1, zr
; CHECK-BE-NEXT: vldrht.u16 q0, [r0, #4]
; CHECK-BE-NEXT: adds r0, #4
; CHECK-BE-NEXT: vstrh.16 q0, [r1]
define arm_aapcs_vfpcc i8* @masked_v8i16_postinc(i8* %x, i8* %y, <8 x i16> %a) {
; CHECK-LE-LABEL: masked_v8i16_postinc:
; CHECK-LE: @ %bb.0: @ %entry
-; CHECK-LE-NEXT: vcmp.s16 gt, q0, zr
-; CHECK-LE-NEXT: vpst
+; CHECK-LE-NEXT: vpt.s16 gt, q0, zr
; CHECK-LE-NEXT: vldrht.u16 q0, [r0]
; CHECK-LE-NEXT: adds r0, #4
; CHECK-LE-NEXT: vstrw.32 q0, [r1]
; CHECK-BE-LABEL: masked_v8i16_postinc:
; CHECK-BE: @ %bb.0: @ %entry
; CHECK-BE-NEXT: vrev64.16 q1, q0
-; CHECK-BE-NEXT: vcmp.s16 gt, q1, zr
-; CHECK-BE-NEXT: vpst
+; CHECK-BE-NEXT: vpt.s16 gt, q1, zr
; CHECK-BE-NEXT: vldrht.u16 q0, [r0]
; CHECK-BE-NEXT: adds r0, #4
; CHECK-BE-NEXT: vstrh.16 q0, [r1]
define arm_aapcs_vfpcc <16 x i8> @masked_v16i8_align4_zero(<16 x i8> *%dest, <16 x i8> %a) {
; CHECK-LE-LABEL: masked_v16i8_align4_zero:
; CHECK-LE: @ %bb.0: @ %entry
-; CHECK-LE-NEXT: vcmp.s8 gt, q0, zr
; CHECK-LE-NEXT: vmov.i32 q1, #0x0
-; CHECK-LE-NEXT: vpst
+; CHECK-LE-NEXT: vpt.s8 gt, q0, zr
; CHECK-LE-NEXT: vldrbt.u8 q0, [r0]
; CHECK-LE-NEXT: vpsel q0, q0, q1
; CHECK-LE-NEXT: bx lr
; CHECK-BE: @ %bb.0: @ %entry
; CHECK-BE-NEXT: vmov.i32 q1, #0x0
; CHECK-BE-NEXT: vrev64.8 q2, q0
-; CHECK-BE-NEXT: vcmp.s8 gt, q2, zr
; CHECK-BE-NEXT: vrev32.8 q1, q1
-; CHECK-BE-NEXT: vpst
+; CHECK-BE-NEXT: vpt.s8 gt, q2, zr
; CHECK-BE-NEXT: vldrbt.u8 q0, [r0]
; CHECK-BE-NEXT: vpsel q1, q0, q1
; CHECK-BE-NEXT: vrev64.8 q0, q1
define arm_aapcs_vfpcc <16 x i8> @masked_v16i8_align4_undef(<16 x i8> *%dest, <16 x i8> %a) {
; CHECK-LE-LABEL: masked_v16i8_align4_undef:
; CHECK-LE: @ %bb.0: @ %entry
-; CHECK-LE-NEXT: vcmp.s8 gt, q0, zr
-; CHECK-LE-NEXT: vpst
+; CHECK-LE-NEXT: vpt.s8 gt, q0, zr
; CHECK-LE-NEXT: vldrbt.u8 q0, [r0]
; CHECK-LE-NEXT: bx lr
;
; CHECK-BE-LABEL: masked_v16i8_align4_undef:
; CHECK-BE: @ %bb.0: @ %entry
; CHECK-BE-NEXT: vrev64.8 q1, q0
-; CHECK-BE-NEXT: vcmp.s8 gt, q1, zr
-; CHECK-BE-NEXT: vpst
+; CHECK-BE-NEXT: vpt.s8 gt, q1, zr
; CHECK-BE-NEXT: vldrbt.u8 q1, [r0]
; CHECK-BE-NEXT: vrev64.8 q0, q1
; CHECK-BE-NEXT: bx lr
define arm_aapcs_vfpcc <16 x i8> @masked_v16i8_align4_other(<16 x i8> *%dest, <16 x i8> %a) {
; CHECK-LE-LABEL: masked_v16i8_align4_other:
; CHECK-LE: @ %bb.0: @ %entry
-; CHECK-LE-NEXT: vcmp.s8 gt, q0, zr
-; CHECK-LE-NEXT: vpst
+; CHECK-LE-NEXT: vpt.s8 gt, q0, zr
; CHECK-LE-NEXT: vldrbt.u8 q1, [r0]
; CHECK-LE-NEXT: vpsel q0, q1, q0
; CHECK-LE-NEXT: bx lr
; CHECK-BE-LABEL: masked_v16i8_align4_other:
; CHECK-BE: @ %bb.0: @ %entry
; CHECK-BE-NEXT: vrev64.8 q1, q0
-; CHECK-BE-NEXT: vcmp.s8 gt, q1, zr
-; CHECK-BE-NEXT: vpst
+; CHECK-BE-NEXT: vpt.s8 gt, q1, zr
; CHECK-BE-NEXT: vldrbt.u8 q0, [r0]
; CHECK-BE-NEXT: vpsel q1, q0, q1
; CHECK-BE-NEXT: vrev64.8 q0, q1
define arm_aapcs_vfpcc i8* @masked_v16i8_preinc(i8* %x, i8* %y, <16 x i8> %a) {
; CHECK-LE-LABEL: masked_v16i8_preinc:
; CHECK-LE: @ %bb.0: @ %entry
-; CHECK-LE-NEXT: vcmp.s8 gt, q0, zr
-; CHECK-LE-NEXT: vpst
+; CHECK-LE-NEXT: vpt.s8 gt, q0, zr
; CHECK-LE-NEXT: vldrbt.u8 q0, [r0, #4]
; CHECK-LE-NEXT: adds r0, #4
; CHECK-LE-NEXT: vstrw.32 q0, [r1]
; CHECK-BE-LABEL: masked_v16i8_preinc:
; CHECK-BE: @ %bb.0: @ %entry
; CHECK-BE-NEXT: vrev64.8 q1, q0
-; CHECK-BE-NEXT: vcmp.s8 gt, q1, zr
-; CHECK-BE-NEXT: vpst
+; CHECK-BE-NEXT: vpt.s8 gt, q1, zr
; CHECK-BE-NEXT: vldrbt.u8 q0, [r0, #4]
; CHECK-BE-NEXT: adds r0, #4
; CHECK-BE-NEXT: vstrb.8 q0, [r1]
define arm_aapcs_vfpcc i8* @masked_v16i8_postinc(i8* %x, i8* %y, <16 x i8> %a) {
; CHECK-LE-LABEL: masked_v16i8_postinc:
; CHECK-LE: @ %bb.0: @ %entry
-; CHECK-LE-NEXT: vcmp.s8 gt, q0, zr
-; CHECK-LE-NEXT: vpst
+; CHECK-LE-NEXT: vpt.s8 gt, q0, zr
; CHECK-LE-NEXT: vldrbt.u8 q0, [r0]
; CHECK-LE-NEXT: adds r0, #4
; CHECK-LE-NEXT: vstrw.32 q0, [r1]
; CHECK-BE-LABEL: masked_v16i8_postinc:
; CHECK-BE: @ %bb.0: @ %entry
; CHECK-BE-NEXT: vrev64.8 q1, q0
-; CHECK-BE-NEXT: vcmp.s8 gt, q1, zr
-; CHECK-BE-NEXT: vpst
+; CHECK-BE-NEXT: vpt.s8 gt, q1, zr
; CHECK-BE-NEXT: vldrbt.u8 q0, [r0]
; CHECK-BE-NEXT: adds r0, #4
; CHECK-BE-NEXT: vstrb.8 q0, [r1]
define arm_aapcs_vfpcc <4 x float> @masked_v4f32_align4_zero(<4 x float> *%dest, <4 x i32> %a) {
; CHECK-LE-LABEL: masked_v4f32_align4_zero:
; CHECK-LE: @ %bb.0: @ %entry
-; CHECK-LE-NEXT: vcmp.s32 gt, q0, zr
; CHECK-LE-NEXT: vmov.i32 q1, #0x0
-; CHECK-LE-NEXT: vpst
+; CHECK-LE-NEXT: vpt.s32 gt, q0, zr
; CHECK-LE-NEXT: vldrwt.u32 q0, [r0]
; CHECK-LE-NEXT: vpsel q0, q0, q1
; CHECK-LE-NEXT: bx lr
;
; CHECK-BE-LABEL: masked_v4f32_align4_zero:
; CHECK-BE: @ %bb.0: @ %entry
-; CHECK-BE-NEXT: vrev64.32 q2, q0
; CHECK-BE-NEXT: vmov.i32 q1, #0x0
-; CHECK-BE-NEXT: vcmp.s32 gt, q2, zr
-; CHECK-BE-NEXT: vpst
+; CHECK-BE-NEXT: vrev64.32 q2, q0
+; CHECK-BE-NEXT: vpt.s32 gt, q2, zr
; CHECK-BE-NEXT: vldrwt.u32 q0, [r0]
; CHECK-BE-NEXT: vpsel q1, q0, q1
; CHECK-BE-NEXT: vrev64.32 q0, q1
define arm_aapcs_vfpcc <4 x float> @masked_v4f32_align4_undef(<4 x float> *%dest, <4 x i32> %a) {
; CHECK-LE-LABEL: masked_v4f32_align4_undef:
; CHECK-LE: @ %bb.0: @ %entry
-; CHECK-LE-NEXT: vcmp.s32 gt, q0, zr
-; CHECK-LE-NEXT: vpst
+; CHECK-LE-NEXT: vpt.s32 gt, q0, zr
; CHECK-LE-NEXT: vldrwt.u32 q0, [r0]
; CHECK-LE-NEXT: bx lr
;
; CHECK-BE-LABEL: masked_v4f32_align4_undef:
; CHECK-BE: @ %bb.0: @ %entry
; CHECK-BE-NEXT: vrev64.32 q1, q0
-; CHECK-BE-NEXT: vcmp.s32 gt, q1, zr
-; CHECK-BE-NEXT: vpst
+; CHECK-BE-NEXT: vpt.s32 gt, q1, zr
; CHECK-BE-NEXT: vldrwt.u32 q1, [r0]
; CHECK-BE-NEXT: vrev64.32 q0, q1
; CHECK-BE-NEXT: bx lr
define arm_aapcs_vfpcc <4 x float> @masked_v4f32_align1_undef(<4 x float> *%dest, <4 x i32> %a) {
; CHECK-LE-LABEL: masked_v4f32_align1_undef:
; CHECK-LE: @ %bb.0: @ %entry
-; CHECK-LE-NEXT: vcmp.s32 gt, q0, zr
-; CHECK-LE-NEXT: vpst
+; CHECK-LE-NEXT: vpt.s32 gt, q0, zr
; CHECK-LE-NEXT: vldrbt.u8 q0, [r0]
; CHECK-LE-NEXT: bx lr
;
; CHECK-BE-LABEL: masked_v4f32_align1_undef:
; CHECK-BE: @ %bb.0: @ %entry
; CHECK-BE-NEXT: vrev64.32 q1, q0
-; CHECK-BE-NEXT: vcmp.s32 gt, q1, zr
-; CHECK-BE-NEXT: vpst
+; CHECK-BE-NEXT: vpt.s32 gt, q1, zr
; CHECK-BE-NEXT: vldrbt.u8 q0, [r0]
; CHECK-BE-NEXT: vrev32.8 q1, q0
; CHECK-BE-NEXT: vrev64.32 q0, q1
define arm_aapcs_vfpcc <4 x float> @masked_v4f32_align4_other(<4 x float> *%dest, <4 x i32> %a, <4 x float> %b) {
; CHECK-LE-LABEL: masked_v4f32_align4_other:
; CHECK-LE: @ %bb.0: @ %entry
-; CHECK-LE-NEXT: vcmp.s32 gt, q0, zr
-; CHECK-LE-NEXT: vpst
+; CHECK-LE-NEXT: vpt.s32 gt, q0, zr
; CHECK-LE-NEXT: vldrwt.u32 q0, [r0]
; CHECK-LE-NEXT: vpsel q0, q0, q1
; CHECK-LE-NEXT: bx lr
; CHECK-BE: @ %bb.0: @ %entry
; CHECK-BE-NEXT: vrev64.32 q2, q1
; CHECK-BE-NEXT: vrev64.32 q1, q0
-; CHECK-BE-NEXT: vcmp.s32 gt, q1, zr
-; CHECK-BE-NEXT: vpst
+; CHECK-BE-NEXT: vpt.s32 gt, q1, zr
; CHECK-BE-NEXT: vldrwt.u32 q0, [r0]
; CHECK-BE-NEXT: vpsel q1, q0, q2
; CHECK-BE-NEXT: vrev64.32 q0, q1
define arm_aapcs_vfpcc i8* @masked_v4f32_preinc(i8* %x, i8* %y, <4 x i32> %a) {
; CHECK-LE-LABEL: masked_v4f32_preinc:
; CHECK-LE: @ %bb.0: @ %entry
-; CHECK-LE-NEXT: vcmp.s32 gt, q0, zr
-; CHECK-LE-NEXT: vpst
+; CHECK-LE-NEXT: vpt.s32 gt, q0, zr
; CHECK-LE-NEXT: vldrwt.u32 q0, [r0, #4]
; CHECK-LE-NEXT: adds r0, #4
; CHECK-LE-NEXT: vstrw.32 q0, [r1]
; CHECK-BE-LABEL: masked_v4f32_preinc:
; CHECK-BE: @ %bb.0: @ %entry
; CHECK-BE-NEXT: vrev64.32 q1, q0
-; CHECK-BE-NEXT: vcmp.s32 gt, q1, zr
-; CHECK-BE-NEXT: vpst
+; CHECK-BE-NEXT: vpt.s32 gt, q1, zr
; CHECK-BE-NEXT: vldrwt.u32 q0, [r0, #4]
; CHECK-BE-NEXT: adds r0, #4
; CHECK-BE-NEXT: vstrw.32 q0, [r1]
define arm_aapcs_vfpcc i8* @masked_v4f32_postinc(i8* %x, i8* %y, <4 x i32> %a) {
; CHECK-LE-LABEL: masked_v4f32_postinc:
; CHECK-LE: @ %bb.0: @ %entry
-; CHECK-LE-NEXT: vcmp.s32 gt, q0, zr
-; CHECK-LE-NEXT: vpst
+; CHECK-LE-NEXT: vpt.s32 gt, q0, zr
; CHECK-LE-NEXT: vldrwt.u32 q0, [r0]
; CHECK-LE-NEXT: adds r0, #4
; CHECK-LE-NEXT: vstrw.32 q0, [r1]
; CHECK-BE-LABEL: masked_v4f32_postinc:
; CHECK-BE: @ %bb.0: @ %entry
; CHECK-BE-NEXT: vrev64.32 q1, q0
-; CHECK-BE-NEXT: vcmp.s32 gt, q1, zr
-; CHECK-BE-NEXT: vpst
+; CHECK-BE-NEXT: vpt.s32 gt, q1, zr
; CHECK-BE-NEXT: vldrwt.u32 q0, [r0]
; CHECK-BE-NEXT: adds r0, #4
; CHECK-BE-NEXT: vstrw.32 q0, [r1]
define arm_aapcs_vfpcc <8 x half> @masked_v8f16_align4_zero(<8 x half> *%dest, <8 x i16> %a) {
; CHECK-LE-LABEL: masked_v8f16_align4_zero:
; CHECK-LE: @ %bb.0: @ %entry
-; CHECK-LE-NEXT: vcmp.s16 gt, q0, zr
; CHECK-LE-NEXT: vmov.i32 q1, #0x0
-; CHECK-LE-NEXT: vpst
+; CHECK-LE-NEXT: vpt.s16 gt, q0, zr
; CHECK-LE-NEXT: vldrht.u16 q0, [r0]
; CHECK-LE-NEXT: vpsel q0, q0, q1
; CHECK-LE-NEXT: bx lr
; CHECK-BE: @ %bb.0: @ %entry
; CHECK-BE-NEXT: vmov.i32 q1, #0x0
; CHECK-BE-NEXT: vrev64.16 q2, q0
-; CHECK-BE-NEXT: vcmp.s16 gt, q2, zr
; CHECK-BE-NEXT: vrev32.16 q1, q1
-; CHECK-BE-NEXT: vpst
+; CHECK-BE-NEXT: vpt.s16 gt, q2, zr
; CHECK-BE-NEXT: vldrht.u16 q0, [r0]
; CHECK-BE-NEXT: vpsel q1, q0, q1
; CHECK-BE-NEXT: vrev64.16 q0, q1
define arm_aapcs_vfpcc <8 x half> @masked_v8f16_align4_undef(<8 x half> *%dest, <8 x i16> %a) {
; CHECK-LE-LABEL: masked_v8f16_align4_undef:
; CHECK-LE: @ %bb.0: @ %entry
-; CHECK-LE-NEXT: vcmp.s16 gt, q0, zr
-; CHECK-LE-NEXT: vpst
+; CHECK-LE-NEXT: vpt.s16 gt, q0, zr
; CHECK-LE-NEXT: vldrht.u16 q0, [r0]
; CHECK-LE-NEXT: bx lr
;
; CHECK-BE-LABEL: masked_v8f16_align4_undef:
; CHECK-BE: @ %bb.0: @ %entry
; CHECK-BE-NEXT: vrev64.16 q1, q0
-; CHECK-BE-NEXT: vcmp.s16 gt, q1, zr
-; CHECK-BE-NEXT: vpst
+; CHECK-BE-NEXT: vpt.s16 gt, q1, zr
; CHECK-BE-NEXT: vldrht.u16 q1, [r0]
; CHECK-BE-NEXT: vrev64.16 q0, q1
; CHECK-BE-NEXT: bx lr
define arm_aapcs_vfpcc <8 x half> @masked_v8f16_align1_undef(<8 x half> *%dest, <8 x i16> %a) {
; CHECK-LE-LABEL: masked_v8f16_align1_undef:
; CHECK-LE: @ %bb.0: @ %entry
-; CHECK-LE-NEXT: vcmp.s16 gt, q0, zr
-; CHECK-LE-NEXT: vpst
+; CHECK-LE-NEXT: vpt.s16 gt, q0, zr
; CHECK-LE-NEXT: vldrbt.u8 q0, [r0]
; CHECK-LE-NEXT: bx lr
;
; CHECK-BE-LABEL: masked_v8f16_align1_undef:
; CHECK-BE: @ %bb.0: @ %entry
; CHECK-BE-NEXT: vrev64.16 q1, q0
-; CHECK-BE-NEXT: vcmp.s16 gt, q1, zr
-; CHECK-BE-NEXT: vpst
+; CHECK-BE-NEXT: vpt.s16 gt, q1, zr
; CHECK-BE-NEXT: vldrbt.u8 q0, [r0]
; CHECK-BE-NEXT: vrev16.8 q1, q0
; CHECK-BE-NEXT: vrev64.16 q0, q1
define arm_aapcs_vfpcc <8 x half> @masked_v8f16_align4_other(<8 x half> *%dest, <8 x i16> %a, <8 x half> %b) {
; CHECK-LE-LABEL: masked_v8f16_align4_other:
; CHECK-LE: @ %bb.0: @ %entry
-; CHECK-LE-NEXT: vcmp.s16 gt, q0, zr
-; CHECK-LE-NEXT: vpst
+; CHECK-LE-NEXT: vpt.s16 gt, q0, zr
; CHECK-LE-NEXT: vldrht.u16 q0, [r0]
; CHECK-LE-NEXT: vpsel q0, q0, q1
; CHECK-LE-NEXT: bx lr
; CHECK-BE: @ %bb.0: @ %entry
; CHECK-BE-NEXT: vrev64.16 q2, q1
; CHECK-BE-NEXT: vrev64.16 q1, q0
-; CHECK-BE-NEXT: vcmp.s16 gt, q1, zr
-; CHECK-BE-NEXT: vpst
+; CHECK-BE-NEXT: vpt.s16 gt, q1, zr
; CHECK-BE-NEXT: vldrht.u16 q0, [r0]
; CHECK-BE-NEXT: vpsel q1, q0, q2
; CHECK-BE-NEXT: vrev64.16 q0, q1
define arm_aapcs_vfpcc i8* @masked_v8f16_preinc(i8* %x, i8* %y, <8 x i16> %a) {
; CHECK-LE-LABEL: masked_v8f16_preinc:
; CHECK-LE: @ %bb.0: @ %entry
-; CHECK-LE-NEXT: vcmp.s16 gt, q0, zr
-; CHECK-LE-NEXT: vpst
+; CHECK-LE-NEXT: vpt.s16 gt, q0, zr
; CHECK-LE-NEXT: vldrht.u16 q0, [r0, #4]
; CHECK-LE-NEXT: adds r0, #4
; CHECK-LE-NEXT: vstrw.32 q0, [r1]
; CHECK-BE-LABEL: masked_v8f16_preinc:
; CHECK-BE: @ %bb.0: @ %entry
; CHECK-BE-NEXT: vrev64.16 q1, q0
-; CHECK-BE-NEXT: vcmp.s16 gt, q1, zr
-; CHECK-BE-NEXT: vpst
+; CHECK-BE-NEXT: vpt.s16 gt, q1, zr
; CHECK-BE-NEXT: vldrht.u16 q0, [r0, #4]
; CHECK-BE-NEXT: adds r0, #4
; CHECK-BE-NEXT: vstrh.16 q0, [r1]
define arm_aapcs_vfpcc i8* @masked_v8f16_postinc(i8* %x, i8* %y, <8 x i16> %a) {
; CHECK-LE-LABEL: masked_v8f16_postinc:
; CHECK-LE: @ %bb.0: @ %entry
-; CHECK-LE-NEXT: vcmp.s16 gt, q0, zr
-; CHECK-LE-NEXT: vpst
+; CHECK-LE-NEXT: vpt.s16 gt, q0, zr
; CHECK-LE-NEXT: vldrht.u16 q0, [r0]
; CHECK-LE-NEXT: adds r0, #4
; CHECK-LE-NEXT: vstrw.32 q0, [r1]
; CHECK-BE-LABEL: masked_v8f16_postinc:
; CHECK-BE: @ %bb.0: @ %entry
; CHECK-BE-NEXT: vrev64.16 q1, q0
-; CHECK-BE-NEXT: vcmp.s16 gt, q1, zr
-; CHECK-BE-NEXT: vpst
+; CHECK-BE-NEXT: vpt.s16 gt, q1, zr
; CHECK-BE-NEXT: vldrht.u16 q0, [r0]
; CHECK-BE-NEXT: adds r0, #4
; CHECK-BE-NEXT: vstrh.16 q0, [r1]
define arm_aapcs_vfpcc void @masked_v4i32(<4 x i32> *%dest, <4 x i32> %a) {
; CHECK-LE-LABEL: masked_v4i32:
; CHECK-LE: @ %bb.0: @ %entry
-; CHECK-LE-NEXT: vcmp.s32 gt, q0, zr
-; CHECK-LE-NEXT: vpst
+; CHECK-LE-NEXT: vpt.s32 gt, q0, zr
; CHECK-LE-NEXT: vstrwt.32 q0, [r0]
; CHECK-LE-NEXT: bx lr
;
; CHECK-BE-LABEL: masked_v4i32:
; CHECK-BE: @ %bb.0: @ %entry
; CHECK-BE-NEXT: vrev64.32 q1, q0
-; CHECK-BE-NEXT: vcmp.s32 gt, q1, zr
-; CHECK-BE-NEXT: vpst
+; CHECK-BE-NEXT: vpt.s32 gt, q1, zr
; CHECK-BE-NEXT: vstrwt.32 q1, [r0]
; CHECK-BE-NEXT: bx lr
entry:
define arm_aapcs_vfpcc void @masked_v4i32_align1(<4 x i32> *%dest, <4 x i32> %a) {
; CHECK-LE-LABEL: masked_v4i32_align1:
; CHECK-LE: @ %bb.0: @ %entry
-; CHECK-LE-NEXT: vcmp.s32 gt, q0, zr
-; CHECK-LE-NEXT: vpst
+; CHECK-LE-NEXT: vpt.s32 gt, q0, zr
; CHECK-LE-NEXT: vstrbt.8 q0, [r0]
; CHECK-LE-NEXT: bx lr
;
; CHECK-BE: @ %bb.0: @ %entry
; CHECK-BE-NEXT: vrev64.32 q1, q0
; CHECK-BE-NEXT: vrev32.8 q0, q1
-; CHECK-BE-NEXT: vcmp.s32 gt, q1, zr
-; CHECK-BE-NEXT: vpst
+; CHECK-BE-NEXT: vpt.s32 gt, q1, zr
; CHECK-BE-NEXT: vstrbt.8 q0, [r0]
; CHECK-BE-NEXT: bx lr
entry:
; CHECK-LE-NEXT: vldr d1, [sp]
; CHECK-LE-NEXT: vldrw.u32 q1, [r1]
; CHECK-LE-NEXT: vmov d0, r2, r3
-; CHECK-LE-NEXT: vcmp.s32 gt, q0, zr
-; CHECK-LE-NEXT: vpst
+; CHECK-LE-NEXT: vpt.s32 gt, q0, zr
; CHECK-LE-NEXT: vstrwt.32 q1, [r0, #4]
; CHECK-LE-NEXT: adds r0, #4
; CHECK-LE-NEXT: bx lr
; CHECK-BE-NEXT: vldrw.u32 q1, [r1]
; CHECK-BE-NEXT: vmov d0, r3, r2
; CHECK-BE-NEXT: vrev64.32 q2, q0
-; CHECK-BE-NEXT: vcmp.s32 gt, q2, zr
-; CHECK-BE-NEXT: vpst
+; CHECK-BE-NEXT: vpt.s32 gt, q2, zr
; CHECK-BE-NEXT: vstrwt.32 q1, [r0, #4]
; CHECK-BE-NEXT: adds r0, #4
; CHECK-BE-NEXT: bx lr
; CHECK-LE-NEXT: vldr d1, [sp]
; CHECK-LE-NEXT: vldrw.u32 q1, [r1]
; CHECK-LE-NEXT: vmov d0, r2, r3
-; CHECK-LE-NEXT: vcmp.s32 gt, q0, zr
-; CHECK-LE-NEXT: vpst
+; CHECK-LE-NEXT: vpt.s32 gt, q0, zr
; CHECK-LE-NEXT: vstrwt.32 q1, [r0]
; CHECK-LE-NEXT: adds r0, #4
; CHECK-LE-NEXT: bx lr
; CHECK-BE-NEXT: vldrw.u32 q1, [r1]
; CHECK-BE-NEXT: vmov d0, r3, r2
; CHECK-BE-NEXT: vrev64.32 q2, q0
-; CHECK-BE-NEXT: vcmp.s32 gt, q2, zr
-; CHECK-BE-NEXT: vpst
+; CHECK-BE-NEXT: vpt.s32 gt, q2, zr
; CHECK-BE-NEXT: vstrwt.32 q1, [r0]
; CHECK-BE-NEXT: adds r0, #4
; CHECK-BE-NEXT: bx lr
define arm_aapcs_vfpcc void @masked_v8i16(<8 x i16> *%dest, <8 x i16> %a) {
; CHECK-LE-LABEL: masked_v8i16:
; CHECK-LE: @ %bb.0: @ %entry
-; CHECK-LE-NEXT: vcmp.s16 gt, q0, zr
-; CHECK-LE-NEXT: vpst
+; CHECK-LE-NEXT: vpt.s16 gt, q0, zr
; CHECK-LE-NEXT: vstrht.16 q0, [r0]
; CHECK-LE-NEXT: bx lr
;
; CHECK-BE-LABEL: masked_v8i16:
; CHECK-BE: @ %bb.0: @ %entry
; CHECK-BE-NEXT: vrev64.16 q1, q0
-; CHECK-BE-NEXT: vcmp.s16 gt, q1, zr
-; CHECK-BE-NEXT: vpst
+; CHECK-BE-NEXT: vpt.s16 gt, q1, zr
; CHECK-BE-NEXT: vstrht.16 q1, [r0]
; CHECK-BE-NEXT: bx lr
entry:
define arm_aapcs_vfpcc void @masked_v8i16_align1(<8 x i16> *%dest, <8 x i16> %a) {
; CHECK-LE-LABEL: masked_v8i16_align1:
; CHECK-LE: @ %bb.0: @ %entry
-; CHECK-LE-NEXT: vcmp.s16 gt, q0, zr
-; CHECK-LE-NEXT: vpst
+; CHECK-LE-NEXT: vpt.s16 gt, q0, zr
; CHECK-LE-NEXT: vstrbt.8 q0, [r0]
; CHECK-LE-NEXT: bx lr
;
; CHECK-BE: @ %bb.0: @ %entry
; CHECK-BE-NEXT: vrev64.16 q1, q0
; CHECK-BE-NEXT: vrev16.8 q0, q1
-; CHECK-BE-NEXT: vcmp.s16 gt, q1, zr
-; CHECK-BE-NEXT: vpst
+; CHECK-BE-NEXT: vpt.s16 gt, q1, zr
; CHECK-BE-NEXT: vstrbt.8 q0, [r0]
; CHECK-BE-NEXT: bx lr
entry:
; CHECK-LE-NEXT: vldr d1, [sp]
; CHECK-LE-NEXT: vldrw.u32 q1, [r1]
; CHECK-LE-NEXT: vmov d0, r2, r3
-; CHECK-LE-NEXT: vcmp.s16 gt, q0, zr
-; CHECK-LE-NEXT: vpst
+; CHECK-LE-NEXT: vpt.s16 gt, q0, zr
; CHECK-LE-NEXT: vstrht.16 q1, [r0, #4]
; CHECK-LE-NEXT: adds r0, #4
; CHECK-LE-NEXT: bx lr
; CHECK-BE-NEXT: vldrh.u16 q1, [r1]
; CHECK-BE-NEXT: vmov d0, r3, r2
; CHECK-BE-NEXT: vrev64.16 q2, q0
-; CHECK-BE-NEXT: vcmp.s16 gt, q2, zr
-; CHECK-BE-NEXT: vpst
+; CHECK-BE-NEXT: vpt.s16 gt, q2, zr
; CHECK-BE-NEXT: vstrht.16 q1, [r0, #4]
; CHECK-BE-NEXT: adds r0, #4
; CHECK-BE-NEXT: bx lr
; CHECK-LE-NEXT: vldr d1, [sp]
; CHECK-LE-NEXT: vldrw.u32 q1, [r1]
; CHECK-LE-NEXT: vmov d0, r2, r3
-; CHECK-LE-NEXT: vcmp.s16 gt, q0, zr
-; CHECK-LE-NEXT: vpst
+; CHECK-LE-NEXT: vpt.s16 gt, q0, zr
; CHECK-LE-NEXT: vstrht.16 q1, [r0]
; CHECK-LE-NEXT: adds r0, #4
; CHECK-LE-NEXT: bx lr
; CHECK-BE-NEXT: vldrh.u16 q1, [r1]
; CHECK-BE-NEXT: vmov d0, r3, r2
; CHECK-BE-NEXT: vrev64.16 q2, q0
-; CHECK-BE-NEXT: vcmp.s16 gt, q2, zr
-; CHECK-BE-NEXT: vpst
+; CHECK-BE-NEXT: vpt.s16 gt, q2, zr
; CHECK-BE-NEXT: vstrht.16 q1, [r0]
; CHECK-BE-NEXT: adds r0, #4
; CHECK-BE-NEXT: bx lr
define arm_aapcs_vfpcc void @masked_v16i8(<16 x i8> *%dest, <16 x i8> %a) {
; CHECK-LE-LABEL: masked_v16i8:
; CHECK-LE: @ %bb.0: @ %entry
-; CHECK-LE-NEXT: vcmp.s8 gt, q0, zr
-; CHECK-LE-NEXT: vpst
+; CHECK-LE-NEXT: vpt.s8 gt, q0, zr
; CHECK-LE-NEXT: vstrbt.8 q0, [r0]
; CHECK-LE-NEXT: bx lr
;
; CHECK-BE-LABEL: masked_v16i8:
; CHECK-BE: @ %bb.0: @ %entry
; CHECK-BE-NEXT: vrev64.8 q1, q0
-; CHECK-BE-NEXT: vcmp.s8 gt, q1, zr
-; CHECK-BE-NEXT: vpst
+; CHECK-BE-NEXT: vpt.s8 gt, q1, zr
; CHECK-BE-NEXT: vstrbt.8 q1, [r0]
; CHECK-BE-NEXT: bx lr
entry:
; CHECK-LE-NEXT: vldr d1, [sp]
; CHECK-LE-NEXT: vldrw.u32 q1, [r1]
; CHECK-LE-NEXT: vmov d0, r2, r3
-; CHECK-LE-NEXT: vcmp.s8 gt, q0, zr
-; CHECK-LE-NEXT: vpst
+; CHECK-LE-NEXT: vpt.s8 gt, q0, zr
; CHECK-LE-NEXT: vstrbt.8 q1, [r0, #4]
; CHECK-LE-NEXT: adds r0, #4
; CHECK-LE-NEXT: bx lr
; CHECK-BE-NEXT: vldrb.u8 q1, [r1]
; CHECK-BE-NEXT: vmov d0, r3, r2
; CHECK-BE-NEXT: vrev64.8 q2, q0
-; CHECK-BE-NEXT: vcmp.s8 gt, q2, zr
-; CHECK-BE-NEXT: vpst
+; CHECK-BE-NEXT: vpt.s8 gt, q2, zr
; CHECK-BE-NEXT: vstrbt.8 q1, [r0, #4]
; CHECK-BE-NEXT: adds r0, #4
; CHECK-BE-NEXT: bx lr
; CHECK-LE-NEXT: vldr d1, [sp]
; CHECK-LE-NEXT: vldrw.u32 q1, [r1]
; CHECK-LE-NEXT: vmov d0, r2, r3
-; CHECK-LE-NEXT: vcmp.s8 gt, q0, zr
-; CHECK-LE-NEXT: vpst
+; CHECK-LE-NEXT: vpt.s8 gt, q0, zr
; CHECK-LE-NEXT: vstrbt.8 q1, [r0]
; CHECK-LE-NEXT: adds r0, #4
; CHECK-LE-NEXT: bx lr
; CHECK-BE-NEXT: vldrb.u8 q1, [r1]
; CHECK-BE-NEXT: vmov d0, r3, r2
; CHECK-BE-NEXT: vrev64.8 q2, q0
-; CHECK-BE-NEXT: vcmp.s8 gt, q2, zr
-; CHECK-BE-NEXT: vpst
+; CHECK-BE-NEXT: vpt.s8 gt, q2, zr
; CHECK-BE-NEXT: vstrbt.8 q1, [r0]
; CHECK-BE-NEXT: adds r0, #4
; CHECK-BE-NEXT: bx lr
define arm_aapcs_vfpcc void @masked_v4f32(<4 x float> *%dest, <4 x float> %a, <4 x i32> %b) {
; CHECK-LE-LABEL: masked_v4f32:
; CHECK-LE: @ %bb.0: @ %entry
-; CHECK-LE-NEXT: vcmp.i32 ne, q1, zr
-; CHECK-LE-NEXT: vpst
+; CHECK-LE-NEXT: vpt.i32 ne, q1, zr
; CHECK-LE-NEXT: vstrwt.32 q0, [r0]
; CHECK-LE-NEXT: bx lr
;
; CHECK-BE: @ %bb.0: @ %entry
; CHECK-BE-NEXT: vrev64.32 q2, q1
; CHECK-BE-NEXT: vrev64.32 q1, q0
-; CHECK-BE-NEXT: vcmp.i32 ne, q2, zr
-; CHECK-BE-NEXT: vpst
+; CHECK-BE-NEXT: vpt.i32 ne, q2, zr
; CHECK-BE-NEXT: vstrwt.32 q1, [r0]
; CHECK-BE-NEXT: bx lr
entry:
define arm_aapcs_vfpcc void @masked_v4f32_align1(<4 x float> *%dest, <4 x float> %a, <4 x i32> %b) {
; CHECK-LE-LABEL: masked_v4f32_align1:
; CHECK-LE: @ %bb.0: @ %entry
-; CHECK-LE-NEXT: vcmp.i32 ne, q1, zr
-; CHECK-LE-NEXT: vpst
+; CHECK-LE-NEXT: vpt.i32 ne, q1, zr
; CHECK-LE-NEXT: vstrbt.8 q0, [r0]
; CHECK-LE-NEXT: bx lr
;
; CHECK-BE-NEXT: vrev64.32 q2, q1
; CHECK-BE-NEXT: vrev64.32 q1, q0
; CHECK-BE-NEXT: vrev32.8 q0, q1
-; CHECK-BE-NEXT: vcmp.i32 ne, q2, zr
-; CHECK-BE-NEXT: vpst
+; CHECK-BE-NEXT: vpt.i32 ne, q2, zr
; CHECK-BE-NEXT: vstrbt.8 q0, [r0]
; CHECK-BE-NEXT: bx lr
entry:
; CHECK-LE-NEXT: vldr d1, [sp]
; CHECK-LE-NEXT: vldrw.u32 q1, [r1]
; CHECK-LE-NEXT: vmov d0, r2, r3
-; CHECK-LE-NEXT: vcmp.s32 gt, q0, zr
-; CHECK-LE-NEXT: vpst
+; CHECK-LE-NEXT: vpt.s32 gt, q0, zr
; CHECK-LE-NEXT: vstrwt.32 q1, [r0, #4]
; CHECK-LE-NEXT: adds r0, #4
; CHECK-LE-NEXT: bx lr
; CHECK-BE-NEXT: vldrw.u32 q1, [r1]
; CHECK-BE-NEXT: vmov d0, r3, r2
; CHECK-BE-NEXT: vrev64.32 q2, q0
-; CHECK-BE-NEXT: vcmp.s32 gt, q2, zr
-; CHECK-BE-NEXT: vpst
+; CHECK-BE-NEXT: vpt.s32 gt, q2, zr
; CHECK-BE-NEXT: vstrwt.32 q1, [r0, #4]
; CHECK-BE-NEXT: adds r0, #4
; CHECK-BE-NEXT: bx lr
; CHECK-LE-NEXT: vldr d1, [sp]
; CHECK-LE-NEXT: vldrw.u32 q1, [r1]
; CHECK-LE-NEXT: vmov d0, r2, r3
-; CHECK-LE-NEXT: vcmp.s32 gt, q0, zr
-; CHECK-LE-NEXT: vpst
+; CHECK-LE-NEXT: vpt.s32 gt, q0, zr
; CHECK-LE-NEXT: vstrwt.32 q1, [r0]
; CHECK-LE-NEXT: adds r0, #4
; CHECK-LE-NEXT: bx lr
; CHECK-BE-NEXT: vldrw.u32 q1, [r1]
; CHECK-BE-NEXT: vmov d0, r3, r2
; CHECK-BE-NEXT: vrev64.32 q2, q0
-; CHECK-BE-NEXT: vcmp.s32 gt, q2, zr
-; CHECK-BE-NEXT: vpst
+; CHECK-BE-NEXT: vpt.s32 gt, q2, zr
; CHECK-BE-NEXT: vstrwt.32 q1, [r0]
; CHECK-BE-NEXT: adds r0, #4
; CHECK-BE-NEXT: bx lr
define arm_aapcs_vfpcc void @masked_v8f16(<8 x half> *%dest, <8 x half> %a, <8 x i16> %b) {
; CHECK-LE-LABEL: masked_v8f16:
; CHECK-LE: @ %bb.0: @ %entry
-; CHECK-LE-NEXT: vcmp.i16 ne, q1, zr
-; CHECK-LE-NEXT: vpst
+; CHECK-LE-NEXT: vpt.i16 ne, q1, zr
; CHECK-LE-NEXT: vstrht.16 q0, [r0]
; CHECK-LE-NEXT: bx lr
;
; CHECK-BE: @ %bb.0: @ %entry
; CHECK-BE-NEXT: vrev64.16 q2, q1
; CHECK-BE-NEXT: vrev64.16 q1, q0
-; CHECK-BE-NEXT: vcmp.i16 ne, q2, zr
-; CHECK-BE-NEXT: vpst
+; CHECK-BE-NEXT: vpt.i16 ne, q2, zr
; CHECK-BE-NEXT: vstrht.16 q1, [r0]
; CHECK-BE-NEXT: bx lr
entry:
define arm_aapcs_vfpcc void @masked_v8f16_align1(<8 x half> *%dest, <8 x half> %a, <8 x i16> %b) {
; CHECK-LE-LABEL: masked_v8f16_align1:
; CHECK-LE: @ %bb.0: @ %entry
-; CHECK-LE-NEXT: vcmp.i16 ne, q1, zr
-; CHECK-LE-NEXT: vpst
+; CHECK-LE-NEXT: vpt.i16 ne, q1, zr
; CHECK-LE-NEXT: vstrbt.8 q0, [r0]
; CHECK-LE-NEXT: bx lr
;
; CHECK-BE-NEXT: vrev64.16 q2, q0
; CHECK-BE-NEXT: vrev16.8 q0, q2
; CHECK-BE-NEXT: vrev64.16 q2, q1
-; CHECK-BE-NEXT: vcmp.i16 ne, q2, zr
-; CHECK-BE-NEXT: vpst
+; CHECK-BE-NEXT: vpt.i16 ne, q2, zr
; CHECK-BE-NEXT: vstrbt.8 q0, [r0]
; CHECK-BE-NEXT: bx lr
entry:
; CHECK-LE-NEXT: vldr d1, [sp]
; CHECK-LE-NEXT: vldrw.u32 q1, [r1]
; CHECK-LE-NEXT: vmov d0, r2, r3
-; CHECK-LE-NEXT: vcmp.s16 gt, q0, zr
-; CHECK-LE-NEXT: vpst
+; CHECK-LE-NEXT: vpt.s16 gt, q0, zr
; CHECK-LE-NEXT: vstrht.16 q1, [r0, #4]
; CHECK-LE-NEXT: adds r0, #4
; CHECK-LE-NEXT: bx lr
; CHECK-BE-NEXT: vldrh.u16 q1, [r1]
; CHECK-BE-NEXT: vmov d0, r3, r2
; CHECK-BE-NEXT: vrev64.16 q2, q0
-; CHECK-BE-NEXT: vcmp.s16 gt, q2, zr
-; CHECK-BE-NEXT: vpst
+; CHECK-BE-NEXT: vpt.s16 gt, q2, zr
; CHECK-BE-NEXT: vstrht.16 q1, [r0, #4]
; CHECK-BE-NEXT: adds r0, #4
; CHECK-BE-NEXT: bx lr
; CHECK-LE-NEXT: vldr d1, [sp]
; CHECK-LE-NEXT: vldrw.u32 q1, [r1]
; CHECK-LE-NEXT: vmov d0, r2, r3
-; CHECK-LE-NEXT: vcmp.s16 gt, q0, zr
-; CHECK-LE-NEXT: vpst
+; CHECK-LE-NEXT: vpt.s16 gt, q0, zr
; CHECK-LE-NEXT: vstrht.16 q1, [r0]
; CHECK-LE-NEXT: adds r0, #4
; CHECK-LE-NEXT: bx lr
; CHECK-BE-NEXT: vldrh.u16 q1, [r1]
; CHECK-BE-NEXT: vmov d0, r3, r2
; CHECK-BE-NEXT: vrev64.16 q2, q0
-; CHECK-BE-NEXT: vcmp.s16 gt, q2, zr
-; CHECK-BE-NEXT: vpst
+; CHECK-BE-NEXT: vpt.s16 gt, q2, zr
; CHECK-BE-NEXT: vstrht.16 q1, [r0]
; CHECK-BE-NEXT: adds r0, #4
; CHECK-BE-NEXT: bx lr
define arm_aapcs_vfpcc <4 x i32> @cmpnez_v4i1(<4 x i32> %a, <4 x i32> %b) {
; CHECK-LABEL: cmpnez_v4i1:
; CHECK: @ %bb.0: @ %entry
-; CHECK-NEXT: vcmp.i32 eq, q0, zr
-; CHECK-NEXT: vpst
+; CHECK-NEXT: vpt.i32 eq, q0, zr
; CHECK-NEXT: vcmpt.i32 ne, q1, zr
; CHECK-NEXT: vpsel q0, q0, q1
; CHECK-NEXT: bx lr
define arm_aapcs_vfpcc <4 x i32> @cmpsltz_v4i1(<4 x i32> %a, <4 x i32> %b) {
; CHECK-LABEL: cmpsltz_v4i1:
; CHECK: @ %bb.0: @ %entry
-; CHECK-NEXT: vcmp.i32 eq, q0, zr
-; CHECK-NEXT: vpst
+; CHECK-NEXT: vpt.i32 eq, q0, zr
; CHECK-NEXT: vcmpt.s32 lt, q1, zr
; CHECK-NEXT: vpsel q0, q0, q1
; CHECK-NEXT: bx lr
define arm_aapcs_vfpcc <4 x i32> @cmpsgtz_v4i1(<4 x i32> %a, <4 x i32> %b) {
; CHECK-LABEL: cmpsgtz_v4i1:
; CHECK: @ %bb.0: @ %entry
-; CHECK-NEXT: vcmp.i32 eq, q0, zr
-; CHECK-NEXT: vpst
+; CHECK-NEXT: vpt.i32 eq, q0, zr
; CHECK-NEXT: vcmpt.s32 gt, q1, zr
; CHECK-NEXT: vpsel q0, q0, q1
; CHECK-NEXT: bx lr
define arm_aapcs_vfpcc <4 x i32> @cmpslez_v4i1(<4 x i32> %a, <4 x i32> %b) {
; CHECK-LABEL: cmpslez_v4i1:
; CHECK: @ %bb.0: @ %entry
-; CHECK-NEXT: vcmp.i32 eq, q0, zr
-; CHECK-NEXT: vpst
+; CHECK-NEXT: vpt.i32 eq, q0, zr
; CHECK-NEXT: vcmpt.s32 le, q1, zr
; CHECK-NEXT: vpsel q0, q0, q1
; CHECK-NEXT: bx lr
define arm_aapcs_vfpcc <4 x i32> @cmpsgez_v4i1(<4 x i32> %a, <4 x i32> %b) {
; CHECK-LABEL: cmpsgez_v4i1:
; CHECK: @ %bb.0: @ %entry
-; CHECK-NEXT: vcmp.i32 eq, q0, zr
-; CHECK-NEXT: vpst
+; CHECK-NEXT: vpt.i32 eq, q0, zr
; CHECK-NEXT: vcmpt.s32 ge, q1, zr
; CHECK-NEXT: vpsel q0, q0, q1
; CHECK-NEXT: bx lr
define arm_aapcs_vfpcc <4 x i32> @cmpugtz_v4i1(<4 x i32> %a, <4 x i32> %b) {
; CHECK-LABEL: cmpugtz_v4i1:
; CHECK: @ %bb.0: @ %entry
-; CHECK-NEXT: vcmp.i32 eq, q0, zr
-; CHECK-NEXT: vpst
+; CHECK-NEXT: vpt.i32 eq, q0, zr
; CHECK-NEXT: vcmpt.i32 ne, q1, zr
; CHECK-NEXT: vpsel q0, q0, q1
; CHECK-NEXT: bx lr
define arm_aapcs_vfpcc <4 x i32> @cmpulez_v4i1(<4 x i32> %a, <4 x i32> %b) {
; CHECK-LABEL: cmpulez_v4i1:
; CHECK: @ %bb.0: @ %entry
-; CHECK-NEXT: vcmp.i32 eq, q0, zr
-; CHECK-NEXT: vpst
+; CHECK-NEXT: vpt.i32 eq, q0, zr
; CHECK-NEXT: vcmpt.u32 cs, q1, zr
; CHECK-NEXT: vpsel q0, q0, q1
; CHECK-NEXT: bx lr
define arm_aapcs_vfpcc <4 x i32> @cmpeq_v4i1(<4 x i32> %a, <4 x i32> %b, <4 x i32> %c) {
; CHECK-LABEL: cmpeq_v4i1:
; CHECK: @ %bb.0: @ %entry
-; CHECK-NEXT: vcmp.i32 eq, q0, zr
-; CHECK-NEXT: vpst
+; CHECK-NEXT: vpt.i32 eq, q0, zr
; CHECK-NEXT: vcmpt.i32 eq, q1, q2
; CHECK-NEXT: vpsel q0, q0, q1
; CHECK-NEXT: bx lr
define arm_aapcs_vfpcc <4 x i32> @cmpne_v4i1(<4 x i32> %a, <4 x i32> %b, <4 x i32> %c) {
; CHECK-LABEL: cmpne_v4i1:
; CHECK: @ %bb.0: @ %entry
-; CHECK-NEXT: vcmp.i32 eq, q0, zr
-; CHECK-NEXT: vpst
+; CHECK-NEXT: vpt.i32 eq, q0, zr
; CHECK-NEXT: vcmpt.i32 ne, q1, q2
; CHECK-NEXT: vpsel q0, q0, q1
; CHECK-NEXT: bx lr
define arm_aapcs_vfpcc <4 x i32> @cmpslt_v4i1(<4 x i32> %a, <4 x i32> %b, <4 x i32> %c) {
; CHECK-LABEL: cmpslt_v4i1:
; CHECK: @ %bb.0: @ %entry
-; CHECK-NEXT: vcmp.i32 eq, q0, zr
-; CHECK-NEXT: vpst
+; CHECK-NEXT: vpt.i32 eq, q0, zr
; CHECK-NEXT: vcmpt.s32 gt, q2, q1
; CHECK-NEXT: vpsel q0, q0, q1
; CHECK-NEXT: bx lr
define arm_aapcs_vfpcc <4 x i32> @cmpsgt_v4i1(<4 x i32> %a, <4 x i32> %b, <4 x i32> %c) {
; CHECK-LABEL: cmpsgt_v4i1:
; CHECK: @ %bb.0: @ %entry
-; CHECK-NEXT: vcmp.i32 eq, q0, zr
-; CHECK-NEXT: vpst
+; CHECK-NEXT: vpt.i32 eq, q0, zr
; CHECK-NEXT: vcmpt.s32 gt, q1, q2
; CHECK-NEXT: vpsel q0, q0, q1
; CHECK-NEXT: bx lr
define arm_aapcs_vfpcc <4 x i32> @cmpsle_v4i1(<4 x i32> %a, <4 x i32> %b, <4 x i32> %c) {
; CHECK-LABEL: cmpsle_v4i1:
; CHECK: @ %bb.0: @ %entry
-; CHECK-NEXT: vcmp.i32 eq, q0, zr
-; CHECK-NEXT: vpst
+; CHECK-NEXT: vpt.i32 eq, q0, zr
; CHECK-NEXT: vcmpt.s32 ge, q2, q1
; CHECK-NEXT: vpsel q0, q0, q1
; CHECK-NEXT: bx lr
define arm_aapcs_vfpcc <4 x i32> @cmpsge_v4i1(<4 x i32> %a, <4 x i32> %b, <4 x i32> %c) {
; CHECK-LABEL: cmpsge_v4i1:
; CHECK: @ %bb.0: @ %entry
-; CHECK-NEXT: vcmp.i32 eq, q0, zr
-; CHECK-NEXT: vpst
+; CHECK-NEXT: vpt.i32 eq, q0, zr
; CHECK-NEXT: vcmpt.s32 ge, q1, q2
; CHECK-NEXT: vpsel q0, q0, q1
; CHECK-NEXT: bx lr
define arm_aapcs_vfpcc <4 x i32> @cmpult_v4i1(<4 x i32> %a, <4 x i32> %b, <4 x i32> %c) {
; CHECK-LABEL: cmpult_v4i1:
; CHECK: @ %bb.0: @ %entry
-; CHECK-NEXT: vcmp.i32 eq, q0, zr
-; CHECK-NEXT: vpst
+; CHECK-NEXT: vpt.i32 eq, q0, zr
; CHECK-NEXT: vcmpt.u32 hi, q2, q1
; CHECK-NEXT: vpsel q0, q0, q1
; CHECK-NEXT: bx lr
define arm_aapcs_vfpcc <4 x i32> @cmpugt_v4i1(<4 x i32> %a, <4 x i32> %b, <4 x i32> %c) {
; CHECK-LABEL: cmpugt_v4i1:
; CHECK: @ %bb.0: @ %entry
-; CHECK-NEXT: vcmp.i32 eq, q0, zr
-; CHECK-NEXT: vpst
+; CHECK-NEXT: vpt.i32 eq, q0, zr
; CHECK-NEXT: vcmpt.u32 hi, q1, q2
; CHECK-NEXT: vpsel q0, q0, q1
; CHECK-NEXT: bx lr
define arm_aapcs_vfpcc <4 x i32> @cmpule_v4i1(<4 x i32> %a, <4 x i32> %b, <4 x i32> %c) {
; CHECK-LABEL: cmpule_v4i1:
; CHECK: @ %bb.0: @ %entry
-; CHECK-NEXT: vcmp.i32 eq, q0, zr
-; CHECK-NEXT: vpst
+; CHECK-NEXT: vpt.i32 eq, q0, zr
; CHECK-NEXT: vcmpt.u32 cs, q2, q1
; CHECK-NEXT: vpsel q0, q0, q1
; CHECK-NEXT: bx lr
define arm_aapcs_vfpcc <4 x i32> @cmpuge_v4i1(<4 x i32> %a, <4 x i32> %b, <4 x i32> %c) {
; CHECK-LABEL: cmpuge_v4i1:
; CHECK: @ %bb.0: @ %entry
-; CHECK-NEXT: vcmp.i32 eq, q0, zr
-; CHECK-NEXT: vpst
+; CHECK-NEXT: vpt.i32 eq, q0, zr
; CHECK-NEXT: vcmpt.u32 cs, q1, q2
; CHECK-NEXT: vpsel q0, q0, q1
; CHECK-NEXT: bx lr
define arm_aapcs_vfpcc <4 x i32> @cmpeqr_v4i1(<4 x i32> %a, <4 x i32> %b, i32 %c) {
; CHECK-LABEL: cmpeqr_v4i1:
; CHECK: @ %bb.0: @ %entry
-; CHECK-NEXT: vcmp.i32 eq, q0, zr
-; CHECK-NEXT: vpst
+; CHECK-NEXT: vpt.i32 eq, q0, zr
; CHECK-NEXT: vcmpt.i32 eq, q1, r0
; CHECK-NEXT: vpsel q0, q0, q1
; CHECK-NEXT: bx lr
define arm_aapcs_vfpcc <4 x i32> @cmpner_v4i1(<4 x i32> %a, <4 x i32> %b, i32 %c) {
; CHECK-LABEL: cmpner_v4i1:
; CHECK: @ %bb.0: @ %entry
-; CHECK-NEXT: vcmp.i32 eq, q0, zr
-; CHECK-NEXT: vpst
+; CHECK-NEXT: vpt.i32 eq, q0, zr
; CHECK-NEXT: vcmpt.i32 ne, q1, r0
; CHECK-NEXT: vpsel q0, q0, q1
; CHECK-NEXT: bx lr
define arm_aapcs_vfpcc <4 x i32> @cmpsltr_v4i1(<4 x i32> %a, <4 x i32> %b, i32 %c) {
; CHECK-LABEL: cmpsltr_v4i1:
; CHECK: @ %bb.0: @ %entry
-; CHECK-NEXT: vcmp.i32 eq, q0, zr
; CHECK-NEXT: vdup.32 q2, r0
-; CHECK-NEXT: vpst
+; CHECK-NEXT: vpt.i32 eq, q0, zr
; CHECK-NEXT: vcmpt.s32 gt, q2, q1
; CHECK-NEXT: vpsel q0, q0, q1
; CHECK-NEXT: bx lr
define arm_aapcs_vfpcc <4 x i32> @cmpsgtr_v4i1(<4 x i32> %a, <4 x i32> %b, i32 %c) {
; CHECK-LABEL: cmpsgtr_v4i1:
; CHECK: @ %bb.0: @ %entry
-; CHECK-NEXT: vcmp.i32 eq, q0, zr
-; CHECK-NEXT: vpst
+; CHECK-NEXT: vpt.i32 eq, q0, zr
; CHECK-NEXT: vcmpt.s32 gt, q1, r0
; CHECK-NEXT: vpsel q0, q0, q1
; CHECK-NEXT: bx lr
define arm_aapcs_vfpcc <4 x i32> @cmpsler_v4i1(<4 x i32> %a, <4 x i32> %b, i32 %c) {
; CHECK-LABEL: cmpsler_v4i1:
; CHECK: @ %bb.0: @ %entry
-; CHECK-NEXT: vcmp.i32 eq, q0, zr
; CHECK-NEXT: vdup.32 q2, r0
-; CHECK-NEXT: vpst
+; CHECK-NEXT: vpt.i32 eq, q0, zr
; CHECK-NEXT: vcmpt.s32 ge, q2, q1
; CHECK-NEXT: vpsel q0, q0, q1
; CHECK-NEXT: bx lr
define arm_aapcs_vfpcc <4 x i32> @cmpsger_v4i1(<4 x i32> %a, <4 x i32> %b, i32 %c) {
; CHECK-LABEL: cmpsger_v4i1:
; CHECK: @ %bb.0: @ %entry
-; CHECK-NEXT: vcmp.i32 eq, q0, zr
-; CHECK-NEXT: vpst
+; CHECK-NEXT: vpt.i32 eq, q0, zr
; CHECK-NEXT: vcmpt.s32 ge, q1, r0
; CHECK-NEXT: vpsel q0, q0, q1
; CHECK-NEXT: bx lr
define arm_aapcs_vfpcc <4 x i32> @cmpultr_v4i1(<4 x i32> %a, <4 x i32> %b, i32 %c) {
; CHECK-LABEL: cmpultr_v4i1:
; CHECK: @ %bb.0: @ %entry
-; CHECK-NEXT: vcmp.i32 eq, q0, zr
; CHECK-NEXT: vdup.32 q2, r0
-; CHECK-NEXT: vpst
+; CHECK-NEXT: vpt.i32 eq, q0, zr
; CHECK-NEXT: vcmpt.u32 hi, q2, q1
; CHECK-NEXT: vpsel q0, q0, q1
; CHECK-NEXT: bx lr
define arm_aapcs_vfpcc <4 x i32> @cmpugtr_v4i1(<4 x i32> %a, <4 x i32> %b, i32 %c) {
; CHECK-LABEL: cmpugtr_v4i1:
; CHECK: @ %bb.0: @ %entry
-; CHECK-NEXT: vcmp.i32 eq, q0, zr
-; CHECK-NEXT: vpst
+; CHECK-NEXT: vpt.i32 eq, q0, zr
; CHECK-NEXT: vcmpt.u32 hi, q1, r0
; CHECK-NEXT: vpsel q0, q0, q1
; CHECK-NEXT: bx lr
define arm_aapcs_vfpcc <4 x i32> @cmpuler_v4i1(<4 x i32> %a, <4 x i32> %b, i32 %c) {
; CHECK-LABEL: cmpuler_v4i1:
; CHECK: @ %bb.0: @ %entry
-; CHECK-NEXT: vcmp.i32 eq, q0, zr
; CHECK-NEXT: vdup.32 q2, r0
-; CHECK-NEXT: vpst
+; CHECK-NEXT: vpt.i32 eq, q0, zr
; CHECK-NEXT: vcmpt.u32 cs, q2, q1
; CHECK-NEXT: vpsel q0, q0, q1
; CHECK-NEXT: bx lr
define arm_aapcs_vfpcc <4 x i32> @cmpuger_v4i1(<4 x i32> %a, <4 x i32> %b, i32 %c) {
; CHECK-LABEL: cmpuger_v4i1:
; CHECK: @ %bb.0: @ %entry
-; CHECK-NEXT: vcmp.i32 eq, q0, zr
-; CHECK-NEXT: vpst
+; CHECK-NEXT: vpt.i32 eq, q0, zr
; CHECK-NEXT: vcmpt.u32 cs, q1, r0
; CHECK-NEXT: vpsel q0, q0, q1
; CHECK-NEXT: bx lr
define arm_aapcs_vfpcc <8 x i16> @cmpeq_v8i1(<8 x i16> %a, <8 x i16> %b, <8 x i16> %c) {
; CHECK-LABEL: cmpeq_v8i1:
; CHECK: @ %bb.0: @ %entry
-; CHECK-NEXT: vcmp.i16 eq, q0, zr
-; CHECK-NEXT: vpst
+; CHECK-NEXT: vpt.i16 eq, q0, zr
; CHECK-NEXT: vcmpt.i16 eq, q1, q2
; CHECK-NEXT: vpsel q0, q0, q1
; CHECK-NEXT: bx lr
define arm_aapcs_vfpcc <8 x i16> @cmpeqr_v8i1(<8 x i16> %a, <8 x i16> %b, i16 %c) {
; CHECK-LABEL: cmpeqr_v8i1:
; CHECK: @ %bb.0: @ %entry
-; CHECK-NEXT: vcmp.i16 eq, q0, zr
-; CHECK-NEXT: vpst
+; CHECK-NEXT: vpt.i16 eq, q0, zr
; CHECK-NEXT: vcmpt.i16 eq, q1, r0
; CHECK-NEXT: vpsel q0, q0, q1
; CHECK-NEXT: bx lr
define arm_aapcs_vfpcc <16 x i8> @cmpeq_v16i1(<16 x i8> %a, <16 x i8> %b, <16 x i8> %c) {
; CHECK-LABEL: cmpeq_v16i1:
; CHECK: @ %bb.0: @ %entry
-; CHECK-NEXT: vcmp.i8 eq, q0, zr
-; CHECK-NEXT: vpst
+; CHECK-NEXT: vpt.i8 eq, q0, zr
; CHECK-NEXT: vcmpt.i8 eq, q1, q2
; CHECK-NEXT: vpsel q0, q0, q1
; CHECK-NEXT: bx lr
define arm_aapcs_vfpcc <16 x i8> @cmpeqr_v16i1(<16 x i8> %a, <16 x i8> %b, i8 %c) {
; CHECK-LABEL: cmpeqr_v16i1:
; CHECK: @ %bb.0: @ %entry
-; CHECK-NEXT: vcmp.i8 eq, q0, zr
-; CHECK-NEXT: vpst
+; CHECK-NEXT: vpt.i8 eq, q0, zr
; CHECK-NEXT: vcmpt.i8 eq, q1, r0
; CHECK-NEXT: vpsel q0, q0, q1
; CHECK-NEXT: bx lr
define arm_aapcs_vfpcc <4 x i32> @vpnot_v4i1(<4 x i32> %a, <4 x i32> %b, <4 x i32> %c) {
; CHECK-LABEL: vpnot_v4i1:
; CHECK: @ %bb.0: @ %entry
-; CHECK-NEXT: vcmp.s32 lt, q0, zr
-; CHECK-NEXT: vpst
+; CHECK-NEXT: vpt.s32 lt, q0, zr
; CHECK-NEXT: vcmpt.s32 gt, q1, zr
; CHECK-NEXT: vpnot
; CHECK-NEXT: vpst
define arm_aapcs_vfpcc <4 x i32> @cmpeqz_v4i1(<4 x i32> %a, <4 x i32> %b) {
; CHECK-LABEL: cmpeqz_v4i1:
; CHECK: @ %bb.0: @ %entry
-; CHECK-NEXT: vcmp.i32 ne, q0, zr
-; CHECK-NEXT: vpst
+; CHECK-NEXT: vpt.i32 ne, q0, zr
; CHECK-NEXT: vcmpt.i32 ne, q1, zr
; CHECK-NEXT: vpnot
; CHECK-NEXT: vpsel q0, q0, q1
define arm_aapcs_vfpcc <4 x i32> @cmpnez_v4i1(<4 x i32> %a, <4 x i32> %b) {
; CHECK-LABEL: cmpnez_v4i1:
; CHECK: @ %bb.0: @ %entry
-; CHECK-NEXT: vcmp.i32 ne, q0, zr
-; CHECK-NEXT: vpst
+; CHECK-NEXT: vpt.i32 ne, q0, zr
; CHECK-NEXT: vcmpt.i32 eq, q1, zr
; CHECK-NEXT: vpnot
; CHECK-NEXT: vpsel q0, q0, q1
define arm_aapcs_vfpcc <4 x i32> @cmpsltz_v4i1(<4 x i32> %a, <4 x i32> %b) {
; CHECK-LABEL: cmpsltz_v4i1:
; CHECK: @ %bb.0: @ %entry
-; CHECK-NEXT: vcmp.i32 ne, q0, zr
-; CHECK-NEXT: vpst
+; CHECK-NEXT: vpt.i32 ne, q0, zr
; CHECK-NEXT: vcmpt.s32 ge, q1, zr
; CHECK-NEXT: vpnot
; CHECK-NEXT: vpsel q0, q0, q1
define arm_aapcs_vfpcc <4 x i32> @cmpsgtz_v4i1(<4 x i32> %a, <4 x i32> %b) {
; CHECK-LABEL: cmpsgtz_v4i1:
; CHECK: @ %bb.0: @ %entry
-; CHECK-NEXT: vcmp.i32 ne, q0, zr
-; CHECK-NEXT: vpst
+; CHECK-NEXT: vpt.i32 ne, q0, zr
; CHECK-NEXT: vcmpt.s32 le, q1, zr
; CHECK-NEXT: vpnot
; CHECK-NEXT: vpsel q0, q0, q1
define arm_aapcs_vfpcc <4 x i32> @cmpslez_v4i1(<4 x i32> %a, <4 x i32> %b) {
; CHECK-LABEL: cmpslez_v4i1:
; CHECK: @ %bb.0: @ %entry
-; CHECK-NEXT: vcmp.i32 ne, q0, zr
-; CHECK-NEXT: vpst
+; CHECK-NEXT: vpt.i32 ne, q0, zr
; CHECK-NEXT: vcmpt.s32 gt, q1, zr
; CHECK-NEXT: vpnot
; CHECK-NEXT: vpsel q0, q0, q1
define arm_aapcs_vfpcc <4 x i32> @cmpsgez_v4i1(<4 x i32> %a, <4 x i32> %b) {
; CHECK-LABEL: cmpsgez_v4i1:
; CHECK: @ %bb.0: @ %entry
-; CHECK-NEXT: vcmp.i32 ne, q0, zr
-; CHECK-NEXT: vpst
+; CHECK-NEXT: vpt.i32 ne, q0, zr
; CHECK-NEXT: vcmpt.s32 lt, q1, zr
; CHECK-NEXT: vpnot
; CHECK-NEXT: vpsel q0, q0, q1
define arm_aapcs_vfpcc <4 x i32> @cmpugtz_v4i1(<4 x i32> %a, <4 x i32> %b) {
; CHECK-LABEL: cmpugtz_v4i1:
; CHECK: @ %bb.0: @ %entry
-; CHECK-NEXT: vcmp.i32 ne, q0, zr
-; CHECK-NEXT: vpst
+; CHECK-NEXT: vpt.i32 ne, q0, zr
; CHECK-NEXT: vcmpt.i32 eq, q1, zr
; CHECK-NEXT: vpnot
; CHECK-NEXT: vpsel q0, q0, q1
define arm_aapcs_vfpcc <4 x i32> @cmpeq_v4i1(<4 x i32> %a, <4 x i32> %b, <4 x i32> %c) {
; CHECK-LABEL: cmpeq_v4i1:
; CHECK: @ %bb.0: @ %entry
-; CHECK-NEXT: vcmp.i32 ne, q0, zr
-; CHECK-NEXT: vpst
+; CHECK-NEXT: vpt.i32 ne, q0, zr
; CHECK-NEXT: vcmpt.i32 ne, q1, q2
; CHECK-NEXT: vpnot
; CHECK-NEXT: vpsel q0, q0, q1
define arm_aapcs_vfpcc <4 x i32> @cmpne_v4i1(<4 x i32> %a, <4 x i32> %b, <4 x i32> %c) {
; CHECK-LABEL: cmpne_v4i1:
; CHECK: @ %bb.0: @ %entry
-; CHECK-NEXT: vcmp.i32 ne, q0, zr
-; CHECK-NEXT: vpst
+; CHECK-NEXT: vpt.i32 ne, q0, zr
; CHECK-NEXT: vcmpt.i32 eq, q1, q2
; CHECK-NEXT: vpnot
; CHECK-NEXT: vpsel q0, q0, q1
define arm_aapcs_vfpcc <4 x i32> @cmpslt_v4i1(<4 x i32> %a, <4 x i32> %b, <4 x i32> %c) {
; CHECK-LABEL: cmpslt_v4i1:
; CHECK: @ %bb.0: @ %entry
-; CHECK-NEXT: vcmp.i32 ne, q0, zr
-; CHECK-NEXT: vpst
+; CHECK-NEXT: vpt.i32 ne, q0, zr
; CHECK-NEXT: vcmpt.s32 le, q2, q1
; CHECK-NEXT: vpnot
; CHECK-NEXT: vpsel q0, q0, q1
define arm_aapcs_vfpcc <4 x i32> @cmpsgt_v4i1(<4 x i32> %a, <4 x i32> %b, <4 x i32> %c) {
; CHECK-LABEL: cmpsgt_v4i1:
; CHECK: @ %bb.0: @ %entry
-; CHECK-NEXT: vcmp.i32 ne, q0, zr
-; CHECK-NEXT: vpst
+; CHECK-NEXT: vpt.i32 ne, q0, zr
; CHECK-NEXT: vcmpt.s32 le, q1, q2
; CHECK-NEXT: vpnot
; CHECK-NEXT: vpsel q0, q0, q1
define arm_aapcs_vfpcc <4 x i32> @cmpsle_v4i1(<4 x i32> %a, <4 x i32> %b, <4 x i32> %c) {
; CHECK-LABEL: cmpsle_v4i1:
; CHECK: @ %bb.0: @ %entry
-; CHECK-NEXT: vcmp.i32 ne, q0, zr
-; CHECK-NEXT: vpst
+; CHECK-NEXT: vpt.i32 ne, q0, zr
; CHECK-NEXT: vcmpt.s32 lt, q2, q1
; CHECK-NEXT: vpnot
; CHECK-NEXT: vpsel q0, q0, q1
define arm_aapcs_vfpcc <4 x i32> @cmpsge_v4i1(<4 x i32> %a, <4 x i32> %b, <4 x i32> %c) {
; CHECK-LABEL: cmpsge_v4i1:
; CHECK: @ %bb.0: @ %entry
-; CHECK-NEXT: vcmp.i32 ne, q0, zr
-; CHECK-NEXT: vpst
+; CHECK-NEXT: vpt.i32 ne, q0, zr
; CHECK-NEXT: vcmpt.s32 lt, q1, q2
; CHECK-NEXT: vpnot
; CHECK-NEXT: vpsel q0, q0, q1
define arm_aapcs_vfpcc <8 x i16> @cmpeqz_v8i1(<8 x i16> %a, <8 x i16> %b) {
; CHECK-LABEL: cmpeqz_v8i1:
; CHECK: @ %bb.0: @ %entry
-; CHECK-NEXT: vcmp.i16 ne, q0, zr
-; CHECK-NEXT: vpst
+; CHECK-NEXT: vpt.i16 ne, q0, zr
; CHECK-NEXT: vcmpt.i16 ne, q1, zr
; CHECK-NEXT: vpnot
; CHECK-NEXT: vpsel q0, q0, q1
define arm_aapcs_vfpcc <8 x i16> @cmpeq_v8i1(<8 x i16> %a, <8 x i16> %b, <8 x i16> %c) {
; CHECK-LABEL: cmpeq_v8i1:
; CHECK: @ %bb.0: @ %entry
-; CHECK-NEXT: vcmp.i16 ne, q0, zr
-; CHECK-NEXT: vpst
+; CHECK-NEXT: vpt.i16 ne, q0, zr
; CHECK-NEXT: vcmpt.i16 ne, q1, q2
; CHECK-NEXT: vpnot
; CHECK-NEXT: vpsel q0, q0, q1
define arm_aapcs_vfpcc <16 x i8> @cmpeqz_v16i1(<16 x i8> %a, <16 x i8> %b) {
; CHECK-LABEL: cmpeqz_v16i1:
; CHECK: @ %bb.0: @ %entry
-; CHECK-NEXT: vcmp.i8 ne, q0, zr
-; CHECK-NEXT: vpst
+; CHECK-NEXT: vpt.i8 ne, q0, zr
; CHECK-NEXT: vcmpt.i8 ne, q1, zr
; CHECK-NEXT: vpnot
; CHECK-NEXT: vpsel q0, q0, q1
define arm_aapcs_vfpcc <16 x i8> @cmpeq_v16i1(<16 x i8> %a, <16 x i8> %b, <16 x i8> %c) {
; CHECK-LABEL: cmpeq_v16i1:
; CHECK: @ %bb.0: @ %entry
-; CHECK-NEXT: vcmp.i8 ne, q0, zr
-; CHECK-NEXT: vpst
+; CHECK-NEXT: vpt.i8 ne, q0, zr
; CHECK-NEXT: vcmpt.i8 ne, q1, q2
; CHECK-NEXT: vpnot
; CHECK-NEXT: vpsel q0, q0, q1
;
; CHECK-MVEFP-LABEL: vcmp_one_v4f32:
; CHECK-MVEFP: @ %bb.0: @ %entry
-; CHECK-MVEFP-NEXT: vcmp.f32 le, q1, q0
-; CHECK-MVEFP-NEXT: vpst
+; CHECK-MVEFP-NEXT: vpt.f32 le, q1, q0
; CHECK-MVEFP-NEXT: vcmpt.f32 le, q0, q1
; CHECK-MVEFP-NEXT: vpnot
; CHECK-MVEFP-NEXT: vpsel q0, q2, q3
;
; CHECK-MVEFP-LABEL: vcmp_ueq_v4f32:
; CHECK-MVEFP: @ %bb.0: @ %entry
-; CHECK-MVEFP-NEXT: vcmp.f32 le, q1, q0
-; CHECK-MVEFP-NEXT: vpst
+; CHECK-MVEFP-NEXT: vpt.f32 le, q1, q0
; CHECK-MVEFP-NEXT: vcmpt.f32 le, q0, q1
; CHECK-MVEFP-NEXT: vpsel q0, q2, q3
; CHECK-MVEFP-NEXT: bx lr
;
; CHECK-MVEFP-LABEL: vcmp_ord_v4f32:
; CHECK-MVEFP: @ %bb.0: @ %entry
-; CHECK-MVEFP-NEXT: vcmp.f32 le, q1, q0
-; CHECK-MVEFP-NEXT: vpst
+; CHECK-MVEFP-NEXT: vpt.f32 le, q1, q0
; CHECK-MVEFP-NEXT: vcmpt.f32 lt, q0, q1
; CHECK-MVEFP-NEXT: vpnot
; CHECK-MVEFP-NEXT: vpsel q0, q2, q3
;
; CHECK-MVEFP-LABEL: vcmp_uno_v4f32:
; CHECK-MVEFP: @ %bb.0: @ %entry
-; CHECK-MVEFP-NEXT: vcmp.f32 le, q1, q0
-; CHECK-MVEFP-NEXT: vpst
+; CHECK-MVEFP-NEXT: vpt.f32 le, q1, q0
; CHECK-MVEFP-NEXT: vcmpt.f32 lt, q0, q1
; CHECK-MVEFP-NEXT: vpsel q0, q2, q3
; CHECK-MVEFP-NEXT: bx lr
;
; CHECK-MVEFP-LABEL: vcmp_one_v8f16:
; CHECK-MVEFP: @ %bb.0: @ %entry
-; CHECK-MVEFP-NEXT: vcmp.f16 le, q1, q0
-; CHECK-MVEFP-NEXT: vpst
+; CHECK-MVEFP-NEXT: vpt.f16 le, q1, q0
; CHECK-MVEFP-NEXT: vcmpt.f16 le, q0, q1
; CHECK-MVEFP-NEXT: vpnot
; CHECK-MVEFP-NEXT: vpsel q0, q2, q3
;
; CHECK-MVEFP-LABEL: vcmp_ueq_v8f16:
; CHECK-MVEFP: @ %bb.0: @ %entry
-; CHECK-MVEFP-NEXT: vcmp.f16 le, q1, q0
-; CHECK-MVEFP-NEXT: vpst
+; CHECK-MVEFP-NEXT: vpt.f16 le, q1, q0
; CHECK-MVEFP-NEXT: vcmpt.f16 le, q0, q1
; CHECK-MVEFP-NEXT: vpsel q0, q2, q3
; CHECK-MVEFP-NEXT: bx lr
;
; CHECK-MVEFP-LABEL: vcmp_ord_v8f16:
; CHECK-MVEFP: @ %bb.0: @ %entry
-; CHECK-MVEFP-NEXT: vcmp.f16 le, q1, q0
-; CHECK-MVEFP-NEXT: vpst
+; CHECK-MVEFP-NEXT: vpt.f16 le, q1, q0
; CHECK-MVEFP-NEXT: vcmpt.f16 lt, q0, q1
; CHECK-MVEFP-NEXT: vpnot
; CHECK-MVEFP-NEXT: vpsel q0, q2, q3
;
; CHECK-MVEFP-LABEL: vcmp_uno_v8f16:
; CHECK-MVEFP: @ %bb.0: @ %entry
-; CHECK-MVEFP-NEXT: vcmp.f16 le, q1, q0
-; CHECK-MVEFP-NEXT: vpst
+; CHECK-MVEFP-NEXT: vpt.f16 le, q1, q0
; CHECK-MVEFP-NEXT: vcmpt.f16 lt, q0, q1
; CHECK-MVEFP-NEXT: vpsel q0, q2, q3
; CHECK-MVEFP-NEXT: bx lr
; CHECK-MVEFP: @ %bb.0: @ %entry
; CHECK-MVEFP-NEXT: vmov r0, s4
; CHECK-MVEFP-NEXT: vdup.32 q1, r0
-; CHECK-MVEFP-NEXT: vcmp.f32 le, q1, q0
-; CHECK-MVEFP-NEXT: vpst
+; CHECK-MVEFP-NEXT: vpt.f32 le, q1, q0
; CHECK-MVEFP-NEXT: vcmpt.f32 le, q0, r0
; CHECK-MVEFP-NEXT: vpnot
; CHECK-MVEFP-NEXT: vpsel q0, q2, q3
; CHECK-MVEFP: @ %bb.0: @ %entry
; CHECK-MVEFP-NEXT: vmov r0, s4
; CHECK-MVEFP-NEXT: vdup.32 q1, r0
-; CHECK-MVEFP-NEXT: vcmp.f32 le, q1, q0
-; CHECK-MVEFP-NEXT: vpst
+; CHECK-MVEFP-NEXT: vpt.f32 le, q1, q0
; CHECK-MVEFP-NEXT: vcmpt.f32 le, q0, r0
; CHECK-MVEFP-NEXT: vpsel q0, q2, q3
; CHECK-MVEFP-NEXT: bx lr
; CHECK-MVEFP: @ %bb.0: @ %entry
; CHECK-MVEFP-NEXT: vmov r0, s4
; CHECK-MVEFP-NEXT: vdup.32 q1, r0
-; CHECK-MVEFP-NEXT: vcmp.f32 le, q1, q0
-; CHECK-MVEFP-NEXT: vpst
+; CHECK-MVEFP-NEXT: vpt.f32 le, q1, q0
; CHECK-MVEFP-NEXT: vcmpt.f32 lt, q0, r0
; CHECK-MVEFP-NEXT: vpnot
; CHECK-MVEFP-NEXT: vpsel q0, q2, q3
; CHECK-MVEFP: @ %bb.0: @ %entry
; CHECK-MVEFP-NEXT: vmov r0, s4
; CHECK-MVEFP-NEXT: vdup.32 q1, r0
-; CHECK-MVEFP-NEXT: vcmp.f32 le, q1, q0
-; CHECK-MVEFP-NEXT: vpst
+; CHECK-MVEFP-NEXT: vpt.f32 le, q1, q0
; CHECK-MVEFP-NEXT: vcmpt.f32 lt, q0, r0
; CHECK-MVEFP-NEXT: vpsel q0, q2, q3
; CHECK-MVEFP-NEXT: bx lr
; CHECK-MVEFP-NEXT: vldr.16 s12, [r0]
; CHECK-MVEFP-NEXT: vmov r0, s12
; CHECK-MVEFP-NEXT: vdup.16 q3, r0
-; CHECK-MVEFP-NEXT: vcmp.f16 le, q3, q0
-; CHECK-MVEFP-NEXT: vpst
+; CHECK-MVEFP-NEXT: vpt.f16 le, q3, q0
; CHECK-MVEFP-NEXT: vcmpt.f16 le, q0, r0
; CHECK-MVEFP-NEXT: vpnot
; CHECK-MVEFP-NEXT: vpsel q0, q1, q2
; CHECK-MVEFP-NEXT: vldr.16 s12, [r0]
; CHECK-MVEFP-NEXT: vmov r0, s12
; CHECK-MVEFP-NEXT: vdup.16 q3, r0
-; CHECK-MVEFP-NEXT: vcmp.f16 le, q3, q0
-; CHECK-MVEFP-NEXT: vpst
+; CHECK-MVEFP-NEXT: vpt.f16 le, q3, q0
; CHECK-MVEFP-NEXT: vcmpt.f16 le, q0, r0
; CHECK-MVEFP-NEXT: vpsel q0, q1, q2
; CHECK-MVEFP-NEXT: bx lr
; CHECK-MVEFP-NEXT: vldr.16 s12, [r0]
; CHECK-MVEFP-NEXT: vmov r0, s12
; CHECK-MVEFP-NEXT: vdup.16 q3, r0
-; CHECK-MVEFP-NEXT: vcmp.f16 le, q3, q0
-; CHECK-MVEFP-NEXT: vpst
+; CHECK-MVEFP-NEXT: vpt.f16 le, q3, q0
; CHECK-MVEFP-NEXT: vcmpt.f16 lt, q0, r0
; CHECK-MVEFP-NEXT: vpnot
; CHECK-MVEFP-NEXT: vpsel q0, q1, q2
; CHECK-MVEFP-NEXT: vldr.16 s12, [r0]
; CHECK-MVEFP-NEXT: vmov r0, s12
; CHECK-MVEFP-NEXT: vdup.16 q3, r0
-; CHECK-MVEFP-NEXT: vcmp.f16 le, q3, q0
-; CHECK-MVEFP-NEXT: vpst
+; CHECK-MVEFP-NEXT: vpt.f16 le, q3, q0
; CHECK-MVEFP-NEXT: vcmpt.f16 lt, q0, r0
; CHECK-MVEFP-NEXT: vpsel q0, q1, q2
; CHECK-MVEFP-NEXT: bx lr
; CHECK-MVEFP-LABEL: vcmp_one_v4f32:
; CHECK-MVEFP: @ %bb.0: @ %entry
; CHECK-MVEFP-NEXT: vmov.i32 q3, #0x0
-; CHECK-MVEFP-NEXT: vcmp.f32 le, q3, q0
-; CHECK-MVEFP-NEXT: vpst
+; CHECK-MVEFP-NEXT: vpt.f32 le, q3, q0
; CHECK-MVEFP-NEXT: vcmpt.f32 le, q0, q3
; CHECK-MVEFP-NEXT: vpnot
; CHECK-MVEFP-NEXT: vpsel q0, q1, q2
; CHECK-MVEFP-LABEL: vcmp_ueq_v4f32:
; CHECK-MVEFP: @ %bb.0: @ %entry
; CHECK-MVEFP-NEXT: vmov.i32 q3, #0x0
-; CHECK-MVEFP-NEXT: vcmp.f32 le, q3, q0
-; CHECK-MVEFP-NEXT: vpst
+; CHECK-MVEFP-NEXT: vpt.f32 le, q3, q0
; CHECK-MVEFP-NEXT: vcmpt.f32 le, q0, q3
; CHECK-MVEFP-NEXT: vpsel q0, q1, q2
; CHECK-MVEFP-NEXT: bx lr
; CHECK-MVEFP-LABEL: vcmp_ord_v4f32:
; CHECK-MVEFP: @ %bb.0: @ %entry
; CHECK-MVEFP-NEXT: vmov.i32 q3, #0x0
-; CHECK-MVEFP-NEXT: vcmp.f32 le, q3, q0
-; CHECK-MVEFP-NEXT: vpst
+; CHECK-MVEFP-NEXT: vpt.f32 le, q3, q0
; CHECK-MVEFP-NEXT: vcmpt.f32 lt, q0, q3
; CHECK-MVEFP-NEXT: vpnot
; CHECK-MVEFP-NEXT: vpsel q0, q1, q2
; CHECK-MVEFP-LABEL: vcmp_uno_v4f32:
; CHECK-MVEFP: @ %bb.0: @ %entry
; CHECK-MVEFP-NEXT: vmov.i32 q3, #0x0
-; CHECK-MVEFP-NEXT: vcmp.f32 le, q3, q0
-; CHECK-MVEFP-NEXT: vpst
+; CHECK-MVEFP-NEXT: vpt.f32 le, q3, q0
; CHECK-MVEFP-NEXT: vcmpt.f32 lt, q0, q3
; CHECK-MVEFP-NEXT: vpsel q0, q1, q2
; CHECK-MVEFP-NEXT: bx lr
; CHECK-MVEFP-LABEL: vcmp_one_v8f16:
; CHECK-MVEFP: @ %bb.0: @ %entry
; CHECK-MVEFP-NEXT: vmov.i32 q3, #0x0
-; CHECK-MVEFP-NEXT: vcmp.f16 le, q3, q0
-; CHECK-MVEFP-NEXT: vpst
+; CHECK-MVEFP-NEXT: vpt.f16 le, q3, q0
; CHECK-MVEFP-NEXT: vcmpt.f16 le, q0, q3
; CHECK-MVEFP-NEXT: vpnot
; CHECK-MVEFP-NEXT: vpsel q0, q1, q2
; CHECK-MVEFP-LABEL: vcmp_ueq_v8f16:
; CHECK-MVEFP: @ %bb.0: @ %entry
; CHECK-MVEFP-NEXT: vmov.i32 q3, #0x0
-; CHECK-MVEFP-NEXT: vcmp.f16 le, q3, q0
-; CHECK-MVEFP-NEXT: vpst
+; CHECK-MVEFP-NEXT: vpt.f16 le, q3, q0
; CHECK-MVEFP-NEXT: vcmpt.f16 le, q0, q3
; CHECK-MVEFP-NEXT: vpsel q0, q1, q2
; CHECK-MVEFP-NEXT: bx lr
; CHECK-MVEFP-LABEL: vcmp_ord_v8f16:
; CHECK-MVEFP: @ %bb.0: @ %entry
; CHECK-MVEFP-NEXT: vmov.i32 q3, #0x0
-; CHECK-MVEFP-NEXT: vcmp.f16 le, q3, q0
-; CHECK-MVEFP-NEXT: vpst
+; CHECK-MVEFP-NEXT: vpt.f16 le, q3, q0
; CHECK-MVEFP-NEXT: vcmpt.f16 lt, q0, q3
; CHECK-MVEFP-NEXT: vpnot
; CHECK-MVEFP-NEXT: vpsel q0, q1, q2
; CHECK-MVEFP-LABEL: vcmp_uno_v8f16:
; CHECK-MVEFP: @ %bb.0: @ %entry
; CHECK-MVEFP-NEXT: vmov.i32 q3, #0x0
-; CHECK-MVEFP-NEXT: vcmp.f16 le, q3, q0
-; CHECK-MVEFP-NEXT: vpst
+; CHECK-MVEFP-NEXT: vpt.f16 le, q3, q0
; CHECK-MVEFP-NEXT: vcmpt.f16 lt, q0, q3
; CHECK-MVEFP-NEXT: vpsel q0, q1, q2
; CHECK-MVEFP-NEXT: bx lr
; CHECK-LABEL: name: test_vminnmq_m_f32_v2
; CHECK: liveins: $q0, $q1, $q2, $r0
; CHECK: $vpr = VMSR_P0 killed $r0, 14, $noreg
- ; CHECK: BUNDLE implicit-def $p0, implicit-def $q0, implicit-def $d0, implicit-def $s0, implicit-def $s1, implicit-def $d1, implicit-def $s2, implicit-def $s3, implicit killed $q1, implicit killed $q2, implicit killed $vpr, implicit killed $q0 {
- ; CHECK: MVE_VPST 8, implicit-def $p0
- ; CHECK: renamable $q0 = nnan ninf nsz MVE_VMINNMf32 killed renamable $q1, killed renamable $q2, 1, killed renamable $vpr, killed renamable $q0
+ ; CHECK: BUNDLE implicit-def dead $vpr, implicit-def $q0, implicit-def $d0, implicit-def $s0, implicit-def $s1, implicit-def $d1, implicit-def $s2, implicit-def $s3, implicit killed $q1, implicit killed $q2, implicit killed $q0 {
+ ; CHECK: MVE_VPST 8, implicit-def $vpr
+ ; CHECK: renamable $q0 = nnan ninf nsz MVE_VMINNMf32 killed renamable $q1, killed renamable $q2, 1, internal killed renamable $vpr, killed renamable $q0
; CHECK: }
; CHECK: tBX_RET 14, $noreg, implicit $q0
$vpr = VMSR_P0 killed $r0, 14, $noreg
; CHECK-LABEL: name: test_vminnmq_m_f32_v2
; CHECK: liveins: $q0, $q1, $q2, $q3, $r0
; CHECK: $vpr = VMSR_P0 killed $r0, 14, $noreg
- ; CHECK: BUNDLE implicit-def $p0, implicit-def dead $q0, implicit-def $d0, implicit-def $s0, implicit-def $s1, implicit-def $d1, implicit-def $s2, implicit-def $s3, implicit-def $q1, implicit-def $d2, implicit-def $s4, implicit-def $s5, implicit-def $d3, implicit-def $s6, implicit-def $s7, implicit killed $q2, implicit killed $q3, implicit killed $vpr, implicit killed $q0, implicit killed $q1 {
- ; CHECK: MVE_VPST 4, implicit-def $p0
- ; CHECK: renamable $q0 = nnan ninf nsz MVE_VMINNMf32 killed renamable $q2, renamable $q3, 1, renamable $vpr, killed renamable $q0
- ; CHECK: renamable $q1 = nnan ninf nsz MVE_VMINNMf32 internal killed renamable $q0, killed renamable $q3, 1, killed renamable $vpr, killed renamable $q1
+ ; CHECK: BUNDLE implicit-def dead $vpr, implicit-def dead $q0, implicit-def $d0, implicit-def $s0, implicit-def $s1, implicit-def $d1, implicit-def $s2, implicit-def $s3, implicit-def $q1, implicit-def $d2, implicit-def $s4, implicit-def $s5, implicit-def $d3, implicit-def $s6, implicit-def $s7, implicit killed $q2, implicit killed $q3, implicit killed $q0, implicit killed $q1 {
+ ; CHECK: MVE_VPST 4, implicit-def $vpr
+ ; CHECK: renamable $q0 = nnan ninf nsz MVE_VMINNMf32 killed renamable $q2, renamable $q3, 1, internal renamable $vpr, killed renamable $q0
+ ; CHECK: renamable $q1 = nnan ninf nsz MVE_VMINNMf32 internal killed renamable $q0, killed renamable $q3, 1, internal killed renamable $vpr, killed renamable $q1
; CHECK: }
; CHECK: $q0 = MVE_VORR killed $q1, killed $q1, 0, $noreg, undef $q0
; CHECK: tBX_RET 14, $noreg, implicit $q0
; CHECK-LABEL: name: test_vminnmq_m_f32_v2
; CHECK: liveins: $q0, $q1, $q2, $q3, $r0
; CHECK: $vpr = VMSR_P0 killed $r0, 14, $noreg
- ; CHECK: BUNDLE implicit-def $p0, implicit-def dead $q2, implicit-def $d4, implicit-def $s8, implicit-def $s9, implicit-def $d5, implicit-def $s10, implicit-def $s11, implicit-def dead $q0, implicit-def $d0, implicit-def $s0, implicit-def $s1, implicit-def $d1, implicit-def $s2, implicit-def $s3, implicit-def $q1, implicit-def $d2, implicit-def $s4, implicit-def $s5, implicit-def $d3, implicit-def $s6, implicit-def $s7, implicit killed $q2, implicit killed $q3, implicit killed $vpr, implicit killed $q0, implicit killed $q1 {
- ; CHECK: MVE_VPST 1, implicit-def $p0
- ; CHECK: renamable $q2 = nnan ninf nsz MVE_VMINNMf32 killed renamable $q2, renamable $q3, 1, renamable $vpr, undef renamable $q2
- ; CHECK: renamable $q2 = nnan ninf nsz MVE_VMINNMf32 internal killed renamable $q2, internal renamable $q2, 1, renamable $vpr, internal undef renamable $q2
- ; CHECK: renamable $q0 = nnan ninf nsz MVE_VMINNMf32 internal killed renamable $q2, renamable $q3, 1, renamable $vpr, killed renamable $q0
- ; CHECK: renamable $q1 = nnan ninf nsz MVE_VMINNMf32 internal killed renamable $q0, killed renamable $q3, 1, killed renamable $vpr, killed renamable $q1
+ ; CHECK: BUNDLE implicit-def dead $vpr, implicit-def dead $q2, implicit-def $d4, implicit-def $s8, implicit-def $s9, implicit-def $d5, implicit-def $s10, implicit-def $s11, implicit-def dead $q0, implicit-def $d0, implicit-def $s0, implicit-def $s1, implicit-def $d1, implicit-def $s2, implicit-def $s3, implicit-def $q1, implicit-def $d2, implicit-def $s4, implicit-def $s5, implicit-def $d3, implicit-def $s6, implicit-def $s7, implicit killed $q2, implicit killed $q3, implicit killed $q0, implicit killed $q1 {
+ ; CHECK: MVE_VPST 1, implicit-def $vpr
+ ; CHECK: renamable $q2 = nnan ninf nsz MVE_VMINNMf32 killed renamable $q2, renamable $q3, 1, internal renamable $vpr, undef renamable $q2
+ ; CHECK: renamable $q2 = nnan ninf nsz MVE_VMINNMf32 internal killed renamable $q2, internal renamable $q2, 1, internal renamable $vpr, internal undef renamable $q2
+ ; CHECK: renamable $q0 = nnan ninf nsz MVE_VMINNMf32 internal killed renamable $q2, renamable $q3, 1, internal renamable $vpr, killed renamable $q0
+ ; CHECK: renamable $q1 = nnan ninf nsz MVE_VMINNMf32 internal killed renamable $q0, killed renamable $q3, 1, internal killed renamable $vpr, killed renamable $q1
; CHECK: }
; CHECK: $q0 = MVE_VORR killed $q1, killed $q1, 0, $noreg, undef $q0
; CHECK: tBX_RET 14, $noreg, implicit $q0
; CHECK-LABEL: name: test_vminnmq_m_f32_v2
; CHECK: liveins: $q0, $q1, $q2, $q3, $r0
; CHECK: $vpr = VMSR_P0 killed $r0, 14, $noreg
- ; CHECK: BUNDLE implicit-def $p0, implicit-def dead $q2, implicit-def $d4, implicit-def $s8, implicit-def $s9, implicit-def $d5, implicit-def $s10, implicit-def $s11, implicit-def $q0, implicit-def $d0, implicit-def $s0, implicit-def $s1, implicit-def $d1, implicit-def $s2, implicit-def $s3, implicit killed $q2, implicit $q3, implicit $vpr, implicit killed $q0 {
- ; CHECK: MVE_VPST 1, implicit-def $p0
- ; CHECK: renamable $q2 = nnan ninf nsz MVE_VMINNMf32 killed renamable $q2, renamable $q3, 1, renamable $vpr, undef renamable $q2
- ; CHECK: renamable $q2 = nnan ninf nsz MVE_VMINNMf32 internal killed renamable $q2, internal renamable $q2, 1, renamable $vpr, internal undef renamable $q2
- ; CHECK: renamable $q0 = nnan ninf nsz MVE_VMINNMf32 internal killed renamable $q2, renamable $q3, 1, renamable $vpr, killed renamable $q0
- ; CHECK: renamable $q0 = nnan ninf nsz MVE_VMINNMf32 internal killed renamable $q0, renamable $q3, 1, renamable $vpr, internal undef renamable $q0
+ ; CHECK: BUNDLE implicit-def $vpr, implicit-def dead $q2, implicit-def $d4, implicit-def $s8, implicit-def $s9, implicit-def $d5, implicit-def $s10, implicit-def $s11, implicit-def $q0, implicit-def $d0, implicit-def $s0, implicit-def $s1, implicit-def $d1, implicit-def $s2, implicit-def $s3, implicit killed $q2, implicit $q3, implicit killed $q0 {
+ ; CHECK: MVE_VPST 1, implicit-def $vpr
+ ; CHECK: renamable $q2 = nnan ninf nsz MVE_VMINNMf32 killed renamable $q2, renamable $q3, 1, internal renamable $vpr, undef renamable $q2
+ ; CHECK: renamable $q2 = nnan ninf nsz MVE_VMINNMf32 internal killed renamable $q2, internal renamable $q2, 1, internal renamable $vpr, internal undef renamable $q2
+ ; CHECK: renamable $q0 = nnan ninf nsz MVE_VMINNMf32 internal killed renamable $q2, renamable $q3, 1, internal renamable $vpr, killed renamable $q0
+ ; CHECK: renamable $q0 = nnan ninf nsz MVE_VMINNMf32 internal killed renamable $q0, renamable $q3, 1, internal renamable $vpr, internal undef renamable $q0
; CHECK: }
- ; CHECK: BUNDLE implicit-def $p0, implicit-def $q1, implicit-def $d2, implicit-def $s4, implicit-def $s5, implicit-def $d3, implicit-def $s6, implicit-def $s7, implicit killed $q0, implicit killed $q3, implicit killed $vpr, implicit killed $q1 {
- ; CHECK: MVE_VPST 8, implicit-def $p0
- ; CHECK: renamable $q1 = nnan ninf nsz MVE_VMINNMf32 killed renamable $q0, killed renamable $q3, 1, killed renamable $vpr, killed renamable $q1
+ ; CHECK: BUNDLE implicit-def dead $vpr, implicit-def $q1, implicit-def $d2, implicit-def $s4, implicit-def $s5, implicit-def $d3, implicit-def $s6, implicit-def $s7, implicit killed $q0, implicit killed $q3, implicit killed $q1 {
+ ; CHECK: MVE_VPST 8, implicit-def $vpr
+ ; CHECK: renamable $q1 = nnan ninf nsz MVE_VMINNMf32 killed renamable $q0, killed renamable $q3, 1, internal killed renamable $vpr, killed renamable $q1
; CHECK: }
; CHECK: $q0 = MVE_VORR killed $q1, killed $q1, 0, $noreg, undef $q0
; CHECK: tBX_RET 14, $noreg, implicit $q0
; CHECK: liveins: $q0, $q1, $q2, $r0
; CHECK: $vpr = VMSR_P0 killed $r0, 14, $noreg
; CHECK: $q3 = MVE_VORR $q0, $q0, 0, $noreg, undef $q3
- ; CHECK: BUNDLE implicit-def $p0, implicit-def dead $q3, implicit-def $d6, implicit-def $s12, implicit-def $s13, implicit-def $d7, implicit-def $s14, implicit-def $s15, implicit-def $q1, implicit-def $d2, implicit-def $s4, implicit-def $s5, implicit-def $d3, implicit-def $s6, implicit-def $s7, implicit killed $q1, implicit $q2, implicit $vpr, implicit killed $q3 {
- ; CHECK: MVE_VPST 4, implicit-def $p0
- ; CHECK: renamable $q3 = nnan ninf nsz MVE_VMINNMf32 killed renamable $q1, renamable $q2, 1, renamable $vpr, killed renamable $q3
- ; CHECK: renamable $q1 = nnan ninf nsz MVE_VMINNMf32 internal killed renamable $q3, internal renamable $q3, 1, renamable $vpr, undef renamable $q1
+ ; CHECK: BUNDLE implicit-def $vpr, implicit-def dead $q3, implicit-def $d6, implicit-def $s12, implicit-def $s13, implicit-def $d7, implicit-def $s14, implicit-def $s15, implicit-def $q1, implicit-def $d2, implicit-def $s4, implicit-def $s5, implicit-def $d3, implicit-def $s6, implicit-def $s7, implicit killed $q1, implicit $q2, implicit killed $q3 {
+ ; CHECK: MVE_VPST 4, implicit-def $vpr
+ ; CHECK: renamable $q3 = nnan ninf nsz MVE_VMINNMf32 killed renamable $q1, renamable $q2, 1, internal renamable $vpr, killed renamable $q3
+ ; CHECK: renamable $q1 = nnan ninf nsz MVE_VMINNMf32 internal killed renamable $q3, internal renamable $q3, 1, internal renamable $vpr, undef renamable $q1
; CHECK: }
; CHECK: $q3 = MVE_VORR $q0, $q0, 0, $noreg, undef $q3
- ; CHECK: BUNDLE implicit-def $p0, implicit-def dead $q3, implicit-def $d6, implicit-def $s12, implicit-def $s13, implicit-def $d7, implicit-def $s14, implicit-def $s15, implicit-def $q0, implicit-def $d0, implicit-def $s0, implicit-def $s1, implicit-def $d1, implicit-def $s2, implicit-def $s3, implicit killed $q1, implicit killed $q2, implicit killed $vpr, implicit killed $q3, implicit killed $q0 {
- ; CHECK: MVE_VPST 4, implicit-def $p0
- ; CHECK: renamable $q3 = nnan ninf nsz MVE_VMINNMf32 killed renamable $q1, renamable $q2, 1, renamable $vpr, killed renamable $q3
- ; CHECK: renamable $q0 = nnan ninf nsz MVE_VMINNMf32 internal killed renamable $q3, killed renamable $q2, 1, killed renamable $vpr, killed renamable $q0
+ ; CHECK: BUNDLE implicit-def dead $vpr, implicit-def dead $q3, implicit-def $d6, implicit-def $s12, implicit-def $s13, implicit-def $d7, implicit-def $s14, implicit-def $s15, implicit-def $q0, implicit-def $d0, implicit-def $s0, implicit-def $s1, implicit-def $d1, implicit-def $s2, implicit-def $s3, implicit killed $q1, implicit killed $q2, implicit killed $q3, implicit killed $q0 {
+ ; CHECK: MVE_VPST 4, implicit-def $vpr
+ ; CHECK: renamable $q3 = nnan ninf nsz MVE_VMINNMf32 killed renamable $q1, renamable $q2, 1, internal renamable $vpr, killed renamable $q3
+ ; CHECK: renamable $q0 = nnan ninf nsz MVE_VMINNMf32 internal killed renamable $q3, killed renamable $q2, 1, internal killed renamable $vpr, killed renamable $q0
; CHECK: }
; CHECK: tBX_RET 14, $noreg, implicit $q0
$vpr = VMSR_P0 killed $r0, 14, $noreg
; CHECK: liveins: $q0, $q1, $q2, $r0, $r1
; CHECK: $vpr = VMSR_P0 killed $r0, 14, $noreg
; CHECK: $q3 = MVE_VORR $q0, $q0, 0, $noreg, undef $q3
- ; CHECK: BUNDLE implicit-def $p0, implicit-def $q3, implicit-def $d6, implicit-def $s12, implicit-def $s13, implicit-def $d7, implicit-def $s14, implicit-def $s15, implicit killed $q1, implicit $q2, implicit killed $vpr, implicit killed $q3 {
- ; CHECK: MVE_VPST 8, implicit-def $p0
- ; CHECK: renamable $q3 = nnan ninf nsz MVE_VMINNMf32 killed renamable $q1, renamable $q2, 1, killed renamable $vpr, killed renamable $q3
+ ; CHECK: BUNDLE implicit-def dead $vpr, implicit-def $q3, implicit-def $d6, implicit-def $s12, implicit-def $s13, implicit-def $d7, implicit-def $s14, implicit-def $s15, implicit killed $q1, implicit $q2, implicit killed $q3 {
+ ; CHECK: MVE_VPST 8, implicit-def $vpr
+ ; CHECK: renamable $q3 = nnan ninf nsz MVE_VMINNMf32 killed renamable $q1, renamable $q2, 1, internal killed renamable $vpr, killed renamable $q3
; CHECK: }
; CHECK: $vpr = VMSR_P0 killed $r1, 14, $noreg
- ; CHECK: BUNDLE implicit-def $p0, implicit-def $q0, implicit-def $d0, implicit-def $s0, implicit-def $s1, implicit-def $d1, implicit-def $s2, implicit-def $s3, implicit killed $q3, implicit killed $q2, implicit killed $vpr, implicit killed $q0 {
- ; CHECK: MVE_VPST 8, implicit-def $p0
- ; CHECK: renamable $q0 = nnan ninf nsz MVE_VMINNMf32 killed renamable $q3, killed renamable $q2, 1, killed renamable $vpr, killed renamable $q0
+ ; CHECK: BUNDLE implicit-def dead $vpr, implicit-def $q0, implicit-def $d0, implicit-def $s0, implicit-def $s1, implicit-def $d1, implicit-def $s2, implicit-def $s3, implicit killed $q3, implicit killed $q2, implicit killed $q0 {
+ ; CHECK: MVE_VPST 8, implicit-def $vpr
+ ; CHECK: renamable $q0 = nnan ninf nsz MVE_VMINNMf32 killed renamable $q3, killed renamable $q2, 1, internal killed renamable $vpr, killed renamable $q0
; CHECK: }
; CHECK: tBX_RET 14, $noreg, implicit $q0
$vpr = VMSR_P0 killed $r0, 14, $noreg
; CHECK: liveins: $q0, $q1, $q2, $r0
; CHECK: $vpr = VMSR_P0 killed $r0, 14, $noreg
; CHECK: $q3 = MVE_VORR $q0, $q0, 0, $noreg, undef $q3
- ; CHECK: BUNDLE implicit-def $p0, implicit-def dead $q3, implicit-def $d6, implicit-def $s12, implicit-def $s13, implicit-def $d7, implicit-def $s14, implicit-def $s15, implicit-def $q1, implicit-def $d2, implicit-def $s4, implicit-def $s5, implicit-def $d3, implicit-def $s6, implicit-def $s7, implicit killed $q1, implicit $q2, implicit $vpr, implicit killed $q3 {
- ; CHECK: MVE_VPST 4, implicit-def $p0
- ; CHECK: renamable $q3 = nnan ninf nsz MVE_VMINNMf32 killed renamable $q1, renamable $q2, 1, renamable $vpr, killed renamable $q3
- ; CHECK: renamable $q1 = nnan ninf nsz MVE_VMINNMf32 internal killed renamable $q3, internal renamable $q3, 1, renamable $vpr, undef renamable $q1
+ ; CHECK: BUNDLE implicit-def $vpr, implicit-def dead $q3, implicit-def $d6, implicit-def $s12, implicit-def $s13, implicit-def $d7, implicit-def $s14, implicit-def $s15, implicit-def $q1, implicit-def $d2, implicit-def $s4, implicit-def $s5, implicit-def $d3, implicit-def $s6, implicit-def $s7, implicit killed $q1, implicit $q2, implicit killed $q3 {
+ ; CHECK: MVE_VPST 4, implicit-def $vpr
+ ; CHECK: renamable $q3 = nnan ninf nsz MVE_VMINNMf32 killed renamable $q1, renamable $q2, 1, internal renamable $vpr, killed renamable $q3
+ ; CHECK: renamable $q1 = nnan ninf nsz MVE_VMINNMf32 internal killed renamable $q3, internal renamable $q3, 1, internal renamable $vpr, undef renamable $q1
; CHECK: }
; CHECK: bb.1.bb2:
; CHECK: liveins: $q0, $q1, $q2, $q3, $vpr
- ; CHECK: BUNDLE implicit-def $p0, implicit-def dead $q3, implicit-def $d6, implicit-def $s12, implicit-def $s13, implicit-def $d7, implicit-def $s14, implicit-def $s15, implicit-def $q0, implicit-def $d0, implicit-def $s0, implicit-def $s1, implicit-def $d1, implicit-def $s2, implicit-def $s3, implicit killed $q1, implicit killed $q2, implicit killed $vpr, implicit killed $q3, implicit killed $q0 {
- ; CHECK: MVE_VPST 4, implicit-def $p0
- ; CHECK: renamable $q3 = nnan ninf nsz MVE_VMINNMf32 killed renamable $q1, renamable $q2, 1, renamable $vpr, killed renamable $q3
- ; CHECK: renamable $q0 = nnan ninf nsz MVE_VMINNMf32 internal killed renamable $q3, killed renamable $q2, 1, killed renamable $vpr, killed renamable $q0
+ ; CHECK: BUNDLE implicit-def dead $vpr, implicit-def dead $q3, implicit-def $d6, implicit-def $s12, implicit-def $s13, implicit-def $d7, implicit-def $s14, implicit-def $s15, implicit-def $q0, implicit-def $d0, implicit-def $s0, implicit-def $s1, implicit-def $d1, implicit-def $s2, implicit-def $s3, implicit killed $q1, implicit killed $q2, implicit killed $q3, implicit killed $q0 {
+ ; CHECK: MVE_VPST 4, implicit-def $vpr
+ ; CHECK: renamable $q3 = nnan ninf nsz MVE_VMINNMf32 killed renamable $q1, renamable $q2, 1, internal renamable $vpr, killed renamable $q3
+ ; CHECK: renamable $q0 = nnan ninf nsz MVE_VMINNMf32 internal killed renamable $q3, killed renamable $q2, 1, internal killed renamable $vpr, killed renamable $q0
; CHECK: }
; CHECK: tBX_RET 14, $noreg, implicit $q0
bb.0.entry:
; CHECK: liveins: $q0, $q1, $q2, $r0
; CHECK: $vpr = VMSR_P0 killed $r0, 14, $noreg
; CHECK: $q3 = MVE_VORR $q0, $q0, 0, $noreg, undef $q3
- ; CHECK: BUNDLE implicit-def $p0, implicit-def $q3, implicit-def $d6, implicit-def $s12, implicit-def $s13, implicit-def $d7, implicit-def $s14, implicit-def $s15, implicit killed $q1, implicit $q2, implicit $vpr, implicit killed $q3 {
- ; CHECK: MVE_VPST 8, implicit-def $p0
- ; CHECK: renamable $q3 = nnan ninf nsz MVE_VMINNMf32 killed renamable $q1, renamable $q2, 1, renamable $vpr, killed renamable $q3
+ ; CHECK: BUNDLE implicit-def $vpr, implicit-def $q3, implicit-def $d6, implicit-def $s12, implicit-def $s13, implicit-def $d7, implicit-def $s14, implicit-def $s15, implicit killed $q1, implicit $q2, implicit killed $q3 {
+ ; CHECK: MVE_VPST 8, implicit-def $vpr
+ ; CHECK: renamable $q3 = nnan ninf nsz MVE_VMINNMf32 killed renamable $q1, renamable $q2, 1, internal renamable $vpr, killed renamable $q3
; CHECK: }
; CHECK: renamable $vpr = MVE_VPNOT killed renamable $vpr, 0, $noreg
- ; CHECK: BUNDLE implicit-def $p0, implicit-def $q1, implicit-def $d2, implicit-def $s4, implicit-def $s5, implicit-def $d3, implicit-def $s6, implicit-def $s7, implicit killed $q3, implicit $vpr, implicit undef $q1 {
- ; CHECK: MVE_VPST 8, implicit-def $p0
- ; CHECK: renamable $q1 = nnan ninf nsz MVE_VMINNMf32 killed renamable $q3, renamable $q3, 1, renamable $vpr, undef renamable $q1
+ ; CHECK: BUNDLE implicit-def $vpr, implicit-def $q1, implicit-def $d2, implicit-def $s4, implicit-def $s5, implicit-def $d3, implicit-def $s6, implicit-def $s7, implicit killed $q3, implicit undef $q1 {
+ ; CHECK: MVE_VPST 8, implicit-def $vpr
+ ; CHECK: renamable $q1 = nnan ninf nsz MVE_VMINNMf32 killed renamable $q3, renamable $q3, 1, internal renamable $vpr, undef renamable $q1
; CHECK: }
; CHECK: $q3 = MVE_VORR $q0, $q0, 0, $noreg, undef $q3
- ; CHECK: BUNDLE implicit-def $p0, implicit-def dead $q3, implicit-def $d6, implicit-def $s12, implicit-def $s13, implicit-def $d7, implicit-def $s14, implicit-def $s15, implicit-def $q0, implicit-def $d0, implicit-def $s0, implicit-def $s1, implicit-def $d1, implicit-def $s2, implicit-def $s3, implicit killed $q1, implicit killed $q2, implicit killed $vpr, implicit killed $q3, implicit killed $q0 {
- ; CHECK: MVE_VPST 4, implicit-def $p0
- ; CHECK: renamable $q3 = nnan ninf nsz MVE_VMINNMf32 killed renamable $q1, renamable $q2, 1, renamable $vpr, killed renamable $q3
- ; CHECK: renamable $q0 = nnan ninf nsz MVE_VMINNMf32 internal killed renamable $q3, killed renamable $q2, 1, killed renamable $vpr, killed renamable $q0
+ ; CHECK: BUNDLE implicit-def dead $vpr, implicit-def dead $q3, implicit-def $d6, implicit-def $s12, implicit-def $s13, implicit-def $d7, implicit-def $s14, implicit-def $s15, implicit-def $q0, implicit-def $d0, implicit-def $s0, implicit-def $s1, implicit-def $d1, implicit-def $s2, implicit-def $s3, implicit killed $q1, implicit killed $q2, implicit killed $q3, implicit killed $q0 {
+ ; CHECK: MVE_VPST 4, implicit-def $vpr
+ ; CHECK: renamable $q3 = nnan ninf nsz MVE_VMINNMf32 killed renamable $q1, renamable $q2, 1, internal renamable $vpr, killed renamable $q3
+ ; CHECK: renamable $q0 = nnan ninf nsz MVE_VMINNMf32 internal killed renamable $q3, killed renamable $q2, 1, internal killed renamable $vpr, killed renamable $q0
; CHECK: }
; CHECK: tBX_RET 14, $noreg, implicit $q0
$vpr = VMSR_P0 killed $r0, 14, $noreg
; CHECK-LABEL: name: vpnot
; CHECK: liveins: $q0, $q1, $q2
- ; CHECK: renamable $vpr = MVE_VCMPs32r renamable $q0, $zr, 11, 0, $noreg
- ; CHECK: BUNDLE implicit-def $p0, implicit-def $vpr, implicit $q1, implicit $zr, implicit killed $vpr {
- ; CHECK: MVE_VPST 8, implicit-def $p0
- ; CHECK: renamable $vpr = MVE_VCMPs32r renamable $q1, $zr, 12, 1, killed renamable $vpr
+ ; CHECK: BUNDLE implicit-def $vpr, implicit $q0, implicit $zr, implicit $q1 {
+ ; CHECK: MVE_VPTv4s32r 8, renamable $q0, $zr, 11, implicit-def $vpr
+ ; CHECK: renamable $vpr = MVE_VCMPs32r renamable $q1, $zr, 12, 1, internal killed renamable $vpr
; CHECK: }
; CHECK: renamable $vpr = MVE_VPNOT killed renamable $vpr, 0, $noreg
- ; CHECK: BUNDLE implicit-def $p0, implicit-def $vpr, implicit killed $q2, implicit $zr, implicit killed $vpr {
- ; CHECK: MVE_VPST 8, implicit-def $p0
- ; CHECK: renamable $vpr = MVE_VCMPi32r killed renamable $q2, $zr, 0, 1, killed renamable $vpr
+ ; CHECK: BUNDLE implicit-def $vpr, implicit killed $q2, implicit $zr {
+ ; CHECK: MVE_VPST 8, implicit-def $vpr
+ ; CHECK: renamable $vpr = MVE_VCMPi32r killed renamable $q2, $zr, 0, 1, internal killed renamable $vpr
; CHECK: }
; CHECK: renamable $q0 = MVE_VPSEL killed renamable $q0, killed renamable $q1, 0, killed renamable $vpr
; CHECK: tBX_RET 14, $noreg, implicit $q0
; CHECK: bb.0.entry:
; CHECK: successors: %bb.1(0x80000000)
; CHECK: liveins: $q0, $q1, $q2
- ; CHECK: renamable $vpr = MVE_VCMPs32r renamable $q0, $zr, 11, 0, $noreg
- ; CHECK: BUNDLE implicit-def $p0, implicit-def $vpr, implicit $q1, implicit $zr, implicit killed $vpr {
- ; CHECK: MVE_VPST 8, implicit-def $p0
- ; CHECK: renamable $vpr = MVE_VCMPs32r renamable $q1, $zr, 12, 1, killed renamable $vpr
+ ; CHECK: BUNDLE implicit-def $vpr, implicit $q0, implicit $zr, implicit $q1 {
+ ; CHECK: MVE_VPTv4s32r 8, renamable $q0, $zr, 11, implicit-def $vpr
+ ; CHECK: renamable $vpr = MVE_VCMPs32r renamable $q1, $zr, 12, 1, internal killed renamable $vpr
; CHECK: }
; CHECK: renamable $vpr = MVE_VPNOT killed renamable $vpr, 0, $noreg
; CHECK: bb.1.bb2:
; CHECK: liveins: $q0, $q1, $q2, $vpr
- ; CHECK: BUNDLE implicit-def $p0, implicit-def $vpr, implicit killed $q2, implicit $zr, implicit killed $vpr {
- ; CHECK: MVE_VPST 8, implicit-def $p0
- ; CHECK: renamable $vpr = MVE_VCMPi32r killed renamable $q2, $zr, 0, 1, killed renamable $vpr
+ ; CHECK: BUNDLE implicit-def $vpr, implicit killed $q2, implicit $zr {
+ ; CHECK: MVE_VPST 8, implicit-def $vpr
+ ; CHECK: renamable $vpr = MVE_VCMPi32r killed renamable $q2, $zr, 0, 1, internal killed renamable $vpr
; CHECK: }
; CHECK: renamable $q0 = MVE_VPSEL killed renamable $q0, killed renamable $q1, 0, killed renamable $vpr
; CHECK: tBX_RET 14, $noreg, implicit $q0
; CHECK-LABEL: name: vpnot_two
; CHECK: liveins: $q0, $q1, $q2
- ; CHECK: renamable $vpr = MVE_VCMPs32r renamable $q0, $zr, 11, 0, $noreg
- ; CHECK: BUNDLE implicit-def $p0, implicit-def $vpr, implicit $q1, implicit $zr, implicit killed $vpr {
- ; CHECK: MVE_VPST 8, implicit-def $p0
- ; CHECK: renamable $vpr = MVE_VCMPs32r renamable $q1, $zr, 12, 1, killed renamable $vpr
+ ; CHECK: BUNDLE implicit-def $vpr, implicit $q0, implicit $zr, implicit $q1 {
+ ; CHECK: MVE_VPTv4s32r 8, renamable $q0, $zr, 11, implicit-def $vpr
+ ; CHECK: renamable $vpr = MVE_VCMPs32r renamable $q1, $zr, 12, 1, internal killed renamable $vpr
; CHECK: }
; CHECK: renamable $vpr = MVE_VPNOT killed renamable $vpr, 0, $noreg
; CHECK: renamable $vpr = MVE_VPNOT killed renamable $vpr, 0, $noreg
- ; CHECK: BUNDLE implicit-def $p0, implicit-def $vpr, implicit killed $q2, implicit $zr, implicit killed $vpr {
- ; CHECK: MVE_VPST 8, implicit-def $p0
- ; CHECK: renamable $vpr = MVE_VCMPi32r killed renamable $q2, $zr, 0, 1, killed renamable $vpr
+ ; CHECK: BUNDLE implicit-def $vpr, implicit killed $q2, implicit $zr {
+ ; CHECK: MVE_VPST 8, implicit-def $vpr
+ ; CHECK: renamable $vpr = MVE_VCMPi32r killed renamable $q2, $zr, 0, 1, internal killed renamable $vpr
; CHECK: }
; CHECK: renamable $q0 = MVE_VPSEL killed renamable $q0, killed renamable $q1, 0, killed renamable $vpr
; CHECK: tBX_RET 14, $noreg, implicit $q0
; CHECK-LABEL: name: vpnot_lots
; CHECK: liveins: $q0, $q1, $q2
- ; CHECK: renamable $vpr = MVE_VCMPs32r renamable $q0, $zr, 11, 0, $noreg
- ; CHECK: BUNDLE implicit-def $p0, implicit-def $vpr, implicit $q1, implicit $zr, implicit killed $vpr {
- ; CHECK: MVE_VPST 8, implicit-def $p0
- ; CHECK: renamable $vpr = MVE_VCMPs32r renamable $q1, $zr, 12, 1, killed renamable $vpr
+ ; CHECK: BUNDLE implicit-def $vpr, implicit $q0, implicit $zr, implicit $q1 {
+ ; CHECK: MVE_VPTv4s32r 8, renamable $q0, $zr, 11, implicit-def $vpr
+ ; CHECK: renamable $vpr = MVE_VCMPs32r renamable $q1, $zr, 12, 1, internal killed renamable $vpr
; CHECK: }
; CHECK: renamable $vpr = MVE_VPNOT killed renamable $vpr, 0, $noreg
; CHECK: renamable $vpr = MVE_VPNOT killed renamable $vpr, 0, $noreg
; CHECK: renamable $vpr = MVE_VPNOT killed renamable $vpr, 0, $noreg
; CHECK: renamable $vpr = MVE_VPNOT killed renamable $vpr, 0, $noreg
; CHECK: renamable $vpr = MVE_VPNOT killed renamable $vpr, 0, $noreg
- ; CHECK: BUNDLE implicit-def $p0, implicit-def $vpr, implicit killed $q2, implicit $zr, implicit killed $vpr {
- ; CHECK: MVE_VPST 8, implicit-def $p0
- ; CHECK: renamable $vpr = MVE_VCMPi32r killed renamable $q2, $zr, 0, 1, killed renamable $vpr
+ ; CHECK: BUNDLE implicit-def $vpr, implicit killed $q2, implicit $zr {
+ ; CHECK: MVE_VPST 8, implicit-def $vpr
+ ; CHECK: renamable $vpr = MVE_VCMPi32r killed renamable $q2, $zr, 0, 1, internal killed renamable $vpr
; CHECK: }
; CHECK: renamable $q0 = MVE_VPSEL killed renamable $q0, killed renamable $q1, 0, killed renamable $vpr
; CHECK: tBX_RET 14, $noreg, implicit $q0
; CHECK: liveins: $q0, $q1, $q2
; CHECK: renamable $vpr = MVE_VCMPs32r renamable $q0, $zr, 11, 0, $noreg
; CHECK: renamable $vpr = MVE_VPNOT killed renamable $vpr, 0, $noreg
- ; CHECK: BUNDLE implicit-def $p0, implicit-def $vpr, implicit $q1, implicit $zr, implicit killed $vpr, implicit killed $q2 {
- ; CHECK: MVE_VPST 4, implicit-def $p0
- ; CHECK: renamable $vpr = MVE_VCMPs32r renamable $q1, $zr, 12, 1, killed renamable $vpr
+ ; CHECK: BUNDLE implicit-def $vpr, implicit $q1, implicit $zr, implicit killed $q2 {
+ ; CHECK: MVE_VPST 4, implicit-def $vpr
+ ; CHECK: renamable $vpr = MVE_VCMPs32r renamable $q1, $zr, 12, 1, internal killed renamable $vpr
; CHECK: renamable $vpr = MVE_VCMPi32r killed renamable $q2, $zr, 0, 1, internal killed renamable $vpr
; CHECK: }
; CHECK: renamable $q0 = MVE_VPSEL killed renamable $q0, killed renamable $q1, 0, killed renamable $vpr
; CHECK: liveins: $q0, $q1, $q2
; CHECK: renamable $vpr = MVE_VCMPs32r renamable $q0, $zr, 11, 0, $noreg
; CHECK: renamable $vpr = MVE_VPNOT killed renamable $vpr, 0, $noreg
- ; CHECK: BUNDLE implicit-def $p0, implicit-def $vpr, implicit $q1, implicit $zr, implicit killed $vpr {
- ; CHECK: MVE_VPST 8, implicit-def $p0
- ; CHECK: renamable $vpr = MVE_VCMPs32r renamable $q1, $zr, 12, 1, killed renamable $vpr
+ ; CHECK: BUNDLE implicit-def $vpr, implicit $q1, implicit $zr {
+ ; CHECK: MVE_VPST 8, implicit-def $vpr
+ ; CHECK: renamable $vpr = MVE_VCMPs32r renamable $q1, $zr, 12, 1, internal killed renamable $vpr
; CHECK: }
; CHECK: renamable $vpr = MVE_VPNOT killed renamable $vpr, 0, $noreg
- ; CHECK: BUNDLE implicit-def $p0, implicit-def $vpr, implicit killed $q2, implicit $zr, implicit killed $vpr {
- ; CHECK: MVE_VPST 8, implicit-def $p0
- ; CHECK: renamable $vpr = MVE_VCMPi32r killed renamable $q2, $zr, 0, 1, killed renamable $vpr
+ ; CHECK: BUNDLE implicit-def $vpr, implicit killed $q2, implicit $zr {
+ ; CHECK: MVE_VPST 8, implicit-def $vpr
+ ; CHECK: renamable $vpr = MVE_VCMPi32r killed renamable $q2, $zr, 0, 1, internal killed renamable $vpr
; CHECK: }
; CHECK: renamable $vpr = MVE_VPNOT killed renamable $vpr, 0, $noreg
; CHECK: renamable $q0 = MVE_VPSEL killed renamable $q0, killed renamable $q1, 0, killed renamable $vpr