return FoldedVOp;
// fold (add x, 0) -> x, vector edition
- if (ISD::isBuildVectorAllZeros(N1.getNode()))
+ if (ISD::isConstantSplatVectorAllZeros(N1.getNode()))
return N0;
- if (ISD::isBuildVectorAllZeros(N0.getNode()))
+ if (ISD::isConstantSplatVectorAllZeros(N0.getNode()))
return N1;
}
// TODO SimplifyVBinOp
// fold (add_sat x, 0) -> x, vector edition
- if (ISD::isBuildVectorAllZeros(N1.getNode()))
+ if (ISD::isConstantSplatVectorAllZeros(N1.getNode()))
return N0;
- if (ISD::isBuildVectorAllZeros(N0.getNode()))
+ if (ISD::isConstantSplatVectorAllZeros(N0.getNode()))
return N1;
}
return FoldedVOp;
// fold (sub x, 0) -> x, vector edition
- if (ISD::isBuildVectorAllZeros(N1.getNode()))
+ if (ISD::isConstantSplatVectorAllZeros(N1.getNode()))
return N0;
}
// TODO SimplifyVBinOp
// fold (sub_sat x, 0) -> x, vector edition
- if (ISD::isBuildVectorAllZeros(N1.getNode()))
+ if (ISD::isConstantSplatVectorAllZeros(N1.getNode()))
return N0;
}
if (VT.isVector()) {
// fold (mulhs x, 0) -> 0
// do not return N0/N1, because undef node may exist.
- if (ISD::isBuildVectorAllZeros(N0.getNode()) ||
- ISD::isBuildVectorAllZeros(N1.getNode()))
+ if (ISD::isConstantSplatVectorAllZeros(N0.getNode()) ||
+ ISD::isConstantSplatVectorAllZeros(N1.getNode()))
return DAG.getConstant(0, DL, VT);
}
if (VT.isVector()) {
// fold (mulhu x, 0) -> 0
// do not return N0/N1, because undef node may exist.
- if (ISD::isBuildVectorAllZeros(N0.getNode()) ||
- ISD::isBuildVectorAllZeros(N1.getNode()))
+ if (ISD::isConstantSplatVectorAllZeros(N0.getNode()) ||
+ ISD::isConstantSplatVectorAllZeros(N1.getNode()))
return DAG.getConstant(0, DL, VT);
}
return FoldedVOp;
// fold (and x, 0) -> 0, vector edition
- if (ISD::isBuildVectorAllZeros(N0.getNode()))
+ if (ISD::isConstantSplatVectorAllZeros(N0.getNode()))
// do not return N0, because undef node may exist in N0
return DAG.getConstant(APInt::getNullValue(N0.getScalarValueSizeInBits()),
SDLoc(N), N0.getValueType());
- if (ISD::isBuildVectorAllZeros(N1.getNode()))
+ if (ISD::isConstantSplatVectorAllZeros(N1.getNode()))
// do not return N1, because undef node may exist in N1
return DAG.getConstant(APInt::getNullValue(N1.getScalarValueSizeInBits()),
SDLoc(N), N1.getValueType());
return FoldedVOp;
// fold (or x, 0) -> x, vector edition
- if (ISD::isBuildVectorAllZeros(N0.getNode()))
+ if (ISD::isConstantSplatVectorAllZeros(N0.getNode()))
return N1;
- if (ISD::isBuildVectorAllZeros(N1.getNode()))
+ if (ISD::isConstantSplatVectorAllZeros(N1.getNode()))
return N0;
// fold (or x, -1) -> -1, vector edition
return FoldedVOp;
// fold (xor x, 0) -> x, vector edition
- if (ISD::isBuildVectorAllZeros(N0.getNode()))
+ if (ISD::isConstantSplatVectorAllZeros(N0.getNode()))
return N1;
- if (ISD::isBuildVectorAllZeros(N1.getNode()))
+ if (ISD::isConstantSplatVectorAllZeros(N1.getNode()))
return N0;
}
ret <vscale x 16 x i8> %res
}
+define <vscale x 16 x i8> @add_i8_zero(<vscale x 16 x i8> %a) {
+; CHECK-LABEL: add_i8_zero:
+; CHECK: // %bb.0:
+; CHECK-NEXT: ret
+ %res = add <vscale x 16 x i8> %a, zeroinitializer
+ ret <vscale x 16 x i8> %res
+}
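+
+; A possible companion test (a sketch, not taken from this patch): the fold
+; above also matches a zero splat in the first operand, so the commuted form
+; would be expected to lower to a bare ret as well.
+define <vscale x 16 x i8> @add_i8_zero_commuted(<vscale x 16 x i8> %a) {
+; CHECK-LABEL: add_i8_zero_commuted:
+; CHECK: // %bb.0:
+; CHECK-NEXT: ret
+  %res = add <vscale x 16 x i8> zeroinitializer, %a
+  ret <vscale x 16 x i8> %res
+}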
+
define <vscale x 2 x i64> @sub_i64(<vscale x 2 x i64> %a, <vscale x 2 x i64> %b) {
; CHECK-LABEL: sub_i64:
; CHECK: // %bb.0:
ret <vscale x 16 x i8> %res
}
+define <vscale x 16 x i8> @sub_i8_zero(<vscale x 16 x i8> %a) {
+; CHECK-LABEL: sub_i8_zero:
+; CHECK: // %bb.0:
+; CHECK-NEXT: ret
+ %res = sub <vscale x 16 x i8> %a, zeroinitializer
+ ret <vscale x 16 x i8> %res
+}
+
define <vscale x 16 x i8> @abs_nxv16i8(<vscale x 16 x i8> %a) {
; CHECK-LABEL: abs_nxv16i8:
; CHECK: // %bb.0:
ret <vscale x 4 x i32> %res
}
+define <vscale x 4 x i32> @sqadd_i32_zero(<vscale x 4 x i32> %a) {
+; CHECK-LABEL: sqadd_i32_zero:
+; CHECK: // %bb.0:
+; CHECK-NEXT: ret
+ %res = call <vscale x 4 x i32> @llvm.sadd.sat.nxv4i32(<vscale x 4 x i32> %a, <vscale x 4 x i32> zeroinitializer)
+ ret <vscale x 4 x i32> %res
+}
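+
+; A possible unsigned counterpart (a sketch assuming a declare for
+; @llvm.uadd.sat.nxv4i32 already exists in this file): the (add_sat x, 0) -> x
+; fold is expected to apply to the unsigned intrinsic as well, reducing this
+; to a plain ret.
+define <vscale x 4 x i32> @uqadd_i32_zero(<vscale x 4 x i32> %a) {
+; CHECK-LABEL: uqadd_i32_zero:
+; CHECK: // %bb.0:
+; CHECK-NEXT: ret
+  %res = call <vscale x 4 x i32> @llvm.uadd.sat.nxv4i32(<vscale x 4 x i32> %a, <vscale x 4 x i32> zeroinitializer)
+  ret <vscale x 4 x i32> %res
+}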
+
define <vscale x 8 x i16> @sqadd_i16(<vscale x 8 x i16> %a, <vscale x 8 x i16> %b) {
; CHECK-LABEL: sqadd_i16:
; CHECK: // %bb.0:
ret <vscale x 2 x i64> %res
}
+define <vscale x 2 x i64> @sqsub_i64_zero(<vscale x 2 x i64> %a) {
+; CHECK-LABEL: sqsub_i64_zero:
+; CHECK: // %bb.0:
+; CHECK-NEXT: ret
+ %res = call <vscale x 2 x i64> @llvm.ssub.sat.nxv2i64(<vscale x 2 x i64> %a, <vscale x 2 x i64> zeroinitializer)
+ ret <vscale x 2 x i64> %res
+}
+
define <vscale x 4 x i32> @sqsub_i32(<vscale x 4 x i32> %a, <vscale x 4 x i32> %b) {
; CHECK-LABEL: sqsub_i32:
; CHECK: // %bb.0:
ret <vscale x 16 x i8> %res
}
+define <vscale x 16 x i8> @and_b_zero(<vscale x 16 x i8> %a) {
+; CHECK-LABEL: and_b_zero:
+; CHECK: // %bb.0:
+; CHECK-NEXT: mov z0.b, #0 // =0x0
+; CHECK-NEXT: ret
+ %res = and <vscale x 16 x i8> %a, zeroinitializer
+ ret <vscale x 16 x i8> %res
+}
+
define <vscale x 2 x i1> @and_pred_d(<vscale x 2 x i1> %a, <vscale x 2 x i1> %b) {
; CHECK-LABEL: and_pred_d:
; CHECK: // %bb.0:
ret <vscale x 16 x i8> %res
}
+define <vscale x 16 x i8> @or_b_zero(<vscale x 16 x i8> %a) {
+; CHECK-LABEL: or_b_zero:
+; CHECK: // %bb.0:
+; CHECK-NEXT: ret
+ %res = or <vscale x 16 x i8> %a, zeroinitializer
+ ret <vscale x 16 x i8> %res
+}
+
define <vscale x 2 x i1> @or_pred_d(<vscale x 2 x i1> %a, <vscale x 2 x i1> %b) {
; CHECK-LABEL: or_pred_d:
; CHECK: // %bb.0:
ret <vscale x 16 x i8> %res
}
+define <vscale x 16 x i8> @xor_b_zero(<vscale x 16 x i8> %a) {
+; CHECK-LABEL: xor_b_zero:
+; CHECK: // %bb.0:
+; CHECK-NEXT: ret
+ %res = xor <vscale x 16 x i8> %a, zeroinitializer
+ ret <vscale x 16 x i8> %res
+}
+
define <vscale x 2 x i1> @xor_pred_d(<vscale x 2 x i1> %a, <vscale x 2 x i1> %b) {
; CHECK-LABEL: xor_pred_d:
; CHECK: // %bb.0:
; CHECK-NEXT: addi a0, a0, 1755
; CHECK-NEXT: vsetvli a1, zero, e16,mf4,ta,mu
; CHECK-NEXT: vmulh.vx v25, v8, a0
-; CHECK-NEXT: vadd.vi v25, v25, 0
; CHECK-NEXT: vsra.vi v25, v25, 1
; CHECK-NEXT: vsrl.vi v26, v25, 15
; CHECK-NEXT: vand.vi v26, v26, -1
; CHECK-NEXT: addi a0, a0, 1755
; CHECK-NEXT: vsetvli a1, zero, e16,mf2,ta,mu
; CHECK-NEXT: vmulh.vx v25, v8, a0
-; CHECK-NEXT: vadd.vi v25, v25, 0
; CHECK-NEXT: vsra.vi v25, v25, 1
; CHECK-NEXT: vsrl.vi v26, v25, 15
; CHECK-NEXT: vand.vi v26, v26, -1
; CHECK-NEXT: addi a0, a0, 1755
; CHECK-NEXT: vsetvli a1, zero, e16,m1,ta,mu
; CHECK-NEXT: vmulh.vx v25, v8, a0
-; CHECK-NEXT: vadd.vi v25, v25, 0
; CHECK-NEXT: vsra.vi v25, v25, 1
; CHECK-NEXT: vsrl.vi v26, v25, 15
; CHECK-NEXT: vand.vi v26, v26, -1
; CHECK-NEXT: addi a0, a0, 1755
; CHECK-NEXT: vsetvli a1, zero, e16,m2,ta,mu
; CHECK-NEXT: vmulh.vx v26, v8, a0
-; CHECK-NEXT: vadd.vi v26, v26, 0
; CHECK-NEXT: vsra.vi v26, v26, 1
; CHECK-NEXT: vsrl.vi v28, v26, 15
; CHECK-NEXT: vand.vi v28, v28, -1
; CHECK-NEXT: addi a0, a0, 1755
; CHECK-NEXT: vsetvli a1, zero, e16,m4,ta,mu
; CHECK-NEXT: vmulh.vx v28, v8, a0
-; CHECK-NEXT: vadd.vi v28, v28, 0
; CHECK-NEXT: vsra.vi v28, v28, 1
; CHECK-NEXT: vsrl.vi v8, v28, 15
; CHECK-NEXT: vand.vi v8, v8, -1
; CHECK-NEXT: addi a0, a0, 1755
; CHECK-NEXT: vsetvli a1, zero, e16,m8,ta,mu
; CHECK-NEXT: vmulh.vx v8, v8, a0
-; CHECK-NEXT: vadd.vi v8, v8, 0
; CHECK-NEXT: vsra.vi v8, v8, 1
; CHECK-NEXT: vsrl.vi v16, v8, 15
; CHECK-NEXT: vand.vi v16, v16, -1
; CHECK-NEXT: vsrl.vx v26, v26, a0
; CHECK-NEXT: vor.vv v25, v26, v25
; CHECK-NEXT: vmulh.vv v25, v8, v25
-; CHECK-NEXT: vadd.vi v25, v25, 0
; CHECK-NEXT: vsra.vi v26, v25, 1
; CHECK-NEXT: addi a0, zero, 63
; CHECK-NEXT: vsrl.vx v25, v25, a0
; CHECK-NEXT: vsrl.vx v28, v28, a0
; CHECK-NEXT: vor.vv v26, v28, v26
; CHECK-NEXT: vmulh.vv v26, v8, v26
-; CHECK-NEXT: vadd.vi v26, v26, 0
; CHECK-NEXT: vsra.vi v28, v26, 1
; CHECK-NEXT: addi a0, zero, 63
; CHECK-NEXT: vsrl.vx v26, v26, a0
; CHECK-NEXT: vsrl.vx v12, v12, a0
; CHECK-NEXT: vor.vv v28, v12, v28
; CHECK-NEXT: vmulh.vv v28, v8, v28
-; CHECK-NEXT: vadd.vi v28, v28, 0
; CHECK-NEXT: vsra.vi v8, v28, 1
; CHECK-NEXT: addi a0, zero, 63
; CHECK-NEXT: vsrl.vx v28, v28, a0
; CHECK-NEXT: vsrl.vx v24, v24, a0
; CHECK-NEXT: vor.vv v16, v24, v16
; CHECK-NEXT: vmulh.vv v8, v8, v16
-; CHECK-NEXT: vadd.vi v8, v8, 0
; CHECK-NEXT: vsra.vi v16, v8, 1
; CHECK-NEXT: addi a0, zero, 63
; CHECK-NEXT: vsrl.vx v8, v8, a0
; CHECK-NEXT: addiw a0, a0, 1755
; CHECK-NEXT: vsetvli a1, zero, e16,mf4,ta,mu
; CHECK-NEXT: vmulh.vx v25, v8, a0
-; CHECK-NEXT: vadd.vi v25, v25, 0
; CHECK-NEXT: vsra.vi v25, v25, 1
; CHECK-NEXT: vsrl.vi v26, v25, 15
; CHECK-NEXT: vand.vi v26, v26, -1
; CHECK-NEXT: addiw a0, a0, 1755
; CHECK-NEXT: vsetvli a1, zero, e16,mf2,ta,mu
; CHECK-NEXT: vmulh.vx v25, v8, a0
-; CHECK-NEXT: vadd.vi v25, v25, 0
; CHECK-NEXT: vsra.vi v25, v25, 1
; CHECK-NEXT: vsrl.vi v26, v25, 15
; CHECK-NEXT: vand.vi v26, v26, -1
; CHECK-NEXT: addiw a0, a0, 1755
; CHECK-NEXT: vsetvli a1, zero, e16,m1,ta,mu
; CHECK-NEXT: vmulh.vx v25, v8, a0
-; CHECK-NEXT: vadd.vi v25, v25, 0
; CHECK-NEXT: vsra.vi v25, v25, 1
; CHECK-NEXT: vsrl.vi v26, v25, 15
; CHECK-NEXT: vand.vi v26, v26, -1
; CHECK-NEXT: addiw a0, a0, 1755
; CHECK-NEXT: vsetvli a1, zero, e16,m2,ta,mu
; CHECK-NEXT: vmulh.vx v26, v8, a0
-; CHECK-NEXT: vadd.vi v26, v26, 0
; CHECK-NEXT: vsra.vi v26, v26, 1
; CHECK-NEXT: vsrl.vi v28, v26, 15
; CHECK-NEXT: vand.vi v28, v28, -1
; CHECK-NEXT: addiw a0, a0, 1755
; CHECK-NEXT: vsetvli a1, zero, e16,m4,ta,mu
; CHECK-NEXT: vmulh.vx v28, v8, a0
-; CHECK-NEXT: vadd.vi v28, v28, 0
; CHECK-NEXT: vsra.vi v28, v28, 1
; CHECK-NEXT: vsrl.vi v8, v28, 15
; CHECK-NEXT: vand.vi v8, v8, -1
; CHECK-NEXT: addiw a0, a0, 1755
; CHECK-NEXT: vsetvli a1, zero, e16,m8,ta,mu
; CHECK-NEXT: vmulh.vx v8, v8, a0
-; CHECK-NEXT: vadd.vi v8, v8, 0
; CHECK-NEXT: vsra.vi v8, v8, 1
; CHECK-NEXT: vsrl.vi v16, v8, 15
; CHECK-NEXT: vand.vi v16, v16, -1
; CHECK-NEXT: addi a0, a0, 1755
; CHECK-NEXT: vsetvli a1, zero, e64,m1,ta,mu
; CHECK-NEXT: vmulh.vx v25, v8, a0
-; CHECK-NEXT: vadd.vi v25, v25, 0
; CHECK-NEXT: vsra.vi v26, v25, 1
; CHECK-NEXT: addi a0, zero, 63
; CHECK-NEXT: vsrl.vx v25, v25, a0
; CHECK-NEXT: addi a0, a0, 1755
; CHECK-NEXT: vsetvli a1, zero, e64,m2,ta,mu
; CHECK-NEXT: vmulh.vx v26, v8, a0
-; CHECK-NEXT: vadd.vi v26, v26, 0
; CHECK-NEXT: vsra.vi v28, v26, 1
; CHECK-NEXT: addi a0, zero, 63
; CHECK-NEXT: vsrl.vx v26, v26, a0
; CHECK-NEXT: addi a0, a0, 1755
; CHECK-NEXT: vsetvli a1, zero, e64,m4,ta,mu
; CHECK-NEXT: vmulh.vx v28, v8, a0
-; CHECK-NEXT: vadd.vi v28, v28, 0
; CHECK-NEXT: vsra.vi v8, v28, 1
; CHECK-NEXT: addi a0, zero, 63
; CHECK-NEXT: vsrl.vx v28, v28, a0
; CHECK-NEXT: addi a0, a0, 1755
; CHECK-NEXT: vsetvli a1, zero, e64,m8,ta,mu
; CHECK-NEXT: vmulh.vx v8, v8, a0
-; CHECK-NEXT: vadd.vi v8, v8, 0
; CHECK-NEXT: vsra.vi v16, v8, 1
; CHECK-NEXT: addi a0, zero, 63
; CHECK-NEXT: vsrl.vx v8, v8, a0
; CHECK-NEXT: addi a0, a0, 1755
; CHECK-NEXT: vsetvli a1, zero, e16,mf4,ta,mu
; CHECK-NEXT: vmulh.vx v25, v8, a0
-; CHECK-NEXT: vadd.vi v25, v25, 0
; CHECK-NEXT: vsra.vi v25, v25, 1
; CHECK-NEXT: vsrl.vi v26, v25, 15
; CHECK-NEXT: vand.vi v26, v26, -1
; CHECK-NEXT: addi a0, a0, 1755
; CHECK-NEXT: vsetvli a1, zero, e16,mf2,ta,mu
; CHECK-NEXT: vmulh.vx v25, v8, a0
-; CHECK-NEXT: vadd.vi v25, v25, 0
; CHECK-NEXT: vsra.vi v25, v25, 1
; CHECK-NEXT: vsrl.vi v26, v25, 15
; CHECK-NEXT: vand.vi v26, v26, -1
; CHECK-NEXT: addi a0, a0, 1755
; CHECK-NEXT: vsetvli a1, zero, e16,m1,ta,mu
; CHECK-NEXT: vmulh.vx v25, v8, a0
-; CHECK-NEXT: vadd.vi v25, v25, 0
; CHECK-NEXT: vsra.vi v25, v25, 1
; CHECK-NEXT: vsrl.vi v26, v25, 15
; CHECK-NEXT: vand.vi v26, v26, -1
; CHECK-NEXT: addi a0, a0, 1755
; CHECK-NEXT: vsetvli a1, zero, e16,m2,ta,mu
; CHECK-NEXT: vmulh.vx v26, v8, a0
-; CHECK-NEXT: vadd.vi v26, v26, 0
; CHECK-NEXT: vsra.vi v26, v26, 1
; CHECK-NEXT: vsrl.vi v28, v26, 15
; CHECK-NEXT: vand.vi v28, v28, -1
; CHECK-NEXT: addi a0, a0, 1755
; CHECK-NEXT: vsetvli a1, zero, e16,m4,ta,mu
; CHECK-NEXT: vmulh.vx v28, v8, a0
-; CHECK-NEXT: vadd.vi v28, v28, 0
; CHECK-NEXT: vsra.vi v28, v28, 1
; CHECK-NEXT: vsrl.vi v12, v28, 15
; CHECK-NEXT: vand.vi v12, v12, -1
; CHECK-NEXT: addi a0, a0, 1755
; CHECK-NEXT: vsetvli a1, zero, e16,m8,ta,mu
; CHECK-NEXT: vmulh.vx v16, v8, a0
-; CHECK-NEXT: vadd.vi v16, v16, 0
; CHECK-NEXT: vsra.vi v16, v16, 1
; CHECK-NEXT: vsrl.vi v24, v16, 15
; CHECK-NEXT: vand.vi v24, v24, -1
; CHECK-NEXT: vsrl.vx v26, v26, a0
; CHECK-NEXT: vor.vv v25, v26, v25
; CHECK-NEXT: vmulh.vv v25, v8, v25
-; CHECK-NEXT: vadd.vi v25, v25, 0
; CHECK-NEXT: vsra.vi v26, v25, 1
; CHECK-NEXT: addi a0, zero, 63
; CHECK-NEXT: vsrl.vx v25, v25, a0
; CHECK-NEXT: vsrl.vx v28, v28, a0
; CHECK-NEXT: vor.vv v26, v28, v26
; CHECK-NEXT: vmulh.vv v26, v8, v26
-; CHECK-NEXT: vadd.vi v26, v26, 0
; CHECK-NEXT: vsra.vi v28, v26, 1
; CHECK-NEXT: addi a0, zero, 63
; CHECK-NEXT: vsrl.vx v26, v26, a0
; CHECK-NEXT: vsrl.vx v12, v12, a0
; CHECK-NEXT: vor.vv v28, v12, v28
; CHECK-NEXT: vmulh.vv v28, v8, v28
-; CHECK-NEXT: vadd.vi v28, v28, 0
; CHECK-NEXT: vsra.vi v12, v28, 1
; CHECK-NEXT: addi a0, zero, 63
; CHECK-NEXT: vsrl.vx v28, v28, a0
; CHECK-NEXT: vsrl.vx v24, v24, a0
; CHECK-NEXT: vor.vv v16, v24, v16
; CHECK-NEXT: vmulh.vv v16, v8, v16
-; CHECK-NEXT: vadd.vi v16, v16, 0
; CHECK-NEXT: vsra.vi v24, v16, 1
; CHECK-NEXT: addi a0, zero, 63
; CHECK-NEXT: vsrl.vx v16, v16, a0
; CHECK-NEXT: addiw a0, a0, 1755
; CHECK-NEXT: vsetvli a1, zero, e16,mf4,ta,mu
; CHECK-NEXT: vmulh.vx v25, v8, a0
-; CHECK-NEXT: vadd.vi v25, v25, 0
; CHECK-NEXT: vsra.vi v25, v25, 1
; CHECK-NEXT: vsrl.vi v26, v25, 15
; CHECK-NEXT: vand.vi v26, v26, -1
; CHECK-NEXT: addiw a0, a0, 1755
; CHECK-NEXT: vsetvli a1, zero, e16,mf2,ta,mu
; CHECK-NEXT: vmulh.vx v25, v8, a0
-; CHECK-NEXT: vadd.vi v25, v25, 0
; CHECK-NEXT: vsra.vi v25, v25, 1
; CHECK-NEXT: vsrl.vi v26, v25, 15
; CHECK-NEXT: vand.vi v26, v26, -1
; CHECK-NEXT: addiw a0, a0, 1755
; CHECK-NEXT: vsetvli a1, zero, e16,m1,ta,mu
; CHECK-NEXT: vmulh.vx v25, v8, a0
-; CHECK-NEXT: vadd.vi v25, v25, 0
; CHECK-NEXT: vsra.vi v25, v25, 1
; CHECK-NEXT: vsrl.vi v26, v25, 15
; CHECK-NEXT: vand.vi v26, v26, -1
; CHECK-NEXT: addiw a0, a0, 1755
; CHECK-NEXT: vsetvli a1, zero, e16,m2,ta,mu
; CHECK-NEXT: vmulh.vx v26, v8, a0
-; CHECK-NEXT: vadd.vi v26, v26, 0
; CHECK-NEXT: vsra.vi v26, v26, 1
; CHECK-NEXT: vsrl.vi v28, v26, 15
; CHECK-NEXT: vand.vi v28, v28, -1
; CHECK-NEXT: addiw a0, a0, 1755
; CHECK-NEXT: vsetvli a1, zero, e16,m4,ta,mu
; CHECK-NEXT: vmulh.vx v28, v8, a0
-; CHECK-NEXT: vadd.vi v28, v28, 0
; CHECK-NEXT: vsra.vi v28, v28, 1
; CHECK-NEXT: vsrl.vi v12, v28, 15
; CHECK-NEXT: vand.vi v12, v12, -1
; CHECK-NEXT: addiw a0, a0, 1755
; CHECK-NEXT: vsetvli a1, zero, e16,m8,ta,mu
; CHECK-NEXT: vmulh.vx v16, v8, a0
-; CHECK-NEXT: vadd.vi v16, v16, 0
; CHECK-NEXT: vsra.vi v16, v16, 1
; CHECK-NEXT: vsrl.vi v24, v16, 15
; CHECK-NEXT: vand.vi v24, v24, -1
; CHECK-NEXT: addi a0, a0, 1755
; CHECK-NEXT: vsetvli a1, zero, e64,m1,ta,mu
; CHECK-NEXT: vmulh.vx v25, v8, a0
-; CHECK-NEXT: vadd.vi v25, v25, 0
; CHECK-NEXT: vsra.vi v26, v25, 1
; CHECK-NEXT: addi a0, zero, 63
; CHECK-NEXT: vsrl.vx v25, v25, a0
; CHECK-NEXT: addi a0, a0, 1755
; CHECK-NEXT: vsetvli a1, zero, e64,m2,ta,mu
; CHECK-NEXT: vmulh.vx v26, v8, a0
-; CHECK-NEXT: vadd.vi v26, v26, 0
; CHECK-NEXT: vsra.vi v28, v26, 1
; CHECK-NEXT: addi a0, zero, 63
; CHECK-NEXT: vsrl.vx v26, v26, a0
; CHECK-NEXT: addi a0, a0, 1755
; CHECK-NEXT: vsetvli a1, zero, e64,m4,ta,mu
; CHECK-NEXT: vmulh.vx v28, v8, a0
-; CHECK-NEXT: vadd.vi v28, v28, 0
; CHECK-NEXT: vsra.vi v12, v28, 1
; CHECK-NEXT: addi a0, zero, 63
; CHECK-NEXT: vsrl.vx v28, v28, a0
; CHECK-NEXT: addi a0, a0, 1755
; CHECK-NEXT: vsetvli a1, zero, e64,m8,ta,mu
; CHECK-NEXT: vmulh.vx v16, v8, a0
-; CHECK-NEXT: vadd.vi v16, v16, 0
; CHECK-NEXT: vsra.vi v24, v16, 1
; CHECK-NEXT: addi a0, zero, 63
; CHECK-NEXT: vsrl.vx v16, v16, a0