KnownOne |= KnownOne2;
break;
case ISD::XOR: {
- computeKnownBits(Op.getOperand(1), KnownZero, KnownOne, Depth+1);
- computeKnownBits(Op.getOperand(0), KnownZero2, KnownOne2, Depth+1);
+ computeKnownBits(Op.getOperand(1), KnownZero, KnownOne, DemandedElts,
+ Depth + 1);
+ computeKnownBits(Op.getOperand(0), KnownZero2, KnownOne2, DemandedElts,
+ Depth + 1);
// Output known-0 bits are known if clear or set in both the LHS & RHS.
APInt KnownZeroOut = (KnownZero & KnownZero2) | (KnownOne & KnownOne2);
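// For XOR, a result bit is known-0 exactly when the corresponding LHS and
// RHS bits are both known-0 (0^0 = 0) or both known-1 (1^1 = 0), which is
// what the expression above collects; a result bit is known-1 when one side
// is known-0 and the other known-1. For example (values chosen purely for
// illustration): KnownZero = 0b1100, KnownOne = 0b0010, KnownZero2 = 0b0100,
// KnownOne2 = 0b0010 gives KnownZeroOut = 0b0100 | 0b0010 = 0b0110.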
; X32-NEXT: vpand {{\.LCPI.*}}, %xmm0, %xmm0
; X32-NEXT: vpxor {{\.LCPI.*}}, %xmm0, %xmm0
; X32-NEXT: vpshufd {{.*#+}} xmm0 = xmm0[2,2,3,3]
-; X32-NEXT: vpblendw {{.*#+}} xmm1 = xmm0[0],mem[1],xmm0[2],mem[3],xmm0[4],mem[5],xmm0[6],mem[7]
-; X32-NEXT: vpsrld $16, %xmm0, %xmm0
-; X32-NEXT: vpblendw {{.*#+}} xmm0 = xmm0[0],mem[1],xmm0[2],mem[3],xmm0[4],mem[5],xmm0[6],mem[7]
-; X32-NEXT: vaddps {{\.LCPI.*}}, %xmm0, %xmm0
-; X32-NEXT: vaddps %xmm0, %xmm1, %xmm0
+; X32-NEXT: vcvtdq2ps %xmm0, %xmm0
; X32-NEXT: retl
;
; X64-LABEL: knownbits_mask_xor_shuffle_uitofp:
; X64-NEXT: vpand {{.*}}(%rip), %xmm0, %xmm0
; X64-NEXT: vpxor {{.*}}(%rip), %xmm0, %xmm0
; X64-NEXT: vpshufd {{.*#+}} xmm0 = xmm0[2,2,3,3]
-; X64-NEXT: vpblendw {{.*#+}} xmm1 = xmm0[0],mem[1],xmm0[2],mem[3],xmm0[4],mem[5],xmm0[6],mem[7]
-; X64-NEXT: vpsrld $16, %xmm0, %xmm0
-; X64-NEXT: vpblendw {{.*#+}} xmm0 = xmm0[0],mem[1],xmm0[2],mem[3],xmm0[4],mem[5],xmm0[6],mem[7]
-; X64-NEXT: vaddps {{.*}}(%rip), %xmm0, %xmm0
-; X64-NEXT: vaddps %xmm0, %xmm1, %xmm0
+; X64-NEXT: vcvtdq2ps %xmm0, %xmm0
; X64-NEXT: retq
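; Why the codegen collapses: the vpshufd only demands lanes 2 and 3 of the
; and/xor result, and after masking with 255/4085 and xoring with 65535 both
; of those lanes are known to fit in 16 bits, i.e. every demanded element is
; provably non-negative as a signed i32. With per-element known bits the
; uitofp can therefore be lowered as a plain signed conversion (vcvtdq2ps)
; instead of the blend/shift/add expansion needed for general unsigned input.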
%1 = and <4 x i32> %a0, <i32 -1, i32 -1, i32 255, i32 4085>
%2 = xor <4 x i32> %1, <i32 65535, i32 65535, i32 65535, i32 65535>