; AVX512VPOPCNTDQ-NEXT: # kill: %XMM0<def> %XMM0<kill> %ZMM0<kill>
; AVX512VPOPCNTDQ-NEXT: vzeroupper
; AVX512VPOPCNTDQ-NEXT: retq
+;
+; BITALG_NOVLX-LABEL: testv2i64:
+; BITALG_NOVLX: # BB#0:
+; BITALG_NOVLX-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
+; BITALG_NOVLX-NEXT: vpand %xmm1, %xmm0, %xmm2
+; BITALG_NOVLX-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
+; BITALG_NOVLX-NEXT: vpshufb %xmm2, %xmm3, %xmm2
+; BITALG_NOVLX-NEXT: vpsrlw $4, %xmm0, %xmm0
+; BITALG_NOVLX-NEXT: vpand %xmm1, %xmm0, %xmm0
+; BITALG_NOVLX-NEXT: vpshufb %xmm0, %xmm3, %xmm0
+; BITALG_NOVLX-NEXT: vpaddb %xmm2, %xmm0, %xmm0
+; BITALG_NOVLX-NEXT: vpxor %xmm1, %xmm1, %xmm1
+; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
+; BITALG_NOVLX-NEXT: retq
+;
+; BITALG-LABEL: testv2i64:
+; BITALG: # BB#0:
+; BITALG-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
+; BITALG-NEXT: vpand %xmm1, %xmm0, %xmm2
+; BITALG-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
+; BITALG-NEXT: vpshufb %xmm2, %xmm3, %xmm2
+; BITALG-NEXT: vpsrlw $4, %xmm0, %xmm0
+; BITALG-NEXT: vpand %xmm1, %xmm0, %xmm0
+; BITALG-NEXT: vpshufb %xmm0, %xmm3, %xmm0
+; BITALG-NEXT: vpaddb %xmm2, %xmm0, %xmm0
+; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
+; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
+; BITALG-NEXT: retq
%out = call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %in)
ret <2 x i64> %out
}
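
For reference, the BITALG/BITALG_NOVLX sequences above are the usual nibble-LUT popcount: mask each nibble, look both nibbles of every byte up in the 16-entry table [0,1,1,2,...,4] with vpshufb, add the two lookups per byte, and let vpsadbw against zero sum the eight byte counts of each 64-bit lane. A minimal scalar C sketch of the same idea follows; the helper and table names are illustrative only, not part of the test.

#include <stdint.h>

/* Popcount of every 4-bit value; the same constant the vpshufb lookups
   above load into xmm3/ymm3/zmm3. */
static const uint8_t kNibbleLUT[16] = {0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4};

uint64_t popcnt_u64_lut(uint64_t x) {
    uint64_t total = 0;
    for (int i = 0; i < 8; ++i) {          /* one byte per step, like one SIMD byte lane */
        uint8_t b = (uint8_t)(x >> (8 * i));
        total += kNibbleLUT[b & 0x0F];     /* vpand + vpshufb on the low nibble */
        total += kNibbleLUT[b >> 4];       /* vpsrlw $4 + vpand + vpshufb on the high nibble */
    }
    return total;                          /* vpsadbw against zero: per-qword sum of byte counts */
}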
; AVX512VPOPCNTDQ-NEXT: # kill: %XMM0<def> %XMM0<kill> %ZMM0<kill>
; AVX512VPOPCNTDQ-NEXT: vzeroupper
; AVX512VPOPCNTDQ-NEXT: retq
+;
+; BITALG_NOVLX-LABEL: testv4i32:
+; BITALG_NOVLX: # BB#0:
+; BITALG_NOVLX-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
+; BITALG_NOVLX-NEXT: vpand %xmm1, %xmm0, %xmm2
+; BITALG_NOVLX-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
+; BITALG_NOVLX-NEXT: vpshufb %xmm2, %xmm3, %xmm2
+; BITALG_NOVLX-NEXT: vpsrlw $4, %xmm0, %xmm0
+; BITALG_NOVLX-NEXT: vpand %xmm1, %xmm0, %xmm0
+; BITALG_NOVLX-NEXT: vpshufb %xmm0, %xmm3, %xmm0
+; BITALG_NOVLX-NEXT: vpaddb %xmm2, %xmm0, %xmm0
+; BITALG_NOVLX-NEXT: vpxor %xmm1, %xmm1, %xmm1
+; BITALG_NOVLX-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
+; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm2, %xmm2
+; BITALG_NOVLX-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
+; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
+; BITALG_NOVLX-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
+; BITALG_NOVLX-NEXT: retq
+;
+; BITALG-LABEL: testv4i32:
+; BITALG: # BB#0:
+; BITALG-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
+; BITALG-NEXT: vpand %xmm1, %xmm0, %xmm2
+; BITALG-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
+; BITALG-NEXT: vpshufb %xmm2, %xmm3, %xmm2
+; BITALG-NEXT: vpsrlw $4, %xmm0, %xmm0
+; BITALG-NEXT: vpand %xmm1, %xmm0, %xmm0
+; BITALG-NEXT: vpshufb %xmm0, %xmm3, %xmm0
+; BITALG-NEXT: vpaddb %xmm2, %xmm0, %xmm0
+; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
+; BITALG-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
+; BITALG-NEXT: vpsadbw %xmm1, %xmm2, %xmm2
+; BITALG-NEXT: vpunpckldq {{.*#+}} xmm0 = xmm0[0],xmm1[0],xmm0[1],xmm1[1]
+; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
+; BITALG-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
+; BITALG-NEXT: retq
%out = call <4 x i32> @llvm.ctpop.v4i32(<4 x i32> %in)
ret <4 x i32> %out
}
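
The extra vpunpck{l,h}dq / vpmovzxdq and vpackuswb steps in the v4i32 checks exist because vpsadbw only produces one sum per 64-bit lane: each 32-bit element is zero-extended into its own qword, summed there, and the per-dword results are packed back together. A rough scalar equivalent of that widening step, with a hypothetical helper name:

#include <stdint.h>

/* byte_counts[] stands in for the per-byte popcounts left in the vector
   after the LUT step, for a single 32-bit element. */
uint32_t sum_bytes_of_dword(const uint8_t byte_counts[4]) {
    uint32_t sum = 0;
    for (int i = 0; i < 4; ++i)
        sum += byte_counts[i];   /* vpsadbw adds the byte counts inside the widened lane */
    return sum;                  /* vpackuswb re-packs the four per-dword sums */
}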
; AVX: # BB#0:
; AVX-NEXT: vmovaps {{.*#+}} xmm0 = [1,64]
; AVX-NEXT: retq
+;
+; BITALG_NOVLX-LABEL: foldv2i64:
+; BITALG_NOVLX: # BB#0:
+; BITALG_NOVLX-NEXT: vmovaps {{.*#+}} xmm0 = [1,64]
+; BITALG_NOVLX-NEXT: retq
+;
+; BITALG-LABEL: foldv2i64:
+; BITALG: # BB#0:
+; BITALG-NEXT: vmovaps {{.*#+}} xmm0 = [1,64]
+; BITALG-NEXT: retq
%out = call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> <i64 256, i64 -1>)
ret <2 x i64> %out
}
; AVX: # BB#0:
; AVX-NEXT: vmovaps {{.*#+}} xmm0 = [1,32,0,8]
; AVX-NEXT: retq
+;
+; BITALG_NOVLX-LABEL: foldv4i32:
+; BITALG_NOVLX: # BB#0:
+; BITALG_NOVLX-NEXT: vmovaps {{.*#+}} xmm0 = [1,32,0,8]
+; BITALG_NOVLX-NEXT: retq
+;
+; BITALG-LABEL: foldv4i32:
+; BITALG: # BB#0:
+; BITALG-NEXT: vmovaps {{.*#+}} xmm0 = [1,32,0,8]
+; BITALG-NEXT: retq
%out = call <4 x i32> @llvm.ctpop.v4i32(<4 x i32> <i32 256, i32 -1, i32 0, i32 255>)
ret <4 x i32> %out
}
; AVX: # BB#0:
; AVX-NEXT: vmovaps {{.*#+}} xmm0 = [1,16,0,8,0,3,2,3]
; AVX-NEXT: retq
+;
+; BITALG_NOVLX-LABEL: foldv8i16:
+; BITALG_NOVLX: # BB#0:
+; BITALG_NOVLX-NEXT: vmovaps {{.*#+}} xmm0 = [1,16,0,8,0,3,2,3]
+; BITALG_NOVLX-NEXT: retq
+;
+; BITALG-LABEL: foldv8i16:
+; BITALG: # BB#0:
+; BITALG-NEXT: vmovaps {{.*#+}} xmm0 = [1,16,0,8,0,3,2,3]
+; BITALG-NEXT: retq
%out = call <8 x i16> @llvm.ctpop.v8i16(<8 x i16> <i16 256, i16 -1, i16 0, i16 255, i16 -65536, i16 7, i16 24, i16 88>)
ret <8 x i16> %out
}
; AVX: # BB#0:
; AVX-NEXT: vmovaps {{.*#+}} xmm0 = [0,8,0,8,0,3,2,3,7,7,1,1,1,1,1,1]
; AVX-NEXT: retq
+;
+; BITALG_NOVLX-LABEL: foldv16i8:
+; BITALG_NOVLX: # BB#0:
+; BITALG_NOVLX-NEXT: vmovaps {{.*#+}} xmm0 = [0,8,0,8,0,3,2,3,7,7,1,1,1,1,1,1]
+; BITALG_NOVLX-NEXT: retq
+;
+; BITALG-LABEL: foldv16i8:
+; BITALG: # BB#0:
+; BITALG-NEXT: vmovaps {{.*#+}} xmm0 = [0,8,0,8,0,3,2,3,7,7,1,1,1,1,1,1]
+; BITALG-NEXT: retq
%out = call <16 x i8> @llvm.ctpop.v16i8(<16 x i8> <i8 256, i8 -1, i8 0, i8 255, i8 -65536, i8 7, i8 24, i8 88, i8 -2, i8 254, i8 1, i8 2, i8 4, i8 8, i8 16, i8 32>)
ret <16 x i8> %out
}
; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0
; AVX512VPOPCNTDQ-NEXT: # kill: %YMM0<def> %YMM0<kill> %ZMM0<kill>
; AVX512VPOPCNTDQ-NEXT: retq
+;
+; BITALG_NOVLX-LABEL: testv4i64:
+; BITALG_NOVLX: # BB#0:
+; BITALG_NOVLX-NEXT: vmovdqa {{.*#+}} ymm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
+; BITALG_NOVLX-NEXT: vpand %ymm1, %ymm0, %ymm2
+; BITALG_NOVLX-NEXT: vmovdqa {{.*#+}} ymm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4,0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
+; BITALG_NOVLX-NEXT: vpshufb %ymm2, %ymm3, %ymm2
+; BITALG_NOVLX-NEXT: vpsrlw $4, %ymm0, %ymm0
+; BITALG_NOVLX-NEXT: vpand %ymm1, %ymm0, %ymm0
+; BITALG_NOVLX-NEXT: vpshufb %ymm0, %ymm3, %ymm0
+; BITALG_NOVLX-NEXT: vpaddb %ymm2, %ymm0, %ymm0
+; BITALG_NOVLX-NEXT: vpxor %xmm1, %xmm1, %xmm1
+; BITALG_NOVLX-NEXT: vpsadbw %ymm1, %ymm0, %ymm0
+; BITALG_NOVLX-NEXT: retq
+;
+; BITALG-LABEL: testv4i64:
+; BITALG: # BB#0:
+; BITALG-NEXT: vmovdqa {{.*#+}} ymm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
+; BITALG-NEXT: vpand %ymm1, %ymm0, %ymm2
+; BITALG-NEXT: vmovdqa {{.*#+}} ymm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4,0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
+; BITALG-NEXT: vpshufb %ymm2, %ymm3, %ymm2
+; BITALG-NEXT: vpsrlw $4, %ymm0, %ymm0
+; BITALG-NEXT: vpand %ymm1, %ymm0, %ymm0
+; BITALG-NEXT: vpshufb %ymm0, %ymm3, %ymm0
+; BITALG-NEXT: vpaddb %ymm2, %ymm0, %ymm0
+; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
+; BITALG-NEXT: vpsadbw %ymm1, %ymm0, %ymm0
+; BITALG-NEXT: retq
%out = call <4 x i64> @llvm.ctpop.v4i64(<4 x i64> %in)
ret <4 x i64> %out
}
; AVX512VPOPCNTDQ-NEXT: vpopcntd %zmm0, %zmm0
; AVX512VPOPCNTDQ-NEXT: # kill: %YMM0<def> %YMM0<kill> %ZMM0<kill>
; AVX512VPOPCNTDQ-NEXT: retq
+;
+; BITALG_NOVLX-LABEL: testv8i32:
+; BITALG_NOVLX: # BB#0:
+; BITALG_NOVLX-NEXT: vmovdqa {{.*#+}} ymm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
+; BITALG_NOVLX-NEXT: vpand %ymm1, %ymm0, %ymm2
+; BITALG_NOVLX-NEXT: vmovdqa {{.*#+}} ymm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4,0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
+; BITALG_NOVLX-NEXT: vpshufb %ymm2, %ymm3, %ymm2
+; BITALG_NOVLX-NEXT: vpsrlw $4, %ymm0, %ymm0
+; BITALG_NOVLX-NEXT: vpand %ymm1, %ymm0, %ymm0
+; BITALG_NOVLX-NEXT: vpshufb %ymm0, %ymm3, %ymm0
+; BITALG_NOVLX-NEXT: vpaddb %ymm2, %ymm0, %ymm0
+; BITALG_NOVLX-NEXT: vpxor %xmm1, %xmm1, %xmm1
+; BITALG_NOVLX-NEXT: vpunpckhdq {{.*#+}} ymm2 = ymm0[2],ymm1[2],ymm0[3],ymm1[3],ymm0[6],ymm1[6],ymm0[7],ymm1[7]
+; BITALG_NOVLX-NEXT: vpsadbw %ymm1, %ymm2, %ymm2
+; BITALG_NOVLX-NEXT: vpunpckldq {{.*#+}} ymm0 = ymm0[0],ymm1[0],ymm0[1],ymm1[1],ymm0[4],ymm1[4],ymm0[5],ymm1[5]
+; BITALG_NOVLX-NEXT: vpsadbw %ymm1, %ymm0, %ymm0
+; BITALG_NOVLX-NEXT: vpackuswb %ymm2, %ymm0, %ymm0
+; BITALG_NOVLX-NEXT: retq
+;
+; BITALG-LABEL: testv8i32:
+; BITALG: # BB#0:
+; BITALG-NEXT: vmovdqa {{.*#+}} ymm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
+; BITALG-NEXT: vpand %ymm1, %ymm0, %ymm2
+; BITALG-NEXT: vmovdqa {{.*#+}} ymm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4,0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
+; BITALG-NEXT: vpshufb %ymm2, %ymm3, %ymm2
+; BITALG-NEXT: vpsrlw $4, %ymm0, %ymm0
+; BITALG-NEXT: vpand %ymm1, %ymm0, %ymm0
+; BITALG-NEXT: vpshufb %ymm0, %ymm3, %ymm0
+; BITALG-NEXT: vpaddb %ymm2, %ymm0, %ymm0
+; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
+; BITALG-NEXT: vpunpckhdq {{.*#+}} ymm2 = ymm0[2],ymm1[2],ymm0[3],ymm1[3],ymm0[6],ymm1[6],ymm0[7],ymm1[7]
+; BITALG-NEXT: vpsadbw %ymm1, %ymm2, %ymm2
+; BITALG-NEXT: vpunpckldq {{.*#+}} ymm0 = ymm0[0],ymm1[0],ymm0[1],ymm1[1],ymm0[4],ymm1[4],ymm0[5],ymm1[5]
+; BITALG-NEXT: vpsadbw %ymm1, %ymm0, %ymm0
+; BITALG-NEXT: vpackuswb %ymm2, %ymm0, %ymm0
+; BITALG-NEXT: retq
%out = call <8 x i32> @llvm.ctpop.v8i32(<8 x i32> %in)
ret <8 x i32> %out
}
; AVX512VPOPCNTDQ: # BB#0:
; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0
; AVX512VPOPCNTDQ-NEXT: retq
+;
+; BITALG-LABEL: testv8i64:
+; BITALG: # BB#0:
+; BITALG-NEXT: vmovdqa64 {{.*#+}} zmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
+; BITALG-NEXT: vpandq %zmm1, %zmm0, %zmm2
+; BITALG-NEXT: vmovdqa64 {{.*#+}} zmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4,0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4,0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4,0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
+; BITALG-NEXT: vpshufb %zmm2, %zmm3, %zmm2
+; BITALG-NEXT: vpsrlw $4, %zmm0, %zmm0
+; BITALG-NEXT: vpandq %zmm1, %zmm0, %zmm0
+; BITALG-NEXT: vpshufb %zmm0, %zmm3, %zmm0
+; BITALG-NEXT: vpaddb %zmm2, %zmm0, %zmm0
+; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
+; BITALG-NEXT: vpsadbw %zmm1, %zmm0, %zmm0
+; BITALG-NEXT: retq
%out = call <8 x i64> @llvm.ctpop.v8i64(<8 x i64> %in)
ret <8 x i64> %out
}
; AVX512VPOPCNTDQ: # BB#0:
; AVX512VPOPCNTDQ-NEXT: vpopcntd %zmm0, %zmm0
; AVX512VPOPCNTDQ-NEXT: retq
+;
+; BITALG-LABEL: testv16i32:
+; BITALG: # BB#0:
+; BITALG-NEXT: vmovdqa64 {{.*#+}} zmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
+; BITALG-NEXT: vpandq %zmm1, %zmm0, %zmm2
+; BITALG-NEXT: vmovdqa64 {{.*#+}} zmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4,0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4,0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4,0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
+; BITALG-NEXT: vpshufb %zmm2, %zmm3, %zmm2
+; BITALG-NEXT: vpsrlw $4, %zmm0, %zmm0
+; BITALG-NEXT: vpandq %zmm1, %zmm0, %zmm0
+; BITALG-NEXT: vpshufb %zmm0, %zmm3, %zmm0
+; BITALG-NEXT: vpaddb %zmm2, %zmm0, %zmm0
+; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
+; BITALG-NEXT: vpunpckhdq {{.*#+}} zmm2 = zmm0[2],zmm1[2],zmm0[3],zmm1[3],zmm0[6],zmm1[6],zmm0[7],zmm1[7],zmm0[10],zmm1[10],zmm0[11],zmm1[11],zmm0[14],zmm1[14],zmm0[15],zmm1[15]
+; BITALG-NEXT: vpsadbw %zmm1, %zmm2, %zmm2
+; BITALG-NEXT: vpunpckldq {{.*#+}} zmm0 = zmm0[0],zmm1[0],zmm0[1],zmm1[1],zmm0[4],zmm1[4],zmm0[5],zmm1[5],zmm0[8],zmm1[8],zmm0[9],zmm1[9],zmm0[12],zmm1[12],zmm0[13],zmm1[13]
+; BITALG-NEXT: vpsadbw %zmm1, %zmm0, %zmm0
+; BITALG-NEXT: vpackuswb %zmm2, %zmm0, %zmm0
+; BITALG-NEXT: retq
%out = call <16 x i32> @llvm.ctpop.v16i32(<16 x i32> %in)
ret <16 x i32> %out
}
; AVX512VPOPCNTDQ-NEXT: vzeroupper
; AVX512VPOPCNTDQ-NEXT: retq
;
+; BITALG_NOVLX-LABEL: testv2i64:
+; BITALG_NOVLX: # BB#0:
+; BITALG_NOVLX-NEXT: vpxor %xmm1, %xmm1, %xmm1
+; BITALG_NOVLX-NEXT: vpsubq %xmm0, %xmm1, %xmm2
+; BITALG_NOVLX-NEXT: vpand %xmm2, %xmm0, %xmm0
+; BITALG_NOVLX-NEXT: vpcmpeqd %xmm2, %xmm2, %xmm2
+; BITALG_NOVLX-NEXT: vpaddq %xmm2, %xmm0, %xmm0
+; BITALG_NOVLX-NEXT: vmovdqa {{.*#+}} xmm2 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
+; BITALG_NOVLX-NEXT: vpand %xmm2, %xmm0, %xmm3
+; BITALG_NOVLX-NEXT: vmovdqa {{.*#+}} xmm4 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
+; BITALG_NOVLX-NEXT: vpshufb %xmm3, %xmm4, %xmm3
+; BITALG_NOVLX-NEXT: vpsrlw $4, %xmm0, %xmm0
+; BITALG_NOVLX-NEXT: vpand %xmm2, %xmm0, %xmm0
+; BITALG_NOVLX-NEXT: vpshufb %xmm0, %xmm4, %xmm0
+; BITALG_NOVLX-NEXT: vpaddb %xmm3, %xmm0, %xmm0
+; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
+; BITALG_NOVLX-NEXT: retq
+;
+; BITALG-LABEL: testv2i64:
+; BITALG: # BB#0:
+; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
+; BITALG-NEXT: vpsubq %xmm0, %xmm1, %xmm2
+; BITALG-NEXT: vpand %xmm2, %xmm0, %xmm0
+; BITALG-NEXT: vpcmpeqd %xmm2, %xmm2, %xmm2
+; BITALG-NEXT: vpaddq %xmm2, %xmm0, %xmm0
+; BITALG-NEXT: vmovdqa {{.*#+}} xmm2 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
+; BITALG-NEXT: vpand %xmm2, %xmm0, %xmm3
+; BITALG-NEXT: vmovdqa {{.*#+}} xmm4 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
+; BITALG-NEXT: vpshufb %xmm3, %xmm4, %xmm3
+; BITALG-NEXT: vpsrlw $4, %xmm0, %xmm0
+; BITALG-NEXT: vpand %xmm2, %xmm0, %xmm0
+; BITALG-NEXT: vpshufb %xmm0, %xmm4, %xmm0
+; BITALG-NEXT: vpaddb %xmm3, %xmm0, %xmm0
+; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
+; BITALG-NEXT: retq
+;
; X32-SSE-LABEL: testv2i64:
; X32-SSE: # BB#0:
; X32-SSE-NEXT: pxor %xmm1, %xmm1
; AVX512VPOPCNTDQ-NEXT: vzeroupper
; AVX512VPOPCNTDQ-NEXT: retq
;
+; BITALG_NOVLX-LABEL: testv2i64u:
+; BITALG_NOVLX: # BB#0:
+; BITALG_NOVLX-NEXT: vpxor %xmm1, %xmm1, %xmm1
+; BITALG_NOVLX-NEXT: vpsubq %xmm0, %xmm1, %xmm2
+; BITALG_NOVLX-NEXT: vpand %xmm2, %xmm0, %xmm0
+; BITALG_NOVLX-NEXT: vpcmpeqd %xmm2, %xmm2, %xmm2
+; BITALG_NOVLX-NEXT: vpaddq %xmm2, %xmm0, %xmm0
+; BITALG_NOVLX-NEXT: vmovdqa {{.*#+}} xmm2 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
+; BITALG_NOVLX-NEXT: vpand %xmm2, %xmm0, %xmm3
+; BITALG_NOVLX-NEXT: vmovdqa {{.*#+}} xmm4 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
+; BITALG_NOVLX-NEXT: vpshufb %xmm3, %xmm4, %xmm3
+; BITALG_NOVLX-NEXT: vpsrlw $4, %xmm0, %xmm0
+; BITALG_NOVLX-NEXT: vpand %xmm2, %xmm0, %xmm0
+; BITALG_NOVLX-NEXT: vpshufb %xmm0, %xmm4, %xmm0
+; BITALG_NOVLX-NEXT: vpaddb %xmm3, %xmm0, %xmm0
+; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
+; BITALG_NOVLX-NEXT: retq
+;
+; BITALG-LABEL: testv2i64u:
+; BITALG: # BB#0:
+; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
+; BITALG-NEXT: vpsubq %xmm0, %xmm1, %xmm2
+; BITALG-NEXT: vpand %xmm2, %xmm0, %xmm0
+; BITALG-NEXT: vpcmpeqd %xmm2, %xmm2, %xmm2
+; BITALG-NEXT: vpaddq %xmm2, %xmm0, %xmm0
+; BITALG-NEXT: vmovdqa {{.*#+}} xmm2 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
+; BITALG-NEXT: vpand %xmm2, %xmm0, %xmm3
+; BITALG-NEXT: vmovdqa {{.*#+}} xmm4 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
+; BITALG-NEXT: vpshufb %xmm3, %xmm4, %xmm3
+; BITALG-NEXT: vpsrlw $4, %xmm0, %xmm0
+; BITALG-NEXT: vpand %xmm2, %xmm0, %xmm0
+; BITALG-NEXT: vpshufb %xmm0, %xmm4, %xmm0
+; BITALG-NEXT: vpaddb %xmm3, %xmm0, %xmm0
+; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
+; BITALG-NEXT: retq
+;
; X32-SSE-LABEL: testv2i64u:
; X32-SSE: # BB#0:
; X32-SSE-NEXT: pxor %xmm1, %xmm1
; AVX512VPOPCNTDQ-NEXT: vzeroupper
; AVX512VPOPCNTDQ-NEXT: retq
;
+; BITALG_NOVLX-LABEL: testv4i32:
+; BITALG_NOVLX: # BB#0:
+; BITALG_NOVLX-NEXT: vpxor %xmm1, %xmm1, %xmm1
+; BITALG_NOVLX-NEXT: vpsubd %xmm0, %xmm1, %xmm2
+; BITALG_NOVLX-NEXT: vpand %xmm2, %xmm0, %xmm0
+; BITALG_NOVLX-NEXT: vpcmpeqd %xmm2, %xmm2, %xmm2
+; BITALG_NOVLX-NEXT: vpaddd %xmm2, %xmm0, %xmm0
+; BITALG_NOVLX-NEXT: vmovdqa {{.*#+}} xmm2 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
+; BITALG_NOVLX-NEXT: vpand %xmm2, %xmm0, %xmm3
+; BITALG_NOVLX-NEXT: vmovdqa {{.*#+}} xmm4 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
+; BITALG_NOVLX-NEXT: vpshufb %xmm3, %xmm4, %xmm3
+; BITALG_NOVLX-NEXT: vpsrlw $4, %xmm0, %xmm0
+; BITALG_NOVLX-NEXT: vpand %xmm2, %xmm0, %xmm0
+; BITALG_NOVLX-NEXT: vpshufb %xmm0, %xmm4, %xmm0
+; BITALG_NOVLX-NEXT: vpaddb %xmm3, %xmm0, %xmm0
+; BITALG_NOVLX-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
+; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm2, %xmm2
+; BITALG_NOVLX-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
+; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
+; BITALG_NOVLX-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
+; BITALG_NOVLX-NEXT: retq
+;
+; BITALG-LABEL: testv4i32:
+; BITALG: # BB#0:
+; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
+; BITALG-NEXT: vpsubd %xmm0, %xmm1, %xmm2
+; BITALG-NEXT: vpand %xmm2, %xmm0, %xmm0
+; BITALG-NEXT: vpcmpeqd %xmm2, %xmm2, %xmm2
+; BITALG-NEXT: vpaddd %xmm2, %xmm0, %xmm0
+; BITALG-NEXT: vmovdqa {{.*#+}} xmm2 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
+; BITALG-NEXT: vpand %xmm2, %xmm0, %xmm3
+; BITALG-NEXT: vmovdqa {{.*#+}} xmm4 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
+; BITALG-NEXT: vpshufb %xmm3, %xmm4, %xmm3
+; BITALG-NEXT: vpsrlw $4, %xmm0, %xmm0
+; BITALG-NEXT: vpand %xmm2, %xmm0, %xmm0
+; BITALG-NEXT: vpshufb %xmm0, %xmm4, %xmm0
+; BITALG-NEXT: vpaddb %xmm3, %xmm0, %xmm0
+; BITALG-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
+; BITALG-NEXT: vpsadbw %xmm1, %xmm2, %xmm2
+; BITALG-NEXT: vpunpckldq {{.*#+}} xmm0 = xmm0[0],xmm1[0],xmm0[1],xmm1[1]
+; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
+; BITALG-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
+; BITALG-NEXT: retq
+;
; X32-SSE-LABEL: testv4i32:
; X32-SSE: # BB#0:
; X32-SSE-NEXT: pxor %xmm1, %xmm1
; AVX512VPOPCNTDQ-NEXT: vzeroupper
; AVX512VPOPCNTDQ-NEXT: retq
;
+; BITALG_NOVLX-LABEL: testv4i32u:
+; BITALG_NOVLX: # BB#0:
+; BITALG_NOVLX-NEXT: vpxor %xmm1, %xmm1, %xmm1
+; BITALG_NOVLX-NEXT: vpsubd %xmm0, %xmm1, %xmm2
+; BITALG_NOVLX-NEXT: vpand %xmm2, %xmm0, %xmm0
+; BITALG_NOVLX-NEXT: vpcmpeqd %xmm2, %xmm2, %xmm2
+; BITALG_NOVLX-NEXT: vpaddd %xmm2, %xmm0, %xmm0
+; BITALG_NOVLX-NEXT: vmovdqa {{.*#+}} xmm2 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
+; BITALG_NOVLX-NEXT: vpand %xmm2, %xmm0, %xmm3
+; BITALG_NOVLX-NEXT: vmovdqa {{.*#+}} xmm4 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
+; BITALG_NOVLX-NEXT: vpshufb %xmm3, %xmm4, %xmm3
+; BITALG_NOVLX-NEXT: vpsrlw $4, %xmm0, %xmm0
+; BITALG_NOVLX-NEXT: vpand %xmm2, %xmm0, %xmm0
+; BITALG_NOVLX-NEXT: vpshufb %xmm0, %xmm4, %xmm0
+; BITALG_NOVLX-NEXT: vpaddb %xmm3, %xmm0, %xmm0
+; BITALG_NOVLX-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
+; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm2, %xmm2
+; BITALG_NOVLX-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
+; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
+; BITALG_NOVLX-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
+; BITALG_NOVLX-NEXT: retq
+;
+; BITALG-LABEL: testv4i32u:
+; BITALG: # BB#0:
+; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
+; BITALG-NEXT: vpsubd %xmm0, %xmm1, %xmm2
+; BITALG-NEXT: vpand %xmm2, %xmm0, %xmm0
+; BITALG-NEXT: vpcmpeqd %xmm2, %xmm2, %xmm2
+; BITALG-NEXT: vpaddd %xmm2, %xmm0, %xmm0
+; BITALG-NEXT: vmovdqa {{.*#+}} xmm2 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
+; BITALG-NEXT: vpand %xmm2, %xmm0, %xmm3
+; BITALG-NEXT: vmovdqa {{.*#+}} xmm4 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
+; BITALG-NEXT: vpshufb %xmm3, %xmm4, %xmm3
+; BITALG-NEXT: vpsrlw $4, %xmm0, %xmm0
+; BITALG-NEXT: vpand %xmm2, %xmm0, %xmm0
+; BITALG-NEXT: vpshufb %xmm0, %xmm4, %xmm0
+; BITALG-NEXT: vpaddb %xmm3, %xmm0, %xmm0
+; BITALG-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
+; BITALG-NEXT: vpsadbw %xmm1, %xmm2, %xmm2
+; BITALG-NEXT: vpunpckldq {{.*#+}} xmm0 = xmm0[0],xmm1[0],xmm0[1],xmm1[1]
+; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
+; BITALG-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
+; BITALG-NEXT: retq
+;
; X32-SSE-LABEL: testv4i32u:
; X32-SSE: # BB#0:
; X32-SSE-NEXT: pxor %xmm1, %xmm1
; AVX512VPOPCNTDQ-NEXT: vzeroupper
; AVX512VPOPCNTDQ-NEXT: retq
;
+; BITALG_NOVLX-LABEL: testv8i16u:
+; BITALG_NOVLX: # BB#0:
+; BITALG_NOVLX-NEXT: vpxor %xmm1, %xmm1, %xmm1
+; BITALG_NOVLX-NEXT: vpsubw %xmm0, %xmm1, %xmm1
+; BITALG_NOVLX-NEXT: vpand %xmm1, %xmm0, %xmm0
+; BITALG_NOVLX-NEXT: vpcmpeqd %xmm1, %xmm1, %xmm1
+; BITALG_NOVLX-NEXT: vpaddw %xmm1, %xmm0, %xmm0
+; BITALG_NOVLX-NEXT: vpopcntw %zmm0, %zmm0
+; BITALG_NOVLX-NEXT: # kill: %XMM0<def> %XMM0<kill> %ZMM0<kill>
+; BITALG_NOVLX-NEXT: vzeroupper
+; BITALG_NOVLX-NEXT: retq
+;
+; BITALG-LABEL: testv8i16u:
+; BITALG: # BB#0:
+; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
+; BITALG-NEXT: vpsubw %xmm0, %xmm1, %xmm1
+; BITALG-NEXT: vpand %xmm1, %xmm0, %xmm0
+; BITALG-NEXT: vpcmpeqd %xmm1, %xmm1, %xmm1
+; BITALG-NEXT: vpaddw %xmm1, %xmm0, %xmm0
+; BITALG-NEXT: vpopcntw %xmm0, %xmm0
+; BITALG-NEXT: retq
+;
; X32-SSE-LABEL: testv8i16u:
; X32-SSE: # BB#0:
; X32-SSE-NEXT: pxor %xmm1, %xmm1
; AVX512VPOPCNTDQ-NEXT: vzeroupper
; AVX512VPOPCNTDQ-NEXT: retq
;
+; BITALG_NOVLX-LABEL: testv16i8u:
+; BITALG_NOVLX: # BB#0:
+; BITALG_NOVLX-NEXT: vpxor %xmm1, %xmm1, %xmm1
+; BITALG_NOVLX-NEXT: vpsubb %xmm0, %xmm1, %xmm1
+; BITALG_NOVLX-NEXT: vpand %xmm1, %xmm0, %xmm0
+; BITALG_NOVLX-NEXT: vpcmpeqd %xmm1, %xmm1, %xmm1
+; BITALG_NOVLX-NEXT: vpaddb %xmm1, %xmm0, %xmm0
+; BITALG_NOVLX-NEXT: vpopcntb %zmm0, %zmm0
+; BITALG_NOVLX-NEXT: # kill: %XMM0<def> %XMM0<kill> %ZMM0<kill>
+; BITALG_NOVLX-NEXT: vzeroupper
+; BITALG_NOVLX-NEXT: retq
+;
+; BITALG-LABEL: testv16i8u:
+; BITALG: # BB#0:
+; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
+; BITALG-NEXT: vpsubb %xmm0, %xmm1, %xmm1
+; BITALG-NEXT: vpand %xmm1, %xmm0, %xmm0
+; BITALG-NEXT: vpcmpeqd %xmm1, %xmm1, %xmm1
+; BITALG-NEXT: vpaddb %xmm1, %xmm0, %xmm0
+; BITALG-NEXT: vpopcntb %xmm0, %xmm0
+; BITALG-NEXT: retq
+;
; X32-SSE-LABEL: testv16i8u:
; X32-SSE: # BB#0:
; X32-SSE-NEXT: pxor %xmm1, %xmm1
; AVX512VPOPCNTDQ-NEXT: vmovq %rax, %xmm0
; AVX512VPOPCNTDQ-NEXT: retq
;
+; BITALG_NOVLX-LABEL: foldv2i64:
+; BITALG_NOVLX: # BB#0:
+; BITALG_NOVLX-NEXT: movl $8, %eax
+; BITALG_NOVLX-NEXT: vmovq %rax, %xmm0
+; BITALG_NOVLX-NEXT: retq
+;
+; BITALG-LABEL: foldv2i64:
+; BITALG: # BB#0:
+; BITALG-NEXT: movl $8, %eax
+; BITALG-NEXT: vmovq %rax, %xmm0
+; BITALG-NEXT: retq
+;
; X32-SSE-LABEL: foldv2i64:
; X32-SSE: # BB#0:
; X32-SSE-NEXT: movl $8, %eax
; AVX512VPOPCNTDQ-NEXT: vmovq %rax, %xmm0
; AVX512VPOPCNTDQ-NEXT: retq
;
+; BITALG_NOVLX-LABEL: foldv2i64u:
+; BITALG_NOVLX: # BB#0:
+; BITALG_NOVLX-NEXT: movl $8, %eax
+; BITALG_NOVLX-NEXT: vmovq %rax, %xmm0
+; BITALG_NOVLX-NEXT: retq
+;
+; BITALG-LABEL: foldv2i64u:
+; BITALG: # BB#0:
+; BITALG-NEXT: movl $8, %eax
+; BITALG-NEXT: vmovq %rax, %xmm0
+; BITALG-NEXT: retq
+;
; X32-SSE-LABEL: foldv2i64u:
; X32-SSE: # BB#0:
; X32-SSE-NEXT: movl $8, %eax
; AVX512VPOPCNTDQ-NEXT: vmovaps {{.*#+}} xmm0 = [8,0,32,0]
; AVX512VPOPCNTDQ-NEXT: retq
;
+; BITALG_NOVLX-LABEL: foldv4i32:
+; BITALG_NOVLX: # BB#0:
+; BITALG_NOVLX-NEXT: vmovaps {{.*#+}} xmm0 = [8,0,32,0]
+; BITALG_NOVLX-NEXT: retq
+;
+; BITALG-LABEL: foldv4i32:
+; BITALG: # BB#0:
+; BITALG-NEXT: vmovaps {{.*#+}} xmm0 = [8,0,32,0]
+; BITALG-NEXT: retq
+;
; X32-SSE-LABEL: foldv4i32:
; X32-SSE: # BB#0:
; X32-SSE-NEXT: movaps {{.*#+}} xmm0 = [8,0,32,0]
; AVX512VPOPCNTDQ-NEXT: vmovaps {{.*#+}} xmm0 = [8,0,32,0]
; AVX512VPOPCNTDQ-NEXT: retq
;
+; BITALG_NOVLX-LABEL: foldv4i32u:
+; BITALG_NOVLX: # BB#0:
+; BITALG_NOVLX-NEXT: vmovaps {{.*#+}} xmm0 = [8,0,32,0]
+; BITALG_NOVLX-NEXT: retq
+;
+; BITALG-LABEL: foldv4i32u:
+; BITALG: # BB#0:
+; BITALG-NEXT: vmovaps {{.*#+}} xmm0 = [8,0,32,0]
+; BITALG-NEXT: retq
+;
; X32-SSE-LABEL: foldv4i32u:
; X32-SSE: # BB#0:
; X32-SSE-NEXT: movaps {{.*#+}} xmm0 = [8,0,32,0]
; AVX512VPOPCNTDQ-NEXT: vmovaps {{.*#+}} xmm0 = [8,0,16,0,16,0,3,3]
; AVX512VPOPCNTDQ-NEXT: retq
;
+; BITALG_NOVLX-LABEL: foldv8i16:
+; BITALG_NOVLX: # BB#0:
+; BITALG_NOVLX-NEXT: vmovaps {{.*#+}} xmm0 = [8,0,16,0,16,0,3,3]
+; BITALG_NOVLX-NEXT: retq
+;
+; BITALG-LABEL: foldv8i16:
+; BITALG: # BB#0:
+; BITALG-NEXT: vmovaps {{.*#+}} xmm0 = [8,0,16,0,16,0,3,3]
+; BITALG-NEXT: retq
+;
; X32-SSE-LABEL: foldv8i16:
; X32-SSE: # BB#0:
; X32-SSE-NEXT: movaps {{.*#+}} xmm0 = [8,0,16,0,16,0,3,3]
; AVX512VPOPCNTDQ-NEXT: vmovaps {{.*#+}} xmm0 = [8,0,16,0,16,0,3,3]
; AVX512VPOPCNTDQ-NEXT: retq
;
+; BITALG_NOVLX-LABEL: foldv8i16u:
+; BITALG_NOVLX: # BB#0:
+; BITALG_NOVLX-NEXT: vmovaps {{.*#+}} xmm0 = [8,0,16,0,16,0,3,3]
+; BITALG_NOVLX-NEXT: retq
+;
+; BITALG-LABEL: foldv8i16u:
+; BITALG: # BB#0:
+; BITALG-NEXT: vmovaps {{.*#+}} xmm0 = [8,0,16,0,16,0,3,3]
+; BITALG-NEXT: retq
+;
; X32-SSE-LABEL: foldv8i16u:
; X32-SSE: # BB#0:
; X32-SSE-NEXT: movaps {{.*#+}} xmm0 = [8,0,16,0,16,0,3,3]
; AVX512VPOPCNTDQ-NEXT: vmovaps {{.*#+}} xmm0 = [8,0,8,0,8,0,3,3,1,1,0,1,2,3,4,5]
; AVX512VPOPCNTDQ-NEXT: retq
;
+; BITALG_NOVLX-LABEL: foldv16i8:
+; BITALG_NOVLX: # BB#0:
+; BITALG_NOVLX-NEXT: vmovaps {{.*#+}} xmm0 = [8,0,8,0,8,0,3,3,1,1,0,1,2,3,4,5]
+; BITALG_NOVLX-NEXT: retq
+;
+; BITALG-LABEL: foldv16i8:
+; BITALG: # BB#0:
+; BITALG-NEXT: vmovaps {{.*#+}} xmm0 = [8,0,8,0,8,0,3,3,1,1,0,1,2,3,4,5]
+; BITALG-NEXT: retq
+;
; X32-SSE-LABEL: foldv16i8:
; X32-SSE: # BB#0:
; X32-SSE-NEXT: movaps {{.*#+}} xmm0 = [8,0,8,0,8,0,3,3,1,1,0,1,2,3,4,5]
; AVX512VPOPCNTDQ-NEXT: vmovaps {{.*#+}} xmm0 = [8,0,8,0,8,0,3,3,1,1,0,1,2,3,4,5]
; AVX512VPOPCNTDQ-NEXT: retq
;
+; BITALG_NOVLX-LABEL: foldv16i8u:
+; BITALG_NOVLX: # BB#0:
+; BITALG_NOVLX-NEXT: vmovaps {{.*#+}} xmm0 = [8,0,8,0,8,0,3,3,1,1,0,1,2,3,4,5]
+; BITALG_NOVLX-NEXT: retq
+;
+; BITALG-LABEL: foldv16i8u:
+; BITALG: # BB#0:
+; BITALG-NEXT: vmovaps {{.*#+}} xmm0 = [8,0,8,0,8,0,3,3,1,1,0,1,2,3,4,5]
+; BITALG-NEXT: retq
+;
; X32-SSE-LABEL: foldv16i8u:
; X32-SSE: # BB#0:
; X32-SSE-NEXT: movaps {{.*#+}} xmm0 = [8,0,8,0,8,0,3,3,1,1,0,1,2,3,4,5]
; AVX512VPOPCNTDQ-NEXT: # kill: %YMM0<def> %YMM0<kill> %ZMM0<kill>
; AVX512VPOPCNTDQ-NEXT: retq
;
+; BITALG_NOVLX-LABEL: testv4i64:
+; BITALG_NOVLX: # BB#0:
+; BITALG_NOVLX-NEXT: vpxor %xmm1, %xmm1, %xmm1
+; BITALG_NOVLX-NEXT: vpsubq %ymm0, %ymm1, %ymm2
+; BITALG_NOVLX-NEXT: vpand %ymm2, %ymm0, %ymm0
+; BITALG_NOVLX-NEXT: vpcmpeqd %ymm2, %ymm2, %ymm2
+; BITALG_NOVLX-NEXT: vpaddq %ymm2, %ymm0, %ymm0
+; BITALG_NOVLX-NEXT: vmovdqa {{.*#+}} ymm2 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
+; BITALG_NOVLX-NEXT: vpand %ymm2, %ymm0, %ymm3
+; BITALG_NOVLX-NEXT: vmovdqa {{.*#+}} ymm4 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4,0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
+; BITALG_NOVLX-NEXT: vpshufb %ymm3, %ymm4, %ymm3
+; BITALG_NOVLX-NEXT: vpsrlw $4, %ymm0, %ymm0
+; BITALG_NOVLX-NEXT: vpand %ymm2, %ymm0, %ymm0
+; BITALG_NOVLX-NEXT: vpshufb %ymm0, %ymm4, %ymm0
+; BITALG_NOVLX-NEXT: vpaddb %ymm3, %ymm0, %ymm0
+; BITALG_NOVLX-NEXT: vpsadbw %ymm1, %ymm0, %ymm0
+; BITALG_NOVLX-NEXT: retq
+;
+; BITALG-LABEL: testv4i64:
+; BITALG: # BB#0:
+; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
+; BITALG-NEXT: vpsubq %ymm0, %ymm1, %ymm2
+; BITALG-NEXT: vpand %ymm2, %ymm0, %ymm0
+; BITALG-NEXT: vpcmpeqd %ymm2, %ymm2, %ymm2
+; BITALG-NEXT: vpaddq %ymm2, %ymm0, %ymm0
+; BITALG-NEXT: vmovdqa {{.*#+}} ymm2 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
+; BITALG-NEXT: vpand %ymm2, %ymm0, %ymm3
+; BITALG-NEXT: vmovdqa {{.*#+}} ymm4 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4,0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
+; BITALG-NEXT: vpshufb %ymm3, %ymm4, %ymm3
+; BITALG-NEXT: vpsrlw $4, %ymm0, %ymm0
+; BITALG-NEXT: vpand %ymm2, %ymm0, %ymm0
+; BITALG-NEXT: vpshufb %ymm0, %ymm4, %ymm0
+; BITALG-NEXT: vpaddb %ymm3, %ymm0, %ymm0
+; BITALG-NEXT: vpsadbw %ymm1, %ymm0, %ymm0
+; BITALG-NEXT: retq
+;
; X32-AVX-LABEL: testv4i64:
; X32-AVX: # BB#0:
; X32-AVX-NEXT: vpxor %xmm1, %xmm1, %xmm1
; AVX512VPOPCNTDQ-NEXT: # kill: %YMM0<def> %YMM0<kill> %ZMM0<kill>
; AVX512VPOPCNTDQ-NEXT: retq
;
+; BITALG_NOVLX-LABEL: testv4i64u:
+; BITALG_NOVLX: # BB#0:
+; BITALG_NOVLX-NEXT: vpxor %xmm1, %xmm1, %xmm1
+; BITALG_NOVLX-NEXT: vpsubq %ymm0, %ymm1, %ymm2
+; BITALG_NOVLX-NEXT: vpand %ymm2, %ymm0, %ymm0
+; BITALG_NOVLX-NEXT: vpcmpeqd %ymm2, %ymm2, %ymm2
+; BITALG_NOVLX-NEXT: vpaddq %ymm2, %ymm0, %ymm0
+; BITALG_NOVLX-NEXT: vmovdqa {{.*#+}} ymm2 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
+; BITALG_NOVLX-NEXT: vpand %ymm2, %ymm0, %ymm3
+; BITALG_NOVLX-NEXT: vmovdqa {{.*#+}} ymm4 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4,0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
+; BITALG_NOVLX-NEXT: vpshufb %ymm3, %ymm4, %ymm3
+; BITALG_NOVLX-NEXT: vpsrlw $4, %ymm0, %ymm0
+; BITALG_NOVLX-NEXT: vpand %ymm2, %ymm0, %ymm0
+; BITALG_NOVLX-NEXT: vpshufb %ymm0, %ymm4, %ymm0
+; BITALG_NOVLX-NEXT: vpaddb %ymm3, %ymm0, %ymm0
+; BITALG_NOVLX-NEXT: vpsadbw %ymm1, %ymm0, %ymm0
+; BITALG_NOVLX-NEXT: retq
+;
+; BITALG-LABEL: testv4i64u:
+; BITALG: # BB#0:
+; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
+; BITALG-NEXT: vpsubq %ymm0, %ymm1, %ymm2
+; BITALG-NEXT: vpand %ymm2, %ymm0, %ymm0
+; BITALG-NEXT: vpcmpeqd %ymm2, %ymm2, %ymm2
+; BITALG-NEXT: vpaddq %ymm2, %ymm0, %ymm0
+; BITALG-NEXT: vmovdqa {{.*#+}} ymm2 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
+; BITALG-NEXT: vpand %ymm2, %ymm0, %ymm3
+; BITALG-NEXT: vmovdqa {{.*#+}} ymm4 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4,0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
+; BITALG-NEXT: vpshufb %ymm3, %ymm4, %ymm3
+; BITALG-NEXT: vpsrlw $4, %ymm0, %ymm0
+; BITALG-NEXT: vpand %ymm2, %ymm0, %ymm0
+; BITALG-NEXT: vpshufb %ymm0, %ymm4, %ymm0
+; BITALG-NEXT: vpaddb %ymm3, %ymm0, %ymm0
+; BITALG-NEXT: vpsadbw %ymm1, %ymm0, %ymm0
+; BITALG-NEXT: retq
+;
; X32-AVX-LABEL: testv4i64u:
; X32-AVX: # BB#0:
; X32-AVX-NEXT: vpxor %xmm1, %xmm1, %xmm1
; AVX512VPOPCNTDQ-NEXT: # kill: %YMM0<def> %YMM0<kill> %ZMM0<kill>
; AVX512VPOPCNTDQ-NEXT: retq
;
+; BITALG_NOVLX-LABEL: testv8i32:
+; BITALG_NOVLX: # BB#0:
+; BITALG_NOVLX-NEXT: vpxor %xmm1, %xmm1, %xmm1
+; BITALG_NOVLX-NEXT: vpsubd %ymm0, %ymm1, %ymm2
+; BITALG_NOVLX-NEXT: vpand %ymm2, %ymm0, %ymm0
+; BITALG_NOVLX-NEXT: vpcmpeqd %ymm2, %ymm2, %ymm2
+; BITALG_NOVLX-NEXT: vpaddd %ymm2, %ymm0, %ymm0
+; BITALG_NOVLX-NEXT: vmovdqa {{.*#+}} ymm2 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
+; BITALG_NOVLX-NEXT: vpand %ymm2, %ymm0, %ymm3
+; BITALG_NOVLX-NEXT: vmovdqa {{.*#+}} ymm4 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4,0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
+; BITALG_NOVLX-NEXT: vpshufb %ymm3, %ymm4, %ymm3
+; BITALG_NOVLX-NEXT: vpsrlw $4, %ymm0, %ymm0
+; BITALG_NOVLX-NEXT: vpand %ymm2, %ymm0, %ymm0
+; BITALG_NOVLX-NEXT: vpshufb %ymm0, %ymm4, %ymm0
+; BITALG_NOVLX-NEXT: vpaddb %ymm3, %ymm0, %ymm0
+; BITALG_NOVLX-NEXT: vpunpckhdq {{.*#+}} ymm2 = ymm0[2],ymm1[2],ymm0[3],ymm1[3],ymm0[6],ymm1[6],ymm0[7],ymm1[7]
+; BITALG_NOVLX-NEXT: vpsadbw %ymm1, %ymm2, %ymm2
+; BITALG_NOVLX-NEXT: vpunpckldq {{.*#+}} ymm0 = ymm0[0],ymm1[0],ymm0[1],ymm1[1],ymm0[4],ymm1[4],ymm0[5],ymm1[5]
+; BITALG_NOVLX-NEXT: vpsadbw %ymm1, %ymm0, %ymm0
+; BITALG_NOVLX-NEXT: vpackuswb %ymm2, %ymm0, %ymm0
+; BITALG_NOVLX-NEXT: retq
+;
+; BITALG-LABEL: testv8i32:
+; BITALG: # BB#0:
+; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
+; BITALG-NEXT: vpsubd %ymm0, %ymm1, %ymm2
+; BITALG-NEXT: vpand %ymm2, %ymm0, %ymm0
+; BITALG-NEXT: vpcmpeqd %ymm2, %ymm2, %ymm2
+; BITALG-NEXT: vpaddd %ymm2, %ymm0, %ymm0
+; BITALG-NEXT: vmovdqa {{.*#+}} ymm2 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
+; BITALG-NEXT: vpand %ymm2, %ymm0, %ymm3
+; BITALG-NEXT: vmovdqa {{.*#+}} ymm4 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4,0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
+; BITALG-NEXT: vpshufb %ymm3, %ymm4, %ymm3
+; BITALG-NEXT: vpsrlw $4, %ymm0, %ymm0
+; BITALG-NEXT: vpand %ymm2, %ymm0, %ymm0
+; BITALG-NEXT: vpshufb %ymm0, %ymm4, %ymm0
+; BITALG-NEXT: vpaddb %ymm3, %ymm0, %ymm0
+; BITALG-NEXT: vpunpckhdq {{.*#+}} ymm2 = ymm0[2],ymm1[2],ymm0[3],ymm1[3],ymm0[6],ymm1[6],ymm0[7],ymm1[7]
+; BITALG-NEXT: vpsadbw %ymm1, %ymm2, %ymm2
+; BITALG-NEXT: vpunpckldq {{.*#+}} ymm0 = ymm0[0],ymm1[0],ymm0[1],ymm1[1],ymm0[4],ymm1[4],ymm0[5],ymm1[5]
+; BITALG-NEXT: vpsadbw %ymm1, %ymm0, %ymm0
+; BITALG-NEXT: vpackuswb %ymm2, %ymm0, %ymm0
+; BITALG-NEXT: retq
+;
; X32-AVX-LABEL: testv8i32:
; X32-AVX: # BB#0:
; X32-AVX-NEXT: vpxor %xmm1, %xmm1, %xmm1
; AVX512VPOPCNTDQ-NEXT: # kill: %YMM0<def> %YMM0<kill> %ZMM0<kill>
; AVX512VPOPCNTDQ-NEXT: retq
;
+; BITALG_NOVLX-LABEL: testv8i32u:
+; BITALG_NOVLX: # BB#0:
+; BITALG_NOVLX-NEXT: vpxor %xmm1, %xmm1, %xmm1
+; BITALG_NOVLX-NEXT: vpsubd %ymm0, %ymm1, %ymm2
+; BITALG_NOVLX-NEXT: vpand %ymm2, %ymm0, %ymm0
+; BITALG_NOVLX-NEXT: vpcmpeqd %ymm2, %ymm2, %ymm2
+; BITALG_NOVLX-NEXT: vpaddd %ymm2, %ymm0, %ymm0
+; BITALG_NOVLX-NEXT: vmovdqa {{.*#+}} ymm2 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
+; BITALG_NOVLX-NEXT: vpand %ymm2, %ymm0, %ymm3
+; BITALG_NOVLX-NEXT: vmovdqa {{.*#+}} ymm4 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4,0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
+; BITALG_NOVLX-NEXT: vpshufb %ymm3, %ymm4, %ymm3
+; BITALG_NOVLX-NEXT: vpsrlw $4, %ymm0, %ymm0
+; BITALG_NOVLX-NEXT: vpand %ymm2, %ymm0, %ymm0
+; BITALG_NOVLX-NEXT: vpshufb %ymm0, %ymm4, %ymm0
+; BITALG_NOVLX-NEXT: vpaddb %ymm3, %ymm0, %ymm0
+; BITALG_NOVLX-NEXT: vpunpckhdq {{.*#+}} ymm2 = ymm0[2],ymm1[2],ymm0[3],ymm1[3],ymm0[6],ymm1[6],ymm0[7],ymm1[7]
+; BITALG_NOVLX-NEXT: vpsadbw %ymm1, %ymm2, %ymm2
+; BITALG_NOVLX-NEXT: vpunpckldq {{.*#+}} ymm0 = ymm0[0],ymm1[0],ymm0[1],ymm1[1],ymm0[4],ymm1[4],ymm0[5],ymm1[5]
+; BITALG_NOVLX-NEXT: vpsadbw %ymm1, %ymm0, %ymm0
+; BITALG_NOVLX-NEXT: vpackuswb %ymm2, %ymm0, %ymm0
+; BITALG_NOVLX-NEXT: retq
+;
+; BITALG-LABEL: testv8i32u:
+; BITALG: # BB#0:
+; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
+; BITALG-NEXT: vpsubd %ymm0, %ymm1, %ymm2
+; BITALG-NEXT: vpand %ymm2, %ymm0, %ymm0
+; BITALG-NEXT: vpcmpeqd %ymm2, %ymm2, %ymm2
+; BITALG-NEXT: vpaddd %ymm2, %ymm0, %ymm0
+; BITALG-NEXT: vmovdqa {{.*#+}} ymm2 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
+; BITALG-NEXT: vpand %ymm2, %ymm0, %ymm3
+; BITALG-NEXT: vmovdqa {{.*#+}} ymm4 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4,0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
+; BITALG-NEXT: vpshufb %ymm3, %ymm4, %ymm3
+; BITALG-NEXT: vpsrlw $4, %ymm0, %ymm0
+; BITALG-NEXT: vpand %ymm2, %ymm0, %ymm0
+; BITALG-NEXT: vpshufb %ymm0, %ymm4, %ymm0
+; BITALG-NEXT: vpaddb %ymm3, %ymm0, %ymm0
+; BITALG-NEXT: vpunpckhdq {{.*#+}} ymm2 = ymm0[2],ymm1[2],ymm0[3],ymm1[3],ymm0[6],ymm1[6],ymm0[7],ymm1[7]
+; BITALG-NEXT: vpsadbw %ymm1, %ymm2, %ymm2
+; BITALG-NEXT: vpunpckldq {{.*#+}} ymm0 = ymm0[0],ymm1[0],ymm0[1],ymm1[1],ymm0[4],ymm1[4],ymm0[5],ymm1[5]
+; BITALG-NEXT: vpsadbw %ymm1, %ymm0, %ymm0
+; BITALG-NEXT: vpackuswb %ymm2, %ymm0, %ymm0
+; BITALG-NEXT: retq
+;
; X32-AVX-LABEL: testv8i32u:
; X32-AVX: # BB#0:
; X32-AVX-NEXT: vpxor %xmm1, %xmm1, %xmm1
; AVX512VPOPCNTDQ-NEXT: vpmovdw %zmm0, %ymm0
; AVX512VPOPCNTDQ-NEXT: retq
;
+; BITALG_NOVLX-LABEL: testv16i16u:
+; BITALG_NOVLX: # BB#0:
+; BITALG_NOVLX-NEXT: vpxor %xmm1, %xmm1, %xmm1
+; BITALG_NOVLX-NEXT: vpsubw %ymm0, %ymm1, %ymm1
+; BITALG_NOVLX-NEXT: vpand %ymm1, %ymm0, %ymm0
+; BITALG_NOVLX-NEXT: vpcmpeqd %ymm1, %ymm1, %ymm1
+; BITALG_NOVLX-NEXT: vpaddw %ymm1, %ymm0, %ymm0
+; BITALG_NOVLX-NEXT: vpopcntw %zmm0, %zmm0
+; BITALG_NOVLX-NEXT: # kill: %YMM0<def> %YMM0<kill> %ZMM0<kill>
+; BITALG_NOVLX-NEXT: retq
+;
+; BITALG-LABEL: testv16i16u:
+; BITALG: # BB#0:
+; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
+; BITALG-NEXT: vpsubw %ymm0, %ymm1, %ymm1
+; BITALG-NEXT: vpand %ymm1, %ymm0, %ymm0
+; BITALG-NEXT: vpcmpeqd %ymm1, %ymm1, %ymm1
+; BITALG-NEXT: vpaddw %ymm1, %ymm0, %ymm0
+; BITALG-NEXT: vpopcntw %ymm0, %ymm0
+; BITALG-NEXT: retq
+;
; X32-AVX-LABEL: testv16i16u:
; X32-AVX: # BB#0:
; X32-AVX-NEXT: vpxor %xmm1, %xmm1, %xmm1
; AVX512VPOPCNTDQ-NEXT: vpaddb %ymm2, %ymm0, %ymm0
; AVX512VPOPCNTDQ-NEXT: retq
;
+; BITALG_NOVLX-LABEL: testv32i8u:
+; BITALG_NOVLX: # BB#0:
+; BITALG_NOVLX-NEXT: vpxor %xmm1, %xmm1, %xmm1
+; BITALG_NOVLX-NEXT: vpsubb %ymm0, %ymm1, %ymm1
+; BITALG_NOVLX-NEXT: vpand %ymm1, %ymm0, %ymm0
+; BITALG_NOVLX-NEXT: vpcmpeqd %ymm1, %ymm1, %ymm1
+; BITALG_NOVLX-NEXT: vpaddb %ymm1, %ymm0, %ymm0
+; BITALG_NOVLX-NEXT: vpopcntb %zmm0, %zmm0
+; BITALG_NOVLX-NEXT: # kill: %YMM0<def> %YMM0<kill> %ZMM0<kill>
+; BITALG_NOVLX-NEXT: retq
+;
+; BITALG-LABEL: testv32i8u:
+; BITALG: # BB#0:
+; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
+; BITALG-NEXT: vpsubb %ymm0, %ymm1, %ymm1
+; BITALG-NEXT: vpand %ymm1, %ymm0, %ymm0
+; BITALG-NEXT: vpcmpeqd %ymm1, %ymm1, %ymm1
+; BITALG-NEXT: vpaddb %ymm1, %ymm0, %ymm0
+; BITALG-NEXT: vpopcntb %ymm0, %ymm0
+; BITALG-NEXT: retq
+;
; X32-AVX-LABEL: testv32i8u:
; X32-AVX: # BB#0:
; X32-AVX-NEXT: vpxor %xmm1, %xmm1, %xmm1
; AVX-NEXT: vmovaps {{.*#+}} ymm0 = [8,0,64,0]
; AVX-NEXT: retq
;
+; BITALG_NOVLX-LABEL: foldv4i64:
+; BITALG_NOVLX: # BB#0:
+; BITALG_NOVLX-NEXT: vmovaps {{.*#+}} ymm0 = [8,0,64,0]
+; BITALG_NOVLX-NEXT: retq
+;
+; BITALG-LABEL: foldv4i64:
+; BITALG: # BB#0:
+; BITALG-NEXT: vmovaps {{.*#+}} ymm0 = [8,0,64,0]
+; BITALG-NEXT: retq
+;
; X32-AVX-LABEL: foldv4i64:
; X32-AVX: # BB#0:
; X32-AVX-NEXT: vmovaps {{.*#+}} ymm0 = [8,0,0,0,64,0,0,0]
; AVX-NEXT: vmovaps {{.*#+}} ymm0 = [8,0,64,0]
; AVX-NEXT: retq
;
+; BITALG_NOVLX-LABEL: foldv4i64u:
+; BITALG_NOVLX: # BB#0:
+; BITALG_NOVLX-NEXT: vmovaps {{.*#+}} ymm0 = [8,0,64,0]
+; BITALG_NOVLX-NEXT: retq
+;
+; BITALG-LABEL: foldv4i64u:
+; BITALG: # BB#0:
+; BITALG-NEXT: vmovaps {{.*#+}} ymm0 = [8,0,64,0]
+; BITALG-NEXT: retq
+;
; X32-AVX-LABEL: foldv4i64u:
; X32-AVX: # BB#0:
; X32-AVX-NEXT: vmovaps {{.*#+}} ymm0 = [8,0,0,0,64,0,0,0]
; AVX-NEXT: vmovaps {{.*#+}} ymm0 = [8,0,32,0,16,0,3,3]
; AVX-NEXT: retq
;
+; BITALG_NOVLX-LABEL: foldv8i32:
+; BITALG_NOVLX: # BB#0:
+; BITALG_NOVLX-NEXT: vmovaps {{.*#+}} ymm0 = [8,0,32,0,16,0,3,3]
+; BITALG_NOVLX-NEXT: retq
+;
+; BITALG-LABEL: foldv8i32:
+; BITALG: # BB#0:
+; BITALG-NEXT: vmovaps {{.*#+}} ymm0 = [8,0,32,0,16,0,3,3]
+; BITALG-NEXT: retq
+;
; X32-AVX-LABEL: foldv8i32:
; X32-AVX: # BB#0:
; X32-AVX-NEXT: vmovaps {{.*#+}} ymm0 = [8,0,32,0,16,0,3,3]
; AVX-NEXT: vmovaps {{.*#+}} ymm0 = [8,0,32,0,16,0,3,3]
; AVX-NEXT: retq
;
+; BITALG_NOVLX-LABEL: foldv8i32u:
+; BITALG_NOVLX: # BB#0:
+; BITALG_NOVLX-NEXT: vmovaps {{.*#+}} ymm0 = [8,0,32,0,16,0,3,3]
+; BITALG_NOVLX-NEXT: retq
+;
+; BITALG-LABEL: foldv8i32u:
+; BITALG: # BB#0:
+; BITALG-NEXT: vmovaps {{.*#+}} ymm0 = [8,0,32,0,16,0,3,3]
+; BITALG-NEXT: retq
+;
; X32-AVX-LABEL: foldv8i32u:
; X32-AVX: # BB#0:
; X32-AVX-NEXT: vmovaps {{.*#+}} ymm0 = [8,0,32,0,16,0,3,3]
; AVX-NEXT: vmovaps {{.*#+}} ymm0 = [8,0,16,0,16,0,3,3,1,1,0,1,2,3,4,5]
; AVX-NEXT: retq
;
+; BITALG_NOVLX-LABEL: foldv16i16:
+; BITALG_NOVLX: # BB#0:
+; BITALG_NOVLX-NEXT: vmovaps {{.*#+}} ymm0 = [8,0,16,0,16,0,3,3,1,1,0,1,2,3,4,5]
+; BITALG_NOVLX-NEXT: retq
+;
+; BITALG-LABEL: foldv16i16:
+; BITALG: # BB#0:
+; BITALG-NEXT: vmovaps {{.*#+}} ymm0 = [8,0,16,0,16,0,3,3,1,1,0,1,2,3,4,5]
+; BITALG-NEXT: retq
+;
; X32-AVX-LABEL: foldv16i16:
; X32-AVX: # BB#0:
; X32-AVX-NEXT: vmovaps {{.*#+}} ymm0 = [8,0,16,0,16,0,3,3,1,1,0,1,2,3,4,5]
; AVX-NEXT: vmovaps {{.*#+}} ymm0 = [8,0,16,0,16,0,3,3,1,1,0,1,2,3,4,5]
; AVX-NEXT: retq
;
+; BITALG_NOVLX-LABEL: foldv16i16u:
+; BITALG_NOVLX: # BB#0:
+; BITALG_NOVLX-NEXT: vmovaps {{.*#+}} ymm0 = [8,0,16,0,16,0,3,3,1,1,0,1,2,3,4,5]
+; BITALG_NOVLX-NEXT: retq
+;
+; BITALG-LABEL: foldv16i16u:
+; BITALG: # BB#0:
+; BITALG-NEXT: vmovaps {{.*#+}} ymm0 = [8,0,16,0,16,0,3,3,1,1,0,1,2,3,4,5]
+; BITALG-NEXT: retq
+;
; X32-AVX-LABEL: foldv16i16u:
; X32-AVX: # BB#0:
; X32-AVX-NEXT: vmovaps {{.*#+}} ymm0 = [8,0,16,0,16,0,3,3,1,1,0,1,2,3,4,5]
; AVX-NEXT: vmovaps {{.*#+}} ymm0 = [8,0,8,0,8,0,3,3,1,1,0,1,2,3,4,5,6,7,8,8,7,6,5,4,3,2,1,0,0,0,0,0]
; AVX-NEXT: retq
;
+; BITALG_NOVLX-LABEL: foldv32i8:
+; BITALG_NOVLX: # BB#0:
+; BITALG_NOVLX-NEXT: vmovaps {{.*#+}} ymm0 = [8,0,8,0,8,0,3,3,1,1,0,1,2,3,4,5,6,7,8,8,7,6,5,4,3,2,1,0,0,0,0,0]
+; BITALG_NOVLX-NEXT: retq
+;
+; BITALG-LABEL: foldv32i8:
+; BITALG: # BB#0:
+; BITALG-NEXT: vmovaps {{.*#+}} ymm0 = [8,0,8,0,8,0,3,3,1,1,0,1,2,3,4,5,6,7,8,8,7,6,5,4,3,2,1,0,0,0,0,0]
+; BITALG-NEXT: retq
+;
; X32-AVX-LABEL: foldv32i8:
; X32-AVX: # BB#0:
; X32-AVX-NEXT: vmovaps {{.*#+}} ymm0 = [8,0,8,0,8,0,3,3,1,1,0,1,2,3,4,5,6,7,8,8,7,6,5,4,3,2,1,0,0,0,0,0]
; AVX-NEXT: vmovaps {{.*#+}} ymm0 = [8,0,8,0,8,0,3,3,1,1,0,1,2,3,4,5,6,7,8,8,7,6,5,4,3,2,1,0,0,0,0,0]
; AVX-NEXT: retq
;
+; BITALG_NOVLX-LABEL: foldv32i8u:
+; BITALG_NOVLX: # BB#0:
+; BITALG_NOVLX-NEXT: vmovaps {{.*#+}} ymm0 = [8,0,8,0,8,0,3,3,1,1,0,1,2,3,4,5,6,7,8,8,7,6,5,4,3,2,1,0,0,0,0,0]
+; BITALG_NOVLX-NEXT: retq
+;
+; BITALG-LABEL: foldv32i8u:
+; BITALG: # BB#0:
+; BITALG-NEXT: vmovaps {{.*#+}} ymm0 = [8,0,8,0,8,0,3,3,1,1,0,1,2,3,4,5,6,7,8,8,7,6,5,4,3,2,1,0,0,0,0,0]
+; BITALG-NEXT: retq
+;
; X32-AVX-LABEL: foldv32i8u:
; X32-AVX: # BB#0:
; X32-AVX-NEXT: vmovaps {{.*#+}} ymm0 = [8,0,8,0,8,0,3,3,1,1,0,1,2,3,4,5,6,7,8,8,7,6,5,4,3,2,1,0,0,0,0,0]
; AVX512VPOPCNTDQ-NEXT: vpaddq %zmm1, %zmm0, %zmm0
; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0
; AVX512VPOPCNTDQ-NEXT: retq
+;
+; BITALG-LABEL: testv8i64:
+; BITALG: # BB#0:
+; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
+; BITALG-NEXT: vpsubq %zmm0, %zmm1, %zmm2
+; BITALG-NEXT: vpandq %zmm2, %zmm0, %zmm0
+; BITALG-NEXT: vpternlogd $255, %zmm2, %zmm2, %zmm2
+; BITALG-NEXT: vpaddq %zmm2, %zmm0, %zmm0
+; BITALG-NEXT: vmovdqa64 {{.*#+}} zmm2 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
+; BITALG-NEXT: vpandq %zmm2, %zmm0, %zmm3
+; BITALG-NEXT: vmovdqa64 {{.*#+}} zmm4 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4,0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4,0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4,0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
+; BITALG-NEXT: vpshufb %zmm3, %zmm4, %zmm3
+; BITALG-NEXT: vpsrlw $4, %zmm0, %zmm0
+; BITALG-NEXT: vpandq %zmm2, %zmm0, %zmm0
+; BITALG-NEXT: vpshufb %zmm0, %zmm4, %zmm0
+; BITALG-NEXT: vpaddb %zmm3, %zmm0, %zmm0
+; BITALG-NEXT: vpsadbw %zmm1, %zmm0, %zmm0
+; BITALG-NEXT: retq
%out = call <8 x i64> @llvm.cttz.v8i64(<8 x i64> %in, i1 0)
ret <8 x i64> %out
}
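
The cttz lowerings checked in these hunks reduce trailing-zero counting to the population count above: negate and AND to isolate the lowest set bit (vpsub*/vpand), add all-ones to turn it into a mask of the bits below that bit (vpcmpeqd or vpternlogd plus vpadd*), then popcount the mask; the trailing i1 flag on llvm.cttz only states whether a zero input may be treated as undefined. A small C sketch of the identity, with an illustrative helper name:

#include <stdint.h>

uint64_t cttz_u64_via_popcnt(uint64_t x) {
    uint64_t lowbit = x & (0 - x);   /* vpsub* + vpand: lowest set bit, or 0 if x == 0    */
    uint64_t mask = lowbit - 1;      /* vpadd* with all-ones: ones in every position      */
                                     /* below the lowest set bit (all 64 bits if x == 0)  */
    uint64_t count = 0;              /* popcount(mask) == cttz(x), giving 64 for x == 0   */
    while (mask) {
        count += mask & 1;
        mask >>= 1;
    }
    return count;
}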
; AVX512VPOPCNTDQ-NEXT: vpaddq %zmm1, %zmm0, %zmm0
; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0
; AVX512VPOPCNTDQ-NEXT: retq
+;
+; BITALG-LABEL: testv8i64u:
+; BITALG: # BB#0:
+; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
+; BITALG-NEXT: vpsubq %zmm0, %zmm1, %zmm2
+; BITALG-NEXT: vpandq %zmm2, %zmm0, %zmm0
+; BITALG-NEXT: vpternlogd $255, %zmm2, %zmm2, %zmm2
+; BITALG-NEXT: vpaddq %zmm2, %zmm0, %zmm0
+; BITALG-NEXT: vmovdqa64 {{.*#+}} zmm2 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
+; BITALG-NEXT: vpandq %zmm2, %zmm0, %zmm3
+; BITALG-NEXT: vmovdqa64 {{.*#+}} zmm4 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4,0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4,0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4,0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
+; BITALG-NEXT: vpshufb %zmm3, %zmm4, %zmm3
+; BITALG-NEXT: vpsrlw $4, %zmm0, %zmm0
+; BITALG-NEXT: vpandq %zmm2, %zmm0, %zmm0
+; BITALG-NEXT: vpshufb %zmm0, %zmm4, %zmm0
+; BITALG-NEXT: vpaddb %zmm3, %zmm0, %zmm0
+; BITALG-NEXT: vpsadbw %zmm1, %zmm0, %zmm0
+; BITALG-NEXT: retq
%out = call <8 x i64> @llvm.cttz.v8i64(<8 x i64> %in, i1 -1)
ret <8 x i64> %out
}
; AVX512VPOPCNTDQ-NEXT: vpaddd %zmm1, %zmm0, %zmm0
; AVX512VPOPCNTDQ-NEXT: vpopcntd %zmm0, %zmm0
; AVX512VPOPCNTDQ-NEXT: retq
+;
+; BITALG-LABEL: testv16i32:
+; BITALG: # BB#0:
+; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
+; BITALG-NEXT: vpsubd %zmm0, %zmm1, %zmm2
+; BITALG-NEXT: vpandq %zmm2, %zmm0, %zmm0
+; BITALG-NEXT: vpternlogd $255, %zmm2, %zmm2, %zmm2
+; BITALG-NEXT: vpaddd %zmm2, %zmm0, %zmm0
+; BITALG-NEXT: vmovdqa64 {{.*#+}} zmm2 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
+; BITALG-NEXT: vpandq %zmm2, %zmm0, %zmm3
+; BITALG-NEXT: vmovdqa64 {{.*#+}} zmm4 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4,0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4,0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4,0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
+; BITALG-NEXT: vpshufb %zmm3, %zmm4, %zmm3
+; BITALG-NEXT: vpsrlw $4, %zmm0, %zmm0
+; BITALG-NEXT: vpandq %zmm2, %zmm0, %zmm0
+; BITALG-NEXT: vpshufb %zmm0, %zmm4, %zmm0
+; BITALG-NEXT: vpaddb %zmm3, %zmm0, %zmm0
+; BITALG-NEXT: vpunpckhdq {{.*#+}} zmm2 = zmm0[2],zmm1[2],zmm0[3],zmm1[3],zmm0[6],zmm1[6],zmm0[7],zmm1[7],zmm0[10],zmm1[10],zmm0[11],zmm1[11],zmm0[14],zmm1[14],zmm0[15],zmm1[15]
+; BITALG-NEXT: vpsadbw %zmm1, %zmm2, %zmm2
+; BITALG-NEXT: vpunpckldq {{.*#+}} zmm0 = zmm0[0],zmm1[0],zmm0[1],zmm1[1],zmm0[4],zmm1[4],zmm0[5],zmm1[5],zmm0[8],zmm1[8],zmm0[9],zmm1[9],zmm0[12],zmm1[12],zmm0[13],zmm1[13]
+; BITALG-NEXT: vpsadbw %zmm1, %zmm0, %zmm0
+; BITALG-NEXT: vpackuswb %zmm2, %zmm0, %zmm0
+; BITALG-NEXT: retq
%out = call <16 x i32> @llvm.cttz.v16i32(<16 x i32> %in, i1 0)
ret <16 x i32> %out
}
; AVX512VPOPCNTDQ-NEXT: vpaddd %zmm1, %zmm0, %zmm0
; AVX512VPOPCNTDQ-NEXT: vpopcntd %zmm0, %zmm0
; AVX512VPOPCNTDQ-NEXT: retq
+;
+; BITALG-LABEL: testv16i32u:
+; BITALG: # BB#0:
+; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
+; BITALG-NEXT: vpsubd %zmm0, %zmm1, %zmm2
+; BITALG-NEXT: vpandq %zmm2, %zmm0, %zmm0
+; BITALG-NEXT: vpternlogd $255, %zmm2, %zmm2, %zmm2
+; BITALG-NEXT: vpaddd %zmm2, %zmm0, %zmm0
+; BITALG-NEXT: vmovdqa64 {{.*#+}} zmm2 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
+; BITALG-NEXT: vpandq %zmm2, %zmm0, %zmm3
+; BITALG-NEXT: vmovdqa64 {{.*#+}} zmm4 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4,0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4,0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4,0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
+; BITALG-NEXT: vpshufb %zmm3, %zmm4, %zmm3
+; BITALG-NEXT: vpsrlw $4, %zmm0, %zmm0
+; BITALG-NEXT: vpandq %zmm2, %zmm0, %zmm0
+; BITALG-NEXT: vpshufb %zmm0, %zmm4, %zmm0
+; BITALG-NEXT: vpaddb %zmm3, %zmm0, %zmm0
+; BITALG-NEXT: vpunpckhdq {{.*#+}} zmm2 = zmm0[2],zmm1[2],zmm0[3],zmm1[3],zmm0[6],zmm1[6],zmm0[7],zmm1[7],zmm0[10],zmm1[10],zmm0[11],zmm1[11],zmm0[14],zmm1[14],zmm0[15],zmm1[15]
+; BITALG-NEXT: vpsadbw %zmm1, %zmm2, %zmm2
+; BITALG-NEXT: vpunpckldq {{.*#+}} zmm0 = zmm0[0],zmm1[0],zmm0[1],zmm1[1],zmm0[4],zmm1[4],zmm0[5],zmm1[5],zmm0[8],zmm1[8],zmm0[9],zmm1[9],zmm0[12],zmm1[12],zmm0[13],zmm1[13]
+; BITALG-NEXT: vpsadbw %zmm1, %zmm0, %zmm0
+; BITALG-NEXT: vpackuswb %zmm2, %zmm0, %zmm0
+; BITALG-NEXT: retq
%out = call <16 x i32> @llvm.cttz.v16i32(<16 x i32> %in, i1 -1)
ret <16 x i32> %out
}
; AVX512VPOPCNTDQ-NEXT: vpopcntd %zmm1, %zmm1
; AVX512VPOPCNTDQ-NEXT: vpmovdw %zmm1, %ymm1
; AVX512VPOPCNTDQ-NEXT: retq
+;
+; BITALG-LABEL: testv32i16u:
+; BITALG: # BB#0:
+; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
+; BITALG-NEXT: vpsubw %zmm0, %zmm1, %zmm1
+; BITALG-NEXT: vpandq %zmm1, %zmm0, %zmm0
+; BITALG-NEXT: vpternlogd $255, %zmm1, %zmm1, %zmm1
+; BITALG-NEXT: vpaddw %zmm1, %zmm0, %zmm0
+; BITALG-NEXT: vpopcntw %zmm0, %zmm0
+; BITALG-NEXT: retq
%out = call <32 x i16> @llvm.cttz.v32i16(<32 x i16> %in, i1 -1)
ret <32 x i16> %out
}
; AVX512VPOPCNTDQ-NEXT: vpshufb %ymm1, %ymm6, %ymm1
; AVX512VPOPCNTDQ-NEXT: vpaddb %ymm2, %ymm1, %ymm1
; AVX512VPOPCNTDQ-NEXT: retq
+;
+; BITALG-LABEL: testv64i8u:
+; BITALG: # BB#0:
+; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
+; BITALG-NEXT: vpsubb %zmm0, %zmm1, %zmm1
+; BITALG-NEXT: vpandq %zmm1, %zmm0, %zmm0
+; BITALG-NEXT: vpternlogd $255, %zmm1, %zmm1, %zmm1
+; BITALG-NEXT: vpaddb %zmm1, %zmm0, %zmm0
+; BITALG-NEXT: vpopcntb %zmm0, %zmm0
+; BITALG-NEXT: retq
%out = call <64 x i8> @llvm.cttz.v64i8(<64 x i8> %in, i1 -1)
ret <64 x i8> %out
}