%1 = sdiv <8 x i16> %x, <i16 -510, i16 -24, i16 -23, i16 3, i16 22, i16 25, i16 255, i16 511>
ret <8 x i16> %1
}
+
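+; Note: a brief descriptive comment for the new test (wording is an editorial addition,
+; not from the original patch). The divisors below include the i16 boundary values
+; -32768, -1, 1 and 32767, so this non-uniform SDIV cannot be lowered with a single
+; vector magic-number multiply; each lane is extracted, divided with a scalar
+; magic-multiply / compare / negate / copy sequence, and reinserted.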
+define <8 x i16> @combine_vec_sdiv_nonuniform6(<8 x i16> %x) {
+; SSE-LABEL: combine_vec_sdiv_nonuniform6:
+; SSE: # %bb.0:
+; SSE-NEXT: pextrw $5, %xmm0, %eax
+; SSE-NEXT: movswl %ax, %ecx
+; SSE-NEXT: imull $-32639, %ecx, %ecx # imm = 0x8081
+; SSE-NEXT: shrl $16, %ecx
+; SSE-NEXT: addl %eax, %ecx
+; SSE-NEXT: movzwl %cx, %eax
+; SSE-NEXT: sarw $7, %cx
+; SSE-NEXT: shrl $15, %eax
+; SSE-NEXT: addl %ecx, %eax
+; SSE-NEXT: pextrw $2, %xmm0, %ecx
+; SSE-NEXT: movswl %cx, %edx
+; SSE-NEXT: imull $32703, %edx, %edx # imm = 0x7FBF
+; SSE-NEXT: shrl $16, %edx
+; SSE-NEXT: subl %ecx, %edx
+; SSE-NEXT: movzwl %dx, %ecx
+; SSE-NEXT: sarw $8, %dx
+; SSE-NEXT: shrl $15, %ecx
+; SSE-NEXT: addl %edx, %ecx
+; SSE-NEXT: pextrw $1, %xmm0, %edx
+; SSE-NEXT: movl %edx, %esi
+; SSE-NEXT: sarw $15, %si
+; SSE-NEXT: movzwl %si, %esi
+; SSE-NEXT: shrl $7, %esi
+; SSE-NEXT: addl %edx, %esi
+; SSE-NEXT: sarw $9, %si
+; SSE-NEXT: negl %esi
+; SSE-NEXT: pextrw $0, %xmm0, %edx
+; SSE-NEXT: xorl %edi, %edi
+; SSE-NEXT: cmpl $32768, %edx # imm = 0x8000
+; SSE-NEXT: sete %dil
+; SSE-NEXT: movd %edi, %xmm1
+; SSE-NEXT: pinsrw $1, %esi, %xmm1
+; SSE-NEXT: pinsrw $2, %ecx, %xmm1
+; SSE-NEXT: pextrw $3, %xmm0, %ecx
+; SSE-NEXT: negl %ecx
+; SSE-NEXT: pinsrw $3, %ecx, %xmm1
+; SSE-NEXT: pblendw {{.*#+}} xmm1 = xmm1[0,1,2,3],xmm0[4],xmm1[5,6,7]
+; SSE-NEXT: pinsrw $5, %eax, %xmm1
+; SSE-NEXT: pextrw $6, %xmm0, %eax
+; SSE-NEXT: movl %eax, %ecx
+; SSE-NEXT: sarw $15, %cx
+; SSE-NEXT: movzwl %cx, %ecx
+; SSE-NEXT: shrl $7, %ecx
+; SSE-NEXT: addl %eax, %ecx
+; SSE-NEXT: sarw $9, %cx
+; SSE-NEXT: pinsrw $6, %ecx, %xmm1
+; SSE-NEXT: pextrw $7, %xmm0, %eax
+; SSE-NEXT: cwtl
+; SSE-NEXT: movl %eax, %ecx
+; SSE-NEXT: shll $14, %ecx
+; SSE-NEXT: addl %eax, %ecx
+; SSE-NEXT: movl %ecx, %eax
+; SSE-NEXT: shrl $31, %eax
+; SSE-NEXT: sarl $29, %ecx
+; SSE-NEXT: addl %eax, %ecx
+; SSE-NEXT: pinsrw $7, %ecx, %xmm1
+; SSE-NEXT: movdqa %xmm1, %xmm0
+; SSE-NEXT: retq
+;
+; AVX-LABEL: combine_vec_sdiv_nonuniform6:
+; AVX: # %bb.0:
+; AVX-NEXT: vpextrw $5, %xmm0, %eax
+; AVX-NEXT: movswl %ax, %ecx
+; AVX-NEXT: imull $-32639, %ecx, %ecx # imm = 0x8081
+; AVX-NEXT: shrl $16, %ecx
+; AVX-NEXT: addl %eax, %ecx
+; AVX-NEXT: movzwl %cx, %eax
+; AVX-NEXT: sarw $7, %cx
+; AVX-NEXT: shrl $15, %eax
+; AVX-NEXT: addl %ecx, %eax
+; AVX-NEXT: vpextrw $2, %xmm0, %ecx
+; AVX-NEXT: movswl %cx, %edx
+; AVX-NEXT: imull $32703, %edx, %edx # imm = 0x7FBF
+; AVX-NEXT: shrl $16, %edx
+; AVX-NEXT: subl %ecx, %edx
+; AVX-NEXT: movzwl %dx, %ecx
+; AVX-NEXT: sarw $8, %dx
+; AVX-NEXT: shrl $15, %ecx
+; AVX-NEXT: addl %edx, %ecx
+; AVX-NEXT: vpextrw $1, %xmm0, %edx
+; AVX-NEXT: movl %edx, %esi
+; AVX-NEXT: sarw $15, %si
+; AVX-NEXT: movzwl %si, %esi
+; AVX-NEXT: shrl $7, %esi
+; AVX-NEXT: addl %edx, %esi
+; AVX-NEXT: sarw $9, %si
+; AVX-NEXT: negl %esi
+; AVX-NEXT: vpextrw $0, %xmm0, %edx
+; AVX-NEXT: xorl %edi, %edi
+; AVX-NEXT: cmpl $32768, %edx # imm = 0x8000
+; AVX-NEXT: sete %dil
+; AVX-NEXT: vmovd %edi, %xmm1
+; AVX-NEXT: vpinsrw $1, %esi, %xmm1, %xmm1
+; AVX-NEXT: vpinsrw $2, %ecx, %xmm1, %xmm1
+; AVX-NEXT: vpextrw $3, %xmm0, %ecx
+; AVX-NEXT: negl %ecx
+; AVX-NEXT: vpinsrw $3, %ecx, %xmm1, %xmm1
+; AVX-NEXT: vpblendw {{.*#+}} xmm1 = xmm1[0,1,2,3],xmm0[4],xmm1[5,6,7]
+; AVX-NEXT: vpinsrw $5, %eax, %xmm1, %xmm1
+; AVX-NEXT: vpextrw $6, %xmm0, %eax
+; AVX-NEXT: movl %eax, %ecx
+; AVX-NEXT: sarw $15, %cx
+; AVX-NEXT: movzwl %cx, %ecx
+; AVX-NEXT: shrl $7, %ecx
+; AVX-NEXT: addl %eax, %ecx
+; AVX-NEXT: sarw $9, %cx
+; AVX-NEXT: vpinsrw $6, %ecx, %xmm1, %xmm1
+; AVX-NEXT: vpextrw $7, %xmm0, %eax
+; AVX-NEXT: cwtl
+; AVX-NEXT: movl %eax, %ecx
+; AVX-NEXT: shll $14, %ecx
+; AVX-NEXT: addl %eax, %ecx
+; AVX-NEXT: movl %ecx, %eax
+; AVX-NEXT: shrl $31, %eax
+; AVX-NEXT: sarl $29, %ecx
+; AVX-NEXT: addl %eax, %ecx
+; AVX-NEXT: vpinsrw $7, %ecx, %xmm1, %xmm0
+; AVX-NEXT: retq
+ %1 = sdiv <8 x i16> %x, <i16 -32768, i16 -512, i16 -511, i16 -1, i16 1, i16 255, i16 512, i16 32767>
+ ret <8 x i16> %1
+}