define <2 x i64> @fptosi_2f64_to_2i64(<2 x double> %a) {
; SSE-LABEL: fptosi_2f64_to_2i64:
-; SSE: # BB#0:
-; SSE-NEXT: cvttsd2si %xmm0, %rax
-; SSE-NEXT: movd %rax, %xmm1
-; SSE-NEXT: shufpd {{.*#+}} xmm0 = xmm0[1,0]
-; SSE-NEXT: cvttsd2si %xmm0, %rax
-; SSE-NEXT: movd %rax, %xmm0
-; SSE-NEXT: punpcklqdq {{.*#+}} xmm1 = xmm1[0],xmm0[0]
+; SSE: # BB#0:
+; SSE-NEXT: cvttsd2si %xmm0, %rax
+; SSE-NEXT: movd %rax, %xmm1
+; SSE-NEXT: movhlps {{.*#+}} xmm0 = xmm0[1,1]
+; SSE-NEXT: cvttsd2si %xmm0, %rax
+; SSE-NEXT: movd %rax, %xmm0
+; SSE-NEXT: punpcklqdq {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: movdqa %xmm1, %xmm0
; SSE-NEXT: retq
;
define <4 x i32> @fptosi_2f64_to_2i32(<2 x double> %a) {
; SSE-LABEL: fptosi_2f64_to_2i32:
-; SSE: # BB#0:
-; SSE-NEXT: cvttsd2si %xmm0, %rax
-; SSE-NEXT: movd %rax, %xmm1
-; SSE-NEXT: shufpd {{.*#+}} xmm0 = xmm0[1,0]
-; SSE-NEXT: cvttsd2si %xmm0, %rax
-; SSE-NEXT: movd %rax, %xmm0
-; SSE-NEXT: punpcklqdq {{.*#+}} xmm1 = xmm1[0],xmm0[0]
+; SSE: # BB#0:
+; SSE-NEXT: cvttsd2si %xmm0, %rax
+; SSE-NEXT: movd %rax, %xmm1
+; SSE-NEXT: movhlps {{.*#+}} xmm0 = xmm0[1,1]
+; SSE-NEXT: cvttsd2si %xmm0, %rax
+; SSE-NEXT: movd %rax, %xmm0
+; SSE-NEXT: punpcklqdq {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm1[0,2,2,3]
; SSE-NEXT: retq
;
define <4 x i32> @fptosi_4f64_to_2i32(<2 x double> %a) {
; SSE-LABEL: fptosi_4f64_to_2i32:
-; SSE: # BB#0:
-; SSE-NEXT: cvttsd2si %xmm0, %rax
-; SSE-NEXT: movd %rax, %xmm1
-; SSE-NEXT: shufpd {{.*#+}} xmm0 = xmm0[1,0]
-; SSE-NEXT: cvttsd2si %xmm0, %rax
-; SSE-NEXT: movd %rax, %xmm0
-; SSE-NEXT: punpcklqdq {{.*#+}} xmm1 = xmm1[0],xmm0[0]
+; SSE: # BB#0:
+; SSE-NEXT: cvttsd2si %xmm0, %rax
+; SSE-NEXT: movd %rax, %xmm1
+; SSE-NEXT: movhlps {{.*#+}} xmm0 = xmm0[1,1]
+; SSE-NEXT: cvttsd2si %xmm0, %rax
+; SSE-NEXT: movd %rax, %xmm0
+; SSE-NEXT: punpcklqdq {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm1[0,2,2,3]
; SSE-NEXT: cvttsd2si %xmm0, %rax
; SSE-NEXT: movd %rax, %xmm1
define <4 x i64> @fptosi_4f64_to_4i64(<4 x double> %a) {
; SSE-LABEL: fptosi_4f64_to_4i64:
-; SSE: # BB#0:
-; SSE-NEXT: cvttsd2si %xmm0, %rax
-; SSE-NEXT: movd %rax, %xmm2
-; SSE-NEXT: shufpd {{.*#+}} xmm0 = xmm0[1,0]
-; SSE-NEXT: cvttsd2si %xmm0, %rax
-; SSE-NEXT: movd %rax, %xmm0
-; SSE-NEXT: punpcklqdq {{.*#+}} xmm2 = xmm2[0],xmm0[0]
-; SSE-NEXT: cvttsd2si %xmm1, %rax
-; SSE-NEXT: movd %rax, %xmm3
-; SSE-NEXT: shufpd {{.*#+}} xmm1 = xmm1[1,0]
-; SSE-NEXT: cvttsd2si %xmm1, %rax
-; SSE-NEXT: movd %rax, %xmm0
-; SSE-NEXT: punpcklqdq {{.*#+}} xmm3 = xmm3[0],xmm0[0]
+; SSE: # BB#0:
+; SSE-NEXT: cvttsd2si %xmm0, %rax
+; SSE-NEXT: movd %rax, %xmm2
+; SSE-NEXT: movhlps {{.*#+}} xmm0 = xmm0[1,1]
+; SSE-NEXT: cvttsd2si %xmm0, %rax
+; SSE-NEXT: movd %rax, %xmm0
+; SSE-NEXT: punpcklqdq {{.*#+}} xmm2 = xmm2[0],xmm0[0]
+; SSE-NEXT: cvttsd2si %xmm1, %rax
+; SSE-NEXT: movd %rax, %xmm3
+; SSE-NEXT: movhlps {{.*#+}} xmm1 = xmm1[1,1]
+; SSE-NEXT: cvttsd2si %xmm1, %rax
+; SSE-NEXT: movd %rax, %xmm0
+; SSE-NEXT: punpcklqdq {{.*#+}} xmm3 = xmm3[0],xmm0[0]
; SSE-NEXT: movdqa %xmm2, %xmm0
; SSE-NEXT: movdqa %xmm3, %xmm1
; SSE-NEXT: retq
define <4 x i32> @fptosi_4f64_to_4i32(<4 x double> %a) {
; SSE-LABEL: fptosi_4f64_to_4i32:
-; SSE: # BB#0:
-; SSE-NEXT: cvttsd2si %xmm1, %rax
-; SSE-NEXT: movd %rax, %xmm2
-; SSE-NEXT: shufpd {{.*#+}} xmm1 = xmm1[1,0]
-; SSE-NEXT: cvttsd2si %xmm1, %rax
-; SSE-NEXT: movd %rax, %xmm1
-; SSE-NEXT: punpcklqdq {{.*#+}} xmm2 = xmm2[0],xmm1[0]
-; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm2[0,2,2,3]
-; SSE-NEXT: cvttsd2si %xmm0, %rax
-; SSE-NEXT: movd %rax, %xmm2
-; SSE-NEXT: shufpd {{.*#+}} xmm0 = xmm0[1,0]
-; SSE-NEXT: cvttsd2si %xmm0, %rax
-; SSE-NEXT: movd %rax, %xmm0
-; SSE-NEXT: punpcklqdq {{.*#+}} xmm2 = xmm2[0],xmm0[0]
+; SSE: # BB#0:
+; SSE-NEXT: cvttsd2si %xmm1, %rax
+; SSE-NEXT: movd %rax, %xmm2
+; SSE-NEXT: movhlps {{.*#+}} xmm1 = xmm1[1,1]
+; SSE-NEXT: cvttsd2si %xmm1, %rax
+; SSE-NEXT: movd %rax, %xmm1
+; SSE-NEXT: punpcklqdq {{.*#+}} xmm2 = xmm2[0],xmm1[0]
+; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm2[0,2,2,3]
+; SSE-NEXT: cvttsd2si %xmm0, %rax
+; SSE-NEXT: movd %rax, %xmm2
+; SSE-NEXT: movhlps {{.*#+}} xmm0 = xmm0[1,1]
+; SSE-NEXT: cvttsd2si %xmm0, %rax
+; SSE-NEXT: movd %rax, %xmm0
+; SSE-NEXT: punpcklqdq {{.*#+}} xmm2 = xmm2[0],xmm0[0]
; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm2[0,2,2,3]
; SSE-NEXT: punpcklqdq {{.*#+}} xmm0 = xmm0[0],xmm1[0]
; SSE-NEXT: retq
; SSE-NEXT: movabsq $-9223372036854775808, %rcx # imm = 0x8000000000000000
; SSE-NEXT: xorq %rcx, %rax
; SSE-NEXT: cvttsd2si %xmm0, %rdx
-; SSE-NEXT: ucomisd %xmm2, %xmm0
-; SSE-NEXT: cmovaeq %rax, %rdx
-; SSE-NEXT: movd %rdx, %xmm1
-; SSE-NEXT: shufpd {{.*#+}} xmm0 = xmm0[1,0]
-; SSE-NEXT: movapd %xmm0, %xmm3
-; SSE-NEXT: subsd %xmm2, %xmm3
-; SSE-NEXT: cvttsd2si %xmm3, %rax
-; SSE-NEXT: xorq %rcx, %rax
+; SSE-NEXT: ucomisd %xmm2, %xmm0
+; SSE-NEXT: cmovaeq %rax, %rdx
+; SSE-NEXT: movd %rdx, %xmm1
+; SSE-NEXT: movhlps {{.*#+}} xmm0 = xmm0[1,1]
+; SSE-NEXT: movaps %xmm0, %xmm3
+; SSE-NEXT: subsd %xmm2, %xmm3
+; SSE-NEXT: cvttsd2si %xmm3, %rax
+; SSE-NEXT: xorq %rcx, %rax
; SSE-NEXT: cvttsd2si %xmm0, %rcx
; SSE-NEXT: ucomisd %xmm2, %xmm0
; SSE-NEXT: cmovaeq %rax, %rcx
; SSE-NEXT: movabsq $-9223372036854775808, %rcx # imm = 0x8000000000000000
; SSE-NEXT: xorq %rcx, %rax
; SSE-NEXT: cvttsd2si %xmm0, %rdx
-; SSE-NEXT: ucomisd %xmm1, %xmm0
-; SSE-NEXT: cmovaeq %rax, %rdx
-; SSE-NEXT: movd %rdx, %xmm2
-; SSE-NEXT: shufpd {{.*#+}} xmm0 = xmm0[1,0]
-; SSE-NEXT: movapd %xmm0, %xmm3
-; SSE-NEXT: subsd %xmm1, %xmm3
-; SSE-NEXT: cvttsd2si %xmm3, %rax
-; SSE-NEXT: xorq %rcx, %rax
+; SSE-NEXT: ucomisd %xmm1, %xmm0
+; SSE-NEXT: cmovaeq %rax, %rdx
+; SSE-NEXT: movd %rdx, %xmm2
+; SSE-NEXT: movhlps {{.*#+}} xmm0 = xmm0[1,1]
+; SSE-NEXT: movaps %xmm0, %xmm3
+; SSE-NEXT: subsd %xmm1, %xmm3
+; SSE-NEXT: cvttsd2si %xmm3, %rax
+; SSE-NEXT: xorq %rcx, %rax
; SSE-NEXT: cvttsd2si %xmm0, %rcx
; SSE-NEXT: ucomisd %xmm1, %xmm0
; SSE-NEXT: cmovaeq %rax, %rcx
; SSE-NEXT: movabsq $-9223372036854775808, %rcx # imm = 0x8000000000000000
; SSE-NEXT: xorq %rcx, %rax
; SSE-NEXT: cvttsd2si %xmm0, %rdx
-; SSE-NEXT: ucomisd %xmm1, %xmm0
-; SSE-NEXT: cmovaeq %rax, %rdx
-; SSE-NEXT: movd %rdx, %xmm2
-; SSE-NEXT: shufpd {{.*#+}} xmm0 = xmm0[1,0]
-; SSE-NEXT: movapd %xmm0, %xmm3
-; SSE-NEXT: subsd %xmm1, %xmm3
-; SSE-NEXT: cvttsd2si %xmm3, %rax
-; SSE-NEXT: xorq %rcx, %rax
+; SSE-NEXT: ucomisd %xmm1, %xmm0
+; SSE-NEXT: cmovaeq %rax, %rdx
+; SSE-NEXT: movd %rdx, %xmm2
+; SSE-NEXT: movhlps {{.*#+}} xmm0 = xmm0[1,1]
+; SSE-NEXT: movaps %xmm0, %xmm3
+; SSE-NEXT: subsd %xmm1, %xmm3
+; SSE-NEXT: cvttsd2si %xmm3, %rax
+; SSE-NEXT: xorq %rcx, %rax
; SSE-NEXT: cvttsd2si %xmm0, %rdx
; SSE-NEXT: ucomisd %xmm1, %xmm0
; SSE-NEXT: cmovaeq %rax, %rdx
; SSE-NEXT: movabsq $-9223372036854775808, %rax # imm = 0x8000000000000000
; SSE-NEXT: xorq %rax, %rcx
; SSE-NEXT: cvttsd2si %xmm2, %rdx
-; SSE-NEXT: ucomisd %xmm3, %xmm2
-; SSE-NEXT: cmovaeq %rcx, %rdx
-; SSE-NEXT: movd %rdx, %xmm0
-; SSE-NEXT: shufpd {{.*#+}} xmm2 = xmm2[1,0]
-; SSE-NEXT: movapd %xmm2, %xmm4
-; SSE-NEXT: subsd %xmm3, %xmm4
-; SSE-NEXT: cvttsd2si %xmm4, %rcx
-; SSE-NEXT: xorq %rax, %rcx
+; SSE-NEXT: ucomisd %xmm3, %xmm2
+; SSE-NEXT: cmovaeq %rcx, %rdx
+; SSE-NEXT: movd %rdx, %xmm0
+; SSE-NEXT: movhlps {{.*#+}} xmm2 = xmm2[1,1]
+; SSE-NEXT: movaps %xmm2, %xmm4
+; SSE-NEXT: subsd %xmm3, %xmm4
+; SSE-NEXT: cvttsd2si %xmm4, %rcx
+; SSE-NEXT: xorq %rax, %rcx
; SSE-NEXT: cvttsd2si %xmm2, %rdx
; SSE-NEXT: ucomisd %xmm3, %xmm2
; SSE-NEXT: cmovaeq %rcx, %rdx
; SSE-NEXT: cvttsd2si %xmm2, %rcx
; SSE-NEXT: xorq %rax, %rcx
; SSE-NEXT: cvttsd2si %xmm1, %rdx
-; SSE-NEXT: ucomisd %xmm3, %xmm1
-; SSE-NEXT: cmovaeq %rcx, %rdx
-; SSE-NEXT: movd %rdx, %xmm2
-; SSE-NEXT: shufpd {{.*#+}} xmm1 = xmm1[1,0]
-; SSE-NEXT: movapd %xmm1, %xmm4
-; SSE-NEXT: subsd %xmm3, %xmm4
-; SSE-NEXT: cvttsd2si %xmm4, %rcx
-; SSE-NEXT: xorq %rax, %rcx
+; SSE-NEXT: ucomisd %xmm3, %xmm1
+; SSE-NEXT: cmovaeq %rcx, %rdx
+; SSE-NEXT: movd %rdx, %xmm2
+; SSE-NEXT: movhlps {{.*#+}} xmm1 = xmm1[1,1]
+; SSE-NEXT: movaps %xmm1, %xmm4
+; SSE-NEXT: subsd %xmm3, %xmm4
+; SSE-NEXT: cvttsd2si %xmm4, %rcx
+; SSE-NEXT: xorq %rax, %rcx
; SSE-NEXT: cvttsd2si %xmm1, %rax
; SSE-NEXT: ucomisd %xmm3, %xmm1
; SSE-NEXT: cmovaeq %rcx, %rax
; SSE-NEXT: movabsq $-9223372036854775808, %rax # imm = 0x8000000000000000
; SSE-NEXT: xorq %rax, %rcx
; SSE-NEXT: cvttsd2si %xmm1, %rdx
-; SSE-NEXT: ucomisd %xmm2, %xmm1
-; SSE-NEXT: cmovaeq %rcx, %rdx
-; SSE-NEXT: movd %rdx, %xmm3
-; SSE-NEXT: shufpd {{.*#+}} xmm1 = xmm1[1,0]
-; SSE-NEXT: movapd %xmm1, %xmm4
-; SSE-NEXT: subsd %xmm2, %xmm4
-; SSE-NEXT: cvttsd2si %xmm4, %rcx
-; SSE-NEXT: xorq %rax, %rcx
+; SSE-NEXT: ucomisd %xmm2, %xmm1
+; SSE-NEXT: cmovaeq %rcx, %rdx
+; SSE-NEXT: movd %rdx, %xmm3
+; SSE-NEXT: movhlps {{.*#+}} xmm1 = xmm1[1,1]
+; SSE-NEXT: movaps %xmm1, %xmm4
+; SSE-NEXT: subsd %xmm2, %xmm4
+; SSE-NEXT: cvttsd2si %xmm4, %rcx
+; SSE-NEXT: xorq %rax, %rcx
; SSE-NEXT: cvttsd2si %xmm1, %rdx
; SSE-NEXT: ucomisd %xmm2, %xmm1
; SSE-NEXT: cmovaeq %rcx, %rdx
; SSE-NEXT: cvttsd2si %xmm3, %rcx
; SSE-NEXT: xorq %rax, %rcx
; SSE-NEXT: cvttsd2si %xmm0, %rdx
-; SSE-NEXT: ucomisd %xmm2, %xmm0
-; SSE-NEXT: cmovaeq %rcx, %rdx
-; SSE-NEXT: movd %rdx, %xmm3
-; SSE-NEXT: shufpd {{.*#+}} xmm0 = xmm0[1,0]
-; SSE-NEXT: movapd %xmm0, %xmm4
-; SSE-NEXT: subsd %xmm2, %xmm4
-; SSE-NEXT: cvttsd2si %xmm4, %rcx
-; SSE-NEXT: xorq %rax, %rcx
+; SSE-NEXT: ucomisd %xmm2, %xmm0
+; SSE-NEXT: cmovaeq %rcx, %rdx
+; SSE-NEXT: movd %rdx, %xmm3
+; SSE-NEXT: movhlps {{.*#+}} xmm0 = xmm0[1,1]
+; SSE-NEXT: movaps %xmm0, %xmm4
+; SSE-NEXT: subsd %xmm2, %xmm4
+; SSE-NEXT: cvttsd2si %xmm4, %rcx
+; SSE-NEXT: xorq %rax, %rcx
; SSE-NEXT: cvttsd2si %xmm0, %rax
; SSE-NEXT: ucomisd %xmm2, %xmm0
; SSE-NEXT: cmovaeq %rcx, %rax
; SSE-NEXT: movd %rax, %xmm1
; SSE-NEXT: punpcklqdq {{.*#+}} xmm2 = xmm2[0],xmm1[0]
; SSE-NEXT: movaps %xmm0, %xmm1
-; SSE-NEXT: shufps {{.*#+}} xmm1 = xmm1[3,1,2,3]
-; SSE-NEXT: cvttss2si %xmm1, %rax
-; SSE-NEXT: movd %rax, %xmm3
-; SSE-NEXT: shufpd {{.*#+}} xmm0 = xmm0[1,0]
-; SSE-NEXT: cvttss2si %xmm0, %rax
-; SSE-NEXT: movd %rax, %xmm1
-; SSE-NEXT: punpcklqdq {{.*#+}} xmm1 = xmm1[0],xmm3[0]
+; SSE-NEXT: shufps {{.*#+}} xmm1 = xmm1[3,1,2,3]
+; SSE-NEXT: cvttss2si %xmm1, %rax
+; SSE-NEXT: movd %rax, %xmm3
+; SSE-NEXT: movhlps {{.*#+}} xmm0 = xmm0[1,1]
+; SSE-NEXT: cvttss2si %xmm0, %rax
+; SSE-NEXT: movd %rax, %xmm1
+; SSE-NEXT: punpcklqdq {{.*#+}} xmm1 = xmm1[0],xmm3[0]
; SSE-NEXT: movdqa %xmm2, %xmm0
; SSE-NEXT: retq
;
; SSE-NEXT: movd %rax, %xmm1
; SSE-NEXT: punpcklqdq {{.*#+}} xmm2 = xmm2[0],xmm1[0]
; SSE-NEXT: movaps %xmm0, %xmm1
-; SSE-NEXT: shufps {{.*#+}} xmm1 = xmm1[3,1,2,3]
-; SSE-NEXT: cvttss2si %xmm1, %rax
-; SSE-NEXT: movd %rax, %xmm3
-; SSE-NEXT: shufpd {{.*#+}} xmm0 = xmm0[1,0]
-; SSE-NEXT: cvttss2si %xmm0, %rax
-; SSE-NEXT: movd %rax, %xmm1
-; SSE-NEXT: punpcklqdq {{.*#+}} xmm1 = xmm1[0],xmm3[0]
+; SSE-NEXT: shufps {{.*#+}} xmm1 = xmm1[3,1,2,3]
+; SSE-NEXT: cvttss2si %xmm1, %rax
+; SSE-NEXT: movd %rax, %xmm3
+; SSE-NEXT: movhlps {{.*#+}} xmm0 = xmm0[1,1]
+; SSE-NEXT: cvttss2si %xmm0, %rax
+; SSE-NEXT: movd %rax, %xmm1
+; SSE-NEXT: punpcklqdq {{.*#+}} xmm1 = xmm1[0],xmm3[0]
; SSE-NEXT: movdqa %xmm2, %xmm0
; SSE-NEXT: retq
;
; SSE-NEXT: shufps {{.*#+}} xmm2 = xmm2[1,1,2,3]
; SSE-NEXT: cvttss2si %xmm2, %rax
; SSE-NEXT: movd %eax, %xmm2
-; SSE-NEXT: punpckldq {{.*#+}} xmm2 = xmm2[0],xmm1[0],xmm2[1],xmm1[1]
-; SSE-NEXT: cvttss2si %xmm0, %rax
-; SSE-NEXT: movd %eax, %xmm1
-; SSE-NEXT: shufpd {{.*#+}} xmm0 = xmm0[1,0]
-; SSE-NEXT: cvttss2si %xmm0, %rax
-; SSE-NEXT: movd %eax, %xmm0
-; SSE-NEXT: punpckldq {{.*#+}} xmm1 = xmm1[0],xmm0[0],xmm1[1],xmm0[1]
+; SSE-NEXT: punpckldq {{.*#+}} xmm2 = xmm2[0],xmm1[0],xmm2[1],xmm1[1]
+; SSE-NEXT: cvttss2si %xmm0, %rax
+; SSE-NEXT: movd %eax, %xmm1
+; SSE-NEXT: movhlps {{.*#+}} xmm0 = xmm0[1,1]
+; SSE-NEXT: cvttss2si %xmm0, %rax
+; SSE-NEXT: movd %eax, %xmm0
+; SSE-NEXT: punpckldq {{.*#+}} xmm1 = xmm1[0],xmm0[0],xmm1[1],xmm0[1]
; SSE-NEXT: punpckldq {{.*#+}} xmm1 = xmm1[0],xmm2[0],xmm1[1],xmm2[1]
; SSE-NEXT: movdqa %xmm1, %xmm0
; SSE-NEXT: retq
; SSE-NEXT: shufps {{.*#+}} xmm3 = xmm3[1,1,2,3]
; SSE-NEXT: cvttss2si %xmm3, %rax
; SSE-NEXT: movd %eax, %xmm3
-; SSE-NEXT: punpckldq {{.*#+}} xmm3 = xmm3[0],xmm0[0],xmm3[1],xmm0[1]
-; SSE-NEXT: cvttss2si %xmm2, %rax
-; SSE-NEXT: movd %eax, %xmm0
-; SSE-NEXT: shufpd {{.*#+}} xmm2 = xmm2[1,0]
-; SSE-NEXT: cvttss2si %xmm2, %rax
-; SSE-NEXT: movd %eax, %xmm2
-; SSE-NEXT: punpckldq {{.*#+}} xmm0 = xmm0[0],xmm2[0],xmm0[1],xmm2[1]
+; SSE-NEXT: punpckldq {{.*#+}} xmm3 = xmm3[0],xmm0[0],xmm3[1],xmm0[1]
+; SSE-NEXT: cvttss2si %xmm2, %rax
+; SSE-NEXT: movd %eax, %xmm0
+; SSE-NEXT: movhlps {{.*#+}} xmm2 = xmm2[1,1]
+; SSE-NEXT: cvttss2si %xmm2, %rax
+; SSE-NEXT: movd %eax, %xmm2
+; SSE-NEXT: punpckldq {{.*#+}} xmm0 = xmm0[0],xmm2[0],xmm0[1],xmm2[1]
; SSE-NEXT: punpckldq {{.*#+}} xmm0 = xmm0[0],xmm3[0],xmm0[1],xmm3[1]
; SSE-NEXT: movaps %xmm1, %xmm2
; SSE-NEXT: shufps {{.*#+}} xmm2 = xmm2[3,1,2,3]
; SSE-NEXT: shufps {{.*#+}} xmm3 = xmm3[1,1,2,3]
; SSE-NEXT: cvttss2si %xmm3, %rax
; SSE-NEXT: movd %eax, %xmm3
-; SSE-NEXT: punpckldq {{.*#+}} xmm3 = xmm3[0],xmm2[0],xmm3[1],xmm2[1]
-; SSE-NEXT: cvttss2si %xmm1, %rax
-; SSE-NEXT: movd %eax, %xmm2
-; SSE-NEXT: shufpd {{.*#+}} xmm1 = xmm1[1,0]
-; SSE-NEXT: cvttss2si %xmm1, %rax
-; SSE-NEXT: movd %eax, %xmm1
-; SSE-NEXT: punpckldq {{.*#+}} xmm2 = xmm2[0],xmm1[0],xmm2[1],xmm1[1]
+; SSE-NEXT: punpckldq {{.*#+}} xmm3 = xmm3[0],xmm2[0],xmm3[1],xmm2[1]
+; SSE-NEXT: cvttss2si %xmm1, %rax
+; SSE-NEXT: movd %eax, %xmm2
+; SSE-NEXT: movhlps {{.*#+}} xmm1 = xmm1[1,1]
+; SSE-NEXT: cvttss2si %xmm1, %rax
+; SSE-NEXT: movd %eax, %xmm1
+; SSE-NEXT: punpckldq {{.*#+}} xmm2 = xmm2[0],xmm1[0],xmm2[1],xmm1[1]
; SSE-NEXT: punpckldq {{.*#+}} xmm2 = xmm2[0],xmm3[0],xmm2[1],xmm3[1]
; SSE-NEXT: movdqa %xmm2, %xmm1
; SSE-NEXT: retq
; SSE-NEXT: cvttss2si %xmm4, %rcx
; SSE-NEXT: xorq %rax, %rcx
; SSE-NEXT: cvttss2si %xmm3, %rdx
-; SSE-NEXT: ucomiss %xmm1, %xmm3
-; SSE-NEXT: cmovaeq %rcx, %rdx
-; SSE-NEXT: movd %rdx, %xmm3
-; SSE-NEXT: shufpd {{.*#+}} xmm0 = xmm0[1,0]
-; SSE-NEXT: movapd %xmm0, %xmm4
-; SSE-NEXT: subss %xmm1, %xmm4
-; SSE-NEXT: cvttss2si %xmm4, %rcx
-; SSE-NEXT: xorq %rax, %rcx
+; SSE-NEXT: ucomiss %xmm1, %xmm3
+; SSE-NEXT: cmovaeq %rcx, %rdx
+; SSE-NEXT: movd %rdx, %xmm3
+; SSE-NEXT: movhlps {{.*#+}} xmm0 = xmm0[1,1]
+; SSE-NEXT: movaps %xmm0, %xmm4
+; SSE-NEXT: subss %xmm1, %xmm4
+; SSE-NEXT: cvttss2si %xmm4, %rcx
+; SSE-NEXT: xorq %rax, %rcx
; SSE-NEXT: cvttss2si %xmm0, %rax
; SSE-NEXT: ucomiss %xmm1, %xmm0
; SSE-NEXT: cmovaeq %rcx, %rax
; SSE-NEXT: cvttss2si %xmm4, %rcx
; SSE-NEXT: xorq %rax, %rcx
; SSE-NEXT: cvttss2si %xmm3, %rdx
-; SSE-NEXT: ucomiss %xmm1, %xmm3
-; SSE-NEXT: cmovaeq %rcx, %rdx
-; SSE-NEXT: movd %rdx, %xmm3
-; SSE-NEXT: shufpd {{.*#+}} xmm0 = xmm0[1,0]
-; SSE-NEXT: movapd %xmm0, %xmm4
-; SSE-NEXT: subss %xmm1, %xmm4
-; SSE-NEXT: cvttss2si %xmm4, %rcx
-; SSE-NEXT: xorq %rax, %rcx
+; SSE-NEXT: ucomiss %xmm1, %xmm3
+; SSE-NEXT: cmovaeq %rcx, %rdx
+; SSE-NEXT: movd %rdx, %xmm3
+; SSE-NEXT: movhlps {{.*#+}} xmm0 = xmm0[1,1]
+; SSE-NEXT: movaps %xmm0, %xmm4
+; SSE-NEXT: subss %xmm1, %xmm4
+; SSE-NEXT: cvttss2si %xmm4, %rcx
+; SSE-NEXT: xorq %rax, %rcx
; SSE-NEXT: cvttss2si %xmm0, %rax
; SSE-NEXT: ucomiss %xmm1, %xmm0
; SSE-NEXT: cmovaeq %rcx, %rax