; extrq with an all-zero control mask (modulo undef) is folded away entirely;
; the call simplifies to its first operand per the updated CHECK line.
define <2 x i64> @test_extrq_call_constexpr(<2 x i64> %x) {
; CHECK-LABEL: @test_extrq_call_constexpr(
; CHECK-NEXT: ret <2 x i64> %x
;
  %1 = call <2 x i64> @llvm.x86.sse4a.extrq(<2 x i64> %x, <16 x i8> bitcast (<2 x i64> <i64 0, i64 undef> to <16 x i8>))
  ret <2 x i64> %1
}
; extrqi applied to a constant-expression source whose extracted bits are all
; zero constant-folds to zeroinitializer (the old giant constexpr CHECK is gone).
define <2 x i64> @test_extrqi_call_constexpr() {
; CHECK-LABEL: @test_extrqi_call_constexpr(
; CHECK-NEXT: ret <2 x i64> zeroinitializer
;
  %1 = tail call <2 x i64> @llvm.x86.sse4a.extrqi(<2 x i64> bitcast (<16 x i8> trunc (<16 x i16> bitcast (<4 x i64> <i64 0, i64 undef, i64 2, i64 undef> to <16 x i16>) to <16 x i8>) to <2 x i64>), i8 8, i8 16)
  ret <2 x i64> %1
}
; insertq with a constant second operand is canonicalized to insertqi: the
; constexpr mask folds to a plain vector constant and explicit immediates.
; (Restored the ret/closing brace lost in the unresolved merge.)
define <2 x i64> @test_insertq_call_constexpr(<2 x i64> %x) {
; CHECK-LABEL: @test_insertq_call_constexpr(
; CHECK-NEXT: [[TMP1:%.*]] = call <2 x i64> @llvm.x86.sse4a.insertqi(<2 x i64> %x, <2 x i64> <i64 0, i64 undef>, i8 2, i8 0)
; CHECK-NEXT: ret <2 x i64> [[TMP1]]
;
  %1 = tail call <2 x i64> @llvm.x86.sse4a.insertq(<2 x i64> %x, <2 x i64> bitcast (<16 x i8> trunc (<16 x i16> bitcast (<4 x i64> <i64 0, i64 undef, i64 2, i64 undef> to <16 x i16>) to <16 x i8>) to <2 x i64>))
  ret <2 x i64> %1
}
; insertqi keeps its immediates, but the constant-expression insert operand
; now folds to the plain vector constant <i64 0, i64 undef>.
; (Restored the ret/closing brace lost in the unresolved merge.)
define <2 x i64> @test_insertqi_call_constexpr(<2 x i64> %x) {
; CHECK-LABEL: @test_insertqi_call_constexpr(
; CHECK-NEXT: [[TMP1:%.*]] = tail call <2 x i64> @llvm.x86.sse4a.insertqi(<2 x i64> %x, <2 x i64> <i64 0, i64 undef>, i8 48, i8 3)
; CHECK-NEXT: ret <2 x i64> [[TMP1]]
;
  %1 = tail call <2 x i64> @llvm.x86.sse4a.insertqi(<2 x i64> %x, <2 x i64> bitcast (<16 x i8> trunc (<16 x i16> bitcast (<4 x i64> <i64 0, i64 undef, i64 2, i64 undef> to <16 x i16>) to <16 x i8>) to <2 x i64>), i8 48, i8 3)
  ret <2 x i64> %1
}
; Bitcast of a vector constant with an undef element now folds to an explicit
; element-wise constant (little-endian lane split of each i64 into two i32s).
define <4 x i32> @bitcast_constexpr_4i32_2i64_u2() {
; CHECK-LABEL: @bitcast_constexpr_4i32_2i64_u2(
; CHECK-NEXT: ret <4 x i32> <i32 undef, i32 undef, i32 2, i32 0>
;
  %cast = bitcast <2 x i64><i64 undef, i64 2> to <4 x i32>
  ret <4 x i32> %cast
}
; i64 1 splits into low i32 1 / high i32 0; the undef i64 yields two undef i32s.
define <4 x i32> @bitcast_constexpr_4i32_2i64_1u() {
; CHECK-LABEL: @bitcast_constexpr_4i32_2i64_1u(
; CHECK-NEXT: ret <4 x i32> <i32 1, i32 0, i32 undef, i32 undef>
;
  %cast = bitcast <2 x i64><i64 1, i64 undef> to <4 x i32>
  ret <4 x i32> %cast
}
; Same input as the _u2 variant above; kept as a separate named test.
define <4 x i32> @bitcast_constexpr_4i32_2i64() {
; CHECK-LABEL: @bitcast_constexpr_4i32_2i64(
; CHECK-NEXT: ret <4 x i32> <i32 undef, i32 undef, i32 2, i32 0>
;
  %cast = bitcast <2 x i64><i64 undef, i64 2> to <4 x i32>
  ret <4 x i32> %cast
}
; i64 -> 4 x i16 split: the undef lane expands to four undef i16s.
define <8 x i16> @bitcast_constexpr_8i16_2i64_u2() {
; CHECK-LABEL: @bitcast_constexpr_8i16_2i64_u2(
; CHECK-NEXT: ret <8 x i16> <i16 undef, i16 undef, i16 undef, i16 undef, i16 2, i16 0, i16 0, i16 0>
;
  %cast = bitcast <2 x i64><i64 undef, i64 2> to <8 x i16>
  ret <8 x i16> %cast
}
; i64 1 occupies only the lowest i16 lane (little-endian).
define <8 x i16> @bitcast_constexpr_8i16_2i64_1u() {
; CHECK-LABEL: @bitcast_constexpr_8i16_2i64_1u(
; CHECK-NEXT: ret <8 x i16> <i16 1, i16 0, i16 0, i16 0, i16 undef, i16 undef, i16 undef, i16 undef>
;
  %cast = bitcast <2 x i64><i64 1, i64 undef> to <8 x i16>
  ret <8 x i16> %cast
}
; 65536 = 1 << 16, so the 1 lands in the second i16 lane of the high i64.
define <8 x i16> @bitcast_constexpr_8i16_2i64_u65536() {
; CHECK-LABEL: @bitcast_constexpr_8i16_2i64_u65536(
; CHECK-NEXT: ret <8 x i16> <i16 undef, i16 undef, i16 undef, i16 undef, i16 0, i16 1, i16 0, i16 0>
;
  %cast = bitcast <2 x i64><i64 undef, i64 65536> to <8 x i16>
  ret <8 x i16> %cast
}
; i64 -> 8 x i8 split: undef i64 becomes eight undef bytes.
define <16 x i8> @bitcast_constexpr_16i8_2i64_u2() {
; CHECK-LABEL: @bitcast_constexpr_16i8_2i64_u2(
; CHECK-NEXT: ret <16 x i8> <i8 undef, i8 undef, i8 undef, i8 undef, i8 undef, i8 undef, i8 undef, i8 undef, i8 2, i8 0, i8 0, i8 0, i8 0, i8 0, i8 0, i8 0>
;
  %cast = bitcast <2 x i64><i64 undef, i64 2> to <16 x i8>
  ret <16 x i8> %cast
}
; 256 = 1 << 8, so the 1 lands in the second byte lane.
define <16 x i8> @bitcast_constexpr_16i8_2i64_256u() {
; CHECK-LABEL: @bitcast_constexpr_16i8_2i64_256u(
; CHECK-NEXT: ret <16 x i8> <i8 0, i8 1, i8 0, i8 0, i8 0, i8 0, i8 0, i8 0, i8 undef, i8 undef, i8 undef, i8 undef, i8 undef, i8 undef, i8 undef, i8 undef>
;
  %cast = bitcast <2 x i64><i64 256, i64 undef> to <16 x i8>
  ret <16 x i8> %cast
}
; Mirror of the previous test with undef in the low i64 instead of the high one.
define <16 x i8> @bitcast_constexpr_16i8_2i64_u256() {
; CHECK-LABEL: @bitcast_constexpr_16i8_2i64_u256(
; CHECK-NEXT: ret <16 x i8> <i8 undef, i8 undef, i8 undef, i8 undef, i8 undef, i8 undef, i8 undef, i8 undef, i8 0, i8 1, i8 0, i8 0, i8 0, i8 0, i8 0, i8 0>
;
  %cast = bitcast <2 x i64><i64 undef, i64 256> to <16 x i8>
  ret <16 x i8> %cast
}
; i32 -> 2 x i16 split; each undef i32 yields two undef i16 lanes.
define <8 x i16> @bitcast_constexpr_8i16_4i32_uu22() {
; CHECK-LABEL: @bitcast_constexpr_8i16_4i32_uu22(
; CHECK-NEXT: ret <8 x i16> <i16 undef, i16 undef, i16 undef, i16 undef, i16 2, i16 0, i16 2, i16 0>
;
  %cast = bitcast <4 x i32><i32 undef, i32 undef, i32 2, i32 2> to <8 x i16>
  ret <8 x i16> %cast
}
; <1, 0, undef, undef> i32s fold to <1, 0, 0, 0, undef x 4> i16s.
define <8 x i16> @bitcast_constexpr_8i16_4i32_10uu() {
; CHECK-LABEL: @bitcast_constexpr_8i16_4i32_10uu(
; CHECK-NEXT: ret <8 x i16> <i16 1, i16 0, i16 0, i16 0, i16 undef, i16 undef, i16 undef, i16 undef>
;
  %cast = bitcast <4 x i32><i32 1, i32 0, i32 undef, i32 undef> to <8 x i16>
  ret <8 x i16> %cast
}
; 65536 = 1 << 16 within an i32, so the 1 lands in the upper i16 of each lane.
; NOTE(review): the function name says u257u256 but the constants are 65536 —
; name predates this revision; kept as-is since CHECK-LABEL must match.
define <8 x i16> @bitcast_constexpr_8i16_4i32_u257u256() {
; CHECK-LABEL: @bitcast_constexpr_8i16_4i32_u257u256(
; CHECK-NEXT: ret <8 x i16> <i16 undef, i16 undef, i16 0, i16 1, i16 undef, i16 undef, i16 0, i16 1>
;
  %cast = bitcast <4 x i32><i32 undef, i32 65536, i32 undef, i32 65536> to <8 x i16>
  ret <8 x i16> %cast
}
; i32 -> 4 x i8 split with alternating undef/defined source lanes.
define <16 x i8> @bitcast_constexpr_16i8_4i32_u2u2() {
; CHECK-LABEL: @bitcast_constexpr_16i8_4i32_u2u2(
; CHECK-NEXT: ret <16 x i8> <i8 undef, i8 undef, i8 undef, i8 undef, i8 2, i8 0, i8 0, i8 0, i8 undef, i8 undef, i8 undef, i8 undef, i8 2, i8 0, i8 0, i8 0>
;
  %cast = bitcast <4 x i32><i32 undef, i32 2, i32 undef, i32 2> to <16 x i8>
  ret <16 x i8> %cast
}
; Defined lanes first, undef lanes second — the complement of the u2u2 case.
define <16 x i8> @bitcast_constexpr_16i8_4i32_1u1u() {
; CHECK-LABEL: @bitcast_constexpr_16i8_4i32_1u1u(
; CHECK-NEXT: ret <16 x i8> <i8 1, i8 0, i8 0, i8 0, i8 undef, i8 undef, i8 undef, i8 undef, i8 1, i8 0, i8 0, i8 0, i8 undef, i8 undef, i8 undef, i8 undef>
;
  %cast = bitcast <4 x i32><i32 1, i32 undef, i32 1, i32 undef> to <16 x i8>
  ret <16 x i8> %cast
}
; Single defined i32 (256 = 1 << 8) among three undef lanes.
define <16 x i8> @bitcast_constexpr_16i8_4i32_u256uu() {
; CHECK-LABEL: @bitcast_constexpr_16i8_4i32_u256uu(
; CHECK-NEXT: ret <16 x i8> <i8 undef, i8 undef, i8 undef, i8 undef, i8 0, i8 1, i8 0, i8 0, i8 undef, i8 undef, i8 undef, i8 undef, i8 undef, i8 undef, i8 undef, i8 undef>
;
  %cast = bitcast <4 x i32><i32 undef, i32 256, i32 undef, i32 undef> to <16 x i8>
  ret <16 x i8> %cast
}
; i16 -> 2 x i8 split; each undef i16 expands to two undef bytes.
define <16 x i8> @bitcast_constexpr_16i8_8i16_u2u2u2u2() {
; CHECK-LABEL: @bitcast_constexpr_16i8_8i16_u2u2u2u2(
; CHECK-NEXT: ret <16 x i8> <i8 undef, i8 undef, i8 2, i8 0, i8 undef, i8 undef, i8 2, i8 0, i8 undef, i8 undef, i8 2, i8 0, i8 undef, i8 undef, i8 2, i8 0>
;
  %cast = bitcast <8 x i16><i16 undef, i16 2, i16 undef, i16 2, i16 undef, i16 2, i16 undef, i16 2> to <16 x i8>
  ret <16 x i8> %cast
}
; Alternating defined(1)/undef i16 lanes folded to interleaved bytes.
define <16 x i8> @bitcast_constexpr_16i8_8i16_1u1u1u1u() {
; CHECK-LABEL: @bitcast_constexpr_16i8_8i16_1u1u1u1u(
; CHECK-NEXT: ret <16 x i8> <i8 1, i8 0, i8 undef, i8 undef, i8 1, i8 0, i8 undef, i8 undef, i8 1, i8 0, i8 undef, i8 undef, i8 1, i8 0, i8 undef, i8 undef>
;
  %cast = bitcast <8 x i16><i16 1, i16 undef, i16 1, i16 undef, i16 1, i16 undef, i16 1, i16 undef> to <16 x i8>
  ret <16 x i8> %cast
}
; 256 = 1 << 8 in an i16, so each defined lane folds to bytes <0, 1>.
define <16 x i8> @bitcast_constexpr_16i8_8i16_u256uuu256uu() {
; CHECK-LABEL: @bitcast_constexpr_16i8_8i16_u256uuu256uu(
; CHECK-NEXT: ret <16 x i8> <i8 undef, i8 undef, i8 0, i8 1, i8 undef, i8 undef, i8 undef, i8 undef, i8 undef, i8 undef, i8 0, i8 1, i8 undef, i8 undef, i8 undef, i8 undef>
;
  %cast = bitcast <8 x i16><i16 undef, i16 256, i16 undef, i16 undef, i16 undef, i16 256, i16 undef, i16 undef> to <16 x i8>
  ret <16 x i8> %cast
}