From: Andrea Di Biagio
Date: Tue, 13 Sep 2016 13:17:42 +0000 (+0000)
Subject: [InstSimplify] Add tests to show missed bitcast folding opportunities.
X-Git-Tag: llvmorg-4.0.0-rc1~9947
X-Git-Url: http://review.tizen.org/git/?a=commitdiff_plain;h=3647a96a44c456e9302eee5812efb73c882565d2;p=platform%2Fupstream%2Fllvm.git

[InstSimplify] Add tests to show missed bitcast folding opportunities.

InstSimplify doesn't always know how to fold a bitcast of a constant vector.
In particular, the logic in InstSimplify doesn't know how to handle the case
where the constant vector in input contains some undef elements, and the
number of elements is smaller than the number of elements of the bitcast
vector type.

llvm-svn: 281332
---

diff --git a/llvm/test/Transforms/InstSimplify/bitcast-vector-fold.ll b/llvm/test/Transforms/InstSimplify/bitcast-vector-fold.ll
index 68783438..126aba6 100644
--- a/llvm/test/Transforms/InstSimplify/bitcast-vector-fold.ll
+++ b/llvm/test/Transforms/InstSimplify/bitcast-vector-fold.ll
@@ -123,3 +123,147 @@ define <2 x double> @foo6() {
   %cast = bitcast <4 x i32> <i32 0, i32 1, i32 2, i32 3> to <2 x double>
   ret <2 x double> %cast
 }
+
+define <4 x i32> @bitcast_constexpr_4i32_2i64_u2() {
+; CHECK-LABEL: @bitcast_constexpr_4i32_2i64_u2(
+; CHECK-NEXT:    ret <4 x i32> bitcast (<2 x i64> <i64 undef, i64 2> to <4 x i32>)
+;
+  %cast = bitcast <2 x i64> <i64 undef, i64 2> to <4 x i32>
+  ret <4 x i32> %cast
+}
+
+define <4 x i32> @bitcast_constexpr_4i32_2i64_1u() {
+; CHECK-LABEL: @bitcast_constexpr_4i32_2i64_1u(
+; CHECK-NEXT:    ret <4 x i32> bitcast (<2 x i64> <i64 1, i64 undef> to <4 x i32>)
+;
+  %cast = bitcast <2 x i64> <i64 1, i64 undef> to <4 x i32>
+  ret <4 x i32> %cast
+}
+
+define <4 x i32> @bitcast_constexpr_4i32_2i64() {
+; CHECK-LABEL: @bitcast_constexpr_4i32_2i64(
+; CHECK-NEXT:    ret <4 x i32> bitcast (<2 x i64> <i64 undef, i64 4294967296> to <4 x i32>)
+;
+  %cast = bitcast <2 x i64> <i64 undef, i64 4294967296> to <4 x i32>
+  ret <4 x i32> %cast
+}
+
+define <8 x i16> @bitcast_constexpr_8i16_2i64_u2() {
+; CHECK-LABEL: @bitcast_constexpr_8i16_2i64_u2(
+; CHECK-NEXT:    ret <8 x i16> bitcast (<2 x i64> <i64 undef, i64 2> to <8 x i16>)
+;
+  %cast = bitcast <2 x i64> <i64 undef, i64 2> to <8 x i16>
+  ret <8 x i16> %cast
+}
+
+define <8 x i16> @bitcast_constexpr_8i16_2i64_1u() {
+; CHECK-LABEL: @bitcast_constexpr_8i16_2i64_1u(
+; CHECK-NEXT:    ret <8 x i16> bitcast (<2 x i64> <i64 1, i64 undef> to <8 x i16>)
+;
+  %cast = bitcast <2 x i64> <i64 1, i64 undef> to <8 x i16>
+  ret <8 x i16> %cast
+}
+
+define <8 x i16> @bitcast_constexpr_8i16_2i64_u65536() {
+; CHECK-LABEL: @bitcast_constexpr_8i16_2i64_u65536(
+; CHECK-NEXT:    ret <8 x i16> bitcast (<2 x i64> <i64 undef, i64 65536> to <8 x i16>)
+;
+  %cast = bitcast <2 x i64> <i64 undef, i64 65536> to <8 x i16>
+  ret <8 x i16> %cast
+}
+
+define <16 x i8> @bitcast_constexpr_16i8_2i64_u2() {
+; CHECK-LABEL: @bitcast_constexpr_16i8_2i64_u2(
+; CHECK-NEXT:    ret <16 x i8> bitcast (<2 x i64> <i64 undef, i64 2> to <16 x i8>)
+;
+  %cast = bitcast <2 x i64> <i64 undef, i64 2> to <16 x i8>
+  ret <16 x i8> %cast
+}
+
+define <16 x i8> @bitcast_constexpr_16i8_2i64_256u() {
+; CHECK-LABEL: @bitcast_constexpr_16i8_2i64_256u(
+; CHECK-NEXT:    ret <16 x i8> bitcast (<2 x i64> <i64 256, i64 undef> to <16 x i8>)
+;
+  %cast = bitcast <2 x i64> <i64 256, i64 undef> to <16 x i8>
+  ret <16 x i8> %cast
+}
+
+define <16 x i8> @bitcast_constexpr_16i8_2i64_u256() {
+; CHECK-LABEL: @bitcast_constexpr_16i8_2i64_u256(
+; CHECK-NEXT:    ret <16 x i8> bitcast (<2 x i64> <i64 undef, i64 256> to <16 x i8>)
+;
+  %cast = bitcast <2 x i64> <i64 undef, i64 256> to <16 x i8>
+  ret <16 x i8> %cast
+}
+
+define <8 x i16> @bitcast_constexpr_8i16_4i32_uu22() {
+; CHECK-LABEL: @bitcast_constexpr_8i16_4i32_uu22(
+; CHECK-NEXT:    ret <8 x i16> bitcast (<4 x i32> <i32 undef, i32 undef, i32 2, i32 2> to <8 x i16>)
+;
+  %cast = bitcast <4 x i32> <i32 undef, i32 undef, i32 2, i32 2> to <8 x i16>
+  ret <8 x i16> %cast
+}
+
+define <8 x i16> @bitcast_constexpr_8i16_4i32_10uu() {
+; CHECK-LABEL: @bitcast_constexpr_8i16_4i32_10uu(
+; CHECK-NEXT:    ret <8 x i16> bitcast (<4 x i32> <i32 1, i32 0, i32 undef, i32 undef> to <8 x i16>)
+;
+  %cast = bitcast <4 x i32> <i32 1, i32 0, i32 undef, i32 undef> to <8 x i16>
+  ret <8 x i16> %cast
+}
+
+define <8 x i16> @bitcast_constexpr_8i16_4i32_u257u256() {
+; CHECK-LABEL: @bitcast_constexpr_8i16_4i32_u257u256(
+; CHECK-NEXT:    ret <8 x i16> bitcast (<4 x i32> <i32 undef, i32 257, i32 undef, i32 256> to <8 x i16>)
+;
+  %cast = bitcast <4 x i32> <i32 undef, i32 257, i32 undef, i32 256> to <8 x i16>
+  ret <8 x i16> %cast
+}
+
+define <16 x i8> @bitcast_constexpr_16i8_4i32_u2u2() {
+; CHECK-LABEL: @bitcast_constexpr_16i8_4i32_u2u2(
+; CHECK-NEXT:    ret <16 x i8> bitcast (<4 x i32> <i32 undef, i32 2, i32 undef, i32 2> to <16 x i8>)
+;
+  %cast = bitcast <4 x i32> <i32 undef, i32 2, i32 undef, i32 2> to <16 x i8>
+  ret <16 x i8> %cast
+}
+
+define <16 x i8> @bitcast_constexpr_16i8_4i32_1u1u() {
+; CHECK-LABEL: @bitcast_constexpr_16i8_4i32_1u1u(
+; CHECK-NEXT:    ret <16 x i8> bitcast (<4 x i32> <i32 1, i32 undef, i32 1, i32 undef> to <16 x i8>)
+;
+  %cast = bitcast <4 x i32> <i32 1, i32 undef, i32 1, i32 undef> to <16 x i8>
+  ret <16 x i8> %cast
+}
+
+define <16 x i8> @bitcast_constexpr_16i8_4i32_u256uu() {
+; CHECK-LABEL: @bitcast_constexpr_16i8_4i32_u256uu(
+; CHECK-NEXT:    ret <16 x i8> bitcast (<4 x i32> <i32 undef, i32 256, i32 undef, i32 undef> to <16 x i8>)
+;
+  %cast = bitcast <4 x i32> <i32 undef, i32 256, i32 undef, i32 undef> to <16 x i8>
+  ret <16 x i8> %cast
+}
+
+define <16 x i8> @bitcast_constexpr_16i8_8i16_u2u2u2u2() {
+; CHECK-LABEL: @bitcast_constexpr_16i8_8i16_u2u2u2u2(
+; CHECK-NEXT:    ret <16 x i8> bitcast (<8 x i16> <i16 undef, i16 2, i16 undef, i16 2, i16 undef, i16 2, i16 undef, i16 2> to <16 x i8>)
+;
+  %cast = bitcast <8 x i16> <i16 undef, i16 2, i16 undef, i16 2, i16 undef, i16 2, i16 undef, i16 2> to <16 x i8>
+  ret <16 x i8> %cast
+}
+
+define <16 x i8> @bitcast_constexpr_16i8_8i16_1u1u1u1u() {
+; CHECK-LABEL: @bitcast_constexpr_16i8_8i16_1u1u1u1u(
+; CHECK-NEXT:    ret <16 x i8> bitcast (<8 x i16> <i16 1, i16 undef, i16 1, i16 undef, i16 1, i16 undef, i16 1, i16 undef> to <16 x i8>)
+;
+  %cast = bitcast <8 x i16> <i16 1, i16 undef, i16 1, i16 undef, i16 1, i16 undef, i16 1, i16 undef> to <16 x i8>
+  ret <16 x i8> %cast
+}
+
+define <16 x i8> @bitcast_constexpr_16i8_8i16_u256uuu256uu() {
+; CHECK-LABEL: @bitcast_constexpr_16i8_8i16_u256uuu256uu(
+; CHECK-NEXT:    ret <16 x i8> bitcast (<8 x i16> <i16 undef, i16 256, i16 undef, i16 undef, i16 undef, i16 256, i16 undef, i16 undef> to <16 x i8>)
+;
+  %cast = bitcast <8 x i16> <i16 undef, i16 256, i16 undef, i16 undef, i16 undef, i16 256, i16 undef, i16 undef> to <16 x i8>
+  ret <16 x i8> %cast
+}