+2013-12-19 Tejas Belagod <tejas.belagod@arm.com>
+
+ * config/aarch64/aarch64-builtins.c (aarch64_init_simd_builtins):
+ Define builtin types for poly64_t and poly128_t.
+ (TYPES_BINOPP, aarch64_types_binopp_qualifiers): New.
+ * config/aarch64/aarch64-simd-builtins.def: Update builtins table.
+ * config/aarch64/aarch64-simd.md (aarch64_crypto_pmulldi,
+ aarch64_crypto_pmullv2di): New.
+ * config/aarch64/aarch64.c (aarch64_simd_mangle_map): Update table for
+ poly64x2_t mangling.
+ * config/aarch64/arm_neon.h (poly64x2_t, poly64_t, poly128_t): Define.
+ (vmull_p64, vmull_high_p64): New.
+ * config/aarch64/iterators.md (UNSPEC_PMULL, UNSPEC_PMULL2): New.
+
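As a quick illustration (not part of the patch), the new types and intrinsics are
intended to be used along the lines of the sketch below. The function names are
hypothetical, the code assumes -march=armv8-a+crypto (as in the new pmull_1.c
test), and the expected instructions come from the aarch64-simd.md patterns in
this patch:

    #include "arm_neon.h"

    /* 64 x 64 -> 128-bit carry-less (polynomial) multiply; expected to map to
       "pmull %0.1q, %1.1d, %2.1d" via __builtin_aarch64_crypto_pmulldi_ppp.  */
    poly128_t
    clmul_low (poly64_t a, poly64_t b)
    {
      return vmull_p64 (a, b);
    }

    /* The same operation on the upper 64-bit lanes of the vector operands;
       expected to map to "pmull2 %0.1q, %1.2d, %2.2d" via
       __builtin_aarch64_crypto_pmullv2di_ppp.  */
    poly128_t
    clmul_high (poly64x2_t a, poly64x2_t b)
    {
      return vmull_high_p64 (a, b);
    }
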
2013-12-19 Tejas Belagod <tejas.belagod@arm.com>
* config/aarch64/aarch64-simd-builtins.def: Update builtins table.
= { qualifier_unsigned, qualifier_unsigned, qualifier_unsigned };
#define TYPES_BINOPU (aarch64_types_binopu_qualifiers)
static enum aarch64_type_qualifiers
+aarch64_types_binopp_qualifiers[SIMD_MAX_BUILTIN_ARGS]
+ = { qualifier_poly, qualifier_poly, qualifier_poly };
+#define TYPES_BINOPP (aarch64_types_binopp_qualifiers)
+
+static enum aarch64_type_qualifiers
aarch64_types_ternop_qualifiers[SIMD_MAX_BUILTIN_ARGS]
= { qualifier_none, qualifier_none, qualifier_none, qualifier_none };
#define TYPES_TERNOP (aarch64_types_ternop_qualifiers)
/* Poly scalar type nodes. */
tree aarch64_simd_polyQI_type_node = aarch64_build_poly_type (QImode);
tree aarch64_simd_polyHI_type_node = aarch64_build_poly_type (HImode);
+ tree aarch64_simd_polyDI_type_node = aarch64_build_poly_type (DImode);
+ tree aarch64_simd_polyTI_type_node = aarch64_build_poly_type (TImode);
/* Float type nodes. */
tree aarch64_simd_float_type_node = aarch64_build_signed_type (SFmode);
"__builtin_aarch64_simd_poly8");
(*lang_hooks.types.register_builtin_type) (aarch64_simd_polyHI_type_node,
"__builtin_aarch64_simd_poly16");
+ (*lang_hooks.types.register_builtin_type) (aarch64_simd_polyDI_type_node,
+ "__builtin_aarch64_simd_poly64");
+ (*lang_hooks.types.register_builtin_type) (aarch64_simd_polyTI_type_node,
+ "__builtin_aarch64_simd_poly128");
(*lang_hooks.types.register_builtin_type) (aarch64_simd_intTI_type_node,
"__builtin_aarch64_simd_ti");
(*lang_hooks.types.register_builtin_type) (aarch64_simd_intEI_type_node,
VAR1 (TERNOPU, crypto_sha256h2, 0, v4si)
VAR1 (BINOPU, crypto_sha256su0, 0, v4si)
VAR1 (TERNOPU, crypto_sha256su1, 0, v4si)
+
+ /* Implemented by aarch64_crypto_pmull<mode>. */
+ VAR1 (BINOPP, crypto_pmull, 0, di)
+ VAR1 (BINOPP, crypto_pmull, 0, v2di)
"sha256su1\\t%0.4s, %2.4s, %3.4s"
[(set_attr "type" "crypto_sha256_slow")]
)
+
+;; pmull
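+;; 64-bit x 64-bit -> 128-bit widening polynomial (carry-less) multiply.
+;; The DImode pattern multiplies two scalar D registers; the V2DImode
+;; pattern (pmull2) multiplies the upper 64-bit lanes of its vector operands.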
+
+(define_insn "aarch64_crypto_pmulldi"
+ [(set (match_operand:TI 0 "register_operand" "=w")
+ (unspec:TI [(match_operand:DI 1 "register_operand" "w")
+ (match_operand:DI 2 "register_operand" "w")]
+ UNSPEC_PMULL))]
+ "TARGET_SIMD && TARGET_CRYPTO"
+ "pmull\\t%0.1q, %1.1d, %2.1d"
+ [(set_attr "type" "neon_mul_d_long")]
+)
+
+(define_insn "aarch64_crypto_pmullv2di"
+ [(set (match_operand:TI 0 "register_operand" "=w")
+ (unspec:TI [(match_operand:V2DI 1 "register_operand" "w")
+ (match_operand:V2DI 2 "register_operand" "w")]
+ UNSPEC_PMULL2))]
+ "TARGET_SIMD && TARGET_CRYPTO"
+ "pmull2\\t%0.1q, %1.2d, %2.2d"
+ [(set_attr "type" "neon_mul_d_long")]
+)
{ V2DFmode, "__builtin_aarch64_simd_df", "13__Float64x2_t" },
{ V16QImode, "__builtin_aarch64_simd_poly8", "12__Poly8x16_t" },
{ V8HImode, "__builtin_aarch64_simd_poly16", "12__Poly16x8_t" },
+ { V2DImode, "__builtin_aarch64_simd_poly64", "12__Poly64x2_t" },
{ VOIDmode, NULL, NULL }
};
__attribute__ ((__vector_size__ (16)));
typedef __builtin_aarch64_simd_poly16 poly16x8_t
__attribute__ ((__vector_size__ (16)));
+typedef __builtin_aarch64_simd_poly64 poly64x2_t
+ __attribute__ ((__vector_size__ (16)));
typedef __builtin_aarch64_simd_uqi uint8x16_t
__attribute__ ((__vector_size__ (16)));
typedef __builtin_aarch64_simd_uhi uint16x8_t
typedef double float64_t;
typedef __builtin_aarch64_simd_poly8 poly8_t;
typedef __builtin_aarch64_simd_poly16 poly16_t;
+typedef __builtin_aarch64_simd_poly64 poly64_t;
+typedef __builtin_aarch64_simd_poly128 poly128_t;
typedef struct int8x8x2_t
{
return __builtin_aarch64_crypto_sha256su1v4si_uuuu (tw0_3, w8_11, w12_15);
}
+static __inline poly128_t
+vmull_p64 (poly64_t a, poly64_t b)
+{
+ return __builtin_aarch64_crypto_pmulldi_ppp (a, b);
+}
+
+static __inline poly128_t
+vmull_high_p64 (poly64x2_t a, poly64x2_t b)
+{
+ return __builtin_aarch64_crypto_pmullv2di_ppp (a, b);
+}
+
#endif
/* vshl */
UNSPEC_SHA256H2 ; Used in aarch64-simd.md.
UNSPEC_SHA256SU0 ; Used in aarch64-simd.md.
UNSPEC_SHA256SU1 ; Used in aarch64-simd.md.
+ UNSPEC_PMULL ; Used in aarch64-simd.md.
+ UNSPEC_PMULL2 ; Used in aarch64-simd.md.
])
;; -------------------------------------------------------------------
2013-12-19 Tejas Belagod <tejas.belagod@arm.com>
+ * gcc.target/aarch64/pmull_1.c: New.
+
+2013-12-19 Tejas Belagod <tejas.belagod@arm.com>
+
* gcc.target/aarch64/sha256_1.c: New.
2013-12-19 Tejas Belagod <tejas.belagod@arm.com>
--- /dev/null
+
+/* { dg-do compile } */
+/* { dg-options "-march=armv8-a+crypto" } */
+
+#include "arm_neon.h"
+
+poly128_t
+test_vmull_p64 (poly64_t a, poly64_t b)
+{
+ return vmull_p64 (a, b);
+}
+
+/* { dg-final { scan-assembler-times "pmull\\tv" 1 } } */
+
+poly128_t
+test_vmull_high_p64 (poly64x2_t a, poly64x2_t b)
+{
+ return vmull_high_p64 (a, b);
+}
+
+/* { dg-final { scan-assembler-times "pmull2\\tv" 1 } } */
+
+/* { dg-final { cleanup-saved-temps } } */
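
(Not part of the patch: assuming a built GCC tree, this test alone can be
exercised on an aarch64 target with make check-gcc
RUNTESTFLAGS="aarch64.exp=pmull_1.c"; both scan-assembler-times checks above
should then pass.)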