__attribute__ ((__vector_size__ (8)));
typedef __builtin_aarch64_simd_si int32x2_t
__attribute__ ((__vector_size__ (8)));
-typedef int64_t int64x1_t;
+typedef __builtin_aarch64_simd_di int64x1_t
+ __attribute__ ((__vector_size__ (8)));
typedef int32_t int32x1_t;
typedef int16_t int16x1_t;
typedef int8_t int8x1_t;
__attribute__ ((__vector_size__ (8)));
typedef __builtin_aarch64_simd_usi uint32x2_t
__attribute__ ((__vector_size__ (8)));
-typedef uint64_t uint64x1_t;
+typedef __builtin_aarch64_simd_udi uint64x1_t
+ __attribute__ ((__vector_size__ (8)));
typedef uint32_t uint32x1_t;
typedef uint16_t uint16x1_t;
typedef uint8_t uint8x1_t;
__aarch64_vget_lane_any (v4hi, , ,__a, __b)
#define __aarch64_vget_lane_s32(__a, __b) \
__aarch64_vget_lane_any (v2si, , ,__a, __b)
-#define __aarch64_vget_lane_s64(__a, __b) (__a)
+#define __aarch64_vget_lane_s64(__a, __b) __extension__ \
+ ({ \
+ __builtin_aarch64_im_lane_boundsi (__b, 1); \
+ __a[0]; \
+ })
#define __aarch64_vget_lane_u8(__a, __b) \
__aarch64_vget_lane_any (v8qi, (uint8_t), (int8x8_t), __a, __b)
__aarch64_vget_lane_any (v4hi, (uint16_t), (int16x4_t), __a, __b)
#define __aarch64_vget_lane_u32(__a, __b) \
__aarch64_vget_lane_any (v2si, (uint32_t), (int32x2_t), __a, __b)
-#define __aarch64_vget_lane_u64(__a, __b) (__a)
+#define __aarch64_vget_lane_u64(__a, __b) __extension__ \
+ ({ \
+ __builtin_aarch64_im_lane_boundsi (__b, 1); \
+ __a[0]; \
+ })
#define __aarch64_vgetq_lane_f32(__a, __b) \
__aarch64_vget_lane_any (v4sf, , , __a, __b)
__aarch64_vdup_lane_any (s16, , , __a, __b)
#define __aarch64_vdup_lane_s32(__a, __b) \
__aarch64_vdup_lane_any (s32, , , __a, __b)
-#define __aarch64_vdup_lane_s64(__a, __b) (__a)
+#define __aarch64_vdup_lane_s64(__a, __b) \
+ __aarch64_vdup_lane_any (s64, , , __a, __b)
#define __aarch64_vdup_lane_u8(__a, __b) \
__aarch64_vdup_lane_any (u8, , , __a, __b)
#define __aarch64_vdup_lane_u16(__a, __b) \
__aarch64_vdup_lane_any (u16, , , __a, __b)
#define __aarch64_vdup_lane_u32(__a, __b) \
__aarch64_vdup_lane_any (u32, , , __a, __b)
-#define __aarch64_vdup_lane_u64(__a, __b) (__a)
+#define __aarch64_vdup_lane_u64(__a, __b) \
+ __aarch64_vdup_lane_any (u64, , , __a, __b)
/* __aarch64_vdup_laneq internal macros. */
#define __aarch64_vdup_laneq_f32(__a, __b) \
__aarch64_vdup_lane_any (s16, q, , __a, __b)
#define __aarch64_vdupq_lane_s32(__a, __b) \
__aarch64_vdup_lane_any (s32, q, , __a, __b)
-#define __aarch64_vdupq_lane_s64(__a, __b) (vdupq_n_s64 (__a))
+#define __aarch64_vdupq_lane_s64(__a, __b) \
+ __aarch64_vdup_lane_any (s64, q, , __a, __b)
#define __aarch64_vdupq_lane_u8(__a, __b) \
__aarch64_vdup_lane_any (u8, q, , __a, __b)
#define __aarch64_vdupq_lane_u16(__a, __b) \
__aarch64_vdup_lane_any (u16, q, , __a, __b)
#define __aarch64_vdupq_lane_u32(__a, __b) \
__aarch64_vdup_lane_any (u32, q, , __a, __b)
-#define __aarch64_vdupq_lane_u64(__a, __b) (vdupq_n_u64 (__a))
+#define __aarch64_vdupq_lane_u64(__a, __b) \
+ __aarch64_vdup_lane_any (u64, q, , __a, __b)
/* __aarch64_vdupq_laneq internal macros. */
#define __aarch64_vdupq_laneq_f32(__a, __b) \
__extension__ static __inline int64x1_t __attribute__ ((__always_inline__))
vqadd_s64 (int64x1_t __a, int64x1_t __b)
{
- return (int64x1_t) __builtin_aarch64_sqadddi (__a, __b);
+ return (int64x1_t) {__builtin_aarch64_sqadddi (__a[0], __b[0])};
}
__extension__ static __inline uint8x8_t __attribute__ ((__always_inline__))
__extension__ static __inline uint64x1_t __attribute__ ((__always_inline__))
vqadd_u64 (uint64x1_t __a, uint64x1_t __b)
{
- return (uint64x1_t) __builtin_aarch64_uqadddi_uuu ((uint64_t) __a,
- (uint64_t) __b);
+ return (uint64x1_t) {__builtin_aarch64_uqadddi_uuu (__a[0], __b[0])};
}
__extension__ static __inline int8x16_t __attribute__ ((__always_inline__))
__extension__ static __inline int64x1_t __attribute__ ((__always_inline__))
vqsub_s64 (int64x1_t __a, int64x1_t __b)
{
- return (int64x1_t) __builtin_aarch64_sqsubdi (__a, __b);
+ return (int64x1_t) {__builtin_aarch64_sqsubdi (__a[0], __b[0])};
}
__extension__ static __inline uint8x8_t __attribute__ ((__always_inline__))
__extension__ static __inline uint64x1_t __attribute__ ((__always_inline__))
vqsub_u64 (uint64x1_t __a, uint64x1_t __b)
{
- return (uint64x1_t) __builtin_aarch64_uqsubdi_uuu ((uint64_t) __a,
- (uint64_t) __b);
+ return (uint64x1_t) {__builtin_aarch64_uqsubdi_uuu (__a[0], __b[0])};
}
__extension__ static __inline int8x16_t __attribute__ ((__always_inline__))
__extension__ static __inline int64x1_t __attribute__ ((__always_inline__))
vqneg_s64 (int64x1_t __a)
{
- return __builtin_aarch64_sqnegdi (__a);
+ return (int64x1_t) {__builtin_aarch64_sqnegdi (__a[0])};
}
__extension__ static __inline int8x16_t __attribute__ ((__always_inline__))
__extension__ static __inline int64x1_t __attribute__ ((__always_inline__))
vqabs_s64 (int64x1_t __a)
{
- return __builtin_aarch64_sqabsdi (__a);
+ return (int64x1_t) {__builtin_aarch64_sqabsdi (__a[0])};
}
__extension__ static __inline int8x16_t __attribute__ ((__always_inline__))
__extension__ static __inline int64x1_t __attribute__ ((__always_inline__))
vcreate_s64 (uint64_t __a)
{
- return (int64x1_t) __a;
+ return (int64x1_t) {__a};
}
__extension__ static __inline float32x2_t __attribute__ ((__always_inline__))
__extension__ static __inline uint64x1_t __attribute__ ((__always_inline__))
vcreate_u64 (uint64_t __a)
{
- return (uint64x1_t) __a;
+ return (uint64x1_t) {__a};
}
__extension__ static __inline float64x1_t __attribute__ ((__always_inline__))
__extension__ static __inline int64x1_t __attribute__ ((__always_inline__))
vreinterpret_s64_f64 (float64x1_t __a)
{
- return __builtin_aarch64_reinterpretdiv1df (__a);
+ return (int64x1_t) {__builtin_aarch64_reinterpretdiv1df (__a)};
}
__extension__ static __inline int64x1_t __attribute__ ((__always_inline__))
__extension__ static __inline uint64x1_t __attribute__ ((__always_inline__))
vreinterpret_u64_f64 (float64x1_t __a)
{
- return __builtin_aarch64_reinterpretdiv1df_us (__a);
+ return (uint64x1_t) {__builtin_aarch64_reinterpretdiv1df_us (__a)};
}
__extension__ static __inline uint64x1_t __attribute__ ((__always_inline__))
#define __GET_LOW(__TYPE) \
uint64x2_t tmp = vreinterpretq_u64_##__TYPE (__a); \
- uint64_t lo = vgetq_lane_u64 (tmp, 0); \
+ uint64x1_t lo = vcreate_u64 (vgetq_lane_u64 (tmp, 0)); \
return vreinterpret_##__TYPE##_u64 (lo);
__extension__ static __inline float32x2_t __attribute__ ((__always_inline__))
__extension__ static __inline int64x1_t __attribute__ ((__always_inline__))
vget_low_s64 (int64x2_t __a)
{
- return vgetq_lane_s64 (__a, 0);
+ __GET_LOW (s64);
}
__extension__ static __inline uint8x8_t __attribute__ ((__always_inline__))
__extension__ static __inline uint64x1_t __attribute__ ((__always_inline__))
vget_low_u64 (uint64x2_t __a)
{
- return vgetq_lane_u64 (__a, 0);
+ return vcreate_u64 (vgetq_lane_u64 (__a, 0));
}
#undef __GET_LOW
__extension__ static __inline int64x2_t __attribute__ ((__always_inline__))
vcombine_s64 (int64x1_t __a, int64x1_t __b)
{
- return (int64x2_t) __builtin_aarch64_combinedi (__a, __b);
+ return __builtin_aarch64_combinedi (__a[0], __b[0]);
}
__extension__ static __inline float32x4_t __attribute__ ((__always_inline__))
__extension__ static __inline uint64x2_t __attribute__ ((__always_inline__))
vcombine_u64 (uint64x1_t __a, uint64x1_t __b)
{
- return (uint64x2_t) __builtin_aarch64_combinedi ((int64x1_t) __a,
- (int64x1_t) __b);
+ return (uint64x2_t) __builtin_aarch64_combinedi (__a[0], __b[0]);
}
__extension__ static __inline float64x2_t __attribute__ ((__always_inline__))
return result;
}
-__extension__ static __inline int64x1_t __attribute__ ((__always_inline__))
+__extension__ static __inline int64_t __attribute__ ((__always_inline__))
vpaddd_s64 (int64x2_t __a)
{
return __builtin_aarch64_addpdi (__a);
__extension__ static __inline int64x1_t __attribute__ ((__always_inline__))
vabs_s64 (int64x1_t __a)
{
- return __builtin_llabs (__a);
+ return (int64x1_t) {__builtin_aarch64_absdi (__a[0])};
}
__extension__ static __inline float32x4_t __attribute__ ((__always_inline__))
/* vadd */
-__extension__ static __inline int64x1_t __attribute__ ((__always_inline__))
-vaddd_s64 (int64x1_t __a, int64x1_t __b)
+__extension__ static __inline int64_t __attribute__ ((__always_inline__))
+vaddd_s64 (int64_t __a, int64_t __b)
{
return __a + __b;
}
-__extension__ static __inline uint64x1_t __attribute__ ((__always_inline__))
-vaddd_u64 (uint64x1_t __a, uint64x1_t __b)
+__extension__ static __inline uint64_t __attribute__ ((__always_inline__))
+vaddd_u64 (uint64_t __a, uint64_t __b)
{
return __a + __b;
}
__extension__ static __inline int64x1_t __attribute__ ((__always_inline__))
vbsl_s64 (uint64x1_t __a, int64x1_t __b, int64x1_t __c)
{
- return __builtin_aarch64_simd_bsldi_suss (__a, __b, __c);
+ return (int64x1_t)
+ {__builtin_aarch64_simd_bsldi_suss (__a[0], __b[0], __c[0])};
}
__extension__ static __inline uint8x8_t __attribute__ ((__always_inline__))
__extension__ static __inline uint64x1_t __attribute__ ((__always_inline__))
vbsl_u64 (uint64x1_t __a, uint64x1_t __b, uint64x1_t __c)
{
- return __builtin_aarch64_simd_bsldi_uuuu (__a, __b, __c);
+ return (uint64x1_t)
+ {__builtin_aarch64_simd_bsldi_uuuu (__a[0], __b[0], __c[0])};
}
__extension__ static __inline float32x4_t __attribute__ ((__always_inline__))
__extension__ static __inline uint64x1_t __attribute__ ((__always_inline__))
vceq_s64 (int64x1_t __a, int64x1_t __b)
{
- return __a == __b ? -1ll : 0ll;
+ return (uint64x1_t) {__a[0] == __b[0] ? -1ll : 0ll};
}
__extension__ static __inline uint8x8_t __attribute__ ((__always_inline__))
__extension__ static __inline uint64x1_t __attribute__ ((__always_inline__))
vceq_u64 (uint64x1_t __a, uint64x1_t __b)
{
- return __a == __b ? -1ll : 0ll;
+ return (uint64x1_t) {__a[0] == __b[0] ? -1ll : 0ll};
}
__extension__ static __inline uint32x4_t __attribute__ ((__always_inline__))
return __a == __b ? -1 : 0;
}
-__extension__ static __inline uint64x1_t __attribute__ ((__always_inline__))
-vceqd_s64 (int64x1_t __a, int64x1_t __b)
+__extension__ static __inline uint64_t __attribute__ ((__always_inline__))
+vceqd_s64 (int64_t __a, int64_t __b)
{
return __a == __b ? -1ll : 0ll;
}
-__extension__ static __inline uint64x1_t __attribute__ ((__always_inline__))
-vceqd_u64 (uint64x1_t __a, uint64x1_t __b)
+__extension__ static __inline uint64_t __attribute__ ((__always_inline__))
+vceqd_u64 (uint64_t __a, uint64_t __b)
{
return __a == __b ? -1ll : 0ll;
}
__extension__ static __inline uint64x1_t __attribute__ ((__always_inline__))
vceqz_s64 (int64x1_t __a)
{
- return __a == 0ll ? -1ll : 0ll;
+ return (uint64x1_t) {__a[0] == 0ll ? -1ll : 0ll};
}
__extension__ static __inline uint8x8_t __attribute__ ((__always_inline__))
__extension__ static __inline uint64x1_t __attribute__ ((__always_inline__))
vceqz_u64 (uint64x1_t __a)
{
- return __a == 0ll ? -1ll : 0ll;
+ return (uint64x1_t) {__a[0] == 0ll ? -1ll : 0ll};
}
__extension__ static __inline uint32x4_t __attribute__ ((__always_inline__))
return __a == 0.0f ? -1 : 0;
}
-__extension__ static __inline uint64x1_t __attribute__ ((__always_inline__))
-vceqzd_s64 (int64x1_t __a)
+__extension__ static __inline uint64_t __attribute__ ((__always_inline__))
+vceqzd_s64 (int64_t __a)
{
return __a == 0 ? -1ll : 0ll;
}
-__extension__ static __inline uint64x1_t __attribute__ ((__always_inline__))
-vceqzd_u64 (int64x1_t __a)
+__extension__ static __inline uint64_t __attribute__ ((__always_inline__))
+vceqzd_u64 (uint64_t __a)
{
return __a == 0 ? -1ll : 0ll;
}
__extension__ static __inline uint64x1_t __attribute__ ((__always_inline__))
vcge_s64 (int64x1_t __a, int64x1_t __b)
{
- return __a >= __b ? -1ll : 0ll;
+ return (uint64x1_t) {__a[0] >= __b[0] ? -1ll : 0ll};
}
__extension__ static __inline uint8x8_t __attribute__ ((__always_inline__))
__extension__ static __inline uint64x1_t __attribute__ ((__always_inline__))
vcge_u64 (uint64x1_t __a, uint64x1_t __b)
{
- return __a >= __b ? -1ll : 0ll;
+ return (uint64x1_t) {__a[0] >= __b[0] ? -1ll : 0ll};
}
__extension__ static __inline uint32x4_t __attribute__ ((__always_inline__))
return __a >= __b ? -1 : 0;
}
-__extension__ static __inline uint64x1_t __attribute__ ((__always_inline__))
-vcged_s64 (int64x1_t __a, int64x1_t __b)
+__extension__ static __inline uint64_t __attribute__ ((__always_inline__))
+vcged_s64 (int64_t __a, int64_t __b)
{
return __a >= __b ? -1ll : 0ll;
}
-__extension__ static __inline uint64x1_t __attribute__ ((__always_inline__))
-vcged_u64 (uint64x1_t __a, uint64x1_t __b)
+__extension__ static __inline uint64_t __attribute__ ((__always_inline__))
+vcged_u64 (uint64_t __a, uint64_t __b)
{
return __a >= __b ? -1ll : 0ll;
}
__extension__ static __inline uint64x1_t __attribute__ ((__always_inline__))
vcgez_s64 (int64x1_t __a)
{
- return __a >= 0ll ? -1ll : 0ll;
+ return (uint64x1_t) {__a[0] >= 0ll ? -1ll : 0ll};
}
__extension__ static __inline uint32x4_t __attribute__ ((__always_inline__))
return __a >= 0.0f ? -1 : 0;
}
-__extension__ static __inline uint64x1_t __attribute__ ((__always_inline__))
-vcgezd_s64 (int64x1_t __a)
+__extension__ static __inline uint64_t __attribute__ ((__always_inline__))
+vcgezd_s64 (int64_t __a)
{
return __a >= 0 ? -1ll : 0ll;
}
__extension__ static __inline uint64x1_t __attribute__ ((__always_inline__))
vcgt_s64 (int64x1_t __a, int64x1_t __b)
{
- return __a > __b ? -1ll : 0ll;
+ return (uint64x1_t) {__a[0] > __b[0] ? -1ll : 0ll};
}
__extension__ static __inline uint8x8_t __attribute__ ((__always_inline__))
__extension__ static __inline uint64x1_t __attribute__ ((__always_inline__))
vcgt_u64 (uint64x1_t __a, uint64x1_t __b)
{
- return __a > __b ? -1ll : 0ll;
+ return (uint64x1_t) {__a[0] > __b[0] ? -1ll : 0ll};
}
__extension__ static __inline uint32x4_t __attribute__ ((__always_inline__))
return __a > __b ? -1 : 0;
}
-__extension__ static __inline uint64x1_t __attribute__ ((__always_inline__))
-vcgtd_s64 (int64x1_t __a, int64x1_t __b)
+__extension__ static __inline uint64_t __attribute__ ((__always_inline__))
+vcgtd_s64 (int64_t __a, int64_t __b)
{
return __a > __b ? -1ll : 0ll;
}
-__extension__ static __inline uint64x1_t __attribute__ ((__always_inline__))
-vcgtd_u64 (uint64x1_t __a, uint64x1_t __b)
+__extension__ static __inline uint64_t __attribute__ ((__always_inline__))
+vcgtd_u64 (uint64_t __a, uint64_t __b)
{
return __a > __b ? -1ll : 0ll;
}
__extension__ static __inline uint64x1_t __attribute__ ((__always_inline__))
vcgtz_s64 (int64x1_t __a)
{
- return __a > 0ll ? -1ll : 0ll;
+ return (uint64x1_t) {__a[0] > 0ll ? -1ll : 0ll};
}
__extension__ static __inline uint32x4_t __attribute__ ((__always_inline__))
return __a > 0.0f ? -1 : 0;
}
-__extension__ static __inline uint64x1_t __attribute__ ((__always_inline__))
-vcgtzd_s64 (int64x1_t __a)
+__extension__ static __inline uint64_t __attribute__ ((__always_inline__))
+vcgtzd_s64 (int64_t __a)
{
return __a > 0 ? -1ll : 0ll;
}
__extension__ static __inline uint64x1_t __attribute__ ((__always_inline__))
vcle_s64 (int64x1_t __a, int64x1_t __b)
{
- return __a <= __b ? -1ll : 0ll;
+ return (uint64x1_t) {__a[0] <= __b[0] ? -1ll : 0ll};
}
__extension__ static __inline uint8x8_t __attribute__ ((__always_inline__))
__extension__ static __inline uint64x1_t __attribute__ ((__always_inline__))
vcle_u64 (uint64x1_t __a, uint64x1_t __b)
{
- return __a <= __b ? -1ll : 0ll;
+ return (uint64x1_t) {__a[0] <= __b[0] ? -1ll : 0ll};
}
__extension__ static __inline uint32x4_t __attribute__ ((__always_inline__))
return __a <= __b ? -1 : 0;
}
-__extension__ static __inline uint64x1_t __attribute__ ((__always_inline__))
-vcled_s64 (int64x1_t __a, int64x1_t __b)
+__extension__ static __inline uint64_t __attribute__ ((__always_inline__))
+vcled_s64 (int64_t __a, int64_t __b)
{
return __a <= __b ? -1ll : 0ll;
}
-__extension__ static __inline uint64x1_t __attribute__ ((__always_inline__))
-vcled_u64 (uint64x1_t __a, uint64x1_t __b)
+__extension__ static __inline uint64_t __attribute__ ((__always_inline__))
+vcled_u64 (uint64_t __a, uint64_t __b)
{
return __a <= __b ? -1ll : 0ll;
}
__extension__ static __inline uint64x1_t __attribute__ ((__always_inline__))
vclez_s64 (int64x1_t __a)
{
- return __a <= 0ll ? -1ll : 0ll;
+ return (uint64x1_t) {__a[0] <= 0ll ? -1ll : 0ll};
}
__extension__ static __inline uint32x4_t __attribute__ ((__always_inline__))
return __a <= 0.0f ? -1 : 0;
}
-__extension__ static __inline uint64x1_t __attribute__ ((__always_inline__))
-vclezd_s64 (int64x1_t __a)
+__extension__ static __inline uint64_t __attribute__ ((__always_inline__))
+vclezd_s64 (int64_t __a)
{
return __a <= 0 ? -1ll : 0ll;
}
__extension__ static __inline uint64x1_t __attribute__ ((__always_inline__))
vclt_s64 (int64x1_t __a, int64x1_t __b)
{
- return __a < __b ? -1ll : 0ll;
+ return (uint64x1_t) {__a[0] < __b[0] ? -1ll : 0ll};
}
__extension__ static __inline uint8x8_t __attribute__ ((__always_inline__))
__extension__ static __inline uint64x1_t __attribute__ ((__always_inline__))
vclt_u64 (uint64x1_t __a, uint64x1_t __b)
{
- return __a < __b ? -1ll : 0ll;
+ return (uint64x1_t) {__a[0] < __b[0] ? -1ll : 0ll};
}
__extension__ static __inline uint32x4_t __attribute__ ((__always_inline__))
return __a < __b ? -1 : 0;
}
-__extension__ static __inline uint64x1_t __attribute__ ((__always_inline__))
-vcltd_s64 (int64x1_t __a, int64x1_t __b)
+__extension__ static __inline uint64_t __attribute__ ((__always_inline__))
+vcltd_s64 (int64_t __a, int64_t __b)
{
return __a < __b ? -1ll : 0ll;
}
-__extension__ static __inline uint64x1_t __attribute__ ((__always_inline__))
-vcltd_u64 (uint64x1_t __a, uint64x1_t __b)
+__extension__ static __inline uint64_t __attribute__ ((__always_inline__))
+vcltd_u64 (uint64_t __a, uint64_t __b)
{
return __a < __b ? -1ll : 0ll;
}
__extension__ static __inline uint64x1_t __attribute__ ((__always_inline__))
vcltz_s64 (int64x1_t __a)
{
- return __a < 0ll ? -1ll : 0ll;
+ return (uint64x1_t) {__a[0] < 0ll ? -1ll : 0ll};
}
__extension__ static __inline uint32x4_t __attribute__ ((__always_inline__))
return __a < 0.0f ? -1 : 0;
}
-__extension__ static __inline uint64x1_t __attribute__ ((__always_inline__))
-vcltzd_s64 (int64x1_t __a)
+__extension__ static __inline uint64_t __attribute__ ((__always_inline__))
+vcltzd_s64 (int64_t __a)
{
return __a < 0 ? -1ll : 0ll;
}
__extension__ static __inline int64x1_t __attribute__ ((__always_inline__))
vdup_n_s64 (int64_t __a)
{
- return __a;
+ return (int64x1_t) {__a};
}
__extension__ static __inline uint8x8_t __attribute__ ((__always_inline__))
__extension__ static __inline uint64x1_t __attribute__ ((__always_inline__))
vdup_n_u64 (uint64_t __a)
{
- return __a;
+ return (uint64x1_t) {__a};
}
/* vdupq_n */
}
__extension__ static __inline int64_t __attribute__ ((__always_inline__))
-vdupd_lane_s64 (int64x1_t __a, const int __attribute__ ((unused)) __b)
+vdupd_lane_s64 (int64x1_t __a, const int __b)
{
- return __a;
+ __builtin_aarch64_im_lane_boundsi (__b, 1);
+ return __a[0];
}
__extension__ static __inline uint64_t __attribute__ ((__always_inline__))
-vdupd_lane_u64 (uint64x1_t __a, const int __attribute__ ((unused)) __b)
+vdupd_lane_u64 (uint64x1_t __a, const int __b)
{
- return __a;
+ __builtin_aarch64_im_lane_boundsi (__b, 1);
+ return __a[0];
}
/* vdupb_laneq */
__extension__ static __inline int64x1_t __attribute__ ((__always_inline__))
vld1_s64 (const int64_t *a)
{
- return *a;
+ return (int64x1_t) {*a};
}
__extension__ static __inline uint8x8_t __attribute__ ((__always_inline__))
__extension__ static __inline uint64x1_t __attribute__ ((__always_inline__))
vld1_u64 (const uint64_t *a)
{
- return *a;
+ return (uint64x1_t) {*a};
}
/* vld1q */
__extension__ static __inline int64x1_t __attribute__ ((__always_inline__))
vmov_n_s64 (int64_t __a)
{
- return __a;
+ return (int64x1_t) {__a};
}
__extension__ static __inline uint8x8_t __attribute__ ((__always_inline__))
__extension__ static __inline uint64x1_t __attribute__ ((__always_inline__))
vmov_n_u64 (uint64_t __a)
{
- return __a;
+ return (uint64x1_t) {__a};
}
__extension__ static __inline float32x4_t __attribute__ ((__always_inline__))
return (int32x1_t) __builtin_aarch64_sqaddsi (__a, __b);
}
-__extension__ static __inline int64x1_t __attribute__ ((__always_inline__))
-vqaddd_s64 (int64x1_t __a, int64x1_t __b)
+__extension__ static __inline int64_t __attribute__ ((__always_inline__))
+vqaddd_s64 (int64_t __a, int64_t __b)
{
- return (int64x1_t) __builtin_aarch64_sqadddi (__a, __b);
+ return __builtin_aarch64_sqadddi (__a, __b);
}
__extension__ static __inline uint8x1_t __attribute__ ((__always_inline__))
return (uint32x1_t) __builtin_aarch64_uqaddsi_uuu (__a, __b);
}
-__extension__ static __inline uint64x1_t __attribute__ ((__always_inline__))
-vqaddd_u64 (uint64x1_t __a, uint64x1_t __b)
+__extension__ static __inline uint64_t __attribute__ ((__always_inline__))
+vqaddd_u64 (uint64_t __a, uint64_t __b)
{
- return (uint64x1_t) __builtin_aarch64_uqadddi_uuu ((uint64_t) __a,
- (uint64_t) __b);
+ return __builtin_aarch64_uqadddi_uuu (__a, __b);
}
/* vqdmlal */
return __builtin_aarch64_sqdmlal_lanehi (__a, __b, __c, __d);
}
-__extension__ static __inline int64x1_t __attribute__ ((__always_inline__))
-vqdmlals_s32 (int64x1_t __a, int32x1_t __b, int32x1_t __c)
+__extension__ static __inline int64_t __attribute__ ((__always_inline__))
+vqdmlals_s32 (int64_t __a, int32x1_t __b, int32x1_t __c)
{
return __builtin_aarch64_sqdmlalsi (__a, __b, __c);
}
__extension__ static __inline int64x1_t __attribute__ ((__always_inline__))
vqdmlals_lane_s32 (int64x1_t __a, int32x1_t __b, int32x2_t __c, const int __d)
{
- return __builtin_aarch64_sqdmlal_lanesi (__a, __b, __c, __d);
+ return (int64x1_t)
+ {__builtin_aarch64_sqdmlal_lanesi (__a[0], __b, __c, __d)};
}
/* vqdmlsl */
return __builtin_aarch64_sqdmlsl_lanehi (__a, __b, __c, __d);
}
-__extension__ static __inline int64x1_t __attribute__ ((__always_inline__))
-vqdmlsls_s32 (int64x1_t __a, int32x1_t __b, int32x1_t __c)
+__extension__ static __inline int64_t __attribute__ ((__always_inline__))
+vqdmlsls_s32 (int64_t __a, int32x1_t __b, int32x1_t __c)
{
return __builtin_aarch64_sqdmlslsi (__a, __b, __c);
}
__extension__ static __inline int64x1_t __attribute__ ((__always_inline__))
vqdmlsls_lane_s32 (int64x1_t __a, int32x1_t __b, int32x2_t __c, const int __d)
{
- return __builtin_aarch64_sqdmlsl_lanesi (__a, __b, __c, __d);
+ return (int64x1_t)
+ {__builtin_aarch64_sqdmlsl_lanesi (__a[0], __b, __c, __d)};
}
/* vqdmulh */
return __builtin_aarch64_sqdmull_lanehi (__a, __b, __c);
}
-__extension__ static __inline int64x1_t __attribute__ ((__always_inline__))
+__extension__ static __inline int64_t __attribute__ ((__always_inline__))
vqdmulls_s32 (int32x1_t __a, int32x1_t __b)
{
- return (int64x1_t) __builtin_aarch64_sqdmullsi (__a, __b);
+ return __builtin_aarch64_sqdmullsi (__a, __b);
}
__extension__ static __inline int64x1_t __attribute__ ((__always_inline__))
vqdmulls_lane_s32 (int32x1_t __a, int32x2_t __b, const int __c)
{
- return __builtin_aarch64_sqdmull_lanesi (__a, __b, __c);
+ return (int64x1_t) {__builtin_aarch64_sqdmull_lanesi (__a, __b, __c)};
}
/* vqmovn */
}
__extension__ static __inline int32x1_t __attribute__ ((__always_inline__))
-vqmovnd_s64 (int64x1_t __a)
+vqmovnd_s64 (int64_t __a)
{
return (int32x1_t) __builtin_aarch64_sqmovndi (__a);
}
}
__extension__ static __inline uint32x1_t __attribute__ ((__always_inline__))
-vqmovnd_u64 (uint64x1_t __a)
+vqmovnd_u64 (uint64_t __a)
{
return (uint32x1_t) __builtin_aarch64_uqmovndi (__a);
}
}
__extension__ static __inline int32x1_t __attribute__ ((__always_inline__))
-vqmovund_s64 (int64x1_t __a)
+vqmovund_s64 (int64_t __a)
{
return (int32x1_t) __builtin_aarch64_sqmovundi (__a);
}
__extension__ static __inline int64x1_t __attribute__ ((__always_inline__))
vqrshl_s64 (int64x1_t __a, int64x1_t __b)
{
- return __builtin_aarch64_sqrshldi (__a, __b);
+ return (int64x1_t) {__builtin_aarch64_sqrshldi (__a[0], __b[0])};
}
__extension__ static __inline uint8x8_t __attribute__ ((__always_inline__))
__extension__ static __inline uint64x1_t __attribute__ ((__always_inline__))
vqrshl_u64 (uint64x1_t __a, int64x1_t __b)
{
- return __builtin_aarch64_uqrshldi_uus ( __a, __b);
+ return (uint64x1_t) {__builtin_aarch64_uqrshldi_uus (__a[0], __b[0])};
}
__extension__ static __inline int8x16_t __attribute__ ((__always_inline__))
return __builtin_aarch64_sqrshlsi (__a, __b);
}
-__extension__ static __inline int64x1_t __attribute__ ((__always_inline__))
-vqrshld_s64 (int64x1_t __a, int64x1_t __b)
+__extension__ static __inline int64_t __attribute__ ((__always_inline__))
+vqrshld_s64 (int64_t __a, int64_t __b)
{
return __builtin_aarch64_sqrshldi (__a, __b);
}
return __builtin_aarch64_uqrshlsi_uus (__a, __b);
}
-__extension__ static __inline uint64x1_t __attribute__ ((__always_inline__))
-vqrshld_u64 (uint64x1_t __a, uint64x1_t __b)
+__extension__ static __inline uint64_t __attribute__ ((__always_inline__))
+vqrshld_u64 (uint64_t __a, uint64_t __b)
{
return __builtin_aarch64_uqrshldi_uus (__a, __b);
}
}
__extension__ static __inline int32x1_t __attribute__ ((__always_inline__))
-vqrshrnd_n_s64 (int64x1_t __a, const int __b)
+vqrshrnd_n_s64 (int64_t __a, const int __b)
{
return (int32x1_t) __builtin_aarch64_sqrshrn_ndi (__a, __b);
}
}
__extension__ static __inline uint32x1_t __attribute__ ((__always_inline__))
-vqrshrnd_n_u64 (uint64x1_t __a, const int __b)
+vqrshrnd_n_u64 (uint64_t __a, const int __b)
{
return __builtin_aarch64_uqrshrn_ndi_uus (__a, __b);
}
}
__extension__ static __inline int32x1_t __attribute__ ((__always_inline__))
-vqrshrund_n_s64 (int64x1_t __a, const int __b)
+vqrshrund_n_s64 (int64_t __a, const int __b)
{
return (int32x1_t) __builtin_aarch64_sqrshrun_ndi (__a, __b);
}
__extension__ static __inline int64x1_t __attribute__ ((__always_inline__))
vqshl_s64 (int64x1_t __a, int64x1_t __b)
{
- return __builtin_aarch64_sqshldi (__a, __b);
+ return (int64x1_t) {__builtin_aarch64_sqshldi (__a[0], __b[0])};
}
__extension__ static __inline uint8x8_t __attribute__ ((__always_inline__))
__extension__ static __inline uint64x1_t __attribute__ ((__always_inline__))
vqshl_u64 (uint64x1_t __a, int64x1_t __b)
{
- return __builtin_aarch64_uqshldi_uus ( __a, __b);
+ return (uint64x1_t) {__builtin_aarch64_uqshldi_uus (__a[0], __b[0])};
}
__extension__ static __inline int8x16_t __attribute__ ((__always_inline__))
return __builtin_aarch64_sqshlsi (__a, __b);
}
-__extension__ static __inline int64x1_t __attribute__ ((__always_inline__))
-vqshld_s64 (int64x1_t __a, int64x1_t __b)
+__extension__ static __inline int64_t __attribute__ ((__always_inline__))
+vqshld_s64 (int64_t __a, int64_t __b)
{
return __builtin_aarch64_sqshldi (__a, __b);
}
return __builtin_aarch64_uqshlsi_uus (__a, __b);
}
-__extension__ static __inline uint64x1_t __attribute__ ((__always_inline__))
-vqshld_u64 (uint64x1_t __a, uint64x1_t __b)
+__extension__ static __inline uint64_t __attribute__ ((__always_inline__))
+vqshld_u64 (uint64_t __a, uint64_t __b)
{
return __builtin_aarch64_uqshldi_uus (__a, __b);
}
__extension__ static __inline int64x1_t __attribute__ ((__always_inline__))
vqshl_n_s64 (int64x1_t __a, const int __b)
{
- return (int64x1_t) __builtin_aarch64_sqshl_ndi (__a, __b);
+ return (int64x1_t) {__builtin_aarch64_sqshl_ndi (__a[0], __b)};
}
__extension__ static __inline uint8x8_t __attribute__ ((__always_inline__))
__extension__ static __inline uint64x1_t __attribute__ ((__always_inline__))
vqshl_n_u64 (uint64x1_t __a, const int __b)
{
- return __builtin_aarch64_uqshl_ndi_uus (__a, __b);
+ return (uint64x1_t) {__builtin_aarch64_uqshl_ndi_uus (__a[0], __b)};
}
__extension__ static __inline int8x16_t __attribute__ ((__always_inline__))
return (int32x1_t) __builtin_aarch64_sqshl_nsi (__a, __b);
}
-__extension__ static __inline int64x1_t __attribute__ ((__always_inline__))
-vqshld_n_s64 (int64x1_t __a, const int __b)
+__extension__ static __inline int64_t __attribute__ ((__always_inline__))
+vqshld_n_s64 (int64_t __a, const int __b)
{
- return (int64x1_t) __builtin_aarch64_sqshl_ndi (__a, __b);
+ return __builtin_aarch64_sqshl_ndi (__a, __b);
}
__extension__ static __inline uint8x1_t __attribute__ ((__always_inline__))
return __builtin_aarch64_uqshl_nsi_uus (__a, __b);
}
-__extension__ static __inline uint64x1_t __attribute__ ((__always_inline__))
-vqshld_n_u64 (uint64x1_t __a, const int __b)
+__extension__ static __inline uint64_t __attribute__ ((__always_inline__))
+vqshld_n_u64 (uint64_t __a, const int __b)
{
return __builtin_aarch64_uqshl_ndi_uus (__a, __b);
}
__extension__ static __inline uint64x1_t __attribute__ ((__always_inline__))
vqshlu_n_s64 (int64x1_t __a, const int __b)
{
- return __builtin_aarch64_sqshlu_ndi_uss (__a, __b);
+ return (uint64x1_t) {__builtin_aarch64_sqshlu_ndi_uss (__a[0], __b)};
}
__extension__ static __inline uint8x16_t __attribute__ ((__always_inline__))
return (int32x1_t) __builtin_aarch64_sqshlu_nsi_uss (__a, __b);
}
-__extension__ static __inline int64x1_t __attribute__ ((__always_inline__))
-vqshlud_n_s64 (int64x1_t __a, const int __b)
+__extension__ static __inline uint64_t __attribute__ ((__always_inline__))
+vqshlud_n_s64 (int64_t __a, const int __b)
{
- return (int64x1_t) __builtin_aarch64_sqshlu_ndi_uss (__a, __b);
+ return __builtin_aarch64_sqshlu_ndi_uss (__a, __b);
}
/* vqshrn */
}
__extension__ static __inline int32x1_t __attribute__ ((__always_inline__))
-vqshrnd_n_s64 (int64x1_t __a, const int __b)
+vqshrnd_n_s64 (int64_t __a, const int __b)
{
return (int32x1_t) __builtin_aarch64_sqshrn_ndi (__a, __b);
}
}
__extension__ static __inline uint32x1_t __attribute__ ((__always_inline__))
-vqshrnd_n_u64 (uint64x1_t __a, const int __b)
+vqshrnd_n_u64 (uint64_t __a, const int __b)
{
return __builtin_aarch64_uqshrn_ndi_uus (__a, __b);
}
}
__extension__ static __inline int32x1_t __attribute__ ((__always_inline__))
-vqshrund_n_s64 (int64x1_t __a, const int __b)
+vqshrund_n_s64 (int64_t __a, const int __b)
{
return (int32x1_t) __builtin_aarch64_sqshrun_ndi (__a, __b);
}
return (int32x1_t) __builtin_aarch64_sqsubsi (__a, __b);
}
-__extension__ static __inline int64x1_t __attribute__ ((__always_inline__))
-vqsubd_s64 (int64x1_t __a, int64x1_t __b)
+__extension__ static __inline int64_t __attribute__ ((__always_inline__))
+vqsubd_s64 (int64_t __a, int64_t __b)
{
- return (int64x1_t) __builtin_aarch64_sqsubdi (__a, __b);
+ return __builtin_aarch64_sqsubdi (__a, __b);
}
__extension__ static __inline uint8x1_t __attribute__ ((__always_inline__))
return (uint32x1_t) __builtin_aarch64_uqsubsi_uuu (__a, __b);
}
-__extension__ static __inline uint64x1_t __attribute__ ((__always_inline__))
-vqsubd_u64 (uint64x1_t __a, uint64x1_t __b)
+__extension__ static __inline uint64_t __attribute__ ((__always_inline__))
+vqsubd_u64 (uint64_t __a, uint64_t __b)
{
- return (uint64x1_t) __builtin_aarch64_uqsubdi_uuu ((uint64_t) __a,
- (uint64_t) __b);
+ return __builtin_aarch64_uqsubdi_uuu (__a, __b);
}
/* vrecpe */
__extension__ static __inline int64x1_t __attribute__ ((__always_inline__))
vrshl_s64 (int64x1_t __a, int64x1_t __b)
{
- return (int64x1_t) __builtin_aarch64_srshldi (__a, __b);
+ return (int64x1_t) {__builtin_aarch64_srshldi (__a[0], __b[0])};
}
__extension__ static __inline uint8x8_t __attribute__ ((__always_inline__))
__extension__ static __inline uint64x1_t __attribute__ ((__always_inline__))
vrshl_u64 (uint64x1_t __a, int64x1_t __b)
{
- return __builtin_aarch64_urshldi_uus (__a, __b);
+ return (uint64x1_t) {__builtin_aarch64_urshldi_uus (__a[0], __b[0])};
}
__extension__ static __inline int8x16_t __attribute__ ((__always_inline__))
return __builtin_aarch64_urshlv2di_uus (__a, __b);
}
-__extension__ static __inline int64x1_t __attribute__ ((__always_inline__))
-vrshld_s64 (int64x1_t __a, int64x1_t __b)
+__extension__ static __inline int64_t __attribute__ ((__always_inline__))
+vrshld_s64 (int64_t __a, int64_t __b)
{
- return (int64x1_t) __builtin_aarch64_srshldi (__a, __b);
+ return __builtin_aarch64_srshldi (__a, __b);
}
-__extension__ static __inline uint64x1_t __attribute__ ((__always_inline__))
-vrshld_u64 (uint64x1_t __a, uint64x1_t __b)
+__extension__ static __inline uint64_t __attribute__ ((__always_inline__))
+vrshld_u64 (uint64_t __a, int64_t __b)
{
return __builtin_aarch64_urshldi_uus (__a, __b);
}
__extension__ static __inline int64x1_t __attribute__ ((__always_inline__))
vrshr_n_s64 (int64x1_t __a, const int __b)
{
- return (int64x1_t) __builtin_aarch64_srshr_ndi (__a, __b);
+ return (int64x1_t) {__builtin_aarch64_srshr_ndi (__a[0], __b)};
}
__extension__ static __inline uint8x8_t __attribute__ ((__always_inline__))
__extension__ static __inline uint64x1_t __attribute__ ((__always_inline__))
vrshr_n_u64 (uint64x1_t __a, const int __b)
{
- return __builtin_aarch64_urshr_ndi_uus (__a, __b);
+ return (uint64x1_t) {__builtin_aarch64_urshr_ndi_uus (__a[0], __b)};
}
__extension__ static __inline int8x16_t __attribute__ ((__always_inline__))
return __builtin_aarch64_urshr_nv2di_uus (__a, __b);
}
-__extension__ static __inline int64x1_t __attribute__ ((__always_inline__))
-vrshrd_n_s64 (int64x1_t __a, const int __b)
+__extension__ static __inline int64_t __attribute__ ((__always_inline__))
+vrshrd_n_s64 (int64_t __a, const int __b)
{
- return (int64x1_t) __builtin_aarch64_srshr_ndi (__a, __b);
+ return __builtin_aarch64_srshr_ndi (__a, __b);
}
-__extension__ static __inline uint64x1_t __attribute__ ((__always_inline__))
-vrshrd_n_u64 (uint64x1_t __a, const int __b)
+__extension__ static __inline uint64_t __attribute__ ((__always_inline__))
+vrshrd_n_u64 (uint64_t __a, const int __b)
{
return __builtin_aarch64_urshr_ndi_uus (__a, __b);
}
__extension__ static __inline int64x1_t __attribute__ ((__always_inline__))
vrsra_n_s64 (int64x1_t __a, int64x1_t __b, const int __c)
{
- return (int64x1_t) __builtin_aarch64_srsra_ndi (__a, __b, __c);
+ return (int64x1_t) {__builtin_aarch64_srsra_ndi (__a[0], __b[0], __c)};
}
__extension__ static __inline uint8x8_t __attribute__ ((__always_inline__))
__extension__ static __inline uint64x1_t __attribute__ ((__always_inline__))
vrsra_n_u64 (uint64x1_t __a, uint64x1_t __b, const int __c)
{
- return __builtin_aarch64_ursra_ndi_uuus (__a, __b, __c);
+ return (uint64x1_t) {__builtin_aarch64_ursra_ndi_uuus (__a[0], __b[0], __c)};
}
__extension__ static __inline int8x16_t __attribute__ ((__always_inline__))
return __builtin_aarch64_ursra_nv2di_uuus (__a, __b, __c);
}
-__extension__ static __inline int64x1_t __attribute__ ((__always_inline__))
-vrsrad_n_s64 (int64x1_t __a, int64x1_t __b, const int __c)
+__extension__ static __inline int64_t __attribute__ ((__always_inline__))
+vrsrad_n_s64 (int64_t __a, int64_t __b, const int __c)
{
- return (int64x1_t) __builtin_aarch64_srsra_ndi (__a, __b, __c);
+ return __builtin_aarch64_srsra_ndi (__a, __b, __c);
}
-__extension__ static __inline uint64x1_t __attribute__ ((__always_inline__))
-vrsrad_n_u64 (uint64x1_t __a, uint64x1_t __b, const int __c)
+__extension__ static __inline uint64_t __attribute__ ((__always_inline__))
+vrsrad_n_u64 (uint64_t __a, uint64_t __b, const int __c)
{
return __builtin_aarch64_ursra_ndi_uuus (__a, __b, __c);
}
__extension__ static __inline int64x1_t __attribute__ ((__always_inline__))
vshl_n_s64 (int64x1_t __a, const int __b)
{
- return (int64x1_t) __builtin_aarch64_ashldi (__a, __b);
+ return (int64x1_t) {__builtin_aarch64_ashldi (__a[0], __b)};
}
__extension__ static __inline uint8x8_t __attribute__ ((__always_inline__))
__extension__ static __inline uint64x1_t __attribute__ ((__always_inline__))
vshl_n_u64 (uint64x1_t __a, const int __b)
{
- return (uint64x1_t) __builtin_aarch64_ashldi ((int64x1_t) __a, __b);
+ return (uint64x1_t) {__builtin_aarch64_ashldi ((int64_t) __a[0], __b)};
}
__extension__ static __inline int8x16_t __attribute__ ((__always_inline__))
return (uint64x2_t) __builtin_aarch64_ashlv2di ((int64x2_t) __a, __b);
}
-__extension__ static __inline int64x1_t __attribute__ ((__always_inline__))
-vshld_n_s64 (int64x1_t __a, const int __b)
+__extension__ static __inline int64_t __attribute__ ((__always_inline__))
+vshld_n_s64 (int64_t __a, const int __b)
{
- return (int64x1_t) __builtin_aarch64_ashldi (__a, __b);
+ return __builtin_aarch64_ashldi (__a, __b);
}
-__extension__ static __inline uint64x1_t __attribute__ ((__always_inline__))
-vshld_n_u64 (uint64x1_t __a, const int __b)
+__extension__ static __inline uint64_t __attribute__ ((__always_inline__))
+vshld_n_u64 (uint64_t __a, const int __b)
{
- return (uint64x1_t) __builtin_aarch64_ashldi (__a, __b);
+ return (uint64_t) __builtin_aarch64_ashldi (__a, __b);
}
__extension__ static __inline int8x8_t __attribute__ ((__always_inline__))
__extension__ static __inline int64x1_t __attribute__ ((__always_inline__))
vshl_s64 (int64x1_t __a, int64x1_t __b)
{
- return __builtin_aarch64_sshldi (__a, __b);
+ return (int64x1_t) {__builtin_aarch64_sshldi (__a[0], __b[0])};
}
__extension__ static __inline uint8x8_t __attribute__ ((__always_inline__))
__extension__ static __inline uint64x1_t __attribute__ ((__always_inline__))
vshl_u64 (uint64x1_t __a, int64x1_t __b)
{
- return __builtin_aarch64_ushldi_uus (__a, __b);
+ return (uint64x1_t) {__builtin_aarch64_ushldi_uus (__a[0], __b[0])};
}
__extension__ static __inline int8x16_t __attribute__ ((__always_inline__))
return __builtin_aarch64_ushlv2di_uus (__a, __b);
}
-__extension__ static __inline int64x1_t __attribute__ ((__always_inline__))
-vshld_s64 (int64x1_t __a, int64x1_t __b)
+__extension__ static __inline int64_t __attribute__ ((__always_inline__))
+vshld_s64 (int64_t __a, int64_t __b)
{
return __builtin_aarch64_sshldi (__a, __b);
}
-__extension__ static __inline uint64x1_t __attribute__ ((__always_inline__))
-vshld_u64 (uint64x1_t __a, uint64x1_t __b)
+__extension__ static __inline uint64_t __attribute__ ((__always_inline__))
+vshld_u64 (uint64_t __a, int64_t __b)
{
return __builtin_aarch64_ushldi_uus (__a, __b);
}
__extension__ static __inline int64x1_t __attribute__ ((__always_inline__))
vshr_n_s64 (int64x1_t __a, const int __b)
{
- return (int64x1_t) __builtin_aarch64_ashr_simddi (__a, __b);
+ return (int64x1_t) {__builtin_aarch64_ashr_simddi (__a[0], __b)};
}
__extension__ static __inline uint8x8_t __attribute__ ((__always_inline__))
__extension__ static __inline uint64x1_t __attribute__ ((__always_inline__))
vshr_n_u64 (uint64x1_t __a, const int __b)
{
- return __builtin_aarch64_lshr_simddi_uus ( __a, __b);
+ return (uint64x1_t) {__builtin_aarch64_lshr_simddi_uus ( __a[0], __b)};
}
__extension__ static __inline int8x16_t __attribute__ ((__always_inline__))
return (uint64x2_t) __builtin_aarch64_lshrv2di ((int64x2_t) __a, __b);
}
-__extension__ static __inline int64x1_t __attribute__ ((__always_inline__))
-vshrd_n_s64 (int64x1_t __a, const int __b)
+__extension__ static __inline int64_t __attribute__ ((__always_inline__))
+vshrd_n_s64 (int64_t __a, const int __b)
{
- return (int64x1_t) __builtin_aarch64_ashr_simddi (__a, __b);
+ return __builtin_aarch64_ashr_simddi (__a, __b);
}
__extension__ static __inline uint64_t __attribute__ ((__always_inline__))
__extension__ static __inline int64x1_t __attribute__ ((__always_inline__))
vsli_n_s64 (int64x1_t __a, int64x1_t __b, const int __c)
{
- return (int64x1_t) __builtin_aarch64_ssli_ndi (__a, __b, __c);
+ return (int64x1_t) {__builtin_aarch64_ssli_ndi (__a[0], __b[0], __c)};
}
__extension__ static __inline uint8x8_t __attribute__ ((__always_inline__))
__extension__ static __inline uint64x1_t __attribute__ ((__always_inline__))
vsli_n_u64 (uint64x1_t __a, uint64x1_t __b, const int __c)
{
- return __builtin_aarch64_usli_ndi_uuus (__a, __b, __c);
+ return (uint64x1_t) {__builtin_aarch64_usli_ndi_uuus (__a[0], __b[0], __c)};
}
__extension__ static __inline int8x16_t __attribute__ ((__always_inline__))
return __builtin_aarch64_usli_nv2di_uuus (__a, __b, __c);
}
-__extension__ static __inline int64x1_t __attribute__ ((__always_inline__))
-vslid_n_s64 (int64x1_t __a, int64x1_t __b, const int __c)
+__extension__ static __inline int64_t __attribute__ ((__always_inline__))
+vslid_n_s64 (int64_t __a, int64_t __b, const int __c)
{
- return (int64x1_t) __builtin_aarch64_ssli_ndi (__a, __b, __c);
+ return __builtin_aarch64_ssli_ndi (__a, __b, __c);
}
-__extension__ static __inline uint64x1_t __attribute__ ((__always_inline__))
-vslid_n_u64 (uint64x1_t __a, uint64x1_t __b, const int __c)
+__extension__ static __inline uint64_t __attribute__ ((__always_inline__))
+vslid_n_u64 (uint64_t __a, uint64_t __b, const int __c)
{
return __builtin_aarch64_usli_ndi_uuus (__a, __b, __c);
}
__extension__ static __inline uint64x1_t __attribute__ ((__always_inline__))
vsqadd_u64 (uint64x1_t __a, int64x1_t __b)
{
- return __builtin_aarch64_usqadddi_uus (__a, __b);
+ return (uint64x1_t) {__builtin_aarch64_usqadddi_uus (__a[0], __b[0])};
}
__extension__ static __inline uint8x16_t __attribute__ ((__always_inline__))
return __builtin_aarch64_usqaddsi_uus (__a, __b);
}
-__extension__ static __inline uint64x1_t __attribute__ ((__always_inline__))
-vsqaddd_u64 (uint64x1_t __a, int64x1_t __b)
+__extension__ static __inline uint64_t __attribute__ ((__always_inline__))
+vsqaddd_u64 (uint64_t __a, int64_t __b)
{
return __builtin_aarch64_usqadddi_uus (__a, __b);
}
__extension__ static __inline int64x1_t __attribute__ ((__always_inline__))
vsra_n_s64 (int64x1_t __a, int64x1_t __b, const int __c)
{
- return (int64x1_t) __builtin_aarch64_ssra_ndi (__a, __b, __c);
+ return (int64x1_t) {__builtin_aarch64_ssra_ndi (__a[0], __b[0], __c)};
}
__extension__ static __inline uint8x8_t __attribute__ ((__always_inline__))
__extension__ static __inline uint64x1_t __attribute__ ((__always_inline__))
vsra_n_u64 (uint64x1_t __a, uint64x1_t __b, const int __c)
{
- return __builtin_aarch64_usra_ndi_uuus (__a, __b, __c);
+ return (uint64x1_t) {__builtin_aarch64_usra_ndi_uuus (__a[0], __b[0], __c)};
}
__extension__ static __inline int8x16_t __attribute__ ((__always_inline__))
return __builtin_aarch64_usra_nv2di_uuus (__a, __b, __c);
}
-__extension__ static __inline int64x1_t __attribute__ ((__always_inline__))
-vsrad_n_s64 (int64x1_t __a, int64x1_t __b, const int __c)
+__extension__ static __inline int64_t __attribute__ ((__always_inline__))
+vsrad_n_s64 (int64_t __a, int64_t __b, const int __c)
{
- return (int64x1_t) __builtin_aarch64_ssra_ndi (__a, __b, __c);
+ return __builtin_aarch64_ssra_ndi (__a, __b, __c);
}
-__extension__ static __inline uint64x1_t __attribute__ ((__always_inline__))
-vsrad_n_u64 (uint64x1_t __a, uint64x1_t __b, const int __c)
+__extension__ static __inline uint64_t __attribute__ ((__always_inline__))
+vsrad_n_u64 (uint64_t __a, uint64_t __b, const int __c)
{
return __builtin_aarch64_usra_ndi_uuus (__a, __b, __c);
}
__extension__ static __inline int64x1_t __attribute__ ((__always_inline__))
vsri_n_s64 (int64x1_t __a, int64x1_t __b, const int __c)
{
- return (int64x1_t) __builtin_aarch64_ssri_ndi (__a, __b, __c);
+ return (int64x1_t) {__builtin_aarch64_ssri_ndi (__a[0], __b[0], __c)};
}
__extension__ static __inline uint8x8_t __attribute__ ((__always_inline__))
__extension__ static __inline uint64x1_t __attribute__ ((__always_inline__))
vsri_n_u64 (uint64x1_t __a, uint64x1_t __b, const int __c)
{
- return __builtin_aarch64_usri_ndi_uuus (__a, __b, __c);
+ return (uint64x1_t) {__builtin_aarch64_usri_ndi_uuus (__a[0], __b[0], __c)};
}
__extension__ static __inline int8x16_t __attribute__ ((__always_inline__))
return __builtin_aarch64_usri_nv2di_uuus (__a, __b, __c);
}
-__extension__ static __inline int64x1_t __attribute__ ((__always_inline__))
-vsrid_n_s64 (int64x1_t __a, int64x1_t __b, const int __c)
+__extension__ static __inline int64_t __attribute__ ((__always_inline__))
+vsrid_n_s64 (int64_t __a, int64_t __b, const int __c)
{
- return (int64x1_t) __builtin_aarch64_ssri_ndi (__a, __b, __c);
+ return __builtin_aarch64_ssri_ndi (__a, __b, __c);
}
-__extension__ static __inline uint64x1_t __attribute__ ((__always_inline__))
-vsrid_n_u64 (uint64x1_t __a, uint64x1_t __b, const int __c)
+__extension__ static __inline uint64_t __attribute__ ((__always_inline__))
+vsrid_n_u64 (uint64_t __a, uint64_t __b, const int __c)
{
return __builtin_aarch64_usri_ndi_uuus (__a, __b, __c);
}
__extension__ static __inline void __attribute__ ((__always_inline__))
vst1_s64 (int64_t *a, int64x1_t b)
{
- *a = b;
+ *a = b[0];
}
__extension__ static __inline void __attribute__ ((__always_inline__))
__extension__ static __inline void __attribute__ ((__always_inline__))
vst1_u64 (uint64_t *a, uint64x1_t b)
{
- *a = b;
+ *a = b[0];
}
__extension__ static __inline void __attribute__ ((__always_inline__))
/* vsub */
-__extension__ static __inline int64x1_t __attribute__ ((__always_inline__))
-vsubd_s64 (int64x1_t __a, int64x1_t __b)
+__extension__ static __inline int64_t __attribute__ ((__always_inline__))
+vsubd_s64 (int64_t __a, int64_t __b)
{
return __a - __b;
}
-__extension__ static __inline uint64x1_t __attribute__ ((__always_inline__))
-vsubd_u64 (uint64x1_t __a, uint64x1_t __b)
+__extension__ static __inline uint64_t __attribute__ ((__always_inline__))
+vsubd_u64 (uint64_t __a, uint64_t __b)
{
return __a - __b;
}
__extension__ static __inline uint64x1_t __attribute__ ((__always_inline__))
vtst_s64 (int64x1_t __a, int64x1_t __b)
{
- return (__a & __b) ? -1ll : 0ll;
+ return (uint64x1_t) {(__a[0] & __b[0]) ? -1ll : 0ll};
}
__extension__ static __inline uint8x8_t __attribute__ ((__always_inline__))
__extension__ static __inline uint64x1_t __attribute__ ((__always_inline__))
vtst_u64 (uint64x1_t __a, uint64x1_t __b)
{
- return (__a & __b) ? -1ll : 0ll;
+ return (uint64x1_t) {(__a[0] & __b[0]) ? -1ll : 0ll};
}
__extension__ static __inline uint8x16_t __attribute__ ((__always_inline__))
(int64x2_t) __b);
}
-__extension__ static __inline uint64x1_t __attribute__ ((__always_inline__))
-vtstd_s64 (int64x1_t __a, int64x1_t __b)
+__extension__ static __inline uint64_t __attribute__ ((__always_inline__))
+vtstd_s64 (int64_t __a, int64_t __b)
{
return (__a & __b) ? -1ll : 0ll;
}
-__extension__ static __inline uint64x1_t __attribute__ ((__always_inline__))
-vtstd_u64 (uint64x1_t __a, uint64x1_t __b)
+__extension__ static __inline uint64_t __attribute__ ((__always_inline__))
+vtstd_u64 (uint64_t __a, uint64_t __b)
{
return (__a & __b) ? -1ll : 0ll;
}
__extension__ static __inline int64x1_t __attribute__ ((__always_inline__))
vuqadd_s64 (int64x1_t __a, uint64x1_t __b)
{
- return __builtin_aarch64_suqadddi_ssu (__a, __b);
+ return (int64x1_t) {__builtin_aarch64_suqadddi_ssu (__a[0], __b[0])};
}
__extension__ static __inline int8x16_t __attribute__ ((__always_inline__))
return __builtin_aarch64_suqaddsi_ssu (__a, __b);
}
-__extension__ static __inline int64x1_t __attribute__ ((__always_inline__))
-vuqaddd_s64 (int64x1_t __a, uint64x1_t __b)
+__extension__ static __inline int64_t __attribute__ ((__always_inline__))
+vuqaddd_s64 (int64_t __a, uint64_t __b)
{
return __builtin_aarch64_suqadddi_ssu (__a, __b);
}
/* { dg-final { scan-assembler-times "\\tadd\\tx\[0-9\]+" 2 } } */
-uint64x1_t
-test_vaddd_u64 (uint64x1_t a, uint64x1_t b)
+uint64_t
+test_vaddd_u64 (uint64_t a, uint64_t b)
{
return vaddd_u64 (a, b);
}
-int64x1_t
-test_vaddd_s64 (int64x1_t a, int64x1_t b)
+int64_t
+test_vaddd_s64 (int64_t a, int64_t b)
{
return vaddd_s64 (a, b);
}
/* { dg-final { scan-assembler-times "\\tadd\\td\[0-9\]+" 1 } } */
-int64x1_t
-test_vaddd_s64_2 (int64x1_t a, int64x1_t b, int64x1_t c, int64x1_t d)
-{
- return vqaddd_s64 (vaddd_s64 (vqaddd_s64 (a, b), vqaddd_s64 (c, d)),
- vqaddd_s64 (a, d));
-}
-
-/* { dg-final { scan-assembler-times "\\tabs\\td\[0-9\]+, d\[0-9\]+" 1 } } */
-
-int64x1_t
-test_vabs_s64 (int64x1_t a)
+int64_t
+test_vaddd_s64_2 (int64_t a, int64_t b)
{
- uint64x1_t res;
+ int64_t res;
force_simd (a);
- res = vabs_s64 (a);
+ force_simd (b);
+ res = vaddd_s64 (a, b);
force_simd (res);
return res;
}
/* { dg-final { scan-assembler-times "\\tcmeq\\td\[0-9\]+, d\[0-9\]+, d\[0-9\]+" 1 } } */
-uint64x1_t
-test_vceqd_s64 (int64x1_t a, int64x1_t b)
+uint64_t
+test_vceqd_s64 (int64_t a, int64_t b)
{
- uint64x1_t res;
+ uint64_t res;
force_simd (a);
force_simd (b);
res = vceqd_s64 (a, b);
/* { dg-final { scan-assembler-times "\\tcmeq\\td\[0-9\]+, d\[0-9\]+, #?0" 1 } } */
-uint64x1_t
-test_vceqzd_s64 (int64x1_t a)
+uint64_t
+test_vceqzd_s64 (int64_t a)
{
- uint64x1_t res;
+ uint64_t res;
force_simd (a);
res = vceqzd_s64 (a);
force_simd (res);
/* { dg-final { scan-assembler-times "\\tcmge\\td\[0-9\]+, d\[0-9\]+, d\[0-9\]+" 2 } } */
-uint64x1_t
-test_vcged_s64 (int64x1_t a, int64x1_t b)
+uint64_t
+test_vcged_s64 (int64_t a, int64_t b)
{
- uint64x1_t res;
+ uint64_t res;
force_simd (a);
force_simd (b);
res = vcged_s64 (a, b);
return res;
}
-uint64x1_t
-test_vcled_s64 (int64x1_t a, int64x1_t b)
+uint64_t
+test_vcled_s64 (int64_t a, int64_t b)
{
- uint64x1_t res;
+ uint64_t res;
force_simd (a);
force_simd (b);
res = vcled_s64 (a, b);
/* Idiom recognition will cause this testcase not to generate
the expected cmge instruction, so do not check for it. */
-uint64x1_t
-test_vcgezd_s64 (int64x1_t a)
+uint64_t
+test_vcgezd_s64 (int64_t a)
{
- uint64x1_t res;
+ uint64_t res;
force_simd (a);
res = vcgezd_s64 (a);
force_simd (res);
/* { dg-final { scan-assembler-times "\\tcmhs\\td\[0-9\]+, d\[0-9\]+, d\[0-9\]+" 1 } } */
-uint64x1_t
-test_vcged_u64 (uint64x1_t a, uint64x1_t b)
+uint64_t
+test_vcged_u64 (uint64_t a, uint64_t b)
{
- uint64x1_t res;
+ uint64_t res;
force_simd (a);
force_simd (b);
res = vcged_u64 (a, b);
/* { dg-final { scan-assembler-times "\\tcmgt\\td\[0-9\]+, d\[0-9\]+, d\[0-9\]+" 2 } } */
-uint64x1_t
-test_vcgtd_s64 (int64x1_t a, int64x1_t b)
+uint64_t
+test_vcgtd_s64 (int64_t a, int64_t b)
{
- uint64x1_t res;
+ uint64_t res;
force_simd (a);
force_simd (b);
res = vcgtd_s64 (a, b);
return res;
}
-uint64x1_t
-test_vcltd_s64 (int64x1_t a, int64x1_t b)
+uint64_t
+test_vcltd_s64 (int64_t a, int64_t b)
{
- uint64x1_t res;
+ uint64_t res;
force_simd (a);
force_simd (b);
res = vcltd_s64 (a, b);
/* { dg-final { scan-assembler-times "\\tcmgt\\td\[0-9\]+, d\[0-9\]+, #?0" 1 } } */
-uint64x1_t
-test_vcgtzd_s64 (int64x1_t a)
+uint64_t
+test_vcgtzd_s64 (int64_t a)
{
- uint64x1_t res;
+ uint64_t res;
force_simd (a);
res = vcgtzd_s64 (a);
force_simd (res);
/* { dg-final { scan-assembler-times "\\tcmhi\\td\[0-9\]+, d\[0-9\]+, d\[0-9\]+" 1 } } */
-uint64x1_t
-test_vcgtd_u64 (uint64x1_t a, uint64x1_t b)
+uint64_t
+test_vcgtd_u64 (uint64_t a, uint64_t b)
{
- uint64x1_t res;
+ uint64_t res;
force_simd (a);
force_simd (b);
res = vcgtd_u64 (a, b);
/* { dg-final { scan-assembler-times "\\tcmle\\td\[0-9\]+, d\[0-9\]+, #?0" 1 } } */
-uint64x1_t
-test_vclezd_s64 (int64x1_t a)
+uint64_t
+test_vclezd_s64 (int64_t a)
{
- uint64x1_t res;
+ uint64_t res;
force_simd (a);
res = vclezd_s64 (a);
force_simd (res);
/* Idiom recognition will cause this testcase not to generate
the expected cmlt instruction, so do not check for it. */
-uint64x1_t
-test_vcltzd_s64 (int64x1_t a)
+uint64_t
+test_vcltzd_s64 (int64_t a)
{
- uint64x1_t res;
+ uint64_t res;
force_simd (a);
res = vcltzd_s64 (a);
force_simd (res);
/* { dg-final { scan-assembler-times "aarch64_get_lanev2di" 2 } } */
-int64x1_t
-test_vdupd_lane_s64 (int64x2_t a)
+int64_t
+test_vdupd_laneq_s64 (int64x2_t a)
{
- int64x1_t res;
- force_simd (a);
- res = vdupd_laneq_s64 (a, 1);
+ int64_t res = vdupd_laneq_s64 (a, 1);
force_simd (res);
return res;
}
-uint64x1_t
-test_vdupd_lane_u64 (uint64x2_t a)
+uint64_t
+test_vdupd_laneq_u64 (uint64x2_t a)
{
- uint64x1_t res;
- force_simd (a);
- res = vdupd_laneq_u64 (a, 1);
+ uint64_t res = vdupd_laneq_u64 (a, 1);
force_simd (res);
return res;
}
/* { dg-final { scan-assembler-times "\\tcmtst\\td\[0-9\]+, d\[0-9\]+, d\[0-9\]+" 2 } } */
-int64x1_t
-test_vtst_s64 (int64x1_t a, int64x1_t b)
+uint64_t
+test_vtstd_s64 (int64_t a, int64_t b)
{
- uint64x1_t res;
+ uint64_t res;
force_simd (a);
force_simd (b);
res = vtstd_s64 (a, b);
return res;
}
-uint64x1_t
-test_vtst_u64 (uint64x1_t a, uint64x1_t b)
+uint64_t
+test_vtstd_u64 (uint64_t a, uint64_t b)
{
- uint64x1_t res;
+ uint64_t res;
force_simd (a);
force_simd (b);
- res = vtstd_s64 (a, b);
+ res = vtstd_u64 (a, b);
force_simd (res);
return res;
}
/* { dg-final { scan-assembler-times "\\tuqadd\\td\[0-9\]+" 1 } } */
-uint64x1_t
-test_vqaddd_u64 (uint64x1_t a, uint64x1_t b)
+uint64_t
+test_vqaddd_u64 (uint64_t a, uint64_t b)
{
return vqaddd_u64 (a, b);
}
return vqaddb_u8 (a, b);
}
-/* { dg-final { scan-assembler-times "\\tsqadd\\td\[0-9\]+" 5 } } */
+/* { dg-final { scan-assembler-times "\\tsqadd\\td\[0-9\]+" 1 } } */
-int64x1_t
-test_vqaddd_s64 (int64x1_t a, int64x1_t b)
+int64_t
+test_vqaddd_s64 (int64_t a, int64_t b)
{
return vqaddd_s64 (a, b);
}
/* { dg-final { scan-assembler-times "\\tsqdmlal\\td\[0-9\]+, s\[0-9\]+, s\[0-9\]+" 1 } } */
-int64x1_t
-test_vqdmlals_s32 (int64x1_t a, int32x1_t b, int32x1_t c)
+int64_t
+test_vqdmlals_s32 (int64_t a, int32x1_t b, int32x1_t c)
{
return vqdmlals_s32 (a, b, c);
}
/* { dg-final { scan-assembler-times "\\tsqdmlsl\\td\[0-9\]+, s\[0-9\]+, s\[0-9\]+" 1 } } */
-int64x1_t
-test_vqdmlsls_s32 (int64x1_t a, int32x1_t b, int32x1_t c)
+int64_t
+test_vqdmlsls_s32 (int64_t a, int32x1_t b, int32x1_t c)
{
return vqdmlsls_s32 (a, b, c);
}
/* { dg-final { scan-assembler-times "\\tsqdmull\\td\[0-9\]+, s\[0-9\]+, s\[0-9\]+" 1 } } */
-int64x1_t
+int64_t
test_vqdmulls_s32 (int32x1_t a, int32x1_t b)
{
return vqdmulls_s32 (a, b);
/* { dg-final { scan-assembler-times "\\tsuqadd\\td\[0-9\]+" 1 } } */
-int64x1_t
-test_vuqaddd_s64 (int64x1_t a, int8x1_t b)
+int64_t
+test_vuqaddd_s64 (int64_t a, uint64_t b)
{
return vuqaddd_s64 (a, b);
}
/* { dg-final { scan-assembler-times "\\tusqadd\\td\[0-9\]+" 1 } } */
-uint64x1_t
-test_vsqaddd_u64 (uint64x1_t a, int8x1_t b)
+uint64_t
+test_vsqaddd_u64 (uint64_t a, int64_t b)
{
return vsqaddd_u64 (a, b);
}
/* { dg-final { scan-assembler-times "\\tsqxtun\\ts\[0-9\]+" 1 } } */
int32x1_t
-test_vqmovund_s64 (int64x1_t a)
+test_vqmovund_s64 (int64_t a)
{
return vqmovund_s64 (a);
}
/* { dg-final { scan-assembler-times "\\tsqxtn\\ts\[0-9\]+" 1 } } */
int32x1_t
-test_vqmovnd_s64 (int64x1_t a)
+test_vqmovnd_s64 (int64_t a)
{
return vqmovnd_s64 (a);
}
/* { dg-final { scan-assembler-times "\\tuqxtn\\ts\[0-9\]+" 1 } } */
uint32x1_t
-test_vqmovnd_u64 (uint64x1_t a)
+test_vqmovnd_u64 (uint64_t a)
{
return vqmovnd_u64 (a);
}
/* { dg-final { scan-assembler-times "\\tsub\\tx\[0-9\]+" 2 } } */
-uint64x1_t
-test_vsubd_u64 (uint64x1_t a, uint64x1_t b)
+uint64_t
+test_vsubd_u64 (uint64_t a, uint64_t b)
{
return vsubd_u64 (a, b);
}
-int64x1_t
-test_vsubd_s64 (int64x1_t a, int64x1_t b)
+int64_t
+test_vsubd_s64 (int64_t a, int64_t b)
{
return vsubd_s64 (a, b);
}
/* { dg-final { scan-assembler-times "\\tsub\\td\[0-9\]+" 1 } } */
-int64x1_t
-test_vsubd_s64_2 (int64x1_t a, int64x1_t b, int64x1_t c, int64x1_t d)
+int64_t
+test_vsubd_s64_2 (int64_t a, int64_t b)
{
- return vqsubd_s64 (vsubd_s64 (vqsubd_s64 (a, b), vqsubd_s64 (c, d)),
- vqsubd_s64 (a, d));
+ int64_t res;
+ force_simd (a);
+ force_simd (b);
+ res = vsubd_s64 (a, b);
+ force_simd (res);
+ return res;
}
/* { dg-final { scan-assembler-times "\\tuqsub\\td\[0-9\]+" 1 } } */
-uint64x1_t
-test_vqsubd_u64 (uint64x1_t a, uint64x1_t b)
+uint64_t
+test_vqsubd_u64 (uint64_t a, uint64_t b)
{
return vqsubd_u64 (a, b);
}
return vqsubb_u8 (a, b);
}
-/* { dg-final { scan-assembler-times "\\tsqsub\\td\[0-9\]+" 5 } } */
+/* { dg-final { scan-assembler-times "\\tsqsub\\td\[0-9\]+" 1 } } */
-int64x1_t
-test_vqsubd_s64 (int64x1_t a, int64x1_t b)
+int64_t
+test_vqsubd_s64 (int64_t a, int64_t b)
{
return vqsubd_s64 (a, b);
}
/* { dg-final { scan-assembler-times "\\tsshl\\td\[0-9\]+" 1 } } */
-int64x1_t
-test_vshld_s64 (int64x1_t a, int64x1_t b)
+int64_t
+test_vshld_s64 (int64_t a, int64_t b)
{
return vshld_s64 (a, b);
}
/* { dg-final { scan-assembler-times "\\tushl\\td\[0-9\]+" 1 } } */
-uint64x1_t
-test_vshld_u64 (uint64x1_t a, uint64x1_t b)
+uint64_t
+test_vshld_u64 (uint64_t a, int64_t b)
{
return vshld_u64 (a, b);
}
/* { dg-final { scan-assembler-times "\\tsrshl\\td\[0-9\]+" 1 } } */
-int64x1_t
-test_vrshld_s64 (int64x1_t a, int64x1_t b)
+int64_t
+test_vrshld_s64 (int64_t a, int64_t b)
{
return vrshld_s64 (a, b);
}
/* { dg-final { scan-assembler-times "\\turshl\\td\[0-9\]+" 1 } } */
-uint64x1_t
-test_vrshld_u64 (uint64x1_t a, uint64x1_t b)
+uint64_t
+test_vrshld_u64 (uint64_t a, int64_t b)
{
return vrshld_u64 (a, b);
}
/* { dg-final { scan-assembler "\\tasr\\tx\[0-9\]+" } } */
-int64x1_t
-test_vshrd_n_s64 (int64x1_t a)
+int64_t
+test_vshrd_n_s64 (int64_t a)
{
return vshrd_n_s64 (a, 5);
}
/* { dg-final { scan-assembler-times "\\tlsr\\tx\[0-9\]+" 1 } } */
-uint64x1_t
-test_vshrd_n_u64 (uint64x1_t a)
+uint64_t
+test_vshrd_n_u64 (uint64_t a)
{
return vshrd_n_u64 (a, 3);
}
/* { dg-final { scan-assembler-times "\\tssra\\td\[0-9\]+" 1 } } */
-int64x1_t
-test_vsrad_n_s64 (int64x1_t a, int64x1_t b)
+int64_t
+test_vsrad_n_s64 (int64_t a, int64_t b)
{
return vsrad_n_s64 (a, b, 2);
}
/* { dg-final { scan-assembler-times "\\tusra\\td\[0-9\]+" 1 } } */
-uint64x1_t
-test_vsrad_n_u64 (uint64x1_t a, uint64x1_t b)
+uint64_t
+test_vsrad_n_u64 (uint64_t a, uint64_t b)
{
return vsrad_n_u64 (a, b, 5);
}
/* { dg-final { scan-assembler-times "\\tsrshr\\td\[0-9\]+" 1 } } */
-int64x1_t
-test_vrshrd_n_s64 (int64x1_t a)
+int64_t
+test_vrshrd_n_s64 (int64_t a)
{
return vrshrd_n_s64 (a, 5);
}
/* { dg-final { scan-assembler-times "\\turshr\\td\[0-9\]+" 1 } } */
-uint64x1_t
-test_vrshrd_n_u64 (uint64x1_t a)
+uint64_t
+test_vrshrd_n_u64 (uint64_t a)
{
return vrshrd_n_u64 (a, 3);
}
/* { dg-final { scan-assembler-times "\\tsrsra\\td\[0-9\]+" 1 } } */
-int64x1_t
-test_vrsrad_n_s64 (int64x1_t a, int64x1_t b)
+int64_t
+test_vrsrad_n_s64 (int64_t a, int64_t b)
{
return vrsrad_n_s64 (a, b, 3);
}
/* { dg-final { scan-assembler-times "\\tsrsra\\td\[0-9\]+" 1 } } */
-uint64x1_t
-test_vrsrad_n_u64 (uint64x1_t a, uint64x1_t b)
+uint64_t
+test_vrsrad_n_u64 (uint64_t a, uint64_t b)
{
return vrsrad_n_u64 (a, b, 4);
}
/* { dg-final { scan-assembler-times "\\tsqrshl\\td\[0-9\]+" 1 } } */
-int64x1_t
-test_vqrshld_s64 (int64x1_t a, int64x1_t b)
+int64_t
+test_vqrshld_s64 (int64_t a, int64_t b)
{
return vqrshld_s64 (a, b);
}
/* { dg-final { scan-assembler-times "\\tuqrshl\\td\[0-9\]+" 1 } } */
-uint64x1_t
-test_vqrshld_u64 (uint64x1_t a, uint64x1_t b)
+uint64_t
+test_vqrshld_u64 (uint64_t a, int64_t b)
{
return vqrshld_u64 (a, b);
}
/* { dg-final { scan-assembler-times "\\tsqshlu\\td\[0-9\]+" 1 } } */
-int64x1_t
-test_vqshlud_n_s64 (int64x1_t a)
+int64_t
+test_vqshlud_n_s64 (int64_t a)
{
return vqshlud_n_s64 (a, 6);
}
/* { dg-final { scan-assembler-times "\\tsqshl\\td\[0-9\]+" 2 } } */
-int64x1_t
-test_vqshld_s64 (int64x1_t a, int64x1_t b)
+int64_t
+test_vqshld_s64 (int64_t a, int64_t b)
{
return vqshld_s64 (a, b);
}
-int64x1_t
-test_vqshld_n_s64 (int64x1_t a)
+int64_t
+test_vqshld_n_s64 (int64_t a)
{
return vqshld_n_s64 (a, 5);
}
/* { dg-final { scan-assembler-times "\\tuqshl\\td\[0-9\]+" 2 } } */
-uint64x1_t
-test_vqshld_u64 (uint64x1_t a, uint64x1_t b)
+uint64_t
+test_vqshld_u64 (uint64_t a, int64_t b)
{
return vqshld_u64 (a, b);
}
-uint64x1_t
-test_vqshld_n_u64 (uint64x1_t a)
+uint64_t
+test_vqshld_n_u64 (uint64_t a)
{
return vqshld_n_u64 (a, 5);
}
/* { dg-final { scan-assembler-times "\\tsqshrun\\ts\[0-9\]+" 1 } } */
int32x1_t
-test_vqshrund_n_s64 (int64x1_t a)
+test_vqshrund_n_s64 (int64_t a)
{
return vqshrund_n_s64 (a, 4);
}
/* { dg-final { scan-assembler-times "\\tsqrshrun\\ts\[0-9\]+" 1 } } */
int32x1_t
-test_vqrshrund_n_s64 (int64x1_t a)
+test_vqrshrund_n_s64 (int64_t a)
{
return vqrshrund_n_s64 (a, 4);
}
/* { dg-final { scan-assembler-times "\\tsqshrn\\ts\[0-9\]+" 1 } } */
int32x1_t
-test_vqshrnd_n_s64 (int64x1_t a)
+test_vqshrnd_n_s64 (int64_t a)
{
return vqshrnd_n_s64 (a, 4);
}
/* { dg-final { scan-assembler-times "\\tuqshrn\\ts\[0-9\]+" 1 } } */
uint32x1_t
-test_vqshrnd_n_u64 (uint64x1_t a)
+test_vqshrnd_n_u64 (uint64_t a)
{
return vqshrnd_n_u64 (a, 4);
}
/* { dg-final { scan-assembler-times "\\tsqrshrn\\ts\[0-9\]+" 1 } } */
int32x1_t
-test_vqrshrnd_n_s64 (int64x1_t a)
+test_vqrshrnd_n_s64 (int64_t a)
{
return vqrshrnd_n_s64 (a, 4);
}
/* { dg-final { scan-assembler-times "\\tuqrshrn\\ts\[0-9\]+" 1 } } */
uint32x1_t
-test_vqrshrnd_n_u64 (uint64x1_t a)
+test_vqrshrnd_n_u64 (uint64_t a)
{
return vqrshrnd_n_u64 (a, 4);
}
/* { dg-final { scan-assembler-times "\\tlsl\\tx\[0-9\]+" 2 } } */
-int64x1_t
-test_vshl_n_s64 (int64x1_t a)
+int64_t
+test_vshld_n_s64 (int64_t a)
{
return vshld_n_s64 (a, 9);
}
-uint64x1_t
-test_vshl_n_u64 (uint64x1_t a)
+uint64_t
+test_vshld_n_u64 (uint64_t a)
{
return vshld_n_u64 (a, 9);
}
/* { dg-final { scan-assembler-times "\\tsli\\td\[0-9\]+" 2 } } */
-int64x1_t
-test_vsli_n_s64 (int64x1_t a, int64x1_t b)
+int64_t
+test_vslid_n_s64 (int64_t a, int64_t b)
{
return vslid_n_s64 (a, b, 9);
}
-uint64x1_t
-test_vsli_n_u64 (uint64x1_t a, uint64x1_t b)
+uint64_t
+test_vslid_n_u64 (uint64_t a, uint64_t b)
{
return vslid_n_u64 (a, b, 9);
}
/* { dg-final { scan-assembler-times "\\tsri\\td\[0-9\]+" 2 } } */
-int64x1_t
-test_vsri_n_s64 (int64x1_t a, int64x1_t b)
+int64_t
+test_vsrid_n_s64 (int64_t a, int64_t b)
{
return vsrid_n_s64 (a, b, 9);
}
-uint64x1_t
-test_vsri_n_u64 (uint64x1_t a, uint64x1_t b)
+uint64_t
+test_vsrid_n_u64 (uint64_t a, uint64_t b)
{
return vsrid_n_u64 (a, b, 9);
}