From de10bcced9188d068a5448917141171e393b9cc9 Mon Sep 17 00:00:00 2001
From: Alan Lawrence
Date: Tue, 3 Jun 2014 14:57:22 +0000
Subject: [PATCH] [PATCH AArch64 1/2] Correct signedness of builtins, remove
 casts from arm_neon.h

	* gcc/config/aarch64/aarch64-builtins.c
	(aarch64_types_binop_uus_qualifiers,
	aarch64_types_shift_to_unsigned_qualifiers,
	aarch64_types_unsigned_shiftacc_qualifiers): Define.
	* gcc/config/aarch64/aarch64-simd-builtins.def (uqshl, uqrshl, uqadd,
	uqsub, usqadd, usra_n, ursra_n, uqshrn_n, uqrshrn_n, usri_n, usli_n,
	sqshlu_n, uqshl_n): Update qualifiers.
	* gcc/config/aarch64/arm_neon.h (vqadd_u8, vqadd_u16, vqadd_u32,
	vqadd_u64, vqaddq_u8, vqaddq_u16, vqaddq_u32, vqaddq_u64, vqsub_u8,
	vqsub_u16, vqsub_u32, vqsub_u64, vqsubq_u8, vqsubq_u16, vqsubq_u32,
	vqsubq_u64, vqaddb_u8, vqaddh_u16, vqadds_u32, vqaddd_u64, vqrshl_u8,
	vqrshl_u16, vqrshl_u32, vqrshl_u64, vqrshlq_u8, vqrshlq_u16,
	vqrshlq_u32, vqrshlq_u64, vqrshlb_u8, vqrshlh_u16, vqrshls_u32,
	vqrshld_u64, vqrshrn_n_u16, vqrshrn_n_u32, vqrshrn_n_u64,
	vqrshrnh_n_u16, vqrshrns_n_u32, vqrshrnd_n_u64, vqshl_u8, vqshl_u16,
	vqshl_u32, vqshl_u64, vqshlq_u8, vqshlq_u16, vqshlq_u32, vqshlq_u64,
	vqshlb_u8, vqshlh_u16, vqshls_u32, vqshld_u64, vqshl_n_u8, vqshl_n_u16,
	vqshl_n_u32, vqshl_n_u64, vqshlq_n_u8, vqshlq_n_u16, vqshlq_n_u32,
	vqshlq_n_u64, vqshlb_n_u8, vqshlh_n_u16, vqshls_n_u32, vqshld_n_u64,
	vqshlu_n_s8, vqshlu_n_s16, vqshlu_n_s32, vqshlu_n_s64, vqshluq_n_s8,
	vqshluq_n_s16, vqshluq_n_s32, vqshluq_n_s64, vqshlub_n_s8,
	vqshluh_n_s16, vqshlus_n_s32, vqshlud_n_s64, vqshrn_n_u16,
	vqshrn_n_u32, vqshrn_n_u64, vqshrnh_n_u16, vqshrns_n_u32,
	vqshrnd_n_u64, vqsubb_u8, vqsubh_u16, vqsubs_u32, vqsubd_u64,
	vrsra_n_u8, vrsra_n_u16, vrsra_n_u32, vrsra_n_u64, vrsraq_n_u8,
	vrsraq_n_u16, vrsraq_n_u32, vrsraq_n_u64, vrsrad_n_u64, vsli_n_u8,
	vsli_n_u16, vsli_n_u32, vsli_n_u64, vsliq_n_u8, vsliq_n_u16,
	vsliq_n_u32, vsliq_n_u64, vslid_n_u64, vsqadd_u8, vsqadd_u16,
	vsqadd_u32, vsqadd_u64, vsqaddq_u8, vsqaddq_u16, vsqaddq_u32,
	vsqaddq_u64, vsqaddb_u8, vsqaddh_u16, vsqadds_u32, vsqaddd_u64,
	vsra_n_u8, vsra_n_u16, vsra_n_u32, vsra_n_u64, vsraq_n_u8,
	vsraq_n_u16, vsraq_n_u32, vsraq_n_u64, vsrad_n_u64, vsri_n_u8,
	vsri_n_u16, vsri_n_u32, vsri_n_u64, vsriq_n_u8, vsriq_n_u16,
	vsriq_n_u32, vsriq_n_u64, vsrid_n_u64): Remove casts.

From-SVN: r211185
---
 gcc/ChangeLog                                |  37 +++
 gcc/config/aarch64/aarch64-builtins.c        |  16 ++
 gcc/config/aarch64/aarch64-simd-builtins.def |  26 +--
 gcc/config/aarch64/arm_neon.h                | 323 +++++++++++----------------
 4 files changed, 202 insertions(+), 200 deletions(-)

diff --git a/gcc/ChangeLog b/gcc/ChangeLog
index 63986c2..c196684 100644
--- a/gcc/ChangeLog
+++ b/gcc/ChangeLog
@@ -1,3 +1,40 @@
+2014-06-03  Alan Lawrence
+
+	* gcc/config/aarch64/aarch64-builtins.c
+	(aarch64_types_binop_uus_qualifiers,
+	aarch64_types_shift_to_unsigned_qualifiers,
+	aarch64_types_unsigned_shiftacc_qualifiers): Define.
+	* gcc/config/aarch64/aarch64-simd-builtins.def (uqshl, uqrshl, uqadd,
+	uqsub, usqadd, usra_n, ursra_n, uqshrn_n, uqrshrn_n, usri_n, usli_n,
+	sqshlu_n, uqshl_n): Update qualifiers.
+	* gcc/config/aarch64/arm_neon.h (vqadd_u8, vqadd_u16, vqadd_u32,
+	vqadd_u64, vqaddq_u8, vqaddq_u16, vqaddq_u32, vqaddq_u64, vqsub_u8,
+	vqsub_u16, vqsub_u32, vqsub_u64, vqsubq_u8, vqsubq_u16, vqsubq_u32,
+	vqsubq_u64, vqaddb_u8, vqaddh_u16, vqadds_u32, vqaddd_u64, vqrshl_u8,
+	vqrshl_u16, vqrshl_u32, vqrshl_u64, vqrshlq_u8, vqrshlq_u16,
+	vqrshlq_u32, vqrshlq_u64, vqrshlb_u8, vqrshlh_u16, vqrshls_u32,
+	vqrshld_u64, vqrshrn_n_u16, vqrshrn_n_u32, vqrshrn_n_u64,
+	vqrshrnh_n_u16, vqrshrns_n_u32, vqrshrnd_n_u64, vqshl_u8, vqshl_u16,
+	vqshl_u32, vqshl_u64, vqshlq_u8, vqshlq_u16, vqshlq_u32, vqshlq_u64,
+	vqshlb_u8, vqshlh_u16, vqshls_u32, vqshld_u64, vqshl_n_u8, vqshl_n_u16,
+	vqshl_n_u32, vqshl_n_u64, vqshlq_n_u8, vqshlq_n_u16, vqshlq_n_u32,
+	vqshlq_n_u64, vqshlb_n_u8, vqshlh_n_u16, vqshls_n_u32, vqshld_n_u64,
+	vqshlu_n_s8, vqshlu_n_s16, vqshlu_n_s32, vqshlu_n_s64, vqshluq_n_s8,
+	vqshluq_n_s16, vqshluq_n_s32, vqshluq_n_s64, vqshlub_n_s8,
+	vqshluh_n_s16, vqshlus_n_s32, vqshlud_n_s64, vqshrn_n_u16,
+	vqshrn_n_u32, vqshrn_n_u64, vqshrnh_n_u16, vqshrns_n_u32,
+	vqshrnd_n_u64, vqsubb_u8, vqsubh_u16, vqsubs_u32, vqsubd_u64,
+	vrsra_n_u8, vrsra_n_u16, vrsra_n_u32, vrsra_n_u64, vrsraq_n_u8,
+	vrsraq_n_u16, vrsraq_n_u32, vrsraq_n_u64, vrsrad_n_u64, vsli_n_u8,
+	vsli_n_u16, vsli_n_u32, vsli_n_u64, vsliq_n_u8, vsliq_n_u16,
+	vsliq_n_u32, vsliq_n_u64, vslid_n_u64, vsqadd_u8, vsqadd_u16,
+	vsqadd_u32, vsqadd_u64, vsqaddq_u8, vsqaddq_u16, vsqaddq_u32,
+	vsqaddq_u64, vsqaddb_u8, vsqaddh_u16, vsqadds_u32, vsqaddd_u64,
+	vsra_n_u8, vsra_n_u16, vsra_n_u32, vsra_n_u64, vsraq_n_u8,
+	vsraq_n_u16, vsraq_n_u32, vsraq_n_u64, vsrad_n_u64, vsri_n_u8,
+	vsri_n_u16, vsri_n_u32, vsri_n_u64, vsriq_n_u8, vsriq_n_u16,
+	vsriq_n_u32, vsriq_n_u64, vsrid_n_u64): Remove casts.
+
 2014-06-03  Teresa Johnson
 
 	* tree-sra.c (modify_function): Record caller nodes after rebuild.
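
For readers of the patch, a rough key to the three qualifier sets added in the
aarch64-builtins.c hunk that follows (an illustrative restatement, not
additional patch content; the comments are added here for exposition, and the
"_uus"/"_uss"/"_uuus" suffixes are the ones that appear on the builtin calls in
the arm_neon.h hunks further down):

/* uqshl, uqrshl, usqadd: unsigned result and unsigned data operand,
   signed second operand (shift amount or addend); the corresponding
   builtins are called with a "_uus" suffix in arm_neon.h.  */
static enum aarch64_type_qualifiers
aarch64_types_binop_uus_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_unsigned, qualifier_unsigned, qualifier_none };

/* sqshlu_n: unsigned result produced from a signed input and an
   immediate shift count; builtins are called with a "_uss" suffix.  */
static enum aarch64_type_qualifiers
aarch64_types_shift_to_unsigned_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_unsigned, qualifier_none, qualifier_immediate };

/* usra_n, ursra_n, usri_n, usli_n: unsigned result, unsigned
   accumulator/destination, unsigned input, immediate shift count;
   builtins are called with a "_uuus" suffix.  */
static enum aarch64_type_qualifiers
aarch64_types_unsigned_shiftacc_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_unsigned, qualifier_unsigned, qualifier_unsigned,
      qualifier_immediate };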
diff --git a/gcc/config/aarch64/aarch64-builtins.c b/gcc/config/aarch64/aarch64-builtins.c index ca14d51..eee3f2f 100644 --- a/gcc/config/aarch64/aarch64-builtins.c +++ b/gcc/config/aarch64/aarch64-builtins.c @@ -177,6 +177,10 @@ aarch64_types_binopu_qualifiers[SIMD_MAX_BUILTIN_ARGS] = { qualifier_unsigned, qualifier_unsigned, qualifier_unsigned }; #define TYPES_BINOPU (aarch64_types_binopu_qualifiers) static enum aarch64_type_qualifiers +aarch64_types_binop_uus_qualifiers[SIMD_MAX_BUILTIN_ARGS] + = { qualifier_unsigned, qualifier_unsigned, qualifier_none }; +#define TYPES_BINOP_UUS (aarch64_types_binop_uus_qualifiers) +static enum aarch64_type_qualifiers aarch64_types_binopp_qualifiers[SIMD_MAX_BUILTIN_ARGS] = { qualifier_poly, qualifier_poly, qualifier_poly }; #define TYPES_BINOPP (aarch64_types_binopp_qualifiers) @@ -203,9 +207,14 @@ aarch64_types_getlane_qualifiers[SIMD_MAX_BUILTIN_ARGS] #define TYPES_GETLANE (aarch64_types_getlane_qualifiers) #define TYPES_SHIFTIMM (aarch64_types_getlane_qualifiers) static enum aarch64_type_qualifiers +aarch64_types_shift_to_unsigned_qualifiers[SIMD_MAX_BUILTIN_ARGS] + = { qualifier_unsigned, qualifier_none, qualifier_immediate }; +#define TYPES_SHIFTIMM_USS (aarch64_types_shift_to_unsigned_qualifiers) +static enum aarch64_type_qualifiers aarch64_types_unsigned_shift_qualifiers[SIMD_MAX_BUILTIN_ARGS] = { qualifier_unsigned, qualifier_unsigned, qualifier_immediate }; #define TYPES_USHIFTIMM (aarch64_types_unsigned_shift_qualifiers) + static enum aarch64_type_qualifiers aarch64_types_setlane_qualifiers[SIMD_MAX_BUILTIN_ARGS] = { qualifier_none, qualifier_none, qualifier_none, qualifier_immediate }; @@ -214,6 +223,13 @@ aarch64_types_setlane_qualifiers[SIMD_MAX_BUILTIN_ARGS] #define TYPES_SHIFTACC (aarch64_types_setlane_qualifiers) static enum aarch64_type_qualifiers +aarch64_types_unsigned_shiftacc_qualifiers[SIMD_MAX_BUILTIN_ARGS] + = { qualifier_unsigned, qualifier_unsigned, qualifier_unsigned, + qualifier_immediate }; +#define TYPES_USHIFTACC (aarch64_types_unsigned_shiftacc_qualifiers) + + +static enum aarch64_type_qualifiers aarch64_types_combine_qualifiers[SIMD_MAX_BUILTIN_ARGS] = { qualifier_none, qualifier_none, qualifier_none }; #define TYPES_COMBINE (aarch64_types_combine_qualifiers) diff --git a/gcc/config/aarch64/aarch64-simd-builtins.def b/gcc/config/aarch64/aarch64-simd-builtins.def index b5d9965..b357be4 100644 --- a/gcc/config/aarch64/aarch64-simd-builtins.def +++ b/gcc/config/aarch64/aarch64-simd-builtins.def @@ -77,17 +77,17 @@ BUILTIN_VDQ_I (BINOP, dup_lane, 0) /* Implemented by aarch64_qshl. */ BUILTIN_VSDQ_I (BINOP, sqshl, 0) - BUILTIN_VSDQ_I (BINOP, uqshl, 0) + BUILTIN_VSDQ_I (BINOP_UUS, uqshl, 0) BUILTIN_VSDQ_I (BINOP, sqrshl, 0) - BUILTIN_VSDQ_I (BINOP, uqrshl, 0) + BUILTIN_VSDQ_I (BINOP_UUS, uqrshl, 0) /* Implemented by aarch64_. */ BUILTIN_VSDQ_I (BINOP, sqadd, 0) - BUILTIN_VSDQ_I (BINOP, uqadd, 0) + BUILTIN_VSDQ_I (BINOPU, uqadd, 0) BUILTIN_VSDQ_I (BINOP, sqsub, 0) - BUILTIN_VSDQ_I (BINOP, uqsub, 0) + BUILTIN_VSDQ_I (BINOPU, uqsub, 0) /* Implemented by aarch64_qadd. */ BUILTIN_VSDQ_I (BINOP, suqadd, 0) - BUILTIN_VSDQ_I (BINOP, usqadd, 0) + BUILTIN_VSDQ_I (BINOP_UUS, usqadd, 0) /* Implemented by aarch64_get_dreg. */ BUILTIN_VDC (GETLANE, get_dregoi, 0) @@ -214,9 +214,9 @@ BUILTIN_VSDQ_I_DI (SHIFTIMM, urshr_n, 0) /* Implemented by aarch64_sra_n. 
*/ BUILTIN_VSDQ_I_DI (SHIFTACC, ssra_n, 0) - BUILTIN_VSDQ_I_DI (SHIFTACC, usra_n, 0) + BUILTIN_VSDQ_I_DI (USHIFTACC, usra_n, 0) BUILTIN_VSDQ_I_DI (SHIFTACC, srsra_n, 0) - BUILTIN_VSDQ_I_DI (SHIFTACC, ursra_n, 0) + BUILTIN_VSDQ_I_DI (USHIFTACC, ursra_n, 0) /* Implemented by aarch64_shll_n. */ BUILTIN_VDW (SHIFTIMM, sshll_n, 0) BUILTIN_VDW (SHIFTIMM, ushll_n, 0) @@ -227,18 +227,18 @@ BUILTIN_VSQN_HSDI (SHIFTIMM, sqshrun_n, 0) BUILTIN_VSQN_HSDI (SHIFTIMM, sqrshrun_n, 0) BUILTIN_VSQN_HSDI (SHIFTIMM, sqshrn_n, 0) - BUILTIN_VSQN_HSDI (SHIFTIMM, uqshrn_n, 0) + BUILTIN_VSQN_HSDI (USHIFTIMM, uqshrn_n, 0) BUILTIN_VSQN_HSDI (SHIFTIMM, sqrshrn_n, 0) - BUILTIN_VSQN_HSDI (SHIFTIMM, uqrshrn_n, 0) + BUILTIN_VSQN_HSDI (USHIFTIMM, uqrshrn_n, 0) /* Implemented by aarch64_si_n. */ BUILTIN_VSDQ_I_DI (SHIFTINSERT, ssri_n, 0) - BUILTIN_VSDQ_I_DI (SHIFTINSERT, usri_n, 0) + BUILTIN_VSDQ_I_DI (USHIFTACC, usri_n, 0) BUILTIN_VSDQ_I_DI (SHIFTINSERT, ssli_n, 0) - BUILTIN_VSDQ_I_DI (SHIFTINSERT, usli_n, 0) + BUILTIN_VSDQ_I_DI (USHIFTACC, usli_n, 0) /* Implemented by aarch64_qshl_n. */ - BUILTIN_VSDQ_I (SHIFTIMM, sqshlu_n, 0) + BUILTIN_VSDQ_I (SHIFTIMM_USS, sqshlu_n, 0) BUILTIN_VSDQ_I (SHIFTIMM, sqshl_n, 0) - BUILTIN_VSDQ_I (SHIFTIMM, uqshl_n, 0) + BUILTIN_VSDQ_I (USHIFTIMM, uqshl_n, 0) /* Implemented by aarch64_cm. */ BUILTIN_VALLDI (BINOP, cmeq, 0) diff --git a/gcc/config/aarch64/arm_neon.h b/gcc/config/aarch64/arm_neon.h index 60b2844..18de229 100644 --- a/gcc/config/aarch64/arm_neon.h +++ b/gcc/config/aarch64/arm_neon.h @@ -2119,29 +2119,26 @@ vqadd_s64 (int64x1_t __a, int64x1_t __b) __extension__ static __inline uint8x8_t __attribute__ ((__always_inline__)) vqadd_u8 (uint8x8_t __a, uint8x8_t __b) { - return (uint8x8_t) __builtin_aarch64_uqaddv8qi ((int8x8_t) __a, - (int8x8_t) __b); + return __builtin_aarch64_uqaddv8qi_uuu (__a, __b); } __extension__ static __inline uint16x4_t __attribute__ ((__always_inline__)) vqadd_u16 (uint16x4_t __a, uint16x4_t __b) { - return (uint16x4_t) __builtin_aarch64_uqaddv4hi ((int16x4_t) __a, - (int16x4_t) __b); + return __builtin_aarch64_uqaddv4hi_uuu (__a, __b); } __extension__ static __inline uint32x2_t __attribute__ ((__always_inline__)) vqadd_u32 (uint32x2_t __a, uint32x2_t __b) { - return (uint32x2_t) __builtin_aarch64_uqaddv2si ((int32x2_t) __a, - (int32x2_t) __b); + return __builtin_aarch64_uqaddv2si_uuu (__a, __b); } __extension__ static __inline uint64x1_t __attribute__ ((__always_inline__)) vqadd_u64 (uint64x1_t __a, uint64x1_t __b) { - return (uint64x1_t) __builtin_aarch64_uqadddi ((int64x1_t) __a, - (int64x1_t) __b); + return (uint64x1_t) __builtin_aarch64_uqadddi_uuu ((uint64_t) __a, + (uint64_t) __b); } __extension__ static __inline int8x16_t __attribute__ ((__always_inline__)) @@ -2171,29 +2168,25 @@ vqaddq_s64 (int64x2_t __a, int64x2_t __b) __extension__ static __inline uint8x16_t __attribute__ ((__always_inline__)) vqaddq_u8 (uint8x16_t __a, uint8x16_t __b) { - return (uint8x16_t) __builtin_aarch64_uqaddv16qi ((int8x16_t) __a, - (int8x16_t) __b); + return __builtin_aarch64_uqaddv16qi_uuu (__a, __b); } __extension__ static __inline uint16x8_t __attribute__ ((__always_inline__)) vqaddq_u16 (uint16x8_t __a, uint16x8_t __b) { - return (uint16x8_t) __builtin_aarch64_uqaddv8hi ((int16x8_t) __a, - (int16x8_t) __b); + return __builtin_aarch64_uqaddv8hi_uuu (__a, __b); } __extension__ static __inline uint32x4_t __attribute__ ((__always_inline__)) vqaddq_u32 (uint32x4_t __a, uint32x4_t __b) { - return (uint32x4_t) __builtin_aarch64_uqaddv4si ((int32x4_t) __a, - (int32x4_t) __b); 
+ return __builtin_aarch64_uqaddv4si_uuu (__a, __b); } __extension__ static __inline uint64x2_t __attribute__ ((__always_inline__)) vqaddq_u64 (uint64x2_t __a, uint64x2_t __b) { - return (uint64x2_t) __builtin_aarch64_uqaddv2di ((int64x2_t) __a, - (int64x2_t) __b); + return __builtin_aarch64_uqaddv2di_uuu (__a, __b); } __extension__ static __inline int8x8_t __attribute__ ((__always_inline__)) @@ -2223,29 +2216,26 @@ vqsub_s64 (int64x1_t __a, int64x1_t __b) __extension__ static __inline uint8x8_t __attribute__ ((__always_inline__)) vqsub_u8 (uint8x8_t __a, uint8x8_t __b) { - return (uint8x8_t) __builtin_aarch64_uqsubv8qi ((int8x8_t) __a, - (int8x8_t) __b); + return __builtin_aarch64_uqsubv8qi_uuu (__a, __b); } __extension__ static __inline uint16x4_t __attribute__ ((__always_inline__)) vqsub_u16 (uint16x4_t __a, uint16x4_t __b) { - return (uint16x4_t) __builtin_aarch64_uqsubv4hi ((int16x4_t) __a, - (int16x4_t) __b); + return __builtin_aarch64_uqsubv4hi_uuu (__a, __b); } __extension__ static __inline uint32x2_t __attribute__ ((__always_inline__)) vqsub_u32 (uint32x2_t __a, uint32x2_t __b) { - return (uint32x2_t) __builtin_aarch64_uqsubv2si ((int32x2_t) __a, - (int32x2_t) __b); + return __builtin_aarch64_uqsubv2si_uuu (__a, __b); } __extension__ static __inline uint64x1_t __attribute__ ((__always_inline__)) vqsub_u64 (uint64x1_t __a, uint64x1_t __b) { - return (uint64x1_t) __builtin_aarch64_uqsubdi ((int64x1_t) __a, - (int64x1_t) __b); + return (uint64x1_t) __builtin_aarch64_uqsubdi_uuu ((uint64_t) __a, + (uint64_t) __b); } __extension__ static __inline int8x16_t __attribute__ ((__always_inline__)) @@ -2275,29 +2265,25 @@ vqsubq_s64 (int64x2_t __a, int64x2_t __b) __extension__ static __inline uint8x16_t __attribute__ ((__always_inline__)) vqsubq_u8 (uint8x16_t __a, uint8x16_t __b) { - return (uint8x16_t) __builtin_aarch64_uqsubv16qi ((int8x16_t) __a, - (int8x16_t) __b); + return __builtin_aarch64_uqsubv16qi_uuu (__a, __b); } __extension__ static __inline uint16x8_t __attribute__ ((__always_inline__)) vqsubq_u16 (uint16x8_t __a, uint16x8_t __b) { - return (uint16x8_t) __builtin_aarch64_uqsubv8hi ((int16x8_t) __a, - (int16x8_t) __b); + return __builtin_aarch64_uqsubv8hi_uuu (__a, __b); } __extension__ static __inline uint32x4_t __attribute__ ((__always_inline__)) vqsubq_u32 (uint32x4_t __a, uint32x4_t __b) { - return (uint32x4_t) __builtin_aarch64_uqsubv4si ((int32x4_t) __a, - (int32x4_t) __b); + return __builtin_aarch64_uqsubv4si_uuu (__a, __b); } __extension__ static __inline uint64x2_t __attribute__ ((__always_inline__)) vqsubq_u64 (uint64x2_t __a, uint64x2_t __b) { - return (uint64x2_t) __builtin_aarch64_uqsubv2di ((int64x2_t) __a, - (int64x2_t) __b); + return __builtin_aarch64_uqsubv2di_uuu (__a, __b); } __extension__ static __inline int8x8_t __attribute__ ((__always_inline__)) @@ -19511,25 +19497,26 @@ vqaddd_s64 (int64x1_t __a, int64x1_t __b) __extension__ static __inline uint8x1_t __attribute__ ((__always_inline__)) vqaddb_u8 (uint8x1_t __a, uint8x1_t __b) { - return (uint8x1_t) __builtin_aarch64_uqaddqi (__a, __b); + return (uint8x1_t) __builtin_aarch64_uqaddqi_uuu (__a, __b); } __extension__ static __inline uint16x1_t __attribute__ ((__always_inline__)) vqaddh_u16 (uint16x1_t __a, uint16x1_t __b) { - return (uint16x1_t) __builtin_aarch64_uqaddhi (__a, __b); + return (uint16x1_t) __builtin_aarch64_uqaddhi_uuu (__a, __b); } __extension__ static __inline uint32x1_t __attribute__ ((__always_inline__)) vqadds_u32 (uint32x1_t __a, uint32x1_t __b) { - return (uint32x1_t) 
__builtin_aarch64_uqaddsi (__a, __b); + return (uint32x1_t) __builtin_aarch64_uqaddsi_uuu (__a, __b); } __extension__ static __inline uint64x1_t __attribute__ ((__always_inline__)) vqaddd_u64 (uint64x1_t __a, uint64x1_t __b) { - return (uint64x1_t) __builtin_aarch64_uqadddi (__a, __b); + return (uint64x1_t) __builtin_aarch64_uqadddi_uuu ((uint64_t) __a, + (uint64_t) __b); } /* vqdmlal */ @@ -20185,25 +20172,25 @@ vqrshl_s64 (int64x1_t __a, int64x1_t __b) __extension__ static __inline uint8x8_t __attribute__ ((__always_inline__)) vqrshl_u8 (uint8x8_t __a, int8x8_t __b) { - return (uint8x8_t) __builtin_aarch64_uqrshlv8qi ((int8x8_t) __a, __b); + return __builtin_aarch64_uqrshlv8qi_uus ( __a, __b); } __extension__ static __inline uint16x4_t __attribute__ ((__always_inline__)) vqrshl_u16 (uint16x4_t __a, int16x4_t __b) { - return (uint16x4_t) __builtin_aarch64_uqrshlv4hi ((int16x4_t) __a, __b); + return __builtin_aarch64_uqrshlv4hi_uus ( __a, __b); } __extension__ static __inline uint32x2_t __attribute__ ((__always_inline__)) vqrshl_u32 (uint32x2_t __a, int32x2_t __b) { - return (uint32x2_t) __builtin_aarch64_uqrshlv2si ((int32x2_t) __a, __b); + return __builtin_aarch64_uqrshlv2si_uus ( __a, __b); } __extension__ static __inline uint64x1_t __attribute__ ((__always_inline__)) vqrshl_u64 (uint64x1_t __a, int64x1_t __b) { - return (uint64x1_t) __builtin_aarch64_uqrshldi ((int64x1_t) __a, __b); + return __builtin_aarch64_uqrshldi_uus ( __a, __b); } __extension__ static __inline int8x16_t __attribute__ ((__always_inline__)) @@ -20233,25 +20220,25 @@ vqrshlq_s64 (int64x2_t __a, int64x2_t __b) __extension__ static __inline uint8x16_t __attribute__ ((__always_inline__)) vqrshlq_u8 (uint8x16_t __a, int8x16_t __b) { - return (uint8x16_t) __builtin_aarch64_uqrshlv16qi ((int8x16_t) __a, __b); + return __builtin_aarch64_uqrshlv16qi_uus ( __a, __b); } __extension__ static __inline uint16x8_t __attribute__ ((__always_inline__)) vqrshlq_u16 (uint16x8_t __a, int16x8_t __b) { - return (uint16x8_t) __builtin_aarch64_uqrshlv8hi ((int16x8_t) __a, __b); + return __builtin_aarch64_uqrshlv8hi_uus ( __a, __b); } __extension__ static __inline uint32x4_t __attribute__ ((__always_inline__)) vqrshlq_u32 (uint32x4_t __a, int32x4_t __b) { - return (uint32x4_t) __builtin_aarch64_uqrshlv4si ((int32x4_t) __a, __b); + return __builtin_aarch64_uqrshlv4si_uus ( __a, __b); } __extension__ static __inline uint64x2_t __attribute__ ((__always_inline__)) vqrshlq_u64 (uint64x2_t __a, int64x2_t __b) { - return (uint64x2_t) __builtin_aarch64_uqrshlv2di ((int64x2_t) __a, __b); + return __builtin_aarch64_uqrshlv2di_uus ( __a, __b); } __extension__ static __inline int8x1_t __attribute__ ((__always_inline__)) @@ -20281,25 +20268,25 @@ vqrshld_s64 (int64x1_t __a, int64x1_t __b) __extension__ static __inline uint8x1_t __attribute__ ((__always_inline__)) vqrshlb_u8 (uint8x1_t __a, uint8x1_t __b) { - return (uint8x1_t) __builtin_aarch64_uqrshlqi (__a, __b); + return __builtin_aarch64_uqrshlqi_uus (__a, __b); } __extension__ static __inline uint16x1_t __attribute__ ((__always_inline__)) vqrshlh_u16 (uint16x1_t __a, uint16x1_t __b) { - return (uint16x1_t) __builtin_aarch64_uqrshlhi (__a, __b); + return __builtin_aarch64_uqrshlhi_uus (__a, __b); } __extension__ static __inline uint32x1_t __attribute__ ((__always_inline__)) vqrshls_u32 (uint32x1_t __a, uint32x1_t __b) { - return (uint32x1_t) __builtin_aarch64_uqrshlsi (__a, __b); + return __builtin_aarch64_uqrshlsi_uus (__a, __b); } __extension__ static __inline uint64x1_t __attribute__ 
((__always_inline__)) vqrshld_u64 (uint64x1_t __a, uint64x1_t __b) { - return (uint64x1_t) __builtin_aarch64_uqrshldi (__a, __b); + return __builtin_aarch64_uqrshldi_uus (__a, __b); } /* vqrshrn */ @@ -20325,19 +20312,19 @@ vqrshrn_n_s64 (int64x2_t __a, const int __b) __extension__ static __inline uint8x8_t __attribute__ ((__always_inline__)) vqrshrn_n_u16 (uint16x8_t __a, const int __b) { - return (uint8x8_t) __builtin_aarch64_uqrshrn_nv8hi ((int16x8_t) __a, __b); + return __builtin_aarch64_uqrshrn_nv8hi_uus ( __a, __b); } __extension__ static __inline uint16x4_t __attribute__ ((__always_inline__)) vqrshrn_n_u32 (uint32x4_t __a, const int __b) { - return (uint16x4_t) __builtin_aarch64_uqrshrn_nv4si ((int32x4_t) __a, __b); + return __builtin_aarch64_uqrshrn_nv4si_uus ( __a, __b); } __extension__ static __inline uint32x2_t __attribute__ ((__always_inline__)) vqrshrn_n_u64 (uint64x2_t __a, const int __b) { - return (uint32x2_t) __builtin_aarch64_uqrshrn_nv2di ((int64x2_t) __a, __b); + return __builtin_aarch64_uqrshrn_nv2di_uus ( __a, __b); } __extension__ static __inline int8x1_t __attribute__ ((__always_inline__)) @@ -20361,19 +20348,19 @@ vqrshrnd_n_s64 (int64x1_t __a, const int __b) __extension__ static __inline uint8x1_t __attribute__ ((__always_inline__)) vqrshrnh_n_u16 (uint16x1_t __a, const int __b) { - return (uint8x1_t) __builtin_aarch64_uqrshrn_nhi (__a, __b); + return __builtin_aarch64_uqrshrn_nhi_uus (__a, __b); } __extension__ static __inline uint16x1_t __attribute__ ((__always_inline__)) vqrshrns_n_u32 (uint32x1_t __a, const int __b) { - return (uint16x1_t) __builtin_aarch64_uqrshrn_nsi (__a, __b); + return __builtin_aarch64_uqrshrn_nsi_uus (__a, __b); } __extension__ static __inline uint32x1_t __attribute__ ((__always_inline__)) vqrshrnd_n_u64 (uint64x1_t __a, const int __b) { - return (uint32x1_t) __builtin_aarch64_uqrshrn_ndi (__a, __b); + return __builtin_aarch64_uqrshrn_ndi_uus (__a, __b); } /* vqrshrun */ @@ -20443,25 +20430,25 @@ vqshl_s64 (int64x1_t __a, int64x1_t __b) __extension__ static __inline uint8x8_t __attribute__ ((__always_inline__)) vqshl_u8 (uint8x8_t __a, int8x8_t __b) { - return (uint8x8_t) __builtin_aarch64_uqshlv8qi ((int8x8_t) __a, __b); + return __builtin_aarch64_uqshlv8qi_uus ( __a, __b); } __extension__ static __inline uint16x4_t __attribute__ ((__always_inline__)) vqshl_u16 (uint16x4_t __a, int16x4_t __b) { - return (uint16x4_t) __builtin_aarch64_uqshlv4hi ((int16x4_t) __a, __b); + return __builtin_aarch64_uqshlv4hi_uus ( __a, __b); } __extension__ static __inline uint32x2_t __attribute__ ((__always_inline__)) vqshl_u32 (uint32x2_t __a, int32x2_t __b) { - return (uint32x2_t) __builtin_aarch64_uqshlv2si ((int32x2_t) __a, __b); + return __builtin_aarch64_uqshlv2si_uus ( __a, __b); } __extension__ static __inline uint64x1_t __attribute__ ((__always_inline__)) vqshl_u64 (uint64x1_t __a, int64x1_t __b) { - return (uint64x1_t) __builtin_aarch64_uqshldi ((int64x1_t) __a, __b); + return __builtin_aarch64_uqshldi_uus ( __a, __b); } __extension__ static __inline int8x16_t __attribute__ ((__always_inline__)) @@ -20491,25 +20478,25 @@ vqshlq_s64 (int64x2_t __a, int64x2_t __b) __extension__ static __inline uint8x16_t __attribute__ ((__always_inline__)) vqshlq_u8 (uint8x16_t __a, int8x16_t __b) { - return (uint8x16_t) __builtin_aarch64_uqshlv16qi ((int8x16_t) __a, __b); + return __builtin_aarch64_uqshlv16qi_uus ( __a, __b); } __extension__ static __inline uint16x8_t __attribute__ ((__always_inline__)) vqshlq_u16 (uint16x8_t __a, int16x8_t __b) { - return 
(uint16x8_t) __builtin_aarch64_uqshlv8hi ((int16x8_t) __a, __b); + return __builtin_aarch64_uqshlv8hi_uus ( __a, __b); } __extension__ static __inline uint32x4_t __attribute__ ((__always_inline__)) vqshlq_u32 (uint32x4_t __a, int32x4_t __b) { - return (uint32x4_t) __builtin_aarch64_uqshlv4si ((int32x4_t) __a, __b); + return __builtin_aarch64_uqshlv4si_uus ( __a, __b); } __extension__ static __inline uint64x2_t __attribute__ ((__always_inline__)) vqshlq_u64 (uint64x2_t __a, int64x2_t __b) { - return (uint64x2_t) __builtin_aarch64_uqshlv2di ((int64x2_t) __a, __b); + return __builtin_aarch64_uqshlv2di_uus ( __a, __b); } __extension__ static __inline int8x1_t __attribute__ ((__always_inline__)) @@ -20539,25 +20526,25 @@ vqshld_s64 (int64x1_t __a, int64x1_t __b) __extension__ static __inline uint8x1_t __attribute__ ((__always_inline__)) vqshlb_u8 (uint8x1_t __a, uint8x1_t __b) { - return (uint8x1_t) __builtin_aarch64_uqshlqi (__a, __b); + return __builtin_aarch64_uqshlqi_uus (__a, __b); } __extension__ static __inline uint16x1_t __attribute__ ((__always_inline__)) vqshlh_u16 (uint16x1_t __a, uint16x1_t __b) { - return (uint16x1_t) __builtin_aarch64_uqshlhi (__a, __b); + return __builtin_aarch64_uqshlhi_uus (__a, __b); } __extension__ static __inline uint32x1_t __attribute__ ((__always_inline__)) vqshls_u32 (uint32x1_t __a, uint32x1_t __b) { - return (uint32x1_t) __builtin_aarch64_uqshlsi (__a, __b); + return __builtin_aarch64_uqshlsi_uus (__a, __b); } __extension__ static __inline uint64x1_t __attribute__ ((__always_inline__)) vqshld_u64 (uint64x1_t __a, uint64x1_t __b) { - return (uint64x1_t) __builtin_aarch64_uqshldi (__a, __b); + return __builtin_aarch64_uqshldi_uus (__a, __b); } __extension__ static __inline int8x8_t __attribute__ ((__always_inline__)) @@ -20587,25 +20574,25 @@ vqshl_n_s64 (int64x1_t __a, const int __b) __extension__ static __inline uint8x8_t __attribute__ ((__always_inline__)) vqshl_n_u8 (uint8x8_t __a, const int __b) { - return (uint8x8_t) __builtin_aarch64_uqshl_nv8qi ((int8x8_t) __a, __b); + return __builtin_aarch64_uqshl_nv8qi_uus (__a, __b); } __extension__ static __inline uint16x4_t __attribute__ ((__always_inline__)) vqshl_n_u16 (uint16x4_t __a, const int __b) { - return (uint16x4_t) __builtin_aarch64_uqshl_nv4hi ((int16x4_t) __a, __b); + return __builtin_aarch64_uqshl_nv4hi_uus (__a, __b); } __extension__ static __inline uint32x2_t __attribute__ ((__always_inline__)) vqshl_n_u32 (uint32x2_t __a, const int __b) { - return (uint32x2_t) __builtin_aarch64_uqshl_nv2si ((int32x2_t) __a, __b); + return __builtin_aarch64_uqshl_nv2si_uus (__a, __b); } __extension__ static __inline uint64x1_t __attribute__ ((__always_inline__)) vqshl_n_u64 (uint64x1_t __a, const int __b) { - return (uint64x1_t) __builtin_aarch64_uqshl_ndi ((int64x1_t) __a, __b); + return __builtin_aarch64_uqshl_ndi_uus (__a, __b); } __extension__ static __inline int8x16_t __attribute__ ((__always_inline__)) @@ -20635,25 +20622,25 @@ vqshlq_n_s64 (int64x2_t __a, const int __b) __extension__ static __inline uint8x16_t __attribute__ ((__always_inline__)) vqshlq_n_u8 (uint8x16_t __a, const int __b) { - return (uint8x16_t) __builtin_aarch64_uqshl_nv16qi ((int8x16_t) __a, __b); + return __builtin_aarch64_uqshl_nv16qi_uus (__a, __b); } __extension__ static __inline uint16x8_t __attribute__ ((__always_inline__)) vqshlq_n_u16 (uint16x8_t __a, const int __b) { - return (uint16x8_t) __builtin_aarch64_uqshl_nv8hi ((int16x8_t) __a, __b); + return __builtin_aarch64_uqshl_nv8hi_uus (__a, __b); } __extension__ static 
__inline uint32x4_t __attribute__ ((__always_inline__)) vqshlq_n_u32 (uint32x4_t __a, const int __b) { - return (uint32x4_t) __builtin_aarch64_uqshl_nv4si ((int32x4_t) __a, __b); + return __builtin_aarch64_uqshl_nv4si_uus (__a, __b); } __extension__ static __inline uint64x2_t __attribute__ ((__always_inline__)) vqshlq_n_u64 (uint64x2_t __a, const int __b) { - return (uint64x2_t) __builtin_aarch64_uqshl_nv2di ((int64x2_t) __a, __b); + return __builtin_aarch64_uqshl_nv2di_uus (__a, __b); } __extension__ static __inline int8x1_t __attribute__ ((__always_inline__)) @@ -20683,25 +20670,25 @@ vqshld_n_s64 (int64x1_t __a, const int __b) __extension__ static __inline uint8x1_t __attribute__ ((__always_inline__)) vqshlb_n_u8 (uint8x1_t __a, const int __b) { - return (uint8x1_t) __builtin_aarch64_uqshl_nqi (__a, __b); + return __builtin_aarch64_uqshl_nqi_uus (__a, __b); } __extension__ static __inline uint16x1_t __attribute__ ((__always_inline__)) vqshlh_n_u16 (uint16x1_t __a, const int __b) { - return (uint16x1_t) __builtin_aarch64_uqshl_nhi (__a, __b); + return __builtin_aarch64_uqshl_nhi_uus (__a, __b); } __extension__ static __inline uint32x1_t __attribute__ ((__always_inline__)) vqshls_n_u32 (uint32x1_t __a, const int __b) { - return (uint32x1_t) __builtin_aarch64_uqshl_nsi (__a, __b); + return __builtin_aarch64_uqshl_nsi_uus (__a, __b); } __extension__ static __inline uint64x1_t __attribute__ ((__always_inline__)) vqshld_n_u64 (uint64x1_t __a, const int __b) { - return (uint64x1_t) __builtin_aarch64_uqshl_ndi (__a, __b); + return __builtin_aarch64_uqshl_ndi_uus (__a, __b); } /* vqshlu */ @@ -20709,73 +20696,73 @@ vqshld_n_u64 (uint64x1_t __a, const int __b) __extension__ static __inline uint8x8_t __attribute__ ((__always_inline__)) vqshlu_n_s8 (int8x8_t __a, const int __b) { - return (uint8x8_t) __builtin_aarch64_sqshlu_nv8qi (__a, __b); + return __builtin_aarch64_sqshlu_nv8qi_uss (__a, __b); } __extension__ static __inline uint16x4_t __attribute__ ((__always_inline__)) vqshlu_n_s16 (int16x4_t __a, const int __b) { - return (uint16x4_t) __builtin_aarch64_sqshlu_nv4hi (__a, __b); + return __builtin_aarch64_sqshlu_nv4hi_uss (__a, __b); } __extension__ static __inline uint32x2_t __attribute__ ((__always_inline__)) vqshlu_n_s32 (int32x2_t __a, const int __b) { - return (uint32x2_t) __builtin_aarch64_sqshlu_nv2si (__a, __b); + return __builtin_aarch64_sqshlu_nv2si_uss (__a, __b); } __extension__ static __inline uint64x1_t __attribute__ ((__always_inline__)) vqshlu_n_s64 (int64x1_t __a, const int __b) { - return (uint64x1_t) __builtin_aarch64_sqshlu_ndi (__a, __b); + return __builtin_aarch64_sqshlu_ndi_uss (__a, __b); } __extension__ static __inline uint8x16_t __attribute__ ((__always_inline__)) vqshluq_n_s8 (int8x16_t __a, const int __b) { - return (uint8x16_t) __builtin_aarch64_sqshlu_nv16qi (__a, __b); + return __builtin_aarch64_sqshlu_nv16qi_uss (__a, __b); } __extension__ static __inline uint16x8_t __attribute__ ((__always_inline__)) vqshluq_n_s16 (int16x8_t __a, const int __b) { - return (uint16x8_t) __builtin_aarch64_sqshlu_nv8hi (__a, __b); + return __builtin_aarch64_sqshlu_nv8hi_uss (__a, __b); } __extension__ static __inline uint32x4_t __attribute__ ((__always_inline__)) vqshluq_n_s32 (int32x4_t __a, const int __b) { - return (uint32x4_t) __builtin_aarch64_sqshlu_nv4si (__a, __b); + return __builtin_aarch64_sqshlu_nv4si_uss (__a, __b); } __extension__ static __inline uint64x2_t __attribute__ ((__always_inline__)) vqshluq_n_s64 (int64x2_t __a, const int __b) { - return (uint64x2_t) 
__builtin_aarch64_sqshlu_nv2di (__a, __b); + return __builtin_aarch64_sqshlu_nv2di_uss (__a, __b); } __extension__ static __inline int8x1_t __attribute__ ((__always_inline__)) vqshlub_n_s8 (int8x1_t __a, const int __b) { - return (int8x1_t) __builtin_aarch64_sqshlu_nqi (__a, __b); + return (int8x1_t) __builtin_aarch64_sqshlu_nqi_uss (__a, __b); } __extension__ static __inline int16x1_t __attribute__ ((__always_inline__)) vqshluh_n_s16 (int16x1_t __a, const int __b) { - return (int16x1_t) __builtin_aarch64_sqshlu_nhi (__a, __b); + return (int16x1_t) __builtin_aarch64_sqshlu_nhi_uss (__a, __b); } __extension__ static __inline int32x1_t __attribute__ ((__always_inline__)) vqshlus_n_s32 (int32x1_t __a, const int __b) { - return (int32x1_t) __builtin_aarch64_sqshlu_nsi (__a, __b); + return (int32x1_t) __builtin_aarch64_sqshlu_nsi_uss (__a, __b); } __extension__ static __inline int64x1_t __attribute__ ((__always_inline__)) vqshlud_n_s64 (int64x1_t __a, const int __b) { - return (int64x1_t) __builtin_aarch64_sqshlu_ndi (__a, __b); + return (int64x1_t) __builtin_aarch64_sqshlu_ndi_uss (__a, __b); } /* vqshrn */ @@ -20801,19 +20788,19 @@ vqshrn_n_s64 (int64x2_t __a, const int __b) __extension__ static __inline uint8x8_t __attribute__ ((__always_inline__)) vqshrn_n_u16 (uint16x8_t __a, const int __b) { - return (uint8x8_t) __builtin_aarch64_uqshrn_nv8hi ((int16x8_t) __a, __b); + return __builtin_aarch64_uqshrn_nv8hi_uus ( __a, __b); } __extension__ static __inline uint16x4_t __attribute__ ((__always_inline__)) vqshrn_n_u32 (uint32x4_t __a, const int __b) { - return (uint16x4_t) __builtin_aarch64_uqshrn_nv4si ((int32x4_t) __a, __b); + return __builtin_aarch64_uqshrn_nv4si_uus ( __a, __b); } __extension__ static __inline uint32x2_t __attribute__ ((__always_inline__)) vqshrn_n_u64 (uint64x2_t __a, const int __b) { - return (uint32x2_t) __builtin_aarch64_uqshrn_nv2di ((int64x2_t) __a, __b); + return __builtin_aarch64_uqshrn_nv2di_uus ( __a, __b); } __extension__ static __inline int8x1_t __attribute__ ((__always_inline__)) @@ -20837,19 +20824,19 @@ vqshrnd_n_s64 (int64x1_t __a, const int __b) __extension__ static __inline uint8x1_t __attribute__ ((__always_inline__)) vqshrnh_n_u16 (uint16x1_t __a, const int __b) { - return (uint8x1_t) __builtin_aarch64_uqshrn_nhi (__a, __b); + return __builtin_aarch64_uqshrn_nhi_uus (__a, __b); } __extension__ static __inline uint16x1_t __attribute__ ((__always_inline__)) vqshrns_n_u32 (uint32x1_t __a, const int __b) { - return (uint16x1_t) __builtin_aarch64_uqshrn_nsi (__a, __b); + return __builtin_aarch64_uqshrn_nsi_uus (__a, __b); } __extension__ static __inline uint32x1_t __attribute__ ((__always_inline__)) vqshrnd_n_u64 (uint64x1_t __a, const int __b) { - return (uint32x1_t) __builtin_aarch64_uqshrn_ndi (__a, __b); + return __builtin_aarch64_uqshrn_ndi_uus (__a, __b); } /* vqshrun */ @@ -20919,25 +20906,26 @@ vqsubd_s64 (int64x1_t __a, int64x1_t __b) __extension__ static __inline uint8x1_t __attribute__ ((__always_inline__)) vqsubb_u8 (uint8x1_t __a, uint8x1_t __b) { - return (uint8x1_t) __builtin_aarch64_uqsubqi (__a, __b); + return (uint8x1_t) __builtin_aarch64_uqsubqi_uuu (__a, __b); } __extension__ static __inline uint16x1_t __attribute__ ((__always_inline__)) vqsubh_u16 (uint16x1_t __a, uint16x1_t __b) { - return (uint16x1_t) __builtin_aarch64_uqsubhi (__a, __b); + return (uint16x1_t) __builtin_aarch64_uqsubhi_uuu (__a, __b); } __extension__ static __inline uint32x1_t __attribute__ ((__always_inline__)) vqsubs_u32 (uint32x1_t __a, uint32x1_t __b) { - return 
(uint32x1_t) __builtin_aarch64_uqsubsi (__a, __b); + return (uint32x1_t) __builtin_aarch64_uqsubsi_uuu (__a, __b); } __extension__ static __inline uint64x1_t __attribute__ ((__always_inline__)) vqsubd_u64 (uint64x1_t __a, uint64x1_t __b) { - return (uint64x1_t) __builtin_aarch64_uqsubdi (__a, __b); + return (uint64x1_t) __builtin_aarch64_uqsubdi_uuu ((uint64_t) __a, + (uint64_t) __b); } /* vrecpe */ @@ -21677,29 +21665,25 @@ vrsra_n_s64 (int64x1_t __a, int64x1_t __b, const int __c) __extension__ static __inline uint8x8_t __attribute__ ((__always_inline__)) vrsra_n_u8 (uint8x8_t __a, uint8x8_t __b, const int __c) { - return (uint8x8_t) __builtin_aarch64_ursra_nv8qi ((int8x8_t) __a, - (int8x8_t) __b, __c); + return __builtin_aarch64_ursra_nv8qi_uuus (__a, __b, __c); } __extension__ static __inline uint16x4_t __attribute__ ((__always_inline__)) vrsra_n_u16 (uint16x4_t __a, uint16x4_t __b, const int __c) { - return (uint16x4_t) __builtin_aarch64_ursra_nv4hi ((int16x4_t) __a, - (int16x4_t) __b, __c); + return __builtin_aarch64_ursra_nv4hi_uuus (__a, __b, __c); } __extension__ static __inline uint32x2_t __attribute__ ((__always_inline__)) vrsra_n_u32 (uint32x2_t __a, uint32x2_t __b, const int __c) { - return (uint32x2_t) __builtin_aarch64_ursra_nv2si ((int32x2_t) __a, - (int32x2_t) __b, __c); + return __builtin_aarch64_ursra_nv2si_uuus (__a, __b, __c); } __extension__ static __inline uint64x1_t __attribute__ ((__always_inline__)) vrsra_n_u64 (uint64x1_t __a, uint64x1_t __b, const int __c) { - return (uint64x1_t) __builtin_aarch64_ursra_ndi ((int64x1_t) __a, - (int64x1_t) __b, __c); + return __builtin_aarch64_ursra_ndi_uuus (__a, __b, __c); } __extension__ static __inline int8x16_t __attribute__ ((__always_inline__)) @@ -21729,29 +21713,25 @@ vrsraq_n_s64 (int64x2_t __a, int64x2_t __b, const int __c) __extension__ static __inline uint8x16_t __attribute__ ((__always_inline__)) vrsraq_n_u8 (uint8x16_t __a, uint8x16_t __b, const int __c) { - return (uint8x16_t) __builtin_aarch64_ursra_nv16qi ((int8x16_t) __a, - (int8x16_t) __b, __c); + return __builtin_aarch64_ursra_nv16qi_uuus (__a, __b, __c); } __extension__ static __inline uint16x8_t __attribute__ ((__always_inline__)) vrsraq_n_u16 (uint16x8_t __a, uint16x8_t __b, const int __c) { - return (uint16x8_t) __builtin_aarch64_ursra_nv8hi ((int16x8_t) __a, - (int16x8_t) __b, __c); + return __builtin_aarch64_ursra_nv8hi_uuus (__a, __b, __c); } __extension__ static __inline uint32x4_t __attribute__ ((__always_inline__)) vrsraq_n_u32 (uint32x4_t __a, uint32x4_t __b, const int __c) { - return (uint32x4_t) __builtin_aarch64_ursra_nv4si ((int32x4_t) __a, - (int32x4_t) __b, __c); + return __builtin_aarch64_ursra_nv4si_uuus (__a, __b, __c); } __extension__ static __inline uint64x2_t __attribute__ ((__always_inline__)) vrsraq_n_u64 (uint64x2_t __a, uint64x2_t __b, const int __c) { - return (uint64x2_t) __builtin_aarch64_ursra_nv2di ((int64x2_t) __a, - (int64x2_t) __b, __c); + return __builtin_aarch64_ursra_nv2di_uuus (__a, __b, __c); } __extension__ static __inline int64x1_t __attribute__ ((__always_inline__)) @@ -21763,7 +21743,7 @@ vrsrad_n_s64 (int64x1_t __a, int64x1_t __b, const int __c) __extension__ static __inline uint64x1_t __attribute__ ((__always_inline__)) vrsrad_n_u64 (uint64x1_t __a, uint64x1_t __b, const int __c) { - return (uint64x1_t) __builtin_aarch64_ursra_ndi (__a, __b, __c); + return __builtin_aarch64_ursra_ndi_uuus (__a, __b, __c); } #ifdef __ARM_FEATURE_CRYPTO @@ -22272,29 +22252,25 @@ vsli_n_s64 (int64x1_t __a, int64x1_t __b, const int 
__c) __extension__ static __inline uint8x8_t __attribute__ ((__always_inline__)) vsli_n_u8 (uint8x8_t __a, uint8x8_t __b, const int __c) { - return (uint8x8_t) __builtin_aarch64_usli_nv8qi ((int8x8_t) __a, - (int8x8_t) __b, __c); + return __builtin_aarch64_usli_nv8qi_uuus (__a, __b, __c); } __extension__ static __inline uint16x4_t __attribute__ ((__always_inline__)) vsli_n_u16 (uint16x4_t __a, uint16x4_t __b, const int __c) { - return (uint16x4_t) __builtin_aarch64_usli_nv4hi ((int16x4_t) __a, - (int16x4_t) __b, __c); + return __builtin_aarch64_usli_nv4hi_uuus (__a, __b, __c); } __extension__ static __inline uint32x2_t __attribute__ ((__always_inline__)) vsli_n_u32 (uint32x2_t __a, uint32x2_t __b, const int __c) { - return (uint32x2_t) __builtin_aarch64_usli_nv2si ((int32x2_t) __a, - (int32x2_t) __b, __c); + return __builtin_aarch64_usli_nv2si_uuus (__a, __b, __c); } __extension__ static __inline uint64x1_t __attribute__ ((__always_inline__)) vsli_n_u64 (uint64x1_t __a, uint64x1_t __b, const int __c) { - return (uint64x1_t) __builtin_aarch64_usli_ndi ((int64x1_t) __a, - (int64x1_t) __b, __c); + return __builtin_aarch64_usli_ndi_uuus (__a, __b, __c); } __extension__ static __inline int8x16_t __attribute__ ((__always_inline__)) @@ -22324,29 +22300,25 @@ vsliq_n_s64 (int64x2_t __a, int64x2_t __b, const int __c) __extension__ static __inline uint8x16_t __attribute__ ((__always_inline__)) vsliq_n_u8 (uint8x16_t __a, uint8x16_t __b, const int __c) { - return (uint8x16_t) __builtin_aarch64_usli_nv16qi ((int8x16_t) __a, - (int8x16_t) __b, __c); + return __builtin_aarch64_usli_nv16qi_uuus (__a, __b, __c); } __extension__ static __inline uint16x8_t __attribute__ ((__always_inline__)) vsliq_n_u16 (uint16x8_t __a, uint16x8_t __b, const int __c) { - return (uint16x8_t) __builtin_aarch64_usli_nv8hi ((int16x8_t) __a, - (int16x8_t) __b, __c); + return __builtin_aarch64_usli_nv8hi_uuus (__a, __b, __c); } __extension__ static __inline uint32x4_t __attribute__ ((__always_inline__)) vsliq_n_u32 (uint32x4_t __a, uint32x4_t __b, const int __c) { - return (uint32x4_t) __builtin_aarch64_usli_nv4si ((int32x4_t) __a, - (int32x4_t) __b, __c); + return __builtin_aarch64_usli_nv4si_uuus (__a, __b, __c); } __extension__ static __inline uint64x2_t __attribute__ ((__always_inline__)) vsliq_n_u64 (uint64x2_t __a, uint64x2_t __b, const int __c) { - return (uint64x2_t) __builtin_aarch64_usli_nv2di ((int64x2_t) __a, - (int64x2_t) __b, __c); + return __builtin_aarch64_usli_nv2di_uuus (__a, __b, __c); } __extension__ static __inline int64x1_t __attribute__ ((__always_inline__)) @@ -22358,7 +22330,7 @@ vslid_n_s64 (int64x1_t __a, int64x1_t __b, const int __c) __extension__ static __inline uint64x1_t __attribute__ ((__always_inline__)) vslid_n_u64 (uint64x1_t __a, uint64x1_t __b, const int __c) { - return (uint64x1_t) __builtin_aarch64_usli_ndi (__a, __b, __c); + return __builtin_aarch64_usli_ndi_uuus (__a, __b, __c); } /* vsqadd */ @@ -22366,80 +22338,73 @@ vslid_n_u64 (uint64x1_t __a, uint64x1_t __b, const int __c) __extension__ static __inline uint8x8_t __attribute__ ((__always_inline__)) vsqadd_u8 (uint8x8_t __a, int8x8_t __b) { - return (uint8x8_t) __builtin_aarch64_usqaddv8qi ((int8x8_t) __a, - (int8x8_t) __b); + return __builtin_aarch64_usqaddv8qi_uus (__a, __b); } __extension__ static __inline uint16x4_t __attribute__ ((__always_inline__)) vsqadd_u16 (uint16x4_t __a, int16x4_t __b) { - return (uint16x4_t) __builtin_aarch64_usqaddv4hi ((int16x4_t) __a, - (int16x4_t) __b); + return __builtin_aarch64_usqaddv4hi_uus (__a, 
__b); } __extension__ static __inline uint32x2_t __attribute__ ((__always_inline__)) vsqadd_u32 (uint32x2_t __a, int32x2_t __b) { - return (uint32x2_t) __builtin_aarch64_usqaddv2si ((int32x2_t) __a, - (int32x2_t) __b); + return __builtin_aarch64_usqaddv2si_uus (__a, __b); } __extension__ static __inline uint64x1_t __attribute__ ((__always_inline__)) vsqadd_u64 (uint64x1_t __a, int64x1_t __b) { - return (uint64x1_t) __builtin_aarch64_usqadddi ((int64x1_t) __a, __b); + return __builtin_aarch64_usqadddi_uus (__a, __b); } __extension__ static __inline uint8x16_t __attribute__ ((__always_inline__)) vsqaddq_u8 (uint8x16_t __a, int8x16_t __b) { - return (uint8x16_t) __builtin_aarch64_usqaddv16qi ((int8x16_t) __a, - (int8x16_t) __b); + return __builtin_aarch64_usqaddv16qi_uus (__a, __b); } __extension__ static __inline uint16x8_t __attribute__ ((__always_inline__)) vsqaddq_u16 (uint16x8_t __a, int16x8_t __b) { - return (uint16x8_t) __builtin_aarch64_usqaddv8hi ((int16x8_t) __a, - (int16x8_t) __b); + return __builtin_aarch64_usqaddv8hi_uus (__a, __b); } __extension__ static __inline uint32x4_t __attribute__ ((__always_inline__)) vsqaddq_u32 (uint32x4_t __a, int32x4_t __b) { - return (uint32x4_t) __builtin_aarch64_usqaddv4si ((int32x4_t) __a, - (int32x4_t) __b); + return __builtin_aarch64_usqaddv4si_uus (__a, __b); } __extension__ static __inline uint64x2_t __attribute__ ((__always_inline__)) vsqaddq_u64 (uint64x2_t __a, int64x2_t __b) { - return (uint64x2_t) __builtin_aarch64_usqaddv2di ((int64x2_t) __a, - (int64x2_t) __b); + return __builtin_aarch64_usqaddv2di_uus (__a, __b); } __extension__ static __inline uint8x1_t __attribute__ ((__always_inline__)) vsqaddb_u8 (uint8x1_t __a, int8x1_t __b) { - return (uint8x1_t) __builtin_aarch64_usqaddqi ((int8x1_t) __a, __b); + return __builtin_aarch64_usqaddqi_uus (__a, __b); } __extension__ static __inline uint16x1_t __attribute__ ((__always_inline__)) vsqaddh_u16 (uint16x1_t __a, int16x1_t __b) { - return (uint16x1_t) __builtin_aarch64_usqaddhi ((int16x1_t) __a, __b); + return __builtin_aarch64_usqaddhi_uus (__a, __b); } __extension__ static __inline uint32x1_t __attribute__ ((__always_inline__)) vsqadds_u32 (uint32x1_t __a, int32x1_t __b) { - return (uint32x1_t) __builtin_aarch64_usqaddsi ((int32x1_t) __a, __b); + return __builtin_aarch64_usqaddsi_uus (__a, __b); } __extension__ static __inline uint64x1_t __attribute__ ((__always_inline__)) vsqaddd_u64 (uint64x1_t __a, int64x1_t __b) { - return (uint64x1_t) __builtin_aarch64_usqadddi ((int64x1_t) __a, __b); + return __builtin_aarch64_usqadddi_uus (__a, __b); } /* vsqrt */ @@ -22490,29 +22455,25 @@ vsra_n_s64 (int64x1_t __a, int64x1_t __b, const int __c) __extension__ static __inline uint8x8_t __attribute__ ((__always_inline__)) vsra_n_u8 (uint8x8_t __a, uint8x8_t __b, const int __c) { - return (uint8x8_t) __builtin_aarch64_usra_nv8qi ((int8x8_t) __a, - (int8x8_t) __b, __c); + return __builtin_aarch64_usra_nv8qi_uuus (__a, __b, __c); } __extension__ static __inline uint16x4_t __attribute__ ((__always_inline__)) vsra_n_u16 (uint16x4_t __a, uint16x4_t __b, const int __c) { - return (uint16x4_t) __builtin_aarch64_usra_nv4hi ((int16x4_t) __a, - (int16x4_t) __b, __c); + return __builtin_aarch64_usra_nv4hi_uuus (__a, __b, __c); } __extension__ static __inline uint32x2_t __attribute__ ((__always_inline__)) vsra_n_u32 (uint32x2_t __a, uint32x2_t __b, const int __c) { - return (uint32x2_t) __builtin_aarch64_usra_nv2si ((int32x2_t) __a, - (int32x2_t) __b, __c); + return __builtin_aarch64_usra_nv2si_uuus (__a, __b, 
__c); } __extension__ static __inline uint64x1_t __attribute__ ((__always_inline__)) vsra_n_u64 (uint64x1_t __a, uint64x1_t __b, const int __c) { - return (uint64x1_t) __builtin_aarch64_usra_ndi ((int64x1_t) __a, - (int64x1_t) __b, __c); + return __builtin_aarch64_usra_ndi_uuus (__a, __b, __c); } __extension__ static __inline int8x16_t __attribute__ ((__always_inline__)) @@ -22542,29 +22503,25 @@ vsraq_n_s64 (int64x2_t __a, int64x2_t __b, const int __c) __extension__ static __inline uint8x16_t __attribute__ ((__always_inline__)) vsraq_n_u8 (uint8x16_t __a, uint8x16_t __b, const int __c) { - return (uint8x16_t) __builtin_aarch64_usra_nv16qi ((int8x16_t) __a, - (int8x16_t) __b, __c); + return __builtin_aarch64_usra_nv16qi_uuus (__a, __b, __c); } __extension__ static __inline uint16x8_t __attribute__ ((__always_inline__)) vsraq_n_u16 (uint16x8_t __a, uint16x8_t __b, const int __c) { - return (uint16x8_t) __builtin_aarch64_usra_nv8hi ((int16x8_t) __a, - (int16x8_t) __b, __c); + return __builtin_aarch64_usra_nv8hi_uuus (__a, __b, __c); } __extension__ static __inline uint32x4_t __attribute__ ((__always_inline__)) vsraq_n_u32 (uint32x4_t __a, uint32x4_t __b, const int __c) { - return (uint32x4_t) __builtin_aarch64_usra_nv4si ((int32x4_t) __a, - (int32x4_t) __b, __c); + return __builtin_aarch64_usra_nv4si_uuus (__a, __b, __c); } __extension__ static __inline uint64x2_t __attribute__ ((__always_inline__)) vsraq_n_u64 (uint64x2_t __a, uint64x2_t __b, const int __c) { - return (uint64x2_t) __builtin_aarch64_usra_nv2di ((int64x2_t) __a, - (int64x2_t) __b, __c); + return __builtin_aarch64_usra_nv2di_uuus (__a, __b, __c); } __extension__ static __inline int64x1_t __attribute__ ((__always_inline__)) @@ -22576,7 +22533,7 @@ vsrad_n_s64 (int64x1_t __a, int64x1_t __b, const int __c) __extension__ static __inline uint64x1_t __attribute__ ((__always_inline__)) vsrad_n_u64 (uint64x1_t __a, uint64x1_t __b, const int __c) { - return (uint64x1_t) __builtin_aarch64_usra_ndi (__a, __b, __c); + return __builtin_aarch64_usra_ndi_uuus (__a, __b, __c); } /* vsri */ @@ -22608,29 +22565,25 @@ vsri_n_s64 (int64x1_t __a, int64x1_t __b, const int __c) __extension__ static __inline uint8x8_t __attribute__ ((__always_inline__)) vsri_n_u8 (uint8x8_t __a, uint8x8_t __b, const int __c) { - return (uint8x8_t) __builtin_aarch64_usri_nv8qi ((int8x8_t) __a, - (int8x8_t) __b, __c); + return __builtin_aarch64_usri_nv8qi_uuus (__a, __b, __c); } __extension__ static __inline uint16x4_t __attribute__ ((__always_inline__)) vsri_n_u16 (uint16x4_t __a, uint16x4_t __b, const int __c) { - return (uint16x4_t) __builtin_aarch64_usri_nv4hi ((int16x4_t) __a, - (int16x4_t) __b, __c); + return __builtin_aarch64_usri_nv4hi_uuus (__a, __b, __c); } __extension__ static __inline uint32x2_t __attribute__ ((__always_inline__)) vsri_n_u32 (uint32x2_t __a, uint32x2_t __b, const int __c) { - return (uint32x2_t) __builtin_aarch64_usri_nv2si ((int32x2_t) __a, - (int32x2_t) __b, __c); + return __builtin_aarch64_usri_nv2si_uuus (__a, __b, __c); } __extension__ static __inline uint64x1_t __attribute__ ((__always_inline__)) vsri_n_u64 (uint64x1_t __a, uint64x1_t __b, const int __c) { - return (uint64x1_t) __builtin_aarch64_usri_ndi ((int64x1_t) __a, - (int64x1_t) __b, __c); + return __builtin_aarch64_usri_ndi_uuus (__a, __b, __c); } __extension__ static __inline int8x16_t __attribute__ ((__always_inline__)) @@ -22660,29 +22613,25 @@ vsriq_n_s64 (int64x2_t __a, int64x2_t __b, const int __c) __extension__ static __inline uint8x16_t __attribute__ 
((__always_inline__)) vsriq_n_u8 (uint8x16_t __a, uint8x16_t __b, const int __c) { - return (uint8x16_t) __builtin_aarch64_usri_nv16qi ((int8x16_t) __a, - (int8x16_t) __b, __c); + return __builtin_aarch64_usri_nv16qi_uuus (__a, __b, __c); } __extension__ static __inline uint16x8_t __attribute__ ((__always_inline__)) vsriq_n_u16 (uint16x8_t __a, uint16x8_t __b, const int __c) { - return (uint16x8_t) __builtin_aarch64_usri_nv8hi ((int16x8_t) __a, - (int16x8_t) __b, __c); + return __builtin_aarch64_usri_nv8hi_uuus (__a, __b, __c); } __extension__ static __inline uint32x4_t __attribute__ ((__always_inline__)) vsriq_n_u32 (uint32x4_t __a, uint32x4_t __b, const int __c) { - return (uint32x4_t) __builtin_aarch64_usri_nv4si ((int32x4_t) __a, - (int32x4_t) __b, __c); + return __builtin_aarch64_usri_nv4si_uuus (__a, __b, __c); } __extension__ static __inline uint64x2_t __attribute__ ((__always_inline__)) vsriq_n_u64 (uint64x2_t __a, uint64x2_t __b, const int __c) { - return (uint64x2_t) __builtin_aarch64_usri_nv2di ((int64x2_t) __a, - (int64x2_t) __b, __c); + return __builtin_aarch64_usri_nv2di_uuus (__a, __b, __c); } __extension__ static __inline int64x1_t __attribute__ ((__always_inline__)) @@ -22694,7 +22643,7 @@ vsrid_n_s64 (int64x1_t __a, int64x1_t __b, const int __c) __extension__ static __inline uint64x1_t __attribute__ ((__always_inline__)) vsrid_n_u64 (uint64x1_t __a, uint64x1_t __b, const int __c) { - return (uint64x1_t) __builtin_aarch64_usri_ndi (__a, __b, __c); + return __builtin_aarch64_usri_ndi_uuus (__a, __b, __c); } /* vst1 */ -- 2.7.4
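
The user-visible prototypes in arm_neon.h are unchanged by this patch; only the
internal builtin calls lose their casts. As a quick illustration (a hypothetical
snippet, not part of the patch or its testsuite), code like the following
compiles the same before and after, but the header it expands to no longer
round-trips the unsigned vectors through signed types:

#include <arm_neon.h>

/* Saturating-add two unsigned vectors (UQADD), then shift the sum
   right by three and accumulate onto acc (USRA).  Both intrinsics are
   among those whose internal casts this patch removes.  */
uint8x8_t
add_shift_accumulate (uint8x8_t a, uint8x8_t b, uint8x8_t acc)
{
  uint8x8_t sum = vqadd_u8 (a, b);
  return vsra_n_u8 (acc, sum, 3);
}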