arch: Introduce arch_{,try_}cmpxchg128{,_local}()
author		Peter Zijlstra <peterz@infradead.org>
		Wed, 31 May 2023 13:08:36 +0000 (15:08 +0200)
committer	Peter Zijlstra <peterz@infradead.org>
		Mon, 5 Jun 2023 07:36:35 +0000 (09:36 +0200)
For all architectures that currently support cmpxchg_double(), implement
the cmpxchg128() family of functions, which is basically the same but
with a saner interface.

Signed-off-by: Peter Zijlstra (Intel) <peterz@infradead.org>
Reviewed-by: Arnd Bergmann <arnd@arndb.de>
Reviewed-by: Mark Rutland <mark.rutland@arm.com>
Acked-by: Heiko Carstens <hca@linux.ibm.com>
Acked-by: Mark Rutland <mark.rutland@arm.com>
Tested-by: Mark Rutland <mark.rutland@arm.com>
Link: https://lore.kernel.org/r/20230531132323.452120708@infradead.org
arch/arm64/include/asm/atomic_ll_sc.h
arch/arm64/include/asm/atomic_lse.h
arch/arm64/include/asm/cmpxchg.h
arch/s390/include/asm/cmpxchg.h
arch/x86/include/asm/cmpxchg_32.h
arch/x86/include/asm/cmpxchg_64.h

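Before the per-file diffs, a short orientation on the new interface:
arch_cmpxchg128() takes the expected and new values as whole u128s and
returns whatever was found in memory, so callers retry until the return
value equals what they passed as 'old'. A minimal usage sketch (the
function and variable names here are illustrative only, not part of the
patch), assuming a naturally aligned u128:

	/* Sketch only: atomically add 1 to a 128-bit value via the new API. */
	static void inc_u128(volatile u128 *v)
	{
		u128 old = *v, prev;

		for (;;) {
			prev = arch_cmpxchg128(v, old, old + 1);
			if (prev == old)
				break;		/* exchange succeeded */
			old = prev;		/* raced; retry with the observed value */
		}
	}

The try_ variants added for x86 below make this pattern shorter; see the
sketch after the x86_64 hunk at the end.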
arch/arm64/include/asm/atomic_ll_sc.h
index cbb3d96..b100a49 100644
@@ -326,6 +326,47 @@ __CMPXCHG_DBL(   ,        ,  ,         )
 __CMPXCHG_DBL(_mb, dmb ish, l, "memory")
 
 #undef __CMPXCHG_DBL
+
+union __u128_halves {
+       u128 full;
+       struct {
+               u64 low, high;
+       };
+};
+
+#define __CMPXCHG128(name, mb, rel, cl...)                             \
+static __always_inline u128                                            \
+__ll_sc__cmpxchg128##name(volatile u128 *ptr, u128 old, u128 new)      \
+{                                                                      \
+       union __u128_halves r, o = { .full = (old) },                   \
+                              n = { .full = (new) };                   \
+       unsigned int tmp;                                               \
+                                                                       \
+       asm volatile("// __cmpxchg128" #name "\n"                       \
+       "       prfm    pstl1strm, %[v]\n"                              \
+       "1:     ldxp    %[rl], %[rh], %[v]\n"                           \
+       "       cmp     %[rl], %[ol]\n"                                 \
+       "       ccmp    %[rh], %[oh], 0, eq\n"                          \
+       "       b.ne    2f\n"                                           \
+       "       st" #rel "xp    %w[tmp], %[nl], %[nh], %[v]\n"          \
+       "       cbnz    %w[tmp], 1b\n"                                  \
+       "       " #mb "\n"                                              \
+       "2:"                                                            \
+       : [v] "+Q" (*(u128 *)ptr),                                      \
+         [rl] "=&r" (r.low), [rh] "=&r" (r.high),                      \
+         [tmp] "=&r" (tmp)                                             \
+       : [ol] "r" (o.low), [oh] "r" (o.high),                          \
+         [nl] "r" (n.low), [nh] "r" (n.high)                           \
+       : "cc", ##cl);                                                  \
+                                                                       \
+       return r.full;                                                  \
+}
+
+__CMPXCHG128(   ,        ,  )
+__CMPXCHG128(_mb, dmb ish, l, "memory")
+
+#undef __CMPXCHG128
+
 #undef K
 
 #endif /* __ASM_ATOMIC_LL_SC_H */
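For readers less used to arm64 exclusives: the loop above loads both
halves with ldxp, compares them against the expected pair (cmp plus ccmp
folds the two 64-bit comparisons into one condition), and only stores the
new pair with st[l]xp if they matched, retrying when the exclusive
reservation is lost; the optional dmb ish supplies the full barrier for
the _mb variant. Semantically it behaves like this non-atomic C model (a
sketch only; the atomicity comes from the exclusives):

	/* Illustrative model, NOT atomic and not part of the patch. */
	static u128 cmpxchg128_model(u128 *ptr, u128 old, u128 new)
	{
		u128 cur = *ptr;		/* ldxp				*/

		if (cur == old)			/* cmp + ccmp			*/
			*ptr = new;		/* stxp, retried on contention	*/
		return cur;			/* caller checks cur == old	*/
	}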
arch/arm64/include/asm/atomic_lse.h
index 319958b..c781281 100644
@@ -317,4 +317,35 @@ __CMPXCHG_DBL(_mb, al, "memory")
 
 #undef __CMPXCHG_DBL
 
+#define __CMPXCHG128(name, mb, cl...)                                  \
+static __always_inline u128                                            \
+__lse__cmpxchg128##name(volatile u128 *ptr, u128 old, u128 new)                \
+{                                                                      \
+       union __u128_halves r, o = { .full = (old) },                   \
+                              n = { .full = (new) };                   \
+       register unsigned long x0 asm ("x0") = o.low;                   \
+       register unsigned long x1 asm ("x1") = o.high;                  \
+       register unsigned long x2 asm ("x2") = n.low;                   \
+       register unsigned long x3 asm ("x3") = n.high;                  \
+       register unsigned long x4 asm ("x4") = (unsigned long)ptr;      \
+                                                                       \
+       asm volatile(                                                   \
+       __LSE_PREAMBLE                                                  \
+       "       casp" #mb "\t%[old1], %[old2], %[new1], %[new2], %[v]\n"\
+       : [old1] "+&r" (x0), [old2] "+&r" (x1),                         \
+         [v] "+Q" (*(u128 *)ptr)                                       \
+       : [new1] "r" (x2), [new2] "r" (x3), [ptr] "r" (x4),             \
+         [oldval1] "r" (o.low), [oldval2] "r" (o.high)                 \
+       : cl);                                                          \
+                                                                       \
+       r.low = x0; r.high = x1;                                        \
+                                                                       \
+       return r.full;                                                  \
+}
+
+__CMPXCHG128(   ,   )
+__CMPXCHG128(_mb, al, "memory")
+
+#undef __CMPXCHG128
+
 #endif /* __ASM_ATOMIC_LSE_H */
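A note on the explicit x0-x4 register variables above: CASP requires the
compare pair and the new pair to live in consecutive even/odd register
pairs, which plain "r" constraints cannot guarantee. Because the single
instruction performs the whole compare-and-swap, no retry loop is needed
here; success is simply the returned value matching the expected one,
e.g. (a sketch only, the wrapper name is illustrative):

	/* Sketch only: not part of the patch. */
	static bool casp_matched(volatile u128 *v, u128 expected, u128 new)
	{
		return __lse__cmpxchg128_mb(v, expected, new) == expected;
	}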
arch/arm64/include/asm/cmpxchg.h
index c6bc5d8..097b832 100644
@@ -146,6 +146,19 @@ __CMPXCHG_DBL(_mb)
 
 #undef __CMPXCHG_DBL
 
+#define __CMPXCHG128(name)                                             \
+static inline u128 __cmpxchg128##name(volatile u128 *ptr,              \
+                                     u128 old, u128 new)               \
+{                                                                      \
+       return __lse_ll_sc_body(_cmpxchg128##name,                      \
+                               ptr, old, new);                         \
+}
+
+__CMPXCHG128(   )
+__CMPXCHG128(_mb)
+
+#undef __CMPXCHG128
+
 #define __CMPXCHG_GEN(sfx)                                             \
 static __always_inline unsigned long __cmpxchg##sfx(volatile void *ptr,        \
                                           unsigned long old,           \
@@ -228,6 +241,19 @@ __CMPXCHG_GEN(_mb)
        __ret;                                                                  \
 })
 
+/* cmpxchg128 */
+#define system_has_cmpxchg128()                1
+
+#define arch_cmpxchg128(ptr, o, n)                                             \
+({                                                                             \
+       __cmpxchg128_mb((ptr), (o), (n));                                       \
+})
+
+#define arch_cmpxchg128_local(ptr, o, n)                                       \
+({                                                                             \
+       __cmpxchg128((ptr), (o), (n));                                          \
+})
+
 #define __CMPWAIT_CASE(w, sfx, sz)                                     \
 static inline void __cmpwait_case_##sz(volatile void *ptr,             \
                                       unsigned long val)               \
arch/s390/include/asm/cmpxchg.h
index 06e0e42..7517376 100644
@@ -224,4 +224,18 @@ static __always_inline int __cmpxchg_double(unsigned long p1, unsigned long p2,
                         (unsigned long)(n1), (unsigned long)(n2));     \
 })
 
+#define system_has_cmpxchg128()                1
+
+static __always_inline u128 arch_cmpxchg128(volatile u128 *ptr, u128 old, u128 new)
+{
+       asm volatile(
+               "       cdsg    %[old],%[new],%[ptr]\n"
+               : [old] "+d" (old), [ptr] "+QS" (*ptr)
+               : [new] "d" (new)
+               : "memory", "cc");
+       return old;
+}
+
+#define arch_cmpxchg128                arch_cmpxchg128
+
 #endif /* __ASM_CMPXCHG_H */
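On s390 a single CDSG instruction does the whole 16-byte compare-and-swap,
and the trailing "#define arch_cmpxchg128 arch_cmpxchg128" is the usual
convention telling the generic fallback headers that the architecture
supplies its own implementation. Like CMPXCHG16B and the arm64 exclusives,
CDSG needs a naturally aligned 16-byte operand (a misaligned one raises a
specification exception), so structures embedding a cmpxchg128 target must
preserve u128's 16-byte alignment; a sketch, with an illustrative struct
name:

	/* Sketch only: not part of the patch. */
	#include <linux/build_bug.h>
	#include <linux/types.h>

	struct freelist_head {
		u128 full;		/* u128 is naturally 16-byte aligned */
	};
	static_assert(__alignof__(struct freelist_head) >= 16);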
arch/x86/include/asm/cmpxchg_32.h
index 6ba80ce..d12f9ed 100644
@@ -103,6 +103,7 @@ static inline bool __try_cmpxchg64(volatile u64 *ptr, u64 *pold, u64 new)
 
 #endif
 
-#define system_has_cmpxchg_double() boot_cpu_has(X86_FEATURE_CX8)
+#define system_has_cmpxchg_double()    boot_cpu_has(X86_FEATURE_CX8)
+#define system_has_cmpxchg64()         boot_cpu_has(X86_FEATURE_CX8)
 
 #endif /* _ASM_X86_CMPXCHG_32_H */
arch/x86/include/asm/cmpxchg_64.h
index 0d3beb2..fe29107 100644
        arch_try_cmpxchg((ptr), (po), (n));                             \
 })
 
-#define system_has_cmpxchg_double() boot_cpu_has(X86_FEATURE_CX16)
+union __u128_halves {
+       u128 full;
+       struct {
+               u64 low, high;
+       };
+};
+
+#define __arch_cmpxchg128(_ptr, _old, _new, _lock)                     \
+({                                                                     \
+       union __u128_halves o = { .full = (_old), },                    \
+                           n = { .full = (_new), };                    \
+                                                                       \
+       asm volatile(_lock "cmpxchg16b %[ptr]"                          \
+                    : [ptr] "+m" (*(_ptr)),                            \
+                      "+a" (o.low), "+d" (o.high)                      \
+                    : "b" (n.low), "c" (n.high)                        \
+                    : "memory");                                       \
+                                                                       \
+       o.full;                                                         \
+})
+
+static __always_inline u128 arch_cmpxchg128(volatile u128 *ptr, u128 old, u128 new)
+{
+       return __arch_cmpxchg128(ptr, old, new, LOCK_PREFIX);
+}
+
+static __always_inline u128 arch_cmpxchg128_local(volatile u128 *ptr, u128 old, u128 new)
+{
+       return __arch_cmpxchg128(ptr, old, new,);
+}
+
+#define __arch_try_cmpxchg128(_ptr, _oldp, _new, _lock)                        \
+({                                                                     \
+       union __u128_halves o = { .full = *(_oldp), },                  \
+                           n = { .full = (_new), };                    \
+       bool ret;                                                       \
+                                                                       \
+       asm volatile(_lock "cmpxchg16b %[ptr]"                          \
+                    CC_SET(e)                                          \
+                    : CC_OUT(e) (ret),                                 \
+                      [ptr] "+m" (*(_ptr)),                             \
+                      "+a" (o.low), "+d" (o.high)                      \
+                    : "b" (n.low), "c" (n.high)                        \
+                    : "memory");                                       \
+                                                                       \
+       if (unlikely(!ret))                                             \
+               *(_oldp) = o.full;                                      \
+                                                                       \
+       likely(ret);                                                    \
+})
+
+static __always_inline bool arch_try_cmpxchg128(volatile u128 *ptr, u128 *oldp, u128 new)
+{
+       return __arch_try_cmpxchg128(ptr, oldp, new, LOCK_PREFIX);
+}
+
+static __always_inline bool arch_try_cmpxchg128_local(volatile u128 *ptr, u128 *oldp, u128 new)
+{
+       return __arch_try_cmpxchg128(ptr, oldp, new,);
+}
+
+#define system_has_cmpxchg_double()    boot_cpu_has(X86_FEATURE_CX16)
+#define system_has_cmpxchg128()                boot_cpu_has(X86_FEATURE_CX16)
 
 #endif /* _ASM_X86_CMPXCHG_64_H */
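Finally, arch_try_cmpxchg128() returns a success flag and, on failure,
writes the observed value back through *oldp, which keeps the usual retry
loop compact. A closing usage sketch (names are illustrative, not from the
patch), guarded by the new system_has_cmpxchg128() check since CMPXCHG16B
is missing on the earliest x86-64 CPUs:

	/* Sketch only: not part of the patch. */
	static bool add_to_u128(volatile u128 *v, u128 delta)
	{
		u128 old;

		if (!system_has_cmpxchg128())
			return false;		/* caller falls back to a lock */

		old = *v;
		do {
			/* on failure, 'old' is refreshed with the current value */
		} while (!arch_try_cmpxchg128(v, &old, old + delta));

		return true;
	}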