powerpc: add compile-time support for lbarx, lharx
authorNicholas Piggin <npiggin@gmail.com>
Fri, 9 Sep 2022 05:23:12 +0000 (15:23 +1000)
committerMichael Ellerman <mpe@ellerman.id.au>
Thu, 24 Nov 2022 12:31:47 +0000 (23:31 +1100)
ISA v2.06 (POWER7 and up) as well as e6500 support lbarx and lharx.
Add a compile option that allows code to use it, and add support in
cmpxchg and xchg for 8 and 16 bit values without shifting and masking.

Signed-off-by: Nicholas Piggin <npiggin@gmail.com>
Signed-off-by: Michael Ellerman <mpe@ellerman.id.au>
Link: https://lore.kernel.org/r/20220909052312.63916-1-npiggin@gmail.com
arch/powerpc/Kconfig
arch/powerpc/include/asm/cmpxchg.h
arch/powerpc/lib/sstep.c
arch/powerpc/platforms/Kconfig.cputype

index 699df27..4fd4924 100644 (file)
@@ -293,6 +293,9 @@ config PPC_BARRIER_NOSPEC
        default y
        depends on PPC_BOOK3S_64 || PPC_E500
 
+config PPC_HAS_LBARX_LHARX
+       bool
+
 config EARLY_PRINTK
        bool
        default y
index 05f246c..d0ea057 100644 (file)
@@ -77,10 +77,76 @@ u32 __cmpxchg_##type##sfx(volatile void *p, u32 old, u32 new)       \
  * the previous value stored there.
  */
 
+#ifndef CONFIG_PPC_HAS_LBARX_LHARX
 XCHG_GEN(u8, _local, "memory");
 XCHG_GEN(u8, _relaxed, "cc");
 XCHG_GEN(u16, _local, "memory");
 XCHG_GEN(u16, _relaxed, "cc");
+#else
+static __always_inline unsigned long
+__xchg_u8_local(volatile void *p, unsigned long val)
+{
+       unsigned long prev;
+
+       __asm__ __volatile__(
+"1:    lbarx   %0,0,%2         # __xchg_u8_local\n"
+"      stbcx.  %3,0,%2 \n"
+"      bne-    1b"
+       : "=&r" (prev), "+m" (*(volatile unsigned char *)p)
+       : "r" (p), "r" (val)
+       : "cc", "memory");
+
+       return prev;
+}
+
+static __always_inline unsigned long
+__xchg_u8_relaxed(u8 *p, unsigned long val)
+{
+       unsigned long prev;
+
+       __asm__ __volatile__(
+"1:    lbarx   %0,0,%2         # __xchg_u8_relaxed\n"
+"      stbcx.  %3,0,%2\n"
+"      bne-    1b"
+       : "=&r" (prev), "+m" (*p)
+       : "r" (p), "r" (val)
+       : "cc");
+
+       return prev;
+}
+
+static __always_inline unsigned long
+__xchg_u16_local(volatile void *p, unsigned long val)
+{
+       unsigned long prev;
+
+       __asm__ __volatile__(
+"1:    lharx   %0,0,%2         # __xchg_u16_local\n"
+"      sthcx.  %3,0,%2\n"
+"      bne-    1b"
+       : "=&r" (prev), "+m" (*(volatile unsigned short *)p)
+       : "r" (p), "r" (val)
+       : "cc", "memory");
+
+       return prev;
+}
+
+static __always_inline unsigned long
+__xchg_u16_relaxed(u16 *p, unsigned long val)
+{
+       unsigned long prev;
+
+       __asm__ __volatile__(
+"1:    lharx   %0,0,%2         # __xchg_u16_relaxed\n"
+"      sthcx.  %3,0,%2\n"
+"      bne-    1b"
+       : "=&r" (prev), "+m" (*p)
+       : "r" (p), "r" (val)
+       : "cc");
+
+       return prev;
+}
+#endif
 
 static __always_inline unsigned long
 __xchg_u32_local(volatile void *p, unsigned long val)
@@ -198,11 +264,12 @@ __xchg_relaxed(void *ptr, unsigned long x, unsigned int size)
        (__typeof__(*(ptr))) __xchg_relaxed((ptr),                      \
                        (unsigned long)_x_, sizeof(*(ptr)));            \
 })
+
 /*
  * Compare and exchange - if *p == old, set it to new,
  * and return the old value of *p.
  */
-
+#ifndef CONFIG_PPC_HAS_LBARX_LHARX
 CMPXCHG_GEN(u8, , PPC_ATOMIC_ENTRY_BARRIER, PPC_ATOMIC_EXIT_BARRIER, "memory");
 CMPXCHG_GEN(u8, _local, , , "memory");
 CMPXCHG_GEN(u8, _acquire, , PPC_ACQUIRE_BARRIER, "memory");
@@ -211,6 +278,168 @@ CMPXCHG_GEN(u16, , PPC_ATOMIC_ENTRY_BARRIER, PPC_ATOMIC_EXIT_BARRIER, "memory");
 CMPXCHG_GEN(u16, _local, , , "memory");
 CMPXCHG_GEN(u16, _acquire, , PPC_ACQUIRE_BARRIER, "memory");
 CMPXCHG_GEN(u16, _relaxed, , , "cc");
+#else
+static __always_inline unsigned long
+__cmpxchg_u8(volatile unsigned char *p, unsigned long old, unsigned long new)
+{
+       unsigned int prev;
+
+       __asm__ __volatile__ (
+       PPC_ATOMIC_ENTRY_BARRIER
+"1:    lbarx   %0,0,%2         # __cmpxchg_u8\n"
+"      cmpw    0,%0,%3\n"
+"      bne-    2f\n"
+"      stbcx.  %4,0,%2\n"
+"      bne-    1b"
+       PPC_ATOMIC_EXIT_BARRIER
+       "\n\
+2:"
+       : "=&r" (prev), "+m" (*p)
+       : "r" (p), "r" (old), "r" (new)
+       : "cc", "memory");
+
+       return prev;
+}
+
+static __always_inline unsigned long
+__cmpxchg_u8_local(volatile unsigned char *p, unsigned long old,
+                       unsigned long new)
+{
+       unsigned int prev;
+
+       __asm__ __volatile__ (
+"1:    lbarx   %0,0,%2         # __cmpxchg_u8_local\n"
+"      cmpw    0,%0,%3\n"
+"      bne-    2f\n"
+"      stbcx.  %4,0,%2\n"
+"      bne-    1b\n"
+"2:"
+       : "=&r" (prev), "+m" (*p)
+       : "r" (p), "r" (old), "r" (new)
+       : "cc", "memory");
+
+       return prev;
+}
+
+static __always_inline unsigned long
+__cmpxchg_u8_relaxed(u8 *p, unsigned long old, unsigned long new)
+{
+       unsigned long prev;
+
+       __asm__ __volatile__ (
+"1:    lbarx   %0,0,%2         # __cmpxchg_u8_relaxed\n"
+"      cmpw    0,%0,%3\n"
+"      bne-    2f\n"
+"      stbcx.  %4,0,%2\n"
+"      bne-    1b\n"
+"2:"
+       : "=&r" (prev), "+m" (*p)
+       : "r" (p), "r" (old), "r" (new)
+       : "cc");
+
+       return prev;
+}
+
+static __always_inline unsigned long
+__cmpxchg_u8_acquire(u8 *p, unsigned long old, unsigned long new)
+{
+       unsigned long prev;
+
+       __asm__ __volatile__ (
+"1:    lbarx   %0,0,%2         # __cmpxchg_u8_acquire\n"
+"      cmpw    0,%0,%3\n"
+"      bne-    2f\n"
+"      stbcx.  %4,0,%2\n"
+"      bne-    1b\n"
+       PPC_ACQUIRE_BARRIER
+"2:"
+       : "=&r" (prev), "+m" (*p)
+       : "r" (p), "r" (old), "r" (new)
+       : "cc", "memory");
+
+       return prev;
+}
+
+static __always_inline unsigned long
+__cmpxchg_u16(volatile unsigned short *p, unsigned long old, unsigned long new)
+{
+       unsigned int prev;
+
+       __asm__ __volatile__ (
+       PPC_ATOMIC_ENTRY_BARRIER
+"1:    lharx   %0,0,%2         # __cmpxchg_u16\n"
+"      cmpw    0,%0,%3\n"
+"      bne-    2f\n"
+"      sthcx.  %4,0,%2\n"
+"      bne-    1b\n"
+       PPC_ATOMIC_EXIT_BARRIER
+"2:"
+       : "=&r" (prev), "+m" (*p)
+       : "r" (p), "r" (old), "r" (new)
+       : "cc", "memory");
+
+       return prev;
+}
+
+static __always_inline unsigned long
+__cmpxchg_u16_local(volatile unsigned short *p, unsigned long old,
+                       unsigned long new)
+{
+       unsigned int prev;
+
+       __asm__ __volatile__ (
+"1:    lharx   %0,0,%2         # __cmpxchg_u16_local\n"
+"      cmpw    0,%0,%3\n"
+"      bne-    2f\n"
+"      sthcx.  %4,0,%2\n"
+"      bne-    1b\n"
+"2:"
+       : "=&r" (prev), "+m" (*p)
+       : "r" (p), "r" (old), "r" (new)
+       : "cc", "memory");
+
+       return prev;
+}
+
+static __always_inline unsigned long
+__cmpxchg_u16_relaxed(u16 *p, unsigned long old, unsigned long new)
+{
+       unsigned long prev;
+
+       __asm__ __volatile__ (
+"1:    lharx   %0,0,%2         # __cmpxchg_u16_relaxed\n"
+"      cmpw    0,%0,%3\n"
+"      bne-    2f\n"
+"      sthcx.  %4,0,%2\n"
+"      bne-    1b\n"
+"2:"
+       : "=&r" (prev), "+m" (*p)
+       : "r" (p), "r" (old), "r" (new)
+       : "cc");
+
+       return prev;
+}
+
+static __always_inline unsigned long
+__cmpxchg_u16_acquire(u16 *p, unsigned long old, unsigned long new)
+{
+       unsigned long prev;
+
+       __asm__ __volatile__ (
+"1:    lharx   %0,0,%2         # __cmpxchg_u16_acquire\n"
+"      cmpw    0,%0,%3\n"
+"      bne-    2f\n"
+"      sthcx.  %4,0,%2\n"
+"      bne-    1b\n"
+       PPC_ACQUIRE_BARRIER
+"2:"
+       : "=&r" (prev), "+m" (*p)
+       : "r" (p), "r" (old), "r" (new)
+       : "cc", "memory");
+
+       return prev;
+}
+#endif
 
 static __always_inline unsigned long
 __cmpxchg_u32(volatile unsigned int *p, unsigned long old, unsigned long new)
index 398b569..38158b7 100644 (file)
@@ -2284,15 +2284,7 @@ int analyse_instr(struct instruction_op *op, const struct pt_regs *regs,
                        op->type = MKOP(STCX, 0, 4);
                        break;
 
-#ifdef __powerpc64__
-               case 84:        /* ldarx */
-                       op->type = MKOP(LARX, 0, 8);
-                       break;
-
-               case 214:       /* stdcx. */
-                       op->type = MKOP(STCX, 0, 8);
-                       break;
-
+#ifdef CONFIG_PPC_HAS_LBARX_LHARX
                case 52:        /* lbarx */
                        op->type = MKOP(LARX, 0, 1);
                        break;
@@ -2308,6 +2300,15 @@ int analyse_instr(struct instruction_op *op, const struct pt_regs *regs,
                case 726:       /* sthcx. */
                        op->type = MKOP(STCX, 0, 2);
                        break;
+#endif
+#ifdef __powerpc64__
+               case 84:        /* ldarx */
+                       op->type = MKOP(LARX, 0, 8);
+                       break;
+
+               case 214:       /* stdcx. */
+                       op->type = MKOP(STCX, 0, 8);
+                       break;
 
                case 276:       /* lqarx */
                        if (!((rd & 1) || rd == ra || rd == rb))
@@ -3334,7 +3335,7 @@ int emulate_loadstore(struct pt_regs *regs, struct instruction_op *op)
                err = 0;
                val = 0;
                switch (size) {
-#ifdef __powerpc64__
+#ifdef CONFIG_PPC_HAS_LBARX_LHARX
                case 1:
                        __get_user_asmx(val, ea, err, "lbarx");
                        break;
index 0c4eed9..7bac213 100644 (file)
@@ -135,6 +135,7 @@ config GENERIC_CPU
        depends on PPC_BOOK3S_64 && CPU_LITTLE_ENDIAN
        select ARCH_HAS_FAST_MULTIPLIER
        select PPC_64S_HASH_MMU
+       select PPC_HAS_LBARX_LHARX
 
 config POWERPC_CPU
        bool "Generic 32 bits powerpc"
@@ -160,17 +161,20 @@ config POWER7_CPU
        depends on PPC_BOOK3S_64
        select ARCH_HAS_FAST_MULTIPLIER
        select PPC_64S_HASH_MMU
+       select PPC_HAS_LBARX_LHARX
 
 config POWER8_CPU
        bool "POWER8"
        depends on PPC_BOOK3S_64
        select ARCH_HAS_FAST_MULTIPLIER
        select PPC_64S_HASH_MMU
+       select PPC_HAS_LBARX_LHARX
 
 config POWER9_CPU
        bool "POWER9"
        depends on PPC_BOOK3S_64
        select ARCH_HAS_FAST_MULTIPLIER
+       select PPC_HAS_LBARX_LHARX
 
 config POWER10_CPU
        bool "POWER10"
@@ -184,6 +188,7 @@ config E5500_CPU
 config E6500_CPU
        bool "Freescale e6500"
        depends on PPC64 && PPC_E500
+       select PPC_HAS_LBARX_LHARX
 
 config 405_CPU
        bool "40x family"