From: Paul Burton
Date: Tue, 1 Oct 2019 21:53:38 +0000 (+0000)
Subject: MIPS: futex: Emit Loongson3 sync workarounds within asm
X-Git-Tag: v5.10.7~3549^2~78
X-Git-Url: http://review.tizen.org/git/?a=commitdiff_plain;h=3c1d3f0979721a39dd2980c97466127ce65aa130;p=platform%2Fkernel%2Flinux-rpi.git

MIPS: futex: Emit Loongson3 sync workarounds within asm

Generate the sync instructions required to work around Loongson3 LL/SC
errata within inline asm blocks, which feels a little safer than doing
it from C where, strictly speaking, the compiler would be well within
its rights to insert a memory access between the separate asm statements
we previously had, containing sync & ll instructions respectively.

Signed-off-by: Paul Burton
Cc: linux-mips@vger.kernel.org
Cc: Huacai Chen
Cc: Jiaxun Yang
Cc: linux-kernel@vger.kernel.org
---

diff --git a/arch/mips/include/asm/barrier.h b/arch/mips/include/asm/barrier.h
index c7e05e8..133afd5 100644
--- a/arch/mips/include/asm/barrier.h
+++ b/arch/mips/include/asm/barrier.h
@@ -95,13 +95,14 @@ static inline void wmb(void)
  * ordering will be done by smp_llsc_mb() and friends.
  */
 #if defined(CONFIG_WEAK_REORDERING_BEYOND_LLSC) && defined(CONFIG_SMP)
-#define __WEAK_LLSC_MB		"	sync	\n"
-#define smp_llsc_mb()		__asm__ __volatile__(__WEAK_LLSC_MB : : :"memory")
-#define __LLSC_CLOBBER
+# define __WEAK_LLSC_MB		sync
+# define smp_llsc_mb() \
+	__asm__ __volatile__(__stringify(__WEAK_LLSC_MB) : : :"memory")
+# define __LLSC_CLOBBER
 #else
-#define __WEAK_LLSC_MB		"		\n"
-#define smp_llsc_mb()		do { } while (0)
-#define __LLSC_CLOBBER		"memory"
+# define __WEAK_LLSC_MB
+# define smp_llsc_mb()		do { } while (0)
+# define __LLSC_CLOBBER		"memory"
 #endif
 
 #ifdef CONFIG_CPU_CAVIUM_OCTEON
diff --git a/arch/mips/include/asm/futex.h b/arch/mips/include/asm/futex.h
index b83b039..54cf205 100644
--- a/arch/mips/include/asm/futex.h
+++ b/arch/mips/include/asm/futex.h
@@ -16,6 +16,7 @@
 #include <asm/barrier.h>
 #include <asm/compiler.h>
 #include <asm/errno.h>
+#include <asm/sync.h>
 #include <asm/war.h>
 
 #define __futex_atomic_op(insn, ret, oldval, uaddr, oparg)		\
@@ -32,7 +33,7 @@
 	"	.set	arch=r4000				\n"	\
 	"2:	sc	$1, %2					\n"	\
 	"	beqzl	$1, 1b					\n"	\
-	__WEAK_LLSC_MB						\
+	__stringify(__WEAK_LLSC_MB)					\
 	"3:							\n"	\
 	"	.insn						\n"	\
 	"	.set	pop					\n"	\
@@ -50,19 +51,19 @@
 		"i" (-EFAULT)						\
 		: "memory");						\
 	} else if (cpu_has_llsc) {					\
-		loongson_llsc_mb();					\
 		__asm__ __volatile__(					\
 		"	.set	push				\n"	\
 		"	.set	noat				\n"	\
 		"	.set	push				\n"	\
 		"	.set	"MIPS_ISA_ARCH_LEVEL"		\n"	\
+		"	" __SYNC(full, loongson3_war) "		\n"	\
 		"1:	"user_ll("%1", "%4")" # __futex_atomic_op\n"	\
 		"	.set	pop				\n"	\
 		"	" insn	"				\n"	\
 		"	.set	"MIPS_ISA_ARCH_LEVEL"		\n"	\
 		"2:	"user_sc("$1", "%2")"			\n"	\
 		"	beqz	$1, 1b				\n"	\
-		__WEAK_LLSC_MB						\
+		__stringify(__WEAK_LLSC_MB)				\
 		"3:						\n"	\
 		"	.insn					\n"	\
 		"	.set	pop				\n"	\
@@ -147,7 +148,7 @@ futex_atomic_cmpxchg_inatomic(u32 *uval, u32 __user *uaddr,
 		"	.set	arch=r4000			\n"
 		"2:	sc	$1, %2				\n"
 		"	beqzl	$1, 1b				\n"
-		__WEAK_LLSC_MB
+		__stringify(__WEAK_LLSC_MB)
 		"3:						\n"
 		"	.insn					\n"
 		"	.set	pop				\n"
@@ -164,13 +165,13 @@ futex_atomic_cmpxchg_inatomic(u32 *uval, u32 __user *uaddr,
 		"i" (-EFAULT)
 		: "memory");
 	} else if (cpu_has_llsc) {
-		loongson_llsc_mb();
 		__asm__ __volatile__(
 		"# futex_atomic_cmpxchg_inatomic			\n"
 		"	.set	push					\n"
 		"	.set	noat					\n"
 		"	.set	push					\n"
 		"	.set	"MIPS_ISA_ARCH_LEVEL"			\n"
+		"	" __SYNC(full, loongson3_war) "			\n"
 		"1:	"user_ll("%1", "%3")"				\n"
 		"	bne	%1, %z4, 3f				\n"
 		"	.set	pop					\n"
@@ -178,8 +179,7 @@ futex_atomic_cmpxchg_inatomic(u32 *uval, u32 __user *uaddr,
 		"	.set	"MIPS_ISA_ARCH_LEVEL"			\n"
 		"2:	"user_sc("$1", "%2")"				\n"
 		"	beqz	$1, 1b					\n"
-		__WEAK_LLSC_MB
-		"3:							\n"
+		"3: " __SYNC_ELSE(full, loongson3_war, __WEAK_LLSC_MB) "\n"
 		"	.insn						\n"
 		"	.set	pop					\n"
 		"	.section .fixup,\"ax\"				\n"
@@ -194,7 +194,6 @@ futex_atomic_cmpxchg_inatomic(u32 *uval, u32 __user *uaddr,
 		: GCC_OFF_SMALL_ASM() (*uaddr), "Jr" (oldval), "Jr" (newval),
 		  "i" (-EFAULT)
 		: "memory");
-		loongson_llsc_mb();
 	} else
 		return -ENOSYS;
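
To make the hazard described in the commit message concrete, here is a minimal,
freestanding C sketch. It is not the kernel's real code: the function names
(ll_with_separate_sync, ll_with_inline_sync) are invented for illustration, the
"R" memory constraint stands in for the kernel's GCC_OFF_SMALL_ASM(), and it
assumes a MIPS compiler target with the Loongson3 workaround unconditionally
enabled.

	/*
	 * Illustrative sketch only -- not the kernel's __futex_atomic_op().
	 * Assumes a MIPS target; function names are hypothetical.
	 */

	/*
	 * Before: the sync is emitted by a separate asm statement, roughly
	 * what loongson_llsc_mb() compiled to when the workaround was enabled.
	 */
	static inline int ll_with_separate_sync(int *p)
	{
		int val;

		__asm__ __volatile__("sync" : : : "memory");
		/*
		 * The compiler is free to place a load or store of some
		 * unrelated object here, between the sync and the ll --
		 * exactly the kind of intervening memory access the
		 * Loongson3 workaround must rule out.
		 */
		__asm__ __volatile__("ll	%0, %1" : "=r" (val) : "R" (*p));
		return val;
	}

	/*
	 * After: sync and ll are emitted from a single asm block, so the
	 * compiler cannot schedule any access between them.
	 */
	static inline int ll_with_inline_sync(int *p)
	{
		int val;

		__asm__ __volatile__(
		"	sync			\n"
		"	ll	%0, %1		\n"
		: "=r" (val)
		: "R" (*p)
		: "memory");
		return val;
	}

That is the shape of the change above: the loongson_llsc_mb() call preceding
each LL/SC sequence is dropped, and __SYNC(full, loongson3_war) is emitted as
the first line inside the asm block instead; where the Loongson3 sync and the
ordinary __WEAK_LLSC_MB barrier would occupy the same slot after the branch,
__SYNC_ELSE(full, loongson3_war, __WEAK_LLSC_MB) selects between them depending
on whether the Loongson3 workaround is configured.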