x86/percpu: Optimize raw_cpu_xchg()
author: Peter Zijlstra <peterz@infradead.org>
Wed, 27 Feb 2019 10:09:56 +0000 (11:09 +0100)
committer: Ingo Molnar <mingo@kernel.org>
Mon, 17 Jun 2019 10:43:44 +0000 (12:43 +0200)
Since raw_cpu_xchg() doesn't need to be IRQ-safe, like
this_cpu_xchg(), we can use a simple load-store instead of the cmpxchg
loop.

Signed-off-by: Peter Zijlstra (Intel) <peterz@infradead.org>
Cc: Linus Torvalds <torvalds@linux-foundation.org>
Cc: Peter Zijlstra <peterz@infradead.org>
Cc: Thomas Gleixner <tglx@linutronix.de>
Signed-off-by: Ingo Molnar <mingo@kernel.org>
arch/x86/include/asm/percpu.h

index f75cccc..2278797 100644 (file)
@@ -407,9 +407,21 @@ do {                                                                       \
 #define raw_cpu_or_1(pcp, val)         percpu_to_op(, "or", (pcp), val)
 #define raw_cpu_or_2(pcp, val)         percpu_to_op(, "or", (pcp), val)
 #define raw_cpu_or_4(pcp, val)         percpu_to_op(, "or", (pcp), val)
-#define raw_cpu_xchg_1(pcp, val)       percpu_xchg_op(, pcp, val)
-#define raw_cpu_xchg_2(pcp, val)       percpu_xchg_op(, pcp, val)
-#define raw_cpu_xchg_4(pcp, val)       percpu_xchg_op(, pcp, val)
+
+/*
+ * raw_cpu_xchg() can use a load-store since it is not required to be
+ * IRQ-safe.
+ */
+#define raw_percpu_xchg_op(var, nval)                                  \
+({                                                                     \
+       typeof(var) pxo_ret__ = raw_cpu_read(var);                      \
+       raw_cpu_write(var, (nval));                                     \
+       pxo_ret__;                                                      \
+})
+
+#define raw_cpu_xchg_1(pcp, val)       raw_percpu_xchg_op(pcp, val)
+#define raw_cpu_xchg_2(pcp, val)       raw_percpu_xchg_op(pcp, val)
+#define raw_cpu_xchg_4(pcp, val)       raw_percpu_xchg_op(pcp, val)
 
 #define this_cpu_read_1(pcp)           percpu_from_op(volatile, "mov", pcp)
 #define this_cpu_read_2(pcp)           percpu_from_op(volatile, "mov", pcp)
@@ -472,7 +484,7 @@ do {                                                                        \
 #define raw_cpu_and_8(pcp, val)                        percpu_to_op(, "and", (pcp), val)
 #define raw_cpu_or_8(pcp, val)                 percpu_to_op(, "or", (pcp), val)
 #define raw_cpu_add_return_8(pcp, val)         percpu_add_return_op(, pcp, val)
-#define raw_cpu_xchg_8(pcp, nval)              percpu_xchg_op(, pcp, nval)
+#define raw_cpu_xchg_8(pcp, nval)              raw_percpu_xchg_op(pcp, nval)
 #define raw_cpu_cmpxchg_8(pcp, oval, nval)     percpu_cmpxchg_op(, pcp, oval, nval)
 
 #define this_cpu_read_8(pcp)                   percpu_from_op(volatile, "mov", pcp)