Revert "x86/refcount: Work around GCC inlining bug"
authorIngo Molnar <mingo@kernel.org>
Wed, 19 Dec 2018 10:23:14 +0000 (11:23 +0100)
committerIngo Molnar <mingo@kernel.org>
Wed, 19 Dec 2018 11:00:09 +0000 (12:00 +0100)
This reverts commit 9e1725b410594911cc5981b6c7b4cea4ec054ca8.

See this commit for details about the revert:

  e769742d3584 ("Revert "x86/jump-labels: Macrofy inline assembly code to work around GCC inlining bugs"")

The conflict resolution for interaction with:

  288e4521f0f6 ("x86/asm: 'Simplify' GEN_*_RMWcc() macros")

was provided by Masahiro Yamada.

 Conflicts:
arch/x86/include/asm/refcount.h

Reported-by: Masahiro Yamada <yamada.masahiro@socionext.com>
Reviewed-by: Borislav Petkov <bp@alien8.de>
Reviewed-by: Thomas Gleixner <tglx@linutronix.de>
Cc: Juergen Gross <jgross@suse.com>
Cc: Richard Biener <rguenther@suse.de>
Cc: Kees Cook <keescook@chromium.org>
Cc: Segher Boessenkool <segher@kernel.crashing.org>
Cc: Ard Biesheuvel <ard.biesheuvel@linaro.org>
Cc: Peter Zijlstra <peterz@infradead.org>
Cc: Josh Poimboeuf <jpoimboe@redhat.com>
Cc: Nadav Amit <namit@vmware.com>
Cc: Linus Torvalds <torvalds@linux-foundation.org>
Cc: linux-kernel@vger.kernel.org
Signed-off-by: Ingo Molnar <mingo@kernel.org>
arch/x86/include/asm/refcount.h
arch/x86/kernel/macros.S

index a8b5e1e..dbaed55 100644 (file)
@@ -4,41 +4,6 @@
  * x86-specific implementation of refcount_t. Based on PAX_REFCOUNT from
  * PaX/grsecurity.
  */
-
-#ifdef __ASSEMBLY__
-
-#include <asm/asm.h>
-#include <asm/bug.h>
-
-.macro REFCOUNT_EXCEPTION counter:req
-       .pushsection .text..refcount
-111:   lea \counter, %_ASM_CX
-112:   ud2
-       ASM_UNREACHABLE
-       .popsection
-113:   _ASM_EXTABLE_REFCOUNT(112b, 113b)
-.endm
-
-/* Trigger refcount exception if refcount result is negative. */
-.macro REFCOUNT_CHECK_LT_ZERO counter:req
-       js 111f
-       REFCOUNT_EXCEPTION counter="\counter"
-.endm
-
-/* Trigger refcount exception if refcount result is zero or negative. */
-.macro REFCOUNT_CHECK_LE_ZERO counter:req
-       jz 111f
-       REFCOUNT_CHECK_LT_ZERO counter="\counter"
-.endm
-
-/* Trigger refcount exception unconditionally. */
-.macro REFCOUNT_ERROR counter:req
-       jmp 111f
-       REFCOUNT_EXCEPTION counter="\counter"
-.endm
-
-#else /* __ASSEMBLY__ */
-
 #include <linux/refcount.h>
 #include <asm/bug.h>
 
  * central refcount exception. The fixup address for the exception points
  * back to the regular execution flow in .text.
  */
+#define _REFCOUNT_EXCEPTION                            \
+       ".pushsection .text..refcount\n"                \
+       "111:\tlea %[var], %%" _ASM_CX "\n"             \
+       "112:\t" ASM_UD2 "\n"                           \
+       ASM_UNREACHABLE                                 \
+       ".popsection\n"                                 \
+       "113:\n"                                        \
+       _ASM_EXTABLE_REFCOUNT(112b, 113b)
+
+/* Trigger refcount exception if refcount result is negative. */
+#define REFCOUNT_CHECK_LT_ZERO                         \
+       "js 111f\n\t"                                   \
+       _REFCOUNT_EXCEPTION
+
+/* Trigger refcount exception if refcount result is zero or negative. */
+#define REFCOUNT_CHECK_LE_ZERO                         \
+       "jz 111f\n\t"                                   \
+       REFCOUNT_CHECK_LT_ZERO
+
+/* Trigger refcount exception unconditionally. */
+#define REFCOUNT_ERROR                                 \
+       "jmp 111f\n\t"                                  \
+       _REFCOUNT_EXCEPTION
 
 static __always_inline void refcount_add(unsigned int i, refcount_t *r)
 {
        asm volatile(LOCK_PREFIX "addl %1,%0\n\t"
-               "REFCOUNT_CHECK_LT_ZERO counter=\"%[counter]\""
-               : [counter] "+m" (r->refs.counter)
+               REFCOUNT_CHECK_LT_ZERO
+               : [var] "+m" (r->refs.counter)
                : "ir" (i)
                : "cc", "cx");
 }
@@ -63,32 +51,31 @@ static __always_inline void refcount_add(unsigned int i, refcount_t *r)
 static __always_inline void refcount_inc(refcount_t *r)
 {
        asm volatile(LOCK_PREFIX "incl %0\n\t"
-               "REFCOUNT_CHECK_LT_ZERO counter=\"%[counter]\""
-               : [counter] "+m" (r->refs.counter)
+               REFCOUNT_CHECK_LT_ZERO
+               : [var] "+m" (r->refs.counter)
                : : "cc", "cx");
 }
 
 static __always_inline void refcount_dec(refcount_t *r)
 {
        asm volatile(LOCK_PREFIX "decl %0\n\t"
-               "REFCOUNT_CHECK_LE_ZERO counter=\"%[counter]\""
-               : [counter] "+m" (r->refs.counter)
+               REFCOUNT_CHECK_LE_ZERO
+               : [var] "+m" (r->refs.counter)
                : : "cc", "cx");
 }
 
 static __always_inline __must_check
 bool refcount_sub_and_test(unsigned int i, refcount_t *r)
 {
-
        return GEN_BINARY_SUFFIXED_RMWcc(LOCK_PREFIX "subl",
-                                        "REFCOUNT_CHECK_LT_ZERO counter=\"%[var]\"",
+                                        REFCOUNT_CHECK_LT_ZERO,
                                         r->refs.counter, e, "er", i, "cx");
 }
 
 static __always_inline __must_check bool refcount_dec_and_test(refcount_t *r)
 {
        return GEN_UNARY_SUFFIXED_RMWcc(LOCK_PREFIX "decl",
-                                       "REFCOUNT_CHECK_LT_ZERO counter=\"%[var]\"",
+                                       REFCOUNT_CHECK_LT_ZERO,
                                        r->refs.counter, e, "cx");
 }
 
@@ -106,8 +93,8 @@ bool refcount_add_not_zero(unsigned int i, refcount_t *r)
 
                /* Did we try to increment from/to an undesirable state? */
                if (unlikely(c < 0 || c == INT_MAX || result < c)) {
-                       asm volatile("REFCOUNT_ERROR counter=\"%[counter]\""
-                                    : : [counter] "m" (r->refs.counter)
+                       asm volatile(REFCOUNT_ERROR
+                                    : : [var] "m" (r->refs.counter)
                                     : "cc", "cx");
                        break;
                }
@@ -122,6 +109,4 @@ static __always_inline __must_check bool refcount_inc_not_zero(refcount_t *r)
        return refcount_add_not_zero(1, r);
 }
 
-#endif /* __ASSEMBLY__ */
-
 #endif
index f1fe1d5..cee28c3 100644 (file)
@@ -7,4 +7,3 @@
  */
 
 #include <linux/compiler.h>
-#include <asm/refcount.h>