Revert "x86/objtool: Use asm macros to work around GCC inlining bugs"
author    Ingo Molnar <mingo@kernel.org>
Wed, 19 Dec 2018 10:23:27 +0000 (11:23 +0100)
committer Ingo Molnar <mingo@kernel.org>
Wed, 19 Dec 2018 11:00:23 +0000 (12:00 +0100)
This reverts commit c06c4d8090513f2974dfdbed2ac98634357ac475.

See this commit for details about the revert:

  e769742d3584 ("Revert "x86/jump-labels: Macrofy inline assembly code to work around GCC inlining bugs"")

Reported-by: Masahiro Yamada <yamada.masahiro@socionext.com>
Reviewed-by: Borislav Petkov <bp@alien8.de>
Reviewed-by: Thomas Gleixner <tglx@linutronix.de>
Cc: Juergen Gross <jgross@suse.com>
Cc: Richard Biener <rguenther@suse.de>
Cc: Kees Cook <keescook@chromium.org>
Cc: Segher Boessenkool <segher@kernel.crashing.org>
Cc: Ard Biesheuvel <ard.biesheuvel@linaro.org>
Cc: Peter Zijlstra <peterz@infradead.org>
Cc: Josh Poimboeuf <jpoimboe@redhat.com>
Cc: Nadav Amit <namit@vmware.com>
Cc: Linus Torvalds <torvalds@linux-foundation.org>
Cc: linux-kernel@vger.kernel.org
Signed-off-by: Ingo Molnar <mingo@kernel.org>
arch/x86/kernel/macros.S
include/linux/compiler.h

diff --git a/arch/x86/kernel/macros.S b/arch/x86/kernel/macros.S
index cee28c3..cfc1c7d 100644
--- a/arch/x86/kernel/macros.S
+++ b/arch/x86/kernel/macros.S
@@ -5,5 +5,3 @@
  * commonly used. The macros are precompiled into an assembly file which is later
  * assembled together with each compiled file.
  */
-
-#include <linux/compiler.h>
diff --git a/include/linux/compiler.h b/include/linux/compiler.h
index 06396c1..fc5004a 100644
--- a/include/linux/compiler.h
+++ b/include/linux/compiler.h
@@ -99,13 +99,22 @@ void ftrace_likely_update(struct ftrace_likely_data *f, int val,
  * unique, to convince GCC not to merge duplicate inline asm statements.
  */
 #define annotate_reachable() ({                                                \
-       asm volatile("ANNOTATE_REACHABLE counter=%c0"                   \
-                    : : "i" (__COUNTER__));                            \
+       asm volatile("%c0:\n\t"                                         \
+                    ".pushsection .discard.reachable\n\t"              \
+                    ".long %c0b - .\n\t"                               \
+                    ".popsection\n\t" : : "i" (__COUNTER__));          \
 })
 #define annotate_unreachable() ({                                      \
-       asm volatile("ANNOTATE_UNREACHABLE counter=%c0"                 \
-                    : : "i" (__COUNTER__));                            \
+       asm volatile("%c0:\n\t"                                         \
+                    ".pushsection .discard.unreachable\n\t"            \
+                    ".long %c0b - .\n\t"                               \
+                    ".popsection\n\t" : : "i" (__COUNTER__));          \
 })
+#define ASM_UNREACHABLE                                                        \
+       "999:\n\t"                                                      \
+       ".pushsection .discard.unreachable\n\t"                         \
+       ".long 999b - .\n\t"                                            \
+       ".popsection\n\t"
 #else
 #define annotate_reachable()
 #define annotate_unreachable()
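
For reference, the open-coded form restored in the hunk above is the pattern objtool relies on: the "i" (__COUNTER__) operand gives every expansion a unique numeric label via "%c0", and the .pushsection/.popsection pair records that label's PC-relative offset in a .discard section without emitting any instructions. The stand-alone sketch below is a user-space analogue (GCC and an ELF/x86 target assumed; my_annotate_unreachable is an illustrative name, not kernel API) showing the same expansion outside the kernel:

  /* Minimal sketch of the restored annotation pattern (GCC + ELF assumed).
   * Each expansion emits a unique local label and records "label - ."
   * (a PC-relative offset) in a discardable section; no instructions
   * are generated at the annotation site. */
  #include <stdio.h>

  #define my_annotate_unreachable() ({                                  \
          asm volatile("%c0:\n\t"                                       \
                       ".pushsection .discard.unreachable\n\t"          \
                       ".long %c0b - .\n\t"                             \
                       ".popsection\n\t" : : "i" (__COUNTER__));        \
  })

  int main(void)
  {
          puts("before the annotation");
          my_annotate_unreachable();      /* records this spot */
          puts("after the annotation");
          return 0;
  }

Building this with something like "gcc -O2 demo.c" and running "objdump -h" on the result should show the .discard.unreachable section next to .text; in the kernel, objtool reads such sections from each object file and the linker script then discards them.
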
@@ -293,45 +302,6 @@ static inline void *offset_to_ptr(const int *off)
        return (void *)((unsigned long)off + *off);
 }
 
-#else /* __ASSEMBLY__ */
-
-#ifdef __KERNEL__
-#ifndef LINKER_SCRIPT
-
-#ifdef CONFIG_STACK_VALIDATION
-.macro ANNOTATE_UNREACHABLE counter:req
-\counter:
-       .pushsection .discard.unreachable
-       .long \counter\()b -.
-       .popsection
-.endm
-
-.macro ANNOTATE_REACHABLE counter:req
-\counter:
-       .pushsection .discard.reachable
-       .long \counter\()b -.
-       .popsection
-.endm
-
-.macro ASM_UNREACHABLE
-999:
-       .pushsection .discard.unreachable
-       .long 999b - .
-       .popsection
-.endm
-#else /* CONFIG_STACK_VALIDATION */
-.macro ANNOTATE_UNREACHABLE counter:req
-.endm
-
-.macro ANNOTATE_REACHABLE counter:req
-.endm
-
-.macro ASM_UNREACHABLE
-.endm
-#endif /* CONFIG_STACK_VALIDATION */
-
-#endif /* LINKER_SCRIPT */
-#endif /* __KERNEL__ */
 #endif /* __ASSEMBLY__ */
 
 /* Compile time object size, -1 for unknown */
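
Both the assembly macros removed above and the restored C equivalents record the same format: a 32-bit value holding "annotated label minus the address of the record itself" (the ".long 999b - ." and ".long %c0b - ." lines). A consumer recovers the annotated instruction by adding the record's own address back. The helper below is a hypothetical sketch of that arithmetic only (decode_annotation is not an objtool function):

  #include <stdint.h>

  /* rec_addr: address of the .long entry inside .discard.(un)reachable
   * rec_val:  the stored 32-bit value, i.e. "label - ."
   * returns:  the address of the annotated instruction (the label)
   */
  static inline uint64_t decode_annotation(uint64_t rec_addr, int32_t rec_val)
  {
          /* label = . + (label - .) */
          return rec_addr + (int64_t)rec_val;
  }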