ARM: 9176/1: avoid literal references in inline assembly
author    Ard Biesheuvel <ardb@kernel.org>
Fri, 24 Dec 2021 09:31:00 +0000 (10:31 +0100)
committer Russell King (Oracle) <rmk+kernel@armlinux.org.uk>
Thu, 6 Jan 2022 12:58:58 +0000 (12:58 +0000)
Nathan reports that the new get_current() and per-CPU offset accessors
may cause problems at build time due to the use of a literal to hold the
address of the respective variables. This is due to the fact that LLD
before v14 does not support the PC-relative group relocations that are
normally used for this, and the fallback relies on literals but does not
emit the literal pools explicitly using the .ltorg directive.

./arch/arm/include/asm/current.h:53:6: error: out of range pc-relative fixup value
        asm(LOAD_SYM_ARMV6(%0, __current) : "=r"(cur));
            ^
./arch/arm/include/asm/insn.h:25:2: note: expanded from macro 'LOAD_SYM_ARMV6'
        "       ldr     " #reg ", =" #sym "                     nt"
        ^
<inline asm>:1:3: note: instantiated into assembly here
                ldr     r0, =__current
                ^
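
For reference, "ldr rN, =sym" is an assembler pseudo-instruction: the
assembler places the address of the symbol in a literal pool and turns the
instruction into a PC-relative load, whose ARM encoding can only reach a
few KiB. A minimal sketch of what the assembler expects (illustrative
only, not part of the patch; r0 is an arbitrary register):

        ldr     r0, =__current          @ loads &__current from a literal pool
        ldr     r0, [r0]
        ...
        .ltorg                          @ assembler dumps pending literals here;
                                        @ must land within range of the ldr above

Without such a pool within range (an explicit .ltorg or an end-of-section
pool), the fixup on the first ldr cannot be resolved, which is the error
shown above.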

Since emitting a literal pool in this particular case is not possible,
let's avoid LOAD_SYM_ARMV6() entirely, and use an ordinary C
assignment instead.

As it turns out, there are other such cases, and here, using .ltorg to
emit the literal pool within range of the LDR instruction would be
possible due to the presence of an unconditional branch right after it.
Unfortunately, putting .ltorg directives in subsections appears to
confuse the Clang inline assembler, resulting in similar errors even
though the .ltorg is most definitely within range.

So let's fix this by emitting the literal explicitly, and not rely on
the assembler to figure this out. This means we have to move the fallback
out of the LOAD_SYM_ARMV6() macro and into the callers.
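
For clarity, the pattern the fallback now emits by hand looks roughly like
this (an annotated sketch with an arbitrary register; the .alt.smp.init
bookkeeping is omitted):

        1:                              @ fast path continues here
                .subsection 1           @ out-of-line slow path
        2:      ldr     r0, 3f          @ load &__current from the word at 3:
                ldr     r0, [r0]        @ load the pointer itself
                b       1b              @ unconditional branch back
        3:      .long   __current       @ the literal, emitted explicitly
                .previous

Because the .long immediately follows the unconditional branch, it is
always within reach of the ldr at 2:, with no reliance on the assembler to
place a literal pool.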

Link: https://github.com/ClangBuiltLinux/linux/issues/1551
Fixes: 9c46929e7989 ("ARM: implement THREAD_INFO_IN_TASK for uniprocessor systems")
Reported-by: Nathan Chancellor <natechancellor@gmail.com>
Tested-by: Nathan Chancellor <nathan@kernel.org>
Signed-off-by: Ard Biesheuvel <ardb@kernel.org>
Signed-off-by: Russell King (Oracle) <rmk+kernel@armlinux.org.uk>
arch/arm/include/asm/current.h
arch/arm/include/asm/insn.h
arch/arm/include/asm/percpu.h

diff --git a/arch/arm/include/asm/current.h b/arch/arm/include/asm/current.h
index 69ecf4c..2f9d792 100644
--- a/arch/arm/include/asm/current.h
+++ b/arch/arm/include/asm/current.h
@@ -37,8 +37,16 @@ static inline __attribute_const__ struct task_struct *get_current(void)
 #ifdef CONFIG_CPU_V6
            "1:                                                 \n\t"
            "   .subsection 1                                   \n\t"
+#if !(defined(MODULE) && defined(CONFIG_ARM_MODULE_PLTS)) && \
+    !(defined(CONFIG_LD_IS_LLD) && CONFIG_LLD_VERSION < 140000)
            "2: " LOAD_SYM_ARMV6(%0, __current) "               \n\t"
            "   b       1b                                      \n\t"
+#else
+           "2: ldr     %0, 3f                                  \n\t"
+           "   ldr     %0, [%0]                                \n\t"
+           "   b       1b                                      \n\t"
+           "3: .long   __current                               \n\t"
+#endif
            "   .previous                                       \n\t"
            "   .pushsection \".alt.smp.init\", \"a\"           \n\t"
            "   .long   0b - .                                  \n\t"
@@ -46,8 +54,9 @@ static inline __attribute_const__ struct task_struct *get_current(void)
            "   .popsection                                     \n\t"
 #endif
            : "=r"(cur));
-#elif __LINUX_ARM_ARCH__>=7 || \
-      (defined(MODULE) && defined(CONFIG_ARM_MODULE_PLTS))
+#elif __LINUX_ARM_ARCH__ >= 7 || \
+      (defined(MODULE) && defined(CONFIG_ARM_MODULE_PLTS)) || \
+      (defined(CONFIG_LD_IS_LLD) && CONFIG_LLD_VERSION < 140000)
        cur = __current;
 #else
        asm(LOAD_SYM_ARMV6(%0, __current) : "=r"(cur));
diff --git a/arch/arm/include/asm/insn.h b/arch/arm/include/asm/insn.h
index a160ed3..faf3d1c 100644
--- a/arch/arm/include/asm/insn.h
+++ b/arch/arm/include/asm/insn.h
@@ -10,8 +10,6 @@
  * which should be sufficient for the core kernel as well as modules loaded
  * into the module region. (Not supported by LLD before release 14)
  */
-#if !(defined(MODULE) && defined(CONFIG_ARM_MODULE_PLTS)) && \
-    !(defined(CONFIG_LD_IS_LLD) && CONFIG_LLD_VERSION < 140000)
 #define LOAD_SYM_ARMV6(reg, sym)                                       \
        "       .globl  " #sym "                                \n\t"   \
        "       .reloc  10f, R_ARM_ALU_PC_G0_NC, " #sym "       \n\t"   \
        "10:    sub     " #reg ", pc, #8                        \n\t"   \
        "11:    sub     " #reg ", " #reg ", #4                  \n\t"   \
        "12:    ldr     " #reg ", [" #reg ", #0]                \n\t"
-#else
-#define LOAD_SYM_ARMV6(reg, sym)                                       \
-       "       ldr     " #reg ", =" #sym "                     \n\t"   \
-       "       ldr     " #reg ", [" #reg "]                    \n\t"
-#endif
 
 static inline unsigned long
 arm_gen_nop(void)
diff --git a/arch/arm/include/asm/percpu.h b/arch/arm/include/asm/percpu.h
index a4a0d38..28961d6 100644
--- a/arch/arm/include/asm/percpu.h
+++ b/arch/arm/include/asm/percpu.h
@@ -38,8 +38,16 @@ static inline unsigned long __my_cpu_offset(void)
 #ifdef CONFIG_CPU_V6
            "1:                                                 \n\t"
            "   .subsection 1                                   \n\t"
+#if !(defined(MODULE) && defined(CONFIG_ARM_MODULE_PLTS)) && \
+    !(defined(CONFIG_LD_IS_LLD) && CONFIG_LLD_VERSION < 140000)
            "2: " LOAD_SYM_ARMV6(%0, __per_cpu_offset) "        \n\t"
            "   b       1b                                      \n\t"
+#else
+           "2: ldr     %0, 3f                                  \n\t"
+           "   ldr     %0, [%0]                                \n\t"
+           "   b       1b                                      \n\t"
+           "3: .long   __per_cpu_offset                        \n\t"
+#endif
            "   .previous                                       \n\t"
            "   .pushsection \".alt.smp.init\", \"a\"           \n\t"
            "   .long   0b - .                                  \n\t"