arm64: entry: Don't assume tramp_vectors is the start of the vectors
author James Morse <james.morse@arm.com>
Wed, 24 Nov 2021 13:40:09 +0000 (13:40 +0000)
committer Greg Kroah-Hartman <gregkh@linuxfoundation.org>
Fri, 11 Mar 2022 11:22:34 +0000 (12:22 +0100)
commit ed50da7764535f1e24432ded289974f2bf2b0c5a upstream.

The tramp_ventry macro uses tramp_vectors as the address of the vectors
when calculating which ventry in the 'full fat' vectors to branch to.

As long as there is only one set of tramp_vectors, this assumption holds.
Adding multiple sets of vectors will break it.

Move the generation of the vectors to a macro, and pass the start
of the vectors as an argument to tramp_ventry.
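
For illustration only (not part of this patch): a minimal sketch of the
idea, using made-up macro names (example_ventry, example_vector). With the
start label passed in as an argument and generated with \@, every expansion
of the vector-generating macro gets its own base label, so the same ventry
body computes the offset within whichever copy it was expanded in:

	/* Hypothetical names, shown only to illustrate the \vector_start
	 * parameter and the per-expansion \@ label. */
	.macro	example_ventry, vector_start
1:
	// offset of this slot within the copy that contains it
	add	x30, x30, #(1b - \vector_start + 4)
	.endm

	.macro	example_vector
.Lvector_start\@:			// unique label per expansion
	example_ventry	.Lvector_start\@
	.endm

	example_vector			// copy 1: offsets relative to its own start
	example_vector			// copy 2: same body, different base label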

Reviewed-by: Russell King (Oracle) <rmk+kernel@armlinux.org.uk>
Reviewed-by: Catalin Marinas <catalin.marinas@arm.com>
Signed-off-by: James Morse <james.morse@arm.com>
Signed-off-by: Greg Kroah-Hartman <gregkh@linuxfoundation.org>
arch/arm64/kernel/entry.S

index 1057f4c25f077139c340d65d1f15bcfe38e0cb69..b2fb051fcf64ba260e737d9b3e75d5178a7fdec5 100644
@@ -652,7 +652,7 @@ alternative_else_nop_endif
        sub     \dst, \dst, PAGE_SIZE
        .endm
 
-       .macro tramp_ventry, regsize = 64
+       .macro tramp_ventry, vector_start, regsize
        .align  7
 1:
        .if     \regsize == 64
@@ -675,10 +675,10 @@ alternative_insn isb, nop, ARM64_WORKAROUND_QCOM_FALKOR_E1003
        ldr     x30, =vectors
 #endif
 alternative_if_not ARM64_WORKAROUND_CAVIUM_TX2_219_PRFM
-       prfm    plil1strm, [x30, #(1b - tramp_vectors)]
+       prfm    plil1strm, [x30, #(1b - \vector_start)]
 alternative_else_nop_endif
        msr     vbar_el1, x30
-       add     x30, x30, #(1b - tramp_vectors + 4)
+       add     x30, x30, #(1b - \vector_start + 4)
        isb
        ret
 .org 1b + 128  // Did we overflow the ventry slot?
@@ -697,19 +697,21 @@ alternative_else_nop_endif
        sb
        .endm
 
-       .align  11
-SYM_CODE_START_NOALIGN(tramp_vectors)
+       .macro  generate_tramp_vector
+.Lvector_start\@:
        .space  0x400
 
-       tramp_ventry
-       tramp_ventry
-       tramp_ventry
-       tramp_ventry
+       .rept   4
+       tramp_ventry    .Lvector_start\@, 64
+       .endr
+       .rept   4
+       tramp_ventry    .Lvector_start\@, 32
+       .endr
+       .endm
 
-       tramp_ventry    32
-       tramp_ventry    32
-       tramp_ventry    32
-       tramp_ventry    32
+       .align  11
+SYM_CODE_START_NOALIGN(tramp_vectors)
+       generate_tramp_vector
 SYM_CODE_END(tramp_vectors)
 
 SYM_CODE_START(tramp_exit_native)