arm64: kernel: Convert to modern annotations for assembly functions
author	Mark Brown <broonie@kernel.org>
Fri, 1 May 2020 11:54:29 +0000 (12:54 +0100)
committer	Will Deacon <will@kernel.org>
Mon, 4 May 2020 11:46:03 +0000 (12:46 +0100)
In an effort to clarify and simplify the annotation of assembly functions
in the kernel, new macros have been introduced. These replace ENTRY and
ENDPROC, and also add a new annotation for static functions, which
previously had no ENTRY equivalent. Update the annotations in the core
kernel code to use the new macros.
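
As an illustration of the pattern (a sketch only, not a hunk from this
patch; "my_func" is a made-up name), a C-callable assembly function
previously annotated as

	ENTRY(my_func)
		...
		ret
	ENDPROC(my_func)

is now written as

	SYM_FUNC_START(my_func)
		...
		ret
	SYM_FUNC_END(my_func)

Code that does not follow the C calling convention, such as exception
vectors or code run with the MMU off, uses SYM_CODE_START/SYM_CODE_END
instead, and file-local symbols that previously had only a bare label
take the _LOCAL variants such as SYM_CODE_START_LOCAL.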

Signed-off-by: Mark Brown <broonie@kernel.org>
Acked-by: Mark Rutland <mark.rutland@arm.com>
Link: https://lore.kernel.org/r/20200501115430.37315-3-broonie@kernel.org
Signed-off-by: Will Deacon <will@kernel.org>
arch/arm64/kernel/cpu-reset.S
arch/arm64/kernel/efi-rt-wrapper.S
arch/arm64/kernel/entry-fpsimd.S
arch/arm64/kernel/hibernate-asm.S
arch/arm64/kernel/hyp-stub.S
arch/arm64/kernel/probes/kprobes_trampoline.S
arch/arm64/kernel/reloc_test_syms.S
arch/arm64/kernel/relocate_kernel.S
arch/arm64/kernel/sleep.S
arch/arm64/kernel/smccc-call.S

diff --git a/arch/arm64/kernel/cpu-reset.S b/arch/arm64/kernel/cpu-reset.S
index 38087b4..4a18055 100644
--- a/arch/arm64/kernel/cpu-reset.S
+++ b/arch/arm64/kernel/cpu-reset.S
@@ -29,7 +29,7 @@
  * branch to what would be the reset vector. It must be executed with the
  * flat identity mapping.
  */
-ENTRY(__cpu_soft_restart)
+SYM_CODE_START(__cpu_soft_restart)
        /* Clear sctlr_el1 flags. */
        mrs     x12, sctlr_el1
        mov_q   x13, SCTLR_ELx_FLAGS
@@ -47,6 +47,6 @@ ENTRY(__cpu_soft_restart)
        mov     x1, x3                          // arg1
        mov     x2, x4                          // arg2
        br      x8
-ENDPROC(__cpu_soft_restart)
+SYM_CODE_END(__cpu_soft_restart)
 
 .popsection
diff --git a/arch/arm64/kernel/efi-rt-wrapper.S b/arch/arm64/kernel/efi-rt-wrapper.S
index 3fc7110..1192c4b 100644
--- a/arch/arm64/kernel/efi-rt-wrapper.S
+++ b/arch/arm64/kernel/efi-rt-wrapper.S
@@ -5,7 +5,7 @@
 
 #include <linux/linkage.h>
 
-ENTRY(__efi_rt_asm_wrapper)
+SYM_FUNC_START(__efi_rt_asm_wrapper)
        stp     x29, x30, [sp, #-32]!
        mov     x29, sp
 
@@ -35,4 +35,4 @@ ENTRY(__efi_rt_asm_wrapper)
        b.ne    0f
        ret
 0:     b       efi_handle_corrupted_x18        // tail call
-ENDPROC(__efi_rt_asm_wrapper)
+SYM_FUNC_END(__efi_rt_asm_wrapper)
diff --git a/arch/arm64/kernel/entry-fpsimd.S b/arch/arm64/kernel/entry-fpsimd.S
index 0f24eae..f880dd6 100644
--- a/arch/arm64/kernel/entry-fpsimd.S
+++ b/arch/arm64/kernel/entry-fpsimd.S
@@ -16,34 +16,34 @@
  *
  * x0 - pointer to struct fpsimd_state
  */
-ENTRY(fpsimd_save_state)
+SYM_FUNC_START(fpsimd_save_state)
        fpsimd_save x0, 8
        ret
-ENDPROC(fpsimd_save_state)
+SYM_FUNC_END(fpsimd_save_state)
 
 /*
  * Load the FP registers.
  *
  * x0 - pointer to struct fpsimd_state
  */
-ENTRY(fpsimd_load_state)
+SYM_FUNC_START(fpsimd_load_state)
        fpsimd_restore x0, 8
        ret
-ENDPROC(fpsimd_load_state)
+SYM_FUNC_END(fpsimd_load_state)
 
 #ifdef CONFIG_ARM64_SVE
-ENTRY(sve_save_state)
+SYM_FUNC_START(sve_save_state)
        sve_save 0, x1, 2
        ret
-ENDPROC(sve_save_state)
+SYM_FUNC_END(sve_save_state)
 
-ENTRY(sve_load_state)
+SYM_FUNC_START(sve_load_state)
        sve_load 0, x1, x2, 3, x4
        ret
-ENDPROC(sve_load_state)
+SYM_FUNC_END(sve_load_state)
 
-ENTRY(sve_get_vl)
+SYM_FUNC_START(sve_get_vl)
        _sve_rdvl       0, 1
        ret
-ENDPROC(sve_get_vl)
+SYM_FUNC_END(sve_get_vl)
 #endif /* CONFIG_ARM64_SVE */
diff --git a/arch/arm64/kernel/hibernate-asm.S b/arch/arm64/kernel/hibernate-asm.S
index 6532105..8ccca66 100644
--- a/arch/arm64/kernel/hibernate-asm.S
+++ b/arch/arm64/kernel/hibernate-asm.S
@@ -65,7 +65,7 @@
  * x5: physical address of a  zero page that remains zero after resume
  */
 .pushsection    ".hibernate_exit.text", "ax"
-ENTRY(swsusp_arch_suspend_exit)
+SYM_CODE_START(swsusp_arch_suspend_exit)
        /*
         * We execute from ttbr0, change ttbr1 to our copied linear map tables
         * with a break-before-make via the zero page
@@ -110,7 +110,7 @@ ENTRY(swsusp_arch_suspend_exit)
        cbz     x24, 3f         /* Do we need to re-initialise EL2? */
        hvc     #0
 3:     ret
-ENDPROC(swsusp_arch_suspend_exit)
+SYM_CODE_END(swsusp_arch_suspend_exit)
 
 /*
  * Restore the hyp stub.
@@ -119,15 +119,15 @@ ENDPROC(swsusp_arch_suspend_exit)
  *
  * x24: The physical address of __hyp_stub_vectors
  */
-el1_sync:
+SYM_CODE_START_LOCAL(el1_sync)
        msr     vbar_el2, x24
        eret
-ENDPROC(el1_sync)
+SYM_CODE_END(el1_sync)
 
 .macro invalid_vector  label
-\label:
+SYM_CODE_START_LOCAL(\label)
        b \label
-ENDPROC(\label)
+SYM_CODE_END(\label)
 .endm
 
        invalid_vector  el2_sync_invalid
@@ -141,7 +141,7 @@ ENDPROC(\label)
 
 /* el2 vectors - switch el2 here while we restore the memory image. */
        .align 11
-ENTRY(hibernate_el2_vectors)
+SYM_CODE_START(hibernate_el2_vectors)
        ventry  el2_sync_invalid                // Synchronous EL2t
        ventry  el2_irq_invalid                 // IRQ EL2t
        ventry  el2_fiq_invalid                 // FIQ EL2t
@@ -161,6 +161,6 @@ ENTRY(hibernate_el2_vectors)
        ventry  el1_irq_invalid                 // IRQ 32-bit EL1
        ventry  el1_fiq_invalid                 // FIQ 32-bit EL1
        ventry  el1_error_invalid               // Error 32-bit EL1
-END(hibernate_el2_vectors)
+SYM_CODE_END(hibernate_el2_vectors)
 
 .popsection
diff --git a/arch/arm64/kernel/hyp-stub.S b/arch/arm64/kernel/hyp-stub.S
index e473ead..160f588 100644
--- a/arch/arm64/kernel/hyp-stub.S
+++ b/arch/arm64/kernel/hyp-stub.S
@@ -21,7 +21,7 @@
 
        .align 11
 
-ENTRY(__hyp_stub_vectors)
+SYM_CODE_START(__hyp_stub_vectors)
        ventry  el2_sync_invalid                // Synchronous EL2t
        ventry  el2_irq_invalid                 // IRQ EL2t
        ventry  el2_fiq_invalid                 // FIQ EL2t
@@ -41,11 +41,11 @@ ENTRY(__hyp_stub_vectors)
        ventry  el1_irq_invalid                 // IRQ 32-bit EL1
        ventry  el1_fiq_invalid                 // FIQ 32-bit EL1
        ventry  el1_error_invalid               // Error 32-bit EL1
-ENDPROC(__hyp_stub_vectors)
+SYM_CODE_END(__hyp_stub_vectors)
 
        .align 11
 
-el1_sync:
+SYM_CODE_START_LOCAL(el1_sync)
        cmp     x0, #HVC_SET_VECTORS
        b.ne    2f
        msr     vbar_el2, x1
@@ -68,12 +68,12 @@ el1_sync:
 
 9:     mov     x0, xzr
        eret
-ENDPROC(el1_sync)
+SYM_CODE_END(el1_sync)
 
 .macro invalid_vector  label
-\label:
+SYM_CODE_START_LOCAL(\label)
        b \label
-ENDPROC(\label)
+SYM_CODE_END(\label)
 .endm
 
        invalid_vector  el2_sync_invalid
@@ -106,15 +106,15 @@ ENDPROC(\label)
  * initialisation entry point.
  */
 
-ENTRY(__hyp_set_vectors)
+SYM_FUNC_START(__hyp_set_vectors)
        mov     x1, x0
        mov     x0, #HVC_SET_VECTORS
        hvc     #0
        ret
-ENDPROC(__hyp_set_vectors)
+SYM_FUNC_END(__hyp_set_vectors)
 
-ENTRY(__hyp_reset_vectors)
+SYM_FUNC_START(__hyp_reset_vectors)
        mov     x0, #HVC_RESET_VECTORS
        hvc     #0
        ret
-ENDPROC(__hyp_reset_vectors)
+SYM_FUNC_END(__hyp_reset_vectors)
diff --git a/arch/arm64/kernel/probes/kprobes_trampoline.S b/arch/arm64/kernel/probes/kprobes_trampoline.S
index 45dce03..890ca72 100644
--- a/arch/arm64/kernel/probes/kprobes_trampoline.S
+++ b/arch/arm64/kernel/probes/kprobes_trampoline.S
@@ -61,7 +61,7 @@
        ldp x28, x29, [sp, #S_X28]
        .endm
 
-ENTRY(kretprobe_trampoline)
+SYM_CODE_START(kretprobe_trampoline)
        sub sp, sp, #S_FRAME_SIZE
 
        save_all_base_regs
@@ -79,4 +79,4 @@ ENTRY(kretprobe_trampoline)
        add sp, sp, #S_FRAME_SIZE
        ret
 
-ENDPROC(kretprobe_trampoline)
+SYM_CODE_END(kretprobe_trampoline)
diff --git a/arch/arm64/kernel/reloc_test_syms.S b/arch/arm64/kernel/reloc_test_syms.S
index 16a34f1..c50f45f 100644
--- a/arch/arm64/kernel/reloc_test_syms.S
+++ b/arch/arm64/kernel/reloc_test_syms.S
@@ -5,81 +5,81 @@
 
 #include <linux/linkage.h>
 
-ENTRY(absolute_data64)
+SYM_FUNC_START(absolute_data64)
        ldr     x0, 0f
        ret
 0:     .quad   sym64_abs
-ENDPROC(absolute_data64)
+SYM_FUNC_END(absolute_data64)
 
-ENTRY(absolute_data32)
+SYM_FUNC_START(absolute_data32)
        ldr     w0, 0f
        ret
 0:     .long   sym32_abs
-ENDPROC(absolute_data32)
+SYM_FUNC_END(absolute_data32)
 
-ENTRY(absolute_data16)
+SYM_FUNC_START(absolute_data16)
        adr     x0, 0f
        ldrh    w0, [x0]
        ret
 0:     .short  sym16_abs, 0
-ENDPROC(absolute_data16)
+SYM_FUNC_END(absolute_data16)
 
-ENTRY(signed_movw)
+SYM_FUNC_START(signed_movw)
        movz    x0, #:abs_g2_s:sym64_abs
        movk    x0, #:abs_g1_nc:sym64_abs
        movk    x0, #:abs_g0_nc:sym64_abs
        ret
-ENDPROC(signed_movw)
+SYM_FUNC_END(signed_movw)
 
-ENTRY(unsigned_movw)
+SYM_FUNC_START(unsigned_movw)
        movz    x0, #:abs_g3:sym64_abs
        movk    x0, #:abs_g2_nc:sym64_abs
        movk    x0, #:abs_g1_nc:sym64_abs
        movk    x0, #:abs_g0_nc:sym64_abs
        ret
-ENDPROC(unsigned_movw)
+SYM_FUNC_END(unsigned_movw)
 
        .align  12
        .space  0xff8
-ENTRY(relative_adrp)
+SYM_FUNC_START(relative_adrp)
        adrp    x0, sym64_rel
        add     x0, x0, #:lo12:sym64_rel
        ret
-ENDPROC(relative_adrp)
+SYM_FUNC_END(relative_adrp)
 
        .align  12
        .space  0xffc
-ENTRY(relative_adrp_far)
+SYM_FUNC_START(relative_adrp_far)
        adrp    x0, memstart_addr
        add     x0, x0, #:lo12:memstart_addr
        ret
-ENDPROC(relative_adrp_far)
+SYM_FUNC_END(relative_adrp_far)
 
-ENTRY(relative_adr)
+SYM_FUNC_START(relative_adr)
        adr     x0, sym64_rel
        ret
-ENDPROC(relative_adr)
+SYM_FUNC_END(relative_adr)
 
-ENTRY(relative_data64)
+SYM_FUNC_START(relative_data64)
        adr     x1, 0f
        ldr     x0, [x1]
        add     x0, x0, x1
        ret
 0:     .quad   sym64_rel - .
-ENDPROC(relative_data64)
+SYM_FUNC_END(relative_data64)
 
-ENTRY(relative_data32)
+SYM_FUNC_START(relative_data32)
        adr     x1, 0f
        ldr     w0, [x1]
        add     x0, x0, x1
        ret
 0:     .long   sym64_rel - .
-ENDPROC(relative_data32)
+SYM_FUNC_END(relative_data32)
 
-ENTRY(relative_data16)
+SYM_FUNC_START(relative_data16)
        adr     x1, 0f
        ldrsh   w0, [x1]
        add     x0, x0, x1
        ret
 0:     .short  sym64_rel - ., 0
-ENDPROC(relative_data16)
+SYM_FUNC_END(relative_data16)
diff --git a/arch/arm64/kernel/relocate_kernel.S b/arch/arm64/kernel/relocate_kernel.S
index c40ce49..542d6ed 100644
--- a/arch/arm64/kernel/relocate_kernel.S
+++ b/arch/arm64/kernel/relocate_kernel.S
@@ -26,7 +26,7 @@
  * control_code_page, a special page which has been set up to be preserved
  * during the copy operation.
  */
-ENTRY(arm64_relocate_new_kernel)
+SYM_CODE_START(arm64_relocate_new_kernel)
 
        /* Setup the list loop variables. */
        mov     x18, x2                         /* x18 = dtb address */
@@ -111,7 +111,7 @@ ENTRY(arm64_relocate_new_kernel)
        mov     x3, xzr
        br      x17
 
-ENDPROC(arm64_relocate_new_kernel)
+SYM_CODE_END(arm64_relocate_new_kernel)
 
 .align 3       /* To keep the 64-bit values below naturally aligned. */
 
diff --git a/arch/arm64/kernel/sleep.S b/arch/arm64/kernel/sleep.S
index 7b2f2e6..c1bf43c 100644
--- a/arch/arm64/kernel/sleep.S
+++ b/arch/arm64/kernel/sleep.S
@@ -62,7 +62,7 @@
  *
  *  x0 = struct sleep_stack_data area
  */
-ENTRY(__cpu_suspend_enter)
+SYM_FUNC_START(__cpu_suspend_enter)
        stp     x29, lr, [x0, #SLEEP_STACK_DATA_CALLEE_REGS]
        stp     x19, x20, [x0,#SLEEP_STACK_DATA_CALLEE_REGS+16]
        stp     x21, x22, [x0,#SLEEP_STACK_DATA_CALLEE_REGS+32]
@@ -95,10 +95,10 @@ ENTRY(__cpu_suspend_enter)
        ldp     x29, lr, [sp], #16
        mov     x0, #1
        ret
-ENDPROC(__cpu_suspend_enter)
+SYM_FUNC_END(__cpu_suspend_enter)
 
        .pushsection ".idmap.text", "awx"
-ENTRY(cpu_resume)
+SYM_CODE_START(cpu_resume)
        bl      el2_setup               // if in EL2 drop to EL1 cleanly
        mov     x0, #ARM64_CPU_RUNTIME
        bl      __cpu_setup
@@ -107,11 +107,11 @@ ENTRY(cpu_resume)
        bl      __enable_mmu
        ldr     x8, =_cpu_resume
        br      x8
-ENDPROC(cpu_resume)
+SYM_CODE_END(cpu_resume)
        .ltorg
        .popsection
 
-ENTRY(_cpu_resume)
+SYM_FUNC_START(_cpu_resume)
        mrs     x1, mpidr_el1
        adr_l   x8, mpidr_hash          // x8 = struct mpidr_hash virt address
 
@@ -147,4 +147,4 @@ ENTRY(_cpu_resume)
        ldp     x29, lr, [x29]
        mov     x0, #0
        ret
-ENDPROC(_cpu_resume)
+SYM_FUNC_END(_cpu_resume)
diff --git a/arch/arm64/kernel/smccc-call.S b/arch/arm64/kernel/smccc-call.S
index 5465527..1f93809 100644
--- a/arch/arm64/kernel/smccc-call.S
+++ b/arch/arm64/kernel/smccc-call.S
@@ -30,9 +30,9 @@
  *               unsigned long a6, unsigned long a7, struct arm_smccc_res *res,
  *               struct arm_smccc_quirk *quirk)
  */
-ENTRY(__arm_smccc_smc)
+SYM_FUNC_START(__arm_smccc_smc)
        SMCCC   smc
-ENDPROC(__arm_smccc_smc)
+SYM_FUNC_END(__arm_smccc_smc)
 EXPORT_SYMBOL(__arm_smccc_smc)
 
 /*
@@ -41,7 +41,7 @@ EXPORT_SYMBOL(__arm_smccc_smc)
  *               unsigned long a6, unsigned long a7, struct arm_smccc_res *res,
  *               struct arm_smccc_quirk *quirk)
  */
-ENTRY(__arm_smccc_hvc)
+SYM_FUNC_START(__arm_smccc_hvc)
        SMCCC   hvc
-ENDPROC(__arm_smccc_hvc)
+SYM_FUNC_END(__arm_smccc_hvc)
 EXPORT_SYMBOL(__arm_smccc_hvc)