x86/realmode: Add SEV-ES specific trampoline entry point
author Joerg Roedel <jroedel@suse.de>
Mon, 7 Sep 2020 13:16:06 +0000 (15:16 +0200)
committer Borislav Petkov <bp@suse.de>
Wed, 9 Sep 2020 09:33:20 +0000 (11:33 +0200)
The code at the trampoline entry point is executed in real mode. In
real mode, #VC exceptions can't be handled, so anything that might cause
such an exception must be avoided.

In the standard trampoline entry code, these are the WBINVD instruction
and the call to verify_cpu(), neither of which is needed anyway when
running as an SEV-ES guest.
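
For illustration, a minimal sketch (not part of the patch) of the two
offenders: WBINVD is intercepted for an SEV-ES guest, and verify_cpu()
relies on CPUID, which is intercepted as well, so either would raise
#VC from this code path:

	/*
	 * Illustration only: both instructions are intercepted for an
	 * SEV-ES guest and raise #VC, which the real-mode trampoline
	 * cannot handle.
	 */
	static inline void vc_trigger_examples(void)
	{
		unsigned int eax = 0, ebx, ecx, edx;

		asm volatile("wbinvd");		/* cache flush, intercepted */
		asm volatile("cpuid"		/* used throughout verify_cpu() */
			     : "+a" (eax), "=b" (ebx), "=c" (ecx), "=d" (edx));
	}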

Signed-off-by: Joerg Roedel <jroedel@suse.de>
Signed-off-by: Borislav Petkov <bp@suse.de>
Link: https://lkml.kernel.org/r/20200907131613.12703-66-joro@8bytes.org
arch/x86/include/asm/realmode.h
arch/x86/realmode/rm/header.S
arch/x86/realmode/rm/trampoline_64.S

arch/x86/include/asm/realmode.h
index 96118fb..4d4d853 100644
@@ -21,6 +21,9 @@ struct real_mode_header {
        /* SMP trampoline */
        u32     trampoline_start;
        u32     trampoline_header;
+#ifdef CONFIG_AMD_MEM_ENCRYPT
+       u32     sev_es_trampoline_start;
+#endif
 #ifdef CONFIG_X86_64
        u32     trampoline_pgd;
 #endif
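
A hedged sketch of how a consumer might pick the AP start IP using the
new field (sev_es_active() and the call site are assumptions here, not
part of this patch):

	/*
	 * Sketch: choose the 16-bit entry point for AP bring-up.
	 * sev_es_active() is assumed; the real predicate and call site
	 * may differ.
	 */
	static unsigned long trampoline_start_ip(void)
	{
		if (IS_ENABLED(CONFIG_AMD_MEM_ENCRYPT) && sev_es_active())
			return real_mode_header->sev_es_trampoline_start;

		return real_mode_header->trampoline_start;
	}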
arch/x86/realmode/rm/header.S
index af04512..8c1db5b 100644
@@ -20,6 +20,9 @@ SYM_DATA_START(real_mode_header)
        /* SMP trampoline */
        .long   pa_trampoline_start
        .long   pa_trampoline_header
+#ifdef CONFIG_AMD_MEM_ENCRYPT
+       .long   pa_sev_es_trampoline_start
+#endif
 #ifdef CONFIG_X86_64
        .long   pa_trampoline_pgd;
 #endif
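
The .long table above must stay in lock-step with struct
real_mode_header, since both describe the same header bytes in the
realmode blob. A standalone sketch of the invariant (plain C, limited
to the fields visible in the two hunks, assuming
CONFIG_AMD_MEM_ENCRYPT=y and CONFIG_X86_64=y):

	#include <stddef.h>
	#include <stdint.h>

	/* Standalone mirror of the fields visible in the hunks above. */
	struct rm_header_excerpt {
		uint32_t trampoline_start;	  /* .long pa_trampoline_start */
		uint32_t trampoline_header;	  /* .long pa_trampoline_header */
		uint32_t sev_es_trampoline_start; /* .long pa_sev_es_trampoline_start */
		uint32_t trampoline_pgd;	  /* .long pa_trampoline_pgd */
	};

	/* A new .long shifts every later field; the struct must follow suit. */
	_Static_assert(offsetof(struct rm_header_excerpt, sev_es_trampoline_start) == 8,
		       "C struct and header.S table out of sync");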
arch/x86/realmode/rm/trampoline_64.S
index 251758e..84c5d1b 100644
@@ -56,6 +56,7 @@ SYM_CODE_START(trampoline_start)
        testl   %eax, %eax              # Check for return code
        jnz     no_longmode
 
+.Lswitch_to_protected:
        /*
         * GDT tables in non default location kernel can be beyond 16MB and
         * lgdt will not be able to load the address as in real mode default
@@ -80,6 +81,25 @@ no_longmode:
        jmp no_longmode
 SYM_CODE_END(trampoline_start)
 
+#ifdef CONFIG_AMD_MEM_ENCRYPT
+/* SEV-ES supports non-zero IP for entry points - no alignment needed */
+SYM_CODE_START(sev_es_trampoline_start)
+       cli                     # We should be safe anyway
+
+       LJMPW_RM(1f)
+1:
+       mov     %cs, %ax        # Code and data in the same place
+       mov     %ax, %ds
+       mov     %ax, %es
+       mov     %ax, %ss
+
+       # Setup stack
+       movl    $rm_stack_end, %esp
+
+       jmp     .Lswitch_to_protected
+SYM_CODE_END(sev_es_trampoline_start)
+#endif /* CONFIG_AMD_MEM_ENCRYPT */
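
The initial LJMPW_RM() far jump forces %cs to the real-mode segment so
that the following moves copy a meaningful selector into the data
segment registers before the code joins the common path at
.Lswitch_to_protected. In the realmode helpers, LJMPW_RM() is
open-coded roughly as follows (a sketch; the segment symbol is resolved
when the realmode blob is built):

	/*
	 * 0xea is the 16-bit far-jmp opcode; gas can't use a relocatable
	 * symbol for the segment operand of ljmpw, hence the raw bytes.
	 */
	#define LJMPW_RM(to)	.byte 0xea ; .word (to), real_mode_seg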
+
 #include "../kernel/verify_cpu.S"
 
        .section ".text32","ax"