x86/ibt,ftrace: Make function-graph play nice
authorPeter Zijlstra <peterz@infradead.org>
Tue, 16 Aug 2022 08:26:57 +0000 (05:26 -0300)
committerGreg Kroah-Hartman <gregkh@linuxfoundation.org>
Sun, 21 Aug 2022 13:17:48 +0000 (15:17 +0200)
commit e52fc2cf3f662828cc0d51c4b73bed73ad275fce upstream.

The return trampoline must not use an indirect branch to return; while this
preserves the RSB, it is fundamentally incompatible with IBT. Instead,
use a retpoline-like ROP gadget that defeats IBT while not unbalancing
the RSB.

And since ftrace_stub is no longer a plain RET, don't use it to copy
from. Since RET is a trivial instruction, poke it directly.

Signed-off-by: Peter Zijlstra (Intel) <peterz@infradead.org>
Acked-by: Josh Poimboeuf <jpoimboe@redhat.com>
Link: https://lore.kernel.org/r/20220308154318.347296408@infradead.org
[cascardo: remove ENDBR]
Signed-off-by: Thadeu Lima de Souza Cascardo <cascardo@canonical.com>
Signed-off-by: Greg Kroah-Hartman <gregkh@linuxfoundation.org>
arch/x86/kernel/ftrace.c
arch/x86/kernel/ftrace_64.S

index 5080f578236a5800548b757d016a5937a4d64f7c..8160d1dc6ed39bfaa0e95987df85bc02e92841bb 100644 (file)
@@ -322,12 +322,12 @@ create_trampoline(struct ftrace_ops *ops, unsigned int *tramp_size)
        unsigned long offset;
        unsigned long npages;
        unsigned long size;
-       unsigned long retq;
        unsigned long *ptr;
        void *trampoline;
        void *ip;
        /* 48 8b 15 <offset> is movq <offset>(%rip), %rdx */
        unsigned const char op_ref[] = { 0x48, 0x8b, 0x15 };
+       unsigned const char retq[] = { RET_INSN_OPCODE, INT3_INSN_OPCODE };
        union ftrace_op_code_union op_ptr;
        int ret;
 
@@ -365,12 +365,7 @@ create_trampoline(struct ftrace_ops *ops, unsigned int *tramp_size)
                goto fail;
 
        ip = trampoline + size;
-
-       /* The trampoline ends with ret(q) */
-       retq = (unsigned long)ftrace_stub;
-       ret = copy_from_kernel_nofault(ip, (void *)retq, RET_SIZE);
-       if (WARN_ON(ret < 0))
-               goto fail;
+       memcpy(ip, retq, RET_SIZE);
 
        /* No need to test direct calls on created trampolines */
        if (ops->flags & FTRACE_OPS_FL_SAVE_REGS) {
index d6af81d1b788367bb2b72d17d00609ce9075b88d..6cc14a835991d8481819ee395410af0c99cc36f3 100644 (file)
@@ -181,7 +181,6 @@ SYM_INNER_LABEL(ftrace_graph_call, SYM_L_GLOBAL)
 
 /*
  * This is weak to keep gas from relaxing the jumps.
- * It is also used to copy the RET for trampolines.
  */
 SYM_INNER_LABEL_ALIGN(ftrace_stub, SYM_L_WEAK)
        UNWIND_HINT_FUNC
@@ -335,7 +334,7 @@ SYM_FUNC_START(ftrace_graph_caller)
 SYM_FUNC_END(ftrace_graph_caller)
 
 SYM_FUNC_START(return_to_handler)
-       subq  $24, %rsp
+       subq  $16, %rsp
 
        /* Save the return values */
        movq %rax, (%rsp)
@@ -347,7 +346,19 @@ SYM_FUNC_START(return_to_handler)
        movq %rax, %rdi
        movq 8(%rsp), %rdx
        movq (%rsp), %rax
-       addq $24, %rsp
-       JMP_NOSPEC rdi
+
+       addq $16, %rsp
+       /*
+        * Jump back to the old return address. This cannot be JMP_NOSPEC rdi
+        * since IBT would demand that contain ENDBR, which simply isn't so for
+        * return addresses. Use a retpoline here to keep the RSB balanced.
+        */
+       ANNOTATE_INTRA_FUNCTION_CALL
+       call .Ldo_rop
+       int3
+.Ldo_rop:
+       mov %rdi, (%rsp)
+       UNWIND_HINT_FUNC
+       RET
 SYM_FUNC_END(return_to_handler)
 #endif