x86,kprobes: Fix optprobe trampoline to generate complete pt_regs
authorMasami Hiramatsu <mhiramat@kernel.org>
Sat, 26 Mar 2022 02:27:40 +0000 (11:27 +0900)
committerAlexei Starovoitov <ast@kernel.org>
Tue, 29 Mar 2022 02:38:51 +0000 (19:38 -0700)
Currently the optprobe trampoline template code generates an
almost complete pt_regs on-stack, everything except regs->ss.
The 'regs->ss' points to the top of stack, which is not a
valid segment descriptor.

Just as the rethook does, complete the job by also pushing ss.

Suggested-by: Peter Zijlstra (Intel) <peterz@infradead.org>
Signed-off-by: Masami Hiramatsu <mhiramat@kernel.org>
Signed-off-by: Alexei Starovoitov <ast@kernel.org>
Link: https://lore.kernel.org/bpf/164826166027.2455864.14759128090648961900.stgit@devnote2
arch/x86/kernel/kprobes/opt.c

index b4a54a5..e6b8c53 100644 (file)
@@ -106,7 +106,8 @@ asm (
                        ".global optprobe_template_entry\n"
                        "optprobe_template_entry:\n"
 #ifdef CONFIG_X86_64
-                       /* We don't bother saving the ss register */
+                       "       pushq $" __stringify(__KERNEL_DS) "\n"
+                       /* Save the 'sp - 8', this will be fixed later. */
                        "       pushq %rsp\n"
                        "       pushfq\n"
                        ".global optprobe_template_clac\n"
@@ -121,14 +122,17 @@ asm (
                        ".global optprobe_template_call\n"
                        "optprobe_template_call:\n"
                        ASM_NOP5
-                       /* Move flags to rsp */
+                       /* Copy 'regs->flags' into 'regs->ss'. */
                        "       movq 18*8(%rsp), %rdx\n"
-                       "       movq %rdx, 19*8(%rsp)\n"
+                       "       movq %rdx, 20*8(%rsp)\n"
                        RESTORE_REGS_STRING
-                       /* Skip flags entry */
-                       "       addq $8, %rsp\n"
+                       /* Skip 'regs->flags' and 'regs->sp'. */
+                       "       addq $16, %rsp\n"
+                       /* And pop flags register from 'regs->ss'. */
                        "       popfq\n"
 #else /* CONFIG_X86_32 */
+                       "       pushl %ss\n"
+                       /* Save the 'sp - 4', this will be fixed later. */
                        "       pushl %esp\n"
                        "       pushfl\n"
                        ".global optprobe_template_clac\n"
@@ -142,12 +146,13 @@ asm (
                        ".global optprobe_template_call\n"
                        "optprobe_template_call:\n"
                        ASM_NOP5
-                       /* Move flags into esp */
+                       /* Copy 'regs->flags' into 'regs->ss'. */
                        "       movl 14*4(%esp), %edx\n"
-                       "       movl %edx, 15*4(%esp)\n"
+                       "       movl %edx, 16*4(%esp)\n"
                        RESTORE_REGS_STRING
-                       /* Skip flags entry */
-                       "       addl $4, %esp\n"
+                       /* Skip 'regs->flags' and 'regs->sp'. */
+                       "       addl $8, %esp\n"
+                       /* And pop flags register from 'regs->ss'. */
                        "       popfl\n"
 #endif
                        ".global optprobe_template_end\n"
@@ -179,6 +184,8 @@ optimized_callback(struct optimized_kprobe *op, struct pt_regs *regs)
                kprobes_inc_nmissed_count(&op->kp);
        } else {
                struct kprobe_ctlblk *kcb = get_kprobe_ctlblk();
+               /* Adjust stack pointer */
+               regs->sp += sizeof(long);
                /* Save skipped registers */
                regs->cs = __KERNEL_CS;
 #ifdef CONFIG_X86_32