x86/paravirt: Use common macro for creating simple asm paravirt functions
author Juergen Gross <jgross@suse.com>
Wed, 9 Nov 2022 13:44:18 +0000 (14:44 +0100)
committer Peter Zijlstra <peterz@infradead.org>
Thu, 24 Nov 2022 12:56:44 +0000 (13:56 +0100)
Some paravirt assembler functions share a common pattern. Introduce a
DEFINE_PARAVIRT_ASM() macro for creating them.

Note that the macro includes explicit alignment of the generated
functions, so __raw_callee_save___kvm_vcpu_is_preempted(),
_paravirt_nop() and paravirt_ret0() are now aligned on 4-byte
boundaries.
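
For illustration, DEFINE_PARAVIRT_ASM(_paravirt_nop, "", .entry.text)
expands to roughly the following (a sketch only: the exact output of
ASM_FUNC_ALIGN, ASM_ENDBR and ASM_RET depends on the configuration,
e.g. CONFIG_X86_KERNEL_IBT and the return mitigations):

    .pushsection .entry.text, "ax"
    .global _paravirt_nop
    .type _paravirt_nop, @function
    .balign 4                       # ASM_FUNC_ALIGN
    _paravirt_nop:
            endbr64                 # ASM_ENDBR, empty without IBT
            ret                     # ASM_RET
    .size _paravirt_nop, . - _paravirt_nop
    .popsection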

The explicit _paravirt_nop() prototype in paravirt.c isn't needed, as
paravirt_types.h already provides it.

Signed-off-by: Juergen Gross <jgross@suse.com>
Signed-off-by: Peter Zijlstra (Intel) <peterz@infradead.org>
Reviewed-by: Srivatsa S. Bhat (VMware) <srivatsa@csail.mit.edu>
Link: https://lkml.kernel.org/r/20221109134418.6516-1-jgross@suse.com
arch/x86/include/asm/paravirt.h
arch/x86/include/asm/qspinlock_paravirt.h
arch/x86/kernel/kvm.c
arch/x86/kernel/paravirt.c

index 2851bc2..73e9522 100644
@@ -731,6 +731,18 @@ static __always_inline unsigned long arch_local_irq_save(void)
 #undef PVOP_VCALL4
 #undef PVOP_CALL4
 
+#define DEFINE_PARAVIRT_ASM(func, instr, sec)          \
+       asm (".pushsection " #sec ", \"ax\"\n"          \
+            ".global " #func "\n\t"                    \
+            ".type " #func ", @function\n\t"           \
+            ASM_FUNC_ALIGN "\n"                        \
+            #func ":\n\t"                              \
+            ASM_ENDBR                                  \
+            instr "\n\t"                               \
+            ASM_RET                                    \
+            ".size " #func ", . - " #func "\n\t"       \
+            ".popsection")
+
 extern void default_banner(void);
 
 #else  /* __ASSEMBLY__ */
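
As a usage sketch (hypothetical, not part of this patch): a new stub
returning 1 could be created with the macro and declared like this,
assuming paravirt.h is included:

    /* Hypothetical example -- paravirt_ret1 does not exist in the tree. */
    DEFINE_PARAVIRT_ASM(paravirt_ret1, "mov $1, %eax", .entry.text);
    extern unsigned long paravirt_ret1(void);

Note that the section name is passed unquoted; the macro stringifies
it via #sec.
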
index d861127..42b17cf 100644
@@ -14,8 +14,6 @@
 
 __PV_CALLEE_SAVE_REGS_THUNK(__pv_queued_spin_unlock_slowpath, ".spinlock.text");
 #define __pv_queued_spin_unlock        __pv_queued_spin_unlock
-#define PV_UNLOCK              "__raw_callee_save___pv_queued_spin_unlock"
-#define PV_UNLOCK_SLOWPATH     "__raw_callee_save___pv_queued_spin_unlock_slowpath"
 
 /*
  * Optimized assembly version of __raw_callee_save___pv_queued_spin_unlock
@@ -37,32 +35,27 @@ __PV_CALLEE_SAVE_REGS_THUNK(__pv_queued_spin_unlock_slowpath, ".spinlock.text");
  *   rsi = lockval           (second argument)
  *   rdx = internal variable (set to 0)
  */
-asm    (".pushsection .spinlock.text, \"ax\";"
-       ".globl " PV_UNLOCK ";"
-       ".type " PV_UNLOCK ", @function;"
-       ASM_FUNC_ALIGN
-       PV_UNLOCK ": "
-       ASM_ENDBR
-       FRAME_BEGIN
-       "push  %rdx;"
-       "mov   $0x1,%eax;"
-       "xor   %edx,%edx;"
-       LOCK_PREFIX "cmpxchg %dl,(%rdi);"
-       "cmp   $0x1,%al;"
-       "jne   .slowpath;"
-       "pop   %rdx;"
+#define PV_UNLOCK_ASM                                                  \
+       FRAME_BEGIN                                                     \
+       "push  %rdx\n\t"                                                \
+       "mov   $0x1,%eax\n\t"                                           \
+       "xor   %edx,%edx\n\t"                                           \
+       LOCK_PREFIX "cmpxchg %dl,(%rdi)\n\t"                            \
+       "cmp   $0x1,%al\n\t"                                            \
+       "jne   .slowpath\n\t"                                           \
+       "pop   %rdx\n\t"                                                \
+       FRAME_END                                                       \
+       ASM_RET                                                         \
+       ".slowpath:\n\t"                                                \
+       "push   %rsi\n\t"                                               \
+       "movzbl %al,%esi\n\t"                                           \
+       "call __raw_callee_save___pv_queued_spin_unlock_slowpath\n\t"   \
+       "pop    %rsi\n\t"                                               \
+       "pop    %rdx\n\t"                                               \
        FRAME_END
-       ASM_RET
-       ".slowpath: "
-       "push   %rsi;"
-       "movzbl %al,%esi;"
-       "call " PV_UNLOCK_SLOWPATH ";"
-       "pop    %rsi;"
-       "pop    %rdx;"
-       FRAME_END
-       ASM_RET
-       ".size " PV_UNLOCK ", .-" PV_UNLOCK ";"
-       ".popsection");
+
+DEFINE_PARAVIRT_ASM(__raw_callee_save___pv_queued_spin_unlock,
+                   PV_UNLOCK_ASM, .spinlock.text);
 
 #else /* CONFIG_64BIT */
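
Note how PV_UNLOCK_ASM meshes with the macro: the fast path returns
through the ASM_RET embedded in the body, while the slow path falls
through to the ASM_RET that DEFINE_PARAVIRT_ASM() appends after instr.
Schematically (a sketch, operands and directives elided):

    __raw_callee_save___pv_queued_spin_unlock:
            <ENDBR>         # from the macro
            FRAME_BEGIN     # PV_UNLOCK_ASM starts here
            ...             # fast path
            FRAME_END
            <RET>           # ASM_RET inside PV_UNLOCK_ASM
    .slowpath:
            ...             # calls the slowpath thunk
            FRAME_END       # end of PV_UNLOCK_ASM
            <RET>           # ASM_RET appended by the macro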
 
index 95fb85b..4d053cb 100644
@@ -798,20 +798,13 @@ extern bool __raw_callee_save___kvm_vcpu_is_preempted(long);
  * Hand-optimize version for x86-64 to avoid 8 64-bit register saving and
  * restoring to/from the stack.
  */
-asm(
-".pushsection .text;"
-".global __raw_callee_save___kvm_vcpu_is_preempted;"
-".type __raw_callee_save___kvm_vcpu_is_preempted, @function;"
-ASM_FUNC_ALIGN
-"__raw_callee_save___kvm_vcpu_is_preempted:"
-ASM_ENDBR
-"movq  __per_cpu_offset(,%rdi,8), %rax;"
-"cmpb  $0, " __stringify(KVM_STEAL_TIME_preempted) "+steal_time(%rax);"
-"setne %al;"
-ASM_RET
-".size __raw_callee_save___kvm_vcpu_is_preempted, .-__raw_callee_save___kvm_vcpu_is_preempted;"
-".popsection");
+#define PV_VCPU_PREEMPTED_ASM                                               \
+ "movq   __per_cpu_offset(,%rdi,8), %rax\n\t"                               \
+ "cmpb   $0, " __stringify(KVM_STEAL_TIME_preempted) "+steal_time(%rax)\n\t" \
+ "setne  %al\n\t"
 
+DEFINE_PARAVIRT_ASM(__raw_callee_save___kvm_vcpu_is_preempted,
+                   PV_VCPU_PREEMPTED_ASM, .text);
 #endif
 
 static void __init kvm_guest_init(void)
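
For reference, the __stringify() used above comes from
include/linux/stringify.h; it expands its argument first, so the cmpb
operand ends up with the numeric KVM_STEAL_TIME_preempted offset
(generated via asm-offsets) into the per-cpu steal_time area:

    #define __stringify_1(x...)     #x
    #define __stringify(x...)       __stringify_1(x)
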
index e244c49..327757a 100644
  * nop stub, which must not clobber anything *including the stack* to
  * avoid confusing the entry prologues.
  */
-extern void _paravirt_nop(void);
-asm (".pushsection .entry.text, \"ax\"\n"
-     ".global _paravirt_nop\n"
-     ASM_FUNC_ALIGN
-     "_paravirt_nop:\n\t"
-     ASM_ENDBR
-     ASM_RET
-     ".size _paravirt_nop, . - _paravirt_nop\n\t"
-     ".type _paravirt_nop, @function\n\t"
-     ".popsection");
+DEFINE_PARAVIRT_ASM(_paravirt_nop, "", .entry.text);
 
 /* stub always returning 0. */
-asm (".pushsection .entry.text, \"ax\"\n"
-     ".global paravirt_ret0\n"
-     ASM_FUNC_ALIGN
-     "paravirt_ret0:\n\t"
-     ASM_ENDBR
-     "xor %" _ASM_AX ", %" _ASM_AX ";\n\t"
-     ASM_RET
-     ".size paravirt_ret0, . - paravirt_ret0\n\t"
-     ".type paravirt_ret0, @function\n\t"
-     ".popsection");
-
+DEFINE_PARAVIRT_ASM(paravirt_ret0, "xor %eax,%eax", .entry.text);
 
 void __init default_banner(void)
 {