From 57380fd1249b20ef772549af2c58ef57b21faba7 Mon Sep 17 00:00:00 2001
From: Rong Tao
Date: Wed, 24 May 2023 20:31:24 +0800
Subject: [PATCH] tools/virtio: Fix arm64 ringtest compilation error
MIME-Version: 1.0
Content-Type: text/plain; charset=utf8
Content-Transfer-Encoding: 8bit

Add cpu_relax() for arm64 instead of calling assert(0) directly, and
include the assert.h header. Also add smp_mb() and smp_wmb() for arm64,
and provide a fallback definition of __always_inline so it is not left
undefined.

The compilation error is as follows:

$ make
cc -Wall -pthread -O2 -ggdb -flto -fwhole-program -c -o ring.o ring.c
In file included from ring.c:10:
main.h: In function ‘busy_wait’:
main.h:99:21: warning: implicit declaration of function ‘assert’ [-Wimplicit-function-declaration]
   99 | #define cpu_relax() assert(0)
      |                     ^~~~~~
main.h:107:17: note: in expansion of macro ‘cpu_relax’
  107 |                 cpu_relax();
      |                 ^~~~~~~~~
main.h:12:1: note: ‘assert’ is defined in header ‘<assert.h>’; did you forget to ‘#include <assert.h>’?
   11 | #include <stdbool.h>
  +++ |+#include <assert.h>
   12 |
main.h: At top level:
main.h:143:23: error: expected ‘;’ before ‘void’
  143 | static __always_inline
      |                       ^
      |                       ;
  144 | void __read_once_size(const volatile void *p, void *res, int size)
      |      ~~~~
main.h:158:23: error: expected ‘;’ before ‘void’
  158 | static __always_inline void __write_once_size(volatile void *p, void *res, int size)
      |                       ^~~~~
      |                       ;
make: *** [<builtin>: ring.o] Error 1

Signed-off-by: Rong Tao
Message-Id:
Signed-off-by: Michael S. Tsirkin
---
 tools/virtio/ringtest/main.h | 11 +++++++++++
 1 file changed, 11 insertions(+)

diff --git a/tools/virtio/ringtest/main.h b/tools/virtio/ringtest/main.h
index b68920d..d18dd31 100644
--- a/tools/virtio/ringtest/main.h
+++ b/tools/virtio/ringtest/main.h
@@ -8,6 +8,7 @@
 #ifndef MAIN_H
 #define MAIN_H
 
+#include <assert.h>
 #include <stdbool.h>
 
 extern int param;
@@ -95,6 +96,8 @@ extern unsigned ring_size;
 #define cpu_relax() asm ("rep; nop" ::: "memory")
 #elif defined(__s390x__)
 #define cpu_relax() barrier()
+#elif defined(__aarch64__)
+#define cpu_relax() asm ("yield" ::: "memory")
 #else
 #define cpu_relax() assert(0)
 #endif
@@ -112,6 +115,8 @@ static inline void busy_wait(void)
 
 #if defined(__x86_64__) || defined(__i386__)
 #define smp_mb() asm volatile("lock; addl $0,-132(%%rsp)" ::: "memory", "cc")
+#elif defined(__aarch64__)
+#define smp_mb() asm volatile("dmb ish" ::: "memory")
 #else
 /*
  * Not using __ATOMIC_SEQ_CST since gcc docs say they are only synchronized
@@ -136,10 +141,16 @@ static inline void busy_wait(void)
 
 #if defined(__i386__) || defined(__x86_64__) || defined(__s390x__)
 #define smp_wmb() barrier()
+#elif defined(__aarch64__)
+#define smp_wmb() asm volatile("dmb ishst" ::: "memory")
 #else
 #define smp_wmb() smp_release()
 #endif
 
+#ifndef __always_inline
+#define __always_inline inline __attribute__((always_inline))
+#endif
+
 static __always_inline
 void __read_once_size(const volatile void *p, void *res, int size)
 {
--
2.7.4
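
Not part of the patch itself: below is a minimal standalone sketch for anyone who
wants to sanity-check the new arm64 cpu_relax()/smp_mb()/smp_wmb() definitions in
isolation. Only the aarch64 and x86_64 macro bodies are taken from main.h; the
generic fallback branch, the file name barrier_demo.c, and the producer/consumer
harness are made up for illustration and are not from the tree.

/*
 * barrier_demo.c - illustrative only, assumes the macro bodies added by the
 * patch above; build with: cc -Wall -O2 -pthread barrier_demo.c -o barrier_demo
 */
#include <pthread.h>
#include <stdio.h>

#if defined(__aarch64__)
#define cpu_relax() asm ("yield" ::: "memory")
#define smp_mb()    asm volatile("dmb ish" ::: "memory")
#define smp_wmb()   asm volatile("dmb ishst" ::: "memory")
#elif defined(__x86_64__)
#define cpu_relax() asm ("rep; nop" ::: "memory")
#define smp_mb()    asm volatile("lock; addl $0,-132(%%rsp)" ::: "memory", "cc")
#define smp_wmb()   asm volatile("" ::: "memory")  /* x86 orders stores; compiler barrier only */
#else
#define cpu_relax() asm volatile("" ::: "memory")  /* fallback for this sketch, not from main.h */
#define smp_mb()    __sync_synchronize()
#define smp_wmb()   __sync_synchronize()
#endif

static volatile int data;
static volatile int ready;

static void *producer(void *arg)
{
	(void)arg;
	data = 42;
	smp_wmb();	/* order the data store before the flag store */
	ready = 1;
	return NULL;
}

static void *consumer(void *arg)
{
	(void)arg;
	while (!ready)
		cpu_relax();	/* "yield" on arm64, "rep; nop" on x86 */
	smp_mb();	/* order the flag load before the data load */
	printf("data = %d\n", data);
	return NULL;
}

int main(void)
{
	pthread_t p, c;

	pthread_create(&p, NULL, producer, NULL);
	pthread_create(&c, NULL, consumer, NULL);
	pthread_join(p, NULL);
	pthread_join(c, NULL);
	return 0;
}

The smp_wmb() in the producer pairs with the smp_mb() in the consumer, the same
flag-then-data pattern the ringtest code relies on. On arm64 these expand to
"dmb ishst" and "dmb ish" respectively, and cpu_relax() expands to the "yield"
hint so the spin loop does not monopolise the core.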