From: WANG Xuerui
Date: Thu, 29 Jun 2023 12:58:43 +0000 (+0800)
Subject: LoongArch: Simplify the invtlb wrappers
X-Git-Tag: v6.6.17~4515^2~20
X-Git-Url: http://review.tizen.org/git/?a=commitdiff_plain;h=83d8b38967d253942d9172b0c4d69b7d844d5f06;p=platform%2Fkernel%2Flinux-rpi.git

LoongArch: Simplify the invtlb wrappers

The invtlb instruction has been supported by upstream LoongArch
toolchains from day one, so ditch the raw opcode trickery and just use
plain inline asm for it.

While at it, also make the invtlb asm statements barriers, for proper
modeling of the side effects.

The functions are also marked as __always_inline instead of just
"inline", because they cannot work at all if not inlined: the op
argument will not be a compile-time constant in that case, thus failing
to satisfy the "i" constraint.

The signatures of the other, more specific invtlb wrappers still
contain unused arguments; these are not removed right away, in order to
keep the patch focused. In the meantime, assertions are added to ensure
no accidental misuse happens before the refactor. (The more specific
wrappers cannot re-use the generic invtlb wrapper, because the ISA
manual says $zero shall be used when a particular op does not take the
respective argument: re-using the generic wrapper would mean losing
control over the register usage.)

Signed-off-by: WANG Xuerui
Signed-off-by: Huacai Chen
---

diff --git a/arch/loongarch/include/asm/tlb.h b/arch/loongarch/include/asm/tlb.h
index f5e4deb..0ad53f1 100644
--- a/arch/loongarch/include/asm/tlb.h
+++ b/arch/loongarch/include/asm/tlb.h
@@ -88,52 +88,47 @@ enum invtlb_ops {
 	INVTLB_GID_ADDR = 0x16,
 };
 
-/*
- * invtlb op info addr
- * (0x1 << 26) | (0x24 << 20) | (0x13 << 15) |
- * (addr << 10) | (info << 5) | op
- */
-static inline void invtlb(u32 op, u32 info, u64 addr)
+static __always_inline void invtlb(u32 op, u32 info, u64 addr)
 {
 	__asm__ __volatile__(
-		"parse_r addr,%0\n\t"
-		"parse_r info,%1\n\t"
-		".word ((0x6498000) | (addr << 10) | (info << 5) | %2)\n\t"
-		:
-		: "r"(addr), "r"(info), "i"(op)
+		"invtlb %0, %1, %2\n\t"
 		:
+		: "i"(op), "r"(info), "r"(addr)
+		: "memory"
 		);
 }
 
-static inline void invtlb_addr(u32 op, u32 info, u64 addr)
+static __always_inline void invtlb_addr(u32 op, u32 info, u64 addr)
 {
+	BUILD_BUG_ON(!__builtin_constant_p(info) || info != 0);
 	__asm__ __volatile__(
-		"parse_r addr,%0\n\t"
-		".word ((0x6498000) | (addr << 10) | (0 << 5) | %1)\n\t"
-		:
-		: "r"(addr), "i"(op)
+		"invtlb %0, $zero, %1\n\t"
 		:
+		: "i"(op), "r"(addr)
+		: "memory"
 		);
 }
 
-static inline void invtlb_info(u32 op, u32 info, u64 addr)
+static __always_inline void invtlb_info(u32 op, u32 info, u64 addr)
 {
+	BUILD_BUG_ON(!__builtin_constant_p(addr) || addr != 0);
 	__asm__ __volatile__(
-		"parse_r info,%0\n\t"
-		".word ((0x6498000) | (0 << 10) | (info << 5) | %1)\n\t"
-		:
-		: "r"(info), "i"(op)
+		"invtlb %0, %1, $zero\n\t"
 		:
+		: "i"(op), "r"(info)
+		: "memory"
 		);
 }
 
-static inline void invtlb_all(u32 op, u32 info, u64 addr)
+static __always_inline void invtlb_all(u32 op, u32 info, u64 addr)
 {
+	BUILD_BUG_ON(!__builtin_constant_p(info) || info != 0);
+	BUILD_BUG_ON(!__builtin_constant_p(addr) || addr != 0);
 	__asm__ __volatile__(
-		".word ((0x6498000) | (0 << 10) | (0 << 5) | %0)\n\t"
+		"invtlb %0, $zero, $zero\n\t"
 		:
 		: "i"(op)
-		:
+		: "memory"
 		);
 }
 
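
Illustrative usage sketch (not part of the patch): a minimal example of
how a caller might use the refactored wrappers. The call site below is
hypothetical; INVTLB_GID_ADDR is taken from the hunk context above,
while INVTLB_CURRENT_ALL is assumed to be another member of enum
invtlb_ops. The key point is that op must be a compile-time constant so
the "i" (immediate) constraint can be satisfied once the wrapper is
inlined.

	/* Hypothetical call site, for illustration only */
	static void example_flush(u32 gid, u64 vaddr)
	{
		/* generic form: op, info and addr are all used */
		invtlb(INVTLB_GID_ADDR, gid, vaddr);

		/*
		 * op-only form: info and addr must be constant zero,
		 * otherwise the new BUILD_BUG_ON() assertions fire
		 */
		invtlb_all(INVTLB_CURRENT_ALL, 0, 0);
	}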