From 39c8275de81cf7fb524e2ff067aa175447412284 Mon Sep 17 00:00:00 2001
From: Mark Rutland <mark.rutland@arm.com>
Date: Tue, 14 Mar 2023 15:36:58 +0000
Subject: [PATCH] arm64: uaccess: permit __smp_store_release() to use zero
 register

Currently the asm constraints for __smp_store_release() require that
the value is placed in a "real" GPR (i.e. one other than [XW]ZR or SP).
This means that for cases such as:

	__smp_store_release(ptr, 0)

... the compiler has to move '0' into a "real" GPR, e.g.

	mov	xN, #0
	stlr	xN, [<addr>]

This is unfortunate, as using the zero register would require fewer
instructions and save a "real" GPR for other usage, allowing the
compiler to generate:

	stlr	xzr, [<addr>]

Modify the asm constraints for __smp_store_release() to permit the use
of the zero register for the value.

There should be no functional change as a result of this patch.

Signed-off-by: Mark Rutland <mark.rutland@arm.com>
Cc: Catalin Marinas <catalin.marinas@arm.com>
Cc: Robin Murphy <robin.murphy@arm.com>
Cc: Will Deacon <will@kernel.org>
Link: https://lore.kernel.org/r/20230314153700.787701-3-mark.rutland@arm.com
Signed-off-by: Will Deacon <will@kernel.org>
---
 arch/arm64/include/asm/barrier.h | 10 +++++-----
 1 file changed, 5 insertions(+), 5 deletions(-)

diff --git a/arch/arm64/include/asm/barrier.h b/arch/arm64/include/asm/barrier.h
index 3dd8982..cf29874 100644
--- a/arch/arm64/include/asm/barrier.h
+++ b/arch/arm64/include/asm/barrier.h
@@ -131,25 +131,25 @@ do {									\
 	case 1:								\
 		asm volatile ("stlrb %w1, %0"				\
 				: "=Q" (*__p)				\
-				: "r" (*(__u8 *)__u.__c)		\
+				: "rZ" (*(__u8 *)__u.__c)		\
 				: "memory");				\
 		break;							\
 	case 2:								\
 		asm volatile ("stlrh %w1, %0"				\
 				: "=Q" (*__p)				\
-				: "r" (*(__u16 *)__u.__c)		\
+				: "rZ" (*(__u16 *)__u.__c)		\
 				: "memory");				\
 		break;							\
 	case 4:								\
 		asm volatile ("stlr %w1, %0"				\
 				: "=Q" (*__p)				\
-				: "r" (*(__u32 *)__u.__c)		\
+				: "rZ" (*(__u32 *)__u.__c)		\
 				: "memory");				\
 		break;							\
 	case 8:								\
-		asm volatile ("stlr %1, %0"				\
+		asm volatile ("stlr %x1, %0"				\
 				: "=Q" (*__p)				\
-				: "r" (*(__u64 *)__u.__c)		\
+				: "rZ" (*(__u64 *)__u.__c)		\
 				: "memory");				\
 		break;							\
 	}								\
-- 
2.7.4
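
Note (illustration, not part of the patch): below is a minimal standalone
sketch contrasting the old "r" constraint with the new "rZ" constraint.
The file name rz_demo.c and the helper names stlr_r()/stlr_rz() are
hypothetical; only the constraint strings and the %x1 template modifier
come from the patch itself. "rZ" lets GCC match the integer constant
zero directly, and %x1 makes that operand print as the 64-bit zero
register (xzr) rather than a bare immediate. Compiling with an AArch64
GCC at -O2 and inspecting the assembly should show the
"mov xN, #0; stlr xN, ..." vs "stlr xzr, ..." difference described in
the commit message.

/*
 * rz_demo.c - illustrative sketch only, not part of the patch.
 *
 * Build and inspect the assembly, e.g.:
 *   aarch64-linux-gnu-gcc -O2 -S rz_demo.c
 */
#include <stdint.h>

/*
 * Old form: "r" requires a "real" GPR, so a constant 0 must first be
 * materialised with "mov xN, #0" before the store.
 */
static inline void stlr_r(uint64_t *p, uint64_t v)
{
	asm volatile("stlr %1, %0" : "=Q" (*p) : "r" (v) : "memory");
}

/*
 * New form: "rZ" also matches the integer constant zero; the %x1
 * template modifier forces the 64-bit register name, so a constant 0
 * is emitted as xzr instead of an invalid bare immediate.
 */
static inline void stlr_rz(uint64_t *p, uint64_t v)
{
	asm volatile("stlr %x1, %0" : "=Q" (*p) : "rZ" (v) : "memory");
}

/*
 * Expected codegen (GCC -O2), per the commit message:
 *   store_zero_r:  mov x1, 0; stlr x1, [x0]; ret
 *   store_zero_rz: stlr xzr, [x0]; ret
 */
void store_zero_r(uint64_t *p)  { stlr_r(p, 0); }
void store_zero_rz(uint64_t *p) { stlr_rz(p, 0); }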