/* SPDX-License-Identifier: GPL-2.0-only */
/*
 * Based on arch/arm/include/asm/barrier.h
 *
 * Copyright (C) 2012 ARM Ltd.
 */
#ifndef __ASM_BARRIER_H
#define __ASM_BARRIER_H

#ifndef __ASSEMBLY__

#include <linux/kasan-checks.h>
#define __nops(n)	".rept	" #n "\nnop\n.endr\n"
#define nops(n)		asm volatile(__nops(n))

#define sev()		asm volatile("sev" : : : "memory")
#define wfe()		asm volatile("wfe" : : : "memory")
#define wfet(val)	asm volatile("msr s0_3_c1_c0_0, %0"	\
				     : : "r" (val) : "memory")
#define wfi()		asm volatile("wfi" : : : "memory")
#define wfit(val)	asm volatile("msr s0_3_c1_c0_1, %0"	\
				     : : "r" (val) : "memory")

#define isb()		asm volatile("isb" : : : "memory")
#define dmb(opt)	asm volatile("dmb " #opt : : : "memory")
#define dsb(opt)	asm volatile("dsb " #opt : : : "memory")

#define psb_csync()	asm volatile("hint #17" : : : "memory")
#define __tsb_csync()	asm volatile("hint #18" : : : "memory")
#define csdb()		asm volatile("hint #20" : : : "memory")
/*
 * Data Gathering Hint:
 * This instruction prevents merging memory accesses with Normal-NC or
 * Device-GRE attributes before the hint instruction with any memory accesses
 * appearing after the hint instruction.
 */
#define dgh()		asm volatile("hint #6" : : : "memory")
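
/*
 * Illustrative sketch, not part of the original header: dgh() is the
 * instruction behind io_stop_wc() below. A driver pushing write-combined
 * data might use that wrapper so earlier Normal-NC writes are not merged
 * with later accesses (wc_buf, data, len and doorbell are all
 * hypothetical):
 *
 *	memcpy_toio(wc_buf, data, len);
 *	io_stop_wc();
 *	writel(1, doorbell);
 */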
#ifdef CONFIG_ARM64_PSEUDO_NMI
#define pmr_sync()						\
	do {							\
		extern struct static_key_false gic_pmr_sync;	\
								\
		if (static_branch_unlikely(&gic_pmr_sync))	\
			dsb(sy);				\
	} while(0)
#else
#define pmr_sync()	do {} while (0)
#endif
#define mb()		dsb(sy)
#define rmb()		dsb(ld)
#define wmb()		dsb(st)

#define dma_mb()	dmb(osh)
#define dma_rmb()	dmb(oshld)
#define dma_wmb()	dmb(oshst)

#define io_stop_wc()	dgh()
#define tsb_csync()								\
	do {									\
		/*								\
		 * CPUs affected by Arm Erratum 2054223 or 2067961 need		\
		 * another TSB to ensure the trace is flushed. The barriers	\
		 * don't have to be strictly back to back, as long as the	\
		 * CPU is in trace prohibited state.				\
		 */								\
		if (cpus_have_final_cap(ARM64_WORKAROUND_TSB_FLUSH_FAILURE))	\
			__tsb_csync();						\
		__tsb_csync();							\
	} while (0)
/*
 * Generate a mask for array_index_nospec() that is ~0UL when 0 <= idx < sz
 * and 0 otherwise.
 */
#define array_index_mask_nospec array_index_mask_nospec
static inline unsigned long array_index_mask_nospec(unsigned long idx,
						    unsigned long sz)
{
	unsigned long mask;

	asm volatile(
	"	cmp	%1, %2\n"
	"	sbc	%0, xzr, xzr\n"
	: "=r" (mask)
	: "r" (idx), "Ir" (sz)
	: "cc");

	csdb();
	return mask;
}
/*
 * Ensure that reads of the counter are treated the same as memory reads
 * for the purposes of ordering by subsequent memory barriers.
 *
 * This insanity brought to you by speculative system register reads,
 * out-of-order memory accesses, sequence locks and Thomas Gleixner.
 *
 * https://lore.kernel.org/r/alpine.DEB.2.21.1902081950260.1662@nanos.tec.linutronix.de/
 */
#define arch_counter_enforce_ordering(val) do {			\
	u64 tmp, _val = (val);						\
									\
	asm volatile(							\
	"	eor	%0, %1, %1\n"					\
	"	add	%0, sp, %0\n"					\
	"	ldr	xzr, [%0]"					\
	: "=r" (tmp) : "r" (_val));					\
} while (0)
#define __smp_mb()	dmb(ish)
#define __smp_rmb()	dmb(ishld)
#define __smp_wmb()	dmb(ishst)
#define __smp_store_release(p, v)					\
do {									\
	typeof(p) __p = (p);						\
	union { __unqual_scalar_typeof(*p) __val; char __c[1]; } __u =	\
		{ .__val = (__force __unqual_scalar_typeof(*p)) (v) };	\
	compiletime_assert_atomic_type(*p);				\
	kasan_check_write(__p, sizeof(*p));				\
	switch (sizeof(*p)) {						\
	case 1:								\
		asm volatile ("stlrb %w1, %0"				\
				: "=Q" (*__p)				\
				: "r" (*(__u8 *)__u.__c)		\
				: "memory");				\
		break;							\
	case 2:								\
		asm volatile ("stlrh %w1, %0"				\
				: "=Q" (*__p)				\
				: "r" (*(__u16 *)__u.__c)		\
				: "memory");				\
		break;							\
	case 4:								\
		asm volatile ("stlr %w1, %0"				\
				: "=Q" (*__p)				\
				: "r" (*(__u32 *)__u.__c)		\
				: "memory");				\
		break;							\
	case 8:								\
		asm volatile ("stlr %1, %0"				\
				: "=Q" (*__p)				\
				: "r" (*(__u64 *)__u.__c)		\
				: "memory");				\
		break;							\
	}								\
} while (0)
#define __smp_load_acquire(p)						\
({									\
	union { __unqual_scalar_typeof(*p) __val; char __c[1]; } __u;	\
	typeof(p) __p = (p);						\
	compiletime_assert_atomic_type(*p);				\
	kasan_check_read(__p, sizeof(*p));				\
	switch (sizeof(*p)) {						\
	case 1:								\
		asm volatile ("ldarb %w0, %1"				\
			: "=r" (*(__u8 *)__u.__c)			\
			: "Q" (*__p) : "memory");			\
		break;							\
	case 2:								\
		asm volatile ("ldarh %w0, %1"				\
			: "=r" (*(__u16 *)__u.__c)			\
			: "Q" (*__p) : "memory");			\
		break;							\
	case 4:								\
		asm volatile ("ldar %w0, %1"				\
			: "=r" (*(__u32 *)__u.__c)			\
			: "Q" (*__p) : "memory");			\
		break;							\
	case 8:								\
		asm volatile ("ldar %0, %1"				\
			: "=r" (*(__u64 *)__u.__c)			\
			: "Q" (*__p) : "memory");			\
		break;							\
	}								\
	(typeof(*p))__u.__val;						\
})
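
/*
 * Illustrative sketch, not part of the original header: the consumer side
 * of the smp_store_release() example above:
 *
 *	if (smp_load_acquire(&ready))	// ready is read before data
 *		consume(data);		// sees the data published above
 */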
#define smp_cond_load_relaxed(ptr, cond_expr)				\
({									\
	typeof(ptr) __PTR = (ptr);					\
	__unqual_scalar_typeof(*ptr) VAL;				\
	for (;;) {							\
		VAL = READ_ONCE(*__PTR);				\
		if (cond_expr)						\
			break;						\
		__cmpwait_relaxed(__PTR, VAL);				\
	}								\
	(typeof(*ptr))VAL;						\
})
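
/*
 * Illustrative sketch, not part of the original header: wait for a
 * hypothetical flag to become non-zero without a pure busy loop;
 * __cmpwait_relaxed() lets the CPU idle in WFE until the cacheline
 * changes:
 *
 *	smp_cond_load_relaxed(&flag, VAL != 0);
 */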
#define smp_cond_load_acquire(ptr, cond_expr)				\
({									\
	typeof(ptr) __PTR = (ptr);					\
	__unqual_scalar_typeof(*ptr) VAL;				\
	for (;;) {							\
		VAL = smp_load_acquire(__PTR);				\
		if (cond_expr)						\
			break;						\
		__cmpwait_relaxed(__PTR, VAL);				\
	}								\
	(typeof(*ptr))VAL;						\
})
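
/*
 * Illustrative sketch, not part of the original header: as above, but the
 * final observation has acquire semantics, so accesses after the wait
 * cannot be reordered before it (locked is hypothetical):
 *
 *	smp_cond_load_acquire(&locked, VAL == 0);
 */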
#include <asm-generic/barrier.h>

#endif	/* __ASSEMBLY__ */

#endif	/* __ASM_BARRIER_H */