/* SPDX-License-Identifier: GPL-2.0 */
#ifndef _ASM_GENERIC_PERCPU_H_
#define _ASM_GENERIC_PERCPU_H_

#include <linux/compiler.h>
#include <linux/threads.h>
#include <linux/percpu-defs.h>
/*
 * per_cpu_offset() is the offset that has to be added to a
 * percpu variable to get to the instance for a certain processor.
 *
 * Most arches use the __per_cpu_offset array for those offsets but
 * some arches have their own ways of determining the offset (x86_64, s390).
 */
18 #ifndef __per_cpu_offset
19 extern unsigned long __per_cpu_offset[NR_CPUS];
21 #define per_cpu_offset(x) (__per_cpu_offset[x])
/*
 * Determine the offset for the currently active processor.
 * An arch may define __my_cpu_offset to provide a more effective
 * means of obtaining the offset to the per cpu variables of the
 * current processor.
 */
#ifndef __my_cpu_offset
#define __my_cpu_offset per_cpu_offset(raw_smp_processor_id())
#endif
/*
 * my_cpu_offset uses smp_processor_id() under CONFIG_DEBUG_PREEMPT so
 * that use from a preemptible context is diagnosed; otherwise it is
 * the unchecked __my_cpu_offset.
 */
#ifdef CONFIG_DEBUG_PREEMPT
#define my_cpu_offset per_cpu_offset(smp_processor_id())
#else
#define my_cpu_offset __my_cpu_offset
#endif
/*
 * Arch may define arch_raw_cpu_ptr() to provide more efficient address
 * translations for raw_cpu_ptr().
 */
#ifndef arch_raw_cpu_ptr
/* Generic fallback: relocate @ptr by this CPU's per-cpu offset. */
#define arch_raw_cpu_ptr(ptr) SHIFT_PERCPU_PTR(ptr, __my_cpu_offset)
#endif
#ifdef CONFIG_HAVE_SETUP_PER_CPU_AREA
/* Arch hook: allocate and initialize the per-cpu areas at boot. */
extern void setup_per_cpu_areas(void);
#endif
#ifndef PER_CPU_BASE_SECTION
#ifdef CONFIG_SMP
#define PER_CPU_BASE_SECTION ".data..percpu"
#else
/* UP builds keep per-cpu data in plain .data. */
#define PER_CPU_BASE_SECTION ".data"
#endif
#endif
#ifndef PER_CPU_ATTRIBUTES
/* Extra attributes for per-cpu variable definitions; empty by default. */
#define PER_CPU_ATTRIBUTES
#endif
/*
 * Read this CPU's instance of @pcp.  "raw" ops give no protection
 * against preemption or interrupts; the caller must keep the task
 * pinned to this CPU.
 */
#define raw_cpu_generic_read(pcp)					\
({									\
	*raw_cpu_ptr(&(pcp));						\
})
/* Apply read-modify-write @op (e.g. +=, &=) to this CPU's @pcp. */
#define raw_cpu_generic_to_op(pcp, val, op)				\
do {									\
	*raw_cpu_ptr(&(pcp)) op val;					\
} while (0)
/* Add @val to this CPU's @pcp and return the new value. */
#define raw_cpu_generic_add_return(pcp, val)				\
({									\
	typeof(pcp) *__p = raw_cpu_ptr(&(pcp));				\
									\
	*__p += val;							\
	*__p;								\
})
/* Store @nval into this CPU's @pcp and return the previous value. */
#define raw_cpu_generic_xchg(pcp, nval)					\
({									\
	typeof(pcp) *__p = raw_cpu_ptr(&(pcp));				\
	typeof(pcp) __ret;						\
	__ret = *__p;							\
	*__p = nval;							\
	__ret;								\
})
/*
 * Compare-and-exchange on this CPU's @pcp: store @nval only if the
 * current value equals @oval.  Always returns the previous value.
 */
#define raw_cpu_generic_cmpxchg(pcp, oval, nval)			\
({									\
	typeof(pcp) *__p = raw_cpu_ptr(&(pcp));				\
	typeof(pcp) __ret;						\
	__ret = *__p;							\
	if (__ret == (oval))						\
		*__p = nval;						\
	__ret;								\
})
/*
 * Double-word compare-and-exchange: both @pcp1/@pcp2 must match
 * @oval1/@oval2 for the pair to be replaced.  Returns 1 on success,
 * 0 on failure (unlike the single-word variant, which returns the
 * old value).
 */
#define raw_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2) \
({									\
	typeof(pcp1) *__p1 = raw_cpu_ptr(&(pcp1));			\
	typeof(pcp2) *__p2 = raw_cpu_ptr(&(pcp2));			\
	int __ret = 0;							\
	if (*__p1 == (oval1) && *__p2 == (oval2)) {			\
		*__p1 = nval1;						\
		*__p2 = nval2;						\
		__ret = 1;						\
	}								\
	(__ret);							\
})
/*
 * Read @pcp with preemption disabled; used for native-word sizes
 * where a single load is atomic once migration is prevented.
 */
#define __this_cpu_generic_read_nopreempt(pcp)				\
({									\
	typeof(pcp) ___ret;						\
	preempt_disable_notrace();					\
	___ret = READ_ONCE(*raw_cpu_ptr(&(pcp)));			\
	preempt_enable_notrace();					\
	___ret;								\
})
/*
 * Read @pcp with interrupts disabled; needed when the access cannot
 * be done in one native-word load.
 */
#define __this_cpu_generic_read_noirq(pcp)				\
({									\
	typeof(pcp) ___ret;						\
	unsigned long ___flags;						\
	raw_local_irq_save(___flags);					\
	___ret = raw_cpu_generic_read(pcp);				\
	raw_local_irq_restore(___flags);				\
	___ret;								\
})
/*
 * Preemption/IRQ-safe read of @pcp: native-word sizes only need
 * preemption disabled, larger objects need IRQs off.
 */
#define this_cpu_generic_read(pcp)					\
({									\
	typeof(pcp) __ret;						\
	if (__native_word(pcp))						\
		__ret = __this_cpu_generic_read_nopreempt(pcp);		\
	else								\
		__ret = __this_cpu_generic_read_noirq(pcp);		\
	__ret;								\
})
/* IRQ-safe wrapper around raw_cpu_generic_to_op(). */
#define this_cpu_generic_to_op(pcp, val, op)				\
do {									\
	unsigned long __flags;						\
	raw_local_irq_save(__flags);					\
	raw_cpu_generic_to_op(pcp, val, op);				\
	raw_local_irq_restore(__flags);					\
} while (0)
/* IRQ-safe wrapper around raw_cpu_generic_add_return(). */
#define this_cpu_generic_add_return(pcp, val)				\
({									\
	typeof(pcp) __ret;						\
	unsigned long __flags;						\
	raw_local_irq_save(__flags);					\
	__ret = raw_cpu_generic_add_return(pcp, val);			\
	raw_local_irq_restore(__flags);					\
	__ret;								\
})
/* IRQ-safe wrapper around raw_cpu_generic_xchg(). */
#define this_cpu_generic_xchg(pcp, nval)				\
({									\
	typeof(pcp) __ret;						\
	unsigned long __flags;						\
	raw_local_irq_save(__flags);					\
	__ret = raw_cpu_generic_xchg(pcp, nval);			\
	raw_local_irq_restore(__flags);					\
	__ret;								\
})
/* IRQ-safe wrapper around raw_cpu_generic_cmpxchg(). */
#define this_cpu_generic_cmpxchg(pcp, oval, nval)			\
({									\
	typeof(pcp) __ret;						\
	unsigned long __flags;						\
	raw_local_irq_save(__flags);					\
	__ret = raw_cpu_generic_cmpxchg(pcp, oval, nval);		\
	raw_local_irq_restore(__flags);					\
	__ret;								\
})
/* IRQ-safe wrapper around raw_cpu_generic_cmpxchg_double(). */
#define this_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2)	\
({									\
	int __ret;							\
	unsigned long __flags;						\
	raw_local_irq_save(__flags);					\
	__ret = raw_cpu_generic_cmpxchg_double(pcp1, pcp2,		\
			oval1, oval2, nval1, nval2);			\
	raw_local_irq_restore(__flags);					\
	__ret;								\
})
/*
 * Size-specific raw_cpu_* operations.  An arch overrides one by
 * defining the macro before this point; anything left undefined
 * falls back to the raw_cpu_generic_* implementation above.  These
 * provide no preemption/IRQ protection.
 */
#ifndef raw_cpu_read_1
#define raw_cpu_read_1(pcp)		raw_cpu_generic_read(pcp)
#endif
#ifndef raw_cpu_read_2
#define raw_cpu_read_2(pcp)		raw_cpu_generic_read(pcp)
#endif
#ifndef raw_cpu_read_4
#define raw_cpu_read_4(pcp)		raw_cpu_generic_read(pcp)
#endif
#ifndef raw_cpu_read_8
#define raw_cpu_read_8(pcp)		raw_cpu_generic_read(pcp)
#endif

#ifndef raw_cpu_write_1
#define raw_cpu_write_1(pcp, val)	raw_cpu_generic_to_op(pcp, val, =)
#endif
#ifndef raw_cpu_write_2
#define raw_cpu_write_2(pcp, val)	raw_cpu_generic_to_op(pcp, val, =)
#endif
#ifndef raw_cpu_write_4
#define raw_cpu_write_4(pcp, val)	raw_cpu_generic_to_op(pcp, val, =)
#endif
#ifndef raw_cpu_write_8
#define raw_cpu_write_8(pcp, val)	raw_cpu_generic_to_op(pcp, val, =)
#endif

#ifndef raw_cpu_add_1
#define raw_cpu_add_1(pcp, val)		raw_cpu_generic_to_op(pcp, val, +=)
#endif
#ifndef raw_cpu_add_2
#define raw_cpu_add_2(pcp, val)		raw_cpu_generic_to_op(pcp, val, +=)
#endif
#ifndef raw_cpu_add_4
#define raw_cpu_add_4(pcp, val)		raw_cpu_generic_to_op(pcp, val, +=)
#endif
#ifndef raw_cpu_add_8
#define raw_cpu_add_8(pcp, val)		raw_cpu_generic_to_op(pcp, val, +=)
#endif

#ifndef raw_cpu_and_1
#define raw_cpu_and_1(pcp, val)		raw_cpu_generic_to_op(pcp, val, &=)
#endif
#ifndef raw_cpu_and_2
#define raw_cpu_and_2(pcp, val)		raw_cpu_generic_to_op(pcp, val, &=)
#endif
#ifndef raw_cpu_and_4
#define raw_cpu_and_4(pcp, val)		raw_cpu_generic_to_op(pcp, val, &=)
#endif
#ifndef raw_cpu_and_8
#define raw_cpu_and_8(pcp, val)		raw_cpu_generic_to_op(pcp, val, &=)
#endif

#ifndef raw_cpu_or_1
#define raw_cpu_or_1(pcp, val)		raw_cpu_generic_to_op(pcp, val, |=)
#endif
#ifndef raw_cpu_or_2
#define raw_cpu_or_2(pcp, val)		raw_cpu_generic_to_op(pcp, val, |=)
#endif
#ifndef raw_cpu_or_4
#define raw_cpu_or_4(pcp, val)		raw_cpu_generic_to_op(pcp, val, |=)
#endif
#ifndef raw_cpu_or_8
#define raw_cpu_or_8(pcp, val)		raw_cpu_generic_to_op(pcp, val, |=)
#endif

#ifndef raw_cpu_add_return_1
#define raw_cpu_add_return_1(pcp, val)	raw_cpu_generic_add_return(pcp, val)
#endif
#ifndef raw_cpu_add_return_2
#define raw_cpu_add_return_2(pcp, val)	raw_cpu_generic_add_return(pcp, val)
#endif
#ifndef raw_cpu_add_return_4
#define raw_cpu_add_return_4(pcp, val)	raw_cpu_generic_add_return(pcp, val)
#endif
#ifndef raw_cpu_add_return_8
#define raw_cpu_add_return_8(pcp, val)	raw_cpu_generic_add_return(pcp, val)
#endif

#ifndef raw_cpu_xchg_1
#define raw_cpu_xchg_1(pcp, nval)	raw_cpu_generic_xchg(pcp, nval)
#endif
#ifndef raw_cpu_xchg_2
#define raw_cpu_xchg_2(pcp, nval)	raw_cpu_generic_xchg(pcp, nval)
#endif
#ifndef raw_cpu_xchg_4
#define raw_cpu_xchg_4(pcp, nval)	raw_cpu_generic_xchg(pcp, nval)
#endif
#ifndef raw_cpu_xchg_8
#define raw_cpu_xchg_8(pcp, nval)	raw_cpu_generic_xchg(pcp, nval)
#endif

#ifndef raw_cpu_cmpxchg_1
#define raw_cpu_cmpxchg_1(pcp, oval, nval) \
	raw_cpu_generic_cmpxchg(pcp, oval, nval)
#endif
#ifndef raw_cpu_cmpxchg_2
#define raw_cpu_cmpxchg_2(pcp, oval, nval) \
	raw_cpu_generic_cmpxchg(pcp, oval, nval)
#endif
#ifndef raw_cpu_cmpxchg_4
#define raw_cpu_cmpxchg_4(pcp, oval, nval) \
	raw_cpu_generic_cmpxchg(pcp, oval, nval)
#endif
#ifndef raw_cpu_cmpxchg_8
#define raw_cpu_cmpxchg_8(pcp, oval, nval) \
	raw_cpu_generic_cmpxchg(pcp, oval, nval)
#endif

#ifndef raw_cpu_cmpxchg_double_1
#define raw_cpu_cmpxchg_double_1(pcp1, pcp2, oval1, oval2, nval1, nval2) \
	raw_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2)
#endif
#ifndef raw_cpu_cmpxchg_double_2
#define raw_cpu_cmpxchg_double_2(pcp1, pcp2, oval1, oval2, nval1, nval2) \
	raw_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2)
#endif
#ifndef raw_cpu_cmpxchg_double_4
#define raw_cpu_cmpxchg_double_4(pcp1, pcp2, oval1, oval2, nval1, nval2) \
	raw_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2)
#endif
#ifndef raw_cpu_cmpxchg_double_8
#define raw_cpu_cmpxchg_double_8(pcp1, pcp2, oval1, oval2, nval1, nval2) \
	raw_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2)
#endif
/*
 * Size-specific this_cpu_* operations.  Same override scheme as the
 * raw_cpu_* family above, but the generic fallbacks are the
 * preemption/IRQ-safe this_cpu_generic_* variants.
 */
#ifndef this_cpu_read_1
#define this_cpu_read_1(pcp)		this_cpu_generic_read(pcp)
#endif
#ifndef this_cpu_read_2
#define this_cpu_read_2(pcp)		this_cpu_generic_read(pcp)
#endif
#ifndef this_cpu_read_4
#define this_cpu_read_4(pcp)		this_cpu_generic_read(pcp)
#endif
#ifndef this_cpu_read_8
#define this_cpu_read_8(pcp)		this_cpu_generic_read(pcp)
#endif

#ifndef this_cpu_write_1
#define this_cpu_write_1(pcp, val)	this_cpu_generic_to_op(pcp, val, =)
#endif
#ifndef this_cpu_write_2
#define this_cpu_write_2(pcp, val)	this_cpu_generic_to_op(pcp, val, =)
#endif
#ifndef this_cpu_write_4
#define this_cpu_write_4(pcp, val)	this_cpu_generic_to_op(pcp, val, =)
#endif
#ifndef this_cpu_write_8
#define this_cpu_write_8(pcp, val)	this_cpu_generic_to_op(pcp, val, =)
#endif

#ifndef this_cpu_add_1
#define this_cpu_add_1(pcp, val)	this_cpu_generic_to_op(pcp, val, +=)
#endif
#ifndef this_cpu_add_2
#define this_cpu_add_2(pcp, val)	this_cpu_generic_to_op(pcp, val, +=)
#endif
#ifndef this_cpu_add_4
#define this_cpu_add_4(pcp, val)	this_cpu_generic_to_op(pcp, val, +=)
#endif
#ifndef this_cpu_add_8
#define this_cpu_add_8(pcp, val)	this_cpu_generic_to_op(pcp, val, +=)
#endif

#ifndef this_cpu_and_1
#define this_cpu_and_1(pcp, val)	this_cpu_generic_to_op(pcp, val, &=)
#endif
#ifndef this_cpu_and_2
#define this_cpu_and_2(pcp, val)	this_cpu_generic_to_op(pcp, val, &=)
#endif
#ifndef this_cpu_and_4
#define this_cpu_and_4(pcp, val)	this_cpu_generic_to_op(pcp, val, &=)
#endif
#ifndef this_cpu_and_8
#define this_cpu_and_8(pcp, val)	this_cpu_generic_to_op(pcp, val, &=)
#endif

#ifndef this_cpu_or_1
#define this_cpu_or_1(pcp, val)		this_cpu_generic_to_op(pcp, val, |=)
#endif
#ifndef this_cpu_or_2
#define this_cpu_or_2(pcp, val)		this_cpu_generic_to_op(pcp, val, |=)
#endif
#ifndef this_cpu_or_4
#define this_cpu_or_4(pcp, val)		this_cpu_generic_to_op(pcp, val, |=)
#endif
#ifndef this_cpu_or_8
#define this_cpu_or_8(pcp, val)		this_cpu_generic_to_op(pcp, val, |=)
#endif

#ifndef this_cpu_add_return_1
#define this_cpu_add_return_1(pcp, val)	this_cpu_generic_add_return(pcp, val)
#endif
#ifndef this_cpu_add_return_2
#define this_cpu_add_return_2(pcp, val)	this_cpu_generic_add_return(pcp, val)
#endif
#ifndef this_cpu_add_return_4
#define this_cpu_add_return_4(pcp, val)	this_cpu_generic_add_return(pcp, val)
#endif
#ifndef this_cpu_add_return_8
#define this_cpu_add_return_8(pcp, val)	this_cpu_generic_add_return(pcp, val)
#endif

#ifndef this_cpu_xchg_1
#define this_cpu_xchg_1(pcp, nval)	this_cpu_generic_xchg(pcp, nval)
#endif
#ifndef this_cpu_xchg_2
#define this_cpu_xchg_2(pcp, nval)	this_cpu_generic_xchg(pcp, nval)
#endif
#ifndef this_cpu_xchg_4
#define this_cpu_xchg_4(pcp, nval)	this_cpu_generic_xchg(pcp, nval)
#endif
#ifndef this_cpu_xchg_8
#define this_cpu_xchg_8(pcp, nval)	this_cpu_generic_xchg(pcp, nval)
#endif

#ifndef this_cpu_cmpxchg_1
#define this_cpu_cmpxchg_1(pcp, oval, nval) \
	this_cpu_generic_cmpxchg(pcp, oval, nval)
#endif
#ifndef this_cpu_cmpxchg_2
#define this_cpu_cmpxchg_2(pcp, oval, nval) \
	this_cpu_generic_cmpxchg(pcp, oval, nval)
#endif
#ifndef this_cpu_cmpxchg_4
#define this_cpu_cmpxchg_4(pcp, oval, nval) \
	this_cpu_generic_cmpxchg(pcp, oval, nval)
#endif
#ifndef this_cpu_cmpxchg_8
#define this_cpu_cmpxchg_8(pcp, oval, nval) \
	this_cpu_generic_cmpxchg(pcp, oval, nval)
#endif

#ifndef this_cpu_cmpxchg_double_1
#define this_cpu_cmpxchg_double_1(pcp1, pcp2, oval1, oval2, nval1, nval2) \
	this_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2)
#endif
#ifndef this_cpu_cmpxchg_double_2
#define this_cpu_cmpxchg_double_2(pcp1, pcp2, oval1, oval2, nval1, nval2) \
	this_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2)
#endif
#ifndef this_cpu_cmpxchg_double_4
#define this_cpu_cmpxchg_double_4(pcp1, pcp2, oval1, oval2, nval1, nval2) \
	this_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2)
#endif
#ifndef this_cpu_cmpxchg_double_8
#define this_cpu_cmpxchg_double_8(pcp1, pcp2, oval1, oval2, nval1, nval2) \
	this_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2)
#endif
#endif /* _ASM_GENERIC_PERCPU_H_ */