/* Copyright (C) 2010-2013 Free Software Foundation, Inc.
   This file is part of the GNU C Library.
   Contributed by Maxim Kuvyrkov <maxim@codesourcery.com>, 2010.

   The GNU C Library is free software; you can redistribute it and/or
   modify it under the terms of the GNU Lesser General Public
   License as published by the Free Software Foundation; either
   version 2.1 of the License, or (at your option) any later version.

   The GNU C Library is distributed in the hope that it will be useful,
   but WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
   Lesser General Public License for more details.

   You should have received a copy of the GNU Lesser General Public
   License along with the GNU C Library.  If not, see
   <http://www.gnu.org/licenses/>.  */

/* Borrowed from ARM's version.  */

#ifndef _LOWLEVELLOCK_H
#define _LOWLEVELLOCK_H 1

#include <time.h>
#include <sys/param.h>
#include <bits/pthreadtypes.h>
#include <atomic.h>
#include <kernel-features.h>

#define FUTEX_WAIT		0
#define FUTEX_WAKE		1
#define FUTEX_REQUEUE		3
#define FUTEX_CMP_REQUEUE	4
#define FUTEX_WAKE_OP		5
#define FUTEX_OP_CLEAR_WAKE_IF_GT_ONE	((4 << 24) | 1)
#define FUTEX_LOCK_PI		6
#define FUTEX_UNLOCK_PI		7
#define FUTEX_TRYLOCK_PI	8
#define FUTEX_WAIT_BITSET	9
#define FUTEX_WAKE_BITSET	10
#define FUTEX_PRIVATE_FLAG	128
#define FUTEX_CLOCK_REALTIME	256

#define FUTEX_BITSET_MATCH_ANY	0xffffffff

/* Values for 'private' parameter of locking macros.  Yes, the
   definition seems to be backwards.  But it is not.  The bit will be
   reversed before passing to the system call.  */
#define LLL_PRIVATE	0
#define LLL_SHARED	FUTEX_PRIVATE_FLAG

#if !defined NOT_IN_libc || defined IS_IN_rtld
/* In libc.so or ld.so all futexes are private.  */
# ifdef __ASSUME_PRIVATE_FUTEX
#  define __lll_private_flag(fl, private) \
  ((fl) | FUTEX_PRIVATE_FLAG)
# else
#  define __lll_private_flag(fl, private) \
  ((fl) | THREAD_GETMEM (THREAD_SELF, header.private_futex))
# endif
#else
# ifdef __ASSUME_PRIVATE_FUTEX
#  define __lll_private_flag(fl, private) \
  (((fl) | FUTEX_PRIVATE_FLAG) ^ (private))
# else
#  define __lll_private_flag(fl, private) \
  (__builtin_constant_p (private)					\
   ? ((private) == 0							\
      ? ((fl) | THREAD_GETMEM (THREAD_SELF, header.private_futex))	\
      : (fl))								\
   : ((fl) | (((private) ^ FUTEX_PRIVATE_FLAG)				\
	      & THREAD_GETMEM (THREAD_SELF, header.private_futex))))
# endif
#endif

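/* Illustrative note (added; not part of the original header): with
   __ASSUME_PRIVATE_FUTEX defined and outside libc.so/ld.so, the macro
   expands as

     __lll_private_flag (FUTEX_WAIT, LLL_PRIVATE)
       == (0 | 128) ^ 0    == 128   (private-futex wait)
     __lll_private_flag (FUTEX_WAIT, LLL_SHARED)
       == (0 | 128) ^ 128  == 0     (process-shared wait)

   which is why LLL_SHARED is FUTEX_PRIVATE_FLAG and LLL_PRIVATE is 0:
   the XOR "reverses the bit" as the comment above describes.  */
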
#define lll_futex_wait(futexp, val, private) \
  lll_futex_timed_wait(futexp, val, NULL, private)

#define lll_futex_timed_wait(futexp, val, timespec, private) \
  ({									\
    INTERNAL_SYSCALL_DECL (__err);					\
    long int __ret;							\
    __ret = INTERNAL_SYSCALL (futex, __err, 4, (futexp),		\
			      __lll_private_flag (FUTEX_WAIT, private),	\
			      (val), (timespec));			\
    __ret;								\
  })

#define lll_futex_timed_wait_bitset(futexp, val, timespec, clockbit, private) \
  ({									\
    INTERNAL_SYSCALL_DECL (__err);					\
    long int __ret;							\
    int __op = FUTEX_WAIT_BITSET | clockbit;				\
    __ret = INTERNAL_SYSCALL (futex, __err, 6, (long) (futexp),		\
			      __lll_private_flag (__op, private),	\
			      (val), (timespec), NULL /* Unused.  */,	\
			      FUTEX_BITSET_MATCH_ANY);			\
    __ret;								\
  })

#define lll_futex_wake(futexp, nr, private) \
  ({									\
    INTERNAL_SYSCALL_DECL (__err);					\
    long int __ret;							\
    __ret = INTERNAL_SYSCALL (futex, __err, 4, (futexp),		\
			      __lll_private_flag (FUTEX_WAKE, private),	\
			      (nr), 0);					\
    __ret;								\
  })

#define lll_robust_dead(futexv, private) \
  do									\
    {									\
      int *__futexp = &(futexv);					\
      atomic_or (__futexp, FUTEX_OWNER_DIED);				\
      lll_futex_wake (__futexp, 1, private);				\
    }									\
  while (0)

/* Returns non-zero if error happened, zero if success.  */
#define lll_futex_requeue(futexp, nr_wake, nr_move, mutex, val, private) \
  ({									\
    INTERNAL_SYSCALL_DECL (__err);					\
    long int __ret;							\
    __ret = INTERNAL_SYSCALL (futex, __err, 6, (futexp),		\
			      __lll_private_flag (FUTEX_CMP_REQUEUE, private),\
			      (nr_wake), (nr_move), (mutex), (val));	\
    INTERNAL_SYSCALL_ERROR_P (__ret, __err);				\
  })

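/* Illustrative caller pattern (added; not part of the original header):
   because the result is only an error indicator, users of this macro
   (e.g. a condvar broadcast path) typically fall back to a plain wake
   when the requeue fails.  The names below are placeholders.

     if (__builtin_expect (lll_futex_requeue (cond_futex, 1, INT_MAX,
					      mutex_futex, val, private), 0))
       lll_futex_wake (cond_futex, INT_MAX, private);
*/
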
/* Returns non-zero if error happened, zero if success.  */
#define lll_futex_wake_unlock(futexp, nr_wake, nr_wake2, futexp2, private) \
  ({									\
    INTERNAL_SYSCALL_DECL (__err);					\
    long int __ret;							\
    __ret = INTERNAL_SYSCALL (futex, __err, 6, (futexp),		\
			      __lll_private_flag (FUTEX_WAKE_OP, private), \
			      (nr_wake), (nr_wake2), (futexp2),		\
			      FUTEX_OP_CLEAR_WAKE_IF_GT_ONE);		\
    INTERNAL_SYSCALL_ERROR_P (__ret, __err);				\
  })

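/* Worked decoding (added; not part of the original header):
   FUTEX_OP_CLEAR_WAKE_IF_GT_ONE == (4 << 24) | 1 is the kernel's
   FUTEX_OP (FUTEX_OP_SET, 0, FUTEX_OP_CMP_GT, 1) encoding (op in bits
   28-31, cmp in bits 24-27, oparg in bits 12-23, cmparg in bits 0-11).
   So this FUTEX_WAKE_OP call wakes nr_wake waiters on *futexp, atomically
   stores 0 into *futexp2, and wakes nr_wake2 waiters on *futexp2 only if
   its old value was greater than 1, i.e. only if it was contended.  */
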
#define lll_trylock(lock) \
  atomic_compare_and_exchange_val_acq (&(lock), 1, 0)

#define lll_cond_trylock(lock) \
  atomic_compare_and_exchange_val_acq (&(lock), 2, 0)

#define lll_robust_trylock(lock, id) \
  atomic_compare_and_exchange_val_acq (&(lock), id, 0)

extern void __lll_lock_wait_private (int *futex) attribute_hidden;
extern void __lll_lock_wait (int *futex, int private) attribute_hidden;
extern int __lll_robust_lock_wait (int *futex, int private) attribute_hidden;

#define __lll_lock(futex, private)					\
  ((void) ({								\
    int *__futex = (futex);						\
    if (__builtin_expect (atomic_compare_and_exchange_val_acq (__futex, \
								1, 0), 0)) \
      {									\
	if (__builtin_constant_p (private) && (private) == LLL_PRIVATE) \
	  __lll_lock_wait_private (__futex);				\
	else								\
	  __lll_lock_wait (__futex, private);				\
      }									\
  }))
#define lll_lock(futex, private) __lll_lock (&(futex), private)

#define __lll_robust_lock(futex, id, private)				\
  ({									\
    int *__futex = (futex);						\
    int __val = 0;							\
									\
    if (__builtin_expect (atomic_compare_and_exchange_bool_acq (__futex, id, \
								 0), 0)) \
      __val = __lll_robust_lock_wait (__futex, private);		\
    __val;								\
  })
#define lll_robust_lock(futex, id, private) \
  __lll_robust_lock (&(futex), id, private)

#define __lll_cond_lock(futex, private)					\
  ((void) ({								\
    int *__futex = (futex);						\
    if (__builtin_expect (atomic_exchange_acq (__futex, 2), 0))		\
      __lll_lock_wait (__futex, private);				\
  }))
#define lll_cond_lock(futex, private) __lll_cond_lock (&(futex), private)

#define lll_robust_cond_lock(futex, id, private) \
  __lll_robust_lock (&(futex), (id) | FUTEX_WAITERS, private)

extern int __lll_timedlock_wait (int *futex, const struct timespec *,
				 int private) attribute_hidden;
extern int __lll_robust_timedlock_wait (int *futex, const struct timespec *,
					int private) attribute_hidden;

#define __lll_timedlock(futex, abstime, private)			\
  ({									\
    int *__futex = (futex);						\
    int __val = 0;							\
									\
    if (__builtin_expect (atomic_exchange_acq (__futex, 1), 0))		\
      __val = __lll_timedlock_wait (__futex, abstime, private);		\
    __val;								\
  })
#define lll_timedlock(futex, abstime, private) \
  __lll_timedlock (&(futex), abstime, private)

#define __lll_robust_timedlock(futex, abstime, id, private)		\
  ({									\
    int *__futex = (futex);						\
    int __val = 0;							\
									\
    if (__builtin_expect (atomic_compare_and_exchange_bool_acq (__futex, id, \
								 0), 0)) \
      __val = __lll_robust_timedlock_wait (__futex, abstime, private);	\
    __val;								\
  })
#define lll_robust_timedlock(futex, abstime, id, private) \
  __lll_robust_timedlock (&(futex), abstime, id, private)

#define __lll_unlock(futex, private) \
  (void)							\
    ({ int *__futex = (futex);					\
       int __oldval = atomic_exchange_rel (__futex, 0);		\
       if (__builtin_expect (__oldval > 1, 0))			\
	 lll_futex_wake (__futex, 1, private);			\
    })
#define lll_unlock(futex, private) __lll_unlock(&(futex), private)

#define __lll_robust_unlock(futex, private) \
  (void)							\
    ({ int *__futex = (futex);					\
       int __oldval = atomic_exchange_rel (__futex, 0);		\
       if (__builtin_expect (__oldval & FUTEX_WAITERS, 0))	\
	 lll_futex_wake (__futex, 1, private);			\
    })
#define lll_robust_unlock(futex, private) \
  __lll_robust_unlock(&(futex), private)

#define lll_islocked(futex) \
  (futex != 0)

/* Our internal lock implementation is identical to the binary-compatible
   mutex implementation.  */

/* Initializers for lock.  */
#define LLL_LOCK_INITIALIZER		(0)
#define LLL_LOCK_INITIALIZER_LOCKED	(1)

/* The states of a lock are:
    0  -  untaken
    1  -  taken by one user
   >1  -  taken by more users */

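/* Illustrative transition sketch (added; not part of the original header):

     int lock = LLL_LOCK_INITIALIZER;   // 0: untaken
     lll_lock (lock, LLL_PRIVATE);      // CAS 0 -> 1: uncontended fast path
     ...critical section...
     lll_unlock (lock, LLL_PRIVATE);    // store 0; wake only if value was > 1

   A contending locker blocks in __lll_lock_wait, which sets the value to 2
   so that the eventual unlock sees "> 1" and issues a futex wake.  */
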
/* The kernel notifies a process which uses CLONE_CHILD_CLEARTID via futex
   wakeup when the clone terminates.  The memory location contains the
   thread ID while the clone is running and is reset to zero
   afterwards.  */
#define lll_wait_tid(tid) \
  do {							\
    __typeof (tid) __tid;				\
    while ((__tid = (tid)) != 0)			\
      lll_futex_wait (&(tid), __tid, LLL_SHARED);	\
  } while (0)

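/* Illustrative use (added; not part of the original header): join-style
   code waits for the kernel's CLONE_CHILD_CLEARTID store roughly as

     lll_wait_tid (pd->tid);   // pd->tid is nonzero while the thread runs

   where pd stands for the descriptor of the thread being joined.  The
   loop re-reads the location, so a spurious wake-up with a still-nonzero
   TID simply waits again.  */
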
extern int __lll_timedwait_tid (int *, const struct timespec *)
     attribute_hidden;

#define lll_timedwait_tid(tid, abstime) \
  ({							\
    int __res = 0;					\
    if ((tid) != 0)					\
      __res = __lll_timedwait_tid (&(tid), (abstime));	\
    __res;						\
  })

#endif	/* lowlevellock.h */