Fix "set but not used" warnings from MIPS bits/atomic.h.
ports/sysdeps/mips/bits/atomic.h  (platform/upstream/glibc.git)
/* Low-level functions for atomic operations. MIPS version.
   Copyright (C) 2005-2012 Free Software Foundation, Inc.
   This file is part of the GNU C Library.

   The GNU C Library is free software; you can redistribute it and/or
   modify it under the terms of the GNU Lesser General Public
   License as published by the Free Software Foundation; either
   version 2.1 of the License, or (at your option) any later version.

   The GNU C Library is distributed in the hope that it will be useful,
   but WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
   Lesser General Public License for more details.

   You should have received a copy of the GNU Lesser General Public
   License along with the GNU C Library.  If not, see
   <http://www.gnu.org/licenses/>.  */

#ifndef _MIPS_BITS_ATOMIC_H
#define _MIPS_BITS_ATOMIC_H 1

#include <inttypes.h>
#include <sgidefs.h>

typedef int32_t atomic32_t;
typedef uint32_t uatomic32_t;
typedef int_fast32_t atomic_fast32_t;
typedef uint_fast32_t uatomic_fast32_t;

typedef int64_t atomic64_t;
typedef uint64_t uatomic64_t;
typedef int_fast64_t atomic_fast64_t;
typedef uint_fast64_t uatomic_fast64_t;

typedef intptr_t atomicptr_t;
typedef uintptr_t uatomicptr_t;
typedef intmax_t atomic_max_t;
typedef uintmax_t uatomic_max_t;

#if _MIPS_SIM == _ABIO32
#define MIPS_PUSH_MIPS2 ".set   mips2\n\t"
#else
#define MIPS_PUSH_MIPS2
#endif

/* See the comments in <sys/asm.h> about the use of the sync instruction.  */
#ifndef MIPS_SYNC
# define MIPS_SYNC      sync
#endif

/* Certain revisions of the R10000 Processor need an LL/SC Workaround
   enabled.  Revisions before 3.0 misbehave on atomic operations, and
   Revs 2.6 and lower deadlock after several seconds due to other errata.

   To quote the R10K Errata:
      Workaround: The basic idea is to inhibit the four instructions
      from simultaneously becoming active in R10000. Padding all
      ll/sc sequences with nops or changing the looping branch in the
      routines to a branch likely (which is always predicted taken
      by R10000) will work. The nops should go after the loop, and the
      number of them should be 28. This number could be decremented for
      each additional instruction in the ll/sc loop such as the lock
      modifier(s) between the ll and sc, the looping branch and its
      delay slot. For typical short routines with one ll/sc loop, any
      instructions after the loop could also count as a decrement. The
      nop workaround pollutes the cache more but would be a few cycles
      faster if all the code is in the cache and the looping branch
      is predicted not taken.  */


#ifdef _MIPS_ARCH_R10000
#define R10K_BEQZ_INSN "beqzl"
#else
#define R10K_BEQZ_INSN "beqz"
#endif
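
/* Of the two workarounds described in the errata note above, the loops
   below take the branch-likely route: when built for R10000 the looping
   branch is emitted as "beqzl" rather than "beqz".  */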

#define MIPS_SYNC_STR_2(X) #X
#define MIPS_SYNC_STR_1(X) MIPS_SYNC_STR_2(X)
#define MIPS_SYNC_STR MIPS_SYNC_STR_1(MIPS_SYNC)
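
/* Illustrative note: the two helper macros above are the usual two-step
   stringification idiom.  The extra level makes the MIPS_SYNC argument be
   macro-expanded before "#" is applied, so with the default definition
   MIPS_SYNC_STR ends up as the string "sync", which is spliced into the
   asm templates below.  */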

#if __GNUC_PREREQ (4, 8)
/* The __atomic_* builtins are available in GCC 4.7 and later, but MIPS
   support for their efficient implementation was added only in GCC 4.8.  */

/* Compare and exchange.
   For all "bool" routines, we return FALSE if exchange successful.  */

# define __arch_compare_and_exchange_bool_8_int(mem, newval, oldval, model) \
  (abort (), 0)

# define __arch_compare_and_exchange_bool_16_int(mem, newval, oldval, model) \
  (abort (), 0)

# define __arch_compare_and_exchange_bool_32_int(mem, newval, oldval, model) \
  ({                                                                    \
    typeof (*mem) __oldval = (oldval);                                  \
    !__atomic_compare_exchange_n (mem, (void *) &__oldval, newval, 0,   \
                                  model, __ATOMIC_RELAXED);             \
  })

# define __arch_compare_and_exchange_val_8_int(mem, newval, oldval, model) \
  (abort (), (typeof(*mem)) 0)

# define __arch_compare_and_exchange_val_16_int(mem, newval, oldval, model) \
  (abort (), (typeof(*mem)) 0)

# define __arch_compare_and_exchange_val_32_int(mem, newval, oldval, model) \
  ({                                                                    \
    typeof (*mem) __oldval = (oldval);                                  \
    __atomic_compare_exchange_n (mem, (void *) &__oldval, newval, 0,    \
                                 model, __ATOMIC_RELAXED);              \
    __oldval;                                                           \
  })

# if _MIPS_SIM == _ABIO32
  /* We can't do an atomic 64-bit operation in O32.  */
#  define __arch_compare_and_exchange_bool_64_int(mem, newval, oldval, model) \
  (abort (), 0)
#  define __arch_compare_and_exchange_val_64_int(mem, newval, oldval, model) \
  (abort (), (typeof(*mem)) 0)
# else
#  define __arch_compare_and_exchange_bool_64_int(mem, newval, oldval, model) \
  __arch_compare_and_exchange_bool_32_int (mem, newval, oldval, model)
#  define __arch_compare_and_exchange_val_64_int(mem, newval, oldval, model) \
  __arch_compare_and_exchange_val_32_int (mem, newval, oldval, model)
# endif

/* Compare and exchange with "acquire" semantics, i.e. barrier after.  */

# define atomic_compare_and_exchange_bool_acq(mem, new, old)    \
  __atomic_bool_bysize (__arch_compare_and_exchange_bool, int,  \
                        mem, new, old, __ATOMIC_ACQUIRE)

# define atomic_compare_and_exchange_val_acq(mem, new, old)     \
  __atomic_val_bysize (__arch_compare_and_exchange_val, int,    \
                       mem, new, old, __ATOMIC_ACQUIRE)

/* Compare and exchange with "release" semantics, i.e. barrier before.  */

# define atomic_compare_and_exchange_bool_rel(mem, new, old)    \
  __atomic_bool_bysize (__arch_compare_and_exchange_bool, int,  \
                        mem, new, old, __ATOMIC_RELEASE)

# define atomic_compare_and_exchange_val_rel(mem, new, old)     \
  __atomic_val_bysize (__arch_compare_and_exchange_val, int,    \
                       mem, new, old, __ATOMIC_RELEASE)
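
/* Minimal usage sketch (the variable names here are hypothetical, for
   illustration only).  The "bool" forms follow the inverted convention
   noted above and evaluate to 0 (false) when the exchange succeeded:

     int __lock = 0;
     if (!atomic_compare_and_exchange_bool_acq (&__lock, 1, 0))
       {
         ... we observed 0, stored 1 and now own the lock ...
       }

   The "val" forms instead evaluate to the value seen in *mem before the
   attempt, whether or not the exchange happened.  */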


/* Atomic exchange (without compare).  */

# define __arch_exchange_8_int(mem, newval, model)      \
  (abort (), (typeof(*mem)) 0)

# define __arch_exchange_16_int(mem, newval, model)     \
  (abort (), (typeof(*mem)) 0)

# define __arch_exchange_32_int(mem, newval, model)     \
  __atomic_exchange_n (mem, newval, model)

# if _MIPS_SIM == _ABIO32
/* We can't do an atomic 64-bit operation in O32.  */
#  define __arch_exchange_64_int(mem, newval, model)    \
  (abort (), (typeof(*mem)) 0)
# else
#  define __arch_exchange_64_int(mem, newval, model)    \
  __atomic_exchange_n (mem, newval, model)
# endif

# define atomic_exchange_acq(mem, value)                                \
  __atomic_val_bysize (__arch_exchange, int, mem, value, __ATOMIC_ACQUIRE)

# define atomic_exchange_rel(mem, value)                                \
  __atomic_val_bysize (__arch_exchange, int, mem, value, __ATOMIC_RELEASE)
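
/* Usage sketch (illustrative only, with hypothetical variable names):
   atomic_exchange_acq stores the new value and evaluates to whatever was
   previously in memory, e.g.

     int __was_set = atomic_exchange_acq (&__flag, 1);

   leaves 1 in __flag and the old contents in __was_set.  */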


/* Atomically add value and return the previous (unincremented) value.  */

# define __arch_exchange_and_add_8_int(mem, value, model)       \
  (abort (), (typeof(*mem)) 0)

# define __arch_exchange_and_add_16_int(mem, value, model)      \
  (abort (), (typeof(*mem)) 0)

# define __arch_exchange_and_add_32_int(mem, value, model)      \
  __atomic_fetch_add (mem, value, model)

# if _MIPS_SIM == _ABIO32
/* We can't do an atomic 64-bit operation in O32.  */
#  define __arch_exchange_and_add_64_int(mem, value, model)     \
  (abort (), (typeof(*mem)) 0)
# else
#  define __arch_exchange_and_add_64_int(mem, value, model)     \
  __atomic_fetch_add (mem, value, model)
# endif

# define atomic_exchange_and_add_acq(mem, value)                        \
  __atomic_val_bysize (__arch_exchange_and_add, int, mem, value,        \
                       __ATOMIC_ACQUIRE)

# define atomic_exchange_and_add_rel(mem, value)                        \
  __atomic_val_bysize (__arch_exchange_and_add, int, mem, value,        \
                       __ATOMIC_RELEASE)
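
/* Usage sketch (illustrative only, with hypothetical variable names):

     int __ticket = atomic_exchange_and_add_acq (&__next_ticket, 1);

   increments __next_ticket atomically and yields the pre-increment value
   in __ticket.  */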
#else /* !__GNUC_PREREQ (4, 8) */
/* This implementation using inline assembly will be removed once glibc
   requires GCC 4.8 or later to build.  */

/* Compare and exchange.  For all of the "xxx" routines, we expect a
   "__prev" and a "__cmp" variable to be provided by the enclosing scope,
   in which values are returned.  */

# define __arch_compare_and_exchange_xxx_8_int(mem, newval, oldval, rel, acq) \
  (abort (), __prev = __cmp = 0)

# define __arch_compare_and_exchange_xxx_16_int(mem, newval, oldval, rel, acq) \
  (abort (), __prev = __cmp = 0)

# define __arch_compare_and_exchange_xxx_32_int(mem, newval, oldval, rel, acq) \
     __asm__ __volatile__ (                                                   \
     ".set      push\n\t"                                                     \
     MIPS_PUSH_MIPS2                                                          \
     rel        "\n"                                                          \
     "1:\t"                                                                   \
     "ll        %0,%5\n\t"                                                    \
     "move      %1,$0\n\t"                                                    \
     "bne       %0,%3,2f\n\t"                                                 \
     "move      %1,%4\n\t"                                                    \
     "sc        %1,%2\n\t"                                                    \
     R10K_BEQZ_INSN"    %1,1b\n"                                              \
     acq        "\n\t"                                                        \
     ".set      pop\n"                                                        \
     "2:\n\t"                                                                 \
              : "=&r" (__prev), "=&r" (__cmp), "=m" (*mem)                    \
              : "r" (oldval), "r" (newval), "m" (*mem)                        \
              : "memory")
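
/* Operand key for the LL/SC loop above (added commentary): %0 is __prev
   (the value loaded by ll), %1 is __cmp (left 0 if the comparison failed,
   otherwise the sc success flag), %2/%5 are *mem as output/input, %3 is
   oldval and %4 is newval.  The 64-bit variant below is identical except
   that it uses lld/scd.  */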

# if _MIPS_SIM == _ABIO32
/* We can't do an atomic 64-bit operation in O32.  */
#  define __arch_compare_and_exchange_xxx_64_int(mem, newval, oldval, rel, acq) \
  (abort (), __prev = __cmp = 0)
# else
#  define __arch_compare_and_exchange_xxx_64_int(mem, newval, oldval, rel, acq) \
     __asm__ __volatile__ ("\n"                                               \
     ".set      push\n\t"                                                     \
     MIPS_PUSH_MIPS2                                                          \
     rel        "\n"                                                          \
     "1:\t"                                                                   \
     "lld       %0,%5\n\t"                                                    \
     "move      %1,$0\n\t"                                                    \
     "bne       %0,%3,2f\n\t"                                                 \
     "move      %1,%4\n\t"                                                    \
     "scd       %1,%2\n\t"                                                    \
     R10K_BEQZ_INSN"    %1,1b\n"                                              \
     acq        "\n\t"                                                        \
     ".set      pop\n"                                                        \
     "2:\n\t"                                                                 \
              : "=&r" (__prev), "=&r" (__cmp), "=m" (*mem)                    \
              : "r" (oldval), "r" (newval), "m" (*mem)                        \
              : "memory")
# endif

/* For all "bool" routines, we return FALSE if exchange successful.  */

# define __arch_compare_and_exchange_bool_8_int(mem, new, old, rel, acq) \
({ typeof (*mem) __prev __attribute__ ((unused)); int __cmp;            \
   __arch_compare_and_exchange_xxx_8_int(mem, new, old, rel, acq);      \
   !__cmp; })

# define __arch_compare_and_exchange_bool_16_int(mem, new, old, rel, acq) \
({ typeof (*mem) __prev __attribute__ ((unused)); int __cmp;            \
   __arch_compare_and_exchange_xxx_16_int(mem, new, old, rel, acq);     \
   !__cmp; })

# define __arch_compare_and_exchange_bool_32_int(mem, new, old, rel, acq) \
({ typeof (*mem) __prev __attribute__ ((unused)); int __cmp;            \
   __arch_compare_and_exchange_xxx_32_int(mem, new, old, rel, acq);     \
   !__cmp; })

# define __arch_compare_and_exchange_bool_64_int(mem, new, old, rel, acq) \
({ typeof (*mem) __prev __attribute__ ((unused)); int __cmp;            \
   __arch_compare_and_exchange_xxx_64_int(mem, new, old, rel, acq);     \
   !__cmp; })

/* For all "val" routines, return the old value whether exchange
   successful or not.  */

# define __arch_compare_and_exchange_val_8_int(mem, new, old, rel, acq) \
({ typeof (*mem) __prev; int __cmp;                                     \
   __arch_compare_and_exchange_xxx_8_int(mem, new, old, rel, acq);      \
   (typeof (*mem))__prev; })

# define __arch_compare_and_exchange_val_16_int(mem, new, old, rel, acq) \
({ typeof (*mem) __prev; int __cmp;                                     \
   __arch_compare_and_exchange_xxx_16_int(mem, new, old, rel, acq);     \
   (typeof (*mem))__prev; })

# define __arch_compare_and_exchange_val_32_int(mem, new, old, rel, acq) \
({ typeof (*mem) __prev; int __cmp;                                     \
   __arch_compare_and_exchange_xxx_32_int(mem, new, old, rel, acq);     \
   (typeof (*mem))__prev; })

# define __arch_compare_and_exchange_val_64_int(mem, new, old, rel, acq) \
({ typeof (*mem) __prev; int __cmp;                                     \
   __arch_compare_and_exchange_xxx_64_int(mem, new, old, rel, acq);     \
   (typeof (*mem))__prev; })

/* Compare and exchange with "acquire" semantics, i.e. barrier after.  */

# define atomic_compare_and_exchange_bool_acq(mem, new, old)    \
  __atomic_bool_bysize (__arch_compare_and_exchange_bool, int,  \
                        mem, new, old, "", MIPS_SYNC_STR)

# define atomic_compare_and_exchange_val_acq(mem, new, old)     \
  __atomic_val_bysize (__arch_compare_and_exchange_val, int,    \
                       mem, new, old, "", MIPS_SYNC_STR)

/* Compare and exchange with "release" semantics, i.e. barrier before.  */

# define atomic_compare_and_exchange_bool_rel(mem, new, old)    \
  __atomic_bool_bysize (__arch_compare_and_exchange_bool, int,  \
                        mem, new, old, MIPS_SYNC_STR, "")

# define atomic_compare_and_exchange_val_rel(mem, new, old)     \
  __atomic_val_bysize (__arch_compare_and_exchange_val, int,    \
                       mem, new, old, MIPS_SYNC_STR, "")


/* Atomic exchange (without compare).  */

# define __arch_exchange_xxx_8_int(mem, newval, rel, acq) \
  (abort (), (typeof(*mem)) 0)

# define __arch_exchange_xxx_16_int(mem, newval, rel, acq) \
  (abort (), (typeof(*mem)) 0)

# define __arch_exchange_xxx_32_int(mem, newval, rel, acq) \
({ typeof (*mem) __prev; int __cmp;                                           \
     __asm__ __volatile__ ("\n"                                               \
     ".set      push\n\t"                                                     \
     MIPS_PUSH_MIPS2                                                          \
     rel        "\n"                                                          \
     "1:\t"                                                                   \
     "ll        %0,%4\n\t"                                                    \
     "move      %1,%3\n\t"                                                    \
     "sc        %1,%2\n\t"                                                    \
     R10K_BEQZ_INSN"    %1,1b\n"                                              \
     acq        "\n\t"                                                        \
     ".set      pop\n"                                                        \
     "2:\n\t"                                                                 \
              : "=&r" (__prev), "=&r" (__cmp), "=m" (*mem)                    \
              : "r" (newval), "m" (*mem)                                      \
              : "memory");                                                    \
  __prev; })

# if _MIPS_SIM == _ABIO32
/* We can't do an atomic 64-bit operation in O32.  */
#  define __arch_exchange_xxx_64_int(mem, newval, rel, acq) \
  (abort (), (typeof(*mem)) 0)
# else
#  define __arch_exchange_xxx_64_int(mem, newval, rel, acq) \
({ typeof (*mem) __prev; int __cmp;                                           \
     __asm__ __volatile__ ("\n"                                               \
     ".set      push\n\t"                                                     \
     MIPS_PUSH_MIPS2                                                          \
     rel        "\n"                                                          \
     "1:\n"                                                                   \
     "lld       %0,%4\n\t"                                                    \
     "move      %1,%3\n\t"                                                    \
     "scd       %1,%2\n\t"                                                    \
     R10K_BEQZ_INSN"    %1,1b\n"                                              \
     acq        "\n\t"                                                        \
     ".set      pop\n"                                                        \
     "2:\n\t"                                                                 \
              : "=&r" (__prev), "=&r" (__cmp), "=m" (*mem)                    \
              : "r" (newval), "m" (*mem)                                      \
              : "memory");                                                    \
  __prev; })
# endif

# define atomic_exchange_acq(mem, value) \
  __atomic_val_bysize (__arch_exchange_xxx, int, mem, value, "", MIPS_SYNC_STR)

# define atomic_exchange_rel(mem, value) \
  __atomic_val_bysize (__arch_exchange_xxx, int, mem, value, MIPS_SYNC_STR, "")


/* Atomically add value and return the previous (unincremented) value.  */

# define __arch_exchange_and_add_8_int(mem, newval, rel, acq) \
  (abort (), (typeof(*mem)) 0)

# define __arch_exchange_and_add_16_int(mem, newval, rel, acq) \
  (abort (), (typeof(*mem)) 0)

# define __arch_exchange_and_add_32_int(mem, value, rel, acq) \
({ typeof (*mem) __prev; int __cmp;                                           \
     __asm__ __volatile__ ("\n"                                               \
     ".set      push\n\t"                                                     \
     MIPS_PUSH_MIPS2                                                          \
     rel        "\n"                                                          \
     "1:\t"                                                                   \
     "ll        %0,%4\n\t"                                                    \
     "addu      %1,%0,%3\n\t"                                                 \
     "sc        %1,%2\n\t"                                                    \
     R10K_BEQZ_INSN"    %1,1b\n"                                              \
     acq        "\n\t"                                                        \
     ".set      pop\n"                                                        \
     "2:\n\t"                                                                 \
              : "=&r" (__prev), "=&r" (__cmp), "=m" (*mem)                    \
              : "r" (value), "m" (*mem)                                       \
              : "memory");                                                    \
  __prev; })

# if _MIPS_SIM == _ABIO32
/* We can't do an atomic 64-bit operation in O32.  */
#  define __arch_exchange_and_add_64_int(mem, value, rel, acq) \
  (abort (), (typeof(*mem)) 0)
# else
#  define __arch_exchange_and_add_64_int(mem, value, rel, acq) \
({ typeof (*mem) __prev; int __cmp;                                           \
     __asm__ __volatile__ (                                                   \
     ".set      push\n\t"                                                     \
     MIPS_PUSH_MIPS2                                                          \
     rel        "\n"                                                          \
     "1:\t"                                                                   \
     "lld       %0,%4\n\t"                                                    \
     "daddu     %1,%0,%3\n\t"                                                 \
     "scd       %1,%2\n\t"                                                    \
     R10K_BEQZ_INSN"    %1,1b\n"                                              \
     acq        "\n\t"                                                        \
     ".set      pop\n"                                                        \
     "2:\n\t"                                                                 \
              : "=&r" (__prev), "=&r" (__cmp), "=m" (*mem)                    \
              : "r" (value), "m" (*mem)                                       \
              : "memory");                                                    \
  __prev; })
# endif

# define atomic_exchange_and_add_acq(mem, value)                        \
  __atomic_val_bysize (__arch_exchange_and_add, int, mem, value,        \
                       "", MIPS_SYNC_STR)

# define atomic_exchange_and_add_rel(mem, value)                        \
  __atomic_val_bysize (__arch_exchange_and_add, int, mem, value,        \
                       MIPS_SYNC_STR, "")
#endif /* __GNUC_PREREQ (4, 8) */

/* TODO: More atomic operations could be implemented efficiently; only the
   basic requirements are done.  */

#define atomic_full_barrier() \
  __asm__ __volatile__ (".set push\n\t"                                       \
                        MIPS_PUSH_MIPS2                                       \
                        MIPS_SYNC_STR "\n\t"                                  \
                        ".set pop" : : : "memory")
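
/* Usage sketch (illustrative only, with hypothetical variables): a writer
   that fills a buffer and then publishes a flag can order the two with

     __buf[__n] = __val;
     atomic_full_barrier ();
     __published = 1;

   so that neither access is reordered across the sync.  */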

#endif /* bits/atomic.h */