2 * Copyright (C) 1999, 2003 Erik Walthinsen <omega@cse.ogi.edu>
4 * This library is free software; you can redistribute it and/or
5 * modify it under the terms of the GNU Library General Public
6 * License as published by the Free Software Foundation; either
7 * version 2 of the License, or (at your option) any later version.
9 * This library is distributed in the hope that it will be useful,
10 * but WITHOUT ANY WARRANTY; without even the implied warranty of
11 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
12 * Library General Public License for more details.
14 * You should have received a copy of the GNU Library General Public
15 * License along with this library; if not, write to the
16 * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
17 * Boston, MA 02111-1307, USA.
20 * Much of the code in this file is taken from the Linux kernel.
21 * The code is relicensed under the LGPL with the kind permission of
22 * Linus Torvalds, Ralf Baechle and Alan Cox
25 #ifndef __GST_ATOMIC_IMPL_H__
26 #define __GST_ATOMIC_IMPL_H__
33 #include "gstatomic.h"
34 #include "gstmacros.h"
38 #if defined (GST_CAN_INLINE) || defined (__GST_ATOMIC_C__)
40 /***** Intel x86 *****/
41 #if defined (HAVE_CPU_I386) && defined(__GNUC__)
43 #ifdef GST_CONFIG_NO_SMP
46 #define SMP_LOCK "lock ; "
/* x86 trivial accessors: init/set are plain stores of the counter word,
 * read is a plain load, and destroy is a no-op (no resources to release).
 * Only the read-modify-write operations below use the LOCK prefix. */
49 GST_INLINE_FUNC void gst_atomic_int_init (GstAtomicInt *aint, gint val) { aint->counter = val; }
50 GST_INLINE_FUNC void gst_atomic_int_destroy (GstAtomicInt *aint) { }
51 GST_INLINE_FUNC void gst_atomic_int_set (GstAtomicInt *aint, gint val) { aint->counter = val; }
52 GST_INLINE_FUNC gint gst_atomic_int_read (GstAtomicInt *aint) { return aint->counter; }
55 gst_atomic_int_add (GstAtomicInt *aint, gint val)
60 :"ir" (val), "m" (aint->counter));
64 gst_atomic_int_inc (GstAtomicInt *aint)
69 :"m" (aint->counter));
72 GST_INLINE_FUNC gboolean
73 gst_atomic_int_dec_and_test (GstAtomicInt *aint)
78 SMP_LOCK "decl %0; sete %1"
79 :"=m" (aint->counter), "=qm" (res)
80 :"m" (aint->counter) : "memory");
86 #elif defined (HAVE_CPU_PPC) && defined(__GNUC__)
88 #ifdef GST_CONFIG_NO_SMP
92 #define SMP_SYNC "\tsync\n"
93 #define SMP_ISYNC "\tisync\n"
96 /* Erratum #77 on the 405 means we need a sync or dcbt before every stwcx.
97 * The old ATOMIC_SYNC_FIX covered some but not all of this.
99 #ifdef GST_CONFIG_IBM405_ERR77
100 #define PPC405_ERR77(ra,rb) "\tdcbt " #ra "," #rb "\n"
102 #define PPC405_ERR77(ra,rb)
/* PPC trivial accessors: plain load/store of the counter; destroy is a
 * no-op. The lwarx/stwcx-based sequences are only needed for the
 * read-modify-write operations below. */
105 GST_INLINE_FUNC void gst_atomic_int_init (GstAtomicInt *aint, gint val) { aint->counter = val; }
106 GST_INLINE_FUNC void gst_atomic_int_destroy (GstAtomicInt *aint) { }
107 GST_INLINE_FUNC void gst_atomic_int_set (GstAtomicInt *aint, gint val) { aint->counter = val; }
108 GST_INLINE_FUNC gint gst_atomic_int_read (GstAtomicInt *aint) { return aint->counter; }
111 gst_atomic_int_add (GstAtomicInt *aint, gint val)
115 __asm__ __volatile__(
121 : "=&r" (t), "=m" (aint->counter)
122 : "r" (val), "r" (&aint->counter), "m" (aint->counter)
127 gst_atomic_int_inc (GstAtomicInt *aint)
131 __asm__ __volatile__(
137 : "=&r" (t), "=m" (aint->counter)
138 : "r" (&aint->counter), "m" (aint->counter)
142 GST_INLINE_FUNC gboolean
143 gst_atomic_int_dec_and_test (GstAtomicInt *aint)
147 __asm__ __volatile__(
155 : "r" (&aint->counter)
161 /***** DEC[/Compaq/HP?/Intel?] Alpha *****/
162 #elif defined(HAVE_CPU_ALPHA) && defined(__GNUC__)
/* Alpha trivial accessors: plain load/store of the counter; destroy is a
 * no-op. Atomic sequences (ldl_l/stl_c style) are reserved for the
 * read-modify-write operations below. */
164 GST_INLINE_FUNC void gst_atomic_int_init (GstAtomicInt *aint, gint val) { aint->counter = val; }
165 GST_INLINE_FUNC void gst_atomic_int_destroy (GstAtomicInt *aint) { }
166 GST_INLINE_FUNC void gst_atomic_int_set (GstAtomicInt *aint, gint val) { aint->counter = val; }
167 GST_INLINE_FUNC gint gst_atomic_int_read (GstAtomicInt *aint) { return aint->counter; }
170 gst_atomic_int_add (GstAtomicInt *aint, gint val)
174 __asm__ __volatile__(
182 :"=&r" (temp), "=m" (aint->counter)
183 :"Ir" (val), "m" (aint->counter));
187 gst_atomic_int_inc (GstAtomicInt *aint)
189 gst_atomic_int_add (aint, 1);
192 GST_INLINE_FUNC gboolean
193 gst_atomic_int_dec_and_test (GstAtomicInt *aint)
197 __asm__ __volatile__(
207 :"=&r" (temp), "=m" (aint->counter), "=&r" (result)
208 :"Ir" (val), "m" (aint->counter) : "memory");
213 /***** Sun SPARC *****/
214 #elif defined(HAVE_CPU_SPARC) && defined(__GNUC__)
216 GST_INLINE_FUNC void gst_atomic_int_destroy (GstAtomicInt *aint) { }
218 #ifdef GST_CONFIG_NO_SMP
/* Uniprocessor SPARC: no lock byte is needed, so the counter holds the
 * value directly and init/set/read are plain accesses. */
219 GST_INLINE_FUNC void gst_atomic_int_init (GstAtomicInt *aint, gint val) { aint->counter = val; }
220 GST_INLINE_FUNC void gst_atomic_int_set (GstAtomicInt *aint, gint val) { aint->counter = val; }
221 GST_INLINE_FUNC gint gst_atomic_int_read (GstAtomicInt *aint) { return aint->counter; }
/* SMP SPARC: the value lives in the upper 24 bits (hence the <<8); the low
 * byte of the word is reserved for the ldstub spin-lock used by the
 * read-modify-write operations. See the layout diagram below. */
223 GST_INLINE_FUNC void gst_atomic_int_init (GstAtomicInt *aint, gint val) { aint->counter = (val<<8); }
224 GST_INLINE_FUNC void gst_atomic_int_set (GstAtomicInt *aint, gint val) { aint->counter = (val<<8); }
227 * For SMP the trick is you embed the spin lock byte within
228 * the word, use the low byte so signedness is easily retained
229 * via a quick arithmetic shift. It looks like this:
231 * ----------------------------------------
232 * | signed 24-bit counter value | lock | atomic_t
233 * ----------------------------------------
237 gst_atomic_int_read (GstAtomicInt *aint)
239 int ret = aint->counter;
246 #endif /* GST_CONFIG_NO_SMP */
249 gst_atomic_int_add (GstAtomicInt *aint, gint val)
251 volatile int increment, *ptr;
255 ptr = &(aint->counter);
257 #if __GNUC__ > 3 || (__GNUC__ >=3 && __GNUC_MINOR__ >= 2)
258 __asm__ __volatile__("1: ldstub [%[ptr] + 3], %[lock]\n"
259 "\torcc %[lock], 0, %[ignore]\n"
260 "\tbne 1b\n" /* go back until we have the lock */
261 "\tld [%[ptr]], %[inc]\n"
262 "\tsra %[inc], 8, %[inc]\n"
263 "\tadd %[inc], %[val], %[inc]\n"
264 "\tsll %[inc], 8, %[lock]\n"
265 "\tst %[lock],[%[ptr]]\n" /* Release the lock */
266 : [inc] "=&r" (increment), [lock] "=r" (lock),
267 [ignore] "=&r" (ignore)
268 : "0" (increment), [ptr] "r" (ptr), [val] "r" (val)
271 __asm__ __volatile__("1: ldstub [%4 + 3], %1\n"
273 "\tbne 1b\n" /* go back until we have the lock */
278 "\tst %1,[%4]\n" /* Release the lock */
279 : "=&r" (increment), "=r" (lock), "=&r" (ignore)
280 : "0" (increment), "r" (ptr), "r" (val)
286 gst_atomic_int_inc (GstAtomicInt *aint)
288 gst_atomic_int_add (aint, 1);
291 GST_INLINE_FUNC gboolean
292 gst_atomic_int_dec_and_test (GstAtomicInt *aint)
294 volatile int increment, *ptr;
298 ptr = &aint->counter;
300 #if __GNUC__ > 3 || (__GNUC__ >=3 && __GNUC_MINOR__ >= 2)
301 __asm__ __volatile__("1: ldstub [%[ptr] + 3], %[lock]\n"
302 "\torcc %[lock], 0, %[ignore]\n"
303 "\tbne 1b\n" /* go back until we have the lock */
304 "\tld [%[ptr]], %[inc]\n"
305 "\tsra %[inc], 8, %[inc]\n"
306 "\tsub %[inc], 1, %[inc]\n"
307 "\tsll %[inc], 8, %[lock]\n"
308 "\tst %[lock],[%[ptr]]\n" /* Release the lock */
309 : [inc] "=&r" (increment), [lock] "=r" (lock),
310 [ignore] "=&r" (ignore)
311 : "0" (increment), [ptr] "r" (ptr)
314 __asm__ __volatile__("1: ldstub [%4 + 3], %1\n"
316 "\tbne 1b\n" /* go back until we have the lock */
321 "\tst %1,[%4]\n" /* Release the lock */
322 : "=&r" (increment), "=r" (lock), "=&r" (ignore)
323 : "0" (increment), "r" (ptr)
327 return increment == 0;
331 /* This is disabled because the asm code is broken on most MIPS
332 * processors and doesn't generally compile. */
333 #elif defined(HAVE_CPU_MIPS) && defined(__GNUC__) && 0
/* MIPS trivial accessors: plain load/store of the counter; destroy is a
 * no-op. (This whole MIPS branch is compiled out via "&& 0" above because
 * the ll/sc asm below is reportedly broken on most MIPS processors.) */
335 GST_INLINE_FUNC void gst_atomic_int_init (GstAtomicInt *aint, gint val) { aint->counter = val; }
336 GST_INLINE_FUNC void gst_atomic_int_destroy (GstAtomicInt *aint) { }
337 GST_INLINE_FUNC void gst_atomic_int_set (GstAtomicInt *aint, gint val) { aint->counter = val; }
338 GST_INLINE_FUNC gint gst_atomic_int_read (GstAtomicInt *aint) { return aint->counter; }
340 /* this only works on MIPS II and better */
342 gst_atomic_int_add (GstAtomicInt *aint, gint val)
346 __asm__ __volatile__(
347 "1: ll %0, %1 # atomic_add\n"
351 : "=&r" (temp), "=m" (aint->counter)
352 : "Ir" (val), "m" (aint->counter));
356 gst_atomic_int_inc (GstAtomicInt *aint)
358 gst_atomic_int_add (aint, 1);
361 GST_INLINE_FUNC gboolean
362 gst_atomic_int_dec_and_test (GstAtomicInt *aint)
364 unsigned long temp, result;
367 __asm__ __volatile__(
369 ".set noreorder # atomic_sub_return\n"
371 " subu %0, %1, %3 \n"
374 " subu %0, %1, %3 \n"
376 : "=&r" (result), "=&r" (temp), "=m" (aint->counter)
377 : "Ir" (val), "m" (aint->counter)
384 #elif defined(HAVE_CPU_S390) && defined(__GNUC__)
/* S/390 trivial accessors: plain load/store of the counter; destroy is a
 * no-op. The compare-and-swap loop (__CS_LOOP below) is only used for the
 * read-modify-write operations. */
386 GST_INLINE_FUNC void gst_atomic_int_init (GstAtomicInt *aint, gint val) { aint->counter = val; }
387 GST_INLINE_FUNC void gst_atomic_int_destroy (GstAtomicInt *aint) { }
388 GST_INLINE_FUNC void gst_atomic_int_set (GstAtomicInt *aint, gint val) { aint->counter = val; }
389 GST_INLINE_FUNC gint gst_atomic_int_read (GstAtomicInt *aint) { return aint->counter; }
391 #define __CS_LOOP(old_val, new_val, ptr, op_val, op_string) \
392 __asm__ __volatile__(" l %0,0(%3)\n" \
394 op_string " %1,%4\n" \
395 " cs %0,%1,0(%3)\n" \
397 : "=&d" (old_val), "=&d" (new_val), \
398 "+m" (((atomic_t *)(ptr))->counter) \
399 : "a" (ptr), "d" (op_val) : "cc" );
402 gst_atomic_int_add (GstAtomicInt *aint, gint val)
404 int old_val, new_val;
405 __CS_LOOP(old_val, new_val, aint, val, "ar");
409 gst_atomic_int_inc (GstAtomicInt *aint)
411 int old_val, new_val;
412 __CS_LOOP(old_val, new_val, aint, 1, "ar");
415 GST_INLINE_FUNC gboolean
416 gst_atomic_int_dec_and_test (GstAtomicInt *aint)
418 int old_val, new_val;
419 __CS_LOOP(old_val, new_val, aint, 1, "sr");
424 #warning consider putting your architecture specific atomic implementations here
427 * generic implementation
430 gst_atomic_int_init (GstAtomicInt *aint, gint val)
433 aint->lock = g_mutex_new ();
437 gst_atomic_int_destroy (GstAtomicInt *aint)
439 g_mutex_free (aint->lock);
443 gst_atomic_int_set (GstAtomicInt *aint, gint val)
445 g_mutex_lock (aint->lock);
447 g_mutex_unlock (aint->lock);
451 gst_atomic_int_read (GstAtomicInt *aint)
455 g_mutex_lock (aint->lock);
457 g_mutex_unlock (aint->lock);
463 gst_atomic_int_add (GstAtomicInt *aint, gint val)
465 g_mutex_lock (aint->lock);
466 aint->counter += val;
467 g_mutex_unlock (aint->lock);
471 gst_atomic_int_inc (GstAtomicInt *aint)
473 g_mutex_lock (aint->lock);
475 g_mutex_unlock (aint->lock);
478 GST_INLINE_FUNC gboolean
479 gst_atomic_int_dec_and_test (GstAtomicInt *aint)
483 g_mutex_lock (aint->lock);
485 res = (aint->counter == 0);
486 g_mutex_unlock (aint->lock);
495 GST_INLINE_FUNC GstAtomicInt*
496 gst_atomic_int_new (gint val)
500 aint = g_new0 (GstAtomicInt, 1);
501 gst_atomic_int_init (aint, val);
507 gst_atomic_int_free (GstAtomicInt *aint)
509 gst_atomic_int_destroy (aint);
513 #endif /* defined (GST_CAN_INLINE) || defined (__GST_TRASH_STACK_C__)*/
517 #endif /* __GST_ATOMIC_IMPL_H__ */