/* GStreamer
 * Copyright (C) 1999, 2003 Erik Walthinsen <omega@cse.ogi.edu>
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Library General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Library General Public License for more details.
 *
 * You should have received a copy of the GNU Library General Public
 * License along with this library; if not, write to the
 * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
 * Boston, MA 02111-1307, USA.
 */
/*
 * Much of the code in this file is taken from the Linux kernel.
 * The code is relicensed under the LGPL with the kind permission of
 * Linus Torvalds, Ralf Baechle and Alan Cox.
 */
#ifndef __GST_ATOMIC_IMPL_H__
#define __GST_ATOMIC_IMPL_H__

#include <glib.h>

#include "gstatomic.h"
#include "gstmacros.h"

G_BEGIN_DECLS
#if defined (GST_CAN_INLINE) || defined (__GST_ATOMIC_C__)

/***** Intel x86 *****/
#if defined (HAVE_CPU_I386) && defined (__GNUC__)
#ifdef GST_CONFIG_NO_SMP
#define SMP_LOCK ""
#else
#define SMP_LOCK "lock ; "
#endif
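/* Note: SMP_LOCK is glued onto each instruction below by C string literal
 * concatenation, so on SMP builds e.g. "addl" becomes "lock ; addl" and the
 * read-modify-write cycle is atomic with respect to the other processors. */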
GST_INLINE_FUNC void
gst_atomic_int_init (GstAtomicInt * aint, gint val)
{
  aint->counter = val;
}

GST_INLINE_FUNC void
gst_atomic_int_destroy (GstAtomicInt * aint)
{
}

GST_INLINE_FUNC void
gst_atomic_int_set (GstAtomicInt * aint, gint val)
{
  aint->counter = val;
}

GST_INLINE_FUNC gint
gst_atomic_int_read (GstAtomicInt * aint)
{
  return aint->counter;
}
GST_INLINE_FUNC void
gst_atomic_int_add (GstAtomicInt * aint, gint val)
{
  __asm__ __volatile__ (SMP_LOCK "addl %1,%0"
      :"=m" (aint->counter)
      :"ir" (val), "m" (aint->counter));
}
GST_INLINE_FUNC void
gst_atomic_int_inc (GstAtomicInt * aint)
{
  __asm__ __volatile__ (SMP_LOCK "incl %0"
      :"=m" (aint->counter)
      :"m" (aint->counter));
}
GST_INLINE_FUNC gboolean
gst_atomic_int_dec_and_test (GstAtomicInt * aint)
{
  guchar res;

  __asm__ __volatile__ (SMP_LOCK "decl %0; sete %1"
      :"=m" (aint->counter), "=qm" (res)
      :"m" (aint->counter)
      :"memory");

  return res == 1;
}
/***** PowerPC *****/
#elif defined (HAVE_CPU_PPC) && defined (__GNUC__)

#ifdef GST_CONFIG_NO_SMP
#define SMP_SYNC ""
#define SMP_ISYNC ""
#else
#define SMP_SYNC "\tsync\n"
#define SMP_ISYNC "\tisync\n"
#endif
/* Erratum #77 on the 405 means we need a sync or dcbt before every
 * stwcx.  The old ATOMIC_SYNC_FIX covered some but not all of this. */
#ifdef GST_CONFIG_IBM405_ERR77
#define PPC405_ERR77(ra,rb) "\tdcbt " #ra "," #rb "\n"
#else
#define PPC405_ERR77(ra,rb)
#endif
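/* For illustration (a sketch, not part of the original header): with
 * GST_CONFIG_IBM405_ERR77 defined, the increment loop below expands
 * roughly to
 *
 *   1: lwarx  %0,0,%2
 *      addic  %0,%0,1
 *      dcbt   0,%2        <- inserted by PPC405_ERR77 before the stwcx.
 *      stwcx. %0,0,%2
 *      bne-   1b
 */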
GST_INLINE_FUNC void
gst_atomic_int_init (GstAtomicInt * aint, gint val)
{
  aint->counter = val;
}

GST_INLINE_FUNC void
gst_atomic_int_destroy (GstAtomicInt * aint)
{
}

GST_INLINE_FUNC void
gst_atomic_int_set (GstAtomicInt * aint, gint val)
{
  aint->counter = val;
}

GST_INLINE_FUNC gint
gst_atomic_int_read (GstAtomicInt * aint)
{
  return aint->counter;
}
GST_INLINE_FUNC void
gst_atomic_int_add (GstAtomicInt * aint, gint val)
{
  int t;

  __asm__ __volatile__ ("1: lwarx %0,0,%3\n"
      " add %0,%2,%0\n"
      PPC405_ERR77 (0, %3)
      " stwcx. %0,0,%3\n"
      " bne- 1b\n"
      :"=&r" (t), "=m" (aint->counter)
      :"r" (val), "r" (&aint->counter), "m" (aint->counter)
      :"cc");
}
GST_INLINE_FUNC void
gst_atomic_int_inc (GstAtomicInt * aint)
{
  int t;

  __asm__ __volatile__ ("1: lwarx %0,0,%2\n"
      " addic %0,%0,1\n"
      PPC405_ERR77 (0, %2)
      " stwcx. %0,0,%2\n"
      " bne- 1b\n"
      :"=&r" (t), "=m" (aint->counter)
      :"r" (&aint->counter), "m" (aint->counter)
      :"cc");
}
GST_INLINE_FUNC gboolean
gst_atomic_int_dec_and_test (GstAtomicInt * aint)
{
  int t;

  __asm__ __volatile__ ("1: lwarx %0,0,%1\n"
      " addic %0,%0,-1\n"
      PPC405_ERR77 (0, %1)
      " stwcx. %0,0,%1\n"
      " bne- 1b\n"
      SMP_ISYNC
      :"=&r" (t)
      :"r" (&aint->counter)
      :"cc", "memory");

  return t == 0;
}
/***** DEC[/Compaq/HP?/Intel?] Alpha *****/
#elif defined (HAVE_CPU_ALPHA) && defined (__GNUC__)
GST_INLINE_FUNC void
gst_atomic_int_init (GstAtomicInt * aint, gint val)
{
  aint->counter = val;
}

GST_INLINE_FUNC void
gst_atomic_int_destroy (GstAtomicInt * aint)
{
}

GST_INLINE_FUNC void
gst_atomic_int_set (GstAtomicInt * aint, gint val)
{
  aint->counter = val;
}

GST_INLINE_FUNC gint
gst_atomic_int_read (GstAtomicInt * aint)
{
  return aint->counter;
}
GST_INLINE_FUNC void
gst_atomic_int_add (GstAtomicInt * aint, gint val)
{
  unsigned long temp;

  __asm__ __volatile__ ("1: ldl_l %0,%1\n"
      " addl %0,%2,%0\n"
      " stl_c %0,%1\n"
      " beq %0,2f\n"
      ".subsection 2\n"
      "2: br 1b\n"
      ".previous"
      :"=&r" (temp), "=m" (aint->counter)
      :"Ir" (val), "m" (aint->counter));
}
GST_INLINE_FUNC void
gst_atomic_int_inc (GstAtomicInt * aint)
{
  gst_atomic_int_add (aint, 1);
}
GST_INLINE_FUNC gboolean
gst_atomic_int_dec_and_test (GstAtomicInt * aint)
{
  long temp, result;
  gint val = 1;

  __asm__ __volatile__ ("1: ldl_l %0,%1\n"
      " subl %0,%3,%2\n"
      " subl %0,%3,%0\n"
      " stl_c %0,%1\n"
      " beq %0,2f\n"
      " mb\n"
      ".subsection 2\n"
      "2: br 1b\n"
      ".previous"
      :"=&r" (temp), "=m" (aint->counter), "=&r" (result)
      :"Ir" (val), "m" (aint->counter)
      :"memory");

  return result == 0;
}
/***** Sun SPARC *****/
#elif 0 && defined (HAVE_CPU_SPARC) && defined (__GNUC__)
/* allegedly broken again */
GST_INLINE_FUNC void
gst_atomic_int_destroy (GstAtomicInt * aint)
{
}

#ifdef GST_CONFIG_NO_SMP
GST_INLINE_FUNC void
gst_atomic_int_init (GstAtomicInt * aint, gint val)
{
  aint->counter = val;
}

GST_INLINE_FUNC void
gst_atomic_int_set (GstAtomicInt * aint, gint val)
{
  aint->counter = val;
}

GST_INLINE_FUNC gint
gst_atomic_int_read (GstAtomicInt * aint)
{
  return aint->counter;
}
#else
GST_INLINE_FUNC void
gst_atomic_int_init (GstAtomicInt * aint, gint val)
{
  aint->counter = (val << 8);
}

GST_INLINE_FUNC void
gst_atomic_int_set (GstAtomicInt * aint, gint val)
{
  aint->counter = (val << 8);
}
/* For SMP the trick is you embed the spin lock byte within
 * the word, using the low byte so signedness is easily retained
 * via a quick arithmetic shift. It looks like this:
 *
 *  ----------------------------------------
 *  | signed 24-bit counter value |  lock  |  atomic_t
 *  ----------------------------------------
 */
GST_INLINE_FUNC gint
gst_atomic_int_read (GstAtomicInt * aint)
{
  int ret = aint->counter;

  return ret >> 8;              /* arithmetic shift discards the lock byte */
}
#endif /* GST_CONFIG_NO_SMP */
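/* Worked example of the encoding (illustration only): after
 * gst_atomic_int_init (aint, 5) the word holds 5 << 8 == 0x500 with the
 * lock byte clear, and gst_atomic_int_read returns 0x500 >> 8 == 5.
 * A negative value such as -1 is stored as 0xffffff00, and the arithmetic
 * shift brings back -1 with the sign intact. */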
GST_INLINE_FUNC void
gst_atomic_int_add (GstAtomicInt * aint, gint val)
{
  volatile int increment, *ptr;
  int lock, ignore;

  ptr = &(aint->counter);

#if __GNUC__ > 3 || (__GNUC__ >= 3 && __GNUC_MINOR__ >= 2)
  __asm__ __volatile__ ("1: ldstub [%[ptr] + 3], %[lock]\n"
      "\torcc %[lock], 0, %[ignore]\n"
      "\tbne 1b\n"                      /* go back until we have the lock */
      "\tld [%[ptr]], %[inc]\n"
      "\tsra %[inc], 8, %[inc]\n"
      "\tadd %[inc], %[val], %[inc]\n"
      "\tsll %[inc], 8, %[lock]\n"
      "\tst %[lock], [%[ptr]]\n"        /* release the lock */
      :[inc] "=&r" (increment), [lock] "=r" (lock), [ignore] "=&r" (ignore)
      :"0" (increment), [ptr] "r" (ptr), [val] "r" (val)
      :"memory", "cc");
#else
  __asm__ __volatile__ ("1: ldstub [%4 + 3], %1\n"
      "\torcc %1, 0, %2\n"
      "\tbne 1b\n"                      /* go back until we have the lock */
      "\tld [%4], %0\n"
      "\tsra %0, 8, %0\n"
      "\tadd %0, %5, %0\n"
      "\tsll %0, 8, %1\n"
      "\tst %1, [%4]\n"                 /* release the lock */
      :"=&r" (increment), "=r" (lock), "=&r" (ignore)
      :"0" (increment), "r" (ptr), "r" (val)
      :"memory", "cc");
#endif
}
GST_INLINE_FUNC void
gst_atomic_int_inc (GstAtomicInt * aint)
{
  gst_atomic_int_add (aint, 1);
}
GST_INLINE_FUNC gboolean
gst_atomic_int_dec_and_test (GstAtomicInt * aint)
{
  volatile int increment, *ptr;
  int lock, ignore;

  ptr = &aint->counter;

#if __GNUC__ > 3 || (__GNUC__ >= 3 && __GNUC_MINOR__ >= 2)
  __asm__ __volatile__ ("1: ldstub [%[ptr] + 3], %[lock]\n"
      "\torcc %[lock], 0, %[ignore]\n"
      "\tbne 1b\n"                      /* go back until we have the lock */
      "\tld [%[ptr]], %[inc]\n"
      "\tsra %[inc], 8, %[inc]\n"
      "\tsub %[inc], 1, %[inc]\n"
      "\tsll %[inc], 8, %[lock]\n"
      "\tst %[lock], [%[ptr]]\n"        /* release the lock */
      :[inc] "=&r" (increment), [lock] "=r" (lock), [ignore] "=&r" (ignore)
      :"0" (increment), [ptr] "r" (ptr)
      :"memory", "cc");
#else
  __asm__ __volatile__ ("1: ldstub [%4 + 3], %1\n"
      "\torcc %1, 0, %2\n"
      "\tbne 1b\n"                      /* go back until we have the lock */
      "\tld [%4], %0\n"
      "\tsra %0, 8, %0\n"
      "\tsub %0, 1, %0\n"
      "\tsll %0, 8, %1\n"
      "\tst %1, [%4]\n"                 /* release the lock */
      :"=&r" (increment), "=r" (lock), "=&r" (ignore)
      :"0" (increment), "r" (ptr)
      :"memory", "cc");
#endif

  return increment == 0;
}
/***** MIPS *****/
/* This is disabled because the asm code is broken on most MIPS
 * processors and doesn't generally compile. */
#elif defined (HAVE_CPU_MIPS) && defined (__GNUC__) && 0
GST_INLINE_FUNC void
gst_atomic_int_init (GstAtomicInt * aint, gint val)
{
  aint->counter = val;
}

GST_INLINE_FUNC void
gst_atomic_int_destroy (GstAtomicInt * aint)
{
}

GST_INLINE_FUNC void
gst_atomic_int_set (GstAtomicInt * aint, gint val)
{
  aint->counter = val;
}

GST_INLINE_FUNC gint
gst_atomic_int_read (GstAtomicInt * aint)
{
  return aint->counter;
}
/* this only works on MIPS II and better */
GST_INLINE_FUNC void
gst_atomic_int_add (GstAtomicInt * aint, gint val)
{
  unsigned long temp;

  __asm__ __volatile__ ("1: ll %0, %1 # atomic_add\n"
      " addu %0, %2 \n"
      " sc %0, %1 \n"
      " beqz %0, 1b \n"
      :"=&r" (temp), "=m" (aint->counter)
      :"Ir" (val), "m" (aint->counter));
}
GST_INLINE_FUNC void
gst_atomic_int_inc (GstAtomicInt * aint)
{
  gst_atomic_int_add (aint, 1);
}
GST_INLINE_FUNC gboolean
gst_atomic_int_dec_and_test (GstAtomicInt * aint)
{
  unsigned long temp, result;
  gint val = 1;

  __asm__ __volatile__ (".set push \n"
      ".set noreorder # atomic_sub_return\n"
      "1: ll %1, %2 \n"
      " subu %0, %1, %3 \n"
      " sc %0, %2 \n"
      " beqz %0, 1b \n"
      " subu %0, %1, %3 \n"
      ".set pop \n"
      :"=&r" (result), "=&r" (temp), "=m" (aint->counter)
      :"Ir" (val), "m" (aint->counter)
      :"memory");

  return result == 0;
}
/***** S/390 *****/
#elif defined (HAVE_CPU_S390) && defined (__GNUC__)

typedef struct
{
  volatile int counter;
} atomic_t __attribute__ ((aligned (4)));
GST_INLINE_FUNC void
gst_atomic_int_init (GstAtomicInt * aint, gint val)
{
  aint->counter = val;
}

GST_INLINE_FUNC void
gst_atomic_int_destroy (GstAtomicInt * aint)
{
}

GST_INLINE_FUNC void
gst_atomic_int_set (GstAtomicInt * aint, gint val)
{
  aint->counter = val;
}

GST_INLINE_FUNC gint
gst_atomic_int_read (GstAtomicInt * aint)
{
  return aint->counter;
}
#define __CS_LOOP(old_val, new_val, ptr, op_val, op_string) \
  __asm__ __volatile__ ("   l     %0,0(%3)\n" \
      "0: lr    %1,%0\n" \
      op_string "    %1,%4\n" \
      "   cs    %0,%1,0(%3)\n" \
      "   jl    0b" \
      : "=&d" (old_val), "=&d" (new_val), \
        "+m" (((atomic_t *)(ptr))->counter) \
      : "a" (ptr), "d" (op_val) : "cc" );
GST_INLINE_FUNC void
gst_atomic_int_add (GstAtomicInt * aint, gint val)
{
  int old_val, new_val;

  __CS_LOOP (old_val, new_val, aint, val, "ar");
}
GST_INLINE_FUNC void
gst_atomic_int_inc (GstAtomicInt * aint)
{
  int old_val, new_val;

  __CS_LOOP (old_val, new_val, aint, 1, "ar");
}
GST_INLINE_FUNC gboolean
gst_atomic_int_dec_and_test (GstAtomicInt * aint)
{
  int old_val, new_val;

  __CS_LOOP (old_val, new_val, aint, 1, "sr");

  return new_val == 0;
}
#else
#warning consider putting your architecture specific atomic implementations here

/*
 * generic implementation (protects the counter with a GLib mutex)
 */
GST_INLINE_FUNC void
gst_atomic_int_init (GstAtomicInt * aint, gint val)
{
  aint->counter = val;
  aint->lock = g_mutex_new ();
}

GST_INLINE_FUNC void
gst_atomic_int_destroy (GstAtomicInt * aint)
{
  g_mutex_free (aint->lock);
}
GST_INLINE_FUNC void
gst_atomic_int_set (GstAtomicInt * aint, gint val)
{
  g_mutex_lock (aint->lock);
  aint->counter = val;
  g_mutex_unlock (aint->lock);
}
GST_INLINE_FUNC gint
gst_atomic_int_read (GstAtomicInt * aint)
{
  gint res;

  g_mutex_lock (aint->lock);
  res = aint->counter;
  g_mutex_unlock (aint->lock);

  return res;
}
GST_INLINE_FUNC void
gst_atomic_int_add (GstAtomicInt * aint, gint val)
{
  g_mutex_lock (aint->lock);
  aint->counter += val;
  g_mutex_unlock (aint->lock);
}
GST_INLINE_FUNC void
gst_atomic_int_inc (GstAtomicInt * aint)
{
  g_mutex_lock (aint->lock);
  aint->counter++;
  g_mutex_unlock (aint->lock);
}
GST_INLINE_FUNC gboolean
gst_atomic_int_dec_and_test (GstAtomicInt * aint)
{
  gboolean res;

  g_mutex_lock (aint->lock);
  aint->counter--;
  res = (aint->counter == 0);
  g_mutex_unlock (aint->lock);

  return res;
}

#endif /* architecture selection */
GST_INLINE_FUNC GstAtomicInt *
gst_atomic_int_new (gint val)
{
  GstAtomicInt *aint;

  aint = g_new0 (GstAtomicInt, 1);
  gst_atomic_int_init (aint, val);

  return aint;
}
GST_INLINE_FUNC void
gst_atomic_int_free (GstAtomicInt * aint)
{
  gst_atomic_int_destroy (aint);
  g_free (aint);
}
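/* A minimal usage sketch (illustration only; MyObject, my_object_ref and
 * my_object_unref are hypothetical and not part of this header).  The
 * refcount is assumed to have been set up with
 * gst_atomic_int_init (&obj->refcount, 1) when the object was created. */
#if 0
typedef struct
{
  GstAtomicInt refcount;
  /* ... object payload ... */
} MyObject;

static void
my_object_ref (MyObject * obj)
{
  gst_atomic_int_inc (&obj->refcount);
}

static void
my_object_unref (MyObject * obj)
{
  /* dec_and_test returns TRUE only for the caller that drops the last
   * reference, so exactly one thread frees the object */
  if (gst_atomic_int_dec_and_test (&obj->refcount)) {
    gst_atomic_int_destroy (&obj->refcount);
    g_free (obj);
  }
}
#endif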
#endif /* defined (GST_CAN_INLINE) || defined (__GST_ATOMIC_C__) */

G_END_DECLS
#endif /* __GST_ATOMIC_IMPL_H__ */