/*
 * Copyright (C) 1999, 2003 Erik Walthinsen <omega@cse.ogi.edu>
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Library General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Library General Public License for more details.
 *
 * You should have received a copy of the GNU Library General Public
 * License along with this library; if not, write to the
 * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
 * Boston, MA 02111-1307, USA.
 */

/*
 * Much of the code in this file is taken from the Linux kernel.
 * The code is relicensed under the LGPL with the kind permission of
 * Linus Torvalds, Ralf Baechle and Alan Cox.
 */
#ifndef __GST_ATOMIC_IMPL_H__
#define __GST_ATOMIC_IMPL_H__

#include <glib.h>

#include "gstatomic.h"
#include "gstmacros.h"

#if defined (GST_CAN_INLINE) || defined (__GST_ATOMIC_C__)
/***** Intel x86 *****/
#if defined (HAVE_CPU_I386) && defined(__GNUC__)

/* the "lock" prefix is only needed to make the operations atomic on SMP */
#ifdef GST_CONFIG_NO_SMP
#define SMP_LOCK ""
#else
#define SMP_LOCK "lock ; "
#endif
GST_INLINE_FUNC void gst_atomic_int_init (GstAtomicInt *aint, gint val) { aint->counter = val; }
GST_INLINE_FUNC void gst_atomic_int_destroy (GstAtomicInt *aint) { }
GST_INLINE_FUNC void gst_atomic_int_set (GstAtomicInt *aint, gint val) { aint->counter = val; }
GST_INLINE_FUNC gint gst_atomic_int_read (GstAtomicInt *aint) { return aint->counter; }
GST_INLINE_FUNC void
gst_atomic_int_add (GstAtomicInt *aint, gint val)
{
  __asm__ __volatile__(
    SMP_LOCK "addl %1,%0"
      :"=m" (aint->counter)
      :"ir" (val), "m" (aint->counter));
}

GST_INLINE_FUNC void
gst_atomic_int_inc (GstAtomicInt *aint)
{
  __asm__ __volatile__(
    SMP_LOCK "incl %0"
      :"=m" (aint->counter)
      :"m" (aint->counter));
}

GST_INLINE_FUNC gboolean
gst_atomic_int_dec_and_test (GstAtomicInt *aint)
{
  guchar res;

  __asm__ __volatile__(
    SMP_LOCK "decl %0; sete %1"
      :"=m" (aint->counter), "=qm" (res)
      :"m" (aint->counter) : "memory");

  return res == 1;
}
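/* Note (added for clarity): "decl" above sets the zero flag when the counter
 * reaches 0 and "sete" copies that flag into res, so dec_and_test returns
 * TRUE exactly when this decrement brought the counter down to zero. */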
/***** PowerPC *****/
#elif defined (HAVE_CPU_PPC) && defined(__GNUC__)

#ifdef GST_CONFIG_NO_SMP
#define SMP_SYNC  ""
#define SMP_ISYNC
#else
#define SMP_SYNC  "sync"
#define SMP_ISYNC "\n\tisync"
#endif

/* Erratum #77 on the 405 means we need a sync or dcbt before every stwcx.
 * The old ATOMIC_SYNC_FIX covered some but not all of this.
 */
#ifdef GST_CONFIG_IBM405_ERR77
#define PPC405_ERR77(ra,rb) "dcbt " #ra "," #rb ";"
#else
#define PPC405_ERR77(ra,rb)
#endif
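/* Illustration (not in the original source): with GST_CONFIG_IBM405_ERR77
 * defined, PPC405_ERR77(0,%3) pastes the string "dcbt 0,%3;" into the asm
 * template, placing a dcbt right before the stwcx. in the loops below; with
 * the erratum workaround disabled the macro expands to nothing. */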
GST_INLINE_FUNC void gst_atomic_int_init (GstAtomicInt *aint, gint val) { aint->counter = val; }
GST_INLINE_FUNC void gst_atomic_int_destroy (GstAtomicInt *aint) { }
GST_INLINE_FUNC void gst_atomic_int_set (GstAtomicInt *aint, gint val) { aint->counter = val; }
GST_INLINE_FUNC gint gst_atomic_int_read (GstAtomicInt *aint) { return aint->counter; }
GST_INLINE_FUNC void
gst_atomic_int_add (GstAtomicInt *aint, gint val)
{
  int t;

  __asm__ __volatile__(
    "1: lwarx   %0,0,%3     # atomic_add\n"
    "   add     %0,%2,%0\n"
    PPC405_ERR77(0,%3)
    "   stwcx.  %0,0,%3\n"
    "   bne-    1b"
    : "=&r" (t), "=m" (aint->counter)
    : "r" (val), "r" (&aint->counter), "m" (aint->counter)
    : "cc");
}

GST_INLINE_FUNC void
gst_atomic_int_inc (GstAtomicInt *aint)
{
  int t;

  __asm__ __volatile__(
    "1: lwarx   %0,0,%2     # atomic_inc\n"
    "   addic   %0,%0,1\n"
    PPC405_ERR77(0,%2)
    "   stwcx.  %0,0,%2\n"
    "   bne-    1b"
    : "=&r" (t), "=m" (aint->counter)
    : "r" (&aint->counter), "m" (aint->counter)
    : "cc");
}

GST_INLINE_FUNC gboolean
gst_atomic_int_dec_and_test (GstAtomicInt *aint)
{
  int t;

  __asm__ __volatile__(
    "1: lwarx   %0,0,%1     # atomic_dec_return\n"
    "   addic   %0,%0,-1\n"
    PPC405_ERR77(0,%1)
    "   stwcx.  %0,0,%1\n"
    "   bne-    1b"
    SMP_ISYNC
    : "=&r" (t)
    : "r" (&aint->counter)
    : "cc", "memory");

  return t == 0;
}
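/* Note (added for clarity): each lwarx/stwcx. pair above is a
 * load-reserved/store-conditional loop; if another CPU touches the counter
 * between the two instructions the store fails and "bne- 1b" retries. */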
/***** DEC[/Compaq/HP?/Intel?] Alpha *****/
#elif defined(HAVE_CPU_ALPHA) && defined(__GNUC__)
GST_INLINE_FUNC void gst_atomic_int_init (GstAtomicInt *aint, gint val) { aint->counter = val; }
GST_INLINE_FUNC void gst_atomic_int_destroy (GstAtomicInt *aint) { }
GST_INLINE_FUNC void gst_atomic_int_set (GstAtomicInt *aint, gint val) { aint->counter = val; }
GST_INLINE_FUNC gint gst_atomic_int_read (GstAtomicInt *aint) { return aint->counter; }
GST_INLINE_FUNC void
gst_atomic_int_add (GstAtomicInt *aint, gint val)
{
  long temp;

  __asm__ __volatile__(
    "1: ldl_l %0,%1\n"
    "   addl %0,%2,%0\n"
    "   stl_c %0,%1\n"
    "   beq %0,2f\n"
    ".subsection 2\n"
    "2: br 1b\n"
    ".previous"
    :"=&r" (temp), "=m" (aint->counter)
    :"Ir" (val), "m" (aint->counter));
}

GST_INLINE_FUNC void
gst_atomic_int_inc (GstAtomicInt *aint)
{
  gst_atomic_int_add (aint, 1);
}

GST_INLINE_FUNC gboolean
gst_atomic_int_dec_and_test (GstAtomicInt *aint)
{
  long temp, result;
  gint val = 1;

  __asm__ __volatile__(
    "1: ldl_l %0,%1\n"
    "   subl %0,%3,%2\n"
    "   subl %0,%3,%0\n"
    "   stl_c %0,%1\n"
    "   beq %0,2f\n"
    "   mb\n"
    ".subsection 2\n"
    "2: br 1b\n"
    ".previous"
    :"=&r" (temp), "=m" (aint->counter), "=&r" (result)
    :"Ir" (val), "m" (aint->counter) : "memory");

  return result == 0;
}
/***** Sun SPARC *****/
#elif defined(HAVE_CPU_SPARC) && defined(__GNUC__)

GST_INLINE_FUNC void gst_atomic_int_destroy (GstAtomicInt *aint) { }

#ifdef GST_CONFIG_NO_SMP
GST_INLINE_FUNC void gst_atomic_int_init (GstAtomicInt *aint, gint val) { aint->counter = val; }
GST_INLINE_FUNC void gst_atomic_int_set (GstAtomicInt *aint, gint val) { aint->counter = val; }
GST_INLINE_FUNC gint gst_atomic_int_read (GstAtomicInt *aint) { return aint->counter; }
#else
GST_INLINE_FUNC void gst_atomic_int_init (GstAtomicInt *aint, gint val) { aint->counter = (val << 8); }
GST_INLINE_FUNC void gst_atomic_int_set (GstAtomicInt *aint, gint val) { aint->counter = (val << 8); }
/*
 * For SMP the trick is you embed the spin lock byte within
 * the word, use the low byte so signedness is easily retained
 * via a quick arithmetic shift. It looks like this:
 *
 *      ----------------------------------------
 *      | signed 24-bit counter value |  lock  |  atomic_t
 *      ----------------------------------------
 *       31                          8 7      0
 */
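/* Example (illustrative, not from the original source): after
 * gst_atomic_int_set (aint, 5) the word holds 5 << 8 == 0x500 and the low
 * (lock) byte is 0; gst_atomic_int_read() below waits for the lock byte to
 * clear and then recovers the signed value with an arithmetic shift,
 * 0x500 >> 8 == 5. */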
GST_INLINE_FUNC gint
gst_atomic_int_read (GstAtomicInt *aint)
{
  int ret = aint->counter;

  /* wait until the lock byte is released, then drop it off the value */
  while (ret & 0xff)
    ret = aint->counter;

  return ret >> 8;
}
#endif /* GST_CONFIG_NO_SMP */
GST_INLINE_FUNC void
gst_atomic_int_add (GstAtomicInt *aint, gint val)
{
  volatile int increment, *ptr;
  int lock;

  ptr = &(aint->counter);

  __asm__ __volatile__("1: ldstub [%[ptr] + 3], %[lock]\n"
                       "\torcc %[lock], 0, %%g0\n"
                       "\tbne 1b\n" /* go back until we have the lock */
                       "\tld [%[ptr]], %[inc]\n"
                       "\tsra %[inc], 8, %[inc]\n"
                       "\tadd %[inc], %[val], %[inc]\n"
                       "\tsll %[inc], 8, %[lock]\n"
                       "\tst %[lock],[%[ptr]]\n" /* Release the lock */
                       : [inc] "=&r" (increment), [lock] "=r" (lock)
                       : "0" (increment), [ptr] "r" (ptr), [val] "r" (val)
      );
}
GST_INLINE_FUNC void
gst_atomic_int_inc (GstAtomicInt *aint)
{
  gst_atomic_int_add (aint, 1);
}
GST_INLINE_FUNC gboolean
gst_atomic_int_dec_and_test (GstAtomicInt *aint)
{
  volatile int increment, *ptr;
  int lock;

  ptr = &aint->counter;

  __asm__ __volatile__("1: ldstub [%[ptr] + 3], %[lock]\n"
                       "\torcc %[lock], 0, %%g0\n"
                       "\tbne 1b\n" /* go back until we have the lock */
                       "\tld [%[ptr]], %[inc]\n"
                       "\tsra %[inc], 8, %[inc]\n"
                       "\tsub %[inc], 1, %[inc]\n"
                       "\tsll %[inc], 8, %[lock]\n"
                       "\tst %[lock],[%[ptr]]\n" /* Release the lock */
                       : [inc] "=&r" (increment), [lock] "=r" (lock)
                       : "0" (increment), [ptr] "r" (ptr)
      );

  return increment == 0;
}
/***** MIPS *****/
/* This is disabled because the asm code is broken on most MIPS
 * processors and doesn't generally compile. */
#elif defined(HAVE_CPU_MIPS) && defined(__GNUC__) && 0

GST_INLINE_FUNC void gst_atomic_int_init (GstAtomicInt *aint, gint val) { aint->counter = val; }
GST_INLINE_FUNC void gst_atomic_int_destroy (GstAtomicInt *aint) { }
GST_INLINE_FUNC void gst_atomic_int_set (GstAtomicInt *aint, gint val) { aint->counter = val; }
GST_INLINE_FUNC gint gst_atomic_int_read (GstAtomicInt *aint) { return aint->counter; }
/* this only works on MIPS II and better */
GST_INLINE_FUNC void
gst_atomic_int_add (GstAtomicInt *aint, gint val)
{
  unsigned long temp;

  __asm__ __volatile__(
    "1:  ll    %0, %1      # atomic_add\n"
    "    addu  %0, %2\n"
    "    sc    %0, %1\n"
    "    beqz  %0, 1b\n"
    : "=&r" (temp), "=m" (aint->counter)
    : "Ir" (val), "m" (aint->counter));
}

GST_INLINE_FUNC void
gst_atomic_int_inc (GstAtomicInt *aint)
{
  gst_atomic_int_add (aint, 1);
}
GST_INLINE_FUNC gboolean
gst_atomic_int_dec_and_test (GstAtomicInt *aint)
{
  unsigned long temp, result;
  gint val = 1;

  __asm__ __volatile__(
    ".set push\n"
    ".set noreorder           # atomic_sub_return\n"
    "1:  ll    %1, %2\n"
    "    subu  %0, %1, %3\n"
    "    sc    %0, %2\n"
    "    beqz  %0, 1b\n"
    "    subu  %0, %1, %3\n" /* branch delay slot: recompute the result */
    ".set pop\n"
    : "=&r" (result), "=&r" (temp), "=m" (aint->counter)
    : "Ir" (val), "m" (aint->counter)
    : "memory");

  return result == 0;
}
/***** S/390 *****/
#elif defined(HAVE_CPU_S390) && defined(__GNUC__)

GST_INLINE_FUNC void gst_atomic_int_init (GstAtomicInt *aint, gint val) { aint->counter = val; }
GST_INLINE_FUNC void gst_atomic_int_destroy (GstAtomicInt *aint) { }
GST_INLINE_FUNC void gst_atomic_int_set (GstAtomicInt *aint, gint val) { aint->counter = val; }
GST_INLINE_FUNC gint gst_atomic_int_read (GstAtomicInt *aint) { return aint->counter; }
#define __CS_LOOP(old_val, new_val, ptr, op_val, op_string)      \
  __asm__ __volatile__("   l     %0,0(%3)\n"                     \
                       "0: lr    %1,%0\n"                        \
                       op_string " %1,%4\n"                      \
                       "   cs    %0,%1,0(%3)\n"                  \
                       "   jl    0b"                             \
                       : "=&d" (old_val), "=&d" (new_val),       \
                         "+m" (((GstAtomicInt *)(ptr))->counter) \
                       : "a" (ptr), "d" (op_val) : "cc" );
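/* Note (added for clarity): __CS_LOOP is a compare-and-swap retry loop: it
 * loads the old counter value, applies op_string ("ar" to add, "sr" to
 * subtract) to compute the new value, and "cs" stores it only if the counter
 * is still unchanged, branching back to retry otherwise. */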
GST_INLINE_FUNC void
gst_atomic_int_add (GstAtomicInt *aint, gint val)
{
  int old_val, new_val;

  __CS_LOOP(old_val, new_val, aint, val, "ar");
}

GST_INLINE_FUNC void
gst_atomic_int_inc (GstAtomicInt *aint)
{
  int old_val, new_val;

  __CS_LOOP(old_val, new_val, aint, 1, "ar");
}

GST_INLINE_FUNC gboolean
gst_atomic_int_dec_and_test (GstAtomicInt *aint)
{
  int old_val, new_val;

  __CS_LOOP(old_val, new_val, aint, 1, "sr");

  return new_val == 0;
}
#else
#warning consider putting your architecture specific atomic implementations here

/*
 * generic implementation
 */
GST_INLINE_FUNC void
gst_atomic_int_init (GstAtomicInt *aint, gint val)
{
  aint->counter = val;
  aint->lock = g_mutex_new ();
}

GST_INLINE_FUNC void
gst_atomic_int_destroy (GstAtomicInt *aint)
{
  g_mutex_free (aint->lock);
}
GST_INLINE_FUNC void
gst_atomic_int_set (GstAtomicInt *aint, gint val)
{
  g_mutex_lock (aint->lock);
  aint->counter = val;
  g_mutex_unlock (aint->lock);
}

GST_INLINE_FUNC gint
gst_atomic_int_read (GstAtomicInt *aint)
{
  gint res;

  g_mutex_lock (aint->lock);
  res = aint->counter;
  g_mutex_unlock (aint->lock);

  return res;
}
GST_INLINE_FUNC void
gst_atomic_int_add (GstAtomicInt *aint, gint val)
{
  g_mutex_lock (aint->lock);
  aint->counter += val;
  g_mutex_unlock (aint->lock);
}

GST_INLINE_FUNC void
gst_atomic_int_inc (GstAtomicInt *aint)
{
  g_mutex_lock (aint->lock);
  aint->counter++;
  g_mutex_unlock (aint->lock);
}
GST_INLINE_FUNC gboolean
gst_atomic_int_dec_and_test (GstAtomicInt *aint)
{
  gboolean res;

  g_mutex_lock (aint->lock);
  aint->counter--;
  res = (aint->counter == 0);
  g_mutex_unlock (aint->lock);

  return res;
}
#endif
GST_INLINE_FUNC GstAtomicInt*
gst_atomic_int_new (gint val)
{
  GstAtomicInt *aint;

  aint = g_new0 (GstAtomicInt, 1);
  gst_atomic_int_init (aint, val);

  return aint;
}

GST_INLINE_FUNC void
gst_atomic_int_free (GstAtomicInt *aint)
{
  gst_atomic_int_destroy (aint);
  g_free (aint);
}
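/* Usage sketch (illustrative only, not part of the original header):
 *
 *   GstAtomicInt *refcount = gst_atomic_int_new (1);
 *
 *   gst_atomic_int_inc (refcount);               // take an extra reference
 *   if (gst_atomic_int_dec_and_test (refcount))  // drop it again
 *     g_assert_not_reached ();                   // count is 1, not 0
 *   if (gst_atomic_int_dec_and_test (refcount))  // drop the last reference
 *     gst_atomic_int_free (refcount);            // count hit 0, clean up
 */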
#endif /* defined (GST_CAN_INLINE) || defined (__GST_ATOMIC_C__) */

#endif /* __GST_ATOMIC_IMPL_H__ */