/*
 * Copyright 2006 The Android Open Source Project
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */
8 #ifndef SkThread_DEFINED
9 #define SkThread_DEFINED
13 // SK_ATOMICS_PLATFORM_H must provide inline implementations for the following declarations.
15 /** Atomically adds one to the int referenced by addr and returns the previous value.
16 * No additional memory barrier is required; this must act as a compiler barrier.
18 static int32_t sk_atomic_inc(int32_t* addr);
20 /** Atomically adds inc to the int referenced by addr and returns the previous value.
21 * No additional memory barrier is required; this must act as a compiler barrier.
23 static int32_t sk_atomic_add(int32_t* addr, int32_t inc);
25 /** Atomically subtracts one from the int referenced by addr and returns the previous value.
26 * This must act as a release (SL/S) memory barrier and as a compiler barrier.
28 static int32_t sk_atomic_dec(int32_t* addr);
30 /** Atomically adds one to the int referenced by addr iff the referenced int was not 0
31 * and returns the previous value.
32 * No additional memory barrier is required; this must act as a compiler barrier.
34 static int32_t sk_atomic_conditional_inc(int32_t* addr);
36 /** Atomic compare and set.
37 * If *addr == before, set *addr to after and return true, otherwise return false.
38 * This must act as a release (SL/S) memory barrier and as a compiler barrier.
40 static bool sk_atomic_cas(int32_t* addr, int32_t before, int32_t after);
42 /** If sk_atomic_dec does not act as an acquire (L/SL) barrier,
43 * this must act as an acquire (L/SL) memory barrier and as a compiler barrier.
45 static void sk_membar_acquire__after_atomic_dec();
47 /** If sk_atomic_conditional_inc does not act as an acquire (L/SL) barrier,
48 * this must act as an acquire (L/SL) memory barrier and as a compiler barrier.
50 static void sk_membar_acquire__after_atomic_conditional_inc();
52 #include SK_ATOMICS_PLATFORM_H
54 // This is POD and must be zero-initialized.
57 SkASSERT(shouldBeZero == 0);
58 // No memory barrier needed, but sk_atomic_cas gives us at least release anyway.
59 while (!sk_atomic_cas(&thisIsPrivate, 0, 1)) {
65 SkASSERT(shouldBeZero == 0);
66 // This requires a release memory barrier before storing, which sk_atomic_cas guarantees.
67 SkAssertResult(sk_atomic_cas(&thisIsPrivate, 1, 0));
70 int32_t thisIsPrivate;
71 SkDEBUGCODE(int32_t shouldBeZero;)
74 class SkAutoSpinlock : SkNoncopyable {
76 explicit SkAutoSpinlock(SkSpinlock* lock) : fLock(lock) { fLock->acquire(); }
77 ~SkAutoSpinlock() { fLock->release(); }
81 #define SkAutoSpinlock(...) SK_REQUIRE_LOCAL_VAR(SkAutoSpinlock)
83 /** SK_MUTEX_PLATFORM_H must provide the following (or equivalent) declarations.
91 class SkMutex : SkBaseMutex {
97 #define SK_DECLARE_STATIC_MUTEX(name) static SkBaseMutex name = ...
98 #define SK_DECLARE_GLOBAL_MUTEX(name) SkBaseMutex name = ...
101 #include SK_MUTEX_PLATFORM_H
104 class SkAutoMutexAcquire : SkNoncopyable {
106 explicit SkAutoMutexAcquire(SkBaseMutex& mutex) : fMutex(&mutex) {
107 SkASSERT(fMutex != NULL);
111 explicit SkAutoMutexAcquire(SkBaseMutex* mutex) : fMutex(mutex) {
117 /** If the mutex has not been released, release it now. */
118 ~SkAutoMutexAcquire() {
124 /** If the mutex has not been released, release it now. */
135 #define SkAutoMutexAcquire(...) SK_REQUIRE_LOCAL_VAR(SkAutoMutexAcquire)