__sync_fetch_and_add((volatile kmp_int32 *)(p), 1)
#define KMP_TEST_THEN_INC_ACQ32(p) \
__sync_fetch_and_add((volatile kmp_int32 *)(p), 1)
-#if defined(KMP_ARCH_MIPS)
+#if KMP_ARCH_MIPS
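// Note (added remark, not in the original header): the value test, rather than
// defined(KMP_ARCH_MIPS), matters if the architecture macros are always defined
// as 0 or 1 for every target; with defined() this MIPS-only path would be taken
// everywhere. The 64-bit macros in this branch use the __atomic builtins,
// presumably because the 64-bit __sync builtins cannot be relied on for MIPS.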
#define KMP_TEST_THEN_INC64(p) \
__atomic_fetch_add((volatile kmp_int64 *)(p), 1LL, __ATOMIC_SEQ_CST)
#define KMP_TEST_THEN_INC_ACQ64(p) \
__atomic_fetch_add((volatile kmp_int64 *)(p), 1LL, __ATOMIC_SEQ_CST)
#define KMP_TEST_THEN_ADD4_32(p) \
__sync_fetch_and_add((volatile kmp_int32 *)(p), 4)
#define KMP_TEST_THEN_ADD4_ACQ32(p) \
__sync_fetch_and_add((volatile kmp_int32 *)(p), 4)
-#if defined(KMP_ARCH_MIPS)
+#if KMP_ARCH_MIPS
#define KMP_TEST_THEN_ADD4_64(p) \
__atomic_fetch_add((volatile kmp_int64 *)(p), 4LL, __ATOMIC_SEQ_CST)
#define KMP_TEST_THEN_ADD4_ACQ64(p) \
__atomic_fetch_add((volatile kmp_int64 *)(p), 4LL, __ATOMIC_SEQ_CST)
#define KMP_TEST_THEN_ADD8(p, v) \
__sync_fetch_and_add((volatile kmp_int8 *)(p), (kmp_int8)(v))
#define KMP_TEST_THEN_ADD32(p, v) \
__sync_fetch_and_add((volatile kmp_int32 *)(p), (kmp_int32)(v))
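// Illustrative sketch (hypothetical variable names, not part of this header):
// these are fetch-then-op macros, so they evaluate to the value held *before*
// the update, e.g.
//   kmp_int32 counter = 5;
//   kmp_int32 old = KMP_TEST_THEN_ADD32(&counter, 3); // old == 5, counter == 8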
-#if defined(KMP_ARCH_MIPS)
+#if KMP_ARCH_MIPS
#define KMP_TEST_THEN_ADD64(p, v) \
__atomic_fetch_add((volatile kmp_uint64 *)(p), (kmp_uint64)(v), \
__ATOMIC_SEQ_CST)
#define KMP_TEST_THEN_OR32(p, v) \
__sync_fetch_and_or((volatile kmp_uint32 *)(p), (kmp_uint32)(v))
#define KMP_TEST_THEN_AND32(p, v) \
__sync_fetch_and_and((volatile kmp_uint32 *)(p), (kmp_uint32)(v))
-#if defined(KMP_ARCH_MIPS)
+#if KMP_ARCH_MIPS
#define KMP_TEST_THEN_OR64(p, v) \
__atomic_fetch_or((volatile kmp_uint64 *)(p), (kmp_uint64)(v), \
__ATOMIC_SEQ_CST)
#define KMP_COMPARE_AND_STORE_RET32(p, cv, sv) \
__sync_val_compare_and_swap((volatile kmp_uint32 *)(p), (kmp_uint32)(cv), \
(kmp_uint32)(sv))
-#if defined(KMP_ARCH_MIPS)
+#if KMP_ARCH_MIPS
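// Added remark: the helper below wraps the generic __atomic_compare_exchange
// builtin in a __sync_bool_compare_and_swap-style interface, returning true
// when *p matched cv and was replaced by sv; presumably this is needed because
// the 64-bit __sync builtins are not available on MIPS.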
static inline bool mips_sync_bool_compare_and_swap(
volatile kmp_uint64 *p, kmp_uint64 cv, kmp_uint64 sv) {
return __atomic_compare_exchange(p, &cv, &sv, false, __ATOMIC_SEQ_CST,