2011-06-06 Ivan Maidanski <ivmai@mail.ru>
+ * src/atomic_ops/sysdeps/gcc/arm.h (AO_THUMB_GO_ARM,
+ AO_THUMB_RESTORE_MODE, AO_THUMB_SWITCH_CLOBBERS): Define new
+ macros (to temporarily switch the CPU mode to ARM in inline
+ assembler if compiling in Thumb mode, and to restore it on exit).
+ * src/atomic_ops/sysdeps/gcc/arm.h (AO_nop_full, AO_store,
+ AO_test_and_set, AO_fetch_and_add, AO_fetch_and_add1,
+ AO_fetch_and_sub1, AO_compare_and_swap, AO_test_and_set_full):
+ Enable compilation in the Thumb mode (use AO_THUMB_GO_ARM,
+ AO_THUMB_RESTORE_MODE and AO_THUMB_SWITCH_CLOBBERS macros).
+ * src/atomic_ops/sysdeps/gcc/arm.h (AO_store, AO_test_and_set,
+ AO_fetch_and_add, AO_fetch_and_add1, AO_fetch_and_sub1,
+ AO_compare_and_swap, AO_compare_double_and_swap_double): Add
+ "memory" to the clobber list.
+
+2011-06-06 Ivan Maidanski <ivmai@mail.ru>
+
* src/atomic_ops/sysdeps/gcc/arm.h: Reformat code.
* src/atomic_ops/sysdeps/gcc/arm.h (AO_nop_full,
AO_test_and_set_full): Add assembler comment containing the
/* If only a single processor is used, we can define AO_UNIPROCESSOR */
/* and do not need to access CP15 for ensuring a DMB. */
+#ifdef __thumb__
+# define AO_THUMB_GO_ARM \
+ " adr r3, 101f\n" \
+ " bx r3\n" \
+ " .align\n" \
+ " .arm\n" \
+ "101:\n"
+# define AO_THUMB_RESTORE_MODE \
+ " adr r3, 102f + 1\n" \
+ " bx r3\n" \
+ " .thumb\n" \
+ "102:\n"
+# define AO_THUMB_SWITCH_CLOBBERS "r3",
+#else
+# define AO_THUMB_GO_ARM /* empty */
+# define AO_THUMB_RESTORE_MODE /* empty */
+# define AO_THUMB_SWITCH_CLOBBERS /* empty */
+#endif /* !__thumb__ */
+
/* NEC LE-IT: gcc has no way to easily check the arm architecture */
/* but it defines only one of __ARM_ARCH_x__ to be true. */
#if !defined(__ARM_ARCH_2__) && !defined(__ARM_ARCH_3__) \
/* Issue a data memory barrier (keeps ordering of memory */
/* transactions before and after this operation). */
__asm__ __volatile__("@AO_nop_full\n"
+ AO_THUMB_GO_ARM
" mcr p15,0,%0,c7,c10,5\n"
+ AO_THUMB_RESTORE_MODE
: "=&r"(dest)
: /* empty */
- : "memory");
+ : AO_THUMB_SWITCH_CLOBBERS "memory");
# endif
}
#define AO_HAVE_nop_full
AO_t flag;
__asm__ __volatile__("@AO_store\n"
+ AO_THUMB_GO_ARM
"1: ldrex %0, [%2]\n"
" strex %0, %3, [%2]\n"
" teq %0, #0\n"
- " bne 1b"
+ " bne 1b\n"
+ AO_THUMB_RESTORE_MODE
: "=&r"(flag), "+m"(*addr)
: "r" (addr), "r"(value)
- : "cc");
+ : AO_THUMB_SWITCH_CLOBBERS "cc", "memory");
}
#define AO_HAVE_store
unsigned long flag;
__asm__ __volatile__("@AO_test_and_set\n"
+ AO_THUMB_GO_ARM
"1: ldrex %0, [%3]\n"
" strex %1, %4, [%3]\n"
" teq %1, #0\n"
" bne 1b\n"
+ AO_THUMB_RESTORE_MODE
: "=&r"(oldval), "=&r"(flag), "+m"(*addr)
: "r"(addr), "r"(1)
- : "cc");
+ : AO_THUMB_SWITCH_CLOBBERS "cc", "memory");
return oldval;
}
#define AO_HAVE_test_and_set
AO_t result;
__asm__ __volatile__("@AO_fetch_and_add\n"
+ AO_THUMB_GO_ARM
"1: ldrex %0, [%5]\n" /* get original */
" add %2, %0, %4\n" /* sum up in incr */
" strex %1, %2, [%5]\n" /* store them */
" teq %1, #0\n"
" bne 1b\n"
+ AO_THUMB_RESTORE_MODE
: "=&r"(result), "=&r"(flag), "=&r"(tmp), "+m"(*p) /* 0..3 */
: "r"(incr), "r"(p) /* 4..5 */
- : "cc");
+ : AO_THUMB_SWITCH_CLOBBERS "cc", "memory");
return result;
}
#define AO_HAVE_fetch_and_add
AO_t result;
__asm__ __volatile__("@AO_fetch_and_add1\n"
+ AO_THUMB_GO_ARM
"1: ldrex %0, [%4]\n" /* get original */
" add %1, %0, #1\n" /* increment */
" strex %2, %1, [%4]\n" /* store them */
" teq %2, #0\n"
" bne 1b\n"
+ AO_THUMB_RESTORE_MODE
: "=&r"(result), "=&r"(tmp), "=&r"(flag), "+m"(*p)
: "r"(p)
- : "cc");
+ : AO_THUMB_SWITCH_CLOBBERS "cc", "memory");
return result;
}
#define AO_HAVE_fetch_and_add1
AO_t result;
__asm__ __volatile__("@AO_fetch_and_sub1\n"
+ AO_THUMB_GO_ARM
"1: ldrex %0, [%4]\n" /* get original */
" sub %1, %0, #1\n" /* decrement */
" strex %2, %1, [%4]\n" /* store them */
" teq %2, #0\n"
" bne 1b\n"
+ AO_THUMB_RESTORE_MODE
: "=&r"(result), "=&r"(tmp), "=&r"(flag), "+m"(*p)
: "r"(p)
- : "cc");
+ : AO_THUMB_SWITCH_CLOBBERS "cc", "memory");
return result;
}
#define AO_HAVE_fetch_and_sub1
AO_t result, tmp;
__asm__ __volatile__("@AO_compare_and_swap\n"
+ AO_THUMB_GO_ARM
"1: mov %0, #2\n" /* store a flag */
" ldrex %1, [%3]\n" /* get original */
" teq %1, %4\n" /* see if match */
" strexeq %0, %5, [%3]\n" /* store new one if matched */
" teq %0, #1\n"
" beq 1b\n" /* if update failed, repeat */
+ AO_THUMB_RESTORE_MODE
: "=&r"(result), "=&r"(tmp), "+m"(*addr)
: "r"(addr), "r"(old_val), "r"(new_val)
- : "cc");
+ : AO_THUMB_SWITCH_CLOBBERS "cc", "memory");
 return !(result&2); /* if succeeded, return 1, else 0 */
}
#define AO_HAVE_compare_and_swap
" ldrexd %0, [%1]\n" /* get original to r1 & r2 */
: "=&r"(tmp)
: "r"(addr)
- : "cc");
+ : "cc", "memory");
if (tmp != old_val)
return 0;
__asm__ __volatile__(
" strexd %0, %2, [%3]\n" /* store new one if matched */
: "=&r"(result), "+m"(*addr)
: "r"(new_val), "r"(addr)
- : "cc");
+ : "cc", "memory");
} while (result);
return 1;
}
/* them to the same register if they are both unused. */
__asm__ __volatile__("@AO_test_and_set_full\n"
+ AO_THUMB_GO_ARM
" swp %0, %2, [%3]\n"
+ AO_THUMB_RESTORE_MODE
: "=&r"(oldval), "=&r"(addr)
: "r"(1), "1"(addr)
- : "memory");
+ : AO_THUMB_SWITCH_CLOBBERS "memory");
return oldval;
}
# define AO_HAVE_test_and_set_full