#elif defined(__arm__)
#define WAPI_ATOMIC_ASM
+/*
+ * Atomic operations on ARM don't contain memory barriers, and the runtime
+ * code depends on barrier semantics, so we add them explicitly.
+ */
+
static inline gint32 InterlockedCompareExchange(volatile gint32 *dest, gint32 exch, gint32 comp)
{
#if defined(__ARM_ARCH_6__) || defined(__ARM_ARCH_7A__) || defined(__ARM_ARCH_7__)
gint32 ret, tmp;
- __asm__ __volatile__ ( "1:\n"
+ __asm__ __volatile__ (
+ "dmb\n"
+ "1:\n"
"mov %0, #0\n"
"ldrex %1, [%2]\n"
"teq %1, %3\n"
"strexeq %0, %4, [%2]\n"
"teq %0, #0\n"
"bne 1b\n"
+ "dmb\n"
: "=&r" (tmp), "=&r" (ret)
: "r" (dest), "r" (comp), "r" (exch)
: "memory", "cc");
{
#if defined(__ARM_ARCH_6__) || defined(__ARM_ARCH_7A__) || defined(__ARM_ARCH_7__)
gpointer ret, tmp;
- __asm__ __volatile__ ( "1:\n"
+ __asm__ __volatile__ (
+ "dmb\n"
+ "1:\n"
"mov %0, #0\n"
"ldrex %1, [%2]\n"
"teq %1, %3\n"
"strexeq %0, %4, [%2]\n"
"teq %0, #0\n"
"bne 1b\n"
+ "dmb\n"
: "=&r" (tmp), "=&r" (ret)
: "r" (dest), "r" (comp), "r" (exch)
: "memory", "cc");
{
#if defined(__ARM_ARCH_6__) || defined(__ARM_ARCH_7A__) || defined(__ARM_ARCH_7__)
gint32 ret, flag;
- __asm__ __volatile__ ( "1:\n"
+ __asm__ __volatile__ (
+ "dmb\n"
+ "1:\n"
"ldrex %0, [%2]\n"
"add %0, %0, %3\n"
"strex %1, %0, [%2]\n"
"teq %1, #0\n"
"bne 1b\n"
+ "dmb\n"
: "=&r" (ret), "=&r" (flag)
: "r" (dest), "r" (1)
: "memory", "cc");
{
#if defined(__ARM_ARCH_6__) || defined(__ARM_ARCH_7A__) || defined(__ARM_ARCH_7__)
gint32 ret, flag;
- __asm__ __volatile__ ( "1:\n"
+ __asm__ __volatile__ (
+ "dmb\n"
+ "1:\n"
"ldrex %0, [%2]\n"
"sub %0, %0, %3\n"
"strex %1, %0, [%2]\n"
"teq %1, #0\n"
"bne 1b\n"
+ "dmb\n"
: "=&r" (ret), "=&r" (flag)
: "r" (dest), "r" (1)
: "memory", "cc");
#if defined(__ARM_ARCH_6__) || defined(__ARM_ARCH_7A__) || defined(__ARM_ARCH_7__)
gint32 ret, flag;
__asm__ __volatile__ (
+ "dmb\n"
"1:\n"
"ldrex %0, [%3]\n"
"strex %1, %2, [%3]\n"
"teq %1, #0\n"
"bne 1b\n"
+ "dmb\n"
: "=&r" (ret), "=&r" (flag)
: "r" (exch), "r" (dest)
: "memory", "cc");
#if defined(__ARM_ARCH_6__) || defined(__ARM_ARCH_7A__) || defined(__ARM_ARCH_7__)
gpointer ret, flag;
__asm__ __volatile__ (
+ "dmb\n"
"1:\n"
"ldrex %0, [%3]\n"
"strex %1, %2, [%3]\n"
"teq %1, #0\n"
"bne 1b\n"
+ "dmb\n"
: "=&r" (ret), "=&r" (flag)
: "r" (exch), "r" (dest)
: "memory", "cc");
{
#if defined(__ARM_ARCH_6__) || defined(__ARM_ARCH_7A__) || defined(__ARM_ARCH_7__)
gint32 ret, tmp, flag;
- __asm__ __volatile__ ( "1:\n"
+ __asm__ __volatile__ (
+ "dmb\n"
+ "1:\n"
"ldrex %0, [%3]\n"
"add %1, %0, %4\n"
"strex %2, %1, [%3]\n"
"teq %2, #0\n"
"bne 1b\n"
+ "dmb\n"
: "=&r" (ret), "=&r" (tmp), "=&r" (flag)
: "r" (dest), "r" (add)
: "memory", "cc");