/*
 * atomic.h:  Atomic operations
 *
 * Author:
 *	Dick Porter (dick@ximian.com)
 *
 * (C) 2002 Ximian, Inc.
 * Copyright 2012 Xamarin Inc
 */
11 #ifndef _WAPI_ATOMIC_H_
12 #define _WAPI_ATOMIC_H_
14 #if defined(__NetBSD__)
15 #include <sys/param.h>
17 #if __NetBSD_Version__ > 499004000
18 #include <sys/atomic.h>
19 #define HAVE_ATOMIC_OPS
26 #include "mono/io-layer/wapi.h"
28 #if defined(__NetBSD__) && defined(HAVE_ATOMIC_OPS)
30 #define WAPI_ATOMIC_ASM
31 static inline gint32 InterlockedCompareExchange(volatile gint32 *dest,
32 gint32 exch, gint32 comp)
34 return atomic_cas_32((uint32_t*)dest, comp, exch);
37 static inline gpointer InterlockedCompareExchangePointer(volatile gpointer *dest, gpointer exch, gpointer comp)
39 return atomic_cas_ptr(dest, comp, exch);
42 static inline gint32 InterlockedIncrement(volatile gint32 *val)
44 return atomic_inc_32_nv((uint32_t*)val);
47 static inline gint32 InterlockedDecrement(volatile gint32 *val)
49 return atomic_dec_32_nv((uint32_t*)val);
52 static inline gint32 InterlockedExchange(volatile gint32 *val, gint32 new_val)
54 return atomic_swap_32((uint32_t*)val, new_val);
57 static inline gpointer InterlockedExchangePointer(volatile gpointer *val,
60 return atomic_swap_ptr(val, new_val);
63 static inline gint32 InterlockedExchangeAdd(volatile gint32 *val, gint32 add)
65 return atomic_add_32_nv((uint32_t*)val, add) - add;
68 #elif defined(__i386__) || defined(__x86_64__)
69 #define WAPI_ATOMIC_ASM
72 * NB: The *Pointer() functions here assume that
73 * sizeof(pointer)==sizeof(gint32)
75 * NB2: These asm functions assume 486+ (some of the opcodes dont
76 * exist on 386). If this becomes an issue, we can get configure to
77 * fall back to the non-atomic C versions of these calls.
80 static inline gint32 InterlockedCompareExchange(volatile gint32 *dest,
81 gint32 exch, gint32 comp)
85 __asm__ __volatile__ ("lock; cmpxchgl %2, %0"
86 : "=m" (*dest), "=a" (old)
87 : "r" (exch), "m" (*dest), "a" (comp));
91 static inline gpointer InterlockedCompareExchangePointer(volatile gpointer *dest, gpointer exch, gpointer comp)
95 __asm__ __volatile__ ("lock; "
96 #if defined(__x86_64__) && !defined(__native_client__)
102 : "=m" (*dest), "=a" (old)
103 : "r" (exch), "m" (*dest), "a" (comp));
108 static inline gint32 InterlockedIncrement(volatile gint32 *val)
112 __asm__ __volatile__ ("lock; xaddl %0, %1"
113 : "=r" (tmp), "=m" (*val)
114 : "0" (1), "m" (*val));
119 static inline gint32 InterlockedDecrement(volatile gint32 *val)
123 __asm__ __volatile__ ("lock; xaddl %0, %1"
124 : "=r" (tmp), "=m" (*val)
125 : "0" (-1), "m" (*val));
132 * http://msdn.microsoft.com/library/en-us/dnmag00/html/win320700.asp?frame=true
133 * for the reasons for using cmpxchg and a loop here.
135 * That url is no longer valid, but it's still in the google cache at the
136 * moment: http://www.google.com/search?q=cache:http://msdn.microsoft.com/library/en-us/dnmag00/html/win320700.asp?frame=true
138 * For the time being, http://msdn.microsoft.com/msdnmag/issues/0700/Win32/
139 * might work. Bet it will change soon enough though.
141 static inline gint32 InterlockedExchange(volatile gint32 *val, gint32 new_val)
145 __asm__ __volatile__ ("1:; lock; cmpxchgl %2, %0; jne 1b"
146 : "=m" (*val), "=a" (ret)
147 : "r" (new_val), "m" (*val), "a" (*val));
152 static inline gpointer InterlockedExchangePointer(volatile gpointer *val,
157 __asm__ __volatile__ ("1:; lock; "
158 #if defined(__x86_64__) && !defined(__native_client__)
164 : "=m" (*val), "=a" (ret)
165 : "r" (new_val), "m" (*val), "a" (*val));
170 static inline gint32 InterlockedExchangeAdd(volatile gint32 *val, gint32 add)
174 __asm__ __volatile__ ("lock; xaddl %0, %1"
175 : "=r" (ret), "=m" (*val)
176 : "0" (add), "m" (*val));
/* SPARC v9 port.  The CAS instructions are emitted as raw ".word" opcodes
 * (0xdbe0500c = cas [%g1],%o4,%o5; 0xdbf0500c = casx) because older
 * assemblers did not accept the cas/casx mnemonics.  Operands are pinned to
 * registers g1/o4/o5 with explicit register-asm locals to match the
 * hand-encoded opcodes.
 *
 * NOTE(review): this chunk appears to be a garbled extraction -- original
 * line numbers are fused into the text and interior lines (asm block
 * delimiters, branch/retry and register-move instructions, braces) are
 * missing.  Do not compile as-is; restore from the upstream file. */
181 #elif (defined(sparc) || defined (__sparc__)) && defined(__GNUC__)
182 #define WAPI_ATOMIC_ASM
/* 32-bit CAS: presumably returns the old value left in %o5 -- confirm
 * against upstream. */
185 static inline gint32 InterlockedCompareExchange(volatile gint32 *_dest, gint32 _exch, gint32 _comp)
187 register volatile gint32 *dest asm("g1") = _dest;
188 register gint32 comp asm("o4") = _comp;
189 register gint32 exch asm("o5") = _exch;
191 __asm__ __volatile__(
192 /* cas [%%g1], %%o4, %%o5 */
195 : "0" (exch), "r" (dest), "r" (comp)
/* Pointer-width CAS: casx on 64-bit (arch64), plain cas otherwise. */
202 static inline gpointer InterlockedCompareExchangePointer(volatile gpointer *_dest, gpointer _exch, gpointer _comp)
204 register volatile gpointer *dest asm("g1") = _dest;
205 register gpointer comp asm("o4") = _comp;
206 register gpointer exch asm("o5") = _exch;
208 __asm__ __volatile__(
210 /* casx [%%g1], %%o4, %%o5 */
213 /* cas [%%g1], %%o4, %%o5 */
217 : "0" (exch), "r" (dest), "r" (comp)
/* Increment via load/add/CAS retry loop (branch back to 1: elided here). */
224 static inline gint32 InterlockedIncrement(volatile gint32 *_dest)
226 register volatile gint32 *dest asm("g1") = _dest;
227 register gint32 tmp asm("o4");
228 register gint32 ret asm("o5");
230 __asm__ __volatile__(
231 "1: ld [%%g1], %%o4\n\t"
232 " add %%o4, 1, %%o5\n\t"
233 /* cas [%%g1], %%o4, %%o5 */
234 " .word 0xdbe0500c\n\t"
235 " cmp %%o4, %%o5\n\t"
238 : "=&r" (tmp), "=&r" (ret)
/* Decrement: same retry loop with sub instead of add. */
246 static inline gint32 InterlockedDecrement(volatile gint32 *_dest)
248 register volatile gint32 *dest asm("g1") = _dest;
249 register gint32 tmp asm("o4");
250 register gint32 ret asm("o5");
252 __asm__ __volatile__(
253 "1: ld [%%g1], %%o4\n\t"
254 " sub %%o4, 1, %%o5\n\t"
255 /* cas [%%g1], %%o4, %%o5 */
256 " .word 0xdbe0500c\n\t"
257 " cmp %%o4, %%o5\n\t"
260 : "=&r" (tmp), "=&r" (ret)
/* Exchange: CAS loop that keeps retrying until the swap lands. */
268 static inline gint32 InterlockedExchange(volatile gint32 *_dest, gint32 exch)
270 register volatile gint32 *dest asm("g1") = _dest;
271 register gint32 tmp asm("o4");
272 register gint32 ret asm("o5");
274 __asm__ __volatile__(
275 "1: ld [%%g1], %%o4\n\t"
277 /* cas [%%g1], %%o4, %%o5 */
278 " .word 0xdbe0500c\n\t"
279 " cmp %%o4, %%o5\n\t"
282 : "=&r" (tmp), "=&r" (ret)
283 : "r" (dest), "r" (exch)
/* Pointer-width exchange: ldx/casx on 64-bit, ld/cas otherwise. */
290 static inline gpointer InterlockedExchangePointer(volatile gpointer *_dest, gpointer exch)
292 register volatile gpointer *dest asm("g1") = _dest;
293 register gpointer tmp asm("o4");
294 register gpointer ret asm("o5");
296 __asm__ __volatile__(
298 "1: ldx [%%g1], %%o4\n\t"
300 "1: ld [%%g1], %%o4\n\t"
304 /* casx [%%g1], %%o4, %%o5 */
305 " .word 0xdbf0500c\n\t"
307 /* cas [%%g1], %%o4, %%o5 */
308 " .word 0xdbe0500c\n\t"
310 " cmp %%o4, %%o5\n\t"
313 : "=&r" (tmp), "=&r" (ret)
314 : "r" (dest), "r" (exch)
/* ExchangeAdd: add in the loop, then the trailing add reconstitutes the
 * pre-add value to return (%o5 holds old after CAS success). */
321 static inline gint32 InterlockedExchangeAdd(volatile gint32 *_dest, gint32 add)
323 register volatile gint32 *dest asm("g1") = _dest;
324 register gint32 tmp asm("o4");
325 register gint32 ret asm("o5");
327 __asm__ __volatile__(
328 "1: ld [%%g1], %%o4\n\t"
329 " add %%o4, %3, %%o5\n\t"
330 /* cas [%%g1], %%o4, %%o5 */
331 " .word 0xdbe0500c\n\t"
332 " cmp %%o4, %%o5\n\t"
334 " add %%o5, %3, %%o5"
335 : "=&r" (tmp), "=&r" (ret)
336 : "r" (dest), "r" (add)
/* S/390 port.  Uses CS (Compare and Swap, 32-bit) and -- on 64-bit
 * __s390x__ -- CSG (Compare and Swap Grande) for pointer-width operations;
 * each function therefore appears twice, once per #ifndef __s390x__ branch.
 * LA loads the operand address into a work register first because CS needs
 * a base-register addressable operand.
 *
 * NOTE(review): garbled extraction -- the section's own #elif line, the
 * #ifndef __s390x__/#else/#endif scaffolding, function bodies' braces,
 * local declarations, most asm text and return statements are missing.
 * Restore from upstream before compiling. */
344 #define WAPI_ATOMIC_ASM
/* 32-bit CAS via CS; old value comes back in the register tied to %1. */
347 InterlockedCompareExchange(volatile gint32 *dest,
348 gint32 exch, gint32 comp)
352 __asm__ __volatile__ ("\tLA\t1,%0\n"
355 : "+m" (*dest), "=&r" (old)
356 : "r" (exch), "r" (comp)
/* 31-bit pointer CAS (pointers are 32-bit wide here, so CS suffices). */
362 static inline gpointer
363 InterlockedCompareExchangePointer(volatile gpointer *dest,
364 gpointer exch, gpointer comp)
368 __asm__ __volatile__ ("\tLA\t1,%0\n"
371 : "+m" (*dest), "=&r" (old)
372 : "r" (exch), "r" (comp)
/* s390x pointer CAS uses the 64-bit CSG instruction. */
377 static inline gpointer
378 InterlockedCompareExchangePointer(volatile gpointer *dest,
384 __asm__ __volatile__ ("\tLA\t1,%0\n"
386 "\tCSG\t%1,%2,0(1)\n"
387 : "+m" (*dest), "=&r" (old)
388 : "r" (exch), "r" (comp)
/* Increment: load/add/CS retry loop (loop body elided in this chunk). */
397 InterlockedIncrement(volatile gint32 *val)
401 __asm__ __volatile__ ("\tLA\t2,%1\n"
408 : "=r" (tmp), "+m" (*val)
/* s390x variant of increment. */
415 InterlockedIncrement(volatile gint32 *val)
419 __asm__ __volatile__ ("\tLA\t2,%1\n"
426 : "=r" (tmp), "+m" (*val)
/* Decrement, 31-bit variant. */
435 InterlockedDecrement(volatile gint32 *val)
439 __asm__ __volatile__ ("\tLA\t2,%1\n"
446 : "=r" (tmp), "+m" (*val)
/* Decrement, s390x variant. */
453 InterlockedDecrement(volatile gint32 *val)
457 __asm__ __volatile__ ("\tLA\t2,%1\n"
464 : "=r" (tmp), "+m" (*val)
/* Exchange via CS retry loop; returns the previous value in ret. */
472 InterlockedExchange(volatile gint32 *val, gint32 new_val)
476 __asm__ __volatile__ ("\tLA\t1,%0\n"
480 : "+m" (*val), "=&r" (ret)
/* Pointer exchange, 31-bit variant (CS). */
488 static inline gpointer
489 InterlockedExchangePointer(volatile gpointer *val, gpointer new_val)
493 __asm__ __volatile__ ("\tLA\t1,%0\n"
497 : "+m" (*val), "=&r" (ret)
/* Pointer exchange, s390x variant (CSG). */
504 static inline gpointer
505 InterlockedExchangePointer(volatile gpointer *val, gpointer new_val)
509 __asm__ __volatile__ ("\tLA\t1,%0\n"
511 "\tCSG\t%1,%2,0(1)\n"
513 : "+m" (*val), "=&r" (ret)
/* ExchangeAdd: load, add, CS retry; returns the pre-add value. */
523 InterlockedExchangeAdd(volatile gint32 *val, gint32 add)
527 __asm__ __volatile__ ("\tLA\t2,%1\n"
533 : "=&r" (ret), "+m" (*val)
/* ExchangeAdd, s390x variant. */
541 InterlockedExchangeAdd(volatile gint32 *val, gint32 add)
545 __asm__ __volatile__ ("\tLA\t2,%1\n"
551 : "=&r" (ret), "+m" (*val)
/* PowerPC port.  Two compilers are supported: CodeWarrior (structured
 * `asm { }` blocks, elided in this chunk) and GCC (extended asm).  All
 * operations are built on lwarx/stwcx. (load-reserved / store-conditional)
 * retry loops; on 64-bit ppc64 the LDREGX/STREGCXD/CMPREG macros select the
 * doubleword forms for pointer-width operations.
 *
 * NOTE(review): garbled extraction -- braces, loop branches (bne- 1b),
 * arithmetic lines and return statements are missing throughout; restore
 * from upstream before compiling. */
559 #elif defined(__mono_ppc__)
560 #define WAPI_ATOMIC_ASM
562 #ifdef G_COMPILER_CODEWARRIOR
/* CodeWarrior increment: lwarx/addi/stwcx. loop in a structured asm block. */
563 static inline gint32 InterlockedIncrement(volatile register gint32 *val)
565 gint32 result = 0, tmp;
566 register gint32 result = 0;
574 stwcx. result, 0, val
/* CodeWarrior decrement, same shape with subi. */
581 static inline gint32 InterlockedDecrement(register volatile gint32 *val)
583 register gint32 result = 0;
591 stwcx. result, 0, val
/* CodeWarrior builds assume 32-bit pointers: pointer ops forward to the
 * gint32 versions. */
597 #define InterlockedCompareExchangePointer(dest,exch,comp) (void*)InterlockedCompareExchange((volatile gint32 *)(dest), (gint32)(exch), (gint32)(comp))
599 static inline gint32 InterlockedCompareExchange(volatile register gint32 *dest, register gint32 exch, register gint32 comp)
601 register gint32 tmp = 0;
616 static inline gint32 InterlockedExchange(register volatile gint32 *dest, register gint32 exch)
618 register gint32 tmp = 0;
630 #define InterlockedExchangePointer(dest,exch) (void*)InterlockedExchange((volatile gint32 *)(dest), (gint32)(exch))
/* GCC path: pick word or doubleword reserved-load/conditional-store forms. */
633 #if defined(__mono_ppc64__) && !defined(__mono_ilp32__)
634 #define LDREGX "ldarx"
635 #define STREGCXD "stdcx."
636 #define CMPREG "cmpd"
638 #define LDREGX "lwarx"
639 #define STREGCXD "stwcx."
640 #define CMPREG "cmpw"
/* lwarx/addi/stwcx. loop; returns the incremented value in result. */
643 static inline gint32 InterlockedIncrement(volatile gint32 *val)
645 gint32 result = 0, tmp;
647 __asm__ __volatile__ ("\n1:\n\t"
648 "lwarx %0, 0, %2\n\t"
650 "stwcx. %1, 0, %2\n\t"
652 : "=&b" (result), "=&b" (tmp): "r" (val): "cc", "memory");
/* Same loop with addi -1. */
656 static inline gint32 InterlockedDecrement(volatile gint32 *val)
658 gint32 result = 0, tmp;
660 __asm__ __volatile__ ("\n1:\n\t"
661 "lwarx %0, 0, %2\n\t"
662 "addi %1, %0, -1\n\t"
663 "stwcx. %1, 0, %2\n\t"
665 : "=&b" (result), "=&b" (tmp): "r" (val): "cc", "memory");
/* Pointer CAS using the width-selecting macros above. */
669 static inline gpointer InterlockedCompareExchangePointer (volatile gpointer *dest,
670 gpointer exch, gpointer comp)
674 __asm__ __volatile__ ("\n1:\n\t"
675 LDREGX " %0, 0, %1\n\t"
678 STREGCXD " %3, 0, %1\n\t"
682 : "b" (dest), "r" (comp), "r" (exch): "cc", "memory");
/* 32-bit CAS: lwarx, compare against comp, stwcx. exch on match. */
686 static inline gint32 InterlockedCompareExchange(volatile gint32 *dest,
687 gint32 exch, gint32 comp) {
690 __asm__ __volatile__ ("\n1:\n\t"
691 "lwarx %0, 0, %1\n\t"
694 "stwcx. %3, 0, %1\n\t"
698 : "b" (dest), "r" (comp), "r" (exch): "cc", "memory");
/* Unconditional exchange: store-conditional loop, old value in tmp. */
702 static inline gint32 InterlockedExchange(volatile gint32 *dest, gint32 exch)
706 __asm__ __volatile__ ("\n1:\n\t"
707 "lwarx %0, 0, %2\n\t"
708 "stwcx. %3, 0, %2\n\t"
710 : "=r" (tmp) : "0" (tmp), "b" (dest), "r" (exch): "cc", "memory");
/* Pointer-width exchange via the macro-selected forms. */
714 static inline gpointer InterlockedExchangePointer (volatile gpointer *dest, gpointer exch)
718 __asm__ __volatile__ ("\n1:\n\t"
719 LDREGX " %0, 0, %2\n\t"
720 STREGCXD " %3, 0, %2\n\t"
722 : "=r" (tmp) : "0" (tmp), "b" (dest), "r" (exch): "cc", "memory");
/* ExchangeAdd: load old into result, add into tmp, store-conditional;
 * returns the pre-add value. */
726 static inline gint32 InterlockedExchangeAdd(volatile gint32 *dest, gint32 add)
729 __asm__ __volatile__ ("\n1:\n\t"
730 "lwarx %0, 0, %2\n\t"
732 "stwcx. %1, 0, %2\n\t"
734 : "=&r" (result), "=&r" (tmp)
735 : "r" (dest), "r" (add) : "cc", "memory");
743 #endif /* !G_COMPILER_CODEWARRIOR */
/* ARM port.  ARMv6+ uses ldrex/strex (exclusive load/store) retry loops;
 * pre-v6 cores fall back to the architecturally deprecated swp instruction
 * with a compare-and-reswap loop to emulate CAS.
 *
 * NOTE(review): garbled extraction -- local declarations, the ldrex/teq
 * lines, memory-barrier instructions, loop branches, braces and returns are
 * missing; restore from upstream before compiling. */
745 #elif defined(__arm__)
746 #define WAPI_ATOMIC_ASM
749 * Atomic operations on ARM doesn't contain memory barriers, and the runtime code
750 * depends on this, so we add them explicitly.
/* CAS: ldrex, compare with comp, strexeq exch; retry while strex fails. */
753 static inline gint32 InterlockedCompareExchange(volatile gint32 *dest, gint32 exch, gint32 comp)
755 #if defined(__ARM_ARCH_6__) || defined(__ARM_ARCH_7A__) || defined(__ARM_ARCH_7__)
757 __asm__ __volatile__ ( "1:\n"
763 "strexeq %0, %4, [%2]\n"
767 : "=&r" (tmp), "=&r" (ret)
768 : "r" (dest), "r" (comp), "r" (exch)
/* Pre-v6 fallback: swp in, compare, swp back if the guess was wrong. */
775 __asm__ __volatile__ ( "0:\n\t"
780 "swp %0, %3, [%2]\n\t"
782 "swpne %3, %0, [%2]\n\t"
785 : "=&r" (a), "=&r" (b)
786 : "r" (dest), "r" (exch), "r" (comp)
/* Pointer CAS -- same shape (ARM pointers are 32-bit here). */
793 static inline gpointer InterlockedCompareExchangePointer(volatile gpointer *dest, gpointer exch, gpointer comp)
795 #if defined(__ARM_ARCH_6__) || defined(__ARM_ARCH_7A__) || defined(__ARM_ARCH_7__)
797 __asm__ __volatile__ (
804 "strexeq %0, %4, [%2]\n"
808 : "=&r" (tmp), "=&r" (ret)
809 : "r" (dest), "r" (comp), "r" (exch)
816 __asm__ __volatile__ ( "0:\n\t"
821 "swpeq %0, %3, [%2]\n\t"
823 "swpne %3, %0, [%2]\n\t"
826 : "=&r" (a), "=&r" (b)
827 : "r" (dest), "r" (exch), "r" (comp)
/* Increment: ldrex/add/strex loop on v6+, swp loop otherwise. */
834 static inline gint32 InterlockedIncrement(volatile gint32 *dest)
836 #if defined(__ARM_ARCH_6__) || defined(__ARM_ARCH_7A__) || defined(__ARM_ARCH_7__)
838 __asm__ __volatile__ (
843 "strex %1, %0, [%2]\n"
847 : "=&r" (ret), "=&r" (flag)
848 : "r" (dest), "r" (1)
855 __asm__ __volatile__ ( "0:\n\t"
858 "swp %2, %1, [%3]\n\t"
860 "swpne %1, %2, [%3]\n\t"
862 : "=&r" (a), "=&r" (b), "=&r" (c)
863 : "r" (dest), "r" (1)
/* Decrement: same loops with -1. */
870 static inline gint32 InterlockedDecrement(volatile gint32 *dest)
872 #if defined(__ARM_ARCH_6__) || defined(__ARM_ARCH_7A__) || defined(__ARM_ARCH_7__)
874 __asm__ __volatile__ (
879 "strex %1, %0, [%2]\n"
883 : "=&r" (ret), "=&r" (flag)
884 : "r" (dest), "r" (1)
891 __asm__ __volatile__ ( "0:\n\t"
894 "swp %2, %1, [%3]\n\t"
896 "swpne %1, %2, [%3]\n\t"
898 : "=&r" (a), "=&r" (b), "=&r" (c)
899 : "r" (dest), "r" (-1)
/* Exchange: strex loop on v6+; pre-v6 a single swp suffices. */
906 static inline gint32 InterlockedExchange(volatile gint32 *dest, gint32 exch)
908 #if defined(__ARM_ARCH_6__) || defined(__ARM_ARCH_7A__) || defined(__ARM_ARCH_7__)
910 __asm__ __volatile__ (
914 "strex %1, %2, [%3]\n"
918 : "=&r" (ret), "=&r" (flag)
919 : "r" (exch), "r" (dest)
925 __asm__ __volatile__ ( "swp %0, %2, [%1]"
927 : "r" (dest), "r" (exch));
/* Pointer exchange, same pattern. */
933 static inline gpointer InterlockedExchangePointer(volatile gpointer *dest, gpointer exch)
935 #if defined(__ARM_ARCH_6__) || defined(__ARM_ARCH_7A__) || defined(__ARM_ARCH_7__)
937 __asm__ __volatile__ (
941 "strex %1, %2, [%3]\n"
945 : "=&r" (ret), "=&r" (flag)
946 : "r" (exch), "r" (dest)
952 __asm__ __volatile__ ( "swp %0, %2, [%1]"
954 : "r" (dest), "r" (exch));
/* ExchangeAdd: ldrex/add/strex loop; returns the pre-add value in ret. */
960 static inline gint32 InterlockedExchangeAdd(volatile gint32 *dest, gint32 add)
962 #if defined(__ARM_ARCH_6__) || defined(__ARM_ARCH_7A__) || defined(__ARM_ARCH_7__)
963 gint32 ret, tmp, flag;
964 __asm__ __volatile__ (
969 "strex %2, %1, [%3]\n"
973 : "=&r" (ret), "=&r" (tmp), "=&r" (flag)
974 : "r" (dest), "r" (add)
981 __asm__ __volatile__ ( "0:\n\t"
984 "swp %2, %1, [%3]\n\t"
986 "swpne %1, %2, [%3]\n\t"
988 : "=&r" (a), "=&r" (b), "=&r" (c)
989 : "r" (dest), "r" (add)
996 #elif defined(__ia64__)
997 #define WAPI_ATOMIC_ASM
999 #ifdef __INTEL_COMPILER
1000 #include <ia64intrin.h>
1003 static inline gint32 InterlockedCompareExchange(gint32 volatile *dest,
1004 gint32 exch, gint32 comp)
1009 #ifdef __INTEL_COMPILER
1010 old = _InterlockedCompareExchange (dest, exch, comp);
1012 /* cmpxchg4 zero extends the value read from memory */
1013 real_comp = (guint64)(guint32)comp;
1014 asm volatile ("mov ar.ccv = %2 ;;\n\t"
1015 "cmpxchg4.acq %0 = [%1], %3, ar.ccv\n\t"
1016 : "=r" (old) : "r" (dest), "r" (real_comp), "r" (exch));
1022 static inline gpointer InterlockedCompareExchangePointer(gpointer volatile *dest,
1023 gpointer exch, gpointer comp)
1027 #ifdef __INTEL_COMPILER
1028 old = _InterlockedCompareExchangePointer (dest, exch, comp);
1030 asm volatile ("mov ar.ccv = %2 ;;\n\t"
1031 "cmpxchg8.acq %0 = [%1], %3, ar.ccv\n\t"
1032 : "=r" (old) : "r" (dest), "r" (comp), "r" (exch));
1038 static inline gint32 InterlockedIncrement(gint32 volatile *val)
1040 #ifdef __INTEL_COMPILER
1041 return _InterlockedIncrement (val);
1047 } while (InterlockedCompareExchange (val, old + 1, old) != old);
1053 static inline gint32 InterlockedDecrement(gint32 volatile *val)
1055 #ifdef __INTEL_COMPILER
1056 return _InterlockedDecrement (val);
1062 } while (InterlockedCompareExchange (val, old - 1, old) != old);
1068 static inline gint32 InterlockedExchange(gint32 volatile *dest, gint32 new_val)
1070 #ifdef __INTEL_COMPILER
1071 return _InterlockedExchange (dest, new_val);
1077 } while (InterlockedCompareExchange (dest, new_val, res) != res);
1083 static inline gpointer InterlockedExchangePointer(gpointer volatile *dest, gpointer new_val)
1085 #ifdef __INTEL_COMPILER
1086 return (gpointer)_InterlockedExchange64 ((gint64*)dest, (gint64)new_val);
1092 } while (InterlockedCompareExchangePointer (dest, new_val, res) != res);
1098 static inline gint32 InterlockedExchangeAdd(gint32 volatile *val, gint32 add)
1102 #ifdef __INTEL_COMPILER
1103 old = _InterlockedExchangeAdd (val, add);
1107 } while (InterlockedCompareExchange (val, old + add, old) != old);
/* Alpha port.  Built on ldl_l/stl_c (load-locked / store-conditional)
 * retry loops; cmpeq/cmovne implement the CAS compare step.  The 32-bit
 * comp/exch values are widened to long (compq/exchq) because cmpeq
 * compares full 64-bit registers.
 *
 * NOTE(review): garbled extraction -- the ldl_l/stl_c lines, retry
 * branches, memory barriers, braces and return statements are missing;
 * restore from upstream before compiling. */
1113 #elif defined(__alpha__)
1114 #define WAPI_ATOMIC_ASM
/* CAS: load-locked old, compare with compq, conditionally move exchq in,
 * store-conditional, retry on failure. */
1116 static inline gint32 InterlockedCompareExchange(volatile gint32 *dest,
1117 gint32 exch, gint32 comp)
1119 gint32 old, temp, temp2;
1120 long compq = comp, exchq = exch;
1122 __asm__ __volatile__ (
1125 " cmpeq %2, %5, %3\n"
1126 " cmovne %3, %4, %2\n"
1129 : "=m" (*dest), "=&r" (old), "=&r" (temp), "=&r" (temp2)
1130 : "r" (exchq), "r" (compq), "m" (*dest));
/* Pointer-width CAS -- same shape with ldq_l/stq_c (elided). */
1134 static inline gpointer InterlockedCompareExchangePointer(volatile gpointer *dest, gpointer exch, gpointer comp)
1136 gpointer old, temp, temp2;
1138 __asm__ __volatile__ (
1141 " cmpeq %2, %5, %3\n"
1142 " cmovne %3, %4, %2\n"
1145 : "=m" (*dest), "=&r" (old), "=&r" (temp), "=&r" (temp2)
1146 : "r" (exch), "r" (comp), "m" (*dest));
/* Increment: ll/addl/sc loop; cur receives the new value. */
1150 static inline gint32 InterlockedIncrement(volatile gint32 *val)
1154 __asm__ __volatile__ (
1156 " addl %0, %3, %0\n"
1160 : "=&r" (temp), "=m" (*val), "=r" (cur)
1161 : "Ir" (1), "m" (*val));
/* Decrement: same loop with subl. */
1165 static inline gint32 InterlockedDecrement(volatile gint32 *val)
1169 __asm__ __volatile__ (
1171 " subl %0, %3, %0\n"
1175 : "=&r" (temp), "=m" (*val), "=r" (cur)
1176 : "Ir" (1), "m" (*val));
/* Exchange: ll/sc loop; ret gets the previous value. */
1180 static inline gint32 InterlockedExchange(volatile gint32 *val, gint32 new_val)
1184 __asm__ __volatile__ (
1189 : "=m" (*val), "=&r" (ret), "=&r" (temp)
1190 : "r" (new_val), "m" (*val));
/* Pointer-width exchange (ldq_l/stq_c, elided). */
1194 static inline gpointer InterlockedExchangePointer(volatile gpointer *val, gpointer new_val)
1198 __asm__ __volatile__ (
1203 : "=m" (*val), "=&r" (ret), "=&r" (temp)
1204 : "r" (new_val), "m" (*val));
/* ExchangeAdd: ll into ret, addl into temp, sc; returns pre-add value. */
1208 static inline gint32 InterlockedExchangeAdd(volatile gint32 *val, gint32 add)
1212 __asm__ __volatile__ (
1215 " addl %2, %3, %2\n"
1218 : "=m" (*val), "=&r" (ret), "=&r" (temp)
1219 : "r" (add), "m" (*val));
/* MIPS port.  Built on ll/sc (load-linked / store-conditional) retry loops
 * emitted under ".set mips32"; only 32-bit registers are supported (the
 * 64-bit case is an explicit #error).  Pointer operations forward to the
 * gint32 versions, so pointers are assumed 32-bit here.
 *
 * NOTE(review): garbled extraction -- the ll/sc instruction lines, branch
 * lines, ".set mips0" restores, braces and returns are missing; restore
 * from upstream before compiling. */
1224 #elif defined(__mips__)
1225 #define WAPI_ATOMIC_ASM
1227 #if SIZEOF_REGISTER == 8
1228 #error "Not implemented."
/* Increment: ll/addiu/sc loop; result gets the new value. */
1231 static inline gint32 InterlockedIncrement(volatile gint32 *val)
1233 gint32 tmp, result = 0;
1235 __asm__ __volatile__ (" .set mips32\n"
1241 : "=&r" (result), "=&r" (tmp), "=m" (*val)
/* Decrement: same loop with -1. */
1246 static inline gint32 InterlockedDecrement(volatile gint32 *val)
1248 gint32 tmp, result = 0;
1250 __asm__ __volatile__ (" .set mips32\n"
1256 : "=&r" (result), "=&r" (tmp), "=m" (*val)
/* CAS: ll old, compare with comp, sc exch on match; old is returned. */
1261 static inline gint32 InterlockedCompareExchange(volatile gint32 *dest,
1262 gint32 exch, gint32 comp) {
1265 __asm__ __volatile__ (" .set mips32\n"
1272 : "=&r" (old), "=&r" (tmp), "=m" (*dest)
1273 : "m" (*dest), "r" (exch), "r" (comp));
/* Pointer CAS forwards to the 32-bit CAS (32-bit pointers only). */
1277 static inline gpointer InterlockedCompareExchangePointer(volatile gpointer *dest, gpointer exch, gpointer comp)
1279 return (gpointer)(InterlockedCompareExchange((volatile gint32 *)(dest), (gint32)(exch), (gint32)(comp)));
/* Exchange: ll/sc loop; result gets the previous value. */
1282 static inline gint32 InterlockedExchange(volatile gint32 *dest, gint32 exch)
1286 __asm__ __volatile__ (" .set mips32\n"
1292 : "=&r" (result), "=&r" (tmp), "=m" (*dest)
1293 : "m" (*dest), "r" (exch));
/* Pointer exchange forwards to the 32-bit exchange. */
1297 static inline gpointer InterlockedExchangePointer(volatile gpointer *dest, gpointer exch)
1299 return (gpointer)InterlockedExchange((volatile gint32 *)(dest), (gint32)(exch));
/* ExchangeAdd: ll into result, addu into tmp, sc; returns pre-add value. */
1302 static inline gint32 InterlockedExchangeAdd(volatile gint32 *dest, gint32 add)
1306 __asm__ __volatile__ (" .set mips32\n"
1308 " addu %1, %0, %4\n"
1312 : "=&r" (result), "=&r" (tmp), "=m" (*dest)
1313 : "m" (*dest), "r" (add));
/* Fallback for architectures with no inline-asm implementation above: the
 * Interlocked* API is declared extern and implemented out of line (in the
 * io-layer's C fallback, typically with a global lock).  hppa still claims
 * WAPI_ATOMIC_ASM because its implementation lives in a separate file.
 *
 * NOTE(review): garbled extraction -- the preceding #else of the arch
 * #elif chain and the closing #endif lines for the hppa block and the arch
 * chain are missing, leaving the conditionals unbalanced against the final
 * include-guard #endif; restore from upstream before compiling. */
1319 extern gint32 InterlockedCompareExchange(volatile gint32 *dest, gint32 exch, gint32 comp);
1320 extern gpointer InterlockedCompareExchangePointer(volatile gpointer *dest, gpointer exch, gpointer comp);
1321 extern gint32 InterlockedIncrement(volatile gint32 *dest);
1322 extern gint32 InterlockedDecrement(volatile gint32 *dest);
1323 extern gint32 InterlockedExchange(volatile gint32 *dest, gint32 exch);
1324 extern gpointer InterlockedExchangePointer(volatile gpointer *dest, gpointer exch);
1325 extern gint32 InterlockedExchangeAdd(volatile gint32 *dest, gint32 add);
1327 #if defined(__hppa__)
1328 #define WAPI_ATOMIC_ASM
1333 #endif /* _WAPI_ATOMIC_H_ */