2 * atomic.h: Atomic operations
5 * Dick Porter (dick@ximian.com)
7 * (C) 2002 Ximian, Inc.
10 #ifndef _WAPI_ATOMIC_H_
11 #define _WAPI_ATOMIC_H_
13 #if defined(__NetBSD__)
14 #include <sys/param.h>
16 #if __NetBSD_Version__ > 499004000
17 #include <sys/atomic.h>
18 #define HAVE_ATOMIC_OPS
25 #include "mono/io-layer/wapi.h"
27 #if defined(__NetBSD__) && defined(HAVE_ATOMIC_OPS)
29 #define WAPI_ATOMIC_ASM
30 static inline gint32 InterlockedCompareExchange(volatile gint32 *dest,
31 gint32 exch, gint32 comp)
33 return atomic_cas_32((uint32_t*)dest, comp, exch);
36 static inline gpointer InterlockedCompareExchangePointer(volatile gpointer *dest, gpointer exch, gpointer comp)
38 return atomic_cas_ptr(dest, comp, exch);
41 static inline gint32 InterlockedIncrement(volatile gint32 *val)
43 return atomic_inc_32_nv((uint32_t*)val);
46 static inline gint32 InterlockedDecrement(volatile gint32 *val)
48 return atomic_dec_32_nv((uint32_t*)val);
51 static inline gint32 InterlockedExchange(volatile gint32 *val, gint32 new_val)
53 return atomic_swap_32((uint32_t*)val, new_val);
56 static inline gpointer InterlockedExchangePointer(volatile gpointer *val,
59 return atomic_swap_ptr(val, new_val);
62 static inline gint32 InterlockedExchangeAdd(volatile gint32 *val, gint32 add)
64 return atomic_add_32_nv((uint32_t*)val, add) - add;
67 #elif defined(__i386__) || defined(__x86_64__)
68 #define WAPI_ATOMIC_ASM
/* NOTE(review): this chunk is an incomplete extraction — braces, local
 * variable declarations, return statements and the #else branches of
 * the 64/32-bit opcode selections are missing from this view.  The
 * comments below describe only the lines that are visible. */
71 * NB: The *Pointer() functions here assume that
72 * sizeof(pointer)==sizeof(gint32)
74 * NB2: These asm functions assume 486+ (some of the opcodes dont
75 * exist on 386). If this becomes an issue, we can get configure to
76 * fall back to the non-atomic C versions of these calls.
/* CAS via lock-prefixed cmpxchgl: comparand in eax ("a" (comp)),
 * previous value comes back in eax ("=a" (old)). */
79 static inline gint32 InterlockedCompareExchange(volatile gint32 *dest,
80 gint32 exch, gint32 comp)
84 __asm__ __volatile__ ("lock; cmpxchgl %2, %0"
85 : "=m" (*dest), "=a" (old)
86 : "r" (exch), "m" (*dest), "a" (comp));
/* Pointer CAS; the opcode suffix (cmpxchgq vs cmpxchgl) is selected by
 * the #if below — the alternative branch is not visible here. */
90 static inline gpointer InterlockedCompareExchangePointer(volatile gpointer *dest, gpointer exch, gpointer comp)
94 __asm__ __volatile__ ("lock; "
95 #if defined(__x86_64__) && !defined(__native_client__)
101 : "=m" (*dest), "=a" (old)
102 : "r" (exch), "m" (*dest), "a" (comp));
/* xaddl with constant 1: tmp receives the old value (return adds 1 in
 * the missing lines, presumably — confirm against the full file). */
107 static inline gint32 InterlockedIncrement(volatile gint32 *val)
111 __asm__ __volatile__ ("lock; xaddl %0, %1"
112 : "=r" (tmp), "=m" (*val)
113 : "0" (1), "m" (*val));
/* Same pattern with -1 for the decrement. */
118 static inline gint32 InterlockedDecrement(volatile gint32 *val)
122 __asm__ __volatile__ ("lock; xaddl %0, %1"
123 : "=r" (tmp), "=m" (*val)
124 : "0" (-1), "m" (*val));
131 * http://msdn.microsoft.com/library/en-us/dnmag00/html/win320700.asp?frame=true
132 * for the reasons for using cmpxchg and a loop here.
134 * That url is no longer valid, but it's still in the google cache at the
135 * moment: http://www.google.com/search?q=cache:http://msdn.microsoft.com/library/en-us/dnmag00/html/win320700.asp?frame=true
137 * For the time being, http://msdn.microsoft.com/msdnmag/issues/0700/Win32/
138 * might work. Bet it will change soon enough though.
/* Unconditional exchange implemented as a cmpxchg retry loop (see the
 * MSDN reference above); "a" (*val) seeds eax with a snapshot of the
 * current value. */
140 static inline gint32 InterlockedExchange(volatile gint32 *val, gint32 new_val)
144 __asm__ __volatile__ ("1:; lock; cmpxchgl %2, %0; jne 1b"
145 : "=m" (*val), "=a" (ret)
146 : "r" (new_val), "m" (*val), "a" (*val));
/* Pointer exchange; opcode suffix again chosen by the #if below. */
151 static inline gpointer InterlockedExchangePointer(volatile gpointer *val,
156 __asm__ __volatile__ ("1:; lock; "
157 #if defined(__x86_64__) && !defined(__native_client__)
163 : "=m" (*val), "=a" (ret)
164 : "r" (new_val), "m" (*val), "a" (*val));
/* Fetch-and-add: xaddl returns the pre-add value in ret. */
169 static inline gint32 InterlockedExchangeAdd(volatile gint32 *val, gint32 add)
173 __asm__ __volatile__ ("lock; xaddl %0, %1"
174 : "=r" (ret), "=m" (*val)
175 : "0" (add), "m" (*val));
180 #elif (defined(sparc) || defined (__sparc__)) && defined(__GNUC__)
181 #define WAPI_ATOMIC_ASM
/* NOTE(review): incomplete extraction — the asm bodies below are
 * missing lines (branch instructions, returns, closing braces and the
 * 64/32-bit #if branches).  The SPARC `cas`/`casx` instructions are
 * emitted as raw `.word` opcodes (0xdbe0500c / 0xdbf0500c), presumably
 * because the assembler of the era did not accept the mnemonics —
 * confirm against the full file.  Operands are pinned to fixed
 * registers (%g1 address, %o4 comparand, %o5 data) to match those
 * hand-encoded opcodes. */
184 static inline gint32 InterlockedCompareExchange(volatile gint32 *_dest, gint32 _exch, gint32 _comp)
186 register volatile gint32 *dest asm("g1") = _dest;
187 register gint32 comp asm("o4") = _comp;
188 register gint32 exch asm("o5") = _exch;
190 __asm__ __volatile__(
191 /* cas [%%g1], %%o4, %%o5 */
194 : "0" (exch), "r" (dest), "r" (comp)
/* Pointer CAS: casx on 64-bit, cas on 32-bit (selection lines not
 * visible here). */
201 static inline gpointer InterlockedCompareExchangePointer(volatile gpointer *_dest, gpointer _exch, gpointer _comp)
203 register volatile gpointer *dest asm("g1") = _dest;
204 register gpointer comp asm("o4") = _comp;
205 register gpointer exch asm("o5") = _exch;
207 __asm__ __volatile__(
209 /* casx [%%g1], %%o4, %%o5 */
212 /* cas [%%g1], %%o4, %%o5 */
216 : "0" (exch), "r" (dest), "r" (comp)
/* Increment: load, add 1, CAS, retry on mismatch (retry branch not
 * visible in this extract). */
223 static inline gint32 InterlockedIncrement(volatile gint32 *_dest)
225 register volatile gint32 *dest asm("g1") = _dest;
226 register gint32 tmp asm("o4");
227 register gint32 ret asm("o5");
229 __asm__ __volatile__(
230 "1: ld [%%g1], %%o4\n\t"
231 " add %%o4, 1, %%o5\n\t"
232 /* cas [%%g1], %%o4, %%o5 */
233 " .word 0xdbe0500c\n\t"
234 " cmp %%o4, %%o5\n\t"
237 : "=&r" (tmp), "=&r" (ret)
/* Decrement: identical CAS loop with sub instead of add. */
245 static inline gint32 InterlockedDecrement(volatile gint32 *_dest)
247 register volatile gint32 *dest asm("g1") = _dest;
248 register gint32 tmp asm("o4");
249 register gint32 ret asm("o5");
251 __asm__ __volatile__(
252 "1: ld [%%g1], %%o4\n\t"
253 " sub %%o4, 1, %%o5\n\t"
254 /* cas [%%g1], %%o4, %%o5 */
255 " .word 0xdbe0500c\n\t"
256 " cmp %%o4, %%o5\n\t"
259 : "=&r" (tmp), "=&r" (ret)
/* Unconditional exchange as a CAS loop. */
267 static inline gint32 InterlockedExchange(volatile gint32 *_dest, gint32 exch)
269 register volatile gint32 *dest asm("g1") = _dest;
270 register gint32 tmp asm("o4");
271 register gint32 ret asm("o5");
273 __asm__ __volatile__(
274 "1: ld [%%g1], %%o4\n\t"
276 /* cas [%%g1], %%o4, %%o5 */
277 " .word 0xdbe0500c\n\t"
278 " cmp %%o4, %%o5\n\t"
281 : "=&r" (tmp), "=&r" (ret)
282 : "r" (dest), "r" (exch)
/* Pointer exchange: ldx/casx on 64-bit, ld/cas on 32-bit. */
289 static inline gpointer InterlockedExchangePointer(volatile gpointer *_dest, gpointer exch)
291 register volatile gpointer *dest asm("g1") = _dest;
292 register gpointer tmp asm("o4");
293 register gpointer ret asm("o5");
295 __asm__ __volatile__(
297 "1: ldx [%%g1], %%o4\n\t"
299 "1: ld [%%g1], %%o4\n\t"
303 /* casx [%%g1], %%o4, %%o5 */
304 " .word 0xdbf0500c\n\t"
306 /* cas [%%g1], %%o4, %%o5 */
307 " .word 0xdbe0500c\n\t"
309 " cmp %%o4, %%o5\n\t"
312 : "=&r" (tmp), "=&r" (ret)
313 : "r" (dest), "r" (exch)
/* Fetch-and-add CAS loop; the final "add %%o5, %3, %%o5" rebuilds the
 * pre-add value for the Win32 return convention — confirm against the
 * full asm. */
320 static inline gint32 InterlockedExchangeAdd(volatile gint32 *_dest, gint32 add)
322 register volatile gint32 *dest asm("g1") = _dest;
323 register gint32 tmp asm("o4");
324 register gint32 ret asm("o5");
326 __asm__ __volatile__(
327 "1: ld [%%g1], %%o4\n\t"
328 " add %%o4, %3, %%o5\n\t"
329 /* cas [%%g1], %%o4, %%o5 */
330 " .word 0xdbe0500c\n\t"
331 " cmp %%o4, %%o5\n\t"
333 " add %%o5, %3, %%o5"
334 : "=&r" (tmp), "=&r" (ret)
335 : "r" (dest), "r" (add)
343 #define WAPI_ATOMIC_ASM
/* NOTE(review): s390/s390x section; the #if/#else lines selecting
 * between the 31-bit (CS) and 64-bit (CSG) variants are missing from
 * this extract, which is why several functions appear twice.  LA loads
 * the operand address into a fixed register; CS/CSG is the z/Arch
 * compare-and-swap. */
346 InterlockedCompareExchange(volatile gint32 *dest,
347 gint32 exch, gint32 comp)
351 __asm__ __volatile__ ("\tLA\t1,%0\n"
354 : "+m" (*dest), "=&r" (old)
355 : "r" (exch), "r" (comp)
/* 31-bit pointer CAS (CS, 4-byte pointers). */
361 static inline gpointer
362 InterlockedCompareExchangePointer(volatile gpointer *dest,
363 gpointer exch, gpointer comp)
367 __asm__ __volatile__ ("\tLA\t1,%0\n"
370 : "+m" (*dest), "=&r" (old)
371 : "r" (exch), "r" (comp)
/* 64-bit pointer CAS (CSG, 8-byte pointers). */
376 static inline gpointer
377 InterlockedCompareExchangePointer(volatile gpointer *dest,
383 __asm__ __volatile__ ("\tLA\t1,%0\n"
385 "\tCSG\t%1,%2,0(1)\n"
386 : "+m" (*dest), "=&r" (old)
387 : "r" (exch), "r" (comp)
/* Increment, two ABI variants (selection lines not visible). */
396 InterlockedIncrement(volatile gint32 *val)
400 __asm__ __volatile__ ("\tLA\t2,%1\n"
407 : "=r" (tmp), "+m" (*val)
414 InterlockedIncrement(volatile gint32 *val)
418 __asm__ __volatile__ ("\tLA\t2,%1\n"
425 : "=r" (tmp), "+m" (*val)
/* Decrement, two ABI variants. */
434 InterlockedDecrement(volatile gint32 *val)
438 __asm__ __volatile__ ("\tLA\t2,%1\n"
445 : "=r" (tmp), "+m" (*val)
452 InterlockedDecrement(volatile gint32 *val)
456 __asm__ __volatile__ ("\tLA\t2,%1\n"
463 : "=r" (tmp), "+m" (*val)
/* Unconditional 32-bit exchange via a CS retry loop. */
471 InterlockedExchange(volatile gint32 *val, gint32 new_val)
475 __asm__ __volatile__ ("\tLA\t1,%0\n"
479 : "+m" (*val), "=&r" (ret)
/* Pointer exchange: CS variant... */
487 static inline gpointer
488 InterlockedExchangePointer(volatile gpointer *val, gpointer new_val)
492 __asm__ __volatile__ ("\tLA\t1,%0\n"
496 : "+m" (*val), "=&r" (ret)
/* ...and CSG variant for 64-bit pointers. */
503 static inline gpointer
504 InterlockedExchangePointer(volatile gpointer *val, gpointer new_val)
508 __asm__ __volatile__ ("\tLA\t1,%0\n"
510 "\tCSG\t%1,%2,0(1)\n"
512 : "+m" (*val), "=&r" (ret)
/* Fetch-and-add, two ABI variants. */
522 InterlockedExchangeAdd(volatile gint32 *val, gint32 add)
526 __asm__ __volatile__ ("\tLA\t2,%1\n"
532 : "=&r" (ret), "+m" (*val)
540 InterlockedExchangeAdd(volatile gint32 *val, gint32 add)
544 __asm__ __volatile__ ("\tLA\t2,%1\n"
550 : "=&r" (ret), "+m" (*val)
558 #elif defined(__mono_ppc__)
559 #define WAPI_ATOMIC_ASM
/* NOTE(review): PowerPC section, fragmentary in this extract.  The
 * CodeWarrior branch uses that compiler's bare-asm syntax; the GCC
 * branch (after the #else, not visible) uses lwarx/stwcx.
 * load-reserved / store-conditional loops. */
561 #ifdef G_COMPILER_CODEWARRIOR
562 static inline gint32 InterlockedIncrement(volatile register gint32 *val)
564 gint32 result = 0, tmp;
565 register gint32 result = 0;
573 stwcx. result, 0, val
580 static inline gint32 InterlockedDecrement(register volatile gint32 *val)
582 register gint32 result = 0;
590 stwcx. result, 0, val
/* 32-bit-pointer shims: pointer ops forward to the gint32 versions
 * (assumes sizeof(gpointer) == sizeof(gint32) in this branch). */
596 #define InterlockedCompareExchangePointer(dest,exch,comp) (void*)InterlockedCompareExchange((volatile gint32 *)(dest), (gint32)(exch), (gint32)(comp))
598 static inline gint32 InterlockedCompareExchange(volatile register gint32 *dest, register gint32 exch, register gint32 comp)
600 register gint32 tmp = 0;
615 static inline gint32 InterlockedExchange(register volatile gint32 *dest, register gint32 exch)
617 register gint32 tmp = 0;
629 #define InterlockedExchangePointer(dest,exch) (void*)InterlockedExchange((volatile gint32 *)(dest), (gint32)(exch))
/* GCC branch: pointer-width opcodes selected once via macros so the
 * pointer functions work on both ppc32 and ppc64. */
632 #if defined(__mono_ppc64__) && !defined(__mono_ilp32__)
633 #define LDREGX "ldarx"
634 #define STREGCXD "stdcx."
635 #define CMPREG "cmpd"
637 #define LDREGX "lwarx"
638 #define STREGCXD "stwcx."
639 #define CMPREG "cmpw"
/* lwarx/stwcx. retry loop; returns the incremented value per the
 * Interlocked contract (the addi and branch lines are missing here). */
642 static inline gint32 InterlockedIncrement(volatile gint32 *val)
644 gint32 result = 0, tmp;
646 __asm__ __volatile__ ("\n1:\n\t"
647 "lwarx %0, 0, %2\n\t"
649 "stwcx. %1, 0, %2\n\t"
651 : "=&b" (result), "=&b" (tmp): "r" (val): "cc", "memory");
655 static inline gint32 InterlockedDecrement(volatile gint32 *val)
657 gint32 result = 0, tmp;
659 __asm__ __volatile__ ("\n1:\n\t"
660 "lwarx %0, 0, %2\n\t"
661 "addi %1, %0, -1\n\t"
662 "stwcx. %1, 0, %2\n\t"
664 : "=&b" (result), "=&b" (tmp): "r" (val): "cc", "memory");
/* Pointer CAS using the width-selected LDREGX/STREGCXD opcodes. */
668 static inline gpointer InterlockedCompareExchangePointer (volatile gpointer *dest,
669 gpointer exch, gpointer comp)
673 __asm__ __volatile__ ("\n1:\n\t"
674 LDREGX " %0, 0, %1\n\t"
677 STREGCXD " %3, 0, %1\n\t"
681 : "b" (dest), "r" (comp), "r" (exch): "cc", "memory");
685 static inline gint32 InterlockedCompareExchange(volatile gint32 *dest,
686 gint32 exch, gint32 comp) {
689 __asm__ __volatile__ ("\n1:\n\t"
690 "lwarx %0, 0, %1\n\t"
693 "stwcx. %3, 0, %1\n\t"
697 : "b" (dest), "r" (comp), "r" (exch): "cc", "memory");
701 static inline gint32 InterlockedExchange(volatile gint32 *dest, gint32 exch)
705 __asm__ __volatile__ ("\n1:\n\t"
706 "lwarx %0, 0, %2\n\t"
707 "stwcx. %3, 0, %2\n\t"
709 : "=r" (tmp) : "0" (tmp), "b" (dest), "r" (exch): "cc", "memory");
713 static inline gpointer InterlockedExchangePointer (volatile gpointer *dest, gpointer exch)
717 __asm__ __volatile__ ("\n1:\n\t"
718 LDREGX " %0, 0, %2\n\t"
719 STREGCXD " %3, 0, %2\n\t"
721 : "=r" (tmp) : "0" (tmp), "b" (dest), "r" (exch): "cc", "memory");
725 static inline gint32 InterlockedExchangeAdd(volatile gint32 *dest, gint32 add)
728 __asm__ __volatile__ ("\n1:\n\t"
729 "lwarx %0, 0, %2\n\t"
731 "stwcx. %1, 0, %2\n\t"
733 : "=&r" (result), "=&r" (tmp)
734 : "r" (dest), "r" (add) : "cc", "memory");
742 #endif /* !G_COMPILER_CODEWARRIOR */
744 #elif defined(__arm__)
745 #define WAPI_ATOMIC_ASM
/* NOTE(review): ARM section, fragmentary.  Each primitive has two
 * paths: ARMv6+ uses ldrex/strex exclusive-monitor loops; older cores
 * fall back to the legacy `swp` instruction with a comparison/retry
 * dance (swp is the only atomic primitive pre-v6). */
747 static inline gint32 InterlockedCompareExchange(volatile gint32 *dest, gint32 exch, gint32 comp)
749 #if defined(__ARM_ARCH_6__) || defined(__ARM_ARCH_7A__) || defined(__ARM_ARCH_7__)
751 __asm__ __volatile__ ( "1:\n"
756 "strexeq %0, %4, [%2]\n"
759 : "=&r" (tmp), "=&r" (ret)
760 : "r" (dest), "r" (comp), "r" (exch)
767 __asm__ __volatile__ ( "0:\n\t"
772 "swp %0, %3, [%2]\n\t"
774 "swpne %3, %0, [%2]\n\t"
777 : "=&r" (a), "=&r" (b)
778 : "r" (dest), "r" (exch), "r" (comp)
785 static inline gpointer InterlockedCompareExchangePointer(volatile gpointer *dest, gpointer exch, gpointer comp)
787 #if defined(__ARM_ARCH_6__) || defined(__ARM_ARCH_7A__) || defined(__ARM_ARCH_7__)
789 __asm__ __volatile__ ( "1:\n"
794 "strexeq %0, %4, [%2]\n"
797 : "=&r" (tmp), "=&r" (ret)
798 : "r" (dest), "r" (comp), "r" (exch)
805 __asm__ __volatile__ ( "0:\n\t"
810 "swpeq %0, %3, [%2]\n\t"
812 "swpne %3, %0, [%2]\n\t"
815 : "=&r" (a), "=&r" (b)
816 : "r" (dest), "r" (exch), "r" (comp)
/* Increment/decrement: ldrex, add/sub (missing line), strex, retry. */
823 static inline gint32 InterlockedIncrement(volatile gint32 *dest)
825 #if defined(__ARM_ARCH_6__) || defined(__ARM_ARCH_7A__) || defined(__ARM_ARCH_7__)
827 __asm__ __volatile__ ( "1:\n"
830 "strex %1, %0, [%2]\n"
833 : "=&r" (ret), "=&r" (flag)
834 : "r" (dest), "r" (1)
841 __asm__ __volatile__ ( "0:\n\t"
844 "swp %2, %1, [%3]\n\t"
846 "swpne %1, %2, [%3]\n\t"
848 : "=&r" (a), "=&r" (b), "=&r" (c)
849 : "r" (dest), "r" (1)
856 static inline gint32 InterlockedDecrement(volatile gint32 *dest)
858 #if defined(__ARM_ARCH_6__) || defined(__ARM_ARCH_7A__) || defined(__ARM_ARCH_7__)
860 __asm__ __volatile__ ( "1:\n"
863 "strex %1, %0, [%2]\n"
866 : "=&r" (ret), "=&r" (flag)
867 : "r" (dest), "r" (1)
874 __asm__ __volatile__ ( "0:\n\t"
877 "swp %2, %1, [%3]\n\t"
879 "swpne %1, %2, [%3]\n\t"
881 : "=&r" (a), "=&r" (b), "=&r" (c)
882 : "r" (dest), "r" (-1)
/* Exchange: v6 uses an exclusive store loop; legacy path is a single
 * swp, which is inherently atomic. */
889 static inline gint32 InterlockedExchange(volatile gint32 *dest, gint32 exch)
891 #if defined(__ARM_ARCH_6__) || defined(__ARM_ARCH_7A__) || defined(__ARM_ARCH_7__)
893 __asm__ __volatile__ (
896 "strex %1, %2, [%3]\n"
899 : "=&r" (ret), "=&r" (flag)
900 : "r" (exch), "r" (dest)
906 __asm__ __volatile__ ( "swp %0, %2, [%1]"
908 : "r" (dest), "r" (exch));
914 static inline gpointer InterlockedExchangePointer(volatile gpointer *dest, gpointer exch)
916 #if defined(__ARM_ARCH_6__) || defined(__ARM_ARCH_7A__) || defined(__ARM_ARCH_7__)
918 __asm__ __volatile__ (
921 "strex %1, %2, [%3]\n"
924 : "=&r" (ret), "=&r" (flag)
925 : "r" (exch), "r" (dest)
931 __asm__ __volatile__ ( "swp %0, %2, [%1]"
933 : "r" (dest), "r" (exch));
939 static inline gint32 InterlockedExchangeAdd(volatile gint32 *dest, gint32 add)
941 #if defined(__ARM_ARCH_6__) || defined(__ARM_ARCH_7A__) || defined(__ARM_ARCH_7__)
942 gint32 ret, tmp, flag;
943 __asm__ __volatile__ ( "1:\n"
946 "strex %2, %1, [%3]\n"
949 : "=&r" (ret), "=&r" (tmp), "=&r" (flag)
950 : "r" (dest), "r" (add)
957 __asm__ __volatile__ ( "0:\n\t"
960 "swp %2, %1, [%3]\n\t"
962 "swpne %1, %2, [%3]\n\t"
964 : "=&r" (a), "=&r" (b), "=&r" (c)
965 : "r" (dest), "r" (add)
972 #elif defined(__ia64__)
973 #define WAPI_ATOMIC_ASM
/* NOTE(review): IA-64 section, fragmentary.  Every function has two
 * paths: the Intel compiler uses its _Interlocked* intrinsics; GCC
 * uses cmpxchg4/cmpxchg8 asm for the two CAS primitives and builds the
 * remaining operations as portable C CAS retry loops on top of them. */
975 #ifdef __INTEL_COMPILER
976 #include <ia64intrin.h>
979 static inline gint32 InterlockedCompareExchange(gint32 volatile *dest,
980 gint32 exch, gint32 comp)
985 #ifdef __INTEL_COMPILER
986 old = _InterlockedCompareExchange (dest, exch, comp);
988 /* cmpxchg4 zero extends the value read from memory */
989 real_comp = (guint64)(guint32)comp;
990 asm volatile ("mov ar.ccv = %2 ;;\n\t"
991 "cmpxchg4.acq %0 = [%1], %3, ar.ccv\n\t"
992 : "=r" (old) : "r" (dest), "r" (real_comp), "r" (exch));
998 static inline gpointer InterlockedCompareExchangePointer(gpointer volatile *dest,
999 gpointer exch, gpointer comp)
1003 #ifdef __INTEL_COMPILER
1004 old = _InterlockedCompareExchangePointer (dest, exch, comp);
1006 asm volatile ("mov ar.ccv = %2 ;;\n\t"
1007 "cmpxchg8.acq %0 = [%1], %3, ar.ccv\n\t"
1008 : "=r" (old) : "r" (dest), "r" (comp), "r" (exch));
/* The rest are CAS loops: re-read, compute, retry until unchanged. */
1014 static inline gint32 InterlockedIncrement(gint32 volatile *val)
1016 #ifdef __INTEL_COMPILER
1017 return _InterlockedIncrement (val);
1023 } while (InterlockedCompareExchange (val, old + 1, old) != old);
1029 static inline gint32 InterlockedDecrement(gint32 volatile *val)
1031 #ifdef __INTEL_COMPILER
1032 return _InterlockedDecrement (val);
1038 } while (InterlockedCompareExchange (val, old - 1, old) != old);
1044 static inline gint32 InterlockedExchange(gint32 volatile *dest, gint32 new_val)
1046 #ifdef __INTEL_COMPILER
1047 return _InterlockedExchange (dest, new_val);
1053 } while (InterlockedCompareExchange (dest, new_val, res) != res);
1059 static inline gpointer InterlockedExchangePointer(gpointer volatile *dest, gpointer new_val)
1061 #ifdef __INTEL_COMPILER
1062 return (gpointer)_InterlockedExchange64 ((gint64*)dest, (gint64)new_val);
1068 } while (InterlockedCompareExchangePointer (dest, new_val, res) != res);
1074 static inline gint32 InterlockedExchangeAdd(gint32 volatile *val, gint32 add)
1078 #ifdef __INTEL_COMPILER
1079 old = _InterlockedExchangeAdd (val, add);
1083 } while (InterlockedCompareExchange (val, old + add, old) != old);
1089 #elif defined(__alpha__)
1090 #define WAPI_ATOMIC_ASM
/* NOTE(review): Alpha section, fragmentary.  The missing asm lines are
 * presumably the ldl_l/stl_c (load-locked / store-conditional) pair and
 * retry branches that bracket the visible compute instructions —
 * confirm against the full file. */
1092 static inline gint32 InterlockedCompareExchange(volatile gint32 *dest,
1093 gint32 exch, gint32 comp)
1095 gint32 old, temp, temp2;
/* comp/exch widened to long: Alpha's conditional ops work on 64-bit
 * registers. */
1096 long compq = comp, exchq = exch;
1098 __asm__ __volatile__ (
1101 " cmpeq %2, %5, %3\n"
1102 " cmovne %3, %4, %2\n"
1105 : "=m" (*dest), "=&r" (old), "=&r" (temp), "=&r" (temp2)
1106 : "r" (exchq), "r" (compq), "m" (*dest));
1110 static inline gpointer InterlockedCompareExchangePointer(volatile gpointer *dest, gpointer exch, gpointer comp)
1112 gpointer old, temp, temp2;
1114 __asm__ __volatile__ (
1117 " cmpeq %2, %5, %3\n"
1118 " cmovne %3, %4, %2\n"
1121 : "=m" (*dest), "=&r" (old), "=&r" (temp), "=&r" (temp2)
1122 : "r" (exch), "r" (comp), "m" (*dest));
1126 static inline gint32 InterlockedIncrement(volatile gint32 *val)
1130 __asm__ __volatile__ (
1132 " addl %0, %3, %0\n"
1136 : "=&r" (temp), "=m" (*val), "=r" (cur)
1137 : "Ir" (1), "m" (*val));
1141 static inline gint32 InterlockedDecrement(volatile gint32 *val)
1145 __asm__ __volatile__ (
1147 " subl %0, %3, %0\n"
1151 : "=&r" (temp), "=m" (*val), "=r" (cur)
1152 : "Ir" (1), "m" (*val));
1156 static inline gint32 InterlockedExchange(volatile gint32 *val, gint32 new_val)
1160 __asm__ __volatile__ (
1165 : "=m" (*val), "=&r" (ret), "=&r" (temp)
1166 : "r" (new_val), "m" (*val));
1170 static inline gpointer InterlockedExchangePointer(volatile gpointer *val, gpointer new_val)
1174 __asm__ __volatile__ (
1179 : "=m" (*val), "=&r" (ret), "=&r" (temp)
1180 : "r" (new_val), "m" (*val));
1184 static inline gint32 InterlockedExchangeAdd(volatile gint32 *val, gint32 add)
1188 __asm__ __volatile__ (
1191 " addl %2, %3, %2\n"
1194 : "=m" (*val), "=&r" (ret), "=&r" (temp)
1195 : "r" (add), "m" (*val));
1200 #elif defined(__mips__)
1201 #define WAPI_ATOMIC_ASM
/* NOTE(review): MIPS section, fragmentary.  The missing asm lines are
 * presumably the ll/sc (load-linked / store-conditional) pair and
 * retry branch inside each ".set mips32" block — confirm against the
 * full file. */
1203 static inline gint32 InterlockedIncrement(volatile gint32 *val)
1205 gint32 tmp, result = 0;
1207 __asm__ __volatile__ (" .set mips32\n"
1213 : "=&r" (result), "=&r" (tmp), "=m" (*val)
1218 static inline gint32 InterlockedDecrement(volatile gint32 *val)
1220 gint32 tmp, result = 0;
1222 __asm__ __volatile__ (" .set mips32\n"
1228 : "=&r" (result), "=&r" (tmp), "=m" (*val)
/* Pointer ops forward to the 32-bit versions: this branch assumes
 * sizeof(gpointer) == sizeof(gint32) (i.e. o32, not n64). */
1233 #define InterlockedCompareExchangePointer(dest,exch,comp) InterlockedCompareExchange((volatile gint32 *)(dest), (gint32)(exch), (gint32)(comp))
1235 static inline gint32 InterlockedCompareExchange(volatile gint32 *dest,
1236 gint32 exch, gint32 comp) {
1239 __asm__ __volatile__ (" .set mips32\n"
1246 : "=&r" (old), "=&r" (tmp), "=m" (*dest)
1247 : "m" (*dest), "r" (exch), "r" (comp));
1251 static inline gint32 InterlockedExchange(volatile gint32 *dest, gint32 exch)
1255 __asm__ __volatile__ (" .set mips32\n"
1261 : "=&r" (result), "=&r" (tmp), "=m" (*dest)
1262 : "m" (*dest), "r" (exch));
1265 #define InterlockedExchangePointer(dest,exch) InterlockedExchange((volatile gint32 *)(dest), (gint32)(exch))
1267 static inline gint32 InterlockedExchangeAdd(volatile gint32 *dest, gint32 add)
1271 __asm__ __volatile__ (" .set mips32\n"
1273 " addu %1, %0, %4\n"
1277 : "=&r" (result), "=&r" (tmp), "=m" (*dest)
1278 : "m" (*dest), "r" (add));
/* Fallback for architectures with no inline-asm implementation above:
 * declare the Interlocked* primitives extern and let a C file provide
 * them (presumably with a lock — the implementation is not visible in
 * this header). */
1284 extern gint32 InterlockedCompareExchange(volatile gint32 *dest, gint32 exch, gint32 comp);
1285 extern gpointer InterlockedCompareExchangePointer(volatile gpointer *dest, gpointer exch, gpointer comp);
1286 extern gint32 InterlockedIncrement(volatile gint32 *dest);
1287 extern gint32 InterlockedDecrement(volatile gint32 *dest);
1288 extern gint32 InterlockedExchange(volatile gint32 *dest, gint32 exch);
1289 extern gpointer InterlockedExchangePointer(volatile gpointer *dest, gpointer exch);
1290 extern gint32 InterlockedExchangeAdd(volatile gint32 *dest, gint32 add);
/* hppa still sets WAPI_ATOMIC_ASM here (its implementation lines are
 * not visible in this extract) — confirm against the full file. */
1292 #if defined(__hppa__)
1293 #define WAPI_ATOMIC_ASM
1298 #endif /* _WAPI_ATOMIC_H_ */