/*
 * atomic.h:  Atomic operations
 *
 * Author:
 *	Dick Porter (dick@ximian.com)
 *
 * (C) 2002 Ximian, Inc.
 * Copyright 2012 Xamarin Inc
 */
11 #ifndef _WAPI_ATOMIC_H_
12 #define _WAPI_ATOMIC_H_
14 #if defined(__NetBSD__)
15 #include <sys/param.h>
17 #if __NetBSD_Version__ > 499004000
18 #include <sys/atomic.h>
19 #define HAVE_ATOMIC_OPS
27 #ifdef ENABLE_EXTENSION_MODULE
28 #include "../../../mono-extensions/mono/utils/atomic.h"
31 /* On Windows, we always use the functions provided by the Windows API. */
32 #if defined(__WIN32__) || defined(_WIN32)
35 #define HAS_64BITS_ATOMICS 1
37 /* mingw is missing InterlockedCompareExchange64 () from winbase.h */
39 static inline gint64 InterlockedCompareExchange64(volatile gint64 *dest, gint64 exch, gint64 comp)
41 return __sync_val_compare_and_swap (dest, comp, exch);
45 /* Prefer GCC atomic ops if the target supports it (see configure.in). */
46 #elif defined(USE_GCC_ATOMIC_OPS)
48 static inline gint32 InterlockedCompareExchange(volatile gint32 *dest,
49 gint32 exch, gint32 comp)
51 return __sync_val_compare_and_swap (dest, comp, exch);
54 static inline gpointer InterlockedCompareExchangePointer(volatile gpointer *dest, gpointer exch, gpointer comp)
56 return __sync_val_compare_and_swap (dest, comp, exch);
59 static inline gint32 InterlockedIncrement(volatile gint32 *val)
61 return __sync_add_and_fetch (val, 1);
64 static inline gint32 InterlockedDecrement(volatile gint32 *val)
66 return __sync_add_and_fetch (val, -1);
69 static inline gint32 InterlockedExchange(volatile gint32 *val, gint32 new_val)
74 } while (__sync_val_compare_and_swap (val, old_val, new_val) != old_val);
78 static inline gpointer InterlockedExchangePointer(volatile gpointer *val,
84 } while (__sync_val_compare_and_swap (val, old_val, new_val) != old_val);
88 static inline gint32 InterlockedExchangeAdd(volatile gint32 *val, gint32 add)
90 return __sync_fetch_and_add (val, add);
93 #if defined (TARGET_OSX)
94 #define BROKEN_64BIT_ATOMICS_INTRINSIC 1
98 #if !defined (BROKEN_64BIT_ATOMICS_INTRINSIC)
99 #define HAS_64BITS_ATOMICS 1
101 static inline gint64 InterlockedCompareExchange64(volatile gint64 *dest, gint64 exch, gint64 comp)
103 return __sync_val_compare_and_swap (dest, comp, exch);
109 #elif defined(__NetBSD__) && defined(HAVE_ATOMIC_OPS)
111 static inline gint32 InterlockedCompareExchange(volatile gint32 *dest,
112 gint32 exch, gint32 comp)
114 return atomic_cas_32((uint32_t*)dest, comp, exch);
117 static inline gpointer InterlockedCompareExchangePointer(volatile gpointer *dest, gpointer exch, gpointer comp)
119 return atomic_cas_ptr(dest, comp, exch);
122 static inline gint32 InterlockedIncrement(volatile gint32 *val)
124 return atomic_inc_32_nv((uint32_t*)val);
127 static inline gint32 InterlockedDecrement(volatile gint32 *val)
129 return atomic_dec_32_nv((uint32_t*)val);
132 static inline gint32 InterlockedExchange(volatile gint32 *val, gint32 new_val)
134 return atomic_swap_32((uint32_t*)val, new_val);
137 static inline gpointer InterlockedExchangePointer(volatile gpointer *val,
140 return atomic_swap_ptr(val, new_val);
143 static inline gint32 InterlockedExchangeAdd(volatile gint32 *val, gint32 add)
145 return atomic_add_32_nv((uint32_t*)val, add) - add;
148 #elif (defined(sparc) || defined (__sparc__)) && defined(__GNUC__)
/*
 * SPARC compare-and-swap: if *_dest == _comp, store _exch; returns the
 * previous value.  Operands are pinned to fixed registers (g1/o4/o5) so a
 * hand-encoded "cas" opcode (emitted as .word for assemblers that do not
 * know the v9 instruction) can reference them.
 * NOTE(review): the ".word" opcode line and the asm output/clobber lines
 * are not visible in this extract — verify against upstream before editing.
 */
151 static inline gint32 InterlockedCompareExchange(volatile gint32 *_dest, gint32 _exch, gint32 _comp)
153 register volatile gint32 *dest asm("g1") = _dest;
154 register gint32 comp asm("o4") = _comp;
155 register gint32 exch asm("o5") = _exch;
157 __asm__ __volatile__(
158 /* cas [%%g1], %%o4, %%o5 */
161 : "0" (exch), "r" (dest), "r" (comp)
/*
 * SPARC pointer-sized compare-and-swap; returns the previous pointer.
 * Two asm variants are present: "casx" (64-bit) and "cas" (32-bit) —
 * presumably selected by a pointer-size #if elided from this extract.
 * NOTE(review): the .word opcodes and output/clobber lines are not
 * visible here — confirm against upstream.
 */
168 static inline gpointer InterlockedCompareExchangePointer(volatile gpointer *_dest, gpointer _exch, gpointer _comp)
170 register volatile gpointer *dest asm("g1") = _dest;
171 register gpointer comp asm("o4") = _comp;
172 register gpointer exch asm("o5") = _exch;
174 __asm__ __volatile__(
176 /* casx [%%g1], %%o4, %%o5 */
179 /* cas [%%g1], %%o4, %%o5 */
183 : "0" (exch), "r" (dest), "r" (comp)
/*
 * SPARC atomic increment: ld the current value, add 1, then attempt the
 * store with the hand-encoded "cas" opcode (.word 0xdbe0500c); the cmp
 * detects a lost race and (in lines elided here) branches back to retry.
 * Returns the new value.
 */
190 static inline gint32 InterlockedIncrement(volatile gint32 *_dest)
192 register volatile gint32 *dest asm("g1") = _dest;
193 register gint32 tmp asm("o4");
194 register gint32 ret asm("o5");
196 __asm__ __volatile__(
197 "1: ld [%%g1], %%o4\n\t"
198 " add %%o4, 1, %%o5\n\t"
199 /* cas [%%g1], %%o4, %%o5 */
200 " .word 0xdbe0500c\n\t"
201 " cmp %%o4, %%o5\n\t"
204 : "=&r" (tmp), "=&r" (ret)
/*
 * SPARC atomic decrement: mirror image of InterlockedIncrement —
 * ld, sub 1, cas-retry (branch lines elided from this extract).
 * Returns the new value.
 */
212 static inline gint32 InterlockedDecrement(volatile gint32 *_dest)
214 register volatile gint32 *dest asm("g1") = _dest;
215 register gint32 tmp asm("o4");
216 register gint32 ret asm("o5");
218 __asm__ __volatile__(
219 "1: ld [%%g1], %%o4\n\t"
220 " sub %%o4, 1, %%o5\n\t"
221 /* cas [%%g1], %%o4, %%o5 */
222 " .word 0xdbe0500c\n\t"
223 " cmp %%o4, %%o5\n\t"
226 : "=&r" (tmp), "=&r" (ret)
/*
 * SPARC atomic exchange: cas-retry loop that swaps exch into *_dest and
 * returns the previous value.  The instruction loading exch into o5 each
 * iteration is among the lines elided from this extract.
 */
234 static inline gint32 InterlockedExchange(volatile gint32 *_dest, gint32 exch)
236 register volatile gint32 *dest asm("g1") = _dest;
237 register gint32 tmp asm("o4");
238 register gint32 ret asm("o5");
240 __asm__ __volatile__(
241 "1: ld [%%g1], %%o4\n\t"
243 /* cas [%%g1], %%o4, %%o5 */
244 " .word 0xdbe0500c\n\t"
245 " cmp %%o4, %%o5\n\t"
248 : "=&r" (tmp), "=&r" (ret)
249 : "r" (dest), "r" (exch)
/*
 * SPARC atomic pointer exchange; returns the previous pointer.  Carries
 * both 64-bit (ldx/casx, .word 0xdbf0500c) and 32-bit (ld/cas,
 * .word 0xdbe0500c) sequences — presumably chosen by a pointer-size #if
 * elided from this extract, along with the retry branch.
 */
256 static inline gpointer InterlockedExchangePointer(volatile gpointer *_dest, gpointer exch)
258 register volatile gpointer *dest asm("g1") = _dest;
259 register gpointer tmp asm("o4");
260 register gpointer ret asm("o5");
262 __asm__ __volatile__(
264 "1: ldx [%%g1], %%o4\n\t"
266 "1: ld [%%g1], %%o4\n\t"
270 /* casx [%%g1], %%o4, %%o5 */
271 " .word 0xdbf0500c\n\t"
273 /* cas [%%g1], %%o4, %%o5 */
274 " .word 0xdbe0500c\n\t"
276 " cmp %%o4, %%o5\n\t"
279 : "=&r" (tmp), "=&r" (ret)
280 : "r" (dest), "r" (exch)
/*
 * SPARC atomic fetch-and-add via cas-retry.  After a successful cas, o5
 * holds the OLD value; the trailing "add %%o5, %3, %%o5" rebuilds the
 * NEW value in o5 — NOTE(review): the visible constraints suggest the
 * final result handling lives in elided lines; the Win32 contract is to
 * return the OLD value, so verify against upstream.
 */
287 static inline gint32 InterlockedExchangeAdd(volatile gint32 *_dest, gint32 add)
289 register volatile gint32 *dest asm("g1") = _dest;
290 register gint32 tmp asm("o4");
291 register gint32 ret asm("o5");
293 __asm__ __volatile__(
294 "1: ld [%%g1], %%o4\n\t"
295 " add %%o4, %3, %%o5\n\t"
296 /* cas [%%g1], %%o4, %%o5 */
297 " .word 0xdbe0500c\n\t"
298 " cmp %%o4, %%o5\n\t"
300 " add %%o5, %3, %%o5"
301 : "=&r" (tmp), "=&r" (ret)
302 : "r" (dest), "r" (add)
/*
 * s390 compare-and-swap: LA loads the address of *dest into r1, and (in a
 * line elided from this extract) a CS instruction performs the
 * compare-and-swap.  Returns the previous contents of *dest.
 * NOTE(review): the return-type line and the CS mnemonic are not visible
 * here — verify against upstream.
 */
311 InterlockedCompareExchange(volatile gint32 *dest,
312 gint32 exch, gint32 comp)
316 __asm__ __volatile__ ("\tLA\t1,%0\n"
319 : "+m" (*dest), "=&r" (old)
320 : "r" (exch), "r" (comp)
/*
 * s390x pointer-sized compare-and-swap using CSG (64-bit compare-and-swap)
 * on the address loaded into r1 by LA.  Returns the previous pointer.
 */
325 static inline gpointer
326 InterlockedCompareExchangePointer(volatile gpointer *dest,
332 __asm__ __volatile__ ("\tLA\t1,%0\n"
334 "\tCSG\t%1,%2,0(1)\n"
335 : "+m" (*dest), "=&r" (old)
336 : "r" (exch), "r" (comp)
/*
 * s390 atomic increment built on a CS retry loop (load, add 1, CS, branch
 * on failure — the loop body is elided from this extract).  Returns the
 * new value.
 */
343 InterlockedIncrement(volatile gint32 *val)
347 __asm__ __volatile__ ("\tLA\t2,%1\n"
354 : "=r" (tmp), "+m" (*val)
/*
 * s390 atomic decrement — mirror of InterlockedIncrement with a subtract
 * inside the (elided) CS retry loop.  Returns the new value.
 */
361 InterlockedDecrement(volatile gint32 *val)
365 __asm__ __volatile__ ("\tLA\t2,%1\n"
372 : "=r" (tmp), "+m" (*val)
/*
 * s390 atomic exchange: CS retry loop storing new_val (loop body elided
 * from this extract).  Returns the previous contents of *val.
 */
379 InterlockedExchange(volatile gint32 *val, gint32 new_val)
383 __asm__ __volatile__ ("\tLA\t1,%0\n"
387 : "+m" (*val), "=&r" (ret)
/*
 * s390x atomic pointer exchange using CSG in a retry loop.  Returns the
 * previous pointer.
 */
394 static inline gpointer
395 InterlockedExchangePointer(volatile gpointer *val, gpointer new_val)
399 __asm__ __volatile__ ("\tLA\t1,%0\n"
401 "\tCSG\t%1,%2,0(1)\n"
403 : "+m" (*val), "=&r" (ret)
/*
 * s390 atomic fetch-and-add via a CS retry loop (elided here); per the
 * Win32 contract the value returned is the one *val held before the add.
 * NOTE(review): loop body not visible in this extract — verify upstream.
 */
411 InterlockedExchangeAdd(volatile gint32 *val, gint32 add)
415 __asm__ __volatile__ ("\tLA\t2,%1\n"
421 : "=&r" (ret), "+m" (*val)
428 #elif defined(__ia64__)
430 #ifdef __INTEL_COMPILER
431 #include <ia64intrin.h>
/*
 * ia64 compare-and-swap.  With the Intel compiler this maps to the
 * _InterlockedCompareExchange intrinsic; otherwise ar.ccv is loaded with
 * the comparand and cmpxchg4.acq performs the CAS with acquire semantics.
 * The comparand is zero-extended to 64 bits because cmpxchg4 zero extends
 * the value it reads from memory.  Returns the previous *dest.
 */
434 static inline gint32 InterlockedCompareExchange(gint32 volatile *dest,
435 gint32 exch, gint32 comp)
440 #ifdef __INTEL_COMPILER
441 old = _InterlockedCompareExchange (dest, exch, comp);
443 /* cmpxchg4 zero extends the value read from memory */
444 real_comp = (guint64)(guint32)comp;
445 asm volatile ("mov ar.ccv = %2 ;;\n\t"
446 "cmpxchg4.acq %0 = [%1], %3, ar.ccv\n\t"
447 : "=r" (old) : "r" (dest), "r" (real_comp), "r" (exch));
/*
 * ia64 pointer-sized (64-bit) compare-and-swap: Intel-compiler intrinsic,
 * or ar.ccv + cmpxchg8.acq in inline asm.  Returns the previous pointer.
 */
453 static inline gpointer InterlockedCompareExchangePointer(gpointer volatile *dest,
454 gpointer exch, gpointer comp)
458 #ifdef __INTEL_COMPILER
459 old = _InterlockedCompareExchangePointer (dest, exch, comp);
461 asm volatile ("mov ar.ccv = %2 ;;\n\t"
462 "cmpxchg8.acq %0 = [%1], %3, ar.ccv\n\t"
463 : "=r" (old) : "r" (dest), "r" (comp), "r" (exch));
/*
 * ia64 atomic increment: intrinsic on the Intel compiler, otherwise a
 * CAS retry loop built on InterlockedCompareExchange (the read of the
 * old value is elided from this extract).  Returns the new value.
 */
469 static inline gint32 InterlockedIncrement(gint32 volatile *val)
471 #ifdef __INTEL_COMPILER
472 	return _InterlockedIncrement (val);
478 } while (InterlockedCompareExchange (val, old + 1, old) != old);
/*
 * ia64 atomic decrement: intrinsic on the Intel compiler, otherwise a
 * CAS retry loop mirroring InterlockedIncrement.  Returns the new value.
 */
484 static inline gint32 InterlockedDecrement(gint32 volatile *val)
486 #ifdef __INTEL_COMPILER
487 	return _InterlockedDecrement (val);
493 } while (InterlockedCompareExchange (val, old - 1, old) != old);
/*
 * ia64 atomic exchange: intrinsic on the Intel compiler, otherwise a CAS
 * retry loop swapping in new_val.  Returns the previous contents of *dest.
 */
499 static inline gint32 InterlockedExchange(gint32 volatile *dest, gint32 new_val)
501 #ifdef __INTEL_COMPILER
502 	return _InterlockedExchange (dest, new_val);
508 } while (InterlockedCompareExchange (dest, new_val, res) != res);
/*
 * ia64 atomic pointer exchange.  Pointers are 64-bit here, so the Intel
 * path routes through the 64-bit intrinsic with casts; the fallback is a
 * pointer CAS retry loop.  Returns the previous pointer.
 */
514 static inline gpointer InterlockedExchangePointer(gpointer volatile *dest, gpointer new_val)
516 #ifdef __INTEL_COMPILER
517 	return (gpointer)_InterlockedExchange64 ((gint64*)dest, (gint64)new_val);
523 } while (InterlockedCompareExchangePointer (dest, new_val, res) != res);
/*
 * ia64 atomic fetch-and-add: intrinsic on the Intel compiler, otherwise a
 * CAS retry loop.  Returns the value *val held before the addition.
 */
529 static inline gint32 InterlockedExchangeAdd(gint32 volatile *val, gint32 add)
533 #ifdef __INTEL_COMPILER
534 	old = _InterlockedExchangeAdd (val, add);
538 } while (InterlockedCompareExchange (val, old + add, old) != old);
/*
 * No architecture-specific implementation was selected above: advertise
 * that via WAPI_NO_ATOMIC_ASM and declare out-of-line fallbacks, which
 * are defined elsewhere in the project.
 */
546 #define WAPI_NO_ATOMIC_ASM
548 extern gint32 InterlockedCompareExchange(volatile gint32 *dest, gint32 exch, gint32 comp);
549 extern gpointer InterlockedCompareExchangePointer(volatile gpointer *dest, gpointer exch, gpointer comp);
550 extern gint32 InterlockedIncrement(volatile gint32 *dest);
551 extern gint32 InterlockedDecrement(volatile gint32 *dest);
552 extern gint32 InterlockedExchange(volatile gint32 *dest, gint32 exch);
553 extern gpointer InterlockedExchangePointer(volatile gpointer *dest, gpointer exch);
554 extern gint32 InterlockedExchangeAdd(volatile gint32 *dest, gint32 add);
/* The 64-bit CAS gets an out-of-line fallback whenever no native one was defined. */
558 #ifndef HAS_64BITS_ATOMICS
559 extern gint64 InterlockedCompareExchange64(volatile gint64 *dest, gint64 exch, gint64 comp);
562 #endif /* _WAPI_ATOMIC_H_ */