2 * atomic.h: Atomic operations
5 * Dick Porter (dick@ximian.com)
7 * (C) 2002 Ximian, Inc.
8 * Copyright 2012 Xamarin Inc
11 #ifndef _WAPI_ATOMIC_H_
12 #define _WAPI_ATOMIC_H_
17 #ifdef ENABLE_EXTENSION_MODULE
18 #include "../../../mono-extensions/mono/utils/atomic.h"
21 /* On Windows, we always use the functions provided by the Windows API. */
22 #if defined(__WIN32__) || defined(_WIN32)
26 /* mingw is missing InterlockedCompareExchange64 () from winbase.h */
27 #if HAVE_DECL_INTERLOCKEDCOMPAREEXCHANGE64==0
/* Atomic 64-bit compare-and-swap: if *dest == comp, store exch into *dest.
 * Returns the value *dest held before the operation.  Note the GCC builtin
 * takes (ptr, oldval, newval), hence the (dest, comp, exch) argument order. */
28 static inline gint64 InterlockedCompareExchange64(volatile gint64 *dest, gint64 exch, gint64 comp)
30 return __sync_val_compare_and_swap (dest, comp, exch);
34 /* Prefer GCC atomic ops if the target supports it (see configure.in). */
35 #elif defined(USE_GCC_ATOMIC_OPS)
/* Atomic 32-bit compare-and-swap: if *dest == comp, store exch into *dest.
 * Returns the previous value of *dest.  The GCC builtin's argument order
 * is (ptr, oldval, newval), hence (dest, comp, exch). */
37 static inline gint32 InterlockedCompareExchange(volatile gint32 *dest,
38 			gint32 exch, gint32 comp)
40 return __sync_val_compare_and_swap (dest, comp, exch);
/* Pointer-sized compare-and-swap; returns the previous value of *dest. */
43 static inline gpointer InterlockedCompareExchangePointer(volatile gpointer *dest, gpointer exch, gpointer comp)
45 return __sync_val_compare_and_swap (dest, comp, exch);
/* Atomically adds `add` to *dest and returns the NEW (post-add) value. */
48 static inline gint32 InterlockedAdd(volatile gint32 *dest, gint32 add)
50 return __sync_add_and_fetch (dest, add);
/* Atomically increments *val; returns the new (incremented) value. */
53 static inline gint32 InterlockedIncrement(volatile gint32 *val)
55 return __sync_add_and_fetch (val, 1);
/* Atomically decrements *val; returns the new (decremented) value.
 * Implemented as add-and-fetch of -1 (equivalent to sub_and_fetch of 1). */
58 static inline gint32 InterlockedDecrement(volatile gint32 *val)
60 return __sync_add_and_fetch (val, -1);
/* Atomically stores new_val into *val and returns the value it replaced.
 * GCC has no plain atomic-exchange builtin here, so a CAS retry loop is
 * used: re-read old_val until the swap from old_val succeeds.
 * NOTE(review): the loop body (reading old_val) is elided in this extract. */
63 static inline gint32 InterlockedExchange(volatile gint32 *val, gint32 new_val)
68 } while (__sync_val_compare_and_swap (val, old_val, new_val) != old_val);
/* Pointer-sized atomic exchange via the same CAS retry loop as
 * InterlockedExchange; returns the value *val held before the store.
 * NOTE(review): the loop body (reading old_val) is elided in this extract. */
72 static inline gpointer InterlockedExchangePointer(volatile gpointer *val,
78 } while (__sync_val_compare_and_swap (val, old_val, new_val) != old_val);
/* Atomically adds `add` to *val and returns the OLD (pre-add) value —
 * note the fetch_and_add vs the add_and_fetch used by InterlockedAdd. */
82 static inline gint32 InterlockedExchangeAdd(volatile gint32 *val, gint32 add)
84 return __sync_fetch_and_add (val, add);
/* The 64-bit __sync_* intrinsics are considered broken on OSX, so that
 * target falls through to the CAS-emulated implementations below. */
87 #if defined (TARGET_OSX)
88 #define BROKEN_64BIT_ATOMICS_INTRINSIC 1
91 #if !defined (BROKEN_64BIT_ATOMICS_INTRINSIC)
/* Native 64-bit compare-and-swap; returns the previous value of *dest. */
93 static inline gint64 InterlockedCompareExchange64(volatile gint64 *dest, gint64 exch, gint64 comp)
95 return __sync_val_compare_and_swap (dest, comp, exch);
/* Atomically adds `add` to *dest; returns the new (post-add) value. */
98 static inline gint64 InterlockedAdd64(volatile gint64 *dest, gint64 add)
100 return __sync_add_and_fetch (dest, add);
/* Atomically increments *val; returns the new value. */
103 static inline gint64 InterlockedIncrement64(volatile gint64 *val)
105 return __sync_add_and_fetch (val, 1);
/* Atomically decrements *val; returns the new value. */
108 static inline gint64 InterlockedDecrement64(volatile gint64 *val)
110 return __sync_sub_and_fetch (val, 1);
/* Atomically adds `add` to *val; returns the OLD (pre-add) value. */
113 static inline gint64 InterlockedExchangeAdd64(volatile gint64 *val, gint64 add)
115 return __sync_fetch_and_add (val, add);
120 /* Implement 64-bit cmpxchg by hand or emulate it. */
/* Out-of-line 64-bit CAS, defined in a platform-specific .c file; every
 * other 64-bit atomic in this branch is built on top of it. */
121 extern gint64 InterlockedCompareExchange64(volatile gint64 *dest, gint64 exch, gint64 comp);
123 /* Implement all other 64-bit atomics in terms of a specialized CAS
124 * in this case, since chances are that the other 64-bit atomic
125 * intrinsics are broken too.
/* Atomically adds `add` to *dest; returns the OLD value.  Retries the CAS
 * until the value read (old_val) is still current when the swap happens.
 * NOTE(review): the loop body (reading old_val) is elided in this extract. */
128 static inline gint64 InterlockedExchangeAdd64(volatile gint64 *dest, gint64 add)
133 } while (InterlockedCompareExchange64 (dest, old_val + add, old_val) != old_val);
137 static inline gint64 InterlockedIncrement64(volatile gint64 *val)
143 } while (InterlockedCompareExchange64 (val, set, get) != set);
147 static inline gint64 InterlockedDecrement64(volatile gint64 *val)
153 } while (InterlockedCompareExchange64 (val, set, get) != set);
157 static inline gint64 InterlockedAdd64(volatile gint64 *dest, gint64 add)
163 } while (InterlockedCompareExchange64 (dest, set, get) != set);
/* Atomic 64-bit load: a CAS of (0, 0) never changes *src (it only stores
 * when *src == 0, and then stores 0) but always returns the current value. */
167 static inline gint64 InterlockedRead64(volatile gint64 *src)
169 return InterlockedCompareExchange64 (src, 0, 0);
174 /* We always implement this in terms of a 64-bit cmpxchg since
175 * GCC doesn't have an intrinsic to model it anyway. */
/* Atomically stores new_val into *val; returns the value it replaced.
 * NOTE(review): the loop body (reading old_val) is elided in this extract. */
176 static inline gint64 InterlockedExchange64(volatile gint64 *val, gint64 new_val)
181 } while (InterlockedCompareExchange64 (val, new_val, old_val) != old_val);
/* Atomic 64-bit store; the exchanged-out previous value is discarded. */
185 static inline void InterlockedWrite64(volatile gint64 *dst, gint64 val)
187 /* Nothing useful from GCC at all, so fall back to CAS. */
188 InterlockedExchange64 (dst, val);
191 #elif (defined(sparc) || defined (__sparc__)) && defined(__GNUC__)
/* SPARC: hand-written inline asm.  Operands are pinned to specific
 * registers (g1/o4/o5) because the cas instruction is emitted as a raw
 * .word opcode (the assembler may lack the mnemonic), so the encoding
 * hard-codes those registers.  Returns the previous value of *_dest. */
194 static inline gint32 InterlockedCompareExchange(volatile gint32 *_dest, gint32 _exch, gint32 _comp)
196 register volatile gint32 *dest asm("g1") = _dest;
197 register gint32 comp asm("o4") = _comp;
198 register gint32 exch asm("o5") = _exch;
200 __asm__ __volatile__(
201 /* cas [%%g1], %%o4, %%o5 */
204 : "0" (exch), "r" (dest), "r" (comp)
/* Pointer-sized CAS on SPARC; returns the previous value of *_dest.
 * Selects casx (64-bit) or cas (32-bit) depending on the pointer size —
 * the #ifdef choosing between the two encodings is elided in this extract. */
211 static inline gpointer InterlockedCompareExchangePointer(volatile gpointer *_dest, gpointer _exch, gpointer _comp)
213 register volatile gpointer *dest asm("g1") = _dest;
214 register gpointer comp asm("o4") = _comp;
215 register gpointer exch asm("o5") = _exch;
217 __asm__ __volatile__(
219 /* casx [%%g1], %%o4, %%o5 */
222 /* cas [%%g1], %%o4, %%o5 */
226 : "0" (exch), "r" (dest), "r" (comp)
/* SPARC atomic increment: load *_dest into o4, compute o4+1 into o5, then
 * cas (hard-coded encoding 0xdbe0500c = cas [%g1], %o4, %o5) and compare
 * to detect a racing writer; the retry branch back to 1: is elided in
 * this extract.  Returns the new value. */
233 static inline gint32 InterlockedIncrement(volatile gint32 *_dest)
235 register volatile gint32 *dest asm("g1") = _dest;
236 register gint32 tmp asm("o4");
237 register gint32 ret asm("o5");
239 __asm__ __volatile__(
240 "1: ld [%%g1], %%o4\n\t"
241 " add %%o4, 1, %%o5\n\t"
242 /* cas [%%g1], %%o4, %%o5 */
243 " .word 0xdbe0500c\n\t"
244 " cmp %%o4, %%o5\n\t"
247 : "=&r" (tmp), "=&r" (ret)
/* SPARC atomic decrement: same ld/compute/cas retry shape as
 * InterlockedIncrement above, with sub instead of add.  Returns the new
 * value.  The retry branch back to 1: is elided in this extract. */
255 static inline gint32 InterlockedDecrement(volatile gint32 *_dest)
257 register volatile gint32 *dest asm("g1") = _dest;
258 register gint32 tmp asm("o4");
259 register gint32 ret asm("o5");
261 __asm__ __volatile__(
262 "1: ld [%%g1], %%o4\n\t"
263 " sub %%o4, 1, %%o5\n\t"
264 /* cas [%%g1], %%o4, %%o5 */
265 " .word 0xdbe0500c\n\t"
266 " cmp %%o4, %%o5\n\t"
269 : "=&r" (tmp), "=&r" (ret)
/* SPARC atomic exchange: ld/cas retry loop that stores exch into *_dest
 * and returns the value it replaced.  The instruction that moves exch
 * into o5 before the cas, and the retry branch, are elided in this
 * extract. */
277 static inline gint32 InterlockedExchange(volatile gint32 *_dest, gint32 exch)
279 register volatile gint32 *dest asm("g1") = _dest;
280 register gint32 tmp asm("o4");
281 register gint32 ret asm("o5");
283 __asm__ __volatile__(
284 "1: ld [%%g1], %%o4\n\t"
286 /* cas [%%g1], %%o4, %%o5 */
287 " .word 0xdbe0500c\n\t"
288 " cmp %%o4, %%o5\n\t"
291 : "=&r" (tmp), "=&r" (ret)
292 : "r" (dest), "r" (exch)
/* Pointer-sized atomic exchange on SPARC; returns the previous *_dest.
 * Carries both the 64-bit (ldx + casx, .word 0xdbf0500c) and 32-bit
 * (ld + cas, .word 0xdbe0500c) encodings — the #ifdef selecting between
 * them and the retry branch are elided in this extract. */
299 static inline gpointer InterlockedExchangePointer(volatile gpointer *_dest, gpointer exch)
301 register volatile gpointer *dest asm("g1") = _dest;
302 register gpointer tmp asm("o4");
303 register gpointer ret asm("o5");
305 __asm__ __volatile__(
307 "1: ldx [%%g1], %%o4\n\t"
309 "1: ld [%%g1], %%o4\n\t"
313 /* casx [%%g1], %%o4, %%o5 */
314 " .word 0xdbf0500c\n\t"
316 /* cas [%%g1], %%o4, %%o5 */
317 " .word 0xdbe0500c\n\t"
319 " cmp %%o4, %%o5\n\t"
322 : "=&r" (tmp), "=&r" (ret)
323 : "r" (dest), "r" (exch)
/* SPARC atomic fetch-and-add: ld/add/cas retry loop.  After the cas
 * succeeds o5 holds the OLD value; the trailing add re-applies `add` so
 * the asm's output is the new value — the C wrapper presumably derives
 * the old value from it (return statement elided in this extract). */
330 static inline gint32 InterlockedExchangeAdd(volatile gint32 *_dest, gint32 add)
332 register volatile gint32 *dest asm("g1") = _dest;
333 register gint32 tmp asm("o4");
334 register gint32 ret asm("o5");
336 __asm__ __volatile__(
337 "1: ld [%%g1], %%o4\n\t"
338 " add %%o4, %3, %%o5\n\t"
339 /* cas [%%g1], %%o4, %%o5 */
340 " .word 0xdbe0500c\n\t"
341 " cmp %%o4, %%o5\n\t"
343 " add %%o5, %3, %%o5"
344 : "=&r" (tmp), "=&r" (ret)
345 : "r" (dest), "r" (add)
351 #elif defined(__ia64__)
353 #ifdef __INTEL_COMPILER
354 #include <ia64intrin.h>
/* ia64 CAS: returns the previous value of *dest.  icc uses the compiler
 * intrinsic; gcc uses cmpxchg4.acq with the comparand in ar.ccv.  comp is
 * zero-extended to 64 bits because cmpxchg4 zero-extends the loaded value. */
357 static inline gint32 InterlockedCompareExchange(gint32 volatile *dest,
358 						gint32 exch, gint32 comp)
363 #ifdef __INTEL_COMPILER
364 	old = _InterlockedCompareExchange (dest, exch, comp);
366 	/* cmpxchg4 zero extends the value read from memory */
367 	real_comp = (guint64)(guint32)comp;
368 	asm volatile ("mov ar.ccv = %2 ;;\n\t"
369 				"cmpxchg4.acq %0 = [%1], %3, ar.ccv\n\t"
370 				: "=r" (old) : "r" (dest), "r" (real_comp), "r" (exch));
/* ia64 pointer-sized CAS (pointers are 64-bit here, hence cmpxchg8);
 * returns the previous value of *dest. */
376 static inline gpointer InterlockedCompareExchangePointer(gpointer volatile *dest,
377 						gpointer exch, gpointer comp)
381 #ifdef __INTEL_COMPILER
382 	old = _InterlockedCompareExchangePointer (dest, exch, comp);
384 	asm volatile ("mov ar.ccv = %2 ;;\n\t"
385 				"cmpxchg8.acq %0 = [%1], %3, ar.ccv\n\t"
386 				: "=r" (old) : "r" (dest), "r" (comp), "r" (exch));
/* ia64 atomic increment: icc intrinsic, or a CAS retry loop under gcc.
 * NOTE(review): the gcc loop body (reading `old`) and the return of the
 * new value are elided in this extract. */
392 static inline gint32 InterlockedIncrement(gint32 volatile *val)
394 #ifdef __INTEL_COMPILER
395 return _InterlockedIncrement (val);
401 } while (InterlockedCompareExchange (val, old + 1, old) != old);
/* ia64 atomic decrement: icc intrinsic, or a CAS retry loop under gcc.
 * NOTE(review): loop body and return are elided in this extract. */
407 static inline gint32 InterlockedDecrement(gint32 volatile *val)
409 #ifdef __INTEL_COMPILER
410 return _InterlockedDecrement (val);
416 } while (InterlockedCompareExchange (val, old - 1, old) != old);
/* ia64 atomic exchange: stores new_val, returns the replaced value.
 * icc intrinsic, or a CAS retry loop under gcc (loop body elided). */
422 static inline gint32 InterlockedExchange(gint32 volatile *dest, gint32 new_val)
424 #ifdef __INTEL_COMPILER
425 return _InterlockedExchange (dest, new_val);
431 } while (InterlockedCompareExchange (dest, new_val, res) != res);
/* ia64 pointer-sized atomic exchange.  The icc path casts through gint64
 * (valid: ia64 pointers are 64-bit); gcc uses a CAS retry loop (elided). */
437 static inline gpointer InterlockedExchangePointer(gpointer volatile *dest, gpointer new_val)
439 #ifdef __INTEL_COMPILER
440 return (gpointer)_InterlockedExchange64 ((gint64*)dest, (gint64)new_val);
446 } while (InterlockedCompareExchangePointer (dest, new_val, res) != res);
/* ia64 fetch-and-add: returns the OLD (pre-add) value of *val.
 * icc intrinsic, or a CAS retry loop under gcc (loop body elided). */
452 static inline gint32 InterlockedExchangeAdd(gint32 volatile *val, gint32 add)
456 #ifdef __INTEL_COMPILER
457 old = _InterlockedExchangeAdd (val, add);
461 } while (InterlockedCompareExchange (val, old + add, old) != old);
/* No inline-asm/intrinsic implementation for this target: advertise that
 * fact and declare out-of-line versions of every atomic op.  Their
 * definitions (presumably lock-based) live in a separate .c file. */
469 #define WAPI_NO_ATOMIC_ASM
471 extern gint32 InterlockedCompareExchange(volatile gint32 *dest, gint32 exch, gint32 comp);
472 extern gint64 InterlockedCompareExchange64(volatile gint64 *dest, gint64 exch, gint64 comp);
473 extern gpointer InterlockedCompareExchangePointer(volatile gpointer *dest, gpointer exch, gpointer comp);
474 extern gint32 InterlockedAdd(volatile gint32 *dest, gint32 add);
475 extern gint64 InterlockedAdd64(volatile gint64 *dest, gint64 add);
476 extern gint32 InterlockedIncrement(volatile gint32 *dest);
477 extern gint64 InterlockedIncrement64(volatile gint64 *dest);
478 extern gint32 InterlockedDecrement(volatile gint32 *dest);
479 extern gint64 InterlockedDecrement64(volatile gint64 *dest);
480 extern gint32 InterlockedExchange(volatile gint32 *dest, gint32 exch);
481 extern gint64 InterlockedExchange64(volatile gint64 *dest, gint64 exch);
482 extern gpointer InterlockedExchangePointer(volatile gpointer *dest, gpointer exch);
483 extern gint32 InterlockedExchangeAdd(volatile gint32 *dest, gint32 add);
484 extern gint64 InterlockedExchangeAdd64(volatile gint64 *dest, gint64 add);
488 #endif /* _WAPI_ATOMIC_H_ */