 * atomic.h: Atomic operations
 *
 * Author:
 *	Dick Porter (dick@ximian.com)
 *
 * (C) 2002 Ximian, Inc.
 * Copyright 2012 Xamarin Inc
11 #ifndef _WAPI_ATOMIC_H_
12 #define _WAPI_ATOMIC_H_
14 #if defined(__NetBSD__)
15 #include <sys/param.h>
17 #if __NetBSD_Version__ > 499004000
18 #include <sys/atomic.h>
19 #define HAVE_ATOMIC_OPS
27 /* On Windows, we always use the functions provided by the Windows API. */
28 #if defined(__WIN32__) || defined(_WIN32)
31 #define HAS_64BITS_ATOMICS 1
33 /* mingw is missing InterlockedCompareExchange64 () from winbase.h */
35 static inline gint64 InterlockedCompareExchange64(volatile gint64 *dest, gint64 exch, gint64 comp)
37 return __sync_val_compare_and_swap (dest, comp, exch);
41 /* Prefer GCC atomic ops if the target supports it (see configure.in). */
42 #elif defined(USE_GCC_ATOMIC_OPS)
44 static inline gint32 InterlockedCompareExchange(volatile gint32 *dest,
45 gint32 exch, gint32 comp)
47 return __sync_val_compare_and_swap (dest, comp, exch);
50 static inline gpointer InterlockedCompareExchangePointer(volatile gpointer *dest, gpointer exch, gpointer comp)
52 return __sync_val_compare_and_swap (dest, comp, exch);
55 static inline gint32 InterlockedIncrement(volatile gint32 *val)
57 return __sync_add_and_fetch (val, 1);
60 static inline gint32 InterlockedDecrement(volatile gint32 *val)
62 return __sync_add_and_fetch (val, -1);
65 static inline gint32 InterlockedExchange(volatile gint32 *val, gint32 new_val)
70 } while (__sync_val_compare_and_swap (val, old_val, new_val) != old_val);
74 static inline gpointer InterlockedExchangePointer(volatile gpointer *val,
80 } while (__sync_val_compare_and_swap (val, old_val, new_val) != old_val);
84 static inline gint32 InterlockedExchangeAdd(volatile gint32 *val, gint32 add)
86 return __sync_fetch_and_add (val, add);
89 /*All Apple targets have broken compilers*/
90 #if defined (TARGET_MACH)
91 #define BROKEN_64BIT_ATOMICS_INTRINSIC 1
95 #if !defined (BROKEN_64BIT_ATOMICS_INTRINSIC)
96 #define HAS_64BITS_ATOMICS 1
98 static inline gint64 InterlockedCompareExchange64(volatile gint64 *dest, gint64 exch, gint64 comp)
100 return __sync_val_compare_and_swap (dest, comp, exch);
106 #elif defined(__NetBSD__) && defined(HAVE_ATOMIC_OPS)
108 static inline gint32 InterlockedCompareExchange(volatile gint32 *dest,
109 gint32 exch, gint32 comp)
111 return atomic_cas_32((uint32_t*)dest, comp, exch);
114 static inline gpointer InterlockedCompareExchangePointer(volatile gpointer *dest, gpointer exch, gpointer comp)
116 return atomic_cas_ptr(dest, comp, exch);
119 static inline gint32 InterlockedIncrement(volatile gint32 *val)
121 return atomic_inc_32_nv((uint32_t*)val);
124 static inline gint32 InterlockedDecrement(volatile gint32 *val)
126 return atomic_dec_32_nv((uint32_t*)val);
129 static inline gint32 InterlockedExchange(volatile gint32 *val, gint32 new_val)
131 return atomic_swap_32((uint32_t*)val, new_val);
134 static inline gpointer InterlockedExchangePointer(volatile gpointer *val,
137 return atomic_swap_ptr(val, new_val);
140 static inline gint32 InterlockedExchangeAdd(volatile gint32 *val, gint32 add)
142 return atomic_add_32_nv((uint32_t*)val, add) - add;
145 #elif (defined(sparc) || defined (__sparc__)) && defined(__GNUC__)
/* SPARC: 32-bit compare-and-swap.  The operands are pinned to fixed
 * registers (g1/o4/o5) because the .word-encoded `cas` opcode hard-codes
 * them; "0" (exch) ties the CAS result back into the exch register.
 * NOTE(review): braces, the .word line, output constraints/clobbers and
 * the return are not visible in this chunk — do not edit without the
 * full file. */
148 static inline gint32 InterlockedCompareExchange(volatile gint32 *_dest, gint32 _exch, gint32 _comp)
150 register volatile gint32 *dest asm("g1") = _dest;
151 register gint32 comp asm("o4") = _comp;
152 register gint32 exch asm("o5") = _exch;
154 __asm__ __volatile__(
155 /* cas [%%g1], %%o4, %%o5 */
158 : "0" (exch), "r" (dest), "r" (comp)
/* SPARC: pointer-width compare-and-swap.  Both a 64-bit `casx` and a
 * 32-bit `cas` encoding appear — presumably selected by a SPARCV9
 * preprocessor test on lines not visible here (confirm against the full
 * file).  Register pinning mirrors the 32-bit variant above. */
165 static inline gpointer InterlockedCompareExchangePointer(volatile gpointer *_dest, gpointer _exch, gpointer _comp)
167 register volatile gpointer *dest asm("g1") = _dest;
168 register gpointer comp asm("o4") = _comp;
169 register gpointer exch asm("o5") = _exch;
171 __asm__ __volatile__(
173 /* casx [%%g1], %%o4, %%o5 */
176 /* cas [%%g1], %%o4, %%o5 */
180 : "0" (exch), "r" (dest), "r" (comp)
/* SPARC: atomic increment via a load / add-1 / CAS retry loop; the
 * hand-assembled .word 0xdbe0500c is `cas [%g1], %o4, %o5` (spelled out
 * in the comment line because older assemblers lack the mnemonic).
 * NOTE(review): the branch back to label 1, final adjustment and return
 * are on lines missing from this chunk. */
187 static inline gint32 InterlockedIncrement(volatile gint32 *_dest)
189 register volatile gint32 *dest asm("g1") = _dest;
190 register gint32 tmp asm("o4");
191 register gint32 ret asm("o5");
193 __asm__ __volatile__(
194 "1: ld [%%g1], %%o4\n\t"
195 " add %%o4, 1, %%o5\n\t"
196 /* cas [%%g1], %%o4, %%o5 */
197 " .word 0xdbe0500c\n\t"
198 " cmp %%o4, %%o5\n\t"
201 : "=&r" (tmp), "=&r" (ret)
/* SPARC: atomic decrement — identical retry structure to the increment
 * above, with `sub %o4, 1, %o5` computing the candidate new value.
 * NOTE(review): loop branch and return lines are missing from this chunk. */
209 static inline gint32 InterlockedDecrement(volatile gint32 *_dest)
211 register volatile gint32 *dest asm("g1") = _dest;
212 register gint32 tmp asm("o4");
213 register gint32 ret asm("o5");
215 __asm__ __volatile__(
216 "1: ld [%%g1], %%o4\n\t"
217 " sub %%o4, 1, %%o5\n\t"
218 /* cas [%%g1], %%o4, %%o5 */
219 " .word 0xdbe0500c\n\t"
220 " cmp %%o4, %%o5\n\t"
223 : "=&r" (tmp), "=&r" (ret)
/* SPARC: atomic 32-bit swap — load the current value, CAS the new one in,
 * retry if another writer intervened.  `exch` enters via the input
 * constraint on the last visible line.  NOTE(review): the move of the
 * new value into o5, the loop branch and the return are not visible. */
231 static inline gint32 InterlockedExchange(volatile gint32 *_dest, gint32 exch)
233 register volatile gint32 *dest asm("g1") = _dest;
234 register gint32 tmp asm("o4");
235 register gint32 ret asm("o5");
237 __asm__ __volatile__(
238 "1: ld [%%g1], %%o4\n\t"
240 /* cas [%%g1], %%o4, %%o5 */
241 " .word 0xdbe0500c\n\t"
242 " cmp %%o4, %%o5\n\t"
245 : "=&r" (tmp), "=&r" (ret)
246 : "r" (dest), "r" (exch)
/* SPARC: pointer-width atomic swap.  Carries both the 64-bit path
 * (ldx + .word 0xdbf0500c = `casx`) and the 32-bit path (ld + .word
 * 0xdbe0500c = `cas`); the selecting preprocessor conditionals are on
 * lines missing from this chunk — presumably a SPARCV9 test (confirm). */
253 static inline gpointer InterlockedExchangePointer(volatile gpointer *_dest, gpointer exch)
255 register volatile gpointer *dest asm("g1") = _dest;
256 register gpointer tmp asm("o4");
257 register gpointer ret asm("o5");
259 __asm__ __volatile__(
261 "1: ldx [%%g1], %%o4\n\t"
263 "1: ld [%%g1], %%o4\n\t"
267 /* casx [%%g1], %%o4, %%o5 */
268 " .word 0xdbf0500c\n\t"
270 /* cas [%%g1], %%o4, %%o5 */
271 " .word 0xdbe0500c\n\t"
273 " cmp %%o4, %%o5\n\t"
276 : "=&r" (tmp), "=&r" (ret)
277 : "r" (dest), "r" (exch)
/* SPARC: atomic add returning the OLD value.  After a successful CAS,
 * o5 holds the pre-add value; the trailing `add %o5, %3, %o5` rebuilds
 * the new value — the caller-visible result is derived from these on
 * return lines not visible here.  NOTE(review): loop branch and return
 * are missing from this chunk. */
284 static inline gint32 InterlockedExchangeAdd(volatile gint32 *_dest, gint32 add)
286 register volatile gint32 *dest asm("g1") = _dest;
287 register gint32 tmp asm("o4");
288 register gint32 ret asm("o5");
290 __asm__ __volatile__(
291 "1: ld [%%g1], %%o4\n\t"
292 " add %%o4, %3, %%o5\n\t"
293 /* cas [%%g1], %%o4, %%o5 */
294 " .word 0xdbe0500c\n\t"
295 " cmp %%o4, %%o5\n\t"
297 " add %%o5, %3, %%o5"
298 : "=&r" (tmp), "=&r" (ret)
299 : "r" (dest), "r" (add)
/* s390: 32-bit compare-and-swap.  LA loads the address of *dest into r1;
 * the CS instruction (on a line not visible here) does the atomic
 * compare-and-swap, leaving the previous value in `old`.
 * NOTE(review): return-type line, locals and return are missing from
 * this chunk. */
308 InterlockedCompareExchange(volatile gint32 *dest,
309 gint32 exch, gint32 comp)
313 __asm__ __volatile__ ("\tLA\t1,%0\n"
316 : "+m" (*dest), "=&r" (old)
317 : "r" (exch), "r" (comp)
/* s390x: pointer-width compare-and-swap using CSG (the 64-bit
 * compare-and-swap instruction).  Same addressing pattern as the 32-bit
 * variant: LA puts &*dest in r1, CSG operates on 0(1). */
322 static inline gpointer
323 InterlockedCompareExchangePointer(volatile gpointer *dest,
329 __asm__ __volatile__ ("\tLA\t1,%0\n"
331 "\tCSG\t%1,%2,0(1)\n"
332 : "+m" (*dest), "=&r" (old)
333 : "r" (exch), "r" (comp)
/* s390: atomic increment.  LA loads &*val into r2; the load/add/CS retry
 * loop itself is on lines not visible in this chunk.  NOTE(review):
 * return-type line and return value are also missing here. */
340 InterlockedIncrement(volatile gint32 *val)
344 __asm__ __volatile__ ("\tLA\t2,%1\n"
351 : "=r" (tmp), "+m" (*val)
/* s390: atomic decrement — mirrors the increment above; the asm retry
 * loop body is on lines not visible in this chunk. */
358 InterlockedDecrement(volatile gint32 *val)
362 __asm__ __volatile__ ("\tLA\t2,%1\n"
369 : "=r" (tmp), "+m" (*val)
/* s390: atomic 32-bit swap via a CS retry loop (asm body partially on
 * lines not visible here); `ret` receives the previous *val. */
376 InterlockedExchange(volatile gint32 *val, gint32 new_val)
380 __asm__ __volatile__ ("\tLA\t1,%0\n"
384 : "+m" (*val), "=&r" (ret)
/* s390x: pointer-width atomic swap using CSG in a retry loop; `ret`
 * receives the previous *val.  NOTE(review): loop/load lines and return
 * are missing from this chunk. */
391 static inline gpointer
392 InterlockedExchangePointer(volatile gpointer *val, gpointer new_val)
396 __asm__ __volatile__ ("\tLA\t1,%0\n"
398 "\tCSG\t%1,%2,0(1)\n"
400 : "+m" (*val), "=&r" (ret)
/* s390: atomic add returning the OLD value; load/add/CS retry loop is
 * on lines not visible in this chunk, with `ret` capturing the pre-add
 * value. */
408 InterlockedExchangeAdd(volatile gint32 *val, gint32 add)
412 __asm__ __volatile__ ("\tLA\t2,%1\n"
418 : "=&r" (ret), "+m" (*val)
425 #elif defined(__ia64__)
427 #ifdef __INTEL_COMPILER
428 #include <ia64intrin.h>
/* ia64: 32-bit compare-and-swap.  Intel compiler uses the intrinsic;
 * GCC path seeds ar.ccv with the zero-extended comparand (cmpxchg4
 * zero-extends the memory value, so comp must match that widening) and
 * issues cmpxchg4.acq, leaving the previous value in `old`.
 * NOTE(review): declarations of old/real_comp, #else/#endif and the
 * return are on lines missing from this chunk. */
431 static inline gint32 InterlockedCompareExchange(gint32 volatile *dest,
432 gint32 exch, gint32 comp)
437 #ifdef __INTEL_COMPILER
438 old = _InterlockedCompareExchange (dest, exch, comp);
440 /* cmpxchg4 zero extends the value read from memory */
441 real_comp = (guint64)(guint32)comp;
442 asm volatile ("mov ar.ccv = %2 ;;\n\t"
443 "cmpxchg4.acq %0 = [%1], %3, ar.ccv\n\t"
444 : "=r" (old) : "r" (dest), "r" (real_comp), "r" (exch));
/* ia64: pointer-width (64-bit) compare-and-swap.  Like the 32-bit
 * variant but with cmpxchg8.acq and no zero-extension step, since the
 * comparand is already register-width.  NOTE(review): declaration of
 * `old`, #else/#endif and the return are missing from this chunk. */
450 static inline gpointer InterlockedCompareExchangePointer(gpointer volatile *dest,
451 gpointer exch, gpointer comp)
455 #ifdef __INTEL_COMPILER
456 old = _InterlockedCompareExchangePointer (dest, exch, comp);
458 asm volatile ("mov ar.ccv = %2 ;;\n\t"
459 "cmpxchg8.acq %0 = [%1], %3, ar.ccv\n\t"
460 : "=r" (old) : "r" (dest), "r" (comp), "r" (exch));
466 static inline gint32 InterlockedIncrement(gint32 volatile *val)
468 #ifdef __INTEL_COMPILER
469 return _InterlockedIncrement (val);
475 } while (InterlockedCompareExchange (val, old + 1, old) != old);
481 static inline gint32 InterlockedDecrement(gint32 volatile *val)
483 #ifdef __INTEL_COMPILER
484 return _InterlockedDecrement (val);
490 } while (InterlockedCompareExchange (val, old - 1, old) != old);
496 static inline gint32 InterlockedExchange(gint32 volatile *dest, gint32 new_val)
498 #ifdef __INTEL_COMPILER
499 return _InterlockedExchange (dest, new_val);
505 } while (InterlockedCompareExchange (dest, new_val, res) != res);
511 static inline gpointer InterlockedExchangePointer(gpointer volatile *dest, gpointer new_val)
513 #ifdef __INTEL_COMPILER
514 return (gpointer)_InterlockedExchange64 ((gint64*)dest, (gint64)new_val);
520 } while (InterlockedCompareExchangePointer (dest, new_val, res) != res);
526 static inline gint32 InterlockedExchangeAdd(gint32 volatile *val, gint32 add)
530 #ifdef __INTEL_COMPILER
531 old = _InterlockedExchangeAdd (val, add);
535 } while (InterlockedCompareExchange (val, old + add, old) != old);
/* No inline implementation for this target: declare out-of-line
 * fallbacks (defined in a corresponding .c elsewhere in the project).
 * WAPI_NO_ATOMIC_ASM lets that translation unit know it must provide
 * them.  All functions keep Win32 Interlocked semantics: CAS and
 * Exchange* return the previous value, Increment/Decrement the new one,
 * ExchangeAdd the previous one. */
543 #define WAPI_NO_ATOMIC_ASM
545 extern gint32 InterlockedCompareExchange(volatile gint32 *dest, gint32 exch, gint32 comp);
546 extern gpointer InterlockedCompareExchangePointer(volatile gpointer *dest, gpointer exch, gpointer comp);
547 extern gint32 InterlockedIncrement(volatile gint32 *dest);
548 extern gint32 InterlockedDecrement(volatile gint32 *dest);
549 extern gint32 InterlockedExchange(volatile gint32 *dest, gint32 exch);
550 extern gpointer InterlockedExchangePointer(volatile gpointer *dest, gpointer exch);
551 extern gint32 InterlockedExchangeAdd(volatile gint32 *dest, gint32 add);
/* Targets that did not define HAS_64BITS_ATOMICS above get an
 * out-of-line 64-bit compare-and-swap fallback instead. */
555 #ifndef HAS_64BITS_ATOMICS
556 extern gint64 InterlockedCompareExchange64(volatile gint64 *dest, gint64 exch, gint64 comp);
559 #endif /* _WAPI_ATOMIC_H_ */