/*
 * atomic.c: Workarounds for atomic operations for platforms that don't have
 * really atomic asm functions in atomic.h
 *
 * Author:
 *	Dick Porter (dick@ximian.com)
 *
 * (C) 2002 Ximian, Inc.
 */
14 #include <mono/utils/atomic.h>
15 #include <mono/utils/mono-compiler.h>
17 #if defined (WAPI_NO_ATOMIC_ASM) || defined (BROKEN_64BIT_ATOMICS_INTRINSIC)
21 static pthread_mutex_t spin G_GNUC_UNUSED = PTHREAD_MUTEX_INITIALIZER;
23 #define NEED_64BIT_CMPXCHG_FALLBACK
27 #ifdef WAPI_NO_ATOMIC_ASM
29 gint32 InterlockedCompareExchange(volatile gint32 *dest, gint32 exch,
35 pthread_cleanup_push ((void(*)(void *))pthread_mutex_unlock,
37 ret = pthread_mutex_lock(&spin);
45 ret = pthread_mutex_unlock(&spin);
48 pthread_cleanup_pop (0);
53 gpointer InterlockedCompareExchangePointer(volatile gpointer *dest,
54 gpointer exch, gpointer comp)
59 pthread_cleanup_push ((void(*)(void *))pthread_mutex_unlock,
61 ret = pthread_mutex_lock(&spin);
69 ret = pthread_mutex_unlock(&spin);
72 pthread_cleanup_pop (0);
77 gint32 InterlockedAdd(volatile gint32 *dest, gint32 add)
82 pthread_cleanup_push ((void(*)(void *))pthread_mutex_unlock,
84 thr_ret = pthread_mutex_lock(&spin);
85 g_assert (thr_ret == 0);
90 thr_ret = pthread_mutex_unlock(&spin);
91 g_assert (thr_ret == 0);
93 pthread_cleanup_pop (0);
98 gint64 InterlockedAdd64(volatile gint64 *dest, gint64 add)
103 pthread_cleanup_push ((void(*)(void *))pthread_mutex_unlock,
105 thr_ret = pthread_mutex_lock(&spin);
106 g_assert (thr_ret == 0);
111 thr_ret = pthread_mutex_unlock(&spin);
112 g_assert (thr_ret == 0);
114 pthread_cleanup_pop (0);
119 gint32 InterlockedIncrement(volatile gint32 *dest)
124 pthread_cleanup_push ((void(*)(void *))pthread_mutex_unlock,
126 thr_ret = pthread_mutex_lock(&spin);
127 g_assert (thr_ret == 0);
132 thr_ret = pthread_mutex_unlock(&spin);
133 g_assert (thr_ret == 0);
135 pthread_cleanup_pop (0);
140 gint64 InterlockedIncrement64(volatile gint64 *dest)
145 pthread_cleanup_push ((void(*)(void *))pthread_mutex_unlock,
147 thr_ret = pthread_mutex_lock(&spin);
148 g_assert (thr_ret == 0);
153 thr_ret = pthread_mutex_unlock(&spin);
154 g_assert (thr_ret == 0);
156 pthread_cleanup_pop (0);
161 gint32 InterlockedDecrement(volatile gint32 *dest)
166 pthread_cleanup_push ((void(*)(void *))pthread_mutex_unlock,
168 thr_ret = pthread_mutex_lock(&spin);
169 g_assert (thr_ret == 0);
174 thr_ret = pthread_mutex_unlock(&spin);
175 g_assert (thr_ret == 0);
177 pthread_cleanup_pop (0);
182 gint64 InterlockedDecrement64(volatile gint64 *dest)
187 pthread_cleanup_push ((void(*)(void *))pthread_mutex_unlock,
189 thr_ret = pthread_mutex_lock(&spin);
190 g_assert (thr_ret == 0);
195 thr_ret = pthread_mutex_unlock(&spin);
196 g_assert (thr_ret == 0);
198 pthread_cleanup_pop (0);
203 gint32 InterlockedExchange(volatile gint32 *dest, gint32 exch)
208 pthread_cleanup_push ((void(*)(void *))pthread_mutex_unlock,
210 thr_ret = pthread_mutex_lock(&spin);
211 g_assert (thr_ret == 0);
216 thr_ret = pthread_mutex_unlock(&spin);
217 g_assert (thr_ret == 0);
219 pthread_cleanup_pop (0);
224 gint64 InterlockedExchange64(volatile gint64 *dest, gint64 exch)
229 pthread_cleanup_push ((void(*)(void *))pthread_mutex_unlock,
231 thr_ret = pthread_mutex_lock(&spin);
232 g_assert (thr_ret == 0);
237 thr_ret = pthread_mutex_unlock(&spin);
238 g_assert (thr_ret == 0);
240 pthread_cleanup_pop (0);
245 gpointer InterlockedExchangePointer(volatile gpointer *dest, gpointer exch)
250 pthread_cleanup_push ((void(*)(void *))pthread_mutex_unlock,
252 thr_ret = pthread_mutex_lock(&spin);
253 g_assert (thr_ret == 0);
258 thr_ret = pthread_mutex_unlock(&spin);
259 g_assert (thr_ret == 0);
261 pthread_cleanup_pop (0);
266 gint32 InterlockedExchangeAdd(volatile gint32 *dest, gint32 add)
271 pthread_cleanup_push ((void(*)(void *))pthread_mutex_unlock,
273 thr_ret = pthread_mutex_lock(&spin);
274 g_assert (thr_ret == 0);
279 thr_ret = pthread_mutex_unlock(&spin);
280 g_assert (thr_ret == 0);
282 pthread_cleanup_pop (0);
287 gint64 InterlockedExchangeAdd64(volatile gint64 *dest, gint64 add)
292 pthread_cleanup_push ((void(*)(void *))pthread_mutex_unlock,
294 thr_ret = pthread_mutex_lock(&spin);
295 g_assert (thr_ret == 0);
300 thr_ret = pthread_mutex_unlock(&spin);
301 g_assert (thr_ret == 0);
303 pthread_cleanup_pop (0);
308 gint8 InterlockedRead8(volatile gint8 *src)
313 pthread_cleanup_push ((void(*)(void *))pthread_mutex_unlock,
315 thr_ret = pthread_mutex_lock(&spin);
316 g_assert (thr_ret == 0);
320 thr_ret = pthread_mutex_unlock(&spin);
321 g_assert (thr_ret == 0);
323 pthread_cleanup_pop (0);
328 gint16 InterlockedRead16(volatile gint16 *src)
333 pthread_cleanup_push ((void(*)(void *))pthread_mutex_unlock,
335 thr_ret = pthread_mutex_lock(&spin);
336 g_assert (thr_ret == 0);
340 thr_ret = pthread_mutex_unlock(&spin);
341 g_assert (thr_ret == 0);
343 pthread_cleanup_pop (0);
348 gint32 InterlockedRead(volatile gint32 *src)
353 pthread_cleanup_push ((void(*)(void *))pthread_mutex_unlock,
355 thr_ret = pthread_mutex_lock(&spin);
356 g_assert (thr_ret == 0);
360 thr_ret = pthread_mutex_unlock(&spin);
361 g_assert (thr_ret == 0);
363 pthread_cleanup_pop (0);
368 gint64 InterlockedRead64(volatile gint64 *src)
373 pthread_cleanup_push ((void(*)(void *))pthread_mutex_unlock,
375 thr_ret = pthread_mutex_lock(&spin);
376 g_assert (thr_ret == 0);
380 thr_ret = pthread_mutex_unlock(&spin);
381 g_assert (thr_ret == 0);
383 pthread_cleanup_pop (0);
388 gpointer InterlockedReadPointer(volatile gpointer *src)
393 pthread_cleanup_push ((void(*)(void *))pthread_mutex_unlock,
395 thr_ret = pthread_mutex_lock(&spin);
396 g_assert (thr_ret == 0);
400 thr_ret = pthread_mutex_unlock(&spin);
401 g_assert (thr_ret == 0);
403 pthread_cleanup_pop (0);
408 void InterlockedWrite(volatile gint8 *dst, gint8 val)
412 pthread_cleanup_push ((void(*)(void *))pthread_mutex_unlock,
414 thr_ret = pthread_mutex_lock(&spin);
415 g_assert (thr_ret == 0);
419 thr_ret = pthread_mutex_unlock(&spin);
420 g_assert (thr_ret == 0);
422 pthread_cleanup_pop (0);
425 void InterlockedWrite16(volatile gint16 *dst, gint16 val)
429 pthread_cleanup_push ((void(*)(void *))pthread_mutex_unlock,
431 thr_ret = pthread_mutex_lock(&spin);
432 g_assert (thr_ret == 0);
436 thr_ret = pthread_mutex_unlock(&spin);
437 g_assert (thr_ret == 0);
439 pthread_cleanup_pop (0);
442 void InterlockedWrite(volatile gint32 *dst, gint32 val)
446 pthread_cleanup_push ((void(*)(void *))pthread_mutex_unlock,
448 thr_ret = pthread_mutex_lock(&spin);
449 g_assert (thr_ret == 0);
453 thr_ret = pthread_mutex_unlock(&spin);
454 g_assert (thr_ret == 0);
456 pthread_cleanup_pop (0);
459 void InterlockedWrite64(volatile gint64 *dst, gint64 val)
463 pthread_cleanup_push ((void(*)(void *))pthread_mutex_unlock,
465 thr_ret = pthread_mutex_lock(&spin);
466 g_assert (thr_ret == 0);
470 thr_ret = pthread_mutex_unlock(&spin);
471 g_assert (thr_ret == 0);
473 pthread_cleanup_pop (0);
476 void InterlockedWritePointer(volatile gpointer *dst, gpointer val)
480 pthread_cleanup_push ((void(*)(void *))pthread_mutex_unlock,
482 thr_ret = pthread_mutex_lock(&spin);
483 g_assert (thr_ret == 0);
487 thr_ret = pthread_mutex_unlock(&spin);
488 g_assert (thr_ret == 0);
490 pthread_cleanup_pop (0);
495 #if defined (NEED_64BIT_CMPXCHG_FALLBACK)
497 #if defined (TARGET_OSX)
499 /* The compiler breaks if this code is in the header... */
502 InterlockedCompareExchange64(volatile gint64 *dest, gint64 exch, gint64 comp)
504 return __sync_val_compare_and_swap (dest, comp, exch);
507 #elif defined (__arm__) && defined (HAVE_ARMV7) && (defined(TARGET_IOS) || defined(TARGET_WATCHOS) || defined(TARGET_ANDROID))
509 #if defined (TARGET_IOS) || defined (TARGET_WATCHOS)
512 #error "Not supported."
516 InterlockedCompareExchange64(volatile gint64 *dest, gint64 exch, gint64 comp)
518 return __sync_val_compare_and_swap (dest, comp, exch);
521 #elif defined (TARGET_ANDROID)
523 /* Some Android systems can't find the 64-bit CAS intrinsic at runtime,
524 * so we have to roll our own...
527 gint64 InterlockedCompareExchange64(volatile gint64 *dest, gint64 exch, gint64 comp) __attribute__ ((naked));
530 InterlockedCompareExchange64(volatile gint64 *dest, gint64 exch, gint64 comp)
533 "push {r4, r5, r6, r7}\n"
534 "ldrd r4, [sp, #16]\n"
541 "strexd r1, r2, [r0]\n"
548 "pop {r4, r5, r6, r7}\n"
555 #error "Need a 64-bit CAS fallback!"
562 InterlockedCompareExchange64(volatile gint64 *dest, gint64 exch, gint64 comp)
567 pthread_cleanup_push ((void(*)(void *))pthread_mutex_unlock,
569 ret = pthread_mutex_lock(&spin);
577 ret = pthread_mutex_unlock(&spin);
580 pthread_cleanup_pop (0);
/* When none of the fallbacks above were compiled in, emit a dummy
 * symbol so this translation unit is not empty (missing #endif
 * restored).
 */
#if !defined (WAPI_NO_ATOMIC_ASM) && !defined (BROKEN_64BIT_ATOMICS_INTRINSIC) && !defined (NEED_64BIT_CMPXCHG_FALLBACK)
MONO_EMPTY_SOURCE_FILE (atomic);
#endif