 * Workarounds for atomic operations for platforms that don't have
 * really atomic asm functions in atomic.h
 *
 * Author:
 *	Dick Porter (dick@ximian.com)
 *
 * (C) 2002 Ximian, Inc.
15 #include <mono/utils/atomic.h>
16 #include <mono/utils/mono-compiler.h>
18 #if defined (WAPI_NO_ATOMIC_ASM) || defined (BROKEN_64BIT_ATOMICS_INTRINSIC)
22 static pthread_mutex_t spin G_GNUC_UNUSED = PTHREAD_MUTEX_INITIALIZER;
24 #define NEED_64BIT_CMPXCHG_FALLBACK
28 #ifdef WAPI_NO_ATOMIC_ASM
30 gint32 InterlockedCompareExchange(volatile gint32 *dest, gint32 exch,
36 pthread_cleanup_push ((void(*)(void *))pthread_mutex_unlock,
38 ret = pthread_mutex_lock(&spin);
46 ret = pthread_mutex_unlock(&spin);
49 pthread_cleanup_pop (0);
54 gpointer InterlockedCompareExchangePointer(volatile gpointer *dest,
55 gpointer exch, gpointer comp)
60 pthread_cleanup_push ((void(*)(void *))pthread_mutex_unlock,
62 ret = pthread_mutex_lock(&spin);
70 ret = pthread_mutex_unlock(&spin);
73 pthread_cleanup_pop (0);
78 gint32 InterlockedAdd(volatile gint32 *dest, gint32 add)
83 pthread_cleanup_push ((void(*)(void *))pthread_mutex_unlock,
85 thr_ret = pthread_mutex_lock(&spin);
86 g_assert (thr_ret == 0);
91 thr_ret = pthread_mutex_unlock(&spin);
92 g_assert (thr_ret == 0);
94 pthread_cleanup_pop (0);
99 gint64 InterlockedAdd64(volatile gint64 *dest, gint64 add)
104 pthread_cleanup_push ((void(*)(void *))pthread_mutex_unlock,
106 thr_ret = pthread_mutex_lock(&spin);
107 g_assert (thr_ret == 0);
112 thr_ret = pthread_mutex_unlock(&spin);
113 g_assert (thr_ret == 0);
115 pthread_cleanup_pop (0);
120 gint32 InterlockedIncrement(volatile gint32 *dest)
125 pthread_cleanup_push ((void(*)(void *))pthread_mutex_unlock,
127 thr_ret = pthread_mutex_lock(&spin);
128 g_assert (thr_ret == 0);
133 thr_ret = pthread_mutex_unlock(&spin);
134 g_assert (thr_ret == 0);
136 pthread_cleanup_pop (0);
141 gint64 InterlockedIncrement64(volatile gint64 *dest)
146 pthread_cleanup_push ((void(*)(void *))pthread_mutex_unlock,
148 thr_ret = pthread_mutex_lock(&spin);
149 g_assert (thr_ret == 0);
154 thr_ret = pthread_mutex_unlock(&spin);
155 g_assert (thr_ret == 0);
157 pthread_cleanup_pop (0);
162 gint32 InterlockedDecrement(volatile gint32 *dest)
167 pthread_cleanup_push ((void(*)(void *))pthread_mutex_unlock,
169 thr_ret = pthread_mutex_lock(&spin);
170 g_assert (thr_ret == 0);
175 thr_ret = pthread_mutex_unlock(&spin);
176 g_assert (thr_ret == 0);
178 pthread_cleanup_pop (0);
183 gint64 InterlockedDecrement64(volatile gint64 *dest)
188 pthread_cleanup_push ((void(*)(void *))pthread_mutex_unlock,
190 thr_ret = pthread_mutex_lock(&spin);
191 g_assert (thr_ret == 0);
196 thr_ret = pthread_mutex_unlock(&spin);
197 g_assert (thr_ret == 0);
199 pthread_cleanup_pop (0);
204 gint32 InterlockedExchange(volatile gint32 *dest, gint32 exch)
209 pthread_cleanup_push ((void(*)(void *))pthread_mutex_unlock,
211 thr_ret = pthread_mutex_lock(&spin);
212 g_assert (thr_ret == 0);
217 thr_ret = pthread_mutex_unlock(&spin);
218 g_assert (thr_ret == 0);
220 pthread_cleanup_pop (0);
225 gint64 InterlockedExchange64(volatile gint64 *dest, gint64 exch)
230 pthread_cleanup_push ((void(*)(void *))pthread_mutex_unlock,
232 thr_ret = pthread_mutex_lock(&spin);
233 g_assert (thr_ret == 0);
238 thr_ret = pthread_mutex_unlock(&spin);
239 g_assert (thr_ret == 0);
241 pthread_cleanup_pop (0);
246 gpointer InterlockedExchangePointer(volatile gpointer *dest, gpointer exch)
251 pthread_cleanup_push ((void(*)(void *))pthread_mutex_unlock,
253 thr_ret = pthread_mutex_lock(&spin);
254 g_assert (thr_ret == 0);
259 thr_ret = pthread_mutex_unlock(&spin);
260 g_assert (thr_ret == 0);
262 pthread_cleanup_pop (0);
267 gint32 InterlockedExchangeAdd(volatile gint32 *dest, gint32 add)
272 pthread_cleanup_push ((void(*)(void *))pthread_mutex_unlock,
274 thr_ret = pthread_mutex_lock(&spin);
275 g_assert (thr_ret == 0);
280 thr_ret = pthread_mutex_unlock(&spin);
281 g_assert (thr_ret == 0);
283 pthread_cleanup_pop (0);
288 gint64 InterlockedExchangeAdd64(volatile gint64 *dest, gint64 add)
293 pthread_cleanup_push ((void(*)(void *))pthread_mutex_unlock,
295 thr_ret = pthread_mutex_lock(&spin);
296 g_assert (thr_ret == 0);
301 thr_ret = pthread_mutex_unlock(&spin);
302 g_assert (thr_ret == 0);
304 pthread_cleanup_pop (0);
309 gint8 InterlockedRead8(volatile gint8 *src)
314 pthread_cleanup_push ((void(*)(void *))pthread_mutex_unlock,
316 thr_ret = pthread_mutex_lock(&spin);
317 g_assert (thr_ret == 0);
321 thr_ret = pthread_mutex_unlock(&spin);
322 g_assert (thr_ret == 0);
324 pthread_cleanup_pop (0);
329 gint16 InterlockedRead16(volatile gint16 *src)
334 pthread_cleanup_push ((void(*)(void *))pthread_mutex_unlock,
336 thr_ret = pthread_mutex_lock(&spin);
337 g_assert (thr_ret == 0);
341 thr_ret = pthread_mutex_unlock(&spin);
342 g_assert (thr_ret == 0);
344 pthread_cleanup_pop (0);
349 gint32 InterlockedRead(volatile gint32 *src)
354 pthread_cleanup_push ((void(*)(void *))pthread_mutex_unlock,
356 thr_ret = pthread_mutex_lock(&spin);
357 g_assert (thr_ret == 0);
361 thr_ret = pthread_mutex_unlock(&spin);
362 g_assert (thr_ret == 0);
364 pthread_cleanup_pop (0);
369 gint64 InterlockedRead64(volatile gint64 *src)
374 pthread_cleanup_push ((void(*)(void *))pthread_mutex_unlock,
376 thr_ret = pthread_mutex_lock(&spin);
377 g_assert (thr_ret == 0);
381 thr_ret = pthread_mutex_unlock(&spin);
382 g_assert (thr_ret == 0);
384 pthread_cleanup_pop (0);
389 gpointer InterlockedReadPointer(volatile gpointer *src)
394 pthread_cleanup_push ((void(*)(void *))pthread_mutex_unlock,
396 thr_ret = pthread_mutex_lock(&spin);
397 g_assert (thr_ret == 0);
401 thr_ret = pthread_mutex_unlock(&spin);
402 g_assert (thr_ret == 0);
404 pthread_cleanup_pop (0);
409 void InterlockedWrite8(volatile gint8 *dst, gint8 val)
413 pthread_cleanup_push ((void(*)(void *))pthread_mutex_unlock,
415 thr_ret = pthread_mutex_lock(&spin);
416 g_assert (thr_ret == 0);
420 thr_ret = pthread_mutex_unlock(&spin);
421 g_assert (thr_ret == 0);
423 pthread_cleanup_pop (0);
426 void InterlockedWrite16(volatile gint16 *dst, gint16 val)
430 pthread_cleanup_push ((void(*)(void *))pthread_mutex_unlock,
432 thr_ret = pthread_mutex_lock(&spin);
433 g_assert (thr_ret == 0);
437 thr_ret = pthread_mutex_unlock(&spin);
438 g_assert (thr_ret == 0);
440 pthread_cleanup_pop (0);
443 void InterlockedWrite(volatile gint32 *dst, gint32 val)
447 pthread_cleanup_push ((void(*)(void *))pthread_mutex_unlock,
449 thr_ret = pthread_mutex_lock(&spin);
450 g_assert (thr_ret == 0);
454 thr_ret = pthread_mutex_unlock(&spin);
455 g_assert (thr_ret == 0);
457 pthread_cleanup_pop (0);
460 void InterlockedWrite64(volatile gint64 *dst, gint64 val)
464 pthread_cleanup_push ((void(*)(void *))pthread_mutex_unlock,
466 thr_ret = pthread_mutex_lock(&spin);
467 g_assert (thr_ret == 0);
471 thr_ret = pthread_mutex_unlock(&spin);
472 g_assert (thr_ret == 0);
474 pthread_cleanup_pop (0);
477 void InterlockedWritePointer(volatile gpointer *dst, gpointer val)
481 pthread_cleanup_push ((void(*)(void *))pthread_mutex_unlock,
483 thr_ret = pthread_mutex_lock(&spin);
484 g_assert (thr_ret == 0);
488 thr_ret = pthread_mutex_unlock(&spin);
489 g_assert (thr_ret == 0);
491 pthread_cleanup_pop (0);
496 #if defined (NEED_64BIT_CMPXCHG_FALLBACK)
/* Platform-specific 64-bit compare-and-swap fallbacks, selected at
 * compile time.  NOTE(review): this region is heavily truncated in this
 * view — the matching #endif lines and most of each body are missing;
 * confirm against the full file before changing anything here. */
498 #if defined (TARGET_OSX)
500 /* The compiler breaks if this code is in the header... */
/* OSX: defer to the GCC/clang __sync 64-bit CAS builtin. */
503 InterlockedCompareExchange64(volatile gint64 *dest, gint64 exch, gint64 comp)
505 return __sync_val_compare_and_swap (dest, comp, exch);
508 #elif defined (__arm__) && defined (HAVE_ARMV7) && (defined(TARGET_IOS) || defined(TARGET_WATCHOS) || defined(TARGET_ANDROID))
510 #if defined (TARGET_IOS) || defined (TARGET_WATCHOS)
/* Presumably guarding against toolchains lacking the builtin —
 * the surrounding condition is not visible here; confirm. */
513 #error "Not supported."
/* iOS/watchOS on ARMv7: also use the __sync builtin. */
517 InterlockedCompareExchange64(volatile gint64 *dest, gint64 exch, gint64 comp)
519 return __sync_val_compare_and_swap (dest, comp, exch);
522 #elif defined (TARGET_ANDROID)
524 /* Some Android systems can't find the 64-bit CAS intrinsic at runtime,
525 * so we have to roll our own...
/* Hand-written ARMv7 exclusive-load/store (ldrexd/strexd) CAS loop;
 * declared __naked__ because the asm manages the stack frame and the
 * return itself.  Several asm lines are missing from this view. */
528 gint64 InterlockedCompareExchange64(volatile gint64 *dest, gint64 exch, gint64 comp) __attribute__ ((__naked__));
531 InterlockedCompareExchange64(volatile gint64 *dest, gint64 exch, gint64 comp)
534 "push {r4, r5, r6, r7}\n"
535 "ldrd r4, [sp, #16]\n"
542 "strexd r1, r2, [r0]\n"
549 "pop {r4, r5, r6, r7}\n"
/* No known 64-bit CAS strategy for this target. */
556 #error "Need a 64-bit CAS fallback!"
563 InterlockedCompareExchange64(volatile gint64 *dest, gint64 exch, gint64 comp)
568 pthread_cleanup_push ((void(*)(void *))pthread_mutex_unlock,
570 ret = pthread_mutex_lock(&spin);
578 ret = pthread_mutex_unlock(&spin);
581 pthread_cleanup_pop (0);
/* When real atomic asm exists and no fallback was needed, nothing above
 * was compiled; emit a dummy symbol so this translation unit is not
 * empty — presumably to silence empty-object warnings (confirm against
 * the MONO_EMPTY_SOURCE_FILE definition).  The matching #endif is
 * outside this view. */
589 #if !defined (WAPI_NO_ATOMIC_ASM) && !defined (BROKEN_64BIT_ATOMICS_INTRINSIC) && !defined (NEED_64BIT_CMPXCHG_FALLBACK)
590 MONO_EMPTY_SOURCE_FILE (atomic);