2f5e8a8a673a8688e78c577b57b7c194effaad76
[mono.git] / mono / utils / atomic.c
1 /*
2  * atomic.c:  Workarounds for atomic operations for platforms that dont have
3  *            really atomic asm functions in atomic.h
4  *
5  * Author:
6  *      Dick Porter (dick@ximian.com)
7  *
8  * (C) 2002 Ximian, Inc.
9  */
10
11 #include <config.h>
12 #include <glib.h>
13
14 #include <mono/utils/atomic.h>
15
#if defined (WAPI_NO_ATOMIC_ASM) || defined (BROKEN_64BIT_ATOMICS_INTRINSIC)

#include <pthread.h>

/* Single process-wide lock that serialises every fallback "atomic"
 * operation in this file.  Correct but slow: all fallback atomics,
 * of every width, contend on this one mutex. */
static pthread_mutex_t spin G_GNUC_UNUSED = PTHREAD_MUTEX_INITIALIZER;

/* Tells the bottom of this file to emit a mutex-based
 * InterlockedCompareExchange64 when no native 64-bit CAS is usable. */
#define NEED_64BIT_CMPXCHG_FALLBACK

#endif
25
26 #ifdef WAPI_NO_ATOMIC_ASM
27
28 gint32 InterlockedCompareExchange(volatile gint32 *dest, gint32 exch,
29                                   gint32 comp)
30 {
31         gint32 old;
32         int ret;
33         
34         pthread_cleanup_push ((void(*)(void *))pthread_mutex_unlock,
35                               (void *)&spin);
36         ret = pthread_mutex_lock(&spin);
37         g_assert (ret == 0);
38         
39         old= *dest;
40         if(old==comp) {
41                 *dest=exch;
42         }
43         
44         ret = pthread_mutex_unlock(&spin);
45         g_assert (ret == 0);
46         
47         pthread_cleanup_pop (0);
48
49         return(old);
50 }
51
52 gpointer InterlockedCompareExchangePointer(volatile gpointer *dest,
53                                            gpointer exch, gpointer comp)
54 {
55         gpointer old;
56         int ret;
57         
58         pthread_cleanup_push ((void(*)(void *))pthread_mutex_unlock,
59                               (void *)&spin);
60         ret = pthread_mutex_lock(&spin);
61         g_assert (ret == 0);
62         
63         old= *dest;
64         if(old==comp) {
65                 *dest=exch;
66         }
67         
68         ret = pthread_mutex_unlock(&spin);
69         g_assert (ret == 0);
70         
71         pthread_cleanup_pop (0);
72
73         return(old);
74 }
75
76 gint32 InterlockedAdd(volatile gint32 *dest, gint32 add)
77 {
78         gint32 ret;
79         int thr_ret;
80
81         pthread_cleanup_push ((void(*)(void *))pthread_mutex_unlock,
82                               (void *)&spin);
83         thr_ret = pthread_mutex_lock(&spin);
84         g_assert (thr_ret == 0);
85
86         *dest += add;
87         ret= *dest;
88
89         thr_ret = pthread_mutex_unlock(&spin);
90         g_assert (thr_ret == 0);
91
92         pthread_cleanup_pop (0);
93
94         return(ret);
95 }
96
97 gint64 InterlockedAdd64(volatile gint64 *dest, gint64 add)
98 {
99         gint64 ret;
100         int thr_ret;
101
102         pthread_cleanup_push ((void(*)(void *))pthread_mutex_unlock,
103                               (void *)&spin);
104         thr_ret = pthread_mutex_lock(&spin);
105         g_assert (thr_ret == 0);
106
107         *dest += add;
108         ret= *dest;
109
110         thr_ret = pthread_mutex_unlock(&spin);
111         g_assert (thr_ret == 0);
112
113         pthread_cleanup_pop (0);
114
115         return(ret);
116 }
117
118 gint32 InterlockedIncrement(volatile gint32 *dest)
119 {
120         gint32 ret;
121         int thr_ret;
122         
123         pthread_cleanup_push ((void(*)(void *))pthread_mutex_unlock,
124                               (void *)&spin);
125         thr_ret = pthread_mutex_lock(&spin);
126         g_assert (thr_ret == 0);
127
128         (*dest)++;
129         ret= *dest;
130         
131         thr_ret = pthread_mutex_unlock(&spin);
132         g_assert (thr_ret == 0);
133         
134         pthread_cleanup_pop (0);
135         
136         return(ret);
137 }
138
139 gint64 InterlockedIncrement64(volatile gint64 *dest)
140 {
141         gint64 ret;
142         int thr_ret;
143
144         pthread_cleanup_push ((void(*)(void *))pthread_mutex_unlock,
145                               (void *)&spin);
146         thr_ret = pthread_mutex_lock(&spin);
147         g_assert (thr_ret == 0);
148
149         (*dest)++;
150         ret= *dest;
151
152         thr_ret = pthread_mutex_unlock(&spin);
153         g_assert (thr_ret == 0);
154
155         pthread_cleanup_pop (0);
156
157         return(ret);
158 }
159
160 gint32 InterlockedDecrement(volatile gint32 *dest)
161 {
162         gint32 ret;
163         int thr_ret;
164         
165         pthread_cleanup_push ((void(*)(void *))pthread_mutex_unlock,
166                               (void *)&spin);
167         thr_ret = pthread_mutex_lock(&spin);
168         g_assert (thr_ret == 0);
169         
170         (*dest)--;
171         ret= *dest;
172         
173         thr_ret = pthread_mutex_unlock(&spin);
174         g_assert (thr_ret == 0);
175         
176         pthread_cleanup_pop (0);
177         
178         return(ret);
179 }
180
181 gint64 InterlockedDecrement64(volatile gint64 *dest)
182 {
183         gint64 ret;
184         int thr_ret;
185
186         pthread_cleanup_push ((void(*)(void *))pthread_mutex_unlock,
187                               (void *)&spin);
188         thr_ret = pthread_mutex_lock(&spin);
189         g_assert (thr_ret == 0);
190
191         (*dest)--;
192         ret= *dest;
193
194         thr_ret = pthread_mutex_unlock(&spin);
195         g_assert (thr_ret == 0);
196
197         pthread_cleanup_pop (0);
198
199         return(ret);
200 }
201
202 gint32 InterlockedExchange(volatile gint32 *dest, gint32 exch)
203 {
204         gint32 ret;
205         int thr_ret;
206         
207         pthread_cleanup_push ((void(*)(void *))pthread_mutex_unlock,
208                               (void *)&spin);
209         thr_ret = pthread_mutex_lock(&spin);
210         g_assert (thr_ret == 0);
211
212         ret=*dest;
213         *dest=exch;
214         
215         thr_ret = pthread_mutex_unlock(&spin);
216         g_assert (thr_ret == 0);
217         
218         pthread_cleanup_pop (0);
219         
220         return(ret);
221 }
222
223 gint64 InterlockedExchange64(volatile gint64 *dest, gint64 exch)
224 {
225         gint64 ret;
226         int thr_ret;
227
228         pthread_cleanup_push ((void(*)(void *))pthread_mutex_unlock,
229                               (void *)&spin);
230         thr_ret = pthread_mutex_lock(&spin);
231         g_assert (thr_ret == 0);
232
233         ret=*dest;
234         *dest=exch;
235
236         thr_ret = pthread_mutex_unlock(&spin);
237         g_assert (thr_ret == 0);
238
239         pthread_cleanup_pop (0);
240
241         return(ret);
242 }
243
244 gpointer InterlockedExchangePointer(volatile gpointer *dest, gpointer exch)
245 {
246         gpointer ret;
247         int thr_ret;
248         
249         pthread_cleanup_push ((void(*)(void *))pthread_mutex_unlock,
250                               (void *)&spin);
251         thr_ret = pthread_mutex_lock(&spin);
252         g_assert (thr_ret == 0);
253         
254         ret=*dest;
255         *dest=exch;
256         
257         thr_ret = pthread_mutex_unlock(&spin);
258         g_assert (thr_ret == 0);
259         
260         pthread_cleanup_pop (0);
261         
262         return(ret);
263 }
264
265 gint32 InterlockedExchangeAdd(volatile gint32 *dest, gint32 add)
266 {
267         gint32 ret;
268         int thr_ret;
269         
270         pthread_cleanup_push ((void(*)(void *))pthread_mutex_unlock,
271                               (void *)&spin);
272         thr_ret = pthread_mutex_lock(&spin);
273         g_assert (thr_ret == 0);
274
275         ret= *dest;
276         *dest+=add;
277         
278         thr_ret = pthread_mutex_unlock(&spin);
279         g_assert (thr_ret == 0);
280
281         pthread_cleanup_pop (0);
282
283         return(ret);
284 }
285
286 gint64 InterlockedExchangeAdd64(volatile gint64 *dest, gint64 add)
287 {
288         gint64 ret;
289         int thr_ret;
290         
291         pthread_cleanup_push ((void(*)(void *))pthread_mutex_unlock,
292                               (void *)&spin);
293         thr_ret = pthread_mutex_lock(&spin);
294         g_assert (thr_ret == 0);
295
296         ret= *dest;
297         *dest+=add;
298         
299         thr_ret = pthread_mutex_unlock(&spin);
300         g_assert (thr_ret == 0);
301
302         pthread_cleanup_pop (0);
303
304         return(ret);
305 }
306
307 gint8 InterlockedRead8(volatile gint8 *src)
308 {
309         gint8 ret;
310         int thr_ret;
311         
312         pthread_cleanup_push ((void(*)(void *))pthread_mutex_unlock,
313                               (void *)&spin);
314         thr_ret = pthread_mutex_lock(&spin);
315         g_assert (thr_ret == 0);
316
317         ret= *src;
318         
319         thr_ret = pthread_mutex_unlock(&spin);
320         g_assert (thr_ret == 0);
321
322         pthread_cleanup_pop (0);
323
324         return(ret);
325 }
326
327 gint16 InterlockedRead16(volatile gint16 *src)
328 {
329         gint16 ret;
330         int thr_ret;
331         
332         pthread_cleanup_push ((void(*)(void *))pthread_mutex_unlock,
333                               (void *)&spin);
334         thr_ret = pthread_mutex_lock(&spin);
335         g_assert (thr_ret == 0);
336
337         ret= *src;
338         
339         thr_ret = pthread_mutex_unlock(&spin);
340         g_assert (thr_ret == 0);
341
342         pthread_cleanup_pop (0);
343
344         return(ret);
345 }
346
347 gint32 InterlockedRead(volatile gint32 *src)
348 {
349         gint32 ret;
350         int thr_ret;
351         
352         pthread_cleanup_push ((void(*)(void *))pthread_mutex_unlock,
353                               (void *)&spin);
354         thr_ret = pthread_mutex_lock(&spin);
355         g_assert (thr_ret == 0);
356
357         ret= *src;
358         
359         thr_ret = pthread_mutex_unlock(&spin);
360         g_assert (thr_ret == 0);
361
362         pthread_cleanup_pop (0);
363
364         return(ret);
365 }
366
367 gint64 InterlockedRead64(volatile gint64 *src)
368 {
369         gint64 ret;
370         int thr_ret;
371         
372         pthread_cleanup_push ((void(*)(void *))pthread_mutex_unlock,
373                               (void *)&spin);
374         thr_ret = pthread_mutex_lock(&spin);
375         g_assert (thr_ret == 0);
376
377         ret= *src;
378         
379         thr_ret = pthread_mutex_unlock(&spin);
380         g_assert (thr_ret == 0);
381
382         pthread_cleanup_pop (0);
383
384         return(ret);
385 }
386
387 gpointer InterlockedReadPointer(volatile gpointer *src)
388 {
389         gpointer ret;
390         int thr_ret;
391         
392         pthread_cleanup_push ((void(*)(void *))pthread_mutex_unlock,
393                               (void *)&spin);
394         thr_ret = pthread_mutex_lock(&spin);
395         g_assert (thr_ret == 0);
396
397         ret= *src;
398         
399         thr_ret = pthread_mutex_unlock(&spin);
400         g_assert (thr_ret == 0);
401
402         pthread_cleanup_pop (0);
403
404         return(ret);
405 }
406
407 void InterlockedWrite(volatile gint8 *dst, gint8 val)
408 {
409         int thr_ret;
410         
411         pthread_cleanup_push ((void(*)(void *))pthread_mutex_unlock,
412                               (void *)&spin);
413         thr_ret = pthread_mutex_lock(&spin);
414         g_assert (thr_ret == 0);
415
416         *dst=val;
417         
418         thr_ret = pthread_mutex_unlock(&spin);
419         g_assert (thr_ret == 0);
420         
421         pthread_cleanup_pop (0);
422 }
423
424 void InterlockedWrite16(volatile gint16 *dst, gint16 val)
425 {
426         int thr_ret;
427         
428         pthread_cleanup_push ((void(*)(void *))pthread_mutex_unlock,
429                               (void *)&spin);
430         thr_ret = pthread_mutex_lock(&spin);
431         g_assert (thr_ret == 0);
432
433         *dst=val;
434         
435         thr_ret = pthread_mutex_unlock(&spin);
436         g_assert (thr_ret == 0);
437         
438         pthread_cleanup_pop (0);
439 }
440
441 void InterlockedWrite(volatile gint32 *dst, gint32 val)
442 {
443         int thr_ret;
444         
445         pthread_cleanup_push ((void(*)(void *))pthread_mutex_unlock,
446                               (void *)&spin);
447         thr_ret = pthread_mutex_lock(&spin);
448         g_assert (thr_ret == 0);
449
450         *dst=val;
451         
452         thr_ret = pthread_mutex_unlock(&spin);
453         g_assert (thr_ret == 0);
454         
455         pthread_cleanup_pop (0);
456 }
457
458 void InterlockedWrite64(volatile gint64 *dst, gint64 val)
459 {
460         int thr_ret;
461         
462         pthread_cleanup_push ((void(*)(void *))pthread_mutex_unlock,
463                               (void *)&spin);
464         thr_ret = pthread_mutex_lock(&spin);
465         g_assert (thr_ret == 0);
466
467         *dst=val;
468         
469         thr_ret = pthread_mutex_unlock(&spin);
470         g_assert (thr_ret == 0);
471         
472         pthread_cleanup_pop (0);
473 }
474
475 void InterlockedWritePointer(volatile gpointer *dst, gpointer val)
476 {
477         int thr_ret;
478         
479         pthread_cleanup_push ((void(*)(void *))pthread_mutex_unlock,
480                               (void *)&spin);
481         thr_ret = pthread_mutex_lock(&spin);
482         g_assert (thr_ret == 0);
483
484         *dst=val;
485         
486         thr_ret = pthread_mutex_unlock(&spin);
487         g_assert (thr_ret == 0);
488         
489         pthread_cleanup_pop (0);
490 }
491
492 #endif
493
494 #if defined (NEED_64BIT_CMPXCHG_FALLBACK)
495
496 #if defined (TARGET_OSX)
497
498 /* The compiler breaks if this code is in the header... */
499
500 gint64
501 InterlockedCompareExchange64(volatile gint64 *dest, gint64 exch, gint64 comp)
502 {
503         return __sync_val_compare_and_swap (dest, comp, exch);
504 }
505
506 #elif defined (__arm__) && defined (HAVE_ARMV7) && (defined(TARGET_IOS) || defined(TARGET_WATCHOS) || defined(TARGET_ANDROID))
507
508 #if defined (TARGET_IOS) || defined (TARGET_WATCHOS)
509
510 #ifndef __clang__
511 #error "Not supported."
512 #endif
513
514 gint64
515 InterlockedCompareExchange64(volatile gint64 *dest, gint64 exch, gint64 comp)
516 {
517         return  __sync_val_compare_and_swap (dest, comp, exch);
518 }
519
520 #elif defined (TARGET_ANDROID)
521
522 /* Some Android systems can't find the 64-bit CAS intrinsic at runtime,
523  * so we have to roll our own...
524  */
525
gint64 InterlockedCompareExchange64(volatile gint64 *dest, gint64 exch, gint64 comp) __attribute__ ((naked));

/*
 * Hand-written ARMv7 64-bit compare-and-swap using an LDREXD/STREXD
 * exclusive-access loop.  The function is `naked`, so the asm below is
 * the entire body and must manage the stack itself.
 *
 * NOTE(review): register usage appears to follow AAPCS — dest in r0,
 * exch in r2:r3, comp spilled to the stack (hence the [sp, #16] load
 * after pushing 16 bytes of callee-saved registers) — confirm against
 * the ABI before touching this.
 */
gint64
InterlockedCompareExchange64(volatile gint64 *dest, gint64 exch, gint64 comp)
{
        __asm__ (
                "push           {r4, r5, r6, r7}\n"
                "ldrd           r4, [sp, #16]\n"   /* r4:r5 = comp (stack argument) */
                "dmb            sy\n"              /* full barrier before the CAS */
        "1:\n"
                "ldrexd         r6, [r0]\n"        /* r6:r7 = current *dest, exclusive */
                "cmp            r7, r5\n"          /* compare high words... */
                "cmpeq          r6, r4\n"          /* ...then low words if high matched */
                "bne            2f\n"              /* mismatch: leave *dest untouched */
                "strexd         r1, r2, [r0]\n"    /* try to store exch (r2:r3); r1 = fail flag */
                "cmp            r1, #0\n"
                "bne            1b\n"              /* lost exclusivity: retry the loop */
        "2:\n"
                "dmb            sy\n"              /* full barrier after the CAS */
                "mov            r0, r6\n"          /* return the old value in r0:r1 */
                "mov            r1, r7\n"
                "pop            {r4, r5, r6, r7}\n"
                "bx                     lr\n"
        );
}
551
552 #else
553
554 #error "Need a 64-bit CAS fallback!"
555
556 #endif
557
558 #else
559
560 gint64
561 InterlockedCompareExchange64(volatile gint64 *dest, gint64 exch, gint64 comp)
562 {
563         gint64 old;
564         int ret;
565         
566         pthread_cleanup_push ((void(*)(void *))pthread_mutex_unlock,
567                               (void *)&spin);
568         ret = pthread_mutex_lock(&spin);
569         g_assert (ret == 0);
570         
571         old= *dest;
572         if(old==comp) {
573                 *dest=exch;
574         }
575         
576         ret = pthread_mutex_unlock(&spin);
577         g_assert (ret == 0);
578         
579         pthread_cleanup_pop (0);
580
581         return(old);
582 }
583
584 #endif
585 #endif
586
587 #if defined(HOST_WIN32) && defined(_MSC_VER)
588 // Quiet Visual Studio linker warning, LNK4221, in cases when this source file intentional ends up empty.
589 void __mono_win32_atomic_lnk4221(void) {}
590 #endif