Build mono runtime under non-desktop Windows API family; adjustments and cleanup.
[mono.git] / mono / utils / atomic.c
1 /*
2  * atomic.c:  Workarounds for atomic operations for platforms that don't have
3  *            really atomic asm functions in atomic.h
4  *
5  * Author:
6  *      Dick Porter (dick@ximian.com)
7  *
8  * (C) 2002 Ximian, Inc.
9  */
10
11 #include <config.h>
12 #include <glib.h>
13
14 #include <mono/utils/atomic.h>
15 #include <mono/utils/mono-compiler.h>
16
17 #if defined (WAPI_NO_ATOMIC_ASM) || defined (BROKEN_64BIT_ATOMICS_INTRINSIC)
18
19 #include <pthread.h>
20
21 static pthread_mutex_t spin G_GNUC_UNUSED = PTHREAD_MUTEX_INITIALIZER;
22
23 #define NEED_64BIT_CMPXCHG_FALLBACK
24
25 #endif
26
27 #ifdef WAPI_NO_ATOMIC_ASM
28
29 gint32 InterlockedCompareExchange(volatile gint32 *dest, gint32 exch,
30                                   gint32 comp)
31 {
32         gint32 old;
33         int ret;
34         
35         pthread_cleanup_push ((void(*)(void *))pthread_mutex_unlock,
36                               (void *)&spin);
37         ret = pthread_mutex_lock(&spin);
38         g_assert (ret == 0);
39         
40         old= *dest;
41         if(old==comp) {
42                 *dest=exch;
43         }
44         
45         ret = pthread_mutex_unlock(&spin);
46         g_assert (ret == 0);
47         
48         pthread_cleanup_pop (0);
49
50         return(old);
51 }
52
53 gpointer InterlockedCompareExchangePointer(volatile gpointer *dest,
54                                            gpointer exch, gpointer comp)
55 {
56         gpointer old;
57         int ret;
58         
59         pthread_cleanup_push ((void(*)(void *))pthread_mutex_unlock,
60                               (void *)&spin);
61         ret = pthread_mutex_lock(&spin);
62         g_assert (ret == 0);
63         
64         old= *dest;
65         if(old==comp) {
66                 *dest=exch;
67         }
68         
69         ret = pthread_mutex_unlock(&spin);
70         g_assert (ret == 0);
71         
72         pthread_cleanup_pop (0);
73
74         return(old);
75 }
76
77 gint32 InterlockedAdd(volatile gint32 *dest, gint32 add)
78 {
79         gint32 ret;
80         int thr_ret;
81
82         pthread_cleanup_push ((void(*)(void *))pthread_mutex_unlock,
83                               (void *)&spin);
84         thr_ret = pthread_mutex_lock(&spin);
85         g_assert (thr_ret == 0);
86
87         *dest += add;
88         ret= *dest;
89
90         thr_ret = pthread_mutex_unlock(&spin);
91         g_assert (thr_ret == 0);
92
93         pthread_cleanup_pop (0);
94
95         return(ret);
96 }
97
98 gint64 InterlockedAdd64(volatile gint64 *dest, gint64 add)
99 {
100         gint64 ret;
101         int thr_ret;
102
103         pthread_cleanup_push ((void(*)(void *))pthread_mutex_unlock,
104                               (void *)&spin);
105         thr_ret = pthread_mutex_lock(&spin);
106         g_assert (thr_ret == 0);
107
108         *dest += add;
109         ret= *dest;
110
111         thr_ret = pthread_mutex_unlock(&spin);
112         g_assert (thr_ret == 0);
113
114         pthread_cleanup_pop (0);
115
116         return(ret);
117 }
118
119 gint32 InterlockedIncrement(volatile gint32 *dest)
120 {
121         gint32 ret;
122         int thr_ret;
123         
124         pthread_cleanup_push ((void(*)(void *))pthread_mutex_unlock,
125                               (void *)&spin);
126         thr_ret = pthread_mutex_lock(&spin);
127         g_assert (thr_ret == 0);
128
129         (*dest)++;
130         ret= *dest;
131         
132         thr_ret = pthread_mutex_unlock(&spin);
133         g_assert (thr_ret == 0);
134         
135         pthread_cleanup_pop (0);
136         
137         return(ret);
138 }
139
140 gint64 InterlockedIncrement64(volatile gint64 *dest)
141 {
142         gint64 ret;
143         int thr_ret;
144
145         pthread_cleanup_push ((void(*)(void *))pthread_mutex_unlock,
146                               (void *)&spin);
147         thr_ret = pthread_mutex_lock(&spin);
148         g_assert (thr_ret == 0);
149
150         (*dest)++;
151         ret= *dest;
152
153         thr_ret = pthread_mutex_unlock(&spin);
154         g_assert (thr_ret == 0);
155
156         pthread_cleanup_pop (0);
157
158         return(ret);
159 }
160
161 gint32 InterlockedDecrement(volatile gint32 *dest)
162 {
163         gint32 ret;
164         int thr_ret;
165         
166         pthread_cleanup_push ((void(*)(void *))pthread_mutex_unlock,
167                               (void *)&spin);
168         thr_ret = pthread_mutex_lock(&spin);
169         g_assert (thr_ret == 0);
170         
171         (*dest)--;
172         ret= *dest;
173         
174         thr_ret = pthread_mutex_unlock(&spin);
175         g_assert (thr_ret == 0);
176         
177         pthread_cleanup_pop (0);
178         
179         return(ret);
180 }
181
182 gint64 InterlockedDecrement64(volatile gint64 *dest)
183 {
184         gint64 ret;
185         int thr_ret;
186
187         pthread_cleanup_push ((void(*)(void *))pthread_mutex_unlock,
188                               (void *)&spin);
189         thr_ret = pthread_mutex_lock(&spin);
190         g_assert (thr_ret == 0);
191
192         (*dest)--;
193         ret= *dest;
194
195         thr_ret = pthread_mutex_unlock(&spin);
196         g_assert (thr_ret == 0);
197
198         pthread_cleanup_pop (0);
199
200         return(ret);
201 }
202
203 gint32 InterlockedExchange(volatile gint32 *dest, gint32 exch)
204 {
205         gint32 ret;
206         int thr_ret;
207         
208         pthread_cleanup_push ((void(*)(void *))pthread_mutex_unlock,
209                               (void *)&spin);
210         thr_ret = pthread_mutex_lock(&spin);
211         g_assert (thr_ret == 0);
212
213         ret=*dest;
214         *dest=exch;
215         
216         thr_ret = pthread_mutex_unlock(&spin);
217         g_assert (thr_ret == 0);
218         
219         pthread_cleanup_pop (0);
220         
221         return(ret);
222 }
223
224 gint64 InterlockedExchange64(volatile gint64 *dest, gint64 exch)
225 {
226         gint64 ret;
227         int thr_ret;
228
229         pthread_cleanup_push ((void(*)(void *))pthread_mutex_unlock,
230                               (void *)&spin);
231         thr_ret = pthread_mutex_lock(&spin);
232         g_assert (thr_ret == 0);
233
234         ret=*dest;
235         *dest=exch;
236
237         thr_ret = pthread_mutex_unlock(&spin);
238         g_assert (thr_ret == 0);
239
240         pthread_cleanup_pop (0);
241
242         return(ret);
243 }
244
245 gpointer InterlockedExchangePointer(volatile gpointer *dest, gpointer exch)
246 {
247         gpointer ret;
248         int thr_ret;
249         
250         pthread_cleanup_push ((void(*)(void *))pthread_mutex_unlock,
251                               (void *)&spin);
252         thr_ret = pthread_mutex_lock(&spin);
253         g_assert (thr_ret == 0);
254         
255         ret=*dest;
256         *dest=exch;
257         
258         thr_ret = pthread_mutex_unlock(&spin);
259         g_assert (thr_ret == 0);
260         
261         pthread_cleanup_pop (0);
262         
263         return(ret);
264 }
265
266 gint32 InterlockedExchangeAdd(volatile gint32 *dest, gint32 add)
267 {
268         gint32 ret;
269         int thr_ret;
270         
271         pthread_cleanup_push ((void(*)(void *))pthread_mutex_unlock,
272                               (void *)&spin);
273         thr_ret = pthread_mutex_lock(&spin);
274         g_assert (thr_ret == 0);
275
276         ret= *dest;
277         *dest+=add;
278         
279         thr_ret = pthread_mutex_unlock(&spin);
280         g_assert (thr_ret == 0);
281
282         pthread_cleanup_pop (0);
283
284         return(ret);
285 }
286
287 gint64 InterlockedExchangeAdd64(volatile gint64 *dest, gint64 add)
288 {
289         gint64 ret;
290         int thr_ret;
291         
292         pthread_cleanup_push ((void(*)(void *))pthread_mutex_unlock,
293                               (void *)&spin);
294         thr_ret = pthread_mutex_lock(&spin);
295         g_assert (thr_ret == 0);
296
297         ret= *dest;
298         *dest+=add;
299         
300         thr_ret = pthread_mutex_unlock(&spin);
301         g_assert (thr_ret == 0);
302
303         pthread_cleanup_pop (0);
304
305         return(ret);
306 }
307
308 gint8 InterlockedRead8(volatile gint8 *src)
309 {
310         gint8 ret;
311         int thr_ret;
312         
313         pthread_cleanup_push ((void(*)(void *))pthread_mutex_unlock,
314                               (void *)&spin);
315         thr_ret = pthread_mutex_lock(&spin);
316         g_assert (thr_ret == 0);
317
318         ret= *src;
319         
320         thr_ret = pthread_mutex_unlock(&spin);
321         g_assert (thr_ret == 0);
322
323         pthread_cleanup_pop (0);
324
325         return(ret);
326 }
327
328 gint16 InterlockedRead16(volatile gint16 *src)
329 {
330         gint16 ret;
331         int thr_ret;
332         
333         pthread_cleanup_push ((void(*)(void *))pthread_mutex_unlock,
334                               (void *)&spin);
335         thr_ret = pthread_mutex_lock(&spin);
336         g_assert (thr_ret == 0);
337
338         ret= *src;
339         
340         thr_ret = pthread_mutex_unlock(&spin);
341         g_assert (thr_ret == 0);
342
343         pthread_cleanup_pop (0);
344
345         return(ret);
346 }
347
348 gint32 InterlockedRead(volatile gint32 *src)
349 {
350         gint32 ret;
351         int thr_ret;
352         
353         pthread_cleanup_push ((void(*)(void *))pthread_mutex_unlock,
354                               (void *)&spin);
355         thr_ret = pthread_mutex_lock(&spin);
356         g_assert (thr_ret == 0);
357
358         ret= *src;
359         
360         thr_ret = pthread_mutex_unlock(&spin);
361         g_assert (thr_ret == 0);
362
363         pthread_cleanup_pop (0);
364
365         return(ret);
366 }
367
368 gint64 InterlockedRead64(volatile gint64 *src)
369 {
370         gint64 ret;
371         int thr_ret;
372         
373         pthread_cleanup_push ((void(*)(void *))pthread_mutex_unlock,
374                               (void *)&spin);
375         thr_ret = pthread_mutex_lock(&spin);
376         g_assert (thr_ret == 0);
377
378         ret= *src;
379         
380         thr_ret = pthread_mutex_unlock(&spin);
381         g_assert (thr_ret == 0);
382
383         pthread_cleanup_pop (0);
384
385         return(ret);
386 }
387
388 gpointer InterlockedReadPointer(volatile gpointer *src)
389 {
390         gpointer ret;
391         int thr_ret;
392         
393         pthread_cleanup_push ((void(*)(void *))pthread_mutex_unlock,
394                               (void *)&spin);
395         thr_ret = pthread_mutex_lock(&spin);
396         g_assert (thr_ret == 0);
397
398         ret= *src;
399         
400         thr_ret = pthread_mutex_unlock(&spin);
401         g_assert (thr_ret == 0);
402
403         pthread_cleanup_pop (0);
404
405         return(ret);
406 }
407
408 void InterlockedWrite(volatile gint8 *dst, gint8 val)
409 {
410         int thr_ret;
411         
412         pthread_cleanup_push ((void(*)(void *))pthread_mutex_unlock,
413                               (void *)&spin);
414         thr_ret = pthread_mutex_lock(&spin);
415         g_assert (thr_ret == 0);
416
417         *dst=val;
418         
419         thr_ret = pthread_mutex_unlock(&spin);
420         g_assert (thr_ret == 0);
421         
422         pthread_cleanup_pop (0);
423 }
424
425 void InterlockedWrite16(volatile gint16 *dst, gint16 val)
426 {
427         int thr_ret;
428         
429         pthread_cleanup_push ((void(*)(void *))pthread_mutex_unlock,
430                               (void *)&spin);
431         thr_ret = pthread_mutex_lock(&spin);
432         g_assert (thr_ret == 0);
433
434         *dst=val;
435         
436         thr_ret = pthread_mutex_unlock(&spin);
437         g_assert (thr_ret == 0);
438         
439         pthread_cleanup_pop (0);
440 }
441
442 void InterlockedWrite(volatile gint32 *dst, gint32 val)
443 {
444         int thr_ret;
445         
446         pthread_cleanup_push ((void(*)(void *))pthread_mutex_unlock,
447                               (void *)&spin);
448         thr_ret = pthread_mutex_lock(&spin);
449         g_assert (thr_ret == 0);
450
451         *dst=val;
452         
453         thr_ret = pthread_mutex_unlock(&spin);
454         g_assert (thr_ret == 0);
455         
456         pthread_cleanup_pop (0);
457 }
458
459 void InterlockedWrite64(volatile gint64 *dst, gint64 val)
460 {
461         int thr_ret;
462         
463         pthread_cleanup_push ((void(*)(void *))pthread_mutex_unlock,
464                               (void *)&spin);
465         thr_ret = pthread_mutex_lock(&spin);
466         g_assert (thr_ret == 0);
467
468         *dst=val;
469         
470         thr_ret = pthread_mutex_unlock(&spin);
471         g_assert (thr_ret == 0);
472         
473         pthread_cleanup_pop (0);
474 }
475
476 void InterlockedWritePointer(volatile gpointer *dst, gpointer val)
477 {
478         int thr_ret;
479         
480         pthread_cleanup_push ((void(*)(void *))pthread_mutex_unlock,
481                               (void *)&spin);
482         thr_ret = pthread_mutex_lock(&spin);
483         g_assert (thr_ret == 0);
484
485         *dst=val;
486         
487         thr_ret = pthread_mutex_unlock(&spin);
488         g_assert (thr_ret == 0);
489         
490         pthread_cleanup_pop (0);
491 }
492
493 #endif
494
495 #if defined (NEED_64BIT_CMPXCHG_FALLBACK)
496
497 #if defined (TARGET_OSX)
498
499 /* The compiler breaks if this code is in the header... */
500
501 gint64
502 InterlockedCompareExchange64(volatile gint64 *dest, gint64 exch, gint64 comp)
503 {
504         return __sync_val_compare_and_swap (dest, comp, exch);
505 }
506
507 #elif defined (__arm__) && defined (HAVE_ARMV7) && (defined(TARGET_IOS) || defined(TARGET_WATCHOS) || defined(TARGET_ANDROID))
508
509 #if defined (TARGET_IOS) || defined (TARGET_WATCHOS)
510
511 #ifndef __clang__
512 #error "Not supported."
513 #endif
514
515 gint64
516 InterlockedCompareExchange64(volatile gint64 *dest, gint64 exch, gint64 comp)
517 {
518         return  __sync_val_compare_and_swap (dest, comp, exch);
519 }
520
521 #elif defined (TARGET_ANDROID)
522
523 /* Some Android systems can't find the 64-bit CAS intrinsic at runtime,
524  * so we have to roll our own...
525  */
526
527 gint64 InterlockedCompareExchange64(volatile gint64 *dest, gint64 exch, gint64 comp) __attribute__ ((naked));
528
529 gint64
530 InterlockedCompareExchange64(volatile gint64 *dest, gint64 exch, gint64 comp)
531 {
532         __asm__ (
533                 "push           {r4, r5, r6, r7}\n"
534                 "ldrd           r4, [sp, #16]\n"
535                 "dmb            sy\n"
536         "1:\n"
537                 "ldrexd         r6, [r0]\n"
538                 "cmp            r7, r5\n"
539                 "cmpeq          r6, r4\n"
540                 "bne            2f\n"
541                 "strexd         r1, r2, [r0]\n"
542                 "cmp            r1, #0\n"
543                 "bne            1b\n"
544         "2:\n"
545                 "dmb            sy\n"
546                 "mov            r0, r6\n"
547                 "mov            r1, r7\n"
548                 "pop            {r4, r5, r6, r7}\n"
549                 "bx                     lr\n"
550         );
551 }
552
553 #else
554
555 #error "Need a 64-bit CAS fallback!"
556
557 #endif
558
559 #else
560
561 gint64
562 InterlockedCompareExchange64(volatile gint64 *dest, gint64 exch, gint64 comp)
563 {
564         gint64 old;
565         int ret;
566         
567         pthread_cleanup_push ((void(*)(void *))pthread_mutex_unlock,
568                               (void *)&spin);
569         ret = pthread_mutex_lock(&spin);
570         g_assert (ret == 0);
571         
572         old= *dest;
573         if(old==comp) {
574                 *dest=exch;
575         }
576         
577         ret = pthread_mutex_unlock(&spin);
578         g_assert (ret == 0);
579         
580         pthread_cleanup_pop (0);
581
582         return(old);
583 }
584
585 #endif
586 #endif
587
588 #if !defined (WAPI_NO_ATOMIC_ASM) && !defined (BROKEN_64BIT_ATOMICS_INTRINSIC) && !defined (NEED_64BIT_CMPXCHG_FALLBACK)
589 MONO_EMPTY_SOURCE_FILE (atomic);
590 #endif