/*
 * atomic.h:  Atomic operations
 *
 * Author:
 *	Dick Porter (dick@ximian.com)
 *
 * (C) 2002 Ximian, Inc.
 * Copyright 2012 Xamarin Inc
 */

#ifndef _WAPI_ATOMIC_H_
#define _WAPI_ATOMIC_H_

#include "config.h"
#include <glib.h>

#ifdef ENABLE_EXTENSION_MODULE
#include "../../../mono-extensions/mono/utils/atomic.h"
#endif

/* On Windows, we always use the functions provided by the Windows API. */
#if defined(__WIN32__) || defined(_WIN32)

#include <windows.h>

/* mingw is missing InterlockedCompareExchange64 () from winbase.h */
#if HAVE_DECL_INTERLOCKEDCOMPAREEXCHANGE64==0
static inline gint64 InterlockedCompareExchange64(volatile gint64 *dest, gint64 exch, gint64 comp)
{
	return __sync_val_compare_and_swap (dest, comp, exch);
}
#endif

/* Prefer GCC atomic ops if the target supports them (see configure.in). */
#elif defined(USE_GCC_ATOMIC_OPS)

static inline gint32 InterlockedCompareExchange(volatile gint32 *dest,
						gint32 exch, gint32 comp)
{
	return __sync_val_compare_and_swap (dest, comp, exch);
}

static inline gpointer InterlockedCompareExchangePointer(volatile gpointer *dest, gpointer exch, gpointer comp)
{
	return __sync_val_compare_and_swap (dest, comp, exch);
}

static inline gint32 InterlockedIncrement(volatile gint32 *val)
{
	return __sync_add_and_fetch (val, 1);
}

static inline gint32 InterlockedDecrement(volatile gint32 *val)
{
	return __sync_add_and_fetch (val, -1);
}

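/*
 * The __sync family of builtins has no plain atomic exchange
 * (__sync_lock_test_and_set only guarantees an acquire barrier), so
 * exchange is emulated with a compare-and-swap loop.
 */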
static inline gint32 InterlockedExchange(volatile gint32 *val, gint32 new_val)
{
	gint32 old_val;
	do {
		old_val = *val;
	} while (__sync_val_compare_and_swap (val, old_val, new_val) != old_val);
	return old_val;
}

static inline gpointer InterlockedExchangePointer(volatile gpointer *val,
						  gpointer new_val)
{
	gpointer old_val;
	do {
		old_val = *val;
	} while (__sync_val_compare_and_swap (val, old_val, new_val) != old_val);
	return old_val;
}

static inline gint32 InterlockedExchangeAdd(volatile gint32 *val, gint32 add)
{
	return __sync_fetch_and_add (val, add);
}

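/*
 * The 64-bit compare-exchange intrinsic is considered broken on OS X:
 * when BROKEN_64BIT_ATOMICS_INTRINSIC is defined below,
 * InterlockedCompareExchange64 () is instead declared extern at the end
 * of this header and implemented out of line.
 */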
#if defined (TARGET_OSX)
#define BROKEN_64BIT_ATOMICS_INTRINSIC 1
#endif

#if !defined (BROKEN_64BIT_ATOMICS_INTRINSIC)

static inline gint64 InterlockedCompareExchange64(volatile gint64 *dest, gint64 exch, gint64 comp)
{
	return __sync_val_compare_and_swap (dest, comp, exch);
}

#endif

#elif (defined(sparc) || defined (__sparc__)) && defined(__GNUC__)

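/*
 * The cas/casx opcodes are emitted as raw .word values, presumably so
 * that the code assembles even when the assembler does not accept the
 * v9 mnemonics; the operands are pinned to %g1, %o4 and %o5 with
 * explicit register variables to match the hard-coded encodings.
 */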
G_GNUC_UNUSED
static inline gint32 InterlockedCompareExchange(volatile gint32 *_dest, gint32 _exch, gint32 _comp)
{
	register volatile gint32 *dest asm("g1") = _dest;
	register gint32 comp asm("o4") = _comp;
	register gint32 exch asm("o5") = _exch;

	__asm__ __volatile__(
		/* cas [%%g1], %%o4, %%o5 */
		".word 0xdbe0500c"
		: "=r" (exch)
		: "0" (exch), "r" (dest), "r" (comp)
		: "memory");

	return exch;
}

G_GNUC_UNUSED
static inline gpointer InterlockedCompareExchangePointer(volatile gpointer *_dest, gpointer _exch, gpointer _comp)
{
	register volatile gpointer *dest asm("g1") = _dest;
	register gpointer comp asm("o4") = _comp;
	register gpointer exch asm("o5") = _exch;

	__asm__ __volatile__(
#ifdef SPARCV9
		/* casx [%%g1], %%o4, %%o5 */
		".word 0xdbf0500c"
#else
		/* cas [%%g1], %%o4, %%o5 */
		".word 0xdbe0500c"
#endif
		: "=r" (exch)
		: "0" (exch), "r" (dest), "r" (comp)
		: "memory");

	return exch;
}

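/*
 * The increment/decrement loops below retry until the cas succeeds; the
 * add/sub placed in the branch delay slot turns the old value returned
 * by cas into the new value that is handed back to the caller.
 */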
G_GNUC_UNUSED
static inline gint32 InterlockedIncrement(volatile gint32 *_dest)
{
	register volatile gint32 *dest asm("g1") = _dest;
	register gint32 tmp asm("o4");
	register gint32 ret asm("o5");

	__asm__ __volatile__(
		"1:	ld	[%%g1], %%o4\n\t"
		"	add	%%o4, 1, %%o5\n\t"
		/*	cas	[%%g1], %%o4, %%o5 */
		"	.word	0xdbe0500c\n\t"
		"	cmp	%%o4, %%o5\n\t"
		"	bne	1b\n\t"
		"	 add	%%o5, 1, %%o5"
		: "=&r" (tmp), "=&r" (ret)
		: "r" (dest)
		: "memory", "cc");

	return ret;
}

G_GNUC_UNUSED
static inline gint32 InterlockedDecrement(volatile gint32 *_dest)
{
	register volatile gint32 *dest asm("g1") = _dest;
	register gint32 tmp asm("o4");
	register gint32 ret asm("o5");

	__asm__ __volatile__(
		"1:	ld	[%%g1], %%o4\n\t"
		"	sub	%%o4, 1, %%o5\n\t"
		/*	cas	[%%g1], %%o4, %%o5 */
		"	.word	0xdbe0500c\n\t"
		"	cmp	%%o4, %%o5\n\t"
		"	bne	1b\n\t"
		"	 sub	%%o5, 1, %%o5"
		: "=&r" (tmp), "=&r" (ret)
		: "r" (dest)
		: "memory", "cc");

	return ret;
}

G_GNUC_UNUSED
static inline gint32 InterlockedExchange(volatile gint32 *_dest, gint32 exch)
{
	register volatile gint32 *dest asm("g1") = _dest;
	register gint32 tmp asm("o4");
	register gint32 ret asm("o5");

	__asm__ __volatile__(
		"1:	ld	[%%g1], %%o4\n\t"
		"	mov	%3, %%o5\n\t"
		/*	cas	[%%g1], %%o4, %%o5 */
		"	.word	0xdbe0500c\n\t"
		"	cmp	%%o4, %%o5\n\t"
		"	bne	1b\n\t"
		"	 nop"
		: "=&r" (tmp), "=&r" (ret)
		: "r" (dest), "r" (exch)
		: "memory", "cc");

	return ret;
}

G_GNUC_UNUSED
static inline gpointer InterlockedExchangePointer(volatile gpointer *_dest, gpointer exch)
{
	register volatile gpointer *dest asm("g1") = _dest;
	register gpointer tmp asm("o4");
	register gpointer ret asm("o5");

	__asm__ __volatile__(
#ifdef SPARCV9
		"1:	ldx	[%%g1], %%o4\n\t"
#else
		"1:	ld	[%%g1], %%o4\n\t"
#endif
		"	mov	%3, %%o5\n\t"
#ifdef SPARCV9
		/*	casx	[%%g1], %%o4, %%o5 */
		"	.word	0xdbf0500c\n\t"
#else
		/*	cas	[%%g1], %%o4, %%o5 */
		"	.word	0xdbe0500c\n\t"
#endif
		"	cmp	%%o4, %%o5\n\t"
		"	bne	1b\n\t"
		"	 nop"
		: "=&r" (tmp), "=&r" (ret)
		: "r" (dest), "r" (exch)
		: "memory", "cc");

	return ret;
}

G_GNUC_UNUSED
static inline gint32 InterlockedExchangeAdd(volatile gint32 *_dest, gint32 add)
{
	register volatile gint32 *dest asm("g1") = _dest;
	register gint32 tmp asm("o4");
	register gint32 ret asm("o5");

	/* Unlike Increment/Decrement, this must return the value *_dest had
	 * before the addition; cas leaves that old value in %o5, so nothing
	 * is added in the delay slot. */
	__asm__ __volatile__(
		"1:	ld	[%%g1], %%o4\n\t"
		"	add	%%o4, %3, %%o5\n\t"
		/*	cas	[%%g1], %%o4, %%o5 */
		"	.word	0xdbe0500c\n\t"
		"	cmp	%%o4, %%o5\n\t"
		"	bne	1b\n\t"
		"	 nop"
		: "=&r" (tmp), "=&r" (ret)
		: "r" (dest), "r" (add)
		: "memory", "cc");

	return ret;
}

#elif defined(__ia64__)

#ifdef __INTEL_COMPILER
#include <ia64intrin.h>
#endif

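/*
 * On ia64 the comparand must be loaded into the ar.ccv application
 * register before cmpxchg is issued; the .acq completer gives the
 * operation acquire semantics.
 */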
static inline gint32 InterlockedCompareExchange(gint32 volatile *dest,
						gint32 exch, gint32 comp)
{
	gint32 old;
	guint64 real_comp;

#ifdef __INTEL_COMPILER
	old = _InterlockedCompareExchange (dest, exch, comp);
#else
	/* cmpxchg4 zero extends the value read from memory */
	real_comp = (guint64)(guint32)comp;
	asm volatile ("mov ar.ccv = %2 ;;\n\t"
		      "cmpxchg4.acq %0 = [%1], %3, ar.ccv\n\t"
		      : "=r" (old) : "r" (dest), "r" (real_comp), "r" (exch));
#endif

	return(old);
}

static inline gpointer InterlockedCompareExchangePointer(gpointer volatile *dest,
						gpointer exch, gpointer comp)
{
	gpointer old;

#ifdef __INTEL_COMPILER
	old = _InterlockedCompareExchangePointer (dest, exch, comp);
#else
	asm volatile ("mov ar.ccv = %2 ;;\n\t"
		      "cmpxchg8.acq %0 = [%1], %3, ar.ccv\n\t"
		      : "=r" (old) : "r" (dest), "r" (comp), "r" (exch));
#endif

	return(old);
}

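/*
 * The remaining operations are built on top of the compare-exchange
 * primitives above, except with the Intel compiler, which provides them
 * directly as intrinsics.
 */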
static inline gint32 InterlockedIncrement(gint32 volatile *val)
{
#ifdef __INTEL_COMPILER
	return _InterlockedIncrement (val);
#else
	gint32 old;

	do {
		old = *val;
	} while (InterlockedCompareExchange (val, old + 1, old) != old);

	return old + 1;
#endif
}

static inline gint32 InterlockedDecrement(gint32 volatile *val)
{
#ifdef __INTEL_COMPILER
	return _InterlockedDecrement (val);
#else
	gint32 old;

	do {
		old = *val;
	} while (InterlockedCompareExchange (val, old - 1, old) != old);

	return old - 1;
#endif
}

static inline gint32 InterlockedExchange(gint32 volatile *dest, gint32 new_val)
{
#ifdef __INTEL_COMPILER
	return _InterlockedExchange (dest, new_val);
#else
	gint32 res;

	do {
		res = *dest;
	} while (InterlockedCompareExchange (dest, new_val, res) != res);

	return res;
#endif
}

static inline gpointer InterlockedExchangePointer(gpointer volatile *dest, gpointer new_val)
{
#ifdef __INTEL_COMPILER
	return (gpointer)_InterlockedExchange64 ((gint64*)dest, (gint64)new_val);
#else
	gpointer res;

	do {
		res = *dest;
	} while (InterlockedCompareExchangePointer (dest, new_val, res) != res);

	return res;
#endif
}

static inline gint32 InterlockedExchangeAdd(gint32 volatile *val, gint32 add)
{
	gint32 old;

#ifdef __INTEL_COMPILER
	old = _InterlockedExchangeAdd (val, add);
#else
	do {
		old = *val;
	} while (InterlockedCompareExchange (val, old + add, old) != old);
#endif

	return old;
}

#else

#define WAPI_NO_ATOMIC_ASM

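/*
 * No inline implementation is available for this architecture/compiler
 * combination; the functions below are implemented out of line elsewhere
 * in the runtime (atomic.c).
 */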
extern gint32 InterlockedCompareExchange(volatile gint32 *dest, gint32 exch, gint32 comp);
extern gpointer InterlockedCompareExchangePointer(volatile gpointer *dest, gpointer exch, gpointer comp);
extern gint32 InterlockedIncrement(volatile gint32 *dest);
extern gint32 InterlockedDecrement(volatile gint32 *dest);
extern gint32 InterlockedExchange(volatile gint32 *dest, gint32 exch);
extern gpointer InterlockedExchangePointer(volatile gpointer *dest, gpointer exch);
extern gint32 InterlockedExchangeAdd(volatile gint32 *dest, gint32 add);

#endif

#if defined (WAPI_NO_ATOMIC_ASM) || defined (BROKEN_64BIT_ATOMICS_INTRINSIC)

extern gint64 InterlockedCompareExchange64(volatile gint64 *dest, gint64 exch, gint64 comp);

#endif

#endif /* _WAPI_ATOMIC_H_ */