2 * Copyright (c) 2003 Hewlett-Packard Development Company, L.P.
4 * Permission is hereby granted, free of charge, to any person obtaining a copy
5 * of this software and associated documentation files (the "Software"), to deal
6 * in the Software without restriction, including without limitation the rights
7 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
8 * copies of the Software, and to permit persons to whom the Software is
9 * furnished to do so, subject to the following conditions:
11 * The above copyright notice and this permission notice shall be included in
12 * all copies or substantial portions of the Software.
14 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
15 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
16 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
17 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
18 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
19 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
23 #include "../all_atomic_load_store.h"
25 #include "../all_acquire_release_volatile.h"
27 #include "../test_and_set_t_is_char.h"
30 /* 32-bit HP/UX code. */
31 /* This requires pointer "swizzling". Pointers need to be expanded */
32 /* to 64 bits using the addp4 instruction before use. This makes it */
33 /* hard to share code, but we try anyway. */
35 /* We assume that addr always appears in argument position 1 in asm */
36 /* code. If it is clobbered due to swizzling, we also need it in */
37 /* second position. Any later arguments are referenced symbolically, */
38 * so that we don't have to worry about their position. This requires */
39 /* gcc 3.1, but you shouldn't be using anything older than that on */
41 /* The AO_MASK macro is a workaround for the fact that HP/UX gcc */
42 /* appears to otherwise store 64-bit pointers in ar.ccv, i.e. it */
43 /* doesn't appear to clear high bits in a pointer value we pass into */
44 /* assembly code, even if it is supposedly of type AO_t. */
/* _ILP32 glue: pointers are 32 bits and must be "swizzled" (addp4)   */
/* up to 64 bits before use as an address in asm code (see comments   */
/* above).                                                            */
/* addr is passed as an input tied to output operand 1, since the     */
/* swizzle clobbers it.                                               */
45 # define AO_IN_ADDR "1"(addr)
/* Extra output operand for the (clobbered) swizzled addr.            */
46 # define AO_OUT_ADDR , "=r"(addr)
/* Prepended to asm bodies: expand the 32-bit addr to 64 bits.        */
47 # define AO_SWIZZLE "addp4 %1=0,%1;;\n"
/* zxt4 zero-extends the low 32 bits of ptr, clearing the high bits   */
/* that HP/UX gcc otherwise fails to clear (see comment above).       */
48 # define AO_MASK(ptr) __asm__ __volatile__("zxt4 %1=%1": "=r"(ptr) : "0"(ptr))
/* LP64 glue: no swizzling needed; addr is a plain register input.    */
51 # define AO_IN_ADDR "r"(addr)
/* No high-bit masking required in this case.                         */
54 # define AO_MASK(ptr) /* empty */
/* Full memory barrier: "mf" is the IA-64 memory-fence instruction;   */
/* the "memory" clobber also stops compiler reordering across it.     */
60 __asm__ __volatile__("mf" : : : "memory");
62 #define AO_HAVE_nop_full
/* Atomically add 1 to *addr with acquire semantics; yields the value */
/* held at *addr before the increment (fetchadd...acq).  AO_LEN is    */
/* presumably "4" or "8" depending on AO_t size — defined outside     */
/* this chunk; TODO confirm.                                          */
65 AO_fetch_and_add1_acquire (volatile AO_t *addr)
69 __asm__ __volatile__ (AO_SWIZZLE
70 "fetchadd" AO_LEN ".acq %0=[%1],1":
71 "=r" (result) AO_OUT_ADDR: AO_IN_ADDR :"memory");
74 #define AO_HAVE_fetch_and_add1_acquire
/* Atomically add 1 to *addr with release semantics; yields the value */
/* held at *addr before the increment (fetchadd...rel).               */
77 AO_fetch_and_add1_release (volatile AO_t *addr)
81 __asm__ __volatile__ (AO_SWIZZLE
82 "fetchadd" AO_LEN ".rel %0=[%1],1":
83 "=r" (result) AO_OUT_ADDR: AO_IN_ADDR :"memory");
86 #define AO_HAVE_fetch_and_add1_release
/* Atomically subtract 1 from *addr with acquire semantics; yields    */
/* the value held at *addr before the decrement (fetchadd of -1).     */
89 AO_fetch_and_sub1_acquire (volatile AO_t *addr)
93 __asm__ __volatile__ (AO_SWIZZLE
94 "fetchadd" AO_LEN ".acq %0=[%1],-1":
95 "=r" (result) AO_OUT_ADDR: AO_IN_ADDR :"memory");
98 #define AO_HAVE_fetch_and_sub1_acquire
/* Atomically subtract 1 from *addr with release semantics; yields    */
/* the value held at *addr before the decrement.                      */
101 AO_fetch_and_sub1_release (volatile AO_t *addr)
105 __asm__ __volatile__ (AO_SWIZZLE
106 "fetchadd" AO_LEN ".rel %0=[%1],-1":
107 "=r" (result) AO_OUT_ADDR: AO_IN_ADDR :"memory");
110 #define AO_HAVE_fetch_and_sub1_release
/* 4-byte (unsigned int) variant: atomically add 1 with acquire       */
/* semantics, returning the old value.  NOTE(review): no AO_SWIZZLE/  */
/* AO_OUT_ADDR here, so these int variants are presumably compiled    */
/* only in the LP64 case — the guarding #if lines are not visible in  */
/* this chunk; confirm before relying on them under _ILP32.           */
114 AO_INLINE unsigned int
115 AO_int_fetch_and_add1_acquire (volatile unsigned int *addr)
119 __asm__ __volatile__ ("fetchadd4.acq %0=[%1],1":
120 "=r" (result): AO_IN_ADDR :"memory");
123 #define AO_HAVE_int_fetch_and_add1_acquire
/* 4-byte variant: atomically add 1 with release semantics,           */
/* returning the old value.                                           */
125 AO_INLINE unsigned int
126 AO_int_fetch_and_add1_release (volatile unsigned int *addr)
130 __asm__ __volatile__ ("fetchadd4.rel %0=[%1],1":
131 "=r" (result): AO_IN_ADDR :"memory");
134 #define AO_HAVE_int_fetch_and_add1_release
/* 4-byte variant: atomically subtract 1 with acquire semantics,      */
/* returning the old value.                                           */
136 AO_INLINE unsigned int
137 AO_int_fetch_and_sub1_acquire (volatile unsigned int *addr)
141 __asm__ __volatile__ ("fetchadd4.acq %0=[%1],-1":
142 "=r" (result): AO_IN_ADDR :"memory");
145 #define AO_HAVE_int_fetch_and_sub1_acquire
/* 4-byte variant: atomically subtract 1 with release semantics,      */
/* returning the old value.                                           */
147 AO_INLINE unsigned int
148 AO_int_fetch_and_sub1_release (volatile unsigned int *addr)
152 __asm__ __volatile__ ("fetchadd4.rel %0=[%1],-1":
153 "=r" (result): AO_IN_ADDR :"memory");
156 #define AO_HAVE_int_fetch_and_sub1_release
/* Compare-and-swap with acquire semantics: if *addr == old, store    */
/* new_val; returns nonzero iff the swap took place (the value read   */
/* back equals old).  cmpxchg compares against ar.ccv, which is       */
/* loaded with old immediately beforehand.                            */
161 AO_compare_and_swap_acquire(volatile AO_t *addr,
162 AO_t old, AO_t new_val)
166 __asm__ __volatile__(AO_SWIZZLE
167 "mov ar.ccv=%[old] ;; cmpxchg" AO_LEN
168 ".acq %0=[%1],%[new_val],ar.ccv"
169 : "=r"(oldval) AO_OUT_ADDR
170 : AO_IN_ADDR, [new_val]"r"(new_val), [old]"r"(old)
172 return (oldval == old);
174 #define AO_HAVE_compare_and_swap_acquire
/* Compare-and-swap with release semantics: if *addr == old, store    */
/* new_val; returns nonzero iff the swap took place.                  */
177 AO_compare_and_swap_release(volatile AO_t *addr,
178 AO_t old, AO_t new_val)
182 __asm__ __volatile__(AO_SWIZZLE
183 "mov ar.ccv=%[old] ;; cmpxchg" AO_LEN
184 ".rel %0=[%1],%[new_val],ar.ccv"
185 : "=r"(oldval) AO_OUT_ADDR
186 : AO_IN_ADDR, [new_val]"r"(new_val), [old]"r"(old)
188 return (oldval == old);
190 #define AO_HAVE_compare_and_swap_release
/* 1-byte CAS with acquire semantics.  old is cast to AO_t so the     */
/* full-width value placed in ar.ccv has well-defined (zero) high     */
/* bits — cf. the AO_MASK comment near the top of the file.           */
193 AO_char_compare_and_swap_acquire(volatile unsigned char *addr,
194 unsigned char old, unsigned char new_val)
196 unsigned char oldval;
197 __asm__ __volatile__(AO_SWIZZLE
198 "mov ar.ccv=%[old] ;; cmpxchg1.acq %0=[%1],%[new_val],ar.ccv"
199 : "=r"(oldval) AO_OUT_ADDR
200 : AO_IN_ADDR, [new_val]"r"(new_val), [old]"r"((AO_t)old)
202 return (oldval == old);
204 #define AO_HAVE_char_compare_and_swap_acquire
/* 1-byte CAS with release semantics; see the acquire variant for the */
/* reason behind the (AO_t)old cast.                                  */
207 AO_char_compare_and_swap_release(volatile unsigned char *addr,
208 unsigned char old, unsigned char new_val)
210 unsigned char oldval;
211 __asm__ __volatile__(AO_SWIZZLE
212 "mov ar.ccv=%[old] ;; cmpxchg1.rel %0=[%1],%[new_val],ar.ccv"
213 : "=r"(oldval) AO_OUT_ADDR
214 : AO_IN_ADDR, [new_val]"r"(new_val), [old]"r"((AO_t)old)
216 return (oldval == old);
218 #define AO_HAVE_char_compare_and_swap_release
/* 2-byte CAS with acquire semantics (cmpxchg2).  old is widened to   */
/* AO_t so ar.ccv's high bits are zero.                               */
221 AO_short_compare_and_swap_acquire(volatile unsigned short *addr,
222 unsigned short old, unsigned short new_val)
224 unsigned short oldval;
225 __asm__ __volatile__(AO_SWIZZLE
226 "mov ar.ccv=%[old] ;; cmpxchg2.acq %0=[%1],%[new_val],ar.ccv"
227 : "=r"(oldval) AO_OUT_ADDR
228 : AO_IN_ADDR, [new_val]"r"(new_val), [old]"r"((AO_t)old)
230 return (oldval == old);
232 #define AO_HAVE_short_compare_and_swap_acquire
/* 2-byte CAS with release semantics; mirrors the acquire variant.    */
235 AO_short_compare_and_swap_release(volatile unsigned short *addr,
236 unsigned short old, unsigned short new_val)
238 unsigned short oldval;
239 __asm__ __volatile__(AO_SWIZZLE
240 "mov ar.ccv=%[old] ;; cmpxchg2.rel %0=[%1],%[new_val],ar.ccv"
241 : "=r"(oldval) AO_OUT_ADDR
242 : AO_IN_ADDR, [new_val]"r"(new_val), [old]"r"((AO_t)old)
244 return (oldval == old);
246 #define AO_HAVE_short_compare_and_swap_release
/* 4-byte CAS with acquire semantics.  Uses positional operands       */
/* (%2 = new_val, %3 = widened old) rather than the named-operand     */
/* form used above; no AO_SWIZZLE — presumably LP64-only, like the    */
/* other int variants (guarding #if lines not visible in this chunk). */
251 AO_int_compare_and_swap_acquire(volatile unsigned int *addr,
252 unsigned int old, unsigned int new_val)
255 __asm__ __volatile__("mov ar.ccv=%3 ;; cmpxchg4.acq %0=[%1],%2,ar.ccv"
257 : AO_IN_ADDR, "r"(new_val), "r"((AO_t)old) : "memory");
258 return (oldval == old);
260 #define AO_HAVE_int_compare_and_swap_acquire
/* 4-byte CAS with release semantics; mirrors the acquire variant.    */
263 AO_int_compare_and_swap_release(volatile unsigned int *addr,
264 unsigned int old, unsigned int new_val)
267 __asm__ __volatile__("mov ar.ccv=%3 ;; cmpxchg4.rel %0=[%1],%2,ar.ccv"
269 : AO_IN_ADDR, "r"(new_val), "r"((AO_t)old) : "memory");
270 return (oldval == old);
272 #define AO_HAVE_int_compare_and_swap_release
276 /* FIXME: Add compare_and_swap_double as soon as there is widely */
277 /* available hardware that implements it. */
279 /* FIXME: Add compare_double_and_swap_double for the _ILP32 case. */
282 /* Generalize first to define more AO_int_... primitives. */
283 # include "../../generalize.h"
284 # include "../ao_t_is_int.h"