+#if defined(__powerpc64__) || defined(__ppc64__) || defined(__64BIT__)
+/* FIXME: The 64-bit (ldarx/stdcx.) path below is completely untested. */
+
+/* Atomically add incr to *addr and return the value *addr held
+ * immediately before the addition (64-bit PowerPC path, doubleword
+ * ldarx/stdcx.).  Implemented as a load-reserve / store-conditional
+ * retry loop: stdcx. fails, and we branch back, if the reservation
+ * taken by ldarx was lost before the store.  No memory fence is
+ * issued here; the _acquire/_release/_full wrappers below add
+ * ordering via AO_lwsync. */
+AO_INLINE AO_t
+AO_fetch_and_add(volatile AO_t *addr, AO_t incr) {
+ AO_t oldval;  /* value observed at *addr by ldarx */
+ AO_t newval;  /* oldval + incr, written back by stdcx. */
+
+ __asm__ __volatile__(
+ "1:ldarx %0,0,%2\n" /* load *addr and take a reservation on it */
+ "add %1,%0,%3\n" /* newval = oldval + incr */
+ "stdcx. %1,0,%2\n" /* store newval iff reservation still held */
+ "bne- 1b\n" /* retry if the reservation was lost */
+ /* "=&r": early-clobber — outputs are written before all inputs are
+    consumed, so they must not share registers with addr/incr. */
+ : "=&r"(oldval), "=&r"(newval)
+ : "r"(addr), "r"(incr)
+ /* "memory" keeps the compiler from caching *addr across the asm;
+    cr0 is clobbered because stdcx. sets it. */
+ : "memory", "cr0");
+
+ return oldval;
+}
+
+#define AO_HAVE_fetch_and_add
+
+#else
+
+/* Atomically add incr to *addr and return the value *addr held
+ * immediately before the addition (32-bit PowerPC path, word
+ * lwarx/stwcx.).  Same load-reserve / store-conditional retry loop
+ * as the 64-bit variant above, using the word-sized instructions.
+ * No memory fence is issued here; use the _acquire/_release/_full
+ * wrappers for ordering. */
+AO_INLINE AO_t
+AO_fetch_and_add(volatile AO_t *addr, AO_t incr) {
+ AO_t oldval;  /* value observed at *addr by lwarx */
+ AO_t newval;  /* oldval + incr, written back by stwcx. */
+
+ __asm__ __volatile__(
+ "1:lwarx %0,0,%2\n" /* load *addr and take a reservation on it */
+ "add %1,%0,%3\n" /* newval = oldval + incr */
+ "stwcx. %1,0,%2\n" /* store newval iff reservation still held */
+ "bne- 1b\n" /* retry if the reservation was lost */
+ /* "=&r": early-clobber — outputs are written before all inputs are
+    consumed, so they must not share registers with addr/incr. */
+ : "=&r"(oldval), "=&r"(newval)
+ : "r"(addr), "r"(incr)
+ /* "memory" keeps the compiler from caching *addr across the asm;
+    cr0 is clobbered because stwcx. sets it. */
+ : "memory", "cr0");
+
+ return oldval;
+}
+
+#define AO_HAVE_fetch_and_add
+
+#endif
+
+/* Fetch-and-add with acquire ordering: the atomic update happens
+ * first, then AO_lwsync fences so later accesses are not hoisted
+ * above it.  Returns the pre-addition value of *addr. */
+AO_INLINE AO_t
+AO_fetch_and_add_acquire(volatile AO_t *addr, AO_t incr) {
+ AO_t fetched;
+
+ fetched = AO_fetch_and_add(addr, incr);
+ AO_lwsync();
+ return fetched;
+}
+
+#define AO_HAVE_fetch_and_add_acquire
+
+/* Fetch-and-add with release ordering: AO_lwsync fences first so
+ * earlier accesses complete before the atomic update becomes
+ * visible.  Returns the pre-addition value of *addr. */
+AO_INLINE AO_t
+AO_fetch_and_add_release(volatile AO_t *addr, AO_t incr) {
+ AO_t prior;
+
+ AO_lwsync();
+ prior = AO_fetch_and_add(addr, incr);
+ return prior;
+}
+
+#define AO_HAVE_fetch_and_add_release
+
+/* Fetch-and-add fenced on both sides with AO_lwsync.  Returns the
+ * pre-addition value of *addr.
+ * NOTE(review): lwsync does not order a prior store against a later
+ * load; confirm a full "sync" is not required for _full semantics. */
+AO_INLINE AO_t
+AO_fetch_and_add_full(volatile AO_t *addr, AO_t incr) {
+ AO_t prior;
+
+ AO_lwsync();
+ prior = AO_fetch_and_add(addr, incr);
+ AO_lwsync();
+ return prior;
+}
+
+#define AO_HAVE_fetch_and_add_full
+
+/* On 32-bit targets pull in the generic int-sized definitions
+   (the header name suggests AO_t is int-sized there — confirm).
+   Negated condition replaces the previous empty-#if/#else form. */
+#if !(defined(__powerpc64__) || defined(__ppc64__) || defined(__64BIT__))
+# include "../ao_t_is_int.h"
+#endif
+