#ifndef _MACHINE_INSTR_H
#define _MACHINE_INSTR_H
/* atomic_add ***************************************************************

   Atomically add VAL to the int pointed to by MEM, using the i386
   `lock; addl` instruction.  The previous value is not returned.

 ***************************************************************************/
static inline void
__attribute__ ((unused))
atomic_add (volatile int *mem, int val)
{
  /* "ir": VAL may be an immediate or a register.  *mem appears both as
     output ("=m") and input ("m") because it is read-modified-written.  */
  __asm__ __volatile__ ("lock; addl %1,%0"
			: "=m" (*mem)
			: "ir" (val), "m" (*mem));
}
13 /* compare_and_swap ********************************************************
15 Atomically do the following: Check if the location still contains
16 `oldval`. If so, replace it by `newval` and return `oldval`.
21 ***************************************************************************/
24 __attribute__ ((unused))
25 compare_and_swap (volatile long *p, long oldval, long newval)
29 __asm__ __volatile__ ("lock; cmpxchgl %2, %1"
30 : "=a" (ret), "=m" (*p)
31 : "r" (newval), "m" (*p), "0" (oldval));
/* Memory-ordering barriers for i386.

   x86's strongly-ordered memory model lets the first three be pure
   compiler fences: an empty asm with a "memory" clobber stops the
   compiler from reordering or caching memory accesses across the
   point, with no runtime cost.  Only the full MEMORY_BARRIER emits a
   serializing instruction (a locked no-op add to the stack top).

   The macro bodies deliberately end WITHOUT a semicolon, so that a use
   such as `if (c) STORE_ORDER_BARRIER(); else ...` parses correctly
   (the previous trailing semicolons expanded to an extra empty
   statement, breaking unbraced if/else).  */
#define STORE_ORDER_BARRIER() __asm__ __volatile__ ("" : : : "memory")
#define MEMORY_BARRIER_BEFORE_ATOMIC() __asm__ __volatile__ ("" : : : "memory")
#define MEMORY_BARRIER_AFTER_ATOMIC() __asm__ __volatile__ ("" : : : "memory")
#define MEMORY_BARRIER() __asm__ __volatile__ ( \
	"lock; add $0, 0(%%esp)" : : : "memory" )