#ifndef __ASM_CMPXCHG_H
#define __ASM_CMPXCHG_H

#include <asm/alternative.h> /* Provides LOCK_PREFIX */

#define xchg(ptr,v) ((__typeof__(*(ptr)))__xchg((unsigned long)(v),(ptr),sizeof(*(ptr))))

#define __xg(x) ((volatile long *)(x))
static inline void set_64bit(volatile unsigned long *ptr, unsigned long val)
{
	*ptr = val;
}

#define _set_64bit set_64bit
/*
 * Note: no "lock" prefix even on SMP: xchg always implies lock anyway
 * Note 2: xchg has side effect, so that attribute volatile is necessary,
 *	  but generally the primitive is invalid, *ptr is output argument. --ANK
 */
static inline unsigned long __xchg(unsigned long x, volatile void *ptr, int size)
{
	switch (size) {
	case 1:
		__asm__ __volatile__("xchgb %b0,%1"
			: "=q" (x)
			: "m" (*__xg(ptr)), "0" (x)
			: "memory");
		break;
	case 2:
		__asm__ __volatile__("xchgw %w0,%1"
			: "=r" (x)
			: "m" (*__xg(ptr)), "0" (x)
			: "memory");
		break;
	case 4:
		__asm__ __volatile__("xchgl %k0,%1"
			: "=r" (x)
			: "m" (*__xg(ptr)), "0" (x)
			: "memory");
		break;
	case 8:
		__asm__ __volatile__("xchgq %0,%1"
			: "=r" (x)
			: "m" (*__xg(ptr)), "0" (x)
			: "memory");
		break;
	}
	return x;
}
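/*
 * Illustrative sketch, not part of the original header: one way a caller
 * might use xchg() to atomically take ownership of a flag word, relying on
 * the implicit lock semantics noted in the comment above __xchg(). The
 * function and variable names here are hypothetical.
 */
static inline unsigned long example_xchg_claim(unsigned long *flag)
{
	/* Atomically store 1 and return whatever value was there before. */
	return xchg(flag, 1UL);
}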
/*
 * Atomic compare and exchange. Compare OLD with MEM, if identical,
 * store NEW in MEM. Return the initial value in MEM. Success is
 * indicated by comparing RETURN with OLD.
 */
#define __HAVE_ARCH_CMPXCHG 1
static inline unsigned long __cmpxchg(volatile void *ptr, unsigned long old,
				      unsigned long new, int size)
{
	unsigned long prev;

	switch (size) {
	case 1:
		__asm__ __volatile__(LOCK_PREFIX "cmpxchgb %b1,%2"
				     : "=a" (prev)
				     : "q" (new), "m" (*__xg(ptr)), "0" (old)
				     : "memory");
		return prev;
	case 2:
		__asm__ __volatile__(LOCK_PREFIX "cmpxchgw %w1,%2"
				     : "=a" (prev)
				     : "r" (new), "m" (*__xg(ptr)), "0" (old)
				     : "memory");
		return prev;
	case 4:
		__asm__ __volatile__(LOCK_PREFIX "cmpxchgl %k1,%2"
				     : "=a" (prev)
				     : "r" (new), "m" (*__xg(ptr)), "0" (old)
				     : "memory");
		return prev;
	case 8:
		__asm__ __volatile__(LOCK_PREFIX "cmpxchgq %1,%2"
				     : "=a" (prev)
				     : "r" (new), "m" (*__xg(ptr)), "0" (old)
				     : "memory");
		return prev;
	}
	return old;
}
static inline unsigned long __cmpxchg_local(volatile void *ptr,
					    unsigned long old,
					    unsigned long new, int size)
{
	unsigned long prev;

	switch (size) {
	case 1:
		__asm__ __volatile__("cmpxchgb %b1,%2"
				     : "=a" (prev)
				     : "q" (new), "m" (*__xg(ptr)), "0" (old)
				     : "memory");
		return prev;
	case 2:
		__asm__ __volatile__("cmpxchgw %w1,%2"
				     : "=a" (prev)
				     : "r" (new), "m" (*__xg(ptr)), "0" (old)
				     : "memory");
		return prev;
	case 4:
		__asm__ __volatile__("cmpxchgl %k1,%2"
				     : "=a" (prev)
				     : "r" (new), "m" (*__xg(ptr)), "0" (old)
				     : "memory");
		return prev;
	case 8:
		__asm__ __volatile__("cmpxchgq %1,%2"
				     : "=a" (prev)
				     : "r" (new), "m" (*__xg(ptr)), "0" (old)
				     : "memory");
		return prev;
	}
	return old;
}
#define cmpxchg(ptr, o, n)						\
	((__typeof__(*(ptr)))__cmpxchg((ptr), (unsigned long)(o),	\
				       (unsigned long)(n), sizeof(*(ptr))))
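/*
 * Illustrative sketch, not part of the original header: a lock-free
 * increment built on cmpxchg(). As described in the comment above
 * __cmpxchg(), success is detected by comparing the returned value with
 * the "old" value that was passed in; if they differ, another CPU changed
 * the word first and the operation is retried. Names are hypothetical.
 */
static inline void example_cmpxchg_inc(unsigned long *counter)
{
	unsigned long old;

	do {
		old = *counter;
	} while (cmpxchg(counter, old, old + 1) != old);
}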
#define cmpxchg64(ptr, o, n)						\
({									\
	BUILD_BUG_ON(sizeof(*(ptr)) != 8);				\
	cmpxchg((ptr), (o), (n));					\
})
#define cmpxchg_local(ptr, o, n)					\
	((__typeof__(*(ptr)))__cmpxchg_local((ptr), (unsigned long)(o),	\
					     (unsigned long)(n),	\
					     sizeof(*(ptr))))
#define cmpxchg64_local(ptr, o, n)					\
({									\
	BUILD_BUG_ON(sizeof(*(ptr)) != 8);				\
	cmpxchg_local((ptr), (o), (n));					\
})

#endif /* __ASM_CMPXCHG_H */