/*
 * Copyright IBM Corp. 1999, 2011
 *
 * Author(s): Martin Schwidefsky <schwidefsky@de.ibm.com>,
 */

#ifndef __ASM_CMPXCHG_H
#define __ASM_CMPXCHG_H

#include <linux/types.h>

extern void __xchg_called_with_bad_pointer(void);
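/*
 * Note: __xchg_called_with_bad_pointer() is deliberately never defined.
 * If __xchg() is used with an unsupported operand size, the call survives
 * to link time and produces a link error instead of silently generating
 * wrong code.
 */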
static inline unsigned long __xchg(unsigned long x, void *ptr, int size)
{
        unsigned long addr, old;
        int shift;

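        /*
         * CS (compare and swap) only operates on aligned 4-byte words, so
         * 1- and 2-byte exchanges are emulated: align the address down to
         * the containing word, compute the big-endian bit shift of the
         * target byte/halfword, and run a CS retry loop that rewrites only
         * the selected part of the word.
         */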
        switch (size) {
        case 1:
                addr = (unsigned long) ptr;
                shift = (3 ^ (addr & 3)) << 3;
                addr ^= addr & 3;
                asm volatile(
                        "       l       %0,%4\n"
                        "0:     lr      0,%0\n"
                        "       nr      0,%3\n"
                        "       or      0,%2\n"
                        "       cs      %0,0,%4\n"
                        "       jl      0b\n"
                        : "=&d" (old), "=Q" (*(int *) addr)
                        : "d" (x << shift), "d" (~(255 << shift)),
                          "Q" (*(int *) addr) : "memory", "cc", "0");
                return old >> shift;
        case 2:
                addr = (unsigned long) ptr;
                shift = (2 ^ (addr & 2)) << 3;
                addr ^= addr & 2;
                asm volatile(
                        "       l       %0,%4\n"
                        "0:     lr      0,%0\n"
                        "       nr      0,%3\n"
                        "       or      0,%2\n"
                        "       cs      %0,0,%4\n"
                        "       jl      0b\n"
                        : "=&d" (old), "=Q" (*(int *) addr)
                        : "d" (x << shift), "d" (~(65535 << shift)),
                          "Q" (*(int *) addr) : "memory", "cc", "0");
                return old >> shift;
55 : "=&d" (old
), "=Q" (*(int *) ptr
)
56 : "d" (x
), "Q" (*(int *) ptr
)
65 : "=&d" (old
), "=m" (*(long *) ptr
)
66 : "d" (x
), "Q" (*(long *) ptr
)
69 #endif /* CONFIG_64BIT */
71 __xchg_called_with_bad_pointer();
#define xchg(ptr, x)                                                    \
({                                                                      \
        __typeof__(*(ptr)) __ret;                                       \
        __ret = (__typeof__(*(ptr)))                                    \
                __xchg((unsigned long)(x), (void *)(ptr), sizeof(*(ptr)));\
        __ret;                                                          \
})

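/*
 * Usage sketch (illustrative only; "lock" is a hypothetical int flag):
 *
 *      int prev = xchg(&lock, 1);
 *      if (prev == 0)
 *              ...this caller is the one that set the flag...
 */
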
/*
 * Atomic compare and exchange.  Compare OLD with MEM, if identical,
 * store NEW in MEM.  Return the initial value in MEM.  Success is
 * indicated by comparing RETURN with OLD.
 */

#define __HAVE_ARCH_CMPXCHG

extern void __cmpxchg_called_with_bad_pointer(void);

static inline unsigned long __cmpxchg(void *ptr, unsigned long old,
                                      unsigned long new, int size)
{
        unsigned long addr, prev, tmp;
        int shift;

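        /*
         * As in __xchg(), 1- and 2-byte compare-and-exchange is built from
         * a word-sized CS on the containing word: the expected and new
         * values are shifted into place, and the loop retries only when
         * bits outside the target byte/halfword changed underneath us.
         */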
        switch (size) {
        case 1:
                addr = (unsigned long) ptr;
                shift = (3 ^ (addr & 3)) << 3;
                asm volatile(
                        "       l       %0,%2\n"
                        "0:     nr      %0,%5\n"
                        "       lr      %1,%0\n"
                        "       or      %0,%3\n"
                        "       or      %1,%4\n"
                        "       cs      %0,%1,%2\n"
                        "       jnl     1f\n"
                        "       xr      %1,%0\n"
                        "       nr      %1,%5\n"
                        "       jnz     0b\n"
                        "1:"
                        : "=&d" (prev), "=&d" (tmp), "=Q" (*(int *) ptr)
                        : "d" (old << shift), "d" (new << shift),
                          "d" (~(255 << shift)), "Q" (*(int *) ptr)
                        : "memory", "cc");
                return prev >> shift;
        case 2:
                addr = (unsigned long) ptr;
                shift = (2 ^ (addr & 2)) << 3;
                asm volatile(
                        "       l       %0,%2\n"
                        "0:     nr      %0,%5\n"
                        "       lr      %1,%0\n"
                        "       or      %0,%3\n"
                        "       or      %1,%4\n"
                        "       cs      %0,%1,%2\n"
                        "       jnl     1f\n"
                        "       xr      %1,%0\n"
                        "       nr      %1,%5\n"
                        "       jnz     0b\n"
                        "1:"
                        : "=&d" (prev), "=&d" (tmp), "=Q" (*(int *) ptr)
                        : "d" (old << shift), "d" (new << shift),
                          "d" (~(65535 << shift)), "Q" (*(int *) ptr)
                        : "memory", "cc");
                return prev >> shift;
        case 4:
                asm volatile(
                        "       cs      %0,%3,%1\n"
                        : "=&d" (prev), "=Q" (*(int *) ptr)
                        : "0" (old), "d" (new), "Q" (*(int *) ptr)
                        : "memory", "cc");
                return prev;
#ifdef CONFIG_64BIT
        case 8:
                asm volatile(
                        "       csg     %0,%3,%1\n"
                        : "=&d" (prev), "=Q" (*(long *) ptr)
                        : "0" (old), "d" (new), "Q" (*(long *) ptr)
                        : "memory", "cc");
                return prev;
#endif /* CONFIG_64BIT */
        }
        __cmpxchg_called_with_bad_pointer();
        return old;
}

#define cmpxchg(ptr, o, n)                                              \
        ((__typeof__(*(ptr)))__cmpxchg((ptr), (unsigned long)(o),      \
                                       (unsigned long)(n), sizeof(*(ptr))))

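/*
 * Usage sketch (illustrative only; "refcount" is a hypothetical int):
 * retry until the value returned by cmpxchg() equals the expected old
 * value, which signals that the store of old + 1 actually happened.
 *
 *      int cur, old = refcount;
 *
 *      while ((cur = cmpxchg(&refcount, old, old + 1)) != old)
 *              old = cur;
 */
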
#ifdef CONFIG_64BIT
#define cmpxchg64(ptr, o, n)                                            \
({                                                                      \
        cmpxchg((ptr), (o), (n));                                       \
})
#else /* CONFIG_64BIT */
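/*
 * On 31-bit kernels there is no CSG, so a 64-bit compare-and-exchange is
 * done with CDS (compare double and swap), which operates on an even/odd
 * register pair; register_pair maps the 64-bit values onto such a pair.
 */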
static inline unsigned long long __cmpxchg64(void *ptr,
                                             unsigned long long old,
                                             unsigned long long new)
{
        register_pair rp_old = {.pair = old};
        register_pair rp_new = {.pair = new};

        asm volatile(
                "       cds     %0,%2,%1"
                : "+&d" (rp_old), "=Q" (ptr)
                : "d" (rp_new), "Q" (ptr)
                : "memory", "cc");
        return rp_old.pair;
}

#define cmpxchg64(ptr, o, n)                            \
        ((__typeof__(*(ptr)))__cmpxchg64((ptr),         \
                                         (unsigned long long)(o), \
                                         (unsigned long long)(n)))
#endif /* CONFIG_64BIT */

#include <asm-generic/cmpxchg-local.h>

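/*
 * The "local" variant only has to be atomic with respect to the current
 * CPU.  CS/CSG already are, so supported sizes simply reuse __cmpxchg();
 * anything else falls back to the generic emulation pulled in from
 * asm-generic/cmpxchg-local.h.
 */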
static inline unsigned long __cmpxchg_local(void *ptr,
                                            unsigned long old,
                                            unsigned long new, int size)
{
        switch (size) {
        case 1:
        case 2:
        case 4:
#ifdef CONFIG_64BIT
        case 8:
#endif
                return __cmpxchg(ptr, old, new, size);
        default:
                return __cmpxchg_local_generic(ptr, old, new, size);
        }

        return old;
}

/*
 * cmpxchg_local and cmpxchg64_local are atomic wrt current CPU.  Always make
 * them available.
 */
#define cmpxchg_local(ptr, o, n)                                        \
        ((__typeof__(*(ptr)))__cmpxchg_local((ptr), (unsigned long)(o), \
                        (unsigned long)(n), sizeof(*(ptr))))
#define cmpxchg64_local(ptr, o, n)      cmpxchg64((ptr), (o), (n))

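/*
 * Illustrative sketch only (hypothetical "int stat" owned exclusively by
 * the current CPU): cmpxchg_local(&stat, old, old + 1) is sufficient here,
 * because only code running on this CPU ever modifies the value.
 */
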
#endif /* __ASM_CMPXCHG_H */