x86: Add memory modify constraints to xchg() and cmpxchg()
[linux-2.6/linux-acpi-2.6/ibm-acpi-2.6.git] / arch/x86/include/asm/cmpxchg_64.h
blob b92f147339f3948e37323743cc6a743c13758d28
#ifndef _ASM_X86_CMPXCHG_64_H
#define _ASM_X86_CMPXCHG_64_H

#include <asm/alternative.h> /* Provides LOCK_PREFIX */

#define __xg(x) ((volatile long *)(x))

static inline void set_64bit(volatile unsigned long *ptr, unsigned long val)
{
	*ptr = val;
}

#define _set_64bit set_64bit

extern void __xchg_wrong_size(void);
extern void __cmpxchg_wrong_size(void);

/*
 * Note: no "lock" prefix even on SMP: xchg always implies lock anyway
 * Note 2: xchg has side effect, so that attribute volatile is necessary,
 *	  but generally the primitive is invalid, *ptr is output argument. --ANK
 */
#define __xchg(x, ptr, size) \
({ \
	__typeof(*(ptr)) __x = (x); \
	switch (size) { \
	case 1: \
		asm volatile("xchgb %b0,%1" \
			     : "=q" (__x), "+m" (*__xg(ptr)) \
			     : "0" (__x) \
			     : "memory"); \
		break; \
	case 2: \
		asm volatile("xchgw %w0,%1" \
			     : "=r" (__x), "+m" (*__xg(ptr)) \
			     : "0" (__x) \
			     : "memory"); \
		break; \
	case 4: \
		asm volatile("xchgl %k0,%1" \
			     : "=r" (__x), "+m" (*__xg(ptr)) \
			     : "0" (__x) \
			     : "memory"); \
		break; \
	case 8: \
		asm volatile("xchgq %0,%1" \
			     : "=r" (__x), "+m" (*__xg(ptr)) \
			     : "0" (__x) \
			     : "memory"); \
		break; \
	default: \
		__xchg_wrong_size(); \
	} \
	__x; \
})

#define xchg(ptr, v) \
	__xchg((v), (ptr), sizeof(*ptr))
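
/*
 * Illustrative sketch, not part of the original header: xchg() above swaps a
 * new value into *ptr and returns the previous contents in a single implicitly
 * locked instruction, so a caller can take over a slot without a separate
 * read.  The pending_work variable and helper name below are made up purely
 * for illustration.
 */
#if 0	/* example only, never compiled */
static unsigned long pending_work;

static inline unsigned long take_pending_work(void)
{
	/* Atomically fetch whatever was queued and leave 0 behind. */
	return xchg(&pending_work, 0UL);
}
#endif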

#define __HAVE_ARCH_CMPXCHG 1

/*
 * Atomic compare and exchange.  Compare OLD with MEM, if identical,
 * store NEW in MEM.  Return the initial value in MEM.  Success is
 * indicated by comparing RETURN with OLD.
 */
#define __raw_cmpxchg(ptr, old, new, size, lock) \
({ \
	__typeof__(*(ptr)) __ret; \
	__typeof__(*(ptr)) __old = (old); \
	__typeof__(*(ptr)) __new = (new); \
	switch (size) { \
	case 1: \
		asm volatile(lock "cmpxchgb %b2,%1" \
			     : "=a" (__ret), "+m" (*__xg(ptr)) \
			     : "q" (__new), "0" (__old) \
			     : "memory"); \
		break; \
	case 2: \
		asm volatile(lock "cmpxchgw %w2,%1" \
			     : "=a" (__ret), "+m" (*__xg(ptr)) \
			     : "r" (__new), "0" (__old) \
			     : "memory"); \
		break; \
	case 4: \
		asm volatile(lock "cmpxchgl %k2,%1" \
			     : "=a" (__ret), "+m" (*__xg(ptr)) \
			     : "r" (__new), "0" (__old) \
			     : "memory"); \
		break; \
	case 8: \
		asm volatile(lock "cmpxchgq %2,%1" \
			     : "=a" (__ret), "+m" (*__xg(ptr)) \
			     : "r" (__new), "0" (__old) \
			     : "memory"); \
		break; \
	default: \
		__cmpxchg_wrong_size(); \
	} \
	__ret; \
})

#define __cmpxchg(ptr, old, new, size) \
	__raw_cmpxchg((ptr), (old), (new), (size), LOCK_PREFIX)

#define __sync_cmpxchg(ptr, old, new, size) \
	__raw_cmpxchg((ptr), (old), (new), (size), "lock; ")

#define __cmpxchg_local(ptr, old, new, size) \
	__raw_cmpxchg((ptr), (old), (new), (size), "")

#define cmpxchg(ptr, old, new) \
	__cmpxchg((ptr), (old), (new), sizeof(*ptr))

#define sync_cmpxchg(ptr, old, new) \
	__sync_cmpxchg((ptr), (old), (new), sizeof(*ptr))

#define cmpxchg_local(ptr, old, new) \
	__cmpxchg_local((ptr), (old), (new), sizeof(*ptr))

#define cmpxchg64(ptr, o, n) \
({ \
	BUILD_BUG_ON(sizeof(*(ptr)) != 8); \
	cmpxchg((ptr), (o), (n)); \
})

#define cmpxchg64_local(ptr, o, n) \
({ \
	BUILD_BUG_ON(sizeof(*(ptr)) != 8); \
	cmpxchg_local((ptr), (o), (n)); \
})

#endif /* _ASM_X86_CMPXCHG_64_H */
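
A minimal usage sketch for context (not part of the file above; the counter_add() helper and its names are hypothetical): cmpxchg() stores the new value only if the location still holds the expected old value and returns whatever it actually found there, so lock-free updates are typically written as a read, compute, compare-and-retry loop.

/*
 * Hypothetical helper, for illustration only: atomically add to a 64-bit
 * counter using the cmpxchg() retry pattern from the header above.
 */
static inline unsigned long counter_add(unsigned long *counter, unsigned long delta)
{
	unsigned long old, new;

	do {
		old = *counter;		/* snapshot the current value */
		new = old + delta;	/* compute the desired result */
		/*
		 * cmpxchg() returns the value it found in *counter; if that
		 * is no longer 'old', another CPU won the race, so reread
		 * and retry.
		 */
	} while (cmpxchg(counter, old, new) != old);

	return new;
}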