[mikesnafu-overlay.git] / include/asm-x86/cmpxchg_64.h
#ifndef __ASM_CMPXCHG_H
#define __ASM_CMPXCHG_H

#include <asm/alternative.h> /* Provides LOCK_PREFIX */

#define xchg(ptr,v) ((__typeof__(*(ptr)))__xchg((unsigned long)(v),(ptr),sizeof(*(ptr))))

#define __xg(x) ((volatile long *)(x))
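
/*
 * Editorial note (not part of the original header): on x86-64 an aligned
 * 64-bit store is already atomic, which is presumably why set_64bit()
 * below can be a plain assignment rather than a cmpxchg8b-style loop as
 * on i386.
 */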
static inline void set_64bit(volatile unsigned long *ptr, unsigned long val)
{
	*ptr = val;
}

#define _set_64bit set_64bit

/*
 * Note: no "lock" prefix even on SMP: xchg always implies lock anyway.
 * Note 2: xchg has a side effect, so the volatile attribute is necessary,
 *	   but strictly speaking the primitive is invalid as declared:
 *	   *ptr is really an output argument. --ANK
 */
static inline unsigned long __xchg(unsigned long x, volatile void *ptr, int size)
{
	switch (size) {
	case 1:
		__asm__ __volatile__("xchgb %b0,%1"
			:"=q" (x)
			:"m" (*__xg(ptr)), "0" (x)
			:"memory");
		break;
	case 2:
		__asm__ __volatile__("xchgw %w0,%1"
			:"=r" (x)
			:"m" (*__xg(ptr)), "0" (x)
			:"memory");
		break;
	case 4:
		__asm__ __volatile__("xchgl %k0,%1"
			:"=r" (x)
			:"m" (*__xg(ptr)), "0" (x)
			:"memory");
		break;
	case 8:
		__asm__ __volatile__("xchgq %0,%1"
			:"=r" (x)
			:"m" (*__xg(ptr)), "0" (x)
			:"memory");
		break;
	}
	return x;
}
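
/*
 * Usage sketch (editorial addition, not part of the original header):
 * xchg() returns the previous contents of the location, so it can act
 * as a simple test-and-set.  The function and variable names below are
 * hypothetical and only illustrate the calling convention.
 */
static inline int example_xchg_trylock(volatile unsigned long *lock)
{
	/* An old value of 0 means the lock was free and we now own it. */
	return xchg(lock, 1UL) == 0;
}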

/*
 * Atomic compare and exchange.  Compare OLD with MEM; if identical,
 * store NEW in MEM.  Return the initial value in MEM.  Success is
 * indicated by comparing RETURN with OLD.
 */

#define __HAVE_ARCH_CMPXCHG 1

static inline unsigned long __cmpxchg(volatile void *ptr, unsigned long old,
				      unsigned long new, int size)
{
	unsigned long prev;
	switch (size) {
	case 1:
		__asm__ __volatile__(LOCK_PREFIX "cmpxchgb %b1,%2"
				     : "=a"(prev)
				     : "q"(new), "m"(*__xg(ptr)), "0"(old)
				     : "memory");
		return prev;
	case 2:
		__asm__ __volatile__(LOCK_PREFIX "cmpxchgw %w1,%2"
				     : "=a"(prev)
				     : "r"(new), "m"(*__xg(ptr)), "0"(old)
				     : "memory");
		return prev;
	case 4:
		__asm__ __volatile__(LOCK_PREFIX "cmpxchgl %k1,%2"
				     : "=a"(prev)
				     : "r"(new), "m"(*__xg(ptr)), "0"(old)
				     : "memory");
		return prev;
	case 8:
		__asm__ __volatile__(LOCK_PREFIX "cmpxchgq %1,%2"
				     : "=a"(prev)
				     : "r"(new), "m"(*__xg(ptr)), "0"(old)
				     : "memory");
		return prev;
	}
	return old;
}
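
/*
 * Usage sketch (editorial addition, not part of the original header):
 * callers detect success by comparing the returned value with the value
 * they passed as `old`.  A hypothetical lock-free increment using the
 * low-level helper directly (the typed cmpxchg() macro is defined
 * further down):
 */
static inline void example_cmpxchg_inc(volatile unsigned long *ctr)
{
	unsigned long old;

	do {
		old = *ctr;
		/* Retry if another CPU changed *ctr between the load and the cmpxchg. */
	} while (__cmpxchg(ctr, old, old + 1, sizeof(*ctr)) != old);
}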

static inline unsigned long __cmpxchg_local(volatile void *ptr,
			unsigned long old, unsigned long new, int size)
{
	unsigned long prev;
	switch (size) {
	case 1:
		__asm__ __volatile__("cmpxchgb %b1,%2"
				     : "=a"(prev)
				     : "q"(new), "m"(*__xg(ptr)), "0"(old)
				     : "memory");
		return prev;
	case 2:
		__asm__ __volatile__("cmpxchgw %w1,%2"
				     : "=a"(prev)
				     : "r"(new), "m"(*__xg(ptr)), "0"(old)
				     : "memory");
		return prev;
	case 4:
		__asm__ __volatile__("cmpxchgl %k1,%2"
				     : "=a"(prev)
				     : "r"(new), "m"(*__xg(ptr)), "0"(old)
				     : "memory");
		return prev;
	case 8:
		__asm__ __volatile__("cmpxchgq %1,%2"
				     : "=a"(prev)
				     : "r"(new), "m"(*__xg(ptr)), "0"(old)
				     : "memory");
		return prev;
	}
	return old;
}
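
/*
 * Editorial note (not part of the original header): __cmpxchg_local() is
 * identical to __cmpxchg() except that it omits LOCK_PREFIX, so it is
 * atomic only with respect to the current CPU (e.g. against interrupts
 * on per-CPU data), not against other processors.
 */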

#define cmpxchg(ptr, o, n)						\
	((__typeof__(*(ptr)))__cmpxchg((ptr), (unsigned long)(o),	\
				       (unsigned long)(n), sizeof(*(ptr))))
#define cmpxchg64(ptr, o, n)						\
({									\
	BUILD_BUG_ON(sizeof(*(ptr)) != 8);				\
	cmpxchg((ptr), (o), (n));					\
})
#define cmpxchg_local(ptr, o, n)					\
	((__typeof__(*(ptr)))__cmpxchg_local((ptr), (unsigned long)(o),	\
					     (unsigned long)(n), sizeof(*(ptr))))
#define cmpxchg64_local(ptr, o, n)					\
({									\
	BUILD_BUG_ON(sizeof(*(ptr)) != 8);				\
	cmpxchg_local((ptr), (o), (n));					\
})
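
/*
 * Usage sketch (editorial addition, not part of the original header):
 * the public macros cast the result back to the pointed-to type, so they
 * work directly on typed variables.  The function name below is
 * hypothetical.
 */
static inline int example_set_flag_once(volatile int *flag)
{
	/* Non-zero only for the caller that transitions the flag 0 -> 1. */
	return cmpxchg(flag, 0, 1) == 0;
}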

#endif