Disintegrate asm/system.h for X86
[linux-2.6.git] arch/x86/include/asm/local.h
#ifndef _ASM_X86_LOCAL_H
#define _ASM_X86_LOCAL_H

#include <linux/percpu.h>

#include <linux/atomic.h>
#include <asm/asm.h>

typedef struct {
	atomic_long_t a;
} local_t;

#define LOCAL_INIT(i)	{ ATOMIC_LONG_INIT(i) }

#define local_read(l)	atomic_long_read(&(l)->a)
#define local_set(l, i)	atomic_long_set(&(l)->a, (i))
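/*
 * Illustrative sketch, not part of the original header: declaring,
 * initialising and reading a local_t.  The name "sample" is invented
 * for this example.
 *
 *	static local_t sample = LOCAL_INIT(0);
 *
 *	local_set(&sample, 42);
 *	long v = local_read(&sample);	// v is now 42
 */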
static inline void local_inc(local_t *l)
{
	asm volatile(_ASM_INC "%0"
		     : "+m" (l->a.counter));
}

static inline void local_dec(local_t *l)
{
	asm volatile(_ASM_DEC "%0"
		     : "+m" (l->a.counter));
}

static inline void local_add(long i, local_t *l)
{
	asm volatile(_ASM_ADD "%1,%0"
		     : "+m" (l->a.counter)
		     : "ir" (i));
}

static inline void local_sub(long i, local_t *l)
{
	asm volatile(_ASM_SUB "%1,%0"
		     : "+m" (l->a.counter)
		     : "ir" (i));
}
/**
 * local_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @l: pointer to type local_t
 *
 * Atomically subtracts @i from @l and returns
 * true if the result is zero, or false for all
 * other cases.
 */
static inline int local_sub_and_test(long i, local_t *l)
{
	unsigned char c;

	asm volatile(_ASM_SUB "%2,%0; sete %1"
		     : "+m" (l->a.counter), "=qm" (c)
		     : "ir" (i) : "memory");
	return c;
}
/**
 * local_dec_and_test - decrement and test
 * @l: pointer to type local_t
 *
 * Atomically decrements @l by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
static inline int local_dec_and_test(local_t *l)
{
	unsigned char c;

	asm volatile(_ASM_DEC "%0; sete %1"
		     : "+m" (l->a.counter), "=qm" (c)
		     : : "memory");
	return c != 0;
}
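/*
 * Illustrative sketch, not part of the original header: a per-CPU
 * countdown.  "budget" and refill() are invented for this example.
 *
 *	if (local_dec_and_test(this_cpu_ptr(&budget)))
 *		refill();	// the count just hit zero
 */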
/**
 * local_inc_and_test - increment and test
 * @l: pointer to type local_t
 *
 * Atomically increments @l by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
static inline int local_inc_and_test(local_t *l)
{
	unsigned char c;

	asm volatile(_ASM_INC "%0; sete %1"
		     : "+m" (l->a.counter), "=qm" (c)
		     : : "memory");
	return c != 0;
}
/**
 * local_add_negative - add and test if negative
 * @i: integer value to add
 * @l: pointer to type local_t
 *
 * Atomically adds @i to @l and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
static inline int local_add_negative(long i, local_t *l)
{
	unsigned char c;

	asm volatile(_ASM_ADD "%2,%0; sets %1"
		     : "+m" (l->a.counter), "=qm" (c)
		     : "ir" (i) : "memory");
	return c;
}
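/*
 * Illustrative sketch, not part of the original header: "credit" is an
 * invented local_t; spending more than remains drives it negative.
 *
 *	if (local_add_negative(-cost, &credit))
 *		throttle();	// overdrawn
 */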
/**
 * local_add_return - add and return
 * @i: integer value to add
 * @l: pointer to type local_t
 *
 * Atomically adds @i to @l and returns @i + @l
 */
static inline long local_add_return(long i, local_t *l)
{
	long __i;
#ifdef CONFIG_M386
	unsigned long flags;
	if (unlikely(boot_cpu_data.x86 <= 3))
		goto no_xadd;
#endif
	/* Modern 486+ processor */
	__i = i;
	asm volatile(_ASM_XADD "%0, %1;"
		     : "+r" (i), "+m" (l->a.counter)
		     : : "memory");
	return i + __i;

#ifdef CONFIG_M386
no_xadd: /* Legacy 386 processor */
	local_irq_save(flags);
	__i = local_read(l);
	local_set(l, i + __i);
	local_irq_restore(flags);
	return i + __i;
#endif
}
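/*
 * Illustrative note, not part of the original header: XADD exchanges
 * the addend with the old counter value, so "i" holds the previous
 * count after the asm and i + __i is the post-add value.
 *
 *	local_set(&l, 5);
 *	long v = local_add_return(3, &l);	// counter is 8, v == 8
 */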
static inline long local_sub_return(long i, local_t *l)
{
	return local_add_return(-i, l);
}

#define local_inc_return(l)  (local_add_return(1, l))
#define local_dec_return(l)  (local_sub_return(1, l))
#define local_cmpxchg(l, o, n) \
	(cmpxchg_local(&((l)->a.counter), (o), (n)))
/* Always has a lock prefix */
#define local_xchg(l, n) (xchg(&((l)->a.counter), (n)))
/**
 * local_add_unless - add unless the number is a given value
 * @l: pointer of type local_t
 * @a: the amount to add to l...
 * @u: ...unless l is equal to u.
 *
 * Atomically adds @a to @l, so long as it was not @u.
 * Returns non-zero if @l was not @u, and zero otherwise.
 */
#define local_add_unless(l, a, u)			\
({							\
	long c, old;					\
	c = local_read((l));				\
	for (;;) {					\
		if (unlikely(c == (u)))			\
			break;				\
		old = local_cmpxchg((l), c, c + (a));	\
		if (likely(old == c))			\
			break;				\
		c = old;				\
	}						\
	c != (u);					\
})

#define local_inc_not_zero(l) local_add_unless((l), 1, 0)
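/*
 * Illustrative sketch, not part of the original header: the cmpxchg
 * loop retries until the add lands on an unchanged value or the
 * counter is found at @u.  "usage" is an invented field name.
 *
 *	if (!local_inc_not_zero(&entry->usage))
 *		return NULL;	// count was already zero; don't revive it
 */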
/* On x86_32, these are no better than the atomic variants.
 * On x86-64 these are better than the atomic variants on SMP kernels
 * because they don't use a lock prefix.
 */
#define __local_inc(l)		local_inc(l)
#define __local_dec(l)		local_dec(l)
#define __local_add(i, l)	local_add((i), (l))
#define __local_sub(i, l)	local_sub((i), (l))

#endif /* _ASM_X86_LOCAL_H */