#ifndef _ARCH_MIPS_LOCAL_H
#define _ARCH_MIPS_LOCAL_H

#include <linux/percpu.h>
#include <linux/bitops.h>
#include <asm/atomic.h>
#include <asm/cmpxchg.h>
#include <asm/war.h>
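
/*
 * A local_t is a signed long counter that must only ever be updated by
 * its owning CPU; that restriction is what allows the sequences below to
 * be cheaper than operations on a shared atomic_long_t.
 */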
typedef struct
{
	atomic_long_t a;
} local_t;

#define LOCAL_INIT(i)	{ ATOMIC_LONG_INIT(i) }

#define local_read(l)	atomic_long_read(&(l)->a)
#define local_set(l,i)	atomic_long_set(&(l)->a, (i))

#define local_add(i,l)	atomic_long_add((i),(&(l)->a))
#define local_sub(i,l)	atomic_long_sub((i),(&(l)->a))
#define local_inc(l)	atomic_long_inc(&(l)->a)
#define local_dec(l)	atomic_long_dec(&(l)->a)

/*
 * Same as above, but return the result value.
 */
static __inline__ long local_add_return(long i, local_t * l)
{
	unsigned long result;

	if (cpu_has_llsc && R10000_LLSC_WAR) {
		unsigned long temp;
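
		/*
		 * ll/sc retry loop using the branch-likely form (beqzl) of
		 * the retry branch; the R10000_LLSC_WAR variant works
		 * around an ll/sc erratum in early R10000 silicon.
		 */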
		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:"	__LL	"%1, %2		# local_add_return	\n"
		"	addu	%0, %1, %3				\n"
			__SC	"%0, %2					\n"
		"	beqzl	%0, 1b					\n"
		"	addu	%0, %1, %3				\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp), "=m" (l->a.counter)
		: "Ir" (i), "m" (l->a.counter)
		: "memory");
	} else if (cpu_has_llsc) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:"	__LL	"%1, %2		# local_add_return	\n"
		"	addu	%0, %1, %3				\n"
			__SC	"%0, %2					\n"
		"	beqz	%0, 1b					\n"
		"	addu	%0, %1, %3				\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp), "=m" (l->a.counter)
		: "Ir" (i), "m" (l->a.counter)
		: "memory");
	} else {
		unsigned long flags;
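
		/*
		 * No ll/sc on this CPU: a plain read-modify-write with
		 * interrupts disabled is sufficient, since a local_t may
		 * only be touched by its owning CPU.
		 */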
		local_irq_save(flags);
		result = l->a.counter;
		result += i;
		l->a.counter = result;
		local_irq_restore(flags);
	}

	return result;
}

static __inline__ long local_sub_return(long i, local_t * l)
{
	unsigned long result;

	if (cpu_has_llsc && R10000_LLSC_WAR) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:"	__LL	"%1, %2		# local_sub_return	\n"
		"	subu	%0, %1, %3				\n"
			__SC	"%0, %2					\n"
		"	beqzl	%0, 1b					\n"
		"	subu	%0, %1, %3				\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp), "=m" (l->a.counter)
		: "Ir" (i), "m" (l->a.counter)
		: "memory");
	} else if (cpu_has_llsc) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:"	__LL	"%1, %2		# local_sub_return	\n"
		"	subu	%0, %1, %3				\n"
			__SC	"%0, %2					\n"
		"	beqz	%0, 1b					\n"
		"	subu	%0, %1, %3				\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp), "=m" (l->a.counter)
		: "Ir" (i), "m" (l->a.counter)
		: "memory");
	} else {
		unsigned long flags;

		local_irq_save(flags);
		result = l->a.counter;
		result -= i;
		l->a.counter = result;
		local_irq_restore(flags);
	}

	return result;
}

#define local_cmpxchg(l, o, n) \
	((long)cmpxchg_local(&((l)->a.counter), (o), (n)))
#define local_xchg(l, n) (xchg_local(&((l)->a.counter),(n)))
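
/*
 * cmpxchg_local()/xchg_local() only have to be atomic with respect to
 * the current CPU, not across CPUs, which is all a local_t requires.
 */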

/*
 * local_add_unless - add unless the number is a given value
 * @l: pointer of type local_t
 * @a: the amount to add to l...
 * @u: ...unless l is equal to u.
 *
 * Atomically adds @a to @l, so long as it was not @u.
 * Returns non-zero if @l was not @u, and zero otherwise.
 */
#define local_add_unless(l, a, u)				\
({								\
	long c, old;						\
	c = local_read(l);					\
	while (c != (u) && (old = local_cmpxchg((l), c, c + (a))) != c) \
		c = old;					\
	c != (u);						\
})
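
/*
 * The macro above is the usual cmpxchg retry loop: re-read the counter
 * whenever another update slips in between the read and the cmpxchg,
 * and stop once the counter equals @u or the update succeeds.
 */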

#define local_inc_not_zero(l) local_add_unless((l), 1, 0)

#define local_dec_return(l) local_sub_return(1,(l))
#define local_inc_return(l) local_add_return(1,(l))

/*
 * local_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @l: pointer of type local_t
 *
 * Atomically subtracts @i from @l and returns
 * true if the result is zero, or false for all
 * other cases.
 */
#define local_sub_and_test(i,l) (local_sub_return((i), (l)) == 0)

/*
 * local_inc_and_test - increment and test
 * @l: pointer of type local_t
 *
 * Atomically increments @l by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define local_inc_and_test(l) (local_inc_return(l) == 0)

/*
 * local_dec_and_test - decrement by 1 and test
 * @l: pointer of type local_t
 *
 * Atomically decrements @l by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
#define local_dec_and_test(l) (local_sub_return(1, (l)) == 0)

/*
 * local_add_negative - add and test if negative
 * @l: pointer of type local_t
 * @i: integer value to add
 *
 * Atomically adds @i to @l and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
#define local_add_negative(i,l) (local_add_return(i, (l)) < 0)

/* Use these for per-cpu local_t variables: on some archs they are
 * much more efficient than these naive implementations.  Note they take
 * a variable, not an address.
 */
#define __local_inc(l)		((l)->a.counter++)
#define __local_dec(l)		((l)->a.counter--)
#define __local_add(i,l)	((l)->a.counter+=(i))
#define __local_sub(i,l)	((l)->a.counter-=(i))
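
/*
 * Note that the __local_* forms compile to plain load/modify/store
 * sequences and are therefore not safe against an interrupt hitting the
 * same counter; use them only where such races cannot occur.
 */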

/* Need to disable preemption for the cpu local counters otherwise we could
   still access a variable of a previous CPU in a non-atomic way. */
#define cpu_local_wrap_v(l)		\
	({ local_t res__;		\
	   preempt_disable();		\
	   res__ = (l);			\
	   preempt_enable();		\
	   res__; })
#define cpu_local_wrap(l)		\
	({ preempt_disable();		\
	   l;				\
	   preempt_enable(); })

#define cpu_local_read(l)	cpu_local_wrap_v(local_read(&__get_cpu_var(l)))
#define cpu_local_set(l, i)	cpu_local_wrap(local_set(&__get_cpu_var(l), (i)))
#define cpu_local_inc(l)	cpu_local_wrap(local_inc(&__get_cpu_var(l)))
#define cpu_local_dec(l)	cpu_local_wrap(local_dec(&__get_cpu_var(l)))
#define cpu_local_add(i, l)	cpu_local_wrap(local_add((i), &__get_cpu_var(l)))
#define cpu_local_sub(i, l)	cpu_local_wrap(local_sub((i), &__get_cpu_var(l)))
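
/*
 * Example usage (illustrative only; "page_hits" is a made-up counter):
 *
 *	static DEFINE_PER_CPU(local_t, page_hits) = LOCAL_INIT(0);
 *
 *	cpu_local_inc(page_hits);
 *	printk(KERN_DEBUG "this CPU: %ld hits\n", cpu_local_read(page_hits));
 */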

#define __cpu_local_inc(l)	cpu_local_inc(l)
#define __cpu_local_dec(l)	cpu_local_dec(l)
#define __cpu_local_add(i, l)	cpu_local_add((i), (l))
#define __cpu_local_sub(i, l)	cpu_local_sub((i), (l))

#endif /* _ARCH_MIPS_LOCAL_H */