Allow coexistence of N build and AC build.
[tomato.git] / release / src-rt-6.x / linux / linux-2.6 / include / asm-blackfin / bitops.h
blob27c2d0e48e1b6b314e88f18b274dd15c41541017
1 #ifndef _BLACKFIN_BITOPS_H
2 #define _BLACKFIN_BITOPS_H
4 /*
5 * Copyright 1992, Linus Torvalds.
6 */
8 #include <linux/compiler.h>
9 #include <asm/byteorder.h> /* swab32 */
10 #include <asm/system.h> /* save_flags */
12 #ifdef __KERNEL__
14 #include <asm-generic/bitops/ffs.h>
15 #include <asm-generic/bitops/__ffs.h>
16 #include <asm-generic/bitops/sched.h>
17 #include <asm-generic/bitops/ffz.h>
/*
 * set_bit - atomically set bit @nr in the bitmap at @addr.
 * Atomicity is achieved by masking interrupts around the update
 * (uniprocessor Blackfin has no SMP concern here).
 */
static __inline__ void set_bit(int nr, volatile unsigned long *addr)
{
	unsigned long flags;
	int *word = (int *)addr + (nr >> 5);
	int mask = 1 << (nr & 0x1f);

	local_irq_save(flags);
	*word |= mask;
	local_irq_restore(flags);
}
/*
 * __set_bit - non-atomic variant of set_bit(); caller must ensure
 * no concurrent access to the same word.
 */
static __inline__ void __set_bit(int nr, volatile unsigned long *addr)
{
	/* Locate the 32-bit word holding bit nr, then OR in its mask. */
	int *word = (int *)addr + (nr >> 5);

	*word |= 1 << (nr & 0x1f);
}
/*
 * clear_bit() doesn't provide any barrier for the compiler.
 */
45 #define smp_mb__before_clear_bit() barrier()
46 #define smp_mb__after_clear_bit() barrier()
/*
 * clear_bit - atomically clear bit @nr in the bitmap at @addr,
 * protecting the read-modify-write with an irq-save section.
 */
static __inline__ void clear_bit(int nr, volatile unsigned long *addr)
{
	unsigned long flags;
	int *word = (int *)addr + (nr >> 5);
	int mask = 1 << (nr & 0x1f);

	local_irq_save(flags);
	*word &= ~mask;
	local_irq_restore(flags);
}
/*
 * __clear_bit - non-atomic variant of clear_bit(); caller guarantees
 * exclusive access to the word.
 */
static __inline__ void __clear_bit(int nr, volatile unsigned long *addr)
{
	/* Word index is nr/32; clear the bit with an AND of the inverted mask. */
	int *word = (int *)addr + (nr >> 5);

	*word &= ~(1 << (nr & 0x1f));
}
/*
 * change_bit - atomically toggle bit @nr in the bitmap at @addr.
 *
 * Fixes vs. the original:
 *  - `flags` was declared `int`; local_irq_save() stores the saved
 *    interrupt state into an `unsigned long`, so an int could truncate
 *    it on any port where the state is wider than int.
 *  - The word pointer was `unsigned long *`, stepping in long-sized
 *    units, while every sibling (set_bit/clear_bit) steps in 32-bit
 *    `int` units for `nr >> 5`.  These only coincide when
 *    sizeof(long) == 4; use int-sized indexing for consistency.
 */
static __inline__ void change_bit(int nr, volatile unsigned long *addr)
{
	unsigned long flags;
	int *word = (int *)addr + (nr >> 5);
	int mask = 1 << (nr & 31);

	local_irq_save(flags);
	*word ^= mask;
	local_irq_restore(flags);
}
/*
 * __change_bit - non-atomic variant of change_bit(); caller guarantees
 * exclusive access to the word.
 *
 * Fix vs. the original: it indexed with an `unsigned long *` while
 * computing the word offset as `nr >> 5` (a 32-bit word index) and the
 * mask as a 32-bit int — only correct when sizeof(long) == 4.  Index in
 * `int`-sized words like __set_bit()/__clear_bit() so the word math and
 * the mask agree on all targets.
 */
static __inline__ void __change_bit(int nr, volatile unsigned long *addr)
{
	int *word = (int *)addr + (nr >> 5);

	*word ^= 1 << (nr & 31);
}
/*
 * test_and_set_bit - atomically set bit @nr and return its previous
 * value (0 or 1).  Read and write happen inside one irq-save section.
 * NOTE(review): unlike its siblings this takes `void *addr`, not
 * `volatile unsigned long *` — kept as-is to avoid breaking callers.
 */
static __inline__ int test_and_set_bit(int nr, void *addr)
{
	volatile unsigned int *word = (volatile unsigned int *)addr + (nr >> 5);
	int mask = 1 << (nr & 0x1f);
	int oldbit;
	unsigned long flags;

	local_irq_save(flags);
	oldbit = (*word & mask) != 0;
	*word |= mask;
	local_irq_restore(flags);

	return oldbit;
}
/*
 * __test_and_set_bit - non-atomic test_and_set_bit(); returns the old
 * bit value (0 or 1) and sets the bit.  Caller must serialize.
 */
static __inline__ int __test_and_set_bit(int nr, volatile unsigned long *addr)
{
	volatile unsigned int *word = (volatile unsigned int *)addr + (nr >> 5);
	int mask = 1 << (nr & 0x1f);
	int oldbit = (*word & mask) != 0;

	*word |= mask;
	return oldbit;
}
/*
 * test_and_clear_bit - atomically clear bit @nr and return its
 * previous value (0 or 1); the RMW is guarded by an irq-save section.
 */
static __inline__ int test_and_clear_bit(int nr, volatile unsigned long *addr)
{
	volatile unsigned int *word = (volatile unsigned int *)addr + (nr >> 5);
	int mask = 1 << (nr & 0x1f);
	int oldbit;
	unsigned long flags;

	local_irq_save(flags);
	oldbit = (*word & mask) != 0;
	*word &= ~mask;
	local_irq_restore(flags);

	return oldbit;
}
/*
 * __test_and_clear_bit - non-atomic test_and_clear_bit(); returns the
 * old bit value (0 or 1) and clears the bit.  Caller must serialize.
 */
static __inline__ int __test_and_clear_bit(int nr, volatile unsigned long *addr)
{
	volatile unsigned int *word = (volatile unsigned int *)addr + (nr >> 5);
	int mask = 1 << (nr & 0x1f);
	int oldbit = (*word & mask) != 0;

	*word &= ~mask;
	return oldbit;
}
/*
 * test_and_change_bit - atomically toggle bit @nr and return its
 * previous value (0 or 1); the RMW is guarded by an irq-save section.
 */
static __inline__ int test_and_change_bit(int nr, volatile unsigned long *addr)
{
	volatile unsigned int *word = (volatile unsigned int *)addr + (nr >> 5);
	int mask = 1 << (nr & 0x1f);
	int oldbit;
	unsigned long flags;

	local_irq_save(flags);
	oldbit = (*word & mask) != 0;
	*word ^= mask;
	local_irq_restore(flags);
	return oldbit;
}
/*
 * __test_and_change_bit - non-atomic test_and_change_bit(); returns
 * the old bit value (0 or 1) and toggles the bit.  Caller serializes.
 */
static __inline__ int __test_and_change_bit(int nr,
					    volatile unsigned long *addr)
{
	volatile unsigned int *word = (volatile unsigned int *)addr + (nr >> 5);
	int mask = 1 << (nr & 0x1f);
	int oldbit = (*word & mask) != 0;

	*word ^= mask;
	return oldbit;
}
/*
 * This routine doesn't need to be atomic.
 */
/*
 * __constant_test_bit - test bit @nr when @nr is a compile-time
 * constant; plain (non-atomic) read, returns 0 or 1.
 */
static __inline__ int __constant_test_bit(int nr, const void *addr)
{
	const volatile unsigned int *p = (const volatile unsigned int *)addr;

	return (p[nr >> 5] >> (nr & 31)) & 1;
}
/*
 * __test_bit - test bit @nr for a non-constant @nr; plain
 * (non-atomic) read, returns 0 or 1.
 */
static __inline__ int __test_bit(int nr, const void *addr)
{
	const unsigned int *word = (const unsigned int *)addr + (nr >> 5);

	return (*word >> (nr & 0x1f)) & 1;
}
/*
 * test_bit - return whether bit @nr is set at @addr (non-atomic read).
 * Dispatches at compile time: a constant @nr goes to
 * __constant_test_bit(), anything else to __test_bit().
 */
#define test_bit(nr,addr) \
(__builtin_constant_p(nr) ? \
 __constant_test_bit((nr),(addr)) : \
 __test_bit((nr),(addr)))
200 #include <asm-generic/bitops/find.h>
201 #include <asm-generic/bitops/hweight.h>
203 #include <asm-generic/bitops/ext2-atomic.h>
204 #include <asm-generic/bitops/ext2-non-atomic.h>
206 #include <asm-generic/bitops/minix.h>
208 #endif /* __KERNEL__ */
210 #include <asm-generic/bitops/fls.h>
211 #include <asm-generic/bitops/fls64.h>
213 #endif /* _BLACKFIN_BITOPS_H */