#ifndef _BLACKFIN_BITOPS_H
#define _BLACKFIN_BITOPS_H

/*
 * Copyright 1992, Linus Torvalds.
 */
#include <linux/compiler.h>
#include <asm/byteorder.h>	/* swab32 */
#include <asm/system.h>		/* save_flags */

#ifdef __KERNEL__

#ifndef _LINUX_BITOPS_H
#error only <linux/bitops.h> can be included directly
#endif
#include <asm-generic/bitops/ffs.h>
#include <asm-generic/bitops/__ffs.h>
#include <asm-generic/bitops/sched.h>
#include <asm-generic/bitops/ffz.h>
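/*
 * Note on the helpers below: the plain set/clear/change and test_and_*
 * operations gain atomicity by disabling interrupts around the
 * read-modify-write, while the __-prefixed variants do no locking and
 * rely on the caller to guarantee exclusion.
 */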
static __inline__ void set_bit(int nr, volatile unsigned long *addr)
{
	int *a = (int *)addr;
	int mask;
	unsigned long flags;

	a += nr >> 5;
	mask = 1 << (nr & 0x1f);
	local_irq_save(flags);
	*a |= mask;
	local_irq_restore(flags);
}
static __inline__ void __set_bit(int nr, volatile unsigned long *addr)
{
	int *a = (int *)addr;
	int mask;

	a += nr >> 5;
	mask = 1 << (nr & 0x1f);
	*a |= mask;
}
/*
 * clear_bit() doesn't provide any barrier for the compiler.
 */
#define smp_mb__before_clear_bit()	barrier()
#define smp_mb__after_clear_bit()	barrier()
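/*
 * Usage sketch (REQ_PENDING and req are illustrative names, not part of
 * this header): a caller that needs its earlier stores ordered before
 * the bit is cleared writes
 *
 *	smp_mb__before_clear_bit();
 *	clear_bit(REQ_PENDING, &req->flags);
 */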
static __inline__ void clear_bit(int nr, volatile unsigned long *addr)
{
	int *a = (int *)addr;
	int mask;
	unsigned long flags;

	a += nr >> 5;
	mask = 1 << (nr & 0x1f);
	local_irq_save(flags);
	*a &= ~mask;
	local_irq_restore(flags);
}
static __inline__ void __clear_bit(int nr, volatile unsigned long *addr)
{
	int *a = (int *)addr;
	int mask;

	a += nr >> 5;
	mask = 1 << (nr & 0x1f);
	*a &= ~mask;
}
static __inline__ void change_bit(int nr, volatile unsigned long *addr)
{
	int mask;
	unsigned long flags;
	unsigned long *ADDR = (unsigned long *)addr;

	ADDR += nr >> 5;
	mask = 1 << (nr & 31);
	local_irq_save(flags);
	*ADDR ^= mask;
	local_irq_restore(flags);
}
static __inline__ void __change_bit(int nr, volatile unsigned long *addr)
{
	int mask;
	unsigned long *ADDR = (unsigned long *)addr;

	ADDR += nr >> 5;
	mask = 1 << (nr & 31);
	*ADDR ^= mask;
}
static __inline__ int test_and_set_bit(int nr, void *addr)
{
	int mask, retval;
	volatile unsigned int *a = (volatile unsigned int *)addr;
	unsigned long flags;

	a += nr >> 5;
	mask = 1 << (nr & 0x1f);
	local_irq_save(flags);
	retval = (mask & *a) != 0;
	*a |= mask;
	local_irq_restore(flags);

	return retval;
}
static __inline__ int __test_and_set_bit(int nr, volatile unsigned long *addr)
{
	int mask, retval;
	volatile unsigned int *a = (volatile unsigned int *)addr;

	a += nr >> 5;
	mask = 1 << (nr & 0x1f);
	retval = (mask & *a) != 0;
	*a |= mask;
	return retval;
}
static __inline__ int test_and_clear_bit(int nr, volatile unsigned long *addr)
{
	int mask, retval;
	volatile unsigned int *a = (volatile unsigned int *)addr;
	unsigned long flags;

	a += nr >> 5;
	mask = 1 << (nr & 0x1f);
	local_irq_save(flags);
	retval = (mask & *a) != 0;
	*a &= ~mask;
	local_irq_restore(flags);

	return retval;
}
static __inline__ int __test_and_clear_bit(int nr, volatile unsigned long *addr)
{
	int mask, retval;
	volatile unsigned int *a = (volatile unsigned int *)addr;

	a += nr >> 5;
	mask = 1 << (nr & 0x1f);
	retval = (mask & *a) != 0;
	*a &= ~mask;
	return retval;
}
static __inline__ int test_and_change_bit(int nr, volatile unsigned long *addr)
{
	int mask, retval;
	volatile unsigned int *a = (volatile unsigned int *)addr;
	unsigned long flags;

	a += nr >> 5;
	mask = 1 << (nr & 0x1f);
	local_irq_save(flags);
	retval = (mask & *a) != 0;
	*a ^= mask;
	local_irq_restore(flags);

	return retval;
}
static __inline__ int __test_and_change_bit(int nr,
					    volatile unsigned long *addr)
{
	int mask, retval;
	volatile unsigned int *a = (volatile unsigned int *)addr;

	a += nr >> 5;
	mask = 1 << (nr & 0x1f);
	retval = (mask & *a) != 0;
	*a ^= mask;
	return retval;
}
/*
 * This routine doesn't need to be atomic.
 */
static __inline__ int __constant_test_bit(int nr, const void *addr)
{
	return ((1UL << (nr & 31)) &
		(((const volatile unsigned int *)addr)[nr >> 5])) != 0;
}
static __inline__ int __test_bit(int nr, const void *addr)
{
	int *a = (int *)addr;
	int mask;

	a += nr >> 5;
	mask = 1 << (nr & 0x1f);
	return ((mask & *a) != 0);
}
#define test_bit(nr,addr) \
(__builtin_constant_p(nr) ? \
 __constant_test_bit((nr),(addr)) : \
 __test_bit((nr),(addr)))
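/*
 * Example (DEV_READY, dev and wake_up_waiters are illustrative names,
 * not defined here): a constant bit number such as
 *
 *	if (test_bit(DEV_READY, &dev->flags))
 *		wake_up_waiters(dev);
 *
 * resolves to __constant_test_bit() at compile time via
 * __builtin_constant_p(); a variable bit number falls back to __test_bit().
 */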
#include <asm-generic/bitops/find.h>
#include <asm-generic/bitops/hweight.h>
#include <asm-generic/bitops/lock.h>

#include <asm-generic/bitops/ext2-atomic.h>
#include <asm-generic/bitops/ext2-non-atomic.h>

#include <asm-generic/bitops/minix.h>

#endif				/* __KERNEL__ */

#include <asm-generic/bitops/fls.h>
#include <asm-generic/bitops/fls64.h>

#endif				/* _BLACKFIN_BITOPS_H */