#ifndef _I386_BITOPS_H
#define _I386_BITOPS_H

/*
 * Copyright 1992, Linus Torvalds.
 */

/*
 * These have to be done with inline assembly: that way the bit-setting
 * is guaranteed to be atomic. All bit operations return 0 if the bit
 * was cleared before the operation and != 0 if it was not.
 *
 * bit 0 is the LSB of addr; bit 32 is the LSB of (addr+1).
 */

#ifdef __SMP__
#define LOCK_PREFIX "lock ; "
#else
#define LOCK_PREFIX ""
#endif
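/*
 * On SMP, LOCK_PREFIX expands to the x86 "lock" prefix, making the
 * read-modify-write instruction that follows it atomic with respect to
 * the other processors. On UP the bus lock is unnecessary (a single
 * instruction is already atomic with respect to local interrupts), so
 * the prefix is left out to save cycles.
 */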
/*
 * Function prototypes to keep gcc -Wall happy
 */
extern void set_bit(int nr, volatile void * addr);
extern void clear_bit(int nr, volatile void * addr);
extern void change_bit(int nr, volatile void * addr);
extern int test_and_set_bit(int nr, volatile void * addr);
extern int test_and_clear_bit(int nr, volatile void * addr);
extern int test_and_change_bit(int nr, volatile void * addr);
extern int __constant_test_bit(int nr, const volatile void * addr);
extern int __test_bit(int nr, volatile void * addr);
extern int find_first_zero_bit(void * addr, unsigned size);
extern int find_next_zero_bit (void * addr, int size, int offset);
extern unsigned long ffz(unsigned long word);
/*
 * Some hacks to defeat gcc over-optimizations..
 */
struct __dummy { unsigned long a[100]; };
#define ADDR (*(volatile struct __dummy *) addr)
#define CONST_ADDR (*(volatile const struct __dummy *) addr)
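/*
 * Casting addr to a large dummy structure makes the "m" constraints in
 * the asms below cover a 400-byte region rather than a single word, so
 * gcc cannot assume the asm touches only *(unsigned long *)addr and
 * keep neighbouring words cached in registers across a bit operation.
 */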
extern __inline__ void set_bit(int nr, volatile void * addr)
{
        __asm__ __volatile__( LOCK_PREFIX
                "btsl %1,%0"
                :"=m" (ADDR)
                :"ir" (nr));
}

extern __inline__ void clear_bit(int nr, volatile void * addr)
{
        __asm__ __volatile__( LOCK_PREFIX
                "btrl %1,%0"
                :"=m" (ADDR)
                :"ir" (nr));
}

extern __inline__ void change_bit(int nr, volatile void * addr)
{
        __asm__ __volatile__( LOCK_PREFIX
                "btcl %1,%0"
                :"=m" (ADDR)
                :"ir" (nr));
}

extern __inline__ int test_and_set_bit(int nr, volatile void * addr)
{
        int oldbit;

        __asm__ __volatile__( LOCK_PREFIX
                "btsl %2,%1\n\tsbbl %0,%0"
                :"=r" (oldbit),"=m" (ADDR)
                :"ir" (nr));
        return oldbit;
}

extern __inline__ int test_and_clear_bit(int nr, volatile void * addr)
{
        int oldbit;

        __asm__ __volatile__( LOCK_PREFIX
                "btrl %2,%1\n\tsbbl %0,%0"
                :"=r" (oldbit),"=m" (ADDR)
                :"ir" (nr));
        return oldbit;
}

extern __inline__ int test_and_change_bit(int nr, volatile void * addr)
{
        int oldbit;

        __asm__ __volatile__( LOCK_PREFIX
                "btcl %2,%1\n\tsbbl %0,%0"
                :"=r" (oldbit),"=m" (ADDR)
                :"ir" (nr));
        return oldbit;
}
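/*
 * The bt{s,r,c}l instructions copy the old bit value into the carry
 * flag; "sbbl %0,%0" then turns CF into 0 or -1, which provides the
 * zero / non-zero return value promised at the top of this file.
 *
 * Illustrative usage (a sketch, not part of this header; "inuse" is a
 * hypothetical caller-owned bitmap):
 *
 *      static unsigned long inuse[2];          64-bit bitmap
 *
 *      if (!test_and_set_bit(5, inuse)) {
 *              bit 5 was clear and is now atomically set: slot 5 is ours
 *      }
 *      clear_bit(5, inuse);                    release it again
 */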
/*
 * This routine doesn't need to be atomic.
 */
extern __inline__ int __constant_test_bit(int nr, const volatile void * addr)
{
        return ((1UL << (nr & 31)) & (((const volatile unsigned int *) addr)[nr >> 5])) != 0;
}

extern __inline__ int __test_bit(int nr, volatile void * addr)
{
        int oldbit;

        __asm__ __volatile__(
                "btl %2,%1\n\tsbbl %0,%0"
                :"=r" (oldbit)
                :"m" (ADDR),"ir" (nr));
        return oldbit;
}

#define test_bit(nr,addr) \
(__builtin_constant_p(nr) ? \
 __constant_test_bit((nr),(addr)) : \
 __test_bit((nr),(addr)))
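/*
 * test_bit() picks its implementation at compile time: when nr is a
 * compile-time constant, __builtin_constant_p(nr) is true and gcc can
 * fold __constant_test_bit() down to a simple AND with an immediate
 * mask; a variable nr goes through the btl instruction instead.
 */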
/*
 * Find-bit routines..
 */
extern __inline__ int find_first_zero_bit(void * addr, unsigned size)
{
        int res;

        if (!size)
                return 0;
        __asm__("cld\n\t"
                "movl $-1,%%eax\n\t"
                "xorl %%edx,%%edx\n\t"
                "repe; scasl\n\t"
                "je 1f\n\t"
                "xorl -4(%%edi),%%eax\n\t"
                "subl $4,%%edi\n\t"
                "bsfl %%eax,%%edx\n"
                "1:\tsubl %%ebx,%%edi\n\t"
                "shll $3,%%edi\n\t"
                "addl %%edi,%%edx"
                :"=d" (res)
                :"c" ((size + 31) >> 5), "D" (addr), "b" (addr)
                :"ax", "cx", "di");
        return res;
}
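/*
 * How the scan works: "repe; scasl" compares 32-bit words against the
 * all-ones pattern in %eax until one differs (i.e. contains a zero bit)
 * or the count in %ecx runs out. On a mismatch, XORing the word with -1
 * complements it, so bsfl can locate the first zero bit as the first
 * set bit. Finally %edi - %ebx is the byte offset scanned past, and
 * shifting it left by 3 converts bytes to bits for the result.
 */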
extern __inline__ int find_next_zero_bit (void * addr, int size, int offset)
{
        unsigned long * p = ((unsigned long *) addr) + (offset >> 5);
        int set = 0, bit = offset & 31, res;

        if (bit) {
                /*
                 * Look for a zero in the rest of the first 32-bit word
                 */
                __asm__("bsfl %1,%0\n\t"
                        "jne 1f\n\t"
                        "movl $32, %0\n"
                        "1:"
                        : "=r" (set)
                        : "r" (~(*p >> bit)));
                if (set < (32 - bit))
                        return set + offset;
                set = 32 - bit;
                p++;
        }
        /*
         * No zero yet, search remaining full words for a zero
         */
        res = find_first_zero_bit (p, size - 32 * (p - (unsigned long *) addr));
        return (offset + set + res);
}
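/*
 * Illustrative sketch (hypothetical caller, not part of this header):
 * scanning an allocation bitmap for a free entry, retrying because the
 * find is not atomic with the claim:
 *
 *      int slot;
 *
 *      do {
 *              slot = find_first_zero_bit(map, nbits);
 *              if (slot >= nbits)
 *                      return -1;              bitmap is full
 *      } while (test_and_set_bit(slot, map));
 *      slot now belongs to us
 */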
/*
 * ffz = Find First Zero in word. Undefined if no zero exists,
 * so code should check against ~0UL first..
 */
extern __inline__ unsigned long ffz(unsigned long word)
{
        __asm__("bsfl %1,%0"
                :"=r" (word)
                :"r" (~word));
        return word;
}
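/*
 * Example: ffz(0x0000000f) == 4, since bit 4 is the lowest clear bit.
 * ffz(~0UL) is undefined because bsfl leaves its destination undefined
 * when the (complemented) source operand is zero.
 */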
#ifdef __KERNEL__

/*
 * ffs: find first bit set. This is defined the same way as
 * the libc and compiler builtin ffs routines, therefore
 * differs in spirit from the above ffz (man ffs).
 */

extern __inline__ int ffs(int x)
{
        int r;

        __asm__("bsfl %1,%0\n\t"
                "jnz 1f\n\t"
                "movl $-1,%0\n"
                "1:" : "=r" (r) : "g" (x));
        return r+1;
}
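/*
 * Unlike ffz() above, ffs() is one-based and handles zero: for x == 0,
 * bsfl sets ZF, the jnz falls through, and r is loaded with -1 so that
 * r+1 == 0. For example, ffs(8) == 4 (bit 3 is the lowest set bit).
 */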
/*
 * hweightN: returns the hamming weight (i.e. the number
 * of bits set) of a N-bit word
 */

#define hweight32(x) generic_hweight32(x)
#define hweight16(x) generic_hweight16(x)
#define hweight8(x) generic_hweight8(x)
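/*
 * i386 has no population-count instruction, so these simply fall back
 * to the kernel's portable generic_hweightN() helpers (provided by the
 * generic bitops code in this era, e.g. <linux/bitops.h>). For example,
 * hweight8(0x41) == 2.
 */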
#endif /* __KERNEL__ */
#ifdef __KERNEL__

#define ext2_set_bit                 test_and_set_bit
#define ext2_clear_bit               test_and_clear_bit
#define ext2_test_bit                test_bit
#define ext2_find_first_zero_bit     find_first_zero_bit
#define ext2_find_next_zero_bit      find_next_zero_bit

/* Bitmap functions for the minix filesystem. */
#define minix_set_bit(nr,addr) test_and_set_bit(nr,addr)
#define minix_clear_bit(nr,addr) test_and_clear_bit(nr,addr)
#define minix_test_bit(nr,addr) test_bit(nr,addr)
#define minix_find_first_zero_bit(addr,size) find_first_zero_bit(addr,size)

#endif /* __KERNEL__ */

#endif /* _I386_BITOPS_H */