Make HZ_TO_STD macro name lowercase.
[linux-2.6/linux-mips.git] / include / asm-i386 / bitops.h
blob53486c97788ccaf8e369a859918b6d35fa10914b
1 #ifndef _I386_BITOPS_H
2 #define _I386_BITOPS_H
4 /*
5 * Copyright 1992, Linus Torvalds.
6 */
8 #include <linux/config.h>
/*
 * These have to be done with inline assembly: that way the bit-setting
 * is guaranteed to be atomic. All bit operations return 0 if the bit
 * was cleared before the operation and != 0 if it was not.
 *
 * bit 0 is the LSB of addr; bit 32 is the LSB of (addr+1).
 */
/*
 * On SMP kernels the "lock" prefix makes the read-modify-write bit
 * instructions below atomic across CPUs; on UP it is unnecessary, so
 * it expands to nothing.
 */
#ifdef CONFIG_SMP
#define LOCK_PREFIX "lock ; "
#else
#define LOCK_PREFIX ""
#endif
/*
 * Function prototypes to keep gcc -Wall happy
 */
/* Atomic read-modify-write bit operations (LOCK_PREFIX-protected on SMP). */
extern void set_bit(int nr, volatile void * addr);
extern void clear_bit(int nr, volatile void * addr);
extern void change_bit(int nr, volatile void * addr);
extern int test_and_set_bit(int nr, volatile void * addr);
extern int test_and_clear_bit(int nr, volatile void * addr);
extern int test_and_change_bit(int nr, volatile void * addr);
/* Non-atomic helpers selected by the test_bit() macro below. */
extern int __constant_test_bit(int nr, const volatile void * addr);
extern int __test_bit(int nr, volatile void * addr);
/* Bitmap search routines. */
extern int find_first_zero_bit(void * addr, unsigned size);
extern int find_next_zero_bit (void * addr, int size, int offset);
extern unsigned long ffz(unsigned long word);
/*
 * Some hacks to defeat gcc over-optimizations..
 *
 * The oversized dummy struct makes the "m" asm operand cover a wide
 * span of memory, so gcc cannot cache bitmap words in registers
 * across the inline-asm bit operations.
 */
struct __dummy { unsigned long a[100]; };
#define ADDR (*(volatile struct __dummy *) addr)
#define CONST_ADDR (*(volatile const struct __dummy *) addr)
46 extern __inline__ void set_bit(int nr, volatile void * addr)
48 __asm__ __volatile__( LOCK_PREFIX
49 "btsl %1,%0"
50 :"=m" (ADDR)
51 :"Ir" (nr));
54 extern __inline__ void clear_bit(int nr, volatile void * addr)
56 __asm__ __volatile__( LOCK_PREFIX
57 "btrl %1,%0"
58 :"=m" (ADDR)
59 :"Ir" (nr));
62 extern __inline__ void change_bit(int nr, volatile void * addr)
64 __asm__ __volatile__( LOCK_PREFIX
65 "btcl %1,%0"
66 :"=m" (ADDR)
67 :"Ir" (nr));
70 extern __inline__ int test_and_set_bit(int nr, volatile void * addr)
72 int oldbit;
74 __asm__ __volatile__( LOCK_PREFIX
75 "btsl %2,%1\n\tsbbl %0,%0"
76 :"=r" (oldbit),"=m" (ADDR)
77 :"Ir" (nr));
78 return oldbit;
81 extern __inline__ int test_and_clear_bit(int nr, volatile void * addr)
83 int oldbit;
85 __asm__ __volatile__( LOCK_PREFIX
86 "btrl %2,%1\n\tsbbl %0,%0"
87 :"=r" (oldbit),"=m" (ADDR)
88 :"Ir" (nr));
89 return oldbit;
92 extern __inline__ int test_and_change_bit(int nr, volatile void * addr)
94 int oldbit;
96 __asm__ __volatile__( LOCK_PREFIX
97 "btcl %2,%1\n\tsbbl %0,%0"
98 :"=r" (oldbit),"=m" (ADDR)
99 :"Ir" (nr));
100 return oldbit;
/*
 * This routine doesn't need to be atomic.
 *
 * __constant_test_bit - test a bit when @nr is a compile-time constant
 * @nr: bit number (bit 0 is the LSB of the first 32-bit word)
 * @addr: base address of the bitmap
 *
 * Pure C so the compiler can fold the whole expression to a constant
 * when both arguments are known.  Returns 1 if the bit is set, 0 if
 * clear.  Braces restored (lost in extraction).
 */
extern __inline__ int __constant_test_bit(int nr, const volatile void * addr)
{
	return ((1UL << (nr & 31)) & (((const volatile unsigned int *) addr)[nr >> 5])) != 0;
}
111 extern __inline__ int __test_bit(int nr, volatile void * addr)
113 int oldbit;
115 __asm__ __volatile__(
116 "btl %2,%1\n\tsbbl %0,%0"
117 :"=r" (oldbit)
118 :"m" (ADDR),"Ir" (nr));
119 return oldbit;
/*
 * test_bit() dispatches at compile time: a constant nr goes through
 * __constant_test_bit() so the whole test can fold to a constant;
 * anything else uses the btl-based __test_bit().
 */
#define test_bit(nr,addr) \
(__builtin_constant_p(nr) ? \
 __constant_test_bit((nr),(addr)) : \
 __test_bit((nr),(addr)))
/*
 * Find-bit routines..
 */

/*
 * find_first_zero_bit - find the first zero bit in a bitmap
 * @addr: start of the bitmap
 * @size: size of the bitmap in bits
 *
 * Returns 0 immediately for an empty bitmap.  Otherwise "repe; scasl"
 * skips 32-bit words that are all ones ((size + 31) >> 5 words total);
 * when a word with a zero is found, xor against the all-ones pattern
 * in %eax exposes the zero bits as ones and bsfl locates the first.
 * The epilogue converts the word address back to a bit index using the
 * original base kept in %ebx.  If every word is all ones the result is
 * the (rounded-up) size.  Braces restored (lost in extraction).
 *
 * NOTE(review): 32-bit-only asm — %edi/%ebx pointer arithmetic assumes
 * i386; also clobbers %ebx, which conflicts with PIC code.
 */
extern __inline__ int find_first_zero_bit(void * addr, unsigned size)
{
	int d0, d1, d2;
	int res;

	if (!size)
		return 0;
	__asm__("movl $-1,%%eax\n\t"
		"xorl %%edx,%%edx\n\t"
		"repe; scasl\n\t"
		"je 1f\n\t"
		"xorl -4(%%edi),%%eax\n\t"
		"subl $4,%%edi\n\t"
		"bsfl %%eax,%%edx\n"
		"1:\tsubl %%ebx,%%edi\n\t"
		"shll $3,%%edi\n\t"
		"addl %%edi,%%edx"
		:"=d" (res), "=&c" (d0), "=&D" (d1), "=&a" (d2)
		:"1" ((size + 31) >> 5), "2" (addr), "b" (addr));
	return res;
}
/*
 * find_next_zero_bit - find the first zero bit at or after @offset
 * @addr: start of the bitmap
 * @size: size of the bitmap in bits
 * @offset: bit number to start searching from
 *
 * If @offset is not word-aligned, the remainder of the first word is
 * scanned with bsfl on the complement of (*p >> bit): a set bit there
 * marks a zero in the original.  bsfl leaves ZF set when its source is
 * zero, so "jne 1f" skips the not-found fixup (set = 32).  If no zero
 * is found in the partial word, the search falls through to
 * find_first_zero_bit() on the remaining whole words.  Braces and the
 * closing brace of the if-block restored (lost in extraction).
 */
extern __inline__ int find_next_zero_bit (void * addr, int size, int offset)
{
	unsigned long * p = ((unsigned long *) addr) + (offset >> 5);
	int set = 0, bit = offset & 31, res;

	if (bit) {
		/*
		 * Look for zero in first byte
		 */
		__asm__("bsfl %1,%0\n\t"
			"jne 1f\n\t"
			"movl $32, %0\n"
			"1:"
			: "=r" (set)
			: "r" (~(*p >> bit)));
		if (set < (32 - bit))
			return set + offset;
		set = 32 - bit;
		p++;
	}
	/*
	 * No zero yet, search remaining full bytes for a zero
	 */
	res = find_first_zero_bit (p, size - 32 * (p - (unsigned long *) addr));
	return (offset + set + res);
}
/*
 * ffz = Find First Zero in word. Undefined if no zero exists,
 * so code should check against ~0UL first..
 *
 * bsfl on the complement of @word finds the lowest set bit of ~word,
 * i.e. the lowest zero bit of word.  bsfl's destination is undefined
 * when the source is zero (word == ~0UL), hence the caveat above.
 * Braces restored (lost in extraction).
 */
extern __inline__ unsigned long ffz(unsigned long word)
{
	__asm__("bsfl %1,%0"
		:"=r" (word)
		:"r" (~word));
	return word;
}
191 #ifdef __KERNEL__
/*
 * ffs: find first bit set. This is defined the same way as
 * the libc and compiler builtin ffs routines, therefore
 * differs in spirit from the above ffz (man ffs).
 */
/*
 * ffs - find first set bit, 1-based; returns 0 when @x is 0
 * @x: word to search
 *
 * bsfl yields the 0-based index of the lowest set bit and sets ZF
 * when the source is zero; in that case "movl $-1,%0" forces r = -1
 * so r+1 gives the libc-style result of 0.  Braces restored (lost in
 * extraction).
 *
 * Constraint fixed from "g" to "rm": "g" also allows an immediate
 * operand, but bsfl cannot take an immediate, so a constant-folded
 * @x would fail to assemble.
 */
extern __inline__ int ffs(int x)
{
	int r;

	__asm__("bsfl %1,%0\n\t"
		"jnz 1f\n\t"
		"movl $-1,%0\n"
		"1:" : "=r" (r) : "rm" (x));
	return r+1;
}
/*
 * hweightN: returns the hamming weight (i.e. the number
 * of bits set) of a N-bit word
 *
 * No i386-specific implementation here: these map to the generic
 * C popcount helpers.
 */
#define hweight32(x) generic_hweight32(x)
#define hweight16(x) generic_hweight16(x)
#define hweight8(x) generic_hweight8(x)
#endif /* __KERNEL__ */

#ifdef __KERNEL__

/*
 * ext2 bitmap operations: on this architecture they are plain
 * aliases for the native bit operations above.
 */
#define ext2_set_bit test_and_set_bit
#define ext2_clear_bit test_and_clear_bit
#define ext2_test_bit test_bit
#define ext2_find_first_zero_bit find_first_zero_bit
#define ext2_find_next_zero_bit find_next_zero_bit
/* Bitmap functions for the minix filesystem. */
/* Like the ext2 ones, these are direct aliases for the native ops. */
#define minix_test_and_set_bit(nr,addr) test_and_set_bit(nr,addr)
#define minix_set_bit(nr,addr) set_bit(nr,addr)
#define minix_test_and_clear_bit(nr,addr) test_and_clear_bit(nr,addr)
#define minix_test_bit(nr,addr) test_bit(nr,addr)
#define minix_find_first_zero_bit(addr,size) find_first_zero_bit(addr,size)

#endif /* __KERNEL__ */

#endif /* _I386_BITOPS_H */