[btrfs-progs-unstable/devel.git] / kernel-lib / bitops.h
#ifndef _PERF_LINUX_BITOPS_H_
#define _PERF_LINUX_BITOPS_H_

#include <linux/kernel.h>

#ifndef DIV_ROUND_UP
#define DIV_ROUND_UP(n, d)	(((n) + (d) - 1) / (d))
#endif

#define BITS_PER_BYTE		8
#define BITS_TO_LONGS(nr)	DIV_ROUND_UP(nr, BITS_PER_BYTE * sizeof(long))
#define BITS_TO_U64(nr)		DIV_ROUND_UP(nr, BITS_PER_BYTE * sizeof(u64))
#define BITS_TO_U32(nr)		DIV_ROUND_UP(nr, BITS_PER_BYTE * sizeof(u32))
#define for_each_set_bit(bit, addr, size) \
	for ((bit) = find_first_bit((addr), (size)); \
	     (bit) < (size); \
	     (bit) = find_next_bit((addr), (size), (bit) + 1))

/* same as for_each_set_bit() but use bit as value to start with */
#define for_each_set_bit_from(bit, addr, size) \
	for ((bit) = find_next_bit((addr), (size), (bit)); \
	     (bit) < (size); \
	     (bit) = find_next_bit((addr), (size), (bit) + 1))
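/*
 * Usage sketch (illustrative, not part of the original header), assuming a
 * caller that owns a 64-bit bitmap sized with BITS_TO_LONGS():
 *
 *	unsigned long map[BITS_TO_LONGS(64)] = { 0 };
 *	int i;
 *
 *	set_bit(3, map);
 *	set_bit(42, map);
 *	for_each_set_bit(i, map, 64)
 *		printf("%d\n", i);	prints 3, then 42
 */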
/* Non-atomic helpers: set or clear bit @nr in the bitmap at @addr. */
static inline void set_bit(int nr, unsigned long *addr)
{
	addr[nr / BITS_PER_LONG] |= 1UL << (nr % BITS_PER_LONG);
}

static inline void clear_bit(int nr, unsigned long *addr)
{
	addr[nr / BITS_PER_LONG] &= ~(1UL << (nr % BITS_PER_LONG));
}
/**
 * hweightN - returns the hamming weight of a N-bit word
 * @x: the word to weigh
 *
 * The Hamming Weight of a number is the total number of bits set in it.
 */
static inline unsigned int hweight32(unsigned int w)
{
	unsigned int res = w - ((w >> 1) & 0x55555555);
	res = (res & 0x33333333) + ((res >> 2) & 0x33333333);
	res = (res + (res >> 4)) & 0x0F0F0F0F;
	res = res + (res >> 8);
	return (res + (res >> 16)) & 0x000000FF;
}
static inline unsigned long hweight64(__u64 w)
{
#if BITS_PER_LONG == 32
	return hweight32((unsigned int)(w >> 32)) + hweight32((unsigned int)w);
#elif BITS_PER_LONG == 64
	__u64 res = w - ((w >> 1) & 0x5555555555555555ul);
	res = (res & 0x3333333333333333ul) + ((res >> 2) & 0x3333333333333333ul);
	res = (res + (res >> 4)) & 0x0F0F0F0F0F0F0F0Ful;
	res = res + (res >> 8);
	res = res + (res >> 16);
	return (res + (res >> 32)) & 0x00000000000000FFul;
#endif
}

static inline unsigned long hweight_long(unsigned long w)
{
	return sizeof(w) == 4 ? hweight32(w) : hweight64(w);
}
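/*
 * Illustrative helper, not part of the original header; the name
 * bitmap_weight_example is hypothetical.  It shows how hweight_long() is
 * typically used: sum the per-word population counts, masking the partial
 * tail word so bits beyond @bits are not counted.
 */
static inline unsigned long bitmap_weight_example(const unsigned long *addr,
						  unsigned long bits)
{
	unsigned long i, w = 0;

	/* Whole words first ... */
	for (i = 0; i < bits / BITS_PER_LONG; i++)
		w += hweight_long(addr[i]);
	/* ... then the partial tail word, if any. */
	if (bits % BITS_PER_LONG)
		w += hweight_long(addr[i] &
				  (~0UL >> (BITS_PER_LONG - bits % BITS_PER_LONG)));
	return w;
}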
#define BITOP_WORD(nr)		((nr) / BITS_PER_LONG)
/**
 * __ffs - find first bit in word.
 * @word: The word to search
 *
 * Undefined if no bit exists, so code should check against 0 first.
 */
static __always_inline unsigned long __ffs(unsigned long word)
{
	int num = 0;

#if BITS_PER_LONG == 64
	if ((word & 0xffffffff) == 0) {
		num += 32;
		word >>= 32;
	}
#endif
	if ((word & 0xffff) == 0) {
		num += 16;
		word >>= 16;
	}
	if ((word & 0xff) == 0) {
		num += 8;
		word >>= 8;
	}
	if ((word & 0xf) == 0) {
		num += 4;
		word >>= 4;
	}
	if ((word & 0x3) == 0) {
		num += 2;
		word >>= 2;
	}
	if ((word & 0x1) == 0)
		num += 1;
	return num;
}
#define ffz(x) __ffs(~(x))
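/*
 * Examples (illustrative): __ffs() returns the index of the lowest set bit,
 * ffz() the index of the lowest clear bit:
 *
 *	__ffs(0x18) == 3	0b11000, lowest set bit is bit 3
 *	ffz(0x07)   == 3	0b00111, lowest clear bit is bit 3
 *
 * Both are undefined when no such bit exists (0 and ~0UL respectively).
 */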
/*
 * Find the first set bit in a memory region.
 */
static inline unsigned long
find_first_bit(const unsigned long *addr, unsigned long size)
{
	const unsigned long *p = addr;
	unsigned long result = 0;
	unsigned long tmp;

	while (size & ~(BITS_PER_LONG-1)) {
		if ((tmp = *(p++)))
			goto found;
		result += BITS_PER_LONG;
		size -= BITS_PER_LONG;
	}
	if (!size)
		return result;

	tmp = (*p) & (~0UL >> (BITS_PER_LONG - size));
	if (tmp == 0UL)		/* Are any bits set? */
		return result + size;	/* Nope. */
found:
	return result + __ffs(tmp);
}
/*
 * Find the next set bit in a memory region.
 */
static inline unsigned long
find_next_bit(const unsigned long *addr, unsigned long size,
	      unsigned long offset)
{
	const unsigned long *p = addr + BITOP_WORD(offset);
	unsigned long result = offset & ~(BITS_PER_LONG-1);
	unsigned long tmp;

	if (offset >= size)
		return size;
	size -= result;
	offset %= BITS_PER_LONG;
	if (offset) {
		tmp = *(p++);
		tmp &= (~0UL << offset);
		if (size < BITS_PER_LONG)
			goto found_first;
		if (tmp)
			goto found_middle;
		size -= BITS_PER_LONG;
		result += BITS_PER_LONG;
	}
	while (size & ~(BITS_PER_LONG-1)) {
		if ((tmp = *(p++)))
			goto found_middle;
		result += BITS_PER_LONG;
		size -= BITS_PER_LONG;
	}
	if (!size)
		return result;
	tmp = *p;

found_first:
	tmp &= (~0UL >> (BITS_PER_LONG - size));
	if (tmp == 0UL)		/* Are any bits set? */
		return result + size;	/* Nope. */
found_middle:
	return result + __ffs(tmp);
}
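/*
 * Note (illustrative, derived from the code above): when no set bit remains,
 * find_first_bit() and find_next_bit() return @size, e.g.
 *
 *	unsigned long map[BITS_TO_LONGS(64)] = { 0 };
 *
 *	find_first_bit(map, 64);	returns 64
 *	find_next_bit(map, 64, 10);	returns 64
 *
 * The for_each_set_bit() loops terminate on exactly this condition.
 */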
/*
 * This implementation of find_{first,next}_zero_bit was stolen from
 * Linus' asm-alpha/bitops.h.
 */
static inline unsigned long
find_next_zero_bit(const unsigned long *addr, unsigned long size,
		   unsigned long offset)
{
	const unsigned long *p = addr + BITOP_WORD(offset);
	unsigned long result = offset & ~(BITS_PER_LONG-1);
	unsigned long tmp;

	if (offset >= size)
		return size;
	size -= result;
	offset %= BITS_PER_LONG;
	if (offset) {
		tmp = *(p++);
		tmp |= ~0UL >> (BITS_PER_LONG - offset);
		if (size < BITS_PER_LONG)
			goto found_first;
		if (~tmp)
			goto found_middle;
		size -= BITS_PER_LONG;
		result += BITS_PER_LONG;
	}
	while (size & ~(BITS_PER_LONG-1)) {
		if (~(tmp = *(p++)))
			goto found_middle;
		result += BITS_PER_LONG;
		size -= BITS_PER_LONG;
	}
	if (!size)
		return result;
	tmp = *p;

found_first:
	tmp |= ~0UL << size;
	if (tmp == ~0UL)	/* Are any bits zero? */
		return result + size;	/* Nope. */
found_middle:
	return result + ffz(tmp);
}
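/*
 * Illustrative allocator-style sketch, not part of the original header; the
 * name alloc_slot_example is hypothetical.  It shows find_next_zero_bit()
 * paired with set_bit(): claim the first free slot below @nr_slots and
 * return its index, or return @nr_slots if the map is full.
 */
static inline unsigned long alloc_slot_example(unsigned long *map,
					       unsigned long nr_slots)
{
	unsigned long slot = find_next_zero_bit(map, nr_slots, 0);

	if (slot < nr_slots)
		set_bit(slot, map);
	return slot;
}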
#endif