/*-
 * Copyright (c) 2010 Isilon Systems, Inc.
 * Copyright (c) 2010 iX Systems, Inc.
 * Copyright (c) 2010 Panasas, Inc.
 * Copyright (c) 2015-2016 François Tigeot
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice unmodified, this list of conditions, and the following
 *    disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
 * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
 * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
 * IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
 * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
 * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
 * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
 * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
 * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */
29 #ifndef _LINUX_BITOPS_H_
30 #define _LINUX_BITOPS_H_
32 #include <sys/types.h>
33 #include <sys/systm.h>
/* Single-bit mask for bit `nr' (unsigned long arithmetic). */
#define BIT(nr)			(1UL << (nr))

/*
 * Word width in bits.  The two alternatives must be guarded by a word-size
 * test; defining both unconditionally is a macro redefinition error.
 */
#ifdef __LP64__
#define BITS_PER_LONG		64
#else
#define BITS_PER_LONG		32
#endif

/* Mask covering the low `n' bits; n must be in [1, BITS_PER_LONG]. */
#define BIT_MASK(n)		(~0UL >> (BITS_PER_LONG - (n)))
/* Number of longs needed to hold an n-bit bitmap. */
#define BITS_TO_LONGS(n)	howmany((n), BITS_PER_LONG)
/* Index of the long word containing bit `nr'. */
#define BIT_WORD(nr)		((nr) / BITS_PER_LONG)
/*
 * __ffs() - index of the least significant set bit, zero-based.
 * Unlike ffs(3), the result is 0-based; mask must be non-zero.
 */
static inline int
__ffs(int mask)
{
	return (ffs(mask) - 1);
}
/*
 * __fls() - index of the most significant set bit, zero-based.
 * Wraps BSD fls(3); mask must be non-zero.
 */
static inline int
__fls(int mask)
{
	return (fls(mask) - 1);
}
/*
 * __ffsl() - index of the least significant set bit of a long, zero-based.
 * mask must be non-zero.
 */
static inline int
__ffsl(long mask)
{
	return (ffsl(mask) - 1);
}
/*
 * __flsl() - index of the most significant set bit of a long, zero-based.
 * Wraps BSD flsl(3); mask must be non-zero.
 */
static inline int
__flsl(long mask)
{
	return (flsl(mask) - 1);
}
/* Index of the first zero bit (0-based); undefined if mask is all ones. */
#define ffz(mask)	__ffs(~(mask))
/*
 * get_count_order() - order (ceiling log2) of count.
 * Rounds up when count is not a power of two; count must be non-zero.
 */
static inline int get_count_order(unsigned int count)
{
	int order;

	order = fls(count) - 1;
	/* Non-power-of-two: round the order up. */
	if (count & (count - 1))
		order++;
	return order;
}
82 static inline unsigned long
83 find_first_bit(unsigned long *addr
, unsigned long size
)
88 for (bit
= 0; size
>= BITS_PER_LONG
;
89 size
-= BITS_PER_LONG
, bit
+= BITS_PER_LONG
, addr
++) {
92 return (bit
+ __ffsl(*addr
));
95 mask
= (*addr
) & BIT_MASK(size
);
104 static inline unsigned long
105 find_first_zero_bit(unsigned long *addr
, unsigned long size
)
110 for (bit
= 0; size
>= BITS_PER_LONG
;
111 size
-= BITS_PER_LONG
, bit
+= BITS_PER_LONG
, addr
++) {
114 return (bit
+ __ffsl(~(*addr
)));
117 mask
= ~(*addr
) & BIT_MASK(size
);
126 static inline unsigned long
127 find_last_bit(unsigned long *addr
, unsigned long size
)
134 pos
= size
/ BITS_PER_LONG
;
135 offs
= size
% BITS_PER_LONG
;
136 bit
= BITS_PER_LONG
* pos
;
139 mask
= (*addr
) & BIT_MASK(offs
);
141 return (bit
+ __flsl(mask
));
145 bit
-= BITS_PER_LONG
;
147 return (bit
+ __flsl(mask
));
152 static inline unsigned long
153 find_next_bit(unsigned long *addr
, unsigned long size
, unsigned long offset
)
162 pos
= offset
/ BITS_PER_LONG
;
163 offs
= offset
% BITS_PER_LONG
;
164 bit
= BITS_PER_LONG
* pos
;
167 mask
= (*addr
) & ~BIT_MASK(offs
);
169 return (bit
+ __ffsl(mask
));
170 bit
+= BITS_PER_LONG
;
173 for (size
-= bit
; size
>= BITS_PER_LONG
;
174 size
-= BITS_PER_LONG
, bit
+= BITS_PER_LONG
, addr
++) {
177 return (bit
+ __ffsl(*addr
));
180 mask
= (*addr
) & BIT_MASK(size
);
189 static inline unsigned long
190 find_next_zero_bit(unsigned long *addr
, unsigned long size
,
191 unsigned long offset
)
200 pos
= offset
/ BITS_PER_LONG
;
201 offs
= offset
% BITS_PER_LONG
;
202 bit
= BITS_PER_LONG
* pos
;
205 mask
= ~(*addr
) & ~BIT_MASK(offs
);
207 return (bit
+ __ffsl(mask
));
208 bit
+= BITS_PER_LONG
;
211 for (size
-= bit
; size
>= BITS_PER_LONG
;
212 size
-= BITS_PER_LONG
, bit
+= BITS_PER_LONG
, addr
++) {
215 return (bit
+ __ffsl(~(*addr
)));
218 mask
= ~(*addr
) & BIT_MASK(size
);
/* Clear every bit of a `size'-bit bitmap (rounded up to whole longs). */
static inline void
bitmap_zero(unsigned long *addr, int size)
{
	int len;

	len = BITS_TO_LONGS(size) * sizeof(long);
	memset(addr, 0, len);
}
237 bitmap_fill(unsigned long *addr
, int size
)
242 len
= (size
/ BITS_PER_LONG
) * sizeof(long);
243 memset(addr
, 0xff, len
);
244 tail
= size
& (BITS_PER_LONG
- 1);
246 addr
[size
/ BITS_PER_LONG
] = BIT_MASK(tail
);
250 bitmap_full(unsigned long *addr
, int size
)
257 len
= size
/ BITS_PER_LONG
;
258 for (i
= 0; i
< len
; i
++)
261 tail
= size
& (BITS_PER_LONG
- 1);
263 mask
= BIT_MASK(tail
);
264 if ((addr
[i
] & mask
) != mask
)
271 bitmap_empty(unsigned long *addr
, int size
)
278 len
= size
/ BITS_PER_LONG
;
279 for (i
= 0; i
< len
; i
++)
282 tail
= size
& (BITS_PER_LONG
- 1);
284 mask
= BIT_MASK(tail
);
285 if ((addr
[i
] & mask
) != 0)
/* Number of bits in a long. */
#define NBLONG	(NBBY * sizeof(long))

/* Atomically set bit `i' in bitmap `a'. */
#define set_bit(i, a)							\
    atomic_set_long(&((volatile long *)(a))[(i)/NBLONG], 1LU << ((i) % NBLONG))

/* Atomically clear bit `i' in bitmap `a'. */
#define clear_bit(i, a)							\
    atomic_clear_long(&((volatile long *)(a))[(i)/NBLONG], 1LU << ((i) % NBLONG))

/* Test bit `i' in bitmap `a' with acquire semantics; yields 0 or 1. */
#define test_bit(i, a)							\
    !!(atomic_load_acq_long(&((volatile long *)(a))[(i)/NBLONG]) &	\
    (1LU << ((i) % NBLONG)))
/*
 * Atomically clear bit `bit' in the bitmap at `var' and return its
 * previous value (0 or 1).  Uses a compare-and-set retry loop.
 */
static inline int
test_and_clear_bit(long bit, long *var)
{
	long val;

	var += bit / (sizeof(long) * NBBY);
	bit %= sizeof(long) * NBBY;
	bit = 1L << bit;	/* convert index to single-bit mask */
	do {
		val = *(volatile long *)var;
	} while (atomic_cmpset_long(var, val, val & ~bit) == 0);

	return !!(val & bit);
}
/*
 * Non-atomic variant of test_and_clear_bit(): clear bit `bit' in the
 * bitmap at `ptr' and return non-zero iff it was previously set.
 */
static inline unsigned long
__test_and_clear_bit(unsigned int bit, volatile unsigned long *ptr)
{
	const unsigned int units = (sizeof(*ptr) * NBBY);
	volatile unsigned long *const p = &ptr[bit / units];
	const unsigned long mask = (1UL << (bit % units));
	unsigned long v;

	v = *p;
	*p &= ~mask;

	return ((v & mask) != 0);
}
/*
 * Atomically set bit `bit' in the bitmap at `var' and return its
 * previous value (0 or 1).  Uses a compare-and-set retry loop.
 */
static inline int
test_and_set_bit(long bit, volatile unsigned long *var)
{
	long val;

	var += bit / (sizeof(long) * NBBY);
	bit %= sizeof(long) * NBBY;
	bit = 1L << bit;	/* convert index to single-bit mask */
	do {
		val = *(volatile long *)var;
	} while (atomic_cmpset_long(var, val, val | bit) == 0);

	return !!(val & bit);
}
/* Mask of the bits at and above `start' within its word. */
#define BITMAP_FIRST_WORD_MASK(start)	(~0UL << ((start) % BITS_PER_LONG))
/* Mask of the bits below `nbits' within the last word (all ones if aligned). */
#define BITMAP_LAST_WORD_MASK(nbits)					\
(									\
	((nbits) % BITS_PER_LONG) ?					\
		(1UL << ((nbits) % BITS_PER_LONG)) - 1 : ~0UL		\
)
357 bitmap_set(unsigned long *map
, int start
, int nr
)
359 unsigned long *p
= map
+ BIT_WORD(start
);
360 const int size
= start
+ nr
;
361 int bits_to_set
= BITS_PER_LONG
- (start
% BITS_PER_LONG
);
362 unsigned long mask_to_set
= BITMAP_FIRST_WORD_MASK(start
);
364 while (nr
- bits_to_set
>= 0) {
367 bits_to_set
= BITS_PER_LONG
;
372 mask_to_set
&= BITMAP_LAST_WORD_MASK(size
);
378 bitmap_clear(unsigned long *map
, int start
, int nr
)
380 unsigned long *p
= map
+ BIT_WORD(start
);
381 const int size
= start
+ nr
;
382 int bits_to_clear
= BITS_PER_LONG
- (start
% BITS_PER_LONG
);
383 unsigned long mask_to_clear
= BITMAP_FIRST_WORD_MASK(start
);
385 while (nr
- bits_to_clear
>= 0) {
386 *p
&= ~mask_to_clear
;
388 bits_to_clear
= BITS_PER_LONG
;
389 mask_to_clear
= ~0UL;
393 mask_to_clear
&= BITMAP_LAST_WORD_MASK(size
);
394 *p
&= ~mask_to_clear
;
/* Operation selectors for __reg_op(). */
enum {
	REG_OP_ISFREE,		/* true if region is all zero bits */
	REG_OP_ALLOC,		/* set all bits in region */
	REG_OP_RELEASE,		/* clear all bits in region */
};
404 static int __reg_op(unsigned long *bitmap
, int pos
, int order
, int reg_op
)
406 int nbits_reg
; /* number of bits in region */
407 int index
; /* index first long of region in bitmap */
408 int offset
; /* bit offset region in bitmap[index] */
409 int nlongs_reg
; /* num longs spanned by region in bitmap */
410 int nbitsinlong
; /* num bits of region in each spanned long */
411 unsigned long mask
; /* bitmask for one long of region */
412 int i
; /* scans bitmap by longs */
413 int ret
= 0; /* return value */
416 * Either nlongs_reg == 1 (for small orders that fit in one long)
417 * or (offset == 0 && mask == ~0UL) (for larger multiword orders.)
419 nbits_reg
= 1 << order
;
420 index
= pos
/ BITS_PER_LONG
;
421 offset
= pos
- (index
* BITS_PER_LONG
);
422 nlongs_reg
= BITS_TO_LONGS(nbits_reg
);
423 nbitsinlong
= min(nbits_reg
, BITS_PER_LONG
);
426 * Can't do "mask = (1UL << nbitsinlong) - 1", as that
427 * overflows if nbitsinlong == BITS_PER_LONG.
429 mask
= (1UL << (nbitsinlong
- 1));
435 for (i
= 0; i
< nlongs_reg
; i
++) {
436 if (bitmap
[index
+ i
] & mask
)
439 ret
= 1; /* all bits in region free (zero) */
443 for (i
= 0; i
< nlongs_reg
; i
++)
444 bitmap
[index
+ i
] |= mask
;
448 for (i
= 0; i
< nlongs_reg
; i
++)
449 bitmap
[index
+ i
] &= ~mask
;
457 * bitmap_find_free_region - find a contiguous aligned mem region
458 * @bitmap: array of unsigned longs corresponding to the bitmap
459 * @bits: number of bits in the bitmap
460 * @order: region size (log base 2 of number of bits) to find
462 * Find a region of free (zero) bits in a @bitmap of @bits bits and
463 * allocate them (set them to one). Only consider regions of length
464 * a power (@order) of two, aligned to that power of two, which
465 * makes the search algorithm much faster.
467 * Return the bit offset in bitmap of the allocated region,
468 * or -errno on failure.
471 bitmap_find_free_region(unsigned long *bitmap
, int bits
, int order
)
473 int pos
, end
; /* scans bitmap by regions of size order */
475 for (pos
= 0 ; (end
= pos
+ (1 << order
)) <= bits
; pos
= end
) {
476 if (!__reg_op(bitmap
, pos
, order
, REG_OP_ISFREE
))
478 __reg_op(bitmap
, pos
, order
, REG_OP_ALLOC
);
485 * bitmap_release_region - release allocated bitmap region
486 * @bitmap: array of unsigned longs corresponding to the bitmap
487 * @pos: beginning of bit region to release
488 * @order: region size (log base 2 of number of bits) to release
490 * This is the complement to __bitmap_find_free_region() and releases
491 * the found region (by clearing it in the bitmap).
496 bitmap_release_region(unsigned long *bitmap
, int pos
, int order
)
498 __reg_op(bitmap
, pos
, order
, REG_OP_RELEASE
);
/*
 * Returns a contiguous bitmask from bits h to l, inclusive (h >= l).
 * Arguments and the whole expansion are parenthesized so that
 * expression arguments (e.g. GENMASK(x + 1, y)) expand correctly.
 */
#define GENMASK(h, l) \
	(((~0UL) >> (BITS_PER_LONG - (h) - 1)) & ((~0UL) << (l)))
505 #include <asm/bitops/non-atomic.h>
506 #include <asm/bitops/const_hweight.h>
/* Iterate `bit' over every set bit of the first `size' bits of `addr'. */
#define for_each_set_bit(bit, addr, size)				\
	for ((bit) = find_first_bit((addr), (size));			\
	    (bit) < (size);						\
	    (bit) = find_next_bit((addr), (size), (bit) + 1))
/*
 * sign_extend64() - sign-extend the low (index + 1) bits of value.
 * Bit `index' becomes the sign bit; index must be in [0, 63].
 * Relies on arithmetic right shift of a signed value (GCC/Clang behavior).
 */
static inline int64_t
sign_extend64(uint64_t value, int index)
{
	uint8_t shift = 63 - index;
	return (int64_t)(value << shift) >> shift;
}
521 #endif /* _LINUX_BITOPS_H_ */