/* include/linux/bitops.h (from linux-2.6/kvm.git) */
#ifndef _LINUX_BITOPS_H
#define _LINUX_BITOPS_H
#include <asm/types.h>

#ifdef __KERNEL__
#define BIT(nr)			(1UL << (nr))
#define BIT_MASK(nr)		(1UL << ((nr) % BITS_PER_LONG))
#define BIT_WORD(nr)		((nr) / BITS_PER_LONG)
#define BITS_PER_BYTE		8
#define BITS_TO_LONGS(nr)	DIV_ROUND_UP(nr, BITS_PER_BYTE * sizeof(long))
#endif
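/*
 * Worked example (editorial note, not part of the original header): on a
 * 64-bit kernel (BITS_PER_LONG == 64), bit 70 of a multi-word bitmap
 * decomposes as:
 *
 *	BIT_WORD(70) == 70 / 64 == 1            (second long in the array)
 *	BIT_MASK(70) == 1UL << (70 % 64) == 1UL << 6
 *
 * so set_bit(70, map) effectively performs map[1] |= 1UL << 6, and
 * BITS_TO_LONGS(70) == DIV_ROUND_UP(70, 64) == 2 longs are needed to
 * hold a 70-bit bitmap.
 */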
extern unsigned int __sw_hweight8(unsigned int w);
extern unsigned int __sw_hweight16(unsigned int w);
extern unsigned int __sw_hweight32(unsigned int w);
extern unsigned long __sw_hweight64(__u64 w);
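/*
 * Editorial note: these are the generic software population-count
 * ("Hamming weight") routines; the hweight8/16/32/64 helpers used below
 * typically resolve to them unless the architecture supplies a faster
 * implementation. For example, __sw_hweight8(0xb5) == 5, since
 * 0xb5 == 10110101b has five set bits.
 */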
/*
 * Include this here because some architectures need generic_ffs/fls in
 * scope
 */
#include <asm/bitops.h>
#define for_each_set_bit(bit, addr, size) \
	for ((bit) = find_first_bit((addr), (size));		\
	     (bit) < (size);					\
	     (bit) = find_next_bit((addr), (size), (bit) + 1))
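/*
 * Usage sketch (editorial example, not part of the original header),
 * assuming DECLARE_BITMAP from <linux/types.h> and bitmap_zero from
 * <linux/bitmap.h>:
 *
 *	DECLARE_BITMAP(map, 128);
 *	unsigned int bit;
 *
 *	bitmap_zero(map, 128);
 *	__set_bit(5, map);
 *	__set_bit(64, map);
 *	for_each_set_bit(bit, map, 128)
 *		printk(KERN_DEBUG "bit %u is set\n", bit);
 *
 * This prints bit 5 and then bit 64; the loop body never runs on an
 * empty bitmap, since find_first_bit() then returns size.
 */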
static __inline__ int get_bitmask_order(unsigned int count)
{
	int order;

	order = fls(count);
	return order;	/* We could be slightly more clever with -1 here... */
}
static __inline__ int get_count_order(unsigned int count)
{
	int order;

	order = fls(count) - 1;
	if (count & (count - 1))
		order++;
	return order;
}
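/*
 * Worked example (editorial note): get_count_order() gives the power of
 * two needed to cover @count, rounding up, while get_bitmask_order() is
 * simply fls():
 *
 *	get_count_order(16) == 4        (16 == 1 << 4 exactly)
 *	get_count_order(17) == 5        (rounded up, since 17 > 1 << 4)
 *	get_bitmask_order(16) == 5      (fls() is 1-based)
 */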
static inline unsigned long hweight_long(unsigned long w)
{
	return sizeof(w) == 4 ? hweight32(w) : hweight64(w);
}
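/*
 * Editorial example: hweight_long(0xf0f0UL) == 8 on both 32- and 64-bit
 * kernels; the sizeof() test is resolved at compile time, so only one of
 * hweight32()/hweight64() is ever emitted.
 */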
/**
 * rol32 - rotate a 32-bit value left
 * @word: value to rotate
 * @shift: bits to roll
 */
static inline __u32 rol32(__u32 word, unsigned int shift)
{
	return (word << shift) | (word >> (32 - shift));
}
/**
 * ror32 - rotate a 32-bit value right
 * @word: value to rotate
 * @shift: bits to roll
 */
static inline __u32 ror32(__u32 word, unsigned int shift)
{
	return (word >> shift) | (word << (32 - shift));
}
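/*
 * Worked example (editorial note): rol32(0x80000001, 4) == 0x00000018;
 * the top four bits wrap around to the bottom, and ror32() is the exact
 * inverse, so ror32(rol32(x, n), n) == x for 1 <= n <= 31. Beware that
 * shift == 0 makes the complementary shift (32 - shift) equal to the
 * operand width, which is undefined behaviour in C, so callers are
 * expected to pass shifts in the range 1-31.
 */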
/**
 * rol16 - rotate a 16-bit value left
 * @word: value to rotate
 * @shift: bits to roll
 */
static inline __u16 rol16(__u16 word, unsigned int shift)
{
	return (word << shift) | (word >> (16 - shift));
}
/**
 * ror16 - rotate a 16-bit value right
 * @word: value to rotate
 * @shift: bits to roll
 */
static inline __u16 ror16(__u16 word, unsigned int shift)
{
	return (word >> shift) | (word << (16 - shift));
}
/**
 * rol8 - rotate an 8-bit value left
 * @word: value to rotate
 * @shift: bits to roll
 */
static inline __u8 rol8(__u8 word, unsigned int shift)
{
	return (word << shift) | (word >> (8 - shift));
}
/**
 * ror8 - rotate an 8-bit value right
 * @word: value to rotate
 * @shift: bits to roll
 */
static inline __u8 ror8(__u8 word, unsigned int shift)
{
	return (word >> shift) | (word << (8 - shift));
}
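/*
 * Editorial example: the 16- and 8-bit variants behave identically on
 * their narrower types, e.g. rol8(0x81, 1) == 0x03: the high bit wraps
 * into bit 0. The intermediate arithmetic happens in int after the
 * usual integer promotions and is truncated back to __u8 on return.
 */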
/**
 * sign_extend32 - sign extend a 32-bit value using specified bit as sign-bit
 * @value: value to sign extend
 * @index: 0 based bit index (0<=index<32) to sign bit
 */
static inline __s32 sign_extend32(__u32 value, int index)
{
	__u8 shift = 31 - index;
	return (__s32)(value << shift) >> shift;
}
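/*
 * Worked example (editorial note): with @index == 7, bit 7 acts as the
 * sign bit of an 8-bit field, so sign_extend32(0x80, 7) == -128
 * (0xffffff80): the value is shifted up by 31 - 7 == 24 places, then
 * arithmetically shifted back down, replicating bit 7 into bits 8-31.
 */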
static inline unsigned fls_long(unsigned long l)
{
	if (sizeof(l) == 4)
		return fls(l);
	return fls64(l);
}
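/*
 * Editorial example: fls ("find last set") is 1-based, so
 * fls_long(0) == 0 and fls_long(0x10UL) == 5. The sizeof() comparison
 * is a compile-time constant, so this costs nothing at run time.
 */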
/**
 * __ffs64 - find first set bit in a 64 bit word
 * @word: The 64 bit word
 *
 * On 64 bit arches this is a synonym for __ffs
 * The result is not defined if no bits are set, so check that @word
 * is non-zero before calling this.
 */
static inline unsigned long __ffs64(u64 word)
{
#if BITS_PER_LONG == 32
	if (((u32)word) == 0UL)
		return __ffs((u32)(word >> 32)) + 32;
#elif BITS_PER_LONG != 64
#error BITS_PER_LONG not 32 or 64
#endif
	return __ffs((unsigned long)word);
}
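/*
 * Editorial example: __ffs() is 0-based, so __ffs64(1ULL << 39) == 39 on
 * both 32- and 64-bit kernels; on a 32-bit kernel the low word is zero,
 * so the search moves to the high word and 32 is added to the result.
 */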
#ifdef __KERNEL__

#ifdef CONFIG_GENERIC_FIND_LAST_BIT
/**
 * find_last_bit - find the last set bit in a memory region
 * @addr: The address to start the search at
 * @size: The maximum size to search
 *
 * Returns the bit number of the last set bit, or size.
 */
extern unsigned long find_last_bit(const unsigned long *addr,
				   unsigned long size);
#endif /* CONFIG_GENERIC_FIND_LAST_BIT */
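/*
 * Usage sketch (editorial example, not part of the original header),
 * assuming DECLARE_BITMAP from <linux/types.h> and bitmap_zero from
 * <linux/bitmap.h>:
 *
 *	DECLARE_BITMAP(map, 128);
 *	unsigned long last;
 *
 *	bitmap_zero(map, 128);
 *	__set_bit(9, map);
 *	__set_bit(90, map);
 *	last = find_last_bit(map, 128);
 *
 * Here last == 90; had no bit been set at all, find_last_bit() would
 * have returned the full size, 128.
 */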
#endif /* __KERNEL__ */
#endif /* _LINUX_BITOPS_H */