/* SPDX-License-Identifier: GPL-2.0 */
#ifndef _LINUX_BITOPS_H
#define _LINUX_BITOPS_H
#include <asm/types.h>
#include <linux/bits.h>

/* Set bits in the first 'n' bytes when loaded from memory */
#ifdef __LITTLE_ENDIAN
#  define aligned_byte_mask(n) ((1UL << 8*(n))-1)
#else
#  define aligned_byte_mask(n) (~0xffUL << (BITS_PER_LONG - 8 - 8*(n)))
#endif
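
/*
 * Illustrative values (a sketch, not part of the original header): on a
 * 64-bit little-endian machine aligned_byte_mask(2) is 0xffff, while on
 * a 64-bit big-endian machine it is 0xffff000000000000 -- in both cases
 * the mask covers the first 'n' bytes of the long in memory order.
 */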

#define BITS_PER_TYPE(type)	(sizeof(type) * BITS_PER_BYTE)
#define BITS_TO_LONGS(nr)	DIV_ROUND_UP(nr, BITS_PER_TYPE(long))
#define BITS_TO_U64(nr)		DIV_ROUND_UP(nr, BITS_PER_TYPE(u64))
#define BITS_TO_U32(nr)		DIV_ROUND_UP(nr, BITS_PER_TYPE(u32))
#define BITS_TO_BYTES(nr)	DIV_ROUND_UP(nr, BITS_PER_TYPE(char))
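
/*
 * Example (illustrative only): each helper rounds up to the number of
 * storage units needed to hold 'nr' bits, so with 64-bit longs
 * BITS_TO_LONGS(65) == 2 and BITS_TO_BYTES(65) == 9.
 */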

extern unsigned int __sw_hweight8(unsigned int w);
extern unsigned int __sw_hweight16(unsigned int w);
extern unsigned int __sw_hweight32(unsigned int w);
extern unsigned long __sw_hweight64(__u64 w);

/*
 * Include this here because some architectures need generic_ffs/fls in
 * scope
 */
#include <asm/bitops.h>

#define for_each_set_bit(bit, addr, size) \
	for ((bit) = find_first_bit((addr), (size));		\
	     (bit) < (size);					\
	     (bit) = find_next_bit((addr), (size), (bit) + 1))

/* same as for_each_set_bit() but use bit as value to start with */
#define for_each_set_bit_from(bit, addr, size) \
	for ((bit) = find_next_bit((addr), (size), (bit));	\
	     (bit) < (size);					\
	     (bit) = find_next_bit((addr), (size), (bit) + 1))

#define for_each_clear_bit(bit, addr, size) \
	for ((bit) = find_first_zero_bit((addr), (size));	\
	     (bit) < (size);					\
	     (bit) = find_next_zero_bit((addr), (size), (bit) + 1))

/* same as for_each_clear_bit() but use bit as value to start with */
#define for_each_clear_bit_from(bit, addr, size) \
	for ((bit) = find_next_zero_bit((addr), (size), (bit));	\
	     (bit) < (size);					\
	     (bit) = find_next_zero_bit((addr), (size), (bit) + 1))
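
/*
 * Usage sketch (illustrative only, not part of this header; assumes
 * <linux/bitmap.h> for DECLARE_BITMAP()/bitmap_zero()):
 *
 *	DECLARE_BITMAP(map, 64);
 *	unsigned int bit;
 *
 *	bitmap_zero(map, 64);
 *	__set_bit(3, map);
 *	__set_bit(40, map);
 *	for_each_set_bit(bit, map, 64)
 *		pr_info("bit %u set\n", bit);	// prints 3, then 40
 */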

/**
 * for_each_set_clump8 - iterate over bitmap for each 8-bit clump with set bits
 * @start: bit offset to start search and to store the current iteration offset
 * @clump: location to store copy of current 8-bit clump
 * @bits: bitmap address to base the search on
 * @size: bitmap size in number of bits
 */
#define for_each_set_clump8(start, clump, bits, size) \
	for ((start) = find_first_clump8(&(clump), (bits), (size)); \
	     (start) < (size); \
	     (start) = find_next_clump8(&(clump), (bits), (size), (start) + 8))
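
/*
 * Usage sketch (illustrative only): walk a bitmap eight bits at a time,
 * receiving each non-zero byte-aligned clump in 'clump'.
 *
 *	unsigned long map[1] = { 0x0000ff00UL };
 *	unsigned long clump;
 *	unsigned long start;
 *
 *	for_each_set_clump8(start, clump, map, BITS_PER_LONG)
 *		pr_info("clump 0x%02lx at bit %lu\n", clump, start);
 *		// reports clump 0xff at bit 8
 */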

static inline int get_bitmask_order(unsigned int count)
{
	int order;

	order = fls(count);
	return order;	/* We could be slightly more clever with -1 here... */
}

static __always_inline unsigned long hweight_long(unsigned long w)
{
	return sizeof(w) == 4 ? hweight32(w) : hweight64(w);
}
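
/*
 * Example (illustrative only): hweight ("Hamming weight") counts set
 * bits, so hweight_long(0xf0UL) == 4 regardless of word size.
 */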

/**
 * rol64 - rotate a 64-bit value left
 * @word: value to rotate
 * @shift: bits to roll
 */
static inline __u64 rol64(__u64 word, unsigned int shift)
{
	return (word << (shift & 63)) | (word >> ((-shift) & 63));
}

/**
 * ror64 - rotate a 64-bit value right
 * @word: value to rotate
 * @shift: bits to roll
 */
static inline __u64 ror64(__u64 word, unsigned int shift)
{
	return (word >> (shift & 63)) | (word << ((-shift) & 63));
}

/**
 * rol32 - rotate a 32-bit value left
 * @word: value to rotate
 * @shift: bits to roll
 */
static inline __u32 rol32(__u32 word, unsigned int shift)
{
	return (word << (shift & 31)) | (word >> ((-shift) & 31));
}

/**
 * ror32 - rotate a 32-bit value right
 * @word: value to rotate
 * @shift: bits to roll
 */
static inline __u32 ror32(__u32 word, unsigned int shift)
{
	return (word >> (shift & 31)) | (word << ((-shift) & 31));
}

/**
 * rol16 - rotate a 16-bit value left
 * @word: value to rotate
 * @shift: bits to roll
 */
static inline __u16 rol16(__u16 word, unsigned int shift)
{
	return (word << (shift & 15)) | (word >> ((-shift) & 15));
}

/**
 * ror16 - rotate a 16-bit value right
 * @word: value to rotate
 * @shift: bits to roll
 */
static inline __u16 ror16(__u16 word, unsigned int shift)
{
	return (word >> (shift & 15)) | (word << ((-shift) & 15));
}

/**
 * rol8 - rotate an 8-bit value left
 * @word: value to rotate
 * @shift: bits to roll
 */
static inline __u8 rol8(__u8 word, unsigned int shift)
{
	return (word << (shift & 7)) | (word >> ((-shift) & 7));
}

/**
 * ror8 - rotate an 8-bit value right
 * @word: value to rotate
 * @shift: bits to roll
 */
static inline __u8 ror8(__u8 word, unsigned int shift)
{
	return (word >> (shift & 7)) | (word << ((-shift) & 7));
}
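
/*
 * Example values for the rotate helpers (illustrative only):
 * rol32(0x80000001, 1) == 0x00000003 and ror32(0x00000003, 1) ==
 * 0x80000001. Masking the shift with (bits - 1) keeps a shift of 0
 * well-defined instead of invoking undefined behaviour via a
 * full-width shift.
 */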

/**
 * sign_extend32 - sign extend a 32-bit value using specified bit as sign-bit
 * @value: value to sign extend
 * @index: 0 based bit index (0<=index<32) to sign bit
 *
 * This is safe to use for 16- and 8-bit types as well.
 */
static __always_inline __s32 sign_extend32(__u32 value, int index)
{
	__u8 shift = 31 - index;
	return (__s32)(value << shift) >> shift;
}

/**
 * sign_extend64 - sign extend a 64-bit value using specified bit as sign-bit
 * @value: value to sign extend
 * @index: 0 based bit index (0<=index<64) to sign bit
 */
static __always_inline __s64 sign_extend64(__u64 value, int index)
{
	__u8 shift = 63 - index;
	return (__s64)(value << shift) >> shift;
}
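
/*
 * Example (illustrative only): treating bit 7 as the sign bit,
 * sign_extend32(0x80, 7) == -128 and sign_extend32(0x7f, 7) == 127.
 * The left shift moves the chosen sign bit up to bit 31 so the
 * arithmetic right shift replicates it downwards.
 */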

static inline unsigned fls_long(unsigned long l)
{
	if (sizeof(l) == 4)
		return fls(l);
	return fls64(l);
}

static inline int get_count_order(unsigned int count)
{
	int order;

	order = fls(count) - 1;
	if (count & (count - 1))
		order++;
	return order;
}
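
/*
 * Example (illustrative only): get_count_order() returns the order of
 * the next power of two, so get_count_order(4) == 2 while
 * get_count_order(5) == 3 (5 rounds up to 8 == 1 << 3).
 */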

/**
 * get_count_order_long - get order after rounding @l up to power of 2
 * @l: parameter
 *
 * Same as get_count_order() but with a long type parameter.
 */
static inline int get_count_order_long(unsigned long l)
{
	if (l == 0UL)
		return -1;
	else if (l & (l - 1UL))
		return (int)fls_long(l);
	else
		return (int)fls_long(l) - 1;
}

/**
 * __ffs64 - find first set bit in a 64 bit word
 * @word: The 64 bit word
 *
 * On 64 bit arches this is a synonym for __ffs
 * The result is not defined if no bits are set, so check that @word
 * is non-zero before calling this.
 */
static inline unsigned long __ffs64(u64 word)
{
#if BITS_PER_LONG == 32
	if (((u32)word) == 0UL)
		return __ffs((u32)(word >> 32)) + 32;
#elif BITS_PER_LONG != 64
#error BITS_PER_LONG not 32 or 64
#endif
	return __ffs((unsigned long)word);
}
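
/*
 * Example (illustrative only): __ffs64(0x8ULL) == 3 and
 * __ffs64(1ULL << 32) == 32; the 32-bit branch handles the upper half
 * when the low 32 bits are all zero.
 */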

/**
 * assign_bit - Assign value to a bit in memory
 * @nr: the bit to set
 * @addr: the address to start counting from
 * @value: the value to assign
 */
static __always_inline void assign_bit(long nr, volatile unsigned long *addr,
				       bool value)
{
	if (value)
		set_bit(nr, addr);
	else
		clear_bit(nr, addr);
}

static __always_inline void __assign_bit(long nr, volatile unsigned long *addr,
					 bool value)
{
	if (value)
		__set_bit(nr, addr);
	else
		__clear_bit(nr, addr);
}
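
/*
 * Usage sketch (illustrative only): assign_bit() folds the usual
 * "set or clear depending on a flag" pattern into one call.
 *
 *	DECLARE_BITMAP(flags, 8);
 *	bool enable = true;
 *
 *	assign_bit(0, flags, enable);	// atomic, like set_bit()/clear_bit()
 *	__assign_bit(1, flags, enable);	// non-atomic variant
 */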

#ifdef __KERNEL__

#ifndef set_mask_bits
#define set_mask_bits(ptr, mask, bits)	\
({								\
	const typeof(*(ptr)) mask__ = (mask), bits__ = (bits);	\
	typeof(*(ptr)) old__, new__;				\
								\
	do {							\
		old__ = READ_ONCE(*(ptr));			\
		new__ = (old__ & ~mask__) | bits__;		\
	} while (cmpxchg(ptr, old__, new__) != old__);		\
								\
	old__;							\
})
#endif
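
/*
 * Usage sketch (illustrative only; 'state' and the FOO_* masks are
 * hypothetical): atomically replace a bit-field within a word; the old
 * value is returned, the new value is (old & ~mask) | bits.
 *
 *	unsigned long state;
 *
 *	set_mask_bits(&state, FOO_MODE_MASK, FOO_MODE_FAST);
 */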

#ifndef bit_clear_unless
#define bit_clear_unless(ptr, clear, test)	\
({								\
	const typeof(*(ptr)) clear__ = (clear), test__ = (test);\
	typeof(*(ptr)) old__, new__;				\
								\
	do {							\
		old__ = READ_ONCE(*(ptr));			\
		new__ = old__ & ~clear__;			\
	} while (!(old__ & test__) &&				\
		 cmpxchg(ptr, old__, new__) != old__);		\
								\
	!(old__ & test__);					\
})
#endif
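
/*
 * Usage sketch (illustrative only; the FOO_* values are hypothetical):
 * bit_clear_unless() clears the 'clear' bits only while no 'test' bit
 * is set, and evaluates to true if the clear actually happened.
 *
 *	if (bit_clear_unless(&state, FOO_PENDING, FOO_LOCKED))
 *		;	// FOO_PENDING cleared, FOO_LOCKED was not set
 */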

#ifndef find_last_bit
/**
 * find_last_bit - find the last set bit in a memory region
 * @addr: The address to start the search at
 * @size: The number of bits to search
 *
 * Returns the bit number of the last set bit, or size.
 */
extern unsigned long find_last_bit(const unsigned long *addr,
				   unsigned long size);
#endif

#endif /* __KERNEL__ */
#endif