#ifndef _ASM_X86_BITOPS_H
#define _ASM_X86_BITOPS_H

/*
 * Copyright 1992, Linus Torvalds.
 *
 * Note: inlines with more than a single statement should be marked
 * __always_inline to avoid problems with older gcc's inlining heuristics.
 */

#ifndef _LINUX_BITOPS_H
#error only <linux/bitops.h> can be included directly
#endif

#include <linux/compiler.h>
#include <asm/alternative.h>
#include <asm/rmwcc.h>
#include <asm/barrier.h>

#if BITS_PER_LONG == 32
# define _BITOPS_LONG_SHIFT 5
#elif BITS_PER_LONG == 64
# define _BITOPS_LONG_SHIFT 6
#else
# error "Unexpected BITS_PER_LONG"
#endif

#define BIT_64(n)			(U64_C(1) << (n))

/*
 * These have to be done with inline assembly: that way the bit-setting
 * is guaranteed to be atomic. All bit operations return 0 if the bit
 * was cleared before the operation and != 0 if it was not.
 *
 * bit 0 is the LSB of addr; bit 32 is the LSB of (addr+1).
 */

#if __GNUC__ < 4 || (__GNUC__ == 4 && __GNUC_MINOR__ < 1)
/* Technically wrong, but this avoids compilation errors on some gcc
   versions. */
#define BITOP_ADDR(x) "=m" (*(volatile long *) (x))
#else
#define BITOP_ADDR(x) "+m" (*(volatile long *) (x))
#endif

#define ADDR				BITOP_ADDR(addr)

/*
 * We do the locked ops that don't return the old value as
 * a mask operation on a byte.
 */
#define IS_IMMEDIATE(nr)		(__builtin_constant_p(nr))
#define CONST_MASK_ADDR(nr, addr)	BITOP_ADDR((void *)(addr) + ((nr)>>3))
#define CONST_MASK(nr)			(1 << ((nr) & 7))
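
/*
 * Worked example (illustrative only, not part of the interface): for a
 * compile-time constant nr == 12, CONST_MASK_ADDR(12, addr) names the byte
 * at (void *)addr + 1 (12 >> 3 == 1) and CONST_MASK(12) == 1 << (12 & 7)
 * == 0x10, so the locked "orb"/"andb"/"xorb" paths below operate on a
 * single byte instead of a whole word.
 */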

/**
 * set_bit - Atomically set a bit in memory
 * @nr: the bit to set
 * @addr: the address to start counting from
 *
 * This function is atomic and may not be reordered.  See __set_bit()
 * if you do not require the atomic guarantees.
 *
 * Note: there are no guarantees that this function will not be reordered
 * on non x86 architectures, so if you are writing portable code,
 * make sure not to rely on its reordering guarantees.
 *
 * Note that @nr may be almost arbitrarily large; this function is not
 * restricted to acting on a single-word quantity.
 */
static __always_inline void
set_bit(long nr, volatile unsigned long *addr)
{
	if (IS_IMMEDIATE(nr)) {
		asm volatile(LOCK_PREFIX "orb %1,%0"
			: CONST_MASK_ADDR(nr, addr)
			: "iq" ((u8)CONST_MASK(nr))
			: "memory");
	} else {
		asm volatile(LOCK_PREFIX __ASM_SIZE(bts) " %1,%0"
			: BITOP_ADDR(addr) : "Ir" (nr) : "memory");
	}
}
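
/*
 * Minimal usage sketch (hypothetical caller, assuming a bitmap declared
 * with DECLARE_BITMAP() from <linux/types.h>):
 *
 *	DECLARE_BITMAP(flags, 128);
 *
 *	set_bit(0, flags);	- atomic RMW; constant nr takes the orb path
 *	set_bit(nr, flags);	- variable nr takes the lock bts path
 */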

/**
 * __set_bit - Set a bit in memory
 * @nr: the bit to set
 * @addr: the address to start counting from
 *
 * Unlike set_bit(), this function is non-atomic and may be reordered.
 * If it's called on the same region of memory simultaneously, the effect
 * may be that only one operation succeeds.
 */
static __always_inline void __set_bit(long nr, volatile unsigned long *addr)
{
	asm volatile(__ASM_SIZE(bts) " %1,%0" : ADDR : "Ir" (nr) : "memory");
}

/**
 * clear_bit - Clears a bit in memory
 * @nr: Bit to clear
 * @addr: Address to start counting from
 *
 * clear_bit() is atomic and may not be reordered.  However, it does
 * not contain a memory barrier, so if it is used for locking purposes,
 * you should call smp_mb__before_atomic() and/or smp_mb__after_atomic()
 * in order to ensure changes are visible on other processors.
 */
static __always_inline void
clear_bit(long nr, volatile unsigned long *addr)
{
	if (IS_IMMEDIATE(nr)) {
		asm volatile(LOCK_PREFIX "andb %1,%0"
			: CONST_MASK_ADDR(nr, addr)
			: "iq" ((u8)~CONST_MASK(nr)));
	} else {
		asm volatile(LOCK_PREFIX __ASM_SIZE(btr) " %1,%0"
			: BITOP_ADDR(addr)
			: "Ir" (nr));
	}
}

/*
 * clear_bit_unlock - Clears a bit in memory
 * @nr: Bit to clear
 * @addr: Address to start counting from
 *
 * clear_bit_unlock() is atomic and implies release semantics before the
 * memory operation. It can be used for an unlock.
 */
static __always_inline void clear_bit_unlock(long nr, volatile unsigned long *addr)
{
	barrier();
	clear_bit(nr, addr);
}

static __always_inline void __clear_bit(long nr, volatile unsigned long *addr)
{
	asm volatile(__ASM_SIZE(btr) " %1,%0" : ADDR : "Ir" (nr));
}

static __always_inline bool clear_bit_unlock_is_negative_byte(long nr, volatile unsigned long *addr)
{
	bool negative;
	asm volatile(LOCK_PREFIX "andb %2,%1"
		CC_SET(s)
		: CC_OUT(s) (negative), ADDR
		: "ir" ((char) ~(1 << nr)) : "memory");
	return negative;
}
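
/*
 * Sketch of the intended use (hypothetical caller; LOCK_BIT and
 * wake_up_waiters() are made-up names): release a low-byte lock bit and, in
 * the same atomic op, learn whether bit 7 of that byte (e.g. a "waiters"
 * flag) is set:
 *
 *	if (clear_bit_unlock_is_negative_byte(LOCK_BIT, &word))
 *		wake_up_waiters(&word);
 */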

// Let everybody know we have it
#define clear_bit_unlock_is_negative_byte clear_bit_unlock_is_negative_byte

/*
 * __clear_bit_unlock - Clears a bit in memory
 * @nr: Bit to clear
 * @addr: Address to start counting from
 *
 * __clear_bit_unlock() is non-atomic and implies release semantics before
 * the memory operation. It can be used for an unlock if no other CPUs can
 * concurrently modify other bits in the word.
 *
 * No memory barrier is required here, because x86 cannot reorder stores past
 * older loads. Same principle as spin_unlock.
 */
static __always_inline void __clear_bit_unlock(long nr, volatile unsigned long *addr)
{
	barrier();
	__clear_bit(nr, addr);
}

/**
 * __change_bit - Toggle a bit in memory
 * @nr: the bit to change
 * @addr: the address to start counting from
 *
 * Unlike change_bit(), this function is non-atomic and may be reordered.
 * If it's called on the same region of memory simultaneously, the effect
 * may be that only one operation succeeds.
 */
static __always_inline void __change_bit(long nr, volatile unsigned long *addr)
{
	asm volatile(__ASM_SIZE(btc) " %1,%0" : ADDR : "Ir" (nr));
}

/**
 * change_bit - Toggle a bit in memory
 * @nr: Bit to change
 * @addr: Address to start counting from
 *
 * change_bit() is atomic and may not be reordered.
 * Note that @nr may be almost arbitrarily large; this function is not
 * restricted to acting on a single-word quantity.
 */
static __always_inline void change_bit(long nr, volatile unsigned long *addr)
{
	if (IS_IMMEDIATE(nr)) {
		asm volatile(LOCK_PREFIX "xorb %1,%0"
			: CONST_MASK_ADDR(nr, addr)
			: "iq" ((u8)CONST_MASK(nr)));
	} else {
		asm volatile(LOCK_PREFIX __ASM_SIZE(btc) " %1,%0"
			: BITOP_ADDR(addr)
			: "Ir" (nr));
	}
}

/**
 * test_and_set_bit - Set a bit and return its old value
 * @nr: Bit to set
 * @addr: Address to count from
 *
 * This operation is atomic and cannot be reordered.
 * It also implies a memory barrier.
 */
static __always_inline bool test_and_set_bit(long nr, volatile unsigned long *addr)
{
	return GEN_BINARY_RMWcc(LOCK_PREFIX __ASM_SIZE(bts), *addr, c, "Ir", nr);
}
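
/*
 * Typical "claim it exactly once" pattern (illustrative caller only;
 * RESOURCE_NR and init_resource() are hypothetical):
 *
 *	if (!test_and_set_bit(RESOURCE_NR, bitmap))
 *		init_resource();	- old value was 0, we won the race
 */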

/**
 * test_and_set_bit_lock - Set a bit and return its old value for lock
 * @nr: Bit to set
 * @addr: Address to count from
 *
 * This is the same as test_and_set_bit on x86.
 */
static __always_inline bool
test_and_set_bit_lock(long nr, volatile unsigned long *addr)
{
	return test_and_set_bit(nr, addr);
}

/**
 * __test_and_set_bit - Set a bit and return its old value
 * @nr: Bit to set
 * @addr: Address to count from
 *
 * This operation is non-atomic and can be reordered.
 * If two examples of this operation race, one can appear to succeed
 * but actually fail.  You must protect multiple accesses with a lock.
 */
static __always_inline bool __test_and_set_bit(long nr, volatile unsigned long *addr)
{
	bool oldbit;

	asm(__ASM_SIZE(bts) " %2,%1"
	    CC_SET(c)
	    : CC_OUT(c) (oldbit), ADDR
	    : "Ir" (nr));
	return oldbit;
}

/**
 * test_and_clear_bit - Clear a bit and return its old value
 * @nr: Bit to clear
 * @addr: Address to count from
 *
 * This operation is atomic and cannot be reordered.
 * It also implies a memory barrier.
 */
static __always_inline bool test_and_clear_bit(long nr, volatile unsigned long *addr)
{
	return GEN_BINARY_RMWcc(LOCK_PREFIX __ASM_SIZE(btr), *addr, c, "Ir", nr);
}

/**
 * __test_and_clear_bit - Clear a bit and return its old value
 * @nr: Bit to clear
 * @addr: Address to count from
 *
 * This operation is non-atomic and can be reordered.
 * If two examples of this operation race, one can appear to succeed
 * but actually fail.  You must protect multiple accesses with a lock.
 *
 * Note: the operation is performed atomically with respect to
 * the local CPU, but not other CPUs. Portable code should not
 * rely on this behaviour.
 * KVM relies on this behaviour on x86 for modifying memory that is also
 * accessed from a hypervisor on the same CPU if running in a VM: don't change
 * this without also updating arch/x86/kernel/kvm.c
 */
static __always_inline bool __test_and_clear_bit(long nr, volatile unsigned long *addr)
{
	bool oldbit;

	asm volatile(__ASM_SIZE(btr) " %2,%1"
		     CC_SET(c)
		     : CC_OUT(c) (oldbit), ADDR
		     : "Ir" (nr));
	return oldbit;
}

/* WARNING: non atomic and it can be reordered! */
static __always_inline bool __test_and_change_bit(long nr, volatile unsigned long *addr)
{
	bool oldbit;

	asm volatile(__ASM_SIZE(btc) " %2,%1"
		     CC_SET(c)
		     : CC_OUT(c) (oldbit), ADDR
		     : "Ir" (nr) : "memory");

	return oldbit;
}

/**
 * test_and_change_bit - Change a bit and return its old value
 * @nr: Bit to change
 * @addr: Address to count from
 *
 * This operation is atomic and cannot be reordered.
 * It also implies a memory barrier.
 */
static __always_inline bool test_and_change_bit(long nr, volatile unsigned long *addr)
{
	return GEN_BINARY_RMWcc(LOCK_PREFIX __ASM_SIZE(btc), *addr, c, "Ir", nr);
}

static __always_inline bool constant_test_bit(long nr, const volatile unsigned long *addr)
{
	return ((1UL << (nr & (BITS_PER_LONG-1))) &
		(addr[nr >> _BITOPS_LONG_SHIFT])) != 0;
}

static __always_inline bool variable_test_bit(long nr, volatile const unsigned long *addr)
{
	bool oldbit;

	asm volatile(__ASM_SIZE(bt) " %2,%1"
		     CC_SET(c)
		     : CC_OUT(c) (oldbit)
		     : "m" (*(unsigned long *)addr), "Ir" (nr));

	return oldbit;
}

#if 0 /* Fool kernel-doc since it doesn't do macros yet */
/**
 * test_bit - Determine whether a bit is set
 * @nr: bit number to test
 * @addr: Address to start counting from
 */
static bool test_bit(int nr, const volatile unsigned long *addr);
#endif

#define test_bit(nr, addr)			\
	(__builtin_constant_p((nr))		\
	 ? constant_test_bit((nr), (addr))	\
	 : variable_test_bit((nr), (addr)))
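
/*
 * Illustrative effect of the dispatch above (hypothetical caller): a
 * compile-time constant bit number is resolved via constant_test_bit(),
 * anything else goes through the "bt"-based variable_test_bit():
 *
 *	if (test_bit(3, flags))		- constant nr
 *		...
 *	if (test_bit(nr, flags))	- variable nr
 *		...
 */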

/**
 * __ffs - find first set bit in word
 * @word: The word to search
 *
 * Undefined if no bit exists, so code should check against 0 first.
 */
static __always_inline unsigned long __ffs(unsigned long word)
{
	asm("rep; bsf %1,%0"
		: "=r" (word)
		: "rm" (word));
	return word;
}
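
/*
 * Example values (the caller must ensure word != 0 first, since the result
 * is undefined otherwise):
 *	__ffs(0x08)   == 3
 *	__ffs(0xff00) == 8
 */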

/**
 * ffz - find first zero bit in word
 * @word: The word to search
 *
 * Undefined if no zero exists, so code should check against ~0UL first.
 */
static __always_inline unsigned long ffz(unsigned long word)
{
	asm("rep; bsf %1,%0"
		: "=r" (word)
		: "r" (~word));
	return word;
}

/*
 * __fls: find last set bit in word
 * @word: The word to search
 *
 * Undefined if no set bit exists, so code should check against 0 first.
 */
static __always_inline unsigned long __fls(unsigned long word)
{
	asm("bsr %1,%0"
	    : "=r" (word)
	    : "rm" (word));
	return word;
}

#undef ADDR

#ifdef __KERNEL__

/**
 * ffs - find first set bit in word
 * @x: the word to search
 *
 * This is defined the same way as the libc and compiler builtin ffs
 * routines, therefore differs in spirit from the other bitops.
 *
 * ffs(value) returns 0 if value is 0 or the position of the first
 * set bit if value is nonzero. The first (least significant) bit
 * is at position 1.
 */
static __always_inline int ffs(int x)
{
	int r;

#ifdef CONFIG_X86_64
	/*
	 * AMD64 says BSFL won't clobber the dest reg if x==0; Intel64 says the
	 * dest reg is undefined if x==0, but their CPU architect says its
	 * value is written to set it to the same as before, except that the
	 * top 32 bits will be cleared.
	 *
	 * We cannot do this on 32 bits because at the very least some
	 * 486 CPUs did not behave this way.
	 */
	asm("bsfl %1,%0"
	    : "=r" (r)
	    : "rm" (x), "0" (-1));
#elif defined(CONFIG_X86_CMOV)
	asm("bsfl %1,%0\n\t"
	    "cmovzl %2,%0"
	    : "=&r" (r) : "rm" (x), "r" (-1));
#else
	asm("bsfl %1,%0\n\t"
	    "jnz 1f\n\t"
	    "movl $-1,%0\n"
	    "1:" : "=r" (r) : "rm" (x));
#endif
	return r + 1;
}
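
/*
 * Example values, contrasting with the 0-based __ffs() above:
 *	ffs(0)    == 0		- no bit set
 *	ffs(0x08) == 4		- bit 3 set, reported 1-based
 */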

/**
 * fls - find last set bit in word
 * @x: the word to search
 *
 * This is defined in a similar way as the libc and compiler builtin
 * ffs, but returns the position of the most significant set bit.
 *
 * fls(value) returns 0 if value is 0 or the position of the last
 * set bit if value is nonzero. The last (most significant) bit is
 * at position 32.
 */
static __always_inline int fls(int x)
{
	int r;

#ifdef CONFIG_X86_64
	/*
	 * AMD64 says BSRL won't clobber the dest reg if x==0; Intel64 says the
	 * dest reg is undefined if x==0, but their CPU architect says its
	 * value is written to set it to the same as before.
	 *
	 * We cannot do this on 32 bits because at the very least some
	 * 486 CPUs did not behave this way.
	 */
	asm("bsrl %1,%0"
	    : "=r" (r)
	    : "rm" (x), "0" (-1));
#elif defined(CONFIG_X86_CMOV)
	asm("bsrl %1,%0\n\t"
	    "cmovzl %2,%0"
	    : "=&r" (r) : "rm" (x), "rm" (-1));
#else
	asm("bsrl %1,%0\n\t"
	    "jnz 1f\n\t"
	    "movl $-1,%0\n"
	    "1:" : "=r" (r) : "rm" (x));
#endif
	return r + 1;
}
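
/*
 * Example values:
 *	fls(0)          == 0
 *	fls(0x08)       == 4
 *	fls(0x80000000) == 32
 */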

/**
 * fls64 - find last set bit in a 64-bit word
 * @x: the word to search
 *
 * This is defined in a similar way as the libc and compiler builtin
 * ffsll, but returns the position of the most significant set bit.
 *
 * fls64(value) returns 0 if value is 0 or the position of the last
 * set bit if value is nonzero. The last (most significant) bit is
 * at position 64.
 */
#ifdef CONFIG_X86_64
static __always_inline int fls64(__u64 x)
{
	int bitpos = -1;
	/*
	 * AMD64 says BSRQ won't clobber the dest reg if x==0; Intel64 says the
	 * dest reg is undefined if x==0, but their CPU architect says its
	 * value is written to set it to the same as before.
	 */
	asm("bsrq %1,%q0"
	    : "+r" (bitpos)
	    : "rm" (x));
	return bitpos + 1;
}
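
/*
 * Example values (note bitpos starts at -1, so a zero input yields 0):
 *	fls64(0)          == 0
 *	fls64(BIT_64(63)) == 64
 */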
#else
#include <asm-generic/bitops/fls64.h>
#endif

#include <asm-generic/bitops/find.h>

#include <asm-generic/bitops/sched.h>

#include <asm/arch_hweight.h>

#include <asm-generic/bitops/const_hweight.h>

#include <asm-generic/bitops/le.h>

#include <asm-generic/bitops/ext2-atomic-setbit.h>

#endif /* __KERNEL__ */
#endif /* _ASM_X86_BITOPS_H */