#ifndef _ASM_X86_BITOPS_H
#define _ASM_X86_BITOPS_H

#ifndef _LINUX_BITOPS_H
#error only <linux/bitops.h> can be included directly
#endif

#include <linux/compiler.h>
#include <asm/alternative.h>
#include <asm/rmwcc.h>
#include <asm/barrier.h>

#if BITS_PER_LONG == 32
# define _BITOPS_LONG_SHIFT 5
#elif BITS_PER_LONG == 64
# define _BITOPS_LONG_SHIFT 6
#else
# error "Unexpected BITS_PER_LONG"
#endif

#define BIT_64(n) (U64_C(1) << (n))
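
/*
 * The bit operations below are implemented with inline assembly so that
 * the read-modify-write on memory is a single instruction, prefixed with
 * LOCK_PREFIX where atomicity across CPUs is required.  Bit 0 is the
 * least significant bit of addr[0]; bit BITS_PER_LONG is the least
 * significant bit of addr[1], and so on.
 */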
#if __GNUC__ < 4 || (__GNUC__ == 4 && __GNUC_MINOR__ < 1)
/*
 * "=m" is technically wrong for a read-modify-write operand, but gcc
 * versions before 4.1 have trouble with the "+m" constraint here.
 */
#define BITOP_ADDR(x) "=m" (*(volatile long *) (x))
#else
#define BITOP_ADDR(x) "+m" (*(volatile long *) (x))
#endif

#define ADDR BITOP_ADDR(addr)
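
/*
 * For the locked operations that do not need to return the old value
 * (set_bit/clear_bit/change_bit with a compile-time-constant bit number),
 * the bit is addressed as a single byte and modified with a locked
 * byte-sized or/and/xor instead of bts/btr/btc.
 */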
#define IS_IMMEDIATE(nr) (__builtin_constant_p(nr))
#define CONST_MASK_ADDR(nr, addr) BITOP_ADDR((void *)(addr) + ((nr)>>3))
#define CONST_MASK(nr) (1 << ((nr) & 7))
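
/**
 * set_bit - Atomically set a bit in memory
 * @nr: the bit to set
 * @addr: the address to start counting from
 *
 * This function is atomic and may not be reordered.  See __set_bit()
 * if you do not require the atomic guarantees.
 *
 * Note that @nr may be almost arbitrarily large; this function is not
 * restricted to acting on a single-word quantity.
 */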
static __always_inline void
set_bit(long nr, volatile unsigned long *addr)
{
	if (IS_IMMEDIATE(nr)) {
		asm volatile(LOCK_PREFIX "orb %1,%0"
			: CONST_MASK_ADDR(nr, addr)
			: "iq" ((u8)CONST_MASK(nr))
			: "memory");
	} else {
		asm volatile(LOCK_PREFIX __ASM_SIZE(bts) " %1,%0"
			: BITOP_ADDR(addr) : "Ir" (nr) : "memory");
	}
}
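
/**
 * __set_bit - Set a bit in memory
 * @nr: the bit to set
 * @addr: the address to start counting from
 *
 * Unlike set_bit(), this function is non-atomic (no LOCK prefix).  If it
 * races with a concurrent modification of the same word, updates may be
 * lost; protect concurrent access with a lock.
 */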
static __always_inline void __set_bit(long nr, volatile unsigned long *addr)
{
	asm volatile(__ASM_SIZE(bts) " %1,%0" : ADDR : "Ir" (nr) : "memory");
}
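
/**
 * clear_bit - Clears a bit in memory
 * @nr: Bit to clear
 * @addr: Address to start counting from
 *
 * clear_bit() is atomic and may not be reordered.  However, it does not
 * by itself order surrounding accesses; if it is used for locking
 * purposes, use smp_mb__before_atomic() and/or smp_mb__after_atomic() to
 * ensure changes are visible on other processors.
 */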
static __always_inline void
clear_bit(long nr, volatile unsigned long *addr)
{
	if (IS_IMMEDIATE(nr)) {
		asm volatile(LOCK_PREFIX "andb %1,%0"
			: CONST_MASK_ADDR(nr, addr)
			: "iq" ((u8)~CONST_MASK(nr)));
	} else {
		asm volatile(LOCK_PREFIX __ASM_SIZE(btr) " %1,%0"
			: BITOP_ADDR(addr)
			: "Ir" (nr));
	}
}
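
/*
 * clear_bit_unlock - Clears a bit in memory with release semantics
 * @nr: Bit to clear
 * @addr: Address to start counting from
 *
 * The barrier() keeps the compiler from moving accesses in the critical
 * section past the atomic clear, so this can be used for an unlock.
 */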
static __always_inline void clear_bit_unlock(long nr, volatile unsigned long *addr)
{
	barrier();
	clear_bit(nr, addr);
}
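
/*
 * __clear_bit - Clears a bit in memory (non-atomic version)
 * @nr: Bit to clear
 * @addr: Address to start counting from
 */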
static __always_inline void __clear_bit(long nr, volatile unsigned long *addr)
{
	asm volatile(__ASM_SIZE(btr) " %1,%0" : ADDR : "Ir" (nr));
}
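
/*
 * clear_bit_unlock_is_negative_byte - Clear a bit and test whether the
 * containing byte is negative
 * @nr: Bit to clear (within the first byte at @addr)
 * @addr: Address to start counting from
 *
 * Atomically clears bit @nr in the byte at @addr and returns whether the
 * most significant bit of that byte is still set afterwards, taken from
 * the sign flag of the locked "andb".
 */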
static __always_inline bool clear_bit_unlock_is_negative_byte(long nr, volatile unsigned long *addr)
{
	bool negative;
	asm volatile(LOCK_PREFIX "andb %2,%1"
		CC_SET(s)
		: CC_OUT(s) (negative), ADDR
		: "ir" ((char) ~(1 << nr)) : "memory");
	return negative;
}

/* Tell the generic code that the arch-specific version above is available. */
#define clear_bit_unlock_is_negative_byte clear_bit_unlock_is_negative_byte
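
/*
 * __clear_bit_unlock - Clears a bit in memory with release semantics
 * @nr: Bit to clear
 * @addr: Address to start counting from
 *
 * Like clear_bit_unlock() but uses the non-atomic __clear_bit().  The
 * barrier() still prevents the compiler from reordering the clear before
 * earlier accesses in the critical section.
 */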
static __always_inline void __clear_bit_unlock(long nr, volatile unsigned long *addr)
{
	barrier();
	__clear_bit(nr, addr);
}
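
/**
 * __change_bit - Toggle a bit in memory
 * @nr: the bit to change
 * @addr: the address to start counting from
 *
 * Non-atomic; if it races with another access to the same word, the
 * update may be lost.
 */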
static __always_inline void __change_bit(long nr, volatile unsigned long *addr)
{
	asm volatile(__ASM_SIZE(btc) " %1,%0" : ADDR : "Ir" (nr));
}
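
/**
 * change_bit - Toggle a bit in memory
 * @nr: Bit to change
 * @addr: Address to start counting from
 *
 * change_bit() is atomic and may not be reordered.
 * Note that @nr may be almost arbitrarily large; this function is not
 * restricted to acting on a single-word quantity.
 */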
static __always_inline void change_bit(long nr, volatile unsigned long *addr)
{
	if (IS_IMMEDIATE(nr)) {
		asm volatile(LOCK_PREFIX "xorb %1,%0"
			: CONST_MASK_ADDR(nr, addr)
			: "iq" ((u8)CONST_MASK(nr)));
	} else {
		asm volatile(LOCK_PREFIX __ASM_SIZE(btc) " %1,%0"
			: BITOP_ADDR(addr)
			: "Ir" (nr));
	}
}
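
/**
 * test_and_set_bit - Set a bit and return its old value
 * @nr: Bit to set
 * @addr: Address to count from
 *
 * This operation is atomic and, because it uses a locked "bts", also acts
 * as a memory barrier.
 */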
static __always_inline bool test_and_set_bit(long nr, volatile unsigned long *addr)
{
	GEN_BINARY_RMWcc(LOCK_PREFIX __ASM_SIZE(bts),
			 *addr, "Ir", nr, "%0", c);
}
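
/**
 * test_and_set_bit_lock - Set a bit and return its old value, for locks
 * @nr: Bit to set
 * @addr: Address to count from
 *
 * On x86 this is simply test_and_set_bit(), whose locked instruction
 * already provides the ordering a lock acquisition needs.
 */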
static __always_inline bool
test_and_set_bit_lock(long nr, volatile unsigned long *addr)
{
	return test_and_set_bit(nr, addr);
}
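
/**
 * __test_and_set_bit - Set a bit and return its old value
 * @nr: Bit to set
 * @addr: Address to count from
 *
 * Non-atomic variant: if two callers race on the same word, one update
 * can be lost.  Protect concurrent access with a lock.
 */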
static __always_inline bool __test_and_set_bit(long nr, volatile unsigned long *addr)
{
	bool oldbit;

	asm(__ASM_SIZE(bts) " %2,%1"
	    CC_SET(c)
	    : CC_OUT(c) (oldbit), ADDR
	    : "Ir" (nr));
	return oldbit;
}
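
/**
 * test_and_clear_bit - Clear a bit and return its old value
 * @nr: Bit to clear
 * @addr: Address to count from
 *
 * This operation is atomic and, because it uses a locked "btr", also acts
 * as a memory barrier.
 */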
static __always_inline bool test_and_clear_bit(long nr, volatile unsigned long *addr)
{
	GEN_BINARY_RMWcc(LOCK_PREFIX __ASM_SIZE(btr),
			 *addr, "Ir", nr, "%0", c);
}
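
/**
 * __test_and_clear_bit - Clear a bit and return its old value
 * @nr: Bit to clear
 * @addr: Address to count from
 *
 * Not atomic with respect to other CPUs: racing updates of the same word
 * can be lost.  The read-modify-write is still a single "btr" instruction,
 * so it is atomic with respect to the local CPU.
 */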
static __always_inline bool __test_and_clear_bit(long nr, volatile unsigned long *addr)
{
	bool oldbit;

	asm volatile(__ASM_SIZE(btr) " %2,%1"
		     CC_SET(c)
		     : CC_OUT(c) (oldbit), ADDR
		     : "Ir" (nr));
	return oldbit;
}
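
/**
 * __test_and_change_bit - Toggle a bit and return its old value
 * @nr: Bit to toggle
 * @addr: Address to count from
 *
 * Non-atomic variant of test_and_change_bit().
 */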
static __always_inline bool __test_and_change_bit(long nr, volatile unsigned long *addr)
{
	bool oldbit;

	asm volatile(__ASM_SIZE(btc) " %2,%1"
		     CC_SET(c)
		     : CC_OUT(c) (oldbit), ADDR
		     : "Ir" (nr) : "memory");

	return oldbit;
}
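
/**
 * test_and_change_bit - Toggle a bit and return its old value
 * @nr: Bit to toggle
 * @addr: Address to count from
 *
 * This operation is atomic and, because it uses a locked "btc", also acts
 * as a memory barrier.
 */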
static __always_inline bool test_and_change_bit(long nr, volatile unsigned long *addr)
{
	GEN_BINARY_RMWcc(LOCK_PREFIX __ASM_SIZE(btc),
			 *addr, "Ir", nr, "%0", c);
}
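
/*
 * The two helpers below implement test_bit(): constant_test_bit() is used
 * when the bit number is a compile-time constant and can be evaluated by
 * the compiler, while variable_test_bit() uses "bt" for the general case.
 */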
static __always_inline bool constant_test_bit(long nr, const volatile unsigned long *addr)
{
	return ((1UL << (nr & (BITS_PER_LONG-1))) &
		(addr[nr >> _BITOPS_LONG_SHIFT])) != 0;
}

static __always_inline bool variable_test_bit(long nr, volatile const unsigned long *addr)
{
	bool oldbit;

	asm volatile(__ASM_SIZE(bt) " %2,%1"
		     CC_SET(c)
		     : CC_OUT(c) (oldbit)
		     : "m" (*(unsigned long *)addr), "Ir" (nr));

	return oldbit;
}

#if 0 /* Documentation-only prototype; the real test_bit() is the macro below. */
/**
 * test_bit - Determine whether a bit is set
 * @nr: bit number to test
 * @addr: Address to start counting from
 */
static bool test_bit(int nr, const volatile unsigned long *addr);
#endif

#define test_bit(nr, addr)			\
	(__builtin_constant_p((nr))		\
	 ? constant_test_bit((nr), (addr))	\
	 : variable_test_bit((nr), (addr)))
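
/**
 * __ffs - find first set bit in word
 * @word: The word to search
 *
 * Undefined if no bit is set, so callers should check against 0 first.
 * ("rep; bsf" is the TZCNT encoding on CPUs that support it and executes
 * as a plain BSF elsewhere.)
 */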
static __always_inline unsigned long __ffs(unsigned long word)
{
	asm("rep; bsf %1,%0"
		: "=r" (word)
		: "rm" (word));
	return word;
}
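
/**
 * ffz - find first zero bit in word
 * @word: The word to search
 *
 * Undefined if no zero bit exists, so callers should check against ~0UL
 * first.
 */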
static __always_inline unsigned long ffz(unsigned long word)
{
	asm("rep; bsf %1,%0"
		: "=r" (word)
		: "r" (~word));
	return word;
}
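
/*
 * __fls - find last set bit in word
 * @word: The word to search
 *
 * Undefined if no set bit exists, so callers should check against 0 first.
 */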
static __always_inline unsigned long __fls(unsigned long word)
{
	asm("bsr %1,%0"
		: "=r" (word)
		: "rm" (word));
	return word;
}

#undef ADDR

#ifdef __KERNEL__
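
/**
 * ffs - find first set bit in word
 * @x: the word to search
 *
 * This is defined the same way as the libc and compiler builtin ffs
 * routines: ffs(value) returns 0 if value is 0, otherwise the position of
 * the first set bit, counting the least significant bit as position 1.
 */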
static __always_inline int ffs(int x)
{
	int r;

#ifdef CONFIG_X86_64
	/*
	 * On 64-bit, BSFL is relied upon to leave the destination register
	 * unchanged when the source is zero, so preloading %0 with -1 makes
	 * the x == 0 case fall out without a conditional move or branch.
	 */
	asm("bsfl %1,%0"
	    : "=r" (r)
	    : "rm" (x), "0" (-1));
#elif defined(CONFIG_X86_CMOV)
	asm("bsfl %1,%0\n\t"
	    "cmovzl %2,%0"
	    : "=&r" (r) : "rm" (x), "r" (-1));
#else
	asm("bsfl %1,%0\n\t"
	    "jnz 1f\n\t"
	    "movl $-1,%0\n"
	    "1:" : "=r" (r) : "rm" (x));
#endif
	return r + 1;
}
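
/**
 * fls - find last set bit in word
 * @x: the word to search
 *
 * fls(value) returns 0 if value is 0, otherwise the position of the most
 * significant set bit, counting the least significant bit as position 1.
 */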
static __always_inline int fls(int x)
{
	int r;

#ifdef CONFIG_X86_64
	/*
	 * Same trick as in ffs(): preload %0 with -1 and rely on BSRL
	 * leaving the destination unchanged when the source is zero.
	 */
	asm("bsrl %1,%0"
	    : "=r" (r)
	    : "rm" (x), "0" (-1));
#elif defined(CONFIG_X86_CMOV)
	asm("bsrl %1,%0\n\t"
	    "cmovzl %2,%0"
	    : "=&r" (r) : "rm" (x), "rm" (-1));
#else
	asm("bsrl %1,%0\n\t"
	    "jnz 1f\n\t"
	    "movl $-1,%0\n"
	    "1:" : "=r" (r) : "rm" (x));
#endif
	return r + 1;
}
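
/**
 * fls64 - find last set bit in a 64-bit word
 * @x: the word to search
 *
 * fls64(value) returns 0 if value is 0, otherwise the position of the most
 * significant set bit, counting the least significant bit as position 1.
 */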
#ifdef CONFIG_X86_64
static __always_inline int fls64(__u64 x)
{
	int bitpos = -1;
	/*
	 * BSRQ is relied upon to leave the destination unchanged when the
	 * source is zero, so bitpos stays at -1 and the function returns 0.
	 */
	asm("bsrq %1,%q0"
	    : "+r" (bitpos)
	    : "rm" (x));
	return bitpos + 1;
}
#else
#include <asm-generic/bitops/fls64.h>
#endif

#include <asm-generic/bitops/find.h>

#include <asm-generic/bitops/sched.h>

#include <asm/arch_hweight.h>

#include <asm-generic/bitops/const_hweight.h>

#include <asm-generic/bitops/le.h>

#include <asm-generic/bitops/ext2-atomic-setbit.h>

#endif /* __KERNEL__ */
#endif /* _ASM_X86_BITOPS_H */