1#ifndef _ASM_X86_BITOPS_H
2#define _ASM_X86_BITOPS_H
3
4
5
6
7
8
9
10
11#ifndef _LINUX_BITOPS_H
12#error only <linux/bitops.h> can be included directly
13#endif
14
15#include <linux/compiler.h>
16#include <asm/alternative.h>
17#include <asm/rmwcc.h>
18#include <asm/barrier.h>
19
/*
 * _BITOPS_LONG_SHIFT is log2(BITS_PER_LONG): the shift that converts a
 * bit number into an index into an array of unsigned longs.
 */
#if BITS_PER_LONG == 32
# define _BITOPS_LONG_SHIFT 5
#elif BITS_PER_LONG == 64
# define _BITOPS_LONG_SHIFT 6
#else
# error "Unexpected BITS_PER_LONG"
#endif

/* A 64-bit mask with only bit @n set. */
#define BIT_64(n)			(U64_C(1) << (n))
29
30
31
32
33
34
35
36
37
#if __GNUC__ < 4 || (__GNUC__ == 4 && __GNUC_MINOR__ < 1)
/*
 * "=m" is technically wrong here (the location is an input as well as
 * an output), but gcc older than 4.1 chokes on "+m"; keep the
 * workaround for those compilers.
 */
#define BITOP_ADDR(x) "=m" (*(volatile long *) (x))
#else
#define BITOP_ADDR(x) "+m" (*(volatile long *) (x))
#endif

/* Shorthand for the common case where the memory operand is "addr". */
#define ADDR				BITOP_ADDR(addr)

/*
 * Fast-path helpers: when @nr is a compile-time constant, the target
 * bit can be addressed as a single byte ((nr)>>3 bytes past @addr)
 * and manipulated with the 8-bit mask CONST_MASK(nr), avoiding the
 * slower bts/btr/btc forms.
 */
#define IS_IMMEDIATE(nr)		(__builtin_constant_p(nr))
#define CONST_MASK_ADDR(nr, addr)	BITOP_ADDR((void *)(addr) + ((nr)>>3))
#define CONST_MASK(nr)			(1 << ((nr) & 7))
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
/**
 * set_bit - Atomically set a bit in memory
 * @nr: the bit to set
 * @addr: the address to start counting from
 *
 * Atomic (LOCK-prefixed) read-modify-write; the "memory" clobber also
 * makes it a compiler barrier.
 *
 * When @nr is a compile-time constant, the containing byte is addressed
 * directly and set with a one-byte "orb" of CONST_MASK(nr), which is a
 * shorter encoding than "bts".  Otherwise "bts" handles the full
 * (possibly large) bit index relative to @addr.
 */
static __always_inline void
set_bit(long nr, volatile unsigned long *addr)
{
	if (IS_IMMEDIATE(nr)) {
		asm volatile(LOCK_PREFIX "orb %1,%0"
			: CONST_MASK_ADDR(nr, addr)
			: "iq" ((u8)CONST_MASK(nr))
			: "memory");
	} else {
		asm volatile(LOCK_PREFIX "bts %1,%0"
			: BITOP_ADDR(addr) : "Ir" (nr) : "memory");
	}
}
84
85
86
87
88
89
90
91
92
93
/**
 * __set_bit - Set a bit in memory (non-atomic)
 * @nr: the bit to set
 * @addr: the address to start counting from
 *
 * Unlike set_bit(), there is no LOCK prefix, so this is not atomic.
 * The caller must guarantee exclusion (e.g. hold a lock).
 */
static __always_inline void __set_bit(long nr, volatile unsigned long *addr)
{
	asm volatile("bts %1,%0" : ADDR : "Ir" (nr) : "memory");
}
98
99
100
101
102
103
104
105
106
107
108
/**
 * clear_bit - Clears a bit in memory
 * @nr: Bit to clear
 * @addr: Address to start counting from
 *
 * Atomic (LOCK-prefixed).  Note there is no "memory" clobber here, so
 * clear_bit() is not a compiler barrier; callers needing release
 * ordering should use clear_bit_unlock().
 *
 * As with set_bit(), a compile-time-constant @nr takes the short
 * one-byte "andb" path with the inverted CONST_MASK.
 */
static __always_inline void
clear_bit(long nr, volatile unsigned long *addr)
{
	if (IS_IMMEDIATE(nr)) {
		asm volatile(LOCK_PREFIX "andb %1,%0"
			: CONST_MASK_ADDR(nr, addr)
			: "iq" ((u8)~CONST_MASK(nr)));
	} else {
		asm volatile(LOCK_PREFIX "btr %1,%0"
			: BITOP_ADDR(addr)
			: "Ir" (nr));
	}
}
122
123
124
125
126
127
128
129
130
/**
 * clear_bit_unlock - Clears a bit in memory with release semantics
 * @nr: Bit to clear
 * @addr: Address to start counting from
 *
 * barrier() stops the compiler from sinking earlier accesses past the
 * clear; the LOCK-prefixed clear_bit() provides the CPU-level ordering.
 */
static __always_inline void clear_bit_unlock(long nr, volatile unsigned long *addr)
{
	barrier();
	clear_bit(nr, addr);
}
136
/**
 * __clear_bit - Clears a bit in memory (non-atomic)
 * @nr: the bit to clear
 * @addr: the address to start counting from
 *
 * No LOCK prefix: the caller must guarantee exclusion.
 */
static __always_inline void __clear_bit(long nr, volatile unsigned long *addr)
{
	asm volatile("btr %1,%0" : ADDR : "Ir" (nr));
}
141
/**
 * clear_bit_unlock_is_negative_byte - atomically clear bit @nr and
 * report whether the containing byte is "negative" (bit 7 set) after
 * the clear.
 * @nr: the bit to clear
 * @addr: the address the byte lives at
 *
 * A single LOCK-prefixed "andb" does the clear; the resulting sign
 * flag is captured via CC_SET(s)/CC_OUT(s).
 *
 * NOTE(review): the mask is built in a plain char, so @nr is assumed
 * to be < 8 (a bit within the addressed byte) — confirm at call sites.
 */
static __always_inline bool clear_bit_unlock_is_negative_byte(long nr, volatile unsigned long *addr)
{
	bool negative;
	asm volatile(LOCK_PREFIX "andb %2,%1\n\t"
		CC_SET(s)
		: CC_OUT(s) (negative), ADDR
		: "ir" ((char) ~(1 << nr)) : "memory");
	return negative;
}

/* Tell core code an arch-specific implementation exists. */
#define clear_bit_unlock_is_negative_byte clear_bit_unlock_is_negative_byte
154
155
156
157
158
159
160
161
162
163
164
165
166
/**
 * __clear_bit_unlock - Clears a bit in memory with release ordering
 * @nr: the bit to clear
 * @addr: the address to start counting from
 *
 * Like clear_bit_unlock() but uses the non-atomic __clear_bit();
 * barrier() provides the compiler ordering.  NOTE(review): safe only
 * where the caller already owns the word exclusively — confirm usage.
 */
static __always_inline void __clear_bit_unlock(long nr, volatile unsigned long *addr)
{
	barrier();
	__clear_bit(nr, addr);
}
172
173
174
175
176
177
178
179
180
181
/**
 * __change_bit - Toggle a bit in memory (non-atomic)
 * @nr: the bit to change
 * @addr: the address to start counting from
 *
 * No LOCK prefix: the caller must guarantee exclusion.
 */
static __always_inline void __change_bit(long nr, volatile unsigned long *addr)
{
	asm volatile("btc %1,%0" : ADDR : "Ir" (nr));
}
186
187
188
189
190
191
192
193
194
195
/**
 * change_bit - Toggle a bit in memory
 * @nr: Bit to change
 * @addr: Address to start counting from
 *
 * Atomic (LOCK-prefixed).  A compile-time-constant @nr takes the short
 * one-byte "xorb" path; otherwise "btc" is used.
 */
static __always_inline void change_bit(long nr, volatile unsigned long *addr)
{
	if (IS_IMMEDIATE(nr)) {
		asm volatile(LOCK_PREFIX "xorb %1,%0"
			: CONST_MASK_ADDR(nr, addr)
			: "iq" ((u8)CONST_MASK(nr)));
	} else {
		asm volatile(LOCK_PREFIX "btc %1,%0"
			: BITOP_ADDR(addr)
			: "Ir" (nr));
	}
}
208
209
210
211
212
213
214
215
216
/**
 * test_and_set_bit - Set a bit and return its old value
 * @nr: Bit to set
 * @addr: Address to count from
 *
 * Atomic: LOCK-prefixed "bts".  GEN_BINARY_RMWcc() returns the old
 * bit value out of the carry flag ("c").
 */
static __always_inline bool test_and_set_bit(long nr, volatile unsigned long *addr)
{
	GEN_BINARY_RMWcc(LOCK_PREFIX "bts", *addr, "Ir", nr, "%0", c);
}
221
222
223
224
225
226
227
228
/**
 * test_and_set_bit_lock - Set a bit and return its old value, for locks
 * @nr: Bit to set
 * @addr: Address to count from
 *
 * Identical to test_and_set_bit() on x86: the LOCK-prefixed operation
 * already supplies the ordering a lock acquisition needs.
 */
static __always_inline bool
test_and_set_bit_lock(long nr, volatile unsigned long *addr)
{
	return test_and_set_bit(nr, addr);
}
234
235
236
237
238
239
240
241
242
243
/**
 * __test_and_set_bit - Set a bit and return its old value (non-atomic)
 * @nr: Bit to set
 * @addr: Address to count from
 *
 * No LOCK prefix — can fail against a concurrent writer; the caller
 * must guarantee exclusion.  The old bit value comes from the carry
 * flag via CC_SET(c)/CC_OUT(c).
 */
static __always_inline bool __test_and_set_bit(long nr, volatile unsigned long *addr)
{
	bool oldbit;

	asm("bts %2,%1\n\t"
	    CC_SET(c)
	    : CC_OUT(c) (oldbit), ADDR
	    : "Ir" (nr));
	return oldbit;
}
254
255
256
257
258
259
260
261
262
/**
 * test_and_clear_bit - Clear a bit and return its old value
 * @nr: Bit to clear
 * @addr: Address to count from
 *
 * Atomic: LOCK-prefixed "btr"; old value returned from the carry flag.
 */
static __always_inline bool test_and_clear_bit(long nr, volatile unsigned long *addr)
{
	GEN_BINARY_RMWcc(LOCK_PREFIX "btr", *addr, "Ir", nr, "%0", c);
}
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
/**
 * __test_and_clear_bit - Clear a bit and return its old value (non-atomic)
 * @nr: Bit to clear
 * @addr: Address to count from
 *
 * No LOCK prefix — the caller must guarantee exclusion.
 */
static __always_inline bool __test_and_clear_bit(long nr, volatile unsigned long *addr)
{
	bool oldbit;

	asm volatile("btr %2,%1\n\t"
		     CC_SET(c)
		     : CC_OUT(c) (oldbit), ADDR
		     : "Ir" (nr));
	return oldbit;
}
294
295
/**
 * __test_and_change_bit - Toggle a bit and return its old value (non-atomic)
 * @nr: Bit to toggle
 * @addr: Address to count from
 *
 * No LOCK prefix — the caller must guarantee exclusion.
 */
static __always_inline bool __test_and_change_bit(long nr, volatile unsigned long *addr)
{
	bool oldbit;

	asm volatile("btc %2,%1\n\t"
		     CC_SET(c)
		     : CC_OUT(c) (oldbit), ADDR
		     : "Ir" (nr) : "memory");

	return oldbit;
}
307
308
309
310
311
312
313
314
315
/**
 * test_and_change_bit - Toggle a bit and return its old value
 * @nr: Bit to toggle
 * @addr: Address to count from
 *
 * Atomic: LOCK-prefixed "btc"; old value returned from the carry flag.
 */
static __always_inline bool test_and_change_bit(long nr, volatile unsigned long *addr)
{
	GEN_BINARY_RMWcc(LOCK_PREFIX "btc", *addr, "Ir", nr, "%0", c);
}
320
321static __always_inline bool constant_test_bit(long nr, const volatile unsigned long *addr)
322{
323 return ((1UL << (nr & (BITS_PER_LONG-1))) &
324 (addr[nr >> _BITOPS_LONG_SHIFT])) != 0;
325}
326
/*
 * variable_test_bit - test a bit with a runtime-variable @nr, using the
 * "bt" instruction; the result is read back from the carry flag via
 * CC_SET(c)/CC_OUT(c).  The cast drops volatile for the "m" operand;
 * the asm itself is volatile so the access is not elided.
 */
static __always_inline bool variable_test_bit(long nr, volatile const unsigned long *addr)
{
	bool oldbit;

	asm volatile("bt %2,%1\n\t"
		     CC_SET(c)
		     : CC_OUT(c) (oldbit)
		     : "m" (*(unsigned long *)addr), "Ir" (nr));

	return oldbit;
}
338
#if 0 /* Fool kernel-doc since it doesn't do macros yet */
/**
 * test_bit - Determine whether a bit is set
 * @nr: bit number to test
 * @addr: Address to start counting from
 */
static bool test_bit(int nr, const volatile unsigned long *addr);
#endif

/* Dispatch at compile time: constant @nr takes the pure-C fold path. */
#define test_bit(nr, addr)			\
	(__builtin_constant_p((nr))		\
	 ? constant_test_bit((nr), (addr))	\
	 : variable_test_bit((nr), (addr)))
352
353
354
355
356
357
358
/**
 * __ffs - find first set bit in word
 * @word: The word to search
 *
 * Undefined if no bit is set, so callers should check against 0 first.
 * "rep; bsf" is the TZCNT encoding: CPUs with BMI execute it as tzcnt,
 * older ones ignore the rep prefix and run plain bsf — same result for
 * nonzero input.
 */
static __always_inline unsigned long __ffs(unsigned long word)
{
	asm("rep; bsf %1,%0"
		: "=r" (word)
		: "rm" (word));
	return word;
}
366
367
368
369
370
371
372
/**
 * ffz - find first zero bit in word
 * @word: The word to search
 *
 * Undefined if no zero exists (i.e. word == ~0UL), so callers should
 * check for that first.  Implemented as __ffs of the complement; the
 * "r" constraint (not "rm") because ~word is a computed temporary.
 */
static __always_inline unsigned long ffz(unsigned long word)
{
	asm("rep; bsf %1,%0"
		: "=r" (word)
		: "r" (~word));
	return word;
}
380
381
382
383
384
385
386
/**
 * __fls - find last set bit in word
 * @word: The word to search
 *
 * Undefined if no set bit exists, so callers should check against 0
 * first.
 */
static __always_inline unsigned long __fls(unsigned long word)
{
	asm("bsr %1,%0"
		: "=r" (word)
		: "rm" (word));
	return word;
}
394
395#undef ADDR
396
397#ifdef __KERNEL__
398
399
400
401
402
403
404
405
406
407
408
/**
 * ffs - find first set bit in word
 * @x: the word to search
 *
 * Defined the same way as the libc and compiler builtin ffs routines:
 * returns 1 for the least significant set bit up to 32 for the most
 * significant one, and 0 if no bits are set — differing from the
 * other bitops above by one.
 */
static __always_inline int ffs(int x)
{
	int r;

#ifdef CONFIG_X86_64
	/*
	 * AMD64 documents that BSF leaves the destination unchanged when
	 * the source is zero; Intel64 calls it undefined, but in practice
	 * the destination is preserved (upper 32 bits cleared).  So
	 * pre-loading %0 with -1 via "0" (-1) makes x == 0 yield
	 * -1 + 1 = 0 with no branch.  NOTE(review): relies on that
	 * undocumented Intel behaviour; not usable on 32-bit.
	 */
	asm("bsfl %1,%0"
	    : "=r" (r)
	    : "rm" (x), "0" (-1));
#elif defined(CONFIG_X86_CMOV)
	/* 32-bit with cmov: patch up the x == 0 case branchlessly. */
	asm("bsfl %1,%0\n\t"
	    "cmovzl %2,%0"
	    : "=&r" (r) : "rm" (x), "r" (-1));
#else
	/* No cmov: branch on the zero flag instead. */
	asm("bsfl %1,%0\n\t"
	    "jnz 1f\n\t"
	    "movl $-1,%0\n"
	    "1:" : "=r" (r) : "rm" (x));
#endif
	return r + 1;
}
438
439
440
441
442
443
444
445
446
447
448
449
/**
 * fls - find last set bit in word
 * @x: the word to search
 *
 * Defined the same way as ffs: returns 1 for the least significant set
 * bit up to 32 for the most significant one, and 0 if no bits are set.
 */
static __always_inline int fls(int x)
{
	int r;

#ifdef CONFIG_X86_64
	/*
	 * AMD64 documents that BSR leaves the destination unchanged when
	 * the source is zero; Intel64 calls it undefined but preserves it
	 * in practice (upper 32 bits cleared).  Pre-loading %0 with -1
	 * makes x == 0 yield -1 + 1 = 0 with no branch.  NOTE(review):
	 * relies on that undocumented Intel behaviour.
	 */
	asm("bsrl %1,%0"
	    : "=r" (r)
	    : "rm" (x), "0" (-1));
#elif defined(CONFIG_X86_CMOV)
	/* 32-bit with cmov: patch up the x == 0 case branchlessly. */
	asm("bsrl %1,%0\n\t"
	    "cmovzl %2,%0"
	    : "=&r" (r) : "rm" (x), "rm" (-1));
#else
	/* No cmov: branch on the zero flag instead. */
	asm("bsrl %1,%0\n\t"
	    "jnz 1f\n\t"
	    "movl $-1,%0\n"
	    "1:" : "=r" (r) : "rm" (x));
#endif
	return r + 1;
}
479
480
481
482
483
484
485
486
487
488
489
490
491#ifdef CONFIG_X86_64
/**
 * fls64 - find last set bit in a 64-bit word
 * @x: the word to search
 *
 * Returns 1-based position of the most significant set bit
 * (fls64(1) == 1, fls64(1ULL << 63) == 64), or 0 if @x == 0.
 *
 * @bitpos is pre-loaded with -1 ("+r" makes it an input as well as an
 * output), so when bsrq leaves the destination unmodified for x == 0
 * the result is -1 + 1 = 0.  NOTE(review): AMD documents that BSR
 * preserves the destination when the source is zero; Intel documents
 * it as undefined but behaves the same in practice.
 */
static __always_inline int fls64(__u64 x)
{
	int bitpos = -1;

	asm("bsrq %1,%q0"
	    : "+r" (bitpos)
	    : "rm" (x));
	return bitpos + 1;
}
505#else
506#include <asm-generic/bitops/fls64.h>
507#endif
508
509#include <asm-generic/bitops/find.h>
510
511#include <asm-generic/bitops/sched.h>
512
513#include <asm/arch_hweight.h>
514
515#include <asm-generic/bitops/const_hweight.h>
516
517#include <asm-generic/bitops/le.h>
518
519#include <asm-generic/bitops/ext2-atomic-setbit.h>
520
521#endif
522#endif
523