/* SPDX-License-Identifier: GPL-2.0 */
#ifndef _ASM_X86_BITOPS_H
#define _ASM_X86_BITOPS_H

/*
 * Copyright 1992, Linus Torvalds.
 *
 * Note: inlines with more than one statement should be marked
 * __always_inline to avoid problems with older gcc's inlining heuristics.
 */
#ifndef _LINUX_BITOPS_H
#error only <linux/bitops.h> can be included directly
#endif

#include <linux/compiler.h>
#include <asm/alternative.h>
#include <asm/rmwcc.h>
#include <asm/barrier.h>

#if BITS_PER_LONG == 32
# define _BITOPS_LONG_SHIFT 5
#elif BITS_PER_LONG == 64
# define _BITOPS_LONG_SHIFT 6
#else
# error "Unexpected BITS_PER_LONG"
#endif

#define BIT_64(n)	(U64_C(1) << (n))
/*
 * These have to be done with inline assembly: that way the bit-setting
 * is guaranteed to be atomic. All bit operations return 0 if the bit
 * was cleared before the operation and != 0 if it was not.
 *
 * bit 0 is the LSB of addr; bit 32 is the LSB of (addr+1).
 */

#define RLONG_ADDR(x)	 "m" (*(volatile long *) (x))
#define WBYTE_ADDR(x)	"+m" (*(volatile char *) (x))

#define ADDR	RLONG_ADDR(addr)
/*
 * We do the locked ops that don't return the old value as
 * a mask operation on a byte.
 */
#define CONST_MASK_ADDR(nr, addr)	WBYTE_ADDR((void *)(addr) + ((nr)>>3))
#define CONST_MASK(nr)			(1 << ((nr) & 7))

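/*
 * arch_set_bit - atomically set a bit in memory
 * @nr: the bit to set
 * @addr: the address to start counting from
 *
 * For a compile-time constant @nr this emits a LOCKed byte-wide OR of
 * the mask into the byte containing the bit; otherwise it falls back
 * to LOCK BTS.
 */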
static __always_inline void
arch_set_bit(long nr, volatile unsigned long *addr)
{
	if (__builtin_constant_p(nr)) {
		asm volatile(LOCK_PREFIX "orb %b1,%0"
			: CONST_MASK_ADDR(nr, addr)
			: "iq" (CONST_MASK(nr))
			: "memory");
	} else {
		asm volatile(LOCK_PREFIX __ASM_SIZE(bts) " %1,%0"
			: : RLONG_ADDR(addr), "Ir" (nr) : "memory");
	}
}

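/*
 * arch___set_bit - non-atomic version of arch_set_bit(); the plain BTS
 * is not LOCKed, so the caller must serialize access to the word.
 */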
static __always_inline void
arch___set_bit(long nr, volatile unsigned long *addr)
{
	asm volatile(__ASM_SIZE(bts) " %1,%0" : : ADDR, "Ir" (nr) : "memory");
}

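/*
 * arch_clear_bit - atomically clear a bit in memory
 * @nr: the bit to clear
 * @addr: the address to start counting from
 *
 * Mirrors arch_set_bit(): a LOCKed byte-wide AND with the inverted
 * mask for constant @nr, LOCK BTR otherwise.
 */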
static __always_inline void
arch_clear_bit(long nr, volatile unsigned long *addr)
{
	if (__builtin_constant_p(nr)) {
		asm volatile(LOCK_PREFIX "andb %b1,%0"
			: CONST_MASK_ADDR(nr, addr)
			: "iq" (~CONST_MASK(nr)));
	} else {
		asm volatile(LOCK_PREFIX __ASM_SIZE(btr) " %1,%0"
			: : RLONG_ADDR(addr), "Ir" (nr) : "memory");
	}
}

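/*
 * arch_clear_bit_unlock - clear a bit with release (unlock) semantics.
 * The barrier() prevents the compiler from moving accesses out of the
 * critical section; the LOCKed clear already orders the hardware.
 */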
static __always_inline void
arch_clear_bit_unlock(long nr, volatile unsigned long *addr)
{
	barrier();
	arch_clear_bit(nr, addr);
}

static __always_inline void
arch___clear_bit(long nr, volatile unsigned long *addr)
{
	asm volatile(__ASM_SIZE(btr) " %1,%0" : : ADDR, "Ir" (nr) : "memory");
}

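/*
 * arch_clear_bit_unlock_is_negative_byte - clear bit @nr and report
 * whether the containing byte is negative (bit 7 set) afterwards,
 * using a single LOCKed byte-wide AND and the sign flag.
 */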
static __always_inline bool
arch_clear_bit_unlock_is_negative_byte(long nr, volatile unsigned long *addr)
{
	bool negative;
	asm volatile(LOCK_PREFIX "andb %2,%1"
		CC_SET(s)
		: CC_OUT(s) (negative), WBYTE_ADDR(addr)
		: "ir" ((char) ~(1 << nr)) : "memory");
	return negative;
}
#define arch_clear_bit_unlock_is_negative_byte \
	arch_clear_bit_unlock_is_negative_byte

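/*
 * arch___clear_bit_unlock - non-atomic unlock: safe only when no other
 * CPU can concurrently modify other bits in the same word.
 */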
static __always_inline void
arch___clear_bit_unlock(long nr, volatile unsigned long *addr)
{
	arch___clear_bit(nr, addr);
}

static __always_inline void
arch___change_bit(long nr, volatile unsigned long *addr)
{
	asm volatile(__ASM_SIZE(btc) " %1,%0" : : ADDR, "Ir" (nr) : "memory");
}

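/*
 * arch_change_bit - atomically toggle a bit: a LOCKed byte-wide XOR for
 * constant @nr, LOCK BTC otherwise.
 */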
static __always_inline void
arch_change_bit(long nr, volatile unsigned long *addr)
{
	if (__builtin_constant_p(nr)) {
		asm volatile(LOCK_PREFIX "xorb %b1,%0"
			: CONST_MASK_ADDR(nr, addr)
			: "iq" (CONST_MASK(nr)));
	} else {
		asm volatile(LOCK_PREFIX __ASM_SIZE(btc) " %1,%0"
			: : RLONG_ADDR(addr), "Ir" (nr) : "memory");
	}
}

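/*
 * arch_test_and_set_bit - atomically set a bit and return its old value.
 * GEN_BINARY_RMWcc() emits LOCK BTS and extracts the result from the
 * carry flag.
 */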
static __always_inline bool
arch_test_and_set_bit(long nr, volatile unsigned long *addr)
{
	return GEN_BINARY_RMWcc(LOCK_PREFIX __ASM_SIZE(bts), *addr, c, "Ir", nr);
}

static __always_inline bool
arch_test_and_set_bit_lock(long nr, volatile unsigned long *addr)
{
	return arch_test_and_set_bit(nr, addr);
}

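/*
 * arch___test_and_set_bit - non-atomic test-and-set: a plain BTS with
 * the old bit returned via the carry flag.
 */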
static __always_inline bool
arch___test_and_set_bit(long nr, volatile unsigned long *addr)
{
	bool oldbit;

	asm(__ASM_SIZE(bts) " %2,%1"
	    CC_SET(c)
	    : CC_OUT(c) (oldbit)
	    : ADDR, "Ir" (nr) : "memory");
	return oldbit;
}

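/*
 * arch_test_and_clear_bit - atomically clear a bit and return its old
 * value, via LOCK BTR.
 */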
static __always_inline bool
arch_test_and_clear_bit(long nr, volatile unsigned long *addr)
{
	return GEN_BINARY_RMWcc(LOCK_PREFIX __ASM_SIZE(btr), *addr, c, "Ir", nr);
}

/*
 * Note: the operation is performed atomically with respect to
 * the local CPU, but not other CPUs. Portable code should not
 * rely on this behaviour.
 * KVM relies on this behaviour on x86 for modifying memory that is also
 * accessed from a hypervisor on the same CPU if running in a VM: don't change
 * this without also updating arch/x86/kernel/kvm.c
 */
static __always_inline bool
arch___test_and_clear_bit(long nr, volatile unsigned long *addr)
{
	bool oldbit;

	asm volatile(__ASM_SIZE(btr) " %2,%1"
		     CC_SET(c)
		     : CC_OUT(c) (oldbit)
		     : ADDR, "Ir" (nr) : "memory");
	return oldbit;
}

static __always_inline bool
arch___test_and_change_bit(long nr, volatile unsigned long *addr)
{
	bool oldbit;

	asm volatile(__ASM_SIZE(btc) " %2,%1"
		     CC_SET(c)
		     : CC_OUT(c) (oldbit)
		     : ADDR, "Ir" (nr) : "memory");

	return oldbit;
}

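/*
 * arch_test_and_change_bit - atomically toggle a bit and return its old
 * value, via LOCK BTC.
 */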
static __always_inline bool
arch_test_and_change_bit(long nr, volatile unsigned long *addr)
{
	return GEN_BINARY_RMWcc(LOCK_PREFIX __ASM_SIZE(btc), *addr, c, "Ir", nr);
}

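/*
 * arch_test_bit() resolves to a plain C load and mask when @nr is a
 * compile-time constant, and to a BT instruction otherwise.
 */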
static __always_inline bool constant_test_bit(long nr, const volatile unsigned long *addr)
{
	return ((1UL << (nr & (BITS_PER_LONG-1))) &
		(addr[nr >> _BITOPS_LONG_SHIFT])) != 0;
}

static __always_inline bool variable_test_bit(long nr, volatile const unsigned long *addr)
{
	bool oldbit;

	asm volatile(__ASM_SIZE(bt) " %2,%1"
		     CC_SET(c)
		     : CC_OUT(c) (oldbit)
		     : "m" (*(unsigned long *)addr), "Ir" (nr) : "memory");

	return oldbit;
}

#define arch_test_bit(nr, addr)			\
	(__builtin_constant_p((nr))		\
	 ? constant_test_bit((nr), (addr))	\
	 : variable_test_bit((nr), (addr)))

/**
 * __ffs - find first set bit in word
 * @word: The word to search
 *
 * Undefined if no bit exists, so code should check against 0 first.
 */
static __always_inline unsigned long __ffs(unsigned long word)
{
	asm("rep; bsf %1,%0"
		: "=r" (word)
		: "rm" (word));
	return word;
}

/**
 * ffz - find first zero bit in word
 * @word: The word to search
 *
 * Undefined if no zero exists, so code should check against ~0UL first.
 */
static __always_inline unsigned long ffz(unsigned long word)
{
	asm("rep; bsf %1,%0"
		: "=r" (word)
		: "r" (~word));
	return word;
}

/**
 * __fls - find last set bit in word
 * @word: The word to search
 *
 * Undefined if no set bit exists, so code should check against 0 first.
 */
static __always_inline unsigned long __fls(unsigned long word)
{
	asm("bsr %1,%0"
	    : "=r" (word)
	    : "rm" (word));
	return word;
}

#undef ADDR

#ifdef __KERNEL__
/**
 * ffs - find first set bit in word
 * @x: the word to search
 *
 * This is defined the same way as the libc and compiler builtin ffs
 * routines, therefore differs in spirit from the other bitops.
 *
 * ffs(value) returns 0 if value is 0 or the position of the first
 * set bit if value is nonzero. The first (least significant) bit
 * is at position 1.
 */
static __always_inline int ffs(int x)
{
	int r;

#ifdef CONFIG_X86_64
	/*
	 * AMD64 says BSFL won't clobber the dest reg if x==0; Intel64 says the
	 * dest reg is undefined if x==0, but their CPU architect says its
	 * value is written to set it to the same as before, except that the
	 * top 32 bits will be cleared.
	 *
	 * We cannot do this on 32 bits because at the very least some
	 * 486 CPUs did not behave this way.
	 */
	asm("bsfl %1,%0"
	    : "=r" (r)
	    : "rm" (x), "0" (-1));
#elif defined(CONFIG_X86_CMOV)
	asm("bsfl %1,%0\n\t"
	    "cmovzl %2,%0"
	    : "=&r" (r) : "rm" (x), "r" (-1));
#else
	asm("bsfl %1,%0\n\t"
	    "jnz 1f\n\t"
	    "movl $-1,%0\n"
	    "1:" : "=r" (r) : "rm" (x));
#endif
	return r + 1;
}

/**
 * fls - find last set bit in word
 * @x: the word to search
 *
 * This is defined in a similar way as the libc and compiler builtin
 * ffs, but returns the position of the most significant set bit.
 *
 * fls(value) returns 0 if value is 0 or the position of the last
 * set bit if value is nonzero. The last (most significant) bit is
 * at position 32.
 */
static __always_inline int fls(unsigned int x)
{
	int r;

#ifdef CONFIG_X86_64
	/*
	 * AMD64 says BSRL won't clobber the dest reg if x==0; Intel64 says the
	 * dest reg is undefined if x==0, but their CPU architect says its
	 * value is written to set it to the same as before, except that the
	 * top 32 bits will be cleared.
	 *
	 * We cannot do this on 32 bits because at the very least some
	 * 486 CPUs did not behave this way.
	 */
	asm("bsrl %1,%0"
	    : "=r" (r)
	    : "rm" (x), "0" (-1));
#elif defined(CONFIG_X86_CMOV)
	asm("bsrl %1,%0\n\t"
	    "cmovzl %2,%0"
	    : "=&r" (r) : "rm" (x), "rm" (-1));
#else
	asm("bsrl %1,%0\n\t"
	    "jnz 1f\n\t"
	    "movl $-1,%0\n"
	    "1:" : "=r" (r) : "rm" (x));
#endif
	return r + 1;
}

/**
 * fls64 - find last set bit in a 64-bit word
 * @x: the word to search
 *
 * This is defined in a similar way as the libc and compiler builtin
 * ffsll, but returns the position of the most significant set bit.
 *
 * fls64(value) returns 0 if value is 0 or the position of the last
 * set bit if value is nonzero. The last (most significant) bit is
 * at position 64.
 */
#ifdef CONFIG_X86_64
static __always_inline int fls64(__u64 x)
{
	int bitpos = -1;
	/*
	 * AMD64 says BSRQ won't clobber the dest reg if x==0; Intel64 says the
	 * dest reg is undefined if x==0, but their CPU architect says its
	 * value is written to set it to the same as before.
	 */
	asm("bsrq %1,%q0"
	    : "+r" (bitpos)
	    : "rm" (x));
	return bitpos + 1;
}
#else
#include <asm-generic/bitops/fls64.h>
#endif

#include <asm-generic/bitops/find.h>

#include <asm-generic/bitops/sched.h>

#include <asm/arch_hweight.h>

#include <asm-generic/bitops/const_hweight.h>

#include <asm-generic/bitops/instrumented-atomic.h>
#include <asm-generic/bitops/instrumented-non-atomic.h>
#include <asm-generic/bitops/instrumented-lock.h>

#include <asm-generic/bitops/le.h>

#include <asm-generic/bitops/ext2-atomic-setbit.h>

#endif /* __KERNEL__ */
#endif /* _ASM_X86_BITOPS_H */