1
2
3
4
5
6
7
8
9#ifndef _ASM_BITOPS_H
10#define _ASM_BITOPS_H
11
12#ifndef _LINUX_BITOPS_H
13#error only <linux/bitops.h> can be included directly
14#endif
15
16#include <linux/compiler.h>
17#include <linux/types.h>
18#include <asm/barrier.h>
19#include <asm/byteorder.h>
20#include <asm/compiler.h>
21#include <asm/cpu-features.h>
22#include <asm/llsc.h>
23#include <asm/sgidefs.h>
24#include <asm/war.h>
25
26
27
28
29
/*
 * Out-of-line fallback implementations used when the CPU cannot use
 * LL/SC (see the `else` arms of the functions below).  Defined
 * elsewhere in the arch code — presumably arch/mips/lib/bitops.c;
 * verify against the build.  The test_and_* variants return the
 * previous value of the bit (non-zero if it was set).
 */
void __mips_set_bit(unsigned long nr, volatile unsigned long *addr);
void __mips_clear_bit(unsigned long nr, volatile unsigned long *addr);
void __mips_change_bit(unsigned long nr, volatile unsigned long *addr);
int __mips_test_and_set_bit(unsigned long nr,
			    volatile unsigned long *addr);
int __mips_test_and_set_bit_lock(unsigned long nr,
				 volatile unsigned long *addr);
int __mips_test_and_clear_bit(unsigned long nr,
			      volatile unsigned long *addr);
int __mips_test_and_change_bit(unsigned long nr,
			       volatile unsigned long *addr);
41
42
43
44
45
46
47
48
49
50
51
52
53static inline void set_bit(unsigned long nr, volatile unsigned long *addr)
54{
55 unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
56 int bit = nr & SZLONG_MASK;
57 unsigned long temp;
58
59 if (kernel_uses_llsc && R10000_LLSC_WAR) {
60 __asm__ __volatile__(
61 " .set push \n"
62 " .set arch=r4000 \n"
63 "1: " __LL "%0, %1 # set_bit \n"
64 " or %0, %2 \n"
65 " " __SC "%0, %1 \n"
66 " beqzl %0, 1b \n"
67 " .set pop \n"
68 : "=&r" (temp), "=" GCC_OFF_SMALL_ASM() (*m)
69 : "ir" (1UL << bit), GCC_OFF_SMALL_ASM() (*m));
70#if defined(CONFIG_CPU_MIPSR2) || defined(CONFIG_CPU_MIPSR6)
71 } else if (kernel_uses_llsc && __builtin_constant_p(bit)) {
72 loongson_llsc_mb();
73 do {
74 __asm__ __volatile__(
75 " " __LL "%0, %1 # set_bit \n"
76 " " __INS "%0, %3, %2, 1 \n"
77 " " __SC "%0, %1 \n"
78 : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m)
79 : "ir" (bit), "r" (~0));
80 } while (unlikely(!temp));
81#endif
82 } else if (kernel_uses_llsc) {
83 loongson_llsc_mb();
84 do {
85 __asm__ __volatile__(
86 " .set push \n"
87 " .set "MIPS_ISA_ARCH_LEVEL" \n"
88 " " __LL "%0, %1 # set_bit \n"
89 " or %0, %2 \n"
90 " " __SC "%0, %1 \n"
91 " .set pop \n"
92 : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m)
93 : "ir" (1UL << bit));
94 } while (unlikely(!temp));
95 } else
96 __mips_set_bit(nr, addr);
97}
98
99
100
101
102
103
104
105
106
107
108
/*
 * clear_bit - Clears a bit in memory
 * @nr: Bit to clear
 * @addr: Address to start counting from
 *
 * clear_bit() is atomic and may not be reordered.  However, it does
 * not contain a memory barrier, so if it is used for locking purposes,
 * you should call smp_mb__before_atomic() and/or smp_mb__after_atomic()
 * in order to ensure changes are visible on other processors.
 */
static inline void clear_bit(unsigned long nr, volatile unsigned long *addr)
{
	unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
	int bit = nr & SZLONG_MASK;
	unsigned long temp;

	if (kernel_uses_llsc && R10000_LLSC_WAR) {
		/* R10000 errata workaround: SC must be followed by beqzl. */
		__asm__ __volatile__(
		"	.set	push					\n"
		"	.set	arch=r4000				\n"
		"1:	" __LL "%0, %1			# clear_bit	\n"
		"	and	%0, %2					\n"
		"	" __SC "%0, %1					\n"
		"	beqzl	%0, 1b					\n"
		"	.set	pop					\n"
		: "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m)
		: "ir" (~(1UL << bit)));
#if defined(CONFIG_CPU_MIPSR2) || defined(CONFIG_CPU_MIPSR6)
	} else if (kernel_uses_llsc && __builtin_constant_p(bit)) {
		/* R2/R6: a constant bit index can be cleared with INS from $0. */
		loongson_llsc_mb();
		do {
			__asm__ __volatile__(
			"	" __LL "%0, %1		# clear_bit	\n"
			"	" __INS "%0, $0, %2, 1			\n"
			"	" __SC "%0, %1				\n"
			: "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m)
			: "ir" (bit));
		} while (unlikely(!temp));	/* retry until SC succeeds */
#endif /* CONFIG_CPU_MIPSR2 || CONFIG_CPU_MIPSR6 */
	} else if (kernel_uses_llsc) {
		loongson_llsc_mb();	/* Loongson LL/SC speculation workaround */
		do {
			__asm__ __volatile__(
			"	.set	push				\n"
			"	.set	"MIPS_ISA_ARCH_LEVEL"		\n"
			"	" __LL "%0, %1		# clear_bit	\n"
			"	and	%0, %2				\n"
			"	" __SC "%0, %1				\n"
			"	.set	pop				\n"
			: "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m)
			: "ir" (~(1UL << bit)));
		} while (unlikely(!temp));
	} else
		__mips_clear_bit(nr, addr);	/* no LL/SC: out-of-line fallback */
}
154
155
156
157
158
159
160
161
162
/*
 * clear_bit_unlock - Clears a bit in memory
 * @nr: Bit to clear
 * @addr: Address to start counting from
 *
 * clear_bit_unlock() is atomic and implies release semantics before the
 * memory operation.  It can be used for an unlock.  The barrier must
 * come before the clear so that prior stores in the critical section
 * are visible before the lock bit is released.
 */
static inline void clear_bit_unlock(unsigned long nr, volatile unsigned long *addr)
{
	smp_mb__before_atomic();
	clear_bit(nr, addr);
}
168
169
170
171
172
173
174
175
176
177
/*
 * change_bit - Toggle a bit in memory
 * @nr: Bit to change
 * @addr: Address to start counting from
 *
 * change_bit() is atomic and may not be reordered.  Note that @nr may
 * be almost arbitrarily large; this function is not restricted to
 * acting on a single-word quantity.  No memory barrier is implied.
 */
static inline void change_bit(unsigned long nr, volatile unsigned long *addr)
{
	int bit = nr & SZLONG_MASK;

	if (kernel_uses_llsc && R10000_LLSC_WAR) {
		unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
		unsigned long temp;

		/* R10000 errata workaround: SC must be followed by beqzl. */
		__asm__ __volatile__(
		"	.set	push					\n"
		"	.set	arch=r4000				\n"
		"1:	" __LL "%0, %1			# change_bit	\n"
		"	xor	%0, %2					\n"
		"	" __SC	"%0, %1					\n"
		"	beqzl	%0, 1b					\n"
		"	.set	pop					\n"
		: "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m)
		: "ir" (1UL << bit));
	} else if (kernel_uses_llsc) {
		unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
		unsigned long temp;

		loongson_llsc_mb();	/* Loongson LL/SC speculation workaround */
		do {
			__asm__ __volatile__(
			"	.set	push				\n"
			"	.set	"MIPS_ISA_ARCH_LEVEL"		\n"
			"	" __LL "%0, %1		# change_bit	\n"
			"	xor	%0, %2				\n"
			"	" __SC	"%0, %1				\n"
			"	.set	pop				\n"
			: "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m)
			: "ir" (1UL << bit));
		} while (unlikely(!temp));	/* retry until SC succeeds */
	} else
		__mips_change_bit(nr, addr);	/* no LL/SC: out-of-line fallback */
}
215
216
217
218
219
220
221
222
223
/*
 * test_and_set_bit - Set a bit and return its old value
 * @nr: Bit to set
 * @addr: Address to count from
 *
 * This operation is atomic and implies full memory barrier semantics
 * (smp_mb__before_llsc() before, smp_llsc_mb() after).
 */
static inline int test_and_set_bit(unsigned long nr,
	volatile unsigned long *addr)
{
	int bit = nr & SZLONG_MASK;
	unsigned long res;

	smp_mb__before_llsc();

	if (kernel_uses_llsc && R10000_LLSC_WAR) {
		unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
		unsigned long temp;

		/* R10000 errata workaround: SC must be followed by beqzl.
		 * The final `and` recovers the bit's old value from temp. */
		__asm__ __volatile__(
		"	.set	push					\n"
		"	.set	arch=r4000				\n"
		"1:	" __LL "%0, %1		# test_and_set_bit	\n"
		"	or	%2, %0, %3				\n"
		"	" __SC	"%2, %1					\n"
		"	beqzl	%2, 1b					\n"
		"	and	%2, %0, %3				\n"
		"	.set	pop					\n"
		: "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m), "=&r" (res)
		: "r" (1UL << bit)
		: "memory");
	} else if (kernel_uses_llsc) {
		unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
		unsigned long temp;

		do {
			__asm__ __volatile__(
			"	.set	push				\n"
			"	.set	"MIPS_ISA_ARCH_LEVEL"		\n"
			"	" __LL "%0, %1	# test_and_set_bit	\n"
			"	or	%2, %0, %3			\n"
			"	" __SC	"%2, %1				\n"
			"	.set	pop				\n"
			: "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m), "=&r" (res)
			: "r" (1UL << bit)
			: "memory");
		} while (unlikely(!res));	/* res == 0 means SC failed */

		res = temp & (1UL << bit);	/* old value from the LL'd word */
	} else
		res = __mips_test_and_set_bit(nr, addr);

	smp_llsc_mb();

	return res != 0;
}
273
274
275
276
277
278
279
280
281
282static inline int test_and_set_bit_lock(unsigned long nr,
283 volatile unsigned long *addr)
284{
285 int bit = nr & SZLONG_MASK;
286 unsigned long res;
287
288 if (kernel_uses_llsc && R10000_LLSC_WAR) {
289 unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
290 unsigned long temp;
291
292 __asm__ __volatile__(
293 " .set push \n"
294 " .set arch=r4000 \n"
295 "1: " __LL "%0, %1 # test_and_set_bit \n"
296 " or %2, %0, %3 \n"
297 " " __SC "%2, %1 \n"
298 " beqzl %2, 1b \n"
299 " and %2, %0, %3 \n"
300 " .set pop \n"
301 : "=&r" (temp), "+m" (*m), "=&r" (res)
302 : "r" (1UL << bit)
303 : "memory");
304 } else if (kernel_uses_llsc) {
305 unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
306 unsigned long temp;
307
308 do {
309 __asm__ __volatile__(
310 " .set push \n"
311 " .set "MIPS_ISA_ARCH_LEVEL" \n"
312 " " __LL "%0, %1 # test_and_set_bit \n"
313 " or %2, %0, %3 \n"
314 " " __SC "%2, %1 \n"
315 " .set pop \n"
316 : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m), "=&r" (res)
317 : "r" (1UL << bit)
318 : "memory");
319 } while (unlikely(!res));
320
321 res = temp & (1UL << bit);
322 } else
323 res = __mips_test_and_set_bit_lock(nr, addr);
324
325 smp_llsc_mb();
326
327 return res != 0;
328}
329
330
331
332
333
334
335
336
/*
 * test_and_clear_bit - Clear a bit and return its old value
 * @nr: Bit to clear
 * @addr: Address to count from
 *
 * This operation is atomic and implies full memory barrier semantics
 * (smp_mb__before_llsc() before, smp_llsc_mb() after).
 */
static inline int test_and_clear_bit(unsigned long nr,
	volatile unsigned long *addr)
{
	int bit = nr & SZLONG_MASK;
	unsigned long res;

	smp_mb__before_llsc();

	if (kernel_uses_llsc && R10000_LLSC_WAR) {
		unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
		unsigned long temp;

		/* R10000 errata workaround: SC must be followed by beqzl.
		 * or-then-xor clears the bit; final `and` yields old value. */
		__asm__ __volatile__(
		"	.set	push					\n"
		"	.set	arch=r4000				\n"
		"1:	" __LL	"%0, %1		# test_and_clear_bit	\n"
		"	or	%2, %0, %3				\n"
		"	xor	%2, %3					\n"
		"	" __SC 	"%2, %1					\n"
		"	beqzl	%2, 1b					\n"
		"	and	%2, %0, %3				\n"
		"	.set	pop					\n"
		: "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m), "=&r" (res)
		: "r" (1UL << bit)
		: "memory");
#if defined(CONFIG_CPU_MIPSR2) || defined(CONFIG_CPU_MIPSR6)
	} else if (kernel_uses_llsc && __builtin_constant_p(nr)) {
		/* R2/R6: EXT extracts the old bit, INS from $0 clears it.
		 * NOTE(review): guards on __builtin_constant_p(nr) whereas
		 * set_bit/clear_bit test __builtin_constant_p(bit) — both
		 * work (constant nr implies constant bit), but worth
		 * confirming the inconsistency is intentional. */
		unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
		unsigned long temp;

		do {
			__asm__ __volatile__(
			"	" __LL	"%0, %1 # test_and_clear_bit	\n"
			"	" __EXT "%2, %0, %3, 1			\n"
			"	" __INS	"%0, $0, %3, 1			\n"
			"	" __SC 	"%0, %1				\n"
			: "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m), "=&r" (res)
			: "ir" (bit)
			: "memory");
		} while (unlikely(!temp));	/* retry until SC succeeds */
#endif /* CONFIG_CPU_MIPSR2 || CONFIG_CPU_MIPSR6 */
	} else if (kernel_uses_llsc) {
		unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
		unsigned long temp;

		do {
			__asm__ __volatile__(
			"	.set	push				\n"
			"	.set	"MIPS_ISA_ARCH_LEVEL"		\n"
			"	" __LL	"%0, %1 # test_and_clear_bit	\n"
			"	or	%2, %0, %3			\n"
			"	xor	%2, %3				\n"
			"	" __SC 	"%2, %1				\n"
			"	.set	pop				\n"
			: "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m), "=&r" (res)
			: "r" (1UL << bit)
			: "memory");
		} while (unlikely(!res));	/* res == 0 means SC failed */

		res = temp & (1UL << bit);	/* old value from the LL'd word */
	} else
		res = __mips_test_and_clear_bit(nr, addr);

	smp_llsc_mb();

	return res != 0;
}
404
405
406
407
408
409
410
411
412
413static inline int test_and_change_bit(unsigned long nr,
414 volatile unsigned long *addr)
415{
416 int bit = nr & SZLONG_MASK;
417 unsigned long res;
418
419 smp_mb__before_llsc();
420
421 if (kernel_uses_llsc && R10000_LLSC_WAR) {
422 unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
423 unsigned long temp;
424
425 __asm__ __volatile__(
426 " .set push \n"
427 " .set arch=r4000 \n"
428 "1: " __LL "%0, %1 # test_and_change_bit \n"
429 " xor %2, %0, %3 \n"
430 " " __SC "%2, %1 \n"
431 " beqzl %2, 1b \n"
432 " and %2, %0, %3 \n"
433 " .set pop \n"
434 : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m), "=&r" (res)
435 : "r" (1UL << bit)
436 : "memory");
437 } else if (kernel_uses_llsc) {
438 unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
439 unsigned long temp;
440
441 do {
442 __asm__ __volatile__(
443 " .set push \n"
444 " .set "MIPS_ISA_ARCH_LEVEL" \n"
445 " " __LL "%0, %1 # test_and_change_bit \n"
446 " xor %2, %0, %3 \n"
447 " " __SC "\t%2, %1 \n"
448 " .set pop \n"
449 : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m), "=&r" (res)
450 : "r" (1UL << bit)
451 : "memory");
452 } while (unlikely(!res));
453
454 res = temp & (1UL << bit);
455 } else
456 res = __mips_test_and_change_bit(nr, addr);
457
458 smp_llsc_mb();
459
460 return res != 0;
461}
462
463#include <asm-generic/bitops/non-atomic.h>
464
465
466
467
468
469
470
471
472
473
/*
 * __clear_bit_unlock - Clears a bit in memory
 * @nr: Bit to clear
 * @addr: Address to start counting from
 *
 * __clear_bit() is non-atomic and implies release semantics before the
 * memory operation.  It can be used for an unlock if no other CPUs can
 * concurrently modify other bits in the word.  nudge_writes() pushes
 * the store out promptly so waiters see the unlock.
 */
static inline void __clear_bit_unlock(unsigned long nr, volatile unsigned long *addr)
{
	smp_mb__before_llsc();
	__clear_bit(nr, addr);
	nudge_writes();
}
480
481
482
483
484
/*
 * __fls - find last (most-significant) set bit in a long word
 * @word: the word to search
 *
 * Undefined if no set bit exists, so code should check against 0 first.
 * Uses the CLZ/DCLZ count-leading-zeros instructions when available;
 * the __builtin_constant_p(word) guards deliberately steer constant
 * arguments to the C fallback below so the compiler can fold the whole
 * call at compile time.
 */
static inline unsigned long __fls(unsigned long word)
{
	int num;

	/* 32-bit kernel with CLZ: 31 - clz(word). */
	if (BITS_PER_LONG == 32 && !__builtin_constant_p(word) &&
	    __builtin_constant_p(cpu_has_clo_clz) && cpu_has_clo_clz) {
		__asm__(
		"	.set	push					\n"
		"	.set	"MIPS_ISA_LEVEL"			\n"
		"	clz	%0, %1					\n"
		"	.set	pop					\n"
		: "=r" (num)
		: "r" (word));

		return 31 - num;
	}

	/* 64-bit kernel with DCLZ: 63 - dclz(word). */
	if (BITS_PER_LONG == 64 && !__builtin_constant_p(word) &&
	    __builtin_constant_p(cpu_has_mips64) && cpu_has_mips64) {
		__asm__(
		"	.set	push					\n"
		"	.set	"MIPS_ISA_LEVEL"			\n"
		"	dclz	%0, %1					\n"
		"	.set	pop					\n"
		: "=r" (num)
		: "r" (word));

		return 63 - num;
	}

	/* Portable fallback: unrolled binary search, shifting the word
	 * left until the top bit is found.  Kept unrolled (not a loop)
	 * so constant inputs fold completely. */
	num = BITS_PER_LONG - 1;

#if BITS_PER_LONG == 64
	if (!(word & (~0ul << 32))) {
		num -= 32;
		word <<= 32;
	}
#endif
	if (!(word & (~0ul << (BITS_PER_LONG-16)))) {
		num -= 16;
		word <<= 16;
	}
	if (!(word & (~0ul << (BITS_PER_LONG-8)))) {
		num -= 8;
		word <<= 8;
	}
	if (!(word & (~0ul << (BITS_PER_LONG-4)))) {
		num -= 4;
		word <<= 4;
	}
	if (!(word & (~0ul << (BITS_PER_LONG-2)))) {
		num -= 2;
		word <<= 2;
	}
	if (!(word & (~0ul << (BITS_PER_LONG-1))))
		num -= 1;
	return num;
}
543
544
545
546
547
548
549
550
/*
 * __ffs - find first (least-significant) set bit in a long word
 * @word: The word to search
 *
 * Returns 0..SZLONG-1; undefined if no bit exists, so code should
 * check against 0 first.
 */
static inline unsigned long __ffs(unsigned long word)
{
	/* Isolate the lowest set bit, then locate it with __fls(). */
	unsigned long lowest = word & -word;

	return __fls(lowest);
}
555
556
557
558
559
560
561
562
/*
 * fls - find last (most-significant) bit set
 * @x: the word to search
 *
 * This is defined the same way as ffs: fls(value) returns 0 if no bit
 * is set, and the 1-based position of the top bit otherwise
 * (fls(1) = 1, fls(0x80000000) = 32).  Uses CLZ when available; the
 * __builtin_constant_p(x) guard steers constant arguments to the C
 * fallback so the whole call can be folded at compile time.
 */
static inline int fls(unsigned int x)
{
	int r;

	if (!__builtin_constant_p(x) &&
	    __builtin_constant_p(cpu_has_clo_clz) && cpu_has_clo_clz) {
		__asm__(
		"	.set	push					\n"
		"	.set	"MIPS_ISA_LEVEL"			\n"
		"	clz	%0, %1					\n"
		"	.set	pop					\n"
		: "=r" (x)
		: "r" (x));

		return 32 - x;	/* x now holds clz(x) */
	}

	/* Portable fallback: unrolled binary search (kept unrolled so
	 * constant inputs fold completely). */
	r = 32;
	if (!x)
		return 0;
	if (!(x & 0xffff0000u)) {
		x <<= 16;
		r -= 16;
	}
	if (!(x & 0xff000000u)) {
		x <<= 8;
		r -= 8;
	}
	if (!(x & 0xf0000000u)) {
		x <<= 4;
		r -= 4;
	}
	if (!(x & 0xc0000000u)) {
		x <<= 2;
		r -= 2;
	}
	if (!(x & 0x80000000u)) {
		x <<= 1;
		r -= 1;
	}
	return r;
}
605
606#include <asm-generic/bitops/fls64.h>
607
608
609
610
611
612
613
614
615
/*
 * ffs - find first bit set
 * @word: the word to search
 *
 * Returns 0 if no bits are set, otherwise the 1-based index of the
 * least-significant set bit — same convention as libc ffs() and
 * therefore differs from __ffs() by one.
 */
static inline int ffs(int word)
{
	/* word & -word isolates the lowest set bit; fls() locates it. */
	return word ? fls(word & -word) : 0;
}
623
624#include <asm-generic/bitops/ffz.h>
625#include <asm-generic/bitops/find.h>
626
627#ifdef __KERNEL__
628
629#include <asm-generic/bitops/sched.h>
630
631#include <asm/arch_hweight.h>
632#include <asm-generic/bitops/const_hweight.h>
633
634#include <asm-generic/bitops/le.h>
635#include <asm-generic/bitops/ext2-atomic.h>
636
637#endif
638
639#endif
640