/*
 * This file is subject to the terms and conditions of the GNU General Public
 * License.  See the file "COPYING" in the main directory of this archive
 * for more details.
 */
#ifndef _ASM_BITOPS_H
#define _ASM_BITOPS_H

#include <linux/types.h>
#include <asm/byteorder.h>

#ifdef __KERNEL__

#include <asm/sgidefs.h>
#include <asm/system.h>

#include <asm-generic/bitops/fls.h>
#include <asm-generic/bitops/__fls.h>
#include <asm-generic/bitops/fls64.h>
#include <asm-generic/bitops/__ffs.h>

/*
 * clear_bit() doesn't provide any barrier for the compiler.
 */
#define smp_mb__before_clear_bit()	barrier()
#define smp_mb__after_clear_bit()	barrier()
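
/*
 * Sketch (illustrative, not part of the original header): when clear_bit()
 * releases a lock-like flag, prior stores must be ordered explicitly since
 * clear_bit() itself has no barrier.  LOCK_BIT and flags_word are
 * hypothetical names.
 */
#if 0
	smp_mb__before_clear_bit();
	clear_bit(LOCK_BIT, &flags_word);	/* release the flag */
#endif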

/*
 * Only disable interrupt for kernel mode stuff to keep usermode stuff
 * that dares to use kernel include files alive.
 */
#define __bi_flags		unsigned long flags
#define __bi_cli()		__cli()
#define __bi_save_flags(x)	__save_flags(x)
#define __bi_save_and_cli(x)	__save_and_cli(x)
#define __bi_restore_flags(x)	__restore_flags(x)
#else
#define __bi_flags
#define __bi_cli()
#define __bi_save_flags(x)
#define __bi_save_and_cli(x)
#define __bi_restore_flags(x)
#endif /* __KERNEL__ */

#ifdef CONFIG_CPU_HAS_LLSC

#include <asm/mipsregs.h>

/*
 * These versions are interrupt and SMP safe: they use the ll/sc
 * (load-linked / store-conditional) instructions available from
 * MIPS ISA II onwards.
 */

/*
 * set_bit - Atomically set a bit in memory
 * @nr: the bit to set
 * @addr: the address to start counting from
 *
 * This function is atomic and may not be reordered.  See __set_bit()
 * if you do not require the atomic guarantees.
 * Note that @nr may be almost arbitrarily large; this function is not
 * restricted to acting on a single-word quantity.
 */
static __inline__ void
set_bit(int nr, volatile void *addr)
{
	unsigned long *m = ((unsigned long *) addr) + (nr >> 5);
	unsigned long temp;

	__asm__ __volatile__(
		"1:\tll\t%0, %1\t\t# set_bit\n\t"
		"or\t%0, %2\n\t"
		"sc\t%0, %1\n\t"
		"beqz\t%0, 1b"
		: "=&r" (temp), "=m" (*m)
		: "ir" (1UL << (nr & 0x1f)), "m" (*m));
}
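
/*
 * Example (illustrative sketch; the bitmap and helper names are
 * hypothetical): a 64-bit pending mask updated atomically, e.g. from
 * interrupt context, using set_bit() above.
 */
#if 0
static unsigned long pending[2];		/* 64 flag bits */

static void mark_pending(int chan)
{
	set_bit(chan, pending);			/* safe against other CPUs */
}
#endif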

/*
 * __set_bit - Set a bit in memory
 * @nr: the bit to set
 * @addr: the address to start counting from
 *
 * Unlike set_bit(), this function is non-atomic and may be reordered.
 * If it's called on the same region of memory simultaneously, the effect
 * may be that only one operation succeeds.
 */
static __inline__ void __set_bit(int nr, volatile void * addr)
{
	unsigned long * m = ((unsigned long *) addr) + (nr >> 5);

	*m |= 1UL << (nr & 31);
}
#define PLATFORM__SET_BIT

/*
 * clear_bit - Clears a bit in memory
 * @nr: Bit to clear
 * @addr: Address to start counting from
 *
 * clear_bit() is atomic and may not be reordered.  However, it does
 * not contain a memory barrier, so if it is used for locking purposes,
 * you should call smp_mb__before_clear_bit() and/or
 * smp_mb__after_clear_bit() in order to ensure changes are visible on
 * other processors.
 */
static __inline__ void
clear_bit(int nr, volatile void *addr)
{
	unsigned long *m = ((unsigned long *) addr) + (nr >> 5);
	unsigned long temp;

	__asm__ __volatile__(
		"1:\tll\t%0, %1\t\t# clear_bit\n\t"
		"and\t%0, %2\n\t"
		"sc\t%0, %1\n\t"
		"beqz\t%0, 1b"
		: "=&r" (temp), "=m" (*m)
		: "ir" (~(1UL << (nr & 0x1f))), "m" (*m));
}

/*
 * change_bit - Toggle a bit in memory
 * @nr: Bit to change
 * @addr: Address to start counting from
 *
 * change_bit() is atomic and may not be reordered.
 * Note that @nr may be almost arbitrarily large; this function is not
 * restricted to acting on a single-word quantity.
 */
static __inline__ void
change_bit(int nr, volatile void *addr)
{
	unsigned long *m = ((unsigned long *) addr) + (nr >> 5);
	unsigned long temp;

	__asm__ __volatile__(
		"1:\tll\t%0, %1\t\t# change_bit\n\t"
		"xor\t%0, %2\n\t"
		"sc\t%0, %1\n\t"
		"beqz\t%0, 1b"
		: "=&r" (temp), "=m" (*m)
		: "ir" (1UL << (nr & 0x1f)), "m" (*m));
}

/*
 * __change_bit - Toggle a bit in memory
 * @nr: the bit to change
 * @addr: the address to start counting from
 *
 * Unlike change_bit(), this function is non-atomic and may be reordered.
 * If it's called on the same region of memory simultaneously, the effect
 * may be that only one operation succeeds.
 */
static __inline__ void __change_bit(int nr, volatile void * addr)
{
	unsigned long * m = ((unsigned long *) addr) + (nr >> 5);

	*m ^= 1UL << (nr & 31);
}

/*
 * test_and_set_bit - Set a bit and return its old value
 * @nr: Bit to set
 * @addr: Address to count from
 *
 * This operation is atomic and cannot be reordered.
 * It also implies a memory barrier.
 */
static __inline__ int
test_and_set_bit(int nr, volatile void *addr)
{
	unsigned long *m = ((unsigned long *) addr) + (nr >> 5);
	unsigned long temp, res;

	__asm__ __volatile__(
		".set\tnoreorder\t\t# test_and_set_bit\n"
		"1:\tll\t%0, %1\n\t"
		"or\t%2, %0, %3\n\t"
		"sc\t%2, %1\n\t"
		"beqz\t%2, 1b\n\t"
		" and\t%2, %0, %3\n\t"
		".set\treorder"
		: "=&r" (temp), "=m" (*m), "=&r" (res)
		: "r" (1UL << (nr & 0x1f)), "m" (*m)
		: "memory");

	return res != 0;
}
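
/*
 * Example (sketch; the flag word and helper are hypothetical): since
 * test_and_set_bit() returns the *old* bit value, it can implement a
 * one-shot claim on a shared resource.
 */
#if 0
static unsigned long busy;

static int try_claim(void)
{
	/* Returns 1 if we set the bit first, 0 if it was already set. */
	return !test_and_set_bit(0, &busy);
}
#endif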

/*
 * __test_and_set_bit - Set a bit and return its old value
 * @nr: Bit to set
 * @addr: Address to count from
 *
 * This operation is non-atomic and can be reordered.
 * If two instances of this operation race, one can appear to succeed
 * but actually fail.  You must protect multiple accesses with a lock.
 */
static __inline__ int __test_and_set_bit(int nr, volatile void * addr)
{
	int mask, retval;
	volatile int *a = addr;

	a += nr >> 5;
	mask = 1 << (nr & 0x1f);
	retval = (mask & *a) != 0;
	*a |= mask;

	return retval;
}

/*
 * test_and_clear_bit - Clear a bit and return its old value
 * @nr: Bit to clear
 * @addr: Address to count from
 *
 * This operation is atomic and cannot be reordered.
 * It also implies a memory barrier.
 */
static __inline__ int
test_and_clear_bit(int nr, volatile void *addr)
{
	unsigned long *m = ((unsigned long *) addr) + (nr >> 5);
	unsigned long temp, res;

	__asm__ __volatile__(
		".set\tnoreorder\t\t# test_and_clear_bit\n"
		"1:\tll\t%0, %1\n\t"
		"or\t%2, %0, %3\n\t"
		"xor\t%2, %3\n\t"
		"sc\t%2, %1\n\t"
		"beqz\t%2, 1b\n\t"
		" and\t%2, %0, %3\n\t"
		".set\treorder"
		: "=&r" (temp), "=m" (*m), "=&r" (res)
		: "r" (1UL << (nr & 0x1f)), "m" (*m)
		: "memory");

	return res != 0;
}

/*
 * __test_and_clear_bit - Clear a bit and return its old value
 * @nr: Bit to clear
 * @addr: Address to count from
 *
 * This operation is non-atomic and can be reordered.
 * If two instances of this operation race, one can appear to succeed
 * but actually fail.  You must protect multiple accesses with a lock.
 */
static __inline__ int __test_and_clear_bit(int nr, volatile void * addr)
{
	int mask, retval;
	volatile int *a = addr;

	a += nr >> 5;
	mask = 1 << (nr & 0x1f);
	retval = (mask & *a) != 0;
	*a &= ~mask;

	return retval;
}

/*
 * test_and_change_bit - Change a bit and return its old value
 * @nr: Bit to change
 * @addr: Address to count from
 *
 * This operation is atomic and cannot be reordered.
 * It also implies a memory barrier.
 */
static __inline__ int
test_and_change_bit(int nr, volatile void *addr)
{
	unsigned long *m = ((unsigned long *) addr) + (nr >> 5);
	unsigned long temp, res;

	__asm__ __volatile__(
		".set\tnoreorder\t\t# test_and_change_bit\n"
		"1:\tll\t%0, %1\n\t"
		"xor\t%2, %0, %3\n\t"
		"sc\t%2, %1\n\t"
		"beqz\t%2, 1b\n\t"
		" and\t%2, %0, %3\n\t"
		".set\treorder"
		: "=&r" (temp), "=m" (*m), "=&r" (res)
		: "r" (1UL << (nr & 0x1f)), "m" (*m)
		: "memory");

	return res != 0;
}

/*
 * __test_and_change_bit - Change a bit and return its old value
 * @nr: Bit to change
 * @addr: Address to count from
 *
 * This operation is non-atomic and can be reordered.
 * If two instances of this operation race, one can appear to succeed
 * but actually fail.  You must protect multiple accesses with a lock.
 */
static __inline__ int __test_and_change_bit(int nr, volatile void * addr)
{
	int mask, retval;
	volatile int *a = addr;

	a += nr >> 5;
	mask = 1 << (nr & 0x1f);
	retval = (mask & *a) != 0;
	*a ^= mask;

	return retval;
}

#else /* !CONFIG_CPU_HAS_LLSC */

/*
 * The non-ll/sc (MIPS I) versions fall back to disabling interrupts
 * around a plain read-modify-write of the affected word.
 */

/*
 * set_bit - Atomically set a bit in memory
 * @nr: the bit to set
 * @addr: the address to start counting from
 *
 * This function is atomic and may not be reordered.  See __set_bit()
 * if you do not require the atomic guarantees.
 * Note that @nr may be almost arbitrarily large; this function is not
 * restricted to acting on a single-word quantity.
 */
static __inline__ void set_bit(int nr, volatile void * addr)
{
	int mask;
	volatile int *a = addr;
	__bi_flags;

	a += nr >> 5;
	mask = 1 << (nr & 0x1f);
	__bi_save_and_cli(flags);
	*a |= mask;
	__bi_restore_flags(flags);
}

/*
 * __set_bit - Set a bit in memory
 * @nr: the bit to set
 * @addr: the address to start counting from
 *
 * Unlike set_bit(), this function is non-atomic and may be reordered.
 * If it's called on the same region of memory simultaneously, the effect
 * may be that only one operation succeeds.
 */
static __inline__ void __set_bit(int nr, volatile void * addr)
{
	int mask;
	volatile int *a = addr;

	a += nr >> 5;
	mask = 1 << (nr & 0x1f);
	*a |= mask;
}

/*
 * clear_bit - Clears a bit in memory
 * @nr: Bit to clear
 * @addr: Address to start counting from
 *
 * clear_bit() is atomic and may not be reordered.  However, it does
 * not contain a memory barrier, so if it is used for locking purposes,
 * you should call smp_mb__before_clear_bit() and/or
 * smp_mb__after_clear_bit() in order to ensure changes are visible on
 * other processors.
 */
static __inline__ void clear_bit(int nr, volatile void * addr)
{
	int mask;
	volatile int *a = addr;
	__bi_flags;

	a += nr >> 5;
	mask = 1 << (nr & 0x1f);
	__bi_save_and_cli(flags);
	*a &= ~mask;
	__bi_restore_flags(flags);
}

/*
 * change_bit - Toggle a bit in memory
 * @nr: Bit to change
 * @addr: Address to start counting from
 *
 * change_bit() is atomic and may not be reordered.
 * Note that @nr may be almost arbitrarily large; this function is not
 * restricted to acting on a single-word quantity.
 */
static __inline__ void change_bit(int nr, volatile void * addr)
{
	int mask;
	volatile int *a = addr;
	__bi_flags;

	a += nr >> 5;
	mask = 1 << (nr & 0x1f);
	__bi_save_and_cli(flags);
	*a ^= mask;
	__bi_restore_flags(flags);
}

/*
 * __change_bit - Toggle a bit in memory
 * @nr: the bit to change
 * @addr: the address to start counting from
 *
 * Unlike change_bit(), this function is non-atomic and may be reordered.
 * If it's called on the same region of memory simultaneously, the effect
 * may be that only one operation succeeds.
 */
static __inline__ void __change_bit(int nr, volatile void * addr)
{
	unsigned long * m = ((unsigned long *) addr) + (nr >> 5);

	*m ^= 1UL << (nr & 31);
}

/*
 * test_and_set_bit - Set a bit and return its old value
 * @nr: Bit to set
 * @addr: Address to count from
 *
 * This operation is atomic and cannot be reordered.
 * It also implies a memory barrier.
 */
static __inline__ int test_and_set_bit(int nr, volatile void * addr)
{
	int mask, retval;
	volatile int *a = addr;
	__bi_flags;

	a += nr >> 5;
	mask = 1 << (nr & 0x1f);
	__bi_save_and_cli(flags);
	retval = (mask & *a) != 0;
	*a |= mask;
	__bi_restore_flags(flags);

	return retval;
}

/*
 * __test_and_set_bit - Set a bit and return its old value
 * @nr: Bit to set
 * @addr: Address to count from
 *
 * This operation is non-atomic and can be reordered.
 * If two instances of this operation race, one can appear to succeed
 * but actually fail.  You must protect multiple accesses with a lock.
 */
static __inline__ int __test_and_set_bit(int nr, volatile void * addr)
{
	int mask, retval;
	volatile int *a = addr;

	a += nr >> 5;
	mask = 1 << (nr & 0x1f);
	retval = (mask & *a) != 0;
	*a |= mask;

	return retval;
}

/*
 * test_and_clear_bit - Clear a bit and return its old value
 * @nr: Bit to clear
 * @addr: Address to count from
 *
 * This operation is atomic and cannot be reordered.
 * It also implies a memory barrier.
 */
static __inline__ int test_and_clear_bit(int nr, volatile void * addr)
{
	int mask, retval;
	volatile int *a = addr;
	__bi_flags;

	a += nr >> 5;
	mask = 1 << (nr & 0x1f);
	__bi_save_and_cli(flags);
	retval = (mask & *a) != 0;
	*a &= ~mask;
	__bi_restore_flags(flags);

	return retval;
}

/*
 * __test_and_clear_bit - Clear a bit and return its old value
 * @nr: Bit to clear
 * @addr: Address to count from
 *
 * This operation is non-atomic and can be reordered.
 * If two instances of this operation race, one can appear to succeed
 * but actually fail.  You must protect multiple accesses with a lock.
 */
static __inline__ int __test_and_clear_bit(int nr, volatile void * addr)
{
	int mask, retval;
	volatile int *a = addr;

	a += nr >> 5;
	mask = 1 << (nr & 0x1f);
	retval = (mask & *a) != 0;
	*a &= ~mask;

	return retval;
}

/*
 * test_and_change_bit - Change a bit and return its old value
 * @nr: Bit to change
 * @addr: Address to count from
 *
 * This operation is atomic and cannot be reordered.
 * It also implies a memory barrier.
 */
static __inline__ int test_and_change_bit(int nr, volatile void * addr)
{
	int mask, retval;
	volatile int *a = addr;
	__bi_flags;

	a += nr >> 5;
	mask = 1 << (nr & 0x1f);
	__bi_save_and_cli(flags);
	retval = (mask & *a) != 0;
	*a ^= mask;
	__bi_restore_flags(flags);

	return retval;
}

/*
 * __test_and_change_bit - Change a bit and return its old value
 * @nr: Bit to change
 * @addr: Address to count from
 *
 * This operation is non-atomic and can be reordered.
 * If two instances of this operation race, one can appear to succeed
 * but actually fail.  You must protect multiple accesses with a lock.
 */
static __inline__ int __test_and_change_bit(int nr, volatile void * addr)
{
	int mask, retval;
	volatile int *a = addr;

	a += nr >> 5;
	mask = 1 << (nr & 0x1f);
	retval = (mask & *a) != 0;
	*a ^= mask;

	return retval;
}

#undef __bi_flags
#undef __bi_cli
#undef __bi_save_flags
#undef __bi_save_and_cli
#undef __bi_restore_flags

#endif /* CONFIG_CPU_HAS_LLSC */

/*
 * test_bit - Determine whether a bit is set
 * @nr: bit number to test
 * @addr: Address to start counting from
 */
static __inline__ int test_bit(int nr, const volatile void *addr)
{
	return ((1UL << (nr & 31)) & (((const unsigned int *) addr)[nr >> 5])) != 0;
}
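
/*
 * Worked example (illustrative): bit @nr lives in 32-bit word (nr >> 5)
 * at bit position (nr & 31), so test_bit(37, bitmap) reads bit 5 of
 * word 1 of a hypothetical bitmap.
 */
#if 0
	int a = test_bit(37, bitmap);
	int b = (((const unsigned int *) bitmap)[37 >> 5] >> (37 & 31)) & 1;
	/* a == b: word 1, bit 5 */
#endif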

#ifndef __MIPSEB__

/* Little endian versions. */

/*
 * find_first_zero_bit - find the first zero bit in a memory region
 * @addr: The address to start the search at
 * @size: The maximum size to search
 *
 * Returns the bit-number of the first zero bit, not the number of the byte
 * containing a bit.
 */
static __inline__ int find_first_zero_bit (void *addr, unsigned size)
{
	unsigned long dummy;
	int res;

	if (!size)
		return 0;

	__asm__ (".set\tnoreorder\n\t"
		".set\tnoat\n"
		"1:\tsubu\t$1,%6,%0\n\t"
		"blez\t$1,2f\n\t"
		"lw\t$1,(%5)\n\t"
		"addiu\t%5,4\n\t"
#if (_MIPS_ISA == _MIPS_ISA_MIPS2 ) || (_MIPS_ISA == _MIPS_ISA_MIPS3 ) || \
    (_MIPS_ISA == _MIPS_ISA_MIPS4 ) || (_MIPS_ISA == _MIPS_ISA_MIPS5 ) || \
    (_MIPS_ISA == _MIPS_ISA_MIPS32) || (_MIPS_ISA == _MIPS_ISA_MIPS64)
		"beql\t%1,$1,1b\n\t"
		"addiu\t%0,32\n\t"
#else
		"addiu\t%0,32\n\t"
		"beq\t%1,$1,1b\n\t"
		"nop\n\t"
		"subu\t%0,32\n\t"
#endif
#ifdef __MIPSEB__
#error "Fix this for big endian"
#endif
		"li\t%1,1\n"
		"1:\tand\t%2,$1,%1\n\t"
		"beqz\t%2,2f\n\t"
		"sll\t%1,%1,1\n\t"
		"bnez\t%1,1b\n\t"
		"add\t%0,%0,1\n\t"
		".set\tat\n\t"
		".set\treorder\n"
		"2:"
		: "=r" (res), "=r" (dummy), "=r" (addr)
		: "0" ((signed int) 0), "1" ((unsigned int) 0xffffffff),
		  "2" (addr), "r" (size)
		: "$1");

	return res;
}
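
/*
 * Usage sketch (illustrative; the bitmap and helper below are
 * hypothetical): claim the first free slot in a 128-bit map.
 */
#if 0
static unsigned long slot_map[4];		/* 4 x 32 = 128 slots */

static int alloc_slot(void)
{
	int slot = find_first_zero_bit(slot_map, 128);

	if (slot >= 128)
		return -1;			/* no free slot */
	if (test_and_set_bit(slot, slot_map))
		return -1;			/* raced with another CPU */
	return slot;
}
#endif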

/*
 * find_next_zero_bit - find the first zero bit in a memory region
 * @addr: The address to base the search on
 * @size: The maximum size to search
 * @offset: The bit number to start searching at
 */
static __inline__ int find_next_zero_bit (void * addr, int size, int offset)
{
	unsigned int *p = ((unsigned int *) addr) + (offset >> 5);
	int set = 0, bit = offset & 31, res;
	unsigned long dummy;

	if (bit) {
		/*
		 * Look for zero in the first 32-bit word.
		 */
#ifdef __MIPSEB__
#error "Fix this for big endian byte order"
#endif
		__asm__(".set\tnoreorder\n\t"
			".set\tnoat\n"
			"1:\tand\t$1,%4,%1\n\t"
			"beqz\t$1,1f\n\t"
			"sll\t%1,%1,1\n\t"
			"bnez\t%1,1b\n\t"
			"addiu\t%0,1\n\t"
			".set\tat\n\t"
			".set\treorder\n"
			"1:"
			: "=r" (set), "=r" (dummy)
			: "0" (0), "1" (1 << bit), "r" (*p)
			: "$1");
		if (set < (32 - bit))
			return set + offset;
		set = 32 - bit;
		p++;
	}

	/*
	 * No zero yet, search remaining full words for a zero.
	 */
	res = find_first_zero_bit(p, size - 32 * (p - (unsigned int *) addr));
	return offset + set + res;
}
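
/*
 * Usage sketch (illustrative, hypothetical names): walking every clear
 * bit in a bitmap with the pair of functions above.
 */
#if 0
	for (i = find_first_zero_bit(map, nbits); i < nbits;
	     i = find_next_zero_bit(map, nbits, i + 1))
		handle_free_slot(i);
#endif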

#endif /* !(__MIPSEB__) */

/*
 * ffz - find first zero in word.
 * @word: The word to search
 *
 * Undefined if no zero exists, so code should check against ~0UL first.
 */
static __inline__ unsigned long ffz(unsigned long word)
{
	unsigned int __res;
	unsigned int mask = 1;

	__asm__ (
		".set\tnoreorder\n\t"
		".set\tnoat\n\t"
		"move\t%0,$0\n"
		"1:\tand\t$1,%2,%1\n\t"
		"beqz\t$1,2f\n\t"
		"sll\t%1,1\n\t"
		"bnez\t%1,1b\n\t"
		"addiu\t%0,1\n\t"
		".set\tat\n\t"
		".set\treorder\n"
		"2:\n\t"
		: "=&r" (__res), "=r" (mask)
		: "r" (word), "1" (mask)
		: "$1");

	return __res;
}
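
/*
 * Worked example (illustrative): ffz() scans upward from bit 0 for the
 * first zero bit, so ffz(0x0000ffff) == 16 and ffz(0) == 0.  As the
 * comment above says, the result is undefined when no zero bit exists.
 */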

#ifdef __KERNEL__

/*
 * hweightN - returns the hamming weight of a N-bit word
 * @x: the word to weigh
 *
 * The Hamming Weight of a number is the total number of bits set in it.
 */
#define hweight32(x) generic_hweight32(x)
#define hweight16(x) generic_hweight16(x)
#define hweight8(x) generic_hweight8(x)

#endif /* __KERNEL__ */

#ifdef __MIPSEB__

/*
 * find_next_zero_bit - find the first zero bit in a memory region
 * @addr: The address to base the search on
 * @size: The maximum size to search
 * @offset: The bit number to start searching at
 */
static __inline__ int find_next_zero_bit(void *addr, int size, int offset)
{
	unsigned long *p = ((unsigned long *) addr) + (offset >> 5);
	unsigned long result = offset & ~31UL;
	unsigned long tmp;

	if (offset >= size)
		return size;
	size -= result;
	offset &= 31UL;
	if (offset) {
		tmp = *(p++);
		tmp |= ~0UL >> (32-offset);
		if (size < 32)
			goto found_first;
		if (~tmp)
			goto found_middle;
		size -= 32;
		result += 32;
	}
	while (size & ~31UL) {
		if (~(tmp = *(p++)))
			goto found_middle;
		result += 32;
		size -= 32;
	}
	if (!size)
		return result;
	tmp = *p;

found_first:
	tmp |= ~0UL << size;
found_middle:
	return result + ffz(tmp);
}

/*
 * There is no separate big-endian find_first_zero_bit(); it is simply
 * the offset-0 case of find_next_zero_bit() above.
 */
#if 0 /* Fool kernel-doc since it doesn't do macros yet */
/*
 * find_first_zero_bit - find the first zero bit in a memory region
 * @addr: The address to start the search at
 * @size: The maximum size to search
 *
 * Returns the bit-number of the first zero bit, not the number of the byte
 * containing a bit.
 */
static int find_first_zero_bit (void *addr, unsigned size);
#endif /* 0 */

#define find_first_zero_bit(addr, size) \
	find_next_zero_bit((addr), (size), 0)

#endif /* __MIPSEB__ */

/* Now for the ext2 filesystem bit operations and helper routines. */

#ifdef __MIPSEB__
static __inline__ int ext2_set_bit(int nr, void * addr)
{
	int mask, retval, flags;
	unsigned char *ADDR = (unsigned char *) addr;

	ADDR += nr >> 3;
	mask = 1 << (nr & 0x07);
	save_and_cli(flags);
	retval = (mask & *ADDR) != 0;
	*ADDR |= mask;
	restore_flags(flags);
	return retval;
}

static __inline__ int ext2_clear_bit(int nr, void * addr)
{
	int mask, retval, flags;
	unsigned char *ADDR = (unsigned char *) addr;

	ADDR += nr >> 3;
	mask = 1 << (nr & 0x07);
	save_and_cli(flags);
	retval = (mask & *ADDR) != 0;
	*ADDR &= ~mask;
	restore_flags(flags);
	return retval;
}

static __inline__ int ext2_test_bit(int nr, const void * addr)
{
	int mask;
	const unsigned char *ADDR = (const unsigned char *) addr;

	ADDR += nr >> 3;
	mask = 1 << (nr & 0x07);
	return ((mask & *ADDR) != 0);
}

#define ext2_find_first_zero_bit(addr, size) \
	ext2_find_next_zero_bit((addr), (size), 0)

static __inline__ unsigned long ext2_find_next_zero_bit(void *addr, unsigned long size, unsigned long offset)
{
	unsigned long *p = ((unsigned long *) addr) + (offset >> 5);
	unsigned long result = offset & ~31UL;
	unsigned long tmp;

	if (offset >= size)
		return size;
	size -= result;
	offset &= 31UL;
	if (offset) {
		/* We hold the little endian value in tmp, but then the
		 * shift is illegal. So we could keep a big endian value
		 * in tmp, like this:
		 *
		 * tmp = __swab32(*(p++));
		 * tmp |= ~0UL >> (32-offset);
		 *
		 * but this would decrease performance, so we change the
		 * shift:
		 */
		tmp = *(p++);
		tmp |= __swab32(~0UL >> (32-offset));
		if (size < 32)
			goto found_first;
		if (~tmp)
			goto found_middle;
		size -= 32;
		result += 32;
	}
	while (size & ~31UL) {
		if (~(tmp = *(p++)))
			goto found_middle;
		result += 32;
		size -= 32;
	}
	if (!size)
		return result;
	tmp = *p;

found_first:
	/* tmp is little endian, so we would have to swab the shift,
	 * see above. But then we have to swab tmp below for ffz, so
	 * we might as well do this here.
	 */
	return result + ffz(__swab32(tmp) | (~0UL << size));
found_middle:
	return result + ffz(__swab32(tmp));
}
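
/*
 * Worked example (illustrative): ext2 bitmaps use little-endian byte
 * order on disk.  If on-disk byte 0 is 0xff and bytes 1..3 are 0x00,
 * a big-endian load sees 0xff000000; __swab32() turns that into
 * 0x000000ff, so ffz() correctly reports bit 8 (byte 1, bit 0) as the
 * first zero bit.
 */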
#else /* !(__MIPSEB__) */

/* Native ext2 byte ordering, just collapse using defines. */
#define ext2_set_bit(nr, addr) test_and_set_bit((nr), (addr))
#define ext2_clear_bit(nr, addr) test_and_clear_bit((nr), (addr))
#define ext2_test_bit(nr, addr) test_bit((nr), (addr))
#define ext2_find_first_zero_bit(addr, size) find_first_zero_bit((addr), (size))
#define ext2_find_next_zero_bit(addr, size, offset) \
	find_next_zero_bit((addr), (size), (offset))

#endif /* !(__MIPSEB__) */

/*
 * The minix filesystem uses the native bit ordering on this platform,
 * so its bit operations collapse onto the regular kernel bitops.
 */
#define minix_test_and_set_bit(nr,addr) test_and_set_bit(nr,addr)
#define minix_set_bit(nr,addr) set_bit(nr,addr)
#define minix_test_and_clear_bit(nr,addr) test_and_clear_bit(nr,addr)
#define minix_test_bit(nr,addr) test_bit(nr,addr)
#define minix_find_first_zero_bit(addr,size) find_first_zero_bit(addr,size)

#endif /* _ASM_BITOPS_H */