#ifndef _ASM_BITOPS_H
#define _ASM_BITOPS_H

#include <linux/types.h>
#include <asm/byteorder.h>

#ifdef __KERNEL__

#include <asm/sgidefs.h>
#include <asm/system.h>

#include <asm-generic/bitops/fls.h>
#include <asm-generic/bitops/__fls.h>
#include <asm-generic/bitops/fls64.h>
#include <asm-generic/bitops/__ffs.h>

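/*
 * clear_bit() and friends do not imply a compiler barrier of their own;
 * use these macros around clear_bit() when ordering matters.
 */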
#define smp_mb__before_clear_bit() barrier()
#define smp_mb__after_clear_bit() barrier()

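/*
 * The __bi_* helpers disable interrupts around the non-ll/sc bit
 * operations below; outside the kernel they expand to nothing.
 */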
#define __bi_flags unsigned long flags
#define __bi_cli() __cli()
#define __bi_save_flags(x) __save_flags(x)
#define __bi_save_and_cli(x) __save_and_cli(x)
#define __bi_restore_flags(x) __restore_flags(x)
#else
#define __bi_flags
#define __bi_cli()
#define __bi_save_flags(x)
#define __bi_save_and_cli(x)
#define __bi_restore_flags(x)
#endif /* __KERNEL__ */

#ifdef CONFIG_CPU_HAS_LLSC

#include <asm/mipsregs.h>

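/*
 * set_bit - Atomically set a bit in memory
 * @nr: the bit to set
 * @addr: the address to start counting from
 *
 * Atomic with respect to the other bit operations, implemented with an
 * ll/sc retry loop.
 */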
static __inline__ void
set_bit(int nr, volatile void *addr)
{
	unsigned long *m = ((unsigned long *) addr) + (nr >> 5);
	unsigned long temp;

	__asm__ __volatile__(
		"1:\tll\t%0, %1\t\t# set_bit\n\t"
		"or\t%0, %2\n\t"
		"sc\t%0, %1\n\t"
		"beqz\t%0, 1b"
		: "=&r" (temp), "=m" (*m)
		: "ir" (1UL << (nr & 0x1f)), "m" (*m));
}

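/*
 * __set_bit - Set a bit in memory
 * @nr: the bit to set
 * @addr: the address to start counting from
 *
 * Non-atomic variant of set_bit(); the caller must serialize access
 * to the bitmap.
 */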
static __inline__ void __set_bit(int nr, volatile void * addr)
{
	unsigned long * m = ((unsigned long *) addr) + (nr >> 5);

	*m |= 1UL << (nr & 31);
}
#define PLATFORM__SET_BIT

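/*
 * clear_bit - Clears a bit in memory
 * @nr: bit to clear
 * @addr: the address to start counting from
 *
 * Atomic ll/sc version; implies no memory barrier of its own, see
 * smp_mb__before_clear_bit()/smp_mb__after_clear_bit().
 */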
static __inline__ void
clear_bit(int nr, volatile void *addr)
{
	unsigned long *m = ((unsigned long *) addr) + (nr >> 5);
	unsigned long temp;

	__asm__ __volatile__(
		"1:\tll\t%0, %1\t\t# clear_bit\n\t"
		"and\t%0, %2\n\t"
		"sc\t%0, %1\n\t"
		"beqz\t%0, 1b\n\t"
		: "=&r" (temp), "=m" (*m)
		: "ir" (~(1UL << (nr & 0x1f))), "m" (*m));
}

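/*
 * change_bit - Toggle a bit in memory
 * @nr: bit to change
 * @addr: the address to start counting from
 *
 * Atomic ll/sc version.
 */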
static __inline__ void
change_bit(int nr, volatile void *addr)
{
	unsigned long *m = ((unsigned long *) addr) + (nr >> 5);
	unsigned long temp;

	__asm__ __volatile__(
		"1:\tll\t%0, %1\t\t# change_bit\n\t"
		"xor\t%0, %2\n\t"
		"sc\t%0, %1\n\t"
		"beqz\t%0, 1b"
		: "=&r" (temp), "=m" (*m)
		: "ir" (1UL << (nr & 0x1f)), "m" (*m));
}

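/*
 * __change_bit - Toggle a bit in memory
 * @nr: the bit to change
 * @addr: the address to start counting from
 *
 * Non-atomic variant of change_bit().
 */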
static __inline__ void __change_bit(int nr, volatile void * addr)
{
	unsigned long * m = ((unsigned long *) addr) + (nr >> 5);

	*m ^= 1UL << (nr & 31);
}

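/*
 * test_and_set_bit - Set a bit and return its old value
 * @nr: the bit to set
 * @addr: the address to start counting from
 *
 * Atomic ll/sc version; returns non-zero if the bit was already set.
 */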
static __inline__ int
test_and_set_bit(int nr, volatile void *addr)
{
	unsigned long *m = ((unsigned long *) addr) + (nr >> 5);
	unsigned long temp, res;

	__asm__ __volatile__(
		".set\tnoreorder\t\t# test_and_set_bit\n"
		"1:\tll\t%0, %1\n\t"
		"or\t%2, %0, %3\n\t"
		"sc\t%2, %1\n\t"
		"beqz\t%2, 1b\n\t"
		" and\t%2, %0, %3\n\t"
		".set\treorder"
		: "=&r" (temp), "=m" (*m), "=&r" (res)
		: "r" (1UL << (nr & 0x1f)), "m" (*m)
		: "memory");

	return res != 0;
}

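/*
 * __test_and_set_bit - Set a bit and return its old value
 * @nr: the bit to set
 * @addr: the address to start counting from
 *
 * Non-atomic variant of test_and_set_bit().
 */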
static __inline__ int __test_and_set_bit(int nr, volatile void * addr)
{
	int mask, retval;
	volatile int *a = addr;

	a += nr >> 5;
	mask = 1 << (nr & 0x1f);
	retval = (mask & *a) != 0;
	*a |= mask;

	return retval;
}

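/*
 * test_and_clear_bit - Clear a bit and return its old value
 * @nr: the bit to clear
 * @addr: the address to start counting from
 *
 * Atomic ll/sc version; returns non-zero if the bit was previously set.
 */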
static __inline__ int
test_and_clear_bit(int nr, volatile void *addr)
{
	unsigned long *m = ((unsigned long *) addr) + (nr >> 5);
	unsigned long temp, res;

	__asm__ __volatile__(
		".set\tnoreorder\t\t# test_and_clear_bit\n"
		"1:\tll\t%0, %1\n\t"
		"or\t%2, %0, %3\n\t"
		"xor\t%2, %3\n\t"
		"sc\t%2, %1\n\t"
		"beqz\t%2, 1b\n\t"
		" and\t%2, %0, %3\n\t"
		".set\treorder"
		: "=&r" (temp), "=m" (*m), "=&r" (res)
		: "r" (1UL << (nr & 0x1f)), "m" (*m)
		: "memory");

	return res != 0;
}

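/*
 * __test_and_clear_bit - Clear a bit and return its old value
 * @nr: the bit to clear
 * @addr: the address to start counting from
 *
 * Non-atomic variant of test_and_clear_bit().
 */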
static __inline__ int __test_and_clear_bit(int nr, volatile void * addr)
{
	int mask, retval;
	volatile int *a = addr;

	a += nr >> 5;
	mask = 1 << (nr & 0x1f);
	retval = (mask & *a) != 0;
	*a &= ~mask;

	return retval;
}

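/*
 * test_and_change_bit - Toggle a bit and return its old value
 * @nr: the bit to toggle
 * @addr: the address to start counting from
 *
 * Atomic ll/sc version; returns non-zero if the bit was previously set.
 */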
static __inline__ int
test_and_change_bit(int nr, volatile void *addr)
{
	unsigned long *m = ((unsigned long *) addr) + (nr >> 5);
	unsigned long temp, res;

	__asm__ __volatile__(
		".set\tnoreorder\t\t# test_and_change_bit\n"
		"1:\tll\t%0, %1\n\t"
		"xor\t%2, %0, %3\n\t"
		"sc\t%2, %1\n\t"
		"beqz\t%2, 1b\n\t"
		" and\t%2, %0, %3\n\t"
		".set\treorder"
		: "=&r" (temp), "=m" (*m), "=&r" (res)
		: "r" (1UL << (nr & 0x1f)), "m" (*m)
		: "memory");

	return res != 0;
}

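/*
 * __test_and_change_bit - Toggle a bit and return its old value
 * @nr: the bit to toggle
 * @addr: the address to start counting from
 *
 * Non-atomic variant of test_and_change_bit().
 */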
static __inline__ int __test_and_change_bit(int nr, volatile void * addr)
{
	int mask, retval;
	volatile int *a = addr;

	a += nr >> 5;
	mask = 1 << (nr & 0x1f);
	retval = (mask & *a) != 0;
	*a ^= mask;

	return retval;
}

#else /* !CONFIG_CPU_HAS_LLSC */

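/*
 * Fallback for CPUs without ll/sc: the atomic versions below disable
 * interrupts around a plain read-modify-write using the __bi_* helpers.
 */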
static __inline__ void set_bit(int nr, volatile void * addr)
{
	int mask;
	volatile int *a = addr;
	__bi_flags;

	a += nr >> 5;
	mask = 1 << (nr & 0x1f);
	__bi_save_and_cli(flags);
	*a |= mask;
	__bi_restore_flags(flags);
}

static __inline__ void __set_bit(int nr, volatile void * addr)
{
	int mask;
	volatile int *a = addr;

	a += nr >> 5;
	mask = 1 << (nr & 0x1f);
	*a |= mask;
}

static __inline__ void clear_bit(int nr, volatile void * addr)
{
	int mask;
	volatile int *a = addr;
	__bi_flags;

	a += nr >> 5;
	mask = 1 << (nr & 0x1f);
	__bi_save_and_cli(flags);
	*a &= ~mask;
	__bi_restore_flags(flags);
}

static __inline__ void change_bit(int nr, volatile void * addr)
{
	int mask;
	volatile int *a = addr;
	__bi_flags;

	a += nr >> 5;
	mask = 1 << (nr & 0x1f);
	__bi_save_and_cli(flags);
	*a ^= mask;
	__bi_restore_flags(flags);
}

static __inline__ void __change_bit(int nr, volatile void * addr)
{
	unsigned long * m = ((unsigned long *) addr) + (nr >> 5);

	*m ^= 1UL << (nr & 31);
}

static __inline__ int test_and_set_bit(int nr, volatile void * addr)
{
	int mask, retval;
	volatile int *a = addr;
	__bi_flags;

	a += nr >> 5;
	mask = 1 << (nr & 0x1f);
	__bi_save_and_cli(flags);
	retval = (mask & *a) != 0;
	*a |= mask;
	__bi_restore_flags(flags);

	return retval;
}

static __inline__ int __test_and_set_bit(int nr, volatile void * addr)
{
	int mask, retval;
	volatile int *a = addr;

	a += nr >> 5;
	mask = 1 << (nr & 0x1f);
	retval = (mask & *a) != 0;
	*a |= mask;

	return retval;
}

static __inline__ int test_and_clear_bit(int nr, volatile void * addr)
{
	int mask, retval;
	volatile int *a = addr;
	__bi_flags;

	a += nr >> 5;
	mask = 1 << (nr & 0x1f);
	__bi_save_and_cli(flags);
	retval = (mask & *a) != 0;
	*a &= ~mask;
	__bi_restore_flags(flags);

	return retval;
}

static __inline__ int __test_and_clear_bit(int nr, volatile void * addr)
{
	int mask, retval;
	volatile int *a = addr;

	a += nr >> 5;
	mask = 1 << (nr & 0x1f);
	retval = (mask & *a) != 0;
	*a &= ~mask;

	return retval;
}

static __inline__ int test_and_change_bit(int nr, volatile void * addr)
{
	int mask, retval;
	volatile int *a = addr;
	__bi_flags;

	a += nr >> 5;
	mask = 1 << (nr & 0x1f);
	__bi_save_and_cli(flags);
	retval = (mask & *a) != 0;
	*a ^= mask;
	__bi_restore_flags(flags);

	return retval;
}

static __inline__ int __test_and_change_bit(int nr, volatile void * addr)
{
	int mask, retval;
	volatile int *a = addr;

	a += nr >> 5;
	mask = 1 << (nr & 0x1f);
	retval = (mask & *a) != 0;
	*a ^= mask;

	return retval;
}

#undef __bi_flags
#undef __bi_cli
#undef __bi_save_flags
#undef __bi_save_and_cli
#undef __bi_restore_flags

#endif /* CONFIG_CPU_HAS_LLSC */
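/*
 * test_bit - Determine whether a bit is set
 * @nr: bit number to test
 * @addr: the address to start counting from
 */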
static __inline__ int test_bit(int nr, const volatile void *addr)
{
	return ((1UL << (nr & 31)) & (((const unsigned int *) addr)[nr >> 5])) != 0;
}

#ifndef __MIPSEB__

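/*
 * find_first_zero_bit - find the first zero bit in a memory region
 * @addr: the address to start the search at
 * @size: the maximum size to search (in bits)
 *
 * Returns the bit number of the first zero bit.  Little-endian only;
 * the big-endian case is handled further below.
 */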
static __inline__ int find_first_zero_bit (void *addr, unsigned size)
{
	unsigned long dummy;
	int res;

	if (!size)
		return 0;

	__asm__ (".set\tnoreorder\n\t"
		".set\tnoat\n"
		"1:\tsubu\t$1,%6,%0\n\t"
		"blez\t$1,2f\n\t"
		"lw\t$1,(%5)\n\t"
		"addiu\t%5,4\n\t"
#if (_MIPS_ISA == _MIPS_ISA_MIPS2 ) || (_MIPS_ISA == _MIPS_ISA_MIPS3 ) || \
    (_MIPS_ISA == _MIPS_ISA_MIPS4 ) || (_MIPS_ISA == _MIPS_ISA_MIPS5 ) || \
    (_MIPS_ISA == _MIPS_ISA_MIPS32) || (_MIPS_ISA == _MIPS_ISA_MIPS64)
		"beql\t%1,$1,1b\n\t"
		"addiu\t%0,32\n\t"
#else
		"addiu\t%0,32\n\t"
		"beq\t%1,$1,1b\n\t"
		"nop\n\t"
		"subu\t%0,32\n\t"
#endif
#ifdef __MIPSEB__
#error "Fix this for big endian"
#endif
		"li\t%1,1\n"
		"1:\tand\t%2,$1,%1\n\t"
		"beqz\t%2,2f\n\t"
		"sll\t%1,%1,1\n\t"
		"bnez\t%1,1b\n\t"
		"add\t%0,%0,1\n\t"
		".set\tat\n\t"
		".set\treorder\n"
		"2:"
		: "=r" (res), "=r" (dummy), "=r" (addr)
		: "0" ((signed int) 0), "1" ((unsigned int) 0xffffffff),
		  "2" (addr), "r" (size)
		: "$1");

	return res;
}

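/*
 * find_next_zero_bit - find the first zero bit in a memory region
 * @addr: the address to base the search on
 * @size: the maximum size to search (in bits)
 * @offset: the bit number to start searching at
 */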
static __inline__ int find_next_zero_bit (void * addr, int size, int offset)
{
	unsigned int *p = ((unsigned int *) addr) + (offset >> 5);
	int set = 0, bit = offset & 31, res;
	unsigned long dummy;

	if (bit) {
		/*
		 * Look for a zero in the first, partially used word.
		 */
#ifdef __MIPSEB__
#error "Fix this for big endian byte order"
#endif
		__asm__(".set\tnoreorder\n\t"
			".set\tnoat\n"
			"1:\tand\t$1,%4,%1\n\t"
			"beqz\t$1,1f\n\t"
			"sll\t%1,%1,1\n\t"
			"bnez\t%1,1b\n\t"
			"addiu\t%0,1\n\t"
			".set\tat\n\t"
			".set\treorder\n"
			"1:"
			: "=r" (set), "=r" (dummy)
			: "0" (0), "1" (1 << bit), "r" (*p)
			: "$1");
		if (set < (32 - bit))
			return set + offset;
		set = 32 - bit;
		p++;
	}

	/*
	 * No zero yet, search the remaining full words for a zero.
	 */
	res = find_first_zero_bit(p, size - 32 * (p - (unsigned int *) addr));
	return offset + set + res;
}

#endif /* !__MIPSEB__ */

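/*
 * ffz - find the first zero bit in a word
 * @word: the word to search
 */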
static __inline__ unsigned long ffz(unsigned long word)
{
	unsigned int __res;
	unsigned int mask = 1;

	__asm__ (
		".set\tnoreorder\n\t"
		".set\tnoat\n\t"
		"move\t%0,$0\n"
		"1:\tand\t$1,%2,%1\n\t"
		"beqz\t$1,2f\n\t"
		"sll\t%1,1\n\t"
		"bnez\t%1,1b\n\t"
		"addiu\t%0,1\n\t"
		".set\tat\n\t"
		".set\treorder\n"
		"2:\n\t"
		: "=&r" (__res), "=r" (mask)
		: "r" (word), "1" (mask)
		: "$1");

	return __res;
}

#ifdef __KERNEL__

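/*
 * hweightN - returns the hamming weight (i.e. the number
 * of bits set) of a N-bit word
 */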
#define hweight32(x) generic_hweight32(x)
#define hweight16(x) generic_hweight16(x)
#define hweight8(x) generic_hweight8(x)

#endif /* __KERNEL__ */

#ifdef __MIPSEB__

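/*
 * find_next_zero_bit - find the first zero bit in a memory region
 * @addr: the address to base the search on
 * @size: the maximum size to search (in bits)
 * @offset: the bit number to start searching at
 *
 * Plain C version used on big-endian configurations.
 */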
static __inline__ int find_next_zero_bit(void *addr, int size, int offset)
{
	unsigned long *p = ((unsigned long *) addr) + (offset >> 5);
	unsigned long result = offset & ~31UL;
	unsigned long tmp;

	if (offset >= size)
		return size;
	size -= result;
	offset &= 31UL;
	if (offset) {
		tmp = *(p++);
		tmp |= ~0UL >> (32-offset);
		if (size < 32)
			goto found_first;
		if (~tmp)
			goto found_middle;
		size -= 32;
		result += 32;
	}
	while (size & ~31UL) {
		if (~(tmp = *(p++)))
			goto found_middle;
		result += 32;
		size -= 32;
	}
	if (!size)
		return result;
	tmp = *p;

found_first:
	tmp |= ~0UL << size;
found_middle:
	return result + ffz(tmp);
}

#if 0
/*
 * Documentation-only prototype; find_first_zero_bit is implemented as
 * a macro below.
 */
static int find_first_zero_bit (void *addr, unsigned size);
#endif

#define find_first_zero_bit(addr, size) \
	find_next_zero_bit((addr), (size), 0)

#endif /* __MIPSEB__ */

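/*
 * Ext2 stores its bitmaps in little-endian bit order.  On big-endian
 * CPUs the bits therefore have to be addressed byte-wise (and swabbed
 * in the search functions); on little-endian CPUs the native bitops
 * can be used directly.
 */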
#ifdef __MIPSEB__
static __inline__ int ext2_set_bit(int nr, void * addr)
{
	int mask, retval, flags;
	unsigned char *ADDR = (unsigned char *) addr;

	ADDR += nr >> 3;
	mask = 1 << (nr & 0x07);
	save_and_cli(flags);
	retval = (mask & *ADDR) != 0;
	*ADDR |= mask;
	restore_flags(flags);
	return retval;
}

static __inline__ int ext2_clear_bit(int nr, void * addr)
{
	int mask, retval, flags;
	unsigned char *ADDR = (unsigned char *) addr;

	ADDR += nr >> 3;
	mask = 1 << (nr & 0x07);
	save_and_cli(flags);
	retval = (mask & *ADDR) != 0;
	*ADDR &= ~mask;
	restore_flags(flags);
	return retval;
}

static __inline__ int ext2_test_bit(int nr, const void * addr)
{
	int mask;
	const unsigned char *ADDR = (const unsigned char *) addr;

	ADDR += nr >> 3;
	mask = 1 << (nr & 0x07);
	return ((mask & *ADDR) != 0);
}

#define ext2_find_first_zero_bit(addr, size) \
	ext2_find_next_zero_bit((addr), (size), 0)

static __inline__ unsigned long ext2_find_next_zero_bit(void *addr, unsigned long size, unsigned long offset)
{
	unsigned long *p = ((unsigned long *) addr) + (offset >> 5);
	unsigned long result = offset & ~31UL;
	unsigned long tmp;

	if (offset >= size)
		return size;
	size -= result;
	offset &= 31UL;
	if (offset) {
		/*
		 * tmp holds the little-endian on-disk word, so the mask
		 * covering the already-scanned low bits has to be
		 * byte-swapped before it is OR-ed in.
		 */
		tmp = *(p++);
		tmp |= __swab32(~0UL >> (32-offset));
		if (size < 32)
			goto found_first;
		if (~tmp)
			goto found_middle;
		size -= 32;
		result += 32;
	}
	while (size & ~31UL) {
		if (~(tmp = *(p++)))
			goto found_middle;
		result += 32;
		size -= 32;
	}
	if (!size)
		return result;
	tmp = *p;

found_first:
	/*
	 * Swab tmp into CPU bit order for ffz and mask off the bits
	 * beyond the end of the bitmap.
	 */
	return result + ffz(__swab32(tmp) | (~0UL << size));
found_middle:
	return result + ffz(__swab32(tmp));
}
#else /* !__MIPSEB__ */

/* Little-endian CPU: the ext2 bit order matches the native bitops. */
#define ext2_set_bit(nr, addr) test_and_set_bit((nr), (addr))
#define ext2_clear_bit(nr, addr) test_and_clear_bit((nr), (addr))
#define ext2_test_bit(nr, addr) test_bit((nr), (addr))
#define ext2_find_first_zero_bit(addr, size) find_first_zero_bit((addr), (size))
#define ext2_find_next_zero_bit(addr, size, offset) \
	find_next_zero_bit((addr), (size), (offset))

#endif /* __MIPSEB__ */

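/* Bitmap functions for the minix filesystem. */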
#define minix_test_and_set_bit(nr,addr) test_and_set_bit(nr,addr)
#define minix_set_bit(nr,addr) set_bit(nr,addr)
#define minix_test_and_clear_bit(nr,addr) test_and_clear_bit(nr,addr)
#define minix_test_bit(nr,addr) test_bit(nr,addr)
#define minix_find_first_zero_bit(addr,size) find_first_zero_bit(addr,size)

#endif /* _ASM_BITOPS_H */