1
2
3
4
5
6
7
8
9#ifndef _ASM_BITOPS_H
10#define _ASM_BITOPS_H
11
12#include <linux/types.h>
13#include <asm/byteorder.h>
14
15#ifdef __KERNEL__
16
17#include <asm/sgidefs.h>
18#include <asm/system.h>
19
20
21
22
23#define smp_mb__before_clear_bit() barrier()
24#define smp_mb__after_clear_bit() barrier()
25
26
27
28
29
30#define __bi_flags unsigned long flags
31#define __bi_cli() __cli()
32#define __bi_save_flags(x) __save_flags(x)
33#define __bi_save_and_cli(x) __save_and_cli(x)
34#define __bi_restore_flags(x) __restore_flags(x)
35#else
36#define __bi_flags
37#define __bi_cli()
38#define __bi_save_flags(x)
39#define __bi_save_and_cli(x)
40#define __bi_restore_flags(x)
41#endif
42
43#ifdef CONFIG_CPU_HAS_LLSC
44
45#include <asm/mipsregs.h>
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
/*
 * set_bit - Atomically set a bit in memory
 * @nr: the bit to set (0-based, counted from @addr)
 * @addr: the address to start counting from
 *
 * ll/sc retry loop: load-linked the 32-bit word containing the bit,
 * OR in the mask, then store-conditional.  sc leaves 0 in temp when
 * another CPU wrote the word in between, and the beqz retries.
 */
static __inline__ void
set_bit(int nr, volatile void *addr)
{
	/* word index uses 32-bit granularity (nr >> 5) */
	unsigned long *m = ((unsigned long *) addr) + (nr >> 5);
	unsigned long temp;

	__asm__ __volatile__(
		"1:\tll\t%0, %1\t\t# set_bit\n\t"
		"or\t%0, %2\n\t"
		"sc\t%0, %1\n\t"
		"beqz\t%0, 1b"
		: "=&r" (temp), "=m" (*m)
		: "ir" (1UL << (nr & 0x1f)), "m" (*m));
}
76
77
78
79
80
81
82
83
84
85
/*
 * __set_bit - Set a bit in memory (non-atomic variant)
 * @nr: the bit to set
 * @addr: the address to start counting from
 *
 * Unlike set_bit(), this is not atomic; the caller must provide any
 * locking needed.
 */
static __inline__ void __set_bit(int nr, volatile void * addr)
{
	unsigned long *word = (unsigned long *) addr;

	word += nr >> 5;
	*word |= 1UL << (nr & 31);
}
92#define PLATFORM__SET_BIT
93
94
95
96
97
98
99
100
101
102
103
/*
 * clear_bit - Atomically clear a bit in memory
 * @nr: the bit to clear
 * @addr: the address to start counting from
 *
 * Same ll/sc retry pattern as set_bit(), but ANDs with the inverted
 * mask.  Does not imply a memory barrier; callers that need ordering
 * use smp_mb__before/after_clear_bit().
 */
static __inline__ void
clear_bit(int nr, volatile void *addr)
{
	unsigned long *m = ((unsigned long *) addr) + (nr >> 5);
	unsigned long temp;

	__asm__ __volatile__(
		"1:\tll\t%0, %1\t\t# clear_bit\n\t"
		"and\t%0, %2\n\t"
		"sc\t%0, %1\n\t"
		"beqz\t%0, 1b\n\t"
		: "=&r" (temp), "=m" (*m)
		: "ir" (~(1UL << (nr & 0x1f))), "m" (*m));
}
118
119
120
121
122
123
124
125
126
127
/*
 * change_bit - Atomically toggle a bit in memory
 * @nr: the bit to change
 * @addr: the address to start counting from
 *
 * ll/sc retry loop XOR-ing the bit mask into the containing word.
 */
static __inline__ void
change_bit(int nr, volatile void *addr)
{
	unsigned long *m = ((unsigned long *) addr) + (nr >> 5);
	unsigned long temp;

	__asm__ __volatile__(
		"1:\tll\t%0, %1\t\t# change_bit\n\t"
		"xor\t%0, %2\n\t"
		"sc\t%0, %1\n\t"
		"beqz\t%0, 1b"
		: "=&r" (temp), "=m" (*m)
		: "ir" (1UL << (nr & 0x1f)), "m" (*m));
}
142
143
144
145
146
147
148
149
150
151
/*
 * __change_bit - Toggle a bit in memory (non-atomic variant)
 * @nr: the bit to change
 * @addr: the address to start counting from
 *
 * Not atomic; callers must serialise access themselves.
 */
static __inline__ void __change_bit(int nr, volatile void * addr)
{
	unsigned long *word = (unsigned long *) addr;

	word[nr >> 5] ^= 1UL << (nr & 31);
}
158
159
160
161
162
163
164
165
166
/*
 * test_and_set_bit - Atomically set a bit and return its old value
 * @nr: the bit to set
 * @addr: the address to start counting from
 *
 * ll/sc loop: OR the mask into a scratch copy and try to store it.
 * The delay slot of the final beqz recomputes res = old_word & mask,
 * so the return value reflects the bit as it was before the update.
 */
static __inline__ int
test_and_set_bit(int nr, volatile void *addr)
{
	unsigned long *m = ((unsigned long *) addr) + (nr >> 5);
	unsigned long temp, res;

	__asm__ __volatile__(
		".set\tnoreorder\t\t# test_and_set_bit\n"
		"1:\tll\t%0, %1\n\t"
		"or\t%2, %0, %3\n\t"
		"sc\t%2, %1\n\t"
		"beqz\t%2, 1b\n\t"
		" and\t%2, %0, %3\n\t"
		".set\treorder"
		: "=&r" (temp), "=m" (*m), "=&r" (res)
		: "r" (1UL << (nr & 0x1f)), "m" (*m)
		: "memory");

	return res != 0;
}
187
188
189
190
191
192
193
194
195
196
/*
 * __test_and_set_bit - Set a bit and return its old value (non-atomic)
 * @nr: the bit to set
 * @addr: the address to start counting from
 *
 * Not atomic and may be reordered; caller provides locking.
 */
static __inline__ int __test_and_set_bit(int nr, volatile void * addr)
{
	volatile int *word = addr;
	int bit = 1 << (nr & 0x1f);
	int was_set;

	word += nr >> 5;
	was_set = (*word & bit) != 0;
	*word |= bit;

	return was_set;
}
209
210
211
212
213
214
215
216
217
/*
 * test_and_clear_bit - Atomically clear a bit and return its old value
 * @nr: the bit to clear
 * @addr: the address to start counting from
 *
 * The or-then-xor pair clears bit nr in the scratch copy regardless of
 * its previous state (or sets it, xor removes it) before the sc.  The
 * beqz delay slot recomputes res = old_word & mask for the return.
 */
static __inline__ int
test_and_clear_bit(int nr, volatile void *addr)
{
	unsigned long *m = ((unsigned long *) addr) + (nr >> 5);
	unsigned long temp, res;

	__asm__ __volatile__(
		".set\tnoreorder\t\t# test_and_clear_bit\n"
		"1:\tll\t%0, %1\n\t"
		"or\t%2, %0, %3\n\t"
		"xor\t%2, %3\n\t"
		"sc\t%2, %1\n\t"
		"beqz\t%2, 1b\n\t"
		" and\t%2, %0, %3\n\t"
		".set\treorder"
		: "=&r" (temp), "=m" (*m), "=&r" (res)
		: "r" (1UL << (nr & 0x1f)), "m" (*m)
		: "memory");

	return res != 0;
}
239
240
241
242
243
244
245
246
247
248
/*
 * __test_and_clear_bit - Clear a bit and return its old value (non-atomic)
 * @nr: the bit to clear
 * @addr: the address to start counting from
 *
 * Not atomic; caller provides locking.
 */
static __inline__ int __test_and_clear_bit(int nr, volatile void * addr)
{
	volatile int *word = addr;
	int bit = 1 << (nr & 0x1f);
	int was_set;

	word += nr >> 5;
	was_set = (*word & bit) != 0;
	*word &= ~bit;

	return was_set;
}
261
262
263
264
265
266
267
268
269
/*
 * test_and_change_bit - Atomically toggle a bit and return its old value
 * @nr: the bit to change
 * @addr: the address to start counting from
 *
 * ll/sc loop XOR-ing the mask; the beqz delay slot recomputes
 * res = old_word & mask so the pre-toggle value is returned.
 */
static __inline__ int
test_and_change_bit(int nr, volatile void *addr)
{
	unsigned long *m = ((unsigned long *) addr) + (nr >> 5);
	unsigned long temp, res;

	__asm__ __volatile__(
		".set\tnoreorder\t\t# test_and_change_bit\n"
		"1:\tll\t%0, %1\n\t"
		"xor\t%2, %0, %3\n\t"
		"sc\t%2, %1\n\t"
		"beqz\t%2, 1b\n\t"
		" and\t%2, %0, %3\n\t"
		".set\treorder"
		: "=&r" (temp), "=m" (*m), "=&r" (res)
		: "r" (1UL << (nr & 0x1f)), "m" (*m)
		: "memory");

	return res != 0;
}
290
291
292
293
294
295
296
297
298
299
/*
 * __test_and_change_bit - Toggle a bit and return its old value (non-atomic)
 * @nr: the bit to change
 * @addr: the address to start counting from
 *
 * Not atomic; caller provides locking.
 */
static __inline__ int __test_and_change_bit(int nr, volatile void * addr)
{
	volatile int *word = addr;
	int bit = 1 << (nr & 0x1f);
	int was_set;

	word += nr >> 5;
	was_set = (*word & bit) != 0;
	*word ^= bit;

	return was_set;
}
312
313#else
314
315
316
317
318
319
320
321
322
323
324
325static __inline__ void set_bit(int nr, volatile void * addr)
326{
327 int mask;
328 volatile int *a = addr;
329 __bi_flags;
330
331 a += nr >> 5;
332 mask = 1 << (nr & 0x1f);
333 __bi_save_and_cli(flags);
334 *a |= mask;
335 __bi_restore_flags(flags);
336}
337
338
339
340
341
342
343
344
345
346
/*
 * __set_bit - Set a bit in memory (non-atomic variant)
 * @nr: the bit to set
 * @addr: the address to start counting from
 *
 * Not atomic; caller provides locking.
 */
static __inline__ void __set_bit(int nr, volatile void * addr)
{
	volatile int *word = addr;

	word += nr >> 5;
	*word |= 1 << (nr & 0x1f);
}
356
357
358
359
360
361
362
363
364
365
366
367static __inline__ void clear_bit(int nr, volatile void * addr)
368{
369 int mask;
370 volatile int *a = addr;
371 __bi_flags;
372
373 a += nr >> 5;
374 mask = 1 << (nr & 0x1f);
375 __bi_save_and_cli(flags);
376 *a &= ~mask;
377 __bi_restore_flags(flags);
378}
379
380
381
382
383
384
385
386
387
388
389static __inline__ void change_bit(int nr, volatile void * addr)
390{
391 int mask;
392 volatile int *a = addr;
393 __bi_flags;
394
395 a += nr >> 5;
396 mask = 1 << (nr & 0x1f);
397 __bi_save_and_cli(flags);
398 *a ^= mask;
399 __bi_restore_flags(flags);
400}
401
402
403
404
405
406
407
408
409
410
/*
 * __change_bit - Toggle a bit in memory (non-atomic variant)
 * @nr: the bit to change
 * @addr: the address to start counting from
 *
 * Not atomic; caller provides locking.
 */
static __inline__ void __change_bit(int nr, volatile void * addr)
{
	unsigned long *word = (unsigned long *) addr;

	word[nr >> 5] ^= 1UL << (nr & 31);
}
417
418
419
420
421
422
423
424
425
426static __inline__ int test_and_set_bit(int nr, volatile void * addr)
427{
428 int mask, retval;
429 volatile int *a = addr;
430 __bi_flags;
431
432 a += nr >> 5;
433 mask = 1 << (nr & 0x1f);
434 __bi_save_and_cli(flags);
435 retval = (mask & *a) != 0;
436 *a |= mask;
437 __bi_restore_flags(flags);
438
439 return retval;
440}
441
442
443
444
445
446
447
448
449
450
/*
 * __test_and_set_bit - Set a bit and return its old value (non-atomic)
 * @nr: the bit to set
 * @addr: the address to start counting from
 *
 * Not atomic; caller provides locking.
 */
static __inline__ int __test_and_set_bit(int nr, volatile void * addr)
{
	volatile int *word = addr;
	int bit = 1 << (nr & 0x1f);
	int old;

	word += nr >> 5;
	old = (*word & bit) != 0;
	*word |= bit;

	return old;
}
463
464
465
466
467
468
469
470
471
472static __inline__ int test_and_clear_bit(int nr, volatile void * addr)
473{
474 int mask, retval;
475 volatile int *a = addr;
476 __bi_flags;
477
478 a += nr >> 5;
479 mask = 1 << (nr & 0x1f);
480 __bi_save_and_cli(flags);
481 retval = (mask & *a) != 0;
482 *a &= ~mask;
483 __bi_restore_flags(flags);
484
485 return retval;
486}
487
488
489
490
491
492
493
494
495
496
/*
 * __test_and_clear_bit - Clear a bit and return its old value (non-atomic)
 * @nr: the bit to clear
 * @addr: the address to start counting from
 *
 * Not atomic; caller provides locking.
 */
static __inline__ int __test_and_clear_bit(int nr, volatile void * addr)
{
	volatile int *word = addr;
	int bit = 1 << (nr & 0x1f);
	int old;

	word += nr >> 5;
	old = (*word & bit) != 0;
	*word &= ~bit;

	return old;
}
509
510
511
512
513
514
515
516
517
518static __inline__ int test_and_change_bit(int nr, volatile void * addr)
519{
520 int mask, retval;
521 volatile int *a = addr;
522 __bi_flags;
523
524 a += nr >> 5;
525 mask = 1 << (nr & 0x1f);
526 __bi_save_and_cli(flags);
527 retval = (mask & *a) != 0;
528 *a ^= mask;
529 __bi_restore_flags(flags);
530
531 return retval;
532}
533
534
535
536
537
538
539
540
541
542
/*
 * __test_and_change_bit - Toggle a bit and return its old value (non-atomic)
 * @nr: the bit to change
 * @addr: the address to start counting from
 *
 * Not atomic; caller provides locking.
 */
static __inline__ int __test_and_change_bit(int nr, volatile void * addr)
{
	volatile int *word = addr;
	int bit = 1 << (nr & 0x1f);
	int old;

	word += nr >> 5;
	old = (*word & bit) != 0;
	*word ^= bit;

	return old;
}
555
556#undef __bi_flags
557#undef __bi_cli
558#undef __bi_save_flags
559#undef __bi_restore_flags
560
561#endif
562
563
564
565
566
567
/*
 * test_bit - Determine whether a bit is set
 * @nr: the bit to test
 * @addr: the address to start counting from
 *
 * Returns 1 if the bit is set, 0 otherwise.  Plain (non-atomic) read.
 */
static __inline__ int test_bit(int nr, const volatile void *addr)
{
	const unsigned int *word = (const unsigned int *) addr;

	return (word[nr >> 5] >> (nr & 31)) & 1;
}
572
573#ifndef __MIPSEB__
574
575
576
577
578
579
580
581
582
583
584
/*
 * find_first_zero_bit - find the first zero bit in a memory region
 * @addr: the address to start the search at
 * @size: the maximum size to search, in bits
 *
 * Returns the bit offset of the first zero bit; when no zero bit is
 * found within @size bits the result is >= @size.  Little-endian bit
 * order only (big-endian trips the #error below).
 *
 * First asm loop: skip whole 32-bit words equal to 0xffffffff,
 * advancing the bit count %0 by 32 per word.  On MIPS II and later the
 * branch-likely form (beql) is used so the delay-slot addiu executes
 * only when the branch is taken; the pre-MIPS-II path adds 32
 * unconditionally and subtracts it back when falling through.
 * Second asm loop: walk a one-bit mask through the word that
 * contained a zero, incrementing %0 until the clear bit is found.
 */
static __inline__ int find_first_zero_bit (void *addr, unsigned size)
{
	unsigned long dummy;
	int res;

	if (!size)
		return 0;

	__asm__ (".set\tnoreorder\n\t"
		".set\tnoat\n"
		"1:\tsubu\t$1,%6,%0\n\t"
		"blez\t$1,2f\n\t"
		"lw\t$1,(%5)\n\t"
		"addiu\t%5,4\n\t"
#if (_MIPS_ISA == _MIPS_ISA_MIPS2 ) || (_MIPS_ISA == _MIPS_ISA_MIPS3 ) || \
    (_MIPS_ISA == _MIPS_ISA_MIPS4 ) || (_MIPS_ISA == _MIPS_ISA_MIPS5 ) || \
    (_MIPS_ISA == _MIPS_ISA_MIPS32) || (_MIPS_ISA == _MIPS_ISA_MIPS64)
		"beql\t%1,$1,1b\n\t"
		"addiu\t%0,32\n\t"
#else
		"addiu\t%0,32\n\t"
		"beq\t%1,$1,1b\n\t"
		"nop\n\t"
		"subu\t%0,32\n\t"
#endif
#ifdef __MIPSEB__
#error "Fix this for big endian"
#endif
		"li\t%1,1\n"
		"1:\tand\t%2,$1,%1\n\t"
		"beqz\t%2,2f\n\t"
		"sll\t%1,%1,1\n\t"
		"bnez\t%1,1b\n\t"
		"add\t%0,%0,1\n\t"
		".set\tat\n\t"
		".set\treorder\n"
		"2:"
		: "=r" (res), "=r" (dummy), "=r" (addr)
		: "0" ((signed int) 0), "1" ((unsigned int) 0xffffffff),
		  "2" (addr), "r" (size)
		: "$1");

	return res;
}
629
630
631
632
633
634
635
/*
 * find_next_zero_bit - find the first zero bit, starting from @offset
 * @addr: the address to base the search on
 * @size: the maximum size to search, in bits
 * @offset: the bit number to start searching at
 *
 * Little-endian bit order only.  If @offset is not word aligned, the
 * asm below scans the remainder of the current word bit-by-bit for a
 * zero; otherwise (or once that word is exhausted) the search falls
 * through to find_first_zero_bit() over the remaining whole words.
 */
static __inline__ int find_next_zero_bit (void * addr, int size, int offset)
{
	unsigned int *p = ((unsigned int *) addr) + (offset >> 5);
	int set = 0, bit = offset & 31, res;
	unsigned long dummy;

	if (bit) {
		/*
		 * Look for a zero in the first (32 - bit) bits of *p,
		 * walking a one-bit mask starting at position `bit`.
		 */
#ifdef __MIPSEB__
#error "Fix this for big endian byte order"
#endif
		__asm__(".set\tnoreorder\n\t"
			".set\tnoat\n"
			"1:\tand\t$1,%4,%1\n\t"
			"beqz\t$1,1f\n\t"
			"sll\t%1,%1,1\n\t"
			"bnez\t%1,1b\n\t"
			"addiu\t%0,1\n\t"
			".set\tat\n\t"
			".set\treorder\n"
			"1:"
			: "=r" (set), "=r" (dummy)
			: "0" (0), "1" (1 << bit), "r" (*p)
			: "$1");
		if (set < (32 - bit))
			return set + offset;	/* found in the partial word */
		set = 32 - bit;
		p++;
	}

	/* No zero yet -- search the remaining full words. */
	res = find_first_zero_bit(p, size - 32 * (p - (unsigned int *) addr));
	return offset + set + res;
}
673
674#endif
675
676
677
678
679
680
681
/*
 * ffz - find the first zero bit in a word
 * @word: the word to search
 *
 * Walks a one-bit mask up the word and returns the index (0-31) of the
 * first clear bit.  NOTE(review): if @word has no zero bit (~0UL) the
 * mask shifts out and the loop exits with 32 -- callers are expected
 * to guarantee at least one zero bit.
 */
static __inline__ unsigned long ffz(unsigned long word)
{
	unsigned int __res;
	unsigned int mask = 1;

	__asm__ (
		".set\tnoreorder\n\t"
		".set\tnoat\n\t"
		"move\t%0,$0\n"
		"1:\tand\t$1,%2,%1\n\t"
		"beqz\t$1,2f\n\t"
		"sll\t%1,1\n\t"
		"bnez\t%1,1b\n\t"
		"addiu\t%0,1\n\t"
		".set\tat\n\t"
		".set\treorder\n"
		"2:\n\t"
		: "=&r" (__res), "=r" (mask)
		: "r" (word), "1" (mask)
		: "$1");

	return __res;
}
705
706#ifdef __KERNEL__
707
708
709
710
711
712
713
714
715#define hweight32(x) generic_hweight32(x)
716#define hweight16(x) generic_hweight16(x)
717#define hweight8(x) generic_hweight8(x)
718
719#endif
720
721#ifdef __MIPSEB__
722
723
724
725
726
727
/*
 * find_next_zero_bit - find the first zero bit, starting from @offset
 * (generic C version used for big-endian builds)
 * @addr: the address to base the search on
 * @size: the maximum size to search, in bits
 * @offset: the bit number to start searching at
 *
 * Returns @size when no zero bit exists in the range.  Handles a
 * misaligned starting offset by masking off the low bits of the first
 * word, then scans whole words, then the trailing partial word.
 */
static __inline__ int find_next_zero_bit(void *addr, int size, int offset)
{
	unsigned long *p = ((unsigned long *) addr) + (offset >> 5);
	unsigned long result = offset & ~31UL;	/* bit index of word start */
	unsigned long tmp;

	if (offset >= size)
		return size;
	size -= result;
	offset &= 31UL;
	if (offset) {
		tmp = *(p++);
		/* force bits below offset to 1 so they are skipped */
		tmp |= ~0UL >> (32-offset);
		if (size < 32)
			goto found_first;
		if (~tmp)
			goto found_middle;
		size -= 32;
		result += 32;
	}
	/* scan aligned full words */
	while (size & ~31UL) {
		if (~(tmp = *(p++)))
			goto found_middle;
		result += 32;
		size -= 32;
	}
	if (!size)
		return result;
	tmp = *p;

found_first:
	/* mask off bits beyond the requested size */
	tmp |= ~0UL << size;
found_middle:
	return result + ffz(tmp);
}
763
764
765
766
767
768#if 0
769
770
771
772
773
774
775
776
777static int find_first_zero_bit (void *addr, unsigned size);
778#endif
779
780#define find_first_zero_bit(addr, size) \
781 find_next_zero_bit((addr), (size), 0)
782
783#endif
784
785
786
787#ifdef __MIPSEB__
/*
 * ext2_set_bit - Set a bit in an ext2 bitmap, returning its old value
 * @nr: the bit to set
 * @addr: the bitmap base address
 *
 * Big-endian variant: ext2 bitmaps use little-endian byte order, so
 * the bit is addressed byte-wise.  Protected by save_and_cli().
 */
static __inline__ int ext2_set_bit(int nr, void * addr)
{
	int bit, old, flags;
	unsigned char *byte = (unsigned char *) addr;

	byte += nr >> 3;
	bit = 1 << (nr & 0x07);
	save_and_cli(flags);
	old = (*byte & bit) != 0;
	*byte |= bit;
	restore_flags(flags);
	return old;
}
801
/*
 * ext2_clear_bit - Clear a bit in an ext2 bitmap, returning its old value
 * @nr: the bit to clear
 * @addr: the bitmap base address
 *
 * Big-endian variant; byte-wise addressing, protected by save_and_cli().
 */
static __inline__ int ext2_clear_bit(int nr, void * addr)
{
	int bit, old, flags;
	unsigned char *byte = (unsigned char *) addr;

	byte += nr >> 3;
	bit = 1 << (nr & 0x07);
	save_and_cli(flags);
	old = (*byte & bit) != 0;
	*byte &= ~bit;
	restore_flags(flags);
	return old;
}
815
/*
 * ext2_test_bit - Test a bit in an ext2 bitmap
 * @nr: the bit to test
 * @addr: the bitmap base address
 *
 * Returns 1 if the bit is set, 0 otherwise.  Byte-wise addressing so
 * the result is independent of CPU endianness.
 */
static __inline__ int ext2_test_bit(int nr, const void * addr)
{
	const unsigned char *byte = (const unsigned char *) addr + (nr >> 3);

	return (*byte >> (nr & 0x07)) & 1;
}
825
826#define ext2_find_first_zero_bit(addr, size) \
827 ext2_find_next_zero_bit((addr), (size), 0)
828
/*
 * ext2_find_next_zero_bit - find the next zero bit in an ext2 bitmap
 * @addr: the bitmap base address
 * @size: the maximum size to search, in bits
 * @offset: the bit number to start searching at
 *
 * Big-endian variant.  ext2 bitmaps are little-endian on disk, so each
 * 32-bit word is byte-swapped with __swab32() before being searched;
 * all-ones full words can be skipped without swapping (~tmp test is
 * endian-neutral).  Returns @size when no zero bit is found.
 */
static __inline__ unsigned long ext2_find_next_zero_bit(void *addr, unsigned long size, unsigned long offset)
{
	unsigned long *p = ((unsigned long *) addr) + (offset >> 5);
	unsigned long result = offset & ~31UL;	/* bit index of word start */
	unsigned long tmp;

	if (offset >= size)
		return size;
	size -= result;
	offset &= 31UL;
	if(offset) {
		/*
		 * Mark the low `offset` bits (in little-endian bitmap
		 * order) as set so they are skipped; the mask is built
		 * in native order and swapped to match the stored word.
		 */
		tmp = *(p++);
		tmp |= __swab32(~0UL >> (32-offset));
		if(size < 32)
			goto found_first;
		if(~tmp)
			goto found_middle;
		size -= 32;
		result += 32;
	}
	/* scan aligned full words */
	while(size & ~31UL) {
		if(~(tmp = *(p++)))
			goto found_middle;
		result += 32;
		size -= 32;
	}
	if(!size)
		return result;
	tmp = *p;

found_first:
	/* swap to bitmap order, then mask off bits beyond size */
	return result + ffz(__swab32(tmp) | (~0UL << size));
found_middle:
	return result + ffz(__swab32(tmp));
}
878#else
879
880
881#define ext2_set_bit(nr, addr) test_and_set_bit((nr), (addr))
882#define ext2_clear_bit(nr, addr) test_and_clear_bit((nr), (addr))
883#define ext2_test_bit(nr, addr) test_bit((nr), (addr))
884#define ext2_find_first_zero_bit(addr, size) find_first_zero_bit((addr), (size))
885#define ext2_find_next_zero_bit(addr, size, offset) \
886 find_next_zero_bit((addr), (size), (offset))
887
888#endif
889
890
891
892
893
894
895#define minix_test_and_set_bit(nr,addr) test_and_set_bit(nr,addr)
896#define minix_set_bit(nr,addr) set_bit(nr,addr)
897#define minix_test_and_clear_bit(nr,addr) test_and_clear_bit(nr,addr)
898#define minix_test_bit(nr,addr) test_bit(nr,addr)
899#define minix_find_first_zero_bit(addr,size) find_first_zero_bit(addr,size)
900
901#endif
902