1
2
3
4
5
6
7
8
9
10
11#ifndef _S390_BITOPS_H
12#define _S390_BITOPS_H
13
14#ifndef _LINUX_BITOPS_H
15#error only <linux/bitops.h> can be included directly
16#endif
17
18#include <linux/compiler.h>
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
/*
 * Lookup tables used by the bitops below.  Judging from their use in
 * this file: _oi_bitmap[i] / _ni_bitmap[i] are the single-byte OR /
 * AND masks for bit i of a byte (fed to oc/nc), and _zb_findmap[b] /
 * _sb_findmap[b] give the first zero / set bit of byte value b.
 * NOTE(review): defined elsewhere in arch code -- confirm location.
 */
extern const char _oi_bitmap[];
extern const char _ni_bitmap[];
extern const char _zb_findmap[];
extern const char _sb_findmap[];
61
#ifndef CONFIG_64BIT

/* 31-bit instruction mnemonics for the compare-and-swap loop below. */
#define __BITOPS_OR		"or"
#define __BITOPS_AND		"nr"
#define __BITOPS_XOR		"xr"

/*
 * Atomically combine __val into the aligned word at __addr with the
 * operation named by __op_string (one of the __BITOPS_* mnemonics).
 * Load the word, apply the operation into a scratch register and
 * retry the compare-and-swap until no other CPU interfered.  __old
 * ends up holding the previous word value, __new the stored one.
 */
#define __BITOPS_LOOP(__old, __new, __addr, __val, __op_string)	\
	asm volatile(						\
		"	l	%0,%2\n"			\
		"0:	lr	%1,%0\n"			\
		__op_string "	%1,%3\n"			\
		"	cs	%0,%1,%2\n"			\
		"	jl	0b"				\
		: "=&d" (__old), "=&d" (__new),			\
		  "=Q" (*(unsigned long *) __addr)		\
		: "d" (__val), "Q" (*(unsigned long *) __addr)	\
		: "cc");

#else

/* 64-bit instruction mnemonics for the compare-and-swap loop below. */
#define __BITOPS_OR		"ogr"
#define __BITOPS_AND		"ngr"
#define __BITOPS_XOR		"xgr"

/* 64-bit version of the loop above, using lg/lgr/csg. */
#define __BITOPS_LOOP(__old, __new, __addr, __val, __op_string)	\
	asm volatile(						\
		"	lg	%0,%2\n"			\
		"0:	lgr	%1,%0\n"			\
		__op_string "	%1,%3\n"			\
		"	csg	%0,%1,%2\n"			\
		"	jl	0b"				\
		: "=&d" (__old), "=&d" (__new),			\
		  "=Q" (*(unsigned long *) __addr)		\
		: "d" (__val), "Q" (*(unsigned long *) __addr)	\
		: "cc");

#endif
99
100#define __BITOPS_WORDS(bits) (((bits) + BITS_PER_LONG - 1) / BITS_PER_LONG)
101
102#ifdef CONFIG_SMP
103
104
105
/*
 * set_bit_cs - atomically set bit nr in *ptr (interlocked SMP variant,
 * implemented with a compare-and-swap retry loop).
 */
static inline void set_bit_cs(unsigned long nr, volatile unsigned long *ptr)
{
	unsigned long addr, old, new, mask;

	addr = (unsigned long) ptr;
	/* byte offset of the aligned word that contains bit nr */
	addr += (nr ^ (nr & (BITS_PER_LONG - 1))) >> 3;
	/* OR mask for the bit within its word */
	mask = 1UL << (nr & (BITS_PER_LONG - 1));
	/* do the atomic read-modify-write */
	__BITOPS_LOOP(old, new, addr, mask, __BITOPS_OR);
}
118
119
120
121
/*
 * clear_bit_cs - atomically clear bit nr in *ptr (interlocked SMP
 * variant, implemented with a compare-and-swap retry loop).
 */
static inline void clear_bit_cs(unsigned long nr, volatile unsigned long *ptr)
{
	unsigned long addr, old, new, mask;

	addr = (unsigned long) ptr;
	/* byte offset of the aligned word that contains bit nr */
	addr += (nr ^ (nr & (BITS_PER_LONG - 1))) >> 3;
	/* AND mask with only bit nr cleared */
	mask = ~(1UL << (nr & (BITS_PER_LONG - 1)));
	/* do the atomic read-modify-write */
	__BITOPS_LOOP(old, new, addr, mask, __BITOPS_AND);
}
134
135
136
137
/*
 * change_bit_cs - atomically toggle bit nr in *ptr (interlocked SMP
 * variant, implemented with a compare-and-swap retry loop).
 */
static inline void change_bit_cs(unsigned long nr, volatile unsigned long *ptr)
{
	unsigned long addr, old, new, mask;

	addr = (unsigned long) ptr;
	/* byte offset of the aligned word that contains bit nr */
	addr += (nr ^ (nr & (BITS_PER_LONG - 1))) >> 3;
	/* XOR mask for the bit within its word */
	mask = 1UL << (nr & (BITS_PER_LONG - 1));
	/* do the atomic read-modify-write */
	__BITOPS_LOOP(old, new, addr, mask, __BITOPS_XOR);
}
150
151
152
153
/*
 * test_and_set_bit_cs - atomically set bit nr in *ptr and return its
 * previous value (interlocked SMP variant).
 */
static inline int
test_and_set_bit_cs(unsigned long nr, volatile unsigned long *ptr)
{
	unsigned long addr, old, new, mask;

	addr = (unsigned long) ptr;
	/* byte offset of the aligned word that contains bit nr */
	addr += (nr ^ (nr & (BITS_PER_LONG - 1))) >> 3;
	/* OR mask for the bit within its word */
	mask = 1UL << (nr & (BITS_PER_LONG - 1));
	__BITOPS_LOOP(old, new, addr, mask, __BITOPS_OR);
	/* compiler barrier: keep the test below after the update */
	barrier();
	/* old bit value, taken from the pre-update word */
	return (old & mask) != 0;
}
169
170
171
172
/*
 * test_and_clear_bit_cs - atomically clear bit nr in *ptr and return
 * its previous value (interlocked SMP variant).
 */
static inline int
test_and_clear_bit_cs(unsigned long nr, volatile unsigned long *ptr)
{
	unsigned long addr, old, new, mask;

	addr = (unsigned long) ptr;
	/* byte offset of the aligned word that contains bit nr */
	addr += (nr ^ (nr & (BITS_PER_LONG - 1))) >> 3;
	/* AND mask with only bit nr cleared */
	mask = ~(1UL << (nr & (BITS_PER_LONG - 1)));
	__BITOPS_LOOP(old, new, addr, mask, __BITOPS_AND);
	/* compiler barrier: keep the test below after the update */
	barrier();
	/* the AND can only change bit nr, so old != new exactly when
	 * the bit was previously set */
	return (old ^ new) != 0;
}
188
189
190
191
/*
 * test_and_change_bit_cs - atomically toggle bit nr in *ptr and return
 * its previous value (interlocked SMP variant).
 */
static inline int
test_and_change_bit_cs(unsigned long nr, volatile unsigned long *ptr)
{
	unsigned long addr, old, new, mask;

	addr = (unsigned long) ptr;
	/* byte offset of the aligned word that contains bit nr */
	addr += (nr ^ (nr & (BITS_PER_LONG - 1))) >> 3;
	/* XOR mask for the bit within its word */
	mask = 1UL << (nr & (BITS_PER_LONG - 1));
	__BITOPS_LOOP(old, new, addr, mask, __BITOPS_XOR);
	/* compiler barrier: keep the test below after the update */
	barrier();
	/* old bit value, taken from the pre-update word */
	return (old & mask) != 0;
}
207#endif
208
209
210
211
/*
 * __set_bit - non-atomically set bit nr: OR the one-bit mask from
 * _oi_bitmap into the byte that holds the bit.
 */
static inline void __set_bit(unsigned long nr, volatile unsigned long *ptr)
{
	unsigned long addr;

	/* address of the byte containing bit nr; the XOR with
	 * (BITS_PER_LONG - 8) flips the byte index to account for the
	 * machine's big-endian byte order within the long */
	addr = (unsigned long) ptr + ((nr ^ (BITS_PER_LONG - 8)) >> 3);
	asm volatile(
		"	oc	%O0(1,%R0),%1"
		: "+Q" (*(char *) addr) : "Q" (_oi_bitmap[nr & 7]) : "cc");
}
221
222static inline void
223__constant_set_bit(const unsigned long nr, volatile unsigned long *ptr)
224{
225 unsigned long addr;
226
227 addr = ((unsigned long) ptr) + ((nr ^ (BITS_PER_LONG - 8)) >> 3);
228 *(unsigned char *) addr |= 1 << (nr & 7);
229}
230
/* Non-atomic set_bit; picks the constant-folded variant when nr is a
 * compile-time constant. */
#define set_bit_simple(nr,addr) \
(__builtin_constant_p((nr)) ? \
 __constant_set_bit((nr),(addr)) : \
 __set_bit((nr),(addr)) )
235
236
237
238
/*
 * __clear_bit - non-atomically clear bit nr: AND the inverted one-bit
 * mask from _ni_bitmap into the byte that holds the bit.
 */
static inline void
__clear_bit(unsigned long nr, volatile unsigned long *ptr)
{
	unsigned long addr;

	/* address of the byte containing bit nr (big-endian byte order
	 * within the long, see __set_bit) */
	addr = (unsigned long) ptr + ((nr ^ (BITS_PER_LONG - 8)) >> 3);
	asm volatile(
		"	nc	%O0(1,%R0),%1"
		: "+Q" (*(char *) addr) : "Q" (_ni_bitmap[nr & 7]) : "cc");
}
249
250static inline void
251__constant_clear_bit(const unsigned long nr, volatile unsigned long *ptr)
252{
253 unsigned long addr;
254
255 addr = ((unsigned long) ptr) + ((nr ^ (BITS_PER_LONG - 8)) >> 3);
256 *(unsigned char *) addr &= ~(1 << (nr & 7));
257}
258
/* Non-atomic clear_bit; picks the constant-folded variant when nr is a
 * compile-time constant. */
#define clear_bit_simple(nr,addr) \
(__builtin_constant_p((nr)) ? \
 __constant_clear_bit((nr),(addr)) : \
 __clear_bit((nr),(addr)) )
263
264
265
266
/*
 * __change_bit - non-atomically toggle bit nr: XOR the one-bit mask
 * from _oi_bitmap into the byte that holds the bit.
 */
static inline void __change_bit(unsigned long nr, volatile unsigned long *ptr)
{
	unsigned long addr;

	/* address of the byte containing bit nr (big-endian byte order
	 * within the long, see __set_bit) */
	addr = (unsigned long) ptr + ((nr ^ (BITS_PER_LONG - 8)) >> 3);
	asm volatile(
		"	xc	%O0(1,%R0),%1"
		: "+Q" (*(char *) addr) : "Q" (_oi_bitmap[nr & 7]) : "cc");
}
276
277static inline void
278__constant_change_bit(const unsigned long nr, volatile unsigned long *ptr)
279{
280 unsigned long addr;
281
282 addr = ((unsigned long) ptr) + ((nr ^ (BITS_PER_LONG - 8)) >> 3);
283 *(unsigned char *) addr ^= 1 << (nr & 7);
284}
285
/* Non-atomic change_bit; picks the constant-folded variant when nr is a
 * compile-time constant. */
#define change_bit_simple(nr,addr) \
(__builtin_constant_p((nr)) ? \
 __constant_change_bit((nr),(addr)) : \
 __change_bit((nr),(addr)) )
290
291
292
293
/*
 * test_and_set_bit_simple - non-atomic test-and-set: remember the old
 * byte, OR in the bit, return the old value of the bit.
 */
static inline int
test_and_set_bit_simple(unsigned long nr, volatile unsigned long *ptr)
{
	unsigned long addr;
	unsigned char ch;

	/* address of the byte containing bit nr (big-endian byte order
	 * within the long) */
	addr = (unsigned long) ptr + ((nr ^ (BITS_PER_LONG - 8)) >> 3);
	ch = *(unsigned char *) addr;
	asm volatile(
		"	oc	%O0(1,%R0),%1"
		: "+Q" (*(char *) addr) : "Q" (_oi_bitmap[nr & 7])
		: "cc", "memory");
	/* previous value of the bit */
	return (ch >> (nr & 7)) & 1;
}
#define __test_and_set_bit(X,Y)		test_and_set_bit_simple(X,Y)
309
310
311
312
/*
 * test_and_clear_bit_simple - non-atomic test-and-clear: remember the
 * old byte, AND out the bit, return the old value of the bit.
 */
static inline int
test_and_clear_bit_simple(unsigned long nr, volatile unsigned long *ptr)
{
	unsigned long addr;
	unsigned char ch;

	/* address of the byte containing bit nr (big-endian byte order
	 * within the long) */
	addr = (unsigned long) ptr + ((nr ^ (BITS_PER_LONG - 8)) >> 3);
	ch = *(unsigned char *) addr;
	asm volatile(
		"	nc	%O0(1,%R0),%1"
		: "+Q" (*(char *) addr) : "Q" (_ni_bitmap[nr & 7])
		: "cc", "memory");
	/* previous value of the bit */
	return (ch >> (nr & 7)) & 1;
}
#define __test_and_clear_bit(X,Y)	test_and_clear_bit_simple(X,Y)
328
329
330
331
/*
 * test_and_change_bit_simple - non-atomic test-and-toggle: remember
 * the old byte, XOR in the bit, return the old value of the bit.
 */
static inline int
test_and_change_bit_simple(unsigned long nr, volatile unsigned long *ptr)
{
	unsigned long addr;
	unsigned char ch;

	/* address of the byte containing bit nr (big-endian byte order
	 * within the long) */
	addr = (unsigned long) ptr + ((nr ^ (BITS_PER_LONG - 8)) >> 3);
	ch = *(unsigned char *) addr;
	asm volatile(
		"	xc	%O0(1,%R0),%1"
		: "+Q" (*(char *) addr) : "Q" (_oi_bitmap[nr & 7])
		: "cc", "memory");
	/* previous value of the bit */
	return (ch >> (nr & 7)) & 1;
}
#define __test_and_change_bit(X,Y)	test_and_change_bit_simple(X,Y)
347
#ifdef CONFIG_SMP
/* SMP: use the interlocked compare-and-swap based variants. */
#define set_bit             set_bit_cs
#define clear_bit           clear_bit_cs
#define change_bit          change_bit_cs
#define test_and_set_bit    test_and_set_bit_cs
#define test_and_clear_bit  test_and_clear_bit_cs
#define test_and_change_bit test_and_change_bit_cs
#else
/* UP: the cheaper non-interlocked variants are sufficient. */
#define set_bit             set_bit_simple
#define clear_bit           clear_bit_simple
#define change_bit          change_bit_simple
#define test_and_set_bit    test_and_set_bit_simple
#define test_and_clear_bit  test_and_clear_bit_simple
#define test_and_change_bit test_and_change_bit_simple
#endif
363
364
365
366
367
368
369static inline int __test_bit(unsigned long nr, const volatile unsigned long *ptr)
370{
371 unsigned long addr;
372 unsigned char ch;
373
374 addr = (unsigned long) ptr + ((nr ^ (BITS_PER_LONG - 8)) >> 3);
375 ch = *(volatile unsigned char *) addr;
376 return (ch >> (nr & 7)) & 1;
377}
378
379static inline int
380__constant_test_bit(unsigned long nr, const volatile unsigned long *addr) {
381 return (((volatile char *) addr)
382 [(nr^(BITS_PER_LONG-8))>>3] & (1<<(nr&7))) != 0;
383}
384
/* Test a bit, resolving constant bit numbers at compile time. */
#define test_bit(nr,addr) \
(__builtin_constant_p((nr)) ? \
 __constant_test_bit((nr),(addr)) : \
 __test_bit((nr),(addr)) )
389
390
391
392
393
394
395
396
397
398
/*
 * __ffz_word_loop - scan the bitmap word by word and return the byte
 * offset of the first word that is not all ones (or of the final word
 * if all complete words are ones).  @size is in bits; the partial
 * last word, if any, is not examined here.
 */
static inline unsigned long __ffz_word_loop(const unsigned long *addr,
					    unsigned long size)
{
	/* dummy type so the "m" constraint covers the whole bitmap */
	typedef struct { long _[__BITOPS_WORDS(size)]; } addrtype;
	unsigned long bytes = 0;

	asm volatile(
#ifndef CONFIG_64BIT
		/* %1 = (size - 1) / 32: number of full words to scan */
		"	ahi	%1,-1\n"
		"	sra	%1,5\n"
		"	jz	1f\n"
		/* stop at the first word that differs from -1 */
		"0:	c	%2,0(%0,%3)\n"
		"	jne	1f\n"
		"	la	%0,4(%0)\n"
		"	brct	%1,0b\n"
		"1:\n"
#else
		/* %1 = (size - 1) / 64: number of full words to scan */
		"	aghi	%1,-1\n"
		"	srag	%1,%1,6\n"
		"	jz	1f\n"
		/* stop at the first word that differs from -1 */
		"0:	cg	%2,0(%0,%3)\n"
		"	jne	1f\n"
		"	la	%0,8(%0)\n"
		"	brct	%1,0b\n"
		"1:\n"
#endif
		: "+&a" (bytes), "+&d" (size)
		: "d" (-1UL), "a" (addr), "m" (*(addrtype *) addr)
		: "cc" );
	return bytes;
}
430
431
432
433
434
435
/*
 * __ffs_word_loop - scan the bitmap word by word and return the byte
 * offset of the first word that is not zero (or of the final word if
 * all complete words are zero).  @size is in bits; the partial last
 * word, if any, is not examined here.
 */
static inline unsigned long __ffs_word_loop(const unsigned long *addr,
					    unsigned long size)
{
	/* dummy type so the "m" constraint covers the whole bitmap */
	typedef struct { long _[__BITOPS_WORDS(size)]; } addrtype;
	unsigned long bytes = 0;

	asm volatile(
#ifndef CONFIG_64BIT
		/* %1 = (size - 1) / 32: number of full words to scan */
		"	ahi	%1,-1\n"
		"	sra	%1,5\n"
		"	jz	1f\n"
		/* stop at the first word that differs from 0 */
		"0:	c	%2,0(%0,%3)\n"
		"	jne	1f\n"
		"	la	%0,4(%0)\n"
		"	brct	%1,0b\n"
		"1:\n"
#else
		/* %1 = (size - 1) / 64: number of full words to scan */
		"	aghi	%1,-1\n"
		"	srag	%1,%1,6\n"
		"	jz	1f\n"
		/* stop at the first word that differs from 0 */
		"0:	cg	%2,0(%0,%3)\n"
		"	jne	1f\n"
		"	la	%0,8(%0)\n"
		"	brct	%1,0b\n"
		"1:\n"
#endif
		: "+&a" (bytes), "+&a" (size)
		: "d" (0UL), "a" (addr), "m" (*(addrtype *) addr)
		: "cc" );
	return bytes;
}
467
468
469
470
471
472
473static inline unsigned long __ffz_word(unsigned long nr, unsigned long word)
474{
475#ifdef CONFIG_64BIT
476 if ((word & 0xffffffff) == 0xffffffff) {
477 word >>= 32;
478 nr += 32;
479 }
480#endif
481 if ((word & 0xffff) == 0xffff) {
482 word >>= 16;
483 nr += 16;
484 }
485 if ((word & 0xff) == 0xff) {
486 word >>= 8;
487 nr += 8;
488 }
489 return nr + _zb_findmap[(unsigned char) word];
490}
491
492
493
494
495
496
497static inline unsigned long __ffs_word(unsigned long nr, unsigned long word)
498{
499#ifdef CONFIG_64BIT
500 if ((word & 0xffffffff) == 0) {
501 word >>= 32;
502 nr += 32;
503 }
504#endif
505 if ((word & 0xffff) == 0) {
506 word >>= 16;
507 nr += 16;
508 }
509 if ((word & 0xff) == 0) {
510 word >>= 8;
511 nr += 8;
512 }
513 return nr + _sb_findmap[(unsigned char) word];
514}
515
516
517
518
519
520
521
/*
 * __load_ulong_be - load the unsigned long located @offset bytes past
 * @p in native (big-endian) byte order, i.e. a plain load.
 */
static inline unsigned long __load_ulong_be(const unsigned long *p,
					    unsigned long offset)
{
	const unsigned long *q;

	q = (const unsigned long *)((unsigned long) p + offset);
	return *q;
}
528
529
530
531
532
533
/*
 * __load_ulong_le - load the unsigned long located @offset bytes past
 * @p with its bytes reversed (little-endian load on this big-endian
 * machine).
 */
static inline unsigned long __load_ulong_le(const unsigned long *p,
					    unsigned long offset)
{
	unsigned long word;

	p = (unsigned long *)((unsigned long) p + offset);
#ifndef CONFIG_64BIT
	/* no byte-reversing load on 31 bit: insert the four bytes one
	 * at a time into reversed positions (ic/icm) */
	asm volatile(
		"	ic	%0,%O1(%R1)\n"
		"	icm	%0,2,%O1+1(%R1)\n"
		"	icm	%0,4,%O1+2(%R1)\n"
		"	icm	%0,8,%O1+3(%R1)"
		: "=&d" (word) : "Q" (*p) : "cc");
#else
	/* lrvg: load 8 bytes byte-reversed */
	asm volatile(
		"	lrvg	%0,%1"
		: "=d" (word) : "m" (*p) );
#endif
	return word;
}
554
555
556
557
558
559
560
561
562
563
564
/*
 * ffz - find the first zero bit in a word
 * @word: the word to search
 *
 * Result is undefined if no zero bit exists.
 */
static inline unsigned long ffz(unsigned long word)
{
	return __ffz_word(0UL, word);
}
569
570
571
572
573
574
575
/*
 * __ffs - find the first set bit in a word
 * @word: the word to search
 *
 * Result is undefined if no bit is set.
 */
static inline unsigned long __ffs (unsigned long word)
{
	return __ffs_word(0UL, word);
}
580
581
582
583
584
585
586
587
588
/*
 * ffs - libc-style find first set bit
 * @x: the word to search
 *
 * Returns the 1-based position of the least significant set bit,
 * or 0 if no bits are set.
 */
static inline int ffs(int x)
{
	return x ? __ffs_word(1, x) : 0;
}
595
596
597
598
599
600
601
602
603
/*
 * find_first_zero_bit - find the index of the first zero bit in a
 * bitmap of @size bits, or @size if every bit is set.
 */
static inline unsigned long find_first_zero_bit(const unsigned long *addr,
						unsigned long size)
{
	unsigned long byte_offset, bit;

	if (!size)
		return 0;
	/* skip leading all-ones words, then pinpoint the zero bit */
	byte_offset = __ffz_word_loop(addr, size);
	bit = __ffz_word(byte_offset * 8, __load_ulong_be(addr, byte_offset));
	return bit < size ? bit : size;
}
#define find_first_zero_bit find_first_zero_bit
616
617
618
619
620
621
622
623
624
/*
 * find_first_bit - find the index of the first set bit in a bitmap of
 * @size bits, or @size if no bit is set.
 */
static inline unsigned long find_first_bit(const unsigned long * addr,
					   unsigned long size)
{
	unsigned long byte_offset, bit;

	if (!size)
		return 0;
	/* skip leading all-zero words, then pinpoint the set bit */
	byte_offset = __ffs_word_loop(addr, size);
	bit = __ffs_word(byte_offset * 8, __load_ulong_be(addr, byte_offset));
	return bit < size ? bit : size;
}
#define find_first_bit find_first_bit
637
638
639
640
641
/*
 * __flo_word - find the leftmost (most significant) one bit in @val
 * and return its left-numbered bit index plus @nr.  The .insn encodes
 * opcode 0xb983, presumably flogr (find leftmost one) -- it requires
 * the even/odd register pair 2/3, hence the fixed register variables.
 * NOTE(review): exact result for val == 0 depends on the instruction's
 * definition -- confirm against the Principles of Operation.
 */
static inline unsigned long __flo_word(unsigned long nr, unsigned long val)
{
	register unsigned long bit asm("2") = val;
	register unsigned long out asm("3");	/* odd half of the pair, clobbered */

	asm volatile (
		"	.insn	rre,0xb9830000,%[bit],%[bit]\n"
		: [bit] "+d" (bit), [out] "=d" (out) : : "cc");
	return nr + bit;
}
652
653
654
655
656
657
658
659
660
661
662
663
664
665
666
667
668
669
/*
 * find_first_bit_left - find the first set bit of a bitmap in
 * left-to-right (machine bit) order; returns @size if none is set.
 */
static inline unsigned long find_first_bit_left(const unsigned long *addr,
						unsigned long size)
{
	unsigned long byte_offset, bit;

	if (!size)
		return 0;
	/* skip leading all-zero words, then find the leftmost one */
	byte_offset = __ffs_word_loop(addr, size);
	bit = __flo_word(byte_offset * 8, __load_ulong_be(addr, byte_offset));
	return bit < size ? bit : size;
}
681
682static inline int find_next_bit_left(const unsigned long *addr,
683 unsigned long size,
684 unsigned long offset)
685{
686 const unsigned long *p;
687 unsigned long bit, set;
688
689 if (offset >= size)
690 return size;
691 bit = offset & (BITS_PER_LONG - 1);
692 offset -= bit;
693 size -= offset;
694 p = addr + offset / BITS_PER_LONG;
695 if (bit) {
696 set = __flo_word(0, *p & (~0UL >> bit));
697 if (set >= size)
698 return size + offset;
699 if (set < BITS_PER_LONG)
700 return set + offset;
701 offset += BITS_PER_LONG;
702 size -= BITS_PER_LONG;
703 p++;
704 }
705 return offset + find_first_bit_left(p, size);
706}
707
/* Iterate over the set bits of a left-numbered bitmap. */
#define for_each_set_bit_left(bit, addr, size)				\
	for ((bit) = find_first_bit_left((addr), (size));		\
	     (bit) < (size);						\
	     (bit) = find_next_bit_left((addr), (size), (bit) + 1))

/* Same as above, but continue an iteration from the current "bit". */
#define for_each_set_bit_left_cont(bit, addr, size)			\
	for ((bit) = find_next_bit_left((addr), (size), (bit));		\
	     (bit) < (size);						\
	     (bit) = find_next_bit_left((addr), (size), (bit) + 1))
718
719
720
721
722
723
724
725static inline int find_next_zero_bit (const unsigned long * addr,
726 unsigned long size,
727 unsigned long offset)
728{
729 const unsigned long *p;
730 unsigned long bit, set;
731
732 if (offset >= size)
733 return size;
734 bit = offset & (BITS_PER_LONG - 1);
735 offset -= bit;
736 size -= offset;
737 p = addr + offset / BITS_PER_LONG;
738 if (bit) {
739
740
741
742
743 set = __ffz_word(bit, *p >> bit);
744 if (set >= size)
745 return size + offset;
746 if (set < BITS_PER_LONG)
747 return set + offset;
748 offset += BITS_PER_LONG;
749 size -= BITS_PER_LONG;
750 p++;
751 }
752 return offset + find_first_zero_bit(p, size);
753}
754#define find_next_zero_bit find_next_zero_bit
755
756
757
758
759
760
761
762static inline int find_next_bit (const unsigned long * addr,
763 unsigned long size,
764 unsigned long offset)
765{
766 const unsigned long *p;
767 unsigned long bit, set;
768
769 if (offset >= size)
770 return size;
771 bit = offset & (BITS_PER_LONG - 1);
772 offset -= bit;
773 size -= offset;
774 p = addr + offset / BITS_PER_LONG;
775 if (bit) {
776
777
778
779
780 set = __ffs_word(0, *p & (~0UL << bit));
781 if (set >= size)
782 return size + offset;
783 if (set < BITS_PER_LONG)
784 return set + offset;
785 offset += BITS_PER_LONG;
786 size -= BITS_PER_LONG;
787 p++;
788 }
789 return offset + find_first_bit(p, size);
790}
791#define find_next_bit find_next_bit
792
793
794
795
796
797
798
/*
 * sched_find_first_bit - find the first set bit in the scheduler's
 * 140-bit priority bitmap.
 */
static inline int sched_find_first_bit(unsigned long *b)
{
	return find_first_bit(b, 140);
}
803
804#include <asm-generic/bitops/fls.h>
805#include <asm-generic/bitops/__fls.h>
806#include <asm-generic/bitops/fls64.h>
807
808#include <asm-generic/bitops/hweight.h>
809#include <asm-generic/bitops/lock.h>
810
811
812
813
814
815
816
817
818
819
820
/*
 * find_first_zero_bit_le - find_first_zero_bit on a little-endian
 * bitmap; returns @size if every bit is set.
 */
static inline int find_first_zero_bit_le(void *vaddr, unsigned int size)
{
	unsigned long byte_offset, bit;

	if (!size)
		return 0;
	/* skip leading all-ones words, then search the byte-swapped
	 * word for the zero bit */
	byte_offset = __ffz_word_loop(vaddr, size);
	bit = __ffz_word(byte_offset * 8, __load_ulong_le(vaddr, byte_offset));
	return bit < size ? bit : size;
}
#define find_first_zero_bit_le find_first_zero_bit_le
832
833static inline int find_next_zero_bit_le(void *vaddr, unsigned long size,
834 unsigned long offset)
835{
836 unsigned long *addr = vaddr, *p;
837 unsigned long bit, set;
838
839 if (offset >= size)
840 return size;
841 bit = offset & (BITS_PER_LONG - 1);
842 offset -= bit;
843 size -= offset;
844 p = addr + offset / BITS_PER_LONG;
845 if (bit) {
846
847
848
849
850 set = __ffz_word(bit, __load_ulong_le(p, 0) >> bit);
851 if (set >= size)
852 return size + offset;
853 if (set < BITS_PER_LONG)
854 return set + offset;
855 offset += BITS_PER_LONG;
856 size -= BITS_PER_LONG;
857 p++;
858 }
859 return offset + find_first_zero_bit_le(p, size);
860}
861#define find_next_zero_bit_le find_next_zero_bit_le
862
/*
 * find_first_bit_le - find_first_bit on a little-endian bitmap;
 * returns @size if no bit is set.
 */
static inline unsigned long find_first_bit_le(void *vaddr, unsigned long size)
{
	unsigned long byte_offset, bit;

	if (!size)
		return 0;
	/* skip leading all-zero words, then search the byte-swapped
	 * word for the set bit */
	byte_offset = __ffs_word_loop(vaddr, size);
	bit = __ffs_word(byte_offset * 8, __load_ulong_le(vaddr, byte_offset));
	return bit < size ? bit : size;
}
#define find_first_bit_le find_first_bit_le
874
875static inline int find_next_bit_le(void *vaddr, unsigned long size,
876 unsigned long offset)
877{
878 unsigned long *addr = vaddr, *p;
879 unsigned long bit, set;
880
881 if (offset >= size)
882 return size;
883 bit = offset & (BITS_PER_LONG - 1);
884 offset -= bit;
885 size -= offset;
886 p = addr + offset / BITS_PER_LONG;
887 if (bit) {
888
889
890
891
892 set = __ffs_word(0, __load_ulong_le(p, 0) & (~0UL << bit));
893 if (set >= size)
894 return size + offset;
895 if (set < BITS_PER_LONG)
896 return set + offset;
897 offset += BITS_PER_LONG;
898 size -= BITS_PER_LONG;
899 p++;
900 }
901 return offset + find_first_bit_le(p, size);
902}
903#define find_next_bit_le find_next_bit_le
904
905#include <asm-generic/bitops/le.h>
906
907#include <asm-generic/bitops/ext2-atomic-setbit.h>
908
909#endif
910