/*
 * Copyright 2004-2009 Analog Devices Inc.
 *
 * Licensed under the GPL-2 or later.
 */
7#ifndef _BLACKFIN_BITOPS_H
8#define _BLACKFIN_BITOPS_H
9
10#include <linux/compiler.h>
11
12#include <asm-generic/bitops/__ffs.h>
13#include <asm-generic/bitops/ffz.h>
14#include <asm-generic/bitops/fls.h>
15#include <asm-generic/bitops/__fls.h>
16#include <asm-generic/bitops/fls64.h>
17#include <asm-generic/bitops/find.h>
18
19#ifndef _LINUX_BITOPS_H
20#error only <linux/bitops.h> can be included directly
21#endif
22
23#include <asm-generic/bitops/sched.h>
24#include <asm-generic/bitops/ffs.h>
25#include <asm-generic/bitops/const_hweight.h>
26#include <asm-generic/bitops/lock.h>
27
28#include <asm-generic/bitops/ext2-atomic.h>
29
30#ifndef CONFIG_SMP
31#include <linux/irqflags.h>
32
33
34
35
36#ifndef smp_mb__before_clear_bit
37#define smp_mb__before_clear_bit() smp_mb()
38#define smp_mb__after_clear_bit() smp_mb()
39#endif
40#include <asm-generic/bitops/atomic.h>
41#include <asm-generic/bitops/non-atomic.h>
42#else
43
44#include <asm/byteorder.h>
45#include <linux/linkage.h>
46
47asmlinkage int __raw_bit_set_asm(volatile unsigned long *addr, int nr);
48
49asmlinkage int __raw_bit_clear_asm(volatile unsigned long *addr, int nr);
50
51asmlinkage int __raw_bit_toggle_asm(volatile unsigned long *addr, int nr);
52
53asmlinkage int __raw_bit_test_set_asm(volatile unsigned long *addr, int nr);
54
55asmlinkage int __raw_bit_test_clear_asm(volatile unsigned long *addr, int nr);
56
57asmlinkage int __raw_bit_test_toggle_asm(volatile unsigned long *addr, int nr);
58
59asmlinkage int __raw_bit_test_asm(const volatile unsigned long *addr, int nr);
60
/*
 * Atomically set bit @nr in the bitmap at @addr.
 * The heavy lifting is done by the out-of-line assembly helper;
 * here we only locate the 32-bit word that holds the bit.
 */
static inline void set_bit(int nr, volatile unsigned long *addr)
{
	/* nr >> 5 selects the word, nr & 0x1f the bit within it. */
	__raw_bit_set_asm(addr + (nr >> 5), nr & 0x1f);
}
66
/*
 * Atomically clear bit @nr in the bitmap at @addr.
 */
static inline void clear_bit(int nr, volatile unsigned long *addr)
{
	/* Word index = nr / 32, bit index = nr % 32. */
	__raw_bit_clear_asm(addr + (nr >> 5), nr & 0x1f);
}
72
/*
 * Atomically toggle bit @nr in the bitmap at @addr.
 */
static inline void change_bit(int nr, volatile unsigned long *addr)
{
	/* Word index = nr / 32, bit index = nr % 32. */
	__raw_bit_toggle_asm(addr + (nr >> 5), nr & 0x1f);
}
78
/*
 * Read bit @nr of the bitmap at @addr.
 * Returns a strict 0/1 regardless of what the asm helper reports.
 */
static inline int test_bit(int nr, const volatile unsigned long *addr)
{
	const volatile unsigned long *word = addr + (nr >> 5);

	return __raw_bit_test_asm(word, nr & 0x1f) ? 1 : 0;
}
84
/*
 * Atomically set bit @nr and return the helper's report of the
 * bit's prior state (presumably 0/1 — implemented in assembly).
 */
static inline int test_and_set_bit(int nr, volatile unsigned long *addr)
{
	return __raw_bit_test_set_asm(addr + (nr >> 5), nr & 0x1f);
}
90
/*
 * Atomically clear bit @nr and return the helper's report of the
 * bit's prior state (presumably 0/1 — implemented in assembly).
 */
static inline int test_and_clear_bit(int nr, volatile unsigned long *addr)
{
	return __raw_bit_test_clear_asm(addr + (nr >> 5), nr & 0x1f);
}
96
/*
 * Atomically toggle bit @nr and return the helper's report of the
 * bit's prior state (presumably 0/1 — implemented in assembly).
 */
static inline int test_and_change_bit(int nr, volatile unsigned long *addr)
{
	return __raw_bit_test_toggle_asm(addr + (nr >> 5), nr & 0x1f);
}
102
103
104
105
106#define smp_mb__before_clear_bit() barrier()
107#define smp_mb__after_clear_bit() barrier()
108
109#define test_bit __skip_test_bit
110#include <asm-generic/bitops/non-atomic.h>
111#undef test_bit
112
113#endif
114
115
116#include <asm-generic/bitops/le.h>
117
118
119
120
121
122
/*
 * Population count of a 32-bit word via the Blackfin ONES
 * instruction, which deposits the bit count of %1 into the low
 * half-register of %0.
 */
static inline unsigned int __arch_hweight32(unsigned int w)
{
	unsigned int res;

	/*
	 * ONES writes at most 32 into %0.l, so the value always fits
	 * in 16 bits; the second move zero-extends (Z) it to a full
	 * 32-bit data register.  "d" constrains both operands to
	 * Blackfin data registers.
	 */
	__asm__ ("%0.l = ONES %1;"
		"%0 = %0.l (Z);"
		: "=d" (res) : "d" (w));
	return res;
}
132
133static inline unsigned int __arch_hweight64(__u64 w)
134{
135 return __arch_hweight32((unsigned int)(w >> 32)) +
136 __arch_hweight32((unsigned int)w);
137}
138
/*
 * Population count of the low 16 bits of @w; upper bits are
 * masked off before delegating to the 32-bit popcount.
 */
static inline unsigned int __arch_hweight16(unsigned int w)
{
	return __arch_hweight32(w & 0xffff);
}
143
/*
 * Population count of the low 8 bits of @w; upper bits are
 * masked off before delegating to the 32-bit popcount.
 */
static inline unsigned int __arch_hweight8(unsigned int w)
{
	return __arch_hweight32(w & 0xff);
}
148
149#endif
150