/*
 * ARC bitops: atomic set_bit/clear_bit/change_bit and their test_and_*
 * variants, non-atomic __* variants, plus the ffs/fls/ffz helpers.
 */
#ifndef _ASM_BITOPS_H
#define _ASM_BITOPS_H

#ifndef _LINUX_BITOPS_H
#error only <linux/bitops.h> can be included directly
#endif

#ifndef __ASSEMBLY__

#include <linux/types.h>
#include <linux/compiler.h>
#include <asm/barrier.h>
#ifndef CONFIG_ARC_HAS_LLSC
#include <asm/smp.h>
#endif

#if defined(CONFIG_ARC_HAS_LLSC)

/*
 * Hardware assisted Atomic-R-M-W, using LLOCK/SCOND
 */

#define BIT_OP(op, c_op, asm_op) \
static inline void op##_bit(unsigned long nr, volatile unsigned long *m)\
{ \
	unsigned int temp; \
	\
	m += nr >> 5; \
	\
	/* \
	 * ARC instructions taking a bit position only use its lower \
	 * 5 bits, so the mask below is not strictly needed for a \
	 * register @nr. It is kept so that a constant @nr > 31 is not \
	 * folded to 0 by the compiler (32-bit shift overflow); for \
	 * constants the mask itself generates no code. \
	 */ \
	nr &= 0x1f; \
	\
	__asm__ __volatile__( \
	"1:	llock	%0, [%1]	\n" \
	"	" #asm_op " %0, %0, %2	\n" \
	"	scond	%0, [%1]	\n" \
	"	bnz	1b		\n" \
	: "=&r"(temp)	/* Early clobber, to prevent reg reuse */ \
	: "r"(m),	/* Not "m": llock only supports reg direct addr mode */ \
	  "ir"(nr) \
	: "cc"); \
}

/*
 * Semantically:
 *    Test the bit
 *    if clear
 *        set it and return 0 (old value)
 *    else
 *        return 1 (old value)
 *
 * Since ARC lacks an equivalent h/w primitive, the bit is set unconditionally
 * and the old value of the bit is returned.
 */
#define TEST_N_BIT_OP(op, c_op, asm_op) \
static inline int test_and_##op##_bit(unsigned long nr, volatile unsigned long *m)\
{ \
	unsigned long old, temp; \
	\
	m += nr >> 5; \
	\
	nr &= 0x1f; \
	\
	/* \
	 * Explicit full memory barrier needed before/after as \
	 * LLOCK/SCOND themselves don't provide any such semantics \
	 */ \
	smp_mb(); \
	\
	__asm__ __volatile__( \
	"1:	llock	%0, [%2]	\n" \
	"	" #asm_op " %1, %0, %3	\n" \
	"	scond	%1, [%2]	\n" \
	"	bnz	1b		\n" \
	: "=&r"(old), "=&r"(temp) \
	: "r"(m), "ir"(nr) \
	: "cc"); \
	\
	smp_mb(); \
	\
	return (old & (1 << nr)) != 0; \
}
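
/*
 * Illustrative usage sketch (not part of this header): the atomic
 * test-and-set returns the previous bit value, which is what makes a
 * simple bit-based lock possible, e.g.
 *
 *	while (test_and_set_bit(0, &some_word))
 *		;	(hypothetical: spin until the bit was previously clear)
 */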

#else	/* !CONFIG_ARC_HAS_LLSC */

/*
 * Non hardware assisted Atomic-R-M-W
 * Locking is irq-disabling only (UP) or a spinlock (SMP), provided by
 * bitops_lock()/bitops_unlock() from <asm/smp.h>.
 *
 * @nr is taken as "unsigned" (the generic variants use "signed"), which
 * avoids generating code for negative values that are never used.
 *
 * ARC bit instructions (BSET/BCLR/BXOR) only look at the bottom 5 bits of
 * the position, but the explicit (nr & 0x1f) mask keeps a constant @nr > 31
 * from being folded away by the compiler.
 */

#define BIT_OP(op, c_op, asm_op) \
static inline void op##_bit(unsigned long nr, volatile unsigned long *m)\
{ \
	unsigned long temp, flags; \
	m += nr >> 5; \
	\
	/* \
	 * irqs off (UP) / spinlock (SMP) serializes the RMW below \
	 */ \
	bitops_lock(flags); \
	\
	temp = *m; \
	*m = temp c_op (1UL << (nr & 0x1f)); \
	\
	bitops_unlock(flags); \
}

#define TEST_N_BIT_OP(op, c_op, asm_op) \
static inline int test_and_##op##_bit(unsigned long nr, volatile unsigned long *m)\
{ \
	unsigned long old, flags; \
	m += nr >> 5; \
	\
	bitops_lock(flags); \
	\
	old = *m; \
	*m = old c_op (1UL << (nr & 0x1f)); \
	\
	bitops_unlock(flags); \
	\
	return (old & (1UL << (nr & 0x1f))) != 0; \
}

#endif	/* CONFIG_ARC_HAS_LLSC */

/***************************************
 * Non atomic variants
 **************************************/

#define __BIT_OP(op, c_op, asm_op) \
static inline void __##op##_bit(unsigned long nr, volatile unsigned long *m) \
{ \
	unsigned long temp; \
	m += nr >> 5; \
	\
	temp = *m; \
	*m = temp c_op (1UL << (nr & 0x1f)); \
}

#define __TEST_N_BIT_OP(op, c_op, asm_op) \
static inline int __test_and_##op##_bit(unsigned long nr, volatile unsigned long *m)\
{ \
	unsigned long old; \
	m += nr >> 5; \
	\
	old = *m; \
	*m = old c_op (1UL << (nr & 0x1f)); \
	\
	return (old & (1UL << (nr & 0x1f))) != 0; \
}

#define BIT_OPS(op, c_op, asm_op) \
	\
	/* set_bit(), clear_bit(), change_bit() */ \
	BIT_OP(op, c_op, asm_op) \
	\
	/* test_and_set_bit(), test_and_clear_bit(), test_and_change_bit() */\
	TEST_N_BIT_OP(op, c_op, asm_op) \
	\
	/* __set_bit(), __clear_bit(), __change_bit() */ \
	__BIT_OP(op, c_op, asm_op) \
	\
	/* __test_and_set_bit(), __test_and_clear_bit(), __test_and_change_bit() */\
	__TEST_N_BIT_OP(op, c_op, asm_op)

BIT_OPS(set, |, bset)
BIT_OPS(clear, & ~, bclr)
BIT_OPS(change, ^, bxor)
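
/*
 * For reference, the three BIT_OPS() instantiations above expand into,
 * for each of set/clear/change:
 *
 *	void set_bit(nr, m), int test_and_set_bit(nr, m)	- atomic
 *	void __set_bit(nr, m), int __test_and_set_bit(nr, m)	- non-atomic
 *
 * and likewise the clear_bit() and change_bit() families.
 */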

/*
 * This routine doesn't need to be atomic: it just reads one word.
 */
static inline int
test_bit(unsigned int nr, const volatile unsigned long *addr)
{
	unsigned long mask;

	addr += nr >> 5;

	mask = 1UL << (nr & 0x1f);

	return ((mask & *addr) != 0);
}
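
/*
 * Usage sketch (hypothetical bitmap, for illustration only):
 *
 *	unsigned long map[2] = { 0 };
 *	set_bit(33, map);		(word 1, bit 1 is now set)
 *	if (test_bit(33, map))
 *		...
 */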

#ifdef CONFIG_ISA_ARCOMPACT

/*
 * Count leading zeroes, built on the NORM instruction.
 * Pure count helper for fls(): the result ranges over 0..32
 * (no [1-32] vs [0-31] convention applies here).
 */
static inline __attribute__ ((const)) int clz(unsigned int x)
{
	unsigned int res;

	__asm__ __volatile__(
	"	norm.f	%0, %1		\n"	/* 0..31; flags set from %1 */
	"	mov.n	%0, 0		\n"	/* MSB set: clz is 0 */
	"	add.p	%0, %0, 1	\n"	/* MSB clear: clz = norm + 1 */
	: "=r"(res)
	: "r"(x)
	: "cc");

	return res;
}

static inline int constant_fls(int x)
{
	int r = 32;

	if (!x)
		return 0;
	if (!(x & 0xffff0000u)) {
		x <<= 16;
		r -= 16;
	}
	if (!(x & 0xff000000u)) {
		x <<= 8;
		r -= 8;
	}
	if (!(x & 0xf0000000u)) {
		x <<= 4;
		r -= 4;
	}
	if (!(x & 0xc0000000u)) {
		x <<= 2;
		r -= 2;
	}
	if (!(x & 0x80000000u)) {
		x <<= 1;
		r -= 1;
	}
	return r;
}

/*
 * fls = Find Last Set in word
 * @result: [1-32]
 * fls(1) = 1, fls(0x80000000) = 32, fls(0) = 0
 */
static inline __attribute__ ((const)) int fls(unsigned long x)
{
	if (__builtin_constant_p(x))
		return constant_fls(x);

	return 32 - clz(x);
}

/*
 * __fls: Similar to fls, but zero based (0-31)
 */
static inline __attribute__ ((const)) int __fls(unsigned long x)
{
	if (!x)
		return 0;
	else
		return fls(x) - 1;
}

/*
 * ffs = Find First Set in word (LSB to MSB)
 * @result: [1-32], 0 if all 0's
 */
#define ffs(x)	({ unsigned long __t = (x); fls(__t & -__t); })

/*
 * __ffs: Similar to ffs, but zero based (0-31)
 */
static inline __attribute__ ((const)) int __ffs(unsigned long word)
{
	if (!word)
		return word;

	return ffs(word) - 1;
}
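
/*
 * Worked examples of the conventions above (values follow directly from
 * the definitions):
 *	fls(0) == 0,   fls(1) == 1,   fls(0x80000000) == 32
 *	ffs(0) == 0,   ffs(1) == 1,   ffs(0x80000000) == 32
 *	__ffs(1) == 0, __fls(0x80000000) == 31
 */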

#else	/* CONFIG_ISA_ARCV2 */

/*
 * fls = Find Last Set in word
 * @result: [1-32]
 * fls(1) = 1, fls(0x80000000) = 32, fls(0) = 0
 */
static inline __attribute__ ((const)) int fls(unsigned long x)
{
	int n;

	asm volatile(
	"	fls.f	%0, %1		\n"	/* 0:31; 0(Z) if src 0 */
	"	add.nz	%0, %0, 1	\n"	/* 0:31 -> 1:32 */
	: "=r"(n)
	: "r"(x)
	: "cc");

	return n;
}

/*
 * __fls: Similar to fls, but zero based (0-31). Also 0 if no bit is set.
 */
static inline __attribute__ ((const)) int __fls(unsigned long x)
{
	/* The FLS insn has exactly the same semantics as this API */
	return __builtin_arc_fls(x);
}

/*
 * ffs = Find First Set in word (LSB to MSB)
 * @result: [1-32], 0 if all 0's
 */
static inline __attribute__ ((const)) int ffs(unsigned long x)
{
	int n;

	asm volatile(
	"	ffs.f	%0, %1		\n"	/* 0:31; 31(Z) if src 0 */
	"	add.nz	%0, %0, 1	\n"	/* 0:31 -> 1:32 */
	"	mov.z	%0, 0		\n"	/* 31(Z) -> 0 */
	: "=r"(n)
	: "r"(x)
	: "cc");

	return n;
}

/*
 * __ffs: Similar to ffs, but zero based (0-31)
 */
static inline __attribute__ ((const)) int __ffs(unsigned long x)
{
	int n;

	asm volatile(
	"	ffs.f	%0, %1		\n"	/* 0:31; 31(Z) if src 0 */
	"	mov.z	%0, 0		\n"	/* 31(Z) -> 0 */
	: "=r"(n)
	: "r"(x)
	: "cc");

	return n;
}

#endif	/* CONFIG_ISA_ARCOMPACT */

/*
 * ffz = Find First Zero in word
 * @return: [0-31], 32 if all 1's
 */
#define ffz(x)	__ffs(~(x))
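
/*
 * Example: ffz(0x0000000f) == 4 -- ~x is 0xfffffff0, whose first set bit
 * (i.e. the first zero bit of x) is bit 4.
 */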

#include <asm-generic/bitops/hweight.h>
#include <asm-generic/bitops/fls64.h>
#include <asm-generic/bitops/sched.h>
#include <asm-generic/bitops/lock.h>

#include <asm-generic/bitops/find.h>
#include <asm-generic/bitops/le.h>
#include <asm-generic/bitops/ext2-atomic-setbit.h>

#endif	/* !__ASSEMBLY__ */

#endif	/* _ASM_BITOPS_H */