/* SPDX-License-Identifier: GPL-2.0-only */
/*
 * Copyright (C) 2004, 2007-2010, 2011-2012 Synopsys, Inc. (www.synopsys.com)
 */

#ifndef _ASM_BITOPS_H
#define _ASM_BITOPS_H

#ifndef _LINUX_BITOPS_H
#error only <linux/bitops.h> can be included directly
#endif

#ifndef __ASSEMBLY__

#include <linux/types.h>
#include <linux/compiler.h>
#include <asm/barrier.h>
#ifndef CONFIG_ARC_HAS_LLSC
#include <asm/smp.h>
#endif

#ifdef CONFIG_ARC_HAS_LLSC

/*
 * Hardware assisted Atomic-R-M-W
 */

#define BIT_OP(op, c_op, asm_op)					\
static inline void op##_bit(unsigned long nr, volatile unsigned long *m)\
{									\
	unsigned int temp;						\
									\
	m += nr >> 5;							\
									\
	nr &= 0x1f;							\
									\
	__asm__ __volatile__(						\
	"1:	llock   %0, [%1]	\n"				\
	"	" #asm_op " %0, %0, %2	\n"				\
	"	scond   %0, [%1]	\n"				\
	"	bnz     1b		\n"				\
	: "=&r"(temp)	/* Early clobber, to prevent reg reuse */	\
	: "r"(m),	/* Not "m": llock only supports reg direct addr mode */\
	  "ir"(nr)							\
	: "cc");							\
}

/*
 * Semantics of test_and_*_bit():
 *    atomically read the word, apply the operation, write it back,
 *    and return the original value of the tested bit.
 *
 * The LLOCK/SCOND retry loop updates the word unconditionally; the old
 * bit value is simply sampled from the pre-update copy of the word.
 */

#define TEST_N_BIT_OP(op, c_op, asm_op)					\
static inline int test_and_##op##_bit(unsigned long nr, volatile unsigned long *m)\
{									\
	unsigned long old, temp;					\
									\
	m += nr >> 5;							\
									\
	nr &= 0x1f;							\
									\
	/*								\
	 * Explicit full memory barrier needed before/after as		\
	 * LLOCK/SCOND themselves don't provide any such semantics	\
	 */								\
	smp_mb();							\
									\
	__asm__ __volatile__(						\
	"1:	llock   %0, [%2]	\n"				\
	"	" #asm_op " %1, %0, %3	\n"				\
	"	scond   %1, [%2]	\n"				\
	"	bnz     1b		\n"				\
	: "=&r"(old), "=&r"(temp)					\
	: "r"(m), "ir"(nr)						\
	: "cc");							\
									\
	smp_mb();							\
									\
	return (old & (1UL << nr)) != 0;				\
}

#elif !defined(CONFIG_ARC_PLAT_EZNPS)

/*
 * Non hardware assisted Atomic-R-M-W
 * Locking changes to irq-disabling only (UP) and spinlocks (SMP)
 *
 * bitops_lock()/bitops_unlock() come from asm/smp.h: on UP they reduce
 * to local_irq_save()/local_irq_restore(), on SMP they additionally
 * take a spinlock shared by all such non-LLSC bitops.
 */
#define BIT_OP(op, c_op, asm_op)					\
static inline void op##_bit(unsigned long nr, volatile unsigned long *m)\
{									\
	unsigned long temp, flags;					\
	m += nr >> 5;							\
									\
	/*								\
	 * spin lock/unlock provide the needed smp_mb() before/after	\
	 */								\
	bitops_lock(flags);						\
									\
	temp = *m;							\
	*m = temp c_op (1UL << (nr & 0x1f));				\
									\
	bitops_unlock(flags);						\
}

#define TEST_N_BIT_OP(op, c_op, asm_op)					\
static inline int test_and_##op##_bit(unsigned long nr, volatile unsigned long *m)\
{									\
	unsigned long old, flags;					\
	m += nr >> 5;							\
									\
	bitops_lock(flags);						\
									\
	old = *m;							\
	*m = old c_op (1UL << (nr & 0x1f));				\
									\
	bitops_unlock(flags);						\
									\
	return (old & (1UL << (nr & 0x1f))) != 0;			\
}

#else	/* CONFIG_ARC_PLAT_EZNPS */

#define BIT_OP(op, c_op, asm_op)					\
static inline void op##_bit(unsigned long nr, volatile unsigned long *m)\
{									\
	m += nr >> 5;							\
									\
	nr = (1UL << (nr & 0x1f));					\
	if (asm_op == CTOP_INST_AAND_DI_R2_R2_R3)			\
		nr = ~nr;						\
									\
	/*								\
	 * EZNPS atomic ops are custom (CTOP) instructions, emitted	\
	 * as raw opcodes via .word; they take their operands in the	\
	 * fixed registers r2 (value) and r3 (address).			\
	 */								\
	__asm__ __volatile__(						\
	"	mov r2, %0\n"						\
	"	mov r3, %1\n"						\
	"	.word %2\n"						\
	:								\
	: "r"(nr), "r"(m), "i"(asm_op)					\
	: "r2", "r3", "memory");					\
}

#define TEST_N_BIT_OP(op, c_op, asm_op)					\
static inline int test_and_##op##_bit(unsigned long nr, volatile unsigned long *m)\
{									\
	unsigned long old;						\
									\
	m += nr >> 5;							\
									\
	nr = old = (1UL << (nr & 0x1f));				\
	if (asm_op == CTOP_INST_AAND_DI_R2_R2_R3)			\
		old = ~old;						\
									\
	/* Explicit full memory barrier needed before/after */		\
	smp_mb();							\
									\
	__asm__ __volatile__(						\
	"	mov r2, %0\n"						\
	"	mov r3, %1\n"						\
	"	.word %2\n"						\
	"	mov %0, r2"						\
	: "+r"(old)							\
	: "r"(m), "i"(asm_op)						\
	: "r2", "r3", "memory");					\
									\
	smp_mb();							\
									\
	return (old & nr) != 0;						\
}

#endif	/* CONFIG_ARC_PLAT_EZNPS */

/***************************************
 * Non atomic variants
 **************************************/

#define __BIT_OP(op, c_op, asm_op)					\
static inline void __##op##_bit(unsigned long nr, volatile unsigned long *m)\
{									\
	unsigned long temp;						\
	m += nr >> 5;							\
									\
	temp = *m;							\
	*m = temp c_op (1UL << (nr & 0x1f));				\
}

#define __TEST_N_BIT_OP(op, c_op, asm_op)				\
static inline int __test_and_##op##_bit(unsigned long nr, volatile unsigned long *m)\
{									\
	unsigned long old;						\
	m += nr >> 5;							\
									\
	old = *m;							\
	*m = old c_op (1UL << (nr & 0x1f));				\
									\
	return (old & (1UL << (nr & 0x1f))) != 0;			\
}

#define BIT_OPS(op, c_op, asm_op)					\
									\
	/* set_bit(), clear_bit(), change_bit() */			\
	BIT_OP(op, c_op, asm_op)					\
									\
	/* test_and_set_bit(), test_and_clear_bit(), test_and_change_bit() */\
	TEST_N_BIT_OP(op, c_op, asm_op)					\
									\
	/* __set_bit(), __clear_bit(), __change_bit() */		\
	__BIT_OP(op, c_op, asm_op)					\
									\
	/* __test_and_set_bit(), __test_and_clear_bit(), __test_and_change_bit() */\
	__TEST_N_BIT_OP(op, c_op, asm_op)

#ifndef CONFIG_ARC_PLAT_EZNPS
BIT_OPS(set, |, bset)
BIT_OPS(clear, & ~, bclr)
BIT_OPS(change, ^, bxor)
#else
BIT_OPS(set, |, CTOP_INST_AOR_DI_R2_R2_R3)
BIT_OPS(clear, & ~, CTOP_INST_AAND_DI_R2_R2_R3)
BIT_OPS(change, ^, CTOP_INST_AXOR_DI_R2_R2_R3)
#endif
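
/*
 * Illustration: a single BIT_OPS(set, |, bset) line above expands into
 * all four flavours of the "set" operation:
 *
 *	set_bit()		atomic
 *	test_and_set_bit()	atomic, returns old bit value
 *	__set_bit()		non-atomic
 *	__test_and_set_bit()	non-atomic, returns old bit value
 */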

/*
 * This routine doesn't need to be atomic.
 */
static inline int
test_bit(unsigned int nr, const volatile unsigned long *addr)
{
	unsigned long mask;

	addr += nr >> 5;

	mask = 1UL << (nr & 0x1f);

	return ((mask & *addr) != 0);
}
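
/*
 * Illustrative usage (hypothetical 'map' array): bits are numbered
 * across the whole array, 32 per word, so with
 *	unsigned long map[2] = { 0, 0 };
 *	set_bit(37, map);
 * bit 5 of map[1] gets set, and test_bit(37, map) then returns 1.
 */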

#ifdef CONFIG_ISA_ARCOMPACT

/*
 * Count the number of zeros, starting from MSB
 * Helper for fls( ) friends
 * This is a pure count, so (1-32) or (0-31) doesn't apply
 * It can be 0 to 32, based on the number of leading 0's:
 * clz(0x8000_0000) = 0, clz(0xFFFF_FFFF) = 0, clz(0) = 32, clz(1) = 31
 */
static inline __attribute__ ((const)) int clz(unsigned int x)
{
	unsigned int res;

	__asm__ __volatile__(
	"	norm.f  %0, %1		\n"	/* norm count, sets N/Z on @x */
	"	mov.n   %0, 0		\n"	/* MSB set: no leading zeros */
	"	add.p   %0, %0, 1	\n"	/* positive: norm + 1 == clz */
	: "=r"(res)
	: "r"(x)
	: "cc");

	return res;
}

static inline int constant_fls(unsigned int x)
{
	int r = 32;

	if (!x)
		return 0;
	if (!(x & 0xffff0000u)) {
		x <<= 16;
		r -= 16;
	}
	if (!(x & 0xff000000u)) {
		x <<= 8;
		r -= 8;
	}
	if (!(x & 0xf0000000u)) {
		x <<= 4;
		r -= 4;
	}
	if (!(x & 0xc0000000u)) {
		x <<= 2;
		r -= 2;
	}
	if (!(x & 0x80000000u)) {
		x <<= 1;
		r -= 1;
	}
	return r;
}
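
/*
 * Worked example: constant_fls(0x900). The top set bit is bit 11, so
 * the result must be 12. Halves with no set bits are shifted away:
 * 0x900 -> 0x09000000 (r = 16) -> 0x90000000 (r = 12); the remaining
 * tests see a set MSB, leave r unchanged, and 12 is returned.
 */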

/*
 * fls = Find Last Set in word
 * @result: [1-32]
 * fls(1) = 1, fls(0x80000000) = 32, fls(0) = 0
 */
static inline __attribute__ ((const)) int fls(unsigned int x)
{
	if (__builtin_constant_p(x))
		return constant_fls(x);

	return 32 - clz(x);
}

/*
 * __fls: Similar to fls, but zero based (0-31)
 */
static inline __attribute__ ((const)) int __fls(unsigned long x)
{
	if (!x)
		return 0;
	else
		return fls(x) - 1;
}

/*
 * ffs = Find First Set in word (LSB to MSB)
 * @result: [1-32], 0 if all 0's
 */
#define ffs(x)	({ unsigned long __t = (x); fls(__t & -__t); })
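
/*
 * How this works: in two's complement, __t & -__t isolates the lowest
 * set bit, e.g. 0x50 & -0x50 == 0x10, and fls(0x10) == 5 is exactly
 * the 1-based position ffs() must return for 0x50.
 */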

/*
 * __ffs: Similar to ffs, but zero based (0-31)
 */
static inline __attribute__ ((const)) unsigned long __ffs(unsigned long word)
{
	if (!word)
		return word;

	return ffs(word) - 1;
}

#else	/* CONFIG_ISA_ARCV2 */

/*
 * fls = Find Last Set in word
 * @result: [1-32]
 * fls(1) = 1, fls(0x80000000) = 32, fls(0) = 0
 */
static inline __attribute__ ((const)) int fls(unsigned long x)
{
	int n;

	asm volatile(
	"	fls.f	%0, %1		\n"	/* 0:31; 0(Z) if src 0 */
	"	add.nz	%0, %0, 1	\n"	/* 0:31 -> 1:32 */
	: "=r"(n)
	: "r"(x)
	: "cc");

	return n;
}

/*
 * __fls: Similar to fls, but zero based (0-31). Also 0 if no bit set
 */
static inline __attribute__ ((const)) int __fls(unsigned long x)
{
	/* FLS insn has exactly same semantics as the API */
	return __builtin_arc_fls(x);
}

/*
 * ffs = Find First Set in word (LSB to MSB)
 * @result: [1-32], 0 if all 0's
 */
static inline __attribute__ ((const)) int ffs(unsigned long x)
{
	int n;

	asm volatile(
	"	ffs.f	%0, %1		\n"	/* 0:31; 31(Z) if src 0 */
	"	add.nz	%0, %0, 1	\n"	/* 0:31 -> 1:32 */
	"	mov.z	%0, 0		\n"	/* 31(Z) -> 0 */
	: "=r"(n)
	: "r"(x)
	: "cc");

	return n;
}

/*
 * __ffs: Similar to ffs, but zero based (0-31)
 */
static inline __attribute__ ((const)) unsigned long __ffs(unsigned long x)
{
	unsigned long n;

	asm volatile(
	"	ffs.f	%0, %1		\n"	/* 0:31; 31(Z) if src 0 */
	"	mov.z	%0, 0		\n"	/* 31(Z) -> 0 */
	: "=r"(n)
	: "r"(x)
	: "cc");

	return n;
}

#endif	/* CONFIG_ISA_ARCOMPACT */

/*
 * ffz = Find First Zero in word.
 * @return: [0-31], 32 if all 1's
 */
#define ffz(x)	__ffs(~(x))
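
/*
 * Illustrative: ffz(0x07) == __ffs(~0x07) == __ffs(0xfffffff8) == 3,
 * i.e. bit 3 is the lowest clear bit of 0x07.
 */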

#include <asm-generic/bitops/hweight.h>
#include <asm-generic/bitops/fls64.h>
#include <asm-generic/bitops/sched.h>
#include <asm-generic/bitops/lock.h>

#include <asm-generic/bitops/find.h>
#include <asm-generic/bitops/le.h>
#include <asm-generic/bitops/ext2-atomic-setbit.h>
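
/*
 * The asm-generic headers above build the rest of the bitops API on the
 * primitives defined here: hweight*(), fls64(), sched_find_first_bit(),
 * the *_bit_lock() family, find_*_bit(), the little-endian (*_le)
 * accessors and the ext2 atomic set/clear helpers.
 */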

#endif /* !__ASSEMBLY__ */

#endif