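/* PowerPC atomic bit operations. */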
#ifndef _ASM_POWERPC_BITOPS_H
#define _ASM_POWERPC_BITOPS_H

#ifdef __KERNEL__

#ifndef _LINUX_BITOPS_H
#error only <linux/bitops.h> can be included directly
#endif

#include <linux/compiler.h>
#include <asm/asm-compat.h>
#include <asm/synch.h>

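/*
 * PPC bit number conversion: these macros use the IBM convention, where
 * bit 0 is the most significant bit of the word.  On 64-bit, for example,
 * PPC_BIT(0) is 1UL << 63 and PPC_BITMASK(0, 3) covers the four most
 * significant bits.
 */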
#define PPC_BITLSHIFT(be)	(BITS_PER_LONG - 1 - (be))
#define PPC_BIT(bit)		(1UL << PPC_BITLSHIFT(bit))
#define PPC_BITMASK(bs, be)	((PPC_BIT(bs) - PPC_BIT(be)) | PPC_BIT(bs))

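/* Extract an IBM-numbered PPC bit and deposit it at a conventional bit position */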
#define PPC_BITEXTRACT(bits, ppc_bit, dst_bit)			\
	((((bits) >> PPC_BITLSHIFT(ppc_bit)) & 1) << (dst_bit))

#define PPC_BITLSHIFT32(be)	(32 - 1 - (be))
#define PPC_BIT32(bit)		(1UL << PPC_BITLSHIFT32(bit))
#define PPC_BITMASK32(bs, be)	((PPC_BIT32(bs) - PPC_BIT32(be))|PPC_BIT32(bs))

#define PPC_BITLSHIFT8(be)	(8 - 1 - (be))
#define PPC_BIT8(bit)		(1UL << PPC_BITLSHIFT8(bit))
#define PPC_BITMASK8(bs, be)	((PPC_BIT8(bs) - PPC_BIT8(be))|PPC_BIT8(bs))

#include <asm/barrier.h>

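/* Macro for generating the atomic *_bits() functions below */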
#define DEFINE_BITOP(fn, op, prefix)		\
static inline void fn(unsigned long mask,	\
		volatile unsigned long *_p)	\
{						\
	unsigned long old;			\
	unsigned long *p = (unsigned long *)_p;	\
	__asm__ __volatile__ (			\
	prefix					\
"1:"	PPC_LLARX(%0,0,%3,0) "\n"		\
	stringify_in_c(op) "%0,%0,%2\n"		\
	PPC_STLCX "%0,0,%3\n"			\
	"bne- 1b\n"				\
	: "=&r" (old), "+m" (*p)		\
	: "r" (mask), "r" (p)			\
	: "cc", "memory");			\
}

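/*
 * Each generated function retries a load-reserve/store-conditional sequence
 * until the update succeeds.  On a 64-bit build, for example, set_bits(mask, p)
 * becomes roughly:
 *
 *	1: ldarx   old,0,p	# load the word and take a reservation
 *	   or      old,old,mask	# apply the bitwise operation
 *	   stdcx.  old,0,p	# store back if the reservation still holds
 *	   bne-    1b		# otherwise retry
 */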
DEFINE_BITOP(set_bits, or, "")
DEFINE_BITOP(clear_bits, andc, "")
DEFINE_BITOP(clear_bits_unlock, andc, PPC_RELEASE_BARRIER)
DEFINE_BITOP(change_bits, xor, "")

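/*
 * Single-bit arch_*() helpers; the instrumented wrapper headers included
 * below turn these into the kernel's set_bit()/clear_bit()/change_bit()
 * interface.
 */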
static inline void arch_set_bit(int nr, volatile unsigned long *addr)
{
	set_bits(BIT_MASK(nr), addr + BIT_WORD(nr));
}

static inline void arch_clear_bit(int nr, volatile unsigned long *addr)
{
	clear_bits(BIT_MASK(nr), addr + BIT_WORD(nr));
}

static inline void arch_clear_bit_unlock(int nr, volatile unsigned long *addr)
{
	clear_bits_unlock(BIT_MASK(nr), addr + BIT_WORD(nr));
}

static inline void arch_change_bit(int nr, volatile unsigned long *addr)
{
	change_bits(BIT_MASK(nr), addr + BIT_WORD(nr));
}

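/*
 * Like DEFINE_BITOP(), but the generated functions also capture the old
 * value of the word (so the test_and_*() operations can report whether the
 * bit was set), and take entry/exit barriers plus an EH (exclusive access
 * hint) argument for the load-reserve instruction.
 */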
#define DEFINE_TESTOP(fn, op, prefix, postfix, eh)	\
static inline unsigned long fn(			\
		unsigned long mask,			\
		volatile unsigned long *_p)		\
{							\
	unsigned long old, t;				\
	unsigned long *p = (unsigned long *)_p;		\
	__asm__ __volatile__ (				\
	prefix						\
"1:"	PPC_LLARX(%0,0,%3,eh) "\n"			\
	stringify_in_c(op) "%1,%0,%2\n"			\
	PPC_STLCX "%1,0,%3\n"				\
	"bne- 1b\n"					\
	postfix						\
	: "=&r" (old), "=&r" (t)			\
	: "r" (mask), "r" (p)				\
	: "cc", "memory");				\
	return (old & mask);				\
}

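/*
 * The plain test_and_*() variants are fully ordered (entry and exit
 * barriers).  test_and_set_bits_lock() only needs acquire semantics, so it
 * omits the entry barrier and passes EH=1 to hint that the reservation is
 * being taken for a lock.
 */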
DEFINE_TESTOP(test_and_set_bits, or, PPC_ATOMIC_ENTRY_BARRIER,
	      PPC_ATOMIC_EXIT_BARRIER, 0)
DEFINE_TESTOP(test_and_set_bits_lock, or, "",
	      PPC_ACQUIRE_BARRIER, 1)
DEFINE_TESTOP(test_and_clear_bits, andc, PPC_ATOMIC_ENTRY_BARRIER,
	      PPC_ATOMIC_EXIT_BARRIER, 0)
DEFINE_TESTOP(test_and_change_bits, xor, PPC_ATOMIC_ENTRY_BARRIER,
	      PPC_ATOMIC_EXIT_BARRIER, 0)

static inline int arch_test_and_set_bit(unsigned long nr,
					volatile unsigned long *addr)
{
	return test_and_set_bits(BIT_MASK(nr), addr + BIT_WORD(nr)) != 0;
}

static inline int arch_test_and_set_bit_lock(unsigned long nr,
					     volatile unsigned long *addr)
{
	return test_and_set_bits_lock(BIT_MASK(nr),
				      addr + BIT_WORD(nr)) != 0;
}

static inline int arch_test_and_clear_bit(unsigned long nr,
					  volatile unsigned long *addr)
{
	return test_and_clear_bits(BIT_MASK(nr), addr + BIT_WORD(nr)) != 0;
}

static inline int arch_test_and_change_bit(unsigned long nr,
					   volatile unsigned long *addr)
{
	return test_and_change_bits(BIT_MASK(nr), addr + BIT_WORD(nr)) != 0;
}

#ifdef CONFIG_PPC64
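/*
 * Atomically clear a bit with release semantics and return the previous
 * value of the whole word.
 */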
static inline unsigned long
clear_bit_unlock_return_word(int nr, volatile unsigned long *addr)
{
	unsigned long old, t;
	unsigned long *p = (unsigned long *)addr + BIT_WORD(nr);
	unsigned long mask = BIT_MASK(nr);

	__asm__ __volatile__ (
	PPC_RELEASE_BARRIER
"1:"	PPC_LLARX(%0,0,%3,0) "\n"
	"andc %1,%0,%2\n"
	PPC_STLCX "%1,0,%3\n"
	"bne- 1b\n"
	: "=&r" (old), "=&r" (t)
	: "r" (mask), "r" (p)
	: "cc", "memory");

	return old;
}

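/*
 * Used by the page-unlock fast path in mm/filemap.c: bit 7 of the returned
 * word corresponds to PG_waiters, so the caller can tell whether anyone is
 * waiting on the page it just unlocked.
 */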
#define arch_clear_bit_unlock_is_negative_byte(nr, addr)	\
	(clear_bit_unlock_return_word(nr, addr) & BIT_MASK(7))

#endif /* CONFIG_PPC64 */

#include <asm-generic/bitops/non-atomic.h>

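/*
 * Non-atomic unlock: a release barrier followed by a plain __clear_bit().
 * Only safe when no other CPU can be modifying the same word concurrently.
 */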
static inline void arch___clear_bit_unlock(int nr, volatile unsigned long *addr)
{
	__asm__ __volatile__(PPC_RELEASE_BARRIER "" ::: "memory");
	__clear_bit(nr, addr);
}

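/*
 * Return the zero-based position (LE, not IBM bit numbering) of the most
 * significant 1-bit in a word.
 */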
#define __ilog2(x)	ilog2(x)

#include <asm-generic/bitops/ffz.h>

#include <asm-generic/bitops/builtin-__ffs.h>

#include <asm-generic/bitops/builtin-ffs.h>

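/*
 * fls: find last (most-significant) bit set.
 * Note fls(0) = 0, fls(1) = 1, fls(0x80000000) = 32.
 */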
static inline int fls(unsigned int x)
{
	/* __builtin_clz(0) is undefined, so handle zero explicitly */
	return x ? 32 - __builtin_clz(x) : 0;
}

#include <asm-generic/bitops/builtin-__fls.h>

static inline int fls64(__u64 x)
{
	/* likewise avoid __builtin_clzll(0), which is undefined */
	return x ? 64 - __builtin_clzll(x) : 0;
}

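/*
 * 64-bit kernels declare out-of-line __arch_hweight*() population-count
 * helpers (with compile-time constant folding via const_hweight.h); 32-bit
 * falls back to the generic hweight implementation.
 */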
#ifdef CONFIG_PPC64
unsigned int __arch_hweight8(unsigned int w);
unsigned int __arch_hweight16(unsigned int w);
unsigned int __arch_hweight32(unsigned int w);
unsigned long __arch_hweight64(__u64 w);
#include <asm-generic/bitops/const_hweight.h>
#else
#include <asm-generic/bitops/hweight.h>
#endif

#include <asm-generic/bitops/find.h>

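/* Wrappers that add KASAN/KCSAN instrumentation around the arch_*() ops */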
#include <asm-generic/bitops/instrumented-atomic.h>
#include <asm-generic/bitops/instrumented-lock.h>

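/* Little-endian versions */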
#include <asm-generic/bitops/le.h>

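/* Bitmap functions for the ext2 filesystem */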
#include <asm-generic/bitops/ext2-atomic-setbit.h>

#include <asm-generic/bitops/sched.h>

#endif /* __KERNEL__ */

#endif /* _ASM_POWERPC_BITOPS_H */