#ifndef _ASM_M32R_SPINLOCK_H
#define _ASM_M32R_SPINLOCK_H
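/*
 * M32R SMP spinlock and rwlock primitives.
 * (the type definitions are in asm/spinlock_types.h)
 */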
#include <linux/compiler.h>
#include <linux/atomic.h>
#include <asm/dcache_clear.h>
#include <asm/page.h>
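/*
 * Your basic SMP spinlocks, allowing only a single CPU anywhere.
 *
 * ->slock:  = 1 : unlocked
 *          <= 0 : locked
 *
 * We make no fairness assumptions.
 */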
#define arch_spin_is_locked(x)		(*(volatile int *)(&(x)->slock) <= 0)
#define arch_spin_lock_flags(lock, flags) arch_spin_lock(lock)
#define arch_spin_unlock_wait(x) \
	do { cpu_relax(); } while (arch_spin_is_locked(x))
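/**
 * arch_spin_trylock - try to acquire the spinlock without spinning
 * @lock: pointer to the lock variable
 *
 * Returns 1 if the lock was acquired, 0 if it was already held.
 */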
static inline int arch_spin_trylock(arch_spinlock_t *lock)
{
	int oldval;
	unsigned long tmp1, tmp2;
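	/*
	 * Atomically exchange ->slock with 0 (locked), keeping the
	 * old value:
	 *
	 *   oldval = lock->slock;  <--+ must be one atomic operation
	 *   lock->slock = 0;       <--+ (the lock/unlock pair below)
	 */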
	__asm__ __volatile__ (
		"# arch_spin_trylock		\n\t"
		"ldi	%1, #0;			\n\t"
		"mvfc	%2, psw;		\n\t"
		"clrpsw	#0x40 -> nop;		\n\t"
		DCACHE_CLEAR("%0", "r6", "%3")
		"lock	%0, @%3;		\n\t"
		"unlock	%1, @%3;		\n\t"
		"mvtc	%2, psw;		\n\t"
		: "=&r" (oldval), "=&r" (tmp1), "=&r" (tmp2)
		: "r" (&lock->slock)
		: "memory"
#ifdef CONFIG_CHIP_M32700_TS1
		, "r6"
#endif	/* CONFIG_CHIP_M32700_TS1 */
	);

	return (oldval > 0);
}

static inline void arch_spin_lock(arch_spinlock_t *lock)
{
	unsigned long tmp0, tmp1;
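	/*
	 * lock->slock :  = 1 : unlocked
	 *             : <= 0 : locked
	 *
	 * Equivalent pseudocode:
	 *   for ( ; ; ) {
	 *     lock->slock -= 1;  <-- must be atomic
	 *     if (lock->slock == 0) break;
	 *     while (lock->slock <= 0)
	 *       ;  /* wait, then try again *;/
	 *   }
	 */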
	__asm__ __volatile__ (
		"# arch_spin_lock		\n\t"
		".fillinsn			\n"
		"1:				\n\t"
		"mvfc	%1, psw;		\n\t"
		"clrpsw	#0x40 -> nop;		\n\t"
		DCACHE_CLEAR("%0", "r6", "%2")
		"lock	%0, @%2;		\n\t"
		"addi	%0, #-1;		\n\t"
		"unlock	%0, @%2;		\n\t"
		"mvtc	%1, psw;		\n\t"
		"bltz	%0, 2f;			\n\t"
		LOCK_SECTION_START(".balign 4 \n\t")
		".fillinsn			\n"
		"2:				\n\t"
		"ld	%0, @%2;		\n\t"
		"bgtz	%0, 1b;			\n\t"
		"bra	2b;			\n\t"
		LOCK_SECTION_END
		: "=&r" (tmp0), "=&r" (tmp1)
		: "r" (&lock->slock)
		: "memory"
#ifdef CONFIG_CHIP_M32700_TS1
		, "r6"
#endif	/* CONFIG_CHIP_M32700_TS1 */
	);
}

static inline void arch_spin_unlock(arch_spinlock_t *lock)
{
	mb();
	lock->slock = 1;
}
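/*
 * Read-write spinlocks, allowing multiple readers but only one writer.
 *
 * ->lock counts down from RW_LOCK_BIAS: each reader subtracts 1 and a
 * writer subtracts the whole bias, so the value is RW_LOCK_BIAS when
 * the lock is free, 0 when write-locked, and somewhere in between
 * while readers hold it.
 */

/**
 * arch_read_can_lock - would read_trylock() succeed?
 * @x: the rwlock in question.
 */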
#define arch_read_can_lock(x) ((int)(x)->lock > 0)
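/**
 * arch_write_can_lock - would write_trylock() succeed?
 * @x: the rwlock in question.
 */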
#define arch_write_can_lock(x) ((x)->lock == RW_LOCK_BIAS)

static inline void arch_read_lock(arch_rwlock_t *rw)
{
	unsigned long tmp0, tmp1;
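	/*
	 * rw->lock :  > 0 : unlocked
	 *          : <= 0 : locked
	 *
	 * Equivalent pseudocode:
	 *   for ( ; ; ) {
	 *     rw->lock -= 1;  <-- must be atomic
	 *     if (rw->lock >= 0) break;
	 *     rw->lock += 1;  <-- must be atomic (undo the decrement)
	 *     while (rw->lock <= 0)
	 *       ;  /* wait, then try again *;/
	 *   }
	 */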
	__asm__ __volatile__ (
		"# read_lock			\n\t"
		".fillinsn			\n"
		"1:				\n\t"
		"mvfc	%1, psw;		\n\t"
		"clrpsw	#0x40 -> nop;		\n\t"
		DCACHE_CLEAR("%0", "r6", "%2")
		"lock	%0, @%2;		\n\t"
		"addi	%0, #-1;		\n\t"
		"unlock	%0, @%2;		\n\t"
		"mvtc	%1, psw;		\n\t"
		"bltz	%0, 2f;			\n\t"
		LOCK_SECTION_START(".balign 4 \n\t")
		".fillinsn			\n"
		"2:				\n\t"
		"clrpsw	#0x40 -> nop;		\n\t"
		DCACHE_CLEAR("%0", "r6", "%2")
		"lock	%0, @%2;		\n\t"
		"addi	%0, #1;			\n\t"
		"unlock	%0, @%2;		\n\t"
		"mvtc	%1, psw;		\n\t"
		".fillinsn			\n"
		"3:				\n\t"
		"ld	%0, @%2;		\n\t"
		"bgtz	%0, 1b;			\n\t"
		"bra	3b;			\n\t"
		LOCK_SECTION_END
		: "=&r" (tmp0), "=&r" (tmp1)
		: "r" (&rw->lock)
		: "memory"
#ifdef CONFIG_CHIP_M32700_TS1
		, "r6"
#endif	/* CONFIG_CHIP_M32700_TS1 */
	);
}

static inline void arch_write_lock(arch_rwlock_t *rw)
{
	unsigned long tmp0, tmp1, tmp2;
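	/*
	 * rw->lock :  == RW_LOCK_BIAS : unlocked
	 *          :  != RW_LOCK_BIAS : locked
	 *
	 * Equivalent pseudocode:
	 *   for ( ; ; ) {
	 *     rw->lock -= RW_LOCK_BIAS;  <-- must be atomic
	 *     if (rw->lock == 0) break;
	 *     rw->lock += RW_LOCK_BIAS;  <-- must be atomic (undo)
	 *     while (rw->lock != RW_LOCK_BIAS)
	 *       ;  /* wait, then try again *;/
	 *   }
	 */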
	__asm__ __volatile__ (
		"# write_lock			\n\t"
		"seth	%1, #high(" RW_LOCK_BIAS_STR "); \n\t"
		"or3	%1, %1, #low(" RW_LOCK_BIAS_STR "); \n\t"
		".fillinsn			\n"
		"1:				\n\t"
		"mvfc	%2, psw;		\n\t"
		"clrpsw	#0x40 -> nop;		\n\t"
		DCACHE_CLEAR("%0", "r7", "%3")
		"lock	%0, @%3;		\n\t"
		"sub	%0, %1;			\n\t"
		"unlock	%0, @%3;		\n\t"
		"mvtc	%2, psw;		\n\t"
		"bnez	%0, 2f;			\n\t"
		LOCK_SECTION_START(".balign 4 \n\t")
		".fillinsn			\n"
		"2:				\n\t"
		"clrpsw	#0x40 -> nop;		\n\t"
		DCACHE_CLEAR("%0", "r7", "%3")
		"lock	%0, @%3;		\n\t"
		"add	%0, %1;			\n\t"
		"unlock	%0, @%3;		\n\t"
		"mvtc	%2, psw;		\n\t"
		".fillinsn			\n"
		"3:				\n\t"
		"ld	%0, @%3;		\n\t"
		"beq	%0, %1, 1b;		\n\t"
		"bra	3b;			\n\t"
		LOCK_SECTION_END
		: "=&r" (tmp0), "=&r" (tmp1), "=&r" (tmp2)
		: "r" (&rw->lock)
		: "memory"
#ifdef CONFIG_CHIP_M32700_TS1
		, "r7"
#endif	/* CONFIG_CHIP_M32700_TS1 */
	);
}

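/* Drop one reader reference: atomically increment rw->lock. */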
static inline void arch_read_unlock(arch_rwlock_t *rw)
{
	unsigned long tmp0, tmp1;

	__asm__ __volatile__ (
		"# read_unlock			\n\t"
		"mvfc	%1, psw;		\n\t"
		"clrpsw	#0x40 -> nop;		\n\t"
		DCACHE_CLEAR("%0", "r6", "%2")
		"lock	%0, @%2;		\n\t"
		"addi	%0, #1;			\n\t"
		"unlock	%0, @%2;		\n\t"
		"mvtc	%1, psw;		\n\t"
		: "=&r" (tmp0), "=&r" (tmp1)
		: "r" (&rw->lock)
		: "memory"
#ifdef CONFIG_CHIP_M32700_TS1
		, "r6"
#endif	/* CONFIG_CHIP_M32700_TS1 */
	);
}

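/* Release the write lock: atomically add RW_LOCK_BIAS back to rw->lock. */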
static inline void arch_write_unlock(arch_rwlock_t *rw)
{
	unsigned long tmp0, tmp1, tmp2;

	__asm__ __volatile__ (
		"# write_unlock			\n\t"
		"seth	%1, #high(" RW_LOCK_BIAS_STR "); \n\t"
		"or3	%1, %1, #low(" RW_LOCK_BIAS_STR "); \n\t"
		"mvfc	%2, psw;		\n\t"
		"clrpsw	#0x40 -> nop;		\n\t"
		DCACHE_CLEAR("%0", "r7", "%3")
		"lock	%0, @%3;		\n\t"
		"add	%0, %1;			\n\t"
		"unlock	%0, @%3;		\n\t"
		"mvtc	%2, psw;		\n\t"
		: "=&r" (tmp0), "=&r" (tmp1), "=&r" (tmp2)
		: "r" (&rw->lock)
		: "memory"
#ifdef CONFIG_CHIP_M32700_TS1
		, "r7"
#endif	/* CONFIG_CHIP_M32700_TS1 */
	);
}

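/*
 * Take a reader slot optimistically; if the count went negative a
 * writer holds (or is taking) the lock, so undo the decrement and fail.
 */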
static inline int arch_read_trylock(arch_rwlock_t *lock)
{
	atomic_t *count = (atomic_t *)lock;
	if (atomic_dec_return(count) >= 0)
		return 1;
	atomic_inc(count);
	return 0;
}

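/*
 * Subtract the whole bias; success means the lock was completely free
 * (no readers, no writer).  Otherwise restore the bias and fail.
 */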
static inline int arch_write_trylock(arch_rwlock_t *lock)
{
	atomic_t *count = (atomic_t *)lock;
	if (atomic_sub_and_test(RW_LOCK_BIAS, count))
		return 1;
	atomic_add(RW_LOCK_BIAS, count);
	return 0;
}

#define arch_read_lock_flags(lock, flags) arch_read_lock(lock)
#define arch_write_lock_flags(lock, flags) arch_write_lock(lock)

#define arch_spin_relax(lock)	cpu_relax()
#define arch_read_relax(lock)	cpu_relax()
#define arch_write_relax(lock)	cpu_relax()

#endif	/* _ASM_M32R_SPINLOCK_H */