#ifndef _ASM_POWERPC_ATOMIC_H_
#define _ASM_POWERPC_ATOMIC_H_

/*
 * PowerPC atomic operations
 */

#ifdef __KERNEL__
#include <linux/types.h>
#include <asm/cmpxchg.h>
#include <asm/barrier.h>
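
/*
 * PPC_ATOMIC_ENTRY_BARRIER/PPC_ATOMIC_EXIT_BARRIER (pulled in above) bracket
 * the value-returning operations below so that they behave as full memory
 * barriers, as the generic atomic API expects.  The non-returning operations
 * (atomic_add() etc.) intentionally omit them and impose no ordering.
 */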

#define ATOMIC_INIT(i) { (i) }

static __inline__ int atomic_read(const atomic_t *v)
{
	int t;

	__asm__ __volatile__("lwz%U1%X1 %0,%1" : "=r"(t) : "m"(v->counter));

	return t;
}

static __inline__ void atomic_set(atomic_t *v, int i)
{
	__asm__ __volatile__("stw%U0%X0 %1,%0" : "=m"(v->counter) : "r"(i));
}
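
/*
 * Usage sketch (illustrative only; the counter name is hypothetical):
 *
 *	static atomic_t pkt_count = ATOMIC_INIT(0);
 *
 *	atomic_set(&pkt_count, 0);	<- plain store, no ordering implied
 *	atomic_inc(&pkt_count);		<- lock-free read-modify-write
 *	if (atomic_read(&pkt_count) > 64)
 *		...
 */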

static __inline__ void atomic_add(int a, atomic_t *v)
{
	int t;

	__asm__ __volatile__(
"1:	lwarx	%0,0,%3		# atomic_add\n\
	add	%0,%2,%0\n"
	PPC405_ERR77(0,%3)
"	stwcx.	%0,0,%3 \n\
	bne-	1b"
	: "=&r" (t), "+m" (v->counter)
	: "r" (a), "r" (&v->counter)
	: "cc");
}

static __inline__ int atomic_add_return(int a, atomic_t *v)
{
	int t;

	__asm__ __volatile__(
	PPC_ATOMIC_ENTRY_BARRIER
"1:	lwarx	%0,0,%2		# atomic_add_return\n\
	add	%0,%1,%0\n"
	PPC405_ERR77(0,%2)
"	stwcx.	%0,0,%2 \n\
	bne-	1b"
	PPC_ATOMIC_EXIT_BARRIER
	: "=&r" (t)
	: "r" (a), "r" (&v->counter)
	: "cc", "memory");

	return t;
}
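
/*
 * The loop above is the usual load-reserve/store-conditional pattern:
 * lwarx loads the word and takes a reservation, stwcx. stores only if the
 * reservation still holds, and bne- retries otherwise.  In rough C terms:
 *
 *	do {
 *		old = load_reserve(&v->counter);	(lwarx)
 *		new = old + a;
 *	} while (!store_conditional(&v->counter, new));	(stwcx./bne-)
 *
 * load_reserve()/store_conditional() are pseudo-helpers for exposition
 * only.  PPC405_ERR77() inserts a dcbt before stwcx. to work around
 * erratum #77 on the PPC405; it expands to nothing elsewhere.
 */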

#define atomic_add_negative(a, v) (atomic_add_return((a), (v)) < 0)

static __inline__ void atomic_sub(int a, atomic_t *v)
{
	int t;

	__asm__ __volatile__(
"1:	lwarx	%0,0,%3		# atomic_sub\n\
	subf	%0,%2,%0\n"
	PPC405_ERR77(0,%3)
"	stwcx.	%0,0,%3 \n\
	bne-	1b"
	: "=&r" (t), "+m" (v->counter)
	: "r" (a), "r" (&v->counter)
	: "cc");
}

static __inline__ int atomic_sub_return(int a, atomic_t *v)
{
	int t;

	__asm__ __volatile__(
	PPC_ATOMIC_ENTRY_BARRIER
"1:	lwarx	%0,0,%2		# atomic_sub_return\n\
	subf	%0,%1,%0\n"
	PPC405_ERR77(0,%2)
"	stwcx.	%0,0,%2 \n\
	bne-	1b"
	PPC_ATOMIC_EXIT_BARRIER
	: "=&r" (t)
	: "r" (a), "r" (&v->counter)
	: "cc", "memory");

	return t;
}

static __inline__ void atomic_inc(atomic_t *v)
{
	int t;

	__asm__ __volatile__(
"1:	lwarx	%0,0,%2		# atomic_inc\n\
	addic	%0,%0,1\n"
	PPC405_ERR77(0,%2)
"	stwcx.	%0,0,%2 \n\
	bne-	1b"
	: "=&r" (t), "+m" (v->counter)
	: "r" (&v->counter)
	: "cc", "xer");
}
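
/*
 * addic (add immediate carrying) always updates the carry bit, which lives
 * in XER, hence the "xer" clobber on the inc/dec variants; plain add/subf
 * leave XER alone and only need "cc".
 */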

static __inline__ int atomic_inc_return(atomic_t *v)
{
	int t;

	__asm__ __volatile__(
	PPC_ATOMIC_ENTRY_BARRIER
"1:	lwarx	%0,0,%1		# atomic_inc_return\n\
	addic	%0,%0,1\n"
	PPC405_ERR77(0,%1)
"	stwcx.	%0,0,%1 \n\
	bne-	1b"
	PPC_ATOMIC_EXIT_BARRIER
	: "=&r" (t)
	: "r" (&v->counter)
	: "cc", "xer", "memory");

	return t;
}

/*
 * atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic_inc_and_test(v) (atomic_inc_return(v) == 0)

static __inline__ void atomic_dec(atomic_t *v)
{
	int t;

	__asm__ __volatile__(
"1:	lwarx	%0,0,%2		# atomic_dec\n\
	addic	%0,%0,-1\n"
	PPC405_ERR77(0,%2)
"	stwcx.	%0,0,%2\n\
	bne-	1b"
	: "=&r" (t), "+m" (v->counter)
	: "r" (&v->counter)
	: "cc", "xer");
}

static __inline__ int atomic_dec_return(atomic_t *v)
{
	int t;

	__asm__ __volatile__(
	PPC_ATOMIC_ENTRY_BARRIER
"1:	lwarx	%0,0,%1		# atomic_dec_return\n\
	addic	%0,%0,-1\n"
	PPC405_ERR77(0,%1)
"	stwcx.	%0,0,%1\n\
	bne-	1b"
	PPC_ATOMIC_EXIT_BARRIER
	: "=&r" (t)
	: "r" (&v->counter)
	: "cc", "xer", "memory");

	return t;
}

#define atomic_cmpxchg(v, o, n) (cmpxchg(&((v)->counter), (o), (n)))
#define atomic_xchg(v, new) (xchg(&((v)->counter), new))
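
/*
 * atomic_cmpxchg() supports arbitrary read-modify-write loops.  A sketch
 * (the helper name is hypothetical, not part of this header):
 *
 *	static inline void atomic_max(atomic_t *v, int i)
 *	{
 *		int old = atomic_read(v);
 *
 *		while (old < i) {
 *			int seen = atomic_cmpxchg(v, old, i);
 *			if (seen == old)
 *				break;		(swap succeeded)
 *			old = seen;		(lost a race, retry)
 *		}
 *	}
 */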

/**
 * __atomic_add_unless - add unless the number is a given value
 * @v: pointer of type atomic_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as it was not @u.
 * Returns the old value of @v.
 */
static __inline__ int __atomic_add_unless(atomic_t *v, int a, int u)
{
	int t;

	__asm__ __volatile__ (
	PPC_ATOMIC_ENTRY_BARRIER
"1:	lwarx	%0,0,%1		# __atomic_add_unless\n\
	cmpw	0,%0,%3 \n\
	beq-	2f \n\
	add	%0,%2,%0 \n"
	PPC405_ERR77(0,%2)
"	stwcx.	%0,0,%1 \n\
	bne-	1b \n"
	PPC_ATOMIC_EXIT_BARRIER
"	subf	%0,%2,%0 \n\
2:"
	: "=&r" (t)
	: "r" (&v->counter), "r" (a), "r" (u)
	: "cc", "memory");

	return t;
}
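
/*
 * Because __atomic_add_unless() returns the old value, "!= u" tells the
 * caller whether the add happened; the generic atomic_add_unless() wrapper
 * is defined exactly that way.  Illustrative use:
 *
 *	if (__atomic_add_unless(&cnt, 1, 0) != 0)
 *		...	(cnt was non-zero and has been incremented)
 */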

/**
 * atomic_inc_not_zero - increment unless the number is zero
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1, so long as @v is non-zero.
 * Returns non-zero if @v was non-zero, and zero otherwise.
 */
static __inline__ int atomic_inc_not_zero(atomic_t *v)
{
	int t1, t2;

	__asm__ __volatile__ (
	PPC_ATOMIC_ENTRY_BARRIER
"1:	lwarx	%0,0,%2		# atomic_inc_not_zero\n\
	cmpwi	0,%0,0\n\
	beq-	2f\n\
	addic	%1,%0,1\n"
	PPC405_ERR77(0,%2)
"	stwcx.	%1,0,%2\n\
	bne-	1b\n"
	PPC_ATOMIC_EXIT_BARRIER
	"\n\
2:"
	: "=&r" (t1), "=&r" (t2)
	: "r" (&v->counter)
	: "cc", "xer", "memory");

	return t1;
}
#define atomic_inc_not_zero(v) atomic_inc_not_zero((v))
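
/*
 * The self-referential #define above tells the generic headers that the
 * architecture provides atomic_inc_not_zero().  Its typical use is taking
 * a reference only while an object is still live (names hypothetical):
 *
 *	if (!atomic_inc_not_zero(&obj->refs))
 *		return NULL;	(already being torn down)
 */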

#define atomic_sub_and_test(a, v) (atomic_sub_return((a), (v)) == 0)
#define atomic_dec_and_test(v) (atomic_dec_return((v)) == 0)
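
/*
 * atomic_dec_and_test() is the release half of the classic refcounting
 * pattern (names hypothetical):
 *
 *	if (atomic_dec_and_test(&obj->refs))
 *		kfree(obj);	(we dropped the last reference)
 */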

/*
 * Atomically test *v and decrement if it is greater than 0.
 * The function returns the old value of *v minus 1, even if
 * the atomic variable, v, was not decremented.
 */
static __inline__ int atomic_dec_if_positive(atomic_t *v)
{
	int t;

	__asm__ __volatile__(
	PPC_ATOMIC_ENTRY_BARRIER
"1:	lwarx	%0,0,%1		# atomic_dec_if_positive\n\
	cmpwi	%0,1\n\
	addi	%0,%0,-1\n\
	blt-	2f\n"
	PPC405_ERR77(0,%1)
"	stwcx.	%0,0,%1\n\
	bne-	1b"
	PPC_ATOMIC_EXIT_BARRIER
	"\n\
2:"	: "=&b" (t)
	: "r" (&v->counter)
	: "cc", "memory");

	return t;
}
#define atomic_dec_if_positive atomic_dec_if_positive
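
/*
 * Note the "=&b" output constraint: addi reads r0 as the constant 0, so
 * the temporary must live in a base register (any GPR but r0).  Usage
 * sketch (the pool name is hypothetical):
 *
 *	if (atomic_dec_if_positive(&pool->slots) < 0)
 *		return -EBUSY;	(no slot free; counter left unchanged)
 */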

#ifdef __powerpc64__

#define ATOMIC64_INIT(i) { (i) }

static __inline__ long atomic64_read(const atomic64_t *v)
{
	long t;

	__asm__ __volatile__("ld%U1%X1 %0,%1" : "=r"(t) : "m"(v->counter));

	return t;
}

static __inline__ void atomic64_set(atomic64_t *v, long i)
{
	__asm__ __volatile__("std%U0%X0 %1,%0" : "=m"(v->counter) : "r"(i));
}

static __inline__ void atomic64_add(long a, atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
"1:	ldarx	%0,0,%3		# atomic64_add\n\
	add	%0,%2,%0\n\
	stdcx.	%0,0,%3 \n\
	bne-	1b"
	: "=&r" (t), "+m" (v->counter)
	: "r" (a), "r" (&v->counter)
	: "cc");
}
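
/*
 * The atomic64_* variants mirror the 32-bit ones, with ldarx/stdcx.
 * replacing lwarx/stwcx.  No PPC405_ERR77() workaround is needed: the
 * 405 is a 32-bit core, so that erratum cannot affect __powerpc64__
 * builds.
 */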

static __inline__ long atomic64_add_return(long a, atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
	PPC_ATOMIC_ENTRY_BARRIER
"1:	ldarx	%0,0,%2		# atomic64_add_return\n\
	add	%0,%1,%0\n\
	stdcx.	%0,0,%2 \n\
	bne-	1b"
	PPC_ATOMIC_EXIT_BARRIER
	: "=&r" (t)
	: "r" (a), "r" (&v->counter)
	: "cc", "memory");

	return t;
}

#define atomic64_add_negative(a, v) (atomic64_add_return((a), (v)) < 0)

static __inline__ void atomic64_sub(long a, atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
"1:	ldarx	%0,0,%3		# atomic64_sub\n\
	subf	%0,%2,%0\n\
	stdcx.	%0,0,%3 \n\
	bne-	1b"
	: "=&r" (t), "+m" (v->counter)
	: "r" (a), "r" (&v->counter)
	: "cc");
}

static __inline__ long atomic64_sub_return(long a, atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
	PPC_ATOMIC_ENTRY_BARRIER
"1:	ldarx	%0,0,%2		# atomic64_sub_return\n\
	subf	%0,%1,%0\n\
	stdcx.	%0,0,%2 \n\
	bne-	1b"
	PPC_ATOMIC_EXIT_BARRIER
	: "=&r" (t)
	: "r" (a), "r" (&v->counter)
	: "cc", "memory");

	return t;
}

static __inline__ void atomic64_inc(atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
"1:	ldarx	%0,0,%2		# atomic64_inc\n\
	addic	%0,%0,1\n\
	stdcx.	%0,0,%2 \n\
	bne-	1b"
	: "=&r" (t), "+m" (v->counter)
	: "r" (&v->counter)
	: "cc", "xer");
}

static __inline__ long atomic64_inc_return(atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
	PPC_ATOMIC_ENTRY_BARRIER
"1:	ldarx	%0,0,%1		# atomic64_inc_return\n\
	addic	%0,%0,1\n\
	stdcx.	%0,0,%1 \n\
	bne-	1b"
	PPC_ATOMIC_EXIT_BARRIER
	: "=&r" (t)
	: "r" (&v->counter)
	: "cc", "xer", "memory");

	return t;
}

/*
 * atomic64_inc_and_test - increment and test
 * @v: pointer of type atomic64_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic64_inc_and_test(v) (atomic64_inc_return(v) == 0)

static __inline__ void atomic64_dec(atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
"1:	ldarx	%0,0,%2		# atomic64_dec\n\
	addic	%0,%0,-1\n\
	stdcx.	%0,0,%2\n\
	bne-	1b"
	: "=&r" (t), "+m" (v->counter)
	: "r" (&v->counter)
	: "cc", "xer");
}

static __inline__ long atomic64_dec_return(atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
	PPC_ATOMIC_ENTRY_BARRIER
"1:	ldarx	%0,0,%1		# atomic64_dec_return\n\
	addic	%0,%0,-1\n\
	stdcx.	%0,0,%1\n\
	bne-	1b"
	PPC_ATOMIC_EXIT_BARRIER
	: "=&r" (t)
	: "r" (&v->counter)
	: "cc", "xer", "memory");

	return t;
}

#define atomic64_sub_and_test(a, v) (atomic64_sub_return((a), (v)) == 0)
#define atomic64_dec_and_test(v) (atomic64_dec_return((v)) == 0)

/*
 * Atomically test *v and decrement if it is greater than 0.
 * The function returns the old value of *v minus 1.
 */
static __inline__ long atomic64_dec_if_positive(atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
	PPC_ATOMIC_ENTRY_BARRIER
"1:	ldarx	%0,0,%1		# atomic64_dec_if_positive\n\
	addic.	%0,%0,-1\n\
	blt-	2f\n\
	stdcx.	%0,0,%1\n\
	bne-	1b"
	PPC_ATOMIC_EXIT_BARRIER
	"\n\
2:"	: "=&r" (t)
	: "r" (&v->counter)
	: "cc", "xer", "memory");

	return t;
}

#define atomic64_cmpxchg(v, o, n) (cmpxchg(&((v)->counter), (o), (n)))
#define atomic64_xchg(v, new) (xchg(&((v)->counter), new))

/**
 * atomic64_add_unless - add unless the number is a given value
 * @v: pointer of type atomic64_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as @v was not @u.
 * Returns non-zero if the add happened, and zero otherwise.
 */
static __inline__ int atomic64_add_unless(atomic64_t *v, long a, long u)
{
	long t;

	__asm__ __volatile__ (
	PPC_ATOMIC_ENTRY_BARRIER
"1:	ldarx	%0,0,%1		# atomic64_add_unless\n\
	cmpd	0,%0,%3 \n\
	beq-	2f \n\
	add	%0,%2,%0 \n"
"	stdcx.	%0,0,%1 \n\
	bne-	1b \n"
	PPC_ATOMIC_EXIT_BARRIER
"	subf	%0,%2,%0 \n\
2:"
	: "=&r" (t)
	: "r" (&v->counter), "r" (a), "r" (u)
	: "cc", "memory");

	return t != u;
}

/**
 * atomic64_inc_not_zero - increment unless the number is zero
 * @v: pointer of type atomic64_t
 *
 * Atomically increments @v by 1, so long as @v is non-zero.
 * Returns non-zero if @v was non-zero, and zero otherwise.
 */
static __inline__ long atomic64_inc_not_zero(atomic64_t *v)
{
	long t1, t2;

	__asm__ __volatile__ (
	PPC_ATOMIC_ENTRY_BARRIER
"1:	ldarx	%0,0,%2		# atomic64_inc_not_zero\n\
	cmpdi	0,%0,0\n\
	beq-	2f\n\
	addic	%1,%0,1\n\
	stdcx.	%1,0,%2\n\
	bne-	1b\n"
	PPC_ATOMIC_EXIT_BARRIER
	"\n\
2:"
	: "=&r" (t1), "=&r" (t2)
	: "r" (&v->counter)
	: "cc", "xer", "memory");

	return t1;
}

#endif /* __powerpc64__ */

#endif /* __KERNEL__ */
#endif /* _ASM_POWERPC_ATOMIC_H_ */