/*
 * Atomic operations that C can't guarantee us.  Useful for
 * resource counting etc..
 *
 * But use these as seldom as possible since they are much slower
 * than regular operations.
 *
 * This file is subject to the terms and conditions of the GNU General Public
 * License.  See the file "COPYING" in the main directory of this archive
 * for more details.
 */
#ifndef _ASM_ATOMIC_H
#define _ASM_ATOMIC_H

#include <linux/irqflags.h>
#include <linux/types.h>
#include <asm/barrier.h>
#include <asm/cpu-features.h>
#include <asm/war.h>
#include <asm/system.h>

#define ATOMIC_INIT(i)		{ (i) }
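
/*
 * Illustrative usage (example only, not part of this header): an
 * atomic_t may be initialized statically,
 *
 *	static atomic_t foo_count = ATOMIC_INIT(0);
 *
 * where foo_count is a hypothetical counter, or at run time with
 * atomic_set(&foo_count, 0).
 */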

/*
 * atomic_read - read atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically reads the value of @v.
 */
#define atomic_read(v)		(*(volatile int *)&(v)->counter)

/*
 * atomic_set - set atomic variable
 * @v: pointer of type atomic_t
 * @i: required value
 *
 * Atomically sets the value of @v to @i.
 */
#define atomic_set(v, i)	((v)->counter = (i))

/*
 * atomic_add - add integer to atomic variable
 * @i: integer value to add
 * @v: pointer of type atomic_t
 *
 * Atomically adds @i to @v.
 */
static __inline__ void atomic_add(int i, atomic_t * v)
{
	if (kernel_uses_llsc && R10000_LLSC_WAR) {
		int temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	ll	%0, %1		# atomic_add		\n"
		"	addu	%0, %2					\n"
		"	sc	%0, %1					\n"
		"	beqzl	%0, 1b					\n"
		"	.set	mips0					\n"
		: "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter));
	} else if (kernel_uses_llsc) {
		int temp;

		do {
			__asm__ __volatile__(
			"	.set	mips3				\n"
			"	ll	%0, %1		# atomic_add	\n"
			"	addu	%0, %2				\n"
			"	sc	%0, %1				\n"
			"	.set	mips0				\n"
			: "=&r" (temp), "=m" (v->counter)
			: "Ir" (i), "m" (v->counter));
		} while (unlikely(!temp));
	} else {
		unsigned long flags;

		raw_local_irq_save(flags);
		v->counter += i;
		raw_local_irq_restore(flags);
	}
}

/*
 * atomic_sub - subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v.
 */
static __inline__ void atomic_sub(int i, atomic_t * v)
{
	if (kernel_uses_llsc && R10000_LLSC_WAR) {
		int temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	ll	%0, %1		# atomic_sub		\n"
		"	subu	%0, %2					\n"
		"	sc	%0, %1					\n"
		"	beqzl	%0, 1b					\n"
		"	.set	mips0					\n"
		: "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter));
	} else if (kernel_uses_llsc) {
		int temp;

		do {
			__asm__ __volatile__(
			"	.set	mips3				\n"
			"	ll	%0, %1		# atomic_sub	\n"
			"	subu	%0, %2				\n"
			"	sc	%0, %1				\n"
			"	.set	mips0				\n"
			: "=&r" (temp), "=m" (v->counter)
			: "Ir" (i), "m" (v->counter));
		} while (unlikely(!temp));
	} else {
		unsigned long flags;

		raw_local_irq_save(flags);
		v->counter -= i;
		raw_local_irq_restore(flags);
	}
}

/*
 * Same as above, but return the result value
 */
static __inline__ int atomic_add_return(int i, atomic_t * v)
{
	int result;

	smp_mb__before_llsc();

	if (kernel_uses_llsc && R10000_LLSC_WAR) {
		int temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	ll	%1, %2		# atomic_add_return	\n"
		"	addu	%0, %1, %3				\n"
		"	sc	%0, %2					\n"
		"	beqzl	%0, 1b					\n"
		"	addu	%0, %1, %3				\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else if (kernel_uses_llsc) {
		int temp;

		do {
			__asm__ __volatile__(
			"	.set	mips3				\n"
			"	ll	%1, %2	# atomic_add_return	\n"
			"	addu	%0, %1, %3			\n"
			"	sc	%0, %2				\n"
			"	.set	mips0				\n"
			: "=&r" (result), "=&r" (temp), "=m" (v->counter)
			: "Ir" (i), "m" (v->counter)
			: "memory");
		} while (unlikely(!result));

		result = temp + i;
	} else {
		unsigned long flags;

		raw_local_irq_save(flags);
		result = v->counter;
		result += i;
		v->counter = result;
		raw_local_irq_restore(flags);
	}

	smp_llsc_mb();

	return result;
}

static __inline__ int atomic_sub_return(int i, atomic_t * v)
{
	int result;

	smp_mb__before_llsc();

	if (kernel_uses_llsc && R10000_LLSC_WAR) {
		int temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	ll	%1, %2		# atomic_sub_return	\n"
		"	subu	%0, %1, %3				\n"
		"	sc	%0, %2					\n"
		"	beqzl	%0, 1b					\n"
		"	subu	%0, %1, %3				\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");

		result = temp - i;
	} else if (kernel_uses_llsc) {
		int temp;

		do {
			__asm__ __volatile__(
			"	.set	mips3				\n"
			"	ll	%1, %2	# atomic_sub_return	\n"
			"	subu	%0, %1, %3			\n"
			"	sc	%0, %2				\n"
			"	.set	mips0				\n"
			: "=&r" (result), "=&r" (temp), "=m" (v->counter)
			: "Ir" (i), "m" (v->counter)
			: "memory");
		} while (unlikely(!result));

		result = temp - i;
	} else {
		unsigned long flags;

		raw_local_irq_save(flags);
		result = v->counter;
		result -= i;
		v->counter = result;
		raw_local_irq_restore(flags);
	}

	smp_llsc_mb();

	return result;
}

/*
 * atomic_sub_if_positive - conditionally subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically test @v and subtract @i if @v is greater than or equal to @i.
 * The function returns the old value of @v minus @i.
 */
static __inline__ int atomic_sub_if_positive(int i, atomic_t * v)
{
	int result;

	smp_mb__before_llsc();

	if (kernel_uses_llsc && R10000_LLSC_WAR) {
		int temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	ll	%1, %2		# atomic_sub_if_positive\n"
		"	subu	%0, %1, %3				\n"
		"	bltz	%0, 1f					\n"
		"	sc	%0, %2					\n"
		"	.set	noreorder				\n"
		"	beqzl	%0, 1b					\n"
		"	 subu	%0, %1, %3				\n"
		"	.set	reorder					\n"
		"1:							\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else if (kernel_uses_llsc) {
		int temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	ll	%1, %2		# atomic_sub_if_positive\n"
		"	subu	%0, %1, %3				\n"
		"	bltz	%0, 1f					\n"
		"	sc	%0, %2					\n"
		"	.set	noreorder				\n"
		"	beqz	%0, 1b					\n"
		"	 subu	%0, %1, %3				\n"
		"	.set	reorder					\n"
		"1:							\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else {
		unsigned long flags;

		raw_local_irq_save(flags);
		result = v->counter;
		result -= i;
		if (result >= 0)
			v->counter = result;
		raw_local_irq_restore(flags);
	}

	smp_llsc_mb();

	return result;
}

#define atomic_cmpxchg(v, o, n) (cmpxchg(&((v)->counter), (o), (n)))
#define atomic_xchg(v, new) (xchg(&((v)->counter), (new)))
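
/*
 * Illustrative usage (example only): atomic_cmpxchg() returns the value
 * that was in the counter before the operation, so a typical lock-free
 * update loop reads the old value, computes a new one, and retries until
 * no other CPU raced in between; transform() below is a hypothetical
 * helper:
 *
 *	int old, new;
 *	do {
 *		old = atomic_read(&v);
 *		new = transform(old);
 *	} while (atomic_cmpxchg(&v, old, new) != old);
 *
 * atomic_add_unless() below is exactly this pattern.
 */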

/**
 * atomic_add_unless - add unless the number is a given value
 * @v: pointer of type atomic_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as it was not @u.
 * Returns non-zero if @v was not @u, and zero otherwise.
 */
static __inline__ int atomic_add_unless(atomic_t *v, int a, int u)
{
	int c, old;
	c = atomic_read(v);
	for (;;) {
		if (unlikely(c == (u)))
			break;
		old = atomic_cmpxchg((v), c, c + (a));
		if (likely(old == c))
			break;
		c = old;
	}
	return c != (u);
}
#define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0)
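
/*
 * Illustrative usage (hypothetical, not part of this header):
 * atomic_inc_not_zero() is the usual building block for taking a
 * reference only while an object is still live, e.g.
 *
 *	if (!atomic_inc_not_zero(&obj->refcnt))
 *		return NULL;	(object is already being torn down)
 */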

#define atomic_dec_return(v) atomic_sub_return(1, (v))
#define atomic_inc_return(v) atomic_add_return(1, (v))

/*
 * atomic_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
#define atomic_sub_and_test(i, v) (atomic_sub_return((i), (v)) == 0)

/*
 * atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic_inc_and_test(v) (atomic_inc_return(v) == 0)

/*
 * atomic_dec_and_test - decrement by 1 and test
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
#define atomic_dec_and_test(v) (atomic_sub_return(1, (v)) == 0)

/*
 * atomic_dec_if_positive - decrement by 1 if old value positive
 * @v: pointer of type atomic_t
 */
#define atomic_dec_if_positive(v)	atomic_sub_if_positive(1, v)
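
/*
 * Illustrative usage (hypothetical): atomic_dec_if_positive() only
 * stores the decremented value when the result stays non-negative,
 * which suits semaphore-like counts:
 *
 *	if (atomic_dec_if_positive(&sem_count) < 0)
 *		return -EAGAIN;	(no resource was available)
 */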

/*
 * atomic_inc - increment atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1.
 */
#define atomic_inc(v) atomic_add(1, (v))

/*
 * atomic_dec - decrement atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1.
 */
#define atomic_dec(v) atomic_sub(1, (v))

/*
 * atomic_add_negative - add and test if negative
 * @i: integer value to add
 * @v: pointer of type atomic_t
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * the result is greater than or equal to zero.
 */
#define atomic_add_negative(i, v) (atomic_add_return(i, (v)) < 0)

#ifdef CONFIG_64BIT

#define ATOMIC64_INIT(i)	{ (i) }

/*
 * atomic64_read - read atomic variable
 * @v: pointer of type atomic64_t
 *
 * Atomically reads the value of @v.
 */
#define atomic64_read(v)	(*(volatile long *)&(v)->counter)

/*
 * atomic64_set - set atomic variable
 * @v: pointer of type atomic64_t
 * @i: required value
 *
 * Atomically sets the value of @v to @i.
 */
#define atomic64_set(v, i)	((v)->counter = (i))

/*
 * atomic64_add - add integer to atomic variable
 * @i: integer value to add
 * @v: pointer of type atomic64_t
 *
 * Atomically adds @i to @v.
 */
static __inline__ void atomic64_add(long i, atomic64_t * v)
{
	if (kernel_uses_llsc && R10000_LLSC_WAR) {
		long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	lld	%0, %1		# atomic64_add		\n"
		"	daddu	%0, %2					\n"
		"	scd	%0, %1					\n"
		"	beqzl	%0, 1b					\n"
		"	.set	mips0					\n"
		: "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter));
	} else if (kernel_uses_llsc) {
		long temp;

		do {
			__asm__ __volatile__(
			"	.set	mips3				\n"
			"	lld	%0, %1		# atomic64_add	\n"
			"	daddu	%0, %2				\n"
			"	scd	%0, %1				\n"
			"	.set	mips0				\n"
			: "=&r" (temp), "=m" (v->counter)
			: "Ir" (i), "m" (v->counter));
		} while (unlikely(!temp));
	} else {
		unsigned long flags;

		raw_local_irq_save(flags);
		v->counter += i;
		raw_local_irq_restore(flags);
	}
}

/*
 * atomic64_sub - subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic64_t
 *
 * Atomically subtracts @i from @v.
 */
static __inline__ void atomic64_sub(long i, atomic64_t * v)
{
	if (kernel_uses_llsc && R10000_LLSC_WAR) {
		long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	lld	%0, %1		# atomic64_sub		\n"
		"	dsubu	%0, %2					\n"
		"	scd	%0, %1					\n"
		"	beqzl	%0, 1b					\n"
		"	.set	mips0					\n"
		: "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter));
	} else if (kernel_uses_llsc) {
		long temp;

		do {
			__asm__ __volatile__(
			"	.set	mips3				\n"
			"	lld	%0, %1		# atomic64_sub	\n"
			"	dsubu	%0, %2				\n"
			"	scd	%0, %1				\n"
			"	.set	mips0				\n"
			: "=&r" (temp), "=m" (v->counter)
			: "Ir" (i), "m" (v->counter));
		} while (unlikely(!temp));
	} else {
		unsigned long flags;

		raw_local_irq_save(flags);
		v->counter -= i;
		raw_local_irq_restore(flags);
	}
}

/*
 * Same as above, but return the result value
 */
static __inline__ long atomic64_add_return(long i, atomic64_t * v)
{
	long result;

	smp_mb__before_llsc();

	if (kernel_uses_llsc && R10000_LLSC_WAR) {
		long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	lld	%1, %2		# atomic64_add_return	\n"
		"	daddu	%0, %1, %3				\n"
		"	scd	%0, %2					\n"
		"	beqzl	%0, 1b					\n"
		"	daddu	%0, %1, %3				\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else if (kernel_uses_llsc) {
		long temp;

		do {
			__asm__ __volatile__(
			"	.set	mips3				\n"
			"	lld	%1, %2	# atomic64_add_return	\n"
			"	daddu	%0, %1, %3			\n"
			"	scd	%0, %2				\n"
			"	.set	mips0				\n"
			: "=&r" (result), "=&r" (temp), "=m" (v->counter)
			: "Ir" (i), "m" (v->counter)
			: "memory");
		} while (unlikely(!result));

		result = temp + i;
	} else {
		unsigned long flags;

		raw_local_irq_save(flags);
		result = v->counter;
		result += i;
		v->counter = result;
		raw_local_irq_restore(flags);
	}

	smp_llsc_mb();

	return result;
}

static __inline__ long atomic64_sub_return(long i, atomic64_t * v)
{
	long result;

	smp_mb__before_llsc();

	if (kernel_uses_llsc && R10000_LLSC_WAR) {
		long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	lld	%1, %2		# atomic64_sub_return	\n"
		"	dsubu	%0, %1, %3				\n"
		"	scd	%0, %2					\n"
		"	beqzl	%0, 1b					\n"
		"	dsubu	%0, %1, %3				\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else if (kernel_uses_llsc) {
		long temp;

		do {
			__asm__ __volatile__(
			"	.set	mips3				\n"
			"	lld	%1, %2	# atomic64_sub_return	\n"
			"	dsubu	%0, %1, %3			\n"
			"	scd	%0, %2				\n"
			"	.set	mips0				\n"
			: "=&r" (result), "=&r" (temp), "=m" (v->counter)
			: "Ir" (i), "m" (v->counter)
			: "memory");
		} while (unlikely(!result));

		result = temp - i;
	} else {
		unsigned long flags;

		raw_local_irq_save(flags);
		result = v->counter;
		result -= i;
		v->counter = result;
		raw_local_irq_restore(flags);
	}

	smp_llsc_mb();

	return result;
}

/*
 * atomic64_sub_if_positive - conditionally subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic64_t
 *
 * Atomically test @v and subtract @i if @v is greater than or equal to @i.
 * The function returns the old value of @v minus @i.
 */
static __inline__ long atomic64_sub_if_positive(long i, atomic64_t * v)
{
	long result;

	smp_mb__before_llsc();

	if (kernel_uses_llsc && R10000_LLSC_WAR) {
		long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	lld	%1, %2		# atomic64_sub_if_positive\n"
		"	dsubu	%0, %1, %3				\n"
		"	bltz	%0, 1f					\n"
		"	scd	%0, %2					\n"
		"	.set	noreorder				\n"
		"	beqzl	%0, 1b					\n"
		"	 dsubu	%0, %1, %3				\n"
		"	.set	reorder					\n"
		"1:							\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else if (kernel_uses_llsc) {
		long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	lld	%1, %2		# atomic64_sub_if_positive\n"
		"	dsubu	%0, %1, %3				\n"
		"	bltz	%0, 1f					\n"
		"	scd	%0, %2					\n"
		"	.set	noreorder				\n"
		"	beqz	%0, 1b					\n"
		"	 dsubu	%0, %1, %3				\n"
		"	.set	reorder					\n"
		"1:							\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else {
		unsigned long flags;

		raw_local_irq_save(flags);
		result = v->counter;
		result -= i;
		if (result >= 0)
			v->counter = result;
		raw_local_irq_restore(flags);
	}

	smp_llsc_mb();

	return result;
}

#define atomic64_cmpxchg(v, o, n) \
	((__typeof__((v)->counter))cmpxchg(&((v)->counter), (o), (n)))
#define atomic64_xchg(v, new) (xchg(&((v)->counter), (new)))

/**
 * atomic64_add_unless - add unless the number is a given value
 * @v: pointer of type atomic64_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as it was not @u.
 * Returns non-zero if @v was not @u, and zero otherwise.
 */
static __inline__ int atomic64_add_unless(atomic64_t *v, long a, long u)
{
	long c, old;
	c = atomic64_read(v);
	for (;;) {
		if (unlikely(c == (u)))
			break;
		old = atomic64_cmpxchg((v), c, c + (a));
		if (likely(old == c))
			break;
		c = old;
	}
	return c != (u);
}

#define atomic64_inc_not_zero(v) atomic64_add_unless((v), 1, 0)

#define atomic64_dec_return(v) atomic64_sub_return(1, (v))
#define atomic64_inc_return(v) atomic64_add_return(1, (v))

/*
 * atomic64_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer of type atomic64_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
#define atomic64_sub_and_test(i, v) (atomic64_sub_return((i), (v)) == 0)

/*
 * atomic64_inc_and_test - increment and test
 * @v: pointer of type atomic64_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic64_inc_and_test(v) (atomic64_inc_return(v) == 0)

/*
 * atomic64_dec_and_test - decrement by 1 and test
 * @v: pointer of type atomic64_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
#define atomic64_dec_and_test(v) (atomic64_sub_return(1, (v)) == 0)

/*
 * atomic64_dec_if_positive - decrement by 1 if old value positive
 * @v: pointer of type atomic64_t
 */
#define atomic64_dec_if_positive(v)	atomic64_sub_if_positive(1, v)

/*
 * atomic64_inc - increment atomic variable
 * @v: pointer of type atomic64_t
 *
 * Atomically increments @v by 1.
 */
#define atomic64_inc(v) atomic64_add(1, (v))

/*
 * atomic64_dec - decrement atomic variable
 * @v: pointer of type atomic64_t
 *
 * Atomically decrements @v by 1.
 */
#define atomic64_dec(v) atomic64_sub(1, (v))

/*
 * atomic64_add_negative - add and test if negative
 * @i: integer value to add
 * @v: pointer of type atomic64_t
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * the result is greater than or equal to zero.
 */
#define atomic64_add_negative(i, v) (atomic64_add_return(i, (v)) < 0)

#else /* !CONFIG_64BIT */

#include <asm-generic/atomic64.h>

#endif /* CONFIG_64BIT */

/*
 * atomic*_return operations are serializing but none of the non-*_return
 * versions are.
 */
#define smp_mb__before_atomic_dec()	smp_mb__before_llsc()
#define smp_mb__after_atomic_dec()	smp_llsc_mb()
#define smp_mb__before_atomic_inc()	smp_mb__before_llsc()
#define smp_mb__after_atomic_inc()	smp_llsc_mb()

#include <asm-generic/atomic-long.h>

#endif /* _ASM_ATOMIC_H */