#ifndef _ASM_TILE_ATOMIC_64_H
#define _ASM_TILE_ATOMIC_64_H

#ifndef __ASSEMBLY__

#include <asm/barrier.h>
#include <arch/spr_def.h>

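/*
 * First, the 32-bit atomic ops, built on the native 32-bit
 * fetch-and-op instructions (fetchadd4, fetchand4, fetchor4)
 * and cmpexch4.
 */
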
#define atomic_set(v, i) WRITE_ONCE((v)->counter, (i))

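/*
 * Routines that update memory and return a value issue an smp_mb()
 * before the atomic instruction, since Linux requires full barrier
 * semantics around such operations.  atomic_add_return() follows the
 * fetchadd with only a compiler barrier(): the "+ i" add depends on
 * the fetchadd result, so the returned value cannot be computed until
 * the atomic update has completed.
 */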
static inline int atomic_add_return(int i, atomic_t *v)
{
	int val;
	smp_mb();
	val = __insn_fetchadd4((void *)&v->counter, i) + i;
	barrier();
	return val;
}

#define ATOMIC_OPS(op)						\
static inline int atomic_fetch_##op(int i, atomic_t *v)	\
{								\
	int val;						\
	smp_mb();						\
	val = __insn_fetch##op##4((void *)&v->counter, i);	\
	smp_mb();						\
	return val;						\
}								\
static inline void atomic_##op(int i, atomic_t *v)		\
{								\
	__insn_fetch##op##4((void *)&v->counter, i);		\
}

ATOMIC_OPS(add)
ATOMIC_OPS(and)
ATOMIC_OPS(or)

#undef ATOMIC_OPS

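/*
 * Unlike add/and/or, xor is implemented with a compare-exchange loop:
 * cmpexch4 compares memory against the value previously written to
 * SPR_CMPEXCH_VALUE and, on a match, stores the new value; either way
 * it returns the prior contents of memory, so the loop retries until
 * no other update has intervened.
 */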
static inline int atomic_fetch_xor(int i, atomic_t *v)
{
	int guess, oldval = v->counter;
	smp_mb();
	do {
		guess = oldval;
		__insn_mtspr(SPR_CMPEXCH_VALUE, guess);
		oldval = __insn_cmpexch4(&v->counter, guess ^ i);
	} while (guess != oldval);
	smp_mb();
	return oldval;
}

static inline void atomic_xor(int i, atomic_t *v)
{
	int guess, oldval = v->counter;
	do {
		guess = oldval;
		__insn_mtspr(SPR_CMPEXCH_VALUE, guess);
		oldval = __insn_cmpexch4(&v->counter, guess ^ i);
	} while (guess != oldval);
}

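/*
 * Atomically add @a to @v, unless @v already holds @u.
 * Returns the old value of @v; cmpxchg() provides the memory barriers.
 */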
static inline int __atomic_add_unless(atomic_t *v, int a, int u)
{
	int guess, oldval = v->counter;
	do {
		if (oldval == u)
			break;
		guess = oldval;
		oldval = cmpxchg(&v->counter, guess, guess + a);
	} while (guess != oldval);
	return oldval;
}
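
/*
 * Now the true 64-bit operations, using the full-width fetchadd,
 * fetchand, fetchor and cmpexch instructions.
 */
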
#define ATOMIC64_INIT(i) { (i) }

#define atomic64_read(v) READ_ONCE((v)->counter)
#define atomic64_set(v, i) WRITE_ONCE((v)->counter, (i))

static inline long atomic64_add_return(long i, atomic64_t *v)
{
	long val;
	smp_mb();
	val = __insn_fetchadd((void *)&v->counter, i) + i;
	barrier();
	return val;
}

#define ATOMIC64_OPS(op)					\
static inline long atomic64_fetch_##op(long i, atomic64_t *v)	\
{								\
	long val;						\
	smp_mb();						\
	val = __insn_fetch##op((void *)&v->counter, i);		\
	smp_mb();						\
	return val;						\
}								\
static inline void atomic64_##op(long i, atomic64_t *v)	\
{								\
	__insn_fetch##op((void *)&v->counter, i);		\
}

ATOMIC64_OPS(add)
ATOMIC64_OPS(and)
ATOMIC64_OPS(or)

#undef ATOMIC64_OPS

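/* As in the 32-bit case, xor is synthesized from a cmpexch loop. */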
static inline long atomic64_fetch_xor(long i, atomic64_t *v)
{
	long guess, oldval = v->counter;
	smp_mb();
	do {
		guess = oldval;
		__insn_mtspr(SPR_CMPEXCH_VALUE, guess);
		oldval = __insn_cmpexch(&v->counter, guess ^ i);
	} while (guess != oldval);
	smp_mb();
	return oldval;
}

static inline void atomic64_xor(long i, atomic64_t *v)
{
	long guess, oldval = v->counter;
	do {
		guess = oldval;
		__insn_mtspr(SPR_CMPEXCH_VALUE, guess);
		oldval = __insn_cmpexch(&v->counter, guess ^ i);
	} while (guess != oldval);
}

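/*
 * Atomically add @a to @v, unless @v already holds @u.
 * Returns non-zero if the add was performed.
 */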
static inline long atomic64_add_unless(atomic64_t *v, long a, long u)
{
	long guess, oldval = v->counter;
	do {
		if (oldval == u)
			break;
		guess = oldval;
		oldval = cmpxchg(&v->counter, guess, guess + a);
	} while (guess != oldval);
	return oldval != u;
}

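/* The remaining 64-bit operations are derived from the primitives above. */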
#define atomic64_sub_return(i, v) atomic64_add_return(-(i), (v))
#define atomic64_fetch_sub(i, v) atomic64_fetch_add(-(i), (v))
#define atomic64_sub(i, v) atomic64_add(-(i), (v))
#define atomic64_inc_return(v) atomic64_add_return(1, (v))
#define atomic64_dec_return(v) atomic64_sub_return(1, (v))
#define atomic64_inc(v) atomic64_add(1, (v))
#define atomic64_dec(v) atomic64_sub(1, (v))

#define atomic64_inc_and_test(v) (atomic64_inc_return(v) == 0)
#define atomic64_dec_and_test(v) (atomic64_dec_return(v) == 0)
#define atomic64_sub_and_test(i, v) (atomic64_sub_return((i), (v)) == 0)
#define atomic64_add_negative(i, v) (atomic64_add_return((i), (v)) < 0)

#define atomic64_inc_not_zero(v) atomic64_add_unless((v), 1, 0)

#endif /* !__ASSEMBLY__ */

#endif /* _ASM_TILE_ATOMIC_64_H */