#ifndef _ASM_X86_ATOMIC64_64_H
#define _ASM_X86_ATOMIC64_64_H

#include <linux/types.h>
#include <asm/alternative.h>
#include <asm/cmpxchg.h>
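
/* The 64-bit atomic type */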
#define ATOMIC64_INIT(i)	{ (i) }
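
/**
 * atomic64_read - read atomic64 variable
 * @v: pointer of type atomic64_t
 *
 * Atomically reads the value of @v.
 * Doesn't imply a read memory barrier.
 */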
static inline long atomic64_read(const atomic64_t *v)
{
	return READ_ONCE((v)->counter);
}
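
/**
 * atomic64_set - set atomic64 variable
 * @v: pointer to type atomic64_t
 * @i: required value
 *
 * Atomically sets the value of @v to @i.
 */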
static inline void atomic64_set(atomic64_t *v, long i)
{
	WRITE_ONCE(v->counter, i);
}
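
/**
 * atomic64_add - add integer to atomic64 variable
 * @i: integer value to add
 * @v: pointer to type atomic64_t
 *
 * Atomically adds @i to @v.
 */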
static __always_inline void atomic64_add(long i, atomic64_t *v)
{
	asm volatile(LOCK_PREFIX "addq %1,%0"
		     : "=m" (v->counter)
		     : "er" (i), "m" (v->counter));
}
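
/**
 * atomic64_sub - subtract the atomic64 variable
 * @i: integer value to subtract
 * @v: pointer to type atomic64_t
 *
 * Atomically subtracts @i from @v.
 */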
static inline void atomic64_sub(long i, atomic64_t *v)
{
	asm volatile(LOCK_PREFIX "subq %1,%0"
		     : "=m" (v->counter)
		     : "er" (i), "m" (v->counter));
}
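
/**
 * atomic64_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer to type atomic64_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */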
static inline bool atomic64_sub_and_test(long i, atomic64_t *v)
{
	GEN_BINARY_RMWcc(LOCK_PREFIX "subq", v->counter, "er", i, "%0", e);
}
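
/**
 * atomic64_inc - increment atomic64 variable
 * @v: pointer to type atomic64_t
 *
 * Atomically increments @v by 1.
 */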
static __always_inline void atomic64_inc(atomic64_t *v)
{
	asm volatile(LOCK_PREFIX "incq %0"
		     : "=m" (v->counter)
		     : "m" (v->counter));
}
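
/**
 * atomic64_dec - decrement atomic64 variable
 * @v: pointer to type atomic64_t
 *
 * Atomically decrements @v by 1.
 */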
static __always_inline void atomic64_dec(atomic64_t *v)
{
	asm volatile(LOCK_PREFIX "decq %0"
		     : "=m" (v->counter)
		     : "m" (v->counter));
}
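
/**
 * atomic64_dec_and_test - decrement and test
 * @v: pointer to type atomic64_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */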
static inline bool atomic64_dec_and_test(atomic64_t *v)
{
	GEN_UNARY_RMWcc(LOCK_PREFIX "decq", v->counter, "%0", e);
}
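
/**
 * atomic64_inc_and_test - increment and test
 * @v: pointer to type atomic64_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */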
static inline bool atomic64_inc_and_test(atomic64_t *v)
{
	GEN_UNARY_RMWcc(LOCK_PREFIX "incq", v->counter, "%0", e);
}
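
/**
 * atomic64_add_negative - add and test if negative
 * @i: integer value to add
 * @v: pointer to type atomic64_t
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */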
static inline bool atomic64_add_negative(long i, atomic64_t *v)
{
	GEN_BINARY_RMWcc(LOCK_PREFIX "addq", v->counter, "er", i, "%0", s);
}
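
/**
 * atomic64_add_return - add and return
 * @i: integer value to add
 * @v: pointer to type atomic64_t
 *
 * Atomically adds @i to @v and returns @i + @v
 */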
static __always_inline long atomic64_add_return(long i, atomic64_t *v)
{
	return i + xadd(&v->counter, i);
}

static inline long atomic64_sub_return(long i, atomic64_t *v)
{
	return atomic64_add_return(-i, v);
}

static inline long atomic64_fetch_add(long i, atomic64_t *v)
{
	return xadd(&v->counter, i);
}

static inline long atomic64_fetch_sub(long i, atomic64_t *v)
{
	return xadd(&v->counter, -i);
}

#define atomic64_inc_return(v)  (atomic64_add_return(1, (v)))
#define atomic64_dec_return(v)  (atomic64_sub_return(1, (v)))

static inline long atomic64_cmpxchg(atomic64_t *v, long old, long new)
{
	return cmpxchg(&v->counter, old, new);
}

static inline long atomic64_xchg(atomic64_t *v, long new)
{
	return xchg(&v->counter, new);
}
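
/**
 * atomic64_add_unless - add unless the number is a given value
 * @v: pointer of type atomic64_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as it was not @u.
 * Returns true if the addition was performed, false if @v was @u.
 */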
static inline bool atomic64_add_unless(atomic64_t *v, long a, long u)
{
	long c, old;
	c = atomic64_read(v);
	for (;;) {
		if (unlikely(c == (u)))
			break;
		old = atomic64_cmpxchg((v), c, c + (a));
		if (likely(old == c))
			break;
		c = old;
	}
	return c != (u);
}

#define atomic64_inc_not_zero(v) atomic64_add_unless((v), 1, 0)
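
/*
 * atomic64_dec_if_positive - decrement by 1 if old value positive
 * @v: pointer of type atomic64_t
 *
 * The function returns the old value of *v minus 1, even if
 * the atomic variable, v, was not decremented.
 */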
static inline long atomic64_dec_if_positive(atomic64_t *v)
{
	long c, old, dec;
	c = atomic64_read(v);
	for (;;) {
		dec = c - 1;
		if (unlikely(dec < 0))
			break;
		old = atomic64_cmpxchg((v), c, dec);
		if (likely(old == c))
			break;
		c = old;
	}
	return dec;
}
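
/*
 * ATOMIC64_OP(op) generates atomic64_and(), atomic64_or() and atomic64_xor()
 * as a single LOCK-prefixed RMW instruction.  ATOMIC64_FETCH_OP(op, c_op)
 * generates the corresponding atomic64_fetch_*() variants, which return the
 * old value, using an atomic64_cmpxchg() loop.
 */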
#define ATOMIC64_OP(op) \
static inline void atomic64_##op(long i, atomic64_t *v) \
{ \
	asm volatile(LOCK_PREFIX #op"q %1,%0" \
		     : "+m" (v->counter) \
		     : "er" (i) \
		     : "memory"); \
}

#define ATOMIC64_FETCH_OP(op, c_op) \
static inline long atomic64_fetch_##op(long i, atomic64_t *v) \
{ \
	long old, val = atomic64_read(v); \
	for (;;) { \
		old = atomic64_cmpxchg(v, val, val c_op i); \
		if (old == val) \
			break; \
		val = old; \
	} \
	return old; \
}

#define ATOMIC64_OPS(op, c_op) \
	ATOMIC64_OP(op) \
	ATOMIC64_FETCH_OP(op, c_op)

ATOMIC64_OPS(and, &)
ATOMIC64_OPS(or, |)
ATOMIC64_OPS(xor, ^)

#undef ATOMIC64_OPS
#undef ATOMIC64_FETCH_OP
#undef ATOMIC64_OP

#endif /* _ASM_X86_ATOMIC64_64_H */