/* SPDX-License-Identifier: GPL-2.0 */
#ifndef _ASM_X86_ATOMIC64_64_H
#define _ASM_X86_ATOMIC64_64_H

#include <linux/types.h>
#include <asm/alternative.h>
#include <asm/cmpxchg.h>

/* The 64-bit atomic type */

#define ATOMIC64_INIT(i)	{ (i) }

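/*
 * All read-modify-write operations below use LOCK_PREFIX from
 * <asm/alternative.h>, which emits the LOCK prefix on SMP configurations so
 * each instruction is a fully atomic RMW. Plain aligned 64-bit loads and
 * stores are already atomic on x86-64, which is why read/set need no LOCK.
 */
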
/**
 * arch_atomic64_read - read atomic64 variable
 * @v: pointer of type atomic64_t
 *
 * Atomically reads the value of @v.
 * Doesn't imply a read memory barrier.
 */
static inline s64 arch_atomic64_read(const atomic64_t *v)
{
	return __READ_ONCE((v)->counter);
}

/**
 * arch_atomic64_set - set atomic64 variable
 * @v: pointer to type atomic64_t
 * @i: required value
 *
 * Atomically sets the value of @v to @i.
 */
static inline void arch_atomic64_set(atomic64_t *v, s64 i)
{
	__WRITE_ONCE(v->counter, i);
}

/**
 * arch_atomic64_add - add integer to atomic64 variable
 * @i: integer value to add
 * @v: pointer to type atomic64_t
 *
 * Atomically adds @i to @v.
 */
static __always_inline void arch_atomic64_add(s64 i, atomic64_t *v)
{
	asm volatile(LOCK_PREFIX "addq %1,%0"
		     : "=m" (v->counter)
		     : "er" (i), "m" (v->counter) : "memory");
}

/**
 * arch_atomic64_sub - subtract the atomic64 variable
 * @i: integer value to subtract
 * @v: pointer to type atomic64_t
 *
 * Atomically subtracts @i from @v.
 */
static inline void arch_atomic64_sub(s64 i, atomic64_t *v)
{
	asm volatile(LOCK_PREFIX "subq %1,%0"
		     : "=m" (v->counter)
		     : "er" (i), "m" (v->counter) : "memory");
}

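/*
 * The *_and_test() and add_negative() helpers below use the
 * GEN_UNARY_RMWcc()/GEN_BINARY_RMWcc() macros from <asm/rmwcc.h>, which emit
 * the locked instruction and return the requested condition flag as a bool
 * ("e": result is zero, "s": result is negative).
 */
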
/**
 * arch_atomic64_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer to type atomic64_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
static inline bool arch_atomic64_sub_and_test(s64 i, atomic64_t *v)
{
	return GEN_BINARY_RMWcc(LOCK_PREFIX "subq", v->counter, e, "er", i);
}
#define arch_atomic64_sub_and_test arch_atomic64_sub_and_test

/**
 * arch_atomic64_inc - increment atomic64 variable
 * @v: pointer to type atomic64_t
 *
 * Atomically increments @v by 1.
 */
static __always_inline void arch_atomic64_inc(atomic64_t *v)
{
	asm volatile(LOCK_PREFIX "incq %0"
		     : "=m" (v->counter)
		     : "m" (v->counter) : "memory");
}
#define arch_atomic64_inc arch_atomic64_inc

/**
 * arch_atomic64_dec - decrement atomic64 variable
 * @v: pointer to type atomic64_t
 *
 * Atomically decrements @v by 1.
 */
static __always_inline void arch_atomic64_dec(atomic64_t *v)
{
	asm volatile(LOCK_PREFIX "decq %0"
		     : "=m" (v->counter)
		     : "m" (v->counter) : "memory");
}
#define arch_atomic64_dec arch_atomic64_dec

/**
 * arch_atomic64_dec_and_test - decrement and test
 * @v: pointer to type atomic64_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
static inline bool arch_atomic64_dec_and_test(atomic64_t *v)
{
	return GEN_UNARY_RMWcc(LOCK_PREFIX "decq", v->counter, e);
}
#define arch_atomic64_dec_and_test arch_atomic64_dec_and_test

/**
 * arch_atomic64_inc_and_test - increment and test
 * @v: pointer to type atomic64_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
static inline bool arch_atomic64_inc_and_test(atomic64_t *v)
{
	return GEN_UNARY_RMWcc(LOCK_PREFIX "incq", v->counter, e);
}
#define arch_atomic64_inc_and_test arch_atomic64_inc_and_test

/**
 * arch_atomic64_add_negative - add and test if negative
 * @i: integer value to add
 * @v: pointer to type atomic64_t
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
static inline bool arch_atomic64_add_negative(s64 i, atomic64_t *v)
{
	return GEN_BINARY_RMWcc(LOCK_PREFIX "addq", v->counter, s, "er", i);
}
#define arch_atomic64_add_negative arch_atomic64_add_negative

/**
 * arch_atomic64_add_return - add and return
 * @i: integer value to add
 * @v: pointer to type atomic64_t
 *
 * Atomically adds @i to @v and returns @i + @v.
 */
static __always_inline s64 arch_atomic64_add_return(s64 i, atomic64_t *v)
{
	return i + xadd(&v->counter, i);
}
#define arch_atomic64_add_return arch_atomic64_add_return

static inline s64 arch_atomic64_sub_return(s64 i, atomic64_t *v)
{
	return arch_atomic64_add_return(-i, v);
}
#define arch_atomic64_sub_return arch_atomic64_sub_return

static inline s64 arch_atomic64_fetch_add(s64 i, atomic64_t *v)
{
	return xadd(&v->counter, i);
}
#define arch_atomic64_fetch_add arch_atomic64_fetch_add

static inline s64 arch_atomic64_fetch_sub(s64 i, atomic64_t *v)
{
	return xadd(&v->counter, -i);
}
#define arch_atomic64_fetch_sub arch_atomic64_fetch_sub

static inline s64 arch_atomic64_cmpxchg(atomic64_t *v, s64 old, s64 new)
{
	return arch_cmpxchg(&v->counter, old, new);
}
#define arch_atomic64_cmpxchg arch_atomic64_cmpxchg

static __always_inline bool arch_atomic64_try_cmpxchg(atomic64_t *v, s64 *old, s64 new)
{
	return arch_try_cmpxchg(&v->counter, old, new);
}
#define arch_atomic64_try_cmpxchg arch_atomic64_try_cmpxchg

static inline s64 arch_atomic64_xchg(atomic64_t *v, s64 new)
{
	return arch_xchg(&v->counter, new);
}
#define arch_atomic64_xchg arch_atomic64_xchg

static inline void arch_atomic64_and(s64 i, atomic64_t *v)
{
	asm volatile(LOCK_PREFIX "andq %1,%0"
		     : "+m" (v->counter)
		     : "er" (i)
		     : "memory");
}

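/*
 * x86 has no instruction that atomically performs AND/OR/XOR on memory and
 * also returns the old value, so the fetch_{and,or,xor} variants below use a
 * read followed by a cmpxchg retry loop: on failure,
 * arch_atomic64_try_cmpxchg() refreshes @val with the current counter and
 * the new value is recomputed until the exchange succeeds.
 */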
static inline s64 arch_atomic64_fetch_and(s64 i, atomic64_t *v)
{
	s64 val = arch_atomic64_read(v);

	do {
	} while (!arch_atomic64_try_cmpxchg(v, &val, val & i));
	return val;
}
#define arch_atomic64_fetch_and arch_atomic64_fetch_and

static inline void arch_atomic64_or(s64 i, atomic64_t *v)
{
	asm volatile(LOCK_PREFIX "orq %1,%0"
		     : "+m" (v->counter)
		     : "er" (i)
		     : "memory");
}

static inline s64 arch_atomic64_fetch_or(s64 i, atomic64_t *v)
{
	s64 val = arch_atomic64_read(v);

	do {
	} while (!arch_atomic64_try_cmpxchg(v, &val, val | i));
	return val;
}
#define arch_atomic64_fetch_or arch_atomic64_fetch_or

static inline void arch_atomic64_xor(s64 i, atomic64_t *v)
{
	asm volatile(LOCK_PREFIX "xorq %1,%0"
		     : "+m" (v->counter)
		     : "er" (i)
		     : "memory");
}

static inline s64 arch_atomic64_fetch_xor(s64 i, atomic64_t *v)
{
	s64 val = arch_atomic64_read(v);

	do {
	} while (!arch_atomic64_try_cmpxchg(v, &val, val ^ i));
	return val;
}
#define arch_atomic64_fetch_xor arch_atomic64_fetch_xor

#endif	/* _ASM_X86_ATOMIC64_64_H */