#ifndef _ASM_X86_ATOMIC64_64_H
#define _ASM_X86_ATOMIC64_64_H

#include <linux/types.h>
#include <asm/alternative.h>
#include <asm/cmpxchg.h>
/* The 64-bit atomic type */

#define ATOMIC64_INIT(i)	{ (i) }

/**
 * atomic64_read - read atomic64 variable
 * @v: pointer of type atomic64_t
 *
 * Atomically reads the value of @v.
 * Doesn't imply a read memory barrier.
 */
static inline long atomic64_read(const atomic64_t *v)
{
	return ACCESS_ONCE((v)->counter);
}

/**
 * atomic64_set - set atomic64 variable
 * @v: pointer to type atomic64_t
 * @i: required value
 *
 * Atomically sets the value of @v to @i.
 */
static inline void atomic64_set(atomic64_t *v, long i)
{
	v->counter = i;
}

/**
 * atomic64_add - add integer to atomic64 variable
 * @i: integer value to add
 * @v: pointer to type atomic64_t
 *
 * Atomically adds @i to @v.
 */
static inline void atomic64_add(long i, atomic64_t *v)
{
	asm volatile(LOCK_PREFIX "addq %1,%0"
		     : "=m" (v->counter)
		     : "er" (i), "m" (v->counter));
}
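
/*
 * Example (illustrative sketch, not part of this header): accumulating a
 * 64-bit statistics counter without a lock.  "stats" and its "rx_bytes"
 * field are assumptions made up for the example.
 *
 *	atomic64_add(len, &stats->rx_bytes);
 */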

/**
 * atomic64_sub - subtract the atomic64 variable
 * @i: integer value to subtract
 * @v: pointer to type atomic64_t
 *
 * Atomically subtracts @i from @v.
 */
static inline void atomic64_sub(long i, atomic64_t *v)
{
	asm volatile(LOCK_PREFIX "subq %1,%0"
		     : "=m" (v->counter)
		     : "er" (i), "m" (v->counter));
}

/**
 * atomic64_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer to type atomic64_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
static inline int atomic64_sub_and_test(long i, atomic64_t *v)
{
	GEN_BINARY_RMWcc(LOCK_PREFIX "subq", v->counter, "er", i, "%0", "e");
}
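
/*
 * Example (illustrative sketch): returning "nr" units of a hypothetical
 * in-flight budget and noticing when the last unit comes back.  "budget"
 * and "all_done" are assumptions made up for the example.
 *
 *	if (atomic64_sub_and_test(nr, &budget))
 *		complete(&all_done);
 */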

/**
 * atomic64_inc - increment atomic64 variable
 * @v: pointer to type atomic64_t
 *
 * Atomically increments @v by 1.
 */
static inline void atomic64_inc(atomic64_t *v)
{
	asm volatile(LOCK_PREFIX "incq %0"
		     : "=m" (v->counter)
		     : "m" (v->counter));
}

/**
 * atomic64_dec - decrement atomic64 variable
 * @v: pointer to type atomic64_t
 *
 * Atomically decrements @v by 1.
 */
static inline void atomic64_dec(atomic64_t *v)
{
	asm volatile(LOCK_PREFIX "decq %0"
		     : "=m" (v->counter)
		     : "m" (v->counter));
}

/**
 * atomic64_dec_and_test - decrement and test
 * @v: pointer to type atomic64_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
static inline int atomic64_dec_and_test(atomic64_t *v)
{
	GEN_UNARY_RMWcc(LOCK_PREFIX "decq", v->counter, "%0", "e");
}
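
/*
 * Example (illustrative sketch): the put side of a hypothetical 64-bit
 * reference count, freeing the object when the count hits zero.  "obj"
 * and its "refcnt" field are assumptions made up for the example.
 *
 *	if (atomic64_dec_and_test(&obj->refcnt))
 *		kfree(obj);
 */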

/**
 * atomic64_inc_and_test - increment and test
 * @v: pointer to type atomic64_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
static inline int atomic64_inc_and_test(atomic64_t *v)
{
	GEN_UNARY_RMWcc(LOCK_PREFIX "incq", v->counter, "%0", "e");
}

/**
 * atomic64_add_negative - add and test if negative
 * @i: integer value to add
 * @v: pointer to type atomic64_t
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
static inline int atomic64_add_negative(long i, atomic64_t *v)
{
	GEN_BINARY_RMWcc(LOCK_PREFIX "addq", v->counter, "er", i, "%0", "s");
}

/**
 * atomic64_add_return - add and return
 * @i: integer value to add
 * @v: pointer to type atomic64_t
 *
 * Atomically adds @i to @v and returns @i + @v
 */
static inline long atomic64_add_return(long i, atomic64_t *v)
{
	return i + xadd(&v->counter, i);
}

static inline long atomic64_sub_return(long i, atomic64_t *v)
{
	return atomic64_add_return(-i, v);
}

#define atomic64_inc_return(v)  (atomic64_add_return(1, (v)))
#define atomic64_dec_return(v)  (atomic64_sub_return(1, (v)))
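
/*
 * Example (illustrative sketch): handing out monotonically increasing
 * 64-bit sequence numbers.  Unlike atomic64_inc(), the _return variant
 * also yields the new value.  "seq" is an assumed atomic64_t.
 *
 *	u64 id = atomic64_inc_return(&seq);
 */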

static inline long atomic64_cmpxchg(atomic64_t *v, long old, long new)
{
	return cmpxchg(&v->counter, old, new);
}

static inline long atomic64_xchg(atomic64_t *v, long new)
{
	return xchg(&v->counter, new);
}

/**
 * atomic64_add_unless - add unless the number is a given value
 * @v: pointer of type atomic64_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as it was not @u.
 * Returns true if the addition was done.
 */
static inline int atomic64_add_unless(atomic64_t *v, long a, long u)
{
	long c, old;
	c = atomic64_read(v);
	for (;;) {
		if (unlikely(c == (u)))
			break;
		old = atomic64_cmpxchg((v), c, c + (a));
		if (likely(old == c))
			break;
		c = old;
	}
	return c != (u);
}

#define atomic64_inc_not_zero(v) atomic64_add_unless((v), 1, 0)
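
/*
 * Example (illustrative sketch): taking a reference only while the object
 * is still live, i.e. its count has not already dropped to zero.  "obj"
 * and its "refcnt" field are assumptions made up for the example.
 *
 *	if (!atomic64_inc_not_zero(&obj->refcnt))
 *		return NULL;
 */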

/*
 * atomic64_dec_if_positive - decrement by 1 if old value is positive
 * @v: pointer of type atomic64_t
 *
 * Atomically decrements @v by 1, so long as the result would not be
 * negative.  The function returns the old value of @v minus 1, even if
 * @v was not decremented.
 */
static inline long atomic64_dec_if_positive(atomic64_t *v)
{
	long c, old, dec;
	c = atomic64_read(v);
	for (;;) {
		dec = c - 1;
		if (unlikely(dec < 0))
			break;
		old = atomic64_cmpxchg((v), c, dec);
		if (likely(old == c))
			break;
		c = old;
	}
	return dec;
}
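
/*
 * Example (illustrative sketch): a semaphore-style "take a slot if one is
 * free"; a negative return value means nothing was taken.  "free_slots"
 * is an assumed atomic64_t.
 *
 *	if (atomic64_dec_if_positive(&free_slots) < 0)
 *		return -EBUSY;
 */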

#endif /* _ASM_X86_ATOMIC64_64_H */