#ifndef _ASM_X86_ATOMIC64_64_H
#define _ASM_X86_ATOMIC64_64_H

#include <linux/types.h>
#include <asm/alternative.h>
#include <asm/cmpxchg.h>

#define ATOMIC64_INIT(i)	{ (i) }
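/**
 * atomic64_read - read atomic64 variable
 * @v: pointer of type atomic64_t
 *
 * Atomically reads the value of @v.
 * Doesn't imply a read memory barrier.
 */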
static inline long atomic64_read(const atomic64_t *v)
{
	return (*(volatile long *)&(v)->counter);
}
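/**
 * atomic64_set - set atomic64 variable
 * @v: pointer to type atomic64_t
 * @i: required value
 *
 * Atomically sets the value of @v to @i.
 */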
static inline void atomic64_set(atomic64_t *v, long i)
{
	v->counter = i;
}
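/**
 * atomic64_add - add integer to atomic64 variable
 * @i: integer value to add
 * @v: pointer to type atomic64_t
 *
 * Atomically adds @i to @v.
 */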
static inline void atomic64_add(long i, atomic64_t *v)
{
	asm volatile(LOCK_PREFIX "addq %1,%0"
		     : "=m" (v->counter)
		     : "er" (i), "m" (v->counter));
}
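/**
 * atomic64_sub - subtract the atomic64 variable
 * @i: integer value to subtract
 * @v: pointer to type atomic64_t
 *
 * Atomically subtracts @i from @v.
 */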
static inline void atomic64_sub(long i, atomic64_t *v)
{
	asm volatile(LOCK_PREFIX "subq %1,%0"
		     : "=m" (v->counter)
		     : "er" (i), "m" (v->counter));
}
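/**
 * atomic64_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer to type atomic64_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */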
static inline int atomic64_sub_and_test(long i, atomic64_t *v)
{
	unsigned char c;

	asm volatile(LOCK_PREFIX "subq %2,%0; sete %1"
		     : "=m" (v->counter), "=qm" (c)
		     : "er" (i), "m" (v->counter) : "memory");
	return c;
}
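/**
 * atomic64_inc - increment atomic64 variable
 * @v: pointer to type atomic64_t
 *
 * Atomically increments @v by 1.
 */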
static inline void atomic64_inc(atomic64_t *v)
{
	asm volatile(LOCK_PREFIX "incq %0"
		     : "=m" (v->counter)
		     : "m" (v->counter));
}
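/**
 * atomic64_dec - decrement atomic64 variable
 * @v: pointer to type atomic64_t
 *
 * Atomically decrements @v by 1.
 */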
static inline void atomic64_dec(atomic64_t *v)
{
	asm volatile(LOCK_PREFIX "decq %0"
		     : "=m" (v->counter)
		     : "m" (v->counter));
}
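/**
 * atomic64_dec_and_test - decrement and test
 * @v: pointer to type atomic64_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */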
static inline int atomic64_dec_and_test(atomic64_t *v)
{
	unsigned char c;

	asm volatile(LOCK_PREFIX "decq %0; sete %1"
		     : "=m" (v->counter), "=qm" (c)
		     : "m" (v->counter) : "memory");
	return c != 0;
}
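/**
 * atomic64_inc_and_test - increment and test
 * @v: pointer to type atomic64_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */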
static inline int atomic64_inc_and_test(atomic64_t *v)
{
	unsigned char c;

	asm volatile(LOCK_PREFIX "incq %0; sete %1"
		     : "=m" (v->counter), "=qm" (c)
		     : "m" (v->counter) : "memory");
	return c != 0;
}
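/**
 * atomic64_add_negative - add and test if negative
 * @i: integer value to add
 * @v: pointer to type atomic64_t
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * the result is greater than or equal to zero.
 */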
static inline int atomic64_add_negative(long i, atomic64_t *v)
{
	unsigned char c;

	asm volatile(LOCK_PREFIX "addq %2,%0; sets %1"
		     : "=m" (v->counter), "=qm" (c)
		     : "er" (i), "m" (v->counter) : "memory");
	return c;
}
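/**
 * atomic64_add_return - add and return
 * @i: integer value to add
 * @v: pointer to type atomic64_t
 *
 * Atomically adds @i to @v and returns @i + @v
 */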
static inline long atomic64_add_return(long i, atomic64_t *v)
{
	return i + xadd(&v->counter, i);
}

static inline long atomic64_sub_return(long i, atomic64_t *v)
{
	return atomic64_add_return(-i, v);
}

#define atomic64_inc_return(v) (atomic64_add_return(1, (v)))
#define atomic64_dec_return(v) (atomic64_sub_return(1, (v)))

static inline long atomic64_cmpxchg(atomic64_t *v, long old, long new)
{
	return cmpxchg(&v->counter, old, new);
}

static inline long atomic64_xchg(atomic64_t *v, long new)
{
	return xchg(&v->counter, new);
}
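/**
 * atomic64_add_unless - add unless the number is a given value
 * @v: pointer of type atomic64_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as it was not @u.
 * Returns non-zero if @v was not @u, and zero otherwise.
 */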
static inline int atomic64_add_unless(atomic64_t *v, long a, long u)
{
	long c, old;
	c = atomic64_read(v);
	for (;;) {
		if (unlikely(c == (u)))
			break;
		old = atomic64_cmpxchg((v), c, c + (a));
		if (likely(old == c))
			break;
		c = old;
	}
	return c != (u);
}

#define atomic64_inc_not_zero(v) atomic64_add_unless((v), 1, 0)
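/*
 * atomic64_dec_if_positive - decrement by 1 if old value positive
 * @v: pointer of type atomic64_t
 *
 * Returns the old value of *v minus 1, even if
 * the atomic variable, v, was not decremented.
 */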
static inline long atomic64_dec_if_positive(atomic64_t *v)
{
	long c, old, dec;
	c = atomic64_read(v);
	for (;;) {
		dec = c - 1;
		if (unlikely(dec < 0))
			break;
		old = atomic64_cmpxchg((v), c, dec);
		if (likely(old == c))
			break;
		c = old;
	}
	return dec;
}

#endif /* _ASM_X86_ATOMIC64_64_H */