#ifndef _ASM_X86_ATOMIC_H
#define _ASM_X86_ATOMIC_H

#include <linux/compiler.h>
#include <linux/types.h>
#include <asm/processor.h>
#include <asm/alternative.h>
#include <asm/cmpxchg.h>

/*
 * Atomic operations that C can't guarantee us.  Useful for
 * resource counting etc.
 */

#define ATOMIC_INIT(i)	{ (i) }

/**
 * atomic_read - read atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically reads the value of @v.
 */
static inline int atomic_read(const atomic_t *v)
{
	return (*(volatile int *)&(v)->counter);
}

/**
 * atomic_set - set atomic variable
 * @v: pointer of type atomic_t
 * @i: required value
 *
 * Atomically sets the value of @v to @i.
 */
static inline void atomic_set(atomic_t *v, int i)
{
	v->counter = i;
}

/**
 * atomic_add - add integer to atomic variable
 * @i: integer value to add
 * @v: pointer of type atomic_t
 *
 * Atomically adds @i to @v.
 */
static inline void atomic_add(int i, atomic_t *v)
{
	asm volatile(LOCK_PREFIX "addl %1,%0"
		     : "+m" (v->counter)
		     : "ir" (i));
}

/**
 * atomic_sub - subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v.
 */
static inline void atomic_sub(int i, atomic_t *v)
{
	asm volatile(LOCK_PREFIX "subl %1,%0"
		     : "+m" (v->counter)
		     : "ir" (i));
}

/**
 * atomic_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
static inline int atomic_sub_and_test(int i, atomic_t *v)
{
	unsigned char c;

	asm volatile(LOCK_PREFIX "subl %2,%0; sete %1"
		     : "+m" (v->counter), "=qm" (c)
		     : "ir" (i) : "memory");
	return c;
}

/**
 * atomic_inc - increment atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1.
 */
static inline void atomic_inc(atomic_t *v)
{
	asm volatile(LOCK_PREFIX "incl %0"
		     : "+m" (v->counter));
}

/**
 * atomic_dec - decrement atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1.
 */
static inline void atomic_dec(atomic_t *v)
{
	asm volatile(LOCK_PREFIX "decl %0"
		     : "+m" (v->counter));
}

/**
 * atomic_dec_and_test - decrement and test
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
static inline int atomic_dec_and_test(atomic_t *v)
{
	unsigned char c;

	asm volatile(LOCK_PREFIX "decl %0; sete %1"
		     : "+m" (v->counter), "=qm" (c)
		     : : "memory");
	return c != 0;
}

/**
 * atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
static inline int atomic_inc_and_test(atomic_t *v)
{
	unsigned char c;

	asm volatile(LOCK_PREFIX "incl %0; sete %1"
		     : "+m" (v->counter), "=qm" (c)
		     : : "memory");
	return c != 0;
}

/**
 * atomic_add_negative - add and test if negative
 * @i: integer value to add
 * @v: pointer of type atomic_t
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * the result is greater than or equal to zero.
 */
static inline int atomic_add_negative(int i, atomic_t *v)
{
	unsigned char c;

	asm volatile(LOCK_PREFIX "addl %2,%0; sets %1"
		     : "+m" (v->counter), "=qm" (c)
		     : "ir" (i) : "memory");
	return c;
}

/**
 * atomic_add_return - add integer and return
 * @i: integer value to add
 * @v: pointer of type atomic_t
 *
 * Atomically adds @i to @v and returns @i + @v.
 */
static inline int atomic_add_return(int i, atomic_t *v)
{
#ifdef CONFIG_M386
	int __i;
	unsigned long flags;
	if (unlikely(boot_cpu_data.x86 <= 3))
		goto no_xadd;
#endif
	/* Modern 486+ processor */
	return i + xadd(&v->counter, i);

#ifdef CONFIG_M386
no_xadd: /* Legacy 386 processor */
	raw_local_irq_save(flags);
	__i = atomic_read(v);
	atomic_set(v, i + __i);
	raw_local_irq_restore(flags);
	return i + __i;
#endif
}

/**
 * atomic_sub_return - subtract integer and return
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v and returns @v - @i.
 */
static inline int atomic_sub_return(int i, atomic_t *v)
{
	return atomic_add_return(-i, v);
}

#define atomic_inc_return(v) (atomic_add_return(1, v))
#define atomic_dec_return(v) (atomic_sub_return(1, v))

static inline int atomic_cmpxchg(atomic_t *v, int old, int new)
{
	return cmpxchg(&v->counter, old, new);
}

static inline int atomic_xchg(atomic_t *v, int new)
{
	return xchg(&v->counter, new);
}
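
/*
 * Illustrative usage sketch, not part of this header: atomic_cmpxchg()
 * is the usual building block for "claim exactly once" logic.  The names
 * my_claimed and do_setup_once() below are hypothetical.
 *
 *	static atomic_t my_claimed = ATOMIC_INIT(0);
 *
 *	if (atomic_cmpxchg(&my_claimed, 0, 1) == 0)
 *		do_setup_once();	// only the winner of the race gets here
 */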

/**
 * __atomic_add_unless - add unless the number is already a given value
 * @v: pointer of type atomic_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as @v was not already @u.
 * Returns the old value of @v.
 */
static inline int __atomic_add_unless(atomic_t *v, int a, int u)
{
	int c, old;
	c = atomic_read(v);
	for (;;) {
		if (unlikely(c == (u)))
			break;
		old = atomic_cmpxchg((v), c, c + (a));
		if (likely(old == c))
			break;
		c = old;
	}
	return c;
}
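
/*
 * Illustrative usage sketch, not part of this header: the generic
 * atomic_add_unless()/atomic_inc_not_zero() wrappers in <linux/atomic.h>
 * are typically thin wrappers around the helper above, giving the classic
 * "take a reference unless the count already dropped to zero" idiom.
 * The field obj->refcnt below is hypothetical.
 *
 *	if (__atomic_add_unless(&obj->refcnt, 1, 0) != 0)
 *		// we now hold a reference on obj
 */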

/*
 * atomic_dec_if_positive - decrement by 1 if old value positive
 * @v: pointer of type atomic_t
 *
 * Returns the old value of *@v minus 1, even if
 * the atomic variable @v was not decremented.
 */
static inline int atomic_dec_if_positive(atomic_t *v)
{
	int c, old, dec;
	c = atomic_read(v);
	for (;;) {
		dec = c - 1;
		if (unlikely(dec < 0))
			break;
		old = atomic_cmpxchg((v), c, dec);
		if (likely(old == c))
			break;
		c = old;
	}
	return dec;
}

/**
 * atomic_inc_short - increment of a short integer
 * @v: pointer of type short int
 *
 * Atomically adds 1 to @v.
 * Returns the new value of @v.
 */
static inline short int atomic_inc_short(short int *v)
{
	asm(LOCK_PREFIX "addw $1, %0" : "+m" (*v));
	return *v;
}

#ifdef CONFIG_X86_64
/**
 * atomic_or_long - OR a value into a long integer
 * @v1: pointer of type unsigned long
 * @v2: value to OR into *@v1
 *
 * Atomically ORs @v2 into *@v1.
 */
static inline void atomic_or_long(unsigned long *v1, unsigned long v2)
{
	asm(LOCK_PREFIX "orq %1, %0" : "+m" (*v1) : "r" (v2));
}
#endif

/* These are x86-specific, used by some header files */
#define atomic_clear_mask(mask, addr)				\
	asm volatile(LOCK_PREFIX "andl %0,%1"			\
		     : : "r" (~(mask)), "m" (*(addr)) : "memory")

#define atomic_set_mask(mask, addr)				\
	asm volatile(LOCK_PREFIX "orl %0,%1"			\
		     : : "r" ((unsigned)(mask)), "m" (*(addr))	\
		     : "memory")
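
/*
 * Illustrative usage sketch, not part of this header: both macros operate
 * on a plain integer in memory rather than on an atomic_t.  The variable
 * my_flags below is hypothetical.
 *
 *	static unsigned int my_flags;
 *
 *	atomic_set_mask(0x4, &my_flags);	// atomically set bit 2
 *	atomic_clear_mask(0x4, &my_flags);	// atomically clear bit 2
 */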

/* Atomic operations are already serializing on x86 */
#define smp_mb__before_atomic_dec()	barrier()
#define smp_mb__after_atomic_dec()	barrier()
#define smp_mb__before_atomic_inc()	barrier()
#define smp_mb__after_atomic_inc()	barrier()

#ifdef CONFIG_X86_32
# include "atomic64_32.h"
#else
# include "atomic64_64.h"
#endif

#endif /* _ASM_X86_ATOMIC_H */