1
2#ifndef _LINUX_ATOMIC_H
3#define _LINUX_ATOMIC_H
4#include <asm/atomic.h>
5#include <asm/barrier.h>
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
/*
 * Acquire/release accessors for atomic_t.  When the architecture does not
 * supply its own versions, they are built from the generic one-way barriers
 * smp_load_acquire()/smp_store_release() applied to the counter field.
 */
#ifndef atomic_read_acquire
#define atomic_read_acquire(v)		smp_load_acquire(&(v)->counter)
#endif

#ifndef atomic_set_release
#define atomic_set_release(v, i)	smp_store_release(&(v)->counter, (i))
#endif
32
33
34
35
36
37
38
39
40
41
/*
 * Helpers used below to synthesize the stronger ordering variants of an
 * operation from its _relaxed form:
 *
 *  - __atomic_op_acquire(): relaxed op followed by smp_mb__after_atomic();
 *  - __atomic_op_release(): smp_mb__before_atomic() followed by relaxed op;
 *  - __atomic_op_fence():   relaxed op bracketed by both barriers.
 */
#ifndef __atomic_op_acquire
#define __atomic_op_acquire(op, args...)				\
({									\
	typeof(op##_relaxed(args)) __ret = op##_relaxed(args);		\
	smp_mb__after_atomic();						\
	__ret;								\
})
#endif

#ifndef __atomic_op_release
#define __atomic_op_release(op, args...)				\
({									\
	smp_mb__before_atomic();					\
	op##_relaxed(args);						\
})
#endif

#ifndef __atomic_op_fence
#define __atomic_op_fence(op, args...)					\
({									\
	typeof(op##_relaxed(args)) __ret;				\
	smp_mb__before_atomic();					\
	__ret = op##_relaxed(args);					\
	smp_mb__after_atomic();						\
	__ret;								\
})
#endif
69
70
/*
 * Ordering variants for the value-returning atomic_t operations.  Two cases
 * per operation:
 *
 *  - the architecture defines only the fully-ordered op: alias the
 *    _relaxed/_acquire/_release names to it;
 *  - the architecture defines the _relaxed op: build whichever stronger
 *    variants it did not provide using the __atomic_op_*() helpers above.
 *
 * The same scheme repeats for every operation in this file.
 */

/* atomic_add_return_relaxed()/_acquire()/_release() */
#ifndef atomic_add_return_relaxed
#define atomic_add_return_relaxed	atomic_add_return
#define atomic_add_return_acquire	atomic_add_return
#define atomic_add_return_release	atomic_add_return

#else /* atomic_add_return_relaxed */

#ifndef atomic_add_return_acquire
#define atomic_add_return_acquire(...)					\
	__atomic_op_acquire(atomic_add_return, __VA_ARGS__)
#endif

#ifndef atomic_add_return_release
#define atomic_add_return_release(...)					\
	__atomic_op_release(atomic_add_return, __VA_ARGS__)
#endif

#ifndef atomic_add_return
#define atomic_add_return(...)						\
	__atomic_op_fence(atomic_add_return, __VA_ARGS__)
#endif
#endif /* atomic_add_return_relaxed */

/* atomic_inc_return_relaxed()/_acquire()/_release() */
#ifndef atomic_inc_return_relaxed
#define atomic_inc_return_relaxed	atomic_inc_return
#define atomic_inc_return_acquire	atomic_inc_return
#define atomic_inc_return_release	atomic_inc_return

#else /* atomic_inc_return_relaxed */

#ifndef atomic_inc_return_acquire
#define atomic_inc_return_acquire(...)					\
	__atomic_op_acquire(atomic_inc_return, __VA_ARGS__)
#endif

#ifndef atomic_inc_return_release
#define atomic_inc_return_release(...)					\
	__atomic_op_release(atomic_inc_return, __VA_ARGS__)
#endif

#ifndef atomic_inc_return
#define atomic_inc_return(...)						\
	__atomic_op_fence(atomic_inc_return, __VA_ARGS__)
#endif
#endif /* atomic_inc_return_relaxed */

/* atomic_sub_return_relaxed()/_acquire()/_release() */
#ifndef atomic_sub_return_relaxed
#define atomic_sub_return_relaxed	atomic_sub_return
#define atomic_sub_return_acquire	atomic_sub_return
#define atomic_sub_return_release	atomic_sub_return

#else /* atomic_sub_return_relaxed */

#ifndef atomic_sub_return_acquire
#define atomic_sub_return_acquire(...)					\
	__atomic_op_acquire(atomic_sub_return, __VA_ARGS__)
#endif

#ifndef atomic_sub_return_release
#define atomic_sub_return_release(...)					\
	__atomic_op_release(atomic_sub_return, __VA_ARGS__)
#endif

#ifndef atomic_sub_return
#define atomic_sub_return(...)						\
	__atomic_op_fence(atomic_sub_return, __VA_ARGS__)
#endif
#endif /* atomic_sub_return_relaxed */

/* atomic_dec_return_relaxed()/_acquire()/_release() */
#ifndef atomic_dec_return_relaxed
#define atomic_dec_return_relaxed	atomic_dec_return
#define atomic_dec_return_acquire	atomic_dec_return
#define atomic_dec_return_release	atomic_dec_return

#else /* atomic_dec_return_relaxed */

#ifndef atomic_dec_return_acquire
#define atomic_dec_return_acquire(...)					\
	__atomic_op_acquire(atomic_dec_return, __VA_ARGS__)
#endif

#ifndef atomic_dec_return_release
#define atomic_dec_return_release(...)					\
	__atomic_op_release(atomic_dec_return, __VA_ARGS__)
#endif

#ifndef atomic_dec_return
#define atomic_dec_return(...)						\
	__atomic_op_fence(atomic_dec_return, __VA_ARGS__)
#endif
#endif /* atomic_dec_return_relaxed */

/* atomic_xchg_relaxed()/_acquire()/_release() */
#ifndef atomic_xchg_relaxed
#define atomic_xchg_relaxed		atomic_xchg
#define atomic_xchg_acquire		atomic_xchg
#define atomic_xchg_release		atomic_xchg

#else /* atomic_xchg_relaxed */

#ifndef atomic_xchg_acquire
#define atomic_xchg_acquire(...)					\
	__atomic_op_acquire(atomic_xchg, __VA_ARGS__)
#endif

#ifndef atomic_xchg_release
#define atomic_xchg_release(...)					\
	__atomic_op_release(atomic_xchg, __VA_ARGS__)
#endif

#ifndef atomic_xchg
#define atomic_xchg(...)						\
	__atomic_op_fence(atomic_xchg, __VA_ARGS__)
#endif
#endif /* atomic_xchg_relaxed */

/* atomic_cmpxchg_relaxed()/_acquire()/_release() */
#ifndef atomic_cmpxchg_relaxed
#define atomic_cmpxchg_relaxed		atomic_cmpxchg
#define atomic_cmpxchg_acquire		atomic_cmpxchg
#define atomic_cmpxchg_release		atomic_cmpxchg

#else /* atomic_cmpxchg_relaxed */

#ifndef atomic_cmpxchg_acquire
#define atomic_cmpxchg_acquire(...)					\
	__atomic_op_acquire(atomic_cmpxchg, __VA_ARGS__)
#endif

#ifndef atomic_cmpxchg_release
#define atomic_cmpxchg_release(...)					\
	__atomic_op_release(atomic_cmpxchg, __VA_ARGS__)
#endif

#ifndef atomic_cmpxchg
#define atomic_cmpxchg(...)						\
	__atomic_op_fence(atomic_cmpxchg, __VA_ARGS__)
#endif
#endif /* atomic_cmpxchg_relaxed */
213
/* Acquire/release accessors for atomic64_t, mirroring the atomic_t ones. */
#ifndef atomic64_read_acquire
#define atomic64_read_acquire(v)	smp_load_acquire(&(v)->counter)
#endif

#ifndef atomic64_set_release
#define atomic64_set_release(v, i)	smp_store_release(&(v)->counter, (i))
#endif
221
222
/*
 * Ordering variants for the value-returning atomic64_t operations; same
 * fallback scheme as for atomic_t above.
 */

/* atomic64_add_return_relaxed()/_acquire()/_release() */
#ifndef atomic64_add_return_relaxed
#define atomic64_add_return_relaxed	atomic64_add_return
#define atomic64_add_return_acquire	atomic64_add_return
#define atomic64_add_return_release	atomic64_add_return

#else /* atomic64_add_return_relaxed */

#ifndef atomic64_add_return_acquire
#define atomic64_add_return_acquire(...)				\
	__atomic_op_acquire(atomic64_add_return, __VA_ARGS__)
#endif

#ifndef atomic64_add_return_release
#define atomic64_add_return_release(...)				\
	__atomic_op_release(atomic64_add_return, __VA_ARGS__)
#endif

#ifndef atomic64_add_return
#define atomic64_add_return(...)					\
	__atomic_op_fence(atomic64_add_return, __VA_ARGS__)
#endif
#endif /* atomic64_add_return_relaxed */

/* atomic64_inc_return_relaxed()/_acquire()/_release() */
#ifndef atomic64_inc_return_relaxed
#define atomic64_inc_return_relaxed	atomic64_inc_return
#define atomic64_inc_return_acquire	atomic64_inc_return
#define atomic64_inc_return_release	atomic64_inc_return

#else /* atomic64_inc_return_relaxed */

#ifndef atomic64_inc_return_acquire
#define atomic64_inc_return_acquire(...)				\
	__atomic_op_acquire(atomic64_inc_return, __VA_ARGS__)
#endif

#ifndef atomic64_inc_return_release
#define atomic64_inc_return_release(...)				\
	__atomic_op_release(atomic64_inc_return, __VA_ARGS__)
#endif

#ifndef atomic64_inc_return
#define atomic64_inc_return(...)					\
	__atomic_op_fence(atomic64_inc_return, __VA_ARGS__)
#endif
#endif /* atomic64_inc_return_relaxed */

/* atomic64_sub_return_relaxed()/_acquire()/_release() */
#ifndef atomic64_sub_return_relaxed
#define atomic64_sub_return_relaxed	atomic64_sub_return
#define atomic64_sub_return_acquire	atomic64_sub_return
#define atomic64_sub_return_release	atomic64_sub_return

#else /* atomic64_sub_return_relaxed */

#ifndef atomic64_sub_return_acquire
#define atomic64_sub_return_acquire(...)				\
	__atomic_op_acquire(atomic64_sub_return, __VA_ARGS__)
#endif

#ifndef atomic64_sub_return_release
#define atomic64_sub_return_release(...)				\
	__atomic_op_release(atomic64_sub_return, __VA_ARGS__)
#endif

#ifndef atomic64_sub_return
#define atomic64_sub_return(...)					\
	__atomic_op_fence(atomic64_sub_return, __VA_ARGS__)
#endif
#endif /* atomic64_sub_return_relaxed */

/* atomic64_dec_return_relaxed()/_acquire()/_release() */
#ifndef atomic64_dec_return_relaxed
#define atomic64_dec_return_relaxed	atomic64_dec_return
#define atomic64_dec_return_acquire	atomic64_dec_return
#define atomic64_dec_return_release	atomic64_dec_return

#else /* atomic64_dec_return_relaxed */

#ifndef atomic64_dec_return_acquire
#define atomic64_dec_return_acquire(...)				\
	__atomic_op_acquire(atomic64_dec_return, __VA_ARGS__)
#endif

#ifndef atomic64_dec_return_release
#define atomic64_dec_return_release(...)				\
	__atomic_op_release(atomic64_dec_return, __VA_ARGS__)
#endif

#ifndef atomic64_dec_return
#define atomic64_dec_return(...)					\
	__atomic_op_fence(atomic64_dec_return, __VA_ARGS__)
#endif
#endif /* atomic64_dec_return_relaxed */

/* atomic64_xchg_relaxed()/_acquire()/_release() */
#ifndef atomic64_xchg_relaxed
#define atomic64_xchg_relaxed		atomic64_xchg
#define atomic64_xchg_acquire		atomic64_xchg
#define atomic64_xchg_release		atomic64_xchg

#else /* atomic64_xchg_relaxed */

#ifndef atomic64_xchg_acquire
#define atomic64_xchg_acquire(...)					\
	__atomic_op_acquire(atomic64_xchg, __VA_ARGS__)
#endif

#ifndef atomic64_xchg_release
#define atomic64_xchg_release(...)					\
	__atomic_op_release(atomic64_xchg, __VA_ARGS__)
#endif

#ifndef atomic64_xchg
#define atomic64_xchg(...)						\
	__atomic_op_fence(atomic64_xchg, __VA_ARGS__)
#endif
#endif /* atomic64_xchg_relaxed */

/* atomic64_cmpxchg_relaxed()/_acquire()/_release() */
#ifndef atomic64_cmpxchg_relaxed
#define atomic64_cmpxchg_relaxed	atomic64_cmpxchg
#define atomic64_cmpxchg_acquire	atomic64_cmpxchg
#define atomic64_cmpxchg_release	atomic64_cmpxchg

#else /* atomic64_cmpxchg_relaxed */

#ifndef atomic64_cmpxchg_acquire
#define atomic64_cmpxchg_acquire(...)					\
	__atomic_op_acquire(atomic64_cmpxchg, __VA_ARGS__)
#endif

#ifndef atomic64_cmpxchg_release
#define atomic64_cmpxchg_release(...)					\
	__atomic_op_release(atomic64_cmpxchg, __VA_ARGS__)
#endif

#ifndef atomic64_cmpxchg
#define atomic64_cmpxchg(...)						\
	__atomic_op_fence(atomic64_cmpxchg, __VA_ARGS__)
#endif
#endif /* atomic64_cmpxchg_relaxed */
366
367
/*
 * Ordering variants for the plain (non-atomic_t) cmpxchg(), cmpxchg64()
 * and xchg() primitives; same fallback scheme as above.
 */

/* cmpxchg_relaxed()/_acquire()/_release() */
#ifndef cmpxchg_relaxed
#define cmpxchg_relaxed			cmpxchg
#define cmpxchg_acquire			cmpxchg
#define cmpxchg_release			cmpxchg

#else /* cmpxchg_relaxed */

#ifndef cmpxchg_acquire
#define cmpxchg_acquire(...)						\
	__atomic_op_acquire(cmpxchg, __VA_ARGS__)
#endif

#ifndef cmpxchg_release
#define cmpxchg_release(...)						\
	__atomic_op_release(cmpxchg, __VA_ARGS__)
#endif

#ifndef cmpxchg
#define cmpxchg(...)							\
	__atomic_op_fence(cmpxchg, __VA_ARGS__)
#endif
#endif /* cmpxchg_relaxed */

/* cmpxchg64_relaxed()/_acquire()/_release() */
#ifndef cmpxchg64_relaxed
#define cmpxchg64_relaxed		cmpxchg64
#define cmpxchg64_acquire		cmpxchg64
#define cmpxchg64_release		cmpxchg64

#else /* cmpxchg64_relaxed */

#ifndef cmpxchg64_acquire
#define cmpxchg64_acquire(...)						\
	__atomic_op_acquire(cmpxchg64, __VA_ARGS__)
#endif

#ifndef cmpxchg64_release
#define cmpxchg64_release(...)						\
	__atomic_op_release(cmpxchg64, __VA_ARGS__)
#endif

#ifndef cmpxchg64
#define cmpxchg64(...)							\
	__atomic_op_fence(cmpxchg64, __VA_ARGS__)
#endif
#endif /* cmpxchg64_relaxed */

/* xchg_relaxed()/_acquire()/_release() */
#ifndef xchg_relaxed
#define xchg_relaxed			xchg
#define xchg_acquire			xchg
#define xchg_release			xchg

#else /* xchg_relaxed */

#ifndef xchg_acquire
#define xchg_acquire(...)		__atomic_op_acquire(xchg, __VA_ARGS__)
#endif

#ifndef xchg_release
#define xchg_release(...)		__atomic_op_release(xchg, __VA_ARGS__)
#endif

#ifndef xchg
#define xchg(...)			__atomic_op_fence(xchg, __VA_ARGS__)
#endif
#endif /* xchg_relaxed */
435
436
437
438
439
440
441
442
443
444
445static inline int atomic_add_unless(atomic_t *v, int a, int u)
446{
447 return __atomic_add_unless(v, a, u) != u;
448}
449
450
451
452
453
454
455
456
/* Increment @v unless it is zero; non-zero return means it was incremented. */
#ifndef atomic_inc_not_zero
#define atomic_inc_not_zero(v)		atomic_add_unless((v), 1, 0)
#endif
460
461#ifndef atomic_andnot
462static inline void atomic_andnot(int i, atomic_t *v)
463{
464 atomic_and(~i, v);
465}
466#endif
467
468static inline __deprecated void atomic_clear_mask(unsigned int mask, atomic_t *v)
469{
470 atomic_andnot(mask, v);
471}
472
473static inline __deprecated void atomic_set_mask(unsigned int mask, atomic_t *v)
474{
475 atomic_or(mask, v);
476}
477
478
479
480
481
482
483
484
485
486
487
488
489
490#ifndef atomic_inc_not_zero_hint
491static inline int atomic_inc_not_zero_hint(atomic_t *v, int hint)
492{
493 int val, c = hint;
494
495
496 if (!hint)
497 return atomic_inc_not_zero(v);
498
499 do {
500 val = atomic_cmpxchg(v, c, c + 1);
501 if (val == c)
502 return 1;
503 c = val;
504 } while (c);
505
506 return 0;
507}
508#endif
509
510#ifndef atomic_inc_unless_negative
511static inline int atomic_inc_unless_negative(atomic_t *p)
512{
513 int v, v1;
514 for (v = 0; v >= 0; v = v1) {
515 v1 = atomic_cmpxchg(p, v, v + 1);
516 if (likely(v1 == v))
517 return 1;
518 }
519 return 0;
520}
521#endif
522
523#ifndef atomic_dec_unless_positive
524static inline int atomic_dec_unless_positive(atomic_t *p)
525{
526 int v, v1;
527 for (v = 0; v <= 0; v = v1) {
528 v1 = atomic_cmpxchg(p, v, v - 1);
529 if (likely(v1 == v))
530 return 1;
531 }
532 return 0;
533}
534#endif
535
536
537
538
539
540
541
542
543#ifndef atomic_dec_if_positive
544static inline int atomic_dec_if_positive(atomic_t *v)
545{
546 int c, old, dec;
547 c = atomic_read(v);
548 for (;;) {
549 dec = c - 1;
550 if (unlikely(dec < 0))
551 break;
552 old = atomic_cmpxchg((v), c, dec);
553 if (likely(old == c))
554 break;
555 c = old;
556 }
557 return dec;
558}
559#endif
560
561
562
563
564
565
566#ifndef atomic_fetch_or
567static inline int atomic_fetch_or(atomic_t *p, int mask)
568{
569 int old, val = atomic_read(p);
570
571 for (;;) {
572 old = atomic_cmpxchg(p, val, val | mask);
573 if (old == val)
574 break;
575 val = old;
576 }
577
578 return old;
579}
580#endif
581
582#ifdef CONFIG_GENERIC_ATOMIC64
583#include <asm-generic/atomic64.h>
584#endif
585
586#ifndef atomic64_andnot
587static inline void atomic64_andnot(long long i, atomic64_t *v)
588{
589 atomic64_and(~i, v);
590}
591#endif
592
593#include <asm-generic/atomic-long.h>
594
595#endif
596