1
2
3
4
5
6
7
8
9
10
11
12
13
14
15#ifndef QEMU_ATOMIC_H
16#define QEMU_ATOMIC_H
17
18
/*
 * Compiler barrier: prevents the compiler from reordering or caching
 * memory accesses across this point.  The empty asm with a "memory"
 * clobber emits no machine instruction; it does NOT order accesses at
 * the CPU level (use the smp_* barriers below for that).
 */
#define barrier() ({ asm volatile("" ::: "memory"); (void)0; })
20
21
22
23
24
25
26
27
28
/*
 * typeof_strip_qual(expr) yields the type of @expr with const/volatile
 * qualifiers removed, so it can be used to declare a plain writable
 * temporary holding a value loaded from a qualified lvalue.
 *
 * The generic "(expr)+0" trick at the end strips qualifiers, but it
 * also performs the integer promotions, which would turn bool, char
 * and short operands into int.  Each small integer type (and each of
 * its qualified variants) is therefore matched explicitly with
 * __builtin_types_compatible_p and mapped to an unqualified literal of
 * the exact same type; only other types fall through to "(expr)+0".
 */
#define typeof_strip_qual(expr)                                                 \
    typeof(                                                                     \
        __builtin_choose_expr(                                                  \
            __builtin_types_compatible_p(typeof(expr), bool) ||                 \
                __builtin_types_compatible_p(typeof(expr), const bool) ||       \
                __builtin_types_compatible_p(typeof(expr), volatile bool) ||    \
                __builtin_types_compatible_p(typeof(expr), const volatile bool), \
            (bool)1,                                                            \
        __builtin_choose_expr(                                                  \
            __builtin_types_compatible_p(typeof(expr), signed char) ||          \
                __builtin_types_compatible_p(typeof(expr), const signed char) || \
                __builtin_types_compatible_p(typeof(expr), volatile signed char) || \
                __builtin_types_compatible_p(typeof(expr), const volatile signed char), \
            (signed char)1,                                                     \
        __builtin_choose_expr(                                                  \
            __builtin_types_compatible_p(typeof(expr), unsigned char) ||        \
                __builtin_types_compatible_p(typeof(expr), const unsigned char) || \
                __builtin_types_compatible_p(typeof(expr), volatile unsigned char) || \
                __builtin_types_compatible_p(typeof(expr), const volatile unsigned char), \
            (unsigned char)1,                                                   \
        __builtin_choose_expr(                                                  \
            __builtin_types_compatible_p(typeof(expr), signed short) ||         \
                __builtin_types_compatible_p(typeof(expr), const signed short) || \
                __builtin_types_compatible_p(typeof(expr), volatile signed short) || \
                __builtin_types_compatible_p(typeof(expr), const volatile signed short), \
            (signed short)1,                                                    \
        __builtin_choose_expr(                                                  \
            __builtin_types_compatible_p(typeof(expr), unsigned short) ||       \
                __builtin_types_compatible_p(typeof(expr), const unsigned short) || \
                __builtin_types_compatible_p(typeof(expr), volatile unsigned short) || \
                __builtin_types_compatible_p(typeof(expr), const volatile unsigned short), \
            (unsigned short)1,                                                  \
        (expr)+0))))))
62
63#ifdef __ATOMIC_RELAXED
64
65
66
67
68
69
70
71
72
73
74
/*
 * Memory barriers built on the C11-style __atomic builtins.  A
 * barrier() is issued in addition to the fence.  NOTE(review): the
 * extra compiler barrier presumably guards against compiler versions
 * where __atomic_thread_fence is not also a full compiler barrier --
 * confirm against project history before removing.
 */
#define smp_mb() ({ barrier(); __atomic_thread_fence(__ATOMIC_SEQ_CST); })
#define smp_mb_release() ({ barrier(); __atomic_thread_fence(__ATOMIC_RELEASE); })
#define smp_mb_acquire() ({ barrier(); __atomic_thread_fence(__ATOMIC_ACQUIRE); })

/*
 * Barrier between a load and later loads that depend on its value
 * (RCU-style dependency ordering).  Under ThreadSanitizer a real
 * consume fence is emitted so the race detector sees the ordering;
 * Alpha needs an actual "mb" instruction even for dependent loads;
 * on all other targets a compiler barrier suffices.
 */
#if defined(__SANITIZE_THREAD__)
#define smp_read_barrier_depends() ({ barrier(); __atomic_thread_fence(__ATOMIC_CONSUME); })
#elif defined(__alpha__)
#define smp_read_barrier_depends() asm volatile("mb":::"memory")
#else
#define smp_read_barrier_depends() barrier()
#endif

/*
 * Fence ordering memory accesses with respect to a signal handler
 * running on the same thread; compiler-level only, emits no
 * instruction.
 */
#define signal_barrier() __atomic_signal_fence(__ATOMIC_SEQ_CST)
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
/*
 * Widest operand size (bytes) the checked qatomic_* accessors below
 * accept (enforced via QEMU_BUILD_BUG_ON on sizeof(*ptr)).  The listed
 * targets can do 8-byte atomic register accesses; NOTE(review): listing
 * them explicitly presumably covers ILP32 ABIs on 64-bit CPUs where
 * sizeof(void *) == 4 -- confirm.  Everywhere else, pointer size.
 */
#if defined(__x86_64__) || defined(__sparc__) || defined(__mips64)
# define ATOMIC_REG_SIZE 8
#else
# define ATOMIC_REG_SIZE sizeof(void *)
#endif
117
118
119
120
121
122
123
124
125
126
127
/* Relaxed atomic load, no size check; caller guarantees *ptr has a
 * size the target can load atomically. */
#define qatomic_read__nocheck(ptr) \
    __atomic_load_n(ptr, __ATOMIC_RELAXED)

/* Relaxed atomic load with a compile-time check that the operand is
 * no wider than ATOMIC_REG_SIZE. */
#define qatomic_read(ptr)                              \
    ({                                                 \
    QEMU_BUILD_BUG_ON(sizeof(*ptr) > ATOMIC_REG_SIZE); \
    qatomic_read__nocheck(ptr);                        \
    })

/* Relaxed atomic store, no size check. */
#define qatomic_set__nocheck(ptr, i) \
    __atomic_store_n(ptr, i, __ATOMIC_RELAXED)

/* Relaxed atomic store with compile-time size check. */
#define qatomic_set(ptr, i)  do {                      \
    QEMU_BUILD_BUG_ON(sizeof(*ptr) > ATOMIC_REG_SIZE); \
    qatomic_set__nocheck(ptr, i);                      \
} while(0)
144
145
146
147
/*
 * RCU-style load of a value published with qatomic_rcu_set().  Under
 * ThreadSanitizer, use a real consume load so the tool sees the
 * ordering; otherwise a relaxed load followed by
 * smp_read_barrier_depends() (a no-op except on Alpha, see above)
 * provides the dependency ordering.  The result is written through
 * @valptr rather than returned, hence the two-argument form.
 */
#ifdef __SANITIZE_THREAD__
#define qatomic_rcu_read__nocheck(ptr, valptr)            \
    __atomic_load(ptr, valptr, __ATOMIC_CONSUME);
#else
#define qatomic_rcu_read__nocheck(ptr, valptr)            \
    __atomic_load(ptr, valptr, __ATOMIC_RELAXED);         \
    smp_read_barrier_depends();
#endif

/* RCU read with size check; evaluates to the loaded value.  The
 * temporary uses typeof_strip_qual so const/volatile *ptr still
 * yields a writable local. */
#define qatomic_rcu_read(ptr)                          \
    ({                                                 \
    QEMU_BUILD_BUG_ON(sizeof(*ptr) > ATOMIC_REG_SIZE); \
    typeof_strip_qual(*ptr) _val;                      \
    qatomic_rcu_read__nocheck(ptr, &_val);             \
    _val;                                              \
    })

/* RCU publish: release store so prior initialization of the pointed-to
 * data is visible before the new value. */
#define qatomic_rcu_set(ptr, i) do {                   \
    QEMU_BUILD_BUG_ON(sizeof(*ptr) > ATOMIC_REG_SIZE); \
    __atomic_store_n(ptr, i, __ATOMIC_RELEASE);        \
} while(0)

/* Acquire load with size check; evaluates to the loaded value. */
#define qatomic_load_acquire(ptr)                      \
    ({                                                 \
    QEMU_BUILD_BUG_ON(sizeof(*ptr) > ATOMIC_REG_SIZE); \
    typeof_strip_qual(*ptr) _val;                      \
    __atomic_load(ptr, &_val, __ATOMIC_ACQUIRE);       \
    _val;                                              \
    })

/* Release store with size check. */
#define qatomic_store_release(ptr, i)  do {            \
    QEMU_BUILD_BUG_ON(sizeof(*ptr) > ATOMIC_REG_SIZE); \
    __atomic_store_n(ptr, i, __ATOMIC_RELEASE);        \
} while(0)
182
183
184
185
/* Sequentially consistent exchange; evaluates to the previous value. */
#define qatomic_xchg__nocheck(ptr, i)    ({                 \
    __atomic_exchange_n(ptr, (i), __ATOMIC_SEQ_CST);        \
})

/* Exchange with compile-time size check. */
#define qatomic_xchg(ptr, i)    ({                          \
    QEMU_BUILD_BUG_ON(sizeof(*ptr) > ATOMIC_REG_SIZE);      \
    qatomic_xchg__nocheck(ptr, i);                          \
})

/*
 * Compare-and-swap; evaluates to the value that was in *ptr before the
 * operation (equal to @old iff the swap happened), mirroring
 * __sync_val_compare_and_swap.  The "false" argument selects the
 * strong (non-spuriously-failing) form; the builtin's success flag is
 * discarded because _old is updated with the observed value anyway.
 */
#define qatomic_cmpxchg__nocheck(ptr, old, new)    ({           \
    typeof_strip_qual(*ptr) _old = (old);                       \
    (void)__atomic_compare_exchange_n(ptr, &_old, new, false,   \
                              __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST); \
    _old;                                                       \
})

/* Compare-and-swap with compile-time size check. */
#define qatomic_cmpxchg(ptr, old, new)    ({                    \
    QEMU_BUILD_BUG_ON(sizeof(*ptr) > ATOMIC_REG_SIZE);          \
    qatomic_cmpxchg__nocheck(ptr, old, new);                    \
})
207
208
/* Sequentially consistent read-modify-write operations.
 * qatomic_fetch_* return the value BEFORE the operation... */
#define qatomic_fetch_inc(ptr)  __atomic_fetch_add(ptr, 1, __ATOMIC_SEQ_CST)
#define qatomic_fetch_dec(ptr)  __atomic_fetch_sub(ptr, 1, __ATOMIC_SEQ_CST)

#define qatomic_fetch_add(ptr, n) __atomic_fetch_add(ptr, n, __ATOMIC_SEQ_CST)
#define qatomic_fetch_sub(ptr, n) __atomic_fetch_sub(ptr, n, __ATOMIC_SEQ_CST)
#define qatomic_fetch_and(ptr, n) __atomic_fetch_and(ptr, n, __ATOMIC_SEQ_CST)
#define qatomic_fetch_or(ptr, n)  __atomic_fetch_or(ptr, n, __ATOMIC_SEQ_CST)
#define qatomic_fetch_xor(ptr, n) __atomic_fetch_xor(ptr, n, __ATOMIC_SEQ_CST)

/* ...while qatomic_*_fetch return the value AFTER the operation. */
#define qatomic_inc_fetch(ptr)    __atomic_add_fetch(ptr, 1, __ATOMIC_SEQ_CST)
#define qatomic_dec_fetch(ptr)    __atomic_sub_fetch(ptr, 1, __ATOMIC_SEQ_CST)
#define qatomic_add_fetch(ptr, n) __atomic_add_fetch(ptr, n, __ATOMIC_SEQ_CST)
#define qatomic_sub_fetch(ptr, n) __atomic_sub_fetch(ptr, n, __ATOMIC_SEQ_CST)
#define qatomic_and_fetch(ptr, n) __atomic_and_fetch(ptr, n, __ATOMIC_SEQ_CST)
#define qatomic_or_fetch(ptr, n)  __atomic_or_fetch(ptr, n, __ATOMIC_SEQ_CST)
#define qatomic_xor_fetch(ptr, n) __atomic_xor_fetch(ptr, n, __ATOMIC_SEQ_CST)

/* Same operations, with the result explicitly discarded. */
#define qatomic_inc(ptr) \
    ((void) __atomic_fetch_add(ptr, 1, __ATOMIC_SEQ_CST))
#define qatomic_dec(ptr) \
    ((void) __atomic_fetch_sub(ptr, 1, __ATOMIC_SEQ_CST))
#define qatomic_add(ptr, n) \
    ((void) __atomic_fetch_add(ptr, n, __ATOMIC_SEQ_CST))
#define qatomic_sub(ptr, n) \
    ((void) __atomic_fetch_sub(ptr, n, __ATOMIC_SEQ_CST))
#define qatomic_and(ptr, n) \
    ((void) __atomic_fetch_and(ptr, n, __ATOMIC_SEQ_CST))
#define qatomic_or(ptr, n) \
    ((void) __atomic_fetch_or(ptr, n, __ATOMIC_SEQ_CST))
#define qatomic_xor(ptr, n) \
    ((void) __atomic_fetch_xor(ptr, n, __ATOMIC_SEQ_CST))
241
242#else
243
/* Fallback path: compiler lacks the __atomic builtins (no
 * __ATOMIC_RELAXED); use legacy __sync builtins and per-arch asm. */

#ifdef __alpha__
/* Alpha needs a real barrier even between dependent loads. */
#define smp_read_barrier_depends()   asm volatile("mb":::"memory")
#endif

#if defined(__i386__) || defined(__x86_64__) || defined(__s390x__)

/* These targets have strongly ordered stores, so acquire/release
 * ordering needs only a compiler barrier, no CPU instruction. */
#define smp_mb_release()   barrier()
#define smp_mb_acquire()   barrier()

/* __sync_lock_test_and_set is documented by GCC as an acquire barrier
 * only; the leading barrier() keeps the compiler from sinking earlier
 * accesses past it.  NOTE(review): full seq-cst here relies on the
 * x86/s390x ordering model -- confirm. */
#define qatomic_xchg(ptr, i)    (barrier(), __sync_lock_test_and_set(ptr, i))
264
#elif defined(_ARCH_PPC)

/*
 * PPC barriers: "eieio" orders stores (sufficient for smp_wmb);
 * "lwsync" is the lighter-weight barrier available on 64-bit parts,
 * used for acquire/release; "sync" is the full barrier.  On 32-bit
 * PPC only "sync" is used for acquire/release.  NOTE(review): lwsync
 * availability on all supported 32-bit parts is the presumed reason
 * for the split -- confirm.
 */
#define smp_wmb()   ({ asm volatile("eieio" ::: "memory"); (void)0; })
#if defined(__powerpc64__)
#define smp_mb_release()   ({ asm volatile("lwsync" ::: "memory"); (void)0; })
#define smp_mb_acquire()   ({ asm volatile("lwsync" ::: "memory"); (void)0; })
#else
#define smp_mb_release()   ({ asm volatile("sync" ::: "memory"); (void)0; })
#define smp_mb_acquire()   ({ asm volatile("sync" ::: "memory"); (void)0; })
#endif
#define smp_mb()    ({ asm volatile("sync" ::: "memory"); (void)0; })

#endif /* _ARCH_PPC */
286
287
288
289
290
291
292
/* Any barrier not provided by an architecture above defaults to a
 * full __sync_synchronize(), or to a plain compiler barrier where
 * that is all the generic semantics require. */
#ifndef smp_mb
#define smp_mb()    __sync_synchronize()
#endif

#ifndef smp_mb_acquire
#define smp_mb_acquire()   __sync_synchronize()
#endif

#ifndef smp_mb_release
#define smp_mb_release()   __sync_synchronize()
#endif

#ifndef smp_read_barrier_depends
#define smp_read_barrier_depends()   barrier()
#endif

#ifndef signal_barrier
#define signal_barrier()   barrier()
#endif

/*
 * Relaxed read/write implemented as an access through a
 * volatile-qualified lvalue: the compiler may not elide, duplicate or
 * move the access.  NOTE(review): freedom from tearing relies on the
 * access being a naturally aligned machine word -- no size check is
 * performed on this path.
 */
#define qatomic_read__nocheck(p)   (*(__typeof__(*(p)) volatile*) (p))
#define qatomic_set__nocheck(p, i) ((*(__typeof__(*(p)) volatile*) (p)) = (i))

#define qatomic_read(ptr)       qatomic_read__nocheck(ptr)
#define qatomic_set(ptr, i)     qatomic_set__nocheck(ptr,i)
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
/* RCU-style load: relaxed read plus a dependency barrier (no-op except
 * on Alpha); evaluates to the loaded value. */
#define qatomic_rcu_read(ptr)    ({               \
    typeof(*ptr) _val = qatomic_read(ptr);        \
    smp_read_barrier_depends();                   \
    _val;                                         \
})

/* RCU publish: write barrier before the store so prior initialization
 * of the pointed-to data is visible first. */
#define qatomic_rcu_set(ptr, i) do {              \
    smp_wmb();                                    \
    qatomic_set(ptr, i);                          \
} while (0)

/* Acquire load: plain read followed by an acquire barrier. */
#define qatomic_load_acquire(ptr)    ({           \
    typeof(*ptr) _val = qatomic_read(ptr);        \
    smp_mb_acquire();                             \
    _val;                                         \
})

/* Release store: release barrier before the write. */
#define qatomic_store_release(ptr, i)  do {       \
    smp_mb_release();                             \
    qatomic_set(ptr, i);                          \
} while (0)
372
/* Exchange, unless an architecture above already provided one.  Clang
 * has a dedicated __sync_swap; with GCC, __sync_lock_test_and_set is
 * only an acquire barrier per the GCC docs, so a full smp_mb() is
 * issued first to give the exchange full-barrier semantics. */
#ifndef qatomic_xchg
#if defined(__clang__)
#define qatomic_xchg(ptr, i)    __sync_swap(ptr, i)
#else
#define qatomic_xchg(ptr, i)    (smp_mb(), __sync_lock_test_and_set(ptr, i))
#endif
#endif
#define qatomic_xchg__nocheck  qatomic_xchg

/* Read-modify-write via legacy __sync builtins (all full barriers).
 * qatomic_fetch_* return the value before the operation... */
#define qatomic_fetch_inc(ptr)  __sync_fetch_and_add(ptr, 1)
#define qatomic_fetch_dec(ptr)  __sync_fetch_and_add(ptr, -1)

#define qatomic_fetch_add(ptr, n) __sync_fetch_and_add(ptr, n)
#define qatomic_fetch_sub(ptr, n) __sync_fetch_and_sub(ptr, n)
#define qatomic_fetch_and(ptr, n) __sync_fetch_and_and(ptr, n)
#define qatomic_fetch_or(ptr, n) __sync_fetch_and_or(ptr, n)
#define qatomic_fetch_xor(ptr, n) __sync_fetch_and_xor(ptr, n)

/* ...while qatomic_*_fetch return the value after the operation. */
#define qatomic_inc_fetch(ptr)  __sync_add_and_fetch(ptr, 1)
#define qatomic_dec_fetch(ptr)  __sync_add_and_fetch(ptr, -1)
#define qatomic_add_fetch(ptr, n) __sync_add_and_fetch(ptr, n)
#define qatomic_sub_fetch(ptr, n) __sync_sub_and_fetch(ptr, n)
#define qatomic_and_fetch(ptr, n) __sync_and_and_fetch(ptr, n)
#define qatomic_or_fetch(ptr, n) __sync_or_and_fetch(ptr, n)
#define qatomic_xor_fetch(ptr, n) __sync_xor_and_fetch(ptr, n)

/* Compare-and-swap; returns the previous value of *ptr. */
#define qatomic_cmpxchg(ptr, old, new) \
    __sync_val_compare_and_swap(ptr, old, new)
#define qatomic_cmpxchg__nocheck(ptr, old, new)  qatomic_cmpxchg(ptr, old, new)

/* Same operations with the result explicitly discarded. */
#define qatomic_inc(ptr)        ((void) __sync_fetch_and_add(ptr, 1))
#define qatomic_dec(ptr)        ((void) __sync_fetch_and_add(ptr, -1))
#define qatomic_add(ptr, n)     ((void) __sync_fetch_and_add(ptr, n))
#define qatomic_sub(ptr, n)     ((void) __sync_fetch_and_sub(ptr, n))
#define qatomic_and(ptr, n)     ((void) __sync_fetch_and_and(ptr, n))
#define qatomic_or(ptr, n)      ((void) __sync_fetch_and_or(ptr, n))
#define qatomic_xor(ptr, n)     ((void) __sync_fetch_and_xor(ptr, n))
413
414#endif
415
/* Write/read barriers default to the release/acquire barriers unless
 * an architecture above supplied a cheaper definition (e.g. PPC
 * eieio for smp_wmb). */
#ifndef smp_wmb
#define smp_wmb()   smp_mb_release()
#endif
#ifndef smp_rmb
#define smp_rmb()   smp_mb_acquire()
#endif

/* On these strongly ordered targets, implement the sequentially
 * consistent store as an exchange.  NOTE(review): presumably an xchg
 * is cheaper than store+full-fence here -- confirm.  Skipped under
 * ThreadSanitizer so the generic, instrumented path below is used. */
#if !defined(__SANITIZE_THREAD__)
#if defined(__i386__) || defined(__x86_64__) || defined(__s390x__)
#define qatomic_mb_set(ptr, i)  ((void)qatomic_xchg(ptr, i))
#endif
#endif
429
430
431
432
433
434
435
436
437
/* Generic sequentially consistent load: an acquire load.
 * NOTE(review): seq-cst strength comes from every qatomic_mb_set
 * ending in a full barrier -- confirm the pairing invariant. */
#ifndef qatomic_mb_read
#define qatomic_mb_read(ptr)                             \
    qatomic_load_acquire(ptr)
#endif

/* Generic sequentially consistent store: release store followed by a
 * full barrier. */
#ifndef qatomic_mb_set
#define qatomic_mb_set(ptr, i)  do {                    \
    qatomic_store_release(ptr, i);                      \
    smp_mb();                                           \
} while(0)
#endif
449
/*
 * Atomically increment *@ptr only if it is currently non-zero, using a
 * compare-and-swap retry loop.  Evaluates to the value observed before
 * the increment; 0 means no increment took place.
 *
 * Fix: bind @ptr to a local once.  Previously @ptr was expanded into
 * qatomic_read() and qatomic_cmpxchg() and re-expanded on every retry,
 * so a side-effecting argument would be evaluated multiple times.
 * The value argument semantics and the returned value are unchanged.
 */
#define qatomic_fetch_inc_nonzero(ptr) ({                               \
    typeof(ptr) _nzptr = (ptr);                                         \
    typeof_strip_qual(*_nzptr) _oldn = qatomic_read(_nzptr);            \
    while (_oldn && qatomic_cmpxchg(_nzptr, _oldn, _oldn + 1) != _oldn) { \
        _oldn = qatomic_read(_nzptr);                                   \
    }                                                                   \
    _oldn;                                                              \
})
457
458
/*
 * 64-bit accessors.  With CONFIG_ATOMIC64 the host supports 64-bit
 * atomics, so these are thin inline wrappers over the relaxed
 * __nocheck accessors (the ATOMIC_REG_SIZE check is deliberately
 * bypassed since the operand is known to be 8 bytes).  Without
 * CONFIG_ATOMIC64, out-of-line implementations are declared here and
 * defined elsewhere; NOTE(review): qatomic64_init() presumably sets up
 * whatever state (e.g. locks) that fallback needs -- see its
 * definition.
 */
#ifdef CONFIG_ATOMIC64
/* Relaxed 64-bit load. */
static inline int64_t qatomic_read_i64(const int64_t *ptr)
{
    /* No need for ATOMIC_REG_SIZE check here; 64-bit atomics exist. */
    return qatomic_read__nocheck(ptr);
}

/* Relaxed 64-bit load, unsigned flavor. */
static inline uint64_t qatomic_read_u64(const uint64_t *ptr)
{
    return qatomic_read__nocheck(ptr);
}

/* Relaxed 64-bit store. */
static inline void qatomic_set_i64(int64_t *ptr, int64_t val)
{
    qatomic_set__nocheck(ptr, val);
}

/* Relaxed 64-bit store, unsigned flavor. */
static inline void qatomic_set_u64(uint64_t *ptr, uint64_t val)
{
    qatomic_set__nocheck(ptr, val);
}

/* Nothing to initialize when native 64-bit atomics are available. */
static inline void qatomic64_init(void)
{
}
#else /* !CONFIG_ATOMIC64 */
int64_t qatomic_read_i64(const int64_t *ptr);
uint64_t qatomic_read_u64(const uint64_t *ptr);
void qatomic_set_i64(int64_t *ptr, int64_t val);
void qatomic_set_u64(uint64_t *ptr, uint64_t val);
void qatomic64_init(void);
#endif /* !CONFIG_ATOMIC64 */
491
492#endif
493