/*
 * Simple interface for atomic operations.
 *
 * Copyright (C) 2013 Red Hat, Inc.
 *
 * Author: Paolo Bonzini <pbonzini@redhat.com>
 *
 * This work is licensed under the terms of the GNU GPL, version 2 or later.
 * See the COPYING file in the top-level directory.
 *
 * See docs/devel/atomics.rst for discussion about the guarantees each
 * atomic primitive is meant to provide.
 */
#ifndef QEMU_ATOMIC_H
#define QEMU_ATOMIC_H

#include "compiler.h"

/* Compiler barrier */
#define barrier()   ({ asm volatile("" ::: "memory"); (void)0; })

/*
 * The variable that receives the old value of an atomically-accessed
 * variable must be non-qualified, because atomic builtins return values
 * through a pointer-type argument as in __atomic_load(&var, &old, MODEL).
 *
 * This macro has to handle types smaller than int manually, because of
 * implicit promotion.  int and larger types, as well as pointers, can be
 * converted to a non-qualified type just by applying a binary operator.
 */
#define typeof_strip_qual(expr)                                                    \
  typeof(                                                                          \
    __builtin_choose_expr(                                                         \
      __builtin_types_compatible_p(typeof(expr), bool) ||                          \
        __builtin_types_compatible_p(typeof(expr), const bool) ||                  \
        __builtin_types_compatible_p(typeof(expr), volatile bool) ||               \
        __builtin_types_compatible_p(typeof(expr), const volatile bool),           \
        (bool)1,                                                                   \
    __builtin_choose_expr(                                                         \
      __builtin_types_compatible_p(typeof(expr), signed char) ||                   \
        __builtin_types_compatible_p(typeof(expr), const signed char) ||           \
        __builtin_types_compatible_p(typeof(expr), volatile signed char) ||        \
        __builtin_types_compatible_p(typeof(expr), const volatile signed char),    \
        (signed char)1,                                                            \
    __builtin_choose_expr(                                                         \
      __builtin_types_compatible_p(typeof(expr), unsigned char) ||                 \
        __builtin_types_compatible_p(typeof(expr), const unsigned char) ||         \
        __builtin_types_compatible_p(typeof(expr), volatile unsigned char) ||      \
        __builtin_types_compatible_p(typeof(expr), const volatile unsigned char),  \
        (unsigned char)1,                                                          \
    __builtin_choose_expr(                                                         \
      __builtin_types_compatible_p(typeof(expr), signed short) ||                  \
        __builtin_types_compatible_p(typeof(expr), const signed short) ||          \
        __builtin_types_compatible_p(typeof(expr), volatile signed short) ||       \
        __builtin_types_compatible_p(typeof(expr), const volatile signed short),   \
        (signed short)1,                                                           \
    __builtin_choose_expr(                                                         \
      __builtin_types_compatible_p(typeof(expr), unsigned short) ||                \
        __builtin_types_compatible_p(typeof(expr), const unsigned short) ||        \
        __builtin_types_compatible_p(typeof(expr), volatile unsigned short) ||     \
        __builtin_types_compatible_p(typeof(expr), const volatile unsigned short), \
        (unsigned short)1,                                                         \
      (expr)+0))))))
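
/*
 * Illustrative sketch (not part of the original header): the qualifier
 * stripping matters because __atomic_load() writes its result through a
 * pointer, and that destination must not be const/volatile-qualified.
 * The variable names below are hypothetical:
 *
 *     volatile bool flag;
 *     typeof(flag) v1;              // volatile bool: unusable as the target
 *     typeof_strip_qual(flag) v2;   // plain bool: valid for __atomic_load
 */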

#ifndef __ATOMIC_RELAXED
#error "Expecting C11 atomic ops"
#endif

/*
 * Manual memory barriers.
 *
 * __atomic_thread_fence does not include a compiler barrier; instead,
 * the barrier is part of __atomic_load/__atomic_store's "volatile-like"
 * semantics.  If smp_wmb() is a no-op, absence of the barrier means that
 * the compiler is free to reorder stores on each side of the barrier.
 * Add one here, and similarly in smp_rmb() and smp_read_barrier_depends().
 */
#define smp_mb()          ({ barrier(); __atomic_thread_fence(__ATOMIC_SEQ_CST); })
#define smp_mb_release()  ({ barrier(); __atomic_thread_fence(__ATOMIC_RELEASE); })
#define smp_mb_acquire()  ({ barrier(); __atomic_thread_fence(__ATOMIC_ACQUIRE); })

/*
 * Most compilers currently treat consume and acquire the same, but really
 * no processors except Alpha need a barrier here.  Leave it in if
 * using Thread Sanitizer to avoid warnings, otherwise optimize it away.
 */
#ifdef QEMU_SANITIZE_THREAD
#define smp_read_barrier_depends() ({ barrier(); __atomic_thread_fence(__ATOMIC_CONSUME); })
#elif defined(__alpha__)
#define smp_read_barrier_depends()   asm volatile("mb":::"memory")
#else
#define smp_read_barrier_depends()   barrier()
#endif

/*
 * A signal barrier forces all pending local memory ops to be observed before
 * a SIGSEGV is delivered to the same thread.  In practice this is exactly
 * the same as barrier(), but since we have the correct builtin and on some
 * platforms there is no reasonable replacement, use it.
 */
#define signal_barrier()    __atomic_signal_fence(__ATOMIC_SEQ_CST)

/*
 * Sanity check that the size of an atomic operation isn't "overly large".
 * Despite the fact that e.g. i686 has 64-bit atomic operations, we do not
 * want to use them because we ought not need them, and this lets us do a
 * bit of sanity checking that other 32-bit hosts might build.
 *
 * That said, we have a problem on 64-bit ILP32 hosts in that in order to
 * sync with TCG_OVERSIZED_GUEST, this must match TCG_TARGET_REG_BITS.
 * We'd prefer not to pull in everything else TCG related, so handle
 * those few cases by hand.
 *
 * Note that x32 is fully detected with __x86_64__ + _ILP32, and that for
 * Sparc we always force the use of sparcv9 in configure.  MIPS n32 (ILP32)
 * and n64 (LP64) ABIs are both detected using __mips64.
 */
#if defined(__x86_64__) || defined(__sparc__) || defined(__mips64)
# define ATOMIC_REG_SIZE  8
#else
# define ATOMIC_REG_SIZE  sizeof(void *)
#endif

/*
 * Weak atomic operations prevent the compiler moving other
 * loads/stores past the atomic operation load/store.  However there is
 * no explicit memory barrier for the processor.
 *
 * The C11 memory model says that variables that are accessed from
 * different threads should at least be done with __ATOMIC_RELAXED
 * primitives or the result is undefined.  Generally this has little to
 * no effect on the generated code but not using the atomic primitives
 * will get flagged by sanitizers as a violation.
 */
#define qatomic_read__nocheck(ptr) \
    __atomic_load_n(ptr, __ATOMIC_RELAXED)

#define qatomic_read(ptr)                                 \
    ({                                                    \
    qemu_build_assert(sizeof(*ptr) <= ATOMIC_REG_SIZE);   \
    qatomic_read__nocheck(ptr);                           \
    })

#define qatomic_set__nocheck(ptr, i) \
    __atomic_store_n(ptr, i, __ATOMIC_RELAXED)

#define qatomic_set(ptr, i)  do {                         \
    qemu_build_assert(sizeof(*ptr) <= ATOMIC_REG_SIZE);   \
    qatomic_set__nocheck(ptr, i);                         \
} while(0)
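
/*
 * Usage sketch (illustrative only; the names below are hypothetical): a
 * flag polled from another thread must be accessed with qatomic_read()/
 * qatomic_set() rather than plain loads/stores, or the access is a data
 * race under the C11 model:
 *
 *     static int exit_request;
 *
 *     void request_exit(void)        // thread A
 *     {
 *         qatomic_set(&exit_request, 1);
 *     }
 *
 *     void poll_loop(void)           // thread B
 *     {
 *         while (!qatomic_read(&exit_request)) {
 *             do_work();
 *         }
 *     }
 *
 * Relaxed accesses order nothing else; use the acquire/release variants
 * below when the flag must also publish other data.
 */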

/* See above: most compilers currently treat consume and acquire the
 * same, but this slows down qatomic_rcu_read unnecessarily.
 */
#ifdef QEMU_SANITIZE_THREAD
#define qatomic_rcu_read__nocheck(ptr, valptr)            \
    __atomic_load(ptr, valptr, __ATOMIC_CONSUME);
#else
#define qatomic_rcu_read__nocheck(ptr, valptr)            \
    __atomic_load(ptr, valptr, __ATOMIC_RELAXED);         \
    smp_read_barrier_depends();
#endif

#define qatomic_rcu_read(ptr)                             \
    ({                                                    \
    qemu_build_assert(sizeof(*ptr) <= ATOMIC_REG_SIZE);   \
    typeof_strip_qual(*ptr) _val;                         \
    qatomic_rcu_read__nocheck(ptr, &_val);                \
    _val;                                                 \
    })

#define qatomic_rcu_set(ptr, i) do {                      \
    qemu_build_assert(sizeof(*ptr) <= ATOMIC_REG_SIZE);   \
    __atomic_store_n(ptr, i, __ATOMIC_RELEASE);           \
} while(0)
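
/*
 * Usage sketch (illustrative; the struct and variable names are
 * hypothetical): qatomic_rcu_set() publishes a fully-initialized
 * structure, and a reader that dereferences the pointer obtained with
 * qatomic_rcu_read() is guaranteed to see the initializing stores:
 *
 *     struct foo *global_foo;
 *
 *     void publish(void)                     // writer
 *     {
 *         struct foo *f = g_new0(struct foo, 1);
 *         f->a = 42;                         // ordered before the set
 *         qatomic_rcu_set(&global_foo, f);
 *     }
 *
 *     int reader(void)
 *     {
 *         struct foo *f = qatomic_rcu_read(&global_foo);
 *         return f ? f->a : 0;               // sees f->a == 42
 *     }
 *
 * Reclaiming a replaced pointer still requires an RCU grace period
 * (see include/qemu/rcu.h).
 */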

#define qatomic_load_acquire(ptr)                         \
    ({                                                    \
    qemu_build_assert(sizeof(*ptr) <= ATOMIC_REG_SIZE);   \
    typeof_strip_qual(*ptr) _val;                         \
    __atomic_load(ptr, &_val, __ATOMIC_ACQUIRE);          \
    _val;                                                 \
    })

#define qatomic_store_release(ptr, i)  do {               \
    qemu_build_assert(sizeof(*ptr) <= ATOMIC_REG_SIZE);   \
    __atomic_store_n(ptr, i, __ATOMIC_RELEASE);           \
} while(0)
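
/*
 * Usage sketch (illustrative, hypothetical names): the classic
 * message-passing pattern.  The release store makes the payload visible
 * to any thread whose acquire load observes ready == true:
 *
 *     static int payload;
 *     static bool ready;
 *
 *     void send(void)                        // thread A
 *     {
 *         payload = compute();
 *         qatomic_store_release(&ready, true);
 *     }
 *
 *     void recv(void)                        // thread B
 *     {
 *         if (qatomic_load_acquire(&ready)) {
 *             use(payload);                  // guaranteed to see compute()
 *         }
 *     }
 */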

/* All the remaining operations are fully sequentially consistent */

#define qatomic_xchg__nocheck(ptr, i)    ({               \
    __atomic_exchange_n(ptr, (i), __ATOMIC_SEQ_CST);      \
})

#define qatomic_xchg(ptr, i)    ({                        \
    qemu_build_assert(sizeof(*ptr) <= ATOMIC_REG_SIZE);   \
    qatomic_xchg__nocheck(ptr, i);                        \
})

/* Returns the eventual value, failed or not */
#define qatomic_cmpxchg__nocheck(ptr, old, new)    ({             \
    typeof_strip_qual(*ptr) _old = (old);                         \
    (void)__atomic_compare_exchange_n(ptr, &_old, new, false,     \
                              __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST); \
    _old;                                                         \
})

#define qatomic_cmpxchg(ptr, old, new)    ({              \
    qemu_build_assert(sizeof(*ptr) <= ATOMIC_REG_SIZE);   \
    qatomic_cmpxchg__nocheck(ptr, old, new);              \
})
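
/*
 * Usage sketch (illustrative; set_max is a hypothetical helper): the usual
 * compare-and-swap retry loop.  qatomic_cmpxchg() returns the value that
 * was actually in memory, so success is detected by comparing it against
 * the expected old value:
 *
 *     void set_max(int *ptr, int val)
 *     {
 *         int old = qatomic_read(ptr);
 *         while (old < val) {
 *             int seen = qatomic_cmpxchg(ptr, old, val);
 *             if (seen == old) {
 *                 break;                 // we installed val
 *             }
 *             old = seen;                // another thread won; retry
 *         }
 *     }
 */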

/* Provide shorter names for GCC atomic builtins, return old value */
#define qatomic_fetch_inc(ptr)  __atomic_fetch_add(ptr, 1, __ATOMIC_SEQ_CST)
#define qatomic_fetch_dec(ptr)  __atomic_fetch_sub(ptr, 1, __ATOMIC_SEQ_CST)

#define qatomic_fetch_add(ptr, n) __atomic_fetch_add(ptr, n, __ATOMIC_SEQ_CST)
#define qatomic_fetch_sub(ptr, n) __atomic_fetch_sub(ptr, n, __ATOMIC_SEQ_CST)
#define qatomic_fetch_and(ptr, n) __atomic_fetch_and(ptr, n, __ATOMIC_SEQ_CST)
#define qatomic_fetch_or(ptr, n)  __atomic_fetch_or(ptr, n, __ATOMIC_SEQ_CST)
#define qatomic_fetch_xor(ptr, n) __atomic_fetch_xor(ptr, n, __ATOMIC_SEQ_CST)

/* Variants of the above that return the new value instead */
#define qatomic_inc_fetch(ptr)    __atomic_add_fetch(ptr, 1, __ATOMIC_SEQ_CST)
#define qatomic_dec_fetch(ptr)    __atomic_sub_fetch(ptr, 1, __ATOMIC_SEQ_CST)
#define qatomic_add_fetch(ptr, n) __atomic_add_fetch(ptr, n, __ATOMIC_SEQ_CST)
#define qatomic_sub_fetch(ptr, n) __atomic_sub_fetch(ptr, n, __ATOMIC_SEQ_CST)
#define qatomic_and_fetch(ptr, n) __atomic_and_fetch(ptr, n, __ATOMIC_SEQ_CST)
#define qatomic_or_fetch(ptr, n)  __atomic_or_fetch(ptr, n, __ATOMIC_SEQ_CST)
#define qatomic_xor_fetch(ptr, n) __atomic_xor_fetch(ptr, n, __ATOMIC_SEQ_CST)

/* And even shorter names that return void */
#define qatomic_inc(ptr) \
    ((void) __atomic_fetch_add(ptr, 1, __ATOMIC_SEQ_CST))
#define qatomic_dec(ptr) \
    ((void) __atomic_fetch_sub(ptr, 1, __ATOMIC_SEQ_CST))
#define qatomic_add(ptr, n) \
    ((void) __atomic_fetch_add(ptr, n, __ATOMIC_SEQ_CST))
#define qatomic_sub(ptr, n) \
    ((void) __atomic_fetch_sub(ptr, n, __ATOMIC_SEQ_CST))
#define qatomic_and(ptr, n) \
    ((void) __atomic_fetch_and(ptr, n, __ATOMIC_SEQ_CST))
#define qatomic_or(ptr, n) \
    ((void) __atomic_fetch_or(ptr, n, __ATOMIC_SEQ_CST))
#define qatomic_xor(ptr, n) \
    ((void) __atomic_fetch_xor(ptr, n, __ATOMIC_SEQ_CST))
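
/*
 * Usage sketch (illustrative; names and free_object() are hypothetical):
 * prefer the void-returning forms when the result is unused, e.g. for
 * statistics counters, and the fetch forms when the previous value
 * decides the outcome:
 *
 *     static unsigned int stats_packets;
 *     static int refcount;
 *
 *     qatomic_inc(&stats_packets);            // count, result unused
 *
 *     if (qatomic_fetch_dec(&refcount) == 1) {
 *         free_object();                      // we dropped the last ref
 *     }
 */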

#define smp_wmb()   smp_mb_release()
#define smp_rmb()   smp_mb_acquire()
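
/*
 * Pairing sketch (illustrative; data/valid are hypothetical): smp_wmb()
 * in the producer pairs with smp_rmb() in the consumer around
 * otherwise-relaxed accesses:
 *
 *     // producer                        // consumer
 *     qatomic_set(&data, 123);           if (qatomic_read(&valid)) {
 *     smp_wmb();                             smp_rmb();
 *     qatomic_set(&valid, 1);                use(qatomic_read(&data));
 *                                        }
 *
 * This gives the same message-passing guarantee as using
 * qatomic_store_release()/qatomic_load_acquire() on 'valid'.
 */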

/*
 * SEQ_CST is weaker than the older __sync_* builtins and Linux
 * kernel read-modify-write atomics.  Provide a macro to obtain
 * the same semantics.
 */
#if !defined(QEMU_SANITIZE_THREAD) && \
    (defined(__i386__) || defined(__x86_64__) || defined(__s390x__))
# define smp_mb__before_rmw() signal_barrier()
# define smp_mb__after_rmw() signal_barrier()
#else
# define smp_mb__before_rmw() smp_mb()
# define smp_mb__after_rmw() smp_mb()
#endif

/*
 * On some architectures, qatomic_set_mb is more efficient than a store
 * plus a fence: on TSO hosts a sequentially consistent exchange already
 * implies the trailing full barrier.
 */
#if !defined(QEMU_SANITIZE_THREAD) && \
    (defined(__i386__) || defined(__x86_64__) || defined(__s390x__))
# define qatomic_set_mb(ptr, i) \
    ({ (void)qatomic_xchg(ptr, i); smp_mb__after_rmw(); })
#else
# define qatomic_set_mb(ptr, i) \
    ({ qatomic_store_release(ptr, i); smp_mb(); })
#endif
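
/*
 * Usage sketch (illustrative; the fields are hypothetical): qatomic_set_mb()
 * is for the Dekker-style store->load pattern, where after setting a flag
 * the same thread must observe a flag another thread may have set:
 *
 *     qatomic_set_mb(&self->in_use, true);  // store, then full barrier
 *     if (qatomic_read(&other->in_use)) {   // cannot move above the store
 *         back_off();
 *     }
 */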

/* Increment the value at @ptr unless it is zero; returns the old value */
#define qatomic_fetch_inc_nonzero(ptr) ({                               \
    typeof_strip_qual(*ptr) _oldn = qatomic_read(ptr);                  \
    while (_oldn && qatomic_cmpxchg(ptr, _oldn, _oldn + 1) != _oldn) {  \
        _oldn = qatomic_read(ptr);                                      \
    }                                                                   \
    _oldn;                                                              \
})
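
/*
 * Usage sketch (illustrative; the wrapper and its types are hypothetical):
 * the typical "try-get" on a reference count that may already have dropped
 * to zero, in which case the object must not be revived:
 *
 *     bool object_try_get(MyObject *obj)
 *     {
 *         return qatomic_fetch_inc_nonzero(&obj->ref) > 0;
 *     }
 */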

/*
 * Abstractions to access atomically (i.e. "once") i64/u64 variables.
 *
 * The i386 abi is odd in that by default members are only aligned to
 * 4 bytes, which means that 8-byte types can wind up mis-aligned.
 * Clang will then warn about this, and emit a call into libatomic.
 *
 * Use of these types in structures when they will be used with atomic
 * operations avoids this.
 */
typedef int64_t aligned_int64_t __attribute__((aligned(8)));
typedef uint64_t aligned_uint64_t __attribute__((aligned(8)));

#ifdef CONFIG_ATOMIC64
/* Use __nocheck because sizeof(void *) might be < sizeof(u64) */
#define qatomic_read_i64(P)                          \
    _Generic(*(P), int64_t: qatomic_read__nocheck(P))
#define qatomic_read_u64(P)                          \
    _Generic(*(P), uint64_t: qatomic_read__nocheck(P))
#define qatomic_set_i64(P, V)                        \
    _Generic(*(P), int64_t: qatomic_set__nocheck(P, V))
#define qatomic_set_u64(P, V)                        \
    _Generic(*(P), uint64_t: qatomic_set__nocheck(P, V))

static inline void qatomic64_init(void)
{
}
#else
int64_t qatomic_read_i64(const int64_t *ptr);
uint64_t qatomic_read_u64(const uint64_t *ptr);
void qatomic_set_i64(int64_t *ptr, int64_t val);
void qatomic_set_u64(uint64_t *ptr, uint64_t val);
void qatomic64_init(void);
#endif /* CONFIG_ATOMIC64 */
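
/*
 * Usage sketch (illustrative; the struct is hypothetical): use the aligned
 * typedefs for 64-bit fields that are accessed atomically, so the field
 * cannot end up mis-aligned on i386:
 *
 *     typedef struct Counter {
 *         aligned_uint64_t bytes;
 *     } Counter;
 *
 *     void add_bytes(Counter *c, uint64_t n)      // single writer only
 *     {
 *         qatomic_set_u64(&c->bytes, qatomic_read_u64(&c->bytes) + n);
 *     }
 *
 * Note the read-modify-write above is two separate atomic accesses, not
 * an atomic increment; it is safe only with a single writer.
 */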

#endif /* QEMU_ATOMIC_H */