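/*
 * Atomic operations and memory barrier helpers (the qatomic_* API),
 * built on the C11-style __atomic compiler builtins.
 */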
#ifndef QEMU_ATOMIC_H
#define QEMU_ATOMIC_H
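
/*
 * QEMU_BUILD_BUG_ON() used throughout this header is a compile-time
 * assertion; it is assumed to come from "qemu/compiler.h".
 */
#include "qemu/compiler.h"

/*
 * barrier() is a compiler-only barrier: it keeps the compiler from
 * reordering or combining memory accesses across it, but emits no CPU
 * fence instruction.
 */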
#define barrier()   ({ asm volatile("" ::: "memory"); (void)0; })
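
/*
 * typeof_strip_qual(expr) evaluates to the type of @expr with any
 * const/volatile qualifiers removed.  The "(expr)+0" fallback would
 * promote types narrower than int, so bool, signed/unsigned char and
 * signed/unsigned short are matched explicitly and mapped to their
 * unqualified type.  This is used to declare temporaries that receive a
 * value through __atomic_load(&var, &tmp, order).
 */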
#define typeof_strip_qual(expr) \
  typeof( \
    __builtin_choose_expr( \
      __builtin_types_compatible_p(typeof(expr), bool) || \
        __builtin_types_compatible_p(typeof(expr), const bool) || \
        __builtin_types_compatible_p(typeof(expr), volatile bool) || \
        __builtin_types_compatible_p(typeof(expr), const volatile bool), \
        (bool)1, \
    __builtin_choose_expr( \
      __builtin_types_compatible_p(typeof(expr), signed char) || \
        __builtin_types_compatible_p(typeof(expr), const signed char) || \
        __builtin_types_compatible_p(typeof(expr), volatile signed char) || \
        __builtin_types_compatible_p(typeof(expr), const volatile signed char), \
        (signed char)1, \
    __builtin_choose_expr( \
      __builtin_types_compatible_p(typeof(expr), unsigned char) || \
        __builtin_types_compatible_p(typeof(expr), const unsigned char) || \
        __builtin_types_compatible_p(typeof(expr), volatile unsigned char) || \
        __builtin_types_compatible_p(typeof(expr), const volatile unsigned char), \
        (unsigned char)1, \
    __builtin_choose_expr( \
      __builtin_types_compatible_p(typeof(expr), signed short) || \
        __builtin_types_compatible_p(typeof(expr), const signed short) || \
        __builtin_types_compatible_p(typeof(expr), volatile signed short) || \
        __builtin_types_compatible_p(typeof(expr), const volatile signed short), \
        (signed short)1, \
    __builtin_choose_expr( \
      __builtin_types_compatible_p(typeof(expr), unsigned short) || \
        __builtin_types_compatible_p(typeof(expr), const unsigned short) || \
        __builtin_types_compatible_p(typeof(expr), volatile unsigned short) || \
        __builtin_types_compatible_p(typeof(expr), const volatile unsigned short), \
        (unsigned short)1, \
      (expr)+0))))))

#ifndef __ATOMIC_RELAXED
#error "Expecting C11 atomic ops"
#endif
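
/*
 * Manual memory barriers.  Each fence is paired with barrier() so that
 * the compiler cannot reorder accesses around it, independently of how
 * __atomic_thread_fence() itself is treated.
 */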
#define smp_mb()         ({ barrier(); __atomic_thread_fence(__ATOMIC_SEQ_CST); })
#define smp_mb_release() ({ barrier(); __atomic_thread_fence(__ATOMIC_RELEASE); })
#define smp_mb_acquire() ({ barrier(); __atomic_thread_fence(__ATOMIC_ACQUIRE); })
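
/*
 * smp_read_barrier_depends() orders a dependent load after the load of
 * the pointer it depends on.  Address dependencies already provide this
 * ordering on most CPUs, so a compiler barrier suffices; Alpha needs a
 * real memory barrier, and ThreadSanitizer builds use a consume fence so
 * the ordering is visible to the tool.
 */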
#if defined(__SANITIZE_THREAD__)
#define smp_read_barrier_depends()   ({ barrier(); __atomic_thread_fence(__ATOMIC_CONSUME); })
#elif defined(__alpha__)
#define smp_read_barrier_depends()   asm volatile("mb":::"memory")
#else
#define smp_read_barrier_depends()   barrier()
#endif
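
/*
 * signal_barrier() orders memory accesses with respect to code running
 * in the same thread, such as a signal handler; it is a compiler-level
 * fence and emits no CPU barrier instruction.
 */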
#define signal_barrier()    __atomic_signal_fence(__ATOMIC_SEQ_CST)
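
/*
 * ATOMIC_REG_SIZE is the largest operand size (in bytes) accepted by the
 * checked qatomic_* accessors.  It is normally the pointer size; x86-64,
 * SPARC and MIPS64 are listed explicitly because they have 64-bit
 * registers (and 8-byte atomics) even when building for a 32-bit ABI
 * such as x32 or MIPS n32.
 */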
#if defined(__x86_64__) || defined(__sparc__) || defined(__mips64)
# define ATOMIC_REG_SIZE  8
#else
# define ATOMIC_REG_SIZE  sizeof(void *)
#endif
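
/*
 * Weak atomic operations: relaxed loads and stores.  The checked
 * variants reject operands wider than ATOMIC_REG_SIZE at compile time;
 * the __nocheck variants skip that assertion.
 */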
#define qatomic_read__nocheck(ptr) \
    __atomic_load_n(ptr, __ATOMIC_RELAXED)

#define qatomic_read(ptr)                              \
    ({                                                 \
    QEMU_BUILD_BUG_ON(sizeof(*ptr) > ATOMIC_REG_SIZE); \
    qatomic_read__nocheck(ptr);                        \
    })

#define qatomic_set__nocheck(ptr, i) \
    __atomic_store_n(ptr, i, __ATOMIC_RELAXED)

#define qatomic_set(ptr, i) do {                       \
    QEMU_BUILD_BUG_ON(sizeof(*ptr) > ATOMIC_REG_SIZE); \
    qatomic_set__nocheck(ptr, i);                      \
} while (0)
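
/*
 * RCU-style accessors.  qatomic_rcu_read() is a relaxed load followed by
 * smp_read_barrier_depends(), which is what dereferencing an
 * RCU-protected pointer requires; under ThreadSanitizer a consume load
 * is used instead so the dependency is visible to the tool.
 *
 * Sketch of the intended pairing (hypothetical variables):
 *
 *     // writer                          // reader
 *     node->val = 42;                    p = qatomic_rcu_read(&head);
 *     qatomic_rcu_set(&head, node);      val = p->val;
 */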
#ifdef __SANITIZE_THREAD__
#define qatomic_rcu_read__nocheck(ptr, valptr)         \
    __atomic_load(ptr, valptr, __ATOMIC_CONSUME);
#else
#define qatomic_rcu_read__nocheck(ptr, valptr)         \
    __atomic_load(ptr, valptr, __ATOMIC_RELAXED);      \
    smp_read_barrier_depends();
#endif

#define qatomic_rcu_read(ptr)                          \
    ({                                                 \
    QEMU_BUILD_BUG_ON(sizeof(*ptr) > ATOMIC_REG_SIZE); \
    typeof_strip_qual(*ptr) _val;                      \
    qatomic_rcu_read__nocheck(ptr, &_val);             \
    _val;                                              \
    })

#define qatomic_rcu_set(ptr, i) do {                   \
    QEMU_BUILD_BUG_ON(sizeof(*ptr) > ATOMIC_REG_SIZE); \
    __atomic_store_n(ptr, i, __ATOMIC_RELEASE);        \
} while (0)

#define qatomic_load_acquire(ptr)                      \
    ({                                                 \
    QEMU_BUILD_BUG_ON(sizeof(*ptr) > ATOMIC_REG_SIZE); \
    typeof_strip_qual(*ptr) _val;                      \
    __atomic_load(ptr, &_val, __ATOMIC_ACQUIRE);       \
    _val;                                              \
    })

#define qatomic_store_release(ptr, i) do {             \
    QEMU_BUILD_BUG_ON(sizeof(*ptr) > ATOMIC_REG_SIZE); \
    __atomic_store_n(ptr, i, __ATOMIC_RELEASE);        \
} while (0)
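
/*
 * The exchange, compare-and-swap and read-modify-write operations below
 * are sequentially consistent (__ATOMIC_SEQ_CST).
 */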
#define qatomic_xchg__nocheck(ptr, i)    ({            \
    __atomic_exchange_n(ptr, (i), __ATOMIC_SEQ_CST);   \
})

#define qatomic_xchg(ptr, i)    ({                     \
    QEMU_BUILD_BUG_ON(sizeof(*ptr) > ATOMIC_REG_SIZE); \
    qatomic_xchg__nocheck(ptr, i);                     \
})
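
/*
 * qatomic_cmpxchg() returns the value *ptr held before the operation,
 * whether or not the exchange succeeded; callers compare it against @old
 * to tell.  Usage sketch (hypothetical variable):
 *
 *     if (qatomic_cmpxchg(&counter, 0, 1) == 0) {
 *         // we installed 1 and won the race
 *     }
 */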
#define qatomic_cmpxchg__nocheck(ptr, old, new)    ({         \
    typeof_strip_qual(*ptr) _old = (old);                     \
    (void)__atomic_compare_exchange_n(ptr, &_old, new, false, \
                              __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST); \
    _old;                                                     \
})

#define qatomic_cmpxchg(ptr, old, new)    ({           \
    QEMU_BUILD_BUG_ON(sizeof(*ptr) > ATOMIC_REG_SIZE); \
    qatomic_cmpxchg__nocheck(ptr, old, new);           \
})
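
/*
 * Shorter names for the __atomic arithmetic/logic builtins.  The
 * qatomic_fetch_* forms return the value the variable held before the
 * operation; the qatomic_*_fetch forms return the value after it.
 */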
#define qatomic_fetch_inc(ptr)  __atomic_fetch_add(ptr, 1, __ATOMIC_SEQ_CST)
#define qatomic_fetch_dec(ptr)  __atomic_fetch_sub(ptr, 1, __ATOMIC_SEQ_CST)

#define qatomic_fetch_add(ptr, n) __atomic_fetch_add(ptr, n, __ATOMIC_SEQ_CST)
#define qatomic_fetch_sub(ptr, n) __atomic_fetch_sub(ptr, n, __ATOMIC_SEQ_CST)
#define qatomic_fetch_and(ptr, n) __atomic_fetch_and(ptr, n, __ATOMIC_SEQ_CST)
#define qatomic_fetch_or(ptr, n)  __atomic_fetch_or(ptr, n, __ATOMIC_SEQ_CST)
#define qatomic_fetch_xor(ptr, n) __atomic_fetch_xor(ptr, n, __ATOMIC_SEQ_CST)

#define qatomic_inc_fetch(ptr)    __atomic_add_fetch(ptr, 1, __ATOMIC_SEQ_CST)
#define qatomic_dec_fetch(ptr)    __atomic_sub_fetch(ptr, 1, __ATOMIC_SEQ_CST)
#define qatomic_add_fetch(ptr, n) __atomic_add_fetch(ptr, n, __ATOMIC_SEQ_CST)
#define qatomic_sub_fetch(ptr, n) __atomic_sub_fetch(ptr, n, __ATOMIC_SEQ_CST)
#define qatomic_and_fetch(ptr, n) __atomic_and_fetch(ptr, n, __ATOMIC_SEQ_CST)
#define qatomic_or_fetch(ptr, n)  __atomic_or_fetch(ptr, n, __ATOMIC_SEQ_CST)
#define qatomic_xor_fetch(ptr, n) __atomic_xor_fetch(ptr, n, __ATOMIC_SEQ_CST)
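
/* The same read-modify-write operations, with the result discarded. */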
#define qatomic_inc(ptr) \
    ((void) __atomic_fetch_add(ptr, 1, __ATOMIC_SEQ_CST))
#define qatomic_dec(ptr) \
    ((void) __atomic_fetch_sub(ptr, 1, __ATOMIC_SEQ_CST))
#define qatomic_add(ptr, n) \
    ((void) __atomic_fetch_add(ptr, n, __ATOMIC_SEQ_CST))
#define qatomic_sub(ptr, n) \
    ((void) __atomic_fetch_sub(ptr, n, __ATOMIC_SEQ_CST))
#define qatomic_and(ptr, n) \
    ((void) __atomic_fetch_and(ptr, n, __ATOMIC_SEQ_CST))
#define qatomic_or(ptr, n) \
    ((void) __atomic_fetch_or(ptr, n, __ATOMIC_SEQ_CST))
#define qatomic_xor(ptr, n) \
    ((void) __atomic_fetch_xor(ptr, n, __ATOMIC_SEQ_CST))

#define smp_wmb()   smp_mb_release()
#define smp_rmb()   smp_mb_acquire()
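
/*
 * qatomic_mb_read()/qatomic_mb_set() form sequentially consistent
 * load/store pairs: the store side always contains a full memory
 * barrier, which allows the load side to be a plain load-acquire.
 */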
#define qatomic_mb_read(ptr) \
    qatomic_load_acquire(ptr)

#if !defined(__SANITIZE_THREAD__) && \
    (defined(__i386__) || defined(__x86_64__) || defined(__s390x__))
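/*
 * On these strongly ordered hosts an atomic exchange is assumed to be
 * cheaper than a store followed by a full fence.
 */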
# define qatomic_mb_set(ptr, i)  ((void)qatomic_xchg(ptr, i))
#else
# define qatomic_mb_set(ptr, i) \
    ({ qatomic_store_release(ptr, i); smp_mb(); })
#endif
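
/*
 * qatomic_fetch_inc_nonzero(): increment *ptr only if it is non-zero and
 * return the value observed before the increment (zero means nothing was
 * changed).  Typical use is taking a reference only while the count has
 * not already dropped to zero.
 */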
#define qatomic_fetch_inc_nonzero(ptr) ({                              \
    typeof_strip_qual(*ptr) _oldn = qatomic_read(ptr);                 \
    while (_oldn && qatomic_cmpxchg(ptr, _oldn, _oldn + 1) != _oldn) { \
        _oldn = qatomic_read(ptr);                                     \
    }                                                                  \
    _oldn;                                                             \
})
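
/*
 * 64-bit accessors.  aligned_int64_t/aligned_uint64_t force 8-byte
 * alignment so that a 64-bit load or store is a single atomic access
 * even on 32-bit hosts whose ABI aligns 64-bit types to only 4 bytes.
 * With CONFIG_ATOMIC64 the accessors are relaxed loads/stores wrapped
 * in a _Generic type check and qatomic64_init() is a no-op; without it
 * the accessors and qatomic64_init() are implemented out of line.
 */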
typedef int64_t  aligned_int64_t  __attribute__((aligned(8)));
typedef uint64_t aligned_uint64_t __attribute__((aligned(8)));

#ifdef CONFIG_ATOMIC64
#define qatomic_read_i64(P) \
    _Generic(*(P), int64_t: qatomic_read__nocheck(P))
#define qatomic_read_u64(P) \
    _Generic(*(P), uint64_t: qatomic_read__nocheck(P))
#define qatomic_set_i64(P, V) \
    _Generic(*(P), int64_t: qatomic_set__nocheck(P, V))
#define qatomic_set_u64(P, V) \
    _Generic(*(P), uint64_t: qatomic_set__nocheck(P, V))

static inline void qatomic64_init(void)
{
}
#else
int64_t  qatomic_read_i64(const int64_t *ptr);
uint64_t qatomic_read_u64(const uint64_t *ptr);
void qatomic_set_i64(int64_t *ptr, int64_t val);
void qatomic_set_u64(uint64_t *ptr, uint64_t val);
void qatomic64_init(void);
#endif

#endif /* QEMU_ATOMIC_H */