/*
 * Atomic operations and memory barriers for QEMU.
 *
 * Provides compiler and CPU barrier macros plus atomic read/write and
 * read-modify-write helpers, implemented with architecture-specific
 * inline assembly where needed and with the GCC __sync/__atomic
 * builtins everywhere else.
 */

#ifndef __QEMU_ATOMIC_H
#define __QEMU_ATOMIC_H 1

#include "qemu/compiler.h"

/* Compiler barrier: stops the compiler from reordering or caching
 * memory accesses across this point; it emits no CPU instruction.
 */
#define barrier()   ({ asm volatile("" ::: "memory"); (void)0; })
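
/*
 * Illustrative sketch (not part of this header): barrier() alone is
 * enough when only compiler reordering must be prevented, for example
 * when spinning on a flag written by a signal handler in the same
 * thread.  The variable name below is hypothetical.
 *
 *     static int request_pending;     // set from a signal handler
 *
 *     while (!request_pending) {
 *         barrier();  // forces request_pending to be reloaded each iteration
 *     }
 */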

#ifndef __ATOMIC_RELAXED

/*
 * On x86, provide smp_mb() by hand for GCC older than 4.4, where we do
 * not rely on the __sync_synchronize() builtin: mfence on x86-64, and
 * a locked add to the top of the stack on 32-bit.
 */
#if defined(__i386__) || defined(__x86_64__)
#if !QEMU_GNUC_PREREQ(4, 4)
#if defined __x86_64__
#define smp_mb()    ({ asm volatile("mfence" ::: "memory"); (void)0; })
#else
#define smp_mb()    ({ asm volatile("lock; addl $0,0(%%esp) " ::: "memory"); (void)0; })
#endif
#endif
#endif

/* Alpha requires a barrier even for dependent (pointer-chasing) reads.  */
#ifdef __alpha__
#define smp_read_barrier_depends()   asm volatile("mb":::"memory")
#endif

#if defined(__i386__) || defined(__x86_64__) || defined(__s390x__)

/*
 * These hosts are strongly ordered: stores are not reordered with
 * other stores and loads are not reordered with other loads, so
 * smp_wmb() and smp_rmb() only need to be compiler barriers.
 */
#define smp_wmb()   barrier()
#define smp_rmb()   barrier()

/*
 * __sync_lock_test_and_set() is documented as an acquire barrier only,
 * but the exchange instruction it generates on these hosts is a full
 * barrier; add a compiler barrier to make it a full barrier at the
 * compiler level as well.
 */
#define atomic_xchg(ptr, i)    (barrier(), __sync_lock_test_and_set(ptr, i))

/* A store with full-barrier semantics is then just an exchange whose
 * result is discarded.
 */
#define atomic_mb_set(ptr, i)  ((void)atomic_xchg(ptr, i))

#elif defined(_ARCH_PPC)

/*
 * On POWER, eieio orders cacheable stores against each other, which is
 * sufficient for smp_wmb(); lwsync (64-bit) or sync (32-bit) serves as
 * the read barrier, and a full sync as smp_mb().
 */
#define smp_wmb()   ({ asm volatile("eieio" ::: "memory"); (void)0; })
#if defined(__powerpc64__)
#define smp_rmb()   ({ asm volatile("lwsync" ::: "memory"); (void)0; })
#else
#define smp_rmb()   ({ asm volatile("sync" ::: "memory"); (void)0; })
#endif
#define smp_mb()    ({ asm volatile("sync" ::: "memory"); (void)0; })

#endif /* _ARCH_PPC */

#endif /* !__ATOMIC_RELAXED */

/*
 * For hosts not handled above, fall back to the GCC builtins:
 * __sync_synchronize() emits a full memory barrier everywhere, and the
 * __atomic thread fences are used when available because release and
 * acquire fences can be cheaper than a full barrier.
 */
#ifndef smp_mb
#define smp_mb()    __sync_synchronize()
#endif

#ifndef smp_wmb
#ifdef __ATOMIC_RELEASE
#define smp_wmb()   __atomic_thread_fence(__ATOMIC_RELEASE)
#else
#define smp_wmb()   __sync_synchronize()
#endif
#endif

#ifndef smp_rmb
#ifdef __ATOMIC_ACQUIRE
#define smp_rmb()   __atomic_thread_fence(__ATOMIC_ACQUIRE)
#else
#define smp_rmb()   __sync_synchronize()
#endif
#endif

#ifndef smp_read_barrier_depends
#ifdef __ATOMIC_CONSUME
#define smp_read_barrier_depends()   __atomic_thread_fence(__ATOMIC_CONSUME)
#else
#define smp_read_barrier_depends()   barrier()
#endif
#endif
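
/*
 * Illustrative sketch (not part of this header): a write barrier in the
 * producer pairs with a read barrier in the consumer.  The variable
 * names are hypothetical and the accesses are shown as plain loads and
 * stores for brevity.
 *
 *     // thread 1                  // thread 2
 *     data = compute();            if (flag == 1) {
 *     smp_wmb();                       smp_rmb();
 *     flag = 1;                        use(data);   // sees compute()'s result
 *                                  }
 */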

/* Relaxed load and store done through a volatile cast, so the compiler
 * performs exactly one access and cannot cache, duplicate, or hoist it.
 */
#ifndef atomic_read
#define atomic_read(ptr)       (*(__typeof__(*ptr) volatile*) (ptr))
#endif

#ifndef atomic_set
#define atomic_set(ptr, i)     ((*(__typeof__(*ptr) volatile*) (ptr)) = (i))
#endif
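
/*
 * Illustrative sketch (not part of this header): atomic_read() and
 * atomic_set() are suitable when no ordering against other data is
 * required, such as a stop flag polled by a worker.  The names below
 * are hypothetical.
 *
 *     static int exit_requested;
 *
 *     // any thread
 *     atomic_set(&exit_requested, 1);
 *
 *     // worker loop
 *     while (!atomic_read(&exit_requested)) {
 *         do_work();
 *     }
 */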

/*
 * atomic_mb_read() and atomic_mb_set() are a load and a store with the
 * barriers needed for simple producer/consumer protocols: the smp_rmb()
 * after the load keeps later accesses from being reordered before it,
 * the smp_wmb() before the store keeps earlier accesses from being
 * reordered after it, and the trailing smp_mb() keeps the store from
 * being reordered with later loads.  An atomic_mb_read() of a location
 * pairs with an atomic_mb_set() of the same location.
 */
#ifndef atomic_mb_read
#define atomic_mb_read(ptr)    ({           \
    typeof(*ptr) _val = atomic_read(ptr);   \
    smp_rmb();                              \
    _val;                                   \
})
#endif

#ifndef atomic_mb_set
#define atomic_mb_set(ptr, i)  do {         \
    smp_wmb();                              \
    atomic_set(ptr, i);                     \
    smp_mb();                               \
} while (0)
#endif
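
/*
 * Illustrative sketch (not part of this header): a minimal
 * message-passing pattern using the barrier-providing accessors above.
 * The structure and field names are hypothetical.
 *
 *     static struct { int payload; } msg;
 *     static int msg_valid;
 *
 *     // producer
 *     msg.payload = compute();            // ordinary store
 *     atomic_mb_set(&msg_valid, 1);       // smp_wmb() before, smp_mb() after
 *
 *     // consumer
 *     if (atomic_mb_read(&msg_valid)) {   // smp_rmb() after the load
 *         consume(msg.payload);           // guaranteed to see the payload
 *     }
 */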

#ifndef atomic_xchg
#if defined(__clang__)
#define atomic_xchg(ptr, i)    __sync_swap(ptr, i)
#elif defined(__ATOMIC_SEQ_CST)
#define atomic_xchg(ptr, i)    ({                           \
    typeof(*ptr) _new = (i), _old;                          \
    __atomic_exchange(ptr, &_new, &_old, __ATOMIC_SEQ_CST); \
    _old;                                                   \
})
#else
/* __sync_lock_test_and_set() is documented to be an acquire barrier only.  */
#define atomic_xchg(ptr, i)    (smp_mb(), __sync_lock_test_and_set(ptr, i))
#endif
#endif
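
/*
 * Illustrative sketch (not part of this header): atomic_xchg() returns
 * the previous value, so a pending-work flag can be taken and cleared
 * in one step.  The names below are hypothetical.
 *
 *     static int pending;
 *
 *     // producer
 *     atomic_mb_set(&pending, 1);
 *
 *     // consumer: atomically claim and clear the flag
 *     if (atomic_xchg(&pending, 0)) {
 *         handle_request();
 *     }
 */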

/* Provide shorter names for the GCC atomic builtins.  */
#define atomic_fetch_inc(ptr)  __sync_fetch_and_add(ptr, 1)
#define atomic_fetch_dec(ptr)  __sync_fetch_and_add(ptr, -1)
#define atomic_fetch_add       __sync_fetch_and_add
#define atomic_fetch_sub       __sync_fetch_and_sub
#define atomic_fetch_and       __sync_fetch_and_and
#define atomic_fetch_or        __sync_fetch_and_or
#define atomic_cmpxchg         __sync_val_compare_and_swap
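
/*
 * Illustrative sketch (not part of this header): atomic_cmpxchg()
 * returns the value actually found at the location, so a classic
 * compare-and-swap retry loop looks like this.  The names are
 * hypothetical; the loop increments a counter but refuses to go
 * past a limit.
 *
 *     static int in_flight;
 *
 *     bool try_get_slot(int max)
 *     {
 *         int old_val, new_val;
 *
 *         do {
 *             old_val = atomic_read(&in_flight);
 *             if (old_val >= max) {
 *                 return false;           // no slot available
 *             }
 *             new_val = old_val + 1;
 *         } while (atomic_cmpxchg(&in_flight, old_val, new_val) != old_val);
 *         return true;
 *     }
 */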

/* Even shorter names for the variants that discard the result.  */
#define atomic_inc(ptr)        ((void) __sync_fetch_and_add(ptr, 1))
#define atomic_dec(ptr)        ((void) __sync_fetch_and_add(ptr, -1))
#define atomic_add(ptr, n)     ((void) __sync_fetch_and_add(ptr, n))
#define atomic_sub(ptr, n)     ((void) __sync_fetch_and_sub(ptr, n))
#define atomic_and(ptr, n)     ((void) __sync_fetch_and_and(ptr, n))
#define atomic_or(ptr, n)      ((void) __sync_fetch_and_or(ptr, n))
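
/*
 * Illustrative sketch (not part of this header): reference counting
 * with the shorthands above.  Only the release path needs the previous
 * value, so it uses the fetch variant.  The type and function names
 * are hypothetical.
 *
 *     typedef struct MyObject {
 *         int refcount;
 *         // ... other fields ...
 *     } MyObject;
 *
 *     void my_object_ref(MyObject *obj)
 *     {
 *         atomic_inc(&obj->refcount);
 *     }
 *
 *     void my_object_unref(MyObject *obj)
 *     {
 *         if (atomic_fetch_dec(&obj->refcount) == 1) {
 *             free(obj);      // last reference dropped
 *         }
 *     }
 */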

#endif /* __QEMU_ATOMIC_H */