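/*
 * KASAN-instrumented wrappers for the atomic_t/atomic64_t and cmpxchg()
 * families of operations.
 *
 * Each wrapper checks the memory it is about to access with
 * kasan_check_read()/kasan_check_write() and then forwards to the
 * corresponding arch_-prefixed primitive, so the architecture is expected
 * to provide arch_atomic_read(), arch_atomic_set(), arch_cmpxchg() and
 * friends before this header is included.
 */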
#ifndef _LINUX_ATOMIC_INSTRUMENTED_H
#define _LINUX_ATOMIC_INSTRUMENTED_H

#include <linux/build_bug.h>
#include <linux/kasan-checks.h>

static __always_inline int atomic_read(const atomic_t *v)
{
	kasan_check_read(v, sizeof(*v));
	return arch_atomic_read(v);
}

static __always_inline s64 atomic64_read(const atomic64_t *v)
{
	kasan_check_read(v, sizeof(*v));
	return arch_atomic64_read(v);
}

static __always_inline void atomic_set(atomic_t *v, int i)
{
	kasan_check_write(v, sizeof(*v));
	arch_atomic_set(v, i);
}

static __always_inline void atomic64_set(atomic64_t *v, s64 i)
{
	kasan_check_write(v, sizeof(*v));
	arch_atomic64_set(v, i);
}

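/*
 * Exchange operations: atomic_xchg() and atomic_cmpxchg() (and their
 * 64-bit counterparts) return the value previously held in *v; the
 * cmpxchg() variants only store @new when that previous value equals @old.
 */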
static __always_inline int atomic_xchg(atomic_t *v, int i)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_xchg(v, i);
}

static __always_inline s64 atomic64_xchg(atomic64_t *v, s64 i)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_xchg(v, i);
}

static __always_inline int atomic_cmpxchg(atomic_t *v, int old, int new)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_cmpxchg(v, old, new);
}

static __always_inline s64 atomic64_cmpxchg(atomic64_t *v, s64 old, s64 new)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_cmpxchg(v, old, new);
}

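/*
 * try_cmpxchg() is only wrapped when the architecture provides an arch_
 * implementation.  It returns true on success; on failure it writes the
 * value observed in *v back into *old, which is why @old is passed by
 * pointer and checked here as well.  A typical retry loop (counter, new
 * and compute() are illustrative names only):
 *
 *	int old = atomic_read(&counter);
 *
 *	do {
 *		new = compute(old);
 *	} while (!atomic_try_cmpxchg(&counter, &old, new));
 */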
#ifdef arch_atomic_try_cmpxchg
#define atomic_try_cmpxchg atomic_try_cmpxchg
static __always_inline bool atomic_try_cmpxchg(atomic_t *v, int *old, int new)
{
	kasan_check_write(v, sizeof(*v));
	kasan_check_read(old, sizeof(*old));
	return arch_atomic_try_cmpxchg(v, old, new);
}
#endif

#ifdef arch_atomic64_try_cmpxchg
#define atomic64_try_cmpxchg atomic64_try_cmpxchg
static __always_inline bool atomic64_try_cmpxchg(atomic64_t *v, s64 *old, s64 new)
{
	kasan_check_write(v, sizeof(*v));
	kasan_check_read(old, sizeof(*old));
	return arch_atomic64_try_cmpxchg(v, old, new);
}
#endif

static __always_inline int __atomic_add_unless(atomic_t *v, int a, int u)
{
	kasan_check_write(v, sizeof(*v));
	return __arch_atomic_add_unless(v, a, u);
}

static __always_inline bool atomic64_add_unless(atomic64_t *v, s64 a, s64 u)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_add_unless(v, a, u);
}

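/*
 * Plain read-modify-write operations with no return value.  Only a write
 * check is issued for these: the bytes read and the bytes written are the
 * same, so a single check of *v is enough.
 */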
static __always_inline void atomic_inc(atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	arch_atomic_inc(v);
}

static __always_inline void atomic64_inc(atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	arch_atomic64_inc(v);
}

static __always_inline void atomic_dec(atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	arch_atomic_dec(v);
}

static __always_inline void atomic64_dec(atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	arch_atomic64_dec(v);
}

static __always_inline void atomic_add(int i, atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	arch_atomic_add(i, v);
}

static __always_inline void atomic64_add(s64 i, atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	arch_atomic64_add(i, v);
}

static __always_inline void atomic_sub(int i, atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	arch_atomic_sub(i, v);
}

static __always_inline void atomic64_sub(s64 i, atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	arch_atomic64_sub(i, v);
}

static __always_inline void atomic_and(int i, atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	arch_atomic_and(i, v);
}

static __always_inline void atomic64_and(s64 i, atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	arch_atomic64_and(i, v);
}

static __always_inline void atomic_or(int i, atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	arch_atomic_or(i, v);
}

static __always_inline void atomic64_or(s64 i, atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	arch_atomic64_or(i, v);
}

static __always_inline void atomic_xor(int i, atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	arch_atomic_xor(i, v);
}

static __always_inline void atomic64_xor(s64 i, atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	arch_atomic64_xor(i, v);
}

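/*
 * The *_return() variants below return the new value of the counter after
 * the operation has been applied; the fetch_*() variants further down
 * return the value it held before the operation.
 */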
static __always_inline int atomic_inc_return(atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_inc_return(v);
}

static __always_inline s64 atomic64_inc_return(atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_inc_return(v);
}

static __always_inline int atomic_dec_return(atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_dec_return(v);
}

static __always_inline s64 atomic64_dec_return(atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_dec_return(v);
}

static __always_inline s64 atomic64_inc_not_zero(atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_inc_not_zero(v);
}

static __always_inline s64 atomic64_dec_if_positive(atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_dec_if_positive(v);
}

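/*
 * The inc/dec *_and_test() helpers return true when the operation leaves
 * *v at zero, e.g. when the last reference is dropped via dec_and_test().
 */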
static __always_inline bool atomic_dec_and_test(atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_dec_and_test(v);
}

static __always_inline bool atomic64_dec_and_test(atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_dec_and_test(v);
}

static __always_inline bool atomic_inc_and_test(atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_inc_and_test(v);
}

static __always_inline bool atomic64_inc_and_test(atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_inc_and_test(v);
}

static __always_inline int atomic_add_return(int i, atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_add_return(i, v);
}

static __always_inline s64 atomic64_add_return(s64 i, atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_add_return(i, v);
}

static __always_inline int atomic_sub_return(int i, atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_sub_return(i, v);
}

static __always_inline s64 atomic64_sub_return(s64 i, atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_sub_return(i, v);
}

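/*
 * fetch_*() operations: apply the operation to *v and return its previous
 * value.
 */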
static __always_inline int atomic_fetch_add(int i, atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_fetch_add(i, v);
}

static __always_inline s64 atomic64_fetch_add(s64 i, atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_fetch_add(i, v);
}

static __always_inline int atomic_fetch_sub(int i, atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_fetch_sub(i, v);
}

static __always_inline s64 atomic64_fetch_sub(s64 i, atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_fetch_sub(i, v);
}

static __always_inline int atomic_fetch_and(int i, atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_fetch_and(i, v);
}

static __always_inline s64 atomic64_fetch_and(s64 i, atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_fetch_and(i, v);
}

static __always_inline int atomic_fetch_or(int i, atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_fetch_or(i, v);
}

static __always_inline s64 atomic64_fetch_or(s64 i, atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_fetch_or(i, v);
}

static __always_inline int atomic_fetch_xor(int i, atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_fetch_xor(i, v);
}

static __always_inline s64 atomic64_fetch_xor(s64 i, atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_fetch_xor(i, v);
}

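/*
 * sub_and_test() returns true when the subtraction leaves *v at zero;
 * add_negative() returns true when the result is negative.
 */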
static __always_inline bool atomic_sub_and_test(int i, atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_sub_and_test(i, v);
}

static __always_inline bool atomic64_sub_and_test(s64 i, atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_sub_and_test(i, v);
}

static __always_inline bool atomic_add_negative(int i, atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_add_negative(i, v);
}

static __always_inline bool atomic64_add_negative(s64 i, atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_add_negative(i, v);
}

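/*
 * The cmpxchg()-style macros below funnel through *_size() helpers: each
 * helper performs a single KASAN write check for the actual operand size
 * and then dispatches to the arch_ primitive of that size.  The macros
 * cast the result back to __typeof__(*(ptr)) so callers keep working with
 * their own type.
 */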
static __always_inline unsigned long
cmpxchg_size(volatile void *ptr, unsigned long old, unsigned long new, int size)
{
	kasan_check_write(ptr, size);
	switch (size) {
	case 1:
		return arch_cmpxchg((u8 *)ptr, (u8)old, (u8)new);
	case 2:
		return arch_cmpxchg((u16 *)ptr, (u16)old, (u16)new);
	case 4:
		return arch_cmpxchg((u32 *)ptr, (u32)old, (u32)new);
	case 8:
		BUILD_BUG_ON(sizeof(unsigned long) != 8);
		return arch_cmpxchg((u64 *)ptr, (u64)old, (u64)new);
	}
	BUILD_BUG();
	return 0;
}

#define cmpxchg(ptr, old, new)						\
({									\
	((__typeof__(*(ptr)))cmpxchg_size((ptr), (unsigned long)(old),	\
		(unsigned long)(new), sizeof(*(ptr))));			\
})
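/*
 * Hypothetical usage sketch: the wrapped cmpxchg() behaves like the plain
 * one and returns the value found in *ptr before the call, so success is
 * detected by comparing against @old, e.g.:
 *
 *	if (cmpxchg(&obj->state, STATE_IDLE, STATE_BUSY) == STATE_IDLE)
 *		we_won_the_race();
 *
 * (obj, STATE_IDLE, STATE_BUSY and we_won_the_race() are made-up names
 * used only for illustration.)
 */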

static __always_inline unsigned long
sync_cmpxchg_size(volatile void *ptr, unsigned long old, unsigned long new,
		  int size)
{
	kasan_check_write(ptr, size);
	switch (size) {
	case 1:
		return arch_sync_cmpxchg((u8 *)ptr, (u8)old, (u8)new);
	case 2:
		return arch_sync_cmpxchg((u16 *)ptr, (u16)old, (u16)new);
	case 4:
		return arch_sync_cmpxchg((u32 *)ptr, (u32)old, (u32)new);
	case 8:
		BUILD_BUG_ON(sizeof(unsigned long) != 8);
		return arch_sync_cmpxchg((u64 *)ptr, (u64)old, (u64)new);
	}
	BUILD_BUG();
	return 0;
}

#define sync_cmpxchg(ptr, old, new)					\
({									\
	((__typeof__(*(ptr)))sync_cmpxchg_size((ptr),			\
		(unsigned long)(old), (unsigned long)(new),		\
		sizeof(*(ptr))));					\
})

static __always_inline unsigned long
cmpxchg_local_size(volatile void *ptr, unsigned long old, unsigned long new,
		   int size)
{
	kasan_check_write(ptr, size);
	switch (size) {
	case 1:
		return arch_cmpxchg_local((u8 *)ptr, (u8)old, (u8)new);
	case 2:
		return arch_cmpxchg_local((u16 *)ptr, (u16)old, (u16)new);
	case 4:
		return arch_cmpxchg_local((u32 *)ptr, (u32)old, (u32)new);
	case 8:
		BUILD_BUG_ON(sizeof(unsigned long) != 8);
		return arch_cmpxchg_local((u64 *)ptr, (u64)old, (u64)new);
	}
	BUILD_BUG();
	return 0;
}

#define cmpxchg_local(ptr, old, new)					\
({									\
	((__typeof__(*(ptr)))cmpxchg_local_size((ptr),			\
		(unsigned long)(old), (unsigned long)(new),		\
		sizeof(*(ptr))));					\
})

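/*
 * The 64-bit cmpxchg64() variants always operate on a u64, so no size
 * dispatch is needed; the helpers only add the KASAN check before
 * forwarding to the arch_ primitive.
 */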
static __always_inline u64
cmpxchg64_size(volatile u64 *ptr, u64 old, u64 new)
{
	kasan_check_write(ptr, sizeof(*ptr));
	return arch_cmpxchg64(ptr, old, new);
}

#define cmpxchg64(ptr, old, new)					\
({									\
	((__typeof__(*(ptr)))cmpxchg64_size((ptr), (u64)(old),		\
		(u64)(new)));						\
})

static __always_inline u64
cmpxchg64_local_size(volatile u64 *ptr, u64 old, u64 new)
{
	kasan_check_write(ptr, sizeof(*ptr));
	return arch_cmpxchg64_local(ptr, old, new);
}

#define cmpxchg64_local(ptr, old, new)					\
({									\
	((__typeof__(*(ptr)))cmpxchg64_local_size((ptr), (u64)(old),	\
		(u64)(new)));						\
})

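/*
 * Note that the cmpxchg_double() wrappers below only forward to the arch_
 * primitives; the double-word operands are not KASAN-instrumented here.
 */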
#define cmpxchg_double(p1, p2, o1, o2, n1, n2)				\
({									\
	arch_cmpxchg_double((p1), (p2), (o1), (o2), (n1), (n2));	\
})

#define cmpxchg_double_local(p1, p2, o1, o2, n1, n2)			\
({									\
	arch_cmpxchg_double_local((p1), (p2), (o1), (o2), (n1), (n2));	\
})

#endif /* _LINUX_ATOMIC_INSTRUMENTED_H */