linux/include/asm-generic/atomic-instrumented.h
/*
 * This file provides wrappers with KASAN instrumentation for atomic operations.
 * To use this functionality, an arch's atomic.h file needs to define all
 * atomic operations with an arch_ prefix (e.g. arch_atomic_read()) and include
 * this file at the end. This file then provides atomic_read(), which forwards
 * to arch_atomic_read() for the actual atomic operation.
 * Note: if an arch atomic operation is implemented by means of other atomic
 * operations (e.g. an atomic_read()/atomic_cmpxchg() loop), it needs to use
 * the arch_ variants (i.e. arch_atomic_read()/arch_atomic_cmpxchg()) to avoid
 * double instrumentation.
 */
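
/*
 * For example (a minimal sketch, not part of this file), an arch might
 * wire this up as:
 *
 *	static __always_inline int arch_atomic_read(const atomic_t *v)
 *	{
 *		return READ_ONCE(v->counter);
 *	}
 *
 *	... the remaining arch_atomic*() operations ...
 *
 *	#include <asm-generic/atomic-instrumented.h>
 */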

#ifndef _LINUX_ATOMIC_INSTRUMENTED_H
#define _LINUX_ATOMIC_INSTRUMENTED_H

#include <linux/build_bug.h>
#include <linux/kasan-checks.h>

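/*
 * Convention: read-only operations are checked with kasan_check_read();
 * operations that modify *v (including xchg/cmpxchg) are checked with
 * kasan_check_write().
 */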
static __always_inline int atomic_read(const atomic_t *v)
{
	kasan_check_read(v, sizeof(*v));
	return arch_atomic_read(v);
}

static __always_inline s64 atomic64_read(const atomic64_t *v)
{
	kasan_check_read(v, sizeof(*v));
	return arch_atomic64_read(v);
}

static __always_inline void atomic_set(atomic_t *v, int i)
{
	kasan_check_write(v, sizeof(*v));
	arch_atomic_set(v, i);
}

static __always_inline void atomic64_set(atomic64_t *v, s64 i)
{
	kasan_check_write(v, sizeof(*v));
	arch_atomic64_set(v, i);
}

static __always_inline int atomic_xchg(atomic_t *v, int i)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_xchg(v, i);
}

static __always_inline s64 atomic64_xchg(atomic64_t *v, s64 i)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_xchg(v, i);
}

static __always_inline int atomic_cmpxchg(atomic_t *v, int old, int new)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_cmpxchg(v, old, new);
}

static __always_inline s64 atomic64_cmpxchg(atomic64_t *v, s64 old, s64 new)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_cmpxchg(v, old, new);
}

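/*
 * try_cmpxchg() also reads the caller-supplied expected value through
 * @old, so that access is checked as well.
 */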
#ifdef arch_atomic_try_cmpxchg
#define atomic_try_cmpxchg atomic_try_cmpxchg
static __always_inline bool atomic_try_cmpxchg(atomic_t *v, int *old, int new)
{
	kasan_check_write(v, sizeof(*v));
	kasan_check_read(old, sizeof(*old));
	return arch_atomic_try_cmpxchg(v, old, new);
}
#endif

#ifdef arch_atomic64_try_cmpxchg
#define atomic64_try_cmpxchg atomic64_try_cmpxchg
static __always_inline bool atomic64_try_cmpxchg(atomic64_t *v, s64 *old, s64 new)
{
	kasan_check_write(v, sizeof(*v));
	kasan_check_read(old, sizeof(*old));
	return arch_atomic64_try_cmpxchg(v, old, new);
}
#endif

#ifdef arch_atomic_fetch_add_unless
#define atomic_fetch_add_unless atomic_fetch_add_unless
static __always_inline int atomic_fetch_add_unless(atomic_t *v, int a, int u)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_fetch_add_unless(v, a, u);
}
#endif

#ifdef arch_atomic64_fetch_add_unless
#define atomic64_fetch_add_unless atomic64_fetch_add_unless
static __always_inline s64 atomic64_fetch_add_unless(atomic64_t *v, s64 a, s64 u)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_fetch_add_unless(v, a, u);
}
#endif

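/*
 * The operations below are optional: each wrapper is built only when the
 * arch provides the corresponding arch_*() implementation, and the
 * "#define atomic_inc atomic_inc" pattern advertises that to generic
 * fallback code.
 */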
#ifdef arch_atomic_inc
#define atomic_inc atomic_inc
static __always_inline void atomic_inc(atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	arch_atomic_inc(v);
}
#endif

#ifdef arch_atomic64_inc
#define atomic64_inc atomic64_inc
static __always_inline void atomic64_inc(atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	arch_atomic64_inc(v);
}
#endif

#ifdef arch_atomic_dec
#define atomic_dec atomic_dec
static __always_inline void atomic_dec(atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	arch_atomic_dec(v);
}
#endif

#ifdef arch_atomic64_dec
#define atomic64_dec atomic64_dec
static __always_inline void atomic64_dec(atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	arch_atomic64_dec(v);
}
#endif

static __always_inline void atomic_add(int i, atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	arch_atomic_add(i, v);
}

static __always_inline void atomic64_add(s64 i, atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	arch_atomic64_add(i, v);
}

static __always_inline void atomic_sub(int i, atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	arch_atomic_sub(i, v);
}

static __always_inline void atomic64_sub(s64 i, atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	arch_atomic64_sub(i, v);
}

static __always_inline void atomic_and(int i, atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	arch_atomic_and(i, v);
}

static __always_inline void atomic64_and(s64 i, atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	arch_atomic64_and(i, v);
}

static __always_inline void atomic_or(int i, atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	arch_atomic_or(i, v);
}

static __always_inline void atomic64_or(s64 i, atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	arch_atomic64_or(i, v);
}

static __always_inline void atomic_xor(int i, atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	arch_atomic_xor(i, v);
}

static __always_inline void atomic64_xor(s64 i, atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	arch_atomic64_xor(i, v);
}

#ifdef arch_atomic_inc_return
#define atomic_inc_return atomic_inc_return
static __always_inline int atomic_inc_return(atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_inc_return(v);
}
#endif

#ifdef arch_atomic64_inc_return
#define atomic64_inc_return atomic64_inc_return
static __always_inline s64 atomic64_inc_return(atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_inc_return(v);
}
#endif

#ifdef arch_atomic_dec_return
#define atomic_dec_return atomic_dec_return
static __always_inline int atomic_dec_return(atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_dec_return(v);
}
#endif

#ifdef arch_atomic64_dec_return
#define atomic64_dec_return atomic64_dec_return
static __always_inline s64 atomic64_dec_return(atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_dec_return(v);
}
#endif

#ifdef arch_atomic64_inc_not_zero
#define atomic64_inc_not_zero atomic64_inc_not_zero
static __always_inline bool atomic64_inc_not_zero(atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_inc_not_zero(v);
}
#endif

#ifdef arch_atomic64_dec_if_positive
#define atomic64_dec_if_positive atomic64_dec_if_positive
static __always_inline s64 atomic64_dec_if_positive(atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_dec_if_positive(v);
}
#endif

#ifdef arch_atomic_dec_and_test
#define atomic_dec_and_test atomic_dec_and_test
static __always_inline bool atomic_dec_and_test(atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_dec_and_test(v);
}
#endif

#ifdef arch_atomic64_dec_and_test
#define atomic64_dec_and_test atomic64_dec_and_test
static __always_inline bool atomic64_dec_and_test(atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_dec_and_test(v);
}
#endif

#ifdef arch_atomic_inc_and_test
#define atomic_inc_and_test atomic_inc_and_test
static __always_inline bool atomic_inc_and_test(atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_inc_and_test(v);
}
#endif

#ifdef arch_atomic64_inc_and_test
#define atomic64_inc_and_test atomic64_inc_and_test
static __always_inline bool atomic64_inc_and_test(atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_inc_and_test(v);
}
#endif

static __always_inline int atomic_add_return(int i, atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_add_return(i, v);
}

static __always_inline s64 atomic64_add_return(s64 i, atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_add_return(i, v);
}

static __always_inline int atomic_sub_return(int i, atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_sub_return(i, v);
}

static __always_inline s64 atomic64_sub_return(s64 i, atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_sub_return(i, v);
}

static __always_inline int atomic_fetch_add(int i, atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_fetch_add(i, v);
}

static __always_inline s64 atomic64_fetch_add(s64 i, atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_fetch_add(i, v);
}

static __always_inline int atomic_fetch_sub(int i, atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_fetch_sub(i, v);
}

static __always_inline s64 atomic64_fetch_sub(s64 i, atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_fetch_sub(i, v);
}

static __always_inline int atomic_fetch_and(int i, atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_fetch_and(i, v);
}

static __always_inline s64 atomic64_fetch_and(s64 i, atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_fetch_and(i, v);
}

static __always_inline int atomic_fetch_or(int i, atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_fetch_or(i, v);
}

static __always_inline s64 atomic64_fetch_or(s64 i, atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_fetch_or(i, v);
}

static __always_inline int atomic_fetch_xor(int i, atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_fetch_xor(i, v);
}

static __always_inline s64 atomic64_fetch_xor(s64 i, atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_fetch_xor(i, v);
}

#ifdef arch_atomic_sub_and_test
#define atomic_sub_and_test atomic_sub_and_test
static __always_inline bool atomic_sub_and_test(int i, atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_sub_and_test(i, v);
}
#endif

#ifdef arch_atomic64_sub_and_test
#define atomic64_sub_and_test atomic64_sub_and_test
static __always_inline bool atomic64_sub_and_test(s64 i, atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_sub_and_test(i, v);
}
#endif

#ifdef arch_atomic_add_negative
#define atomic_add_negative atomic_add_negative
static __always_inline bool atomic_add_negative(int i, atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_add_negative(i, v);
}
#endif

#ifdef arch_atomic64_add_negative
#define atomic64_add_negative atomic64_add_negative
static __always_inline bool atomic64_add_negative(s64 i, atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_add_negative(i, v);
}
#endif

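/*
 * The macros below instrument the type-generic xchg()/cmpxchg() family,
 * which operates on plain pointers rather than atomic_t. @ptr is
 * evaluated exactly once, into __ai_ptr, so the KASAN check and the
 * arch_*() operation act on the same pointer and any side effects of
 * the @ptr expression are not duplicated.
 */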
#define xchg(ptr, new)							\
({									\
	typeof(ptr) __ai_ptr = (ptr);					\
	kasan_check_write(__ai_ptr, sizeof(*__ai_ptr));			\
	arch_xchg(__ai_ptr, (new));					\
})

#define cmpxchg(ptr, old, new)						\
({									\
	typeof(ptr) __ai_ptr = (ptr);					\
	kasan_check_write(__ai_ptr, sizeof(*__ai_ptr));			\
	arch_cmpxchg(__ai_ptr, (old), (new));				\
})

#define sync_cmpxchg(ptr, old, new)					\
({									\
	typeof(ptr) __ai_ptr = (ptr);					\
	kasan_check_write(__ai_ptr, sizeof(*__ai_ptr));			\
	arch_sync_cmpxchg(__ai_ptr, (old), (new));			\
})

#define cmpxchg_local(ptr, old, new)					\
({									\
	typeof(ptr) __ai_ptr = (ptr);					\
	kasan_check_write(__ai_ptr, sizeof(*__ai_ptr));			\
	arch_cmpxchg_local(__ai_ptr, (old), (new));			\
})

#define cmpxchg64(ptr, old, new)					\
({									\
	typeof(ptr) __ai_ptr = (ptr);					\
	kasan_check_write(__ai_ptr, sizeof(*__ai_ptr));			\
	arch_cmpxchg64(__ai_ptr, (old), (new));				\
})

#define cmpxchg64_local(ptr, old, new)					\
({									\
	typeof(ptr) __ai_ptr = (ptr);					\
	kasan_check_write(__ai_ptr, sizeof(*__ai_ptr));			\
	arch_cmpxchg64_local(__ai_ptr, (old), (new));			\
})

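/*
 * cmpxchg_double() operates on two adjacent machine words, hence the
 * write check covers 2 * sizeof(*__ai_p1) starting at the first pointer.
 */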
#define cmpxchg_double(p1, p2, o1, o2, n1, n2)				\
({									\
	typeof(p1) __ai_p1 = (p1);					\
	kasan_check_write(__ai_p1, 2 * sizeof(*__ai_p1));		\
	arch_cmpxchg_double(__ai_p1, (p2), (o1), (o2), (n1), (n2));	\
})

#define cmpxchg_double_local(p1, p2, o1, o2, n1, n2)			\
({									\
	typeof(p1) __ai_p1 = (p1);					\
	kasan_check_write(__ai_p1, 2 * sizeof(*__ai_p1));		\
	arch_cmpxchg_double_local(__ai_p1, (p2), (o1), (o2), (n1), (n2)); \
})

#endif /* _LINUX_ATOMIC_INSTRUMENTED_H */