linux/include/linux/atomic.h
/* SPDX-License-Identifier: GPL-2.0 */
/* Atomic operations usable in machine independent code */
#ifndef _LINUX_ATOMIC_H
#define _LINUX_ATOMIC_H
#include <asm/atomic.h>
#include <asm/barrier.h>

/*
 * Relaxed variants of xchg, cmpxchg and some atomic operations.
 *
 * We support four variants:
 *
 * - Fully ordered: The default implementation, no suffix required.
 * - Acquire: Provides ACQUIRE semantics, _acquire suffix.
 * - Release: Provides RELEASE semantics, _release suffix.
 * - Relaxed: No ordering guarantees, _relaxed suffix.
 *
 * For compound atomics performing both a load and a store, ACQUIRE
 * semantics apply only to the load and RELEASE semantics only to the
 * store portion of the operation. Note that a failed cmpxchg_acquire
 * does -not- imply any memory ordering constraints.
 *
 * See Documentation/memory-barriers.txt for ACQUIRE/RELEASE definitions.
 */

#ifndef atomic_read_acquire
#define  atomic_read_acquire(v)         smp_load_acquire(&(v)->counter)
#endif

#ifndef atomic_set_release
#define  atomic_set_release(v, i)       smp_store_release(&(v)->counter, (i))
#endif
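
/*
 * Example (illustrative sketch, not part of this header): one-way
 * message passing built on the acquire/release accessors above. The
 * names msg_data and msg_ready are hypothetical.
 *
 *    static int msg_data;
 *    static atomic_t msg_ready = ATOMIC_INIT(0);
 *
 *    void producer(void)
 *    {
 *            msg_data = 42;
 *            atomic_set_release(&msg_ready, 1);  // store above cannot pass this
 *    }
 *
 *    int consumer(void)
 *    {
 *            if (atomic_read_acquire(&msg_ready))  // later loads stay after this
 *                    return msg_data;              // guaranteed to observe 42
 *            return -1;
 *    }
 */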

/*
 * The idea here is to build acquire/release variants by adding explicit
 * barriers on top of the relaxed variant. In the case where the relaxed
 * variant is already fully ordered, no additional barriers are needed.
 *
 * In addition, if an architecture has a special barrier for acquire/release,
 * it can implement its own __atomic_op_* helpers and reuse the same
 * framework to build the variants.
 *
 * If an architecture overrides __atomic_op_acquire() it will probably want
 * to define smp_mb__after_spinlock().
 */
#ifndef __atomic_op_acquire
#define __atomic_op_acquire(op, args...)                                \
({                                                                      \
        typeof(op##_relaxed(args)) __ret  = op##_relaxed(args);         \
        smp_mb__after_atomic();                                         \
        __ret;                                                          \
})
#endif

#ifndef __atomic_op_release
#define __atomic_op_release(op, args...)                                \
({                                                                      \
        smp_mb__before_atomic();                                        \
        op##_relaxed(args);                                             \
})
#endif

#ifndef __atomic_op_fence
#define __atomic_op_fence(op, args...)                                  \
({                                                                      \
        typeof(op##_relaxed(args)) __ret;                               \
        smp_mb__before_atomic();                                        \
        __ret = op##_relaxed(args);                                     \
        smp_mb__after_atomic();                                         \
        __ret;                                                          \
})
#endif
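
/*
 * Example (illustrative sketch): an architecture that supplies only
 * atomic_add_return_relaxed() gets the other variants generated by the
 * wrappers below. For instance atomic_add_return_acquire(i, v) becomes:
 *
 *    ({
 *            int __ret = atomic_add_return_relaxed(i, v);
 *            smp_mb__after_atomic();
 *            __ret;
 *    })
 *
 * and the fully ordered atomic_add_return(i, v) brackets the relaxed op
 * with smp_mb__before_atomic()/smp_mb__after_atomic().
 */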

/* atomic_add_return_relaxed */
#ifndef atomic_add_return_relaxed
#define  atomic_add_return_relaxed      atomic_add_return
#define  atomic_add_return_acquire      atomic_add_return
#define  atomic_add_return_release      atomic_add_return

#else /* atomic_add_return_relaxed */

#ifndef atomic_add_return_acquire
#define  atomic_add_return_acquire(...)                                 \
        __atomic_op_acquire(atomic_add_return, __VA_ARGS__)
#endif

#ifndef atomic_add_return_release
#define  atomic_add_return_release(...)                                 \
        __atomic_op_release(atomic_add_return, __VA_ARGS__)
#endif

#ifndef atomic_add_return
#define  atomic_add_return(...)                                         \
        __atomic_op_fence(atomic_add_return, __VA_ARGS__)
#endif
#endif /* atomic_add_return_relaxed */
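
/*
 * Example (illustrative sketch): whichever branch above was taken,
 * callers may assume all four variants exist. A hypothetical sequence
 * counter that needs the new value but no ordering can pick the
 * cheapest one:
 *
 *    static atomic_t seq;
 *
 *    int next_seq(void)
 *    {
 *            return atomic_add_return_relaxed(1, &seq);
 *    }
 */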

/* atomic_inc_return_relaxed */
#ifndef atomic_inc_return_relaxed
#define  atomic_inc_return_relaxed      atomic_inc_return
#define  atomic_inc_return_acquire      atomic_inc_return
#define  atomic_inc_return_release      atomic_inc_return

#else /* atomic_inc_return_relaxed */

#ifndef atomic_inc_return_acquire
#define  atomic_inc_return_acquire(...)                                 \
        __atomic_op_acquire(atomic_inc_return, __VA_ARGS__)
#endif

#ifndef atomic_inc_return_release
#define  atomic_inc_return_release(...)                                 \
        __atomic_op_release(atomic_inc_return, __VA_ARGS__)
#endif

#ifndef atomic_inc_return
#define  atomic_inc_return(...)                                         \
        __atomic_op_fence(atomic_inc_return, __VA_ARGS__)
#endif
#endif /* atomic_inc_return_relaxed */

/* atomic_sub_return_relaxed */
#ifndef atomic_sub_return_relaxed
#define  atomic_sub_return_relaxed      atomic_sub_return
#define  atomic_sub_return_acquire      atomic_sub_return
#define  atomic_sub_return_release      atomic_sub_return

#else /* atomic_sub_return_relaxed */

#ifndef atomic_sub_return_acquire
#define  atomic_sub_return_acquire(...)                                 \
        __atomic_op_acquire(atomic_sub_return, __VA_ARGS__)
#endif

#ifndef atomic_sub_return_release
#define  atomic_sub_return_release(...)                                 \
        __atomic_op_release(atomic_sub_return, __VA_ARGS__)
#endif

#ifndef atomic_sub_return
#define  atomic_sub_return(...)                                         \
        __atomic_op_fence(atomic_sub_return, __VA_ARGS__)
#endif
#endif /* atomic_sub_return_relaxed */

/* atomic_dec_return_relaxed */
#ifndef atomic_dec_return_relaxed
#define  atomic_dec_return_relaxed      atomic_dec_return
#define  atomic_dec_return_acquire      atomic_dec_return
#define  atomic_dec_return_release      atomic_dec_return

#else /* atomic_dec_return_relaxed */

#ifndef atomic_dec_return_acquire
#define  atomic_dec_return_acquire(...)                                 \
        __atomic_op_acquire(atomic_dec_return, __VA_ARGS__)
#endif

#ifndef atomic_dec_return_release
#define  atomic_dec_return_release(...)                                 \
        __atomic_op_release(atomic_dec_return, __VA_ARGS__)
#endif

#ifndef atomic_dec_return
#define  atomic_dec_return(...)                                         \
        __atomic_op_fence(atomic_dec_return, __VA_ARGS__)
#endif
#endif /* atomic_dec_return_relaxed */


/* atomic_fetch_add_relaxed */
#ifndef atomic_fetch_add_relaxed
#define atomic_fetch_add_relaxed        atomic_fetch_add
#define atomic_fetch_add_acquire        atomic_fetch_add
#define atomic_fetch_add_release        atomic_fetch_add

#else /* atomic_fetch_add_relaxed */

#ifndef atomic_fetch_add_acquire
#define atomic_fetch_add_acquire(...)                                   \
        __atomic_op_acquire(atomic_fetch_add, __VA_ARGS__)
#endif

#ifndef atomic_fetch_add_release
#define atomic_fetch_add_release(...)                                   \
        __atomic_op_release(atomic_fetch_add, __VA_ARGS__)
#endif

#ifndef atomic_fetch_add
#define atomic_fetch_add(...)                                           \
        __atomic_op_fence(atomic_fetch_add, __VA_ARGS__)
#endif
#endif /* atomic_fetch_add_relaxed */

/* atomic_fetch_inc_relaxed */
#ifndef atomic_fetch_inc_relaxed

#ifndef atomic_fetch_inc
#define atomic_fetch_inc(v)             atomic_fetch_add(1, (v))
#define atomic_fetch_inc_relaxed(v)     atomic_fetch_add_relaxed(1, (v))
#define atomic_fetch_inc_acquire(v)     atomic_fetch_add_acquire(1, (v))
#define atomic_fetch_inc_release(v)     atomic_fetch_add_release(1, (v))
#else /* atomic_fetch_inc */
#define atomic_fetch_inc_relaxed        atomic_fetch_inc
#define atomic_fetch_inc_acquire        atomic_fetch_inc
#define atomic_fetch_inc_release        atomic_fetch_inc
#endif /* atomic_fetch_inc */

#else /* atomic_fetch_inc_relaxed */

#ifndef atomic_fetch_inc_acquire
#define atomic_fetch_inc_acquire(...)                                   \
        __atomic_op_acquire(atomic_fetch_inc, __VA_ARGS__)
#endif

#ifndef atomic_fetch_inc_release
#define atomic_fetch_inc_release(...)                                   \
        __atomic_op_release(atomic_fetch_inc, __VA_ARGS__)
#endif

#ifndef atomic_fetch_inc
#define atomic_fetch_inc(...)                                           \
        __atomic_op_fence(atomic_fetch_inc, __VA_ARGS__)
#endif
#endif /* atomic_fetch_inc_relaxed */

/* atomic_fetch_sub_relaxed */
#ifndef atomic_fetch_sub_relaxed
#define atomic_fetch_sub_relaxed        atomic_fetch_sub
#define atomic_fetch_sub_acquire        atomic_fetch_sub
#define atomic_fetch_sub_release        atomic_fetch_sub

#else /* atomic_fetch_sub_relaxed */

#ifndef atomic_fetch_sub_acquire
#define atomic_fetch_sub_acquire(...)                                   \
        __atomic_op_acquire(atomic_fetch_sub, __VA_ARGS__)
#endif

#ifndef atomic_fetch_sub_release
#define atomic_fetch_sub_release(...)                                   \
        __atomic_op_release(atomic_fetch_sub, __VA_ARGS__)
#endif

#ifndef atomic_fetch_sub
#define atomic_fetch_sub(...)                                           \
        __atomic_op_fence(atomic_fetch_sub, __VA_ARGS__)
#endif
#endif /* atomic_fetch_sub_relaxed */

/* atomic_fetch_dec_relaxed */
#ifndef atomic_fetch_dec_relaxed

#ifndef atomic_fetch_dec
#define atomic_fetch_dec(v)             atomic_fetch_sub(1, (v))
#define atomic_fetch_dec_relaxed(v)     atomic_fetch_sub_relaxed(1, (v))
#define atomic_fetch_dec_acquire(v)     atomic_fetch_sub_acquire(1, (v))
#define atomic_fetch_dec_release(v)     atomic_fetch_sub_release(1, (v))
#else /* atomic_fetch_dec */
#define atomic_fetch_dec_relaxed        atomic_fetch_dec
#define atomic_fetch_dec_acquire        atomic_fetch_dec
#define atomic_fetch_dec_release        atomic_fetch_dec
#endif /* atomic_fetch_dec */

#else /* atomic_fetch_dec_relaxed */

#ifndef atomic_fetch_dec_acquire
#define atomic_fetch_dec_acquire(...)                                   \
        __atomic_op_acquire(atomic_fetch_dec, __VA_ARGS__)
#endif

#ifndef atomic_fetch_dec_release
#define atomic_fetch_dec_release(...)                                   \
        __atomic_op_release(atomic_fetch_dec, __VA_ARGS__)
#endif

#ifndef atomic_fetch_dec
#define atomic_fetch_dec(...)                                           \
        __atomic_op_fence(atomic_fetch_dec, __VA_ARGS__)
#endif
#endif /* atomic_fetch_dec_relaxed */

/* atomic_fetch_or_relaxed */
#ifndef atomic_fetch_or_relaxed
#define atomic_fetch_or_relaxed atomic_fetch_or
#define atomic_fetch_or_acquire atomic_fetch_or
#define atomic_fetch_or_release atomic_fetch_or

#else /* atomic_fetch_or_relaxed */

#ifndef atomic_fetch_or_acquire
#define atomic_fetch_or_acquire(...)                                    \
        __atomic_op_acquire(atomic_fetch_or, __VA_ARGS__)
#endif

#ifndef atomic_fetch_or_release
#define atomic_fetch_or_release(...)                                    \
        __atomic_op_release(atomic_fetch_or, __VA_ARGS__)
#endif

#ifndef atomic_fetch_or
#define atomic_fetch_or(...)                                            \
        __atomic_op_fence(atomic_fetch_or, __VA_ARGS__)
#endif
#endif /* atomic_fetch_or_relaxed */

/* atomic_fetch_and_relaxed */
#ifndef atomic_fetch_and_relaxed
#define atomic_fetch_and_relaxed        atomic_fetch_and
#define atomic_fetch_and_acquire        atomic_fetch_and
#define atomic_fetch_and_release        atomic_fetch_and

#else /* atomic_fetch_and_relaxed */

#ifndef atomic_fetch_and_acquire
#define atomic_fetch_and_acquire(...)                                   \
        __atomic_op_acquire(atomic_fetch_and, __VA_ARGS__)
#endif

#ifndef atomic_fetch_and_release
#define atomic_fetch_and_release(...)                                   \
        __atomic_op_release(atomic_fetch_and, __VA_ARGS__)
#endif

#ifndef atomic_fetch_and
#define atomic_fetch_and(...)                                           \
        __atomic_op_fence(atomic_fetch_and, __VA_ARGS__)
#endif
#endif /* atomic_fetch_and_relaxed */

#ifdef atomic_andnot
/* atomic_fetch_andnot_relaxed */
#ifndef atomic_fetch_andnot_relaxed
#define atomic_fetch_andnot_relaxed     atomic_fetch_andnot
#define atomic_fetch_andnot_acquire     atomic_fetch_andnot
#define atomic_fetch_andnot_release     atomic_fetch_andnot

#else /* atomic_fetch_andnot_relaxed */

#ifndef atomic_fetch_andnot_acquire
#define atomic_fetch_andnot_acquire(...)                                        \
        __atomic_op_acquire(atomic_fetch_andnot, __VA_ARGS__)
#endif

#ifndef atomic_fetch_andnot_release
#define atomic_fetch_andnot_release(...)                                        \
        __atomic_op_release(atomic_fetch_andnot, __VA_ARGS__)
#endif

#ifndef atomic_fetch_andnot
#define atomic_fetch_andnot(...)                                                \
        __atomic_op_fence(atomic_fetch_andnot, __VA_ARGS__)
#endif
#endif /* atomic_fetch_andnot_relaxed */
#endif /* atomic_andnot */

/* atomic_fetch_xor_relaxed */
#ifndef atomic_fetch_xor_relaxed
#define atomic_fetch_xor_relaxed        atomic_fetch_xor
#define atomic_fetch_xor_acquire        atomic_fetch_xor
#define atomic_fetch_xor_release        atomic_fetch_xor

#else /* atomic_fetch_xor_relaxed */

#ifndef atomic_fetch_xor_acquire
#define atomic_fetch_xor_acquire(...)                                   \
        __atomic_op_acquire(atomic_fetch_xor, __VA_ARGS__)
#endif

#ifndef atomic_fetch_xor_release
#define atomic_fetch_xor_release(...)                                   \
        __atomic_op_release(atomic_fetch_xor, __VA_ARGS__)
#endif

#ifndef atomic_fetch_xor
#define atomic_fetch_xor(...)                                           \
        __atomic_op_fence(atomic_fetch_xor, __VA_ARGS__)
#endif
#endif /* atomic_fetch_xor_relaxed */


/* atomic_xchg_relaxed */
#ifndef atomic_xchg_relaxed
#define  atomic_xchg_relaxed            atomic_xchg
#define  atomic_xchg_acquire            atomic_xchg
#define  atomic_xchg_release            atomic_xchg

#else /* atomic_xchg_relaxed */

#ifndef atomic_xchg_acquire
#define  atomic_xchg_acquire(...)                                       \
        __atomic_op_acquire(atomic_xchg, __VA_ARGS__)
#endif

#ifndef atomic_xchg_release
#define  atomic_xchg_release(...)                                       \
        __atomic_op_release(atomic_xchg, __VA_ARGS__)
#endif

#ifndef atomic_xchg
#define  atomic_xchg(...)                                               \
        __atomic_op_fence(atomic_xchg, __VA_ARGS__)
#endif
#endif /* atomic_xchg_relaxed */

/* atomic_cmpxchg_relaxed */
#ifndef atomic_cmpxchg_relaxed
#define  atomic_cmpxchg_relaxed         atomic_cmpxchg
#define  atomic_cmpxchg_acquire         atomic_cmpxchg
#define  atomic_cmpxchg_release         atomic_cmpxchg

#else /* atomic_cmpxchg_relaxed */

#ifndef atomic_cmpxchg_acquire
#define  atomic_cmpxchg_acquire(...)                                    \
        __atomic_op_acquire(atomic_cmpxchg, __VA_ARGS__)
#endif

#ifndef atomic_cmpxchg_release
#define  atomic_cmpxchg_release(...)                                    \
        __atomic_op_release(atomic_cmpxchg, __VA_ARGS__)
#endif

#ifndef atomic_cmpxchg
#define  atomic_cmpxchg(...)                                            \
        __atomic_op_fence(atomic_cmpxchg, __VA_ARGS__)
#endif
#endif /* atomic_cmpxchg_relaxed */

#ifndef atomic_try_cmpxchg

#define __atomic_try_cmpxchg(type, _p, _po, _n)                         \
({                                                                      \
        typeof(_po) __po = (_po);                                       \
        typeof(*(_po)) __r, __o = *__po;                                \
        __r = atomic_cmpxchg##type((_p), __o, (_n));                    \
        if (unlikely(__r != __o))                                       \
                *__po = __r;                                            \
        likely(__r == __o);                                             \
})

#define atomic_try_cmpxchg(_p, _po, _n)         __atomic_try_cmpxchg(, _p, _po, _n)
#define atomic_try_cmpxchg_relaxed(_p, _po, _n) __atomic_try_cmpxchg(_relaxed, _p, _po, _n)
#define atomic_try_cmpxchg_acquire(_p, _po, _n) __atomic_try_cmpxchg(_acquire, _p, _po, _n)
#define atomic_try_cmpxchg_release(_p, _po, _n) __atomic_try_cmpxchg(_release, _p, _po, _n)

#else /* atomic_try_cmpxchg */
#define atomic_try_cmpxchg_relaxed      atomic_try_cmpxchg
#define atomic_try_cmpxchg_acquire      atomic_try_cmpxchg
#define atomic_try_cmpxchg_release      atomic_try_cmpxchg
#endif /* atomic_try_cmpxchg */
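
/*
 * Example (illustrative sketch): atomic_try_cmpxchg() folds the usual
 * "compare, and on failure reload the old value" step into one call,
 * which tightens cmpxchg loops. A hypothetical bounded increment:
 *
 *    bool inc_below(atomic_t *v, int limit)
 *    {
 *            int old = atomic_read(v);
 *
 *            do {
 *                    if (old >= limit)
 *                            return false;
 *            } while (!atomic_try_cmpxchg(v, &old, old + 1));
 *            // on failure, old was refreshed to the current value
 *
 *            return true;
 *    }
 */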

/* cmpxchg_relaxed */
#ifndef cmpxchg_relaxed
#define  cmpxchg_relaxed                cmpxchg
#define  cmpxchg_acquire                cmpxchg
#define  cmpxchg_release                cmpxchg

#else /* cmpxchg_relaxed */

#ifndef cmpxchg_acquire
#define  cmpxchg_acquire(...)                                           \
        __atomic_op_acquire(cmpxchg, __VA_ARGS__)
#endif

#ifndef cmpxchg_release
#define  cmpxchg_release(...)                                           \
        __atomic_op_release(cmpxchg, __VA_ARGS__)
#endif

#ifndef cmpxchg
#define  cmpxchg(...)                                                   \
        __atomic_op_fence(cmpxchg, __VA_ARGS__)
#endif
#endif /* cmpxchg_relaxed */

/* cmpxchg64_relaxed */
#ifndef cmpxchg64_relaxed
#define  cmpxchg64_relaxed              cmpxchg64
#define  cmpxchg64_acquire              cmpxchg64
#define  cmpxchg64_release              cmpxchg64

#else /* cmpxchg64_relaxed */

#ifndef cmpxchg64_acquire
#define  cmpxchg64_acquire(...)                                         \
        __atomic_op_acquire(cmpxchg64, __VA_ARGS__)
#endif

#ifndef cmpxchg64_release
#define  cmpxchg64_release(...)                                         \
        __atomic_op_release(cmpxchg64, __VA_ARGS__)
#endif

#ifndef cmpxchg64
#define  cmpxchg64(...)                                                 \
        __atomic_op_fence(cmpxchg64, __VA_ARGS__)
#endif
#endif /* cmpxchg64_relaxed */

/* xchg_relaxed */
#ifndef xchg_relaxed
#define  xchg_relaxed                   xchg
#define  xchg_acquire                   xchg
#define  xchg_release                   xchg

#else /* xchg_relaxed */

#ifndef xchg_acquire
#define  xchg_acquire(...)              __atomic_op_acquire(xchg, __VA_ARGS__)
#endif

#ifndef xchg_release
#define  xchg_release(...)              __atomic_op_release(xchg, __VA_ARGS__)
#endif

#ifndef xchg
#define  xchg(...)                      __atomic_op_fence(xchg, __VA_ARGS__)
#endif
#endif /* xchg_relaxed */

/**
 * atomic_add_unless - add unless the number is already a given value
 * @v: pointer of type atomic_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as @v was not already @u.
 * Returns non-zero if @v was not @u, and zero otherwise.
 */
static inline int atomic_add_unless(atomic_t *v, int a, int u)
{
        return __atomic_add_unless(v, a, u) != u;
}

/**
 * atomic_inc_not_zero - increment unless the number is zero
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1, so long as @v is non-zero.
 * Returns non-zero if @v was non-zero, and zero otherwise.
 */
#ifndef atomic_inc_not_zero
#define atomic_inc_not_zero(v)          atomic_add_unless((v), 1, 0)
#endif
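
/*
 * Example (illustrative sketch): atomic_inc_not_zero() is the usual way
 * to take a reference only while an object is still live. struct obj
 * and obj_tryget() are hypothetical names.
 *
 *    struct obj {
 *            atomic_t refcount;  // 0 once the object is being freed
 *    };
 *
 *    bool obj_tryget(struct obj *o)
 *    {
 *            return atomic_inc_not_zero(&o->refcount);
 *    }
 */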

#ifndef atomic_andnot
static inline void atomic_andnot(int i, atomic_t *v)
{
        atomic_and(~i, v);
}

static inline int atomic_fetch_andnot(int i, atomic_t *v)
{
        return atomic_fetch_and(~i, v);
}

static inline int atomic_fetch_andnot_relaxed(int i, atomic_t *v)
{
        return atomic_fetch_and_relaxed(~i, v);
}

static inline int atomic_fetch_andnot_acquire(int i, atomic_t *v)
{
        return atomic_fetch_and_acquire(~i, v);
}

static inline int atomic_fetch_andnot_release(int i, atomic_t *v)
{
        return atomic_fetch_and_release(~i, v);
}
#endif
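
/*
 * Example (illustrative sketch): andnot clears the bits set in its first
 * argument, so the caller passes the mask itself rather than its
 * complement. FLAG_PENDING and flags are hypothetical.
 *
 *    #define FLAG_PENDING 0x04
 *    static atomic_t flags;
 *
 *    void clear_pending(void)
 *    {
 *            atomic_andnot(FLAG_PENDING, &flags);  // flags &= ~FLAG_PENDING
 *    }
 */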

/**
 * atomic_inc_not_zero_hint - increment if not zero
 * @v: pointer of type atomic_t
 * @hint: probable value of the atomic before the increment
 *
 * This version of atomic_inc_not_zero() gives a hint of the probable
 * value of the atomic. This lets the processor skip reading the memory
 * before doing the atomic read/modify/write cycle, lowering the number
 * of bus transactions on some arches.
 *
 * Returns: 0 if increment was not done, 1 otherwise.
 */
#ifndef atomic_inc_not_zero_hint
static inline int atomic_inc_not_zero_hint(atomic_t *v, int hint)
{
        int val, c = hint;

        /* sanity test, should be removed by the compiler if hint is a constant */
        if (!hint)
                return atomic_inc_not_zero(v);

        do {
                val = atomic_cmpxchg(v, c, c + 1);
                if (val == c)
                        return 1;
                c = val;
        } while (c);

        return 0;
}
#endif
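
/*
 * Example (illustrative sketch): if most objects a caller sees hold
 * exactly one reference, passing hint == 1 lets the first cmpxchg
 * attempt proceed without a prior load of the counter:
 *
 *    if (!atomic_inc_not_zero_hint(&o->refcount, 1))
 *            return NULL;  // object already dying, leave it alone
 */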

#ifndef atomic_inc_unless_negative
static inline int atomic_inc_unless_negative(atomic_t *p)
{
        int v, v1;
        for (v = 0; v >= 0; v = v1) {
                v1 = atomic_cmpxchg(p, v, v + 1);
                if (likely(v1 == v))
                        return 1;
        }
        return 0;
}
#endif

#ifndef atomic_dec_unless_positive
static inline int atomic_dec_unless_positive(atomic_t *p)
{
        int v, v1;
        for (v = 0; v <= 0; v = v1) {
                v1 = atomic_cmpxchg(p, v, v - 1);
                if (likely(v1 == v))
                        return 1;
        }
        return 0;
}
#endif

/*
 * atomic_dec_if_positive - decrement by 1 if old value positive
 * @v: pointer of type atomic_t
 *
 * The function returns the old value of *v minus 1, even if
 * the atomic variable, v, was not decremented.
 */
#ifndef atomic_dec_if_positive
static inline int atomic_dec_if_positive(atomic_t *v)
{
        int c, old, dec;
        c = atomic_read(v);
        for (;;) {
                dec = c - 1;
                if (unlikely(dec < 0))
                        break;
                old = atomic_cmpxchg((v), c, dec);
                if (likely(old == c))
                        break;
                c = old;
        }
        return dec;
}
#endif
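
/*
 * Example (illustrative sketch): the "old value minus 1" return value
 * lets a caller tell a successful decrement (result >= 0) from a
 * refusal (result < 0), as in a hypothetical non-blocking semaphore:
 *
 *    bool sem_trydown(atomic_t *count)
 *    {
 *            return atomic_dec_if_positive(count) >= 0;
 *    }
 */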

#define atomic_cond_read_acquire(v, c)  smp_cond_load_acquire(&(v)->counter, (c))
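
/*
 * Example (illustrative sketch): spin until another CPU publishes a
 * state change, with ACQUIRE ordering on the final read. VAL names the
 * loaded value inside smp_cond_load_acquire(); STATE_READY and state
 * are hypothetical.
 *
 *    // returns once state reaches STATE_READY; later loads are ordered
 *    // after the read that observed it
 *    atomic_cond_read_acquire(&state, VAL == STATE_READY);
 */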

#ifdef CONFIG_GENERIC_ATOMIC64
#include <asm-generic/atomic64.h>
#endif

#ifndef atomic64_read_acquire
#define  atomic64_read_acquire(v)       smp_load_acquire(&(v)->counter)
#endif

#ifndef atomic64_set_release
#define  atomic64_set_release(v, i)     smp_store_release(&(v)->counter, (i))
#endif

/* atomic64_add_return_relaxed */
#ifndef atomic64_add_return_relaxed
#define  atomic64_add_return_relaxed    atomic64_add_return
#define  atomic64_add_return_acquire    atomic64_add_return
#define  atomic64_add_return_release    atomic64_add_return

#else /* atomic64_add_return_relaxed */

#ifndef atomic64_add_return_acquire
#define  atomic64_add_return_acquire(...)                               \
        __atomic_op_acquire(atomic64_add_return, __VA_ARGS__)
#endif

#ifndef atomic64_add_return_release
#define  atomic64_add_return_release(...)                               \
        __atomic_op_release(atomic64_add_return, __VA_ARGS__)
#endif

#ifndef atomic64_add_return
#define  atomic64_add_return(...)                                       \
        __atomic_op_fence(atomic64_add_return, __VA_ARGS__)
#endif
#endif /* atomic64_add_return_relaxed */

/* atomic64_inc_return_relaxed */
#ifndef atomic64_inc_return_relaxed
#define  atomic64_inc_return_relaxed    atomic64_inc_return
#define  atomic64_inc_return_acquire    atomic64_inc_return
#define  atomic64_inc_return_release    atomic64_inc_return

#else /* atomic64_inc_return_relaxed */

#ifndef atomic64_inc_return_acquire
#define  atomic64_inc_return_acquire(...)                               \
        __atomic_op_acquire(atomic64_inc_return, __VA_ARGS__)
#endif

#ifndef atomic64_inc_return_release
#define  atomic64_inc_return_release(...)                               \
        __atomic_op_release(atomic64_inc_return, __VA_ARGS__)
#endif

#ifndef atomic64_inc_return
#define  atomic64_inc_return(...)                                       \
        __atomic_op_fence(atomic64_inc_return, __VA_ARGS__)
#endif
#endif /* atomic64_inc_return_relaxed */


/* atomic64_sub_return_relaxed */
#ifndef atomic64_sub_return_relaxed
#define  atomic64_sub_return_relaxed    atomic64_sub_return
#define  atomic64_sub_return_acquire    atomic64_sub_return
#define  atomic64_sub_return_release    atomic64_sub_return

#else /* atomic64_sub_return_relaxed */

#ifndef atomic64_sub_return_acquire
#define  atomic64_sub_return_acquire(...)                               \
        __atomic_op_acquire(atomic64_sub_return, __VA_ARGS__)
#endif

#ifndef atomic64_sub_return_release
#define  atomic64_sub_return_release(...)                               \
        __atomic_op_release(atomic64_sub_return, __VA_ARGS__)
#endif

#ifndef atomic64_sub_return
#define  atomic64_sub_return(...)                                       \
        __atomic_op_fence(atomic64_sub_return, __VA_ARGS__)
#endif
#endif /* atomic64_sub_return_relaxed */

/* atomic64_dec_return_relaxed */
#ifndef atomic64_dec_return_relaxed
#define  atomic64_dec_return_relaxed    atomic64_dec_return
#define  atomic64_dec_return_acquire    atomic64_dec_return
#define  atomic64_dec_return_release    atomic64_dec_return

#else /* atomic64_dec_return_relaxed */

#ifndef atomic64_dec_return_acquire
#define  atomic64_dec_return_acquire(...)                               \
        __atomic_op_acquire(atomic64_dec_return, __VA_ARGS__)
#endif

#ifndef atomic64_dec_return_release
#define  atomic64_dec_return_release(...)                               \
        __atomic_op_release(atomic64_dec_return, __VA_ARGS__)
#endif

#ifndef atomic64_dec_return
#define  atomic64_dec_return(...)                                       \
        __atomic_op_fence(atomic64_dec_return, __VA_ARGS__)
#endif
#endif /* atomic64_dec_return_relaxed */


/* atomic64_fetch_add_relaxed */
#ifndef atomic64_fetch_add_relaxed
#define atomic64_fetch_add_relaxed      atomic64_fetch_add
#define atomic64_fetch_add_acquire      atomic64_fetch_add
#define atomic64_fetch_add_release      atomic64_fetch_add

#else /* atomic64_fetch_add_relaxed */

#ifndef atomic64_fetch_add_acquire
#define atomic64_fetch_add_acquire(...)                                 \
        __atomic_op_acquire(atomic64_fetch_add, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_add_release
#define atomic64_fetch_add_release(...)                                 \
        __atomic_op_release(atomic64_fetch_add, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_add
#define atomic64_fetch_add(...)                                         \
        __atomic_op_fence(atomic64_fetch_add, __VA_ARGS__)
#endif
#endif /* atomic64_fetch_add_relaxed */

/* atomic64_fetch_inc_relaxed */
#ifndef atomic64_fetch_inc_relaxed

#ifndef atomic64_fetch_inc
#define atomic64_fetch_inc(v)           atomic64_fetch_add(1, (v))
#define atomic64_fetch_inc_relaxed(v)   atomic64_fetch_add_relaxed(1, (v))
#define atomic64_fetch_inc_acquire(v)   atomic64_fetch_add_acquire(1, (v))
#define atomic64_fetch_inc_release(v)   atomic64_fetch_add_release(1, (v))
#else /* atomic64_fetch_inc */
#define atomic64_fetch_inc_relaxed      atomic64_fetch_inc
#define atomic64_fetch_inc_acquire      atomic64_fetch_inc
#define atomic64_fetch_inc_release      atomic64_fetch_inc
#endif /* atomic64_fetch_inc */

#else /* atomic64_fetch_inc_relaxed */

#ifndef atomic64_fetch_inc_acquire
#define atomic64_fetch_inc_acquire(...)                                 \
        __atomic_op_acquire(atomic64_fetch_inc, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_inc_release
#define atomic64_fetch_inc_release(...)                                 \
        __atomic_op_release(atomic64_fetch_inc, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_inc
#define atomic64_fetch_inc(...)                                         \
        __atomic_op_fence(atomic64_fetch_inc, __VA_ARGS__)
#endif
#endif /* atomic64_fetch_inc_relaxed */

/* atomic64_fetch_sub_relaxed */
#ifndef atomic64_fetch_sub_relaxed
#define atomic64_fetch_sub_relaxed      atomic64_fetch_sub
#define atomic64_fetch_sub_acquire      atomic64_fetch_sub
#define atomic64_fetch_sub_release      atomic64_fetch_sub

#else /* atomic64_fetch_sub_relaxed */

#ifndef atomic64_fetch_sub_acquire
#define atomic64_fetch_sub_acquire(...)                                 \
        __atomic_op_acquire(atomic64_fetch_sub, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_sub_release
#define atomic64_fetch_sub_release(...)                                 \
        __atomic_op_release(atomic64_fetch_sub, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_sub
#define atomic64_fetch_sub(...)                                         \
        __atomic_op_fence(atomic64_fetch_sub, __VA_ARGS__)
#endif
#endif /* atomic64_fetch_sub_relaxed */

/* atomic64_fetch_dec_relaxed */
#ifndef atomic64_fetch_dec_relaxed

#ifndef atomic64_fetch_dec
#define atomic64_fetch_dec(v)           atomic64_fetch_sub(1, (v))
#define atomic64_fetch_dec_relaxed(v)   atomic64_fetch_sub_relaxed(1, (v))
#define atomic64_fetch_dec_acquire(v)   atomic64_fetch_sub_acquire(1, (v))
#define atomic64_fetch_dec_release(v)   atomic64_fetch_sub_release(1, (v))
#else /* atomic64_fetch_dec */
#define atomic64_fetch_dec_relaxed      atomic64_fetch_dec
#define atomic64_fetch_dec_acquire      atomic64_fetch_dec
#define atomic64_fetch_dec_release      atomic64_fetch_dec
#endif /* atomic64_fetch_dec */

#else /* atomic64_fetch_dec_relaxed */

#ifndef atomic64_fetch_dec_acquire
#define atomic64_fetch_dec_acquire(...)                                 \
        __atomic_op_acquire(atomic64_fetch_dec, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_dec_release
#define atomic64_fetch_dec_release(...)                                 \
        __atomic_op_release(atomic64_fetch_dec, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_dec
#define atomic64_fetch_dec(...)                                         \
        __atomic_op_fence(atomic64_fetch_dec, __VA_ARGS__)
#endif
#endif /* atomic64_fetch_dec_relaxed */

/* atomic64_fetch_or_relaxed */
#ifndef atomic64_fetch_or_relaxed
#define atomic64_fetch_or_relaxed       atomic64_fetch_or
#define atomic64_fetch_or_acquire       atomic64_fetch_or
#define atomic64_fetch_or_release       atomic64_fetch_or

#else /* atomic64_fetch_or_relaxed */

#ifndef atomic64_fetch_or_acquire
#define atomic64_fetch_or_acquire(...)                                  \
        __atomic_op_acquire(atomic64_fetch_or, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_or_release
#define atomic64_fetch_or_release(...)                                  \
        __atomic_op_release(atomic64_fetch_or, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_or
#define atomic64_fetch_or(...)                                          \
        __atomic_op_fence(atomic64_fetch_or, __VA_ARGS__)
#endif
#endif /* atomic64_fetch_or_relaxed */

/* atomic64_fetch_and_relaxed */
#ifndef atomic64_fetch_and_relaxed
#define atomic64_fetch_and_relaxed      atomic64_fetch_and
#define atomic64_fetch_and_acquire      atomic64_fetch_and
#define atomic64_fetch_and_release      atomic64_fetch_and

#else /* atomic64_fetch_and_relaxed */

#ifndef atomic64_fetch_and_acquire
#define atomic64_fetch_and_acquire(...)                                 \
        __atomic_op_acquire(atomic64_fetch_and, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_and_release
#define atomic64_fetch_and_release(...)                                 \
        __atomic_op_release(atomic64_fetch_and, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_and
#define atomic64_fetch_and(...)                                         \
        __atomic_op_fence(atomic64_fetch_and, __VA_ARGS__)
#endif
#endif /* atomic64_fetch_and_relaxed */

#ifdef atomic64_andnot
/* atomic64_fetch_andnot_relaxed */
#ifndef atomic64_fetch_andnot_relaxed
#define atomic64_fetch_andnot_relaxed   atomic64_fetch_andnot
#define atomic64_fetch_andnot_acquire   atomic64_fetch_andnot
#define atomic64_fetch_andnot_release   atomic64_fetch_andnot

#else /* atomic64_fetch_andnot_relaxed */

#ifndef atomic64_fetch_andnot_acquire
#define atomic64_fetch_andnot_acquire(...)                                      \
        __atomic_op_acquire(atomic64_fetch_andnot, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_andnot_release
#define atomic64_fetch_andnot_release(...)                                      \
        __atomic_op_release(atomic64_fetch_andnot, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_andnot
#define atomic64_fetch_andnot(...)                                              \
        __atomic_op_fence(atomic64_fetch_andnot, __VA_ARGS__)
#endif
#endif /* atomic64_fetch_andnot_relaxed */
#endif /* atomic64_andnot */

/* atomic64_fetch_xor_relaxed */
#ifndef atomic64_fetch_xor_relaxed
#define atomic64_fetch_xor_relaxed      atomic64_fetch_xor
#define atomic64_fetch_xor_acquire      atomic64_fetch_xor
#define atomic64_fetch_xor_release      atomic64_fetch_xor

#else /* atomic64_fetch_xor_relaxed */

#ifndef atomic64_fetch_xor_acquire
#define atomic64_fetch_xor_acquire(...)                                 \
        __atomic_op_acquire(atomic64_fetch_xor, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_xor_release
#define atomic64_fetch_xor_release(...)                                 \
        __atomic_op_release(atomic64_fetch_xor, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_xor
#define atomic64_fetch_xor(...)                                         \
        __atomic_op_fence(atomic64_fetch_xor, __VA_ARGS__)
#endif
#endif /* atomic64_fetch_xor_relaxed */


/* atomic64_xchg_relaxed */
#ifndef atomic64_xchg_relaxed
#define  atomic64_xchg_relaxed          atomic64_xchg
#define  atomic64_xchg_acquire          atomic64_xchg
#define  atomic64_xchg_release          atomic64_xchg

#else /* atomic64_xchg_relaxed */

#ifndef atomic64_xchg_acquire
#define  atomic64_xchg_acquire(...)                                     \
        __atomic_op_acquire(atomic64_xchg, __VA_ARGS__)
#endif

#ifndef atomic64_xchg_release
#define  atomic64_xchg_release(...)                                     \
        __atomic_op_release(atomic64_xchg, __VA_ARGS__)
#endif

#ifndef atomic64_xchg
#define  atomic64_xchg(...)                                             \
        __atomic_op_fence(atomic64_xchg, __VA_ARGS__)
#endif
#endif /* atomic64_xchg_relaxed */

/* atomic64_cmpxchg_relaxed */
#ifndef atomic64_cmpxchg_relaxed
#define  atomic64_cmpxchg_relaxed       atomic64_cmpxchg
#define  atomic64_cmpxchg_acquire       atomic64_cmpxchg
#define  atomic64_cmpxchg_release       atomic64_cmpxchg

#else /* atomic64_cmpxchg_relaxed */

#ifndef atomic64_cmpxchg_acquire
#define  atomic64_cmpxchg_acquire(...)                                  \
        __atomic_op_acquire(atomic64_cmpxchg, __VA_ARGS__)
#endif

#ifndef atomic64_cmpxchg_release
#define  atomic64_cmpxchg_release(...)                                  \
        __atomic_op_release(atomic64_cmpxchg, __VA_ARGS__)
#endif

#ifndef atomic64_cmpxchg
#define  atomic64_cmpxchg(...)                                          \
        __atomic_op_fence(atomic64_cmpxchg, __VA_ARGS__)
#endif
#endif /* atomic64_cmpxchg_relaxed */

#ifndef atomic64_try_cmpxchg

#define __atomic64_try_cmpxchg(type, _p, _po, _n)                       \
({                                                                      \
        typeof(_po) __po = (_po);                                       \
        typeof(*(_po)) __r, __o = *__po;                                \
        __r = atomic64_cmpxchg##type((_p), __o, (_n));                  \
        if (unlikely(__r != __o))                                       \
                *__po = __r;                                            \
        likely(__r == __o);                                             \
})

#define atomic64_try_cmpxchg(_p, _po, _n)               __atomic64_try_cmpxchg(, _p, _po, _n)
#define atomic64_try_cmpxchg_relaxed(_p, _po, _n)       __atomic64_try_cmpxchg(_relaxed, _p, _po, _n)
#define atomic64_try_cmpxchg_acquire(_p, _po, _n)       __atomic64_try_cmpxchg(_acquire, _p, _po, _n)
#define atomic64_try_cmpxchg_release(_p, _po, _n)       __atomic64_try_cmpxchg(_release, _p, _po, _n)

#else /* atomic64_try_cmpxchg */
#define atomic64_try_cmpxchg_relaxed    atomic64_try_cmpxchg
#define atomic64_try_cmpxchg_acquire    atomic64_try_cmpxchg
#define atomic64_try_cmpxchg_release    atomic64_try_cmpxchg
#endif /* atomic64_try_cmpxchg */

#ifndef atomic64_andnot
static inline void atomic64_andnot(long long i, atomic64_t *v)
{
        atomic64_and(~i, v);
}

static inline long long atomic64_fetch_andnot(long long i, atomic64_t *v)
{
        return atomic64_fetch_and(~i, v);
}

static inline long long atomic64_fetch_andnot_relaxed(long long i, atomic64_t *v)
{
        return atomic64_fetch_and_relaxed(~i, v);
}

static inline long long atomic64_fetch_andnot_acquire(long long i, atomic64_t *v)
{
        return atomic64_fetch_and_acquire(~i, v);
}

static inline long long atomic64_fetch_andnot_release(long long i, atomic64_t *v)
{
        return atomic64_fetch_and_release(~i, v);
}
#endif

#define atomic64_cond_read_acquire(v, c)        smp_cond_load_acquire(&(v)->counter, (c))

#include <asm-generic/atomic-long.h>

#endif /* _LINUX_ATOMIC_H */