linux/arch/x86/include/asm/atomic64_64.h
#ifndef _ASM_X86_ATOMIC64_64_H
#define _ASM_X86_ATOMIC64_64_H

#include <linux/types.h>
#include <asm/alternative.h>
#include <asm/cmpxchg.h>

/* The 64-bit atomic type */

#define ATOMIC64_INIT(i)	{ (i) }

/**
 * atomic64_read - read atomic64 variable
 * @v: pointer of type atomic64_t
 *
 * Atomically reads the value of @v.
 * Doesn't imply a read memory barrier.
 */
static inline long atomic64_read(const atomic64_t *v)
{
	return READ_ONCE((v)->counter);
}

/**
 * atomic64_set - set atomic64 variable
 * @v: pointer to type atomic64_t
 * @i: required value
 *
 * Atomically sets the value of @v to @i.
 */
static inline void atomic64_set(atomic64_t *v, long i)
{
	WRITE_ONCE(v->counter, i);
}
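
/*
 * Usage sketch (illustrative only, not part of this header): a
 * hypothetical statistics counter, initialized at build time with
 * ATOMIC64_INIT, then reset and sampled at run time.
 *
 *	static atomic64_t example_stats = ATOMIC64_INIT(0);
 */
static inline void example_reset_stats(atomic64_t *example_stats)
{
	atomic64_set(example_stats, 0);		/* plain atomic store */
}

static inline long example_sample_stats(const atomic64_t *example_stats)
{
	return atomic64_read(example_stats);	/* plain atomic load */
}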

/**
 * atomic64_add - add integer to atomic64 variable
 * @i: integer value to add
 * @v: pointer to type atomic64_t
 *
 * Atomically adds @i to @v.
 */
static __always_inline void atomic64_add(long i, atomic64_t *v)
{
	asm volatile(LOCK_PREFIX "addq %1,%0"
		     : "=m" (v->counter)
		     : "er" (i), "m" (v->counter));
}
/**
 * atomic64_sub - subtract integer from atomic64 variable
 * @i: integer value to subtract
 * @v: pointer to type atomic64_t
 *
 * Atomically subtracts @i from @v.
 */
static inline void atomic64_sub(long i, atomic64_t *v)
{
	asm volatile(LOCK_PREFIX "subq %1,%0"
		     : "=m" (v->counter)
		     : "er" (i), "m" (v->counter));
}
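
/*
 * Usage sketch (illustrative only): charging and refunding bytes
 * against a hypothetical quota counter from concurrent contexts.
 */
static inline void example_charge(atomic64_t *example_quota, long bytes)
{
	atomic64_add(bytes, example_quota);	/* single locked addq */
}

static inline void example_refund(atomic64_t *example_quota, long bytes)
{
	atomic64_sub(bytes, example_quota);	/* single locked subq */
}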

/**
 * atomic64_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer to type atomic64_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
static inline bool atomic64_sub_and_test(long i, atomic64_t *v)
{
	GEN_BINARY_RMWcc(LOCK_PREFIX "subq", v->counter, "er", i, "%0", e);
}

/**
 * atomic64_inc - increment atomic64 variable
 * @v: pointer to type atomic64_t
 *
 * Atomically increments @v by 1.
 */
static __always_inline void atomic64_inc(atomic64_t *v)
{
	asm volatile(LOCK_PREFIX "incq %0"
		     : "=m" (v->counter)
		     : "m" (v->counter));
}

/**
 * atomic64_dec - decrement atomic64 variable
 * @v: pointer to type atomic64_t
 *
 * Atomically decrements @v by 1.
 */
static __always_inline void atomic64_dec(atomic64_t *v)
{
	asm volatile(LOCK_PREFIX "decq %0"
		     : "=m" (v->counter)
		     : "m" (v->counter));
}

/**
 * atomic64_dec_and_test - decrement and test
 * @v: pointer to type atomic64_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
static inline bool atomic64_dec_and_test(atomic64_t *v)
{
	GEN_UNARY_RMWcc(LOCK_PREFIX "decq", v->counter, "%0", e);
}
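
/*
 * Usage sketch (illustrative only): the put side of a reference count.
 * struct example_obj and the example_free() callback are hypothetical.
 */
struct example_obj {
	atomic64_t refcnt;
};

static inline void example_put(struct example_obj *obj,
			       void (*example_free)(struct example_obj *))
{
	/* Only the caller that drops the count to zero frees the object. */
	if (atomic64_dec_and_test(&obj->refcnt))
		example_free(obj);
}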

/**
 * atomic64_inc_and_test - increment and test
 * @v: pointer to type atomic64_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
static inline bool atomic64_inc_and_test(atomic64_t *v)
{
	GEN_UNARY_RMWcc(LOCK_PREFIX "incq", v->counter, "%0", e);
}

/**
 * atomic64_add_negative - add and test if negative
 * @i: integer value to add
 * @v: pointer to type atomic64_t
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
static inline bool atomic64_add_negative(long i, atomic64_t *v)
{
	GEN_BINARY_RMWcc(LOCK_PREFIX "addq", v->counter, "er", i, "%0", s);
}

/**
 * atomic64_add_return - add and return
 * @i: integer value to add
 * @v: pointer to type atomic64_t
 *
 * Atomically adds @i to @v and returns @i + @v
 */
static __always_inline long atomic64_add_return(long i, atomic64_t *v)
{
	return i + xadd(&v->counter, i);
}

static inline long atomic64_sub_return(long i, atomic64_t *v)
{
	return atomic64_add_return(-i, v);
}

static inline long atomic64_fetch_add(long i, atomic64_t *v)
{
	return xadd(&v->counter, i);
}

static inline long atomic64_fetch_sub(long i, atomic64_t *v)
{
	return xadd(&v->counter, -i);
}

#define atomic64_inc_return(v)  (atomic64_add_return(1, (v)))
#define atomic64_dec_return(v)  (atomic64_sub_return(1, (v)))
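
/*
 * Usage sketch (illustrative only): allocating monotonically increasing
 * 64-bit ids from a hypothetical global sequence counter.
 */
static inline long example_next_id(atomic64_t *example_seq)
{
	/* inc_return yields the post-increment value, so ids start at 1. */
	return atomic64_inc_return(example_seq);
}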

static inline long atomic64_cmpxchg(atomic64_t *v, long old, long new)
{
	return cmpxchg(&v->counter, old, new);
}

#define atomic64_try_cmpxchg atomic64_try_cmpxchg
static __always_inline bool atomic64_try_cmpxchg(atomic64_t *v, long *old, long new)
{
	return try_cmpxchg(&v->counter, old, new);
}

static inline long atomic64_xchg(atomic64_t *v, long new)
{
	return xchg(&v->counter, new);
}
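
/*
 * Usage sketch (illustrative only): an atomic "record the maximum"
 * built from the same try_cmpxchg() loop pattern used below in
 * atomic64_add_unless() and atomic64_dec_if_positive().
 */
static inline void example_track_max(atomic64_t *example_max, long sample)
{
	long cur = atomic64_read(example_max);

	/* On failure, try_cmpxchg() reloads cur with the current value. */
	do {
		if (sample <= cur)
			return;
	} while (!atomic64_try_cmpxchg(example_max, &cur, sample));
}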

/**
 * atomic64_add_unless - add unless the number is a given value
 * @v: pointer of type atomic64_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as it was not @u.
 * Returns true if the addition was done.
 */
static inline bool atomic64_add_unless(atomic64_t *v, long a, long u)
{
	long c = atomic64_read(v);
	do {
		if (unlikely(c == u))
			return false;
	} while (!atomic64_try_cmpxchg(v, &c, c + a));
	return true;
}

#define atomic64_inc_not_zero(v) atomic64_add_unless((v), 1, 0)
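
/*
 * Usage sketch (illustrative only): the get side of a reference count
 * where zero means "already being torn down", so the get must fail.
 */
static inline bool example_tryget(atomic64_t *refcnt)
{
	/* Fails once a concurrent put has dropped the count to zero. */
	return atomic64_inc_not_zero(refcnt);
}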

/*
 * atomic64_dec_if_positive - decrement by 1 if old value positive
 * @v: pointer of type atomic64_t
 *
 * The function returns the old value of *v minus 1, even if
 * the atomic variable, v, was not decremented.
 */
static inline long atomic64_dec_if_positive(atomic64_t *v)
{
	long dec, c = atomic64_read(v);
	do {
		dec = c - 1;
		if (unlikely(dec < 0))
			break;
	} while (!atomic64_try_cmpxchg(v, &c, dec));
	return dec;
}
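
/*
 * Usage sketch (illustrative only): consuming one credit from a
 * hypothetical pool, failing once the pool is exhausted.
 */
static inline bool example_take_credit(atomic64_t *example_credits)
{
	/* A negative result means the count was already <= 0. */
	return atomic64_dec_if_positive(example_credits) >= 0;
}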

#define ATOMIC64_OP(op)							\
static inline void atomic64_##op(long i, atomic64_t *v)			\
{									\
	asm volatile(LOCK_PREFIX #op"q %1,%0"				\
			: "+m" (v->counter)				\
			: "er" (i)					\
			: "memory");					\
}

#define ATOMIC64_FETCH_OP(op, c_op)					\
static inline long atomic64_fetch_##op(long i, atomic64_t *v)		\
{									\
	long val = atomic64_read(v);					\
	do {								\
	} while (!atomic64_try_cmpxchg(v, &val, val c_op i));		\
	return val;							\
}

#define ATOMIC64_OPS(op, c_op)						\
	ATOMIC64_OP(op)							\
	ATOMIC64_FETCH_OP(op, c_op)

ATOMIC64_OPS(and, &)
ATOMIC64_OPS(or, |)
ATOMIC64_OPS(xor, ^)
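
/*
 * Usage sketch (illustrative only): the generated fetch_or helper used
 * as a 64-bit test-and-set on a hypothetical flags word.
 */
static inline bool example_test_and_set_flag(atomic64_t *example_flags,
					     unsigned int bit)
{
	long mask = 1L << bit;

	/* fetch_or returns the value the word held before the OR. */
	return atomic64_fetch_or(mask, example_flags) & mask;
}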

#undef ATOMIC64_OPS
#undef ATOMIC64_FETCH_OP
#undef ATOMIC64_OP

#endif /* _ASM_X86_ATOMIC64_64_H */