linux/arch/x86/include/asm/atomic64_64.h
#ifndef _ASM_X86_ATOMIC64_64_H
#define _ASM_X86_ATOMIC64_64_H

#include <linux/types.h>
#include <asm/alternative.h>
#include <asm/cmpxchg.h>

/* The 64-bit atomic type */

#define ATOMIC64_INIT(i)        { (i) }

/**
 * atomic64_read - read atomic64 variable
 * @v: pointer of type atomic64_t
 *
 * Atomically reads the value of @v.
 * Doesn't imply a read memory barrier.
 */
static inline long atomic64_read(const atomic64_t *v)
{
        return READ_ONCE((v)->counter);
}

/**
 * atomic64_set - set atomic64 variable
 * @v: pointer to type atomic64_t
 * @i: required value
 *
 * Atomically sets the value of @v to @i.
 */
static inline void atomic64_set(atomic64_t *v, long i)
{
        WRITE_ONCE(v->counter, i);
}
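
/*
 * Example: a minimal usage sketch of ATOMIC64_INIT()/atomic64_read()/
 * atomic64_set() with a hypothetical "boot_events" counter. The read and
 * the set are each atomic, but the pair is not: another CPU may add to
 * the counter between them.
 */
#if 0	/* illustrative only */
static atomic64_t boot_events = ATOMIC64_INIT(0);

static long boot_events_snapshot(void)
{
        long seen = atomic64_read(&boot_events);

        atomic64_set(&boot_events, 0);
        return seen;
}
#endif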

/**
 * atomic64_add - add integer to atomic64 variable
 * @i: integer value to add
 * @v: pointer to type atomic64_t
 *
 * Atomically adds @i to @v.
 */
static __always_inline void atomic64_add(long i, atomic64_t *v)
{
        asm volatile(LOCK_PREFIX "addq %1,%0"
                     : "=m" (v->counter)
                     : "er" (i), "m" (v->counter));
}

/**
 * atomic64_sub - subtract integer from atomic64 variable
 * @i: integer value to subtract
 * @v: pointer to type atomic64_t
 *
 * Atomically subtracts @i from @v.
 */
static inline void atomic64_sub(long i, atomic64_t *v)
{
        asm volatile(LOCK_PREFIX "subq %1,%0"
                     : "=m" (v->counter)
                     : "er" (i), "m" (v->counter));
}
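
/*
 * Example: a sketch of lock-free accounting with a hypothetical
 * "bytes_in_flight" counter. atomic64_add()/atomic64_sub() are RMW
 * updates that return nothing; use the *_return variants further down
 * when the new value is needed.
 */
#if 0	/* illustrative only */
static atomic64_t bytes_in_flight = ATOMIC64_INIT(0);

static void submit_io(long nbytes)
{
        atomic64_add(nbytes, &bytes_in_flight);
}

static void complete_io(long nbytes)
{
        atomic64_sub(nbytes, &bytes_in_flight);
}
#endif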

/**
 * atomic64_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer to type atomic64_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
static inline bool atomic64_sub_and_test(long i, atomic64_t *v)
{
        GEN_BINARY_RMWcc(LOCK_PREFIX "subq", v->counter, "er", i, "%0", e);
}
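
/*
 * Example: a sketch of dropping @n references in one shot. Because the
 * subtraction and the zero test are a single atomic operation, exactly
 * one caller observes the transition to zero. "struct my_obj" and
 * free_obj() are hypothetical.
 */
#if 0	/* illustrative only */
static void put_obj_many(struct my_obj *obj, long n)
{
        if (atomic64_sub_and_test(n, &obj->refcnt))
                free_obj(obj);
}
#endif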

/**
 * atomic64_inc - increment atomic64 variable
 * @v: pointer to type atomic64_t
 *
 * Atomically increments @v by 1.
 */
static __always_inline void atomic64_inc(atomic64_t *v)
{
        asm volatile(LOCK_PREFIX "incq %0"
                     : "=m" (v->counter)
                     : "m" (v->counter));
}

/**
 * atomic64_dec - decrement atomic64 variable
 * @v: pointer to type atomic64_t
 *
 * Atomically decrements @v by 1.
 */
static __always_inline void atomic64_dec(atomic64_t *v)
{
        asm volatile(LOCK_PREFIX "decq %0"
                     : "=m" (v->counter)
                     : "m" (v->counter));
}

/**
 * atomic64_dec_and_test - decrement and test
 * @v: pointer to type atomic64_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
static inline bool atomic64_dec_and_test(atomic64_t *v)
{
        GEN_UNARY_RMWcc(LOCK_PREFIX "decq", v->counter, "%0", e);
}

/**
 * atomic64_inc_and_test - increment and test
 * @v: pointer to type atomic64_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
static inline bool atomic64_inc_and_test(atomic64_t *v)
{
        GEN_UNARY_RMWcc(LOCK_PREFIX "incq", v->counter, "%0", e);
}
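
/*
 * Example: the classic reference-count get/put idiom, sketched with a
 * hypothetical "struct my_obj". atomic64_dec_and_test() guarantees that
 * only the caller dropping the last reference sees a true result.
 */
#if 0	/* illustrative only */
static void get_obj(struct my_obj *obj)
{
        atomic64_inc(&obj->refcnt);
}

static void put_obj(struct my_obj *obj)
{
        if (atomic64_dec_and_test(&obj->refcnt))
                free_obj(obj);
}
#endif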

/**
 * atomic64_add_negative - add and test if negative
 * @i: integer value to add
 * @v: pointer to type atomic64_t
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
static inline bool atomic64_add_negative(long i, atomic64_t *v)
{
        GEN_BINARY_RMWcc(LOCK_PREFIX "addq", v->counter, "er", i, "%0", s);
}
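
/*
 * Example: a sketch of charging against a signed balance and detecting
 * overdraft in the same atomic step. "quota_balance" is hypothetical;
 * note the cost is charged even when the result went negative.
 */
#if 0	/* illustrative only */
static atomic64_t quota_balance = ATOMIC64_INIT(1024);

static bool charge(long cost)
{
        return !atomic64_add_negative(-cost, &quota_balance);
}
#endif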

/**
 * atomic64_add_return - add and return
 * @i: integer value to add
 * @v: pointer to type atomic64_t
 *
 * Atomically adds @i to @v and returns @i + @v
 */
static __always_inline long atomic64_add_return(long i, atomic64_t *v)
{
        return i + xadd(&v->counter, i);
}

static inline long atomic64_sub_return(long i, atomic64_t *v)
{
        return atomic64_add_return(-i, v);
}

static inline long atomic64_fetch_add(long i, atomic64_t *v)
{
        return xadd(&v->counter, i);
}

static inline long atomic64_fetch_sub(long i, atomic64_t *v)
{
        return xadd(&v->counter, -i);
}

#define atomic64_inc_return(v)  (atomic64_add_return(1, (v)))
#define atomic64_dec_return(v)  (atomic64_sub_return(1, (v)))
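
/*
 * Example: allocating unique, monotonically increasing 64-bit IDs with a
 * hypothetical "next_id" counter. atomic64_inc_return() boils down to a
 * single LOCK XADD, so concurrent callers can never see a duplicate.
 */
#if 0	/* illustrative only */
static atomic64_t next_id = ATOMIC64_INIT(0);

static long alloc_id(void)
{
        return atomic64_inc_return(&next_id);
}
#endif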

static inline long atomic64_cmpxchg(atomic64_t *v, long old, long new)
{
        return cmpxchg(&v->counter, old, new);
}

static inline long atomic64_xchg(atomic64_t *v, long new)
{
        return xchg(&v->counter, new);
}
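
/*
 * Example: a lock-free "record the maximum" update built from a
 * cmpxchg retry loop; the ATOMIC64_FETCH_OP() fallbacks further down use
 * the same pattern. "stat_max" is a hypothetical caller-provided variable.
 */
#if 0	/* illustrative only */
static void record_max(atomic64_t *stat_max, long val)
{
        long cur = atomic64_read(stat_max);

        while (val > cur) {
                long old = atomic64_cmpxchg(stat_max, cur, val);

                if (old == cur)
                        break;          /* we installed val */
                cur = old;              /* lost the race; re-check */
        }
}
#endif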

/**
 * atomic64_add_unless - add unless the number is a given value
 * @v: pointer of type atomic64_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as @v was not @u.
 * Returns true if the addition was done, or false otherwise.
 */
static inline bool atomic64_add_unless(atomic64_t *v, long a, long u)
{
        long c, old;
        c = atomic64_read(v);
        for (;;) {
                if (unlikely(c == (u)))
                        break;
                old = atomic64_cmpxchg((v), c, c + (a));
                if (likely(old == c))
                        break;
                c = old;
        }
        return c != (u);
}

#define atomic64_inc_not_zero(v) atomic64_add_unless((v), 1, 0)
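
/*
 * Example: the standard "tryget" idiom for objects found via a lookup
 * structure. atomic64_inc_not_zero() refuses to resurrect an object
 * whose refcount has already hit zero. "struct my_obj" is hypothetical.
 */
#if 0	/* illustrative only */
static bool try_get_obj(struct my_obj *obj)
{
        return atomic64_inc_not_zero(&obj->refcnt);
}
#endif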

/**
 * atomic64_dec_if_positive - decrement by 1 if old value positive
 * @v: pointer of type atomic64_t
 *
 * The function returns the old value of *v minus 1, even if
 * the atomic variable, v, was not decremented.
 */
static inline long atomic64_dec_if_positive(atomic64_t *v)
{
        long c, old, dec;
        c = atomic64_read(v);
        for (;;) {
                dec = c - 1;
                if (unlikely(dec < 0))
                        break;
                old = atomic64_cmpxchg((v), c, dec);
                if (likely(old == c))
                        break;
                c = old;
        }
        return dec;
}
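
/*
 * Example: a sketch of a semaphore-style trylock over a hypothetical
 * pool of "slots". atomic64_dec_if_positive() never drives the count
 * below zero, so a failed attempt leaves the pool untouched.
 */
#if 0	/* illustrative only */
static bool try_take_slot(atomic64_t *slots)
{
        return atomic64_dec_if_positive(slots) >= 0;
}
#endif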

#define ATOMIC64_OP(op)                                                 \
static inline void atomic64_##op(long i, atomic64_t *v)                 \
{                                                                       \
        asm volatile(LOCK_PREFIX #op"q %1,%0"                           \
                        : "+m" (v->counter)                             \
                        : "er" (i)                                      \
                        : "memory");                                    \
}

#define ATOMIC64_FETCH_OP(op, c_op)                                     \
static inline long atomic64_fetch_##op(long i, atomic64_t *v)           \
{                                                                       \
        long old, val = atomic64_read(v);                               \
        for (;;) {                                                      \
                old = atomic64_cmpxchg(v, val, val c_op i);             \
                if (old == val)                                         \
                        break;                                          \
                val = old;                                              \
        }                                                               \
        return old;                                                     \
}

#define ATOMIC64_OPS(op, c_op)                                          \
        ATOMIC64_OP(op)                                                 \
        ATOMIC64_FETCH_OP(op, c_op)

ATOMIC64_OPS(and, &)
ATOMIC64_OPS(or, |)
ATOMIC64_OPS(xor, ^)

#undef ATOMIC64_OPS
#undef ATOMIC64_FETCH_OP
#undef ATOMIC64_OP
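
/*
 * Example: the macros above expand to atomic64_and/or/xor() and
 * atomic64_fetch_and/or/xor(). A typical use is a 64-bit flag word where
 * the returned old value decides follow-up work; FLAG_DIRTY and
 * schedule_writeback() are hypothetical.
 */
#if 0	/* illustrative only */
#define FLAG_DIRTY      (1UL << 0)

static void mark_dirty(atomic64_t *flags)
{
        long old = atomic64_fetch_or(FLAG_DIRTY, flags);

        if (!(old & FLAG_DIRTY))
                schedule_writeback();   /* first to set the bit */
}
#endif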

#endif /* _ASM_X86_ATOMIC64_64_H */