/* linux/arch/x86/include/asm/atomic64_64.h */
#ifndef _ASM_X86_ATOMIC64_64_H
#define _ASM_X86_ATOMIC64_64_H

#include <linux/types.h>
#include <asm/alternative.h>
#include <asm/cmpxchg.h>

/* The 64-bit atomic type */

#define ATOMIC64_INIT(i)        { (i) }

/**
 * atomic64_read - read atomic64 variable
 * @v: pointer to type atomic64_t
 *
 * Atomically reads the value of @v.
 * Doesn't imply a read memory barrier.
 */
static inline long atomic64_read(const atomic64_t *v)
{
        return (*(volatile long *)&(v)->counter);
}

/**
 * atomic64_set - set atomic64 variable
 * @v: pointer to type atomic64_t
 * @i: required value
 *
 * Atomically sets the value of @v to @i.
 */
static inline void atomic64_set(atomic64_t *v, long i)
{
        v->counter = i;
}
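
/*
 * Usage sketch (illustrative only; the identifiers below are hypothetical,
 * not part of this header): a statistics counter initialized at compile
 * time, reset with atomic64_set() and sampled lock-free with
 * atomic64_read().
 *
 *	static atomic64_t bytes_sent = ATOMIC64_INIT(0);
 *
 *	static void reset_stats(void)
 *	{
 *		atomic64_set(&bytes_sent, 0);
 *	}
 *
 *	static long sample_stats(void)
 *	{
 *		return atomic64_read(&bytes_sent);
 *	}
 */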

/**
 * atomic64_add - add integer to atomic64 variable
 * @i: integer value to add
 * @v: pointer to type atomic64_t
 *
 * Atomically adds @i to @v.
 */
static inline void atomic64_add(long i, atomic64_t *v)
{
        asm volatile(LOCK_PREFIX "addq %1,%0"
                     : "=m" (v->counter)
                     : "er" (i), "m" (v->counter));
}

/**
 * atomic64_sub - subtract integer from atomic64 variable
 * @i: integer value to subtract
 * @v: pointer to type atomic64_t
 *
 * Atomically subtracts @i from @v.
 */
static inline void atomic64_sub(long i, atomic64_t *v)
{
        asm volatile(LOCK_PREFIX "subq %1,%0"
                     : "=m" (v->counter)
                     : "er" (i), "m" (v->counter));
}
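
/*
 * Usage sketch (illustrative only): pairing atomic64_add()/atomic64_sub()
 * to track an in-flight total from concurrent contexts. submit_io() and
 * complete_io() are hypothetical callers.
 *
 *	static atomic64_t bytes_in_flight = ATOMIC64_INIT(0);
 *
 *	static void submit_io(long nbytes)
 *	{
 *		atomic64_add(nbytes, &bytes_in_flight);
 *	}
 *
 *	static void complete_io(long nbytes)
 *	{
 *		atomic64_sub(nbytes, &bytes_in_flight);
 *	}
 */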

/**
 * atomic64_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer to type atomic64_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
static inline int atomic64_sub_and_test(long i, atomic64_t *v)
{
        unsigned char c;

        asm volatile(LOCK_PREFIX "subq %2,%0; sete %1"
                     : "=m" (v->counter), "=qm" (c)
                     : "er" (i), "m" (v->counter) : "memory");
        return c;
}
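
/*
 * Usage sketch (illustrative only): dropping @n references in one atomic
 * step and detecting the final put. struct obj, put_many() and
 * destroy_obj() are hypothetical.
 *
 *	static void put_many(struct obj *o, long n)
 *	{
 *		if (atomic64_sub_and_test(n, &o->refs))
 *			destroy_obj(o);
 *	}
 */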

/**
 * atomic64_inc - increment atomic64 variable
 * @v: pointer to type atomic64_t
 *
 * Atomically increments @v by 1.
 */
static inline void atomic64_inc(atomic64_t *v)
{
        asm volatile(LOCK_PREFIX "incq %0"
                     : "=m" (v->counter)
                     : "m" (v->counter));
}

/**
 * atomic64_dec - decrement atomic64 variable
 * @v: pointer to type atomic64_t
 *
 * Atomically decrements @v by 1.
 */
static inline void atomic64_dec(atomic64_t *v)
{
        asm volatile(LOCK_PREFIX "decq %0"
                     : "=m" (v->counter)
                     : "m" (v->counter));
}

/**
 * atomic64_dec_and_test - decrement and test
 * @v: pointer to type atomic64_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
static inline int atomic64_dec_and_test(atomic64_t *v)
{
        unsigned char c;

        asm volatile(LOCK_PREFIX "decq %0; sete %1"
                     : "=m" (v->counter), "=qm" (c)
                     : "m" (v->counter) : "memory");
        return c != 0;
}
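
/*
 * Usage sketch (illustrative only): the classic reference-count pattern
 * built from atomic64_inc() and atomic64_dec_and_test(). struct obj,
 * get_obj(), put_obj() and free_obj() are hypothetical.
 *
 *	struct obj {
 *		atomic64_t refs;
 *	};
 *
 *	static void get_obj(struct obj *o)
 *	{
 *		atomic64_inc(&o->refs);
 *	}
 *
 *	static void put_obj(struct obj *o)
 *	{
 *		if (atomic64_dec_and_test(&o->refs))
 *			free_obj(o);
 *	}
 */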

/**
 * atomic64_inc_and_test - increment and test
 * @v: pointer to type atomic64_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
static inline int atomic64_inc_and_test(atomic64_t *v)
{
        unsigned char c;

        asm volatile(LOCK_PREFIX "incq %0; sete %1"
                     : "=m" (v->counter), "=qm" (c)
                     : "m" (v->counter) : "memory");
        return c != 0;
}

/**
 * atomic64_add_negative - add and test if negative
 * @i: integer value to add
 * @v: pointer to type atomic64_t
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
static inline int atomic64_add_negative(long i, atomic64_t *v)
{
        unsigned char c;

        asm volatile(LOCK_PREFIX "addq %2,%0; sets %1"
                     : "=m" (v->counter), "=qm" (c)
                     : "er" (i), "m" (v->counter) : "memory");
        return c;
}
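
/*
 * Usage sketch (illustrative only): charging against a signed budget and
 * reacting when the balance drops below zero. charge() is hypothetical;
 * note that the add has already happened by the time the test fires, so
 * a real caller would have to undo or tolerate the overdraft.
 *
 *	static atomic64_t budget = ATOMIC64_INIT(1024);
 *
 *	static int charge(long cost)
 *	{
 *		if (atomic64_add_negative(cost, &budget))
 *			return -EAGAIN;
 *		return 0;
 *	}
 */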

/**
 * atomic64_add_return - add and return
 * @i: integer value to add
 * @v: pointer to type atomic64_t
 *
 * Atomically adds @i to @v and returns @i + @v
 */
static inline long atomic64_add_return(long i, atomic64_t *v)
{
        return i + xadd(&v->counter, i);
}

static inline long atomic64_sub_return(long i, atomic64_t *v)
{
        return atomic64_add_return(-i, v);
}

#define atomic64_inc_return(v)  (atomic64_add_return(1, (v)))
#define atomic64_dec_return(v)  (atomic64_sub_return(1, (v)))
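
/*
 * Usage sketch (illustrative only): because atomic64_inc_return() hands
 * back the post-increment value, it can mint unique, monotonically
 * increasing 64-bit ids. next_id() is hypothetical.
 *
 *	static atomic64_t id_seq = ATOMIC64_INIT(0);
 *
 *	static long next_id(void)
 *	{
 *		return atomic64_inc_return(&id_seq);
 *	}
 */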

static inline long atomic64_cmpxchg(atomic64_t *v, long old, long new)
{
        return cmpxchg(&v->counter, old, new);
}

static inline long atomic64_xchg(atomic64_t *v, long new)
{
        return xchg(&v->counter, new);
}
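
/*
 * Usage sketch (illustrative only): the canonical cmpxchg retry loop,
 * here used to maintain a running maximum. atomic64_max() is a
 * hypothetical helper, not part of this header.
 *
 *	static void atomic64_max(atomic64_t *v, long new)
 *	{
 *		long cur = atomic64_read(v);
 *
 *		while (cur < new) {
 *			long old = atomic64_cmpxchg(v, cur, new);
 *			if (old == cur)
 *				break;
 *			cur = old;
 *		}
 *	}
 */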

/**
 * atomic64_add_unless - add unless the number is a given value
 * @v: pointer to type atomic64_t
 * @a: the amount to add to @v...
 * @u: ...unless @v is equal to @u.
 *
 * Atomically adds @a to @v, so long as it was not @u.
 * Returns non-zero if the addition was performed (@v was not @u),
 * and zero otherwise.
 */
static inline int atomic64_add_unless(atomic64_t *v, long a, long u)
{
        long c, old;
        c = atomic64_read(v);
        for (;;) {
                if (unlikely(c == u))
                        break;
                old = atomic64_cmpxchg(v, c, c + a);
                if (likely(old == c))
                        break;
                c = old;
        }
        return c != u;
}

#define atomic64_inc_not_zero(v) atomic64_add_unless((v), 1, 0)
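
/*
 * Usage sketch (illustrative only): taking a reference only while the
 * object is still live, as in lockless lookup paths where a zero count
 * means the object is being torn down. try_get_obj() is hypothetical
 * and builds on the struct obj from the earlier refcount sketch.
 *
 *	static int try_get_obj(struct obj *o)
 *	{
 *		return atomic64_inc_not_zero(&o->refs);
 *	}
 */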

/*
 * atomic64_dec_if_positive - decrement by 1 if old value positive
 * @v: pointer to type atomic64_t
 *
 * Returns the old value of @v minus 1, even if
 * @v was not decremented.
 */
static inline long atomic64_dec_if_positive(atomic64_t *v)
{
        long c, old, dec;
        c = atomic64_read(v);
        for (;;) {
                dec = c - 1;
                if (unlikely(dec < 0))
                        break;
                old = atomic64_cmpxchg(v, c, dec);
                if (likely(old == c))
                        break;
                c = old;
        }
        return dec;
}
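
/*
 * Usage sketch (illustrative only): consuming a token only when one is
 * available, semaphore-style. Since the return value is the old value
 * minus 1, a non-negative result means a token was taken. take_token()
 * is hypothetical.
 *
 *	static atomic64_t tokens = ATOMIC64_INIT(8);
 *
 *	static int take_token(void)
 *	{
 *		return atomic64_dec_if_positive(&tokens) >= 0;
 *	}
 */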

#endif /* _ASM_X86_ATOMIC64_64_H */