linux/arch/x86/include/asm/atomic.h
#ifndef _ASM_X86_ATOMIC_H
#define _ASM_X86_ATOMIC_H

#include <linux/compiler.h>
#include <linux/types.h>
#include <asm/alternative.h>
#include <asm/cmpxchg.h>
#include <asm/rmwcc.h>
#include <asm/barrier.h>

/*
 * Atomic operations that C can't guarantee us.  Useful for
 * resource counting etc.
 */

#define ATOMIC_INIT(i)  { (i) }
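/*
 * Illustrative sketch (not part of this header; the identifier nr_widgets is
 * hypothetical): ATOMIC_INIT() initializes an atomic_t at definition time,
 * e.g. for a statically allocated resource counter.
 *
 *        static atomic_t nr_widgets = ATOMIC_INIT(0);
 */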

/**
 * atomic_read - read atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically reads the value of @v.
 */
static __always_inline int atomic_read(const atomic_t *v)
{
        return READ_ONCE((v)->counter);
}

/**
 * atomic_set - set atomic variable
 * @v: pointer of type atomic_t
 * @i: required value
 *
 * Atomically sets the value of @v to @i.
 */
static __always_inline void atomic_set(atomic_t *v, int i)
{
        WRITE_ONCE(v->counter, i);
}
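/*
 * Illustrative sketch (hypothetical names, not kernel code): atomic_read() and
 * atomic_set() are plain loads/stores wrapped in READ_ONCE()/WRITE_ONCE(), so
 * they only guarantee a single untorn access by the compiler, not a LOCKed
 * read-modify-write.
 *
 *        static atomic_t state = ATOMIC_INIT(0);
 *
 *        void reset_state(void)
 *        {
 *                atomic_set(&state, 0);
 *        }
 *
 *        int peek_state(void)
 *        {
 *                return atomic_read(&state);
 *        }
 */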

/**
 * atomic_add - add integer to atomic variable
 * @i: integer value to add
 * @v: pointer of type atomic_t
 *
 * Atomically adds @i to @v.
 */
static __always_inline void atomic_add(int i, atomic_t *v)
{
        asm volatile(LOCK_PREFIX "addl %1,%0"
                     : "+m" (v->counter)
                     : "ir" (i));
}

/**
 * atomic_sub - subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v.
 */
static __always_inline void atomic_sub(int i, atomic_t *v)
{
        asm volatile(LOCK_PREFIX "subl %1,%0"
                     : "+m" (v->counter)
                     : "ir" (i));
}
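/*
 * Illustrative sketch (hypothetical names): pairing atomic_add()/atomic_sub()
 * to track an outstanding amount when the updated value itself is not needed.
 *
 *        static atomic_t bytes_in_flight = ATOMIC_INIT(0);
 *
 *        void submit_io(int len)   { atomic_add(len, &bytes_in_flight); }
 *        void complete_io(int len) { atomic_sub(len, &bytes_in_flight); }
 */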

/**
 * atomic_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
static __always_inline int atomic_sub_and_test(int i, atomic_t *v)
{
        GEN_BINARY_RMWcc(LOCK_PREFIX "subl", v->counter, "er", i, "%0", "e");
}
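/*
 * Illustrative sketch (struct obj and destroy_obj() are hypothetical):
 * dropping several references at once and tearing the object down only when
 * the count reaches zero.
 *
 *        void put_many(struct obj *o, int n)
 *        {
 *                if (atomic_sub_and_test(n, &o->refs))
 *                        destroy_obj(o);
 *        }
 */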

/**
 * atomic_inc - increment atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1.
 */
static __always_inline void atomic_inc(atomic_t *v)
{
        asm volatile(LOCK_PREFIX "incl %0"
                     : "+m" (v->counter));
}

/**
 * atomic_dec - decrement atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1.
 */
static __always_inline void atomic_dec(atomic_t *v)
{
        asm volatile(LOCK_PREFIX "decl %0"
                     : "+m" (v->counter));
}

/**
 * atomic_dec_and_test - decrement and test
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
static __always_inline int atomic_dec_and_test(atomic_t *v)
{
        GEN_UNARY_RMWcc(LOCK_PREFIX "decl", v->counter, "%0", "e");
}
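/*
 * Illustrative sketch (struct obj is hypothetical): the classic reference
 * counting idiom built on atomic_inc()/atomic_dec_and_test(). Because the
 * decrement and the zero test are one atomic operation, exactly one caller
 * observes the count hitting zero and performs the teardown.
 *
 *        void obj_get(struct obj *o)
 *        {
 *                atomic_inc(&o->refs);
 *        }
 *
 *        void obj_put(struct obj *o)
 *        {
 *                if (atomic_dec_and_test(&o->refs))
 *                        kfree(o);
 *        }
 */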

/**
 * atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
static __always_inline int atomic_inc_and_test(atomic_t *v)
{
        GEN_UNARY_RMWcc(LOCK_PREFIX "incl", v->counter, "%0", "e");
}

/**
 * atomic_add_negative - add and test if negative
 * @i: integer value to add
 * @v: pointer of type atomic_t
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
static __always_inline int atomic_add_negative(int i, atomic_t *v)
{
        GEN_BINARY_RMWcc(LOCK_PREFIX "addl", v->counter, "er", i, "%0", "s");
}

/**
 * atomic_add_return - add integer and return
 * @i: integer value to add
 * @v: pointer of type atomic_t
 *
 * Atomically adds @i to @v and returns @i + *@v
 */
static __always_inline int atomic_add_return(int i, atomic_t *v)
{
        return i + xadd(&v->counter, i);
}

/**
 * atomic_sub_return - subtract integer and return
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v and returns *@v - @i
 */
static __always_inline int atomic_sub_return(int i, atomic_t *v)
{
        return atomic_add_return(-i, v);
}

#define atomic_inc_return(v)  (atomic_add_return(1, v))
#define atomic_dec_return(v)  (atomic_sub_return(1, v))
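/*
 * Illustrative sketch (hypothetical names): the *_return variants are built on
 * XADD and hand back the updated value, which is useful when the new value
 * itself drives a decision, e.g. handing out monotonically increasing ticket
 * numbers.
 *
 *        static atomic_t next_ticket = ATOMIC_INIT(0);
 *
 *        int take_ticket(void)
 *        {
 *                return atomic_inc_return(&next_ticket);   /. returns 1, 2, 3, ...
 *        }
 */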

static __always_inline int atomic_cmpxchg(atomic_t *v, int old, int new)
{
        return cmpxchg(&v->counter, old, new);
}

static inline int atomic_xchg(atomic_t *v, int new)
{
        return xchg(&v->counter, new);
}
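/*
 * Illustrative sketch (add_capped() and its limit are hypothetical): a
 * compare-and-swap loop built on atomic_cmpxchg(), here incrementing a counter
 * only while it stays below a cap. The loop retries whenever another CPU
 * changed the value between the read and the cmpxchg.
 *
 *        bool add_capped(atomic_t *v, int limit)
 *        {
 *                int old = atomic_read(v);
 *
 *                for (;;) {
 *                        int prev;
 *
 *                        if (old >= limit)
 *                                return false;
 *                        prev = atomic_cmpxchg(v, old, old + 1);
 *                        if (prev == old)
 *                                return true;
 *                        old = prev;
 *                }
 *        }
 */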

#define ATOMIC_OP(op)                                                   \
static inline void atomic_##op(int i, atomic_t *v)                      \
{                                                                       \
        asm volatile(LOCK_PREFIX #op"l %1,%0"                           \
                        : "+m" (v->counter)                             \
                        : "ir" (i)                                      \
                        : "memory");                                    \
}

ATOMIC_OP(and)
ATOMIC_OP(or)
ATOMIC_OP(xor)

#undef ATOMIC_OP
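/*
 * The ATOMIC_OP() macro above expands to atomic_and(), atomic_or() and
 * atomic_xor(). Illustrative sketch (the flag constants are hypothetical):
 * atomically setting and clearing bits in a shared flags word.
 *
 *        #define MY_FLAG_DIRTY        0x1
 *
 *        static atomic_t flags = ATOMIC_INIT(0);
 *
 *        void mark_dirty(void)  { atomic_or(MY_FLAG_DIRTY, &flags); }
 *        void clear_dirty(void) { atomic_and(~MY_FLAG_DIRTY, &flags); }
 */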

/**
 * __atomic_add_unless - add unless the number is already a given value
 * @v: pointer of type atomic_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as @v was not already @u.
 * Returns the old value of @v.
 */
static __always_inline int __atomic_add_unless(atomic_t *v, int a, int u)
{
        int c, old;
        c = atomic_read(v);
        for (;;) {
                if (unlikely(c == (u)))
                        break;
                old = atomic_cmpxchg((v), c, c + (a));
                if (likely(old == c))
                        break;
                c = old;
        }
        return c;
}
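/*
 * Illustrative sketch (struct obj and obj_get_unless_zero() are hypothetical):
 * __atomic_add_unless() is the building block for "increment unless zero"
 * lookups, where an object whose refcount already dropped to zero must not be
 * resurrected. Since the old value is returned, a zero result means the add
 * was skipped.
 *
 *        bool obj_get_unless_zero(struct obj *o)
 *        {
 *                return __atomic_add_unless(&o->refs, 1, 0) != 0;
 *        }
 */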

/**
 * atomic_inc_short - increment of a short integer
 * @v: pointer of type short int
 *
 * Atomically adds 1 to @v.
 * Returns the new value of @v.
 */
static __always_inline short int atomic_inc_short(short int *v)
{
        asm(LOCK_PREFIX "addw $1, %0" : "+m" (*v));
        return *v;
}

#ifdef CONFIG_X86_32
# include <asm/atomic64_32.h>
#else
# include <asm/atomic64_64.h>
#endif

#endif /* _ASM_X86_ATOMIC_H */