linux/arch/x86/include/asm/atomic64_32.h
#ifndef _ASM_X86_ATOMIC64_32_H
#define _ASM_X86_ATOMIC64_32_H

#include <linux/compiler.h>
#include <linux/types.h>
#include <asm/processor.h>
#include <asm/cmpxchg.h>

/* A 64-bit atomic type */

typedef struct {
	u64 __aligned(8) counter;
} atomic64_t;

#define ATOMIC64_INIT(val)	{ (val) }
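
/*
 * Illustrative use (the variable name is hypothetical, not part of this
 * header): ATOMIC64_INIT() gives compile-time initialization, e.g.
 *
 *	static atomic64_t nr_events_example = ATOMIC64_INIT(0);
 */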

#define __ATOMIC64_DECL(sym) void atomic64_##sym(atomic64_t *, ...)
#ifndef ATOMIC64_EXPORT
#define ATOMIC64_DECL_ONE __ATOMIC64_DECL
#else
#define ATOMIC64_DECL_ONE(sym) __ATOMIC64_DECL(sym); \
	ATOMIC64_EXPORT(atomic64_##sym)
#endif

#ifdef CONFIG_X86_CMPXCHG64
#define __alternative_atomic64(f, g, out, in...) \
	asm volatile("call %P[func]" \
		     : out : [func] "i" (atomic64_##g##_cx8), ## in)

#define ATOMIC64_DECL(sym) ATOMIC64_DECL_ONE(sym##_cx8)
#else
#define __alternative_atomic64(f, g, out, in...) \
	alternative_call(atomic64_##f##_386, atomic64_##g##_cx8, \
			 X86_FEATURE_CX8, ASM_OUTPUT2(out), ## in)

#define ATOMIC64_DECL(sym) ATOMIC64_DECL_ONE(sym##_cx8); \
	ATOMIC64_DECL_ONE(sym##_386)

ATOMIC64_DECL_ONE(add_386);
ATOMIC64_DECL_ONE(sub_386);
ATOMIC64_DECL_ONE(inc_386);
ATOMIC64_DECL_ONE(dec_386);
#endif

#define alternative_atomic64(f, out, in...) \
	__alternative_atomic64(f, f, ASM_OUTPUT2(out), ## in)
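
/*
 * Worked example (illustrative only): with CONFIG_X86_CMPXCHG64 set, a call
 * such as the one in atomic64_read() below,
 *
 *	alternative_atomic64(read, "=&A" (r), "c" (v) : "memory");
 *
 * expands to a direct call into the out-of-line cx8 implementation:
 *
 *	asm volatile("call %P[func]"
 *		     : "=&A" (r)
 *		     : [func] "i" (atomic64_read_cx8), "c" (v)
 *		     : "memory");
 *
 * Without CONFIG_X86_CMPXCHG64, alternative_call() patches in either the
 * _386 or the _cx8 variant at boot, keyed on X86_FEATURE_CX8.
 */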

ATOMIC64_DECL(read);
ATOMIC64_DECL(set);
ATOMIC64_DECL(xchg);
ATOMIC64_DECL(add_return);
ATOMIC64_DECL(sub_return);
ATOMIC64_DECL(inc_return);
ATOMIC64_DECL(dec_return);
ATOMIC64_DECL(dec_if_positive);
ATOMIC64_DECL(inc_not_zero);
ATOMIC64_DECL(add_unless);

#undef ATOMIC64_DECL
#undef ATOMIC64_DECL_ONE
#undef __ATOMIC64_DECL
#undef ATOMIC64_EXPORT

/**
 * atomic64_cmpxchg - cmpxchg atomic64 variable
 * @v: pointer to type atomic64_t
 * @o: expected value
 * @n: new value
 *
 * Atomically sets @v to @n if it was equal to @o and returns
 * the old value.
 */
static inline long long atomic64_cmpxchg(atomic64_t *v, long long o, long long n)
{
	return cmpxchg64(&v->counter, o, n);
}

/**
 * atomic64_xchg - xchg atomic64 variable
 * @v: pointer to type atomic64_t
 * @n: value to assign
 *
 * Atomically exchanges the value of @v with @n and returns
 * the old value.
 */
static inline long long atomic64_xchg(atomic64_t *v, long long n)
{
	long long o;
	unsigned high = (unsigned)(n >> 32);
	unsigned low = (unsigned)n;
	alternative_atomic64(xchg, "=&A" (o),
			     "S" (v), "b" (low), "c" (high)
			     : "memory");
	return o;
}
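
/*
 * Illustrative sketch (hypothetical helper, not part of this header's API):
 * atomically drain a 64-bit statistics counter with atomic64_xchg(), returning
 * the accumulated total and leaving zero behind.
 */
static inline long long atomic64_drain_example(atomic64_t *v)
{
	/* Reading the old total and storing 0 happen as one atomic operation. */
	return atomic64_xchg(v, 0);
}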

/**
 * atomic64_set - set atomic64 variable
 * @v: pointer to type atomic64_t
 * @i: value to assign
 *
 * Atomically sets the value of @v to @i.
 */
static inline void atomic64_set(atomic64_t *v, long long i)
{
	unsigned high = (unsigned)(i >> 32);
	unsigned low = (unsigned)i;
	alternative_atomic64(set, /* no output */,
			     "S" (v), "b" (low), "c" (high)
			     : "eax", "edx", "memory");
}

/**
 * atomic64_read - read atomic64 variable
 * @v: pointer to type atomic64_t
 *
 * Atomically reads the value of @v and returns it.
 */
static inline long long atomic64_read(const atomic64_t *v)
{
	long long r;
	alternative_atomic64(read, "=&A" (r), "c" (v) : "memory");
	return r;
}
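
/*
 * Illustrative sketch (hypothetical helper, not part of this header's API):
 * the usual compare-and-swap retry loop, here raising @v to at least @new_val
 * with atomic64_read() and atomic64_cmpxchg().
 */
static inline long long atomic64_fetch_max_example(atomic64_t *v, long long new_val)
{
	long long old = atomic64_read(v);

	while (old < new_val) {
		/* cmpxchg returns the value it found; retry if another CPU raced us. */
		long long seen = atomic64_cmpxchg(v, old, new_val);

		if (seen == old)
			break;
		old = seen;
	}
	return old;
}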

/**
 * atomic64_add_return - add and return
 * @i: integer value to add
 * @v: pointer to type atomic64_t
 *
 * Atomically adds @i to @v and returns @i + *@v
 */
static inline long long atomic64_add_return(long long i, atomic64_t *v)
{
	alternative_atomic64(add_return,
			     ASM_OUTPUT2("+A" (i), "+c" (v)),
			     ASM_NO_INPUT_CLOBBER("memory"));
	return i;
}

/*
 * Other variants with different arithmetic operators:
 */
static inline long long atomic64_sub_return(long long i, atomic64_t *v)
{
	alternative_atomic64(sub_return,
			     ASM_OUTPUT2("+A" (i), "+c" (v)),
			     ASM_NO_INPUT_CLOBBER("memory"));
	return i;
}

static inline long long atomic64_inc_return(atomic64_t *v)
{
	long long a;
	alternative_atomic64(inc_return, "=&A" (a),
			     "S" (v) : "memory", "ecx");
	return a;
}

static inline long long atomic64_dec_return(atomic64_t *v)
{
	long long a;
	alternative_atomic64(dec_return, "=&A" (a),
			     "S" (v) : "memory", "ecx");
	return a;
}

/**
 * atomic64_add - add integer to atomic64 variable
 * @i: integer value to add
 * @v: pointer to type atomic64_t
 *
 * Atomically adds @i to @v.
 */
static inline long long atomic64_add(long long i, atomic64_t *v)
{
	__alternative_atomic64(add, add_return,
			       ASM_OUTPUT2("+A" (i), "+c" (v)),
			       ASM_NO_INPUT_CLOBBER("memory"));
	return i;
}

/**
 * atomic64_sub - subtract integer from atomic64 variable
 * @i: integer value to subtract
 * @v: pointer to type atomic64_t
 *
 * Atomically subtracts @i from @v.
 */
static inline long long atomic64_sub(long long i, atomic64_t *v)
{
	__alternative_atomic64(sub, sub_return,
			       ASM_OUTPUT2("+A" (i), "+c" (v)),
			       ASM_NO_INPUT_CLOBBER("memory"));
	return i;
}
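
/*
 * Illustrative sketch (hypothetical helper, not part of this header's API):
 * charge @amount against a 64-bit usage counter with an upper @limit, built on
 * atomic64_add_return() and atomic64_sub().  Returns 0 on success, or -1 and
 * backs the charge out again if the new total would exceed the limit.
 */
static inline int atomic64_try_charge_example(atomic64_t *usage, long long amount,
					      long long limit)
{
	if (atomic64_add_return(amount, usage) > limit) {
		atomic64_sub(amount, usage);
		return -1;
	}
	return 0;
}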

/**
 * atomic64_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer to type atomic64_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
static inline int atomic64_sub_and_test(long long i, atomic64_t *v)
{
	return atomic64_sub_return(i, v) == 0;
}

/**
 * atomic64_inc - increment atomic64 variable
 * @v: pointer to type atomic64_t
 *
 * Atomically increments @v by 1.
 */
static inline void atomic64_inc(atomic64_t *v)
{
	__alternative_atomic64(inc, inc_return, /* no output */,
			       "S" (v) : "memory", "eax", "ecx", "edx");
}

/**
 * atomic64_dec - decrement atomic64 variable
 * @v: pointer to type atomic64_t
 *
 * Atomically decrements @v by 1.
 */
static inline void atomic64_dec(atomic64_t *v)
{
	__alternative_atomic64(dec, dec_return, /* no output */,
			       "S" (v) : "memory", "eax", "ecx", "edx");
}

/**
 * atomic64_dec_and_test - decrement and test
 * @v: pointer to type atomic64_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
static inline int atomic64_dec_and_test(atomic64_t *v)
{
	return atomic64_dec_return(v) == 0;
}
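
/*
 * Illustrative sketch (hypothetical helper, not part of this header's API):
 * the classic reference-count release pattern on top of
 * atomic64_dec_and_test().  Exactly one caller, the one dropping the final
 * reference, sees the count hit zero and runs @release.
 */
static inline void atomic64_put_example(atomic64_t *refcount,
					void (*release)(atomic64_t *refcount))
{
	if (atomic64_dec_and_test(refcount))
		release(refcount);
}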

/**
 * atomic64_inc_and_test - increment and test
 * @v: pointer to type atomic64_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
static inline int atomic64_inc_and_test(atomic64_t *v)
{
	return atomic64_inc_return(v) == 0;
}

/**
 * atomic64_add_negative - add and test if negative
 * @i: integer value to add
 * @v: pointer to type atomic64_t
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
static inline int atomic64_add_negative(long long i, atomic64_t *v)
{
	return atomic64_add_return(i, v) < 0;
}

/**
 * atomic64_add_unless - add unless the number is a given value
 * @v: pointer of type atomic64_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as it was not @u.
 * Returns non-zero if the add was done, zero otherwise.
 */
static inline int atomic64_add_unless(atomic64_t *v, long long a, long long u)
{
	unsigned low = (unsigned)u;
	unsigned high = (unsigned)(u >> 32);
	alternative_atomic64(add_unless,
			     ASM_OUTPUT2("+A" (a), "+c" (low), "+D" (high)),
			     "S" (v) : "memory");
	return (int)a;
}
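
/*
 * Illustrative sketch (hypothetical helper, not part of this header's API):
 * take a reference on an object unless its count already holds a @dead
 * sentinel value, using atomic64_add_unless().  Returns non-zero if the
 * reference was taken.
 */
static inline int atomic64_get_unless_dead_example(atomic64_t *refcount,
						   long long dead)
{
	return atomic64_add_unless(refcount, 1, dead);
}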

static inline int atomic64_inc_not_zero(atomic64_t *v)
{
	int r;
	alternative_atomic64(inc_not_zero, "=&a" (r),
			     "S" (v) : "ecx", "edx", "memory");
	return r;
}

static inline long long atomic64_dec_if_positive(atomic64_t *v)
{
	long long r;
	alternative_atomic64(dec_if_positive, "=&A" (r),
			     "S" (v) : "ecx", "memory");
	return r;
}
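
/*
 * Illustrative sketch (hypothetical helper, not part of this header's API):
 * consume one credit from a 64-bit budget without ever driving it negative,
 * using atomic64_dec_if_positive().  Returns non-zero if a credit was taken.
 */
static inline int atomic64_take_credit_example(atomic64_t *credits)
{
	/*
	 * dec_if_positive() returns the decremented value; if that value would
	 * be negative it is returned but not stored, so >= 0 means we took one.
	 */
	return atomic64_dec_if_positive(credits) >= 0;
}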

#undef alternative_atomic64
#undef __alternative_atomic64

#endif /* _ASM_X86_ATOMIC64_32_H */