/* linux/arch/x86/include/asm/atomic64_32.h */
#ifndef _ASM_X86_ATOMIC64_32_H
#define _ASM_X86_ATOMIC64_32_H

#include <linux/compiler.h>
#include <linux/types.h>
//#include <asm/cmpxchg.h>

/* A 64-bit atomic type, for 32-bit x86 */

typedef struct {
	/*
	 * Force natural (8-byte) alignment: on 32-bit x86 a u64 is not
	 * necessarily 8-byte aligned by default.
	 */
	u64 __aligned(8) counter;
} atomic64_t;

#define ATOMIC64_INIT(val)	{ (val) }

/*
 * Declare the out-of-line helper routines.  They take their arguments
 * in registers (see the asm constraints in the inline wrappers below),
 * hence the deliberately vague (atomic64_t *, ...) prototype.
 */
#define __ATOMIC64_DECL(sym) void atomic64_##sym(atomic64_t *, ...)
#ifndef ATOMIC64_EXPORT
#define ATOMIC64_DECL_ONE __ATOMIC64_DECL
#else
/* When ATOMIC64_EXPORT is defined, also export each helper symbol. */
#define ATOMIC64_DECL_ONE(sym) __ATOMIC64_DECL(sym); \
	ATOMIC64_EXPORT(atomic64_##sym)
#endif
  23
#ifdef CONFIG_X86_CMPXCHG64
/*
 * CMPXCHG8B is guaranteed by the kernel config: always call the _cx8
 * helper directly.  The %P operand modifier emits the bare symbol name
 * suitable for a "call".  (f) is unused on this path.
 */
#define __alternative_atomic64(f, g, out, in...) \
	asm volatile("call %P[func]" \
		     : out : [func] "i" (atomic64_##g##_cx8), ## in)

#define ATOMIC64_DECL(sym) ATOMIC64_DECL_ONE(sym##_cx8)
#else
/*
 * CMPXCHG8B may be absent at runtime: alternative_call() patches the
 * call site to either the _386 fallback (f) or the _cx8 variant (g),
 * keyed on X86_FEATURE_CX8.
 */
#define __alternative_atomic64(f, g, out, in...) \
	alternative_call(atomic64_##f##_386, atomic64_##g##_cx8, \
			 X86_FEATURE_CX8, ASM_OUTPUT2(out), ## in)

#define ATOMIC64_DECL(sym) ATOMIC64_DECL_ONE(sym##_cx8); \
	ATOMIC64_DECL_ONE(sym##_386)

/*
 * _386-only fallbacks: the CX8 path implements plain add/sub/inc/dec
 * via the corresponding _return helpers instead (see atomic64_add()
 * and friends below), so no _cx8 counterparts exist for these.
 */
ATOMIC64_DECL_ONE(add_386);
ATOMIC64_DECL_ONE(sub_386);
ATOMIC64_DECL_ONE(inc_386);
ATOMIC64_DECL_ONE(dec_386);
#endif

/* Common case: fallback and cx8 variants share the same base name. */
#define alternative_atomic64(f, out, in...) \
	__alternative_atomic64(f, f, ASM_OUTPUT2(out), ## in)
  46
/* Declare the out-of-line helper(s) for each operation. */
ATOMIC64_DECL(read);
ATOMIC64_DECL(set);
ATOMIC64_DECL(xchg);
ATOMIC64_DECL(add_return);
ATOMIC64_DECL(sub_return);
ATOMIC64_DECL(inc_return);
ATOMIC64_DECL(dec_return);
ATOMIC64_DECL(dec_if_positive);
ATOMIC64_DECL(inc_not_zero);
ATOMIC64_DECL(add_unless);

/* The declaration machinery is not needed past this point. */
#undef ATOMIC64_DECL
#undef ATOMIC64_DECL_ONE
#undef __ATOMIC64_DECL
#undef ATOMIC64_EXPORT
  62
  63/**
  64 * atomic64_cmpxchg - cmpxchg atomic64 variable
  65 * @v: pointer to type atomic64_t
  66 * @o: expected value
  67 * @n: new value
  68 *
  69 * Atomically sets @v to @n if it was equal to @o and returns
  70 * the old value.
  71 */
  72
  73static inline long long atomic64_cmpxchg(atomic64_t *v, long long o, long long n)
  74{
  75        return cmpxchg64(&v->counter, o, n);
  76}
  77
  78/**
  79 * atomic64_xchg - xchg atomic64 variable
  80 * @v: pointer to type atomic64_t
  81 * @n: value to assign
  82 *
  83 * Atomically xchgs the value of @v to @n and returns
  84 * the old value.
  85 */
  86static inline long long atomic64_xchg(atomic64_t *v, long long n)
  87{
  88        long long o;
  89        unsigned high = (unsigned)(n >> 32);
  90        unsigned low = (unsigned)n;
  91        alternative_atomic64(xchg, "=&A" (o),
  92                             "S" (v), "b" (low), "c" (high)
  93                             : "memory");
  94        return o;
  95}
  96
  97/**
  98 * atomic64_set - set atomic64 variable
  99 * @v: pointer to type atomic64_t
 100 * @i: value to assign
 101 *
 102 * Atomically sets the value of @v to @n.
 103 */
 104static inline void atomic64_set(atomic64_t *v, long long i)
 105{
 106        unsigned high = (unsigned)(i >> 32);
 107        unsigned low = (unsigned)i;
 108        alternative_atomic64(set, /* no output */,
 109                             "S" (v), "b" (low), "c" (high)
 110                             : "eax", "edx", "memory");
 111}
 112
 113/**
 114 * atomic64_read - read atomic64 variable
 115 * @v: pointer to type atomic64_t
 116 *
 117 * Atomically reads the value of @v and returns it.
 118 */
 119static inline long long atomic64_read(const atomic64_t *v)
 120{
 121        long long r;
 122        alternative_atomic64(read, "=&A" (r), "c" (v) : "memory");
 123        return r;
 124 }
 125
/**
 * atomic64_add_return - add and return
 * @i: integer value to add
 * @v: pointer to type atomic64_t
 *
 * Atomically adds @i to @v and returns @i + *@v
 */
static inline long long atomic64_add_return(long long i, atomic64_t *v)
{
	/*
	 * @i is passed and returned in %edx:%eax ("+A"): the helper
	 * overwrites it with the resulting sum.  %ecx ("+c") carries @v
	 * and may be modified by the helper.
	 */
	alternative_atomic64(add_return,
			     ASM_OUTPUT2("+A" (i), "+c" (v)),
			     ASM_NO_INPUT_CLOBBER("memory"));
	return i;
}
 140
/*
 * Other variants with different arithmetic operators:
 */
/* Atomically subtracts @i from @v and returns the new value. */
static inline long long atomic64_sub_return(long long i, atomic64_t *v)
{
	/* Same register protocol as atomic64_add_return(). */
	alternative_atomic64(sub_return,
			     ASM_OUTPUT2("+A" (i), "+c" (v)),
			     ASM_NO_INPUT_CLOBBER("memory"));
	return i;
}
 151
 152static inline long long atomic64_inc_return(atomic64_t *v)
 153{
 154        long long a;
 155        alternative_atomic64(inc_return, "=&A" (a),
 156                             "S" (v) : "memory", "ecx");
 157        return a;
 158}
 159
 160static inline long long atomic64_dec_return(atomic64_t *v)
 161{
 162        long long a;
 163        alternative_atomic64(dec_return, "=&A" (a),
 164                             "S" (v) : "memory", "ecx");
 165        return a;
 166}
 167
/**
 * atomic64_add - add integer to atomic64 variable
 * @i: integer value to add
 * @v: pointer to type atomic64_t
 *
 * Atomically adds @i to @v.
 */
static inline long long atomic64_add(long long i, atomic64_t *v)
{
	/*
	 * There is no dedicated _cx8 "add" helper: the cx8 path reuses
	 * add_return (second macro argument) and simply ignores the
	 * returned sum; the 386 fallback uses the dedicated add_386.
	 */
	__alternative_atomic64(add, add_return,
			       ASM_OUTPUT2("+A" (i), "+c" (v)),
			       ASM_NO_INPUT_CLOBBER("memory"));
	return i;
}
 182
/**
 * atomic64_sub - subtract the atomic64 variable
 * @i: integer value to subtract
 * @v: pointer to type atomic64_t
 *
 * Atomically subtracts @i from @v.
 */
static inline long long atomic64_sub(long long i, atomic64_t *v)
{
	/* Mirrors atomic64_add(): the cx8 path reuses sub_return. */
	__alternative_atomic64(sub, sub_return,
			       ASM_OUTPUT2("+A" (i), "+c" (v)),
			       ASM_NO_INPUT_CLOBBER("memory"));
	return i;
}
 197
 198/**
 199 * atomic64_sub_and_test - subtract value from variable and test result
 200 * @i: integer value to subtract
 201 * @v: pointer to type atomic64_t
 202 *
 203 * Atomically subtracts @i from @v and returns
 204 * true if the result is zero, or false for all
 205 * other cases.
 206 */
 207static inline int atomic64_sub_and_test(long long i, atomic64_t *v)
 208{
 209        return atomic64_sub_return(i, v) == 0;
 210}
 211
/**
 * atomic64_inc - increment atomic64 variable
 * @v: pointer to type atomic64_t
 *
 * Atomically increments @v by 1.
 */
static inline void atomic64_inc(atomic64_t *v)
{
	/*
	 * The cx8 path reuses inc_return and discards the result, so
	 * %eax/%ecx/%edx must all be listed as clobbered.
	 */
	__alternative_atomic64(inc, inc_return, /* no output */,
			       "S" (v) : "memory", "eax", "ecx", "edx");
}
 223
/**
 * atomic64_dec - decrement atomic64 variable
 * @v: pointer to type atomic64_t
 *
 * Atomically decrements @v by 1.
 */
static inline void atomic64_dec(atomic64_t *v)
{
	/* Mirrors atomic64_inc(): cx8 path reuses dec_return. */
	__alternative_atomic64(dec, dec_return, /* no output */,
			       "S" (v) : "memory", "eax", "ecx", "edx");
}
 235
 236/**
 237 * atomic64_dec_and_test - decrement and test
 238 * @v: pointer to type atomic64_t
 239 *
 240 * Atomically decrements @v by 1 and
 241 * returns true if the result is 0, or false for all other
 242 * cases.
 243 */
 244static inline int atomic64_dec_and_test(atomic64_t *v)
 245{
 246        return atomic64_dec_return(v) == 0;
 247}
 248
 249/**
 250 * atomic64_inc_and_test - increment and test
 251 * @v: pointer to type atomic64_t
 252 *
 253 * Atomically increments @v by 1
 254 * and returns true if the result is zero, or false for all
 255 * other cases.
 256 */
 257static inline int atomic64_inc_and_test(atomic64_t *v)
 258{
 259        return atomic64_inc_return(v) == 0;
 260}
 261
 262/**
 263 * atomic64_add_negative - add and test if negative
 264 * @i: integer value to add
 265 * @v: pointer to type atomic64_t
 266 *
 267 * Atomically adds @i to @v and returns true
 268 * if the result is negative, or false when
 269 * result is greater than or equal to zero.
 270 */
 271static inline int atomic64_add_negative(long long i, atomic64_t *v)
 272{
 273        return atomic64_add_return(i, v) < 0;
 274}
 275
 276/**
 277 * atomic64_add_unless - add unless the number is a given value
 278 * @v: pointer of type atomic64_t
 279 * @a: the amount to add to v...
 280 * @u: ...unless v is equal to u.
 281 *
 282 * Atomically adds @a to @v, so long as it was not @u.
 283 * Returns non-zero if the add was done, zero otherwise.
 284 */
 285static inline int atomic64_add_unless(atomic64_t *v, long long a, long long u)
 286{
 287        unsigned low = (unsigned)u;
 288        unsigned high = (unsigned)(u >> 32);
 289        alternative_atomic64(add_unless,
 290                             ASM_OUTPUT2("+A" (a), "+c" (low), "+D" (high)),
 291                             "S" (v) : "memory");
 292        return (int)a;
 293}
 294
 295
 296static inline int atomic64_inc_not_zero(atomic64_t *v)
 297{
 298        int r;
 299        alternative_atomic64(inc_not_zero, "=&a" (r),
 300                             "S" (v) : "ecx", "edx", "memory");
 301        return r;
 302}
 303
 304static inline long long atomic64_dec_if_positive(atomic64_t *v)
 305{
 306        long long r;
 307        alternative_atomic64(dec_if_positive, "=&A" (r),
 308                             "S" (v) : "ecx", "memory");
 309        return r;
 310}
 311
 312#undef alternative_atomic64
 313#undef __alternative_atomic64
 314
 315#define ATOMIC64_OP(op, c_op)                                           \
 316static inline void atomic64_##op(long long i, atomic64_t *v)            \
 317{                                                                       \
 318        long long old, c = 0;                                           \
 319        while ((old = atomic64_cmpxchg(v, c, c c_op i)) != c)           \
 320                c = old;                                                \
 321}
 322
 323ATOMIC64_OP(and, &)
 324ATOMIC64_OP(or, |)
 325ATOMIC64_OP(xor, ^)
 326
 327#undef ATOMIC64_OP
 328
 329#endif /* _ASM_X86_ATOMIC64_32_H */
 330