linux/arch/x86/include/asm/atomic_64.h
#ifndef _ASM_X86_ATOMIC_64_H
#define _ASM_X86_ATOMIC_64_H

#include <linux/types.h>
#include <asm/alternative.h>
#include <asm/cmpxchg.h>

/*
 * Atomic operations that C can't guarantee us.  Useful for
 * resource counting etc.
 */

#define ATOMIC_INIT(i)  { (i) }

/**
 * atomic_read - read atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically reads the value of @v.
 */
static inline int atomic_read(const atomic_t *v)
{
        return v->counter;
}

/**
 * atomic_set - set atomic variable
 * @v: pointer of type atomic_t
 * @i: required value
 *
 * Atomically sets the value of @v to @i.
 */
static inline void atomic_set(atomic_t *v, int i)
{
        v->counter = i;
}

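/*
 * Usage sketch (hypothetical caller, names are illustrative): a simple
 * event counter.  atomic_read() and atomic_set() are plain accesses; they
 * only guarantee the load/store itself is not torn and imply no ordering.
 *
 *      static atomic_t nr_events = ATOMIC_INIT(0);
 *
 *      atomic_set(&nr_events, 0);
 *      printk(KERN_INFO "events: %d\n", atomic_read(&nr_events));
 */
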
/**
 * atomic_add - add integer to atomic variable
 * @i: integer value to add
 * @v: pointer of type atomic_t
 *
 * Atomically adds @i to @v.
 */
static inline void atomic_add(int i, atomic_t *v)
{
        asm volatile(LOCK_PREFIX "addl %1,%0"
                     : "=m" (v->counter)
                     : "ir" (i), "m" (v->counter));
}

/**
 * atomic_sub - subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v.
 */
static inline void atomic_sub(int i, atomic_t *v)
{
        asm volatile(LOCK_PREFIX "subl %1,%0"
                     : "=m" (v->counter)
                     : "ir" (i), "m" (v->counter));
}

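/*
 * Usage sketch (hypothetical): tracking outstanding work with atomic_add()
 * and atomic_sub().  Neither returns the new value; the *_return variants
 * further below do.
 *
 *      atomic_add(nr_submitted, &pending);
 *      ...
 *      atomic_sub(nr_done, &pending);
 */
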
/**
 * atomic_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
static inline int atomic_sub_and_test(int i, atomic_t *v)
{
        unsigned char c;

        asm volatile(LOCK_PREFIX "subl %2,%0; sete %1"
                     : "=m" (v->counter), "=qm" (c)
                     : "ir" (i), "m" (v->counter) : "memory");
        return c;
}

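/*
 * Usage sketch (hypothetical): waking a waiter once a whole batch of
 * sub-requests has been accounted for.  "pending" and "done" are
 * illustrative names only.
 *
 *      if (atomic_sub_and_test(batch_size, &pending))
 *              complete(&done);
 */
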
/**
 * atomic_inc - increment atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1.
 */
static inline void atomic_inc(atomic_t *v)
{
        asm volatile(LOCK_PREFIX "incl %0"
                     : "=m" (v->counter)
                     : "m" (v->counter));
}

/**
 * atomic_dec - decrement atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1.
 */
static inline void atomic_dec(atomic_t *v)
{
        asm volatile(LOCK_PREFIX "decl %0"
                     : "=m" (v->counter)
                     : "m" (v->counter));
}

/**
 * atomic_dec_and_test - decrement and test
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
static inline int atomic_dec_and_test(atomic_t *v)
{
        unsigned char c;

        asm volatile(LOCK_PREFIX "decl %0; sete %1"
                     : "=m" (v->counter), "=qm" (c)
                     : "m" (v->counter) : "memory");
        return c != 0;
}

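/*
 * Usage sketch (hypothetical): the classic get/put reference count.
 * atomic_inc() takes a reference; the caller that sees
 * atomic_dec_and_test() return true dropped the last one and may free
 * the object.
 *
 *      atomic_inc(&obj->refcount);
 *      ...
 *      if (atomic_dec_and_test(&obj->refcount))
 *              kfree(obj);
 */
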
/**
 * atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
static inline int atomic_inc_and_test(atomic_t *v)
{
        unsigned char c;

        asm volatile(LOCK_PREFIX "incl %0; sete %1"
                     : "=m" (v->counter), "=qm" (c)
                     : "m" (v->counter) : "memory");
        return c != 0;
}

/**
 * atomic_add_negative - add and test if negative
 * @i: integer value to add
 * @v: pointer of type atomic_t
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
static inline int atomic_add_negative(int i, atomic_t *v)
{
        unsigned char c;

        asm volatile(LOCK_PREFIX "addl %2,%0; sets %1"
                     : "=m" (v->counter), "=qm" (c)
                     : "ir" (i), "m" (v->counter) : "memory");
        return c;
}

/**
 * atomic_add_return - add and return
 * @i: integer value to add
 * @v: pointer of type atomic_t
 *
 * Atomically adds @i to @v and returns @i + @v
 */
static inline int atomic_add_return(int i, atomic_t *v)
{
        int __i = i;
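        /*
         * xaddl adds %0 to v->counter and leaves the counter's previous
         * value in %0, so afterwards i holds the old value and i + __i
         * is the newly stored sum.
         */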
        asm volatile(LOCK_PREFIX "xaddl %0, %1"
                     : "+r" (i), "+m" (v->counter)
                     : : "memory");
        return i + __i;
}

static inline int atomic_sub_return(int i, atomic_t *v)
{
        return atomic_add_return(-i, v);
}

#define atomic_inc_return(v)  (atomic_add_return(1, v))
#define atomic_dec_return(v)  (atomic_sub_return(1, v))

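/*
 * Usage sketch (hypothetical): handing out unique ids.  Because
 * atomic_inc_return() returns the value after the caller's own increment,
 * no two callers can observe the same id.
 *
 *      static atomic_t next_id = ATOMIC_INIT(0);
 *
 *      int id = atomic_inc_return(&next_id);
 */
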
/* The 64-bit atomic type */

#define ATOMIC64_INIT(i)        { (i) }

/**
 * atomic64_read - read atomic64 variable
 * @v: pointer of type atomic64_t
 *
 * Atomically reads the value of @v.
 * Doesn't imply a read memory barrier.
 */
static inline long atomic64_read(const atomic64_t *v)
{
        return v->counter;
}

/**
 * atomic64_set - set atomic64 variable
 * @v: pointer to type atomic64_t
 * @i: required value
 *
 * Atomically sets the value of @v to @i.
 */
static inline void atomic64_set(atomic64_t *v, long i)
{
        v->counter = i;
}

/**
 * atomic64_add - add integer to atomic64 variable
 * @i: integer value to add
 * @v: pointer to type atomic64_t
 *
 * Atomically adds @i to @v.
 */
static inline void atomic64_add(long i, atomic64_t *v)
{
        asm volatile(LOCK_PREFIX "addq %1,%0"
                     : "=m" (v->counter)
                     : "er" (i), "m" (v->counter));
}

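/*
 * Usage sketch (hypothetical): counters that would quickly overflow 32
 * bits, such as per-device byte totals, belong in an atomic64_t.  The
 * field names are illustrative.
 *
 *      atomic64_add(len, &stats->rx_bytes);
 */
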
/**
 * atomic64_sub - subtract integer from atomic64 variable
 * @i: integer value to subtract
 * @v: pointer to type atomic64_t
 *
 * Atomically subtracts @i from @v.
 */
static inline void atomic64_sub(long i, atomic64_t *v)
{
        asm volatile(LOCK_PREFIX "subq %1,%0"
                     : "=m" (v->counter)
                     : "er" (i), "m" (v->counter));
}

/**
 * atomic64_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer to type atomic64_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
static inline int atomic64_sub_and_test(long i, atomic64_t *v)
{
        unsigned char c;

        asm volatile(LOCK_PREFIX "subq %2,%0; sete %1"
                     : "=m" (v->counter), "=qm" (c)
                     : "er" (i), "m" (v->counter) : "memory");
        return c;
}

/**
 * atomic64_inc - increment atomic64 variable
 * @v: pointer to type atomic64_t
 *
 * Atomically increments @v by 1.
 */
static inline void atomic64_inc(atomic64_t *v)
{
        asm volatile(LOCK_PREFIX "incq %0"
                     : "=m" (v->counter)
                     : "m" (v->counter));
}

/**
 * atomic64_dec - decrement atomic64 variable
 * @v: pointer to type atomic64_t
 *
 * Atomically decrements @v by 1.
 */
static inline void atomic64_dec(atomic64_t *v)
{
        asm volatile(LOCK_PREFIX "decq %0"
                     : "=m" (v->counter)
                     : "m" (v->counter));
}

/**
 * atomic64_dec_and_test - decrement and test
 * @v: pointer to type atomic64_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
static inline int atomic64_dec_and_test(atomic64_t *v)
{
        unsigned char c;

        asm volatile(LOCK_PREFIX "decq %0; sete %1"
                     : "=m" (v->counter), "=qm" (c)
                     : "m" (v->counter) : "memory");
        return c != 0;
}

/**
 * atomic64_inc_and_test - increment and test
 * @v: pointer to type atomic64_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
static inline int atomic64_inc_and_test(atomic64_t *v)
{
        unsigned char c;

        asm volatile(LOCK_PREFIX "incq %0; sete %1"
                     : "=m" (v->counter), "=qm" (c)
                     : "m" (v->counter) : "memory");
        return c != 0;
}

/**
 * atomic64_add_negative - add and test if negative
 * @i: integer value to add
 * @v: pointer to type atomic64_t
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
static inline int atomic64_add_negative(long i, atomic64_t *v)
{
        unsigned char c;

        asm volatile(LOCK_PREFIX "addq %2,%0; sets %1"
                     : "=m" (v->counter), "=qm" (c)
                     : "er" (i), "m" (v->counter) : "memory");
        return c;
}

/**
 * atomic64_add_return - add and return
 * @i: integer value to add
 * @v: pointer to type atomic64_t
 *
 * Atomically adds @i to @v and returns @i + @v
 */
static inline long atomic64_add_return(long i, atomic64_t *v)
{
        long __i = i;
        asm volatile(LOCK_PREFIX "xaddq %0, %1;"
                     : "+r" (i), "+m" (v->counter)
                     : : "memory");
        return i + __i;
}

static inline long atomic64_sub_return(long i, atomic64_t *v)
{
        return atomic64_add_return(-i, v);
}

#define atomic64_inc_return(v)  (atomic64_add_return(1, (v)))
#define atomic64_dec_return(v)  (atomic64_sub_return(1, (v)))

static inline long atomic64_cmpxchg(atomic64_t *v, long old, long new)
{
        return cmpxchg(&v->counter, old, new);
}

static inline long atomic64_xchg(atomic64_t *v, long new)
{
        return xchg(&v->counter, new);
}

static inline long atomic_cmpxchg(atomic_t *v, int old, int new)
{
        return cmpxchg(&v->counter, old, new);
}

static inline long atomic_xchg(atomic_t *v, int new)
{
        return xchg(&v->counter, new);
}

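/*
 * Usage sketch (hypothetical): atomic_cmpxchg() stores the new value only
 * if the counter still holds the expected old value, and always returns
 * the value it found, so one-shot state transitions are race-free:
 *
 *      if (atomic_cmpxchg(&obj->state, STATE_IDLE, STATE_BUSY) == STATE_IDLE)
 *              ... this caller won the transition ...
 *
 * atomic_xchg() unconditionally installs the new value and hands back the
 * old one.  STATE_IDLE, STATE_BUSY and obj are illustrative names.
 */
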
/**
 * atomic_add_unless - add unless the number is a given value
 * @v: pointer of type atomic_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as it was not @u.
 * Returns non-zero if @v was not @u, and zero otherwise.
 */
static inline int atomic_add_unless(atomic_t *v, int a, int u)
{
        int c, old;
        c = atomic_read(v);
        for (;;) {
                if (unlikely(c == (u)))
                        break;
                old = atomic_cmpxchg((v), c, c + (a));
                if (likely(old == c))
                        break;
                c = old;
        }
        return c != (u);
}

#define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0)

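/*
 * Usage sketch (hypothetical): during a lookup, take a reference only if
 * the object is still live.  A refcount that has already hit zero means
 * the object is being torn down and must not be used.
 *
 *      if (!atomic_inc_not_zero(&obj->refcount))
 *              return NULL;
 */
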
/**
 * atomic64_add_unless - add unless the number is a given value
 * @v: pointer of type atomic64_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as it was not @u.
 * Returns non-zero if @v was not @u, and zero otherwise.
 */
static inline int atomic64_add_unless(atomic64_t *v, long a, long u)
{
        long c, old;
        c = atomic64_read(v);
        for (;;) {
                if (unlikely(c == (u)))
                        break;
                old = atomic64_cmpxchg((v), c, c + (a));
                if (likely(old == c))
                        break;
                c = old;
        }
        return c != (u);
}

/**
 * atomic_inc_short - increment of a short integer
 * @v: pointer to type short int
 *
 * Atomically adds 1 to @v
 * Returns the new value of @v
 */
static inline short int atomic_inc_short(short int *v)
{
        asm(LOCK_PREFIX "addw $1, %0" : "+m" (*v));
        return *v;
}

/**
 * atomic_or_long - OR of two long integers
 * @v1: pointer to type unsigned long
 * @v2: value to OR into *@v1
 *
 * Atomically ORs @v2 into *@v1; the result is left in *@v1.
 */
static inline void atomic_or_long(unsigned long *v1, unsigned long v2)
{
        asm(LOCK_PREFIX "orq %1, %0" : "+m" (*v1) : "r" (v2));
}

#define atomic64_inc_not_zero(v) atomic64_add_unless((v), 1, 0)

/* These are x86-specific, used by some header files */
#define atomic_clear_mask(mask, addr)                                   \
        asm volatile(LOCK_PREFIX "andl %0,%1"                           \
                     : : "r" (~(mask)), "m" (*(addr)) : "memory")

#define atomic_set_mask(mask, addr)                                     \
        asm volatile(LOCK_PREFIX "orl %0,%1"                            \
                     : : "r" ((unsigned)(mask)), "m" (*(addr))          \
                     : "memory")

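/*
 * Usage sketch (hypothetical): atomically setting and clearing bits in a
 * 32-bit flags word that is also touched from interrupt context.
 * FLAG_PENDING and chip are illustrative names.
 *
 *      atomic_set_mask(FLAG_PENDING, &chip->flags);
 *      ...
 *      atomic_clear_mask(FLAG_PENDING, &chip->flags);
 */
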
/* Atomic operations are already serializing on x86 */
#define smp_mb__before_atomic_dec()     barrier()
#define smp_mb__after_atomic_dec()      barrier()
#define smp_mb__before_atomic_inc()     barrier()
#define smp_mb__after_atomic_inc()      barrier()

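/*
 * Usage sketch (hypothetical): when a non-value-returning atomic op must
 * be ordered against surrounding memory accesses, callers bracket it with
 * these macros.  On x86 the LOCK-prefixed instruction already serializes,
 * so a compiler barrier is all that is needed.
 *
 *      obj->status = DONE;
 *      smp_mb__before_atomic_dec();
 *      atomic_dec(&obj->pending);
 */
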
#include <asm-generic/atomic-long.h>
#endif /* _ASM_X86_ATOMIC_64_H */