linux/arch/sparc/include/asm/atomic_64.h
/* atomic.h: Thankfully the V9 is at least reasonable for this
 *           stuff.
 *
 * Copyright (C) 1996, 1997, 2000 David S. Miller (davem@redhat.com)
 */

#ifndef __ARCH_SPARC64_ATOMIC__
#define __ARCH_SPARC64_ATOMIC__

#include <linux/types.h>
#include <asm/system.h>

#define ATOMIC_INIT(i)          { (i) }
#define ATOMIC64_INIT(i)        { (i) }

#define atomic_read(v)          ((v)->counter)
#define atomic64_read(v)        ((v)->counter)

#define atomic_set(v, i)        (((v)->counter) = i)
#define atomic64_set(v, i)      (((v)->counter) = i)

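/* The basic add/sub primitives are defined out of line in the sparc64
 * atomic support code; these forms discard the resulting value.
 */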
extern void atomic_add(int, atomic_t *);
extern void atomic64_add(int, atomic64_t *);
extern void atomic_sub(int, atomic_t *);
extern void atomic64_sub(int, atomic64_t *);

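/* Same operations, but these return the new value of the counter. */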
extern int atomic_add_ret(int, atomic_t *);
extern int atomic64_add_ret(int, atomic64_t *);
extern int atomic_sub_ret(int, atomic_t *);
extern int atomic64_sub_ret(int, atomic64_t *);

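/* The generic kernel-facing names are thin wrappers around the *_ret()
 * primitives above; all of them return the new value.
 */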
#define atomic_dec_return(v) atomic_sub_ret(1, v)
#define atomic64_dec_return(v) atomic64_sub_ret(1, v)

#define atomic_inc_return(v) atomic_add_ret(1, v)
#define atomic64_inc_return(v) atomic64_add_ret(1, v)

#define atomic_sub_return(i, v) atomic_sub_ret(i, v)
#define atomic64_sub_return(i, v) atomic64_sub_ret(i, v)

#define atomic_add_return(i, v) atomic_add_ret(i, v)
#define atomic64_add_return(i, v) atomic64_add_ret(i, v)

/*
 * atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic_inc_and_test(v) (atomic_inc_return(v) == 0)
#define atomic64_inc_and_test(v) (atomic64_inc_return(v) == 0)

#define atomic_sub_and_test(i, v) (atomic_sub_ret(i, v) == 0)
#define atomic64_sub_and_test(i, v) (atomic64_sub_ret(i, v) == 0)

#define atomic_dec_and_test(v) (atomic_sub_ret(1, v) == 0)
#define atomic64_dec_and_test(v) (atomic64_sub_ret(1, v) == 0)

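/* Increment/decrement without returning the new value. */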
#define atomic_inc(v) atomic_add(1, v)
#define atomic64_inc(v) atomic64_add(1, v)

#define atomic_dec(v) atomic_sub(1, v)
#define atomic64_dec(v) atomic64_sub(1, v)

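/* atomic_add_negative() adds @i to @v and returns true if the result
 * is negative, false otherwise.
 */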
#define atomic_add_negative(i, v) (atomic_add_ret(i, v) < 0)
#define atomic64_add_negative(i, v) (atomic64_add_ret(i, v) < 0)

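/* cmpxchg()/xchg() on the counter word provide the remaining primitives. */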
#define atomic_cmpxchg(v, o, n) (cmpxchg(&((v)->counter), (o), (n)))
#define atomic_xchg(v, new) (xchg(&((v)->counter), new))

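/*
 * atomic_add_unless - add unless the number is a given value
 * @v: pointer of type atomic_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as it was not @u.
 * Returns non-zero if @v was not @u, and zero otherwise.
 */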
static inline int atomic_add_unless(atomic_t *v, int a, int u)
{
        int c, old;
        c = atomic_read(v);
        for (;;) {
                if (unlikely(c == (u)))
                        break;
                old = atomic_cmpxchg((v), c, c + (a));
                if (likely(old == c))
                        break;
                c = old;
        }
        return c != (u);
}

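/* atomic_inc_not_zero() is the usual "take a reference only if the
 * object is still live" helper; a minimal usage sketch, assuming a
 * hypothetical object with a refcount field:
 *
 *	if (!atomic_inc_not_zero(&obj->refcount))
 *		return NULL;
 */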
#define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0)

#define atomic64_cmpxchg(v, o, n) \
        ((__typeof__((v)->counter))cmpxchg(&((v)->counter), (o), (n)))
#define atomic64_xchg(v, new) (xchg(&((v)->counter), new))

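/*
 * atomic64_add_unless - add unless the number is a given value
 * @v: pointer of type atomic64_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as it was not @u.
 * Returns non-zero if @v was not @u, and zero otherwise.
 */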
static inline int atomic64_add_unless(atomic64_t *v, long a, long u)
{
        long c, old;
        c = atomic64_read(v);
        for (;;) {
                if (unlikely(c == (u)))
                        break;
                old = atomic64_cmpxchg((v), c, c + (a));
                if (likely(old == c))
                        break;
                c = old;
        }
        return c != (u);
}

#define atomic64_inc_not_zero(v) atomic64_add_unless((v), 1, 0)

/* Atomic operations are already serializing */
#define smp_mb__before_atomic_dec()     barrier()
#define smp_mb__after_atomic_dec()      barrier()
#define smp_mb__before_atomic_inc()     barrier()
#define smp_mb__after_atomic_inc()      barrier()

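/* atomic_long_t operations are layered on the 64-bit primitives above
 * by the generic header.
 */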
#include <asm-generic/atomic-long.h>
#endif /* !(__ARCH_SPARC64_ATOMIC__) */