linux/arch/sparc/include/asm/atomic_64.h
<<
>>
Prefs
   1/* SPDX-License-Identifier: GPL-2.0 */
   2/* atomic.h: Thankfully the V9 is at least reasonable for this
   3 *           stuff.
   4 *
   5 * Copyright (C) 1996, 1997, 2000, 2012 David S. Miller (davem@redhat.com)
   6 */
   7
   8#ifndef __ARCH_SPARC64_ATOMIC__
   9#define __ARCH_SPARC64_ATOMIC__
  10
  11#include <linux/types.h>
  12#include <asm/cmpxchg.h>
  13#include <asm/barrier.h>
  14
  15#define ATOMIC_INIT(i)          { (i) }
  16#define ATOMIC64_INIT(i)        { (i) }
  17
  18#define atomic_read(v)          READ_ONCE((v)->counter)
  19#define atomic64_read(v)        READ_ONCE((v)->counter)
  20
  21#define atomic_set(v, i)        WRITE_ONCE(((v)->counter), (i))
  22#define atomic64_set(v, i)      WRITE_ONCE(((v)->counter), (i))
  23
  24#define ATOMIC_OP(op)                                                   \
  25void atomic_##op(int, atomic_t *);                                      \
  26void atomic64_##op(long, atomic64_t *);
  27
  28#define ATOMIC_OP_RETURN(op)                                            \
  29int atomic_##op##_return(int, atomic_t *);                              \
  30long atomic64_##op##_return(long, atomic64_t *);
  31
  32#define ATOMIC_FETCH_OP(op)                                             \
  33int atomic_fetch_##op(int, atomic_t *);                                 \
  34long atomic64_fetch_##op(long, atomic64_t *);
  35
  36#define ATOMIC_OPS(op) ATOMIC_OP(op) ATOMIC_OP_RETURN(op) ATOMIC_FETCH_OP(op)
  37
  38ATOMIC_OPS(add)
  39ATOMIC_OPS(sub)
  40
  41#undef ATOMIC_OPS
  42#define ATOMIC_OPS(op) ATOMIC_OP(op) ATOMIC_FETCH_OP(op)
  43
  44ATOMIC_OPS(and)
  45ATOMIC_OPS(or)
  46ATOMIC_OPS(xor)
  47
  48#undef ATOMIC_OPS
  49#undef ATOMIC_FETCH_OP
  50#undef ATOMIC_OP_RETURN
  51#undef ATOMIC_OP
  52
  53#define atomic_dec_return(v)   atomic_sub_return(1, v)
  54#define atomic64_dec_return(v) atomic64_sub_return(1, v)
  55
  56#define atomic_inc_return(v)   atomic_add_return(1, v)
  57#define atomic64_inc_return(v) atomic64_add_return(1, v)
  58
  59/*
  60 * atomic_inc_and_test - increment and test
  61 * @v: pointer of type atomic_t
  62 *
  63 * Atomically increments @v by 1
  64 * and returns true if the result is zero, or false for all
  65 * other cases.
  66 */
  67#define atomic_inc_and_test(v) (atomic_inc_return(v) == 0)
  68#define atomic64_inc_and_test(v) (atomic64_inc_return(v) == 0)
  69
  70#define atomic_sub_and_test(i, v) (atomic_sub_return(i, v) == 0)
  71#define atomic64_sub_and_test(i, v) (atomic64_sub_return(i, v) == 0)
  72
  73#define atomic_dec_and_test(v) (atomic_sub_return(1, v) == 0)
  74#define atomic64_dec_and_test(v) (atomic64_sub_return(1, v) == 0)
  75
  76#define atomic_inc(v) atomic_add(1, v)
  77#define atomic64_inc(v) atomic64_add(1, v)
  78
  79#define atomic_dec(v) atomic_sub(1, v)
  80#define atomic64_dec(v) atomic64_sub(1, v)
  81
  82#define atomic_add_negative(i, v) (atomic_add_return(i, v) < 0)
  83#define atomic64_add_negative(i, v) (atomic64_add_return(i, v) < 0)
  84
  85#define atomic_cmpxchg(v, o, n) (cmpxchg(&((v)->counter), (o), (n)))
  86
  87static inline int atomic_xchg(atomic_t *v, int new)
  88{
  89        return xchg(&v->counter, new);
  90}
  91
  92static inline int __atomic_add_unless(atomic_t *v, int a, int u)
  93{
  94        int c, old;
  95        c = atomic_read(v);
  96        for (;;) {
  97                if (unlikely(c == (u)))
  98                        break;
  99                old = atomic_cmpxchg((v), c, c + (a));
 100                if (likely(old == c))
 101                        break;
 102                c = old;
 103        }
 104        return c;
 105}
 106
 107#define atomic64_cmpxchg(v, o, n) \
 108        ((__typeof__((v)->counter))cmpxchg(&((v)->counter), (o), (n)))
 109#define atomic64_xchg(v, new) (xchg(&((v)->counter), new))
 110
 111static inline long atomic64_add_unless(atomic64_t *v, long a, long u)
 112{
 113        long c, old;
 114        c = atomic64_read(v);
 115        for (;;) {
 116                if (unlikely(c == (u)))
 117                        break;
 118                old = atomic64_cmpxchg((v), c, c + (a));
 119                if (likely(old == c))
 120                        break;
 121                c = old;
 122        }
 123        return c != (u);
 124}
 125
 126#define atomic64_inc_not_zero(v) atomic64_add_unless((v), 1, 0)
 127
 128long atomic64_dec_if_positive(atomic64_t *v);
 129
 130#endif /* !(__ARCH_SPARC64_ATOMIC__) */
 131