linux/include/asm-generic/atomic.h
/* SPDX-License-Identifier: GPL-2.0-or-later */
/*
 * Generic C implementation of atomic counter operations. Usable on
 * UP systems only. Do not include in machine independent code.
 *
 * Copyright (C) 2007 Red Hat, Inc. All Rights Reserved.
 * Written by David Howells (dhowells@redhat.com)
 */
#ifndef __ASM_GENERIC_ATOMIC_H
#define __ASM_GENERIC_ATOMIC_H

#include <asm/cmpxchg.h>
#include <asm/barrier.h>

/*
 * atomic_$op() - $op integer to atomic variable
 * @i: integer value to $op
 * @v: pointer to the atomic variable
 *
 * Atomically $ops @i to @v. Does not guarantee a memory barrier; use
 * smp_mb__{before,after}_atomic() where ordering is required.
 */

/*
 * atomic_$op_return() - $op integer to atomic variable and return the result
 * @i: integer value to $op
 * @v: pointer to the atomic variable
 *
 * Atomically $ops @i to @v. Does imply a full memory barrier.
 */
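
/*
 * Illustrative usage (an editorial sketch, not part of the original header;
 * FLAG_READY, obj, refs and put_obj() are made-up names):
 *
 *	The non-value-returning ops need explicit barriers when ordering
 *	against other memory accesses matters:
 *
 *		smp_mb__before_atomic();
 *		atomic_or(FLAG_READY, &obj->flags);
 *		smp_mb__after_atomic();
 *
 *	whereas the value-returning forms imply a full barrier by themselves:
 *
 *		if (atomic_sub_return(1, &obj->refs) == 0)
 *			put_obj(obj);
 */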

#ifdef CONFIG_SMP

/* we can build all atomic primitives from cmpxchg */
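/*
 * Each helper below reads the current counter value, computes the new value
 * and attempts to install it with cmpxchg(). If another CPU changed the
 * counter in the meantime, cmpxchg() returns that newer value instead of the
 * expected one and the loop retries with it.
 */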

#define ATOMIC_OP(op, c_op)						\
static inline void atomic_##op(int i, atomic_t *v)			\
{									\
	int c, old;							\
									\
	c = v->counter;							\
	while ((old = cmpxchg(&v->counter, c, c c_op i)) != c)		\
		c = old;						\
}

#define ATOMIC_OP_RETURN(op, c_op)					\
static inline int atomic_##op##_return(int i, atomic_t *v)		\
{									\
	int c, old;							\
									\
	c = v->counter;							\
	while ((old = cmpxchg(&v->counter, c, c c_op i)) != c)		\
		c = old;						\
									\
	return c c_op i;						\
}
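
/*
 * Unlike atomic_##op##_return(), which returns the new value, the
 * atomic_fetch_##op() variants below return the value @v held before
 * the operation.
 */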

#define ATOMIC_FETCH_OP(op, c_op)					\
static inline int atomic_fetch_##op(int i, atomic_t *v)		\
{									\
	int c, old;							\
									\
	c = v->counter;							\
	while ((old = cmpxchg(&v->counter, c, c c_op i)) != c)		\
		c = old;						\
									\
	return c;							\
}

#else

#include <linux/irqflags.h>
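
/*
 * On a UP system the only concurrent access can come from an interrupt
 * handler, so briefly disabling local interrupts around the
 * read-modify-write sequence is enough to make it atomic.
 */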

#define ATOMIC_OP(op, c_op)						\
static inline void atomic_##op(int i, atomic_t *v)			\
{									\
	unsigned long flags;						\
									\
	raw_local_irq_save(flags);					\
	v->counter = v->counter c_op i;					\
	raw_local_irq_restore(flags);					\
}

#define ATOMIC_OP_RETURN(op, c_op)					\
static inline int atomic_##op##_return(int i, atomic_t *v)		\
{									\
	unsigned long flags;						\
	int ret;							\
									\
	raw_local_irq_save(flags);					\
	ret = (v->counter = v->counter c_op i);				\
	raw_local_irq_restore(flags);					\
									\
	return ret;							\
}

#define ATOMIC_FETCH_OP(op, c_op)					\
static inline int atomic_fetch_##op(int i, atomic_t *v)		\
{									\
	unsigned long flags;						\
	int ret;							\
									\
	raw_local_irq_save(flags);					\
	ret = v->counter;						\
	v->counter = v->counter c_op i;					\
	raw_local_irq_restore(flags);					\
									\
	return ret;							\
}

#endif /* CONFIG_SMP */

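/*
 * Instantiate the generic helpers only where the architecture has not
 * already provided its own definition.
 */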
#ifndef atomic_add_return
ATOMIC_OP_RETURN(add, +)
#endif

#ifndef atomic_sub_return
ATOMIC_OP_RETURN(sub, -)
#endif

#ifndef atomic_fetch_add
ATOMIC_FETCH_OP(add, +)
#endif

#ifndef atomic_fetch_sub
ATOMIC_FETCH_OP(sub, -)
#endif

#ifndef atomic_fetch_and
ATOMIC_FETCH_OP(and, &)
#endif

#ifndef atomic_fetch_or
ATOMIC_FETCH_OP(or, |)
#endif

#ifndef atomic_fetch_xor
ATOMIC_FETCH_OP(xor, ^)
#endif

#ifndef atomic_and
ATOMIC_OP(and, &)
#endif

#ifndef atomic_or
ATOMIC_OP(or, |)
#endif

#ifndef atomic_xor
ATOMIC_OP(xor, ^)
#endif

#undef ATOMIC_FETCH_OP
#undef ATOMIC_OP_RETURN
#undef ATOMIC_OP

/*
 * Atomic operations that C can't guarantee us.  Useful for
 * resource counting etc.
 */

#define ATOMIC_INIT(i)	{ (i) }

/**
 * atomic_read - read atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically reads the value of @v.
 */
#ifndef atomic_read
#define atomic_read(v)	READ_ONCE((v)->counter)
#endif

/**
 * atomic_set - set atomic variable
 * @v: pointer of type atomic_t
 * @i: required value
 *
 * Atomically sets the value of @v to @i.
 */
#define atomic_set(v, i) WRITE_ONCE(((v)->counter), (i))

#include <linux/irqflags.h>

static inline void atomic_add(int i, atomic_t *v)
{
	atomic_add_return(i, v);
}

static inline void atomic_sub(int i, atomic_t *v)
{
	atomic_sub_return(i, v);
}

#define atomic_xchg(ptr, v)		(xchg(&(ptr)->counter, (v)))
#define atomic_cmpxchg(v, old, new)	(cmpxchg(&((v)->counter), (old), (new)))
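
/*
 * Illustrative sketch (editorial example, not part of the original header):
 * a conditional increment built from atomic_read() and atomic_cmpxchg(), in
 * the style of atomic_add_unless(). example_inc_unless_negative() is a
 * made-up name.
 *
 *	static inline bool example_inc_unless_negative(atomic_t *v)
 *	{
 *		int c = atomic_read(v);
 *
 *		while (c >= 0) {
 *			int old = atomic_cmpxchg(v, c, c + 1);
 *
 *			if (old == c)
 *				return true;
 *			c = old;
 *		}
 *		return false;
 *	}
 */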

#endif /* __ASM_GENERIC_ATOMIC_H */