linux/arch/s390/include/asm/atomic.h
/* SPDX-License-Identifier: GPL-2.0 */
/*
 * Copyright IBM Corp. 1999, 2016
 * Author(s): Martin Schwidefsky <schwidefsky@de.ibm.com>,
 *            Denis Joseph Barrow,
 *            Arnd Bergmann,
 */

#ifndef __ARCH_S390_ATOMIC__
#define __ARCH_S390_ATOMIC__

#include <linux/compiler.h>
#include <linux/types.h>
#include <asm/atomic_ops.h>
#include <asm/barrier.h>
#include <asm/cmpxchg.h>

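/*
 * Editor's note: this header provides the s390 arch_atomic*() operations as
 * thin wrappers around the inline-assembly helpers from asm/atomic_ops.h;
 * the generic atomic layer builds the remaining atomic*() variants on top of
 * the arch_* definitions below.
 */
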
static inline int arch_atomic_read(const atomic_t *v)
{
        return __atomic_read(v);
}
#define arch_atomic_read arch_atomic_read

static inline void arch_atomic_set(atomic_t *v, int i)
{
        __atomic_set(v, i);
}
#define arch_atomic_set arch_atomic_set
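/*
 * Editor's note: arch_atomic_read() and arch_atomic_set() are plain loads
 * and stores of v->counter and add no ordering of their own, matching the
 * relaxed semantics that generic atomic_read()/atomic_set() require.
 */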

static inline int arch_atomic_add_return(int i, atomic_t *v)
{
        return __atomic_add_barrier(i, &v->counter) + i;
}
#define arch_atomic_add_return arch_atomic_add_return

static inline int arch_atomic_fetch_add(int i, atomic_t *v)
{
        return __atomic_add_barrier(i, &v->counter);
}
#define arch_atomic_fetch_add arch_atomic_fetch_add
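/*
 * Editor's note: __atomic_add_barrier() returns the value of the counter
 * *before* the addition and implies full ordering.  arch_atomic_fetch_add()
 * therefore returns that old value directly, while arch_atomic_add_return()
 * adds i once more to yield the new value.
 */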

static inline void arch_atomic_add(int i, atomic_t *v)
{
        __atomic_add(i, &v->counter);
}
#define arch_atomic_add arch_atomic_add
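/*
 * Editor's note: the void arch_atomic_add() uses the non-barrier helper;
 * atomic ops that do not return a value are unordered per the kernel memory
 * model (Documentation/atomic_t.txt), so no extra ordering is needed here.
 */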

#define arch_atomic_sub(_i, _v)         arch_atomic_add(-(int)(_i), _v)
#define arch_atomic_sub_return(_i, _v)  arch_atomic_add_return(-(int)(_i), _v)
#define arch_atomic_fetch_sub(_i, _v)   arch_atomic_fetch_add(-(int)(_i), _v)
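/*
 * Editor's note: subtraction is expressed as addition of the negated value;
 * the (int) cast converts unsigned arguments before negation so the result
 * has the expected signed value.
 */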

#define ATOMIC_OPS(op)                                                  \
static inline void arch_atomic_##op(int i, atomic_t *v)                 \
{                                                                       \
        __atomic_##op(i, &v->counter);                                  \
}                                                                       \
static inline int arch_atomic_fetch_##op(int i, atomic_t *v)            \
{                                                                       \
        return __atomic_##op##_barrier(i, &v->counter);                 \
}

ATOMIC_OPS(and)
ATOMIC_OPS(or)
ATOMIC_OPS(xor)

#undef ATOMIC_OPS

#define arch_atomic_and                 arch_atomic_and
#define arch_atomic_or                  arch_atomic_or
#define arch_atomic_xor                 arch_atomic_xor
#define arch_atomic_fetch_and           arch_atomic_fetch_and
#define arch_atomic_fetch_or            arch_atomic_fetch_or
#define arch_atomic_fetch_xor           arch_atomic_fetch_xor
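/*
 * Editor's note, illustrative sketch (not part of the original header): the
 * fetch variants return the value observed before the bitwise update, e.g.
 * a hypothetical helper that atomically clears a set of flag bits and
 * reports whether any of them had been set:
 *
 *      static inline bool example_test_and_clear_flags(atomic_t *v, int mask)
 *      {
 *              return arch_atomic_fetch_and(~mask, v) & mask;
 *      }
 */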

#define arch_atomic_xchg(v, new)        (arch_xchg(&((v)->counter), new))

static inline int arch_atomic_cmpxchg(atomic_t *v, int old, int new)
{
        return __atomic_cmpxchg(&v->counter, old, new);
}
#define arch_atomic_cmpxchg arch_atomic_cmpxchg
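/*
 * Editor's note, illustrative sketch (not part of the original header):
 * arch_atomic_cmpxchg() returns the value found in the counter, so success
 * is detected by comparing the return value with the expected old value.
 * A hypothetical bounded increment built on top of it:
 *
 *      static inline bool example_inc_below(atomic_t *v, int limit)
 *      {
 *              int old, cur = arch_atomic_read(v);
 *
 *              do {
 *                      if (cur >= limit)
 *                              return false;
 *                      old = cur;
 *                      cur = arch_atomic_cmpxchg(v, old, old + 1);
 *              } while (cur != old);
 *
 *              return true;
 *      }
 */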

#define ATOMIC64_INIT(i)  { (i) }

static inline s64 arch_atomic64_read(const atomic64_t *v)
{
        return __atomic64_read(v);
}
#define arch_atomic64_read arch_atomic64_read

static inline void arch_atomic64_set(atomic64_t *v, s64 i)
{
        __atomic64_set(v, i);
}
#define arch_atomic64_set arch_atomic64_set

static inline s64 arch_atomic64_add_return(s64 i, atomic64_t *v)
{
        return __atomic64_add_barrier(i, (long *)&v->counter) + i;
}
#define arch_atomic64_add_return arch_atomic64_add_return

static inline s64 arch_atomic64_fetch_add(s64 i, atomic64_t *v)
{
        return __atomic64_add_barrier(i, (long *)&v->counter);
}
#define arch_atomic64_fetch_add arch_atomic64_fetch_add

static inline void arch_atomic64_add(s64 i, atomic64_t *v)
{
        __atomic64_add(i, (long *)&v->counter);
}
#define arch_atomic64_add arch_atomic64_add
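/*
 * Editor's note: the 64-bit operations mirror the 32-bit ones above.  The
 * casts to (long *) are needed because the __atomic64_*() helpers operate
 * on long, which is 64 bits wide on s390, while atomic64_t::counter is
 * declared as s64.
 */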

#define arch_atomic64_xchg(v, new)      (arch_xchg(&((v)->counter), new))

static inline s64 arch_atomic64_cmpxchg(atomic64_t *v, s64 old, s64 new)
{
        return __atomic64_cmpxchg((long *)&v->counter, old, new);
}
#define arch_atomic64_cmpxchg arch_atomic64_cmpxchg

#define ATOMIC64_OPS(op)                                                \
static inline void arch_atomic64_##op(s64 i, atomic64_t *v)             \
{                                                                       \
        __atomic64_##op(i, (long *)&v->counter);                        \
}                                                                       \
static inline long arch_atomic64_fetch_##op(s64 i, atomic64_t *v)       \
{                                                                       \
        return __atomic64_##op##_barrier(i, (long *)&v->counter);       \
}

ATOMIC64_OPS(and)
ATOMIC64_OPS(or)
ATOMIC64_OPS(xor)

#undef ATOMIC64_OPS

#define arch_atomic64_and               arch_atomic64_and
#define arch_atomic64_or                arch_atomic64_or
#define arch_atomic64_xor               arch_atomic64_xor
#define arch_atomic64_fetch_and         arch_atomic64_fetch_and
#define arch_atomic64_fetch_or          arch_atomic64_fetch_or
#define arch_atomic64_fetch_xor         arch_atomic64_fetch_xor

#define arch_atomic64_sub_return(_i, _v) arch_atomic64_add_return(-(s64)(_i), _v)
#define arch_atomic64_fetch_sub(_i, _v)  arch_atomic64_fetch_add(-(s64)(_i), _v)
#define arch_atomic64_sub(_i, _v)        arch_atomic64_add(-(s64)(_i), _v)
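/*
 * Editor's note: as with the 32-bit variants, 64-bit subtraction is
 * expressed as addition of the negated value.
 */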

#endif /* __ARCH_S390_ATOMIC__  */