linux/arch/arm64/include/asm/atomic.h
/*
 * Based on arch/arm/include/asm/atomic.h
 *
 * Copyright (C) 1996 Russell King.
 * Copyright (C) 2002 Deep Blue Solutions Ltd.
 * Copyright (C) 2012 ARM Ltd.
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program.  If not, see <http://www.gnu.org/licenses/>.
 */
#ifndef __ASM_ATOMIC_H
#define __ASM_ATOMIC_H

#include <linux/compiler.h>
#include <linux/types.h>

#include <asm/barrier.h>
#include <asm/lse.h>

#ifdef __KERNEL__

#define __ARM64_IN_ATOMIC_IMPL

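/*
 * Two implementations of the same API are provided: ARMv8.1 LSE
 * (Large System Extensions) atomic instructions when the kernel is
 * configured for them and the assembler can emit them (CONFIG_AS_LSE),
 * and classic load-exclusive/store-exclusive (LL/SC) loops otherwise.
 * __ARM64_IN_ATOMIC_IMPL tells those headers that they are being
 * included from here rather than directly.
 */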
#if defined(CONFIG_ARM64_LSE_ATOMICS) && defined(CONFIG_AS_LSE)
#include <asm/atomic_lse.h>
#else
#include <asm/atomic_ll_sc.h>
#endif

#undef __ARM64_IN_ATOMIC_IMPL

#include <asm/cmpxchg.h>

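/*
 * Atomically add @a to @v, so long as @v was not @u, and return the
 * value of @v observed before any addition took place. Implemented as
 * a cmpxchg() retry loop: the counter is reread whenever another CPU
 * changed it between the read and the compare-and-exchange. @sfx is
 * pasted into the operation names, so an empty suffix selects the
 * atomic_ ops and "64" selects the atomic64_ ops.
 */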
#define ___atomic_add_unless(v, a, u, sfx)                              \
({                                                                      \
        typeof((v)->counter) c, old;                                    \
                                                                        \
        c = atomic##sfx##_read(v);                                      \
        while (c != (u) &&                                              \
              (old = atomic##sfx##_cmpxchg((v), c, c + (a))) != c)      \
                c = old;                                                \
        c;                                                              \
 })

#define ATOMIC_INIT(i)  { (i) }

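/*
 * Plain reads and writes of the counter go through READ_ONCE() and
 * WRITE_ONCE() so the compiler can neither tear the access nor cache
 * a stale value; xchg() and cmpxchg() come from <asm/cmpxchg.h>.
 */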
#define atomic_read(v)                  READ_ONCE((v)->counter)
#define atomic_set(v, i)                WRITE_ONCE(((v)->counter), (i))
#define atomic_xchg(v, new)             xchg(&((v)->counter), (new))
#define atomic_cmpxchg(v, old, new)     cmpxchg(&((v)->counter), (old), (new))

#define atomic_inc(v)                   atomic_add(1, (v))
#define atomic_dec(v)                   atomic_sub(1, (v))
#define atomic_inc_return(v)            atomic_add_return(1, (v))
#define atomic_dec_return(v)            atomic_sub_return(1, (v))
#define atomic_inc_and_test(v)          (atomic_inc_return(v) == 0)
#define atomic_dec_and_test(v)          (atomic_dec_return(v) == 0)
#define atomic_sub_and_test(i, v)       (atomic_sub_return((i), (v)) == 0)
#define atomic_add_negative(i, v)       (atomic_add_return((i), (v)) < 0)
#define __atomic_add_unless(v, a, u)    ___atomic_add_unless(v, a, u,)
#define atomic_andnot                   atomic_andnot
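
/*
 * Defining atomic_andnot as itself tells the generic <linux/atomic.h>
 * code that the architecture provides its own implementation, so no
 * fallback based on atomic_and() is generated.
 *
 * Illustrative use of the add-unless family (not part of this header):
 *
 *	atomic_t users = ATOMIC_INIT(1);
 *
 *	if (__atomic_add_unless(&users, 1, 0) == 0)
 *		... too late, the count had already dropped to zero ...
 */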

/*
 * 64-bit atomic operations.
 *
 * The basic accessors simply alias their 32-bit counterparts:
 * READ_ONCE(), WRITE_ONCE(), xchg() and cmpxchg() all key off the
 * type of (v)->counter, so the same macro bodies work for the
 * long counter of atomic64_t as well.
 */
#define ATOMIC64_INIT                   ATOMIC_INIT
#define atomic64_read                   atomic_read
#define atomic64_set                    atomic_set
#define atomic64_xchg                   atomic_xchg
#define atomic64_cmpxchg                atomic_cmpxchg

#define atomic64_inc(v)                 atomic64_add(1, (v))
#define atomic64_dec(v)                 atomic64_sub(1, (v))
#define atomic64_inc_return(v)          atomic64_add_return(1, (v))
#define atomic64_dec_return(v)          atomic64_sub_return(1, (v))
#define atomic64_inc_and_test(v)        (atomic64_inc_return(v) == 0)
#define atomic64_dec_and_test(v)        (atomic64_dec_return(v) == 0)
#define atomic64_sub_and_test(i, v)     (atomic64_sub_return((i), (v)) == 0)
#define atomic64_add_negative(i, v)     (atomic64_add_return((i), (v)) < 0)
#define atomic64_add_unless(v, a, u)    (___atomic_add_unless(v, a, u, 64) != u)
#define atomic64_andnot                 atomic64_andnot

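/*
 * Illustrative use (not part of this header): take a reference only
 * while the object is still live,
 *
 *	if (!atomic64_inc_not_zero(&obj->refcount))
 *		... lookup lost the race with the final put ...
 */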
#define atomic64_inc_not_zero(v)        atomic64_add_unless((v), 1, 0)

#endif  /* __KERNEL__ */
#endif  /* __ASM_ATOMIC_H */