linux/arch/x86/include/asm/local.h
/* SPDX-License-Identifier: GPL-2.0 */
#ifndef _ASM_X86_LOCAL_H
#define _ASM_X86_LOCAL_H

#include <linux/percpu.h>

#include <linux/atomic.h>
#include <asm/asm.h>

/*
 * local_t is a signed long counter that is only ever modified by its
 * owning CPU, so the read-modify-write ops below can skip the LOCK prefix.
 */
typedef struct {
        atomic_long_t a;
} local_t;

#define LOCAL_INIT(i)   { ATOMIC_LONG_INIT(i) }

#define local_read(l)   atomic_long_read(&(l)->a)
#define local_set(l, i) atomic_long_set(&(l)->a, (i))

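/*
 * Usage sketch (illustrative, not part of the original header): a
 * per-CPU hit counter. The variable name is hypothetical;
 * DEFINE_PER_CPU and this_cpu_ptr are the standard percpu helpers
 * from <linux/percpu.h>. Updates must run with preemption disabled
 * (e.g. via get_cpu_var()) so the task cannot migrate mid-update.
 *
 *      static DEFINE_PER_CPU(local_t, hits) = LOCAL_INIT(0);
 *
 *      local_set(this_cpu_ptr(&hits), 0);
 *      pr_info("hits so far: %ld\n", local_read(this_cpu_ptr(&hits)));
 */
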
/* Increment without a LOCK prefix; atomic only w.r.t. the owning CPU. */
static inline void local_inc(local_t *l)
{
        asm volatile(_ASM_INC "%0"
                     : "+m" (l->a.counter));
}

/* Decrement without a LOCK prefix; atomic only w.r.t. the owning CPU. */
static inline void local_dec(local_t *l)
{
        asm volatile(_ASM_DEC "%0"
                     : "+m" (l->a.counter));
}

/* Add @i; the "ir" constraint allows an immediate or a register addend. */
static inline void local_add(long i, local_t *l)
{
        asm volatile(_ASM_ADD "%1,%0"
                     : "+m" (l->a.counter)
                     : "ir" (i));
}

/* Subtract @i; same constraints as local_add(). */
static inline void local_sub(long i, local_t *l)
{
        asm volatile(_ASM_SUB "%1,%0"
                     : "+m" (l->a.counter)
                     : "ir" (i));
}

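/*
 * Illustrative sketch (not in the original file): each op above is a
 * single x86 read-modify-write instruction, so it is atomic with
 * respect to interrupts on the owning CPU without a LOCK prefix.
 * For example, a hypothetical IRQ handler bumping a per-CPU byte
 * count (IRQ context already runs with preemption disabled):
 *
 *      static DEFINE_PER_CPU(local_t, bytes_rx);
 *
 *      static irqreturn_t rx_irq(int irq, void *dev)   // hypothetical
 *      {
 *              local_add(64, this_cpu_ptr(&bytes_rx));
 *              return IRQ_HANDLED;
 *      }
 */
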
/**
 * local_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @l: pointer to type local_t
 *
 * Atomically subtracts @i from @l and returns
 * true if the result is zero, or false for all
 * other cases.
 */
static inline bool local_sub_and_test(long i, local_t *l)
{
        return GEN_BINARY_RMWcc(_ASM_SUB, l->a.counter, e, "er", i);
}

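/*
 * Sketch (assumed usage, not from the source): drop a batch of @n
 * references from a hypothetical per-CPU count and tear down when
 * the last one goes away:
 *
 *      if (local_sub_and_test(n, this_cpu_ptr(&refs)))
 *              teardown();             // hypothetical helper
 */
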
/**
 * local_dec_and_test - decrement and test
 * @l: pointer to type local_t
 *
 * Atomically decrements @l by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
static inline bool local_dec_and_test(local_t *l)
{
        return GEN_UNARY_RMWcc(_ASM_DEC, l->a.counter, e);
}

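/*
 * Sketch (assumed usage): the classic put-side pattern, freeing a
 * hypothetical object when its count reaches zero. Only safe if the
 * object is CPU-private, since local_t must not be updated cross-CPU:
 *
 *      if (local_dec_and_test(&obj->count))
 *              kfree(obj);
 */
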
/**
 * local_inc_and_test - increment and test
 * @l: pointer to type local_t
 *
 * Atomically increments @l by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
static inline bool local_inc_and_test(local_t *l)
{
        return GEN_UNARY_RMWcc(_ASM_INC, l->a.counter, e);
}

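/*
 * Sketch (assumed usage): count completions upward from a negative
 * start; the result hits zero exactly when the last of @n pending
 * items finishes (all names hypothetical):
 *
 *      local_set(&pending, -n);
 *      ...
 *      if (local_inc_and_test(&pending))
 *              all_done();
 */
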
/**
 * local_add_negative - add and test if negative
 * @i: integer value to add
 * @l: pointer to type local_t
 *
 * Atomically adds @i to @l and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
static inline bool local_add_negative(long i, local_t *l)
{
        return GEN_BINARY_RMWcc(_ASM_ADD, l->a.counter, s, "er", i);
}

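/*
 * Sketch (assumed usage): apply a signed delta to a CPU-private
 * budget and detect overdraw in a single instruction (names
 * hypothetical):
 *
 *      if (local_add_negative(-cost, &budget))
 *              throttle();
 */
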
/**
 * local_add_return - add and return
 * @i: integer value to add
 * @l: pointer to type local_t
 *
 * Atomically adds @i to @l and returns @i + @l
 */
static inline long local_add_return(long i, local_t *l)
{
        long __i = i;
        /* XADD leaves the old counter value in @i and adds @i into memory. */
        asm volatile(_ASM_XADD "%0, %1;"
                     : "+r" (i), "+m" (l->a.counter)
                     : : "memory");
        return i + __i;
}

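/*
 * Sketch (assumed usage): since XADD returns the old value, the sum
 * old + @i above is the new value. Handy for handing out sequence
 * numbers from a CPU-private counter (name hypothetical):
 *
 *      long seq = local_add_return(1, this_cpu_ptr(&seqno));
 *                                      // same as local_inc_return()
 */
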
static inline long local_sub_return(long i, local_t *l)
{
        return local_add_return(-i, l);
}

#define local_inc_return(l)  (local_add_return(1, l))
#define local_dec_return(l)  (local_sub_return(1, l))

#define local_cmpxchg(l, o, n) \
        (cmpxchg_local(&((l)->a.counter), (o), (n)))
/* local_xchg always has a lock prefix: XCHG with memory implies LOCK */
#define local_xchg(l, n) (xchg(&((l)->a.counter), (n)))

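/*
 * Sketch (assumed usage): a compare-and-swap retry loop that doubles
 * a CPU-private value without losing concurrent in-IRQ updates; note
 * that local_cmpxchg, unlike local_xchg, avoids the LOCK prefix:
 *
 *      long old, seen = local_read(&v);
 *      do {
 *              old = seen;
 *              seen = local_cmpxchg(&v, old, old * 2);
 *      } while (seen != old);
 */
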
/**
 * local_add_unless - add unless the number is a given value
 * @l: pointer of type local_t
 * @a: the amount to add to l...
 * @u: ...unless l is equal to u.
 *
 * Atomically adds @a to @l, so long as it was not @u.
 * Returns non-zero if @l was not @u, and zero otherwise.
 */
#define local_add_unless(l, a, u)                               \
({                                                              \
        long c, old;                                            \
        c = local_read((l));                                    \
        for (;;) {                                              \
                if (unlikely(c == (u)))                         \
                        break;                                  \
                old = local_cmpxchg((l), c, c + (a));           \
                if (likely(old == c))                           \
                        break;                                  \
                c = old;                                        \
        }                                                       \
        c != (u);                                               \
})
#define local_inc_not_zero(l) local_add_unless((l), 1, 0)

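/*
 * Sketch (assumed usage): the lookup-side pattern that pairs with the
 * local_dec_and_test() example above; take a reference only if the
 * object is still live, i.e. its count has not already hit zero:
 *
 *      if (!local_inc_not_zero(&obj->count))
 *              return NULL;            // already being torn down
 */
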
/* On x86_32, these are no better than the atomic variants.
 * On x86-64, these are better than the atomic variants on SMP kernels
 * because they don't use a lock prefix.
 */
#define __local_inc(l)          local_inc(l)
#define __local_dec(l)          local_dec(l)
#define __local_add(i, l)       local_add((i), (l))
#define __local_sub(i, l)       local_sub((i), (l))

#endif /* _ASM_X86_LOCAL_H */