linux/arch/alpha/include/asm/local.h
/* SPDX-License-Identifier: GPL-2.0 */
#ifndef _ALPHA_LOCAL_H
#define _ALPHA_LOCAL_H

#include <linux/percpu.h>
#include <linux/atomic.h>

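/*
 * local_t is a signed long counter that is atomic with respect to the
 * CPU that owns it (including that CPU's interrupt handlers), but not
 * with respect to other CPUs.  The intended pattern is one counter per
 * CPU, updated only by its owner and readable from anywhere; see
 * Documentation/core-api/local_ops.rst.  On Alpha we simply wrap the
 * atomic_long_t operations.
 */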
typedef struct
{
	atomic_long_t a;
} local_t;

#define LOCAL_INIT(i)	{ ATOMIC_LONG_INIT(i) }
#define local_read(l)	atomic_long_read(&(l)->a)
#define local_set(l,i)	atomic_long_set(&(l)->a, (i))
#define local_inc(l)	atomic_long_inc(&(l)->a)
#define local_dec(l)	atomic_long_dec(&(l)->a)
#define local_add(i,l)	atomic_long_add((i),(&(l)->a))
#define local_sub(i,l)	atomic_long_sub((i),(&(l)->a))

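/*
 * local_add_return() and local_sub_return() are open-coded with
 * Alpha's load-locked/store-conditional pair: ldq_l loads the counter
 * and arms the lock flag, stq_c stores the updated value only if
 * nothing else wrote the location in between, leaving 0 in its source
 * register on failure.  The retry branch lives in .subsection 2 so the
 * common (success) path stays straight-line code.
 */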
static __inline__ long local_add_return(long i, local_t * l)
{
	long temp, result;
	__asm__ __volatile__(
	"1:	ldq_l %0,%1\n"
	"	addq %0,%3,%2\n"
	"	addq %0,%3,%0\n"
	"	stq_c %0,%1\n"
	"	beq %0,2f\n"
	".subsection 2\n"
	"2:	br 1b\n"
	".previous"
	:"=&r" (temp), "=m" (l->a.counter), "=&r" (result)
	:"Ir" (i), "m" (l->a.counter) : "memory");
	return result;
}

static __inline__ long local_sub_return(long i, local_t * l)
{
	long temp, result;
	__asm__ __volatile__(
	"1:	ldq_l %0,%1\n"
	"	subq %0,%3,%2\n"
	"	subq %0,%3,%0\n"
	"	stq_c %0,%1\n"
	"	beq %0,2f\n"
	".subsection 2\n"
	"2:	br 1b\n"
	".previous"
	:"=&r" (temp), "=m" (l->a.counter), "=&r" (result)
	:"Ir" (i), "m" (l->a.counter) : "memory");
	return result;
}

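/*
 * cmpxchg_local()/xchg_local() only have to be atomic with respect to
 * the local CPU, which matches local_t's usage contract; they imply no
 * cross-CPU ordering.
 */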
#define local_cmpxchg(l, o, n) \
	(cmpxchg_local(&((l)->a.counter), (o), (n)))
#define local_xchg(l, n) (xchg_local(&((l)->a.counter), (n)))

/**
 * local_add_unless - add unless the number is a given value
 * @l: pointer of type local_t
 * @a: the amount to add to l...
 * @u: ...unless l is equal to u.
 *
 * Atomically adds @a to @l, so long as it was not @u.
 * Returns non-zero if @l was not @u, and zero otherwise.
 */
#define local_add_unless(l, a, u)				\
({								\
	long c, old;						\
	c = local_read(l);					\
	for (;;) {						\
		if (unlikely(c == (u)))				\
			break;					\
		old = local_cmpxchg((l), c, c + (a));		\
		if (likely(old == c))				\
			break;					\
		c = old;					\
	}							\
	c != (u);						\
})
#define local_inc_not_zero(l) local_add_unless((l), 1, 0)

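/*
 * A hypothetical sketch of the usual local_inc_not_zero() pattern,
 * taking a reference only while the count is still nonzero (the
 * obj/refs names are made up, not part of this API):
 *
 *	if (!local_inc_not_zero(&obj->refs))
 *		return NULL;
 */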
#define local_add_negative(a, l) (local_add_return((a), (l)) < 0)

#define local_dec_return(l) local_sub_return(1,(l))

#define local_inc_return(l) local_add_return(1,(l))

#define local_sub_and_test(i,l) (local_sub_return((i), (l)) == 0)

#define local_inc_and_test(l) (local_add_return(1, (l)) == 0)

#define local_dec_and_test(l) (local_sub_return(1, (l)) == 0)

/*
 * Non-atomic variants: the caller must already exclude all other
 * updaters (e.g. by keeping interrupts disabled).
 * TODO: verify these are actually faster than the atomic ops above.
 */
#define __local_inc(l)		((l)->a.counter++)
#define __local_dec(l)		((l)->a.counter--)
#define __local_add(i,l)	((l)->a.counter+=(i))
#define __local_sub(i,l)	((l)->a.counter-=(i))

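/*
 * Illustrative per-CPU usage following
 * Documentation/core-api/local_ops.rst; the "hits" counter is made up
 * for the example.  Updates must come from the owning CPU (e.g. in
 * interrupt context or with preemption disabled); reads may come from
 * any CPU:
 *
 *	static DEFINE_PER_CPU(local_t, hits) = LOCAL_INIT(0);
 *
 *	local_inc(this_cpu_ptr(&hits));
 *
 *	long sum = 0;
 *	int cpu;
 *	for_each_possible_cpu(cpu)
 *		sum += local_read(&per_cpu(hits, cpu));
 */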
#endif /* _ALPHA_LOCAL_H */