linux/arch/mips/include/asm/local.h
#ifndef _ARCH_MIPS_LOCAL_H
#define _ARCH_MIPS_LOCAL_H

#include <linux/percpu.h>
#include <linux/bitops.h>
#include <linux/atomic.h>
#include <asm/cmpxchg.h>
#include <asm/war.h>

typedef struct
{
        atomic_long_t a;
} local_t;

#define LOCAL_INIT(i)   { ATOMIC_LONG_INIT(i) }

#define local_read(l)   atomic_long_read(&(l)->a)
#define local_set(l, i) atomic_long_set(&(l)->a, (i))

#define local_add(i, l) atomic_long_add((i), (&(l)->a))
#define local_sub(i, l) atomic_long_sub((i), (&(l)->a))
#define local_inc(l)    atomic_long_inc(&(l)->a)
#define local_dec(l)    atomic_long_dec(&(l)->a)

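/*
 * Illustrative sketch, not part of the original header ("hits" is a
 * hypothetical per-CPU variable): the usual pattern is one local_t
 * per CPU, updated only by code running on that CPU.
 *
 *      static DEFINE_PER_CPU(local_t, hits) = LOCAL_INIT(0);
 *
 *      local_inc(this_cpu_ptr(&hits));
 *      pr_info("hits on this cpu: %ld\n",
 *              local_read(this_cpu_ptr(&hits)));
 */
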
/*
 * Same as above, but return the resulting value.  Three variants are
 * provided: LL/SC with branch-likely (beqzl) for the R10000 ll/sc
 * errata workaround, plain LL/SC, and an interrupt-disable fallback
 * for CPUs without LL/SC.
 */
static __inline__ long local_add_return(long i, local_t * l)
{
        unsigned long result;

        if (kernel_uses_llsc && R10000_LLSC_WAR) {
                unsigned long temp;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:"    __LL    "%1, %2         # local_add_return      \n"
                "       addu    %0, %1, %3                              \n"
                        __SC    "%0, %2                                 \n"
                "       beqzl   %0, 1b                                  \n"
                "       addu    %0, %1, %3                              \n"
                "       .set    mips0                                   \n"
                : "=&r" (result), "=&r" (temp), "=m" (l->a.counter)
                : "Ir" (i), "m" (l->a.counter)
                : "memory");
        } else if (kernel_uses_llsc) {
                unsigned long temp;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:"    __LL    "%1, %2         # local_add_return      \n"
                "       addu    %0, %1, %3                              \n"
                        __SC    "%0, %2                                 \n"
                "       beqz    %0, 1b                                  \n"
                "       addu    %0, %1, %3                              \n"
                "       .set    mips0                                   \n"
                : "=&r" (result), "=&r" (temp), "=m" (l->a.counter)
                : "Ir" (i), "m" (l->a.counter)
                : "memory");
        } else {
                unsigned long flags;

                local_irq_save(flags);
                result = l->a.counter;
                result += i;
                l->a.counter = result;
                local_irq_restore(flags);
        }

        return result;
}
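
/*
 * Illustrative use, not from this file ("bytes" and "len" are
 * hypothetical): fetch the post-add value in one atomic step.
 *
 *      local_t bytes = LOCAL_INIT(0);
 *      long total = local_add_return(len, &bytes);     // new count
 */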

static __inline__ long local_sub_return(long i, local_t * l)
{
        unsigned long result;

        if (kernel_uses_llsc && R10000_LLSC_WAR) {
                unsigned long temp;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:"    __LL    "%1, %2         # local_sub_return      \n"
                "       subu    %0, %1, %3                              \n"
                        __SC    "%0, %2                                 \n"
                "       beqzl   %0, 1b                                  \n"
                "       subu    %0, %1, %3                              \n"
                "       .set    mips0                                   \n"
                : "=&r" (result), "=&r" (temp), "=m" (l->a.counter)
                : "Ir" (i), "m" (l->a.counter)
                : "memory");
        } else if (kernel_uses_llsc) {
                unsigned long temp;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:"    __LL    "%1, %2         # local_sub_return      \n"
                "       subu    %0, %1, %3                              \n"
                        __SC    "%0, %2                                 \n"
                "       beqz    %0, 1b                                  \n"
                "       subu    %0, %1, %3                              \n"
                "       .set    mips0                                   \n"
                : "=&r" (result), "=&r" (temp), "=m" (l->a.counter)
                : "Ir" (i), "m" (l->a.counter)
                : "memory");
        } else {
                unsigned long flags;

                local_irq_save(flags);
                result = l->a.counter;
                result -= i;
                l->a.counter = result;
                local_irq_restore(flags);
        }

        return result;
}

#define local_cmpxchg(l, o, n) \
        ((long)cmpxchg_local(&((l)->a.counter), (o), (n)))
#define local_xchg(l, n) (atomic_long_xchg((&(l)->a), (n)))
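
/*
 * Illustrative cmpxchg retry loop (a sketch only; "l" and the
 * doubling update stand in for any pure function of the old value):
 *
 *      long old, new;
 *      do {
 *              old = local_read(&l);
 *              new = old * 2;
 *      } while (local_cmpxchg(&l, old, new) != old);
 */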

/**
 * local_add_unless - add unless the number is a given value
 * @l: pointer of type local_t
 * @a: the amount to add to l...
 * @u: ...unless l is equal to u.
 *
 * Atomically adds @a to @l, so long as it was not @u.
 * Returns non-zero if @l was not @u, and zero otherwise.
 */
#define local_add_unless(l, a, u)                               \
({                                                              \
        long c, old;                                            \
        c = local_read(l);                                      \
        while (c != (u) && (old = local_cmpxchg((l), c, c + (a))) != c) \
                c = old;                                        \
        c != (u);                                               \
})
#define local_inc_not_zero(l) local_add_unless((l), 1, 0)
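
/*
 * Illustrative "get" side of a reference count (hypothetical names):
 *
 *      if (!local_inc_not_zero(&obj->refs))
 *              return NULL;            // already dropped to zero
 */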

#define local_dec_return(l) local_sub_return(1, (l))
#define local_inc_return(l) local_add_return(1, (l))

/*
 * local_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @l: pointer of type local_t
 *
 * Atomically subtracts @i from @l and returns
 * true if the result is zero, or false for all
 * other cases.
 */
#define local_sub_and_test(i, l) (local_sub_return((i), (l)) == 0)

/*
 * local_inc_and_test - increment and test
 * @l: pointer of type local_t
 *
 * Atomically increments @l by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define local_inc_and_test(l) (local_inc_return(l) == 0)

/*
 * local_dec_and_test - decrement by 1 and test
 * @l: pointer of type local_t
 *
 * Atomically decrements @l by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
#define local_dec_and_test(l) (local_sub_return(1, (l)) == 0)
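
/*
 * Illustrative "put" side matching the sketch above (hypothetical
 * names): free the object when the last reference goes away.
 *
 *      if (local_dec_and_test(&obj->refs))
 *              free_obj(obj);
 */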

/*
 * local_add_negative - add and test if negative
 * @l: pointer of type local_t
 * @i: integer value to add
 *
 * Atomically adds @i to @l and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
#define local_add_negative(i, l) (local_add_return(i, (l)) < 0)

/* Use these for per-cpu local_t variables: on some archs they are
 * much more efficient than these naive implementations.  Note they
 * still take a pointer to a local_t, and, unlike the ops above, they
 * are not atomic with respect to interrupts on the local CPU.
 */

#define __local_inc(l)          ((l)->a.counter++)
#define __local_dec(l)          ((l)->a.counter--)
#define __local_add(i, l)       ((l)->a.counter+=(i))
#define __local_sub(i, l)       ((l)->a.counter-=(i))
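
/*
 * Illustrative sketch: the __local_* forms are plain C updates, so
 * the caller must rule out interleaving itself, e.g. by disabling
 * interrupts around the update ("stats" is a hypothetical per-CPU
 * variable):
 *
 *      local_irq_disable();
 *      __local_add(n, this_cpu_ptr(&stats));
 *      local_irq_enable();
 */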

#endif /* _ARCH_MIPS_LOCAL_H */