linux/arch/m32r/include/asm/atomic.h
#ifndef _ASM_M32R_ATOMIC_H
#define _ASM_M32R_ATOMIC_H

/*
 *  linux/include/asm-m32r/atomic.h
 *
 *  M32R version:
 *    Copyright (C) 2001, 2002  Hitoshi Yamamoto
 *    Copyright (C) 2004  Hirokazu Takata <takata at linux-m32r.org>
 */

#include <linux/types.h>
#include <asm/assembler.h>
#include <asm/cmpxchg.h>
#include <asm/dcache_clear.h>
#include <asm/barrier.h>

/*
 * Atomic operations that C can't guarantee us.  Useful for
 * resource counting etc..
 */

#define ATOMIC_INIT(i)	{ (i) }

/**
 * atomic_read - read atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically reads the value of @v.
 */
#define atomic_read(v)	ACCESS_ONCE((v)->counter)

/**
 * atomic_set - set atomic variable
 * @v: pointer of type atomic_t
 * @i: required value
 *
 * Atomically sets the value of @v to @i.
 */
#define atomic_set(v,i)	(((v)->counter) = (i))
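
/*
 * Usage sketch (editor's illustration, not part of the original header):
 * declaring an atomic_t with ATOMIC_INIT(), snapshotting it with
 * atomic_read() and resetting it with atomic_set().  The name nr_events
 * is hypothetical.
 */
#if 0
static atomic_t nr_events = ATOMIC_INIT(0);

static int drain_events(void)
{
	int seen = atomic_read(&nr_events);	/* volatile read of counter */

	/* note: this read+set pair is not one atomic operation */
	atomic_set(&nr_events, 0);		/* plain store, not an RMW */
	return seen;
}
#endif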

#ifdef CONFIG_CHIP_M32700_TS1
#define __ATOMIC_CLOBBER	, "r4"
#else
#define __ATOMIC_CLOBBER
#endif

#define ATOMIC_OP(op)							\
static __inline__ void atomic_##op(int i, atomic_t *v)			\
{									\
	unsigned long flags;						\
	int result;							\
									\
	local_irq_save(flags);						\
	__asm__ __volatile__ (						\
		"# atomic_" #op "		\n\t"			\
		DCACHE_CLEAR("%0", "r4", "%1")				\
		M32R_LOCK" %0, @%1;		\n\t"			\
		#op " %0, %2;			\n\t"			\
		M32R_UNLOCK" %0, @%1;		\n\t"			\
		: "=&r" (result)					\
		: "r" (&v->counter), "r" (i)				\
		: "memory"						\
		__ATOMIC_CLOBBER					\
	);								\
	local_irq_restore(flags);					\
}									\

#define ATOMIC_OP_RETURN(op)						\
static __inline__ int atomic_##op##_return(int i, atomic_t *v)		\
{									\
	unsigned long flags;						\
	int result;							\
									\
	local_irq_save(flags);						\
	__asm__ __volatile__ (						\
		"# atomic_" #op "_return	\n\t"			\
		DCACHE_CLEAR("%0", "r4", "%1")				\
		M32R_LOCK" %0, @%1;		\n\t"			\
		#op " %0, %2;			\n\t"			\
		M32R_UNLOCK" %0, @%1;		\n\t"			\
		: "=&r" (result)					\
		: "r" (&v->counter), "r" (i)				\
		: "memory"						\
		__ATOMIC_CLOBBER					\
	);								\
	local_irq_restore(flags);					\
									\
	return result;							\
}

#define ATOMIC_OPS(op) ATOMIC_OP(op) ATOMIC_OP_RETURN(op)

ATOMIC_OPS(add)
ATOMIC_OPS(sub)

ATOMIC_OP(and)
ATOMIC_OP(or)
ATOMIC_OP(xor)

#undef ATOMIC_OPS
#undef ATOMIC_OP_RETURN
#undef ATOMIC_OP
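
/*
 * The expansions above generate atomic_add(), atomic_sub(),
 * atomic_add_return(), atomic_sub_return(), atomic_and(), atomic_or()
 * and atomic_xor().  A usage sketch (editor's illustration; the names
 * budget, flags and charge are hypothetical):
 */
#if 0
static atomic_t budget = ATOMIC_INIT(100);
static atomic_t flags = ATOMIC_INIT(0);

static int charge(int n)
{
	int left = atomic_sub_return(n, &budget);	/* returns new value */

	if (left < 0)
		atomic_or(1, &flags);	/* void op: flag exhaustion */
	return left;
}
#endif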

/**
 * atomic_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
#define atomic_sub_and_test(i,v) (atomic_sub_return((i), (v)) == 0)
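
/*
 * Sketch (editor's illustration): atomic_sub_and_test() fits a "last
 * one out turns off the lights" pattern when work retires in batches.
 * all_work_done() and pending are hypothetical.
 */
#if 0
extern void all_work_done(void);
static atomic_t pending = ATOMIC_INIT(0);

static void retire(int batch)
{
	/* only the caller that brings the count to zero takes the branch */
	if (atomic_sub_and_test(batch, &pending))
		all_work_done();
}
#endif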

/**
 * atomic_inc_return - increment atomic variable and return it
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1 and returns the result.
 */
static __inline__ int atomic_inc_return(atomic_t *v)
{
	unsigned long flags;
	int result;

	local_irq_save(flags);
	__asm__ __volatile__ (
		"# atomic_inc_return		\n\t"
		DCACHE_CLEAR("%0", "r4", "%1")
		M32R_LOCK" %0, @%1;		\n\t"
		"addi	%0, #1;			\n\t"
		M32R_UNLOCK" %0, @%1;		\n\t"
		: "=&r" (result)
		: "r" (&v->counter)
		: "memory"
		__ATOMIC_CLOBBER
	);
	local_irq_restore(flags);

	return result;
}

/**
 * atomic_dec_return - decrement atomic variable and return it
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1 and returns the result.
 */
static __inline__ int atomic_dec_return(atomic_t *v)
{
	unsigned long flags;
	int result;

	local_irq_save(flags);
	__asm__ __volatile__ (
		"# atomic_dec_return		\n\t"
		DCACHE_CLEAR("%0", "r4", "%1")
		M32R_LOCK" %0, @%1;		\n\t"
		"addi	%0, #-1;		\n\t"
		M32R_UNLOCK" %0, @%1;		\n\t"
		: "=&r" (result)
		: "r" (&v->counter)
		: "memory"
		__ATOMIC_CLOBBER
	);
	local_irq_restore(flags);

	return result;
}
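
/*
 * Sketch (editor's illustration): atomic_inc_return() hands every
 * caller a distinct, monotonically increasing value, which makes it a
 * natural ID or ticket allocator.  next_id is hypothetical.
 */
#if 0
static atomic_t next_id = ATOMIC_INIT(0);

static int alloc_id(void)
{
	return atomic_inc_return(&next_id);	/* first caller gets 1 */
}
#endif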

/**
 * atomic_inc - increment atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1.
 */
#define atomic_inc(v) ((void)atomic_inc_return(v))

/**
 * atomic_dec - decrement atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1.
 */
#define atomic_dec(v) ((void)atomic_dec_return(v))

/**
 * atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic_inc_and_test(v) (atomic_inc_return(v) == 0)

/**
 * atomic_dec_and_test - decrement and test
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all
 * other cases.
 */
#define atomic_dec_and_test(v) (atomic_dec_return(v) == 0)
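
/*
 * Sketch (editor's illustration): the classic reference-counting
 * pattern built on atomic_inc()/atomic_dec_and_test().  struct obj and
 * obj_free() are hypothetical.
 */
#if 0
struct obj {
	atomic_t refcnt;
	/* ... payload ... */
};

extern void obj_free(struct obj *o);

static void obj_get(struct obj *o)
{
	atomic_inc(&o->refcnt);
}

static void obj_put(struct obj *o)
{
	/* exactly one caller observes the transition to zero */
	if (atomic_dec_and_test(&o->refcnt))
		obj_free(o);
}
#endif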

/**
 * atomic_add_negative - add and test if negative
 * @v: pointer of type atomic_t
 * @i: integer value to add
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
#define atomic_add_negative(i,v) (atomic_add_return((i), (v)) < 0)
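
/*
 * Sketch (editor's illustration): atomic_add_negative() suits signed
 * credit-style counters where dropping below zero signals that callers
 * should back off.  throttle() and credits are hypothetical.
 */
#if 0
extern void throttle(void);
static atomic_t credits = ATOMIC_INIT(8);

static void charge_credits(int n)
{
	/* adds -n; true when the resulting value is negative */
	if (atomic_add_negative(-n, &credits))
		throttle();
}
#endif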

#define atomic_cmpxchg(v, o, n) ((int)cmpxchg(&((v)->counter), (o), (n)))
#define atomic_xchg(v, new) (xchg(&((v)->counter), new))
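
/*
 * Sketch (editor's illustration): atomic_xchg() as a one-shot
 * test-and-set latch; atomic_cmpxchg() appears in a compare-and-swap
 * loop in __atomic_add_unless() just below.  once is hypothetical.
 */
#if 0
static atomic_t once = ATOMIC_INIT(0);

static int claim_once(void)
{
	/* only the first caller sees the old value 0 */
	return atomic_xchg(&once, 1) == 0;
}
#endif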

/**
 * __atomic_add_unless - add unless the number is a given value
 * @v: pointer of type atomic_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as it was not @u.
 * Returns the old value of @v.
 */
static __inline__ int __atomic_add_unless(atomic_t *v, int a, int u)
{
	int c, old;
	c = atomic_read(v);
	for (;;) {
		if (unlikely(c == (u)))
			break;
		old = atomic_cmpxchg((v), c, c + (a));
		if (likely(old == c))
			break;
		c = old;
	}
	return c;
}
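
/*
 * Sketch (editor's illustration): the common "increment unless zero"
 * idiom built on __atomic_add_unless(), used to take a reference only
 * while the object is still live.  Generic code layers
 * atomic_add_unless() on top of this helper.
 */
#if 0
static inline int obj_tryget(atomic_t *refcnt)
{
	/* old value is returned; nonzero means the increment happened */
	return __atomic_add_unless(refcnt, 1, 0) != 0;
}
#endif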

#endif	/* _ASM_M32R_ATOMIC_H */