linux/arch/avr32/include/asm/atomic.h
/*
 * Atomic operations that C can't guarantee us.  Useful for
 * resource counting etc.
 *
 * But use these as seldom as possible since they are slower than
 * regular operations.
 *
 * Copyright (C) 2004-2006 Atmel Corporation
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 */
#ifndef __ASM_AVR32_ATOMIC_H
#define __ASM_AVR32_ATOMIC_H

#include <linux/types.h>
#include <asm/cmpxchg.h>

#define ATOMIC_INIT(i)  { (i) }

#define atomic_read(v)          (*(volatile int *)&(v)->counter)
#define atomic_set(v, i)        (((v)->counter) = i)
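
/*
 * Example (editor's sketch, not part of the original header): basic
 * initialization and access.  The variable name 'pending' is
 * hypothetical.
 *
 *      static atomic_t pending = ATOMIC_INIT(0);
 *
 *      int n = atomic_read(&pending);  // plain volatile load
 *      atomic_set(&pending, n + 1);    // plain store, NOT an atomic RMW
 *
 * Note that atomic_read()/atomic_set() are ordinary loads and stores;
 * only the *_return/*_unless operations below are atomic
 * read-modify-write sequences.
 */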

/*
 * atomic_sub_return - subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v. Returns the resulting value.
 */
static inline int atomic_sub_return(int i, atomic_t *v)
{
        int result;

        asm volatile(
                "/* atomic_sub_return */\n"
                "1:     ssrf    5\n"
                "       ld.w    %0, %2\n"
                "       sub     %0, %3\n"
                "       stcond  %1, %0\n"
                "       brne    1b"
                : "=&r"(result), "=o"(v->counter)
                : "m"(v->counter), "rKs21"(i)
                : "cc");

        return result;
}
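
/*
 * Example (editor's sketch): consuming from a counted pool and
 * checking how much is left.  'free_slots' is a hypothetical name.
 *
 *      static atomic_t free_slots = ATOMIC_INIT(8);
 *
 *      int left = atomic_sub_return(1, &free_slots);
 *      if (left < 0)
 *              atomic_add(1, &free_slots);     // roll back, pool empty
 */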

/*
 * atomic_add_return - add integer to atomic variable
 * @i: integer value to add
 * @v: pointer of type atomic_t
 *
 * Atomically adds @i to @v. Returns the resulting value.
 */
static inline int atomic_add_return(int i, atomic_t *v)
{
        int result;

        /*
         * AVR32 "sub" can encode a signed 21-bit immediate (the Ks21
         * constraint used by atomic_sub_return()), while "add" only
         * takes a register operand, so small constant addends are
         * rewritten as subtraction of the negated value.
         */
        if (__builtin_constant_p(i) && (i >= -1048575) && (i <= 1048576))
                result = atomic_sub_return(-i, v);
        else
                asm volatile(
                        "/* atomic_add_return */\n"
                        "1:     ssrf    5\n"
                        "       ld.w    %0, %1\n"
                        "       add     %0, %3\n"
                        "       stcond  %2, %0\n"
                        "       brne    1b"
                        : "=&r"(result), "=o"(v->counter)
                        : "m"(v->counter), "r"(i)
                        : "cc", "memory");

        return result;
}
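
/*
 * Example (editor's sketch): the usual event-count pattern.  The name
 * 'nr_events' is hypothetical.
 *
 *      static atomic_t nr_events = ATOMIC_INIT(0);
 *
 *      if (atomic_add_return(1, &nr_events) == 1)
 *              ;       // first event observed; e.g. arm a timer here
 */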

/*
 * atomic_sub_unless - subtract unless the number is a given value
 * @v: pointer of type atomic_t
 * @a: the amount to subtract from v...
 * @u: ...unless v is equal to u.
 *
 * Atomically subtract @a from @v, so long as @v was not @u.
 * Returns nothing; use __atomic_add_unless() if the old value is needed.
 */
static inline void atomic_sub_unless(atomic_t *v, int a, int u)
{
        int tmp;

        asm volatile(
                "/* atomic_sub_unless */\n"
                "1:     ssrf    5\n"
                "       ld.w    %0, %2\n"
                "       cp.w    %0, %4\n"
                "       breq    1f\n"
                "       sub     %0, %3\n"
                "       stcond  %1, %0\n"
                "       brne    1b\n"
                "1:"
                : "=&r"(tmp), "=o"(v->counter)
                : "m"(v->counter), "rKs21"(a), "rKs21"(u)
                : "cc", "memory");
}
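
/*
 * Example (editor's sketch): drain a budget counter but never touch
 * the sentinel value 0.  'budget' is a hypothetical name.
 *
 *      static atomic_t budget = ATOMIC_INIT(16);
 *
 *      atomic_sub_unless(&budget, 1, 0);       // no-op once budget hits 0
 */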

/*
 * __atomic_add_unless - add unless the number is a given value
 * @v: pointer of type atomic_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as @v was not @u.
 * Returns the old value of @v.
 */
static inline int __atomic_add_unless(atomic_t *v, int a, int u)
{
        int tmp, old = atomic_read(v);

        /*
         * Constant addends are folded into atomic_sub_unless() so the
         * value can be encoded as a Ks21 immediate; see the matching
         * trick in atomic_add_return().
         */
        if (__builtin_constant_p(a) && (a >= -1048575) && (a <= 1048576))
                atomic_sub_unless(v, -a, u);
        else {
                asm volatile(
                        "/* __atomic_add_unless */\n"
                        "1:     ssrf    5\n"
                        "       ld.w    %0, %2\n"
                        "       cp.w    %0, %4\n"
                        "       breq    1f\n"
                        "       add     %0, %3\n"
                        "       stcond  %1, %0\n"
                        "       brne    1b\n"
                        "1:"
                        : "=&r"(tmp), "=o"(v->counter)
                        : "m"(v->counter), "r"(a), "ir"(u)
                        : "cc", "memory");
        }

        return old;
}
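
/*
 * Example (editor's sketch): the classic atomic_inc_not_zero() idiom,
 * taking a reference only if the object is still live.  'obj' and its
 * 'refs' field are hypothetical names.
 *
 *      if (__atomic_add_unless(&obj->refs, 1, 0) == 0)
 *              return NULL;    // object already dead, do not touch it
 */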

/*
 * atomic_sub_if_positive - conditionally subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically test @v and subtract @i if @v is greater than or equal to @i.
 * The function returns the old value of @v minus @i.
 */
static inline int atomic_sub_if_positive(int i, atomic_t *v)
{
        int result;

        asm volatile(
                "/* atomic_sub_if_positive */\n"
                "1:     ssrf    5\n"
                "       ld.w    %0, %2\n"
                "       sub     %0, %3\n"
                "       brlt    1f\n"
                "       stcond  %1, %0\n"
                "       brne    1b\n"
                "1:"
                : "=&r"(result), "=o"(v->counter)
                : "m"(v->counter), "ir"(i)
                : "cc", "memory");

        return result;
}
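
/*
 * Example (editor's sketch): a semaphore-style "trydown" built on
 * atomic_sub_if_positive().  'count' is a hypothetical name.
 *
 *      static atomic_t count = ATOMIC_INIT(4);
 *
 *      if (atomic_sub_if_positive(1, &count) < 0)
 *              return -EBUSY;  // no units left; counter unchanged
 */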

#define atomic_xchg(v, new)     (xchg(&((v)->counter), new))
#define atomic_cmpxchg(v, o, n) (cmpxchg(&((v)->counter), (o), (n)))
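
/*
 * Example (editor's sketch): a compare-and-swap update loop, here
 * clamping a counter at a hypothetical ceiling 'max'.
 *
 *      static atomic_t val = ATOMIC_INIT(0);
 *      int old, new;
 *
 *      do {
 *              old = atomic_read(&val);
 *              new = old < max ? old + 1 : old;
 *      } while (atomic_cmpxchg(&val, old, new) != old);
 */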

#define atomic_sub(i, v)        (void)atomic_sub_return(i, v)
#define atomic_add(i, v)        (void)atomic_add_return(i, v)
#define atomic_dec(v)           atomic_sub(1, (v))
#define atomic_inc(v)           atomic_add(1, (v))

#define atomic_dec_return(v)    atomic_sub_return(1, v)
#define atomic_inc_return(v)    atomic_add_return(1, v)

#define atomic_sub_and_test(i, v) (atomic_sub_return(i, v) == 0)
#define atomic_inc_and_test(v) (atomic_add_return(1, v) == 0)
#define atomic_dec_and_test(v) (atomic_sub_return(1, v) == 0)
#define atomic_add_negative(i, v) (atomic_add_return(i, v) < 0)
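
/*
 * Example (editor's sketch): the standard refcount release pattern
 * built from the helpers above.  'my_obj' and 'my_obj_put' are
 * hypothetical names.
 *
 *      static void my_obj_put(struct my_obj *obj)
 *      {
 *              if (atomic_dec_and_test(&obj->refcount))
 *                      kfree(obj);     // we dropped the last reference
 *      }
 */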

#define atomic_dec_if_positive(v) atomic_sub_if_positive(1, v)

#define smp_mb__before_atomic_dec()     barrier()
#define smp_mb__after_atomic_dec()      barrier()
#define smp_mb__before_atomic_inc()     barrier()
#define smp_mb__after_atomic_inc()      barrier()

#endif /* __ASM_AVR32_ATOMIC_H */