/* linux/arch/frv/include/asm/atomic.h */
/* atomic.h: atomic operation emulation for FR-V
 *
 * For an explanation of how atomic ops work in this arch, see:
 *   Documentation/frv/atomic-ops.txt
 *
 * Copyright (C) 2004 Red Hat, Inc. All Rights Reserved.
 * Written by David Howells (dhowells@redhat.com)
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License
 * as published by the Free Software Foundation; either version
 * 2 of the License, or (at your option) any later version.
 */
  14#ifndef _ASM_ATOMIC_H
  15#define _ASM_ATOMIC_H
  16
  17#include <linux/types.h>
  18#include <asm/cmpxchg.h>
  19#include <asm/barrier.h>
  20
  21#ifdef CONFIG_SMP
  22#error not SMP safe
  23#endif
  24
  25#include <asm/atomic_defs.h>
  26
/*
 * Atomic operations that C can't guarantee us.  Useful for
 * resource counting etc..
 *
 * We do not have SMP systems, so we don't have to deal with that.
 */
  33
  34#define ATOMIC_INIT(i)          { (i) }
  35#define atomic_read(v)          READ_ONCE((v)->counter)
  36#define atomic_set(v, i)        WRITE_ONCE(((v)->counter), (i))
  37
  38static inline int atomic_inc_return(atomic_t *v)
  39{
  40        return __atomic_add_return(1, &v->counter);
  41}
  42
  43static inline int atomic_dec_return(atomic_t *v)
  44{
  45        return __atomic_sub_return(1, &v->counter);
  46}
  47
  48static inline int atomic_add_return(int i, atomic_t *v)
  49{
  50        return __atomic_add_return(i, &v->counter);
  51}
  52
  53static inline int atomic_sub_return(int i, atomic_t *v)
  54{
  55        return __atomic_sub_return(i, &v->counter);
  56}
  57
  58static inline int atomic_add_negative(int i, atomic_t *v)
  59{
  60        return atomic_add_return(i, v) < 0;
  61}
  62
  63static inline void atomic_add(int i, atomic_t *v)
  64{
  65        atomic_add_return(i, v);
  66}
  67
  68static inline void atomic_sub(int i, atomic_t *v)
  69{
  70        atomic_sub_return(i, v);
  71}
  72
  73static inline void atomic_inc(atomic_t *v)
  74{
  75        atomic_inc_return(v);
  76}
  77
  78static inline void atomic_dec(atomic_t *v)
  79{
  80        atomic_dec_return(v);
  81}
  82
  83#define atomic_sub_and_test(i,v)        (atomic_sub_return((i), (v)) == 0)
  84#define atomic_dec_and_test(v)          (atomic_sub_return(1, (v)) == 0)
  85#define atomic_inc_and_test(v)          (atomic_add_return(1, (v)) == 0)
  86
  87/*
  88 * 64-bit atomic ops
  89 */
  90typedef struct {
  91        long long counter;
  92} atomic64_t;
  93
  94#define ATOMIC64_INIT(i)        { (i) }
  95
  96static inline long long atomic64_read(const atomic64_t *v)
  97{
  98        long long counter;
  99
 100        asm("ldd%I1 %M1,%0"
 101            : "=e"(counter)
 102            : "m"(v->counter));
 103
 104        return counter;
 105}
 106
 107static inline void atomic64_set(atomic64_t *v, long long i)
 108{
 109        asm volatile("std%I0 %1,%M0"
 110                     : "=m"(v->counter)
 111                     : "e"(i));
 112}
 113
 114static inline long long atomic64_inc_return(atomic64_t *v)
 115{
 116        return __atomic64_add_return(1, &v->counter);
 117}
 118
 119static inline long long atomic64_dec_return(atomic64_t *v)
 120{
 121        return __atomic64_sub_return(1, &v->counter);
 122}
 123
 124static inline long long atomic64_add_return(long long i, atomic64_t *v)
 125{
 126        return __atomic64_add_return(i, &v->counter);
 127}
 128
 129static inline long long atomic64_sub_return(long long i, atomic64_t *v)
 130{
 131        return __atomic64_sub_return(i, &v->counter);
 132}
 133
 134static inline long long atomic64_add_negative(long long i, atomic64_t *v)
 135{
 136        return atomic64_add_return(i, v) < 0;
 137}
 138
 139static inline void atomic64_add(long long i, atomic64_t *v)
 140{
 141        atomic64_add_return(i, v);
 142}
 143
 144static inline void atomic64_sub(long long i, atomic64_t *v)
 145{
 146        atomic64_sub_return(i, v);
 147}
 148
 149static inline void atomic64_inc(atomic64_t *v)
 150{
 151        atomic64_inc_return(v);
 152}
 153
 154static inline void atomic64_dec(atomic64_t *v)
 155{
 156        atomic64_dec_return(v);
 157}
 158
 159#define atomic64_sub_and_test(i,v)      (atomic64_sub_return((i), (v)) == 0)
 160#define atomic64_dec_and_test(v)        (atomic64_dec_return((v)) == 0)
 161#define atomic64_inc_and_test(v)        (atomic64_inc_return((v)) == 0)
 162
 163
 164#define atomic_cmpxchg(v, old, new)     (cmpxchg(&(v)->counter, old, new))
 165#define atomic_xchg(v, new)             (xchg(&(v)->counter, new))
 166#define atomic64_cmpxchg(v, old, new)   (__cmpxchg_64(old, new, &(v)->counter))
 167#define atomic64_xchg(v, new)           (__xchg_64(new, &(v)->counter))
 168
 169static __inline__ int __atomic_add_unless(atomic_t *v, int a, int u)
 170{
 171        int c, old;
 172        c = atomic_read(v);
 173        for (;;) {
 174                if (unlikely(c == (u)))
 175                        break;
 176                old = atomic_cmpxchg((v), c, c + (a));
 177                if (likely(old == c))
 178                        break;
 179                c = old;
 180        }
 181        return c;
 182}
 183
 184#define ATOMIC_OP(op)                                                   \
 185static inline void atomic_##op(int i, atomic_t *v)                      \
 186{                                                                       \
 187        (void)__atomic32_fetch_##op(i, &v->counter);                    \
 188}                                                                       \
 189                                                                        \
 190static inline void atomic64_##op(long long i, atomic64_t *v)            \
 191{                                                                       \
 192        (void)__atomic64_fetch_##op(i, &v->counter);                    \
 193}
 194
 195ATOMIC_OP(or)
 196ATOMIC_OP(and)
 197ATOMIC_OP(xor)
 198
 199#undef ATOMIC_OP
 200
 201#endif /* _ASM_ATOMIC_H */
 202