linux/arch/mn10300/include/asm/atomic.h
/* MN10300 Atomic counter operations
 *
 * Copyright (C) 2007 Red Hat, Inc. All Rights Reserved.
 * Written by David Howells (dhowells@redhat.com)
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public Licence
 * as published by the Free Software Foundation; either version
 * 2 of the Licence, or (at your option) any later version.
 */
#ifndef _ASM_ATOMIC_H
#define _ASM_ATOMIC_H

#include <asm/irqflags.h>
#include <asm/cmpxchg.h>
#include <asm/barrier.h>

#ifndef CONFIG_SMP
#include <asm-generic/atomic.h>
#else

/*
 * Atomic operations that C can't guarantee us.  Useful for
 * resource counting etc.
 */

#define ATOMIC_INIT(i)  { (i) }

#ifdef __KERNEL__

/**
 * atomic_read - read atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically reads the value of @v.  Note that the guaranteed
 * useful range of an atomic_t is only 24 bits.
 */
#define atomic_read(v)  (ACCESS_ONCE((v)->counter))

/**
 * atomic_set - set atomic variable
 * @v: pointer of type atomic_t
 * @i: required value
 *
 * Atomically sets the value of @v to @i.  Note that the guaranteed
 * useful range of an atomic_t is only 24 bits.
 */
#define atomic_set(v, i) (((v)->counter) = (i))
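
/*
 * Usage sketch (illustrative only, not part of the upstream header):
 * an atomic_t is declared with ATOMIC_INIT() and then accessed with
 * atomic_set() and atomic_read().  Note that the read/set pair below
 * is not atomic as a whole; atomic_xchg() further down is the atomic
 * way to swap in a new value and fetch the old one.
 */
static inline int atomic_example_reset(atomic_t *v)
{
        int old = atomic_read(v);       /* snapshot the current value */

        atomic_set(v, 0);               /* plain store; no ordering implied */
        return old;                     /* may be stale if another CPU raced */
}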

#define ATOMIC_OP(op)                                                   \
static inline void atomic_##op(int i, atomic_t *v)                      \
{                                                                       \
        int retval, status;                                             \
                                                                        \
        asm volatile(                                                   \
                "1:     mov     %4,(_AAR,%3)    \n"                     \
                "       mov     (_ADR,%3),%1    \n"                     \
                "       " #op " %5,%1           \n"                     \
                "       mov     %1,(_ADR,%3)    \n"                     \
                "       mov     (_ADR,%3),%0    \n"     /* flush */     \
                "       mov     (_ASR,%3),%0    \n"                     \
                "       or      %0,%0           \n"                     \
                "       bne     1b              \n"                     \
                : "=&r"(status), "=&r"(retval), "=m"(v->counter)        \
                : "a"(ATOMIC_OPS_BASE_ADDR), "r"(&v->counter), "r"(i)   \
                : "memory", "cc");                                      \
}
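
/*
 * Sketch of what the asm above does (hedged reading, based on the
 * register names): storing the target address to _AAR opens an
 * exclusive sequence in the MN10300 atomic-operations unit, _ADR is
 * used to load and store the data, and reading _ASR back yields a
 * non-zero status if the sequence was disturbed and must be retried
 * (the "or %0,%0; bne 1b" pair tests that status).  In rough C terms,
 * with load_locked() and store_conditional() as hypothetical stand-ins:
 *
 *      do {
 *              val = load_locked(&v->counter);
 *              val = val op i;
 *      } while (!store_conditional(&v->counter, val));
 *
 * ATOMIC_OP_RETURN below repeats the same sequence but also hands the
 * new value back to the caller.
 */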

#define ATOMIC_OP_RETURN(op)                                            \
static inline int atomic_##op##_return(int i, atomic_t *v)              \
{                                                                       \
        int retval, status;                                             \
                                                                        \
        asm volatile(                                                   \
                "1:     mov     %4,(_AAR,%3)    \n"                     \
                "       mov     (_ADR,%3),%1    \n"                     \
                "       " #op " %5,%1           \n"                     \
                "       mov     %1,(_ADR,%3)    \n"                     \
                "       mov     (_ADR,%3),%0    \n"     /* flush */     \
                "       mov     (_ASR,%3),%0    \n"                     \
                "       or      %0,%0           \n"                     \
                "       bne     1b              \n"                     \
                : "=&r"(status), "=&r"(retval), "=m"(v->counter)        \
                : "a"(ATOMIC_OPS_BASE_ADDR), "r"(&v->counter), "r"(i)   \
                : "memory", "cc");                                      \
        return retval;                                                  \
}

#define ATOMIC_OPS(op) ATOMIC_OP(op) ATOMIC_OP_RETURN(op)

ATOMIC_OPS(add)
ATOMIC_OPS(sub)

ATOMIC_OP(and)
ATOMIC_OP(or)
ATOMIC_OP(xor)
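
/*
 * The expansions above generate atomic_add(), atomic_sub() and their
 * atomic_add_return()/atomic_sub_return() forms, plus atomic_and(),
 * atomic_or() and atomic_xor(), which have no _return variants here.
 */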

#undef ATOMIC_OPS
#undef ATOMIC_OP_RETURN
#undef ATOMIC_OP

static inline int atomic_add_negative(int i, atomic_t *v)
{
        return atomic_add_return(i, v) < 0;
}

static inline void atomic_inc(atomic_t *v)
{
        atomic_add_return(1, v);
}

static inline void atomic_dec(atomic_t *v)
{
        atomic_sub_return(1, v);
}

#define atomic_dec_return(v)            atomic_sub_return(1, (v))
#define atomic_inc_return(v)            atomic_add_return(1, (v))

#define atomic_sub_and_test(i, v)       (atomic_sub_return((i), (v)) == 0)
#define atomic_dec_and_test(v)          (atomic_sub_return(1, (v)) == 0)
#define atomic_inc_and_test(v)          (atomic_add_return(1, (v)) == 0)
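
/*
 * Typical use of atomic_dec_and_test() (illustrative sketch; "obj" and
 * its "refcnt" field are hypothetical): drop a reference and free the
 * object when the last one goes away.
 *
 *      if (atomic_dec_and_test(&obj->refcnt))
 *              kfree(obj);
 */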

#define __atomic_add_unless(v, a, u)                            \
({                                                              \
        int c, old;                                             \
        c = atomic_read(v);                                     \
        while (c != (u) && (old = atomic_cmpxchg((v), c, c + (a))) != c) \
                c = old;                                        \
        c;                                                      \
})
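
/*
 * Example use (sketch; "obj" is hypothetical): __atomic_add_unless()
 * returns the value the counter had beforehand, so "take a reference
 * only if the count is not already zero" can be written as:
 *
 *      if (__atomic_add_unless(&obj->refcnt, 1, 0) != 0)
 *              ... the reference was taken ...
 */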

#define atomic_xchg(ptr, v)             (xchg(&(ptr)->counter, (v)))
#define atomic_cmpxchg(v, old, new)     (cmpxchg(&((v)->counter), (old), (new)))
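
/*
 * Illustrative sketch (not part of the upstream API): a bounded
 * increment built on atomic_cmpxchg(), using the same optimistic
 * retry-loop pattern as __atomic_add_unless() above.
 */
static inline int atomic_inc_below_example(atomic_t *v, int limit)
{
        int c = atomic_read(v), old;

        /* retry while the value is below the limit and the CAS loses a race */
        while (c < limit && (old = atomic_cmpxchg(v, c, c + 1)) != c)
                c = old;
        return c < limit;       /* non-zero if the increment happened */
}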

#endif /* __KERNEL__ */
#endif /* CONFIG_SMP */
#endif /* _ASM_ATOMIC_H */