linux/arch/h8300/include/asm/atomic.h
#ifndef __ARCH_H8300_ATOMIC__
#define __ARCH_H8300_ATOMIC__

#include <linux/types.h>
#include <asm/cmpxchg.h>

/*
 * Atomic operations that C can't guarantee us.  Useful for
 * resource counting etc..
 *
 * The H8/300 port is uniprocessor-only, so disabling interrupts
 * around each read-modify-write sequence below is enough to make
 * it atomic.
 */

#define ATOMIC_INIT(i)	{ (i) }

#define atomic_read(v)		(*(volatile int *)&(v)->counter)
#define atomic_set(v, i)	(((v)->counter) = i)
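
/*
 * Usage sketch (illustrative only; "foo" is a hypothetical object, not
 * something defined here): the resource-counting pattern these
 * primitives exist for.
 *
 *	struct foo {
 *		atomic_t refcnt;
 *	};
 *
 *	void foo_get(struct foo *f)
 *	{
 *		atomic_inc(&f->refcnt);
 *	}
 *
 *	void foo_put(struct foo *f)
 *	{
 *		if (atomic_dec_and_test(&f->refcnt))
 *			kfree(f);
 *	}
 *
 * The counter is set up with ATOMIC_INIT(1) or atomic_set(&f->refcnt, 1)
 * when the object is created.
 */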

#include <linux/kernel.h>

static __inline__ int atomic_add_return(int i, atomic_t *v)
{
	unsigned long flags;
	int ret;
	local_irq_save(flags);
	ret = v->counter += i;
	local_irq_restore(flags);
	return ret;
}

#define atomic_add(i, v) atomic_add_return(i, v)
#define atomic_add_negative(a, v)	(atomic_add_return((a), (v)) < 0)

static __inline__ int atomic_sub_return(int i, atomic_t *v)
{
	unsigned long flags;
	int ret;
	local_irq_save(flags);
	ret = v->counter -= i;
	local_irq_restore(flags);
	return ret;
}

#define atomic_sub(i, v) atomic_sub_return(i, v)
#define atomic_sub_and_test(i, v) (atomic_sub_return(i, v) == 0)

static __inline__ int atomic_inc_return(atomic_t *v)
{
	unsigned long flags;
	int ret;
	local_irq_save(flags);
	v->counter++;
	ret = v->counter;
	local_irq_restore(flags);
	return ret;
}

#define atomic_inc(v) atomic_inc_return(v)

/*
 * atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic_inc_and_test(v) (atomic_inc_return(v) == 0)

static __inline__ int atomic_dec_return(atomic_t *v)
{
	unsigned long flags;
	int ret;
	local_irq_save(flags);
	--v->counter;
	ret = v->counter;
	local_irq_restore(flags);
	return ret;
}

#define atomic_dec(v) atomic_dec_return(v)

static __inline__ int atomic_dec_and_test(atomic_t *v)
{
	unsigned long flags;
	int ret;
	local_irq_save(flags);
	--v->counter;
	ret = v->counter;
	local_irq_restore(flags);
	return ret == 0;
}

/*
 * atomic_cmpxchg - compare and exchange
 * @v: pointer of type atomic_t
 * @old: value expected to be in @v
 * @new: value to store if @v still holds @old
 *
 * Returns the value @v held before the operation, whether or not
 * the store was performed.
 */
static inline int atomic_cmpxchg(atomic_t *v, int old, int new)
{
	int ret;
	unsigned long flags;

	local_irq_save(flags);
	ret = v->counter;
	if (likely(ret == old))
		v->counter = new;
	local_irq_restore(flags);
	return ret;
}
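
/*
 * Sketch of the usual compare-and-swap retry loop built on
 * atomic_cmpxchg().  atomic_inc_below() is hypothetical, shown only to
 * illustrate the read/compute/retry pattern:
 *
 *	static inline void atomic_inc_below(atomic_t *v, int limit)
 *	{
 *		int old, new;
 *
 *		do {
 *			old = atomic_read(v);
 *			if (old >= limit)
 *				return;
 *			new = old + 1;
 *		} while (atomic_cmpxchg(v, old, new) != old);
 *	}
 */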

/*
 * __atomic_add_unless - add unless the value equals u
 * @v: pointer of type atomic_t
 * @a: amount to add
 * @u: value @v must not hold for the add to happen
 *
 * Atomically adds @a to @v unless @v was @u; returns the old value of @v.
 */
static inline int __atomic_add_unless(atomic_t *v, int a, int u)
{
	int ret;
	unsigned long flags;

	local_irq_save(flags);
	ret = v->counter;
	if (ret != u)
		v->counter += a;
	local_irq_restore(flags);
	return ret;
}
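
/*
 * For reference, the generic <linux/atomic.h> layer is expected to build
 * its public helpers on top of __atomic_add_unless() roughly like this
 * (sketch, not defined in this file):
 *
 *	static inline int atomic_add_unless(atomic_t *v, int a, int u)
 *	{
 *		return __atomic_add_unless(v, a, u) != u;
 *	}
 *
 *	#define atomic_inc_not_zero(v)	atomic_add_unless((v), 1, 0)
 */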

/*
 * Atomically clear the bits in @mask in the word at @v.  The asm saves
 * CCR in r1l, sets the I bit with "orc #0x80,ccr" to mask interrupts,
 * performs the read-AND-write in er0, then restores the saved CCR.
 */
static __inline__ void atomic_clear_mask(unsigned long mask, unsigned long *v)
{
	__asm__ __volatile__("stc ccr,r1l\n\t"
			     "orc #0x80,ccr\n\t"
			     "mov.l %0,er0\n\t"
			     "and.l %1,er0\n\t"
			     "mov.l er0,%0\n\t"
			     "ldc r1l,ccr"
			     : "=m" (*v) : "g" (~(mask)) : "er0", "er1");
}

/*
 * Atomically set the bits in @mask in the word at @v, using the same
 * disable-interrupts-via-CCR sequence as atomic_clear_mask() above.
 */
static __inline__ void atomic_set_mask(unsigned long mask, unsigned long *v)
{
	__asm__ __volatile__("stc ccr,r1l\n\t"
			     "orc #0x80,ccr\n\t"
			     "mov.l %0,er0\n\t"
			     "or.l %1,er0\n\t"
			     "mov.l er0,%0\n\t"
			     "ldc r1l,ccr"
			     : "=m" (*v) : "g" (mask) : "er0", "er1");
}
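
/*
 * Usage sketch (hypothetical flags word, for illustration only): unlike
 * the helpers above, these operate on a plain unsigned long rather than
 * an atomic_t:
 *
 *	static unsigned long pending;
 *
 *	atomic_set_mask(1UL << 3, &pending);	sets bit 3 atomically
 *	atomic_clear_mask(1UL << 3, &pending);	clears bit 3 atomically
 */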

/* Atomic operations are already serializing */
#define smp_mb__before_atomic_dec()	barrier()
#define smp_mb__after_atomic_dec()	barrier()
#define smp_mb__before_atomic_inc()	barrier()
#define smp_mb__after_atomic_inc()	barrier()

#endif /* __ARCH_H8300_ATOMIC__ */