linux/arch/h8300/include/asm/atomic.h
#ifndef __ARCH_H8300_ATOMIC__
#define __ARCH_H8300_ATOMIC__

#include <linux/types.h>

/*
 * Atomic operations that C can't guarantee us.  Useful for
 * resource counting etc.
 */

#define ATOMIC_INIT(i)  { (i) }

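/*
 * atomic_read() and atomic_set() are implemented as plain volatile
 * loads and stores of the counter word; no locking is added here.
 */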
#define atomic_read(v)          (*(volatile int *)&(v)->counter)
#define atomic_set(v, i)        (((v)->counter) = i)

#include <asm/system.h>
#include <linux/kernel.h>

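/*
 * The read-modify-write primitives below disable local interrupts
 * around the update; with no SMP support on this port, that is
 * sufficient to make each sequence atomic.
 */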
static __inline__ int atomic_add_return(int i, atomic_t *v)
{
        unsigned long flags;
        int ret;
        local_irq_save(flags);
        ret = v->counter += i;
        local_irq_restore(flags);
        return ret;
}

#define atomic_add(i, v) atomic_add_return(i, v)
#define atomic_add_negative(a, v)       (atomic_add_return((a), (v)) < 0)

static __inline__ int atomic_sub_return(int i, atomic_t *v)
{
        unsigned long flags;
        int ret;
        local_irq_save(flags);
        ret = v->counter -= i;
        local_irq_restore(flags);
        return ret;
}

#define atomic_sub(i, v) atomic_sub_return(i, v)
#define atomic_sub_and_test(i, v) (atomic_sub_return(i, v) == 0)

static __inline__ int atomic_inc_return(atomic_t *v)
{
        unsigned long flags;
        int ret;
        local_irq_save(flags);
        v->counter++;
        ret = v->counter;
        local_irq_restore(flags);
        return ret;
}

#define atomic_inc(v) atomic_inc_return(v)

/*
 * atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1 and returns true if the result is
 * zero, or false for all other cases.
 */
#define atomic_inc_and_test(v) (atomic_inc_return(v) == 0)

static __inline__ int atomic_dec_return(atomic_t *v)
{
        unsigned long flags;
        int ret;
        local_irq_save(flags);
        --v->counter;
        ret = v->counter;
        local_irq_restore(flags);
        return ret;
}

#define atomic_dec(v) atomic_dec_return(v)

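/*
 * atomic_dec_and_test - decrement and test
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1 and returns true if the result is zero.
 */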
static __inline__ int atomic_dec_and_test(atomic_t *v)
{
        unsigned long flags;
        int ret;
        local_irq_save(flags);
        --v->counter;
        ret = v->counter;
        local_irq_restore(flags);
        return ret == 0;
}

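/*
 * atomic_cmpxchg - compare and exchange
 * @v: pointer of type atomic_t
 * @old: expected value
 * @new: value stored into @v only if it still holds @old
 *
 * Returns the value of @v seen before the (possible) store.
 */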
static inline int atomic_cmpxchg(atomic_t *v, int old, int new)
{
        int ret;
        unsigned long flags;

        local_irq_save(flags);
        ret = v->counter;
        if (likely(ret == old))
                v->counter = new;
        local_irq_restore(flags);
        return ret;
}

#define atomic_xchg(v, new) (xchg(&((v)->counter), new))

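/*
 * atomic_add_unless - add to @v, unless it already holds @u
 * @v: pointer of type atomic_t
 * @a: amount to add
 * @u: value that blocks the add
 *
 * Returns non-zero if the add was performed, zero otherwise.
 */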
static inline int atomic_add_unless(atomic_t *v, int a, int u)
{
        int ret;
        unsigned long flags;

        local_irq_save(flags);
        ret = v->counter;
        if (ret != u)
                v->counter += a;
        local_irq_restore(flags);
        return ret != u;
}
#define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0)

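/*
 * atomic_clear_mask - atomically clear bits in a word
 * @mask: bits to clear
 * @v: pointer to the word
 *
 * The asm saves CCR in r1l, sets the interrupt mask bit
 * (orc #0x80,ccr) so the read-modify-write of *@v cannot be
 * interrupted, and then restores the original CCR.
 */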
static __inline__ void atomic_clear_mask(unsigned long mask, unsigned long *v)
{
        __asm__ __volatile__("stc ccr,r1l\n\t"
                             "orc #0x80,ccr\n\t"
                             "mov.l %0,er0\n\t"
                             "and.l %1,er0\n\t"
                             "mov.l er0,%0\n\t"
                             "ldc r1l,ccr"
                             : "+m" (*v) : "g" (~(mask)) : "er0", "er1");
}

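/*
 * atomic_set_mask - atomically set bits in a word
 * @mask: bits to set
 * @v: pointer to the word
 *
 * Same CCR save/mask/restore sequence as atomic_clear_mask(), but
 * or.l-ing @mask into *@v.
 */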
static __inline__ void atomic_set_mask(unsigned long mask, unsigned long *v)
{
        __asm__ __volatile__("stc ccr,r1l\n\t"
                             "orc #0x80,ccr\n\t"
                             "mov.l %0,er0\n\t"
                             "or.l %1,er0\n\t"
                             "mov.l er0,%0\n\t"
                             "ldc r1l,ccr"
                             : "+m" (*v) : "g" (mask) : "er0", "er1");
}

/* Atomic operations are already serializing */
#define smp_mb__before_atomic_dec()     barrier()
#define smp_mb__after_atomic_dec()      barrier()
#define smp_mb__before_atomic_inc()     barrier()
#define smp_mb__after_atomic_inc()      barrier()

#include <asm-generic/atomic-long.h>
#endif /* __ARCH_H8300_ATOMIC__ */