linux/arch/mn10300/include/asm/atomic.h
/* MN10300 Atomic counter operations
 *
 * Copyright (C) 2007 Red Hat, Inc. All Rights Reserved.
 * Written by David Howells (dhowells@redhat.com)
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public Licence
 * as published by the Free Software Foundation; either version
 * 2 of the Licence, or (at your option) any later version.
 */
#ifndef _ASM_ATOMIC_H
#define _ASM_ATOMIC_H

#ifdef CONFIG_SMP
#error not SMP safe
#endif

/*
 * Atomic operations that C can't guarantee us.  Useful for
 * resource counting etc.
 */

#define ATOMIC_INIT(i)  { (i) }

#ifdef __KERNEL__

/**
 * atomic_read - read atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically reads the value of @v.  Note that the guaranteed
 * useful range of an atomic_t is only 24 bits.
 */
#define atomic_read(v)  ((v)->counter)

/**
 * atomic_set - set atomic variable
 * @v: pointer of type atomic_t
 * @i: required value
 *
 * Atomically sets the value of @v to @i.  Note that the guaranteed
 * useful range of an atomic_t is only 24 bits.
 */
#define atomic_set(v, i) (((v)->counter) = (i))
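
/*
 * Illustrative sketch, not part of the original header: how a caller
 * might initialise and access an atomic_t using ATOMIC_INIT(),
 * atomic_read() and atomic_set().  The structure and helper names below
 * are hypothetical, and values should stay within the 24-bit guaranteed
 * range noted above.
 */
struct example_stats {                          /* hypothetical */
        atomic_t packets;
};

#define EXAMPLE_STATS_INIT { .packets = ATOMIC_INIT(0) }

static inline void example_stats_reset(struct example_stats *s)
{
        atomic_set(&s->packets, 0);             /* plain store to ->counter */
}

static inline int example_stats_snapshot(struct example_stats *s)
{
        return atomic_read(&s->packets);        /* plain load of ->counter */
}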

#include <asm/system.h>

/**
 * atomic_add_return - add integer to atomic variable
 * @i: integer value to add
 * @v: pointer of type atomic_t
 *
 * Atomically adds @i to @v and returns the result
 * Note that the guaranteed useful range of an atomic_t is only 24 bits.
 */
static inline int atomic_add_return(int i, atomic_t *v)
{
        unsigned long flags;
        int temp;

        local_irq_save(flags);
        temp = v->counter;
        temp += i;
        v->counter = temp;
        local_irq_restore(flags);

        return temp;
}
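
/*
 * Illustrative sketch, not part of the original header: using the value
 * returned by atomic_add_return() to enforce a limit without a separate
 * read.  The function name and limit parameter are hypothetical; a
 * negative addend undoes the charge.
 */
static inline int example_charge(atomic_t *usage, int amount, int limit)
{
        /* new total after the addition performed by atomic_add_return() */
        int total = atomic_add_return(amount, usage);

        if (total > limit) {
                atomic_add_return(-amount, usage);      /* roll back */
                return -1;
        }
        return 0;
}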

/**
 * atomic_sub_return - subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v and returns the result
 * Note that the guaranteed useful range of an atomic_t is only 24 bits.
 */
static inline int atomic_sub_return(int i, atomic_t *v)
{
        unsigned long flags;
        int temp;

        local_irq_save(flags);
        temp = v->counter;
        temp -= i;
        v->counter = temp;
        local_irq_restore(flags);

        return temp;
}

static inline int atomic_add_negative(int i, atomic_t *v)
{
        return atomic_add_return(i, v) < 0;
}

static inline void atomic_add(int i, atomic_t *v)
{
        atomic_add_return(i, v);
}

static inline void atomic_sub(int i, atomic_t *v)
{
        atomic_sub_return(i, v);
}

static inline void atomic_inc(atomic_t *v)
{
        atomic_add_return(1, v);
}

static inline void atomic_dec(atomic_t *v)
{
        atomic_sub_return(1, v);
}

#define atomic_dec_return(v)            atomic_sub_return(1, (v))
#define atomic_inc_return(v)            atomic_add_return(1, (v))

#define atomic_sub_and_test(i, v)       (atomic_sub_return((i), (v)) == 0)
#define atomic_dec_and_test(v)          (atomic_sub_return(1, (v)) == 0)
#define atomic_inc_and_test(v)          (atomic_add_return(1, (v)) == 0)
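
/*
 * Illustrative sketch, not part of the original header: the usual
 * reference-counting pattern built from atomic_inc() and
 * atomic_dec_and_test().  The object type and helper names are
 * hypothetical; the caller that sees example_put() return true owns the
 * final teardown.
 */
struct example_object {                         /* hypothetical */
        atomic_t refcount;
};

static inline void example_get(struct example_object *obj)
{
        atomic_inc(&obj->refcount);             /* take a reference */
}

static inline int example_put(struct example_object *obj)
{
        /* non-zero only for the caller that dropped the last reference */
        return atomic_dec_and_test(&obj->refcount);
}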

#define atomic_add_unless(v, a, u)                              \
({                                                              \
        int c, old;                                             \
        c = atomic_read(v);                                     \
        while (c != (u) && (old = atomic_cmpxchg((v), c, c + (a))) != c) \
                c = old;                                        \
        c != (u);                                               \
})

#define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0)

static inline void atomic_clear_mask(unsigned long mask, unsigned long *addr)
{
        unsigned long flags;

        mask = ~mask;
        local_irq_save(flags);
        *addr &= mask;
        local_irq_restore(flags);
}
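
/*
 * Illustrative sketch, not part of the original header: clearing a flag
 * bit in a plain unsigned long word with atomic_clear_mask().  The flag
 * name, bit value and helper are hypothetical.
 */
#define EXAMPLE_FLAG_BUSY       0x00000001UL    /* hypothetical flag bit */

static inline void example_mark_idle(unsigned long *flags_word)
{
        /* the helper masks the bit off with interrupts disabled */
        atomic_clear_mask(EXAMPLE_FLAG_BUSY, flags_word);
}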

#define atomic_xchg(ptr, v)             (xchg(&(ptr)->counter, (v)))
#define atomic_cmpxchg(v, old, new)     (cmpxchg(&((v)->counter), (old), (new)))
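
/*
 * Illustrative sketches, not part of the original header.  They sit
 * after the xchg/cmpxchg wrappers because atomic_add_unless() above
 * expands atomic_cmpxchg() at its point of use.  All names are
 * hypothetical.
 *
 * example_tryget() takes a reference only while the count is still
 * non-zero, the usual pattern for objects reached through a lookup.
 */
static inline int example_tryget(atomic_t *refcount)
{
        /* returns 0 once the count has already dropped to zero */
        return atomic_inc_not_zero(refcount);
}

/*
 * example_inc_below() shows the retry loop that atomic_add_unless() is
 * built from: re-read the counter whenever atomic_cmpxchg() reports that
 * something else changed it first.
 */
static inline int example_inc_below(atomic_t *v, int limit)
{
        int old, cur = atomic_read(v);

        while (cur < limit) {
                old = atomic_cmpxchg(v, cur, cur + 1);
                if (old == cur)
                        return 1;       /* installed cur + 1 */
                cur = old;              /* lost the race, retry */
        }
        return 0;                       /* limit reached, nothing changed */
}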

/*
 * Atomic operations are already serializing on MN10300: this port is
 * UP-only and implements them with interrupts disabled, so a compiler
 * barrier is sufficient here.
 */
#define smp_mb__before_atomic_dec()     barrier()
#define smp_mb__after_atomic_dec()      barrier()
#define smp_mb__before_atomic_inc()     barrier()
#define smp_mb__after_atomic_inc()      barrier()

#include <asm-generic/atomic-long.h>

#endif /* __KERNEL__ */
#endif /* _ASM_ATOMIC_H */