linux/arch/blackfin/include/asm/spinlock.h
/*
 * Copyright 2004-2009 Analog Devices Inc.
 *
 * Licensed under the GPL-2 or later.
 */

#ifndef __BFIN_SPINLOCK_H
#define __BFIN_SPINLOCK_H

#ifndef CONFIG_SMP
# include <asm-generic/spinlock.h>
#else

#include <linux/atomic.h>

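/*
 * The raw primitives below are implemented in Blackfin assembly as part
 * of the SMP atomic support (in this tree they live with the BF561 SMP
 * code, arch/blackfin/mach-bf561/atomic.S).  Note that <linux/atomic.h>
 * pulls in the Blackfin <asm/atomic.h>, which declares
 * __raw_uncached_fetch_asm() as used by the can_lock helpers further down.
 */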
asmlinkage int __raw_spin_is_locked_asm(volatile int *ptr);
asmlinkage void __raw_spin_lock_asm(volatile int *ptr);
asmlinkage int __raw_spin_trylock_asm(volatile int *ptr);
asmlinkage void __raw_spin_unlock_asm(volatile int *ptr);
asmlinkage void __raw_read_lock_asm(volatile int *ptr);
asmlinkage int __raw_read_trylock_asm(volatile int *ptr);
asmlinkage void __raw_read_unlock_asm(volatile int *ptr);
asmlinkage void __raw_write_lock_asm(volatile int *ptr);
asmlinkage int __raw_write_trylock_asm(volatile int *ptr);
asmlinkage void __raw_write_unlock_asm(volatile int *ptr);
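
/*
 * All of these operate on the bare volatile int lock word embedded in
 * arch_spinlock_t / arch_rwlock_t (defined in <asm/spinlock_types.h>).
 */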

static inline int arch_spin_is_locked(arch_spinlock_t *lock)
{
	return __raw_spin_is_locked_asm(&lock->lock);
}

static inline void arch_spin_lock(arch_spinlock_t *lock)
{
	__raw_spin_lock_asm(&lock->lock);
}

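/*
 * The _flags variants deliberately ignore the caller's saved irq flags:
 * this implementation never re-enables interrupts while it spins, so it
 * can fall back to the plain lock operations.
 */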
#define arch_spin_lock_flags(lock, flags) arch_spin_lock(lock)

static inline int arch_spin_trylock(arch_spinlock_t *lock)
{
	return __raw_spin_trylock_asm(&lock->lock);
}

static inline void arch_spin_unlock(arch_spinlock_t *lock)
{
	__raw_spin_unlock_asm(&lock->lock);
}

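/*
 * Busy-wait until the lock is observed free, without trying to take it.
 * Used by callers that must synchronise against a lock holder but never
 * acquire the lock themselves.
 */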
static inline void arch_spin_unlock_wait(arch_spinlock_t *lock)
{
	while (arch_spin_is_locked(lock))
		cpu_relax();
}
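
/*
 * None of the arch_* hooks above are called directly; generic code in
 * <linux/spinlock.h> wraps them.  A hypothetical user (names made up
 * for illustration) would simply do:
 *
 *	static DEFINE_SPINLOCK(my_lock);
 *	unsigned long flags;
 *
 *	spin_lock_irqsave(&my_lock, flags);
 *	... critical section ...
 *	spin_unlock_irqrestore(&my_lock, flags);
 */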

static inline int arch_read_can_lock(arch_rwlock_t *rw)
{
	return __raw_uncached_fetch_asm(&rw->lock) > 0;
}

static inline int arch_write_can_lock(arch_rwlock_t *rw)
{
	return __raw_uncached_fetch_asm(&rw->lock) == RW_LOCK_BIAS;
}
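
/*
 * The rwlock word uses the classic bias scheme: it starts at
 * RW_LOCK_BIAS, each reader subtracts 1 and a writer subtracts the
 * whole bias.  Hence a value > 0 means another reader could still get
 * in, and a value of exactly RW_LOCK_BIAS means the lock is completely
 * free, so a writer could take it.
 */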

static inline void arch_read_lock(arch_rwlock_t *rw)
{
	__raw_read_lock_asm(&rw->lock);
}

#define arch_read_lock_flags(lock, flags) arch_read_lock(lock)

static inline int arch_read_trylock(arch_rwlock_t *rw)
{
	return __raw_read_trylock_asm(&rw->lock);
}

static inline void arch_read_unlock(arch_rwlock_t *rw)
{
	__raw_read_unlock_asm(&rw->lock);
}
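
/* Writers are exclusive: they wait out all readers and other writers. */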

static inline void arch_write_lock(arch_rwlock_t *rw)
{
	__raw_write_lock_asm(&rw->lock);
}

#define arch_write_lock_flags(lock, flags) arch_write_lock(lock)

static inline int arch_write_trylock(arch_rwlock_t *rw)
{
	return __raw_write_trylock_asm(&rw->lock);
}

static inline void arch_write_unlock(arch_rwlock_t *rw)
{
	__raw_write_unlock_asm(&rw->lock);
}
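
/*
 * As with the spinlock hooks, generic code (<linux/rwlock.h>) provides
 * the public API on top of these.  A hypothetical reader/writer pair
 * (names made up for illustration):
 *
 *	static DEFINE_RWLOCK(my_rwlock);
 *
 *	read_lock(&my_rwlock);
 *	... several CPUs may read concurrently ...
 *	read_unlock(&my_rwlock);
 *
 *	write_lock(&my_rwlock);
 *	... exclusive access ...
 *	write_unlock(&my_rwlock);
 */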

#define arch_spin_relax(lock)	cpu_relax()
#define arch_read_relax(lock)	cpu_relax()
#define arch_write_relax(lock)	cpu_relax()
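
/*
 * The relax hooks are invoked from the generic contended-lock loops;
 * mapping them to cpu_relax() simply throttles the spinning CPU.
 */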

#endif /* CONFIG_SMP */

#endif /* !__BFIN_SPINLOCK_H */