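/*
 * Spinlock and rwlock implementation for SH processors that provide the
 * "cas.l" compare-and-swap instruction (e.g. the J2 core).
 */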
#ifndef __ASM_SH_SPINLOCK_CAS_H
#define __ASM_SH_SPINLOCK_CAS_H

#include <asm/barrier.h>
#include <asm/processor.h>
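
/*
 * Atomically compare the word at *p with 'old' and, if they match, store
 * 'new'.  cas.l requires the memory address in r0, hence the "z" constraint.
 * The previous memory value is returned in 'new', so the swap succeeded if
 * and only if the return value equals 'old'.
 */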
static inline unsigned __sl_cas(volatile unsigned *p, unsigned old, unsigned new)
{
	__asm__ __volatile__("cas.l %1,%0,@r0"
		: "+r"(new)
		: "r"(old), "z"(p)
		: "t", "memory" );
	return new;
}
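
/*
 * Basic SMP spinlocks: the lock word is 1 when the lock is free and 0 when
 * it is held, so the lock is taken by swapping 1 -> 0 and released by
 * swapping 0 -> 1.
 */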
#define arch_spin_is_locked(x) ((x)->lock <= 0)
#define arch_spin_lock_flags(lock, flags) arch_spin_lock(lock)
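
/* Spin with acquire semantics until the lock is observed free (> 0). */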
static inline void arch_spin_unlock_wait(arch_spinlock_t *lock)
{
	smp_cond_load_acquire(&lock->lock, VAL > 0);
}

static inline void arch_spin_lock(arch_spinlock_t *lock)
{
	while (!__sl_cas(&lock->lock, 1, 0));
}

static inline void arch_spin_unlock(arch_spinlock_t *lock)
{
	__sl_cas(&lock->lock, 0, 1);
}
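
/* Returns the previous lock value: 1 if the lock was acquired, 0 if it was already held. */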
static inline int arch_spin_trylock(arch_spinlock_t *lock)
{
	return __sl_cas(&lock->lock, 1, 0);
}
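
/*
 * Read-write spinlocks, allowing multiple readers but only one writer.
 *
 * The lock word starts at RW_LOCK_BIAS.  Each reader decrements it by one
 * and a writer takes it all the way from RW_LOCK_BIAS down to 0, so:
 *
 *   lock > 0              - a reader can still take the lock
 *   lock == RW_LOCK_BIAS  - no readers and no writer hold the lock
 */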
#define arch_read_can_lock(x) ((x)->lock > 0)
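
/* write_trylock() can only succeed when no readers and no writer hold the lock. */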
#define arch_write_can_lock(x) ((x)->lock == RW_LOCK_BIAS)
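
/* Take one reader slot; spin while a writer holds the lock (lock == 0). */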
static inline void arch_read_lock(arch_rwlock_t *rw)
{
	unsigned old;
	do old = rw->lock;
	while (!old || __sl_cas(&rw->lock, old, old-1) != old);
}

static inline void arch_read_unlock(arch_rwlock_t *rw)
{
	unsigned old;
	do old = rw->lock;
	while (__sl_cas(&rw->lock, old, old+1) != old);
}

static inline void arch_write_lock(arch_rwlock_t *rw)
{
	while (__sl_cas(&rw->lock, RW_LOCK_BIAS, 0) != RW_LOCK_BIAS);
}

static inline void arch_write_unlock(arch_rwlock_t *rw)
{
	__sl_cas(&rw->lock, 0, RW_LOCK_BIAS);
}
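
/* Returns 1 if a reader slot was claimed, 0 if a writer held the lock. */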
static inline int arch_read_trylock(arch_rwlock_t *rw)
{
	unsigned old;
	do old = rw->lock;
	while (old && __sl_cas(&rw->lock, old, old-1) != old);
	return !!old;
}
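
/* Returns nonzero only if the lock was completely free, i.e. no readers and no writer. */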
static inline int arch_write_trylock(arch_rwlock_t *rw)
{
	return __sl_cas(&rw->lock, RW_LOCK_BIAS, 0) == RW_LOCK_BIAS;
}

#define arch_read_lock_flags(lock, flags) arch_read_lock(lock)
#define arch_write_lock_flags(lock, flags) arch_write_lock(lock)

#define arch_spin_relax(lock) cpu_relax()
#define arch_read_relax(lock) cpu_relax()
#define arch_write_relax(lock) cpu_relax()

#endif /* __ASM_SH_SPINLOCK_CAS_H */