1
2#ifndef _ASM_GENERIC_BITOPS_ATOMIC_H_
3#define _ASM_GENERIC_BITOPS_ATOMIC_H_
4
5#include <asm/types.h>
6#include <linux/irqflags.h>
7
#ifdef CONFIG_SMP
#include <asm/spinlock.h>
#include <asm/cache.h>		/* we use L1_CACHE_BYTES */

/*
 * Use an array of spinlocks to make the plain-C bitops atomic.
 * Hash function to index into a different spinlock per word.
 * Since the argument is usually an address, use one lock per cacheline.
 */
# define ATOMIC_HASH_SIZE 4
# define ATOMIC_HASH(a) (&(__atomic_hash[ (((unsigned long) a)/L1_CACHE_BYTES) & (ATOMIC_HASH_SIZE-1) ]))

/* The lock array itself is defined in a .c file elsewhere in the tree. */
extern arch_spinlock_t __atomic_hash[ATOMIC_HASH_SIZE] __lock_aligned;

/*
 * Take the hashed lock for address @l with interrupts disabled; @f
 * receives the saved irq flags.  Interrupts must be disabled BEFORE
 * taking the lock so an irq handler on this CPU cannot deadlock
 * against us by trying to take the same hashed lock.
 */
#define _atomic_spin_lock_irqsave(l,f) do { \
 arch_spinlock_t *s = ATOMIC_HASH(l); \
 local_irq_save(f); \
 arch_spin_lock(s); \
} while(0)

/* Release the hashed lock, then restore the irq flags saved in @f. */
#define _atomic_spin_unlock_irqrestore(l,f) do { \
 arch_spinlock_t *s = ATOMIC_HASH(l); \
 arch_spin_unlock(s); \
 local_irq_restore(f); \
} while(0)


#else
/* UP: disabling local interrupts alone makes the RMW sequence atomic. */
# define _atomic_spin_lock_irqsave(l,f) do { local_irq_save(f); } while (0)
# define _atomic_spin_unlock_irqrestore(l,f) do { local_irq_restore(f); } while (0)
#endif
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
/**
 * set_bit - Atomically set a bit in memory
 * @nr: the bit to set
 * @addr: the address to start counting from
 *
 * Atomicity comes from holding the hashed spinlock covering the target
 * word (with local interrupts disabled) across the read-modify-write.
 */
static inline void set_bit(int nr, volatile unsigned long *addr)
{
	unsigned long *word = ((unsigned long *)addr) + BIT_WORD(nr);
	unsigned long flags;

	_atomic_spin_lock_irqsave(word, flags);
	*word |= BIT_MASK(nr);
	_atomic_spin_unlock_irqrestore(word, flags);
}
76
77
78
79
80
81
82
83
84
85
86
/**
 * clear_bit - Atomically clear a bit in memory
 * @nr: the bit to clear
 * @addr: the address to start counting from
 *
 * The read-modify-write is serialized by the hashed spinlock for the
 * target word, taken with local interrupts disabled.
 */
static inline void clear_bit(int nr, volatile unsigned long *addr)
{
	unsigned long *word = ((unsigned long *)addr) + BIT_WORD(nr);
	unsigned long flags;

	_atomic_spin_lock_irqsave(word, flags);
	*word &= ~BIT_MASK(nr);
	_atomic_spin_unlock_irqrestore(word, flags);
}
97
98
99
100
101
102
103
104
105
106
107
/**
 * change_bit - Atomically toggle a bit in memory
 * @nr: the bit to toggle
 * @addr: the address to start counting from
 *
 * The XOR is performed under the hashed spinlock for the target word,
 * with local interrupts disabled, so the update is atomic with respect
 * to the other *_bit() helpers in this file.
 */
static inline void change_bit(int nr, volatile unsigned long *addr)
{
	unsigned long *word = ((unsigned long *)addr) + BIT_WORD(nr);
	unsigned long flags;

	_atomic_spin_lock_irqsave(word, flags);
	*word ^= BIT_MASK(nr);
	_atomic_spin_unlock_irqrestore(word, flags);
}
118
119
120
121
122
123
124
125
126
127
/**
 * test_and_set_bit - Atomically set a bit and return its old value
 * @nr: the bit to set
 * @addr: the address to start counting from
 *
 * Return: 1 if the bit was already set, 0 otherwise.
 *
 * Both the read of the old value and the write of the new one happen
 * under the hashed spinlock, so the pair is a single atomic operation.
 */
static inline int test_and_set_bit(int nr, volatile unsigned long *addr)
{
	unsigned long bit = BIT_MASK(nr);
	unsigned long *word = ((unsigned long *)addr) + BIT_WORD(nr);
	unsigned long prev;
	unsigned long flags;

	_atomic_spin_lock_irqsave(word, flags);
	prev = *word;
	*word = prev | bit;
	_atomic_spin_unlock_irqrestore(word, flags);

	return !!(prev & bit);
}
142
143
144
145
146
147
148
149
150
151
/**
 * test_and_clear_bit - Atomically clear a bit and return its old value
 * @nr: the bit to clear
 * @addr: the address to start counting from
 *
 * Return: 1 if the bit was previously set, 0 otherwise.
 *
 * The old value is sampled and the cleared value stored while holding
 * the hashed spinlock, making the read-modify-write atomic.
 */
static inline int test_and_clear_bit(int nr, volatile unsigned long *addr)
{
	unsigned long bit = BIT_MASK(nr);
	unsigned long *word = ((unsigned long *)addr) + BIT_WORD(nr);
	unsigned long prev;
	unsigned long flags;

	_atomic_spin_lock_irqsave(word, flags);
	prev = *word;
	*word = prev & ~bit;
	_atomic_spin_unlock_irqrestore(word, flags);

	return !!(prev & bit);
}
166
167
168
169
170
171
172
173
174
/**
 * test_and_change_bit - Atomically toggle a bit and return its old value
 * @nr: the bit to toggle
 * @addr: the address to start counting from
 *
 * Return: 1 if the bit was set before the toggle, 0 otherwise.
 *
 * The sample-then-XOR sequence runs entirely under the hashed spinlock,
 * so no concurrent *_bit() caller can observe a partial update.
 */
static inline int test_and_change_bit(int nr, volatile unsigned long *addr)
{
	unsigned long bit = BIT_MASK(nr);
	unsigned long *word = ((unsigned long *)addr) + BIT_WORD(nr);
	unsigned long prev;
	unsigned long flags;

	_atomic_spin_lock_irqsave(word, flags);
	prev = *word;
	*word = prev ^ bit;
	_atomic_spin_unlock_irqrestore(word, flags);

	return !!(prev & bit);
}
189
190#endif
191