1#ifndef _ASM_M32R_ATOMIC_H
2#define _ASM_M32R_ATOMIC_H
3
4
5
6
7
8
9
10
11
12#include <linux/types.h>
13#include <asm/assembler.h>
14#include <asm/cmpxchg.h>
15#include <asm/dcache_clear.h>
16#include <asm/barrier.h>
17
18
19
20
21
22
/* Static initializer for an atomic_t, e.g.: atomic_t a = ATOMIC_INIT(0); */
#define ATOMIC_INIT(i) { (i) }
24
25
26
27
28
29
30
/**
 * atomic_read - read atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically reads the value of @v.
 *
 * READ_ONCE() replaces the deprecated ACCESS_ONCE(); it prevents the
 * compiler from caching, refetching or tearing the load.
 */
#define atomic_read(v) READ_ONCE((v)->counter)
32
33
34
35
36
37
38
39
/**
 * atomic_set - set atomic variable
 * @v: pointer of type atomic_t
 * @i: required value
 *
 * Atomically sets the value of @v to @i.
 *
 * WRITE_ONCE() prevents the compiler from tearing or reordering the
 * plain store, mirroring the guarantee the read side needs.
 */
#define atomic_set(v,i) WRITE_ONCE(((v)->counter), (i))
41
/*
 * On the M32700 TS1 chip the DCACHE_CLEAR("%0", "r4", "%1") workaround
 * used in the asm sequences below scratches r4, so r4 must be added to
 * each asm clobber list.  On other chips DCACHE_CLEAR is presumably a
 * no-op and no extra clobber is needed -- see <asm/dcache_clear.h>.
 */
#ifdef CONFIG_CHIP_M32700_TS1
#define __ATOMIC_CLOBBER , "r4"
#else
#define __ATOMIC_CLOBBER
#endif
47
/*
 * ATOMIC_OP(op) - generate "static void atomic_<op>(int i, atomic_t *v)"
 *
 * Atomically performs v->counter = v->counter <op> i, returning nothing.
 * Atomicity comes from disabling local interrupts around an
 * M32R_LOCK / M32R_UNLOCK load-modify-store of v->counter (both from
 * <asm/assembler.h>; presumably a locked-load/unlocked-store pair --
 * confirm there).  __ATOMIC_CLOBBER adds r4 when DCACHE_CLEAR needs it.
 */
#define ATOMIC_OP(op) \
static __inline__ void atomic_##op(int i, atomic_t *v) \
{ \
	unsigned long flags; \
	int result; \
 \
	local_irq_save(flags); \
	__asm__ __volatile__ ( \
		"# atomic_" #op " \n\t" \
		DCACHE_CLEAR("%0", "r4", "%1") \
		M32R_LOCK" %0, @%1; \n\t" \
		#op " %0, %2; \n\t" \
		M32R_UNLOCK" %0, @%1; \n\t" \
		: "=&r" (result) \
		: "r" (&v->counter), "r" (i) \
		: "memory" \
		__ATOMIC_CLOBBER \
	); \
	local_irq_restore(flags); \
} \
68
/*
 * ATOMIC_OP_RETURN(op) - generate "static int atomic_<op>_return(int i,
 * atomic_t *v)"
 *
 * Same irq-off + M32R_LOCK/M32R_UNLOCK sequence as ATOMIC_OP, but the
 * new value of v->counter is also returned to the caller.
 */
#define ATOMIC_OP_RETURN(op) \
static __inline__ int atomic_##op##_return(int i, atomic_t *v) \
{ \
	unsigned long flags; \
	int result; \
 \
	local_irq_save(flags); \
	__asm__ __volatile__ ( \
		"# atomic_" #op "_return \n\t" \
		DCACHE_CLEAR("%0", "r4", "%1") \
		M32R_LOCK" %0, @%1; \n\t" \
		#op " %0, %2; \n\t" \
		M32R_UNLOCK" %0, @%1; \n\t" \
		: "=&r" (result) \
		: "r" (&v->counter), "r" (i) \
		: "memory" \
		__ATOMIC_CLOBBER \
	); \
	local_irq_restore(flags); \
 \
	return result; \
}
91
/*
 * Instantiate the operations: add/sub get both the void and the
 * value-returning variants; the bitwise ops (and/or/xor) get only the
 * void variant.  The generator macros are undefined afterwards so they
 * do not leak out of this header.
 */
#define ATOMIC_OPS(op) ATOMIC_OP(op) ATOMIC_OP_RETURN(op)

ATOMIC_OPS(add)
ATOMIC_OPS(sub)

ATOMIC_OP(and)
ATOMIC_OP(or)
ATOMIC_OP(xor)

#undef ATOMIC_OPS
#undef ATOMIC_OP_RETURN
#undef ATOMIC_OP
104
105
106
107
108
109
110
111
112
113
/**
 * atomic_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v and returns true if the result is
 * zero, or false for all other cases.
 */
#define atomic_sub_and_test(i,v) (atomic_sub_return((i), (v)) == 0)
115
116
117
118
119
120
121
/**
 * atomic_inc_return - increment atomic variable and return result
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1 and returns the new value.
 *
 * Atomicity is provided by disabling local interrupts around the
 * M32R_LOCK / addi / M32R_UNLOCK read-modify-write of v->counter;
 * __ATOMIC_CLOBBER adds r4 when the DCACHE_CLEAR workaround uses it.
 */
static __inline__ int atomic_inc_return(atomic_t *v)
{
	unsigned long flags;
	int result;

	local_irq_save(flags);
	__asm__ __volatile__ (
		"# atomic_inc_return \n\t"
		DCACHE_CLEAR("%0", "r4", "%1")
		M32R_LOCK" %0, @%1; \n\t"
		"addi %0, #1; \n\t"
		M32R_UNLOCK" %0, @%1; \n\t"
		: "=&r" (result)
		: "r" (&v->counter)
		: "memory"
		__ATOMIC_CLOBBER
	);
	local_irq_restore(flags);

	return result;
}
143
144
145
146
147
148
149
/**
 * atomic_dec_return - decrement atomic variable and return result
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1 and returns the new value.
 *
 * Same irq-off locked sequence as atomic_inc_return(); the decrement
 * is expressed as "addi #-1" since that is the available immediate add.
 */
static __inline__ int atomic_dec_return(atomic_t *v)
{
	unsigned long flags;
	int result;

	local_irq_save(flags);
	__asm__ __volatile__ (
		"# atomic_dec_return \n\t"
		DCACHE_CLEAR("%0", "r4", "%1")
		M32R_LOCK" %0, @%1; \n\t"
		"addi %0, #-1; \n\t"
		M32R_UNLOCK" %0, @%1; \n\t"
		: "=&r" (result)
		: "r" (&v->counter)
		: "memory"
		__ATOMIC_CLOBBER
	);
	local_irq_restore(flags);

	return result;
}
171
172
173
174
175
176
177
/**
 * atomic_inc - increment atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1; the new value is discarded.
 */
#define atomic_inc(v) ((void)atomic_inc_return(v))
179
180
181
182
183
184
185
/**
 * atomic_dec - decrement atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1; the new value is discarded.
 */
#define atomic_dec(v) ((void)atomic_dec_return(v))
187
188
189
190
191
192
193
194
195
/**
 * atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1 and returns true if the result is
 * zero, or false for all other cases.
 */
#define atomic_inc_and_test(v) (atomic_inc_return(v) == 0)
197
198
199
200
201
202
203
204
205
/**
 * atomic_dec_and_test - decrement and test
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1 and returns true if the result is
 * zero, or false for all other cases.
 */
#define atomic_dec_and_test(v) (atomic_dec_return(v) == 0)
207
208
209
210
211
212
213
214
215
216
/**
 * atomic_add_negative - add and test if negative
 * @i: integer value to add
 * @v: pointer of type atomic_t
 *
 * Atomically adds @i to @v and returns true if the result is
 * negative, or false when the result is greater than or equal to zero.
 */
#define atomic_add_negative(i,v) (atomic_add_return((i), (v)) < 0)
218
/* Compare-and-exchange / exchange on v->counter, built on the generic
 * cmpxchg()/xchg() from <asm/cmpxchg.h>.  atomic_cmpxchg() returns the
 * value that was in @v before the (possibly failed) swap. */
#define atomic_cmpxchg(v, o, n) ((int)cmpxchg(&((v)->counter), (o), (n)))
#define atomic_xchg(v, new) (xchg(&((v)->counter), new))
221
222
223
224
225
226
227
228
229
230
231static __inline__ int __atomic_add_unless(atomic_t *v, int a, int u)
232{
233 int c, old;
234 c = atomic_read(v);
235 for (;;) {
236 if (unlikely(c == (u)))
237 break;
238 old = atomic_cmpxchg((v), c, c + (a));
239 if (likely(old == c))
240 break;
241 c = old;
242 }
243 return c;
244}
245
246#endif
247