1
2
3
4
5
6
7
8#ifndef __ASM_CMPXCHG_H
9#define __ASM_CMPXCHG_H
10
11#include <linux/bug.h>
12#include <linux/irqflags.h>
13#include <asm/compiler.h>
14#include <asm/war.h>
15
16
17
18
19
20
/*
 * Using a branch-likely instruction to check the result of an sc instruction
 * works around a bug present in R10000 CPUs prior to revision 3.0 that could
 * cause ll-sc sequences to execute non-atomically.
 */
#if R10000_LLSC_WAR
# define __scbeqz "beqzl"
#else
# define __scbeqz "beqz"
#endif
26
27
28
29
30
31
32
33
34
35
36
/*
 * These functions doesn't exist, so if they are called you'll either:
 *
 * - Get an error at compile-time due to __compiletime_error, if supported by
 *   your compiler.
 *
 * or:
 *
 * - Get an error at link-time due to the call to the missing function.
 */
extern unsigned long __cmpxchg_called_with_bad_pointer(void)
	__compiletime_error("Bad argument size for cmpxchg");
extern unsigned long __cmpxchg64_unsupported(void)
	__compiletime_error("cmpxchg64 not available; cpu_has_64bits may be false");
extern unsigned long __xchg_called_with_bad_pointer(void)
	__compiletime_error("Bad argument size for xchg");
43
/*
 * __xchg_asm - atomically exchange a naturally aligned 32 or 64 bit value.
 * @ld:  load-linked instruction to emit ("ll" or "lld")
 * @st:  store-conditional instruction to emit ("sc" or "scd")
 * @m:   pointer to the memory word to modify
 * @val: new value to store; the expression evaluates to the old value of *m
 *
 * With ll/sc (kernel_uses_llsc) the sequence retries via __scbeqz until the
 * store-conditional succeeds; $1 ($at) is used as the scratch register, hence
 * ".set noat". Without ll/sc, atomicity is provided by disabling interrupts
 * around a plain load & store — NOTE(review): presumably only non-SMP CPUs
 * take this fallback path; confirm against the platforms that select it.
 */
#define __xchg_asm(ld, st, m, val)					\
({									\
	__typeof(*(m)) __ret;						\
									\
	if (kernel_uses_llsc) {						\
		__asm__ __volatile__(					\
		"	.set	push				\n"	\
		"	.set	noat				\n"	\
		"	.set	push				\n"	\
		"	.set	" MIPS_ISA_ARCH_LEVEL "		\n"	\
		"1:	" ld "	%0, %2		# __xchg_asm	\n"	\
		"	.set	pop				\n"	\
		"	move	$1, %z3				\n"	\
		"	.set	" MIPS_ISA_ARCH_LEVEL "		\n"	\
		"	" st "	$1, %1				\n"	\
		"\t" __scbeqz "	$1, 1b				\n"	\
		"	.set	pop				\n"	\
		: "=&r" (__ret), "=" GCC_OFF_SMALL_ASM() (*m)		\
		: GCC_OFF_SMALL_ASM() (*m), "Jr" (val)			\
		: "memory");						\
	} else {							\
		unsigned long __flags;					\
									\
		raw_local_irq_save(__flags);				\
		__ret = *m;						\
		*m = val;						\
		raw_local_irq_restore(__flags);				\
	}								\
									\
	__ret;								\
})
75
/* Out-of-line emulation of 1 & 2 byte xchg() (implemented elsewhere). */
extern unsigned long __xchg_small(volatile void *ptr, unsigned long val,
				  unsigned int size);
78
79static inline unsigned long __xchg(volatile void *ptr, unsigned long x,
80 int size)
81{
82 switch (size) {
83 case 1:
84 case 2:
85 return __xchg_small(ptr, x, size);
86
87 case 4:
88 return __xchg_asm("ll", "sc", (volatile u32 *)ptr, x);
89
90 case 8:
91 if (!IS_ENABLED(CONFIG_64BIT))
92 return __xchg_called_with_bad_pointer();
93
94 return __xchg_asm("lld", "scd", (volatile u64 *)ptr, x);
95
96 default:
97 return __xchg_called_with_bad_pointer();
98 }
99}
100
/*
 * xchg - atomically exchange *ptr with x, evaluating to the previous value.
 * Fully ordered: smp_mb__before_llsc()/smp_llsc_mb() fence either side of
 * the exchange. Size dispatch is handled by __xchg().
 */
#define xchg(ptr, x)							\
({									\
	__typeof__(*(ptr)) __res;					\
									\
	smp_mb__before_llsc();						\
									\
	__res = (__typeof__(*(ptr)))					\
		__xchg((ptr), (unsigned long)(x), sizeof(*(ptr)));	\
									\
	smp_llsc_mb();							\
									\
	__res;								\
})
114
/*
 * __cmpxchg_asm - atomically compare & exchange a 32 or 64 bit value.
 * @ld:  load-linked instruction to emit ("ll" or "lld")
 * @st:  store-conditional instruction to emit ("sc" or "scd")
 * @m:   pointer to the memory word to modify
 * @old: value *m must currently hold for the store to take place
 * @new: value to store when *m == @old
 *
 * Evaluates to the value loaded from *m; it equals @old iff the exchange
 * happened. With ll/sc the store-conditional is retried via __scbeqz until
 * it succeeds or the comparison fails (branch to 2:). Without ll/sc the
 * read-compare-write runs with interrupts disabled — NOTE(review):
 * presumably only non-SMP CPUs take this fallback path.
 */
#define __cmpxchg_asm(ld, st, m, old, new)				\
({									\
	__typeof(*(m)) __ret;						\
									\
	if (kernel_uses_llsc) {						\
		__asm__ __volatile__(					\
		"	.set	push				\n"	\
		"	.set	noat				\n"	\
		"	.set	push				\n"	\
		"	.set	"MIPS_ISA_ARCH_LEVEL"		\n"	\
		"1:	" ld "	%0, %2		# __cmpxchg_asm \n"	\
		"	bne	%0, %z3, 2f			\n"	\
		"	.set	pop				\n"	\
		"	move	$1, %z4				\n"	\
		"	.set	"MIPS_ISA_ARCH_LEVEL"		\n"	\
		"	" st "	$1, %1				\n"	\
		"\t" __scbeqz "	$1, 1b				\n"	\
		"	.set	pop				\n"	\
		"2:						\n"	\
		: "=&r" (__ret), "=" GCC_OFF_SMALL_ASM() (*m)		\
		: GCC_OFF_SMALL_ASM() (*m), "Jr" (old), "Jr" (new)	\
		: "memory");						\
	} else {							\
		unsigned long __flags;					\
									\
		raw_local_irq_save(__flags);				\
		__ret = *m;						\
		if (__ret == old)					\
			*m = new;					\
		raw_local_irq_restore(__flags);				\
	}								\
									\
	__ret;								\
})
149
/* Out-of-line emulation of 1 & 2 byte cmpxchg() (implemented elsewhere). */
extern unsigned long __cmpxchg_small(volatile void *ptr, unsigned long old,
				     unsigned long new, unsigned int size);
152
153static inline unsigned long __cmpxchg(volatile void *ptr, unsigned long old,
154 unsigned long new, unsigned int size)
155{
156 switch (size) {
157 case 1:
158 case 2:
159 return __cmpxchg_small(ptr, old, new, size);
160
161 case 4:
162 return __cmpxchg_asm("ll", "sc", (volatile u32 *)ptr,
163 (u32)old, new);
164
165 case 8:
166
167 if (!IS_ENABLED(CONFIG_64BIT))
168 return __cmpxchg_called_with_bad_pointer();
169
170 return __cmpxchg_asm("lld", "scd", (volatile u64 *)ptr,
171 (u64)old, new);
172
173 default:
174 return __cmpxchg_called_with_bad_pointer();
175 }
176}
177
/*
 * cmpxchg_local - cmpxchg without memory barriers. NOTE(review): per the
 * usual kernel convention this is only required to be atomic w.r.t. the
 * local CPU — confirm callers rely on nothing stronger.
 */
#define cmpxchg_local(ptr, old, new)					\
	((__typeof__(*(ptr)))						\
		__cmpxchg((ptr),					\
			  (unsigned long)(__typeof__(*(ptr)))(old),	\
			  (unsigned long)(__typeof__(*(ptr)))(new),	\
			  sizeof(*(ptr))))

/*
 * cmpxchg - if *ptr equals old, store new; evaluates to the value that was
 * read from *ptr. Fully ordered via the barriers placed either side.
 */
#define cmpxchg(ptr, old, new)						\
({									\
	__typeof__(*(ptr)) __res;					\
									\
	smp_mb__before_llsc();						\
	__res = cmpxchg_local((ptr), (old), (new));			\
	smp_llsc_mb();							\
									\
	__res;								\
})
195
#ifdef CONFIG_64BIT
/*
 * 64-bit kernels: a 64 bit cmpxchg is just the regular cmpxchg; merely
 * enforce at build time that the operand really is 8 bytes wide.
 */
#define cmpxchg64_local(ptr, o, n)					\
  ({									\
	BUILD_BUG_ON(sizeof(*(ptr)) != 8);				\
	cmpxchg_local((ptr), (o), (n));					\
  })

#define cmpxchg64(ptr, o, n)						\
  ({									\
	BUILD_BUG_ON(sizeof(*(ptr)) != 8);				\
	cmpxchg((ptr), (o), (n));					\
  })
#else

/* 32-bit kernels: the local variant comes from the generic implementation. */
# include <asm-generic/cmpxchg-local.h>
# define cmpxchg64_local(ptr, o, n) __cmpxchg64_local_generic((ptr), (o), (n))

# ifdef CONFIG_SMP
214
/*
 * __cmpxchg64() - 64 bit cmpxchg on a 32-bit kernel, using lld/scd on CPUs
 * whose registers are 64 bits wide even though the kernel ABI is 32 bit.
 * @old/@new arrive as 32 bit register pairs (%M = most significant half,
 * %L = least significant half in the asm below).
 */
static inline unsigned long __cmpxchg64(volatile void *ptr,
					unsigned long long old,
					unsigned long long new)
{
	unsigned long long tmp, ret;
	unsigned long flags;

	/*
	 * The assembly below has to combine 32 bit values in a 64 bit
	 * register, and split 64 bit values from one register into two. If we
	 * were to take an interrupt in the middle of this we'd only save the
	 * least significant 32 bits of each register & probably clobber the
	 * most significant 32 bits of the 64 bit values we're using. In order
	 * to avoid this we must disable interrupts.
	 */
	local_irq_save(flags);

	asm volatile(
	"	.set	push				\n"
	"	.set	" MIPS_ISA_ARCH_LEVEL "		\n"

	/* Load 64 bits from ptr */
	"1:	lld	%L0, %3		# __cmpxchg64	\n"

	/*
	 * Split the 64 bit value we loaded into the 2 registers that hold the
	 * ret variable.
	 */
	"	dsra	%M0, %L0, 32			\n"
	"	sll	%L0, %L0, 0			\n"

	/*
	 * Compare ret against the old value, breaking out of the loop if they
	 * don't match.
	 */
	"	bne	%M0, %M4, 2f			\n"
	"	bne	%L0, %L4, 2f			\n"

	/*
	 * Combine the 32 bit halves from the 2 registers that hold the new
	 * variable into a single 64 bit register.
	 */
# if MIPS_ISA_REV >= 2
	"	move	%L1, %L5			\n"
	"	dins	%L1, %M5, 32, 32		\n"
# else
	"	dsll	%L1, %L5, 32			\n"
	"	dsrl	%L1, %L1, 32			\n"
	"	.set	noat				\n"
	"	dsll	$at, %M5, 32			\n"
	"	or	%L1, %L1, $at			\n"
	"	.set	at				\n"
# endif

	/* Attempt to store new at ptr */
	"	scd	%L1, %2				\n"

	/* If we failed, loop! */
	"\t" __scbeqz "	%L1, 1b				\n"
	"	.set	pop				\n"
	"2:						\n"
	: "=&r"(ret),
	  "=&r"(tmp),
	  "=" GCC_OFF_SMALL_ASM() (*(unsigned long long *)ptr)
	: GCC_OFF_SMALL_ASM() (*(unsigned long long *)ptr),
	  "r" (old),
	  "r" (new)
	: "memory");

	local_irq_restore(flags);
	return ret;
}
281
/*
 * cmpxchg64 (32-bit SMP kernels): delegate to __cmpxchg64() when the CPU
 * has 64-bit registers; otherwise fail the build via the
 * __cmpxchg64_unsupported() compile-time error.
 *
 * Fix: the blank lines previously sitting in the middle of this #define
 * (where a comment had been stripped) carried no trailing backslash, which
 * terminates macro continuation and truncated the definition mid-body,
 * leaving the remaining tokens as stray top-level code. Restore the
 * continuation as backslash-continued comment lines.
 */
# define cmpxchg64(ptr, o, n) ({					\
	unsigned long long __old = (__typeof__(*(ptr)))(o);		\
	unsigned long long __new = (__typeof__(*(ptr)))(n);		\
	__typeof__(*(ptr)) __res;					\
									\
	/*								\
	 * We can only use cmpxchg64 if we know that the CPU supports	\
	 * 64-bits, ie. lld & scd. Our call to __cmpxchg64_unsupported	\
	 * will cause a build error unless cpu_has_64bits is a		\
	 * compile-time constant 1.					\
	 */								\
	if (cpu_has_64bits && kernel_uses_llsc)				\
		__res = __cmpxchg64((ptr), __old, __new);		\
	else								\
		__res = __cmpxchg64_unsupported();			\
									\
	__res;								\
})
300
# else /* !CONFIG_SMP */
/* UP: no other CPU can race, so the irq-safe local variant suffices. */
# define cmpxchg64(ptr, o, n) cmpxchg64_local((ptr), (o), (n))
# endif /* CONFIG_SMP */
#endif /* CONFIG_64BIT */

#undef __scbeqz

#endif /* __ASM_CMPXCHG_H */
309