1
2
3#ifndef __ASM_CSKY_CMPXCHG_H
4#define __ASM_CSKY_CMPXCHG_H
5
6#ifdef CONFIG_SMP
7#include <asm/barrier.h>
8
9extern void __bad_xchg(void);
10
/*
 * __xchg_relaxed(new, ptr, size) - atomically swap @new into *@ptr and
 * return the value *@ptr held before the swap.  No memory-ordering
 * guarantees (relaxed); ordered variants add fences around this
 * (see arch_cmpxchg() below for the pattern).
 *
 * Built on the C-SKY load-exclusive/store-exclusive pair: stex.w only
 * succeeds if nothing else wrote the location since the matching
 * ldex.w, and it leaves 0 in its source register on failure, so
 * "bez %1, 1b" loops until the swap takes effect.  tmp must be
 * refreshed from __new on every iteration because stex.w overwrites it
 * with the success flag.
 *
 * Only 4-byte objects are supported: any other size calls the
 * deliberately-undefined __bad_xchg(), turning misuse into a link-time
 * error.
 *
 * NOTE(review): no "memory" clobber on the asm — presumably acceptable
 * for a _relaxed primitive, but worth confirming the compiler cannot
 * cache *ptr across the swap.
 */
#define __xchg_relaxed(new, ptr, size)				\
({								\
	__typeof__(ptr) __ptr = (ptr);				\
	__typeof__(new) __new = (new);				\
	__typeof__(*(ptr)) __ret;				\
	unsigned long tmp;					\
	switch (size) {						\
	case 4:							\
		asm volatile (					\
		"1: ldex.w %0, (%3) \n" /* __ret = *__ptr (exclusive) */ \
		" mov %1, %2 \n"        /* tmp = __new */		\
		" stex.w %1, (%3) \n"   /* try store; tmp = success */	\
		" bez %1, 1b \n"        /* retry if store failed */	\
		: "=&r" (__ret), "=&r" (tmp)			\
		: "r" (__new), "r"(__ptr)			\
		:);						\
		break;						\
	default:						\
		__bad_xchg();					\
	}							\
	__ret;							\
})
33
/* Relaxed xchg(): swap @x into *@ptr, returning the prior value. */
#define arch_xchg_relaxed(ptr, x) (__xchg_relaxed((x), (ptr), sizeof(*(ptr))))
36
/*
 * __cmpxchg_relaxed(ptr, old, new, size) - atomically: if *@ptr equals
 * @old, store @new into it; either way, return the value *@ptr held
 * when it was read.  No memory-ordering guarantees (relaxed).
 *
 * cmpne sets the condition flag when the exclusively-loaded value
 * differs from __old; bt then bails out to label 2 without attempting
 * the store, so callers detect failure by comparing the return value
 * against @old.  Otherwise the exclusive store is tried and — if it
 * was broken by a concurrent writer ("bez %1, 1b") — the whole
 * load/compare/store sequence is retried.
 *
 * Only 4-byte objects are supported: any other size calls the
 * deliberately-undefined __bad_xchg(), turning misuse into a link-time
 * error.
 */
#define __cmpxchg_relaxed(ptr, old, new, size)			\
({								\
	__typeof__(ptr) __ptr = (ptr);				\
	__typeof__(new) __new = (new);				\
	__typeof__(new) __tmp;					\
	__typeof__(old) __old = (old);				\
	__typeof__(*(ptr)) __ret;				\
	switch (size) {						\
	case 4:							\
		asm volatile (					\
		"1: ldex.w %0, (%3) \n" /* __ret = *__ptr (exclusive) */ \
		" cmpne %0, %4 \n"      /* flag = (__ret != __old) */	\
		" bt 2f \n"             /* mismatch: done, no store */	\
		" mov %1, %2 \n"        /* __tmp = __new */		\
		" stex.w %1, (%3) \n"   /* try store; __tmp = success */ \
		" bez %1, 1b \n"        /* retry if store failed */	\
		"2: \n"						\
		: "=&r" (__ret), "=&r" (__tmp)			\
		: "r" (__new), "r"(__ptr), "r"(__old)		\
		:);						\
		break;						\
	default:						\
		__bad_xchg();					\
	}							\
	__ret;							\
})
63
/* Relaxed cmpxchg(): *@ptr = @n iff *@ptr == @o; returns the prior value. */
#define arch_cmpxchg_relaxed(ptr, o, n) (__cmpxchg_relaxed((ptr), (o), (n), sizeof(*(ptr))))
66
/*
 * Fully-ordered cmpxchg(): bracket the relaxed compare-and-exchange
 * with a release fence before and an acquire fence after, giving the
 * operation full-barrier semantics.  Returns the prior value of *@ptr.
 */
#define arch_cmpxchg(ptr, o, n)					\
({								\
	__typeof__(*(ptr)) __val;				\
	__smp_release_fence();					\
	__val = arch_cmpxchg_relaxed(ptr, o, n);		\
	__smp_acquire_fence();					\
	__val;							\
})
75
76#else
77#include <asm-generic/cmpxchg.h>
78#endif
79
80#endif
81