#ifndef _ASM_X86_CMPXCHG_32_H
#define _ASM_X86_CMPXCHG_32_H
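
/*
 * CMPXCHG8B only stores ECX:EBX to the destination when EDX:EAX matches
 * the current 64-bit contents; on a mismatch it instead loads the current
 * contents into EDX:EAX.  set_64bit() below relies on this: it seeds
 * EDX:EAX with the value read from *ptr and keeps retrying, so each
 * failed attempt refreshes the comparison value and the store eventually
 * succeeds, giving an atomic 64-bit write on 32-bit x86.
 */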
static inline void set_64bit(volatile u64 *ptr, u64 value)
{
	u32 low  = value;
	u32 high = value >> 32;
	u64 prev = *ptr;

	asm volatile("\n1:\t"
		     LOCK_PREFIX "cmpxchg8b %0\n\t"
		     "jnz 1b"
		     : "=m" (*ptr), "+A" (prev)
		     : "b" (low), "c" (high)
		     : "memory");
}
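
/*
 * Illustrative use (hypothetical variable): publish a 64-bit value so
 * that concurrent readers never observe a torn, half-updated word:
 *
 *	static u64 last_update_ns;
 *	...
 *	set_64bit(&last_update_ns, now_ns);
 */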

#ifdef CONFIG_X86_CMPXCHG
#define __HAVE_ARCH_CMPXCHG 1
#endif

#ifdef CONFIG_X86_CMPXCHG64
#define cmpxchg64(ptr, o, n)						\
	((__typeof__(*(ptr)))__cmpxchg64((ptr), (unsigned long long)(o), \
					 (unsigned long long)(n)))
#define cmpxchg64_local(ptr, o, n)					\
	((__typeof__(*(ptr)))__cmpxchg64_local((ptr), (unsigned long long)(o), \
					       (unsigned long long)(n)))
#endif
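
/*
 * Illustrative use (hypothetical variable): lock-free update of a 64-bit
 * counter; the loop retries whenever another CPU changed the value
 * between the read and the cmpxchg64():
 *
 *	u64 old, new;
 *	do {
 *		old = counter;
 *		new = old + 1;
 *	} while (cmpxchg64(&counter, old, new) != old);
 */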

static inline u64 __cmpxchg64(volatile u64 *ptr, u64 old, u64 new)
{
	u64 prev;
	asm volatile(LOCK_PREFIX "cmpxchg8b %1"
		     : "=A" (prev),
		       "+m" (*ptr)
		     : "b" ((u32)new),
		       "c" ((u32)(new >> 32)),
		       "0" (old)
		     : "memory");
	return prev;
}

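/*
 * "local" variant: identical to __cmpxchg64() but without the lock
 * prefix, so it is atomic only with respect to the current CPU and is
 * only suitable for data that other CPUs do not update concurrently
 * (e.g. per-CPU data).
 */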
static inline u64 __cmpxchg64_local(volatile u64 *ptr, u64 old, u64 new)
{
	u64 prev;
	asm volatile("cmpxchg8b %1"
		     : "=A" (prev),
		       "+m" (*ptr)
		     : "b" ((u32)new),
		       "c" ((u32)(new >> 32)),
		       "0" (old)
		     : "memory");
	return prev;
}

#ifndef CONFIG_X86_CMPXCHG
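/*
 * Kernels that may run on a 386 (CONFIG_X86_CMPXCHG unset) cannot assume
 * the cmpxchg instruction exists at run time, so the out-of-line helpers
 * below provide a software fallback for each supported operand size.
 */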
extern unsigned long cmpxchg_386_u8(volatile void *, u8, u8);
extern unsigned long cmpxchg_386_u16(volatile void *, u16, u16);
extern unsigned long cmpxchg_386_u32(volatile void *, u32, u32);

static inline unsigned long cmpxchg_386(volatile void *ptr, unsigned long old,
					unsigned long new, int size)
{
	switch (size) {
	case 1:
		return cmpxchg_386_u8(ptr, old, new);
	case 2:
		return cmpxchg_386_u16(ptr, old, new);
	case 4:
		return cmpxchg_386_u32(ptr, old, new);
	}
	return old;
}

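/*
 * Run-time dispatch: CPU family 4 (486) and later implement cmpxchg in
 * hardware, so boot_cpu_data.x86 > 3 selects the inline __cmpxchg()
 * path; a genuine 386 falls back to the out-of-line emulation above.
 */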
#define cmpxchg(ptr, o, n)						\
({									\
	__typeof__(*(ptr)) __ret;					\
	if (likely(boot_cpu_data.x86 > 3))				\
		__ret = (__typeof__(*(ptr)))__cmpxchg((ptr),		\
				(unsigned long)(o), (unsigned long)(n),	\
				sizeof(*(ptr)));			\
	else								\
		__ret = (__typeof__(*(ptr)))cmpxchg_386((ptr),		\
				(unsigned long)(o), (unsigned long)(n),	\
				sizeof(*(ptr)));			\
	__ret;								\
})
#define cmpxchg_local(ptr, o, n)					\
({									\
	__typeof__(*(ptr)) __ret;					\
	if (likely(boot_cpu_data.x86 > 3))				\
		__ret = (__typeof__(*(ptr)))__cmpxchg_local((ptr),	\
				(unsigned long)(o), (unsigned long)(n),	\
				sizeof(*(ptr)));			\
	else								\
		__ret = (__typeof__(*(ptr)))cmpxchg_386((ptr),		\
				(unsigned long)(o), (unsigned long)(n),	\
				sizeof(*(ptr)));			\
	__ret;								\
})
#endif

#ifndef CONFIG_X86_CMPXCHG64
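/*
 * Kernels that may also run on a 386 or 486 (CONFIG_X86_CMPXCHG64 unset)
 * cannot assume the cmpxchg8b instruction (CX8) exists.  The macros below
 * start out as a call to cmpxchg8b_emu, a software implementation of the
 * instruction's semantics; on CPUs that advertise X86_FEATURE_CX8 the
 * alternatives patching replaces the call with the native cmpxchg8b at
 * boot.  The pointer is passed in %esi ("S" constraint) because that is
 * what both cmpxchg8b_emu and the patched-in instruction expect.
 */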
#define cmpxchg64(ptr, o, n)					\
({								\
	__typeof__(*(ptr)) __ret;				\
	__typeof__(*(ptr)) __old = (o);				\
	__typeof__(*(ptr)) __new = (n);				\
	alternative_io(LOCK_PREFIX_HERE				\
			"call cmpxchg8b_emu",			\
			"lock; cmpxchg8b (%%esi)" ,		\
		       X86_FEATURE_CX8,				\
		       "=A" (__ret),				\
		       "S" ((ptr)), "0" (__old),		\
		       "b" ((unsigned int)__new),		\
		       "c" ((unsigned int)(__new>>32))		\
		       : "memory");				\
	__ret; })
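
/*
 * Same patching scheme as cmpxchg64() above, but the native replacement
 * omits the lock prefix: the "local" variant only has to be atomic with
 * respect to the current CPU.
 */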
#define cmpxchg64_local(ptr, o, n)				\
({								\
	__typeof__(*(ptr)) __ret;				\
	__typeof__(*(ptr)) __old = (o);				\
	__typeof__(*(ptr)) __new = (n);				\
	alternative_io("call cmpxchg8b_emu",			\
		       "cmpxchg8b (%%esi)" ,			\
		       X86_FEATURE_CX8,				\
		       "=A" (__ret),				\
		       "S" ((ptr)), "0" (__old),		\
		       "b" ((unsigned int)__new),		\
		       "c" ((unsigned int)(__new>>32))		\
		       : "memory");				\
	__ret; })

#endif
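
/*
 * cmpxchg_double() on 32-bit x86 operates on two adjacent 32-bit words,
 * i.e. it is implemented with cmpxchg8b, so its availability is simply
 * the CX8 feature bit.
 */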
#define system_has_cmpxchg_double()	cpu_has_cx8

#endif /* _ASM_X86_CMPXCHG_32_H */