// SPDX-License-Identifier: GPL-2.0
/*
 * User address space access functions.
 * The non-inlined parts of asm-i386/uaccess.h are here.
 *
 * Copyright 1997 Andi Kleen <ak@muc.de>
 * Copyright 1997 Linus Torvalds
 */
#include <linux/export.h>
#include <linux/uaccess.h>
#include <asm/mmx.h>
#include <asm/asm.h>

#ifdef CONFIG_X86_INTEL_USERCOPY
/*
 * Alignment at which movsl is preferred for bulk memory copies.
 */
struct movsl_mask movsl_mask __read_mostly;
#endif

static inline int __movsl_is_ok(unsigned long a1, unsigned long a2, unsigned long n)
{
#ifdef CONFIG_X86_INTEL_USERCOPY
	if (n >= 64 && ((a1 ^ a2) & movsl_mask.mask))
		return 0;
#endif
	return 1;
}
#define movsl_is_ok(a1, a2, n) \
	__movsl_is_ok((unsigned long)(a1), (unsigned long)(a2), (n))
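
/*
 * Note on the check above: rep/movsl only wins when source and
 * destination are mutually aligned.  (a1 ^ a2) has its low bits clear
 * exactly when the two addresses agree modulo (mask + 1); assuming the
 * CPU setup code set the mask to 7 (as it reportedly does on the
 * affected Intel parts), src = 0x1004 / dst = 0x2004 passes while
 * src = 0x1004 / dst = 0x2006 is routed to the unrolled copy.  Copies
 * shorter than 64 bytes always use movsl, since the unrolled loop's
 * setup cost dominates there.
 */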

/*
 * Zero Userspace
 */

#define __do_clear_user(addr, size)					\
do {									\
	int __d0;							\
	might_fault();							\
	__asm__ __volatile__(						\
		ASM_STAC "\n"						\
		"0: rep; stosl\n"					\
		"   movl %2,%0\n"					\
		"1: rep; stosb\n"					\
		"2: " ASM_CLAC "\n"					\
		".section .fixup,\"ax\"\n"				\
		"3: lea 0(%2,%0,4),%0\n"				\
		"   jmp 2b\n"						\
		".previous\n"						\
		_ASM_EXTABLE_UA(0b, 3b)					\
		_ASM_EXTABLE_UA(1b, 2b)					\
		: "=&c"(size), "=&D" (__d0)				\
		: "r"(size & 3), "0"(size / 4), "1"(addr), "a"(0));	\
} while (0)
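
/*
 * A worked example of the fixup above (illustrative only): for
 * size == 11 the asm starts with ECX = 11 / 4 = 2 and %2 = 11 & 3 = 3.
 * If "rep; stosl" at label 0: faults with one longword still to go
 * (ECX == 1), the fixup at 3: computes %2 + 4 * ECX = 3 + 4 = 7, the
 * number of bytes that could not be cleared.  A fault in "rep; stosb"
 * at 1: needs no arithmetic: ECX already counts the remaining bytes.
 */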

/**
 * clear_user: - Zero a block of memory in user space.
 * @to:   Destination address, in user space.
 * @n:    Number of bytes to zero.
 *
 * Zero a block of memory in user space.
 *
 * Returns number of bytes that could not be cleared.
 * On success, this will be zero.
 */
unsigned long
clear_user(void __user *to, unsigned long n)
{
	might_fault();
	if (access_ok(VERIFY_WRITE, to, n))
		__do_clear_user(to, n);
	return n;
}
EXPORT_SYMBOL(clear_user);
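
/*
 * Typical use (a hypothetical caller, not part of this file): report a
 * partial clear to user space as -EFAULT.
 *
 *	if (clear_user(ubuf, len))
 *		return -EFAULT;
 */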

/**
 * __clear_user: - Zero a block of memory in user space, with less checking.
 * @to:   Destination address, in user space.
 * @n:    Number of bytes to zero.
 *
 * Zero a block of memory in user space.  Caller must check
 * the specified block with access_ok() before calling this function.
 *
 * Returns number of bytes that could not be cleared.
 * On success, this will be zero.
 */
unsigned long
__clear_user(void __user *to, unsigned long n)
{
	__do_clear_user(to, n);
	return n;
}
EXPORT_SYMBOL(__clear_user);

#ifdef CONFIG_X86_INTEL_USERCOPY
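/*
 * Hand-unrolled copy, 64 bytes per iteration, tuned for the Intel CPUs
 * that set movsl_mask.  The loads from 32(%4) at 1: and 64(%4) at 2:
 * discard their results; they exist only to touch the next cache lines
 * ahead of the bulk moves (a software prefetch done with plain loads).
 * The tail (< 64 bytes) is finished with rep movsl/movsb, and every
 * numbered load/store has an exception-table entry so that a fault
 * returns the number of bytes left uncopied.
 */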
static unsigned long
__copy_user_intel(void __user *to, const void *from, unsigned long size)
{
	int d0, d1;
	__asm__ __volatile__(
		"	.align 2,0x90\n"
		"1:	movl 32(%4), %%eax\n"
		"	cmpl $67, %0\n"
		"	jbe 3f\n"
		"2:	movl 64(%4), %%eax\n"
		"	.align 2,0x90\n"
		"3:	movl 0(%4), %%eax\n"
		"4:	movl 4(%4), %%edx\n"
		"5:	movl %%eax, 0(%3)\n"
		"6:	movl %%edx, 4(%3)\n"
		"7:	movl 8(%4), %%eax\n"
		"8:	movl 12(%4),%%edx\n"
		"9:	movl %%eax, 8(%3)\n"
		"10:	movl %%edx, 12(%3)\n"
		"11:	movl 16(%4), %%eax\n"
		"12:	movl 20(%4), %%edx\n"
		"13:	movl %%eax, 16(%3)\n"
		"14:	movl %%edx, 20(%3)\n"
		"15:	movl 24(%4), %%eax\n"
		"16:	movl 28(%4), %%edx\n"
		"17:	movl %%eax, 24(%3)\n"
		"18:	movl %%edx, 28(%3)\n"
		"19:	movl 32(%4), %%eax\n"
		"20:	movl 36(%4), %%edx\n"
		"21:	movl %%eax, 32(%3)\n"
		"22:	movl %%edx, 36(%3)\n"
		"23:	movl 40(%4), %%eax\n"
		"24:	movl 44(%4), %%edx\n"
		"25:	movl %%eax, 40(%3)\n"
		"26:	movl %%edx, 44(%3)\n"
		"27:	movl 48(%4), %%eax\n"
		"28:	movl 52(%4), %%edx\n"
		"29:	movl %%eax, 48(%3)\n"
		"30:	movl %%edx, 52(%3)\n"
		"31:	movl 56(%4), %%eax\n"
		"32:	movl 60(%4), %%edx\n"
		"33:	movl %%eax, 56(%3)\n"
		"34:	movl %%edx, 60(%3)\n"
		"	addl $-64, %0\n"
		"	addl $64, %4\n"
		"	addl $64, %3\n"
		"	cmpl $63, %0\n"
		"	ja 1b\n"
		"35:	movl %0, %%eax\n"
		"	shrl $2, %0\n"
		"	andl $3, %%eax\n"
		"	cld\n"
		"99:	rep; movsl\n"
		"36:	movl %%eax, %0\n"
		"37:	rep; movsb\n"
		"100:\n"
		".section .fixup,\"ax\"\n"
		"101:	lea 0(%%eax,%0,4),%0\n"
		"	jmp 100b\n"
		".previous\n"
		_ASM_EXTABLE_UA(1b, 100b)
		_ASM_EXTABLE_UA(2b, 100b)
		_ASM_EXTABLE_UA(3b, 100b)
		_ASM_EXTABLE_UA(4b, 100b)
		_ASM_EXTABLE_UA(5b, 100b)
		_ASM_EXTABLE_UA(6b, 100b)
		_ASM_EXTABLE_UA(7b, 100b)
		_ASM_EXTABLE_UA(8b, 100b)
		_ASM_EXTABLE_UA(9b, 100b)
		_ASM_EXTABLE_UA(10b, 100b)
		_ASM_EXTABLE_UA(11b, 100b)
		_ASM_EXTABLE_UA(12b, 100b)
		_ASM_EXTABLE_UA(13b, 100b)
		_ASM_EXTABLE_UA(14b, 100b)
		_ASM_EXTABLE_UA(15b, 100b)
		_ASM_EXTABLE_UA(16b, 100b)
		_ASM_EXTABLE_UA(17b, 100b)
		_ASM_EXTABLE_UA(18b, 100b)
		_ASM_EXTABLE_UA(19b, 100b)
		_ASM_EXTABLE_UA(20b, 100b)
		_ASM_EXTABLE_UA(21b, 100b)
		_ASM_EXTABLE_UA(22b, 100b)
		_ASM_EXTABLE_UA(23b, 100b)
		_ASM_EXTABLE_UA(24b, 100b)
		_ASM_EXTABLE_UA(25b, 100b)
		_ASM_EXTABLE_UA(26b, 100b)
		_ASM_EXTABLE_UA(27b, 100b)
		_ASM_EXTABLE_UA(28b, 100b)
		_ASM_EXTABLE_UA(29b, 100b)
		_ASM_EXTABLE_UA(30b, 100b)
		_ASM_EXTABLE_UA(31b, 100b)
		_ASM_EXTABLE_UA(32b, 100b)
		_ASM_EXTABLE_UA(33b, 100b)
		_ASM_EXTABLE_UA(34b, 100b)
		_ASM_EXTABLE_UA(35b, 100b)
		_ASM_EXTABLE_UA(36b, 100b)
		_ASM_EXTABLE_UA(37b, 100b)
		_ASM_EXTABLE_UA(99b, 101b)
		: "=&c"(size), "=&D" (d0), "=&S" (d1)
		: "1"(to), "2"(from), "0"(size)
		: "eax", "edx", "memory");
	return size;
}
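
/*
 * Same unrolled structure as __copy_user_intel(), but user->kernel and
 * with movnti non-temporal stores so that a large copy does not evict
 * the whole cache.  movnti is an SSE2 instruction, which is why the
 * caller below checks X86_FEATURE_XMM2.  The sfence after the loop
 * drains the write-combining buffers before the cached rep movsl/movsb
 * tail runs.
 */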
static unsigned long __copy_user_intel_nocache(void *to,
				const void __user *from, unsigned long size)
{
	int d0, d1;

	__asm__ __volatile__(
		"	.align 2,0x90\n"
		"0:	movl 32(%4), %%eax\n"
		"	cmpl $67, %0\n"
		"	jbe 2f\n"
		"1:	movl 64(%4), %%eax\n"
		"	.align 2,0x90\n"
		"2:	movl 0(%4), %%eax\n"
		"21:	movl 4(%4), %%edx\n"
		"	movnti %%eax, 0(%3)\n"
		"	movnti %%edx, 4(%3)\n"
		"3:	movl 8(%4), %%eax\n"
		"31:	movl 12(%4),%%edx\n"
		"	movnti %%eax, 8(%3)\n"
		"	movnti %%edx, 12(%3)\n"
		"4:	movl 16(%4), %%eax\n"
		"41:	movl 20(%4), %%edx\n"
		"	movnti %%eax, 16(%3)\n"
		"	movnti %%edx, 20(%3)\n"
		"10:	movl 24(%4), %%eax\n"
		"51:	movl 28(%4), %%edx\n"
		"	movnti %%eax, 24(%3)\n"
		"	movnti %%edx, 28(%3)\n"
		"11:	movl 32(%4), %%eax\n"
		"61:	movl 36(%4), %%edx\n"
		"	movnti %%eax, 32(%3)\n"
		"	movnti %%edx, 36(%3)\n"
		"12:	movl 40(%4), %%eax\n"
		"71:	movl 44(%4), %%edx\n"
		"	movnti %%eax, 40(%3)\n"
		"	movnti %%edx, 44(%3)\n"
		"13:	movl 48(%4), %%eax\n"
		"81:	movl 52(%4), %%edx\n"
		"	movnti %%eax, 48(%3)\n"
		"	movnti %%edx, 52(%3)\n"
		"14:	movl 56(%4), %%eax\n"
		"91:	movl 60(%4), %%edx\n"
		"	movnti %%eax, 56(%3)\n"
		"	movnti %%edx, 60(%3)\n"
		"	addl $-64, %0\n"
		"	addl $64, %4\n"
		"	addl $64, %3\n"
		"	cmpl $63, %0\n"
		"	ja 0b\n"
		"	sfence \n"
		"5:	movl %0, %%eax\n"
		"	shrl $2, %0\n"
		"	andl $3, %%eax\n"
		"	cld\n"
		"6:	rep; movsl\n"
		"	movl %%eax,%0\n"
		"7:	rep; movsb\n"
		"8:\n"
		".section .fixup,\"ax\"\n"
		"9:	lea 0(%%eax,%0,4),%0\n"
		"16:	jmp 8b\n"
		".previous\n"
		_ASM_EXTABLE_UA(0b, 16b)
		_ASM_EXTABLE_UA(1b, 16b)
		_ASM_EXTABLE_UA(2b, 16b)
		_ASM_EXTABLE_UA(21b, 16b)
		_ASM_EXTABLE_UA(3b, 16b)
		_ASM_EXTABLE_UA(31b, 16b)
		_ASM_EXTABLE_UA(4b, 16b)
		_ASM_EXTABLE_UA(41b, 16b)
		_ASM_EXTABLE_UA(10b, 16b)
		_ASM_EXTABLE_UA(51b, 16b)
		_ASM_EXTABLE_UA(11b, 16b)
		_ASM_EXTABLE_UA(61b, 16b)
		_ASM_EXTABLE_UA(12b, 16b)
		_ASM_EXTABLE_UA(71b, 16b)
		_ASM_EXTABLE_UA(13b, 16b)
		_ASM_EXTABLE_UA(81b, 16b)
		_ASM_EXTABLE_UA(14b, 16b)
		_ASM_EXTABLE_UA(91b, 16b)
		_ASM_EXTABLE_UA(6b, 9b)
		_ASM_EXTABLE_UA(7b, 16b)
		: "=&c"(size), "=&D" (d0), "=&S" (d1)
		: "1"(to), "2"(from), "0"(size)
		: "eax", "edx", "memory");
	return size;
}

#else

/*
 * Leave these declared but undefined.  There should not be any
 * references to them.
 */
unsigned long __copy_user_intel(void __user *to, const void *from,
					unsigned long size);
#endif /* CONFIG_X86_INTEL_USERCOPY */

/* Generic arbitrary sized copy.  */
#define __copy_user(to, from, size)					\
do {									\
	int __d0, __d1, __d2;						\
	__asm__ __volatile__(						\
		"	cmp  $7,%0\n"					\
		"	jbe  1f\n"					\
		"	movl %1,%0\n"					\
		"	negl %0\n"					\
		"	andl $7,%0\n"					\
		"	subl %0,%3\n"					\
		"4:	rep; movsb\n"					\
		"	movl %3,%0\n"					\
		"	shrl $2,%0\n"					\
		"	andl $3,%3\n"					\
		"	.align 2,0x90\n"				\
		"0:	rep; movsl\n"					\
		"	movl %3,%0\n"					\
		"1:	rep; movsb\n"					\
		"2:\n"							\
		".section .fixup,\"ax\"\n"				\
		"5:	addl %3,%0\n"					\
		"	jmp 2b\n"					\
		"3:	lea 0(%3,%0,4),%0\n"				\
		"	jmp 2b\n"					\
		".previous\n"						\
		_ASM_EXTABLE_UA(4b, 5b)					\
		_ASM_EXTABLE_UA(0b, 3b)					\
		_ASM_EXTABLE_UA(1b, 2b)					\
		: "=&c"(size), "=&D" (__d0), "=&S" (__d1), "=r"(__d2)	\
		: "3"(size), "0"(size), "1"(to), "2"(from)		\
		: "memory");						\
} while (0)
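
/*
 * How the prologue above works (a sketch, not normative): for copies of
 * more than 7 bytes, "negl/andl $7" computes (-to) & 7, the number of
 * bytes needed to 8-byte-align the destination.  Those move with
 * rep movsb, then the rest goes as 32-bit words plus a byte tail.  For
 * example, to = 0x...05 and size = 100 gives a 3-byte head, 24 movsl
 * iterations and a 1-byte tail.  The 5: fixup handles a fault during
 * the head (remaining = %3 + ECX); the 3: fixup handles a fault during
 * movsl (remaining = tail bytes + 4 * ECX).
 */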

unsigned long __copy_user_ll(void *to, const void *from, unsigned long n)
{
	__uaccess_begin_nospec();
	if (movsl_is_ok(to, from, n))
		__copy_user(to, from, n);
	else
		n = __copy_user_intel(to, from, n);
	__uaccess_end();
	return n;
}
EXPORT_SYMBOL(__copy_user_ll);
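
/*
 * In this tree, __copy_user_ll() is the common backend that
 * asm/uaccess_32.h routes raw_copy_to_user() and raw_copy_from_user()
 * through on 32-bit; its only job is picking between the plain movsl
 * copy and the unrolled Intel one.
 */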

unsigned long __copy_from_user_ll_nocache_nozero(void *to, const void __user *from,
					unsigned long n)
{
	__uaccess_begin_nospec();
#ifdef CONFIG_X86_INTEL_USERCOPY
	if (n > 64 && static_cpu_has(X86_FEATURE_XMM2))
		n = __copy_user_intel_nocache(to, from, n);
	else
		__copy_user(to, from, n);
#else
	__copy_user(to, from, n);
#endif
	__uaccess_end();
	return n;
}
EXPORT_SYMBOL(__copy_from_user_ll_nocache_nozero);
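
/*
 * This backs __copy_from_user_inatomic_nocache() (asm/uaccess_32.h) for
 * callers that copy data they will not touch again soon and so prefer
 * to keep it out of the cache.  "nozero" means the destination is left
 * untouched past the fault point rather than being zero-padded.
 */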