/*
 * Do memcpy(), but trap and return the number of bytes not copied if a
 * load or store faults.
 *
 * Note: this idiom only works because the copy routine compiles to a
 * leaf function; the fault fixups return directly through "lr", which
 * must still hold the caller's return address.
 */

#include <linux/types.h>
#include <linux/string.h>
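/*
 * _ST and _LD wrap a single store or load in an exception-table fixup:
 * the instruction at label 1 gets an __ex_table entry pointing at the
 * fixup at label 2, placed out of line in .coldtext.memcpy.  On a
 * fault, the fixup moves "n" (the count of bytes not yet copied,
 * referenced from the enclosing function's scope) into r0 and returns
 * to the caller via "jrp lr".
 */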
#define _ST(p, inst, v)						\
	({							\
		asm("1: " #inst " %0, %1;"			\
		    ".pushsection .coldtext.memcpy,\"ax\";"	\
		    "2: { move r0, %2; jrp lr };"		\
		    ".section __ex_table,\"a\";"		\
		    ".align 8;"					\
		    ".quad 1b, 2b;"				\
		    ".popsection"				\
		    : "=m" (*(p)) : "r" (v), "r" (n));		\
	})

#define _LD(p, inst)						\
	({							\
		unsigned long __v;				\
		asm("1: " #inst " %0, %1;"			\
		    ".pushsection .coldtext.memcpy,\"ax\";"	\
		    "2: { move r0, %2; jrp lr };"		\
		    ".section __ex_table,\"a\";"		\
		    ".align 8;"					\
		    ".quad 1b, 2b;"				\
		    ".popsection"				\
		    : "=r" (__v) : "m" (*(p)), "r" (n));	\
		__v;						\
	})

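/*
 * memcpy_64.c serves as a template: each inclusion below builds its
 * copy loop under a different USERCOPY_FUNC name, with the ST1-ST8 and
 * LD1-LD8 macros selecting which accesses get fault fixups.  For
 * __copy_to_user_inatomic only the stores touch user memory, so loads
 * are the plain LD and stores are the trapping _ST variants.
 */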
#define USERCOPY_FUNC __copy_to_user_inatomic
#define ST1(p, v) _ST((p), st1, (v))
#define ST2(p, v) _ST((p), st2, (v))
#define ST4(p, v) _ST((p), st4, (v))
#define ST8(p, v) _ST((p), st, (v))
#define LD1 LD
#define LD2 LD
#define LD4 LD
#define LD8 LD
#include "memcpy_64.c"

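/*
 * For __copy_from_user_inatomic the loads touch user memory, so they
 * use the trapping _LD variants (with unsigned load instructions) and
 * the stores are the plain ST.
 */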
#define USERCOPY_FUNC __copy_from_user_inatomic
#define ST1 ST
#define ST2 ST
#define ST4 ST
#define ST8 ST
#define LD1(p) _LD((p), ld1u)
#define LD2(p) _LD((p), ld2u)
#define LD4(p) _LD((p), ld4u)
#define LD8(p) _LD((p), ld)
#include "memcpy_64.c"

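/*
 * For __copy_in_user_inatomic both source and destination are user
 * memory, so loads and stores both get fault fixups.
 */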
#define USERCOPY_FUNC __copy_in_user_inatomic
#define ST1(p, v) _ST((p), st1, (v))
#define ST2(p, v) _ST((p), st2, (v))
#define ST4(p, v) _ST((p), st4, (v))
#define ST8(p, v) _ST((p), st, (v))
#define LD1(p) _LD((p), ld1u)
#define LD2(p) _LD((p), ld2u)
#define LD4(p) _LD((p), ld4u)
#define LD8(p) _LD((p), ld)
#include "memcpy_64.c"

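/*
 * Like __copy_from_user_inatomic, but zero the uncopied tail of the
 * kernel buffer after a fault, so the caller never sees stale kernel
 * data in the unwritten bytes.
 */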
unsigned long __copy_from_user_zeroing(void *to, const void __user *from,
				       unsigned long n)
{
	unsigned long rc = __copy_from_user_inatomic(to, from, n);
	if (unlikely(rc))
		memset(to + n - rc, 0, rc);
	return rc;
}

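/*
 * A minimal usage sketch (hypothetical caller, not part of this file;
 * "struct foo" and "ubuf" are illustrative names): copy a fixed-size
 * structure from user space and treat any uncopied remainder as a
 * fault:
 *
 *	struct foo cfg;
 *
 *	if (__copy_from_user_zeroing(&cfg, ubuf, sizeof(cfg)))
 *		return -EFAULT;
 */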