#ifndef _ASM_X86_UACCESS_64_H
#define _ASM_X86_UACCESS_64_H
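
/*
 * User space memory access functions
 */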
#include <linux/compiler.h>
#include <linux/errno.h>
#include <linux/prefetch.h>
#include <linux/lockdep.h>
#include <asm/alternative.h>
#include <asm/cpufeature.h>
#include <asm/page.h>
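
/*
 * Copy To/From Userspace
 */

/* Handles exceptions in both to and from, but doesn't do access_ok */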
__must_check unsigned long
copy_user_generic_string(void *to, const void *from, unsigned len);
__must_check unsigned long
copy_user_generic_unrolled(void *to, const void *from, unsigned len);
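
/*
 * Patched at boot via the alternatives mechanism: the call to the
 * unrolled copy is replaced by a call to copy_user_generic_string on
 * CPUs that advertise X86_FEATURE_REP_GOOD (fast REP string moves).
 * The asm constraints pin to/from/len to %rdi/%rsi/%rdx as the x86-64
 * calling convention requires, and the remaining caller-saved
 * registers appear in the clobber list because the patched-in callee
 * is free to use them.
 */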
static __always_inline __must_check unsigned long
copy_user_generic(void *to, const void *from, unsigned len)
{
	unsigned ret;

	alternative_call(copy_user_generic_unrolled,
			 copy_user_generic_string,
			 X86_FEATURE_REP_GOOD,
			 ASM_OUTPUT2("=a" (ret), "=D" (to), "=S" (from),
				     "=d" (len)),
			 "1" (to), "2" (from), "3" (len)
			 : "memory", "rcx", "r8", "r9", "r10", "r11");
	return ret;
}
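
/*
 * Out-of-line copy primitives with exception-table fixups for the
 * faulting instructions; copy_in_user() moves data between two
 * user-space buffers.
 */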
__must_check unsigned long
_copy_to_user(void __user *to, const void *from, unsigned len);
__must_check unsigned long
_copy_from_user(void *to, const void __user *from, unsigned len);
__must_check unsigned long
copy_in_user(void __user *to, const void __user *from, unsigned len);
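
/*
 * copy_from_user() adds a compile-time bounds check: when the size of
 * the destination object is known to the compiler (sz != -1), a copy
 * larger than the object is refused, and CONFIG_DEBUG_VM turns that
 * refusal into a warning.
 */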
static inline unsigned long __must_check copy_from_user(void *to,
					  const void __user *from,
					  unsigned long n)
{
	int sz = __compiletime_object_size(to);

	might_fault();
	if (likely(sz == -1 || sz >= n))
		n = _copy_from_user(to, from, n);
#ifdef CONFIG_DEBUG_VM
	else
		WARN(1, "Buffer overflow detected!\n");
#endif
	return n;
}
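
/* The user-range check is done inside _copy_to_user() itself */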
static __always_inline __must_check
int copy_to_user(void __user *dst, const void *src, unsigned size)
{
	might_fault();

	return _copy_to_user(dst, src, size);
}
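
/*
 * __copy_from_user(): no access_ok() check. The constant sizes 1, 2,
 * 4, 8, 10 and 16 are inlined as one or two mov instructions via
 * __get_user_asm(); anything else falls back to copy_user_generic().
 * The last argument of each __get_user_asm() is the value stored in
 * ret if that access faults, i.e. the number of bytes left uncopied.
 */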
static __always_inline __must_check
int __copy_from_user(void *dst, const void __user *src, unsigned size)
{
	int ret = 0;

	might_fault();
	if (!__builtin_constant_p(size))
		return copy_user_generic(dst, (__force void *)src, size);
	switch (size) {
	case 1:__get_user_asm(*(u8 *)dst, (u8 __user *)src,
			      ret, "b", "b", "=q", 1);
		return ret;
	case 2:__get_user_asm(*(u16 *)dst, (u16 __user *)src,
			      ret, "w", "w", "=r", 2);
		return ret;
	case 4:__get_user_asm(*(u32 *)dst, (u32 __user *)src,
			      ret, "l", "k", "=r", 4);
		return ret;
	case 8:__get_user_asm(*(u64 *)dst, (u64 __user *)src,
			      ret, "q", "", "=r", 8);
		return ret;
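	/* 10- and 16-byte objects are copied as 8 + 2 and 8 + 8 bytes */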
	case 10:
		__get_user_asm(*(u64 *)dst, (u64 __user *)src,
			       ret, "q", "", "=r", 10);
		if (unlikely(ret))
			return ret;
		__get_user_asm(*(u16 *)(8 + (char *)dst),
			       (u16 __user *)(8 + (char __user *)src),
			       ret, "w", "w", "=r", 2);
		return ret;
	case 16:
		__get_user_asm(*(u64 *)dst, (u64 __user *)src,
			       ret, "q", "", "=r", 16);
		if (unlikely(ret))
			return ret;
		__get_user_asm(*(u64 *)(8 + (char *)dst),
			       (u64 __user *)(8 + (char __user *)src),
			       ret, "q", "", "=r", 8);
		return ret;
	default:
		return copy_user_generic(dst, (__force void *)src, size);
	}
}
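
/*
 * __copy_to_user(): the mirror image of __copy_from_user(), built on
 * __put_user_asm() stores; again, no access_ok() check is performed.
 */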
static __always_inline __must_check
int __copy_to_user(void __user *dst, const void *src, unsigned size)
{
	int ret = 0;

	might_fault();
	if (!__builtin_constant_p(size))
		return copy_user_generic((__force void *)dst, src, size);
	switch (size) {
	case 1:__put_user_asm(*(u8 *)src, (u8 __user *)dst,
			      ret, "b", "b", "iq", 1);
		return ret;
	case 2:__put_user_asm(*(u16 *)src, (u16 __user *)dst,
			      ret, "w", "w", "ir", 2);
		return ret;
	case 4:__put_user_asm(*(u32 *)src, (u32 __user *)dst,
			      ret, "l", "k", "ir", 4);
		return ret;
	case 8:__put_user_asm(*(u64 *)src, (u64 __user *)dst,
			      ret, "q", "", "er", 8);
		return ret;
	case 10:
		__put_user_asm(*(u64 *)src, (u64 __user *)dst,
			       ret, "q", "", "er", 10);
		if (unlikely(ret))
			return ret;
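		/* compiler barrier between the two halves of the store */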
		asm("":::"memory");
		__put_user_asm(((u16 *)src)[4], 4 + (u16 __user *)dst,
			       ret, "w", "w", "ir", 2);
		return ret;
	case 16:
		__put_user_asm(*(u64 *)src, (u64 __user *)dst,
			       ret, "q", "", "er", 16);
		if (unlikely(ret))
			return ret;
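		/* compiler barrier between the two halves of the store */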
		asm("":::"memory");
		__put_user_asm(((u64 *)src)[1], 1 + (u64 __user *)dst,
			       ret, "q", "", "er", 8);
		return ret;
	default:
		return copy_user_generic((__force void *)dst, src, size);
	}
}
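
/*
 * __copy_in_user(): user-to-user copy without an access_ok() check.
 * Small constant sizes bounce through a kernel temporary: read from
 * src first, and only on success write to dst.
 */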
static __always_inline __must_check
int __copy_in_user(void __user *dst, const void __user *src, unsigned size)
{
	int ret = 0;

	might_fault();
	if (!__builtin_constant_p(size))
		return copy_user_generic((__force void *)dst,
					 (__force void *)src, size);
	switch (size) {
	case 1: {
		u8 tmp;
		__get_user_asm(tmp, (u8 __user *)src,
			       ret, "b", "b", "=q", 1);
		if (likely(!ret))
			__put_user_asm(tmp, (u8 __user *)dst,
				       ret, "b", "b", "iq", 1);
		return ret;
	}
	case 2: {
		u16 tmp;
		__get_user_asm(tmp, (u16 __user *)src,
			       ret, "w", "w", "=r", 2);
		if (likely(!ret))
			__put_user_asm(tmp, (u16 __user *)dst,
				       ret, "w", "w", "ir", 2);
		return ret;
	}

	case 4: {
		u32 tmp;
		__get_user_asm(tmp, (u32 __user *)src,
			       ret, "l", "k", "=r", 4);
		if (likely(!ret))
			__put_user_asm(tmp, (u32 __user *)dst,
				       ret, "l", "k", "ir", 4);
		return ret;
	}
	case 8: {
		u64 tmp;
		__get_user_asm(tmp, (u64 __user *)src,
			       ret, "q", "", "=r", 8);
		if (likely(!ret))
			__put_user_asm(tmp, (u64 __user *)dst,
				       ret, "q", "", "er", 8);
		return ret;
	}
	default:
		return copy_user_generic((__force void *)dst,
					 (__force void *)src, size);
	}
}
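
/* Out-of-line string handling and memory-clearing primitives */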
__must_check long
strncpy_from_user(char *dst, const char __user *src, long count);
__must_check long
__strncpy_from_user(char *dst, const char __user *src, long count);
__must_check long strnlen_user(const char __user *str, long n);
__must_check long __strnlen_user(const char __user *str, long n);
__must_check long strlen_user(const char __user *str);
__must_check unsigned long clear_user(void __user *mem, unsigned long len);
__must_check unsigned long __clear_user(void __user *mem, unsigned long len);
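
/*
 * The _inatomic variants skip might_fault() and so may be called with
 * page faults disabled; the caller must cope with a short copy (the
 * return value is the number of bytes not copied).
 */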
static __must_check __always_inline int
__copy_from_user_inatomic(void *dst, const void __user *src, unsigned size)
{
	return copy_user_generic(dst, (__force const void *)src, size);
}

static __must_check __always_inline int
__copy_to_user_inatomic(void __user *dst, const void *src, unsigned size)
{
	return copy_user_generic((__force void *)dst, src, size);
}
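
/*
 * Non-caching copy: __copy_user_nocache() uses non-temporal stores so
 * that a bulk copy does not evict the CPU caches. zerorest selects
 * whether the remainder of the destination is zeroed after a fault.
 */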
extern long __copy_user_nocache(void *dst, const void __user *src,
				unsigned size, int zerorest);

static inline int
__copy_from_user_nocache(void *dst, const void __user *src, unsigned size)
{
	might_sleep();
	return __copy_user_nocache(dst, src, size, 1);
}

static inline int
__copy_from_user_inatomic_nocache(void *dst, const void __user *src,
				  unsigned size)
{
	return __copy_user_nocache(dst, src, size, 0);
}
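
/*
 * Fault fixup helper: after a faulting bulk copy it retries byte by
 * byte to move as much as possible, returning the number of bytes
 * that could not be copied (and optionally zeroing the tail of the
 * destination).
 */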
unsigned long
copy_user_handle_tail(char *to, char *from, unsigned len, unsigned zerorest);

#endif /* _ASM_X86_UACCESS_64_H */