1#ifndef __M68K_UACCESS_H
2#define __M68K_UACCESS_H
3
4
5
6
7#include <linux/compiler.h>
8#include <linux/types.h>
9#include <asm/segment.h>
10
11
/*
 * Address range check for user pointers.  Always succeeds here: the
 * actual validity of an access is enforced at access time by the
 * __ex_table fixups below (presumably the MMU/bus-error path does the
 * real checking — confirm against the arch fault handler).
 */
static inline int access_ok(int type, const void __user *addr,
			    unsigned long size)
{
	return 1;
}
17
18
19
20
21
22
23
24
25
26
27
/*
 * CPUs with separate user/kernel address spaces must use the "moves"
 * instruction (which honours SFC/DFC) to touch the user address space;
 * all other cores use a plain "move".
 */
#ifdef CONFIG_CPU_HAS_ADDRESS_SPACES
#define MOVES "moves"
#else
#define MOVES "move"
#endif
33
/*
 * Deliberately undefined: referencing these for an unsupported transfer
 * size turns into a link-time error instead of silent misbehaviour.
 */
extern int __put_user_bad(void);
extern int __get_user_bad(void);
36
/*
 * Emit a single store of size <bwl> (b/w/l) to user space through
 * <ptr>, with the source value <x> in register class <reg>.
 *
 * Labels 1 and 2 are recorded in __ex_table; a fault on the store (or
 * one reported with the PC already past it — hence the extra 2b entry;
 * confirm against the arch fault-reporting behaviour) jumps to the
 * fixup at label 10, which sets <res> to <err> and resumes after the
 * store.  <res> must be an int lvalue pre-initialised by the caller;
 * it is left untouched on success.
 */
#define __put_user_asm(res, x, ptr, bwl, reg, err) \
asm volatile ("\n" \
	"1: "MOVES"."#bwl" %2,%1\n" \
	"2:\n" \
	" .section .fixup,\"ax\"\n" \
	" .even\n" \
	"10: moveq.l %3,%0\n" \
	" jra 2b\n" \
	" .previous\n" \
	"\n" \
	" .section __ex_table,\"a\"\n" \
	" .align 4\n" \
	" .long 1b,10b\n" \
	" .long 2b,10b\n" \
	" .previous" \
	: "+d" (res), "=m" (*(ptr)) \
	: #reg (x), "i" (err))
54
55
56
57
58
59
/*
 * Store a value to user space, selecting the access width from
 * sizeof(*(ptr)).  Sizes 1/2/4 use a single __put_user_asm() store;
 * size 8 is performed as two consecutive 32-bit moves (%2 and %R2 are
 * the two halves of the 64-bit register pair) sharing one fixup.  Any
 * other size is a link-time error via __put_user_bad().
 *
 * Evaluates to 0 on success or -EFAULT if a store faulted.
 */
#define __put_user(x, ptr)						\
({									\
	typeof(*(ptr)) __pu_val = (x);					\
	int __pu_err = 0;						\
	__chk_user_ptr(ptr);						\
	switch (sizeof (*(ptr))) {					\
	case 1:								\
		__put_user_asm(__pu_err, __pu_val, ptr, b, d, -EFAULT);	\
		break;							\
	case 2:								\
		__put_user_asm(__pu_err, __pu_val, ptr, w, r, -EFAULT);	\
		break;							\
	case 4:								\
		__put_user_asm(__pu_err, __pu_val, ptr, l, r, -EFAULT);	\
		break;							\
	case 8:								\
	    {								\
		const void __user *__pu_ptr = (ptr);			\
		asm volatile ("\n"					\
			"1: "MOVES".l %2,(%1)+\n"			\
			"2: "MOVES".l %R2,(%1)\n"			\
			"3:\n"						\
			" .section .fixup,\"ax\"\n"			\
			" .even\n"					\
			"10: movel %3,%0\n"				\
			" jra 3b\n"					\
			" .previous\n"					\
			"\n"						\
			" .section __ex_table,\"a\"\n"			\
			" .align 4\n"					\
			" .long 1b,10b\n"				\
			" .long 2b,10b\n"				\
			" .long 3b,10b\n"				\
			" .previous"					\
			: "+d" (__pu_err), "+a" (__pu_ptr)		\
			: "r" (__pu_val), "i" (-EFAULT)			\
			: "memory");					\
		break;							\
	    }								\
	default:							\
		__pu_err = __put_user_bad();				\
		break;							\
	}								\
	__pu_err;							\
})
#define put_user(x, ptr) __put_user(x, ptr)
106
107
/*
 * Emit a single load of size <bwl> from user space into a temporary of
 * C <type> (register class <reg>), then assign it to <x>.
 *
 * On a fault the fixup at label 10 stores <err> into <res> and zeroes
 * the destination register (sub.l %1,%1) so the caller never sees
 * stale data, then resumes at label 2.  The double __force cast in the
 * final assignment launders the address-space/type qualifiers for
 * sparse while widening through unsigned long.
 */
#define __get_user_asm(res, x, ptr, type, bwl, reg, err) ({	\
	type __gu_val;						\
	asm volatile ("\n"					\
		"1: "MOVES"."#bwl" %2,%1\n"			\
		"2:\n"						\
		" .section .fixup,\"ax\"\n"			\
		" .even\n"					\
		"10: move.l %3,%0\n"				\
		" sub.l %1,%1\n"				\
		" jra 2b\n"					\
		" .previous\n"					\
		"\n"						\
		" .section __ex_table,\"a\"\n"			\
		" .align 4\n"					\
		" .long 1b,10b\n"				\
		" .previous"					\
		: "+d" (res), "=&" #reg (__gu_val)		\
		: "m" (*(ptr)), "i" (err));			\
	(x) = (__force typeof(*(ptr)))(__force unsigned long)__gu_val; \
})
128
/*
 * Fetch a value from user space, selecting the access width from
 * sizeof(*(ptr)).  Sizes 1/2/4 go through __get_user_asm(); size 8 is
 * done as two consecutive 32-bit loads (%1/%R1 are the halves of the
 * 64-bit register pair), mirroring the 8-byte case of __put_user()
 * above — previously an 8-byte get_user() fell through to
 * __get_user_bad().  Any other size is a link-time error.
 *
 * On a fault the destination is zeroed and the expression evaluates to
 * -EFAULT; on success it evaluates to 0.
 */
#define __get_user(x, ptr)						\
({									\
	int __gu_err = 0;						\
	__chk_user_ptr(ptr);						\
	switch (sizeof(*(ptr))) {					\
	case 1:								\
		__get_user_asm(__gu_err, x, ptr, u8, b, d, -EFAULT);	\
		break;							\
	case 2:								\
		__get_user_asm(__gu_err, x, ptr, u16, w, r, -EFAULT);	\
		break;							\
	case 4:								\
		__get_user_asm(__gu_err, x, ptr, u32, l, r, -EFAULT);	\
		break;							\
	case 8:								\
	    {								\
		/* A union gives a 64-bit lvalue for the asm while	\
		 * preserving the pointee's type for the assignment. */	\
		const void __user *__gu_ptr = (ptr);			\
		union {							\
			u64 l;						\
			__typeof__(*(ptr)) t;				\
		} __gu_val;						\
		asm volatile ("\n"					\
			"1: "MOVES".l (%2)+,%1\n"			\
			"2: "MOVES".l (%2),%R1\n"			\
			"3:\n"						\
			" .section .fixup,\"ax\"\n"			\
			" .even\n"					\
			"10: move.l %3,%0\n"				\
			" sub.l %1,%1\n"				\
			" sub.l %R1,%R1\n"				\
			" jra 3b\n"					\
			" .previous\n"					\
			"\n"						\
			" .section __ex_table,\"a\"\n"			\
			" .align 4\n"					\
			" .long 1b,10b\n"				\
			" .long 2b,10b\n"				\
			" .previous"					\
			: "+d" (__gu_err), "=&r" (__gu_val.l),		\
			  "+a" (__gu_ptr)				\
			: "i" (-EFAULT)					\
			: "memory");					\
		(x) = __gu_val.t;					\
		break;							\
	    }								\
	default:							\
		__gu_err = __get_user_bad();				\
		break;							\
	}								\
	__gu_err;							\
})
#define get_user(x, ptr) __get_user(x, ptr)
178
/*
 * Out-of-line fallbacks used when the copy size is not a small
 * compile-time constant.  Both return the number of bytes NOT copied
 * (0 on success).
 */
unsigned long __generic_copy_from_user(void *to, const void __user *from, unsigned long n);
unsigned long __generic_copy_to_user(void __user *to, const void *from, unsigned long n);
181
/*
 * Map a step size in bytes to the matching move-instruction size
 * suffix (1 -> b, 2 -> w, 4 -> l).  __suffix0 expands to nothing,
 * which the .ifnc tests in the asm below use to omit unused steps.
 */
#define __suffix0
#define __suffix1 b
#define __suffix2 w
#define __suffix4 l
186
/*
 * Unrolled copy-from-user: up to three MOVES-load / move-store pairs
 * of widths <s1>/<s2>/<s3> transferring <n1>/<n2>/<n3> bytes.  Steps
 * 2 and 3 are emitted only when their size suffix is non-empty
 * (.ifnc).
 *
 * Fault handling: the fixup entry points 10/20/30 deliberately fall
 * through one another, so a fault at step k adds the byte counts of
 * step k and every later step to <res> — i.e. <res> accumulates the
 * number of bytes not copied — before resuming at label 4.
 *
 * The ___ and __ wrapper macros below add the extra expansion rounds
 * needed so that __suffix##n1 etc. are expanded into the real size
 * suffixes before being pasted/stringised.
 */
#define ____constant_copy_from_user_asm(res, to, from, tmp, n1, n2, n3, s1, s2, s3)\
	asm volatile ("\n"						\
		"1: "MOVES"."#s1" (%2)+,%3\n"				\
		" move."#s1" %3,(%1)+\n"				\
		" .ifnc \""#s2"\",\"\"\n"				\
		"2: "MOVES"."#s2" (%2)+,%3\n"				\
		" move."#s2" %3,(%1)+\n"				\
		" .ifnc \""#s3"\",\"\"\n"				\
		"3: "MOVES"."#s3" (%2)+,%3\n"				\
		" move."#s3" %3,(%1)+\n"				\
		" .endif\n"						\
		" .endif\n"						\
		"4:\n"							\
		" .section __ex_table,\"a\"\n"				\
		" .align 4\n"						\
		" .long 1b,10f\n"					\
		" .ifnc \""#s2"\",\"\"\n"				\
		" .long 2b,20f\n"					\
		" .ifnc \""#s3"\",\"\"\n"				\
		" .long 3b,30f\n"					\
		" .endif\n"						\
		" .endif\n"						\
		" .previous\n"						\
		"\n"							\
		" .section .fixup,\"ax\"\n"				\
		" .even\n"						\
		"10: addq.l #"#n1",%0\n"				\
		" .ifnc \""#s2"\",\"\"\n"				\
		"20: addq.l #"#n2",%0\n"				\
		" .ifnc \""#s3"\",\"\"\n"				\
		"30: addq.l #"#n3",%0\n"				\
		" .endif\n"						\
		" .endif\n"						\
		" jra 4b\n"						\
		" .previous\n"						\
		: "+d" (res), "+&a" (to), "+a" (from), "=&d" (tmp)	\
		: : "memory")

#define ___constant_copy_from_user_asm(res, to, from, tmp, n1, n2, n3, s1, s2, s3)\
	____constant_copy_from_user_asm(res, to, from, tmp, n1, n2, n3, s1, s2, s3)
#define __constant_copy_from_user_asm(res, to, from, tmp, n1, n2, n3)	\
	___constant_copy_from_user_asm(res, to, from, tmp, n1, n2, n3,	\
					__suffix##n1, __suffix##n2, __suffix##n3)
230
/*
 * Copy-from-user specialised for small compile-time-constant sizes.
 * Each supported size is decomposed into at most three moves of
 * descending width (4/2/1 bytes).  Returns the number of bytes not
 * copied (0 on success).  Sizes without a case here (0, 11, and
 * anything above 12) fall back to the generic out-of-line routine.
 */
static __always_inline unsigned long
__constant_copy_from_user(void *to, const void __user *from, unsigned long n)
{
	unsigned long res = 0, tmp;

	switch (n) {
	case 1:
		__constant_copy_from_user_asm(res, to, from, tmp, 1, 0, 0);
		break;
	case 2:
		__constant_copy_from_user_asm(res, to, from, tmp, 2, 0, 0);
		break;
	case 3:
		__constant_copy_from_user_asm(res, to, from, tmp, 2, 1, 0);
		break;
	case 4:
		__constant_copy_from_user_asm(res, to, from, tmp, 4, 0, 0);
		break;
	case 5:
		__constant_copy_from_user_asm(res, to, from, tmp, 4, 1, 0);
		break;
	case 6:
		__constant_copy_from_user_asm(res, to, from, tmp, 4, 2, 0);
		break;
	case 7:
		__constant_copy_from_user_asm(res, to, from, tmp, 4, 2, 1);
		break;
	case 8:
		__constant_copy_from_user_asm(res, to, from, tmp, 4, 4, 0);
		break;
	case 9:
		__constant_copy_from_user_asm(res, to, from, tmp, 4, 4, 1);
		break;
	case 10:
		__constant_copy_from_user_asm(res, to, from, tmp, 4, 4, 2);
		break;
	case 12:
		__constant_copy_from_user_asm(res, to, from, tmp, 4, 4, 4);
		break;
	default:
		/* Size not worth unrolling: use the generic copy loop. */
		return __generic_copy_from_user(to, from, n);
	}

	return res;
}
277
/*
 * Unrolled copy-to-user for a constant total size <n>: two or three
 * kernel-load (plain move) / user-store (MOVES) pairs of widths
 * <s1>/<s2>/<s3>.  The third pair is emitted only when <s3> is
 * non-empty (.ifnc).
 *
 * The __ex_table entries cover each user store and the instruction
 * after it (labels 11/12, 21/22, 31/32).  Any fault jumps to label 5,
 * which reports the entire transfer — all <n> bytes — as not copied
 * in <res>, regardless of how far the copy had progressed.
 */
#define __constant_copy_to_user_asm(res, to, from, tmp, n, s1, s2, s3)	\
	asm volatile ("\n"						\
		" move."#s1" (%2)+,%3\n"				\
		"11: "MOVES"."#s1" %3,(%1)+\n"				\
		"12: move."#s2" (%2)+,%3\n"				\
		"21: "MOVES"."#s2" %3,(%1)+\n"				\
		"22:\n"							\
		" .ifnc \""#s3"\",\"\"\n"				\
		" move."#s3" (%2)+,%3\n"				\
		"31: "MOVES"."#s3" %3,(%1)+\n"				\
		"32:\n"							\
		" .endif\n"						\
		"4:\n"							\
		"\n"							\
		" .section __ex_table,\"a\"\n"				\
		" .align 4\n"						\
		" .long 11b,5f\n"					\
		" .long 12b,5f\n"					\
		" .long 21b,5f\n"					\
		" .long 22b,5f\n"					\
		" .ifnc \""#s3"\",\"\"\n"				\
		" .long 31b,5f\n"					\
		" .long 32b,5f\n"					\
		" .endif\n"						\
		" .previous\n"						\
		"\n"							\
		" .section .fixup,\"ax\"\n"				\
		" .even\n"						\
		"5: moveq.l #"#n",%0\n"					\
		" jra 4b\n"						\
		" .previous\n"						\
		: "+d" (res), "+a" (to), "+a" (from), "=&d" (tmp)	\
		: : "memory")
311
/*
 * Copy-to-user specialised for small compile-time-constant sizes.
 * Single-access sizes (1/2/4) reuse __put_user_asm() with <err> set to
 * the byte count, so the returned value is bytes-not-copied either
 * way; multi-access sizes use the unrolled macro above.  Returns 0 on
 * success.  Unhandled sizes (0, 11, >12) fall back to the generic
 * out-of-line routine.
 */
static __always_inline unsigned long
__constant_copy_to_user(void __user *to, const void *from, unsigned long n)
{
	unsigned long res = 0, tmp;

	switch (n) {
	case 1:
		__put_user_asm(res, *(u8 *)from, (u8 __user *)to, b, d, 1);
		break;
	case 2:
		__put_user_asm(res, *(u16 *)from, (u16 __user *)to, w, r, 2);
		break;
	case 3:
		__constant_copy_to_user_asm(res, to, from, tmp, 3, w, b,);
		break;
	case 4:
		__put_user_asm(res, *(u32 *)from, (u32 __user *)to, l, r, 4);
		break;
	case 5:
		__constant_copy_to_user_asm(res, to, from, tmp, 5, l, b,);
		break;
	case 6:
		__constant_copy_to_user_asm(res, to, from, tmp, 6, l, w,);
		break;
	case 7:
		__constant_copy_to_user_asm(res, to, from, tmp, 7, l, w, b);
		break;
	case 8:
		__constant_copy_to_user_asm(res, to, from, tmp, 8, l, l,);
		break;
	case 9:
		__constant_copy_to_user_asm(res, to, from, tmp, 9, l, l, b);
		break;
	case 10:
		__constant_copy_to_user_asm(res, to, from, tmp, 10, l, l, w);
		break;
	case 12:
		__constant_copy_to_user_asm(res, to, from, tmp, 12, l, l, l);
		break;
	default:
		/* Size not worth unrolling: use the generic copy loop. */
		return __generic_copy_to_user(to, from, n);
	}

	return res;
}
358
359static inline unsigned long
360raw_copy_from_user(void *to, const void __user *from, unsigned long n)
361{
362 if (__builtin_constant_p(n))
363 return __constant_copy_from_user(to, from, n);
364 return __generic_copy_from_user(to, from, n);
365}
366
367static inline unsigned long
368raw_copy_to_user(void __user *to, const void *from, unsigned long n)
369{
370 if (__builtin_constant_p(n))
371 return __constant_copy_to_user(to, from, n);
372 return __generic_copy_to_user(to, from, n);
373}
/* Tell the generic uaccess code to inline copy_{from,to}_user(). */
#define INLINE_COPY_FROM_USER
#define INLINE_COPY_TO_USER

/*
 * Highest user address considered valid: unlimited when the kernel
 * address limit is in force (uaccess_kernel()), otherwise TASK_SIZE.
 */
#define user_addr_max() \
	(uaccess_kernel() ? ~0UL : TASK_SIZE)
379
/* User-space string helpers, implemented out of line. */
extern long strncpy_from_user(char *dst, const char __user *src, long count);
extern __must_check long strnlen_user(const char __user *str, long n);

/*
 * Zero n bytes of user memory; presumably returns the number of bytes
 * that could not be cleared — confirm against the implementation.
 */
unsigned long __clear_user(void __user *to, unsigned long n);

#define clear_user __clear_user
386
387#endif
388