#ifndef _ASM_X86_STRING_32_H
#define _ASM_X86_STRING_32_H

#ifdef __KERNEL__

/* Let gcc decide whether to inline or use the out of line functions? */

#define __HAVE_ARCH_STRCPY
extern char *strcpy(char *dest, const char *src);

#define __HAVE_ARCH_STRNCPY
extern char *strncpy(char *dest, const char *src, size_t count);

#define __HAVE_ARCH_STRCAT
extern char *strcat(char *dest, const char *src);

#define __HAVE_ARCH_STRNCAT
extern char *strncat(char *dest, const char *src, size_t count);

#define __HAVE_ARCH_STRCMP
extern int strcmp(const char *cs, const char *ct);

#define __HAVE_ARCH_STRNCMP
extern int strncmp(const char *cs, const char *ct, size_t count);

#define __HAVE_ARCH_STRCHR
extern char *strchr(const char *s, int c);

#define __HAVE_ARCH_STRLEN
extern size_t strlen(const char *s);

static __always_inline void *__memcpy(void *to, const void *from, size_t n)
{
	int d0, d1, d2;
	asm volatile("rep ; movsl\n\t"
		     "movl %4,%%ecx\n\t"
		     "andl $3,%%ecx\n\t"
		     "jz 1f\n\t"
		     "rep ; movsb\n\t"
		     "1:"
		     : "=&c" (d0), "=&D" (d1), "=&S" (d2)
		     : "0" (n / 4), "g" (n), "1" ((long)to), "2" ((long)from)
		     : "memory");
	return to;
}
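
/*
 * Example: __memcpy(dst, src, 10) copies 10 / 4 = 2 dwords with
 * "rep ; movsl", then reloads %ecx with n, masks it to 10 & 3 = 2,
 * and finishes the tail with "rep ; movsb".
 */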

/*
 * This looks ugly, but the compiler can optimize it totally,
 * as the count is constant.
 */
static __always_inline void *__constant_memcpy(void *to, const void *from,
					       size_t n)
{
	long esi, edi;
	if (!n)
		return to;

	switch (n) {
	case 1:
		*(char *)to = *(char *)from;
		return to;
	case 2:
		*(short *)to = *(short *)from;
		return to;
	case 4:
		*(int *)to = *(int *)from;
		return to;
	case 3:
		*(short *)to = *(short *)from;
		*((char *)to + 2) = *((char *)from + 2);
		return to;
	case 5:
		*(int *)to = *(int *)from;
		*((char *)to + 4) = *((char *)from + 4);
		return to;
	case 6:
		*(int *)to = *(int *)from;
		*((short *)to + 2) = *((short *)from + 2);
		return to;
	case 8:
		*(int *)to = *(int *)from;
		*((int *)to + 1) = *((int *)from + 1);
		return to;
	}

	esi = (long)from;
	edi = (long)to;
	if (n >= 5 * 4) {
		/* large block: use rep prefix */
		int ecx;
		asm volatile("rep ; movsl"
			     : "=&c" (ecx), "=&D" (edi), "=&S" (esi)
			     : "0" (n / 4), "1" (edi), "2" (esi)
			     : "memory"
		);
	} else {
		/* small block: don't clobber ecx + smaller code */
		if (n >= 4 * 4)
			asm volatile("movsl"
				     : "=&D"(edi), "=&S"(esi)
				     : "0"(edi), "1"(esi)
				     : "memory");
		if (n >= 3 * 4)
			asm volatile("movsl"
				     : "=&D"(edi), "=&S"(esi)
				     : "0"(edi), "1"(esi)
				     : "memory");
		if (n >= 2 * 4)
			asm volatile("movsl"
				     : "=&D"(edi), "=&S"(esi)
				     : "0"(edi), "1"(esi)
				     : "memory");
		if (n >= 1 * 4)
			asm volatile("movsl"
				     : "=&D"(edi), "=&S"(esi)
				     : "0"(edi), "1"(esi)
				     : "memory");
	}
	switch (n % 4) {
		/* tail */
	case 0:
		return to;
	case 1:
		asm volatile("movsb"
			     : "=&D"(edi), "=&S"(esi)
			     : "0"(edi), "1"(esi)
			     : "memory");
		return to;
	case 2:
		asm volatile("movsw"
			     : "=&D"(edi), "=&S"(esi)
			     : "0"(edi), "1"(esi)
			     : "memory");
		return to;
	default:
		asm volatile("movsw\n\tmovsb"
			     : "=&D"(edi), "=&S"(esi)
			     : "0"(edi), "1"(esi)
			     : "memory");
		return to;
	}
}
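
/*
 * Example: for a constant n == 7 the copy above compiles down to a
 * single "movsl" (7 >= 1 * 4), then "movsw" + "movsb" for the
 * 7 % 4 == 3 tail bytes; no loop and no %ecx setup are emitted.
 */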

#define __HAVE_ARCH_MEMCPY
extern void *memcpy(void *, const void *, size_t);

#ifndef CONFIG_FORTIFY_SOURCE
#ifdef CONFIG_X86_USE_3DNOW

#include <asm/mmx.h>

/*
 * This CPU favours 3DNow strongly (eg AMD Athlon)
 */

static inline void *__constant_memcpy3d(void *to, const void *from, size_t len)
{
	if (len < 512)
		return __constant_memcpy(to, from, len);
	return _mmx_memcpy(to, from, len);
}

static inline void *__memcpy3d(void *to, const void *from, size_t len)
{
	if (len < 512)
		return __memcpy(to, from, len);
	return _mmx_memcpy(to, from, len);
}
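
/*
 * The 512-byte cut-off is a size heuristic: _mmx_memcpy() brackets its
 * MMX moves with FPU state save/restore, so short copies are cheaper
 * on the plain integer paths above.
 */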

#define memcpy(t, f, n)				\
	(__builtin_constant_p((n))		\
	 ? __constant_memcpy3d((t), (f), (n))	\
	 : __memcpy3d((t), (f), (n)))

#else

/*
 *	No 3D Now!
 */

#ifndef CONFIG_KMEMCHECK

#if (__GNUC__ >= 4)
#define memcpy(t, f, n) __builtin_memcpy(t, f, n)
#else
#define memcpy(t, f, n)				\
	(__builtin_constant_p((n))		\
	 ? __constant_memcpy((t), (f), (n))	\
	 : __memcpy((t), (f), (n)))
#endif
#else
/*
 * kmemcheck becomes very happy if we use the REP instructions
 * unconditionally, because it means that we know both memory operands
 * in advance.
 */
#define memcpy(t, f, n) __memcpy((t), (f), (n))
#endif

#endif
#endif /* !CONFIG_FORTIFY_SOURCE */

#define __HAVE_ARCH_MEMMOVE
void *memmove(void *dest, const void *src, size_t n);

extern int memcmp(const void *, const void *, size_t);
#ifndef CONFIG_FORTIFY_SOURCE
#define memcmp __builtin_memcmp
#endif

#define __HAVE_ARCH_MEMCHR
extern void *memchr(const void *cs, int c, size_t count);

static inline void *__memset_generic(void *s, char c, size_t count)
{
	int d0, d1;
	asm volatile("rep\n\t"
		     "stosb"
		     : "=&c" (d0), "=&D" (d1)
		     : "a" (c), "1" (s), "0" (count)
		     : "memory");
	return s;
}
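
/*
 * Example: __memset_generic(buf, 0, 16) issues "rep ; stosb" with
 * %ecx = 16 and %al = 0, i.e. 16 one-byte stores; it is the fully
 * general fallback that the word-wide variants below try to avoid.
 */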

/* we might want to write optimized versions of these later */
#define __constant_count_memset(s, c, count) __memset_generic((s), (c), (count))

/*
 * memset(x, 0, y) is a reasonably common thing to do, so we want to
 * fill things 32 bits at a time even when we don't know the size of
 * the area at compile-time.
 */
static __always_inline
void *__constant_c_memset(void *s, unsigned long c, size_t count)
{
	int d0, d1;
	asm volatile("rep ; stosl\n\t"
		     "testb $2,%b3\n\t"
		     "je 1f\n\t"
		     "stosw\n"
		     "1:\ttestb $1,%b3\n\t"
		     "je 2f\n\t"
		     "stosb\n"
		     "2:"
		     : "=&c" (d0), "=&D" (d1)
		     : "a" (c), "q" (count), "0" (count/4), "1" ((long)s)
		     : "memory");
	return s;
}
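
/*
 * Note that c is the full 32-bit fill pattern rather than a single
 * byte: the memset() macro below passes 0x01010101UL * (unsigned
 * char)(c), so e.g. c == 0xab arrives here as 0xabababab.
 */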

#define __HAVE_ARCH_STRNLEN
extern size_t strnlen(const char *s, size_t count);

#define __HAVE_ARCH_STRSTR
extern char *strstr(const char *cs, const char *ct);

/*
 * This looks horribly ugly, but the compiler can optimize it totally,
 * as by now we know that both pattern and count are constant.
 */
static __always_inline
void *__constant_c_and_count_memset(void *s, unsigned long pattern,
				    size_t count)
{
	switch (count) {
	case 0:
		return s;
	case 1:
		*(unsigned char *)s = pattern & 0xff;
		return s;
	case 2:
		*(unsigned short *)s = pattern & 0xffff;
		return s;
	case 3:
		*(unsigned short *)s = pattern & 0xffff;
		*((unsigned char *)s + 2) = pattern & 0xff;
		return s;
	case 4:
		*(unsigned long *)s = pattern;
		return s;
	}

#define COMMON(x)							\
	asm volatile("rep ; stosl"					\
		     x							\
		     : "=&c" (d0), "=&D" (d1)				\
		     : "a" (eax), "0" (count/4), "1" ((long)s)		\
		     : "memory")

	{
		int d0, d1;
#if __GNUC__ == 4 && __GNUC_MINOR__ == 0
		/* Workaround for broken gcc 4.0 */
		register unsigned long eax asm("%eax") = pattern;
#else
		unsigned long eax = pattern;
#endif

		switch (count % 4) {
		case 0:
			COMMON("");
			return s;
		case 1:
			COMMON("\n\tstosb");
			return s;
		case 2:
			COMMON("\n\tstosw");
			return s;
		default:
			COMMON("\n\tstosw\n\tstosb");
			return s;
		}
	}

#undef COMMON
}
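
/*
 * Example: for pattern == 0x01010101 and a constant count == 6, the
 * switch above selects COMMON("\n\tstosw"): "rep ; stosl" stores
 * 6 / 4 = 1 dword and the trailing "stosw" stores the remaining
 * 2 bytes.
 */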

#define __constant_c_x_memset(s, c, count)			\
	(__builtin_constant_p(count)				\
	 ? __constant_c_and_count_memset((s), (c), (count))	\
	 : __constant_c_memset((s), (c), (count)))

#define __memset(s, c, count)				\
	(__builtin_constant_p(count)			\
	 ? __constant_count_memset((s), (c), (count))	\
	 : __memset_generic((s), (c), (count)))

#define __HAVE_ARCH_MEMSET
extern void *memset(void *, int, size_t);
#ifndef CONFIG_FORTIFY_SOURCE
#if (__GNUC__ >= 4)
#define memset(s, c, count) __builtin_memset(s, c, count)
#else
#define memset(s, c, count)						\
	(__builtin_constant_p(c)					\
	 ? __constant_c_x_memset((s), (0x01010101UL * (unsigned char)(c)), \
				 (count))				\
	 : __memset((s), (c), (count)))
#endif
#endif
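
/*
 * Example (pre-gcc-4 path): memset(p, 0xab, n) with a constant fill
 * byte expands through __constant_c_x_memset() with the 32-bit pattern
 * 0x01010101UL * 0xab == 0xabababab; gcc >= 4 simply gets
 * __builtin_memset().
 */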

#define __HAVE_ARCH_MEMSET16
static inline void *memset16(uint16_t *s, uint16_t v, size_t n)
{
	int d0, d1;
	asm volatile("rep\n\t"
		     "stosw"
		     : "=&c" (d0), "=&D" (d1)
		     : "a" (v), "1" (s), "0" (n)
		     : "memory");
	return s;
}
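
/*
 * Note that n counts 16-bit words, not bytes: memset16(buf, 0x7e0, 64)
 * runs "rep ; stosw" 64 times and fills 128 bytes.
 */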

#define __HAVE_ARCH_MEMSET32
static inline void *memset32(uint32_t *s, uint32_t v, size_t n)
{
	int d0, d1;
	asm volatile("rep\n\t"
		     "stosl"
		     : "=&c" (d0), "=&D" (d1)
		     : "a" (v), "1" (s), "0" (n)
		     : "memory");
	return s;
}
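
/*
 * Likewise n here counts 32-bit dwords: "rep ; stosl" executes n times
 * and touches 4 * n bytes starting at s.
 */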

/*
 * find the first occurrence of byte 'c', or 1 past the area if none
 */
#define __HAVE_ARCH_MEMSCAN
extern void *memscan(void *addr, int c, size_t size);

#endif /* __KERNEL__ */

#endif /* _ASM_X86_STRING_32_H */