linux/arch/x86/include/asm/string_32.h
/* SPDX-License-Identifier: GPL-2.0 */
#ifndef _ASM_X86_STRING_32_H
#define _ASM_X86_STRING_32_H

#ifdef __KERNEL__

/* Let gcc decide whether to inline or use the out-of-line functions */

#define __HAVE_ARCH_STRCPY
extern char *strcpy(char *dest, const char *src);

#define __HAVE_ARCH_STRNCPY
extern char *strncpy(char *dest, const char *src, size_t count);

#define __HAVE_ARCH_STRCAT
extern char *strcat(char *dest, const char *src);

#define __HAVE_ARCH_STRNCAT
extern char *strncat(char *dest, const char *src, size_t count);

#define __HAVE_ARCH_STRCMP
extern int strcmp(const char *cs, const char *ct);

#define __HAVE_ARCH_STRNCMP
extern int strncmp(const char *cs, const char *ct, size_t count);

#define __HAVE_ARCH_STRCHR
extern char *strchr(const char *s, int c);

#define __HAVE_ARCH_STRLEN
extern size_t strlen(const char *s);

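/*
 * Copy 'n' bytes from 'from' to 'to': "rep ; movsl" moves n/4 dwords,
 * then the trailing n%4 bytes are copied with "rep ; movsb".  The dummy
 * outputs d0-d2 tell gcc that ecx/edi/esi are clobbered.
 */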
static __always_inline void *__memcpy(void *to, const void *from, size_t n)
{
        int d0, d1, d2;
        asm volatile("rep ; movsl\n\t"
                     "movl %4,%%ecx\n\t"
                     "andl $3,%%ecx\n\t"
                     "jz 1f\n\t"
                     "rep ; movsb\n\t"
                     "1:"
                     : "=&c" (d0), "=&D" (d1), "=&S" (d2)
                     : "0" (n / 4), "g" (n), "1" ((long)to), "2" ((long)from)
                     : "memory");
        return to;
}

/*
 * This looks ugly, but the compiler can optimize it totally,
 * as the count is constant.
 */
static __always_inline void *__constant_memcpy(void *to, const void *from,
                                               size_t n)
{
        long esi, edi;
        if (!n)
                return to;

        switch (n) {
        case 1:
                *(char *)to = *(char *)from;
                return to;
        case 2:
                *(short *)to = *(short *)from;
                return to;
        case 4:
                *(int *)to = *(int *)from;
                return to;
        case 3:
                *(short *)to = *(short *)from;
                *((char *)to + 2) = *((char *)from + 2);
                return to;
        case 5:
                *(int *)to = *(int *)from;
                *((char *)to + 4) = *((char *)from + 4);
                return to;
        case 6:
                *(int *)to = *(int *)from;
                *((short *)to + 2) = *((short *)from + 2);
                return to;
        case 8:
                *(int *)to = *(int *)from;
                *((int *)to + 1) = *((int *)from + 1);
                return to;
        }

        esi = (long)from;
        edi = (long)to;
        if (n >= 5 * 4) {
                /* large block: use rep prefix */
                int ecx;
                asm volatile("rep ; movsl"
                             : "=&c" (ecx), "=&D" (edi), "=&S" (esi)
                             : "0" (n / 4), "1" (edi), "2" (esi)
                             : "memory"
                );
        } else {
                /* small block: don't clobber ecx + smaller code */
                if (n >= 4 * 4)
                        asm volatile("movsl"
                                     : "=&D"(edi), "=&S"(esi)
                                     : "0"(edi), "1"(esi)
                                     : "memory");
                if (n >= 3 * 4)
                        asm volatile("movsl"
                                     : "=&D"(edi), "=&S"(esi)
                                     : "0"(edi), "1"(esi)
                                     : "memory");
                if (n >= 2 * 4)
                        asm volatile("movsl"
                                     : "=&D"(edi), "=&S"(esi)
                                     : "0"(edi), "1"(esi)
                                     : "memory");
                if (n >= 1 * 4)
                        asm volatile("movsl"
                                     : "=&D"(edi), "=&S"(esi)
                                     : "0"(edi), "1"(esi)
                                     : "memory");
        }
        switch (n % 4) {
                /* tail */
        case 0:
                return to;
        case 1:
                asm volatile("movsb"
                             : "=&D"(edi), "=&S"(esi)
                             : "0"(edi), "1"(esi)
                             : "memory");
                return to;
        case 2:
                asm volatile("movsw"
                             : "=&D"(edi), "=&S"(esi)
                             : "0"(edi), "1"(esi)
                             : "memory");
                return to;
        default:
                asm volatile("movsw\n\tmovsb"
                             : "=&D"(edi), "=&S"(esi)
                             : "0"(edi), "1"(esi)
                             : "memory");
                return to;
        }
}
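
/*
 * Illustrative sketch (not part of this header): because the count is a
 * compile-time constant, gcc folds the dispatch above away entirely:
 *
 *      struct point { int x, y; } a, b;
 *      __constant_memcpy(&a, &b, 8);   compiles to two 32-bit moves (case 8)
 *      __constant_memcpy(&a, &b, 24);  24 >= 5*4: one "rep ; movsl", no tail
 */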

#define __HAVE_ARCH_MEMCPY
extern void *memcpy(void *, const void *, size_t);

#ifndef CONFIG_FORTIFY_SOURCE
#ifdef CONFIG_X86_USE_3DNOW

#include <asm/mmx.h>

/*
 *      This CPU favours 3DNow! strongly (e.g. AMD Athlon)
 */

static inline void *__constant_memcpy3d(void *to, const void *from, size_t len)
{
        if (len < 512)
                return __constant_memcpy(to, from, len);
        return _mmx_memcpy(to, from, len);
}

static inline void *__memcpy3d(void *to, const void *from, size_t len)
{
        if (len < 512)
                return __memcpy(to, from, len);
        return _mmx_memcpy(to, from, len);
}

#define memcpy(t, f, n)                         \
        (__builtin_constant_p((n))              \
         ? __constant_memcpy3d((t), (f), (n))   \
         : __memcpy3d((t), (f), (n)))
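
/*
 * Illustrative expansion (hypothetical caller, not part of this header):
 *
 *      memcpy(dst, src, 16);   constant n: __constant_memcpy3d, inline copy
 *      memcpy(dst, src, len);  variable n: __memcpy3d; MMX path once len >= 512
 */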

#else

/*
 *      No 3DNow!
 */

#define memcpy(t, f, n) __builtin_memcpy(t, f, n)

#endif
#endif /* !CONFIG_FORTIFY_SOURCE */

#define __HAVE_ARCH_MEMMOVE
void *memmove(void *dest, const void *src, size_t n);

extern int memcmp(const void *, const void *, size_t);
#ifndef CONFIG_FORTIFY_SOURCE
#define memcmp __builtin_memcmp
#endif

#define __HAVE_ARCH_MEMCHR
extern void *memchr(const void *cs, int c, size_t count);

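/*
 * Fill 'count' bytes at 's' with the byte 'c' using "rep ; stosb".  The
 * dummy outputs d0/d1 tell gcc that ecx and edi are clobbered.
 */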
static inline void *__memset_generic(void *s, char c, size_t count)
{
        int d0, d1;
        asm volatile("rep\n\t"
                     "stosb"
                     : "=&c" (d0), "=&D" (d1)
                     : "a" (c), "1" (s), "0" (count)
                     : "memory");
        return s;
}

/* we might want to write optimized versions of these later */
#define __constant_count_memset(s, c, count) __memset_generic((s), (c), (count))

/* Added by Gertjan van Wingerde to make the minix and sysv modules work */
#define __HAVE_ARCH_STRNLEN
extern size_t strnlen(const char *s, size_t count);
/* end of additional stuff */

#define __HAVE_ARCH_STRSTR
extern char *strstr(const char *cs, const char *ct);

#define __memset(s, c, count)                           \
        (__builtin_constant_p(count)                    \
         ? __constant_count_memset((s), (c), (count))   \
         : __memset_generic((s), (c), (count)))

#define __HAVE_ARCH_MEMSET
extern void *memset(void *, int, size_t);
#ifndef CONFIG_FORTIFY_SOURCE
#define memset(s, c, count) __builtin_memset(s, c, count)
#endif /* !CONFIG_FORTIFY_SOURCE */

#define __HAVE_ARCH_MEMSET16
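/* Fill 'n' 16-bit words at 's' with the value 'v' using "rep ; stosw". */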
static inline void *memset16(uint16_t *s, uint16_t v, size_t n)
{
        int d0, d1;
        asm volatile("rep\n\t"
                     "stosw"
                     : "=&c" (d0), "=&D" (d1)
                     : "a" (v), "1" (s), "0" (n)
                     : "memory");
        return s;
}

#define __HAVE_ARCH_MEMSET32
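/* Fill 'n' 32-bit words at 's' with the value 'v' using "rep ; stosl". */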
static inline void *memset32(uint32_t *s, uint32_t v, size_t n)
{
        int d0, d1;
        asm volatile("rep\n\t"
                     "stosl"
                     : "=&c" (d0), "=&D" (d1)
                     : "a" (v), "1" (s), "0" (n)
                     : "memory");
        return s;
}
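
/*
 * Usage sketch (hypothetical caller, not part of this header): these help
 * where plain memset() cannot express a multi-byte pattern, e.g. filling
 * one scanline of a 16bpp framebuffer with a single pixel value:
 *
 *      u16 *line = ...;
 *      memset16(line, rgb565_pixel, width);    'width' words, not bytes
 */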

/*
 * find the first occurrence of byte 'c', or 1 past the area if none
 */
#define __HAVE_ARCH_MEMSCAN
extern void *memscan(void *addr, int c, size_t size);
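
/*
 * Usage sketch (hypothetical caller, not part of this header):
 *
 *      void *p = memscan(buf, 0, len);
 *      if (p == buf + len)
 *              no zero byte anywhere in buf
 */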

#endif /* __KERNEL__ */

#endif /* _ASM_X86_STRING_32_H */