/* SPDX-License-Identifier: GPL-2.0 */
#ifndef _ASM_X86_STRING_32_H
#define _ASM_X86_STRING_32_H

#ifdef __KERNEL__

/* Let gcc decide whether to inline or use the out of line functions? */

#define __HAVE_ARCH_STRCPY
extern char *strcpy(char *dest, const char *src);

#define __HAVE_ARCH_STRNCPY
extern char *strncpy(char *dest, const char *src, size_t count);

#define __HAVE_ARCH_STRCAT
extern char *strcat(char *dest, const char *src);

#define __HAVE_ARCH_STRNCAT
extern char *strncat(char *dest, const char *src, size_t count);

#define __HAVE_ARCH_STRCMP
extern int strcmp(const char *cs, const char *ct);

#define __HAVE_ARCH_STRNCMP
extern int strncmp(const char *cs, const char *ct, size_t count);

#define __HAVE_ARCH_STRCHR
extern char *strchr(const char *s, int c);

#define __HAVE_ARCH_STRLEN
extern size_t strlen(const char *s);
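
/* The out of line versions of the above are in arch/x86/lib/string_32.c. */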

/*
 * Inline memcpy for non-constant sizes: copy n/4 dwords with
 * "rep ; movsl", then the remaining n & 3 bytes with "rep ; movsb".
 */
static __always_inline void *__memcpy(void *to, const void *from, size_t n)
{
	int d0, d1, d2;
	asm volatile("rep ; movsl\n\t"
		     "movl %4,%%ecx\n\t"
		     "andl $3,%%ecx\n\t"
		     "jz 1f\n\t"
		     "rep ; movsb\n\t"
		     "1:"
		     : "=&c" (d0), "=&D" (d1), "=&S" (d2)
		     : "0" (n / 4), "g" (n), "1" ((long)to), "2" ((long)from)
		     : "memory");
	return to;
}
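
/*
 * Worked example: with n == 7, "rep ; movsl" runs with ecx == n/4 == 1
 * (one dword copied), then ecx is reloaded with n and masked down to
 * 7 & 3 == 3, so "rep ; movsb" copies the three remaining bytes.
 */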

/*
 * This looks ugly, but the compiler can optimize it totally,
 * as the count is constant.
 */
static __always_inline void *__constant_memcpy(void *to, const void *from,
					       size_t n)
{
	long esi, edi;
	if (!n)
		return to;

	switch (n) {
	case 1:
		*(char *)to = *(char *)from;
		return to;
	case 2:
		*(short *)to = *(short *)from;
		return to;
	case 4:
		*(int *)to = *(int *)from;
		return to;
	case 3:
		*(short *)to = *(short *)from;
		*((char *)to + 2) = *((char *)from + 2);
		return to;
	case 5:
		*(int *)to = *(int *)from;
		*((char *)to + 4) = *((char *)from + 4);
		return to;
	case 6:
		*(int *)to = *(int *)from;
		*((short *)to + 2) = *((short *)from + 2);
		return to;
	case 8:
		*(int *)to = *(int *)from;
		*((int *)to + 1) = *((int *)from + 1);
		return to;
	}
0086
0087 esi = (long)from;
0088 edi = (long)to;
0089 if (n >= 5 * 4) {
0090
0091 int ecx;
0092 asm volatile("rep ; movsl"
0093 : "=&c" (ecx), "=&D" (edi), "=&S" (esi)
0094 : "0" (n / 4), "1" (edi), "2" (esi)
0095 : "memory"
0096 );
0097 } else {
0098
0099 if (n >= 4 * 4)
0100 asm volatile("movsl"
0101 : "=&D"(edi), "=&S"(esi)
0102 : "0"(edi), "1"(esi)
0103 : "memory");
0104 if (n >= 3 * 4)
0105 asm volatile("movsl"
0106 : "=&D"(edi), "=&S"(esi)
0107 : "0"(edi), "1"(esi)
0108 : "memory");
0109 if (n >= 2 * 4)
0110 asm volatile("movsl"
0111 : "=&D"(edi), "=&S"(esi)
0112 : "0"(edi), "1"(esi)
0113 : "memory");
0114 if (n >= 1 * 4)
0115 asm volatile("movsl"
0116 : "=&D"(edi), "=&S"(esi)
0117 : "0"(edi), "1"(esi)
0118 : "memory");
0119 }
	switch (n % 4) {
		/* tail */
	case 0:
		return to;
	case 1:
		asm volatile("movsb"
			     : "=&D"(edi), "=&S"(esi)
			     : "0"(edi), "1"(esi)
			     : "memory");
		return to;
	case 2:
		asm volatile("movsw"
			     : "=&D"(edi), "=&S"(esi)
			     : "0"(edi), "1"(esi)
			     : "memory");
		return to;
	default:
		asm volatile("movsw\n\tmovsb"
			     : "=&D"(edi), "=&S"(esi)
			     : "0"(edi), "1"(esi)
			     : "memory");
		return to;
	}
}

#define __HAVE_ARCH_MEMCPY
extern void *memcpy(void *, const void *, size_t);

#ifndef CONFIG_FORTIFY_SOURCE

#define memcpy(t, f, n) __builtin_memcpy(t, f, n)

#endif /* !CONFIG_FORTIFY_SOURCE */

#define __HAVE_ARCH_MEMMOVE
void *memmove(void *dest, const void *src, size_t n);

extern int memcmp(const void *, const void *, size_t);
#ifndef CONFIG_FORTIFY_SOURCE
#define memcmp __builtin_memcmp
#endif

#define __HAVE_ARCH_MEMCHR
extern void *memchr(const void *cs, int c, size_t count);

/* Fill count bytes at s with the byte c, using "rep ; stosb". */
static inline void *__memset_generic(void *s, char c, size_t count)
{
	int d0, d1;
	asm volatile("rep\n\t"
		     "stosb"
		     : "=&c" (d0), "=&D" (d1)
		     : "a" (c), "1" (s), "0" (count)
		     : "memory");
	return s;
}
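
/*
 * Note: __memset_generic() is the fallback for non-constant lengths;
 * the __memset() wrapper below selects it whenever gcc cannot prove
 * the length is a compile-time constant.
 */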

/* we might want to write optimized versions of these later */
#define __constant_count_memset(s, c, count) __memset_generic((s), (c), (count))

/* Added by Gertjan van Wingerde to make minix and sysv module work */
#define __HAVE_ARCH_STRNLEN
extern size_t strnlen(const char *s, size_t count);
/* end of additional stuff */

#define __HAVE_ARCH_STRSTR
extern char *strstr(const char *cs, const char *ct);

#define __memset(s, c, count)				\
	(__builtin_constant_p(count)			\
	 ? __constant_count_memset((s), (c), (count))	\
	 : __memset_generic((s), (c), (count)))

#define __HAVE_ARCH_MEMSET
extern void *memset(void *, int, size_t);
#ifndef CONFIG_FORTIFY_SOURCE
#define memset(s, c, count) __builtin_memset(s, c, count)
#endif /* !CONFIG_FORTIFY_SOURCE */
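
/*
 * Illustrative expansion (assuming CONFIG_FORTIFY_SOURCE=n): a call such
 * as memset(buf, 0, 64) becomes __builtin_memset(buf, 0, 64), which gcc
 * may expand inline, while __memset() always uses the "rep ; stosb"
 * helper above.
 */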

#define __HAVE_ARCH_MEMSET16
static inline void *memset16(uint16_t *s, uint16_t v, size_t n)
{
	int d0, d1;
	asm volatile("rep\n\t"
		     "stosw"
		     : "=&c" (d0), "=&D" (d1)
		     : "a" (v), "1" (s), "0" (n)
		     : "memory");
	return s;
}
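
/*
 * Illustrative use: memset16() fills n 16-bit words with v, e.g.
 * painting a row of an RGB565 framebuffer with a single colour.
 */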

#define __HAVE_ARCH_MEMSET32
static inline void *memset32(uint32_t *s, uint32_t v, size_t n)
{
	int d0, d1;
	asm volatile("rep\n\t"
		     "stosl"
		     : "=&c" (d0), "=&D" (d1)
		     : "a" (v), "1" (s), "0" (n)
		     : "memory");
	return s;
}
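
/*
 * Illustrative use: memset32() is the 32-bit analogue, e.g. filling
 * n XRGB8888 pixels with one value.
 */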

/*
 * find the first occurrence of byte 'c', or 1 past the area if none
 */
#define __HAVE_ARCH_MEMSCAN
extern void *memscan(void *addr, int c, size_t size);
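
/*
 * Example: memscan(buf, 0, len) returns the address of the first zero
 * byte in buf, or buf + len if the area contains none.
 */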

#endif /* __KERNEL__ */

#endif /* _ASM_X86_STRING_32_H */