/* SPDX-License-Identifier: GPL-2.0 */
#ifndef _ASM_X86_STRING_32_H
#define _ASM_X86_STRING_32_H

#ifdef __KERNEL__

/* Let gcc decide whether to inline or use the out of line functions */

#define __HAVE_ARCH_STRCPY
extern char *strcpy(char *dest, const char *src);

#define __HAVE_ARCH_STRNCPY
extern char *strncpy(char *dest, const char *src, size_t count);

#define __HAVE_ARCH_STRCAT
extern char *strcat(char *dest, const char *src);

#define __HAVE_ARCH_STRNCAT
extern char *strncat(char *dest, const char *src, size_t count);

#define __HAVE_ARCH_STRCMP
extern int strcmp(const char *cs, const char *ct);

#define __HAVE_ARCH_STRNCMP
extern int strncmp(const char *cs, const char *ct, size_t count);

#define __HAVE_ARCH_STRCHR
extern char *strchr(const char *s, int c);

#define __HAVE_ARCH_STRLEN
extern size_t strlen(const char *s);
static __always_inline void *__memcpy(void *to, const void *from, size_t n)
{
	int d0, d1, d2;
	asm volatile("rep movsl\n\t"
		     "movl %4,%%ecx\n\t"
		     "andl $3,%%ecx\n\t"
		     "jz 1f\n\t"
		     "rep movsb\n\t"
		     "1:"
		     : "=&c" (d0), "=&D" (d1), "=&S" (d2)
		     : "0" (n / 4), "g" (n), "1" ((long)to), "2" ((long)from)
		     : "memory");
	return to;
}
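/*
 * Roughly what the asm above does, written as plain C (an illustrative
 * sketch, not the generated code): copy n/4 dwords with "rep movsl",
 * then the n%4 tail bytes with "rep movsb".
 *
 *	void *__memcpy_sketch(void *to, const void *from, size_t n)
 *	{
 *		unsigned int *d = to;
 *		const unsigned int *s = from;
 *		size_t i;
 *
 *		for (i = 0; i < n / 4; i++)	// rep movsl
 *			*d++ = *s++;
 *		for (i = 0; i < n % 4; i++)	// rep movsb
 *			((char *)d)[i] = ((const char *)s)[i];
 *		return to;
 *	}
 */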

/*
 * This looks ugly, but the compiler can optimize it totally,
 * as the count is constant.
 */
static __always_inline void *__constant_memcpy(void *to, const void *from,
					       size_t n)
{
	long esi, edi;
	if (!n)
		return to;

	switch (n) {
	case 1:
		*(char *)to = *(char *)from;
		return to;
	case 2:
		*(short *)to = *(short *)from;
		return to;
	case 4:
		*(int *)to = *(int *)from;
		return to;
	case 3:
		*(short *)to = *(short *)from;
		*((char *)to + 2) = *((char *)from + 2);
		return to;
	case 5:
		*(int *)to = *(int *)from;
		*((char *)to + 4) = *((char *)from + 4);
		return to;
	case 6:
		*(int *)to = *(int *)from;
		*((short *)to + 2) = *((short *)from + 2);
		return to;
	case 8:
		*(int *)to = *(int *)from;
		*((int *)to + 1) = *((int *)from + 1);
		return to;
	}

	esi = (long)from;
	edi = (long)to;
	if (n >= 5 * 4) {
		/* large block: use rep prefix */
		int ecx;
		asm volatile("rep movsl"
			     : "=&c" (ecx), "=&D" (edi), "=&S" (esi)
			     : "0" (n / 4), "1" (edi), "2" (esi)
			     : "memory"
		);
	} else {
		/* small block: don't clobber ecx + smaller code */
		if (n >= 4 * 4)
			asm volatile("movsl"
				     : "=&D"(edi), "=&S"(esi)
				     : "0"(edi), "1"(esi)
				     : "memory");
		if (n >= 3 * 4)
			asm volatile("movsl"
				     : "=&D"(edi), "=&S"(esi)
				     : "0"(edi), "1"(esi)
				     : "memory");
		if (n >= 2 * 4)
			asm volatile("movsl"
				     : "=&D"(edi), "=&S"(esi)
				     : "0"(edi), "1"(esi)
				     : "memory");
		if (n >= 1 * 4)
			asm volatile("movsl"
				     : "=&D"(edi), "=&S"(esi)
				     : "0"(edi), "1"(esi)
				     : "memory");
	}
	switch (n % 4) {
		/* tail */
	case 0:
		return to;
	case 1:
		asm volatile("movsb"
			     : "=&D"(edi), "=&S"(esi)
			     : "0"(edi), "1"(esi)
			     : "memory");
		return to;
	case 2:
		asm volatile("movsw"
			     : "=&D"(edi), "=&S"(esi)
			     : "0"(edi), "1"(esi)
			     : "memory");
		return to;
	default:
		asm volatile("movsw\n\tmovsb"
			     : "=&D"(edi), "=&S"(esi)
			     : "0"(edi), "1"(esi)
			     : "memory");
		return to;
	}
}
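/*
 * Because n is a compile-time constant, every branch above folds away.
 * For example (illustrative, assuming gcc constant-folds as usual):
 *
 *	__constant_memcpy(dst, src, 6);
 *
 * reduces to just the "case 6" pair of stores, i.e. one 32-bit and one
 * 16-bit move, with no loop and no rep prefix at all; a 22-byte copy
 * instead takes the n >= 5*4 path: "rep movsl" five times via ECX,
 * then a movsw for the 2-byte tail.
 */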

#define __HAVE_ARCH_MEMCPY
extern void *memcpy(void *, const void *, size_t);

#ifndef CONFIG_FORTIFY_SOURCE

#define memcpy(t, f, n) __builtin_memcpy(t, f, n)

#endif /* !CONFIG_FORTIFY_SOURCE */
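/*
 * Without fortification, memcpy() maps straight onto __builtin_memcpy,
 * so gcc may inline small constant-size copies and fall back to the
 * out-of-line memcpy() above only for the general case. In a caller
 * (sketch):
 *
 *	struct foo a, b;
 *	memcpy(&a, &b, sizeof(a));	// likely inlined by gcc
 */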

#define __HAVE_ARCH_MEMMOVE
void *memmove(void *dest, const void *src, size_t n);

extern int memcmp(const void *, const void *, size_t);
#ifndef CONFIG_FORTIFY_SOURCE
#define memcmp __builtin_memcmp
#endif

#define __HAVE_ARCH_MEMCHR
extern void *memchr(const void *cs, int c, size_t count);

static inline void *__memset_generic(void *s, char c, size_t count)
{
	int d0, d1;
	asm volatile("rep stosb"
		     : "=&c" (d0), "=&D" (d1)
		     : "a" (c), "1" (s), "0" (count)
		     : "memory");
	return s;
}
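/*
 * "rep stosb" stores AL to [EDI], ECX times; the plain-C equivalent is
 * simply (illustrative sketch):
 *
 *	char *p = s;
 *	while (count--)
 *		*p++ = c;
 */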

/* we might want to write optimized versions of these later */
#define __constant_count_memset(s, c, count) __memset_generic((s), (c), (count))

/* Added by Gertjan van Wingerde to make minix and sysv module work */
#define __HAVE_ARCH_STRNLEN
extern size_t strnlen(const char *s, size_t count);
/* end of additional stuff */

#define __HAVE_ARCH_STRSTR
extern char *strstr(const char *cs, const char *ct);

#define __memset(s, c, count)				\
	(__builtin_constant_p(count)			\
	 ? __constant_count_memset((s), (c), (count))	\
	 : __memset_generic((s), (c), (count)))
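/*
 * __builtin_constant_p(count) is true only when gcc can prove count is
 * a compile-time constant, so (illustrative):
 *
 *	__memset(buf, 0, 64);	// constant count -> __constant_count_memset
 *	__memset(buf, 0, len);	// runtime count  -> __memset_generic
 *
 * Today both arms expand to __memset_generic(); the split only matters
 * if optimized constant-count versions get written, per the comment above.
 */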

#define __HAVE_ARCH_MEMSET
extern void *memset(void *, int, size_t);
#ifndef CONFIG_FORTIFY_SOURCE
#define memset(s, c, count) __builtin_memset(s, c, count)
#endif /* !CONFIG_FORTIFY_SOURCE */
#define __HAVE_ARCH_MEMSET16
static inline void *memset16(uint16_t *s, uint16_t v, size_t n)
{
	int d0, d1;
	asm volatile("rep stosw"
		     : "=&c" (d0), "=&D" (d1)
		     : "a" (v), "1" (s), "0" (n)
		     : "memory");
	return s;
}
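/*
 * Same pattern as __memset_generic(), but "rep stosw" stores AX, so n
 * counts 16-bit elements rather than bytes. A hypothetical use, filling
 * a line of RGB565 pixels with one value (sketch):
 *
 *	memset16(line, 0xf800, width);	// width pixels of pure red
 */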

#define __HAVE_ARCH_MEMSET32
static inline void *memset32(uint32_t *s, uint32_t v, size_t n)
{
	int d0, d1;
	asm volatile("rep stosl"
		     : "=&c" (d0), "=&D" (d1)
		     : "a" (v), "1" (s), "0" (n)
		     : "memory");
	return s;
}
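/*
 * The 32-bit counterpart: "rep stosl" stores EAX, and n counts dwords.
 * A hypothetical XRGB8888 framebuffer fill (sketch):
 *
 *	memset32(fb, 0x00ff0000, width);	// one scanline of red
 */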

/*
 * find the first occurrence of byte 'c', or 1 past the area if none
 */
#define __HAVE_ARCH_MEMSCAN
extern void *memscan(void *addr, int c, size_t size);
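/*
 * Unlike memchr(), memscan() never returns NULL, so a hypothetical
 * caller detects "not found" by comparing against the end of the area
 * (sketch):
 *
 *	char buf[LEN];
 *	char *p = memscan(buf, 0, sizeof(buf));
 *	if (p == buf + sizeof(buf))
 *		;	// no NUL byte anywhere in buf
 */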

#endif /* __KERNEL__ */

#endif /* _ASM_X86_STRING_32_H */