/* SPDX-License-Identifier: GPL-2.0 */
#ifndef _ASM_X86_STRING_32_H
#define _ASM_X86_STRING_32_H

#ifdef __KERNEL__

/* Let gcc decide whether to inline or use the out of line functions */

#define __HAVE_ARCH_STRCPY
extern char *strcpy(char *dest, const char *src);

#define __HAVE_ARCH_STRNCPY
extern char *strncpy(char *dest, const char *src, size_t count);

#define __HAVE_ARCH_STRCAT
extern char *strcat(char *dest, const char *src);

#define __HAVE_ARCH_STRNCAT
extern char *strncat(char *dest, const char *src, size_t count);

#define __HAVE_ARCH_STRCMP
extern int strcmp(const char *cs, const char *ct);

#define __HAVE_ARCH_STRNCMP
extern int strncmp(const char *cs, const char *ct, size_t count);

#define __HAVE_ARCH_STRCHR
extern char *strchr(const char *s, int c);

#define __HAVE_ARCH_STRLEN
extern size_t strlen(const char *s);

static __always_inline void *__memcpy(void *to, const void *from, size_t n)
{
	int d0, d1, d2;
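	/*
	 * Copy n/4 dwords with "rep movsl", then reload the byte count
	 * into ecx, mask it down to n%4 and copy the remaining bytes
	 * with "rep movsb".  d0-d2 only tell gcc that ecx/edi/esi are
	 * clobbered by the asm.
	 */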
	asm volatile("rep ; movsl\n\t"
		     "movl %4,%%ecx\n\t"
		     "andl $3,%%ecx\n\t"
		     "jz 1f\n\t"
		     "rep ; movsb\n\t"
		     "1:"
		     : "=&c" (d0), "=&D" (d1), "=&S" (d2)
		     : "0" (n / 4), "g" (n), "1" ((long)to), "2" ((long)from)
		     : "memory");
	return to;
}

/*
 * This looks ugly, but the compiler can optimize it totally,
 * as the count is constant.
 */
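/*
 * Strategy: constant counts of 1-8 bytes become one or two direct
 * loads/stores, counts from 4 to 19 bytes get up to four unrolled
 * "movsl" instructions, 20 bytes and up use one "rep movsl", and the
 * count%4 tail is finished with "movsw"/"movsb".  Every branch below
 * folds away at compile time.
 */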
static __always_inline void *__constant_memcpy(void *to, const void *from,
					       size_t n)
{
	long esi, edi;
	if (!n)
		return to;

	switch (n) {
	case 1:
		*(char *)to = *(char *)from;
		return to;
	case 2:
		*(short *)to = *(short *)from;
		return to;
	case 4:
		*(int *)to = *(int *)from;
		return to;
	case 3:
		*(short *)to = *(short *)from;
		*((char *)to + 2) = *((char *)from + 2);
		return to;
	case 5:
		*(int *)to = *(int *)from;
		*((char *)to + 4) = *((char *)from + 4);
		return to;
	case 6:
		*(int *)to = *(int *)from;
		*((short *)to + 2) = *((short *)from + 2);
		return to;
	case 8:
		*(int *)to = *(int *)from;
		*((int *)to + 1) = *((int *)from + 1);
		return to;
	}

	esi = (long)from;
	edi = (long)to;
	if (n >= 5 * 4) {
		/* large block: use rep prefix */
		int ecx;
		asm volatile("rep ; movsl"
			     : "=&c" (ecx), "=&D" (edi), "=&S" (esi)
			     : "0" (n / 4), "1" (edi), "2" (esi)
			     : "memory"
		);
	} else {
		/* small block: don't clobber ecx + smaller code */
		if (n >= 4 * 4)
			asm volatile("movsl"
				     : "=&D"(edi), "=&S"(esi)
				     : "0"(edi), "1"(esi)
				     : "memory");
		if (n >= 3 * 4)
			asm volatile("movsl"
				     : "=&D"(edi), "=&S"(esi)
				     : "0"(edi), "1"(esi)
				     : "memory");
		if (n >= 2 * 4)
			asm volatile("movsl"
				     : "=&D"(edi), "=&S"(esi)
				     : "0"(edi), "1"(esi)
				     : "memory");
		if (n >= 1 * 4)
			asm volatile("movsl"
				     : "=&D"(edi), "=&S"(esi)
				     : "0"(edi), "1"(esi)
				     : "memory");
	}
	switch (n % 4) {
		/* tail */
	case 0:
		return to;
	case 1:
		asm volatile("movsb"
			     : "=&D"(edi), "=&S"(esi)
			     : "0"(edi), "1"(esi)
			     : "memory");
		return to;
	case 2:
		asm volatile("movsw"
			     : "=&D"(edi), "=&S"(esi)
			     : "0"(edi), "1"(esi)
			     : "memory");
		return to;
	default:
		asm volatile("movsw\n\tmovsb"
			     : "=&D"(edi), "=&S"(esi)
			     : "0"(edi), "1"(esi)
			     : "memory");
		return to;
	}
}

#define __HAVE_ARCH_MEMCPY
extern void *memcpy(void *, const void *, size_t);

#ifndef CONFIG_FORTIFY_SOURCE
#ifdef CONFIG_X86_USE_3DNOW

#include <asm/mmx.h>

/*
 *	This CPU favours 3DNow strongly (e.g. AMD Athlon)
 */

static inline void *__constant_memcpy3d(void *to, const void *from, size_t len)
{
	if (len < 512)
		return __constant_memcpy(to, from, len);
	return _mmx_memcpy(to, from, len);
}

static inline void *__memcpy3d(void *to, const void *from, size_t len)
{
	if (len < 512)
		return __memcpy(to, from, len);
	return _mmx_memcpy(to, from, len);
}

#define memcpy(t, f, n)				\
	(__builtin_constant_p((n))		\
	 ? __constant_memcpy3d((t), (f), (n))	\
	 : __memcpy3d((t), (f), (n)))
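
/*
 * Copies shorter than 512 bytes fall back to the plain string-op
 * versions above: _mmx_memcpy() has to save and restore the FPU/MMX
 * state, which costs more than it saves on small blocks.
 */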
175 
176 #else
177 
178 /*
179  *	No 3D Now!
180  */
181 
182 #define memcpy(t, f, n) __builtin_memcpy(t, f, n)
183 
184 #endif
185 #endif /* !CONFIG_FORTIFY_SOURCE */
186 
187 #define __HAVE_ARCH_MEMMOVE
188 void *memmove(void *dest, const void *src, size_t n);
189 
190 extern int memcmp(const void *, const void *, size_t);
191 #ifndef CONFIG_FORTIFY_SOURCE
192 #define memcmp __builtin_memcmp
193 #endif
194 
195 #define __HAVE_ARCH_MEMCHR
196 extern void *memchr(const void *cs, int c, size_t count);
197 
static inline void *__memset_generic(void *s, char c, size_t count)
{
	int d0, d1;
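	/*
	 * "rep stosb" stores al (the fill byte) into count consecutive
	 * bytes at edi; d0/d1 just mark ecx/edi as clobbered.
	 */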
	asm volatile("rep\n\t"
		     "stosb"
		     : "=&c" (d0), "=&D" (d1)
		     : "a" (c), "1" (s), "0" (count)
		     : "memory");
	return s;
}

/* we might want to write optimized versions of these later */
#define __constant_count_memset(s, c, count) __memset_generic((s), (c), (count))

/*
 * memset(x, 0, y) is a reasonably common thing to do, so we want to fill
 * things 32 bits at a time even when we don't know the size of the
 * area at compile-time..
 */
static __always_inline
void *__constant_c_memset(void *s, unsigned long c, size_t count)
{
	int d0, d1;
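	/*
	 * Note: c is the fill byte already replicated into all four
	 * bytes of a long (e.g. 0x01010101 for byte 0x01).  Fill
	 * count/4 dwords with "rep stosl", then test bits 1 and 0 of
	 * count to store the trailing word and/or byte.
	 */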
	asm volatile("rep ; stosl\n\t"
		     "testb $2,%b3\n\t"
		     "je 1f\n\t"
		     "stosw\n"
		     "1:\ttestb $1,%b3\n\t"
		     "je 2f\n\t"
		     "stosb\n"
		     "2:"
		     : "=&c" (d0), "=&D" (d1)
		     : "a" (c), "q" (count), "0" (count/4), "1" ((long)s)
		     : "memory");
	return s;
}

/* Added by Gertjan van Wingerde to make the minix and sysv modules work */
#define __HAVE_ARCH_STRNLEN
extern size_t strnlen(const char *s, size_t count);
/* end of additional stuff */

#define __HAVE_ARCH_STRSTR
extern char *strstr(const char *cs, const char *ct);

/*
 * This looks horribly ugly, but the compiler can optimize it totally,
 * as by now we know that both pattern and count are constant..
 */
static __always_inline
void *__constant_c_and_count_memset(void *s, unsigned long pattern,
				    size_t count)
{
	switch (count) {
	case 0:
		return s;
	case 1:
		*(unsigned char *)s = pattern & 0xff;
		return s;
	case 2:
		*(unsigned short *)s = pattern & 0xffff;
		return s;
	case 3:
		*(unsigned short *)s = pattern & 0xffff;
		*((unsigned char *)s + 2) = pattern & 0xff;
		return s;
	case 4:
		*(unsigned long *)s = pattern;
		return s;
	}

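/*
 * For larger counts, COMMON() emits "rep stosl" for the count/4 dwords
 * and appends the stos instruction(s) passed as x to handle the
 * count%4 tail bytes.
 */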
#define COMMON(x)							\
	asm volatile("rep ; stosl"					\
		     x							\
		     : "=&c" (d0), "=&D" (d1)				\
		     : "a" (eax), "0" (count/4), "1" ((long)s)	\
		     : "memory")

	{
		int d0, d1;
		unsigned long eax = pattern;

		switch (count % 4) {
		case 0:
			COMMON("");
			return s;
		case 1:
			COMMON("\n\tstosb");
			return s;
		case 2:
			COMMON("\n\tstosw");
			return s;
		default:
			COMMON("\n\tstosw\n\tstosb");
			return s;
		}
	}

#undef COMMON
}

#define __constant_c_x_memset(s, c, count)			\
	(__builtin_constant_p(count)				\
	 ? __constant_c_and_count_memset((s), (c), (count))	\
	 : __constant_c_memset((s), (c), (count)))
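
/*
 * Both helpers above take c as an already-replicated long pattern.  A
 * caller with a plain fill byte would build that pattern first, e.g.
 * (illustrative only, not part of this header):
 *
 *	__constant_c_x_memset(s, 0x01010101UL * (unsigned char)c, count);
 *
 * which is roughly how older versions of this header expanded a
 * memset() with a constant fill value.
 */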

#define __memset(s, c, count)				\
	(__builtin_constant_p(count)			\
	 ? __constant_count_memset((s), (c), (count))	\
	 : __memset_generic((s), (c), (count)))

#define __HAVE_ARCH_MEMSET
extern void *memset(void *, int, size_t);
#ifndef CONFIG_FORTIFY_SOURCE
#define memset(s, c, count) __builtin_memset(s, c, count)
#endif /* !CONFIG_FORTIFY_SOURCE */

#define __HAVE_ARCH_MEMSET16
static inline void *memset16(uint16_t *s, uint16_t v, size_t n)
{
	int d0, d1;
	asm volatile("rep\n\t"
		     "stosw"
		     : "=&c" (d0), "=&D" (d1)
		     : "a" (v), "1" (s), "0" (n)
		     : "memory");
	return s;
}
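
/*
 * Example use (hypothetical): fill one scanline of a 16bpp frame
 * buffer with a single RGB565 pixel value:
 *
 *	memset16(line, rgb565_pixel, width);
 *
 * Note that n counts 16-bit elements, not bytes.
 */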

#define __HAVE_ARCH_MEMSET32
static inline void *memset32(uint32_t *s, uint32_t v, size_t n)
{
	int d0, d1;
	asm volatile("rep\n\t"
		     "stosl"
		     : "=&c" (d0), "=&D" (d1)
		     : "a" (v), "1" (s), "0" (n)
		     : "memory");
	return s;
}
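
/*
 * Like memset16() but for 32-bit elements, e.g. clearing an array of
 * u32 counters or filling a 32bpp scanline; again n is in elements,
 * not bytes.
 */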

/*
 * find the first occurrence of byte 'c', or 1 past the area if none
 */
#define __HAVE_ARCH_MEMSCAN
extern void *memscan(void *addr, int c, size_t size);

#endif /* __KERNEL__ */

#endif /* _ASM_X86_STRING_32_H */