#ifndef _I386_STRING_H_
#define _I386_STRING_H_

#ifdef __KERNEL__
#include <linux/config.h>
/*
 * On a 486 or Pentium, we are better off not using the
 * byte string operations. But on a 386 or a PPro the
 * byte string ops are faster than doing it by hand
 * (MUCH faster on a Pentium).
 *
 * Also, the byte strings actually work correctly. Forget
 * the i486 routines for now as they may be broken.
 */
#if FIXED_486_STRING && defined(CONFIG_X86_USE_STRING_486)
#include <asm/string-486.h>
#else

/*
 * This string-include defines all string functions as inline
 * functions. Use gcc. It also assumes ds=es=data space; this should
 * be normal. Most of the string functions are rather heavily
 * hand-optimized, see especially strtok, strstr and str[c]spn. They
 * should work, but are not very easy to understand. Everything is
 * done entirely within the register set, making the functions fast
 * and clean. String instructions have been used throughout, making
 * for "slightly" unclear code :-)
 *
 *		NO Copyright (C) 1991, 1992 Linus Torvalds,
 *		consider these trivial functions to be PD.
 */

#define __HAVE_ARCH_STRCPY
static inline char * strcpy(char * dest,const char *src)
{
int d0, d1, d2;
__asm__ __volatile__(
	"1:\tlodsb\n\t"
	"stosb\n\t"
	"testb %%al,%%al\n\t"
	"jne 1b"
	: "=&S" (d0), "=&D" (d1), "=&a" (d2)
	:"0" (src),"1" (dest) : "memory");
return dest;
}
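
/*
 * How the loop works: lodsb loads the byte at DS:ESI into AL and
 * advances ESI, stosb stores AL to ES:EDI and advances EDI, and the
 * loop runs until the NUL byte itself has been copied. The "=&S",
 * "=&D" and "=&a" outputs are dummies that tell gcc the registers
 * are clobbered; the "0"/"1" inputs tie src and dest to them.
 */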

#define __HAVE_ARCH_STRNCPY
static inline char * strncpy(char * dest,const char *src,size_t count)
{
int d0, d1, d2, d3;
__asm__ __volatile__(
	"1:\tdecl %2\n\t"
	"js 2f\n\t"
	"lodsb\n\t"
	"stosb\n\t"
	"testb %%al,%%al\n\t"
	"jne 1b\n\t"
	"rep\n\t"
	"stosb\n"
	"2:"
	: "=&S" (d0), "=&D" (d1), "=&c" (d2), "=&a" (d3)
	:"0" (src),"1" (dest),"2" (count) : "memory");
return dest;
}
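
/*
 * This keeps the ISO C strncpy semantics: if the NUL is copied before
 * count runs out, AL is 0 and the trailing "rep stosb" zero-pads the
 * rest of dest; if count runs out first, the "js 2f" skips the
 * padding and dest is left unterminated.
 */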

#define __HAVE_ARCH_STRCAT
static inline char * strcat(char * dest,const char * src)
{
int d0, d1, d2, d3;
__asm__ __volatile__(
	"repne\n\t"
	"scasb\n\t"
	"decl %1\n"
	"1:\tlodsb\n\t"
	"stosb\n\t"
	"testb %%al,%%al\n\t"
	"jne 1b"
	: "=&S" (d0), "=&D" (d1), "=&a" (d2), "=&c" (d3)
	: "0" (src), "1" (dest), "2" (0), "3" (0xffffffff):"memory");
return dest;
}
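
/*
 * The "repne scasb" with AL=0 and ECX=0xffffffff first scans dest for
 * its terminating NUL; "decl %1" then backs EDI up onto that NUL so
 * the copy loop overwrites it, appending src including its NUL.
 */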

#define __HAVE_ARCH_STRNCAT
static inline char * strncat(char * dest,const char * src,size_t count)
{
int d0, d1, d2, d3;
__asm__ __volatile__(
	"repne\n\t"
	"scasb\n\t"
	"decl %1\n\t"
	"movl %8,%3\n"
	"1:\tdecl %3\n\t"
	"js 2f\n\t"
	"lodsb\n\t"
	"stosb\n\t"
	"testb %%al,%%al\n\t"
	"jne 1b\n"
	"2:\txorl %2,%2\n\t"
	"stosb"
	: "=&S" (d0), "=&D" (d1), "=&a" (d2), "=&c" (d3)
	: "0" (src),"1" (dest),"2" (0),"3" (0xffffffff), "g" (count)
	: "memory");
return dest;
}
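
/*
 * Unlike strncpy above, this always NUL-terminates: the final
 * "xorl %2,%2" / "stosb" pair stores a 0 after the last byte copied,
 * so dest needs room for count bytes plus the terminator.
 */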

#define __HAVE_ARCH_STRCMP
static inline int strcmp(const char * cs,const char * ct)
{
int d0, d1;
register int __res;
__asm__ __volatile__(
	"1:\tlodsb\n\t"
	"scasb\n\t"
	"jne 2f\n\t"
	"testb %%al,%%al\n\t"
	"jne 1b\n\t"
	"xorl %%eax,%%eax\n\t"
	"jmp 3f\n"
	"2:\tsbbl %%eax,%%eax\n\t"
	"orb $1,%%al\n"
	"3:"
	:"=a" (__res), "=&S" (d0), "=&D" (d1)
	:"1" (cs),"2" (ct));
return __res;
}
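
/*
 * The result is built without a signed compare: the mismatching
 * "scasb" computes AL - *EDI, so "sbbl %%eax,%%eax" yields -1 when
 * the borrow flag is set (AL < *EDI) and 0 otherwise, and
 * "orb $1,%%al" turns those into the -1 / 1 return values.
 */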

#define __HAVE_ARCH_STRNCMP
static inline int strncmp(const char * cs,const char * ct,size_t count)
{
register int __res;
int d0, d1, d2;
__asm__ __volatile__(
	"1:\tdecl %3\n\t"
	"js 2f\n\t"
	"lodsb\n\t"
	"scasb\n\t"
	"jne 3f\n\t"
	"testb %%al,%%al\n\t"
	"jne 1b\n"
	"2:\txorl %%eax,%%eax\n\t"
	"jmp 4f\n"
	"3:\tsbbl %%eax,%%eax\n\t"
	"orb $1,%%al\n"
	"4:"
	:"=a" (__res), "=&S" (d0), "=&D" (d1), "=&c" (d2)
	:"1" (cs),"2" (ct),"3" (count));
return __res;
}

#define __HAVE_ARCH_STRCHR
static inline char * strchr(const char * s, int c)
{
int d0;
register char * __res;
__asm__ __volatile__(
	"movb %%al,%%ah\n"
	"1:\tlodsb\n\t"
	"cmpb %%ah,%%al\n\t"
	"je 2f\n\t"
	"testb %%al,%%al\n\t"
	"jne 1b\n\t"
	"movl $1,%1\n"
	"2:\tmovl %1,%0\n\t"
	"decl %0"
	:"=a" (__res), "=&S" (d0) : "1" (s),"0" (c));
return __res;
}
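
/*
 * The character to find is parked in AH while lodsb walks the string
 * through AL. On a hit ESI has already advanced past the match, so
 * the final "decl" points the result back at it; on end of string
 * ESI is forced to 1 so the same "decl" produces NULL. Searching for
 * '\0' correctly returns a pointer to the terminator.
 */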

#define __HAVE_ARCH_STRRCHR
static inline char * strrchr(const char * s, int c)
{
int d0, d1;
register char * __res;
__asm__ __volatile__(
	"movb %%al,%%ah\n"
	"1:\tlodsb\n\t"
	"cmpb %%ah,%%al\n\t"
	"jne 2f\n\t"
	"leal -1(%%esi),%0\n"
	"2:\ttestb %%al,%%al\n\t"
	"jne 1b"
	:"=g" (__res), "=&S" (d0), "=&a" (d1) :"0" (0),"1" (s),"2" (c));
return __res;
}
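
/*
 * Each match overwrites %0 via "leal -1(%%esi),%0" (ESI already
 * points one past the matching byte), so when the NUL ends the loop
 * %0 holds the address of the last occurrence, or the initial 0
 * (NULL) if there was none.
 */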

#define __HAVE_ARCH_STRLEN
static inline size_t strlen(const char * s)
{
int d0;
register int __res;
__asm__ __volatile__(
	"repne\n\t"
	"scasb\n\t"
	"notl %0\n\t"
	"decl %0"
	:"=c" (__res), "=&D" (d0) :"1" (s),"a" (0), "0" (0xffffffff));
return __res;
}
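
/*
 * ECX counts down from 0xffffffff while "repne scasb" looks for the
 * NUL, ending at -(len + 2); "notl" turns that into len + 1 and
 * "decl" into len. The scan includes the NUL byte itself.
 */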

static inline void * __memcpy(void * to, const void * from, size_t n)
{
int d0, d1, d2;
__asm__ __volatile__(
	"rep ; movsl\n\t"
	"testb $2,%b4\n\t"
	"je 1f\n\t"
	"movsw\n"
	"1:\ttestb $1,%b4\n\t"
	"je 2f\n\t"
	"movsb\n"
	"2:"
	: "=&c" (d0), "=&D" (d1), "=&S" (d2)
	:"0" (n/4), "q" (n),"1" ((long) to),"2" ((long) from)
	: "memory");
return (to);
}
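
/*
 * The bulk of the copy is n/4 dwords via "rep movsl"; the two testb
 * instructions then check bits 1 and 0 of n (%b4 is the low byte of
 * the "q"-constrained copy of n) to move a trailing word and/or byte.
 */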

/*
 * This looks horribly ugly, but the compiler can optimize it totally,
 * as the count is constant.
 */
static inline void * __constant_memcpy(void * to, const void * from, size_t n)
{
	switch (n) {
		case 0:
			return to;
		case 1:
			*(unsigned char *)to = *(const unsigned char *)from;
			return to;
		case 2:
			*(unsigned short *)to = *(const unsigned short *)from;
			return to;
		case 3:
			*(unsigned short *)to = *(const unsigned short *)from;
			*(2+(unsigned char *)to) = *(2+(const unsigned char *)from);
			return to;
		case 4:
			*(unsigned long *)to = *(const unsigned long *)from;
			return to;
		case 6:	/* for Ethernet addresses */
			*(unsigned long *)to = *(const unsigned long *)from;
			*(2+(unsigned short *)to) = *(2+(const unsigned short *)from);
			return to;
		case 8:
			*(unsigned long *)to = *(const unsigned long *)from;
			*(1+(unsigned long *)to) = *(1+(const unsigned long *)from);
			return to;
		case 12:
			*(unsigned long *)to = *(const unsigned long *)from;
			*(1+(unsigned long *)to) = *(1+(const unsigned long *)from);
			*(2+(unsigned long *)to) = *(2+(const unsigned long *)from);
			return to;
		case 16:
			*(unsigned long *)to = *(const unsigned long *)from;
			*(1+(unsigned long *)to) = *(1+(const unsigned long *)from);
			*(2+(unsigned long *)to) = *(2+(const unsigned long *)from);
			*(3+(unsigned long *)to) = *(3+(const unsigned long *)from);
			return to;
		case 20:
			*(unsigned long *)to = *(const unsigned long *)from;
			*(1+(unsigned long *)to) = *(1+(const unsigned long *)from);
			*(2+(unsigned long *)to) = *(2+(const unsigned long *)from);
			*(3+(unsigned long *)to) = *(3+(const unsigned long *)from);
			*(4+(unsigned long *)to) = *(4+(const unsigned long *)from);
			return to;
	}
#define COMMON(x) \
__asm__ __volatile__( \
	"rep ; movsl" \
	x \
	: "=&c" (d0), "=&D" (d1), "=&S" (d2) \
	: "0" (n/4),"1" ((long) to),"2" ((long) from) \
	: "memory");
{
	int d0, d1, d2;
	switch (n % 4) {
		case 0: COMMON(""); return to;
		case 1: COMMON("\n\tmovsb"); return to;
		case 2: COMMON("\n\tmovsw"); return to;
		default: COMMON("\n\tmovsw\n\tmovsb"); return to;
	}
}

#undef COMMON
}

#define __HAVE_ARCH_MEMCPY

#ifdef CONFIG_X86_USE_3DNOW

#include <asm/mmx.h>

/*
 *	This CPU favours 3DNow strongly (e.g. AMD Athlon)
 */

static inline void * __constant_memcpy3d(void * to, const void * from, size_t len)
{
	if (len < 512)
		return __constant_memcpy(to, from, len);
	return _mmx_memcpy(to, from, len);
}

static __inline__ void *__memcpy3d(void *to, const void *from, size_t len)
{
	if (len < 512)
		return __memcpy(to, from, len);
	return _mmx_memcpy(to, from, len);
}

#define memcpy(t, f, n) \
(__builtin_constant_p(n) ? \
 __constant_memcpy3d((t),(f),(n)) : \
 __memcpy3d((t),(f),(n)))
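
/*
 * The 512-byte cutoff is a tuning choice: _mmx_memcpy has to save
 * and restore FPU/MMX state around the copy, which only pays off
 * once the copy is large enough to amortize that overhead.
 */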

#else

/*
 *	No 3D Now!
 */

#define memcpy(t, f, n) \
(__builtin_constant_p(n) ? \
 __constant_memcpy((t),(f),(n)) : \
 __memcpy((t),(f),(n)))

#endif

/*
 * struct_cpy(x,y), copy structure *y into (matching structure) *x.
 *
 * We get link-time errors if the structure sizes do not match.
 * There is no runtime overhead; it's all optimized away at
 * compile time.
 */
extern void __struct_cpy_bug (void);

#define struct_cpy(x,y) 			\
({						\
	if (sizeof(*(x)) != sizeof(*(y))) 	\
		__struct_cpy_bug();		\
	memcpy(x, y, sizeof(*(x)));		\
})
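
/*
 * Illustrative use (hypothetical types):
 *
 *	struct foo a, b;
 *	struct_cpy(&a, &b);	copies b into a
 *
 * The sizeof comparison is a compile-time constant, so gcc deletes
 * the dead branch; only when the sizes differ does the call to the
 * undefined __struct_cpy_bug() survive and fail at link time.
 */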

#define __HAVE_ARCH_MEMMOVE
static inline void * memmove(void * dest,const void * src, size_t n)
{
int d0, d1, d2;
if (dest<src)
__asm__ __volatile__(
	"rep\n\t"
	"movsb"
	: "=&c" (d0), "=&S" (d1), "=&D" (d2)
	:"0" (n),"1" (src),"2" (dest)
	: "memory");
else
__asm__ __volatile__(
	"std\n\t"
	"rep\n\t"
	"movsb\n\t"
	"cld"
	: "=&c" (d0), "=&S" (d1), "=&D" (d2)
	:"0" (n),
	 "1" (n-1+(const char *)src),
	 "2" (n-1+(char *)dest)
	:"memory");
return dest;
}
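
/*
 * Overlap handling: when dest is below src a forward "rep movsb" is
 * safe; otherwise "std" sets the direction flag and the copy runs
 * backwards from the last byte of each buffer, with "cld" restoring
 * the normal forward direction afterwards.
 */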

#define memcmp __builtin_memcmp

#define __HAVE_ARCH_MEMCHR
static inline void * memchr(const void * cs,int c,size_t count)
{
int d0;
register void * __res;
if (!count)
	return NULL;
__asm__ __volatile__(
	"repne\n\t"
	"scasb\n\t"
	"je 1f\n\t"
	"movl $1,%0\n"
	"1:\tdecl %0"
	:"=D" (__res), "=&c" (d0) : "a" (c),"0" (cs),"1" (count));
return __res;
}
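
/*
 * "repne scasb" stops either on a match (EDI then points one past
 * the byte, hence the "decl") or when ECX reaches 0; in the no-match
 * case EDI is forced to 1 so the shared "decl" yields NULL.
 */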

static inline void * __memset_generic(void * s, char c,size_t count)
{
int d0, d1;
__asm__ __volatile__(
	"rep\n\t"
	"stosb"
	: "=&c" (d0), "=&D" (d1)
	:"a" (c),"1" (s),"0" (count)
	:"memory");
return s;
}

/* we might want to write optimized versions of these later */
#define __constant_count_memset(s,c,count) __memset_generic((s),(c),(count))

/*
 * memset(x,0,y) is a reasonably common thing to do, so we want to fill
 * things 32 bits at a time even when we don't know the size of the
 * area at compile-time.
 */
static inline void * __constant_c_memset(void * s, unsigned long c, size_t count)
{
int d0, d1;
__asm__ __volatile__(
	"rep ; stosl\n\t"
	"testb $2,%b3\n\t"
	"je 1f\n\t"
	"stosw\n"
	"1:\ttestb $1,%b3\n\t"
	"je 2f\n\t"
	"stosb\n"
	"2:"
	: "=&c" (d0), "=&D" (d1)
	:"a" (c), "q" (count), "0" (count/4), "1" ((long) s)
	:"memory");
return (s);
}
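
/*
 * Same tail trick as __memcpy above: fill count/4 dwords with
 * "rep stosl", then test bits 1 and 0 of count (via its low byte,
 * %b3) to store a trailing word and/or byte of the pattern.
 */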

/* Added by Gertjan van Wingerde to make the minix and sysv modules work */
#define __HAVE_ARCH_STRNLEN
static inline size_t strnlen(const char * s, size_t count)
{
int d0;
register int __res;
__asm__ __volatile__(
	"movl %2,%0\n\t"
	"jmp 2f\n"
	"1:\tcmpb $0,(%0)\n\t"
	"je 3f\n\t"
	"incl %0\n"
	"2:\tdecl %1\n\t"
	"cmpl $-1,%1\n\t"
	"jne 1b\n"
	"3:\tsubl %2,%0"
	:"=a" (__res), "=&d" (d0)
	:"c" (s),"1" (count));
return __res;
}
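
/*
 * A plain byte loop rather than string instructions: EAX walks the
 * string until a NUL or until the copy of count in EDX underflows,
 * then "subl %2,%0" turns the end pointer back into a length.
 */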
/* end of additional stuff */

#define __HAVE_ARCH_STRSTR

extern char *strstr(const char *cs, const char *ct);

/*
 * This looks horribly ugly, but the compiler can optimize it totally,
 * as we by now know that both pattern and count are constant.
 */
static inline void * __constant_c_and_count_memset(void * s, unsigned long pattern, size_t count)
{
	switch (count) {
		case 0:
			return s;
		case 1:
			*(unsigned char *)s = pattern;
			return s;
		case 2:
			*(unsigned short *)s = pattern;
			return s;
		case 3:
			*(unsigned short *)s = pattern;
			*(2+(unsigned char *)s) = pattern;
			return s;
		case 4:
			*(unsigned long *)s = pattern;
			return s;
	}
#define COMMON(x) \
__asm__  __volatile__( \
	"rep ; stosl" \
	x \
	: "=&c" (d0), "=&D" (d1) \
	: "a" (pattern),"0" (count/4),"1" ((long) s) \
	: "memory")
{
	int d0, d1;
	switch (count % 4) {
		case 0: COMMON(""); return s;
		case 1: COMMON("\n\tstosb"); return s;
		case 2: COMMON("\n\tstosw"); return s;
		default: COMMON("\n\tstosw\n\tstosb"); return s;
	}
}

#undef COMMON
}

#define __constant_c_x_memset(s, c, count) \
(__builtin_constant_p(count) ? \
 __constant_c_and_count_memset((s),(c),(count)) : \
 __constant_c_memset((s),(c),(count)))

#define __memset(s, c, count) \
(__builtin_constant_p(count) ? \
 __constant_count_memset((s),(c),(count)) : \
 __memset_generic((s),(c),(count)))

#define __HAVE_ARCH_MEMSET
#define memset(s, c, count) \
(__builtin_constant_p(c) ? \
 __constant_c_x_memset((s),(0x01010101UL*(unsigned char)(c)),(count)) : \
 __memset((s),(c),(count)))
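
/*
 * 0x01010101UL * (unsigned char)(c) replicates the fill byte into
 * all four bytes of a dword (e.g. c = 0xAB gives 0xABABABAB), which
 * is what lets the constant-c routines above fill 32 bits at a time.
 */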

/*
 * find the first occurrence of byte 'c', or 1 past the area if none
 */
#define __HAVE_ARCH_MEMSCAN
static inline void * memscan(void * addr, int c, size_t size)
{
	if (!size)
		return addr;
	__asm__("repnz; scasb\n\t"
		"jnz 1f\n\t"
		"dec %%edi\n"
		"1:"
		: "=D" (addr), "=c" (size)
		: "0" (addr), "1" (size), "a" (c));
	return addr;
}

#endif /* CONFIG_X86_USE_STRING_486 */
#endif /* __KERNEL__ */

#endif /* _I386_STRING_H_ */