/* include/asm-i386/string.h */
#ifndef _I386_STRING_H_
#define _I386_STRING_H_

#ifdef __KERNEL__
#include <linux/config.h>
/*
 * On a 486 or Pentium, we are better off not using the
 * byte string operations. But on a 386 or a PPro the
 * byte string ops are faster than doing it by hand
 * (MUCH faster on a Pentium).
 *
 * Also, the byte strings actually work correctly. Forget
 * the i486 routines for now as they may be broken..
 */
#if FIXED_486_STRING && defined(CONFIG_X86_USE_STRING_486)
#include <asm/string-486.h>
#else

/*
 * This string-include defines all string functions as inline
 * functions. Use gcc. It also assumes ds=es=data space; this should be
 * normal. Most of the string functions are rather heavily hand-optimized,
 * see especially strtok,strstr,str[c]spn. They should work, but are not
 * very easy to understand. Everything is done entirely within the register
 * set, making the functions fast and clean. String instructions have been
 * used throughout, making for "slightly" unclear code :-)
 *
 * NO Copyright (C) 1991, 1992 Linus Torvalds,
 * consider these trivial functions to be PD.
 */

#define __HAVE_ARCH_STRCPY
static inline char * strcpy(char * dest, const char *src)
{
        int d0, d1, d2;
        __asm__ __volatile__(
                "1:\tlodsb\n\t"         /* load byte from [esi], advance */
                "stosb\n\t"             /* store byte to [edi], advance */
                "testb %%al,%%al\n\t"
                "jne 1b"                /* loop until the NUL is copied too */
                : "=&S" (d0), "=&D" (d1), "=&a" (d2)
                : "0" (src), "1" (dest) : "memory");
        return dest;
}

#define __HAVE_ARCH_STRNCPY
static inline char * strncpy(char * dest, const char *src, size_t count)
{
        int d0, d1, d2, d3;
        __asm__ __volatile__(
                "1:\tdecl %2\n\t"
                "js 2f\n\t"             /* stop after count bytes */
                "lodsb\n\t"
                "stosb\n\t"
                "testb %%al,%%al\n\t"
                "jne 1b\n\t"
                "rep\n\t"
                "stosb\n"               /* NUL-pad the remainder */
                "2:"
                : "=&S" (d0), "=&D" (d1), "=&c" (d2), "=&a" (d3)
                : "0" (src), "1" (dest), "2" (count) : "memory");
        return dest;
}

#define __HAVE_ARCH_STRCAT
static inline char * strcat(char * dest, const char * src)
{
        int d0, d1, d2, d3;
        __asm__ __volatile__(
                "repne\n\t"
                "scasb\n\t"             /* find the NUL terminating dest */
                "decl %1\n"
                "1:\tlodsb\n\t"
                "stosb\n\t"
                "testb %%al,%%al\n\t"
                "jne 1b"
                : "=&S" (d0), "=&D" (d1), "=&a" (d2), "=&c" (d3)
                : "0" (src), "1" (dest), "2" (0), "3" (0xffffffff) : "memory");
        return dest;
}

#define __HAVE_ARCH_STRNCAT
static inline char * strncat(char * dest, const char * src, size_t count)
{
        int d0, d1, d2, d3;
        __asm__ __volatile__(
                "repne\n\t"
                "scasb\n\t"
                "decl %1\n\t"
                "movl %8,%3\n"
                "1:\tdecl %3\n\t"
                "js 2f\n\t"
                "lodsb\n\t"
                "stosb\n\t"
                "testb %%al,%%al\n\t"
                "jne 1b\n"
                "2:\txorl %2,%2\n\t"
                "stosb"                 /* always NUL-terminate */
                : "=&S" (d0), "=&D" (d1), "=&a" (d2), "=&c" (d3)
                : "0" (src), "1" (dest), "2" (0), "3" (0xffffffff), "g" (count)
                : "memory");
        return dest;
}

#define __HAVE_ARCH_STRCMP
static inline int strcmp(const char * cs, const char * ct)
{
        int d0, d1;
        register int __res;
        __asm__ __volatile__(
                "1:\tlodsb\n\t"
                "scasb\n\t"
                "jne 2f\n\t"
                "testb %%al,%%al\n\t"
                "jne 1b\n\t"
                "xorl %%eax,%%eax\n\t"  /* strings are equal */
                "jmp 3f\n"
                "2:\tsbbl %%eax,%%eax\n\t"
                "orb $1,%%al\n"         /* force result to -1 or +1 */
                "3:"
                : "=a" (__res), "=&S" (d0), "=&D" (d1)
                : "1" (cs), "2" (ct));
        return __res;
}

#define __HAVE_ARCH_STRNCMP
static inline int strncmp(const char * cs, const char * ct, size_t count)
{
        register int __res;
        int d0, d1, d2;
        __asm__ __volatile__(
                "1:\tdecl %3\n\t"
                "js 2f\n\t"
                "lodsb\n\t"
                "scasb\n\t"
                "jne 3f\n\t"
                "testb %%al,%%al\n\t"
                "jne 1b\n"
                "2:\txorl %%eax,%%eax\n\t"
                "jmp 4f\n"
                "3:\tsbbl %%eax,%%eax\n\t"
                "orb $1,%%al\n"
                "4:"
                : "=a" (__res), "=&S" (d0), "=&D" (d1), "=&c" (d2)
                : "1" (cs), "2" (ct), "3" (count));
        return __res;
}

#define __HAVE_ARCH_STRCHR
static inline char * strchr(const char * s, int c)
{
        int d0;
        register char * __res;
        __asm__ __volatile__(
                "movb %%al,%%ah\n"
                "1:\tlodsb\n\t"
                "cmpb %%ah,%%al\n\t"
                "je 2f\n\t"
                "testb %%al,%%al\n\t"
                "jne 1b\n\t"
                "movl $1,%1\n"          /* not found: yields NULL below */
                "2:\tmovl %1,%0\n\t"
                "decl %0"
                : "=a" (__res), "=&S" (d0) : "1" (s), "0" (c));
        return __res;
}

#define __HAVE_ARCH_STRRCHR
static inline char * strrchr(const char * s, int c)
{
        int d0, d1;
        register char * __res;
        __asm__ __volatile__(
                "movb %%al,%%ah\n"
                "1:\tlodsb\n\t"
                "cmpb %%ah,%%al\n\t"
                "jne 2f\n\t"
                "leal -1(%%esi),%0\n"   /* remember the latest match */
                "2:\ttestb %%al,%%al\n\t"
                "jne 1b"
                : "=g" (__res), "=&S" (d0), "=&a" (d1) : "0" (0), "1" (s), "2" (c));
        return __res;
}

#define __HAVE_ARCH_STRLEN
static inline size_t strlen(const char * s)
{
        int d0;
        register int __res;
        __asm__ __volatile__(
                "repne\n\t"
                "scasb\n\t"
                "notl %0\n\t"
                "decl %0"
                : "=c" (__res), "=&D" (d0) : "1" (s), "a" (0), "0" (0xffffffff));
        return __res;
}
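
/*
 * How the count falls out above: %ecx starts at 0xffffffff (-1) and
 * "repne scasb" decrements it once per byte examined, including the
 * terminating NUL. For s = "abc", four bytes are scanned, leaving
 * %ecx = -5; "notl" turns that into 4 and "decl" into 3, the length.
 */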

static inline void * __memcpy(void * to, const void * from, size_t n)
{
        int d0, d1, d2;
        __asm__ __volatile__(
                "rep ; movsl\n\t"       /* copy n/4 dwords */
                "testb $2,%b4\n\t"      /* then the tail: a word... */
                "je 1f\n\t"
                "movsw\n"
                "1:\ttestb $1,%b4\n\t"  /* ...and/or a byte */
                "je 2f\n\t"
                "movsb\n"
                "2:"
                : "=&c" (d0), "=&D" (d1), "=&S" (d2)
                : "0" (n/4), "q" (n), "1" ((long) to), "2" ((long) from)
                : "memory");
        return (to);
}

/*
 * This looks horribly ugly, but the compiler can optimize it totally,
 * as the count is constant.
 */
static inline void * __constant_memcpy(void * to, const void * from, size_t n)
{
        switch (n) {
        case 0:
                return to;
        case 1:
                *(unsigned char *)to = *(const unsigned char *)from;
                return to;
        case 2:
                *(unsigned short *)to = *(const unsigned short *)from;
                return to;
        case 3:
                *(unsigned short *)to = *(const unsigned short *)from;
                *(2+(unsigned char *)to) = *(2+(const unsigned char *)from);
                return to;
        case 4:
                *(unsigned long *)to = *(const unsigned long *)from;
                return to;
        case 6: /* for Ethernet addresses */
                *(unsigned long *)to = *(const unsigned long *)from;
                *(2+(unsigned short *)to) = *(2+(const unsigned short *)from);
                return to;
        case 8:
                *(unsigned long *)to = *(const unsigned long *)from;
                *(1+(unsigned long *)to) = *(1+(const unsigned long *)from);
                return to;
        case 12:
                *(unsigned long *)to = *(const unsigned long *)from;
                *(1+(unsigned long *)to) = *(1+(const unsigned long *)from);
                *(2+(unsigned long *)to) = *(2+(const unsigned long *)from);
                return to;
        case 16:
                *(unsigned long *)to = *(const unsigned long *)from;
                *(1+(unsigned long *)to) = *(1+(const unsigned long *)from);
                *(2+(unsigned long *)to) = *(2+(const unsigned long *)from);
                *(3+(unsigned long *)to) = *(3+(const unsigned long *)from);
                return to;
        case 20:
                *(unsigned long *)to = *(const unsigned long *)from;
                *(1+(unsigned long *)to) = *(1+(const unsigned long *)from);
                *(2+(unsigned long *)to) = *(2+(const unsigned long *)from);
                *(3+(unsigned long *)to) = *(3+(const unsigned long *)from);
                *(4+(unsigned long *)to) = *(4+(const unsigned long *)from);
                return to;
        }
#define COMMON(x) \
__asm__ __volatile__( \
        "rep ; movsl" \
        x \
        : "=&c" (d0), "=&D" (d1), "=&S" (d2) \
        : "0" (n/4), "1" ((long) to), "2" ((long) from) \
        : "memory");
{
        int d0, d1, d2;
        switch (n % 4) {
        case 0: COMMON(""); return to;
        case 1: COMMON("\n\tmovsb"); return to;
        case 2: COMMON("\n\tmovsw"); return to;
        default: COMMON("\n\tmovsw\n\tmovsb"); return to;
        }
}
#undef COMMON
}
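
/*
 * With a compile-time constant n the switches above collapse into a
 * handful of direct moves. A sketch (the struct is invented for
 * illustration):
 *
 *      struct eth_addr { unsigned char addr[6]; } dst, src;
 *      memcpy(&dst, &src, 6);  becomes one 32-bit move plus one 16-bit move
 */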

#define __HAVE_ARCH_MEMCPY

#ifdef CONFIG_X86_USE_3DNOW

/* All this just for in_interrupt() ... */

#include <asm/system.h>
#include <asm/ptrace.h>
#include <linux/smp.h>
#include <linux/spinlock.h>
#include <linux/interrupt.h>
#include <asm/mmx.h>

/*
 * This CPU favours 3DNow strongly (eg AMD Athlon)
 */

static inline void * __constant_memcpy3d(void * to, const void * from, size_t len)
{
        if (len < 512 || in_interrupt())
                return __constant_memcpy(to, from, len);
        return _mmx_memcpy(to, from, len);
}

extern __inline__ void *__memcpy3d(void *to, const void *from, size_t len)
{
        if (len < 512 || in_interrupt())
                return __memcpy(to, from, len);
        return _mmx_memcpy(to, from, len);
}

#define memcpy(t, f, n) \
        (__builtin_constant_p(n) ? \
        __constant_memcpy3d((t),(f),(n)) : \
        __memcpy3d((t),(f),(n)))

#else

/*
 * No 3D Now!
 */

#define memcpy(t, f, n) \
        (__builtin_constant_p(n) ? \
        __constant_memcpy((t),(f),(n)) : \
        __memcpy((t),(f),(n)))

#endif
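
/*
 * In both variants the selection happens at compile time:
 * __builtin_constant_p(n) is true only when gcc can see the value of n,
 * so a call like memcpy(dst, src, 16) takes the unrolled constant-size
 * path, while a copy with a runtime length falls through to the
 * rep/movsl version (dst and src are illustrative names).
 */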

/*
 * struct_cpy(x,y), copy structure *y into (matching structure) *x.
 *
 * We get link-time errors if the structure sizes do not match.
 * There is no runtime overhead, it's all optimized away at
 * compile time.
 */
extern void __struct_cpy_bug (void);

#define struct_cpy(x,y)                         \
({                                              \
        if (sizeof(*(x)) != sizeof(*(y)))       \
                __struct_cpy_bug();             \
        memcpy(x, y, sizeof(*(x)));             \
})
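
/*
 * A usage sketch (struct names invented for illustration): when the two
 * sizes match, the sizeof test is folded away and struct_cpy() compiles
 * to a plain memcpy(); when they differ, the call to the undefined
 * __struct_cpy_bug() survives and the final link fails.
 *
 *      struct foo { int a, b; } *x;
 *      struct bar { int a, b; } *y;
 *      struct_cpy(x, y);       becomes memcpy(x, y, 8)
 */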

#define __HAVE_ARCH_MEMMOVE
static inline void * memmove(void * dest, const void * src, size_t n)
{
        int d0, d1, d2;
        if (dest < src)
                __asm__ __volatile__(
                        "rep\n\t"
                        "movsb"
                        : "=&c" (d0), "=&S" (d1), "=&D" (d2)
                        : "0" (n), "1" (src), "2" (dest)
                        : "memory");
        else
                __asm__ __volatile__(
                        "std\n\t"       /* copy backwards... */
                        "rep\n\t"
                        "movsb\n\t"
                        "cld"           /* ...and restore the direction flag */
                        : "=&c" (d0), "=&S" (d1), "=&D" (d2)
                        : "0" (n),
                          "1" (n-1+(const char *)src),
                          "2" (n-1+(char *)dest)
                        : "memory");
        return dest;
}
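
/*
 * The backward (std/cld) path matters when the regions overlap with
 * dest above src: copying forwards would overwrite source bytes before
 * they are read. For example:
 *
 *      char buf[] = "abcdef";
 *      memmove(buf + 1, buf, 5);       buf becomes "aabcde"
 */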

#define memcmp __builtin_memcmp

#define __HAVE_ARCH_MEMCHR
static inline void * memchr(const void * cs, int c, size_t count)
{
        int d0;
        register void * __res;
        if (!count)
                return NULL;
        __asm__ __volatile__(
                "repne\n\t"
                "scasb\n\t"
                "je 1f\n\t"
                "movl $1,%0\n"          /* not found: yields NULL below */
                "1:\tdecl %0"
                : "=D" (__res), "=&c" (d0) : "a" (c), "0" (cs), "1" (count));
        return __res;
}

static inline void * __memset_generic(void * s, char c, size_t count)
{
        int d0, d1;
        __asm__ __volatile__(
                "rep\n\t"
                "stosb"
                : "=&c" (d0), "=&D" (d1)
                : "a" (c), "1" (s), "0" (count)
                : "memory");
        return s;
}

/* we might want to write optimized versions of these later */
#define __constant_count_memset(s,c,count) __memset_generic((s),(c),(count))

/*
 * memset(x,0,y) is a reasonably common thing to do, so we want to fill
 * things 32 bits at a time even when we don't know the size of the
 * area at compile-time..
 */
static inline void * __constant_c_memset(void * s, unsigned long c, size_t count)
{
        int d0, d1;
        __asm__ __volatile__(
                "rep ; stosl\n\t"       /* fill count/4 dwords */
                "testb $2,%b3\n\t"
                "je 1f\n\t"
                "stosw\n"
                "1:\ttestb $1,%b3\n\t"
                "je 2f\n\t"
                "stosb\n"
                "2:"
                : "=&c" (d0), "=&D" (d1)
                : "a" (c), "q" (count), "0" (count/4), "1" ((long) s)
                : "memory");
        return (s);
}
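
/*
 * Note that __constant_c_memset() expects c to already hold the fill
 * byte replicated into all four byte lanes, since "stosl" stores the
 * whole of %eax at once; the memset() macro below arranges this with
 * the 0x01010101UL multiply.
 */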

/* Added by Gertjan van Wingerde to make minix and sysv module work */
#define __HAVE_ARCH_STRNLEN
static inline size_t strnlen(const char * s, size_t count)
{
        int d0;
        register int __res;
        __asm__ __volatile__(
                "movl %2,%0\n\t"
                "jmp 2f\n"
                "1:\tcmpb $0,(%0)\n\t"
                "je 3f\n\t"
                "incl %0\n"
                "2:\tdecl %1\n\t"
                "cmpl $-1,%1\n\t"
                "jne 1b\n"
                "3:\tsubl %2,%0"
                : "=a" (__res), "=&d" (d0)
                : "c" (s), "1" (count));
        return __res;
}
/* end of additional stuff */

#define __HAVE_ARCH_STRSTR
static inline char * strstr(const char * cs, const char * ct)
{
        int d0, d1;
        register char * __res;
        __asm__ __volatile__(
                "movl %6,%%edi\n\t"
                "repne\n\t"
                "scasb\n\t"
                "notl %%ecx\n\t"
                "decl %%ecx\n\t"        /* NOTE! This also sets Z if searchstring='' */
                "movl %%ecx,%%edx\n"
                "1:\tmovl %6,%%edi\n\t"
                "movl %%esi,%%eax\n\t"
                "movl %%edx,%%ecx\n\t"
                "repe\n\t"
                "cmpsb\n\t"
                "je 2f\n\t"             /* also works for empty string, see above */
                "xchgl %%eax,%%esi\n\t"
                "incl %%esi\n\t"
                "cmpb $0,-1(%%eax)\n\t"
                "jne 1b\n\t"
                "xorl %%eax,%%eax\n\t"
                "2:"
                : "=a" (__res), "=&c" (d0), "=&S" (d1)
                : "0" (0), "1" (0xffffffff), "2" (cs), "g" (ct)
                : "dx", "di");
        return __res;
}

/*
 * This looks horribly ugly, but the compiler can optimize it totally,
 * as we by now know that both pattern and count are constant..
 */
static inline void * __constant_c_and_count_memset(void * s, unsigned long pattern, size_t count)
{
        switch (count) {
        case 0:
                return s;
        case 1:
                *(unsigned char *)s = pattern;
                return s;
        case 2:
                *(unsigned short *)s = pattern;
                return s;
        case 3:
                *(unsigned short *)s = pattern;
                *(2+(unsigned char *)s) = pattern;
                return s;
        case 4:
                *(unsigned long *)s = pattern;
                return s;
        }
#define COMMON(x) \
__asm__ __volatile__( \
        "rep ; stosl" \
        x \
        : "=&c" (d0), "=&D" (d1) \
        : "a" (pattern), "0" (count/4), "1" ((long) s) \
        : "memory")
{
        int d0, d1;
        switch (count % 4) {
        case 0: COMMON(""); return s;
        case 1: COMMON("\n\tstosb"); return s;
        case 2: COMMON("\n\tstosw"); return s;
        default: COMMON("\n\tstosw\n\tstosb"); return s;
        }
}
#undef COMMON
}
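
/*
 * With both pattern and count known at compile time the switch above
 * reduces the whole call to direct stores: e.g. memset(p, 0, 4) on a
 * constant-sized object becomes a single *(unsigned long *)p = 0
 * (p is an illustrative name).
 */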

#define __constant_c_x_memset(s, c, count) \
        (__builtin_constant_p(count) ? \
        __constant_c_and_count_memset((s),(c),(count)) : \
        __constant_c_memset((s),(c),(count)))

#define __memset(s, c, count) \
        (__builtin_constant_p(count) ? \
        __constant_count_memset((s),(c),(count)) : \
        __memset_generic((s),(c),(count)))

#define __HAVE_ARCH_MEMSET
#define memset(s, c, count) \
        (__builtin_constant_p(c) ? \
        __constant_c_x_memset((s),(0x01010101UL*(unsigned char)(c)),(count)) : \
        __memset((s),(c),(count)))
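
/*
 * Multiplying the (constant) fill byte by 0x01010101UL replicates it
 * into all four bytes of a 32-bit word, so the stosl-based fillers can
 * store four bytes per iteration: 0xAB * 0x01010101 == 0xABABABAB, and
 * for the common memset(x,0,y) the pattern is simply 0.
 */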

/*
 * find the first occurrence of byte 'c', or 1 past the area if none
 */
#define __HAVE_ARCH_MEMSCAN
static inline void * memscan(void * addr, int c, size_t size)
{
        if (!size)
                return addr;
        __asm__("repnz; scasb\n\t"
                "jnz 1f\n\t"
                "dec %%edi\n"
                "1:"
                : "=D" (addr), "=c" (size)
                : "0" (addr), "1" (size), "a" (c));
        return addr;
}
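
/*
 * Unlike memchr(), memscan() never returns NULL: when 'c' is absent it
 * returns the pointer one past the scanned area, e.g. for a 4-byte
 * buffer that does not contain the value 9, memscan(buf, 9, 4) == buf + 4
 * (buf is an illustrative name).
 */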

#endif /* CONFIG_X86_USE_STRING_486 */
#endif /* __KERNEL__ */

#endif