#ifndef _I386_STRING_H_
#define _I386_STRING_H_

#ifdef __KERNEL__
#include <linux/config.h>
/*
 * On a 486 or Pentium, we are better off not using the
 * byte string operations. But on a 386 or a PPro the
 * byte string ops are faster than doing it by hand
 * (MUCH faster on a Pentium).
 *
 * Also, the byte strings actually work correctly. Forget
 * the i486 routines for now as they may be broken..
 */
#if FIXED_486_STRING && defined(CONFIG_X86_USE_STRING_486)
#include <asm/string-486.h>
#else
/*
 * This string-include defines all string functions as inline
 * functions. Use gcc. It also assumes ds=es=data space, this should be
 * normal. Most of the string-functions are rather heavily hand-optimized,
 * see especially strtok,strstr,str[c]spn. They should work, but are not
 * very easy to understand. Everything is done entirely within the register
 * set, making the functions fast and clean. String instructions have been
 * used throughout, making for "slightly" unclear code :-)
 *
 *		NO Copyright (C) 1991, 1992 Linus Torvalds,
 *		consider these trivial functions to be PD.
 */
#define __HAVE_ARCH_STRCPY
static inline char * strcpy(char * dest, const char *src)
{
int d0, d1, d2;
__asm__ __volatile__(
	"1:\tlodsb\n\t"		/* load byte from *src++ into %al */
	"stosb\n\t"		/* store %al to *dest++ */
	"testb %%al,%%al\n\t"
	"jne 1b"		/* loop until the NUL has been copied */
	: "=&S" (d0), "=&D" (d1), "=&a" (d2)
	: "0" (src), "1" (dest) : "memory");
return dest;
}
#define __HAVE_ARCH_STRNCPY
static inline char * strncpy(char * dest, const char *src, size_t count)
{
int d0, d1, d2, d3;
__asm__ __volatile__(
	"1:\tdecl %2\n\t"
	"js 2f\n\t"		/* stop after count bytes */
	"lodsb\n\t"
	"stosb\n\t"
	"testb %%al,%%al\n\t"
	"jne 1b\n\t"
	"rep\n\t"
	"stosb\n"		/* zero-pad the remainder, as ISO C requires */
	"2:"
	: "=&S" (d0), "=&D" (d1), "=&c" (d2), "=&a" (d3)
	: "0" (src), "1" (dest), "2" (count) : "memory");
return dest;
}
#define __HAVE_ARCH_STRCAT
static inline char * strcat(char * dest, const char * src)
{
int d0, d1, d2, d3;
__asm__ __volatile__(
	"repne\n\t"
	"scasb\n\t"		/* find the NUL terminating dest */
	"decl %1\n"		/* back up onto it */
	"1:\tlodsb\n\t"
	"stosb\n\t"
	"testb %%al,%%al\n\t"
	"jne 1b"
	: "=&S" (d0), "=&D" (d1), "=&a" (d2), "=&c" (d3)
	: "0" (src), "1" (dest), "2" (0), "3" (0xffffffff) : "memory");
return dest;
}
#define __HAVE_ARCH_STRNCAT
static inline char * strncat(char * dest, const char * src, size_t count)
{
int d0, d1, d2, d3;
__asm__ __volatile__(
	"repne\n\t"
	"scasb\n\t"		/* find the NUL terminating dest */
	"decl %1\n\t"
	"movl %8,%3\n"
	"1:\tdecl %3\n\t"
	"js 2f\n\t"		/* append at most count bytes */
	"lodsb\n\t"
	"stosb\n\t"
	"testb %%al,%%al\n\t"
	"jne 1b\n"
	"2:\txorl %2,%2\n\t"
	"stosb"			/* always NUL-terminate the result */
	: "=&S" (d0), "=&D" (d1), "=&a" (d2), "=&c" (d3)
	: "0" (src), "1" (dest), "2" (0), "3" (0xffffffff), "g" (count)
	: "memory");
return dest;
}
#define __HAVE_ARCH_STRCMP
static inline int strcmp(const char * cs, const char * ct)
{
int d0, d1;
register int __res;
__asm__ __volatile__(
	"1:\tlodsb\n\t"
	"scasb\n\t"
	"jne 2f\n\t"		/* bytes differ */
	"testb %%al,%%al\n\t"
	"jne 1b\n\t"		/* not at end of string yet */
	"xorl %%eax,%%eax\n\t"	/* strings are equal */
	"jmp 3f\n"
	"2:\tsbbl %%eax,%%eax\n\t"
	"orb $1,%%al\n"		/* carry -> -1, no carry -> +1 */
	"3:"
	: "=a" (__res), "=&S" (d0), "=&D" (d1)
	: "1" (cs), "2" (ct));
return __res;
}
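/*
 * Worked example of the sbb trick: at a mismatch, "scasb" computes
 * al - *edi and sets the carry flag iff al < *edi. Then:
 *
 *	sbbl %eax,%eax  ->  eax = 0 - 0 - CF  =  0 or -1 (0xffffffff)
 *	orb  $1,%al     ->  eax = +1 or -1
 *
 * e.g. strcmp("abc", "abd"): 'c' < 'd' sets CF, so the result is -1.
 */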
#define __HAVE_ARCH_STRNCMP
static inline int strncmp(const char * cs, const char * ct, size_t count)
{
register int __res;
int d0, d1, d2;
__asm__ __volatile__(
	"1:\tdecl %3\n\t"
	"js 2f\n\t"		/* compared count bytes: equal */
	"lodsb\n\t"
	"scasb\n\t"
	"jne 3f\n\t"
	"testb %%al,%%al\n\t"
	"jne 1b\n"
	"2:\txorl %%eax,%%eax\n\t"
	"jmp 4f\n"
	"3:\tsbbl %%eax,%%eax\n\t"	/* same carry trick as strcmp() */
	"orb $1,%%al\n"
	"4:"
	: "=a" (__res), "=&S" (d0), "=&D" (d1), "=&c" (d2)
	: "1" (cs), "2" (ct), "3" (count));
return __res;
}
#define __HAVE_ARCH_STRCHR
static inline char * strchr(const char * s, int c)
{
int d0;
register char * __res;
__asm__ __volatile__(
	"movb %%al,%%ah\n"	/* keep the wanted byte in %ah */
	"1:\tlodsb\n\t"
	"cmpb %%ah,%%al\n\t"
	"je 2f\n\t"
	"testb %%al,%%al\n\t"
	"jne 1b\n\t"
	"movl $1,%1\n"		/* not found: return NULL (1 - 1) */
	"2:\tmovl %1,%0\n\t"
	"decl %0"		/* %esi is one past the match */
	: "=a" (__res), "=&S" (d0)
	: "1" (s), "0" (c));
return __res;
}
#define __HAVE_ARCH_STRRCHR
static inline char * strrchr(const char * s, int c)
{
int d0, d1;
register char * __res;
__asm__ __volatile__(
	"movb %%al,%%ah\n"
	"1:\tlodsb\n\t"
	"cmpb %%ah,%%al\n\t"
	"jne 2f\n\t"
	"leal -1(%%esi),%0\n"	/* remember the latest match */
	"2:\ttestb %%al,%%al\n\t"
	"jne 1b"
	: "=g" (__res), "=&S" (d0), "=&a" (d1)
	: "0" (0), "1" (s), "2" (c));
return __res;
}
#define __HAVE_ARCH_STRLEN
static inline size_t strlen(const char * s)
{
int d0;
register int __res;
__asm__ __volatile__(
	"repne\n\t"
	"scasb\n\t"		/* scan for the NUL, %ecx counts down */
	"notl %0\n\t"
	"decl %0"		/* turn the remaining count into the length */
	: "=c" (__res), "=&D" (d0)
	: "1" (s), "a" (0), "0" (0xffffffff));
return __res;
}
static inline void * __memcpy(void * to, const void * from, size_t n)
{
int d0, d1, d2;
__asm__ __volatile__(
	"rep ; movsl\n\t"	/* copy n/4 dwords */
	"testb $2,%b4\n\t"	/* then the 0-3 byte tail */
	"je 1f\n\t"
	"movsw\n"
	"1:\ttestb $1,%b4\n\t"
	"je 2f\n\t"
	"movsb\n"
	"2:"
	: "=&c" (d0), "=&D" (d1), "=&S" (d2)
	: "0" (n/4), "q" (n), "1" ((long) to), "2" ((long) from)
	: "memory");
return (to);
}
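/*
 * Worked example: for n = 7 the constraints preload %ecx = n/4 = 1, so
 * "rep ; movsl" copies one dword; bit 1 of n is set, so "movsw" copies
 * two more bytes; bit 0 is set, so "movsb" copies the seventh. %b4 is
 * the byte register holding operand 4, the raw n.
 */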
/*
 * This looks horribly ugly, but the compiler can optimize it totally,
 * as the count is constant.
 */
static inline void * __constant_memcpy(void * to, const void * from, size_t n)
{
	switch (n) {
		case 0:
			return to;
		case 1:
			*(unsigned char *)to = *(const unsigned char *)from;
			return to;
		case 2:
			*(unsigned short *)to = *(const unsigned short *)from;
			return to;
		case 3:
			*(unsigned short *)to = *(const unsigned short *)from;
			*(2+(unsigned char *)to) = *(2+(const unsigned char *)from);
			return to;
		case 4:
			*(unsigned long *)to = *(const unsigned long *)from;
			return to;
		case 6:	/* for Ethernet addresses */
			*(unsigned long *)to = *(const unsigned long *)from;
			*(2+(unsigned short *)to) = *(2+(const unsigned short *)from);
			return to;
		case 8:
			*(unsigned long *)to = *(const unsigned long *)from;
			*(1+(unsigned long *)to) = *(1+(const unsigned long *)from);
			return to;
		case 12:
			*(unsigned long *)to = *(const unsigned long *)from;
			*(1+(unsigned long *)to) = *(1+(const unsigned long *)from);
			*(2+(unsigned long *)to) = *(2+(const unsigned long *)from);
			return to;
		case 16:
			*(unsigned long *)to = *(const unsigned long *)from;
			*(1+(unsigned long *)to) = *(1+(const unsigned long *)from);
			*(2+(unsigned long *)to) = *(2+(const unsigned long *)from);
			*(3+(unsigned long *)to) = *(3+(const unsigned long *)from);
			return to;
		case 20:
			*(unsigned long *)to = *(const unsigned long *)from;
			*(1+(unsigned long *)to) = *(1+(const unsigned long *)from);
			*(2+(unsigned long *)to) = *(2+(const unsigned long *)from);
			*(3+(unsigned long *)to) = *(3+(const unsigned long *)from);
			*(4+(unsigned long *)to) = *(4+(const unsigned long *)from);
			return to;
	}
#define COMMON(x) \
__asm__ __volatile__( \
	"rep ; movsl" \
	x \
	: "=&c" (d0), "=&D" (d1), "=&S" (d2) \
	: "0" (n/4),"1" ((long) to),"2" ((long) from) \
	: "memory");
{
	int d0, d1, d2;
	switch (n % 4) {
		case 0: COMMON(""); return to;
		case 1: COMMON("\n\tmovsb"); return to;
		case 2: COMMON("\n\tmovsw"); return to;
		default: COMMON("\n\tmovsw\n\tmovsb"); return to;
	}
}
#undef COMMON
}
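/*
 * Example: because n is a compile-time constant here, a call such as
 *
 *	memcpy(mac, frame + 6, 6);	(copying an Ethernet address)
 *
 * hits "case 6" above and compiles to one 32-bit and one 16-bit move,
 * with no loop and no function call.
 */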
#define __HAVE_ARCH_MEMCPY
#ifdef CONFIG_X86_USE_3DNOW

/* All this just for in_interrupt() ... */

#include <asm/system.h>
#include <asm/ptrace.h>
#include <linux/smp.h>
#include <linux/spinlock.h>
#include <linux/interrupt.h>
/*
 * This CPU favours 3DNow strongly (eg AMD Athlon)
 */
static inline void * __constant_memcpy3d(void * to, const void * from, size_t len)
{
	if (len < 512 || in_interrupt())
		return __constant_memcpy(to, from, len);
	return _mmx_memcpy(to, from, len);
}
extern __inline__ void *__memcpy3d(void *to, const void *from, size_t len)
{
	if (len < 512 || in_interrupt())
		return __memcpy(to, from, len);
	return _mmx_memcpy(to, from, len);
}
#define memcpy(t, f, n) \
(__builtin_constant_p(n) ? \
 __constant_memcpy3d((t),(f),(n)) : \
 __memcpy3d((t),(f),(n)))
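/*
 * Why the fallback above: MMX/3DNow copies only pay off for large
 * blocks, and _mmx_memcpy() uses the FPU/MMX register file, which
 * cannot safely be saved and restored from interrupt context; hence
 * the "len < 512 || in_interrupt()" test. The __builtin_constant_p(n)
 * dispatch means, e.g., memcpy(buf, src, 16) expands to
 * __constant_memcpy3d() while a runtime-sized copy goes through
 * __memcpy3d().
 */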
#else

/*
 * No 3D Now!
 */

#define memcpy(t, f, n) \
(__builtin_constant_p(n) ? \
 __constant_memcpy((t),(f),(n)) : \
 __memcpy((t),(f),(n)))

#endif
/*
 * struct_cpy(x,y), copy structure *y into (matching structure) *x.
 * (Note the memcpy() argument order below: x is the destination.)
 *
 * We get link-time errors if the structure sizes do not match.
 * There is no runtime overhead, it's all optimized away at
 * compile time.
 */
extern void __struct_cpy_bug (void);

#define struct_cpy(x,y)				\
({						\
	if (sizeof(*(x)) != sizeof(*(y)))	\
		__struct_cpy_bug();		\
	memcpy(x, y, sizeof(*(x)));		\
})
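/*
 * Usage example (with a stand-in type): given two objects of the same
 * structure type,
 *
 *	struct ethhdr a, b;
 *	struct_cpy(&a, &b);	(sizes match, reduces to a plain memcpy)
 *
 * whereas copying between differently-sized structures leaves a call
 * to the undefined __struct_cpy_bug() in the object file, so the build
 * fails at link time instead of silently corrupting memory.
 */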
#define __HAVE_ARCH_MEMMOVE
static inline void * memmove(void * dest, const void * src, size_t n)
{
int d0, d1, d2;
if (dest < src)
__asm__ __volatile__(
	"rep\n\t"
	"movsb"			/* no overlap hazard: copy forwards */
	: "=&c" (d0), "=&S" (d1), "=&D" (d2)
	: "0" (n), "1" (src), "2" (dest)
	: "memory");
else
__asm__ __volatile__(
	"std\n\t"		/* set direction flag: copy backwards */
	"rep\n\t"
	"movsb\n\t"
	"cld"
	: "=&c" (d0), "=&S" (d1), "=&D" (d2)
	: "0" (n),
	  "1" (n-1+(const char *)src),
	  "2" (n-1+(char *)dest)
	: "memory");
return dest;
}
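/*
 * Worked example: memmove(buf+1, buf, 4) has dest > src, so the copy
 * runs backwards from buf[3] down to buf[0]; a forward "rep movsb"
 * would overwrite buf[1..3] before reading them.
 */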
#define memcmp __builtin_memcmp
#define __HAVE_ARCH_MEMCHR
static inline void * memchr(const void * cs, int c, size_t count)
{
int d0;
register void * __res;
if (!count)
	return NULL;
__asm__ __volatile__(
	"repne\n\t"
	"scasb\n\t"
	"je 1f\n\t"
	"movl $1,%0\n"		/* not found: return NULL (1 - 1) */
	"1:\tdecl %0"
	: "=D" (__res), "=&c" (d0)
	: "a" (c), "0" (cs), "1" (count));
return __res;
}
static inline void * __memset_generic(void * s, char c, size_t count)
{
int d0, d1;
__asm__ __volatile__(
	"rep\n\t"
	"stosb"			/* store %al count times */
	: "=&c" (d0), "=&D" (d1)
	: "a" (c), "1" (s), "0" (count)
	: "memory");
return s;
}
/* we might want to write optimized versions of these later */
#define __constant_count_memset(s,c,count) __memset_generic((s),(c),(count))
/*
 * memset(x,0,y) is a reasonably common thing to do, so we want to fill
 * things 32 bits at a time even when we don't know the size of the
 * area at compile-time..
 */
static inline void * __constant_c_memset(void * s, unsigned long c, size_t count)
{
int d0, d1;
__asm__ __volatile__(
	"rep ; stosl\n\t"	/* store count/4 dwords of the pattern */
	"testb $2,%b3\n\t"	/* then the 0-3 byte tail */
	"je 1f\n\t"
	"stosw\n"
	"1:\ttestb $1,%b3\n\t"
	"je 2f\n\t"
	"stosb\n"
	"2:"
	: "=&c" (d0), "=&D" (d1)
	: "a" (c), "q" (count), "0" (count/4), "1" ((long) s)
	: "memory");
return (s);
}
/* Added by Gertjan van Wingerde to make minix and sysv module work */
#define __HAVE_ARCH_STRNLEN
static inline size_t strnlen(const char * s, size_t count)
{
int d0;
register int __res;
__asm__ __volatile__(
	"movl %2,%0\n\t"
	"jmp 2f\n"
	"1:\tcmpb $0,(%0)\n\t"
	"je 3f\n\t"
	"incl %0\n"
	"2:\tdecl %1\n\t"
	"cmpl $-1,%1\n\t"
	"jne 1b\n"
	"3:\tsubl %2,%0"	/* length = end pointer - start pointer */
	: "=a" (__res), "=&d" (d0)
	: "c" (s), "1" (count));
return __res;
}
/* end of additional stuff */
#define __HAVE_ARCH_STRSTR
static inline char * strstr(const char * cs, const char * ct)
{
int	d0, d1;
register char * __res;
__asm__ __volatile__(
	"movl %6,%%edi\n\t"
	"repne\n\t"
	"scasb\n\t"
	"notl %%ecx\n\t"
	"decl %%ecx\n\t"	/* NOTE! This also sets Z if searchstring='' */
	"movl %%ecx,%%edx\n"
	"1:\tmovl %6,%%edi\n\t"
	"movl %%esi,%%eax\n\t"
	"movl %%edx,%%ecx\n\t"
	"repe\n\t"
	"cmpsb\n\t"
	"je 2f\n\t"		/* also works for empty string, see above */
	"xchgl %%eax,%%esi\n\t"
	"incl %%esi\n\t"
	"cmpb $0,-1(%%eax)\n\t"
	"jne 1b\n\t"
	"xorl %%eax,%%eax\n\t"
	"2:"
	: "=a" (__res), "=&c" (d0), "=&S" (d1)
	: "0" (0), "1" (0xffffffff), "2" (cs), "g" (ct)
	: "dx", "di");
return __res;
}
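/*
 * How the search works: the first "repne scasb" pass measures len(ct)
 * into %edx, then each position of cs is compared against ct with
 * "repe cmpsb" until a full match or the end of cs; a naive
 * O(len(cs) * len(ct)) search. E.g. strstr("ababc", "abc") compares
 * at offsets 0 and 1 before matching at offset 2.
 */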
/*
 * This looks horribly ugly, but the compiler can optimize it totally,
 * as we by now know that both pattern and count are constant..
 */
static inline void * __constant_c_and_count_memset(void * s, unsigned long pattern, size_t count)
{
	switch (count) {
		case 0:
			return s;
		case 1:
			*(unsigned char *)s = pattern;
			return s;
		case 2:
			*(unsigned short *)s = pattern;
			return s;
		case 3:
			*(unsigned short *)s = pattern;
			*(2+(unsigned char *)s) = pattern;
			return s;
		case 4:
			*(unsigned long *)s = pattern;
			return s;
	}
#define COMMON(x) \
__asm__ __volatile__( \
	"rep ; stosl" \
	x \
	: "=&c" (d0), "=&D" (d1) \
	: "a" (pattern),"0" (count/4),"1" ((long) s) \
	: "memory")
{
	int d0, d1;
	switch (count % 4) {
		case 0: COMMON(""); return s;
		case 1: COMMON("\n\tstosb"); return s;
		case 2: COMMON("\n\tstosw"); return s;
		default: COMMON("\n\tstosw\n\tstosb"); return s;
	}
}
#undef COMMON
}
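/*
 * Worked example: for count = 7 none of the cases above match, so
 * COMMON() runs with %ecx = 7/4 = 1 ("rep ; stosl" stores one dword)
 * and count % 4 == 3 appends "stosw" plus "stosb" for the tail.
 */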
#define __constant_c_x_memset(s, c, count) \
(__builtin_constant_p(count) ? \
 __constant_c_and_count_memset((s),(c),(count)) : \
 __constant_c_memset((s),(c),(count)))

#define __memset(s, c, count) \
(__builtin_constant_p(count) ? \
 __constant_count_memset((s),(c),(count)) : \
 __memset_generic((s),(c),(count)))
#define __HAVE_ARCH_MEMSET
#define memset(s, c, count) \
(__builtin_constant_p(c) ? \
 __constant_c_x_memset((s),(0x01010101UL*(unsigned char)(c)),(count)) : \
 __memset((s),(c),(count)))
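/*
 * Worked example: the multiply replicates the fill byte into all four
 * lanes of a 32-bit pattern, so the fill can use "stosl":
 *
 *	0x01010101UL * 0xAB  ==  0xABABABAB
 *	0x01010101UL * 0x00  ==  0x00000000   (the common memset(x,0,y))
 */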
/*
 * find the first occurrence of byte 'c', or 1 past the area if none
 */
#define __HAVE_ARCH_MEMSCAN
static inline void * memscan(void * addr, int c, size_t size)
{
	if (!size)
		return addr;
	__asm__("repnz; scasb\n\t"
		"jnz 1f\n\t"
		"dec %%edi\n"	/* found: back up onto the matching byte */
		"1:"
		: "=D" (addr), "=c" (size)
		: "0" (addr), "1" (size), "a" (c));
	return addr;
}
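/*
 * Usage example: unlike memchr(), memscan() never returns NULL; a miss
 * yields the first byte past the area, so a typical caller checks the
 * return value against addr + size:
 *
 *	char *p = memscan(buf, 0, len);
 *	if (p == buf + len)
 *		...	(no zero byte anywhere in buf[0..len-1])
 */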
#endif /* CONFIG_X86_USE_STRING_486 */
#endif /* __KERNEL__ */
#endif /* _I386_STRING_H_ */