#ifndef _I386_STRING_H_
#define _I386_STRING_H_

#ifdef __KERNEL__
#include <linux/config.h>
/*
 * On a 486 or Pentium, we are better off not using the
 * byte string operations. But on a 386 or a PPro the
 * byte string ops are faster than doing it by hand
 * (MUCH faster on a Pentium).
 */
/*
 * This string-include defines all string functions as inline
 * functions. Use gcc. It also assumes ds=es=data space; this should be
 * normal. Most of the string-functions are rather heavily hand-optimized,
 * see especially strsep,strstr,str[c]spn. They should work, but are not
 * very easy to understand. Everything is done entirely within the register
 * set, making the functions fast and clean. String instructions have been
 * used throughout, making for "slightly" unclear code :-)
 *
 *		NO Copyright (C) 1991, 1992 Linus Torvalds,
 *		consider these trivial functions to be PD.
 */
/* AK: in fact I bet it would be better to move this stuff all out of line.
 */
#if !defined(IN_STRING_C)
#define __HAVE_ARCH_STRCPY
static inline char * strcpy(char * dest,const char *src)
{
int d0, d1, d2;
__asm__ __volatile__(
	"1:\tlodsb\n\t"		/* load byte from src... */
	"stosb\n\t"		/* ...store it to dest */
	"testb %%al,%%al\n\t"
	"jne 1b"		/* loop until the NUL has been copied */
	: "=&S" (d0), "=&D" (d1), "=&a" (d2)
	:"0" (src),"1" (dest) : "memory");
return dest;
}
#define __HAVE_ARCH_STRNCPY
static inline char * strncpy(char * dest,const char *src,size_t count)
{
int d0, d1, d2, d3;
__asm__ __volatile__(
	"1:\tdecl %2\n\t"
	"js 2f\n\t"
	"lodsb\n\t"
	"stosb\n\t"
	"testb %%al,%%al\n\t"
	"jne 1b\n\t"
	"rep\n\t"
	"stosb\n"		/* pad the remainder with NULs */
	"2:"
	: "=&S" (d0), "=&D" (d1), "=&c" (d2), "=&a" (d3)
	:"0" (src),"1" (dest),"2" (count) : "memory");
return dest;
}
#define __HAVE_ARCH_STRCAT
static inline char * strcat(char * dest,const char * src)
{
int d0, d1, d2, d3;
__asm__ __volatile__(
	"repne\n\t"
	"scasb\n\t"		/* find the NUL at the end of dest */
	"decl %1\n"
	"1:\tlodsb\n\t"
	"stosb\n\t"
	"testb %%al,%%al\n\t"
	"jne 1b"
	: "=&S" (d0), "=&D" (d1), "=&a" (d2), "=&c" (d3)
	: "0" (src), "1" (dest), "2" (0), "3" (0xffffffffu):"memory");
return dest;
}
#define __HAVE_ARCH_STRNCAT
static inline char * strncat(char * dest,const char * src,size_t count)
{
int d0, d1, d2, d3;
__asm__ __volatile__(
	"repne\n\t"
	"scasb\n\t"		/* find the NUL at the end of dest */
	"decl %1\n\t"
	"movl %8,%3\n"
	"1:\tdecl %3\n\t"
	"js 2f\n\t"
	"lodsb\n\t"
	"stosb\n\t"
	"testb %%al,%%al\n\t"
	"jne 1b\n"
	"2:\txorl %2,%2\n\t"	/* always NUL-terminate the result */
	"stosb"
	: "=&S" (d0), "=&D" (d1), "=&a" (d2), "=&c" (d3)
	: "0" (src),"1" (dest),"2" (0),"3" (0xffffffffu), "g" (count)
	: "memory");
return dest;
}
#define __HAVE_ARCH_STRCMP
static inline int strcmp(const char * cs,const char * ct)
{
int d0, d1;
register int __res;
__asm__ __volatile__(
	"1:\tlodsb\n\t"
	"scasb\n\t"
	"jne 2f\n\t"
	"testb %%al,%%al\n\t"
	"jne 1b\n\t"
	"xorl %%eax,%%eax\n\t"
	"jmp 3f\n"
	"2:\tsbbl %%eax,%%eax\n\t"
	"orb $1,%%al\n"
	"3:"
	:"=a" (__res), "=&S" (d0), "=&D" (d1)
	:"1" (cs),"2" (ct));
return __res;
}
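/*
 * The "sbbl %eax,%eax; orb $1,%al" tail above converts the carry flag
 * left by the mismatching scasb into -1 or +1, giving strcmp() its
 * negative/zero/positive result without any per-byte sign handling.
 */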
#define __HAVE_ARCH_STRNCMP
static inline int strncmp(const char * cs,const char * ct,size_t count)
{
register int __res;
int d0, d1, d2;
__asm__ __volatile__(
	"1:\tdecl %3\n\t"
	"js 2f\n\t"
	"lodsb\n\t"
	"scasb\n\t"
	"jne 3f\n\t"
	"testb %%al,%%al\n\t"
	"jne 1b\n"
	"2:\txorl %%eax,%%eax\n\t"
	"jmp 4f\n"
	"3:\tsbbl %%eax,%%eax\n\t"
	"orb $1,%%al\n"
	"4:"
	:"=a" (__res), "=&S" (d0), "=&D" (d1), "=&c" (d2)
	:"1" (cs),"2" (ct),"3" (count));
return __res;
}
#define __HAVE_ARCH_STRCHR
static inline char * strchr(const char * s, int c)
{
int d0;
register char * __res;
__asm__ __volatile__(
	"movb %%al,%%ah\n"
	"1:\tlodsb\n\t"
	"cmpb %%ah,%%al\n\t"
	"je 2f\n\t"
	"testb %%al,%%al\n\t"
	"jne 1b\n\t"
	"movl $1,%1\n"		/* not found: return 1-1 = NULL */
	"2:\tmovl %1,%0\n\t"
	"decl %0"
	:"=a" (__res), "=&S" (d0) : "1" (s),"0" (c));
return __res;
}
#define __HAVE_ARCH_STRRCHR
static inline char * strrchr(const char * s, int c)
{
int d0, d1;
register char * __res;
__asm__ __volatile__(
	"movb %%al,%%ah\n"
	"1:\tlodsb\n\t"
	"cmpb %%ah,%%al\n\t"
	"jne 2f\n\t"
	"leal -1(%%esi),%0\n"	/* remember the latest match */
	"2:\ttestb %%al,%%al\n\t"
	"jne 1b"
	:"=g" (__res), "=&S" (d0), "=&a" (d1) :"0" (0),"1" (s),"2" (c));
return __res;
}
#define __HAVE_ARCH_STRLEN
static inline size_t strlen(const char * s)
{
int d0;
register int __res;
__asm__ __volatile__(
	"repne\n\t"
	"scasb\n\t"
	"notl %0\n\t"
	"decl %0"
	:"=c" (__res), "=&D" (d0) :"1" (s),"a" (0), "0" (0xffffffffu));
return __res;
}
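/*
 * Worked example for s = "ab": %ecx starts at 0xffffffff and is
 * decremented once per byte scanned, including the terminating NUL
 * (three bytes, leaving 0xfffffffc).  "notl" turns that into 3 and
 * "decl" drops the NUL, returning 2.
 */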
#endif /* !IN_STRING_C */

static inline void * __memcpy(void * to, const void * from, size_t n)
{
int d0, d1, d2;
__asm__ __volatile__(
	"rep ; movsl\n\t"	/* copy n/4 dwords... */
	"testb $2,%b4\n\t"	/* ...then up to three tail bytes */
	"je 1f\n\t"
	"movsw\n"
	"1:\ttestb $1,%b4\n\t"
	"je 2f\n\t"
	"movsb\n"
	"2:"
	: "=&c" (d0), "=&D" (d1), "=&S" (d2)
	:"0" (n/4), "q" (n),"1" ((long) to),"2" ((long) from)
	: "memory");
return (to);
}
/*
 * This looks horribly ugly, but the compiler can optimize it totally,
 * as the count is constant.
 */
static inline void * __constant_memcpy(void * to, const void * from, size_t n)
{
	if (n <= 128)
		return __builtin_memcpy(to, from, n);

#define COMMON(x) \
__asm__ __volatile__( \
	"rep ; movsl" \
	x \
	: "=&c" (d0), "=&D" (d1), "=&S" (d2) \
	: "0" (n/4),"1" ((long) to),"2" ((long) from) \
	: "memory");
{
	int d0, d1, d2;
	switch (n % 4) {
		case 0: COMMON(""); return to;
		case 1: COMMON("\n\tmovsb"); return to;
		case 2: COMMON("\n\tmovsw"); return to;
		default: COMMON("\n\tmovsw\n\tmovsb"); return to;
	}
}

#undef COMMON
}
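/*
 * For instance, a constant-size call such as
 *
 *	__constant_memcpy(dst, src, 258);
 *
 * folds n/4 and n%4 at compile time: gcc preloads %ecx with 64 for the
 * "rep ; movsl", keeps only the "\n\tmovsw" case for the two trailing
 * bytes, and discards the other switch arms entirely.
 */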
#define __HAVE_ARCH_MEMCPY

#ifdef CONFIG_X86_USE_3DNOW

#include <asm/mmx.h>

/*
 *	This CPU favours 3DNow strongly (eg AMD Athlon)
 */

static inline void * __constant_memcpy3d(void * to, const void * from, size_t len)
{
	if (len < 512)
		return __constant_memcpy(to, from, len);
	return _mmx_memcpy(to, from, len);
}
static __inline__ void *__memcpy3d(void *to, const void *from, size_t len)
{
	if (len < 512)
		return __memcpy(to, from, len);
	return _mmx_memcpy(to, from, len);
}
#define memcpy(t, f, n) \
(__builtin_constant_p(n) ? \
 __constant_memcpy3d((t),(f),(n)) : \
 __memcpy3d((t),(f),(n)))

#else

/*
 *	No 3D Now!
 */

#define memcpy(t, f, n) \
(__builtin_constant_p(n) ? \
 __constant_memcpy((t),(f),(n)) : \
 __memcpy((t),(f),(n)))

#endif
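/*
 * In other words, memcpy() with a compile-time-constant length expands
 * via __constant_memcpy{,3d}(), while a runtime length dispatches to
 * __memcpy{,3d}(); __builtin_constant_p() resolves at compile time, so
 * the ?: itself costs nothing.
 */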
#define __HAVE_ARCH_MEMMOVE
void *memmove(void * dest,const void * src, size_t n);

#define memcmp __builtin_memcmp
#define __HAVE_ARCH_MEMCHR
static inline void * memchr(const void * cs,int c,size_t count)
{
int d0;
register void * __res;
if (!count)
	return NULL;
__asm__ __volatile__(
	"repne\n\t"
	"scasb\n\t"
	"je 1f\n\t"
	"movl $1,%0\n"
	"1:\tdecl %0"
	:"=D" (__res), "=&c" (d0) : "a" (c),"0" (cs),"1" (count));
return __res;
}
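/*
 * On a miss the "movl $1,%0; 1:\tdecl %0" tail above returns NULL; on
 * a hit, decl backs %edi up from one-past-the-match onto the matching
 * byte itself.
 */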
static inline void * __memset_generic(void * s, char c,size_t count)
{
int d0, d1;
__asm__ __volatile__(
	"rep\n\t"
	"stosb"
	: "=&c" (d0), "=&D" (d1)
	:"a" (c),"1" (s),"0" (count)
	:"memory");
return s;
}
/* we might want to write optimized versions of these later */
#define __constant_count_memset(s,c,count) __memset_generic((s),(c),(count))
/*
 * memset(x,0,y) is a reasonably common thing to do, so we want to fill
 * things 32 bits at a time even when we don't know the size of the
 * area at compile-time..
 */
static inline void * __constant_c_memset(void * s, unsigned long c, size_t count)
{
int d0, d1;
__asm__ __volatile__(
	"rep ; stosl\n\t"	/* fill count/4 dwords... */
	"testb $2,%b3\n\t"	/* ...then up to three tail bytes */
	"je 1f\n\t"
	"stosw\n"
	"1:\ttestb $1,%b3\n\t"
	"je 2f\n\t"
	"stosb\n"
	"2:"
	: "=&c" (d0), "=&D" (d1)
	:"a" (c), "q" (count), "0" (count/4), "1" ((long) s)
	:"memory");
return (s);
}
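/*
 * Note that c must arrive here with the fill byte already replicated
 * into all four byte lanes (e.g. 0xabababab to fill with 0xab); the
 * memset() macro below takes care of that replication.
 */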
/* Added by Gertjan van Wingerde to make the minix and sysv modules work */
#define __HAVE_ARCH_STRNLEN
static inline size_t strnlen(const char * s, size_t count)
{
int d0;
register int __res;
__asm__ __volatile__(
	"movl %2,%0\n\t"
	"jmp 2f\n"
	"1:\tcmpb $0,(%0)\n\t"
	"je 3f\n\t"
	"incl %0\n"
	"2:\tdecl %1\n\t"
	"cmpl $-1,%1\n\t"
	"jne 1b\n"
	"3:\tsubl %2,%0"
	:"=a" (__res), "=&d" (d0)
	:"c" (s),"1" (count));
return __res;
}
/* end of additional stuff */
#define __HAVE_ARCH_STRSTR

extern char *strstr(const char *cs, const char *ct);
/*
 * This looks horribly ugly, but the compiler can optimize it totally,
 * as we by now know that both pattern and count are constant..
 */
static inline void * __constant_c_and_count_memset(void * s, unsigned long pattern, size_t count)
{
	switch (count) {
		case 0:
			return s;
		case 1:
			*(unsigned char *)s = pattern;
			return s;
		case 2:
			*(unsigned short *)s = pattern;
			return s;
		case 3:
			*(unsigned short *)s = pattern;
			*(2+(unsigned char *)s) = pattern;
			return s;
		case 4:
			*(unsigned long *)s = pattern;
			return s;
	}
#define COMMON(x) \
__asm__  __volatile__( \
	"rep ; stosl" \
	x \
	: "=&c" (d0), "=&D" (d1) \
	: "a" (pattern),"0" (count/4),"1" ((long) s) \
	: "memory")
{
	int d0, d1;
	switch (count % 4) {
		case 0: COMMON(""); return s;
		case 1: COMMON("\n\tstosb"); return s;
		case 2: COMMON("\n\tstosw"); return s;
		default: COMMON("\n\tstosw\n\tstosb"); return s;
	}
}

#undef COMMON
}
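/*
 * E.g. __constant_c_and_count_memset(p, 0x01010101UL, 6) falls past
 * the small-size switch and compiles to "rep ; stosl" with %ecx = 1
 * plus a single trailing "stosw"; like the memcpy case above, the
 * count%4 switch folds away at compile time.
 */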
#define __constant_c_x_memset(s, c, count) \
(__builtin_constant_p(count) ? \
 __constant_c_and_count_memset((s),(c),(count)) : \
 __constant_c_memset((s),(c),(count)))

#define __memset(s, c, count) \
(__builtin_constant_p(count) ? \
 __constant_count_memset((s),(c),(count)) : \
 __memset_generic((s),(c),(count)))

#define __HAVE_ARCH_MEMSET
#define memset(s, c, count) \
(__builtin_constant_p(c) ? \
 __constant_c_x_memset((s),(0x01010101UL*(unsigned char)(c)),(count)) : \
 __memset((s),(c),(count)))
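/*
 * The 0x01010101UL multiply replicates the fill byte into every byte
 * of a 32-bit word: memset(p, 0xab, n) turns into stores of the
 * pattern 0xabababab, so the area can be filled a dword at a time.
 */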
/*
 * find the first occurrence of byte 'c', or 1 past the area if none
 */
#define __HAVE_ARCH_MEMSCAN
static inline void * memscan(void * addr, int c, size_t size)
{
	if (!size)
		return addr;
	__asm__("repnz; scasb\n\t"
		"jnz 1f\n\t"
		"dec %%edi\n"
		"1:"
		: "=D" (addr), "=c" (size)
		: "0" (addr), "1" (size), "a" (c));
	return addr;
}
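/*
 * Usage note: memscan(buf, 0, size) returns a pointer to the first
 * zero byte, or buf + size if there is none; unlike memchr(), it
 * never returns NULL.
 */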
#endif /* __KERNEL__ */

#endif /* _I386_STRING_H_ */