#ifndef _I386_STRING_H_
#define _I386_STRING_H_

#ifdef __KERNEL__

/* Let gcc decide whether to inline or use the out of line functions */

#define __HAVE_ARCH_STRCPY
extern char *strcpy(char *dest, const char *src);

#define __HAVE_ARCH_STRNCPY
extern char *strncpy(char *dest, const char *src, size_t count);

#define __HAVE_ARCH_STRCAT
extern char *strcat(char *dest, const char *src);

#define __HAVE_ARCH_STRNCAT
extern char *strncat(char *dest, const char *src, size_t count);

#define __HAVE_ARCH_STRCMP
extern int strcmp(const char *cs, const char *ct);

#define __HAVE_ARCH_STRNCMP
extern int strncmp(const char *cs, const char *ct, size_t count);

#define __HAVE_ARCH_STRCHR
extern char *strchr(const char *s, int c);

#define __HAVE_ARCH_STRLEN
extern size_t strlen(const char *s);

static __always_inline void * __memcpy(void * to, const void * from, size_t n)
{
	int d0, d1, d2;
	__asm__ __volatile__(
		"rep ; movsl\n\t"	/* copy n/4 longwords */
		"movl %4,%%ecx\n\t"
		"andl $3,%%ecx\n\t"
		"jz 1f\n\t"		/* skip if no trailing bytes */
		"rep ; movsb\n\t"	/* copy the remaining n%4 bytes */
		"1:"
		: "=&c" (d0), "=&D" (d1), "=&S" (d2)
		: "0" (n/4), "g" (n), "1" ((long) to), "2" ((long) from)
		: "memory");
	return (to);
}
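
/*
 * Illustrative sketch, not part of the original header: a plain-C
 * equivalent of the inline asm above, for readers unfamiliar with the
 * string instructions.  The function name is made up for the example.
 */
#if 0
static void *__memcpy_c_sketch(void *to, const void *from, size_t n)
{
	unsigned long *dl = to;			/* 4-byte units on i386 */
	const unsigned long *sl = from;
	const unsigned char *sb;
	unsigned char *db;
	size_t i;

	for (i = 0; i < n / 4; i++)		/* "rep ; movsl" */
		*dl++ = *sl++;
	db = (unsigned char *) dl;
	sb = (const unsigned char *) sl;
	for (i = 0; i < (n & 3); i++)		/* trailing "rep ; movsb" */
		*db++ = *sb++;
	return to;
}
#endif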

/*
 * This looks ugly, but the compiler can optimize it totally,
 * as the count is constant.
 */
static __always_inline void * __constant_memcpy(void * to, const void * from, size_t n)
{
	long esi, edi;
	if (!n) return to;
#if 1	/* want to do small copies with non-string ops? */
	switch (n) {
		case 1: *(char*)to = *(char*)from; return to;
		case 2: *(short*)to = *(short*)from; return to;
		case 4: *(int*)to = *(int*)from; return to;
#if 1	/* including those doable with two moves? */
		case 3: *(short*)to = *(short*)from;
			*((char*)to+2) = *((char*)from+2); return to;
		case 5: *(int*)to = *(int*)from;
			*((char*)to+4) = *((char*)from+4); return to;
		case 6: *(int*)to = *(int*)from;
			*((short*)to+2) = *((short*)from+2); return to;
		case 8: *(int*)to = *(int*)from;
			*((int*)to+1) = *((int*)from+1); return to;
#endif
	}
#endif
	esi = (long) from;
	edi = (long) to;
	if (n >= 5*4) {
		/* large block: use rep prefix */
		int ecx;
		__asm__ __volatile__(
			"rep ; movsl"
			: "=&c" (ecx), "=&D" (edi), "=&S" (esi)
			: "0" (n/4), "1" (edi),"2" (esi)
			: "memory"
		);
	} else {
		/* small block: don't clobber ecx + smaller code */
		if (n >= 4*4) __asm__ __volatile__("movsl"
			:"=&D"(edi),"=&S"(esi):"0"(edi),"1"(esi):"memory");
		if (n >= 3*4) __asm__ __volatile__("movsl"
			:"=&D"(edi),"=&S"(esi):"0"(edi),"1"(esi):"memory");
		if (n >= 2*4) __asm__ __volatile__("movsl"
			:"=&D"(edi),"=&S"(esi):"0"(edi),"1"(esi):"memory");
		if (n >= 1*4) __asm__ __volatile__("movsl"
			:"=&D"(edi),"=&S"(esi):"0"(edi),"1"(esi):"memory");
	}
	switch (n % 4) {
		/* tail */
		case 0: return to;
		case 1: __asm__ __volatile__("movsb"
			:"=&D"(edi),"=&S"(esi):"0"(edi),"1"(esi):"memory");
			return to;
		case 2: __asm__ __volatile__("movsw"
			:"=&D"(edi),"=&S"(esi):"0"(edi),"1"(esi):"memory");
			return to;
		default: __asm__ __volatile__("movsw\n\tmovsb"
			:"=&D"(edi),"=&S"(esi):"0"(edi),"1"(esi):"memory");
			return to;
	}
}
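
/*
 * Illustrative only, not part of the original header: since n is a
 * compile-time constant at every call site, gcc folds the switch and
 * the if-ladder above away entirely.  The made-up caller below, for
 * instance, compiles to one 32-bit move plus one 16-bit move.
 */
#if 0
static char dst6[6], src6[6];		/* names invented for the example */
static void constant_memcpy_example(void)
{
	__constant_memcpy(dst6, src6, 6);	/* hits "case 6" above */
}
#endif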

#define __HAVE_ARCH_MEMCPY

#ifdef CONFIG_X86_USE_3DNOW

#include <asm/mmx.h>

/*
 *	This CPU favours 3DNow strongly (eg AMD Athlon)
 */

static inline void * __constant_memcpy3d(void * to, const void * from, size_t len)
{
	if (len < 512)
		return __constant_memcpy(to, from, len);
	return _mmx_memcpy(to, from, len);
}

static __inline__ void *__memcpy3d(void *to, const void *from, size_t len)
{
	if (len < 512)
		return __memcpy(to, from, len);
	return _mmx_memcpy(to, from, len);
}

#define memcpy(t, f, n) \
(__builtin_constant_p(n) ? \
 __constant_memcpy3d((t),(f),(n)) : \
 __memcpy3d((t),(f),(n)))

#else

/*
 *	No 3D Now!
 */

#define memcpy(t, f, n) \
(__builtin_constant_p(n) ? \
 __constant_memcpy((t),(f),(n)) : \
 __memcpy((t),(f),(n)))

#endif
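
/*
 * Illustrative only, not part of the original header: how the memcpy()
 * macro dispatches.  __builtin_constant_p(n) is true when gcc can prove
 * n constant at compile time, so the first call below expands through
 * the unrolled __constant_memcpy() while the second uses the "rep"
 * based __memcpy().  Names are invented for the example.
 */
#if 0
static void memcpy_dispatch_example(void *d, const void *s, size_t runtime_n)
{
	memcpy(d, s, 16);		/* constant count: __constant_memcpy() */
	memcpy(d, s, runtime_n);	/* variable count: __memcpy() */
}
#endif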

#define __HAVE_ARCH_MEMMOVE
void *memmove(void * dest,const void * src, size_t n);

#define memcmp __builtin_memcmp

#define __HAVE_ARCH_MEMCHR
extern void *memchr(const void * cs,int c,size_t count);

static inline void * __memset_generic(void * s, char c,size_t count)
{
	int d0, d1;
	__asm__ __volatile__(
		"rep\n\t"
		"stosb"
		: "=&c" (d0), "=&D" (d1)
		:"a" (c),"1" (s),"0" (count)
		:"memory");
	return s;
}
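
/*
 * Illustrative only, not part of the original header: the asm above is
 * just "rep stosb" -- store the byte in %al to (%edi), %ecx times.
 * A plain-C equivalent, with a made-up name:
 */
#if 0
static void *__memset_generic_c_sketch(void *s, char c, size_t count)
{
	unsigned char *p = s;

	while (count--)			/* one "stosb" per iteration */
		*p++ = (unsigned char) c;
	return s;
}
#endif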

/* we might want to write optimized versions of these later */
#define __constant_count_memset(s,c,count) __memset_generic((s),(c),(count))

/*
 * memset(x,0,y) is a reasonably common thing to do, so we want to fill
 * things 32 bits at a time even when we don't know the size of the
 * area at compile-time..
 */
static __always_inline void * __constant_c_memset(void * s, unsigned long c, size_t count)
{
	int d0, d1;
	__asm__ __volatile__(
		"rep ; stosl\n\t"
		"testb $2,%b3\n\t"
		"je 1f\n\t"
		"stosw\n"
		"1:\ttestb $1,%b3\n\t"
		"je 2f\n\t"
		"stosb\n"
		"2:"
		:"=&c" (d0), "=&D" (d1)
		:"a" (c), "q" (count), "0" (count/4), "1" ((long) s)
		:"memory");
	return (s);
}
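
/*
 * Illustrative only, not part of the original header: what the asm above
 * computes.  "rep ; stosl" stores count/4 longwords of the replicated
 * pattern, then bits 1 and 0 of count select an optional trailing
 * 16-bit ("stosw") and 8-bit ("stosb") store.  Name is made up.
 */
#if 0
static void *__constant_c_memset_c_sketch(void *s, unsigned long c, size_t count)
{
	unsigned long *pl = s;
	unsigned char *pb;
	size_t i;

	for (i = 0; i < count / 4; i++)		/* "rep ; stosl" */
		*pl++ = c;
	pb = (unsigned char *) pl;
	if (count & 2) {			/* "testb $2,%b3" -> "stosw" */
		*(unsigned short *) pb = (unsigned short) c;
		pb += 2;
	}
	if (count & 1)				/* "testb $1,%b3" -> "stosb" */
		*pb = (unsigned char) c;
	return s;
}
#endif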

/* Added by Gertjan van Wingerde to make minix and sysv module work */
#define __HAVE_ARCH_STRNLEN
extern size_t strnlen(const char * s, size_t count);
/* end of additional stuff */

#define __HAVE_ARCH_STRSTR

extern char *strstr(const char *cs, const char *ct);

/*
 * This looks horribly ugly, but the compiler can optimize it totally,
 * as we by now know that both pattern and count are constant..
 */
static __always_inline void * __constant_c_and_count_memset(void * s, unsigned long pattern, size_t count)
{
	switch (count) {
		case 0:
			return s;
		case 1:
			*(unsigned char *)s = pattern;
			return s;
		case 2:
			*(unsigned short *)s = pattern;
			return s;
		case 3:
			*(unsigned short *)s = pattern;
			*(2+(unsigned char *)s) = pattern;
			return s;
		case 4:
			*(unsigned long *)s = pattern;
			return s;
	}
#define COMMON(x) \
__asm__ __volatile__( \
	"rep ; stosl" \
	x \
	: "=&c" (d0), "=&D" (d1) \
	: "a" (pattern),"0" (count/4),"1" ((long) s) \
	: "memory")
{
	int d0, d1;
	switch (count % 4) {
		case 0: COMMON(""); return s;
		case 1: COMMON("\n\tstosb"); return s;
		case 2: COMMON("\n\tstosw"); return s;
		default: COMMON("\n\tstosw\n\tstosb"); return s;
	}
}
#undef COMMON
}
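
/*
 * Illustrative only, not part of the original header: COMMON() pastes a
 * constant instruction tail onto the "rep ; stosl" template, so each
 * count%4 case above expands to a single asm statement.  Schematically,
 * the default case becomes:
 */
#if 0
__asm__ __volatile__(
	"rep ; stosl"
	"\n\tstosw\n\tstosb"		/* the pasted tail for count%4 == 3 */
	: "=&c" (d0), "=&D" (d1)
	: "a" (pattern), "0" (count/4), "1" ((long) s)
	: "memory");
#endif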

#define __constant_c_x_memset(s, c, count) \
(__builtin_constant_p(count) ? \
 __constant_c_and_count_memset((s),(c),(count)) : \
 __constant_c_memset((s),(c),(count)))

#define __memset(s, c, count) \
(__builtin_constant_p(count) ? \
 __constant_count_memset((s),(c),(count)) : \
 __memset_generic((s),(c),(count)))

#define __HAVE_ARCH_MEMSET
#define memset(s, c, count) \
(__builtin_constant_p(c) ? \
 __constant_c_x_memset((s),(0x01010101UL*(unsigned char)(c)),(count)) : \
 __memset((s),(c),(count)))
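
/*
 * Illustrative only, not part of the original header: the multiply by
 * 0x01010101UL broadcasts the fill byte into all four bytes of a long,
 * e.g. 0x01010101UL * 0xAB == 0xABABABABUL, which lets the stosl-based
 * helpers store four bytes per iteration.  Name is made up.
 */
#if 0
static void memset_broadcast_example(void *p)
{
	memset(p, 0xAB, 64);	/* pattern 0xABABABABUL, 16 "stosl" stores */
}
#endif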

/*
 * find the first occurrence of byte 'c', or 1 past the area if none
 */
#define __HAVE_ARCH_MEMSCAN
extern void *memscan(void * addr, int c, size_t size);
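
/*
 * Illustrative only, not part of the original header: a portable sketch
 * of memscan()'s contract.  Unlike memchr(), which returns NULL on a
 * miss, memscan() returns the address one past the end of the area.
 * Name is made up.
 */
#if 0
static void *memscan_c_sketch(void *addr, int c, size_t size)
{
	unsigned char *p = addr;

	while (size) {
		if (*p == (unsigned char) c)
			return p;
		p++;
		size--;
	}
	return p;	/* one past the area when 'c' is not found */
}
#endif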

#endif /* __KERNEL__ */

#endif