include/asm-x86/string_32.h
#ifndef _I386_STRING_H_
#define _I386_STRING_H_

#ifdef __KERNEL__

/* Let gcc decide whether to inline or use the out of line functions */

#define __HAVE_ARCH_STRCPY
extern char *strcpy(char *dest, const char *src);

#define __HAVE_ARCH_STRNCPY
extern char *strncpy(char *dest, const char *src, size_t count);

#define __HAVE_ARCH_STRCAT
extern char *strcat(char *dest, const char *src);

#define __HAVE_ARCH_STRNCAT
extern char *strncat(char *dest, const char *src, size_t count);

#define __HAVE_ARCH_STRCMP
extern int strcmp(const char *cs, const char *ct);

#define __HAVE_ARCH_STRNCMP
extern int strncmp(const char *cs, const char *ct, size_t count);

#define __HAVE_ARCH_STRCHR
extern char *strchr(const char *s, int c);

#define __HAVE_ARCH_STRLEN
extern size_t strlen(const char *s);
static __always_inline void * __memcpy(void * to, const void * from, size_t n)
{
	int d0, d1, d2;
	__asm__ __volatile__(
		"rep ; movsl\n\t"
		"movl %4,%%ecx\n\t"
		"andl $3,%%ecx\n\t"
		"jz 1f\n\t"
		"rep ; movsb\n\t"
		"1:"
		: "=&c" (d0), "=&D" (d1), "=&S" (d2)
		: "0" (n/4), "g" (n), "1" ((long) to), "2" ((long) from)
		: "memory");
	return (to);
}
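/*
 * Rough C sketch of the asm above (illustrative only; the variable
 * names are hypothetical, not part of this header): the "0"/"1"/"2"
 * input constraints tie %ecx/%edi/%esi to the dword count and the two
 * pointers, so "rep ; movsl" copies n/4 dwords and the movsb tail
 * picks up the remaining n%4 bytes:
 *
 *	unsigned char *d = to;
 *	const unsigned char *s = from;
 *	size_t i;
 *	for (i = 0; i + 4 <= n; i += 4)		// "rep ; movsl"
 *		*(int *)(d + i) = *(const int *)(s + i);
 *	for (; i < n; i++)			// "rep ; movsb" tail
 *		d[i] = s[i];
 */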
/*
 * This looks ugly, but the compiler can optimize it totally,
 * as the count is constant.
 */
static __always_inline void * __constant_memcpy(void * to, const void * from, size_t n)
{
	long esi, edi;
	if (!n) return to;
#if 1	/* want to do small copies with non-string ops? */
	switch (n) {
		case 1: *(char*)to = *(char*)from; return to;
		case 2: *(short*)to = *(short*)from; return to;
		case 4: *(int*)to = *(int*)from; return to;
#if 1	/* including those doable with two moves? */
		case 3: *(short*)to = *(short*)from;
			*((char*)to+2) = *((char*)from+2); return to;
		case 5: *(int*)to = *(int*)from;
			*((char*)to+4) = *((char*)from+4); return to;
		case 6: *(int*)to = *(int*)from;
			*((short*)to+2) = *((short*)from+2); return to;
		case 8: *(int*)to = *(int*)from;
			*((int*)to+1) = *((int*)from+1); return to;
#endif
	}
#endif
	esi = (long) from;
	edi = (long) to;
	if (n >= 5*4) {
		/* large block: use rep prefix */
		int ecx;
		__asm__ __volatile__(
			"rep ; movsl"
			: "=&c" (ecx), "=&D" (edi), "=&S" (esi)
			: "0" (n/4), "1" (edi),"2" (esi)
			: "memory"
		);
	} else {
		/* small block: don't clobber ecx + smaller code */
		if (n >= 4*4) __asm__ __volatile__("movsl"
			:"=&D"(edi),"=&S"(esi):"0"(edi),"1"(esi):"memory");
		if (n >= 3*4) __asm__ __volatile__("movsl"
			:"=&D"(edi),"=&S"(esi):"0"(edi),"1"(esi):"memory");
		if (n >= 2*4) __asm__ __volatile__("movsl"
			:"=&D"(edi),"=&S"(esi):"0"(edi),"1"(esi):"memory");
		if (n >= 1*4) __asm__ __volatile__("movsl"
			:"=&D"(edi),"=&S"(esi):"0"(edi),"1"(esi):"memory");
	}
	switch (n % 4) {
		/* tail */
		case 0: return to;
		case 1: __asm__ __volatile__("movsb"
			:"=&D"(edi),"=&S"(esi):"0"(edi),"1"(esi):"memory");
			return to;
		case 2: __asm__ __volatile__("movsw"
			:"=&D"(edi),"=&S"(esi):"0"(edi),"1"(esi):"memory");
			return to;
		default: __asm__ __volatile__("movsw\n\tmovsb"
			:"=&D"(edi),"=&S"(esi):"0"(edi),"1"(esi):"memory");
			return to;
	}
}
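/*
 * Illustrative example (hypothetical variables, not original text):
 * with a compile-time constant size, the switch above lets gcc fold
 * the whole copy into plain moves, e.g.
 *
 *	struct pair { int a, b; } x, y;
 *	memcpy(&y, &x, 8);	// hits "case 8": two 32-bit moves,
 *				// no loop and no rep prefix
 */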
#define __HAVE_ARCH_MEMCPY

#ifdef CONFIG_X86_USE_3DNOW

#include <asm/mmx.h>
/*
 *	This CPU favours 3DNow strongly (eg AMD Athlon)
 */
static inline void * __constant_memcpy3d(void * to, const void * from, size_t len)
{
	if (len < 512)
		return __constant_memcpy(to, from, len);
	return _mmx_memcpy(to, from, len);
}
static __inline__ void *__memcpy3d(void *to, const void *from, size_t len)
{
	if (len < 512)
		return __memcpy(to, from, len);
	return _mmx_memcpy(to, from, len);
}
#define memcpy(t, f, n) \
(__builtin_constant_p(n) ? \
 __constant_memcpy3d((t),(f),(n)) : \
 __memcpy3d((t),(f),(n)))
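/*
 * Illustrative note (an assumption about usage, not original text):
 * __builtin_constant_p(n) is resolved at compile time, so the macro
 * picks a path with no runtime test:
 *
 *	memcpy(dst, src, 16);	// constant n -> __constant_memcpy3d
 *	memcpy(dst, src, len);	// variable n -> __memcpy3d
 *
 * Either way, copies of 512 bytes or more go to _mmx_memcpy().
 */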
#else

/*
 *	No 3D Now!
 */
#define memcpy(t, f, n) \
(__builtin_constant_p(n) ? \
 __constant_memcpy((t),(f),(n)) : \
 __memcpy((t),(f),(n)))

#endif
#define __HAVE_ARCH_MEMMOVE
void *memmove(void * dest,const void * src, size_t n);

#define memcmp __builtin_memcmp

#define __HAVE_ARCH_MEMCHR
extern void *memchr(const void * cs,int c,size_t count);
static inline void * __memset_generic(void * s, char c,size_t count)
{
	int d0, d1;
	__asm__ __volatile__(
		"rep\n\t"
		"stosb"
		: "=&c" (d0), "=&D" (d1)
		:"a" (c),"1" (s),"0" (count)
		:"memory");
	return s;
}
/* we might want to write optimized versions of these later */
#define __constant_count_memset(s,c,count) __memset_generic((s),(c),(count))
/*
 * memset(x,0,y) is a reasonably common thing to do, so we want to fill
 * things 32 bits at a time even when we don't know the size of the
 * area at compile-time..
 */
static __always_inline void * __constant_c_memset(void * s, unsigned long c, size_t count)
{
	int d0, d1;
	__asm__ __volatile__(
		"rep ; stosl\n\t"
		"testb $2,%b3\n\t"
		"je 1f\n\t"
		"stosw\n"
		"1:\ttestb $1,%b3\n\t"
		"je 2f\n\t"
		"stosb\n"
		"2:"
		:"=&c" (d0), "=&D" (d1)
		:"a" (c), "q" (count), "0" (count/4), "1" ((long) s)
		:"memory");
	return (s);
}
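/*
 * Illustrative sketch (not original text): c arrives here with the
 * fill byte already replicated into all four byte lanes (see the
 * memset() macro below), so "rep ; stosl" stores count/4 dwords and
 * the testb/stosw/stosb tail finishes the last count%4 bytes; e.g.
 * count == 7 stores one dword, then one word, then one byte.
 */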
/* Added by Gertjan van Wingerde to make minix and sysv module work */
#define __HAVE_ARCH_STRNLEN
extern size_t strnlen(const char * s, size_t count);
/* end of additional stuff */

#define __HAVE_ARCH_STRSTR
extern char *strstr(const char *cs, const char *ct);
/*
 * This looks horribly ugly, but the compiler can optimize it totally,
 * as we by now know that both pattern and count are constant..
 */
static __always_inline void * __constant_c_and_count_memset(void * s, unsigned long pattern, size_t count)
{
	switch (count) {
		case 0:
			return s;
		case 1:
			*(unsigned char *)s = pattern & 0xff;
			return s;
		case 2:
			*(unsigned short *)s = pattern & 0xffff;
			return s;
		case 3:
			*(unsigned short *)s = pattern & 0xffff;
			*(2+(unsigned char *)s) = pattern & 0xff;
			return s;
		case 4:
			*(unsigned long *)s = pattern;
			return s;
	}
#define COMMON(x) \
__asm__  __volatile__( \
	"rep ; stosl" \
	x \
	: "=&c" (d0), "=&D" (d1) \
	: "a" (pattern),"0" (count/4),"1" ((long) s) \
	: "memory")
	{
		int d0, d1;
		switch (count % 4) {
			case 0: COMMON(""); return s;
			case 1: COMMON("\n\tstosb"); return s;
			case 2: COMMON("\n\tstosw"); return s;
			default: COMMON("\n\tstosw\n\tstosb"); return s;
		}
	}

#undef COMMON
}
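/*
 * Illustrative example (not original text): COMMON(x) pastes the tail
 * instructions after "rep ; stosl", so a constant count compiles to
 * exactly one of the four sequences; e.g. count == 10 (count % 4 == 2)
 * becomes
 *
 *	rep ; stosl	; two dwords
 *	stosw		; two-byte tail
 */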
#define __constant_c_x_memset(s, c, count) \
(__builtin_constant_p(count) ? \
 __constant_c_and_count_memset((s),(c),(count)) : \
 __constant_c_memset((s),(c),(count)))

#define __memset(s, c, count) \
(__builtin_constant_p(count) ? \
 __constant_count_memset((s),(c),(count)) : \
 __memset_generic((s),(c),(count)))

#define __HAVE_ARCH_MEMSET
#define memset(s, c, count) \
(__builtin_constant_p(c) ? \
 __constant_c_x_memset((s),(0x01010101UL*(unsigned char)(c)),(count)) : \
 __memset((s),(c),(count)))
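/*
 * Illustrative example (hypothetical pointer p, not original text):
 * for a constant fill byte, 0x01010101UL * (unsigned char)(c)
 * replicates it into every byte lane, so memset(p, 0xAB, n) hands the
 * pattern 0xABABABABUL to the stosl-based helpers, which can then
 * store it 32 bits at a time.
 */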
/*
 * find the first occurrence of byte 'c', or 1 past the area if none
 */
#define __HAVE_ARCH_MEMSCAN
extern void *memscan(void * addr, int c, size_t size);
#endif /* __KERNEL__ */

#endif