include/asm-i386/byteorder.h (from linux-2.6/kmemtrace.git)
blob a0d73f48d5bea050cb21d61dfab049e38f5a3b78
#ifndef _I386_BYTEORDER_H
#define _I386_BYTEORDER_H

#include <asm/types.h>
#include <linux/compiler.h>

#ifdef __GNUC__

/* For avoiding bswap on i386 */
#ifdef __KERNEL__
#include <linux/config.h>
#endif
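
/*
 * Swap the bytes of a 32-bit value.  CONFIG_X86_BSWAP is set when
 * building for CPUs that implement the BSWAP instruction (i486 and
 * later); the fallback path below uses XCHG/ROR only, so it also
 * runs on a plain 80386.
 */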
static __inline__ __attribute_const__ __u32 ___arch__swab32(__u32 x)
{
#ifdef CONFIG_X86_BSWAP
	__asm__("bswap %0" : "=r" (x) : "0" (x));
#else
	__asm__("xchgb %b0,%h0\n\t"	/* swap lower bytes	*/
		"rorl $16,%0\n\t"	/* swap words		*/
		"xchgb %b0,%h0"		/* swap higher bytes	*/
		:"=q" (x)
		: "0" (x));
#endif
	return x;
}
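
/*
 * Swap the bytes of a 64-bit value: view it as two 32-bit halves,
 * byte-swap each half, then exchange the halves.
 */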
static __inline__ __attribute_const__ __u64 ___arch__swab64(__u64 val)
{
	union {
		struct { __u32 a,b; } s;
		__u64 u;
	} v;
	v.u = val;
#ifdef CONFIG_X86_BSWAP
	asm("bswapl %0 ; bswapl %1 ; xchgl %0,%1"
	    : "=r" (v.s.a), "=r" (v.s.b)
	    : "0" (v.s.a), "1" (v.s.b));
#else
	v.s.a = ___arch__swab32(v.s.a);
	v.s.b = ___arch__swab32(v.s.b);
	asm("xchgl %0,%1" : "=r" (v.s.a), "=r" (v.s.b) : "0" (v.s.a), "1" (v.s.b));
#endif
	return v.u;
}
/* Do not define swab16.  Gcc is smart enough to recognize the "C" version
   and convert it into a rotation or exchange.  */
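
/*
 * Export the arch-optimized versions to the generic byteorder code;
 * __BYTEORDER_HAS_U64__ advertises that a 64-bit swab is available.
 */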
#define __arch__swab64(x) ___arch__swab64(x)
#define __arch__swab32(x) ___arch__swab32(x)

#define __BYTEORDER_HAS_U64__

#endif /* __GNUC__ */

#include <linux/byteorder/little_endian.h>
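
/*
 * Usage sketch: via <linux/byteorder/little_endian.h> these primitives
 * back the generic conversion helpers, e.g.:
 *
 *	__u32 le = cpu_to_le32(x);	identity: i386 is little-endian
 *	__u32 be = cpu_to_be32(x);	___arch__swab32(x): a single BSWAP
 *					when CONFIG_X86_BSWAP is set
 */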
#endif /* _I386_BYTEORDER_H */