2 * netsniff-ng - the packet sniffing beast
3 * By Daniel Borkmann <daniel@netsniff-ng.org>
4 * Copyright 2009-2012 Daniel Borkmann.
5 * Subject to the GPL, version 2.
11 #include <linux/if_packet.h>
/* /sys/devices/system/cpu/cpuX/cache/indexX/coherency_line_size */

/*
 * Per-architecture compile-time guess of log2(cache line size in bytes);
 * CO_CACHE_LINE_SIZE is derived from it below.  Values come from typical
 * L1 line sizes (x86: 128 here covers adjacent-line prefetch pairs).
 * NOTE(review): the continuation lines (_ARCH_PPC64/_ARCH_PPC) and the
 * MIPS ABI sub-conditional were missing from the garbled source and are
 * restored from upstream netsniff-ng — verify against the pristine file.
 */
#if defined(__amd64__) || defined(__x86_64__) || defined(__AMD64__) || \
    defined(_M_X64) || defined(__amd64)
# define CO_IN_CACHE_SHIFT	7
#elif defined(__i386__) || defined(__x86__) || defined(__X86__) || \
      defined(_M_IX86) || defined(__i386)
# define CO_IN_CACHE_SHIFT	7
#elif defined(__ia64__) || defined(__IA64__) || defined(__M_IA64)
# define CO_IN_CACHE_SHIFT	6
#elif defined(__SPU__)
# define CO_IN_CACHE_SHIFT	7
#elif defined(__powerpc64__) || defined(__ppc64__) || defined(__PPC64__) || \
      defined(_ARCH_PPC64)
# define CO_IN_CACHE_SHIFT	8
#elif defined(__powerpc__) || defined(__ppc__) || defined(__PPC__) || \
      defined(_ARCH_PPC)
# define CO_IN_CACHE_SHIFT	7
#elif defined(__sparcv9__) || defined(__sparcv9)
# define CO_IN_CACHE_SHIFT	6
#elif defined(__sparc_v8__)
# define CO_IN_CACHE_SHIFT	5
#elif defined(__sparc__) || defined(__sparc)
# define CO_IN_CACHE_SHIFT	5
#elif defined(__ARM_EABI__)
# define CO_IN_CACHE_SHIFT	5
#elif defined(__arm__)
# define CO_IN_CACHE_SHIFT	5
#elif defined(__mips__) || defined(__mips) || defined(__MIPS__)
# if defined(_ABIO32)
#  define CO_IN_CACHE_SHIFT	5
# elif defined(_ABIN32)
#  define CO_IN_CACHE_SHIFT	5
# else
#  define CO_IN_CACHE_SHIFT	6
# endif
#else /* fallback for unknown architectures */
# define CO_IN_CACHE_SHIFT	5
#endif
/*
 * GCC attribute shorthands, branch hints, prefetch and alignment helpers.
 * Everything is wrapped in #ifndef so a toolchain- or kernel-provided
 * definition always wins; the missing #endif lines lost in extraction
 * are restored here.
 */
#ifndef CO_CACHE_LINE_SIZE
# define CO_CACHE_LINE_SIZE	(1 << CO_IN_CACHE_SHIFT)
#endif

#ifndef __aligned_16
# define __aligned_16		__attribute__((aligned(16)))
#endif

#ifndef __cacheline_aligned
# define __cacheline_aligned	__attribute__((aligned(CO_CACHE_LINE_SIZE)))
#endif

#ifndef __aligned_tpacket
# define __aligned_tpacket	__attribute__((aligned(TPACKET_ALIGNMENT)))
#endif

/* Let the compiler type-check printf-style format strings of wrappers. */
#ifndef __check_format_printf
# define __check_format_printf(pos_fmtstr, pos_fmtargs)		\
		__attribute__ ((format (printf, (pos_fmtstr), (pos_fmtargs))))
#endif

#ifndef __packed
# define __packed		__attribute__((packed))
#endif

/* Round x up to the next multiple of alignment (must be a power of two). */
#ifndef round_up
# define round_up(x, alignment)	(((x) + (alignment) - 1) & ~((alignment) - 1))
#endif

#ifndef round_up_cacheline
# define round_up_cacheline(x)	round_up((x), CO_CACHE_LINE_SIZE)
#endif

#ifndef likely
# define likely(x)		__builtin_expect(!!(x), 1)
#endif

#ifndef unlikely
# define unlikely(x)		__builtin_expect(!!(x), 0)
#endif

#ifndef constant
# define constant(x)		__builtin_constant_p(x)
#endif

/* Prefetch for read/write with high/low temporal locality. */
#ifndef prefetch_rd_hi
# define prefetch_rd_hi(addr)	__builtin_prefetch(addr, 0, 3)
#endif

#ifndef prefetch_rd_lo
# define prefetch_rd_lo(addr)	__builtin_prefetch(addr, 0, 0)
#endif

#ifndef prefetch_wr_hi
# define prefetch_wr_hi(addr)	__builtin_prefetch(addr, 1, 3)
#endif

#ifndef prefetch_wr_lo
# define prefetch_wr_lo(addr)	__builtin_prefetch(addr, 1, 0)
#endif

#ifndef fmemset
# define fmemset		__builtin_memset
#endif

#ifndef fmemcpy
# define fmemcpy		__builtin_memcpy
#endif

#ifndef atomic_cmp_swp
# define atomic_cmp_swp		__sync_val_compare_and_swap
#endif

#ifndef __deprecated
# define __deprecated		/* unimplemented */
#endif

#ifndef EXPORT_SYMBOL
# define EXPORT_SYMBOL(x)	/* empty, just for readability */
#endif

#ifndef unreachable
# define unreachable()		do { } while (1)
#endif

#ifndef __read_mostly
# define __read_mostly		__attribute__((__section__(".data.read_mostly")))
#endif

#ifndef __unused
# define __unused		__attribute__ ((__unused__))
#endif

#ifndef noinline
# define noinline		__attribute__((noinline))
#endif

#ifndef __always_inline
# define __always_inline	inline
#endif

#ifndef __hidden
# define __hidden		__attribute__((visibility("hidden")))
#endif

#ifndef __pure
# define __pure			__attribute__ ((pure))
#endif

#ifndef force_cast
# define force_cast(type, arg)	((type) (arg))
#endif

/* Read x exactly once; volatile access defeats load re-ordering/merging. */
#ifndef access_once
# define access_once(x)		(*(volatile typeof(x) *) &(x))
#endif

/* Single-evaluation min/max via GNU statement expressions. */
#ifndef max
# define max(a, b)					\
	({						\
		typeof (a) _a = (a);			\
		typeof (b) _b = (b);			\
		_a > _b ? _a : _b;			\
	})
#endif

#ifndef min
# define min(a, b)					\
	({						\
		typeof (a) _a = (a);			\
		typeof (b) _b = (b);			\
		_a < _b ? _a : _b;			\
	})
#endif

/* True iff x is a non-zero power of two. */
#ifndef ispow2
# define ispow2(x)		({ !!((x) && !((x) & ((x) - 1))); })
#endif

#ifndef offsetof
# define offsetof(type, member)	((size_t) &((type *) 0)->member)
#endif

#ifndef container_of
# define container_of(ptr, type, member)				\
	({								\
		const typeof(((type *) 0)->member) * __mptr = (ptr);	\
		(type *) ((char *) __mptr - offsetof(type, member));	\
	})
#endif

/* Element count of a true array; __must_be_array rejects plain pointers. */
#ifndef array_size
# define array_size(x)	(sizeof(x) / sizeof((x)[0]) + __must_be_array(x))
#endif

#ifndef __must_be_array
# define __must_be_array(x)					\
	build_bug_on_zero(__builtin_types_compatible_p(typeof(x), \
						       typeof(&x[0])))
#endif

/* Compile-time assertion: negative array size if e is non-zero. */
#ifndef build_bug_on_zero
# define build_bug_on_zero(e)	(sizeof(char[1 - 2 * !!(e)]) - 1)
#endif

#ifndef bug_on
# define bug_on(cond)		assert(!(cond))
#endif

#ifndef bug
# define bug			assert(0)
#endif
/* Runtime page size and helpers to mask/round addresses to page bounds. */
#define PAGE_SIZE	(getpagesize())
#define PAGE_MASK	(~(PAGE_SIZE - 1))
#define PAGE_ALIGN(addr)	(((addr) + PAGE_SIZE - 1) & PAGE_MASK)
/*
 * 64 bit host <-> network byte order.  Network order is big endian, so
 * on little-endian hosts both directions are a byte swap, on big-endian
 * hosts both are the identity.
 */
#if __BYTE_ORDER == __LITTLE_ENDIAN
static inline uint64_t htonll(uint64_t x)
{
	return bswap_64(x);
}

static inline uint64_t ntohll(uint64_t x)
{
	return bswap_64(x);
}
#elif __BYTE_ORDER == __BIG_ENDIAN
static inline uint64_t htonll(uint64_t x)
{
	return x;
}

static inline uint64_t ntohll(uint64_t x)
{
	return x;
}
#else
# error __BYTE_ORDER is neither __LITTLE_ENDIAN nor __BIG_ENDIAN
#endif
263 #if __BYTE_ORDER == __LITTLE_ENDIAN
264 static inline u16
cpu_to_be16(u16 val
)
266 return bswap_16(val
);
269 static inline u32
cpu_to_be32(u32 val
)
271 return bswap_32(val
);
274 static inline u64
cpu_to_be64(u64 val
)
276 return bswap_64(val
);
279 static inline u16
cpu_to_le16(u16 val
)
284 static inline u32
cpu_to_le32(u32 val
)
289 static inline u64
cpu_to_le64(u64 val
)
293 #elif __BYTE_ORDER == __BIG_ENDIAN
294 static inline u16
cpu_to_be16(u16 val
)
299 static inline u32
cpu_to_be32(u32 val
)
304 static inline u64
cpu_to_be64(u64 val
)
309 static inline u16
cpu_to_le16(u16 val
)
311 return bswap_16(val
);
314 static inline u32
cpu_to_le32(u32 val
)
316 return bswap_32(val
);
319 static inline u64
cpu_to_le64(u64 val
)
321 return bswap_64(val
);
324 # error __BYTE_ORDER is neither __LITTLE_ENDIAN nor __BIG_ENDIAN
327 #define le64_to_cpu cpu_to_le64
328 #define le32_to_cpu cpu_to_le32
329 #define le16_to_cpu cpu_to_le16
330 #define be64_to_cpu cpu_to_be64
331 #define be32_to_cpu cpu_to_be32
332 #define be16_to_cpu cpu_to_be16
337 #define memset fmemset
338 #define memcpy fmemcpy
340 #endif /* BUILT_IN_H */