/*
 * netsniff-ng - the packet sniffing beast
 * Copyright 2009-2012 Daniel Borkmann.
 * Parts taken from the Linux kernel, GPL, version 2.
 * Subject to the GPL, version 2.
 */

#ifndef BUILT_IN_H
#define BUILT_IN_H

#include <linux/if_packet.h>
#include <assert.h>
#include <endian.h>
#include <byteswap.h>
#include <stdint.h>
#include <unistd.h>
#include <asm/byteorder.h>
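
/* Kernel-style fixed-width integer aliases used by the byte order helpers below. */
typedef uint64_t u64;
typedef uint32_t u32;
typedef uint16_t u16;
typedef uint8_t u8;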

/* CO_IN_CACHE_SHIFT is selected per architecture at the end of this header. */
#ifndef CO_CACHE_LINE_SIZE
# define CO_CACHE_LINE_SIZE (1 << CO_IN_CACHE_SHIFT)
#endif

#ifndef __aligned_16
# define __aligned_16 __attribute__((aligned(16)))
#endif

#ifndef __cacheline_aligned
# define __cacheline_aligned __attribute__((aligned(CO_CACHE_LINE_SIZE)))
#endif

#ifndef __aligned_tpacket
# define __aligned_tpacket __attribute__((aligned(TPACKET_ALIGNMENT)))
#endif

#ifndef __align_tpacket
# define __align_tpacket(x) __attribute__((aligned(TPACKET_ALIGN(x))))
#endif

#ifndef __check_format_printf
# define __check_format_printf(pos_fmtstr, pos_fmtargs) \
	__attribute__ ((format (printf, (pos_fmtstr), (pos_fmtargs))))
#endif
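
/*
 * Lets GCC check the format string of a printf-like function against its
 * arguments, e.g. (illustrative declaration, not part of this header):
 *
 *	extern void panic(const char *fmt, ...) __check_format_printf(1, 2);
 */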

#ifndef __packed
# define __packed __attribute__((packed))
#endif

#ifndef round_up
# define round_up(x, alignment) (((x) + (alignment) - 1) & ~((alignment) - 1))
#endif

#ifndef round_up_cacheline
# define round_up_cacheline(x) round_up((x), CO_CACHE_LINE_SIZE)
#endif
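
/*
 * round_up() assumes a power-of-two alignment, e.g. round_up(100, 64) == 128;
 * round_up_cacheline() rounds up to the cache line size assumed below.
 */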

#ifndef likely
# define likely(x) __builtin_expect(!!(x), 1)
#endif

#ifndef unlikely
# define unlikely(x) __builtin_expect(!!(x), 0)
#endif
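
/*
 * Branch prediction hints for hot paths, e.g. "if (unlikely(ret < 0))"
 * marks an error branch as rarely taken.
 */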

#ifndef constant
# define constant(x) __builtin_constant_p(x)
#endif

#ifndef fmemset
# define fmemset __builtin_memset
#endif

#ifndef fmemcpy
# define fmemcpy __builtin_memcpy
#endif

#ifndef __maybe_unused
# define __maybe_unused __attribute__ ((__unused__))
#endif

#ifndef noinline
# define noinline __attribute__((noinline))
#endif

#ifndef __hidden
# define __hidden __attribute__((visibility("hidden")))
#endif

#ifndef __pure
# define __pure __attribute__ ((pure))
#endif

#ifndef __force
# define __force /* unimplemented */
#endif

#ifndef force_cast
# define force_cast(type, arg) ((type) (arg))
#endif

#ifndef max
# define max(a, b)				\
	({					\
		typeof (a) _a = (a);		\
		typeof (b) _b = (b);		\
		_a > _b ? _a : _b;		\
	})
#endif

#ifndef min
# define min(a, b)				\
	({					\
		typeof (a) _a = (a);		\
		typeof (b) _b = (b);		\
		_a < _b ? _a : _b;		\
	})
#endif

#ifndef ispow2
# define ispow2(x) ({ !!((x) && !((x) & ((x) - 1))); })
#endif

#ifndef offsetof
# define offsetof(type, member) ((size_t) &((type *) 0)->member)
#endif

#ifndef container_of
# define container_of(ptr, type, member)			\
	({							\
		const typeof(((type *) 0)->member) * __mptr = (ptr); \
		(type *) ((char *) __mptr - offsetof(type, member)); \
	})
#endif
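
/*
 * container_of() maps a pointer to a member back to its enclosing struct.
 * Illustrative use: given "struct entry { int key; struct node n; };" and a
 * "struct node *np" pointing at some entry's n member,
 * container_of(np, struct entry, n) yields the enclosing struct entry *.
 */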

#ifndef array_size
# define array_size(x) (sizeof(x) / sizeof((x)[0]) + __must_be_array(x))
#endif

#ifndef __must_be_array
# define __must_be_array(x)					\
	build_bug_on_zero(__builtin_types_compatible_p(typeof(x), \
						       typeof(&x[0])))
#endif

#ifndef build_bug_on_zero
# define build_bug_on_zero(e) (sizeof(char[1 - 2 * !!(e)]) - 1)
#endif
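
/*
 * build_bug_on_zero() evaluates to 0 but triggers a compile error (negative
 * array size) when the condition is true; __must_be_array() relies on it so
 * that array_size() rejects plain pointers, which would otherwise yield a
 * silently wrong element count.
 */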

#ifndef bug_on
# define bug_on(cond) assert(!(cond))
#endif

#ifndef bug
# define bug() assert(0)
#endif

#define PAGE_SIZE (getpagesize())
#define PAGE_MASK (~(PAGE_SIZE - 1))
#define PAGE_ALIGN(addr) (((addr) + PAGE_SIZE - 1) & PAGE_MASK)
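
/*
 * PAGE_SIZE resolves to getpagesize() at runtime, so these are not constant
 * expressions. Example: with 4 KiB pages, PAGE_ALIGN(5000) yields 8192.
 */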

/* 64-bit counterparts to htonl()/ntohl(); network byte order is big-endian. */
#if __BYTE_ORDER == __LITTLE_ENDIAN
static inline uint64_t htonll(uint64_t x)
{
	return bswap_64(x);
}

static inline uint64_t ntohll(uint64_t x)
{
	return bswap_64(x);
}
#elif __BYTE_ORDER == __BIG_ENDIAN
static inline uint64_t htonll(uint64_t x)
{
	return x;
}

static inline uint64_t ntohll(uint64_t x)
{
	return x;
}
#else
# error __BYTE_ORDER is neither __LITTLE_ENDIAN nor __BIG_ENDIAN
#endif
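
/*
 * Compile-time byte swaps: unlike the inline helpers, these can be used
 * where a constant expression is required, e.g. in static initializers.
 */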
#ifndef ___constant_swab16
# define ___constant_swab16(x) ((__u16)( \
	(((__u16)(x) & (__u16)0x00ffU) << 8) | \
	(((__u16)(x) & (__u16)0xff00U) >> 8)))
#endif

#ifndef ___constant_swab32
# define ___constant_swab32(x) ((__u32)( \
	(((__u32)(x) & (__u32)0x000000ffUL) << 24) | \
	(((__u32)(x) & (__u32)0x0000ff00UL) << 8) | \
	(((__u32)(x) & (__u32)0x00ff0000UL) >> 8) | \
	(((__u32)(x) & (__u32)0xff000000UL) >> 24)))
#endif

#if __BYTE_ORDER == __LITTLE_ENDIAN
static inline u16 cpu_to_be16(u16 val)
{
	return bswap_16(val);
}

static inline u32 cpu_to_be32(u32 val)
{
	return bswap_32(val);
}

static inline u64 cpu_to_be64(u64 val)
{
	return bswap_64(val);
}

static inline u16 cpu_to_le16(u16 val)
{
	return val;
}

static inline u32 cpu_to_le32(u32 val)
{
	return val;
}

static inline u64 cpu_to_le64(u64 val)
{
	return val;
}

# ifndef __constant_htonl
# define __constant_htonl(x) ((__force __be32)___constant_swab32((x)))
# endif
# ifndef __constant_ntohl
# define __constant_ntohl(x) ___constant_swab32((__force __be32)(x))
# endif
# ifndef __constant_htons
# define __constant_htons(x) ((__force __be16)___constant_swab16((x)))
# endif
# ifndef __constant_ntohs
# define __constant_ntohs(x) ___constant_swab16((__force __be16)(x))
# endif
#elif __BYTE_ORDER == __BIG_ENDIAN
static inline u16 cpu_to_be16(u16 val)
{
	return val;
}

static inline u32 cpu_to_be32(u32 val)
{
	return val;
}

static inline u64 cpu_to_be64(u64 val)
{
	return val;
}

static inline u16 cpu_to_le16(u16 val)
{
	return bswap_16(val);
}

static inline u32 cpu_to_le32(u32 val)
{
	return bswap_32(val);
}

static inline u64 cpu_to_le64(u64 val)
{
	return bswap_64(val);
}

# ifndef __constant_htonl
# define __constant_htonl(x) ((__force __be32)(__u32)(x))
# endif
# ifndef __constant_ntohl
# define __constant_ntohl(x) ((__force __u32)(__be32)(x))
# endif
# ifndef __constant_htons
# define __constant_htons(x) ((__force __be16)(__u16)(x))
# endif
# ifndef __constant_ntohs
# define __constant_ntohs(x) ((__force __u16)(__be16)(x))
# endif
#else
# error __BYTE_ORDER is neither __LITTLE_ENDIAN nor __BIG_ENDIAN
#endif
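
/*
 * Byte swapping is its own inverse, so the reverse conversions can simply
 * alias the cpu_to_* helpers above.
 */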
#define le64_to_cpu cpu_to_le64
#define le32_to_cpu cpu_to_le32
#define le16_to_cpu cpu_to_le16
#define be64_to_cpu cpu_to_be64
#define be32_to_cpu cpu_to_be32
#define be16_to_cpu cpu_to_be16
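
/*
 * Route memset()/memcpy() through the GCC builtins defined above, which
 * lets the compiler inline small fixed-size operations instead of always
 * calling into libc.
 */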
#define memset fmemset
#define memcpy fmemcpy
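
/*
 * Per-architecture log2 of the assumed cache line size; CO_CACHE_LINE_SIZE
 * above expands to (1 << CO_IN_CACHE_SHIFT), e.g. 128 bytes with the shift
 * of 7 chosen for x86-64 here.
 */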
#if defined(__amd64__) || defined(__x86_64__) || defined(__AMD64__) || \
    defined(_M_X64) || defined(__amd64)
# define CO_IN_CACHE_SHIFT 7
#elif defined(__i386__) || defined(__x86__) || defined(__X86__) || \
      defined(_M_IX86) || defined(__i386)
# define CO_IN_CACHE_SHIFT 7
#elif defined(__ia64__) || defined(__IA64__) || defined(__M_IA64)
# define CO_IN_CACHE_SHIFT 6
#elif defined(__SPU__)
# define CO_IN_CACHE_SHIFT 7
#elif defined(__powerpc64__) || defined(__ppc64__) || defined(__PPC64__) || \
      defined(_ARCH_PPC64)
# define CO_IN_CACHE_SHIFT 8
#elif defined(__powerpc__) || defined(__ppc__) || defined(__PPC__) || \
      defined(_ARCH_PPC)
# define CO_IN_CACHE_SHIFT 7
#elif defined(__sparcv9__) || defined(__sparcv9)
# define CO_IN_CACHE_SHIFT 6
#elif defined(__sparc_v8__)
# define CO_IN_CACHE_SHIFT 5
#elif defined(__sparc__) || defined(__sparc)
# define CO_IN_CACHE_SHIFT 5
#elif defined(__ARM_EABI__)
# define CO_IN_CACHE_SHIFT 5
#elif defined(__arm__)
# define CO_IN_CACHE_SHIFT 5
#elif defined(__mips__) || defined(__mips) || defined(__MIPS__)
# if defined(_ABIO32)
# define CO_IN_CACHE_SHIFT 5
# elif defined(_ABIN32)
# define CO_IN_CACHE_SHIFT 5
# else
# define CO_IN_CACHE_SHIFT 6
# endif
#else
# define CO_IN_CACHE_SHIFT 5
#endif

#endif /* BUILT_IN_H */