/*
 * netsniff-ng - the packet sniffing beast
 * By Daniel Borkmann <daniel@netsniff-ng.org>
 * Copyright 2009-2012 Daniel Borkmann.
 * Subject to the GPL, version 2.
 */
#ifndef BUILT_IN_H
#define BUILT_IN_H

#include <assert.h>
#include <stdint.h>
#include <string.h>
#include <unistd.h>
#include <byteswap.h>
#include <endian.h>
#include <linux/if_packet.h>
/* /sys/devices/system/cpu/cpuX/cache/indexX/coherency_line_size */
/*
 * Per-architecture cache line size, expressed as a shift: the line size in
 * bytes is 1 << CO_IN_CACHE_SHIFT. The sysfs path above can be consulted to
 * verify the value on a given machine.
 */
#if defined(__amd64__) || defined(__x86_64__) || defined(__AMD64__) || \
    defined(_M_X64) || defined(__amd64)
# define CO_IN_CACHE_SHIFT 7
#elif defined(__i386__) || defined(__x86__) || defined(__X86__) || \
      defined(_M_IX86) || defined(__i386)
# define CO_IN_CACHE_SHIFT 7
#elif defined(__ia64__) || defined(__IA64__) || defined(__M_IA64)
# define CO_IN_CACHE_SHIFT 6
#elif defined(__SPU__)
# define CO_IN_CACHE_SHIFT 7
/* NOTE(review): the continuation tails _ARCH_PPC64/_ARCH_PPC below were
 * reconstructed from the usual PowerPC predefines — confirm against upstream. */
#elif defined(__powerpc64__) || defined(__ppc64__) || defined(__PPC64__) || \
      defined(_ARCH_PPC64)
# define CO_IN_CACHE_SHIFT 8
#elif defined(__powerpc__) || defined(__ppc__) || defined(__PPC__) || \
      defined(_ARCH_PPC)
# define CO_IN_CACHE_SHIFT 7
#elif defined(__sparcv9__) || defined(__sparcv9)
# define CO_IN_CACHE_SHIFT 6
#elif defined(__sparc_v8__)
# define CO_IN_CACHE_SHIFT 5
#elif defined(__sparc__) || defined(__sparc)
# define CO_IN_CACHE_SHIFT 5
#elif defined(__ARM_EABI__)
# define CO_IN_CACHE_SHIFT 5
#elif defined(__arm__)
# define CO_IN_CACHE_SHIFT 5
#elif defined(__mips__) || defined(__mips) || defined(__MIPS__)
/* On MIPS the line size depends on the ABI in use. */
# if defined(_ABIO32)
#  define CO_IN_CACHE_SHIFT 5
# elif defined(_ABIN32)
#  define CO_IN_CACHE_SHIFT 5
# else
#  define CO_IN_CACHE_SHIFT 6
# endif
#else
/* Conservative default for unrecognized architectures. */
# define CO_IN_CACHE_SHIFT 5
#endif

#ifndef CO_CACHE_LINE_SIZE
/* Cache line size in bytes, derived from the shift selected above. */
# define CO_CACHE_LINE_SIZE (1 << CO_IN_CACHE_SHIFT)
#endif
/* Alignment attributes and rounding helpers. */
#ifndef __aligned_16
# define __aligned_16 __attribute__((aligned(16)))
#endif

#ifndef __cacheline_aligned
/* Align an object to the architecture cache line size (see CO_CACHE_LINE_SIZE). */
# define __cacheline_aligned __attribute__((aligned(CO_CACHE_LINE_SIZE)))
#endif

#ifndef __aligned_tpacket
/* Align to TPACKET_ALIGNMENT as required by the PF_PACKET ring ABI. */
# define __aligned_tpacket __attribute__((aligned(TPACKET_ALIGNMENT)))
#endif

#ifndef __packed
# define __packed __attribute__((packed))
#endif

#ifndef round_up
/* Round x up to the next multiple of alignment; alignment must be a power of two. */
# define round_up(x, alignment) (((x) + (alignment) - 1) & ~((alignment) - 1))
#endif

#ifndef round_up_cacheline
# define round_up_cacheline(x) round_up((x), CO_CACHE_LINE_SIZE)
#endif
/* Thin wrappers over GCC builtins: branch hints, prefetching, mem ops, CAS. */
#ifndef likely
# define likely(x) __builtin_expect(!!(x), 1)
#endif

#ifndef unlikely
# define unlikely(x) __builtin_expect(!!(x), 0)
#endif

#ifndef constant
/* True when x is a compile-time constant. */
# define constant(x) __builtin_constant_p(x)
#endif

/* Prefetch hints: rd/wr selects read vs write intent, hi/lo the temporal locality. */
#ifndef prefetch_rd_hi
# define prefetch_rd_hi(addr) __builtin_prefetch(addr, 0, 3)
#endif

#ifndef prefetch_rd_lo
# define prefetch_rd_lo(addr) __builtin_prefetch(addr, 0, 0)
#endif

#ifndef prefetch_wr_hi
# define prefetch_wr_hi(addr) __builtin_prefetch(addr, 1, 3)
#endif

#ifndef prefetch_wr_lo
# define prefetch_wr_lo(addr) __builtin_prefetch(addr, 1, 0)
#endif

#ifndef fmemset
# define fmemset __builtin_memset
#endif

#ifndef fmemcpy
# define fmemcpy __builtin_memcpy
#endif

#ifndef atomic_cmp_swp
/* Atomic compare-and-swap; returns the value previously stored. */
# define atomic_cmp_swp __sync_val_compare_and_swap
#endif
/* Attribute / annotation macros, kernel-style spellings. */
#ifndef __deprecated
# define __deprecated /* unimplemented */
#endif

#ifndef EXPORT_SYMBOL
# define EXPORT_SYMBOL(x) /* empty, just for readability */
#endif

#ifndef unreachable
/* Marks a point control flow must never pass; spins forever if it does. */
# define unreachable() do { } while (1)
#endif

#ifndef __read_mostly
/* Group rarely-written data into its own section to reduce cache ping-pong. */
# define __read_mostly __attribute__((__section__(".data.read_mostly")))
#endif

#ifndef __unused
# define __unused __attribute__ ((__unused__))
#endif

#ifndef noinline
# define noinline __attribute__((noinline))
#endif

#ifndef __always_inline
# define __always_inline inline
#endif

#ifndef __hidden
# define __hidden __attribute__((visibility("hidden")))
#endif

#ifndef __pure
# define __pure __attribute__ ((pure))
#endif

#ifndef force_cast
/* Explicit, greppable cast. */
# define force_cast(type, arg) ((type) (arg))
#endif

#ifndef access_once
/* Force a single, non-cached load/store of x (volatile access). */
# define access_once(x) (*(volatile typeof(x) *) &(x))
#endif
/* Generic helpers (single-evaluation min/max via statement expressions). */
/* NOTE(review): macro heads were lost in extraction; max-before-min ordering
 * reconstructed — confirm against upstream. */
#ifndef max
# define max(a, b)			\
	({				\
		typeof (a) _a = (a);	\
		typeof (b) _b = (b);	\
		_a > _b ? _a : _b;	\
	})
#endif

#ifndef min
# define min(a, b)			\
	({				\
		typeof (a) _a = (a);	\
		typeof (b) _b = (b);	\
		_a < _b ? _a : _b;	\
	})
#endif

#ifndef ispow2
/* True iff x is a nonzero power of two. */
# define ispow2(x) ({ !!((x) && !((x) & ((x) - 1))); })
#endif

#ifndef offsetof
# define offsetof(type, member) ((size_t) &((type *) 0)->member)
#endif

#ifndef container_of
/* Given a pointer to a member, recover a pointer to the enclosing struct. */
# define container_of(ptr, type, member) \
	({ \
		const typeof(((type *) 0)->member) * __mptr = (ptr); \
		(type *) ((char *) __mptr - offsetof(type, member)); \
	})
#endif

#ifndef array_size
/* Element count of a true array; fails to compile when handed a pointer. */
# define array_size(x) (sizeof(x) / sizeof((x)[0]) + __must_be_array(x))
#endif

#ifndef __must_be_array
# define __must_be_array(x) \
	build_bug_on_zero(__builtin_types_compatible_p(typeof(x), \
						       typeof(&x[0])))
#endif

#ifndef build_bug_on_zero
/* Evaluates to 0, but breaks the build when e is nonzero. */
# define build_bug_on_zero(e) (sizeof(char[1 - 2 * !!(e)]) - 1)
#endif

#ifndef bug_on
# define bug_on(cond) assert(!(cond))
#endif

#ifndef bug
# define bug assert(0)
#endif
/* Page helpers; PAGE_SIZE is queried at runtime via getpagesize(2). */
#define PAGE_SIZE (getpagesize())
/* Mask selecting the page-number bits of an address. */
#define PAGE_MASK (~(PAGE_SIZE - 1))
/* Round addr up to the next page boundary. */
#define PAGE_ALIGN(addr) (((addr) + PAGE_SIZE - 1) & PAGE_MASK)
/* 64-bit host <-> network byte order conversion, analogous to htonl/ntohl. */
#if __BYTE_ORDER == __LITTLE_ENDIAN
static inline uint64_t htonll(uint64_t x)
{
	return bswap_64(x);
}

static inline uint64_t ntohll(uint64_t x)
{
	return bswap_64(x);
}
#elif __BYTE_ORDER == __BIG_ENDIAN
/* Big-endian hosts already use network byte order: both are the identity. */
static inline uint64_t htonll(uint64_t x)
{
	return x;
}

static inline uint64_t ntohll(uint64_t x)
{
	return x;
}
#else
# error __BYTE_ORDER is neither __LITTLE_ENDIAN nor __BIG_ENDIAN
#endif
/*
 * Fixed-width shorthand types used by the cpu_to_*() helpers below.
 * NOTE(review): this chunk of the file does not show where u16/u32/u64 are
 * declared; redeclaring an identical typedef is valid C11, so these are safe
 * even if the full file also declares them elsewhere — confirm.
 */
typedef uint16_t u16;
typedef uint32_t u32;
typedef uint64_t u64;

#if __BYTE_ORDER == __LITTLE_ENDIAN
/* Little-endian host: conversion to big-endian swaps, to little-endian is identity. */
static inline u16 cpu_to_be16(u16 val)
{
	return bswap_16(val);
}

static inline u32 cpu_to_be32(u32 val)
{
	return bswap_32(val);
}

static inline u64 cpu_to_be64(u64 val)
{
	return bswap_64(val);
}

static inline u16 cpu_to_le16(u16 val)
{
	return val;
}

static inline u32 cpu_to_le32(u32 val)
{
	return val;
}

static inline u64 cpu_to_le64(u64 val)
{
	return val;
}
#elif __BYTE_ORDER == __BIG_ENDIAN
/* Big-endian host: conversion to big-endian is identity, to little-endian swaps. */
static inline u16 cpu_to_be16(u16 val)
{
	return val;
}

static inline u32 cpu_to_be32(u32 val)
{
	return val;
}

static inline u64 cpu_to_be64(u64 val)
{
	return val;
}

static inline u16 cpu_to_le16(u16 val)
{
	return bswap_16(val);
}

static inline u32 cpu_to_le32(u32 val)
{
	return bswap_32(val);
}

static inline u64 cpu_to_le64(u64 val)
{
	return bswap_64(val);
}
#else
# error __BYTE_ORDER is neither __LITTLE_ENDIAN nor __BIG_ENDIAN
#endif
322 #define le64_to_cpu cpu_to_le64
323 #define le32_to_cpu cpu_to_le32
324 #define le16_to_cpu cpu_to_le16
325 #define be64_to_cpu cpu_to_be64
326 #define be32_to_cpu cpu_to_be32
327 #define be16_to_cpu cpu_to_be16
332 #define memset fmemset
333 #define memcpy fmemcpy
335 #endif /* BUILT_IN_H */