/*
 * netsniff-ng - the packet sniffing beast
 * By Daniel Borkmann <daniel@netsniff-ng.org>
 * Copyright 2009-2012 Daniel Borkmann.
 * Subject to the GPL, version 2.
 */
#include <assert.h>
#include <stdint.h>
#include <unistd.h>
#include <byteswap.h>
#include <endian.h>
#include <linux/if_packet.h>
/* Per-architecture fallback for the CPU cache line size, expressed as a
 * power-of-two shift.  The authoritative runtime value lives in
 * /sys/devices/system/cpu/cpuX/cache/indexX/coherency_line_size; these
 * compile-time defaults only need to be "close enough" for alignment.
 */
#if defined(__amd64__) || defined(__x86_64__) || defined(__AMD64__) || \
    defined(_M_X64) || defined(__amd64)
# define CO_IN_CACHE_SHIFT	7
#elif defined(__i386__) || defined(__x86__) || defined(__X86__) || \
      defined(_M_IX86) || defined(__i386)
# define CO_IN_CACHE_SHIFT	7
#elif defined(__ia64__) || defined(__IA64__) || defined(__M_IA64)
# define CO_IN_CACHE_SHIFT	6
#elif defined(__SPU__)
# define CO_IN_CACHE_SHIFT	7
#elif defined(__powerpc64__) || defined(__ppc64__) || defined(__PPC64__) || \
      defined(_ARCH_PPC64)
# define CO_IN_CACHE_SHIFT	8
#elif defined(__powerpc__) || defined(__ppc__) || defined(__PPC__) || \
      defined(_ARCH_PPC)
# define CO_IN_CACHE_SHIFT	7
#elif defined(__sparcv9__) || defined(__sparcv9)
# define CO_IN_CACHE_SHIFT	6
#elif defined(__sparc_v8__)
# define CO_IN_CACHE_SHIFT	5
#elif defined(__sparc__) || defined(__sparc)
# define CO_IN_CACHE_SHIFT	5
#elif defined(__ARM_EABI__)
# define CO_IN_CACHE_SHIFT	5
#elif defined(__arm__)
# define CO_IN_CACHE_SHIFT	5
#elif defined(__mips__) || defined(__mips) || defined(__MIPS__)
/* MIPS line size depends on the ABI; o32 and n32 parts commonly use 32
 * bytes, 64-bit parts 64 bytes.  NOTE(review): nesting reconstructed from
 * the surviving "# elif defined(_ABIN32)" fragment — confirm against the
 * upstream header.
 */
# if defined(_ABIO32)
#  define CO_IN_CACHE_SHIFT	5
# elif defined(_ABIN32)
#  define CO_IN_CACHE_SHIFT	5
# else
#  define CO_IN_CACHE_SHIFT	6
# endif
#else
/* Conservative default for unknown architectures: 32-byte lines. */
# define CO_IN_CACHE_SHIFT	5
#endif

#ifndef CO_CACHE_LINE_SIZE
# define CO_CACHE_LINE_SIZE	(1 << CO_IN_CACHE_SHIFT)
#endif
/* Alignment and branch-prediction helper macros.  All are guarded so a
 * system header that already provides them wins.
 */
#ifndef __aligned_16
# define __aligned_16		__attribute__((aligned(16)))
#endif

#ifndef __cacheline_aligned
# define __cacheline_aligned	__attribute__((aligned(CO_CACHE_LINE_SIZE)))
#endif

/* Align to TPACKET_ALIGNMENT from <linux/if_packet.h> for PF_PACKET rings. */
#ifndef __aligned_tpacket
# define __aligned_tpacket	__attribute__((aligned(TPACKET_ALIGNMENT)))
#endif

#ifndef __packed
# define __packed		__attribute__((packed))
#endif

/* Round x up to the next multiple of alignment; alignment must be a
 * power of two for the mask trick to be correct.
 */
#ifndef round_up
# define round_up(x, alignment)	(((x) + (alignment) - 1) & ~((alignment) - 1))
#endif

#ifndef round_up_cacheline
# define round_up_cacheline(x)	round_up((x), CO_CACHE_LINE_SIZE)
#endif

/* Branch-prediction hints for hot paths. */
#ifndef likely
# define likely(x)		__builtin_expect(!!(x), 1)
#endif

#ifndef unlikely
# define unlikely(x)		__builtin_expect(!!(x), 0)
#endif
/* Prefetch hints: rd/wr is the access intent (2nd builtin argument),
 * hi/lo the expected temporal locality (3rd argument: 3 = keep in all
 * cache levels, 0 = low locality).
 */
#ifndef prefetch_rd_hi
# define prefetch_rd_hi(addr)	__builtin_prefetch(addr, 0, 3)
#endif

#ifndef prefetch_rd_lo
# define prefetch_rd_lo(addr)	__builtin_prefetch(addr, 0, 0)
#endif

#ifndef prefetch_wr_hi
# define prefetch_wr_hi(addr)	__builtin_prefetch(addr, 1, 3)
#endif

#ifndef prefetch_wr_lo
# define prefetch_wr_lo(addr)	__builtin_prefetch(addr, 1, 0)
#endif

/* "Fast" mem ops: let the compiler expand/inline them where profitable. */
#ifndef fmemset
# define fmemset		__builtin_memset
#endif

#ifndef fmemcpy
# define fmemcpy		__builtin_memcpy
#endif

/* Full-barrier compare-and-swap; returns the value previously at *ptr. */
#ifndef atomic_cmp_swp
# define atomic_cmp_swp		__sync_val_compare_and_swap
#endif
/* Kernel-style attribute shims, each guarded so existing definitions win. */
#ifndef __deprecated
# define __deprecated		/* unimplemented */
#endif

/* No-op in userspace; kept so kernel-derived code reads naturally. */
#ifndef EXPORT_SYMBOL
# define EXPORT_SYMBOL(x)	/* empty, just for readability */
#endif

/* Spin forever: marks paths that must never be reached at runtime. */
#ifndef unreachable
# define unreachable()		do { } while (1)
#endif

/* Group rarely-written data to reduce false sharing with hot data. */
#ifndef __read_mostly
# define __read_mostly		__attribute__((__section__(".data.read_mostly")))
#endif

#ifndef __unused
# define __unused		__attribute__ ((__unused__))
#endif

#ifndef noinline
# define noinline		__attribute__((noinline))
#endif

#ifndef __always_inline
# define __always_inline	inline
#endif

#ifndef __hidden
# define __hidden		__attribute__((visibility("hidden")))
#endif

#ifndef __pure
# define __pure			__attribute__ ((pure))
#endif

/* Explicitly-named cast, to make intentional conversions grep-able. */
#ifndef force_cast
# define force_cast(type, arg)	((type) (arg))
#endif

/* Force a single, non-cached load/store of x (GNU typeof extension). */
#ifndef access_once
# define access_once(x)		(*(volatile typeof(x) *) &(x))
#endif
/* Single-evaluation max/min via GNU statement expressions + typeof. */
#ifndef max
# define max(a, b) ({				\
	typeof (a) _a = (a);			\
	typeof (b) _b = (b);			\
	_a > _b ? _a : _b;			\
})
#endif

#ifndef min
# define min(a, b) ({				\
	typeof (a) _a = (a);			\
	typeof (b) _b = (b);			\
	_a < _b ? _a : _b;			\
})
#endif

/* True iff x is a power of two (0 is not). */
#ifndef ispow2
# define ispow2(x)		({ !!((x) && !((x) & ((x) - 1))); })
#endif

#ifndef offsetof
# define offsetof(type, member)	((size_t) &((type *) 0)->member)
#endif

/* Recover the enclosing struct from a pointer to one of its members. */
#ifndef container_of
# define container_of(ptr, type, member)				\
({									\
	const typeof(((type *) 0)->member) * __mptr = (ptr);		\
	(type *) ((char *) __mptr - offsetof(type, member));		\
})
#endif

/* Element count of a true array; __must_be_array rejects pointers. */
#ifndef array_size
# define array_size(x)	(sizeof(x) / sizeof((x)[0]) + __must_be_array(x))
#endif

/* Compile error (via build_bug_on_zero) when x decays like a pointer. */
#ifndef __must_be_array
# define __must_be_array(x)						\
	build_bug_on_zero(__builtin_types_compatible_p(typeof(x),	\
						       typeof(&x[0])))
#endif

/* Evaluates to 0, but is a compile error when e is non-zero. */
#ifndef build_bug_on_zero
# define build_bug_on_zero(e)	(sizeof(char[1 - 2 * !!(e)]) - 1)
#endif

/* Runtime sanity checks, mapped onto assert(3). */
#ifndef bug_on
# define bug_on(cond)		assert(!(cond))
#endif

#ifndef bug
# define bug			assert(0)
#endif
/* Runtime page-size helpers.  PAGE_SIZE re-evaluates getpagesize() on
 * every use; PAGE_ALIGN rounds addr up to the next page boundary
 * (page size is always a power of two, so the mask trick is valid).
 */
#define PAGE_SIZE (getpagesize())
#define PAGE_MASK (~(PAGE_SIZE - 1))
#define PAGE_ALIGN(addr) (((addr) + PAGE_SIZE - 1) & PAGE_MASK)
/* 64-bit host <-> network (big-endian) conversion: byte-swap on
 * little-endian hosts, identity on big-endian hosts.
 */
#if __BYTE_ORDER == __LITTLE_ENDIAN
static inline uint64_t htonll(uint64_t x)
{
	return bswap_64(x);
}

static inline uint64_t ntohll(uint64_t x)
{
	return bswap_64(x);
}
#elif __BYTE_ORDER == __BIG_ENDIAN
static inline uint64_t htonll(uint64_t x)
{
	return x;
}

static inline uint64_t ntohll(uint64_t x)
{
	return x;
}
#else
# error __BYTE_ORDER is neither __LITTLE_ENDIAN nor __BIG_ENDIAN
#endif
254 #if __BYTE_ORDER == __LITTLE_ENDIAN
255 static inline u16
cpu_to_be16(u16 val
)
257 return bswap_16(val
);
260 static inline u32
cpu_to_be32(u32 val
)
262 return bswap_32(val
);
265 static inline u64
cpu_to_be64(u64 val
)
267 return bswap_64(val
);
270 static inline u16
cpu_to_le16(u16 val
)
275 static inline u32
cpu_to_le32(u32 val
)
280 static inline u64
cpu_to_le64(u64 val
)
284 #elif __BYTE_ORDER == __BIG_ENDIAN
285 static inline u16
cpu_to_be16(u16 val
)
290 static inline u32
cpu_to_be32(u32 val
)
295 static inline u64
cpu_to_be64(u64 val
)
300 static inline u16
cpu_to_le16(u16 val
)
302 return bswap_16(val
);
305 static inline u32
cpu_to_le32(u32 val
)
307 return bswap_32(val
);
310 static inline u64
cpu_to_le64(u64 val
)
312 return bswap_64(val
);
315 # error __BYTE_ORDER is neither __LITTLE_ENDIAN nor __BIG_ENDIAN
318 #define le64_to_cpu cpu_to_le64
319 #define le32_to_cpu cpu_to_le32
320 #define le16_to_cpu cpu_to_le16
321 #define be64_to_cpu cpu_to_be64
322 #define be32_to_cpu cpu_to_be32
323 #define be16_to_cpu cpu_to_be16
328 #define memset fmemset
329 #define memcpy fmemcpy
331 #endif /* BUILT_IN_H */