/* Parts taken from the Linux kernel, GPL, version 2. */
#ifndef BUILT_IN_H
#define BUILT_IN_H

#include <linux/if_packet.h>
#include <asm/byteorder.h>
#include <assert.h>	/* assert(), used by bug_on()/bug() below */
#include <endian.h>	/* __BYTE_ORDER, __LITTLE_ENDIAN, __BIG_ENDIAN */
#include <byteswap.h>	/* bswap_16/32/64() */
#include <stdint.h>	/* uint16_t/uint32_t/uint64_t */
#include <unistd.h>	/* sysconf(_SC_PAGE_SIZE) */

/* Short fixed-width aliases used by the byte-order helpers below. */
typedef uint16_t u16;
typedef uint32_t u32;
typedef uint64_t u64;
#ifndef CO_CACHE_LINE_SIZE
# define CO_CACHE_LINE_SIZE (1 << CO_IN_CACHE_SHIFT)
#endif

#ifndef __aligned_16
# define __aligned_16 __attribute__((aligned(16)))
#endif

#ifndef __cacheline_aligned
# define __cacheline_aligned __attribute__((aligned(CO_CACHE_LINE_SIZE)))
#endif

#ifndef __aligned_tpacket
# define __aligned_tpacket __attribute__((aligned(TPACKET_ALIGNMENT)))
#endif

#ifndef __align_tpacket
# define __align_tpacket(x) __attribute__((aligned(TPACKET_ALIGN(x))))
#endif
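/*
 * Illustrative usage sketch (type and variable names made up):
 * __cacheline_aligned aligns an object to the detected cache line size,
 * __aligned_tpacket aligns a buffer as required for packet(7) TX/RX ring
 * frames.
 *
 *	struct ring_stats {
 *		uint64_t packets;
 *		uint64_t bytes;
 *	} __cacheline_aligned;
 *
 *	static uint8_t frame_scratch[2048] __aligned_tpacket;
 */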
#ifndef __check_format_printf
# define __check_format_printf(pos_fmtstr, pos_fmtargs) \
	__attribute__ ((format (printf, (pos_fmtstr), (pos_fmtargs))))
#endif

#ifndef __packed
# define __packed __attribute__((packed))
#endif
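/*
 * Illustrative usage sketch (function name made up): __check_format_printf()
 * lets GCC type-check printf-style arguments, here with the format string as
 * parameter 1 and the first variadic argument as parameter 2.
 *
 *	extern void print_warn(const char *fmt, ...) __check_format_printf(1, 2);
 */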
#ifndef round_up
# define round_up(x, alignment) (((x) + (alignment) - 1) & ~((alignment) - 1))
#endif

#ifndef round_up_cacheline
# define round_up_cacheline(x) round_up((x), CO_CACHE_LINE_SIZE)
#endif
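/*
 * Worked example (illustrative): round_up() requires a power-of-two
 * alignment, since it rounds by masking with ~(alignment - 1).
 *
 *	round_up(100, 64)	== 128
 *	round_up(128, 64)	== 128
 *	round_up_cacheline(1)	== CO_CACHE_LINE_SIZE
 */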
#ifndef likely
# define likely(x) __builtin_expect(!!(x), 1)
#endif

#ifndef unlikely
# define unlikely(x) __builtin_expect(!!(x), 0)
#endif

#ifndef constant
# define constant(x) __builtin_constant_p(x)
#endif
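/*
 * Illustrative usage sketch (names made up): likely()/unlikely() only attach
 * a branch-prediction hint for GCC; the tested condition is unchanged.
 *
 *	if (unlikely(buf == NULL))
 *		return -1;
 *	if (likely(len > 0))
 *		consume(buf, len);
 */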
#ifndef fmemset
# define fmemset __builtin_memset
#endif

#ifndef fmemcpy
# define fmemcpy __builtin_memcpy
#endif

#ifndef __maybe_unused
# define __maybe_unused __attribute__((__unused__))
#endif

#ifndef __warn_unused_result
# define __warn_unused_result __attribute__((warn_unused_result))
#endif

#ifndef noinline
# define noinline __attribute__((noinline))
#endif

#ifndef __noreturn
# define __noreturn __attribute__((noreturn))
#endif

#ifndef __hidden
# define __hidden __attribute__((visibility("hidden")))
#endif

#ifndef __pure
# define __pure __attribute__ ((pure))
#endif

#ifndef __force
# define __force /* unimplemented */
#endif
/* see config_enabled et al. in linux/kconfig.h for details. */
#define __ARG_PLACEHOLDER_1 0,
#define is_defined(cfg) _is_defined(cfg)
#define _is_defined(value) __is_defined(__ARG_PLACEHOLDER_##value)
#define __is_defined(arg1_or_junk) ___is_defined(arg1_or_junk 1, 0)
#define ___is_defined(__ignored, val, ...) val
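/*
 * How this works (informal sketch; CONFIG_FOO/CONFIG_BAR are made-up names):
 * when a macro expands to 1, is_defined() sees __ARG_PLACEHOLDER_1, which
 * inserts an extra argument, and ___is_defined() then selects 1; otherwise
 * the placeholder does not expand and the selected argument is 0.
 *
 *	#define CONFIG_FOO 1
 *	is_defined(CONFIG_FOO)	-> 1
 *	is_defined(CONFIG_BAR)	-> 0	(CONFIG_BAR not defined to 1)
 */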
#ifndef max
# define max(a, b) \
	({ \
		typeof (a) _a = (a); \
		typeof (b) _b = (b); \
		_a > _b ? _a : _b; \
	})
#endif

#ifndef max_t
# define max_t(type, a, b) \
	({ \
		type ___max1 = (a); \
		type ___max2 = (b); \
		___max1 > ___max2 ? ___max1 : ___max2; \
	})
#endif

#ifndef min
# define min(a, b) \
	({ \
		typeof (a) _a = (a); \
		typeof (b) _b = (b); \
		_a < _b ? _a : _b; \
	})
#endif

#ifndef min_t
# define min_t(type, a, b) \
	({ \
		type ___min1 = (a); \
		type ___min2 = (b); \
		___min1 < ___min2 ? ___min1 : ___min2; \
	})
#endif
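/*
 * Illustrative usage sketch (variable names made up): min()/max() evaluate
 * each argument only once; min_t()/max_t() force both operands to one
 * explicit type first, avoiding mixed signed/unsigned comparisons.
 *
 *	size_t chunk = min(bytes_left, buf_len);
 *	u32 capped   = min_t(u32, user_value, 65535);
 */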
#ifndef ispow2
# define ispow2(x) ({ !!((x) && !((x) & ((x) - 1))); })
#endif

#ifndef offsetof
# define offsetof(type, member) ((size_t) &((type *) 0)->member)
#endif
#ifndef container_of
# define container_of(ptr, type, member) \
	({ \
		const typeof(((type *) 0)->member) * __mptr = (ptr); \
		(type *) ((char *) __mptr - offsetof(type, member)); \
	})
#endif
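/*
 * Illustrative usage sketch (struct and variable names made up):
 * container_of() recovers a pointer to the enclosing object from a pointer
 * to one of its members.
 *
 *	struct frame {
 *		int id;
 *		struct entry link;
 *	};
 *
 *	struct entry *e = get_entry();
 *	struct frame *f = container_of(e, struct frame, link);
 */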
#ifndef array_size
# define array_size(x) (sizeof(x) / sizeof((x)[0]) + __must_be_array(x))
#endif

#ifndef __must_be_array
# define __must_be_array(x) \
	build_bug_on_zero(__builtin_types_compatible_p(typeof(x), \
						       typeof(&x[0])))
#endif

#ifndef build_bug_on_zero
# define build_bug_on_zero(e) (sizeof(char[1 - 2 * !!(e)]) - 1)
#endif

#ifndef build_bug_on
# define build_bug_on(e) ((void)sizeof(char[1 - 2*!!(e)]))
#endif
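/*
 * Illustrative usage sketch: array_size() only works on real arrays; passing
 * a pointer trips __must_be_array() and build_bug_on_zero() at compile time.
 * build_bug_on() is the standalone statement form of the same trick.
 *
 *	static const char *names[] = { "eth0", "wlan0", "lo" };
 *	size_t n = array_size(names);		// 3
 *	build_bug_on(sizeof(uint32_t) != 4);	// compile error if the condition holds
 */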
#ifndef bug_on
# define bug_on(cond) assert(!(cond))
#endif

#ifndef bug
# define bug() assert(0)
#endif
#define RUNTIME_PAGE_SIZE (sysconf(_SC_PAGE_SIZE))
#define PAGE_MASK (~(RUNTIME_PAGE_SIZE - 1))
#define PAGE_ALIGN(addr) (((addr) + RUNTIME_PAGE_SIZE - 1) & PAGE_MASK)
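/*
 * Illustrative usage sketch (variable names made up; mmap() needs
 * <sys/mman.h>): the page size is queried at runtime via sysconf(), so
 * PAGE_ALIGN() is not a compile-time constant. With 4096-byte pages,
 * PAGE_ALIGN(5000) == 8192.
 *
 *	size_t map_len = PAGE_ALIGN(ring_size);
 *	void *ring = mmap(NULL, map_len, PROT_READ | PROT_WRITE,
 *			  MAP_PRIVATE | MAP_ANONYMOUS, -1, 0);
 */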
#if __BYTE_ORDER == __LITTLE_ENDIAN
static inline uint64_t htonll(uint64_t x)
{
	return bswap_64(x);
}

static inline uint64_t ntohll(uint64_t x)
{
	return bswap_64(x);
}
#elif __BYTE_ORDER == __BIG_ENDIAN
static inline uint64_t htonll(uint64_t x)
{
	return x;
}

static inline uint64_t ntohll(uint64_t x)
{
	return x;
}
#else
# error __BYTE_ORDER is neither __LITTLE_ENDIAN nor __BIG_ENDIAN
#endif
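/*
 * Illustrative usage sketch (variable names made up): htonll()/ntohll() are
 * 64-bit analogues of htonl()/ntohl(), byte-swapping on little-endian hosts
 * and returning the value unchanged on big-endian ones.
 *
 *	uint64_t wire_ts = htonll(timestamp_ns);	// host -> network order
 *	uint64_t host_ts = ntohll(wire_ts);		// network -> host order
 */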
#ifndef ___constant_swab16
# define ___constant_swab16(x) ((__u16)( \
	(((__u16)(x) & (__u16)0x00ffU) << 8) | \
	(((__u16)(x) & (__u16)0xff00U) >> 8)))
#endif

#ifndef ___constant_swab32
# define ___constant_swab32(x) ((__u32)( \
	(((__u32)(x) & (__u32)0x000000ffUL) << 24) | \
	(((__u32)(x) & (__u32)0x0000ff00UL) << 8) | \
	(((__u32)(x) & (__u32)0x00ff0000UL) >> 8) | \
	(((__u32)(x) & (__u32)0xff000000UL) >> 24)))
#endif
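/*
 * Worked example (illustrative): the ___constant_* helpers are plain shift
 * and mask expressions, so they remain usable in constant initializers.
 *
 *	___constant_swab16(0x1234)	== 0x3412
 *	___constant_swab32(0x11223344)	== 0x44332211
 */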
#if __BYTE_ORDER == __LITTLE_ENDIAN
static inline u16 cpu_to_be16(u16 val)
{
	return bswap_16(val);
}

static inline u32 cpu_to_be32(u32 val)
{
	return bswap_32(val);
}

static inline u64 cpu_to_be64(u64 val)
{
	return bswap_64(val);
}

static inline u16 cpu_to_le16(u16 val)
{
	return val;
}

static inline u32 cpu_to_le32(u32 val)
{
	return val;
}

static inline u64 cpu_to_le64(u64 val)
{
	return val;
}

# ifndef __constant_htonl
#  define __constant_htonl(x) ((__force __be32)___constant_swab32((x)))
# endif
# ifndef __constant_ntohl
#  define __constant_ntohl(x) ___constant_swab32((__force __be32)(x))
# endif
# ifndef __constant_htons
#  define __constant_htons(x) ((__force __be16)___constant_swab16((x)))
# endif
# ifndef __constant_ntohs
#  define __constant_ntohs(x) ___constant_swab16((__force __be16)(x))
# endif
#elif __BYTE_ORDER == __BIG_ENDIAN
static inline u16 cpu_to_be16(u16 val)
{
	return val;
}

static inline u32 cpu_to_be32(u32 val)
{
	return val;
}

static inline u64 cpu_to_be64(u64 val)
{
	return val;
}

static inline u16 cpu_to_le16(u16 val)
{
	return bswap_16(val);
}

static inline u32 cpu_to_le32(u32 val)
{
	return bswap_32(val);
}

static inline u64 cpu_to_le64(u64 val)
{
	return bswap_64(val);
}

# ifndef __constant_htonl
#  define __constant_htonl(x) ((__force __be32)(__u32)(x))
# endif
# ifndef __constant_ntohl
#  define __constant_ntohl(x) ((__force __u32)(__be32)(x))
# endif
# ifndef __constant_htons
#  define __constant_htons(x) ((__force __be16)(__u16)(x))
# endif
# ifndef __constant_ntohs
#  define __constant_ntohs(x) ((__force __u16)(__be16)(x))
# endif
#else
# error __BYTE_ORDER is neither __LITTLE_ENDIAN nor __BIG_ENDIAN
#endif
#define le64_to_cpu cpu_to_le64
#define le32_to_cpu cpu_to_le32
#define le16_to_cpu cpu_to_le16
#define be64_to_cpu cpu_to_be64
#define be32_to_cpu cpu_to_be32
#define be16_to_cpu cpu_to_be16
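/*
 * Illustrative usage sketch (variable names made up): cpu_to_be16() converts
 * host order to big-endian/network order, be16_to_cpu() converts back; the
 * le/be 32- and 64-bit variants behave the same way.
 *
 *	u16 port_be = cpu_to_be16(8080);
 *	u16 port    = be16_to_cpu(port_be);
 */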
#define memset fmemset
#define memcpy fmemcpy
#if defined(__amd64__) || defined(__x86_64__) || defined(__AMD64__) || \
    defined(_M_X64) || defined(__amd64)
# define CO_IN_CACHE_SHIFT 7
#elif defined(__i386__) || defined(__x86__) || defined(__X86__) || \
      defined(_M_IX86) || defined(__i386)
# define CO_IN_CACHE_SHIFT 7
#elif defined(__ia64__) || defined(__IA64__) || defined(__M_IA64)
# define CO_IN_CACHE_SHIFT 6
#elif defined(__SPU__)
# define CO_IN_CACHE_SHIFT 7
#elif defined(__powerpc64__) || defined(__ppc64__) || defined(__PPC64__)
# define CO_IN_CACHE_SHIFT 8
#elif defined(__powerpc__) || defined(__ppc__) || defined(__PPC__)
# define CO_IN_CACHE_SHIFT 7
#elif defined(__sparcv9__) || defined(__sparcv9)
# define CO_IN_CACHE_SHIFT 6
#elif defined(__sparc_v8__)
# define CO_IN_CACHE_SHIFT 5
#elif defined(__sparc__) || defined(__sparc)
# define CO_IN_CACHE_SHIFT 5
#elif defined(__ARM_EABI__)
# define CO_IN_CACHE_SHIFT 5
#elif defined(__arm__)
# define CO_IN_CACHE_SHIFT 5
#elif defined(__mips__) || defined(__mips) || defined(__MIPS__)
# if defined(_ABIO32)
#  define CO_IN_CACHE_SHIFT 5
# elif defined(_ABIN32)
#  define CO_IN_CACHE_SHIFT 5
# else
#  define CO_IN_CACHE_SHIFT 6
# endif
#else
# define CO_IN_CACHE_SHIFT 5
#endif
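/*
 * Worked example (illustrative): CO_IN_CACHE_SHIFT feeds CO_CACHE_LINE_SIZE
 * near the top of this header, so a shift of 7 gives (1 << 7) == 128 bytes
 * and a shift of 5 gives 32 bytes for __cacheline_aligned objects.
 */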
#ifndef TP_STATUS_TS_SOFTWARE
# define TP_STATUS_TS_SOFTWARE (1 << 29)
#endif

#ifndef TP_STATUS_TS_SYS_HARDWARE
# define TP_STATUS_TS_SYS_HARDWARE (1 << 30)
#endif

#ifndef TP_STATUS_TS_RAW_HARDWARE
# define TP_STATUS_TS_RAW_HARDWARE (1 << 31)
#endif
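/*
 * Illustrative usage sketch ("hdr" is a made-up name for a tpacket ring frame
 * header): with PACKET_MMAP these bits in tp_status report which clock filled
 * the frame's timestamp fields.
 *
 *	if (hdr->tp_status & TP_STATUS_TS_RAW_HARDWARE)
 *		;	// NIC hardware timestamp
 *	else if (hdr->tp_status & TP_STATUS_TS_SOFTWARE)
 *		;	// kernel software timestamp
 */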
#ifndef PACKET_QDISC_BYPASS
# define PACKET_QDISC_BYPASS 20
#endif
#ifndef ARPHRD_IEEE802154_MONITOR
# define ARPHRD_IEEE802154_MONITOR 805
#endif

#ifndef ARPHRD_IP6GRE
# define ARPHRD_IP6GRE 823
#endif

#ifndef ARPHRD_NETLINK
# define ARPHRD_NETLINK 824
#endif
#ifndef PACKET_USER
# define PACKET_USER 6
#endif

#ifndef PACKET_KERNEL
# define PACKET_KERNEL 7
#endif
#ifndef DEFFILEMODE
# define DEFFILEMODE (S_IRUSR|S_IWUSR|S_IRGRP|S_IWGRP|S_IROTH|S_IWOTH) /* 0666 */
#endif
#endif /* BUILT_IN_H */