#ifndef BUILT_IN_H
#define BUILT_IN_H

/* Parts taken from the Linux kernel, GPL, version 2. */

#include <linux/if_packet.h>
#include <stdint.h>
#include <byteswap.h>
#include <endian.h>
#include <unistd.h>
#include <assert.h>
#include <sys/stat.h>
#include <asm/byteorder.h>

typedef uint64_t u64;
typedef uint32_t u32;
typedef uint16_t u16;
typedef uint8_t u8;
#ifndef CO_CACHE_LINE_SIZE
# define CO_CACHE_LINE_SIZE (1 << CO_IN_CACHE_SHIFT)
#endif

#ifndef __aligned_16
# define __aligned_16 __attribute__((aligned(16)))
#endif

#ifndef __cacheline_aligned
# define __cacheline_aligned __attribute__((aligned(CO_CACHE_LINE_SIZE)))
#endif

#ifndef __aligned_tpacket
# define __aligned_tpacket __attribute__((aligned(TPACKET_ALIGNMENT)))
#endif

#ifndef __align_tpacket
# define __align_tpacket(x) __attribute__((aligned(TPACKET_ALIGN(x))))
#endif

#ifndef __check_format_printf
# define __check_format_printf(pos_fmtstr, pos_fmtargs) \
	__attribute__ ((format (printf, (pos_fmtstr), (pos_fmtargs))))
#endif

#ifndef __packed
# define __packed __attribute__((packed))
#endif
#ifndef round_up
# define round_up(x, alignment) (((x) + (alignment) - 1) & ~((alignment) - 1))
#endif

#ifndef round_up_cacheline
# define round_up_cacheline(x) round_up((x), CO_CACHE_LINE_SIZE)
#endif
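/*
 * round_up() only works for power-of-two alignments: adding (alignment - 1)
 * and masking with ~(alignment - 1) rounds to the next multiple, e.g.
 * round_up(10, 8) == (10 + 7) & ~7 == 16, while round_up(16, 8) stays 16.
 */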
#ifndef likely
# define likely(x) __builtin_expect(!!(x), 1)
#endif

#ifndef unlikely
# define unlikely(x) __builtin_expect(!!(x), 0)
#endif

#ifndef constant
# define constant(x) __builtin_constant_p(x)
#endif

#ifndef fmemset
# define fmemset __builtin_memset
#endif

#ifndef fmemcpy
# define fmemcpy __builtin_memcpy
#endif
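/*
 * likely()/unlikely() pass branch-prediction hints to GCC so the expected
 * path is laid out as the fall-through case, e.g.
 * if (unlikely(buf == NULL)) { ... error path ... }.
 */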
#ifndef __maybe_unused
# define __maybe_unused __attribute__((__unused__))
#endif

#ifndef __warn_unused_result
# define __warn_unused_result __attribute__((warn_unused_result))
#endif

#ifndef noinline
# define noinline __attribute__((noinline))
#endif

#ifndef __noreturn
# define __noreturn __attribute__((noreturn))
#endif

#ifndef __hidden
# define __hidden __attribute__((visibility("hidden")))
#endif

#ifndef __pure
# define __pure __attribute__ ((pure))
#endif

#ifndef __force
# define __force /* unimplemented */
#endif
/* see config_enabled et al. in linux/kconfig.h for details. */
#define __ARG_PLACEHOLDER_1 0,
#define is_defined(cfg) _is_defined(cfg)
#define _is_defined(value) __is_defined(__ARG_PLACEHOLDER_##value)
#define __is_defined(arg1_or_junk) ___is_defined(arg1_or_junk 1, 0)
#define ___is_defined(__ignored, val, ...) val
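/*
 * is_defined(CONFIG_FOO) evaluates to 1 when CONFIG_FOO is defined to 1 and
 * to 0 otherwise: if the macro expands to 1, token pasting yields
 * __ARG_PLACEHOLDER_1, which expands to "0," and shifts the literal 1 into
 * ___is_defined()'s val slot. Unlike #ifdef, the result is an ordinary
 * integer constant and can be used in C expressions, e.g.
 * if (is_defined(CONFIG_FOO)) { ... }.
 */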
#ifndef max
# define max(a, b) ({ \
	typeof (a) _a = (a); \
	typeof (b) _b = (b); \
	_a > _b ? _a : _b; })
#endif

#ifndef max_t
# define max_t(type, a, b) ({ \
	type ___max1 = (a); \
	type ___max2 = (b); \
	___max1 > ___max2 ? ___max1 : ___max2; })
#endif
#ifndef min
# define min(a, b) ({ \
	typeof (a) _a = (a); \
	typeof (b) _b = (b); \
	_a < _b ? _a : _b; })
#endif

#ifndef min_t
# define min_t(type, a, b) ({ \
	type ___min1 = (a); \
	type ___min2 = (b); \
	___min1 < ___min2 ? ___min1 : ___min2; })
#endif
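/*
 * The statement-expression form evaluates each argument exactly once, so
 * min(i++, limit) does not bump i twice. The _t variants additionally force
 * both sides to an explicit type, which sidesteps signed/unsigned comparison
 * trouble, e.g. size_t n = min_t(size_t, len, sizeof(buf)).
 */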
#ifndef ispow2
# define ispow2(x) ({ !!((x) && !((x) & ((x) - 1))); })
#endif

#ifndef offsetof
# define offsetof(type, member) ((size_t) &((type *) 0)->member)
#endif

#ifndef container_of
# define container_of(ptr, type, member) ({ \
	const typeof(((type *) 0)->member) * __mptr = (ptr); \
	(type *) ((char *) __mptr - offsetof(type, member)); })
#endif
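/*
 * container_of() recovers the enclosing structure from a pointer to one of
 * its members by subtracting the member's offset. Illustrative use: given
 * struct frame { struct tpacket_hdr hdr; ... } and a struct tpacket_hdr *h
 * pointing into such an object, container_of(h, struct frame, hdr) yields
 * the struct frame * it is embedded in. ispow2() relies on the fact that
 * clearing the lowest set bit of a power of two (x & (x - 1)) leaves zero.
 */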
#ifndef array_size
# define array_size(x) (sizeof(x) / sizeof((x)[0]) + __must_be_array(x))
#endif

#ifndef __must_be_array
# define __must_be_array(x) \
	build_bug_on_zero(__builtin_types_compatible_p(typeof(x), \
						       typeof(&x[0])))
#endif

#ifndef build_bug_on_zero
# define build_bug_on_zero(e) (sizeof(char[1 - 2 * !!(e)]) - 1)
#endif
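/*
 * build_bug_on_zero(e) evaluates to 0 when e is false, but declares a
 * negative-sized array (a compile error) when e is true. __must_be_array()
 * uses it to reject pointers: for a real array, typeof(x) and typeof(&x[0])
 * are incompatible, so the check contributes 0 and array_size() works; for a
 * pointer the types match and compilation fails instead of silently
 * returning a bogus element count.
 */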
#ifndef build_bug_on
# define build_bug_on(e) ((void)sizeof(char[1 - 2*!!(e)]))
#endif

#ifndef bug_on
# define bug_on(cond) assert(!(cond))
#endif

#ifndef bug
# define bug() assert(0)
#endif
#define RUNTIME_PAGE_SIZE (sysconf(_SC_PAGE_SIZE))
#define PAGE_MASK (~(RUNTIME_PAGE_SIZE - 1))
#define PAGE_ALIGN(addr) (((addr) + RUNTIME_PAGE_SIZE - 1) & PAGE_MASK)
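/*
 * PAGE_ALIGN() rounds an address or length up to the next page boundary,
 * e.g. with 4096-byte pages PAGE_ALIGN(5000) == 8192 and PAGE_ALIGN(4096)
 * stays 4096. Since the page size comes from sysconf() at runtime, these
 * are not compile-time constants and cannot be used in static initializers.
 */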
#if __BYTE_ORDER == __LITTLE_ENDIAN
static inline uint64_t htonll(uint64_t x)
{
	return bswap_64(x);
}

static inline uint64_t ntohll(uint64_t x)
{
	return bswap_64(x);
}
#elif __BYTE_ORDER == __BIG_ENDIAN
static inline uint64_t htonll(uint64_t x)
{
	return x;
}

static inline uint64_t ntohll(uint64_t x)
{
	return x;
}
#else
# error __BYTE_ORDER is neither __LITTLE_ENDIAN nor __BIG_ENDIAN
#endif
#ifndef ___constant_swab16
# define ___constant_swab16(x) ((__u16)( \
	(((__u16)(x) & (__u16)0x00ffU) << 8) | \
	(((__u16)(x) & (__u16)0xff00U) >> 8)))
#endif

#ifndef ___constant_swab32
# define ___constant_swab32(x) ((__u32)( \
	(((__u32)(x) & (__u32)0x000000ffUL) << 24) | \
	(((__u32)(x) & (__u32)0x0000ff00UL) << 8) | \
	(((__u32)(x) & (__u32)0x00ff0000UL) >> 8) | \
	(((__u32)(x) & (__u32)0xff000000UL) >> 24)))
#endif
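/*
 * Unlike the inline conversion helpers below, ___constant_swab16/32() are
 * pure integer-constant expressions, so they can byte-swap values in places
 * that require compile-time constants, e.g. static initializers or case
 * labels built via __constant_htons(ETH_P_IP).
 */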
#if __BYTE_ORDER == __LITTLE_ENDIAN
static inline u16 cpu_to_be16(u16 val)
{
	return bswap_16(val);
}

static inline u32 cpu_to_be32(u32 val)
{
	return bswap_32(val);
}

static inline u64 cpu_to_be64(u64 val)
{
	return bswap_64(val);
}

static inline u16 cpu_to_le16(u16 val)
{
	return val;
}

static inline u32 cpu_to_le32(u32 val)
{
	return val;
}

static inline u64 cpu_to_le64(u64 val)
{
	return val;
}

# ifndef __constant_htonl
#  define __constant_htonl(x) ((__force __be32)___constant_swab32((x)))
# endif
# ifndef __constant_ntohl
#  define __constant_ntohl(x) ___constant_swab32((__force __be32)(x))
# endif
# ifndef __constant_htons
#  define __constant_htons(x) ((__force __be16)___constant_swab16((x)))
# endif
# ifndef __constant_ntohs
#  define __constant_ntohs(x) ___constant_swab16((__force __be16)(x))
# endif
#elif __BYTE_ORDER == __BIG_ENDIAN
static inline u16 cpu_to_be16(u16 val)
{
	return val;
}

static inline u32 cpu_to_be32(u32 val)
{
	return val;
}

static inline u64 cpu_to_be64(u64 val)
{
	return val;
}

static inline u16 cpu_to_le16(u16 val)
{
	return bswap_16(val);
}

static inline u32 cpu_to_le32(u32 val)
{
	return bswap_32(val);
}

static inline u64 cpu_to_le64(u64 val)
{
	return bswap_64(val);
}

# ifndef __constant_htonl
#  define __constant_htonl(x) ((__force __be32)(__u32)(x))
# endif
# ifndef __constant_ntohl
#  define __constant_ntohl(x) ((__force __u32)(__be32)(x))
# endif
# ifndef __constant_htons
#  define __constant_htons(x) ((__force __be16)(__u16)(x))
# endif
# ifndef __constant_ntohs
#  define __constant_ntohs(x) ((__force __u16)(__be16)(x))
# endif
#else
# error __BYTE_ORDER is neither __LITTLE_ENDIAN nor __BIG_ENDIAN
#endif
#define le64_to_cpu cpu_to_le64
#define le32_to_cpu cpu_to_le32
#define le16_to_cpu cpu_to_le16
#define be64_to_cpu cpu_to_be64
#define be32_to_cpu cpu_to_be32
#define be16_to_cpu cpu_to_be16
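/*
 * The from-CPU and to-CPU directions can share one implementation because
 * the conversion is either an identity or a byte swap, and swapping twice
 * restores the original value, so e.g. be32_to_cpu() is just cpu_to_be32().
 */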
#define memset fmemset
#define memcpy fmemcpy
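/*
 * Routing memset()/memcpy() through the GCC builtins lets the compiler
 * expand small, constant-sized fills and copies inline instead of always
 * emitting a libc call.
 */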
#if defined(__amd64__) || defined(__x86_64__) || defined(__AMD64__) || \
    defined(_M_X64) || defined(__amd64)
# define CO_IN_CACHE_SHIFT 7
#elif defined(__i386__) || defined(__x86__) || defined(__X86__) || \
      defined(_M_IX86) || defined(__i386)
# define CO_IN_CACHE_SHIFT 7
#elif defined(__ia64__) || defined(__IA64__) || defined(__M_IA64)
# define CO_IN_CACHE_SHIFT 6
#elif defined(__SPU__)
# define CO_IN_CACHE_SHIFT 7
#elif defined(__powerpc64__) || defined(__ppc64__) || defined(__PPC64__) || \
      defined(_ARCH_PPC64)
# define CO_IN_CACHE_SHIFT 8
#elif defined(__powerpc__) || defined(__ppc__) || defined(__PPC__) || \
      defined(_ARCH_PPC)
# define CO_IN_CACHE_SHIFT 7
#elif defined(__sparcv9__) || defined(__sparcv9)
# define CO_IN_CACHE_SHIFT 6
#elif defined(__sparc_v8__)
# define CO_IN_CACHE_SHIFT 5
#elif defined(__sparc__) || defined(__sparc)
# define CO_IN_CACHE_SHIFT 5
#elif defined(__ARM_EABI__)
# define CO_IN_CACHE_SHIFT 5
#elif defined(__arm__)
# define CO_IN_CACHE_SHIFT 5
#elif defined(__mips__) || defined(__mips) || defined(__MIPS__)
# if defined(_ABIO32)
#  define CO_IN_CACHE_SHIFT 5
# elif defined(_ABIN32)
#  define CO_IN_CACHE_SHIFT 5
# else
#  define CO_IN_CACHE_SHIFT 6
# endif
#else
# define CO_IN_CACHE_SHIFT 5
#endif
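/*
 * CO_IN_CACHE_SHIFT is the log2 of the assumed CPU cache line size, so the
 * x86-64 value of 7 gives a CO_CACHE_LINE_SIZE of 1 << 7 = 128 bytes. It
 * feeds the __cacheline_aligned attribute and round_up_cacheline() above;
 * unrecognized targets fall back to a 32-byte line (shift of 5).
 */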
#ifndef TP_STATUS_TS_SOFTWARE
# define TP_STATUS_TS_SOFTWARE (1 << 29)
#endif

#ifndef TP_STATUS_TS_SYS_HARDWARE
# define TP_STATUS_TS_SYS_HARDWARE (1 << 30)
#endif

#ifndef TP_STATUS_TS_RAW_HARDWARE
# define TP_STATUS_TS_RAW_HARDWARE (1 << 31)
#endif
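/*
 * Fallbacks for older linux/if_packet.h headers: these tp_status bits tell
 * a PACKET_MMAP user which clock (software, system-adjusted hardware, or
 * raw hardware) produced the timestamp stored with a ring frame.
 */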
#ifndef PACKET_QDISC_BYPASS
# define PACKET_QDISC_BYPASS 20
#endif

#ifndef ARPHRD_IEEE802154_MONITOR
# define ARPHRD_IEEE802154_MONITOR 805
#endif

#ifndef ARPHRD_IP6GRE
# define ARPHRD_IP6GRE 823
#endif

#ifndef ARPHRD_NETLINK
# define ARPHRD_NETLINK 824
#endif

#ifndef PACKET_USER
# define PACKET_USER 6
#endif

#ifndef PACKET_KERNEL
# define PACKET_KERNEL 7
#endif

#ifndef DEFFILEMODE
# define DEFFILEMODE (S_IRUSR|S_IWUSR|S_IRGRP|S_IWGRP|S_IROTH|S_IWOTH) /* 0666 */
#endif

#endif /* BUILT_IN_H */