/*
 * lz4defs.h -- architecture specific defines
 *
 * Copyright (C) 2013, LG Electronics, Kyungsik Lee <kyungsik.lee@lge.com>
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 */

#include <linux/types.h>
#include <asm/unaligned.h>
/*
 * Detects 64-bit mode
 */
#if (defined(__x86_64__) || defined(__x86_64) || defined(__amd64__) \
	|| defined(__ppc64__) || defined(__LP64__))
#define LZ4_ARCH64 1
#else
#define LZ4_ARCH64 0
#endif
/*
 * Architecture-specific macros
 */
typedef struct _U16_S { u16 v; } U16_S;
typedef struct _U32_S { u32 v; } U32_S;
typedef struct _U64_S { u64 v; } U64_S;
#if defined(CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS) \
	|| (defined(CONFIG_ARM) && __LINUX_ARM_ARCH__ >= 6 \
	&& defined(ARM_EFFICIENT_UNALIGNED_ACCESS))
#define A16(x) (((U16_S *)(x))->v)
#define A32(x) (((U32_S *)(x))->v)
#define A64(x) (((U64_S *)(x))->v)

#define PUT4(s, d) (A32(d) = A32(s))
#define PUT8(s, d) (A64(d) = A64(s))
#define LZ4_WRITE_LITTLEENDIAN_16(p, v) \
	do { A16(p) = v; p += 2; } while (0)
#else /* CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS */
#define A64(x) get_unaligned((u64 *)&(((U64_S *)(x))->v))
#define A32(x) get_unaligned((u32 *)&(((U32_S *)(x))->v))
#define A16(x) get_unaligned((u16 *)&(((U16_S *)(x))->v))
#define PUT4(s, d) \
	put_unaligned(get_unaligned((const u32 *) s), (u32 *) d)
#define PUT8(s, d) \
	put_unaligned(get_unaligned((const u64 *) s), (u64 *) d)
#define LZ4_WRITE_LITTLEENDIAN_16(p, v) \
	do { put_unaligned_le16(v, (u16 *)(p)); p += 2; } while (0)
#endif /* CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS */
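/*
 * Both branches above provide the same interface: A16()/A32()/A64()
 * load a possibly unaligned 16/32/64-bit value, and PUT4()/PUT8() copy
 * a 4- or 8-byte chunk between possibly unaligned addresses.  A minimal
 * sketch, with a hypothetical buffer used purely for illustration:
 *
 *	u8 buf[32];
 *	u32 word = A32(buf + 1);	// unaligned 32-bit load
 *	PUT8(buf, buf + 9);		// 8-byte copy: source, then destination
 */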
#define COPYLENGTH 8
#define ML_BITS 4
#define ML_MASK ((1U << ML_BITS) - 1)
#define RUN_BITS (8 - ML_BITS)
#define RUN_MASK ((1U << RUN_BITS) - 1)
#define MEMORY_USAGE 14
#define MINMATCH 4
#define SKIPSTRENGTH 6
#define LASTLITERALS 5
#define MFLIMIT (COPYLENGTH + MINMATCH)
#define MINLENGTH (MFLIMIT + 1)
#define MAXD_LOG 16
#define MAXD (1 << MAXD_LOG)
#define MAXD_MASK (u32)(MAXD - 1)
#define MAX_DISTANCE (MAXD - 1)
#define HASH_LOG (MAXD_LOG - 1)
#define HASHTABLESIZE (1 << HASH_LOG)
#define MAX_NB_ATTEMPTS 256
#define OPTIMAL_ML (int)((ML_MASK - 1) + MINMATCH)
#define LZ4_64KLIMIT ((1 << 16) + (MFLIMIT - 1))
#define HASHLOG64K ((MEMORY_USAGE - 2) + 1)
#define HASH64KTABLESIZE (1U << HASHLOG64K)
#define LZ4_HASH_VALUE(p) (((A32(p)) * 2654435761U) >> \
	((MINMATCH * 8) - (MEMORY_USAGE - 2)))
#define LZ4_HASH64K_VALUE(p) (((A32(p)) * 2654435761U) >> \
	((MINMATCH * 8) - HASHLOG64K))
#define HASH_VALUE(p) (((A32(p)) * 2654435761U) >> \
	((MINMATCH * 8) - HASH_LOG))
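/*
 * The three hash macros above are the same multiplicative (Fibonacci)
 * hash at different widths: 2654435761U is close to 2^32 divided by the
 * golden ratio, so the high bits of the 32-bit product are well mixed,
 * and the right shift keeps just enough of them to index a table
 * (HASH_LOG = 15 bits for HASH_VALUE(), MEMORY_USAGE - 2 = 12 bits for
 * LZ4_HASH_VALUE(), HASHLOG64K = 13 bits for LZ4_HASH64K_VALUE()).
 * A minimal sketch of the intended use, with a hypothetical table and
 * cursors; the real users are the compressor sources that include this
 * header:
 *
 *	u32 hash_table[HASHTABLESIZE];
 *	u32 h = HASH_VALUE(ip);			// ip is a const u8 *
 *	const u8 *match = base + hash_table[h];	// candidate match
 *	hash_table[h] = (u32)(ip - base);	// remember current position
 */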
#if LZ4_ARCH64 /* 64-bit */
#define STEPSIZE 8

#define LZ4_COPYSTEP(s, d) \
	do { PUT8(s, d); d += 8; s += 8; } while (0)

#define LZ4_COPYPACKET(s, d) LZ4_COPYSTEP(s, d)
#define LZ4_SECURECOPY(s, d, e) \
	do { if (d < e) { LZ4_WILDCOPY(s, d, e); } } while (0)
#define HTYPE u32
#ifdef __BIG_ENDIAN
#define LZ4_NBCOMMONBYTES(val) (__builtin_clzll(val) >> 3)
#else
#define LZ4_NBCOMMONBYTES(val) (__builtin_ctzll(val) >> 3)
#endif
#else /* 32-bit */
#define STEPSIZE 4

#define LZ4_COPYSTEP(s, d) \
	do { PUT4(s, d); d += 4; s += 4; } while (0)
#define LZ4_COPYPACKET(s, d) \
	do { LZ4_COPYSTEP(s, d); LZ4_COPYSTEP(s, d); } while (0)
#define LZ4_SECURECOPY LZ4_WILDCOPY
#define HTYPE const u8*

#ifdef __BIG_ENDIAN
#define LZ4_NBCOMMONBYTES(val) (__builtin_clz(val) >> 3)
#else
#define LZ4_NBCOMMONBYTES(val) (__builtin_ctz(val) >> 3)
#endif

#endif /* LZ4_ARCH64 */
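/*
 * LZ4_NBCOMMONBYTES() turns the XOR of two words into the number of
 * leading bytes they have in common: on little-endian machines the
 * first differing byte ends up in the low-order bits, so counting
 * trailing zero bits and dividing by eight gives the byte count (the
 * big-endian variant counts leading zeros instead).  A minimal sketch,
 * assuming a 64-bit little-endian build; 'ip' and 'ref' are
 * hypothetical cursors into the input:
 *
 *	int matched = 0;
 *	u64 diff = A64(ref) ^ A64(ip);
 *	if (diff)
 *		matched = LZ4_NBCOMMONBYTES(diff);	// 0..7 equal bytes
 *
 * The GCC builtins are undefined for a zero argument, so callers must
 * check 'diff' first.
 */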
#define LZ4_READ_LITTLEENDIAN_16(d, s, p) \
	(d = s - get_unaligned_le16(p))
#define LZ4_WILDCOPY(s, d, e) \
	do { LZ4_COPYPACKET(s, d); } while (d < e)
#define LZ4_BLINDCOPY(s, d, l) \
	do { u8 *e = (d) + l; LZ4_WILDCOPY(s, d, e); d = e; } while (0)
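/*
 * LZ4_WILDCOPY() copies in whole COPYLENGTH-sized packets and only
 * checks the end pointer between packets, so it may write up to
 * COPYLENGTH - 1 bytes past 'e'; callers keep enough slack at the end
 * of a block (see LASTLITERALS/MFLIMIT above) to make that safe.
 * LZ4_BLINDCOPY() is the length-based wrapper.  A minimal sketch,
 * assuming hypothetical 'src' and 'dst' buffers with that slack after
 * the 'len' bytes being copied:
 *
 *	u8 *d = dst;
 *	const u8 *s = src;
 *	LZ4_BLINDCOPY(s, d, len);	// leaves d == dst + len, s advanced
 */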