/* vi: set sw=4 ts=4: */
/*
 * Small lzma deflate implementation.
 * Copyright (C) 2006  Aurelien Jacobs <aurel@gnuage.org>
 *
 * Based on LzmaDecode.c from the LZMA SDK 4.22 (http://www.7-zip.org/)
 * Copyright (C) 1999-2005  Igor Pavlov
 *
 * Licensed under GPLv2 or later, see file LICENSE in this source tree.
 */
#include "libbb.h"
#include "bb_archive.h"

#if ENABLE_FEATURE_LZMA_FAST
#  define speed_inline ALWAYS_INLINE
#  define size_inline
#else
#  define speed_inline
#  define size_inline ALWAYS_INLINE
#endif

typedef struct {
	int fd;
	uint8_t *ptr;

/* Was keeping rc on stack in unlzma and separately allocating the buffer,
 * but with the buffer "attached to" the allocated rc the code is smaller: */
	/* uint8_t *buffer; */
#define RC_BUFFER ((uint8_t*)(rc+1))

	uint8_t *buffer_end;

/* Had provisions for a variable buffer size, but we don't need it here */
	/* int buffer_size; */
#define RC_BUFFER_SIZE 0x10000

	uint32_t code;
	uint32_t range;
	uint32_t bound;
} rc_t;

#define RC_TOP_BITS 24
#define RC_MOVE_BITS 5
#define RC_MODEL_TOTAL_BITS 11
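
/* The probabilities are 11-bit values (RC_MODEL_TOTAL_BITS).  After every
 * decoded bit they are moved toward the observed outcome by 1/32 of the
 * remaining distance (RC_MOVE_BITS).  Whenever the range drops below
 * 2^RC_TOP_BITS, rc_do_normalize() shifts another input byte into 'code'. */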

/* Called twice: once at startup (LZMA_FAST only) and once in rc_do_normalize() */
static size_inline void rc_read(rc_t *rc)
{
	int buffer_size = safe_read(rc->fd, RC_BUFFER, RC_BUFFER_SIZE);
//TODO: return -1 instead
//This will make unlzma delete the broken unpacked file on unpack errors
	if (buffer_size <= 0)
		bb_error_msg_and_die("unexpected EOF");
	rc->ptr = RC_BUFFER;
	rc->buffer_end = RC_BUFFER + buffer_size;
}

/* Called twice, but one callsite is in speed_inline'd rc_is_bit_1() */
static void rc_do_normalize(rc_t *rc)
{
	if (rc->ptr >= rc->buffer_end)
		rc_read(rc);
	rc->range <<= 8;
	rc->code = (rc->code << 8) | *rc->ptr++;
}

static ALWAYS_INLINE rc_t* rc_init(int fd) /*, int buffer_size) */
{
	int i;
	rc_t *rc;

	rc = xzalloc(sizeof(*rc) + RC_BUFFER_SIZE);

	rc->fd = fd;
	/* rc->ptr = rc->buffer_end; */

	for (i = 0; i < 5; i++) {
#if ENABLE_FEATURE_LZMA_FAST
		if (rc->ptr >= rc->buffer_end)
			rc_read(rc);
		rc->code = (rc->code << 8) | *rc->ptr++;
#else
		rc_do_normalize(rc);
#endif
	}
	rc->range = 0xFFFFFFFF;
	return rc;
}

static ALWAYS_INLINE void rc_free(rc_t *rc)
{
	free(rc);
}

static ALWAYS_INLINE void rc_normalize(rc_t *rc)
{
	if (rc->range < (1 << RC_TOP_BITS)) {
		rc_do_normalize(rc);
	}
}
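
/* Decode one bit whose probability of being 0 is *p (scaled to 0..2^11):
 * bound = *p * (range >> 11) splits the current range; a code below the
 * split decodes as 0, at or above it as 1, and *p is then nudged toward
 * the decoded value.  Returns the decoded bit. */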
/* rc_is_bit_1 is called 9 times */
static speed_inline int rc_is_bit_1(rc_t *rc, uint16_t *p)
{
	rc_normalize(rc);
	rc->bound = *p * (rc->range >> RC_MODEL_TOTAL_BITS);
	if (rc->code < rc->bound) {
		rc->range = rc->bound;
		*p += ((1 << RC_MODEL_TOTAL_BITS) - *p) >> RC_MOVE_BITS;
		return 0;
	}
	rc->range -= rc->bound;
	rc->code -= rc->bound;
	*p -= *p >> RC_MOVE_BITS;
	return 1;
}

/* Called 4 times in unlzma loop */
static speed_inline int rc_get_bit(rc_t *rc, uint16_t *p, int *symbol)
{
	int ret = rc_is_bit_1(rc, p);
	*symbol = *symbol * 2 + ret;
	return ret;
}

/* Decode a bit with fixed 50% probability (used for the high distance bits) */
static ALWAYS_INLINE int rc_direct_bit(rc_t *rc)
{
	rc_normalize(rc);
	rc->range >>= 1;
	if (rc->code >= rc->range) {
		rc->code -= rc->range;
		return 1;
	}
	return 0;
}

/* Decode num_levels bits into *symbol using a tree of adaptive probabilities */
static speed_inline void
rc_bit_tree_decode(rc_t *rc, uint16_t *p, int num_levels, int *symbol)
{
	int i = num_levels;

	*symbol = 1;
	while (i--)
		rc_get_bit(rc, p + *symbol, symbol);
	*symbol -= 1 << num_levels;
}
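
/* An LZMA_alone (.lzma) stream starts with a 13-byte header: one properties
 * byte packing the lc/lp/pb parameters as (pb * 5 + lp) * 9 + lc, a
 * little-endian 32-bit dictionary size, and a little-endian 64-bit
 * uncompressed size. */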
typedef struct {
	uint8_t pos;
	uint32_t dict_size;
	uint64_t dst_size;
} PACKED lzma_header_t;

/* #defines would force the compiler to compute/optimize each one at every
 * usage.  Have a heart and use an enum instead. */
enum {
	LZMA_BASE_SIZE = 1846,
	LZMA_LIT_SIZE  = 768,

	LZMA_NUM_POS_BITS_MAX = 4,

	LZMA_LEN_NUM_LOW_BITS  = 3,
	LZMA_LEN_NUM_MID_BITS  = 3,
	LZMA_LEN_NUM_HIGH_BITS = 8,

	LZMA_LEN_CHOICE     = 0,
	LZMA_LEN_CHOICE_2   = (LZMA_LEN_CHOICE + 1),
	LZMA_LEN_LOW        = (LZMA_LEN_CHOICE_2 + 1),
	LZMA_LEN_MID        = (LZMA_LEN_LOW \
	                      + (1 << (LZMA_NUM_POS_BITS_MAX + LZMA_LEN_NUM_LOW_BITS))),
	LZMA_LEN_HIGH       = (LZMA_LEN_MID \
	                      + (1 << (LZMA_NUM_POS_BITS_MAX + LZMA_LEN_NUM_MID_BITS))),
	LZMA_NUM_LEN_PROBS  = (LZMA_LEN_HIGH + (1 << LZMA_LEN_NUM_HIGH_BITS)),

	LZMA_NUM_STATES     = 12,
	LZMA_NUM_LIT_STATES = 7,

	LZMA_START_POS_MODEL_INDEX = 4,
	LZMA_END_POS_MODEL_INDEX   = 14,
	LZMA_NUM_FULL_DISTANCES    = (1 << (LZMA_END_POS_MODEL_INDEX >> 1)),

	LZMA_NUM_POS_SLOT_BITS     = 6,
	LZMA_NUM_LEN_TO_POS_STATES = 4,

	LZMA_NUM_ALIGN_BITS = 4,

	LZMA_MATCH_MIN_LEN  = 2,

	LZMA_IS_MATCH       = 0,
	LZMA_IS_REP         = (LZMA_IS_MATCH + (LZMA_NUM_STATES << LZMA_NUM_POS_BITS_MAX)),
	LZMA_IS_REP_G0      = (LZMA_IS_REP + LZMA_NUM_STATES),
	LZMA_IS_REP_G1      = (LZMA_IS_REP_G0 + LZMA_NUM_STATES),
	LZMA_IS_REP_G2      = (LZMA_IS_REP_G1 + LZMA_NUM_STATES),
	LZMA_IS_REP_0_LONG  = (LZMA_IS_REP_G2 + LZMA_NUM_STATES),
	LZMA_POS_SLOT       = (LZMA_IS_REP_0_LONG \
	                      + (LZMA_NUM_STATES << LZMA_NUM_POS_BITS_MAX)),
	LZMA_SPEC_POS       = (LZMA_POS_SLOT \
	                      + (LZMA_NUM_LEN_TO_POS_STATES << LZMA_NUM_POS_SLOT_BITS)),
	LZMA_ALIGN          = (LZMA_SPEC_POS \
	                      + LZMA_NUM_FULL_DISTANCES - LZMA_END_POS_MODEL_INDEX),
	LZMA_LEN_CODER      = (LZMA_ALIGN + (1 << LZMA_NUM_ALIGN_BITS)),
	LZMA_REP_LEN_CODER  = (LZMA_LEN_CODER + LZMA_NUM_LEN_PROBS),
	LZMA_LITERAL        = (LZMA_REP_LEN_CODER + LZMA_NUM_LEN_PROBS),
};
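
/* All probability counters live in a single uint16_t array; the enum values
 * above are the offsets of the individual sub-coders (match/rep flags,
 * position slots, length coders, literals) within that array. */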
IF_DESKTOP(long long) int FAST_FUNC
unpack_lzma_stream(transformer_aux_data_t *aux UNUSED_PARAM, int src_fd, int dst_fd)
{
	IF_DESKTOP(long long total_written = 0;)
	lzma_header_t header;
	int lc, pb, lp;
	uint32_t pos_state_mask;
	uint32_t literal_pos_mask;
	uint16_t *p;
	int num_bits;
	int num_probs;
	rc_t *rc;
	int i;
	uint8_t *buffer;
	uint8_t previous_byte = 0;
	size_t buffer_pos = 0, global_pos = 0;
	int len = 0;
	int state = 0;
	uint32_t rep0 = 1, rep1 = 1, rep2 = 1, rep3 = 1;

	if (full_read(src_fd, &header, sizeof(header)) != sizeof(header)
	 || header.pos >= (9 * 5 * 5)
	) {
		bb_error_msg("bad lzma header");
		return -1;
	}

	/* The properties byte encodes lc, lp and pb as (pb * 5 + lp) * 9 + lc */
	i = header.pos / 9;
	lc = header.pos % 9;
	pb = i / 5;
	lp = i % 5;
	pos_state_mask = (1 << pb) - 1;
	literal_pos_mask = (1 << lp) - 1;

	header.dict_size = SWAP_LE32(header.dict_size);
	header.dst_size = SWAP_LE64(header.dst_size);

	if (header.dict_size == 0)
		header.dict_size++;

	buffer = xmalloc(MIN(header.dst_size, header.dict_size));

	num_probs = LZMA_BASE_SIZE + (LZMA_LIT_SIZE << (lc + lp));
	p = xmalloc(num_probs * sizeof(*p));
	num_probs += LZMA_LITERAL - LZMA_BASE_SIZE;
	for (i = 0; i < num_probs; i++)
		p[i] = (1 << RC_MODEL_TOTAL_BITS) >> 1;

	rc = rc_init(src_fd); /*, RC_BUFFER_SIZE); */
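
	/* Main decode loop: each iteration decodes either one literal byte or one
	 * (length, distance) match.  Output goes through 'buffer', which doubles
	 * as the LZMA dictionary and is flushed to dst_fd whenever it fills up. */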
	while (global_pos + buffer_pos < header.dst_size) {
		int pos_state = (buffer_pos + global_pos) & pos_state_mask;
		uint16_t *prob = p + LZMA_IS_MATCH + (state << LZMA_NUM_POS_BITS_MAX) + pos_state;

		if (!rc_is_bit_1(rc, prob)) {
			static const char next_state[LZMA_NUM_STATES] =
				{ 0, 0, 0, 0, 1, 2, 3, 4, 5, 6, 4, 5 };
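			/* States 0..6 mean the previous symbols were literals; the table
			 * maps the current state to the state after one more literal. */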
			int mi = 1;

			prob = (p + LZMA_LITERAL
			        + (LZMA_LIT_SIZE * ((((buffer_pos + global_pos) & literal_pos_mask) << lc)
			                            + (previous_byte >> (8 - lc))
			                           )
			          )
			);

			if (state >= LZMA_NUM_LIT_STATES) {
				int match_byte;
				uint32_t pos = buffer_pos - rep0;

				while (pos >= header.dict_size)
					pos += header.dict_size;
				match_byte = buffer[pos];
				do {
					int bit;

					match_byte <<= 1;
					bit = match_byte & 0x100;
					bit ^= (rc_get_bit(rc, prob + 0x100 + bit + mi, &mi) << 8); /* 0x100 or 0 */
					if (bit)
						break;
				} while (mi < 0x100);
			}
			while (mi < 0x100) {
				rc_get_bit(rc, prob + mi, &mi);
			}

			state = next_state[state];

			previous_byte = (uint8_t) mi;
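			/* Store the decoded literal; if the dictionary buffer just became
			 * full, flush it to dst_fd and wrap around. */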
#if ENABLE_FEATURE_LZMA_FAST
 one_byte1:
			buffer[buffer_pos++] = previous_byte;
			if (buffer_pos == header.dict_size) {
				buffer_pos = 0;
				global_pos += header.dict_size;
				if (full_write(dst_fd, buffer, header.dict_size) != (ssize_t)header.dict_size)
					goto bad;
				IF_DESKTOP(total_written += header.dict_size;)
			}
#else
			goto one_byte2;
#endif
		} else {
			int offset;
			uint16_t *prob2;
#define prob_len prob2

			prob2 = p + LZMA_IS_REP + state;
			if (!rc_is_bit_1(rc, prob2)) {
				/* Not a repeated match: shift the distance history */
				rep3 = rep2;
				rep2 = rep1;
				rep1 = rep0;
				state = state < LZMA_NUM_LIT_STATES ? 0 : 3;
				prob2 = p + LZMA_LEN_CODER;
			} else {
				prob2 += LZMA_IS_REP_G0 - LZMA_IS_REP;
				if (!rc_is_bit_1(rc, prob2)) {
					prob2 = (p + LZMA_IS_REP_0_LONG
					        + (state << LZMA_NUM_POS_BITS_MAX)
					        + pos_state
					);
					if (!rc_is_bit_1(rc, prob2)) {
						/* "Short rep": copy a single byte from distance rep0 */
#if ENABLE_FEATURE_LZMA_FAST
						uint32_t pos = buffer_pos - rep0;
						state = state < LZMA_NUM_LIT_STATES ? 9 : 11;
						while (pos >= header.dict_size)
							pos += header.dict_size;
						previous_byte = buffer[pos];
						goto one_byte1;
#else
						state = state < LZMA_NUM_LIT_STATES ? 9 : 11;
						len = 1;
						goto string;
#endif
					}
				} else {
					/* Select one of the previous distances (rep1..rep3) */
					uint32_t distance;

					prob2 += LZMA_IS_REP_G1 - LZMA_IS_REP_G0;
					distance = rep1;
					if (rc_is_bit_1(rc, prob2)) {
						prob2 += LZMA_IS_REP_G2 - LZMA_IS_REP_G1;
						distance = rep2;
						if (rc_is_bit_1(rc, prob2)) {
							distance = rep3;
							rep3 = rep2;
						}
						rep2 = rep1;
					}
					rep1 = rep0;
					rep0 = distance;
				}
				state = state < LZMA_NUM_LIT_STATES ? 8 : 11;
				prob2 = p + LZMA_REP_LEN_CODER;
			}
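
			/* Decode the match length: two choice bits select the low (3-bit),
			 * mid (3-bit) or high (8-bit) length tree, and 'offset' restores
			 * the base value of the selected slice. */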
			prob_len = prob2 + LZMA_LEN_CHOICE;
			num_bits = LZMA_LEN_NUM_LOW_BITS;
			if (!rc_is_bit_1(rc, prob_len)) {
				prob_len += LZMA_LEN_LOW - LZMA_LEN_CHOICE
				            + (pos_state << LZMA_LEN_NUM_LOW_BITS);
				offset = 0;
			} else {
				prob_len += LZMA_LEN_CHOICE_2 - LZMA_LEN_CHOICE;
				if (!rc_is_bit_1(rc, prob_len)) {
					prob_len += LZMA_LEN_MID - LZMA_LEN_CHOICE_2
					            + (pos_state << LZMA_LEN_NUM_MID_BITS);
					offset = 1 << LZMA_LEN_NUM_LOW_BITS;
					num_bits += LZMA_LEN_NUM_MID_BITS - LZMA_LEN_NUM_LOW_BITS;
				} else {
					prob_len += LZMA_LEN_HIGH - LZMA_LEN_CHOICE_2;
					offset = ((1 << LZMA_LEN_NUM_LOW_BITS)
					          + (1 << LZMA_LEN_NUM_MID_BITS));
					num_bits += LZMA_LEN_NUM_HIGH_BITS - LZMA_LEN_NUM_LOW_BITS;
				}
			}
			rc_bit_tree_decode(rc, prob_len, num_bits, &len);
			len += offset;
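
			/* For a fresh match (states 0..3), decode the distance: a 6-bit
			 * position slot, then extra distance bits taken either from the
			 * SPEC_POS probabilities or as direct bits plus a 4-bit aligned
			 * tail. */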
			if (state < 4) {
				int pos_slot;
				uint16_t *prob3;

				state += LZMA_NUM_LIT_STATES;
				prob3 = p + LZMA_POS_SLOT +
					((len < LZMA_NUM_LEN_TO_POS_STATES ? len :
					  LZMA_NUM_LEN_TO_POS_STATES - 1)
					  << LZMA_NUM_POS_SLOT_BITS);
				rc_bit_tree_decode(rc, prob3,
						LZMA_NUM_POS_SLOT_BITS, &pos_slot);
				rep0 = pos_slot;
				if (pos_slot >= LZMA_START_POS_MODEL_INDEX) {
					int i2, mi2, num_bits2 = (pos_slot >> 1) - 1;
					rep0 = 2 | (pos_slot & 1);
					if (pos_slot < LZMA_END_POS_MODEL_INDEX) {
						rep0 <<= num_bits2;
						prob3 = p + LZMA_SPEC_POS + rep0 - pos_slot - 1;
					} else {
						for (; num_bits2 != LZMA_NUM_ALIGN_BITS; num_bits2--)
							rep0 = (rep0 << 1) | rc_direct_bit(rc);
						rep0 <<= LZMA_NUM_ALIGN_BITS;
						prob3 = p + LZMA_ALIGN;
					}
					i2 = 1;
					mi2 = 1;
					while (num_bits2--) {
						if (rc_get_bit(rc, prob3 + mi2, &mi2))
							rep0 |= i2;
						i2 <<= 1;
					}
				}
				rep0++;
				if ((int32_t)rep0 == 0)
					break; /* end-of-stream marker */
			}

			len += LZMA_MATCH_MIN_LEN;
 IF_NOT_FEATURE_LZMA_FAST(string:)
			do {
				uint32_t pos = buffer_pos - rep0;
				while (pos >= header.dict_size)
					pos += header.dict_size;
				previous_byte = buffer[pos];
 IF_NOT_FEATURE_LZMA_FAST(one_byte2:)
				buffer[buffer_pos++] = previous_byte;
				if (buffer_pos == header.dict_size) {
					buffer_pos = 0;
					global_pos += header.dict_size;
					if (full_write(dst_fd, buffer, header.dict_size) != (ssize_t)header.dict_size)
						goto bad;
					IF_DESKTOP(total_written += header.dict_size;)
				}
				len--;
			} while (len != 0 && buffer_pos < header.dst_size);
		}
	}
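
	/* Flush whatever is still buffered in the dictionary and clean up */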
	{
		IF_NOT_DESKTOP(int total_written = 0; /* success */)
		IF_DESKTOP(total_written += buffer_pos;)
		if (full_write(dst_fd, buffer, buffer_pos) != (ssize_t)buffer_pos) {
 bad:
			total_written = -1; /* failure */
		}
		rc_free(rc);
		free(p);
		free(buffer);
		return total_written;
	}
}