winevulkan: Update to VK spec version 1.2.195.
[wine.git] / dlls / cabinet / deflate.c
blob681f187ecdaefdbd01589d18755f3bae9639c2f1
1 /* deflate.c -- compress data using the deflation algorithm
3 * Copyright (C) 1995-2017 Jean-loup Gailly and Mark Adler
5 * This software is provided 'as-is', without any express or implied
6 * warranty. In no event will the authors be held liable for any damages
7 * arising from the use of this software.
9 * Permission is granted to anyone to use this software for any purpose,
10 * including commercial applications, and to alter it and redistribute it
11 * freely, subject to the following restrictions:
13 * 1. The origin of this software must not be misrepresented; you must not
14 * claim that you wrote the original software. If you use this software
15 * in a product, an acknowledgment in the product documentation would be
16 * appreciated but is not required.
17 * 2. Altered source versions must be plainly marked as such, and must not be
18 * misrepresented as being the original software.
19 * 3. This notice may not be removed or altered from any source distribution.
21 * Jean-loup Gailly Mark Adler
22 * jloup@gzip.org madler@alumni.caltech.edu
25 #include <stdlib.h>
26 #include <string.h>
27 #include <limits.h>
28 #include "zlib.h"
/* --- zlib utility layer: default tuning values, memcpy/memset aliases,
 * debug tracing macros (compiled out here because ZLIB_DEBUG is not
 * defined), allocation wrappers that go through the user-supplied
 * zalloc/zfree callbacks, and the error-message table. --- */
30 #define DEF_MEM_LEVEL 8
31 #define DEF_WBITS MAX_WBITS
32 #define zmemcpy memcpy
33 #define zmemzero(dest, len) memset(dest, 0, len)
/* Assert/Trace* expand to nothing in non-debug builds. */
35 #define Assert(cond,msg)
36 #define Trace(x)
37 #define Tracev(x)
38 #define Tracevv(x)
39 #define Tracecv(c,x)
41 #define ZALLOC(strm, items, size) \
42 (*((strm)->zalloc))((strm)->opaque, (items), (size))
43 #define ZFREE(strm, addr) (*((strm)->zfree))((strm)->opaque, (voidpf)(addr))
44 #define TRY_FREE(s, p) {if (p) ZFREE(s, p);}
46 /* Reverse the bytes in a 32-bit value */
47 #define ZSWAP32(q) ((((q) >> 24) & 0xff) + (((q) >> 8) & 0xff00) + \
48 (((q) & 0xff00) << 8) + (((q) & 0xff) << 24))
/* Messages indexed by 2 - error_code (see ERR_MSG below): entry 0 is
 * Z_NEED_DICT (2) down through Z_VERSION_ERROR (-6). */
50 static const char * const z_errmsg[10] = {
51 (z_const char *)"need dictionary", /* Z_NEED_DICT 2 */
52 (z_const char *)"stream end", /* Z_STREAM_END 1 */
53 (z_const char *)"", /* Z_OK 0 */
54 (z_const char *)"file error", /* Z_ERRNO (-1) */
55 (z_const char *)"stream error", /* Z_STREAM_ERROR (-2) */
56 (z_const char *)"data error", /* Z_DATA_ERROR (-3) */
57 (z_const char *)"insufficient memory", /* Z_MEM_ERROR (-4) */
58 (z_const char *)"buffer error", /* Z_BUF_ERROR (-5) */
59 (z_const char *)"incompatible version",/* Z_VERSION_ERROR (-6) */
60 (z_const char *)""
63 #define ERR_MSG(err) z_errmsg[Z_NEED_DICT-(err)]
65 #define ERR_RETURN(strm,err) \
66 return (strm->msg = ERR_MSG(err), (err))
67 /* To be used only when the state is known to be valid */
69 #define STORED_BLOCK 0
70 #define STATIC_TREES 1
71 #define DYN_TREES 2
72 /* The three kinds of block type */
74 #define MIN_MATCH 3
75 #define MAX_MATCH 258
76 /* The minimum and maximum match lengths */
78 #define PRESET_DICT 0x20 /* preset dictionary flag in zlib header */
/* --- Adler-32 checksum parameters and unrolling helpers --- */
80 #define BASE 65521U /* largest prime smaller than 65536 */
81 #define NMAX 5552
82 /* NMAX is the largest n such that 255n(n+1)/2 + (n+1)(BASE-1) <= 2^32-1 */
84 #define DO1(buf,i) {adler += (buf)[i]; sum2 += adler;}
85 #define DO2(buf,i) DO1(buf,i); DO1(buf,i+1);
86 #define DO4(buf,i) DO2(buf,i); DO2(buf,i+2);
87 #define DO8(buf,i) DO4(buf,i); DO4(buf,i+4);
88 #define DO16(buf) DO8(buf,0); DO8(buf,8);
/* Without long multiply/divide tricks all three reductions are plain mods. */
90 #define MOD(a) a %= BASE
91 #define MOD28(a) a %= BASE
92 #define MOD63(a) a %= BASE
94 static uLong adler32( uLong adler, const Bytef *buf, uInt len )
96 unsigned long sum2;
97 unsigned n;
99 /* split Adler-32 into component sums */
100 sum2 = (adler >> 16) & 0xffff;
101 adler &= 0xffff;
103 /* in case user likes doing a byte at a time, keep it fast */
104 if (len == 1) {
105 adler += buf[0];
106 if (adler >= BASE)
107 adler -= BASE;
108 sum2 += adler;
109 if (sum2 >= BASE)
110 sum2 -= BASE;
111 return adler | (sum2 << 16);
114 /* initial Adler-32 value (deferred check for len == 1 speed) */
115 if (buf == Z_NULL)
116 return 1L;
118 /* in case short lengths are provided, keep it somewhat fast */
119 if (len < 16) {
120 while (len--) {
121 adler += *buf++;
122 sum2 += adler;
124 if (adler >= BASE)
125 adler -= BASE;
126 MOD28(sum2); /* only added so many BASE's */
127 return adler | (sum2 << 16);
130 /* do length NMAX blocks -- requires just one modulo operation */
131 while (len >= NMAX) {
132 len -= NMAX;
133 n = NMAX / 16; /* NMAX is divisible by 16 */
134 do {
135 DO16(buf); /* 16 sums unrolled */
136 buf += 16;
137 } while (--n);
138 MOD(adler);
139 MOD(sum2);
142 /* do remaining bytes (less than NMAX, still just one modulo) */
143 if (len) { /* avoid modulos if none remaining */
144 while (len >= 16) {
145 len -= 16;
146 DO16(buf);
147 buf += 16;
149 while (len--) {
150 adler += *buf++;
151 sum2 += adler;
153 MOD(adler);
154 MOD(sum2);
157 /* return recombined sums */
158 return adler | (sum2 << 16);
161 /* ===========================================================================
162 * Internal compression state. */
165 #define LENGTH_CODES 29
166 /* number of length codes, not counting the special END_BLOCK code */
168 #define LITERALS 256
169 /* number of literal bytes 0..255 */
171 #define L_CODES (LITERALS+1+LENGTH_CODES)
172 /* number of Literal or Length codes, including the END_BLOCK code */
174 #define D_CODES 30
175 /* number of distance codes */
177 #define BL_CODES 19
178 /* number of codes used to transfer the bit lengths */
180 #define HEAP_SIZE (2*L_CODES+1)
181 /* maximum heap size */
183 #define MAX_BITS 15
184 /* All codes must not exceed MAX_BITS bits */
186 #define Buf_size 16
187 /* size of bit buffer in bi_buf */
/* Stream status values stored in deflate_state.status. */
189 #define INIT_STATE 42 /* zlib header -> BUSY_STATE */
190 #ifdef GZIP
191 # define GZIP_STATE 57 /* gzip header -> BUSY_STATE | EXTRA_STATE */
192 #endif
193 #define EXTRA_STATE 69 /* gzip extra block -> NAME_STATE */
194 #define NAME_STATE 73 /* gzip file name -> COMMENT_STATE */
195 #define COMMENT_STATE 91 /* gzip comment -> HCRC_STATE */
196 #define HCRC_STATE 103 /* gzip header CRC -> BUSY_STATE */
197 #define BUSY_STATE 113 /* deflate -> FINISH_STATE */
198 #define FINISH_STATE 666 /* stream complete */
199 /* Stream status */
202 /* Data structure describing a single value and its code string. */
203 typedef struct ct_data_s {
204 union {
205 ush freq; /* frequency count */
206 ush code; /* bit string */
207 } fc;
208 union {
209 ush dad; /* father node in Huffman tree */
210 ush len; /* length of bit string */
211 } dl;
212 } FAR ct_data;
/* Shorthand accessors for the two unions above. */
214 #define Freq fc.freq
215 #define Code fc.code
216 #define Dad dl.dad
217 #define Len dl.len
219 typedef struct static_tree_desc_s static_tree_desc;
/* A dynamic tree together with the static tree it is checked against. */
221 typedef struct tree_desc_s {
222 ct_data *dyn_tree; /* the dynamic tree */
223 int max_code; /* largest code with non zero frequency */
224 const static_tree_desc *stat_desc; /* the corresponding static tree */
225 } FAR tree_desc;
227 typedef ush Pos;
228 typedef Pos FAR Posf;
229 typedef unsigned IPos;
231 /* A Pos is an index in the character window. We use short instead of int to
232 * save space in the various tables. IPos is used only for parameter passing. */
/* Complete per-stream compression state shared by deflate.c and trees.c. */
235 typedef struct internal_state {
236 z_streamp strm; /* pointer back to this zlib stream */
237 int status; /* as the name implies */
238 Bytef *pending_buf; /* output still pending */
239 ulg pending_buf_size; /* size of pending_buf */
240 Bytef *pending_out; /* next pending byte to output to the stream */
241 ulg pending; /* nb of bytes in the pending buffer */
242 int wrap; /* bit 0 true for zlib, bit 1 true for gzip */
243 gz_headerp gzhead; /* gzip header information to write */
244 ulg gzindex; /* where in extra, name, or comment */
245 Byte method; /* can only be DEFLATED */
246 int last_flush; /* value of flush param for previous deflate call */
248 /* used by deflate.c: */
250 uInt w_size; /* LZ77 window size (32K by default) */
251 uInt w_bits; /* log2(w_size) (8..16) */
252 uInt w_mask; /* w_size - 1 */
254 Bytef *window;
255 /* Sliding window. Input bytes are read into the second half of the window,
256 * and move to the first half later to keep a dictionary of at least wSize
257 * bytes. With this organization, matches are limited to a distance of
258 * wSize-MAX_MATCH bytes, but this ensures that IO is always
259 * performed with a length multiple of the block size. Also, it limits
260 * the window size to 64K, which is quite useful on MSDOS.
261 * To do: use the user input buffer as sliding window. */
264 ulg window_size;
265 /* Actual size of window: 2*wSize, except when the user input buffer
266 * is directly used as sliding window. */
269 Posf *prev;
270 /* Link to older string with same hash index. To limit the size of this
271 * array to 64K, this link is maintained only for the last 32K strings.
272 * An index in this array is thus a window index modulo 32K. */
275 Posf *head; /* Heads of the hash chains or NIL. */
277 uInt ins_h; /* hash index of string to be inserted */
278 uInt hash_size; /* number of elements in hash table */
279 uInt hash_bits; /* log2(hash_size) */
280 uInt hash_mask; /* hash_size-1 */
282 uInt hash_shift;
283 /* Number of bits by which ins_h must be shifted at each input
284 * step. It must be such that after MIN_MATCH steps, the oldest
285 * byte no longer takes part in the hash key, that is:
286 * hash_shift * MIN_MATCH >= hash_bits */
289 long block_start;
290 /* Window position at the beginning of the current output block. Gets
291 * negative when the window is moved backwards. */
294 uInt match_length; /* length of best match */
295 IPos prev_match; /* previous match */
296 int match_available; /* set if previous match exists */
297 uInt strstart; /* start of string to insert */
298 uInt match_start; /* start of matching string */
299 uInt lookahead; /* number of valid bytes ahead in window */
301 uInt prev_length;
302 /* Length of the best match at previous step. Matches not greater than this
303 * are discarded. This is used in the lazy match evaluation. */
306 uInt max_chain_length;
307 /* To speed up deflation, hash chains are never searched beyond this
308 * length. A higher limit improves compression ratio but degrades the
309 * speed. */
312 uInt max_lazy_match;
313 /* Attempt to find a better match only when the current match is strictly
314 * smaller than this value. This mechanism is used only for compression
315 * levels >= 4. */
317 # define max_insert_length max_lazy_match
318 /* Insert new strings in the hash table only if the match length is not
319 * greater than this length. This saves time but degrades compression.
320 * max_insert_length is used only for compression levels <= 3. */
323 int level; /* compression level (1..9) */
324 int strategy; /* favor or force Huffman coding*/
326 uInt good_match;
327 /* Use a faster search when the previous match is longer than this */
329 int nice_match; /* Stop searching when current match exceeds this */
331 /* used by trees.c: */
332 /* Didn't use ct_data typedef below to suppress compiler warning */
333 struct ct_data_s dyn_ltree[HEAP_SIZE]; /* literal and length tree */
334 struct ct_data_s dyn_dtree[2*D_CODES+1]; /* distance tree */
335 struct ct_data_s bl_tree[2*BL_CODES+1]; /* Huffman tree for bit lengths */
337 struct tree_desc_s l_desc; /* desc. for literal tree */
338 struct tree_desc_s d_desc; /* desc. for distance tree */
339 struct tree_desc_s bl_desc; /* desc. for bit length tree */
341 ush bl_count[MAX_BITS+1];
342 /* number of codes at each bit length for an optimal tree */
344 int heap[2*L_CODES+1]; /* heap used to build the Huffman trees */
345 int heap_len; /* number of elements in the heap */
346 int heap_max; /* element of largest frequency */
347 /* The sons of heap[n] are heap[2*n] and heap[2*n+1]. heap[0] is not used.
348 * The same heap array is used to build all trees. */
351 uch depth[2*L_CODES+1];
352 /* Depth of each subtree used as tie breaker for trees of equal frequency */
355 uchf *l_buf; /* buffer for literals or lengths */
357 uInt lit_bufsize;
358 /* Size of match buffer for literals/lengths. There are 4 reasons for
359 * limiting lit_bufsize to 64K:
360 * - frequencies can be kept in 16 bit counters
361 * - if compression is not successful for the first block, all input
362 * data is still in the window so we can still emit a stored block even
363 * when input comes from standard input. (This can also be done for
364 * all blocks if lit_bufsize is not greater than 32K.)
365 * - if compression is not successful for a file smaller than 64K, we can
366 * even emit a stored file instead of a stored block (saving 5 bytes).
367 * This is applicable only for zip (not gzip or zlib).
368 * - creating new Huffman trees less frequently may not provide fast
369 * adaptation to changes in the input data statistics. (Take for
370 * example a binary file with poorly compressible code followed by
371 * a highly compressible string table.) Smaller buffer sizes give
372 * fast adaptation but have of course the overhead of transmitting
373 * trees more frequently.
374 * - I can't count above 4 */
377 uInt last_lit; /* running index in l_buf */
379 ushf *d_buf;
380 /* Buffer for distances. To simplify the code, d_buf and l_buf have
381 * the same number of elements. To use different lengths, an extra flag
382 * array would be necessary. */
385 ulg opt_len; /* bit length of current block with optimal trees */
386 ulg static_len; /* bit length of current block with static trees */
387 uInt matches; /* number of string matches in current block */
388 uInt insert; /* bytes at end of window left to insert */
390 #ifdef ZLIB_DEBUG
391 ulg compressed_len; /* total bit length of compressed file mod 2^32 */
392 ulg bits_sent; /* bit length of compressed data sent mod 2^32 */
393 #endif
395 ush bi_buf;
396 /* Output buffer. bits are inserted starting at the bottom (least
397 * significant bits). */
399 int bi_valid;
400 /* Number of valid bits in bi_buf. All bits above the last valid bit
401 * are always zero. */
404 ulg high_water;
405 /* High water mark offset in window for initialized bytes -- bytes above
406 * this are set to zero in order to avoid memory check warnings when
407 * longest match routines access bytes past the input. This is then
408 * updated to the new high water mark. */
411 } FAR deflate_state;
413 /* Output a byte on the stream.
414 * IN assertion: there is enough room in pending_buf. */
416 #define put_byte(s, c) {s->pending_buf[s->pending++] = (Bytef)(c);}
419 #define MIN_LOOKAHEAD (MAX_MATCH+MIN_MATCH+1)
420 /* Minimum amount of lookahead, except at the end of the input file.
421 * See deflate.c for comments about the MIN_MATCH+1. */
424 #define MAX_DIST(s) ((s)->w_size-MIN_LOOKAHEAD)
425 /* In order to simplify the code, particularly on 16 bit machines, match
426 * distances are limited to MAX_DIST instead of WSIZE. */
429 #define WIN_INIT MAX_MATCH
430 /* Number of bytes after end of data in window to initialize in order to avoid
431 memory checker errors from longest match routines */
434 #define MAX_BL_BITS 7
435 /* Bit length codes must not exceed MAX_BL_BITS bits */
437 #define END_BLOCK 256
438 /* end of block literal code */
440 #define REP_3_6 16
441 /* repeat previous bit length 3-6 times (2 bits of repeat count) */
443 #define REPZ_3_10 17
444 /* repeat a zero length 3-10 times (3 bits of repeat count) */
446 #define REPZ_11_138 18
447 /* repeat a zero length 11-138 times (7 bits of repeat count) */
/* Per-code extra-bit counts from the DEFLATE specification (RFC 1951). */
449 static const int extra_lbits[LENGTH_CODES] /* extra bits for each length code */
450 = {0,0,0,0,0,0,0,0,1,1,1,1,2,2,2,2,3,3,3,3,4,4,4,4,5,5,5,5,0};
452 static const int extra_dbits[D_CODES] /* extra bits for each distance code */
453 = {0,0,0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7,8,8,9,9,10,10,11,11,12,12,13,13};
455 static const int extra_blbits[BL_CODES]/* extra bits for each bit length code */
456 = {0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,3,7};
458 static const uch bl_order[BL_CODES]
459 = {16,17,18,0,8,7,9,6,10,5,11,4,12,3,13,2,14,1,15};
460 /* The lengths of the bit length codes are sent in order of decreasing
461 * probability, to avoid transmitting the lengths for unused bit length codes. */
464 /* ===========================================================================
465 * Local data. These are initialized only once. */
468 #define DIST_CODE_LEN 512 /* see definition of array dist_code below */
/* The static literal/length tree defined by RFC 1951: entries are
 * {bit-reversed code value},{code length}.  Codes 0..143 and 280..287 are
 * 8 bits, 144..255 are 9 bits, 256..279 are 7 bits.  The two extra entries
 * beyond L_CODES make the array size a convenience for trees.c. */
470 static const ct_data static_ltree[L_CODES+2] = {
471 {{ 12},{ 8}}, {{140},{ 8}}, {{ 76},{ 8}}, {{204},{ 8}}, {{ 44},{ 8}},
472 {{172},{ 8}}, {{108},{ 8}}, {{236},{ 8}}, {{ 28},{ 8}}, {{156},{ 8}},
473 {{ 92},{ 8}}, {{220},{ 8}}, {{ 60},{ 8}}, {{188},{ 8}}, {{124},{ 8}},
474 {{252},{ 8}}, {{ 2},{ 8}}, {{130},{ 8}}, {{ 66},{ 8}}, {{194},{ 8}},
475 {{ 34},{ 8}}, {{162},{ 8}}, {{ 98},{ 8}}, {{226},{ 8}}, {{ 18},{ 8}},
476 {{146},{ 8}}, {{ 82},{ 8}}, {{210},{ 8}}, {{ 50},{ 8}}, {{178},{ 8}},
477 {{114},{ 8}}, {{242},{ 8}}, {{ 10},{ 8}}, {{138},{ 8}}, {{ 74},{ 8}},
478 {{202},{ 8}}, {{ 42},{ 8}}, {{170},{ 8}}, {{106},{ 8}}, {{234},{ 8}},
479 {{ 26},{ 8}}, {{154},{ 8}}, {{ 90},{ 8}}, {{218},{ 8}}, {{ 58},{ 8}},
480 {{186},{ 8}}, {{122},{ 8}}, {{250},{ 8}}, {{ 6},{ 8}}, {{134},{ 8}},
481 {{ 70},{ 8}}, {{198},{ 8}}, {{ 38},{ 8}}, {{166},{ 8}}, {{102},{ 8}},
482 {{230},{ 8}}, {{ 22},{ 8}}, {{150},{ 8}}, {{ 86},{ 8}}, {{214},{ 8}},
483 {{ 54},{ 8}}, {{182},{ 8}}, {{118},{ 8}}, {{246},{ 8}}, {{ 14},{ 8}},
484 {{142},{ 8}}, {{ 78},{ 8}}, {{206},{ 8}}, {{ 46},{ 8}}, {{174},{ 8}},
485 {{110},{ 8}}, {{238},{ 8}}, {{ 30},{ 8}}, {{158},{ 8}}, {{ 94},{ 8}},
486 {{222},{ 8}}, {{ 62},{ 8}}, {{190},{ 8}}, {{126},{ 8}}, {{254},{ 8}},
487 {{ 1},{ 8}}, {{129},{ 8}}, {{ 65},{ 8}}, {{193},{ 8}}, {{ 33},{ 8}},
488 {{161},{ 8}}, {{ 97},{ 8}}, {{225},{ 8}}, {{ 17},{ 8}}, {{145},{ 8}},
489 {{ 81},{ 8}}, {{209},{ 8}}, {{ 49},{ 8}}, {{177},{ 8}}, {{113},{ 8}},
490 {{241},{ 8}}, {{ 9},{ 8}}, {{137},{ 8}}, {{ 73},{ 8}}, {{201},{ 8}},
491 {{ 41},{ 8}}, {{169},{ 8}}, {{105},{ 8}}, {{233},{ 8}}, {{ 25},{ 8}},
492 {{153},{ 8}}, {{ 89},{ 8}}, {{217},{ 8}}, {{ 57},{ 8}}, {{185},{ 8}},
493 {{121},{ 8}}, {{249},{ 8}}, {{ 5},{ 8}}, {{133},{ 8}}, {{ 69},{ 8}},
494 {{197},{ 8}}, {{ 37},{ 8}}, {{165},{ 8}}, {{101},{ 8}}, {{229},{ 8}},
495 {{ 21},{ 8}}, {{149},{ 8}}, {{ 85},{ 8}}, {{213},{ 8}}, {{ 53},{ 8}},
496 {{181},{ 8}}, {{117},{ 8}}, {{245},{ 8}}, {{ 13},{ 8}}, {{141},{ 8}},
497 {{ 77},{ 8}}, {{205},{ 8}}, {{ 45},{ 8}}, {{173},{ 8}}, {{109},{ 8}},
498 {{237},{ 8}}, {{ 29},{ 8}}, {{157},{ 8}}, {{ 93},{ 8}}, {{221},{ 8}},
499 {{ 61},{ 8}}, {{189},{ 8}}, {{125},{ 8}}, {{253},{ 8}}, {{ 19},{ 9}},
500 {{275},{ 9}}, {{147},{ 9}}, {{403},{ 9}}, {{ 83},{ 9}}, {{339},{ 9}},
501 {{211},{ 9}}, {{467},{ 9}}, {{ 51},{ 9}}, {{307},{ 9}}, {{179},{ 9}},
502 {{435},{ 9}}, {{115},{ 9}}, {{371},{ 9}}, {{243},{ 9}}, {{499},{ 9}},
503 {{ 11},{ 9}}, {{267},{ 9}}, {{139},{ 9}}, {{395},{ 9}}, {{ 75},{ 9}},
504 {{331},{ 9}}, {{203},{ 9}}, {{459},{ 9}}, {{ 43},{ 9}}, {{299},{ 9}},
505 {{171},{ 9}}, {{427},{ 9}}, {{107},{ 9}}, {{363},{ 9}}, {{235},{ 9}},
506 {{491},{ 9}}, {{ 27},{ 9}}, {{283},{ 9}}, {{155},{ 9}}, {{411},{ 9}},
507 {{ 91},{ 9}}, {{347},{ 9}}, {{219},{ 9}}, {{475},{ 9}}, {{ 59},{ 9}},
508 {{315},{ 9}}, {{187},{ 9}}, {{443},{ 9}}, {{123},{ 9}}, {{379},{ 9}},
509 {{251},{ 9}}, {{507},{ 9}}, {{ 7},{ 9}}, {{263},{ 9}}, {{135},{ 9}},
510 {{391},{ 9}}, {{ 71},{ 9}}, {{327},{ 9}}, {{199},{ 9}}, {{455},{ 9}},
511 {{ 39},{ 9}}, {{295},{ 9}}, {{167},{ 9}}, {{423},{ 9}}, {{103},{ 9}},
512 {{359},{ 9}}, {{231},{ 9}}, {{487},{ 9}}, {{ 23},{ 9}}, {{279},{ 9}},
513 {{151},{ 9}}, {{407},{ 9}}, {{ 87},{ 9}}, {{343},{ 9}}, {{215},{ 9}},
514 {{471},{ 9}}, {{ 55},{ 9}}, {{311},{ 9}}, {{183},{ 9}}, {{439},{ 9}},
515 {{119},{ 9}}, {{375},{ 9}}, {{247},{ 9}}, {{503},{ 9}}, {{ 15},{ 9}},
516 {{271},{ 9}}, {{143},{ 9}}, {{399},{ 9}}, {{ 79},{ 9}}, {{335},{ 9}},
517 {{207},{ 9}}, {{463},{ 9}}, {{ 47},{ 9}}, {{303},{ 9}}, {{175},{ 9}},
518 {{431},{ 9}}, {{111},{ 9}}, {{367},{ 9}}, {{239},{ 9}}, {{495},{ 9}},
519 {{ 31},{ 9}}, {{287},{ 9}}, {{159},{ 9}}, {{415},{ 9}}, {{ 95},{ 9}},
520 {{351},{ 9}}, {{223},{ 9}}, {{479},{ 9}}, {{ 63},{ 9}}, {{319},{ 9}},
521 {{191},{ 9}}, {{447},{ 9}}, {{127},{ 9}}, {{383},{ 9}}, {{255},{ 9}},
522 {{511},{ 9}}, {{ 0},{ 7}}, {{ 64},{ 7}}, {{ 32},{ 7}}, {{ 96},{ 7}},
523 {{ 16},{ 7}}, {{ 80},{ 7}}, {{ 48},{ 7}}, {{112},{ 7}}, {{ 8},{ 7}},
524 {{ 72},{ 7}}, {{ 40},{ 7}}, {{104},{ 7}}, {{ 24},{ 7}}, {{ 88},{ 7}},
525 {{ 56},{ 7}}, {{120},{ 7}}, {{ 4},{ 7}}, {{ 68},{ 7}}, {{ 36},{ 7}},
526 {{100},{ 7}}, {{ 20},{ 7}}, {{ 84},{ 7}}, {{ 52},{ 7}}, {{116},{ 7}},
527 {{ 3},{ 8}}, {{131},{ 8}}, {{ 67},{ 8}}, {{195},{ 8}}, {{ 35},{ 8}},
528 {{163},{ 8}}, {{ 99},{ 8}}, {{227},{ 8}}
/* The static distance tree: all 30 distance codes are 5 bits long, stored
 * bit-reversed as required for LSB-first output. */
531 static const ct_data static_dtree[D_CODES] = {
532 {{ 0},{ 5}}, {{16},{ 5}}, {{ 8},{ 5}}, {{24},{ 5}}, {{ 4},{ 5}},
533 {{20},{ 5}}, {{12},{ 5}}, {{28},{ 5}}, {{ 2},{ 5}}, {{18},{ 5}},
534 {{10},{ 5}}, {{26},{ 5}}, {{ 6},{ 5}}, {{22},{ 5}}, {{14},{ 5}},
535 {{30},{ 5}}, {{ 1},{ 5}}, {{17},{ 5}}, {{ 9},{ 5}}, {{25},{ 5}},
536 {{ 5},{ 5}}, {{21},{ 5}}, {{13},{ 5}}, {{29},{ 5}}, {{ 3},{ 5}},
537 {{19},{ 5}}, {{11},{ 5}}, {{27},{ 5}}, {{ 7},{ 5}}, {{23},{ 5}}
/* Distance codes. The first 256 entries map (distance - 1) directly to a
 * code; the last 256 entries map (distance - 1) >> 7 for distances > 256
 * (see the d_code macro).  Entries 256 and 257 are never used. */
540 static const uch _dist_code[DIST_CODE_LEN] = {
541 0, 1, 2, 3, 4, 4, 5, 5, 6, 6, 6, 6, 7, 7, 7, 7, 8, 8, 8, 8,
542 8, 8, 8, 8, 9, 9, 9, 9, 9, 9, 9, 9, 10, 10, 10, 10, 10, 10, 10, 10,
543 10, 10, 10, 10, 10, 10, 10, 10, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11,
544 11, 11, 11, 11, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12,
545 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 13, 13, 13, 13,
546 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13,
547 13, 13, 13, 13, 13, 13, 13, 13, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14,
548 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14,
549 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14,
550 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 15, 15, 15, 15, 15, 15, 15, 15,
551 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15,
552 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15,
553 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 0, 0, 16, 17,
554 18, 18, 19, 19, 20, 20, 20, 20, 21, 21, 21, 21, 22, 22, 22, 22, 22, 22, 22, 22,
555 23, 23, 23, 23, 23, 23, 23, 23, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24,
556 24, 24, 24, 24, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25,
557 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26,
558 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 27, 27, 27, 27, 27, 27, 27, 27,
559 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27,
560 27, 27, 27, 27, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28,
561 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28,
562 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28,
563 28, 28, 28, 28, 28, 28, 28, 28, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29,
564 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29,
565 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29,
566 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29
/* Length codes: maps (match length - MIN_MATCH), i.e. 0..255, to one of the
 * 29 length codes (offset by LITERALS+1 when emitted into the l-tree). */
569 static const uch _length_code[MAX_MATCH-MIN_MATCH+1]= {
570 0, 1, 2, 3, 4, 5, 6, 7, 8, 8, 9, 9, 10, 10, 11, 11, 12, 12, 12, 12,
571 13, 13, 13, 13, 14, 14, 14, 14, 15, 15, 15, 15, 16, 16, 16, 16, 16, 16, 16, 16,
572 17, 17, 17, 17, 17, 17, 17, 17, 18, 18, 18, 18, 18, 18, 18, 18, 19, 19, 19, 19,
573 19, 19, 19, 19, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20,
574 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 22, 22, 22, 22,
575 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 23, 23, 23, 23, 23, 23, 23, 23,
576 23, 23, 23, 23, 23, 23, 23, 23, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24,
577 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24,
578 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25,
579 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 26, 26, 26, 26, 26, 26, 26, 26,
580 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26,
581 26, 26, 26, 26, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27,
582 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 28
/* First normalized length for each length code (length - MIN_MATCH). */
585 static const int base_length[LENGTH_CODES] = {
586 0, 1, 2, 3, 4, 5, 6, 7, 8, 10, 12, 14, 16, 20, 24, 28, 32, 40, 48, 56,
587 64, 80, 96, 112, 128, 160, 192, 224, 0
/* First normalized distance for each distance code (distance - 1). */
590 static const int base_dist[D_CODES] = {
591 0, 1, 2, 3, 4, 6, 8, 12, 16, 24,
592 32, 48, 64, 96, 128, 192, 256, 384, 512, 768,
593 1024, 1536, 2048, 3072, 4096, 6144, 8192, 12288, 16384, 24576
/* Description of a static tree, paired with a dynamic tree in tree_desc_s. */
597 struct static_tree_desc_s {
598 const ct_data *static_tree; /* static tree or NULL */
599 const intf *extra_bits; /* extra bits for each code or NULL */
600 int extra_base; /* base index for extra_bits */
601 int elems; /* max number of elements in the tree */
602 int max_length; /* max bit length for the codes */
/* The three static tree descriptors: literal/length, distance, bit length. */
605 static const static_tree_desc static_l_desc =
606 {static_ltree, extra_lbits, LITERALS+1, L_CODES, MAX_BITS};
608 static const static_tree_desc static_d_desc =
609 {static_dtree, extra_dbits, 0, D_CODES, MAX_BITS};
611 static const static_tree_desc static_bl_desc =
612 {(const ct_data *)0, extra_blbits, 0, BL_CODES, MAX_BL_BITS};
/* --- Code-emission macros shared by the tree and block routines. --- */
614 #define send_code(s, c, tree) send_bits(s, tree[c].Code, tree[c].Len)
615 /* Send a code of the given tree. c and tree must not have side effects */
617 #define d_code(dist) \
618 ((dist) < 256 ? _dist_code[dist] : _dist_code[256+((dist)>>7)])
619 /* Mapping from a distance to a distance code. dist is the distance - 1 and
620 * must not have side effects. _dist_code[256] and _dist_code[257] are never
621 * used. */
624 # define _tr_tally_lit(s, c, flush) \
625 { uch cc = (c); \
626 s->d_buf[s->last_lit] = 0; \
627 s->l_buf[s->last_lit++] = cc; \
628 s->dyn_ltree[cc].Freq++; \
629 flush = (s->last_lit == s->lit_bufsize-1); \
631 # define _tr_tally_dist(s, distance, length, flush) \
632 { uch len = (uch)(length); \
633 ush dist = (ush)(distance); \
634 s->d_buf[s->last_lit] = dist; \
635 s->l_buf[s->last_lit++] = len; \
636 dist--; \
637 s->dyn_ltree[_length_code[len]+LITERALS+1].Freq++; \
638 s->dyn_dtree[d_code(dist)].Freq++; \
639 flush = (s->last_lit == s->lit_bufsize-1); \
643 /* ===========================================================================
644 * Output a short LSB first on the stream.
645 * IN assertion: there is enough room in pendingBuf. */
647 #define put_short(s, w) { \
648 put_byte(s, (uch)((w) & 0xff)); \
649 put_byte(s, (uch)((ush)(w) >> 8)); \
652 #define send_bits(s, value, length) \
653 { int len = length;\
654 if (s->bi_valid > (int)Buf_size - len) {\
655 int val = (int)value;\
656 s->bi_buf |= (ush)val << s->bi_valid;\
657 put_short(s, s->bi_buf);\
658 s->bi_buf = (ush)val >> (Buf_size - s->bi_valid);\
659 s->bi_valid += len - Buf_size;\
660 } else {\
661 s->bi_buf |= (ush)(value) << s->bi_valid;\
662 s->bi_valid += len;\
667 /* ===========================================================================
668 * Send the block data compressed using the given Huffman trees
670 static void compress_block( deflate_state *s, const ct_data *ltree, const ct_data *dtree )
672 unsigned dist; /* distance of matched string */
673 int lc; /* match length or unmatched char (if dist == 0) */
674 unsigned lx = 0; /* running index in l_buf */
675 unsigned code; /* the code to send */
676 int extra; /* number of extra bits to send */
678 if (s->last_lit != 0) do {
679 dist = s->d_buf[lx];
680 lc = s->l_buf[lx++];
681 if (dist == 0) {
682 send_code(s, lc, ltree); /* send a literal byte */
683 Tracecv(isgraph(lc), (stderr," '%c' ", lc));
684 } else {
685 /* Here, lc is the match length - MIN_MATCH */
686 code = _length_code[lc];
687 send_code(s, code+LITERALS+1, ltree); /* send the length code */
688 extra = extra_lbits[code];
689 if (extra != 0) {
690 lc -= base_length[code];
691 send_bits(s, lc, extra); /* send the extra length bits */
693 dist--; /* dist is now the match distance - 1 */
694 code = d_code(dist);
695 Assert (code < D_CODES, "bad d_code");
697 send_code(s, code, dtree); /* send the distance code */
698 extra = extra_dbits[code];
699 if (extra != 0) {
700 dist -= (unsigned)base_dist[code];
701 send_bits(s, dist, extra); /* send the extra distance bits */
703 } /* literal or match pair ? */
705 /* Check that the overlay between pending_buf and d_buf+l_buf is ok: */
706 Assert((uInt)(s->pending) < s->lit_bufsize + 2*lx,
707 "pendingBuf overflow");
709 } while (lx < s->last_lit);
711 send_code(s, END_BLOCK, ltree);
714 /* ===========================================================================
715 * Check if the data type is TEXT or BINARY, using the following algorithm:
716 * - TEXT if the two conditions below are satisfied:
717 * a) There are no non-portable control characters belonging to the
718 * "black list" (0..6, 14..25, 28..31).
719 * b) There is at least one printable character belonging to the
720 * "white list" (9 {TAB}, 10 {LF}, 13 {CR}, 32..255).
721 * - BINARY otherwise.
722 * - The following partially-portable control characters form a
723 * "gray list" that is ignored in this detection algorithm:
724 * (7 {BEL}, 8 {BS}, 11 {VT}, 12 {FF}, 26 {SUB}, 27 {ESC}).
725 * IN assertion: the fields Freq of dyn_ltree are set.
727 static int detect_data_type( deflate_state *s )
729 /* black_mask is the bit mask of black-listed bytes
730 * set bits 0..6, 14..25, and 28..31
731 * 0xf3ffc07f = binary 11110011111111111100000001111111
733 unsigned long black_mask = 0xf3ffc07fUL;
734 int n;
736 /* Check for non-textual ("black-listed") bytes. */
737 for (n = 0; n <= 31; n++, black_mask >>= 1)
738 if ((black_mask & 1) && (s->dyn_ltree[n].Freq != 0))
739 return Z_BINARY;
741 /* Check for textual ("white-listed") bytes. */
742 if (s->dyn_ltree[9].Freq != 0 || s->dyn_ltree[10].Freq != 0
743 || s->dyn_ltree[13].Freq != 0)
744 return Z_TEXT;
745 for (n = 32; n < LITERALS; n++)
746 if (s->dyn_ltree[n].Freq != 0)
747 return Z_TEXT;
749 /* There are no "black-listed" or "white-listed" bytes:
750 * this stream either is empty or has tolerated ("gray-listed") bytes only.
752 return Z_BINARY;
/* ===========================================================================
 * Reverse the first len bits of a code, using straightforward code (a faster
 * method would use a table).
 * IN assertion: 1 <= len <= 15
 */
static unsigned bi_reverse( unsigned code, int len )
{
    unsigned reversed = 0;

    /* Peel bits off the low end of 'code' and push them onto 'reversed'. */
    while (len-- > 0) {
        reversed = (reversed << 1) | (code & 1);
        code >>= 1;
    }
    return reversed;
}
770 /* ===========================================================================
771 * Flush the bit buffer, keeping at most 7 bits in it.
773 static void bi_flush( deflate_state *s )
775 if (s->bi_valid == 16) {
776 put_short(s, s->bi_buf);
777 s->bi_buf = 0;
778 s->bi_valid = 0;
779 } else if (s->bi_valid >= 8) {
780 put_byte(s, (Byte)s->bi_buf);
781 s->bi_buf >>= 8;
782 s->bi_valid -= 8;
786 /* ===========================================================================
787 * Flush the bit buffer and align the output on a byte boundary
789 static void bi_windup( deflate_state *s )
791 if (s->bi_valid > 8) {
792 put_short(s, s->bi_buf);
793 } else if (s->bi_valid > 0) {
794 put_byte(s, (Byte)s->bi_buf);
796 s->bi_buf = 0;
797 s->bi_valid = 0;
798 #ifdef ZLIB_DEBUG
799 s->bits_sent = (s->bits_sent+7) & ~7;
800 #endif
803 /* ===========================================================================
804 * Initialize a new block.
806 static void init_block( deflate_state *s )
808 int n; /* iterates over tree elements */
810 /* Initialize the trees. */
811 for (n = 0; n < L_CODES; n++) s->dyn_ltree[n].Freq = 0;
812 for (n = 0; n < D_CODES; n++) s->dyn_dtree[n].Freq = 0;
813 for (n = 0; n < BL_CODES; n++) s->bl_tree[n].Freq = 0;
815 s->dyn_ltree[END_BLOCK].Freq = 1;
816 s->opt_len = s->static_len = 0L;
817 s->last_lit = s->matches = 0;
820 /* ===========================================================================
821 * Initialize the tree data structures for a new zlib stream.
823 static void _tr_init( deflate_state *s )
825 s->l_desc.dyn_tree = s->dyn_ltree;
826 s->l_desc.stat_desc = &static_l_desc;
828 s->d_desc.dyn_tree = s->dyn_dtree;
829 s->d_desc.stat_desc = &static_d_desc;
831 s->bl_desc.dyn_tree = s->bl_tree;
832 s->bl_desc.stat_desc = &static_bl_desc;
834 s->bi_buf = 0;
835 s->bi_valid = 0;
836 #ifdef ZLIB_DEBUG
837 s->compressed_len = 0L;
838 s->bits_sent = 0L;
839 #endif
841 /* Initialize the first block of the first file: */
842 init_block(s);
845 #define SMALLEST 1
846 /* Index within the heap array of least frequent node in the Huffman tree */
849 /* ===========================================================================
850 * Remove the smallest element from the heap and recreate the heap with
851 * one less element. Updates heap and heap_len.
853 #define pqremove(s, tree, top) \
855 top = s->heap[SMALLEST]; \
856 s->heap[SMALLEST] = s->heap[s->heap_len--]; \
857 pqdownheap(s, tree, SMALLEST); \
860 /* ===========================================================================
861 * Compares to subtrees, using the tree depth as tie breaker when
862 * the subtrees have equal frequency. This minimizes the worst case length.
864 #define smaller(tree, n, m, depth) \
865 (tree[n].Freq < tree[m].Freq || \
866 (tree[n].Freq == tree[m].Freq && depth[n] <= depth[m]))
868 /* ===========================================================================
869 * Restore the heap property by moving down the tree starting at node k,
870 * exchanging a node with the smallest of its two sons if necessary, stopping
871 * when the heap property is re-established (each father smaller than its
872 * two sons).
874 static void pqdownheap( deflate_state *s, ct_data *tree, int k )
876 int v = s->heap[k];
877 int j = k << 1; /* left son of k */
878 while (j <= s->heap_len) {
879 /* Set j to the smallest of the two sons: */
880 if (j < s->heap_len &&
881 smaller(tree, s->heap[j+1], s->heap[j], s->depth)) {
882 j++;
884 /* Exit if v is smaller than both sons */
885 if (smaller(tree, v, s->heap[j], s->depth)) break;
887 /* Exchange v with the smallest son */
888 s->heap[k] = s->heap[j]; k = j;
890 /* And continue down the tree, setting j to the left son of k */
891 j <<= 1;
893 s->heap[k] = v;
896 /* ===========================================================================
897 * Compute the optimal bit lengths for a tree and update the total bit length
898 * for the current block.
899 * IN assertion: the fields freq and dad are set, heap[heap_max] and
900 * above are the tree nodes sorted by increasing frequency.
901 * OUT assertions: the field len is set to the optimal bit length, the
902 * array bl_count contains the frequencies for each bit length.
903 * The length opt_len is updated; static_len is also updated if stree is
904 * not null.
906 static void gen_bitlen( deflate_state *s, tree_desc *desc )
908 ct_data *tree = desc->dyn_tree;
909 int max_code = desc->max_code;
910 const ct_data *stree = desc->stat_desc->static_tree;
911 const intf *extra = desc->stat_desc->extra_bits;
912 int base = desc->stat_desc->extra_base;
913 int max_length = desc->stat_desc->max_length;
914 int h; /* heap index */
915 int n, m; /* iterate over the tree elements */
916 int bits; /* bit length */
917 int xbits; /* extra bits */
918 ush f; /* frequency */
919 int overflow = 0; /* number of elements with bit length too large */
921 for (bits = 0; bits <= MAX_BITS; bits++) s->bl_count[bits] = 0;
923 /* In a first pass, compute the optimal bit lengths (which may
924 * overflow in the case of the bit length tree).
926 tree[s->heap[s->heap_max]].Len = 0; /* root of the heap */
928 for (h = s->heap_max+1; h < HEAP_SIZE; h++) {
929 n = s->heap[h];
930 bits = tree[tree[n].Dad].Len + 1;
931 if (bits > max_length) bits = max_length, overflow++;
932 tree[n].Len = (ush)bits;
933 /* We overwrite tree[n].Dad which is no longer needed */
935 if (n > max_code) continue; /* not a leaf node */
937 s->bl_count[bits]++;
938 xbits = 0;
939 if (n >= base) xbits = extra[n-base];
940 f = tree[n].Freq;
941 s->opt_len += (ulg)f * (unsigned)(bits + xbits);
942 if (stree) s->static_len += (ulg)f * (unsigned)(stree[n].Len + xbits);
944 if (overflow == 0) return;
946 Tracev((stderr,"\nbit length overflow\n"));
947 /* This happens for example on obj2 and pic of the Calgary corpus */
949 /* Find the first bit length which could increase: */
950 do {
951 bits = max_length-1;
952 while (s->bl_count[bits] == 0) bits--;
953 s->bl_count[bits]--; /* move one leaf down the tree */
954 s->bl_count[bits+1] += 2; /* move one overflow item as its brother */
955 s->bl_count[max_length]--;
956 /* The brother of the overflow item also moves one step up,
957 * but this does not affect bl_count[max_length]
959 overflow -= 2;
960 } while (overflow > 0);
962 /* Now recompute all bit lengths, scanning in increasing frequency.
963 * h is still equal to HEAP_SIZE. (It is simpler to reconstruct all
964 * lengths instead of fixing only the wrong ones. This idea is taken
965 * from 'ar' written by Haruhiko Okumura.)
967 for (bits = max_length; bits != 0; bits--) {
968 n = s->bl_count[bits];
969 while (n != 0) {
970 m = s->heap[--h];
971 if (m > max_code) continue;
972 if ((unsigned) tree[m].Len != (unsigned) bits) {
973 Tracev((stderr,"code %d bits %d->%d\n", m, tree[m].Len, bits));
974 s->opt_len += ((ulg)bits - tree[m].Len) * tree[m].Freq;
975 tree[m].Len = (ush)bits;
977 n--;
982 /* ===========================================================================
983 * Generate the codes for a given tree and bit counts (which need not be
984 * optimal).
985 * IN assertion: the array bl_count contains the bit length statistics for
986 * the given tree and the field len is set for all tree elements.
987 * OUT assertion: the field code is set for all tree elements of non
988 * zero code length.
990 static void gen_codes( ct_data *tree, int max_code, ushf *bl_count )
992 ush next_code[MAX_BITS+1]; /* next code value for each bit length */
993 unsigned code = 0; /* running code value */
994 int bits; /* bit index */
995 int n; /* code index */
997 /* The distribution counts are first used to generate the code values
998 * without bit reversal.
1000 for (bits = 1; bits <= MAX_BITS; bits++) {
1001 code = (code + bl_count[bits-1]) << 1;
1002 next_code[bits] = (ush)code;
1004 /* Check that the bit counts in bl_count are consistent. The last code
1005 * must be all ones.
1007 Assert (code + bl_count[MAX_BITS]-1 == (1<<MAX_BITS)-1,
1008 "inconsistent bit counts");
1009 Tracev((stderr,"\ngen_codes: max_code %d ", max_code));
1011 for (n = 0; n <= max_code; n++) {
1012 int len = tree[n].Len;
1013 if (len == 0) continue;
1014 /* Now reverse the bits */
1015 tree[n].Code = (ush)bi_reverse(next_code[len]++, len);
1017 Tracecv(tree != static_ltree, (stderr,"\nn %3d %c l %2d c %4x (%x) ",
1018 n, (isgraph(n) ? n : ' '), len, tree[n].Code, next_code[len]-1));
1022 /* ===========================================================================
1023 * Construct one Huffman tree and assigns the code bit strings and lengths.
1024 * Update the total bit length for the current block.
1025 * IN assertion: the field freq is set for all tree elements.
1026 * OUT assertions: the fields len and code are set to the optimal bit length
1027 * and corresponding code. The length opt_len is updated; static_len is
1028 * also updated if stree is not null. The field max_code is set.
1030 static void build_tree( deflate_state *s, tree_desc *desc )
1032 ct_data *tree = desc->dyn_tree;
1033 const ct_data *stree = desc->stat_desc->static_tree;
1034 int elems = desc->stat_desc->elems;
1035 int n, m; /* iterate over heap elements */
1036 int max_code = -1; /* largest code with non zero frequency */
1037 int node; /* new node being created */
1039 /* Construct the initial heap, with least frequent element in
1040 * heap[SMALLEST]. The sons of heap[n] are heap[2*n] and heap[2*n+1].
1041 * heap[0] is not used.
1043 s->heap_len = 0, s->heap_max = HEAP_SIZE;
1045 for (n = 0; n < elems; n++) {
1046 if (tree[n].Freq != 0) {
1047 s->heap[++(s->heap_len)] = max_code = n;
1048 s->depth[n] = 0;
1049 } else {
1050 tree[n].Len = 0;
1054 /* The pkzip format requires that at least one distance code exists,
1055 * and that at least one bit should be sent even if there is only one
1056 * possible code. So to avoid special checks later on we force at least
1057 * two codes of non zero frequency.
1059 while (s->heap_len < 2) {
1060 node = s->heap[++(s->heap_len)] = (max_code < 2 ? ++max_code : 0);
1061 tree[node].Freq = 1;
1062 s->depth[node] = 0;
1063 s->opt_len--; if (stree) s->static_len -= stree[node].Len;
1064 /* node is 0 or 1 so it does not have extra bits */
1066 desc->max_code = max_code;
1068 /* The elements heap[heap_len/2+1 .. heap_len] are leaves of the tree,
1069 * establish sub-heaps of increasing lengths:
1071 for (n = s->heap_len/2; n >= 1; n--) pqdownheap(s, tree, n);
1073 /* Construct the Huffman tree by repeatedly combining the least two
1074 * frequent nodes.
1076 node = elems; /* next internal node of the tree */
1077 do {
1078 pqremove(s, tree, n); /* n = node of least frequency */
1079 m = s->heap[SMALLEST]; /* m = node of next least frequency */
1081 s->heap[--(s->heap_max)] = n; /* keep the nodes sorted by frequency */
1082 s->heap[--(s->heap_max)] = m;
1084 /* Create a new node father of n and m */
1085 tree[node].Freq = tree[n].Freq + tree[m].Freq;
1086 s->depth[node] = (uch)((s->depth[n] >= s->depth[m] ?
1087 s->depth[n] : s->depth[m]) + 1);
1088 tree[n].Dad = tree[m].Dad = (ush)node;
1089 #ifdef DUMP_BL_TREE
1090 if (tree == s->bl_tree) {
1091 fprintf(stderr,"\nnode %d(%d), sons %d(%d) %d(%d)",
1092 node, tree[node].Freq, n, tree[n].Freq, m, tree[m].Freq);
1094 #endif
1095 /* and insert the new node in the heap */
1096 s->heap[SMALLEST] = node++;
1097 pqdownheap(s, tree, SMALLEST);
1099 } while (s->heap_len >= 2);
1101 s->heap[--(s->heap_max)] = s->heap[SMALLEST];
1103 /* At this point, the fields freq and dad are set. We can now
1104 * generate the bit lengths.
1106 gen_bitlen(s, (tree_desc *)desc);
1108 /* The field len is now set, we can generate the bit codes */
1109 gen_codes ((ct_data *)tree, max_code, s->bl_count);
1112 /* ===========================================================================
1113 * Scan a literal or distance tree to determine the frequencies of the codes
1114 * in the bit length tree.
1116 static void scan_tree( deflate_state *s, ct_data *tree, int max_code )
1118 int n; /* iterates over all tree elements */
1119 int prevlen = -1; /* last emitted length */
1120 int curlen; /* length of current code */
1121 int nextlen = tree[0].Len; /* length of next code */
1122 int count = 0; /* repeat count of the current code */
1123 int max_count = 7; /* max repeat count */
1124 int min_count = 4; /* min repeat count */
1126 if (nextlen == 0) max_count = 138, min_count = 3;
1127 tree[max_code+1].Len = (ush)0xffff; /* guard */
1129 for (n = 0; n <= max_code; n++) {
1130 curlen = nextlen; nextlen = tree[n+1].Len;
1131 if (++count < max_count && curlen == nextlen) {
1132 continue;
1133 } else if (count < min_count) {
1134 s->bl_tree[curlen].Freq += count;
1135 } else if (curlen != 0) {
1136 if (curlen != prevlen) s->bl_tree[curlen].Freq++;
1137 s->bl_tree[REP_3_6].Freq++;
1138 } else if (count <= 10) {
1139 s->bl_tree[REPZ_3_10].Freq++;
1140 } else {
1141 s->bl_tree[REPZ_11_138].Freq++;
1143 count = 0; prevlen = curlen;
1144 if (nextlen == 0) {
1145 max_count = 138, min_count = 3;
1146 } else if (curlen == nextlen) {
1147 max_count = 6, min_count = 3;
1148 } else {
1149 max_count = 7, min_count = 4;
1154 /* ===========================================================================
1155 * Send a literal or distance tree in compressed form, using the codes in
1156 * bl_tree.
1158 static void send_tree( deflate_state *s, ct_data *tree, int max_code )
1160 int n; /* iterates over all tree elements */
1161 int prevlen = -1; /* last emitted length */
1162 int curlen; /* length of current code */
1163 int nextlen = tree[0].Len; /* length of next code */
1164 int count = 0; /* repeat count of the current code */
1165 int max_count = 7; /* max repeat count */
1166 int min_count = 4; /* min repeat count */
1168 /* tree[max_code+1].Len = -1; */ /* guard already set */
1169 if (nextlen == 0) max_count = 138, min_count = 3;
1171 for (n = 0; n <= max_code; n++) {
1172 curlen = nextlen; nextlen = tree[n+1].Len;
1173 if (++count < max_count && curlen == nextlen) {
1174 continue;
1175 } else if (count < min_count) {
1176 do { send_code(s, curlen, s->bl_tree); } while (--count != 0);
1178 } else if (curlen != 0) {
1179 if (curlen != prevlen) {
1180 send_code(s, curlen, s->bl_tree); count--;
1182 Assert(count >= 3 && count <= 6, " 3_6?");
1183 send_code(s, REP_3_6, s->bl_tree); send_bits(s, count-3, 2);
1185 } else if (count <= 10) {
1186 send_code(s, REPZ_3_10, s->bl_tree); send_bits(s, count-3, 3);
1188 } else {
1189 send_code(s, REPZ_11_138, s->bl_tree); send_bits(s, count-11, 7);
1191 count = 0; prevlen = curlen;
1192 if (nextlen == 0) {
1193 max_count = 138, min_count = 3;
1194 } else if (curlen == nextlen) {
1195 max_count = 6, min_count = 3;
1196 } else {
1197 max_count = 7, min_count = 4;
1202 /* ===========================================================================
1203 * Construct the Huffman tree for the bit lengths and return the index in
1204 * bl_order of the last bit length code to send.
1206 static int build_bl_tree( deflate_state *s )
1208 int max_blindex; /* index of last bit length code of non zero freq */
1210 /* Determine the bit length frequencies for literal and distance trees */
1211 scan_tree(s, (ct_data *)s->dyn_ltree, s->l_desc.max_code);
1212 scan_tree(s, (ct_data *)s->dyn_dtree, s->d_desc.max_code);
1214 /* Build the bit length tree: */
1215 build_tree(s, (tree_desc *)(&(s->bl_desc)));
1216 /* opt_len now includes the length of the tree representations, except
1217 * the lengths of the bit lengths codes and the 5+5+4 bits for the counts.
1220 /* Determine the number of bit length codes to send. The pkzip format
1221 * requires that at least 4 bit length codes be sent. (appnote.txt says
1222 * 3 but the actual value used is 4.)
1224 for (max_blindex = BL_CODES-1; max_blindex >= 3; max_blindex--) {
1225 if (s->bl_tree[bl_order[max_blindex]].Len != 0) break;
1227 /* Update opt_len to include the bit length tree and counts */
1228 s->opt_len += 3*((ulg)max_blindex+1) + 5+5+4;
1229 Tracev((stderr, "\ndyn trees: dyn %ld, stat %ld",
1230 s->opt_len, s->static_len));
1232 return max_blindex;
1235 /* ===========================================================================
1236 * Send the header for a block using dynamic Huffman trees: the counts, the
1237 * lengths of the bit length codes, the literal tree and the distance tree.
1238 * IN assertion: lcodes >= 257, dcodes >= 1, blcodes >= 4.
1240 static void send_all_trees( deflate_state *s, int lcodes, int dcodes, int blcodes )
1242 int rank; /* index in bl_order */
1244 Assert (lcodes >= 257 && dcodes >= 1 && blcodes >= 4, "not enough codes");
1245 Assert (lcodes <= L_CODES && dcodes <= D_CODES && blcodes <= BL_CODES,
1246 "too many codes");
1247 Tracev((stderr, "\nbl counts: "));
1248 send_bits(s, lcodes-257, 5); /* not +255 as stated in appnote.txt */
1249 send_bits(s, dcodes-1, 5);
1250 send_bits(s, blcodes-4, 4); /* not -3 as stated in appnote.txt */
1251 for (rank = 0; rank < blcodes; rank++) {
1252 Tracev((stderr, "\nbl code %2d ", bl_order[rank]));
1253 send_bits(s, s->bl_tree[bl_order[rank]].Len, 3);
1255 Tracev((stderr, "\nbl tree: sent %ld", s->bits_sent));
1257 send_tree(s, (ct_data *)s->dyn_ltree, lcodes-1); /* literal tree */
1258 Tracev((stderr, "\nlit tree: sent %ld", s->bits_sent));
1260 send_tree(s, (ct_data *)s->dyn_dtree, dcodes-1); /* distance tree */
1261 Tracev((stderr, "\ndist tree: sent %ld", s->bits_sent));
1264 /* ===========================================================================
1265 * Send a stored block
1267 static void _tr_stored_block( deflate_state *s, charf *buf, ulg stored_len, int last )
1269 send_bits(s, (STORED_BLOCK<<1)+last, 3); /* send block type */
1270 bi_windup(s); /* align on byte boundary */
1271 put_short(s, (ush)stored_len);
1272 put_short(s, (ush)~stored_len);
1273 zmemcpy(s->pending_buf + s->pending, (Bytef *)buf, stored_len);
1274 s->pending += stored_len;
1275 #ifdef ZLIB_DEBUG
1276 s->compressed_len = (s->compressed_len + 3 + 7) & (ulg)~7L;
1277 s->compressed_len += (stored_len + 4) << 3;
1278 s->bits_sent += 2*16;
1279 s->bits_sent += stored_len<<3;
1280 #endif
1283 /* ===========================================================================
1284 * Flush the bits in the bit buffer to pending output (leaves at most 7 bits)
1286 static void _tr_flush_bits( deflate_state *s )
1288 bi_flush(s);
1291 /* ===========================================================================
1292 * Send one empty static block to give enough lookahead for inflate.
1293 * This takes 10 bits, of which 7 may remain in the bit buffer.
1295 static void _tr_align( deflate_state *s )
1297 send_bits(s, STATIC_TREES<<1, 3);
1298 send_code(s, END_BLOCK, static_ltree);
1299 #ifdef ZLIB_DEBUG
1300 s->compressed_len += 10L; /* 3 for block type, 7 for EOB */
1301 #endif
1302 bi_flush(s);
1305 /* ===========================================================================
1306 * Determine the best encoding for the current block: dynamic trees, static
1307 * trees or store, and write out the encoded block.
1309 static void _tr_flush_block( deflate_state *s, charf *buf, ulg stored_len, int last )
1311 ulg opt_lenb, static_lenb; /* opt_len and static_len in bytes */
1312 int max_blindex = 0; /* index of last bit length code of non zero freq */
1314 /* Build the Huffman trees unless a stored block is forced */
1315 if (s->level > 0) {
1317 /* Check if the file is binary or text */
1318 if (s->strm->data_type == Z_UNKNOWN)
1319 s->strm->data_type = detect_data_type(s);
1321 /* Construct the literal and distance trees */
1322 build_tree(s, (tree_desc *)(&(s->l_desc)));
1323 Tracev((stderr, "\nlit data: dyn %ld, stat %ld", s->opt_len,
1324 s->static_len));
1326 build_tree(s, (tree_desc *)(&(s->d_desc)));
1327 Tracev((stderr, "\ndist data: dyn %ld, stat %ld", s->opt_len,
1328 s->static_len));
1329 /* At this point, opt_len and static_len are the total bit lengths of
1330 * the compressed block data, excluding the tree representations.
1333 /* Build the bit length tree for the above two trees, and get the index
1334 * in bl_order of the last bit length code to send.
1336 max_blindex = build_bl_tree(s);
1338 /* Determine the best encoding. Compute the block lengths in bytes. */
1339 opt_lenb = (s->opt_len+3+7)>>3;
1340 static_lenb = (s->static_len+3+7)>>3;
1342 Tracev((stderr, "\nopt %lu(%lu) stat %lu(%lu) stored %lu lit %u ",
1343 opt_lenb, s->opt_len, static_lenb, s->static_len, stored_len,
1344 s->last_lit));
1346 if (static_lenb <= opt_lenb) opt_lenb = static_lenb;
1348 } else {
1349 Assert(buf != (char*)0, "lost buf");
1350 opt_lenb = static_lenb = stored_len + 5; /* force a stored block */
1353 #ifdef FORCE_STORED
1354 if (buf != (char*)0) { /* force stored block */
1355 #else
1356 if (stored_len+4 <= opt_lenb && buf != (char*)0) {
1357 /* 4: two words for the lengths */
1358 #endif
1359 /* The test buf != NULL is only necessary if LIT_BUFSIZE > WSIZE.
1360 * Otherwise we can't have processed more than WSIZE input bytes since
1361 * the last block flush, because compression would have been
1362 * successful. If LIT_BUFSIZE <= WSIZE, it is never too late to
1363 * transform a block into a stored block.
1365 _tr_stored_block(s, buf, stored_len, last);
1367 #ifdef FORCE_STATIC
1368 } else if (static_lenb >= 0) { /* force static trees */
1369 #else
1370 } else if (s->strategy == Z_FIXED || static_lenb == opt_lenb) {
1371 #endif
1372 send_bits(s, (STATIC_TREES<<1)+last, 3);
1373 compress_block(s, (const ct_data *)static_ltree,
1374 (const ct_data *)static_dtree);
1375 #ifdef ZLIB_DEBUG
1376 s->compressed_len += 3 + s->static_len;
1377 #endif
1378 } else {
1379 send_bits(s, (DYN_TREES<<1)+last, 3);
1380 send_all_trees(s, s->l_desc.max_code+1, s->d_desc.max_code+1,
1381 max_blindex+1);
1382 compress_block(s, (const ct_data *)s->dyn_ltree,
1383 (const ct_data *)s->dyn_dtree);
1384 #ifdef ZLIB_DEBUG
1385 s->compressed_len += 3 + s->opt_len;
1386 #endif
1388 Assert (s->compressed_len == s->bits_sent, "bad compressed size");
1389 /* The above check is made mod 2^32, for files larger than 512 MB
1390 * and uLong implemented on 32 bits.
1392 init_block(s);
1394 if (last) {
1395 bi_windup(s);
1396 #ifdef ZLIB_DEBUG
1397 s->compressed_len += 7; /* align on byte boundary */
1398 #endif
1400 Tracev((stderr,"\ncomprlen %lu(%lu) ", s->compressed_len>>3,
1401 s->compressed_len-7*last));
/* Copyright string kept in the binary on purpose.
   If you use the zlib library in a product, an acknowledgment is welcome
   in the documentation of your product. If for some reason you cannot
   include such an acknowledgment, I would appreciate that you keep this
   copyright string in the executable of your product.
 */
const char deflate_copyright[] =
   " deflate 1.2.11 Copyright 1995-2017 Jean-loup Gailly and Mark Adler ";
1413 /* ===========================================================================
1414 * Function prototypes.
1416 typedef enum {
1417 need_more, /* block not completed, need more input or more output */
1418 block_done, /* block flush performed */
1419 finish_started, /* finish started, need only more output at next deflate */
1420 finish_done /* finish done, accept no more input or output */
1421 } block_state;
1423 typedef block_state (*compress_func)(deflate_state *s, int flush);
1424 /* Compression function. Returns the block state after the call. */
1426 static int deflateReset(z_streamp strm);
1427 static block_state deflate_stored(deflate_state *s, int flush);
1428 static block_state deflate_fast(deflate_state *s, int flush);
1429 static block_state deflate_slow(deflate_state *s, int flush);
1430 static block_state deflate_rle(deflate_state *s, int flush);
1431 static block_state deflate_huff(deflate_state *s, int flush);
1432 static void lm_init(deflate_state *s);
1434 /* ===========================================================================
1435 * Local data
1438 #define NIL 0
1439 /* Tail of hash chains */
1441 #ifndef TOO_FAR
1442 # define TOO_FAR 4096
1443 #endif
1444 /* Matches of length 3 are discarded if their distance exceeds TOO_FAR */
1446 /* Values for max_lazy_match, good_match and max_chain_length, depending on
1447 * the desired pack level (0..9). The values given below have been tuned to
1448 * exclude worst case performance for pathological files. Better values may be
1449 * found for specific files.
1451 typedef struct config_s {
1452 ush good_length; /* reduce lazy search above this match length */
1453 ush max_lazy; /* do not perform lazy search above this match length */
1454 ush nice_length; /* quit search above this match length */
1455 ush max_chain;
1456 compress_func func;
1457 } config;
1459 static const config configuration_table[10] = {
1460 /* good lazy nice chain */
1461 /* 0 */ {0, 0, 0, 0, deflate_stored}, /* store only */
1462 /* 1 */ {4, 4, 8, 4, deflate_fast}, /* max speed, no lazy matches */
1463 /* 2 */ {4, 5, 16, 8, deflate_fast},
1464 /* 3 */ {4, 6, 32, 32, deflate_fast},
1466 /* 4 */ {4, 4, 16, 16, deflate_slow}, /* lazy matches */
1467 /* 5 */ {8, 16, 32, 32, deflate_slow},
1468 /* 6 */ {8, 16, 128, 128, deflate_slow},
1469 /* 7 */ {8, 32, 128, 256, deflate_slow},
1470 /* 8 */ {32, 128, 258, 1024, deflate_slow},
1471 /* 9 */ {32, 258, 258, 4096, deflate_slow}}; /* max compression */
1473 /* Note: the deflate() code requires max_lazy >= MIN_MATCH and max_chain >= 4
1474 * For deflate_fast() (levels <= 3) good is ignored and lazy has a different
1475 * meaning.
1478 /* rank Z_BLOCK between Z_NO_FLUSH and Z_PARTIAL_FLUSH */
1479 #define RANK(f) (((f) * 2) - ((f) > 4 ? 9 : 0))
1481 /* ===========================================================================
1482 * Update a hash value with the given input byte
1483 * IN assertion: all calls to UPDATE_HASH are made with consecutive input
1484 * characters, so that a running hash key can be computed from the previous
1485 * key instead of complete recalculation each time.
1487 #define UPDATE_HASH(s,h,c) (h = (((h)<<s->hash_shift) ^ (c)) & s->hash_mask)
1490 /* ===========================================================================
1491 * Insert string str in the dictionary and set match_head to the previous head
1492 * of the hash chain (the most recent string with same hash key). Return
1493 * the previous length of the hash chain.
1494 * If this file is compiled with -DFASTEST, the compression level is forced
1495 * to 1, and no hash chains are maintained.
1496 * IN assertion: all calls to INSERT_STRING are made with consecutive input
1497 * characters and the first MIN_MATCH bytes of str are valid (except for
1498 * the last MIN_MATCH-1 bytes of the input file).
1500 #ifdef FASTEST
1501 #define INSERT_STRING(s, str, match_head) \
1502 (UPDATE_HASH(s, s->ins_h, s->window[(str) + (MIN_MATCH-1)]), \
1503 match_head = s->head[s->ins_h], \
1504 s->head[s->ins_h] = (Pos)(str))
1505 #else
1506 #define INSERT_STRING(s, str, match_head) \
1507 (UPDATE_HASH(s, s->ins_h, s->window[(str) + (MIN_MATCH-1)]), \
1508 match_head = s->prev[(str) & s->w_mask] = s->head[s->ins_h], \
1509 s->head[s->ins_h] = (Pos)(str))
1510 #endif
1512 /* ===========================================================================
1513 * Initialize the hash table (avoiding 64K overflow for 16 bit systems).
1514 * prev[] will be initialized on the fly.
1516 #define CLEAR_HASH(s) \
1517 s->head[s->hash_size-1] = NIL; \
1518 zmemzero((Bytef *)s->head, (unsigned)(s->hash_size-1)*sizeof(*s->head));
1520 /* ===========================================================================
1521 * Slide the hash table when sliding the window down (could be avoided with 32
1522 * bit values at the expense of memory usage). We slide even when level == 0 to
1523 * keep the hash table consistent if we switch back to level > 0 later.
1525 static void slide_hash( deflate_state *s )
1527 unsigned n, m;
1528 Posf *p;
1529 uInt wsize = s->w_size;
1531 n = s->hash_size;
1532 p = &s->head[n];
1533 do {
1534 m = *--p;
1535 *p = (Pos)(m >= wsize ? m - wsize : NIL);
1536 } while (--n);
1537 n = wsize;
1538 #ifndef FASTEST
1539 p = &s->prev[n];
1540 do {
1541 m = *--p;
1542 *p = (Pos)(m >= wsize ? m - wsize : NIL);
1543 /* If n is not on any hash chain, prev[n] is garbage but
1544 * its value will never be used.
1546 } while (--n);
1547 #endif
1550 /* ========================================================================= */
1551 int deflateInit( z_streamp strm, int level )
1553 return deflateInit2(strm, level, Z_DEFLATED, MAX_WBITS, DEF_MEM_LEVEL, Z_DEFAULT_STRATEGY);
1554 /* To do: ignore strm->next_in if we use it as window */
1557 /* ========================================================================= */
1558 int deflateInit2( z_streamp strm, int level, int method, int windowBits, int memLevel, int strategy )
1560 deflate_state *s;
1561 int wrap = 1;
1562 ushf *overlay;
1563 /* We overlay pending_buf and d_buf+l_buf. This works since the average
1564 * output size for (length,distance) codes is <= 24 bits.
1567 strm->msg = Z_NULL;
1568 #ifdef FASTEST
1569 if (level != 0) level = 1;
1570 #else
1571 if (level == Z_DEFAULT_COMPRESSION) level = 6;
1572 #endif
1574 if (windowBits < 0) { /* suppress zlib wrapper */
1575 wrap = 0;
1576 windowBits = -windowBits;
1578 #ifdef GZIP
1579 else if (windowBits > 15) {
1580 wrap = 2; /* write gzip wrapper instead */
1581 windowBits -= 16;
1583 #endif
1584 if (memLevel < 1 || memLevel > MAX_MEM_LEVEL || method != Z_DEFLATED ||
1585 windowBits < 8 || windowBits > 15 || level < 0 || level > 9 ||
1586 strategy < 0 || strategy > Z_FIXED || (windowBits == 8 && wrap != 1)) {
1587 return Z_STREAM_ERROR;
1589 if (windowBits == 8) windowBits = 9; /* until 256-byte window bug fixed */
1590 s = (deflate_state *) ZALLOC(strm, 1, sizeof(deflate_state));
1591 if (s == Z_NULL) return Z_MEM_ERROR;
1592 strm->state = (struct internal_state FAR *)s;
1593 s->strm = strm;
1594 s->status = INIT_STATE; /* to pass state test in deflateReset() */
1596 s->wrap = wrap;
1597 s->gzhead = Z_NULL;
1598 s->w_bits = (uInt)windowBits;
1599 s->w_size = 1 << s->w_bits;
1600 s->w_mask = s->w_size - 1;
1602 s->hash_bits = (uInt)memLevel + 7;
1603 s->hash_size = 1 << s->hash_bits;
1604 s->hash_mask = s->hash_size - 1;
1605 s->hash_shift = ((s->hash_bits+MIN_MATCH-1)/MIN_MATCH);
1607 s->window = (Bytef *) ZALLOC(strm, s->w_size, 2*sizeof(Byte));
1608 s->prev = (Posf *) ZALLOC(strm, s->w_size, sizeof(Pos));
1609 s->head = (Posf *) ZALLOC(strm, s->hash_size, sizeof(Pos));
1611 s->high_water = 0; /* nothing written to s->window yet */
1613 s->lit_bufsize = 1 << (memLevel + 6); /* 16K elements by default */
1615 overlay = (ushf *) ZALLOC(strm, s->lit_bufsize, sizeof(ush)+2);
1616 s->pending_buf = (uchf *) overlay;
1617 s->pending_buf_size = (ulg)s->lit_bufsize * (sizeof(ush)+2L);
1619 if (s->window == Z_NULL || s->prev == Z_NULL || s->head == Z_NULL ||
1620 s->pending_buf == Z_NULL) {
1621 s->status = FINISH_STATE;
1622 strm->msg = ERR_MSG(Z_MEM_ERROR);
1623 deflateEnd (strm);
1624 return Z_MEM_ERROR;
1626 s->d_buf = overlay + s->lit_bufsize/sizeof(ush);
1627 s->l_buf = s->pending_buf + (1+sizeof(ush))*s->lit_bufsize;
1629 s->level = level;
1630 s->strategy = strategy;
1631 s->method = (Byte)method;
1633 return deflateReset(strm);
1636 /* =========================================================================
1637 * Check for a valid deflate stream state. Return 0 if ok, 1 if not.
1639 static int deflateStateCheck( z_streamp strm )
1641 deflate_state *s;
1642 if (strm == Z_NULL ||
1643 strm->zalloc == (alloc_func)0 || strm->zfree == (free_func)0)
1644 return 1;
1645 s = strm->state;
1646 if (s == Z_NULL || s->strm != strm || (s->status != INIT_STATE &&
1647 #ifdef GZIP
1648 s->status != GZIP_STATE &&
1649 #endif
1650 s->status != EXTRA_STATE &&
1651 s->status != NAME_STATE &&
1652 s->status != COMMENT_STATE &&
1653 s->status != HCRC_STATE &&
1654 s->status != BUSY_STATE &&
1655 s->status != FINISH_STATE))
1656 return 1;
1657 return 0;
1660 /* ========================================================================= */
1661 static int deflateResetKeep( z_streamp strm )
1663 deflate_state *s;
1665 if (deflateStateCheck(strm)) {
1666 return Z_STREAM_ERROR;
1669 strm->total_in = strm->total_out = 0;
1670 strm->msg = Z_NULL; /* use zfree if we ever allocate msg dynamically */
1671 strm->data_type = Z_UNKNOWN;
1673 s = (deflate_state *)strm->state;
1674 s->pending = 0;
1675 s->pending_out = s->pending_buf;
1677 if (s->wrap < 0) {
1678 s->wrap = -s->wrap; /* was made negative by deflate(..., Z_FINISH); */
1680 s->status =
1681 #ifdef GZIP
1682 s->wrap == 2 ? GZIP_STATE :
1683 #endif
1684 s->wrap ? INIT_STATE : BUSY_STATE;
1685 strm->adler =
1686 #ifdef GZIP
1687 s->wrap == 2 ? crc32(0L, Z_NULL, 0) :
1688 #endif
1689 adler32(0L, Z_NULL, 0);
1690 s->last_flush = Z_NO_FLUSH;
1692 _tr_init(s);
1694 return Z_OK;
1697 /* ========================================================================= */
1698 static int deflateReset( z_streamp strm )
1700 int ret;
1702 ret = deflateResetKeep(strm);
1703 if (ret == Z_OK)
1704 lm_init(strm->state);
1705 return ret;
1708 /* =========================================================================
1709 * Put a short in the pending buffer. The 16-bit value is put in MSB order.
1710 * IN assertion: the stream state is correct and there is enough room in
1711 * pending_buf.
1713 static void putShortMSB( deflate_state *s, uInt b )
1715 put_byte(s, (Byte)(b >> 8));
1716 put_byte(s, (Byte)(b & 0xff));
1719 /* =========================================================================
1720 * Flush as much pending output as possible. All deflate() output, except for
1721 * some deflate_stored() output, goes through this function so some
1722 * applications may wish to modify it to avoid allocating a large
1723 * strm->next_out buffer and copying into it. (See also read_buf()).
1725 static void flush_pending( z_streamp strm )
1727 unsigned len;
1728 deflate_state *s = strm->state;
1730 _tr_flush_bits(s);
1731 len = s->pending;
1732 if (len > strm->avail_out) len = strm->avail_out;
1733 if (len == 0) return;
1735 zmemcpy(strm->next_out, s->pending_out, len);
1736 strm->next_out += len;
1737 s->pending_out += len;
1738 strm->total_out += len;
1739 strm->avail_out -= len;
1740 s->pending -= len;
1741 if (s->pending == 0) {
1742 s->pending_out = s->pending_buf;
/* ===========================================================================
 * Update the header CRC with the bytes s->pending_buf[beg..s->pending - 1].
 * NOTE: relies on local variables `s` and `strm` being in scope at every
 * expansion site (only used inside deflate()).
 */
#define HCRC_UPDATE(beg) \
    do { \
        if (s->gzhead->hcrc && s->pending > (beg)) \
            strm->adler = crc32(strm->adler, s->pending_buf + (beg), \
                                s->pending - (beg)); \
    } while (0)
/* =========================================================================
 * Main compression entry point.  Drives the header-writing state machine
 * (zlib or gzip wrapper), the block compressor, and the trailer; consumes
 * strm->next_in/avail_in and produces strm->next_out/avail_out.
 */
int deflate( z_streamp strm, int flush )
{
    int old_flush; /* value of flush param for previous deflate call */
    deflate_state *s;

    if (deflateStateCheck(strm) || flush > Z_BLOCK || flush < 0) {
        return Z_STREAM_ERROR;
    }
    s = strm->state;

    if (strm->next_out == Z_NULL ||
        (strm->avail_in != 0 && strm->next_in == Z_NULL) ||
        (s->status == FINISH_STATE && flush != Z_FINISH)) {
        ERR_RETURN(strm, Z_STREAM_ERROR);
    }
    if (strm->avail_out == 0) ERR_RETURN(strm, Z_BUF_ERROR);

    old_flush = s->last_flush;
    s->last_flush = flush;

    /* Flush as much pending output as possible */
    if (s->pending != 0) {
        flush_pending(strm);
        if (strm->avail_out == 0) {
            /* Since avail_out is 0, deflate will be called again with
             * more output space, but possibly with both pending and
             * avail_in equal to zero. There won't be anything to do,
             * but this is not an error situation so make sure we
             * return OK instead of BUF_ERROR at next call of deflate:
             */
            s->last_flush = -1;
            return Z_OK;
        }

    /* Make sure there is something to do and avoid duplicate consecutive
     * flushes. For repeated and useless calls with Z_FINISH, we keep
     * returning Z_STREAM_END instead of Z_BUF_ERROR.
     */
    } else if (strm->avail_in == 0 && RANK(flush) <= RANK(old_flush) &&
               flush != Z_FINISH) {
        ERR_RETURN(strm, Z_BUF_ERROR);
    }

    /* User must not provide more input after the first FINISH: */
    if (s->status == FINISH_STATE && strm->avail_in != 0) {
        ERR_RETURN(strm, Z_BUF_ERROR);
    }

    /* Write the header */
    if (s->status == INIT_STATE) {
        /* zlib header: CMF byte (method + window size) in the high byte,
         * FLG byte built below in the low byte.
         */
        uInt header = (Z_DEFLATED + ((s->w_bits-8)<<4)) << 8;
        uInt level_flags;

        if (s->strategy >= Z_HUFFMAN_ONLY || s->level < 2)
            level_flags = 0;
        else if (s->level < 6)
            level_flags = 1;
        else if (s->level == 6)
            level_flags = 2;
        else
            level_flags = 3;
        header |= (level_flags << 6);
        if (s->strstart != 0) header |= PRESET_DICT;
        /* FCHECK bits: make the 16-bit header a multiple of 31. */
        header += 31 - (header % 31);

        putShortMSB(s, header);

        /* Save the adler32 of the preset dictionary: */
        if (s->strstart != 0) {
            putShortMSB(s, (uInt)(strm->adler >> 16));
            putShortMSB(s, (uInt)(strm->adler & 0xffff));
        }
        strm->adler = adler32(0L, Z_NULL, 0);
        s->status = BUSY_STATE;

        /* Compression must start with an empty pending buffer */
        flush_pending(strm);
        if (s->pending != 0) {
            s->last_flush = -1;
            return Z_OK;
        }
    }
#ifdef GZIP
    if (s->status == GZIP_STATE) {
        /* gzip header (RFC 1952): magic, method, flags, mtime, xfl, os */
        strm->adler = crc32(0L, Z_NULL, 0);
        put_byte(s, 31);
        put_byte(s, 139);
        put_byte(s, 8);
        if (s->gzhead == Z_NULL) {
            /* No user-supplied header: emit a minimal one. */
            put_byte(s, 0);
            put_byte(s, 0);
            put_byte(s, 0);
            put_byte(s, 0);
            put_byte(s, 0);
            put_byte(s, s->level == 9 ? 2 :
                     (s->strategy >= Z_HUFFMAN_ONLY || s->level < 2 ?
                      4 : 0));
            put_byte(s, OS_CODE);
            s->status = BUSY_STATE;

            /* Compression must start with an empty pending buffer */
            flush_pending(strm);
            if (s->pending != 0) {
                s->last_flush = -1;
                return Z_OK;
            }
        }
        else {
            put_byte(s, (s->gzhead->text ? 1 : 0) +
                     (s->gzhead->hcrc ? 2 : 0) +
                     (s->gzhead->extra == Z_NULL ? 0 : 4) +
                     (s->gzhead->name == Z_NULL ? 0 : 8) +
                     (s->gzhead->comment == Z_NULL ? 0 : 16)
                     );
            put_byte(s, (Byte)(s->gzhead->time & 0xff));
            put_byte(s, (Byte)((s->gzhead->time >> 8) & 0xff));
            put_byte(s, (Byte)((s->gzhead->time >> 16) & 0xff));
            put_byte(s, (Byte)((s->gzhead->time >> 24) & 0xff));
            put_byte(s, s->level == 9 ? 2 :
                     (s->strategy >= Z_HUFFMAN_ONLY || s->level < 2 ?
                      4 : 0));
            put_byte(s, s->gzhead->os & 0xff);
            if (s->gzhead->extra != Z_NULL) {
                put_byte(s, s->gzhead->extra_len & 0xff);
                put_byte(s, (s->gzhead->extra_len >> 8) & 0xff);
            }
            if (s->gzhead->hcrc)
                strm->adler = crc32(strm->adler, s->pending_buf,
                                    s->pending);
            s->gzindex = 0;
            s->status = EXTRA_STATE;
        }
    }
    if (s->status == EXTRA_STATE) {
        if (s->gzhead->extra != Z_NULL) {
            ulg beg = s->pending;   /* start of bytes to update crc */
            uInt left = (s->gzhead->extra_len & 0xffff) - s->gzindex;
            /* Copy the extra field in pending-buffer-sized chunks,
             * flushing between chunks; bail out (resumable via gzindex)
             * if the caller's output buffer fills up.
             */
            while (s->pending + left > s->pending_buf_size) {
                uInt copy = s->pending_buf_size - s->pending;
                zmemcpy(s->pending_buf + s->pending,
                        s->gzhead->extra + s->gzindex, copy);
                s->pending = s->pending_buf_size;
                HCRC_UPDATE(beg);
                s->gzindex += copy;
                flush_pending(strm);
                if (s->pending != 0) {
                    s->last_flush = -1;
                    return Z_OK;
                }
                beg = 0;
                left -= copy;
            }
            zmemcpy(s->pending_buf + s->pending,
                    s->gzhead->extra + s->gzindex, left);
            s->pending += left;
            HCRC_UPDATE(beg);
            s->gzindex = 0;
        }
        s->status = NAME_STATE;
    }
    if (s->status == NAME_STATE) {
        if (s->gzhead->name != Z_NULL) {
            ulg beg = s->pending;   /* start of bytes to update crc */
            int val;
            /* Emit the NUL-terminated name, flushing whenever pending
             * fills; gzindex remembers how far we got across calls.
             */
            do {
                if (s->pending == s->pending_buf_size) {
                    HCRC_UPDATE(beg);
                    flush_pending(strm);
                    if (s->pending != 0) {
                        s->last_flush = -1;
                        return Z_OK;
                    }
                    beg = 0;
                }
                val = s->gzhead->name[s->gzindex++];
                put_byte(s, val);
            } while (val != 0);
            HCRC_UPDATE(beg);
            s->gzindex = 0;
        }
        s->status = COMMENT_STATE;
    }
    if (s->status == COMMENT_STATE) {
        if (s->gzhead->comment != Z_NULL) {
            ulg beg = s->pending;   /* start of bytes to update crc */
            int val;
            /* Same resumable emission as NAME_STATE, for the comment. */
            do {
                if (s->pending == s->pending_buf_size) {
                    HCRC_UPDATE(beg);
                    flush_pending(strm);
                    if (s->pending != 0) {
                        s->last_flush = -1;
                        return Z_OK;
                    }
                    beg = 0;
                }
                val = s->gzhead->comment[s->gzindex++];
                put_byte(s, val);
            } while (val != 0);
            HCRC_UPDATE(beg);
        }
        s->status = HCRC_STATE;
    }
    if (s->status == HCRC_STATE) {
        if (s->gzhead->hcrc) {
            /* Two-byte header CRC: make room for it if necessary. */
            if (s->pending + 2 > s->pending_buf_size) {
                flush_pending(strm);
                if (s->pending != 0) {
                    s->last_flush = -1;
                    return Z_OK;
                }
            }
            put_byte(s, (Byte)(strm->adler & 0xff));
            put_byte(s, (Byte)((strm->adler >> 8) & 0xff));
            strm->adler = crc32(0L, Z_NULL, 0);
        }
        s->status = BUSY_STATE;

        /* Compression must start with an empty pending buffer */
        flush_pending(strm);
        if (s->pending != 0) {
            s->last_flush = -1;
            return Z_OK;
        }
    }
#endif

    /* Start a new block or continue the current one.
     */
    if (strm->avail_in != 0 || s->lookahead != 0 ||
        (flush != Z_NO_FLUSH && s->status != FINISH_STATE)) {
        block_state bstate;

        /* Dispatch to the compressor for the current level/strategy. */
        bstate = s->level == 0 ? deflate_stored(s, flush) :
                 s->strategy == Z_HUFFMAN_ONLY ? deflate_huff(s, flush) :
                 s->strategy == Z_RLE ? deflate_rle(s, flush) :
                 (*(configuration_table[s->level].func))(s, flush);

        if (bstate == finish_started || bstate == finish_done) {
            s->status = FINISH_STATE;
        }
        if (bstate == need_more || bstate == finish_started) {
            if (strm->avail_out == 0) {
                s->last_flush = -1; /* avoid BUF_ERROR next call, see above */
            }
            return Z_OK;
            /* If flush != Z_NO_FLUSH && avail_out == 0, the next call
             * of deflate should use the same flush parameter to make sure
             * that the flush is complete. So we don't have to output an
             * empty block here, this will be done at next call. This also
             * ensures that for a very small output buffer, we emit at most
             * one empty block.
             */
        }
        if (bstate == block_done) {
            if (flush == Z_PARTIAL_FLUSH) {
                _tr_align(s);
            } else if (flush != Z_BLOCK) { /* FULL_FLUSH or SYNC_FLUSH */
                _tr_stored_block(s, (char*)0, 0L, 0);
                /* For a full flush, this empty block will be recognized
                 * as a special marker by inflate_sync().
                 */
                if (flush == Z_FULL_FLUSH) {
                    CLEAR_HASH(s);             /* forget history */
                    if (s->lookahead == 0) {
                        s->strstart = 0;
                        s->block_start = 0L;
                        s->insert = 0;
                    }
                }
            }
            flush_pending(strm);
            if (strm->avail_out == 0) {
                s->last_flush = -1; /* avoid BUF_ERROR at next call, see above */
                return Z_OK;
            }
        }
    }

    if (flush != Z_FINISH) return Z_OK;
    if (s->wrap <= 0) return Z_STREAM_END;

    /* Write the trailer */
#ifdef GZIP
    if (s->wrap == 2) {
        /* gzip trailer: CRC-32 then ISIZE, both little-endian. */
        put_byte(s, (Byte)(strm->adler & 0xff));
        put_byte(s, (Byte)((strm->adler >> 8) & 0xff));
        put_byte(s, (Byte)((strm->adler >> 16) & 0xff));
        put_byte(s, (Byte)((strm->adler >> 24) & 0xff));
        put_byte(s, (Byte)(strm->total_in & 0xff));
        put_byte(s, (Byte)((strm->total_in >> 8) & 0xff));
        put_byte(s, (Byte)((strm->total_in >> 16) & 0xff));
        put_byte(s, (Byte)((strm->total_in >> 24) & 0xff));
    }
    else
#endif
    {
        /* zlib trailer: adler32 in big-endian order. */
        putShortMSB(s, (uInt)(strm->adler >> 16));
        putShortMSB(s, (uInt)(strm->adler & 0xffff));
    }
    flush_pending(strm);
    /* If avail_out is zero, the application will call deflate again
     * to flush the rest.
     */
    if (s->wrap > 0) s->wrap = -s->wrap; /* write the trailer only once! */
    return s->pending != 0 ? Z_OK : Z_STREAM_END;
}
2067 /* ========================================================================= */
2068 int deflateEnd( z_streamp strm )
2070 int status;
2072 if (deflateStateCheck(strm)) return Z_STREAM_ERROR;
2074 status = strm->state->status;
2076 /* Deallocate in reverse order of allocations: */
2077 TRY_FREE(strm, strm->state->pending_buf);
2078 TRY_FREE(strm, strm->state->head);
2079 TRY_FREE(strm, strm->state->prev);
2080 TRY_FREE(strm, strm->state->window);
2082 ZFREE(strm, strm->state);
2083 strm->state = Z_NULL;
2085 return status == BUSY_STATE ? Z_DATA_ERROR : Z_OK;
2088 /* ===========================================================================
2089 * Read a new buffer from the current input stream, update the adler32
2090 * and total number of bytes read. All deflate() input goes through
2091 * this function so some applications may wish to modify it to avoid
2092 * allocating a large strm->next_in buffer and copying from it.
2093 * (See also flush_pending()).
2095 static unsigned read_buf( z_streamp strm, Bytef *buf, unsigned size )
2097 unsigned len = strm->avail_in;
2099 if (len > size) len = size;
2100 if (len == 0) return 0;
2102 strm->avail_in -= len;
2104 zmemcpy(buf, strm->next_in, len);
2105 if (strm->state->wrap == 1) {
2106 strm->adler = adler32(strm->adler, buf, len);
2108 #ifdef GZIP
2109 else if (strm->state->wrap == 2) {
2110 strm->adler = crc32(strm->adler, buf, len);
2112 #endif
2113 strm->next_in += len;
2114 strm->total_in += len;
2116 return len;
2119 /* ===========================================================================
2120 * Initialize the "longest match" routines for a new zlib stream
2122 static void lm_init( deflate_state *s )
2124 s->window_size = (ulg)2L*s->w_size;
2126 CLEAR_HASH(s);
2128 /* Set the default configuration parameters:
2130 s->max_lazy_match = configuration_table[s->level].max_lazy;
2131 s->good_match = configuration_table[s->level].good_length;
2132 s->nice_match = configuration_table[s->level].nice_length;
2133 s->max_chain_length = configuration_table[s->level].max_chain;
2135 s->strstart = 0;
2136 s->block_start = 0L;
2137 s->lookahead = 0;
2138 s->insert = 0;
2139 s->match_length = s->prev_length = MIN_MATCH-1;
2140 s->match_available = 0;
2141 s->ins_h = 0;
/* ===========================================================================
 * Set match_start to the longest match starting at the given string and
 * return its length. Matches shorter or equal to prev_length are discarded,
 * in which case the result is equal to prev_length and match_start is
 * garbage.
 * IN assertions: cur_match is the head of the hash chain for the current
 *   string (strstart) and its distance is <= MAX_DIST, and prev_length >= 1
 * OUT assertion: the match length is not greater than s->lookahead.
 */
static uInt longest_match( deflate_state *s, IPos cur_match )
{
    unsigned chain_length = s->max_chain_length;/* max hash chain length */
    register Bytef *scan = s->window + s->strstart; /* current string */
    register Bytef *match;                      /* matched string */
    register int len;                           /* length of current match */
    int best_len = (int)s->prev_length;         /* best match length so far */
    int nice_match = s->nice_match;             /* stop if match long enough */
    IPos limit = s->strstart > (IPos)MAX_DIST(s) ?
        s->strstart - (IPos)MAX_DIST(s) : NIL;
    /* Stop when cur_match becomes <= limit. To simplify the code,
     * we prevent matches with the string of window index 0.
     */
    Posf *prev = s->prev;
    uInt wmask = s->w_mask;

#ifdef UNALIGNED_OK
    /* Compare two bytes at a time. Note: this is not always beneficial.
     * Try with and without -DUNALIGNED_OK to check.
     */
    register Bytef *strend = s->window + s->strstart + MAX_MATCH - 1;
    register ush scan_start = *(ushf*)scan;
    register ush scan_end   = *(ushf*)(scan+best_len-1);
#else
    register Bytef *strend = s->window + s->strstart + MAX_MATCH;
    register Byte scan_end1  = scan[best_len-1];
    register Byte scan_end   = scan[best_len];
#endif

    /* The code is optimized for HASH_BITS >= 8 and MAX_MATCH-2 multiple of 16.
     * It is easy to get rid of this optimization if necessary.
     */
    Assert(s->hash_bits >= 8 && MAX_MATCH == 258, "Code too clever");

    /* Do not waste too much time if we already have a good match: */
    if (s->prev_length >= s->good_match) {
        chain_length >>= 2;
    }
    /* Do not look for matches beyond the end of the input. This is necessary
     * to make deflate deterministic.
     */
    if ((uInt)nice_match > s->lookahead) nice_match = (int)s->lookahead;

    Assert((ulg)s->strstart <= s->window_size-MIN_LOOKAHEAD, "need lookahead");

    do {
        Assert(cur_match < s->strstart, "no future");
        match = s->window + cur_match;

        /* Skip to next match if the match length cannot increase
         * or if the match length is less than 2. Note that the checks below
         * for insufficient lookahead only occur occasionally for performance
         * reasons. Therefore uninitialized memory will be accessed, and
         * conditional jumps will be made that depend on those values.
         * However the length of the match is limited to the lookahead, so
         * the output of deflate is not affected by the uninitialized values.
         */
#if (defined(UNALIGNED_OK) && MAX_MATCH == 258)
        /* This code assumes sizeof(unsigned short) == 2. Do not use
         * UNALIGNED_OK if your compiler uses a different size.
         */
        if (*(ushf*)(match+best_len-1) != scan_end ||
            *(ushf*)match != scan_start) continue;

        /* It is not necessary to compare scan[2] and match[2] since they are
         * always equal when the other bytes match, given that the hash keys
         * are equal and that HASH_BITS >= 8. Compare 2 bytes at a time at
         * strstart+3, +5, ... up to strstart+257. We check for insufficient
         * lookahead only every 4th comparison; the 128th check will be made
         * at strstart+257. If MAX_MATCH-2 is not a multiple of 8, it is
         * necessary to put more guard bytes at the end of the window, or
         * to check more often for insufficient lookahead.
         */
        Assert(scan[2] == match[2], "scan[2]?");
        scan++, match++;
        do {
        } while (*(ushf*)(scan+=2) == *(ushf*)(match+=2) &&
                 *(ushf*)(scan+=2) == *(ushf*)(match+=2) &&
                 *(ushf*)(scan+=2) == *(ushf*)(match+=2) &&
                 *(ushf*)(scan+=2) == *(ushf*)(match+=2) &&
                 scan < strend);
        /* The funny "do {}" generates better code on most compilers */

        /* Here, scan <= window+strstart+257 */
        Assert(scan <= s->window+(unsigned)(s->window_size-1), "wild scan");
        if (*scan == *match) scan++;

        len = (MAX_MATCH - 1) - (int)(strend-scan);
        scan = strend - (MAX_MATCH-1);

#else /* UNALIGNED_OK */

        if (match[best_len]   != scan_end  ||
            match[best_len-1] != scan_end1 ||
            *match            != *scan     ||
            *++match          != scan[1])      continue;

        /* The check at best_len-1 can be removed because it will be made
         * again later. (This heuristic is not always a win.)
         * It is not necessary to compare scan[2] and match[2] since they
         * are always equal when the other bytes match, given that
         * the hash keys are equal and that HASH_BITS >= 8.
         */
        scan += 2, match++;
        Assert(*scan == *match, "match[2]?");

        /* We check for insufficient lookahead only every 8th comparison;
         * the 256th check will be made at strstart+258.
         */
        do {
        } while (*++scan == *++match && *++scan == *++match &&
                 *++scan == *++match && *++scan == *++match &&
                 *++scan == *++match && *++scan == *++match &&
                 *++scan == *++match && *++scan == *++match &&
                 scan < strend);

        Assert(scan <= s->window+(unsigned)(s->window_size-1), "wild scan");

        len = MAX_MATCH - (int)(strend - scan);
        scan = strend - MAX_MATCH;

#endif /* UNALIGNED_OK */

        if (len > best_len) {
            /* New best match: remember where it starts, and refresh the
             * fast-reject bytes so shorter candidates are skipped quickly.
             */
            s->match_start = cur_match;
            best_len = len;
            if (len >= nice_match) break;
#ifdef UNALIGNED_OK
            scan_end = *(ushf*)(scan+best_len-1);
#else
            scan_end1  = scan[best_len-1];
            scan_end   = scan[best_len];
#endif
        }
    } while ((cur_match = prev[cur_match & wmask]) > limit
             && --chain_length != 0);

    /* Never report a match longer than the available lookahead. */
    if ((uInt)best_len <= s->lookahead) return (uInt)best_len;
    return s->lookahead;
}
/* Match verification is compiled out in this non-debug build. */
#define check_match(s, start, match, length)
/* ===========================================================================
 * Fill the window when the lookahead becomes insufficient.
 * Updates strstart and lookahead.
 *
 * IN assertion: lookahead < MIN_LOOKAHEAD
 * OUT assertions: strstart <= window_size-MIN_LOOKAHEAD
 *    At least one byte has been read, or avail_in == 0; reads are
 *    performed for at least two bytes (required for the zip translate_eol
 *    option -- not supported here).
 */
static void fill_window( deflate_state *s )
{
    unsigned n;
    unsigned more;    /* Amount of free space at the end of the window. */
    uInt wsize = s->w_size;

    Assert(s->lookahead < MIN_LOOKAHEAD, "already enough lookahead");

    do {
        more = (unsigned)(s->window_size -(ulg)s->lookahead -(ulg)s->strstart);

        /* Deal with !@#$% 64K limit: */
        if (sizeof(int) <= 2) {
            if (more == 0 && s->strstart == 0 && s->lookahead == 0) {
                more = wsize;

            } else if (more == (unsigned)(-1)) {
                /* Very unlikely, but possible on 16 bit machine if
                 * strstart == 0 && lookahead == 1 (input done a byte at time)
                 */
                more--;
            }
        }

        /* If the window is almost full and there is insufficient lookahead,
         * move the upper half to the lower one to make room in the upper half.
         */
        if (s->strstart >= wsize+MAX_DIST(s)) {

            zmemcpy(s->window, s->window+wsize, (unsigned)wsize - more);
            s->match_start -= wsize;
            s->strstart    -= wsize; /* we now have strstart >= MAX_DIST */
            s->block_start -= (long) wsize;
            /* Rebase the hash chains to match the slid window. */
            slide_hash(s);
            more += wsize;
        }
        if (s->strm->avail_in == 0) break;

        /* If there was no sliding:
         *    strstart <= WSIZE+MAX_DIST-1 && lookahead <= MIN_LOOKAHEAD - 1 &&
         *    more == window_size - lookahead - strstart
         * => more >= window_size - (MIN_LOOKAHEAD-1 + WSIZE + MAX_DIST-1)
         * => more >= window_size - 2*WSIZE + 2
         * In the BIG_MEM or MMAP case (not yet supported),
         *   window_size == input_size + MIN_LOOKAHEAD &&
         *   strstart + s->lookahead <= input_size => more >= MIN_LOOKAHEAD.
         * Otherwise, window_size == 2*WSIZE so more >= 2.
         * If there was sliding, more >= WSIZE. So in all cases, more >= 2.
         */
        Assert(more >= 2, "more < 2");

        n = read_buf(s->strm, s->window + s->strstart + s->lookahead, more);
        s->lookahead += n;

        /* Initialize the hash value now that we have some input: */
        if (s->lookahead + s->insert >= MIN_MATCH) {
            uInt str = s->strstart - s->insert;
            s->ins_h = s->window[str];
            UPDATE_HASH(s, s->ins_h, s->window[str + 1]);
#if MIN_MATCH != 3
            Call UPDATE_HASH() MIN_MATCH-3 more times
#endif
            /* Insert the strings deferred from the previous call (insert
             * counts how many) into the hash table, now that enough bytes
             * follow them to form full MIN_MATCH-byte keys.
             */
            while (s->insert) {
                UPDATE_HASH(s, s->ins_h, s->window[str + MIN_MATCH-1]);
#ifndef FASTEST
                s->prev[str & s->w_mask] = s->head[s->ins_h];
#endif
                s->head[s->ins_h] = (Pos)str;
                str++;
                s->insert--;
                if (s->lookahead + s->insert < MIN_MATCH)
                    break;
            }
        }
        /* If the whole input has less than MIN_MATCH bytes, ins_h is garbage,
         * but this is not important since only literal bytes will be emitted.
         */

    } while (s->lookahead < MIN_LOOKAHEAD && s->strm->avail_in != 0);

    /* If the WIN_INIT bytes after the end of the current data have never been
     * written, then zero those bytes in order to avoid memory check reports of
     * the use of uninitialized (or uninitialised as Julian writes) bytes by
     * the longest match routines. Update the high water mark for the next
     * time through here. WIN_INIT is set to MAX_MATCH since the longest match
     * routines allow scanning to strstart + MAX_MATCH, ignoring lookahead.
     */
    if (s->high_water < s->window_size) {
        ulg curr = s->strstart + (ulg)(s->lookahead);
        ulg init;

        if (s->high_water < curr) {
            /* Previous high water mark below current data -- zero WIN_INIT
             * bytes or up to end of window, whichever is less.
             */
            init = s->window_size - curr;
            if (init > WIN_INIT)
                init = WIN_INIT;
            zmemzero(s->window + curr, (unsigned)init);
            s->high_water = curr + init;
        }
        else if (s->high_water < (ulg)curr + WIN_INIT) {
            /* High water mark at or above current data, but below current data
             * plus WIN_INIT -- zero out to current data plus WIN_INIT, or up
             * to end of window, whichever is less.
             */
            init = (ulg)curr + WIN_INIT - s->high_water;
            if (init > s->window_size - s->high_water)
                init = s->window_size - s->high_water;
            zmemzero(s->window + s->high_water, (unsigned)init);
            s->high_water += init;
        }
    }

    Assert((ulg)s->strstart <= s->window_size - MIN_LOOKAHEAD,
           "not enough room for search");
}
/* ===========================================================================
 * Flush the current block, with given end-of-file flag.
 * IN assertion: strstart is set to the end of the current match.
 * NOTE: relies on a local `s` (deflate_state *) at the expansion site.
 */
#define FLUSH_BLOCK_ONLY(s, last) { \
   _tr_flush_block(s, (s->block_start >= 0L ? \
                   (charf *)&s->window[(unsigned)s->block_start] : \
                   (charf *)Z_NULL), \
                (ulg)((long)s->strstart - s->block_start), \
                (last)); \
   s->block_start = s->strstart; \
   flush_pending(s->strm); \
   Tracev((stderr,"[FLUSH]")); \
}

/* Same but force premature exit if necessary. */
#define FLUSH_BLOCK(s, last) { \
   FLUSH_BLOCK_ONLY(s, last); \
   if (s->strm->avail_out == 0) return (last) ? finish_started : need_more; \
}

/* Maximum stored block length in deflate format (not including header). */
#define MAX_STORED 65535

/* Minimum of a and b.
 * CAUTION: evaluates each argument twice; do not pass expressions with
 * side effects.
 */
#define MIN(a, b) ((a) > (b) ? (b) : (a))
/* ===========================================================================
 * Copy without compression as much as possible from the input stream, return
 * the current block state.
 *
 * In case deflateParams() is used to later switch to a non-zero compression
 * level, s->matches (otherwise unused when storing) keeps track of the number
 * of hash table slides to perform. If s->matches is 1, then one hash table
 * slide will be done when switching. If s->matches is 2, the maximum value
 * allowed here, then the hash table will be cleared, since two or more slides
 * is the same as a clear.
 *
 * deflate_stored() is written to minimize the number of times an input byte is
 * copied. It is most efficient with large input and output buffers, which
 * maximizes the opportunites to have a single copy from next_in to next_out.
 */
static block_state deflate_stored( deflate_state *s, int flush )
{
    /* Smallest worthy block size when not flushing or finishing. By default
     * this is 32K. This can be as small as 507 bytes for memLevel == 1. For
     * large input and output buffers, the stored block size will be larger.
     */
    unsigned min_block = MIN(s->pending_buf_size - 5, s->w_size);

    /* Copy as many min_block or larger stored blocks directly to next_out as
     * possible. If flushing, copy the remaining available input to next_out as
     * stored blocks, if there is enough space.
     */
    unsigned len, left, have, last = 0;
    unsigned used = s->strm->avail_in;
    do {
        /* Set len to the maximum size block that we can copy directly with the
         * available input data and output space. Set left to how much of that
         * would be copied from what's left in the window.
         */
        len = MAX_STORED;       /* maximum deflate stored block length */
        have = (s->bi_valid + 42) >> 3;         /* number of header bytes */
        if (s->strm->avail_out < have)          /* need room for header */
            break;
            /* maximum stored block length that will fit in avail_out: */
        have = s->strm->avail_out - have;
        left = s->strstart - s->block_start;    /* bytes left in window */
        if (len > (ulg)left + s->strm->avail_in)
            len = left + s->strm->avail_in;     /* limit len to the input */
        if (len > have)
            len = have;                         /* limit len to the output */

        /* If the stored block would be less than min_block in length, or if
         * unable to copy all of the available input when flushing, then try
         * copying to the window and the pending buffer instead. Also don't
         * write an empty block when flushing -- deflate() does that.
         */
        if (len < min_block && ((len == 0 && flush != Z_FINISH) ||
                                flush == Z_NO_FLUSH ||
                                len != left + s->strm->avail_in))
            break;

        /* Make a dummy stored block in pending to get the header bytes,
         * including any pending bits. This also updates the debugging counts.
         */
        last = flush == Z_FINISH && len == left + s->strm->avail_in ? 1 : 0;
        _tr_stored_block(s, (char *)0, 0L, last);

        /* Replace the lengths in the dummy stored block with len. */
        s->pending_buf[s->pending - 4] = len;
        s->pending_buf[s->pending - 3] = len >> 8;
        s->pending_buf[s->pending - 2] = ~len;
        s->pending_buf[s->pending - 1] = ~len >> 8;

        /* Write the stored block header bytes. */
        flush_pending(s->strm);

#ifdef ZLIB_DEBUG
        /* Update debugging counts for the data about to be copied. */
        s->compressed_len += len << 3;
        s->bits_sent += len << 3;
#endif

        /* Copy uncompressed bytes from the window to next_out. */
        if (left) {
            if (left > len)
                left = len;
            zmemcpy(s->strm->next_out, s->window + s->block_start, left);
            s->strm->next_out += left;
            s->strm->avail_out -= left;
            s->strm->total_out += left;
            s->block_start += left;
            len -= left;
        }

        /* Copy uncompressed bytes directly from next_in to next_out, updating
         * the check value.
         */
        if (len) {
            read_buf(s->strm, s->strm->next_out, len);
            s->strm->next_out += len;
            s->strm->avail_out -= len;
            s->strm->total_out += len;
        }
    } while (last == 0);

    /* Update the sliding window with the last s->w_size bytes of the copied
     * data, or append all of the copied data to the existing window if less
     * than s->w_size bytes were copied. Also update the number of bytes to
     * insert in the hash tables, in the event that deflateParams() switches to
     * a non-zero compression level.
     */
    used -= s->strm->avail_in;      /* number of input bytes directly copied */
    if (used) {
        /* If any input was used, then no unused input remains in the window,
         * therefore s->block_start == s->strstart.
         */
        if (used >= s->w_size) {    /* supplant the previous history */
            s->matches = 2;         /* clear hash */
            zmemcpy(s->window, s->strm->next_in - s->w_size, s->w_size);
            s->strstart = s->w_size;
        }
        else {
            if (s->window_size - s->strstart <= used) {
                /* Slide the window down. */
                s->strstart -= s->w_size;
                zmemcpy(s->window, s->window + s->w_size, s->strstart);
                if (s->matches < 2)
                    s->matches++;   /* add a pending slide_hash() */
            }
            zmemcpy(s->window + s->strstart, s->strm->next_in - used, used);
            s->strstart += used;
        }
        s->block_start = s->strstart;
        s->insert += MIN(used, s->w_size - s->insert);
    }
    if (s->high_water < s->strstart)
        s->high_water = s->strstart;

    /* If the last block was written to next_out, then done. */
    if (last)
        return finish_done;

    /* If flushing and all input has been consumed, then done. */
    if (flush != Z_NO_FLUSH && flush != Z_FINISH &&
        s->strm->avail_in == 0 && (long)s->strstart == s->block_start)
        return block_done;

    /* Fill the window with any remaining input. */
    have = s->window_size - s->strstart - 1;
    if (s->strm->avail_in > have && s->block_start >= (long)s->w_size) {
        /* Slide the window down. */
        s->block_start -= s->w_size;
        s->strstart -= s->w_size;
        zmemcpy(s->window, s->window + s->w_size, s->strstart);
        if (s->matches < 2)
            s->matches++;           /* add a pending slide_hash() */
        have += s->w_size;          /* more space now */
    }
    if (have > s->strm->avail_in)
        have = s->strm->avail_in;
    if (have) {
        read_buf(s->strm, s->window + s->strstart, have);
        s->strstart += have;
    }
    if (s->high_water < s->strstart)
        s->high_water = s->strstart;

    /* There was not enough avail_out to write a complete worthy or flushed
     * stored block to next_out. Write a stored block to pending instead, if we
     * have enough input for a worthy block, or if flushing and there is enough
     * room for the remaining input as a stored block in the pending buffer.
     */
    have = (s->bi_valid + 42) >> 3;         /* number of header bytes */
        /* maximum stored block length that will fit in pending: */
    have = MIN(s->pending_buf_size - have, MAX_STORED);
    min_block = MIN(have, s->w_size);
    left = s->strstart - s->block_start;
    if (left >= min_block ||
        ((left || flush == Z_FINISH) && flush != Z_NO_FLUSH &&
         s->strm->avail_in == 0 && left <= have)) {
        len = MIN(left, have);
        last = flush == Z_FINISH && s->strm->avail_in == 0 &&
               len == left ? 1 : 0;
        _tr_stored_block(s, (charf *)s->window + s->block_start, len, last);
        s->block_start += len;
        flush_pending(s->strm);
    }

    /* We've done all we can with the available input and output. */
    return last ? finish_started : need_more;
}
2638 /* ===========================================================================
2639 * Compress as much as possible from the input stream, return the current
2640 * block state.
2641 * This function does not perform lazy evaluation of matches and inserts
2642 * new strings in the dictionary only for unmatched strings or for short
2643 * matches. It is used only for the fast compression options.
2645 static block_state deflate_fast( deflate_state *s, int flush )
2647 IPos hash_head; /* head of the hash chain */
2648 int bflush; /* set if current block must be flushed */
2650 for (;;) {
2651 /* Make sure that we always have enough lookahead, except
2652 * at the end of the input file. We need MAX_MATCH bytes
2653 * for the next match, plus MIN_MATCH bytes to insert the
2654 * string following the next match.
2656 if (s->lookahead < MIN_LOOKAHEAD) {
2657 fill_window(s);
2658 if (s->lookahead < MIN_LOOKAHEAD && flush == Z_NO_FLUSH) {
2659 return need_more;
2661 if (s->lookahead == 0) break; /* flush the current block */
2664 /* Insert the string window[strstart .. strstart+2] in the
2665 * dictionary, and set hash_head to the head of the hash chain:
2667 hash_head = NIL;
2668 if (s->lookahead >= MIN_MATCH) {
2669 INSERT_STRING(s, s->strstart, hash_head);
2672 /* Find the longest match, discarding those <= prev_length.
2673 * At this point we have always match_length < MIN_MATCH
2675 if (hash_head != NIL && s->strstart - hash_head <= MAX_DIST(s)) {
2676 /* To simplify the code, we prevent matches with the string
2677 * of window index 0 (in particular we have to avoid a match
2678 * of the string with itself at the start of the input file).
2680 s->match_length = longest_match (s, hash_head);
2681 /* longest_match() sets match_start */
2683 if (s->match_length >= MIN_MATCH) {
2684 check_match(s, s->strstart, s->match_start, s->match_length);
2686 _tr_tally_dist(s, s->strstart - s->match_start,
2687 s->match_length - MIN_MATCH, bflush);
2689 s->lookahead -= s->match_length;
2691 /* Insert new strings in the hash table only if the match length
2692 * is not too large. This saves time but degrades compression.
2694 #ifndef FASTEST
2695 if (s->match_length <= s->max_insert_length &&
2696 s->lookahead >= MIN_MATCH) {
2697 s->match_length--; /* string at strstart already in table */
2698 do {
2699 s->strstart++;
2700 INSERT_STRING(s, s->strstart, hash_head);
2701 /* strstart never exceeds WSIZE-MAX_MATCH, so there are
2702 * always MIN_MATCH bytes ahead.
2704 } while (--s->match_length != 0);
2705 s->strstart++;
2706 } else
2707 #endif
2709 s->strstart += s->match_length;
2710 s->match_length = 0;
2711 s->ins_h = s->window[s->strstart];
2712 UPDATE_HASH(s, s->ins_h, s->window[s->strstart+1]);
2713 #if MIN_MATCH != 3
2714 Call UPDATE_HASH() MIN_MATCH-3 more times
2715 #endif
2716 /* If lookahead < MIN_MATCH, ins_h is garbage, but it does not
2717 * matter since it will be recomputed at next deflate call.
2720 } else {
2721 /* No match, output a literal byte */
2722 Tracevv((stderr,"%c", s->window[s->strstart]));
2723 _tr_tally_lit (s, s->window[s->strstart], bflush);
2724 s->lookahead--;
2725 s->strstart++;
2727 if (bflush) FLUSH_BLOCK(s, 0);
2729 s->insert = s->strstart < MIN_MATCH-1 ? s->strstart : MIN_MATCH-1;
2730 if (flush == Z_FINISH) {
2731 FLUSH_BLOCK(s, 1);
2732 return finish_done;
2734 if (s->last_lit)
2735 FLUSH_BLOCK(s, 0);
2736 return block_done;
2739 /* ===========================================================================
2740 * Same as above, but achieves better compression. We use a lazy
2741 * evaluation for matches: a match is finally adopted only if there is
2742 * no better match at the next window position.
2744 static block_state deflate_slow( deflate_state *s, int flush )
2746 IPos hash_head; /* head of hash chain */
2747 int bflush; /* set if current block must be flushed */
2749 /* Process the input block. */
2750 for (;;) {
2751 /* Make sure that we always have enough lookahead, except
2752 * at the end of the input file. We need MAX_MATCH bytes
2753 * for the next match, plus MIN_MATCH bytes to insert the
2754 * string following the next match.
2756 if (s->lookahead < MIN_LOOKAHEAD) {
2757 fill_window(s);
2758 if (s->lookahead < MIN_LOOKAHEAD && flush == Z_NO_FLUSH) {
2759 return need_more;
2761 if (s->lookahead == 0) break; /* flush the current block */
2764 /* Insert the string window[strstart .. strstart+2] in the
2765 * dictionary, and set hash_head to the head of the hash chain:
2767 hash_head = NIL;
2768 if (s->lookahead >= MIN_MATCH) {
2769 INSERT_STRING(s, s->strstart, hash_head);
2772 /* Find the longest match, discarding those <= prev_length.
2774 s->prev_length = s->match_length, s->prev_match = s->match_start;
2775 s->match_length = MIN_MATCH-1;
2777 if (hash_head != NIL && s->prev_length < s->max_lazy_match &&
2778 s->strstart - hash_head <= MAX_DIST(s)) {
2779 /* To simplify the code, we prevent matches with the string
2780 * of window index 0 (in particular we have to avoid a match
2781 * of the string with itself at the start of the input file).
2783 s->match_length = longest_match (s, hash_head);
2784 /* longest_match() sets match_start */
2786 if (s->match_length <= 5 && (s->strategy == Z_FILTERED
2787 #if TOO_FAR <= 32767
2788 || (s->match_length == MIN_MATCH &&
2789 s->strstart - s->match_start > TOO_FAR)
2790 #endif
2791 )) {
2793 /* If prev_match is also MIN_MATCH, match_start is garbage
2794 * but we will ignore the current match anyway.
2796 s->match_length = MIN_MATCH-1;
2799 /* If there was a match at the previous step and the current
2800 * match is not better, output the previous match:
2802 if (s->prev_length >= MIN_MATCH && s->match_length <= s->prev_length) {
2803 uInt max_insert = s->strstart + s->lookahead - MIN_MATCH;
2804 /* Do not insert strings in hash table beyond this. */
2806 check_match(s, s->strstart-1, s->prev_match, s->prev_length);
2808 _tr_tally_dist(s, s->strstart -1 - s->prev_match,
2809 s->prev_length - MIN_MATCH, bflush);
2811 /* Insert in hash table all strings up to the end of the match.
2812 * strstart-1 and strstart are already inserted. If there is not
2813 * enough lookahead, the last two strings are not inserted in
2814 * the hash table.
2816 s->lookahead -= s->prev_length-1;
2817 s->prev_length -= 2;
2818 do {
2819 if (++s->strstart <= max_insert) {
2820 INSERT_STRING(s, s->strstart, hash_head);
2822 } while (--s->prev_length != 0);
2823 s->match_available = 0;
2824 s->match_length = MIN_MATCH-1;
2825 s->strstart++;
2827 if (bflush) FLUSH_BLOCK(s, 0);
2829 } else if (s->match_available) {
2830 /* If there was no match at the previous position, output a
2831 * single literal. If there was a match but the current match
2832 * is longer, truncate the previous match to a single literal.
2834 Tracevv((stderr,"%c", s->window[s->strstart-1]));
2835 _tr_tally_lit(s, s->window[s->strstart-1], bflush);
2836 if (bflush) {
2837 FLUSH_BLOCK_ONLY(s, 0);
2839 s->strstart++;
2840 s->lookahead--;
2841 if (s->strm->avail_out == 0) return need_more;
2842 } else {
2843 /* There is no previous match to compare with, wait for
2844 * the next step to decide.
2846 s->match_available = 1;
2847 s->strstart++;
2848 s->lookahead--;
2851 Assert (flush != Z_NO_FLUSH, "no flush?");
2852 if (s->match_available) {
2853 Tracevv((stderr,"%c", s->window[s->strstart-1]));
2854 _tr_tally_lit(s, s->window[s->strstart-1], bflush);
2855 s->match_available = 0;
2857 s->insert = s->strstart < MIN_MATCH-1 ? s->strstart : MIN_MATCH-1;
2858 if (flush == Z_FINISH) {
2859 FLUSH_BLOCK(s, 1);
2860 return finish_done;
2862 if (s->last_lit)
2863 FLUSH_BLOCK(s, 0);
2864 return block_done;
2867 /* ===========================================================================
2868 * For Z_RLE, simply look for runs of bytes, generate matches only of distance
2869 * one. Do not maintain a hash table. (It will be regenerated if this run of
2870 * deflate switches away from Z_RLE.)
2872 static block_state deflate_rle( deflate_state *s, int flush )
2874 int bflush; /* set if current block must be flushed */
2875 uInt prev; /* byte at distance one to match */
2876 Bytef *scan, *strend; /* scan goes up to strend for length of run */
2878 for (;;) {
2879 /* Make sure that we always have enough lookahead, except
2880 * at the end of the input file. We need MAX_MATCH bytes
2881 * for the longest run, plus one for the unrolled loop.
2883 if (s->lookahead <= MAX_MATCH) {
2884 fill_window(s);
2885 if (s->lookahead <= MAX_MATCH && flush == Z_NO_FLUSH) {
2886 return need_more;
2888 if (s->lookahead == 0) break; /* flush the current block */
2891 /* See how many times the previous byte repeats */
2892 s->match_length = 0;
2893 if (s->lookahead >= MIN_MATCH && s->strstart > 0) {
2894 scan = s->window + s->strstart - 1;
2895 prev = *scan;
2896 if (prev == *++scan && prev == *++scan && prev == *++scan) {
2897 strend = s->window + s->strstart + MAX_MATCH;
2898 do {
2899 } while (prev == *++scan && prev == *++scan &&
2900 prev == *++scan && prev == *++scan &&
2901 prev == *++scan && prev == *++scan &&
2902 prev == *++scan && prev == *++scan &&
2903 scan < strend);
2904 s->match_length = MAX_MATCH - (uInt)(strend - scan);
2905 if (s->match_length > s->lookahead)
2906 s->match_length = s->lookahead;
2908 Assert(scan <= s->window+(uInt)(s->window_size-1), "wild scan");
2911 /* Emit match if have run of MIN_MATCH or longer, else emit literal */
2912 if (s->match_length >= MIN_MATCH) {
2913 check_match(s, s->strstart, s->strstart - 1, s->match_length);
2915 _tr_tally_dist(s, 1, s->match_length - MIN_MATCH, bflush);
2917 s->lookahead -= s->match_length;
2918 s->strstart += s->match_length;
2919 s->match_length = 0;
2920 } else {
2921 /* No match, output a literal byte */
2922 Tracevv((stderr,"%c", s->window[s->strstart]));
2923 _tr_tally_lit (s, s->window[s->strstart], bflush);
2924 s->lookahead--;
2925 s->strstart++;
2927 if (bflush) FLUSH_BLOCK(s, 0);
2929 s->insert = 0;
2930 if (flush == Z_FINISH) {
2931 FLUSH_BLOCK(s, 1);
2932 return finish_done;
2934 if (s->last_lit)
2935 FLUSH_BLOCK(s, 0);
2936 return block_done;
2939 /* ===========================================================================
2940 * For Z_HUFFMAN_ONLY, do not look for matches. Do not maintain a hash table.
2941 * (It will be regenerated if this run of deflate switches away from Huffman.)
2943 static block_state deflate_huff( deflate_state *s, int flush )
2945 int bflush; /* set if current block must be flushed */
2947 for (;;) {
2948 /* Make sure that we have a literal to write. */
2949 if (s->lookahead == 0) {
2950 fill_window(s);
2951 if (s->lookahead == 0) {
2952 if (flush == Z_NO_FLUSH)
2953 return need_more;
2954 break; /* flush the current block */
2958 /* Output a literal byte */
2959 s->match_length = 0;
2960 Tracevv((stderr,"%c", s->window[s->strstart]));
2961 _tr_tally_lit (s, s->window[s->strstart], bflush);
2962 s->lookahead--;
2963 s->strstart++;
2964 if (bflush) FLUSH_BLOCK(s, 0);
2966 s->insert = 0;
2967 if (flush == Z_FINISH) {
2968 FLUSH_BLOCK(s, 1);
2969 return finish_done;
2971 if (s->last_lit)
2972 FLUSH_BLOCK(s, 0);
2973 return block_done;