/*
 * Most parts of this file are not covered by:
 * ----------------------------------------------------------------------------
 * "THE BEER-WARE LICENSE" (Revision 42):
 * <phk@login.dknet.dk> wrote this file.  As long as you retain this notice you
 * can do whatever you want with this stuff. If we meet some day, and you think
 * this stuff is worth it, you can buy me a beer in return.   Poul-Henning Kamp
 * ----------------------------------------------------------------------------
 *
 * $FreeBSD: src/sys/kern/inflate.c,v 1.14 1999/12/29 04:54:39 peter Exp $
 * $DragonFly: src/sys/kern/inflate.c,v 1.4 2003/08/26 21:09:02 rob Exp $
 */

#include <sys/param.h>
#include <sys/inflate.h>
#ifdef _KERNEL
#include <sys/systm.h>
#include <sys/kernel.h>
#endif
#include <sys/malloc.h>

#ifdef _KERNEL
static MALLOC_DEFINE(M_GZIP, "Gzip trees", "Gzip trees");
#endif

/* needed to make inflate() work */
#define uch u_char
#define ush u_short
#define ulg u_long

/* Stuff to make inflate() work */
#ifdef _KERNEL
#define memzero(dest,len)       bzero(dest,len)
#endif
#define NOMEMCPY
#ifdef _KERNEL
#define FPRINTF printf
#else
extern void putstr (char *);
#define FPRINTF putstr
#endif

#define FLUSH(x,y) {                                            \
        int foo = (*x->gz_output)(x->gz_private,x->gz_slide,y); \
        if (foo)                                                \
                return foo;                                     \
        }

static const int qflag = 0;

#ifndef _KERNEL                 /* want to use this file in kzip also */
extern unsigned char *kzipmalloc (int);
extern void kzipfree (void*);
#define malloc(x, y, z) kzipmalloc((x))
#define free(x, y) kzipfree((x))
#endif

/*
 * This came from unzip-5.12.  I have changed the flow to pass
 * a structure pointer around, thus hopefully making it re-entrant.
 * Poul-Henning
 */

/* inflate.c -- put in the public domain by Mark Adler
   version c14o, 23 August 1994 */

/* You can do whatever you like with this source file, though I would
   prefer that if you modify it and redistribute it that you include
   comments to that effect with your name and the date.  Thank you.

   History:
   vers    date          who           what
   ----  ---------  --------------  ------------------------------------
    a    ~~ Feb 92  M. Adler        used full (large, one-step) lookup table
    b1   21 Mar 92  M. Adler        first version with partial lookup tables
    b2   21 Mar 92  M. Adler        fixed bug in fixed-code blocks
    b3   22 Mar 92  M. Adler        sped up match copies, cleaned up some
    b4   25 Mar 92  M. Adler        added prototypes; removed window[] (now
                                    is the responsibility of unzip.h--also
                                    changed name to slide[]), so needs diffs
                                    for unzip.c and unzip.h (this allows
                                    compiling in the small model on MSDOS);
                                    fixed cast of q in huft_build();
    b5   26 Mar 92  M. Adler        got rid of unintended macro recursion.
    b6   27 Mar 92  M. Adler        got rid of nextbyte() routine.  fixed
                                    bug in inflate_fixed().
    c1   30 Mar 92  M. Adler        removed lbits, dbits environment variables.
                                    changed BMAX to 16 for explode.  Removed
                                    OUTB usage, and replaced it with flush()--
                                    this was a 20% speed improvement!  Added
                                    an explode.c (to replace unimplod.c) that
                                    uses the huft routines here.  Removed
                                    register union.
    c2    4 Apr 92  M. Adler        fixed bug for file sizes a multiple of 32k.
    c3   10 Apr 92  M. Adler        reduced memory of code tables made by
                                    huft_build significantly (factor of two to
                                    three).
    c4   15 Apr 92  M. Adler        added NOMEMCPY to kill use of memcpy().
                                    worked around a Turbo C optimization bug.
    c5   21 Apr 92  M. Adler        added the GZ_WSIZE #define to allow reducing
                                    the 32K window size for specialized
                                    applications.
    c6   31 May 92  M. Adler        added some typecasts to eliminate warnings
    c7   27 Jun 92  G. Roelofs      added some more typecasts (444:  MSC bug).
    c8    5 Oct 92  J-l. Gailly     added ifdef'd code to deal with PKZIP bug.
    c9    9 Oct 92  M. Adler        removed a memory error message (~line 416).
    c10  17 Oct 92  G. Roelofs      changed ULONG/UWORD/byte to ulg/ush/uch,
                                    removed old inflate, renamed inflate_entry
                                    to inflate, added Mark's fix to a comment.
   c10.5 14 Dec 92  M. Adler        fix up error messages for incomplete trees.
    c11   2 Jan 93  M. Adler        fixed bug in detection of incomplete
                                    tables, and removed assumption that EOB is
                                    the longest code (bad assumption).
    c12   3 Jan 93  M. Adler        make tables for fixed blocks only once.
    c13   5 Jan 93  M. Adler        allow all zero length codes (pkzip 2.04c
                                    outputs one zero length code for an empty
                                    distance tree).
    c14  12 Mar 93  M. Adler        made inflate.c standalone with the
                                    introduction of inflate.h.
   c14b  16 Jul 93  G. Roelofs      added (unsigned) typecast to w at 470.
   c14c  19 Jul 93  J. Bush         changed v[N_MAX], l[288], ll[28x+3x] arrays
                                    to static for Amiga.
   c14d  13 Aug 93  J-l. Gailly     de-complicatified Mark's c[*p++]++ thing.
   c14e   8 Oct 93  G. Roelofs      changed memset() to memzero().
   c14f  22 Oct 93  G. Roelofs      renamed quietflg to qflag; made Trace()
                                    conditional; added inflate_free().
   c14g  28 Oct 93  G. Roelofs      changed l/(lx+1) macro to pointer (Cray bug)
   c14h   7 Dec 93  C. Ghisler      huft_build() optimizations.
   c14i   9 Jan 94  A. Verheijen    set fixed_t{d,l} to NULL after freeing;
                    G. Roelofs      check NEXTBYTE macro for GZ_EOF.
   c14j  23 Jan 94  G. Roelofs      removed Ghisler "optimizations"; ifdef'd
                                    GZ_EOF check.
   c14k  27 Feb 94  G. Roelofs      added some typecasts to avoid warnings.
   c14l   9 Apr 94  G. Roelofs      fixed split comments on preprocessor lines
                                    to avoid bug in Encore compiler.
   c14m   7 Jul 94  P. Kienitz      modified to allow assembler version of
                                    inflate_codes() (define ASM_INFLATECODES)
   c14n  22 Jul 94  G. Roelofs      changed fprintf to FPRINTF for DLL versions
   c14o  23 Aug 94  C. Spieler      added a newline to a debug statement;
                    G. Roelofs      added another typecast to avoid MSC warning
 */

/*
   Inflate deflated (PKZIP's method 8 compressed) data.  The compression
   method searches for as much of the current string of bytes (up to a
   length of 258) in the previous 32K bytes.  If it doesn't find any
   matches (of at least length 3), it codes the next byte.  Otherwise, it
   codes the length of the matched string and its distance backwards from
   the current position.  There is a single Huffman code that codes both
   single bytes (called "literals") and match lengths.  A second Huffman
   code codes the distance information, which follows a length code.  Each
   length or distance code actually represents a base value and a number
   of "extra" (sometimes zero) bits to add to the base value.  At
   the end of each deflated block is a special end-of-block (EOB) literal/
   length code.  The decoding process is basically: get a literal/length
   code; if EOB then done; if a literal, emit the decoded byte; if a
   length then get the distance and emit the referred-to bytes from the
   sliding window of previously emitted data.

   There are (currently) three kinds of inflate blocks: stored, fixed, and
   dynamic.  The compressor outputs a chunk of data at a time and decides
   which method to use on a chunk-by-chunk basis.  A chunk might typically
   be 32K to 64K, uncompressed.  If the chunk is uncompressible, then the
   "stored" method is used.  In this case, the bytes are simply stored as
   is, eight bits per byte, with none of the above coding.  The bytes are
   preceded by a count, since there is no longer an EOB code.

   If the data is compressible, then either the fixed or dynamic methods
   are used.  In the dynamic method, the compressed data is preceded by
   an encoding of the literal/length and distance Huffman codes that are
   to be used to decode this block.  The representation is itself Huffman
   coded, and so is preceded by a description of that code.  These code
   descriptions take up a little space, and so for small blocks, there is
   a predefined set of codes, called the fixed codes.  The fixed method is
   used if the block ends up smaller that way (usually for quite small
   chunks); otherwise the dynamic method is used.  In the latter case, the
   codes are customized to the probabilities in the current block and so
   can code it much better than the pre-determined fixed codes can.

   The Huffman codes themselves are decoded using a multi-level table
   lookup, in order to maximize the speed of decoding plus the speed of
   building the decoding tables.  See the comments below that precede the
   lbits and dbits tuning parameters.
 */

/*
   Notes beyond the 1.93a appnote.txt:

   1. Distance pointers never point before the beginning of the output
      stream.
   2. Distance pointers can point back across blocks, up to 32k away.
   3. There is an implied maximum of 7 bits for the bit length table and
      15 bits for the actual data.
   4. If only one code exists, then it is encoded using one bit.  (Zero
      would be more efficient, but perhaps a little confusing.)  If two
      codes exist, they are coded using one bit each (0 and 1).
   5. There is no way of sending zero distance codes--a dummy must be
      sent if there are none.  (History: a pre 2.0 version of PKZIP would
      store blocks with no distance codes, but this was discovered to be
      too harsh a criterion.)  Valid only for 1.93a.  2.04c does allow
      zero distance codes, which is sent as one code of zero bits in
      length.
   6. There are up to 286 literal/length codes.  Code 256 represents the
      end-of-block.  Note however that the static length tree defines
      288 codes just to fill out the Huffman codes.  Codes 286 and 287
      cannot be used though, since there is no length base or extra bits
      defined for them.  Similarly, there are up to 30 distance codes.
      However, static trees define 32 codes (all 5 bits) to fill out the
      Huffman codes, but the last two had better not show up in the data.
   7. Unzip can check dynamic Huffman blocks for complete code sets.
      The exception is that a single code would not be complete (see #4).
   8. The five bits following the block type is really the number of
      literal codes sent minus 257.
   9. Length codes 8,16,16 are interpreted as 13 length codes of 8 bits
      (1+6+6).  Therefore, to output three times the length, you output
      three codes (1+1+1), whereas to output four times the same length,
      you only need two codes (1+3).  Hmm.
  10. In the tree reconstruction algorithm, Code = Code + Increment
      only if BitLength(i) is not zero.  (Pretty obvious.)
  11. Correction: 4 Bits: # of Bit Length codes - 4     (4 - 19)
  12. Note: length code 284 can represent 227-258, but length code 285
      really is 258.  The last length deserves its own, short code
      since it gets used a lot in very redundant files.  The length
      258 is special since 258 - 3 (the min match length) is 255.
  13. The literal/length and distance code bit lengths are read as a
      single stream of lengths.  It is possible (and advantageous) for
      a repeat code (16, 17, or 18) to go across the boundary between
      the two sets of lengths.
 */

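/*
 * Illustrative sketch (not part of the original source): the core LZ77 step
 * described above--copy "length" bytes from "distance" bytes back in the
 * already-emitted output--can be written as a plain byte-by-byte loop.  The
 * standalone, hypothetical function below assumes a flat output buffer;
 * inflate_codes() further down does the same thing against the circular
 * 32K slide[] window with FLUSH() calls in between.
 */
#if 0   /* example only, never compiled */
static void
lz77_copy_example(unsigned char *out, unsigned pos, unsigned dist, unsigned len)
{
        /* Copy byte by byte so that overlapping matches (dist < len)
           correctly reuse bytes that this very copy just produced. */
        while (len--) {
                out[pos] = out[pos - dist];
                pos++;
        }
}
#endif
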
#define PKZIP_BUG_WORKAROUND    /* PKZIP 1.93a problem--live with it */

/*
    inflate.h must supply the uch slide[GZ_WSIZE] array and the NEXTBYTE,
    FLUSH() and memzero macros.  If the window size is not 32K, it
    should also define GZ_WSIZE.  If INFMOD is defined, it can include
    compiled functions to support the NEXTBYTE and/or FLUSH() macros.
    There are defaults for NEXTBYTE and FLUSH() below for use as
    examples of what those functions need to do.  Normally, you would
    also want FLUSH() to compute a crc on the data.  inflate.h also
    needs to provide these typedefs:

    typedef unsigned char uch;
    typedef unsigned short ush;
    typedef unsigned long ulg;

    This module uses the external functions malloc() and free() (and
    probably memset() or bzero() in the memzero() macro).  Their
    prototypes are normally found in <string.h> and <stdlib.h>.
 */
#define INFMOD                  /* tell inflate.h to include code to be compiled */

/* Huffman code lookup table entry--this entry is four bytes for machines
   that have 16-bit pointers (e.g. PC's in the small or medium model).
   Valid extra bits are 0..13.  e == 15 is EOB (end of block), e == 16
   means that v is a literal, 16 < e < 32 means that v is a pointer to
   the next table, which codes e - 16 bits, and lastly e == 99 indicates
   an unused code.  If a code with e == 99 is looked up, this implies an
   error in the data. */
struct huft {
        uch e;                  /* number of extra bits or operation */
        uch b;                  /* number of bits in this code or subcode */
        union {
                ush n;          /* literal, length base, or distance base */
                struct huft *t; /* pointer to next level of table */
        } v;
};

/* Function prototypes */
static int huft_build (struct inflate *, unsigned *, unsigned, unsigned, const ush *, const ush *, struct huft **, int *);
static int huft_free (struct inflate *, struct huft *);
static int inflate_codes (struct inflate *, struct huft *, struct huft *, int, int);
static int inflate_stored (struct inflate *);
static int xinflate (struct inflate *);
static int inflate_fixed (struct inflate *);
static int inflate_dynamic (struct inflate *);
static int inflate_block (struct inflate *, int *);

/* The inflate algorithm uses a sliding 32K byte window on the uncompressed
   stream to find repeated byte strings.  This is implemented here as a
   circular buffer.  The index is updated simply by incrementing and then
   and'ing with 0x7fff (32K-1). */
/* It is left to other modules to supply the 32K area.  It is assumed
   to be usable as if it were declared "uch slide[32768];" or as just
   "uch *slide;" and then malloc'ed in the latter case.  The definition
   must be in unzip.h, included above. */

/* Tables for deflate from PKZIP's appnote.txt. */

/* Order of the bit length code lengths */
static const unsigned border[] = {
        16, 17, 18, 0, 8, 7, 9, 6, 10, 5, 11, 4, 12, 3, 13, 2, 14, 1, 15};

static const ush cplens[] = {   /* Copy lengths for literal codes 257..285 */
        3, 4, 5, 6, 7, 8, 9, 10, 11, 13, 15, 17, 19, 23, 27, 31,
        35, 43, 51, 59, 67, 83, 99, 115, 131, 163, 195, 227, 258, 0, 0};
        /* note: see note #13 above about the 258 in this list. */

static const ush cplext[] = {   /* Extra bits for literal codes 257..285 */
        0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 2, 2, 2, 2,
        3, 3, 3, 3, 4, 4, 4, 4, 5, 5, 5, 5, 0, 99, 99}; /* 99==invalid */

static const ush cpdist[] = {   /* Copy offsets for distance codes 0..29 */
        1, 2, 3, 4, 5, 7, 9, 13, 17, 25, 33, 49, 65, 97, 129, 193,
        257, 385, 513, 769, 1025, 1537, 2049, 3073, 4097, 6145,
        8193, 12289, 16385, 24577};

static const ush cpdext[] = {   /* Extra bits for distance codes */
        0, 0, 0, 0, 1, 1, 2, 2, 3, 3, 4, 4, 5, 5, 6, 6,
        7, 7, 8, 8, 9, 9, 10, 10, 11, 11,
        12, 12, 13, 13};

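/*
 * Illustrative sketch (not part of the original source): how the tables
 * above turn a length code plus its extra bits into an actual match length.
 * For example, literal/length code 265 has base cplens[265 - 257] == 11 and
 * cplext[265 - 257] == 1 extra bit, so it encodes lengths 11 or 12.  The
 * hypothetical helper below spells out that arithmetic; inflate_codes()
 * performs the same computation with NEEDBITS()/DUMPBITS() on the live
 * bit stream.
 */
#if 0   /* example only, never compiled */
static unsigned
match_length_example(unsigned code, unsigned extra_bits_value)
{
        /* code is a literal/length code in 257..285; extra_bits_value is the
           value read from the next cplext[code - 257] bits of the stream. */
        return cplens[code - 257] + extra_bits_value;
}
#endif
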
/* And'ing with mask[n] masks the lower n bits */
static const ush mask[] = {
        0x0000,
        0x0001, 0x0003, 0x0007, 0x000f, 0x001f, 0x003f, 0x007f, 0x00ff,
        0x01ff, 0x03ff, 0x07ff, 0x0fff, 0x1fff, 0x3fff, 0x7fff, 0xffff
};

/* Macros for inflate() bit peeking and grabbing.
   The usage is:

        NEEDBITS(glbl,j)
        x = b & mask[j];
        DUMPBITS(j)

   where NEEDBITS makes sure that b has at least j bits in it, and
   DUMPBITS removes the bits from b.  The macros use the variable k
   for the number of bits in b.  Normally, b and k are register
   variables for speed, and are initialized at the beginning of a
   routine that uses these macros from a global bit buffer and count.

   In order to not ask for more bits than there are in the compressed
   stream, the Huffman tables are constructed to only ask for just
   enough bits to make up the end-of-block code (value 256).  Then no
   bytes need to be "returned" to the buffer at the end of the last
   block.  See the huft_build() routine.
 */

/*
 * The following 2 were global variables.
 * They are now fields of the inflate structure.
 */

#define NEEDBITS(glbl,n) {                                      \
        while(k<(n)) {                                          \
                int c=(*glbl->gz_input)(glbl->gz_private);      \
                if(c==GZ_EOF)                                   \
                        return 1;                               \
                b|=((ulg)c)<<k;                                 \
                k+=8;                                           \
        }                                                       \
}

#define DUMPBITS(n) {b>>=(n);k-=(n);}

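/*
 * Illustrative sketch (not part of the original source): deflate packs its
 * fields least-significant-bit first, which is why NEEDBITS() above ors each
 * new input byte in at bit position k and DUMPBITS() shifts consumed bits
 * off the bottom.  The standalone, hypothetical reader below shows the same
 * technique against a plain byte array instead of the gz_input callback.
 */
#if 0   /* example only, never compiled */
static unsigned
getbits_example(const unsigned char *in, unsigned *bytepos,
                unsigned long *bitbuf, unsigned *bitcnt, unsigned n)
{
        unsigned val;

        while (*bitcnt < n) {                   /* refill: NEEDBITS() */
                *bitbuf |= (unsigned long)in[(*bytepos)++] << *bitcnt;
                *bitcnt += 8;
        }
        val = (unsigned)*bitbuf & mask[n];      /* peek the low n bits */
        *bitbuf >>= n;                          /* discard: DUMPBITS() */
        *bitcnt -= n;
        return val;
}
#endif
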
/*
   Huffman code decoding is performed using a multi-level table lookup.
   The fastest way to decode is to simply build a lookup table whose
   size is determined by the longest code.  However, the time it takes
   to build this table can also be a factor if the data being decoded
   is not very long.  The most common codes are necessarily the
   shortest codes, so those codes dominate the decoding time, and hence
   the speed.  The idea is you can have a shorter table that decodes the
   shorter, more probable codes, and then point to subsidiary tables for
   the longer codes.  The time it costs to decode the longer codes is
   then traded against the time it takes to make longer tables.

   The results of this trade are in the variables lbits and dbits
   below.  lbits is the number of bits the first level table for literal/
   length codes can decode in one step, and dbits is the same thing for
   the distance codes.  Subsequent tables are also less than or equal to
   those sizes.  These values may be adjusted either when all of the
   codes are shorter than that, in which case the longest code length in
   bits is used, or when the shortest code is *longer* than the requested
   table size, in which case the length of the shortest code in bits is
   used.

   There are two different values for the two tables, since they code a
   different number of possibilities each.  The literal/length table
   codes 286 possible values, or in a flat code, a little over eight
   bits.  The distance table codes 30 possible values, or a little less
   than five bits, flat.  The optimum values for speed end up being
   about one bit more than those, so lbits is 8+1 and dbits is 5+1.
   The optimum values may differ though from machine to machine, and
   possibly even between compilers.  Your mileage may vary.
 */

static const int lbits = 9;     /* bits in base literal/length lookup table */
static const int dbits = 6;     /* bits in base distance lookup table */

/* If BMAX needs to be larger than 16, then h and x[] should be ulg. */
#define BMAX 16                 /* maximum bit length of any code (16 for explode) */
#define N_MAX 288               /* maximum number of codes in any set */

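/*
 * Illustrative sketch (not part of the original source): huft_build() below
 * reconstructs canonical Huffman codes purely from the per-symbol code
 * lengths, the reconstruction referred to in note 10 above.  The hypothetical
 * helper here shows the count/offset arithmetic in its simplest form: count
 * codes per length, derive the first code of each length, then hand out
 * consecutive codes.  huft_build() does this while also packing the results
 * into its multi-level lookup tables and walking the codes in the
 * bit-reversed order that the LSB-first bit stream requires.
 */
#if 0   /* example only, never compiled */
static void
canonical_codes_example(const unsigned *len, unsigned n, unsigned *code)
{
        unsigned count[BMAX + 1];       /* codes per bit length */
        unsigned next[BMAX + 1];        /* next code to assign per length */
        unsigned i, j;

        for (i = 0; i <= BMAX; i++)
                count[i] = 0;
        for (i = 0; i < n; i++)
                count[len[i]]++;
        count[0] = 0;                   /* length 0 means "symbol unused" */
        for (next[1] = 0, j = 1; j < BMAX; j++)
                next[j + 1] = (next[j] + count[j]) << 1;
        for (i = 0; i < n; i++)
                if (len[i] != 0)
                        code[i] = next[len[i]]++;  /* assign in symbol order */
}
#endif
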
/* Given a list of code lengths and a maximum table size, make a set of
   tables to decode that set of codes.  Return zero on success, one if
   the given code set is incomplete (the tables are still built in this
   case), two if the input is invalid (all zero length codes or an
   oversubscribed set of lengths), and three if not enough memory.
   The code with value 256 is special, and the tables are constructed
   so that no bits beyond that code are fetched when that code is
   decoded. */
static int
huft_build(glbl, b, n, s, d, e, t, m)
        struct inflate *glbl;
        unsigned *b;            /* code lengths in bits (all assumed <= BMAX) */
        unsigned n;             /* number of codes (assumed <= N_MAX) */
        unsigned s;             /* number of simple-valued codes (0..s-1) */
        const ush *d;           /* list of base values for non-simple codes */
        const ush *e;           /* list of extra bits for non-simple codes */
        struct huft **t;        /* result: starting table */
        int *m;                 /* maximum lookup bits, returns actual */
{
        unsigned a;             /* counter for codes of length k */
        unsigned c[BMAX + 1];   /* bit length count table */
        unsigned el;            /* length of EOB code (value 256) */
        unsigned f;             /* i repeats in table every f entries */
        int g;                  /* maximum code length */
        int h;                  /* table level */
        unsigned i;             /* counter, current code */
        unsigned j;             /* counter */
        int k;                  /* number of bits in current code */
        int lx[BMAX + 1];       /* memory for l[-1..BMAX-1] */
        int *l = lx + 1;        /* stack of bits per table */
        unsigned *p;            /* pointer into c[], b[], or v[] */
        struct huft *q;         /* points to current table */
        struct huft r;          /* table entry for structure assignment */
        struct huft *u[BMAX];   /* table stack */
        unsigned v[N_MAX];      /* values in order of bit length */
        int w;                  /* bits before this table == (l * h) */
        unsigned x[BMAX + 1];   /* bit offsets, then code stack */
        unsigned *xp;           /* pointer into x */
        int y;                  /* number of dummy codes added */
        unsigned z;             /* number of entries in current table */

        /* Generate counts for each bit length */
        el = n > 256 ? b[256] : BMAX;   /* set length of EOB code, if any */
#ifdef _KERNEL
        memzero((char *) c, sizeof(c));
#else
        for (i = 0; i < BMAX+1; i++)
                c [i] = 0;
#endif
        p = b;
        i = n;
        do {
                c[*p]++;
                p++;            /* assume all entries <= BMAX */
        } while (--i);
        if (c[0] == n) {        /* null input--all zero length codes */
                *t = (struct huft *) NULL;
                *m = 0;
                return 0;
        }

        /* Find minimum and maximum length, bound *m by those */
        for (j = 1; j <= BMAX; j++)
                if (c[j])
                        break;
        k = j;                  /* minimum code length */
        if ((unsigned) *m < j)
                *m = j;
        for (i = BMAX; i; i--)
                if (c[i])
                        break;
        g = i;                  /* maximum code length */
        if ((unsigned) *m > i)
                *m = i;

        /* Adjust last length count to fill out codes, if needed */
        for (y = 1 << j; j < i; j++, y <<= 1)
                if ((y -= c[j]) < 0)
                        return 2;       /* bad input: more codes than bits */
        if ((y -= c[i]) < 0)
                return 2;
        c[i] += y;

        /* Generate starting offsets into the value table for each length */
        x[1] = j = 0;
        p = c + 1;
        xp = x + 2;
        while (--i) {           /* note that i == g from above */
                *xp++ = (j += *p++);
        }

        /* Make a table of values in order of bit lengths */
        p = b;
        i = 0;
        do {
                if ((j = *p++) != 0)
                        v[x[j]++] = i;
        } while (++i < n);

        /* Generate the Huffman codes and for each, make the table entries */
        x[0] = i = 0;           /* first Huffman code is zero */
        p = v;                  /* grab values in bit order */
        h = -1;                 /* no tables yet--level -1 */
        w = l[-1] = 0;          /* no bits decoded yet */
        u[0] = (struct huft *) NULL;    /* just to keep compilers happy */
        q = (struct huft *) NULL;       /* ditto */
        z = 0;                  /* ditto */

        /* go through the bit lengths (k already is bits in shortest code) */
        for (; k <= g; k++) {
                a = c[k];
                while (a--) {
                        /*
                         * here i is the Huffman code of length k bits for
                         * value *p
                         */
                        /* make tables up to required level */
                        while (k > w + l[h]) {
                                w += l[h++];    /* add bits already decoded */

                                /*
                                 * compute minimum size table less than or
                                 * equal to *m bits
                                 */
                                z = (z = g - w) > (unsigned) *m ? *m : z;       /* upper limit */
                                if ((f = 1 << (j = k - w)) > a + 1) {   /* try a k-w bit table;
                                                                         * too few codes for k-w
                                                                         * bit table */
                                        f -= a + 1;     /* deduct codes from patterns left */
                                        xp = c + k;
                                        while (++j < z) {       /* try smaller tables up to z bits */
                                                if ((f <<= 1) <= *++xp)
                                                        break;  /* enough codes to use up j bits */
                                                f -= *xp;       /* else deduct codes from patterns */
                                        }
                                }
                                if ((unsigned) w + j > el && (unsigned) w < el)
                                        j = el - w;     /* make EOB code end at table */
                                z = 1 << j;     /* table entries for j-bit table */
                                l[h] = j;       /* set table size in stack */

                                /* allocate and link in new table */
                                if ((q = (struct huft *) malloc((z + 1) * sizeof(struct huft), M_GZIP, M_WAITOK)) ==
                                    (struct huft *) NULL) {
                                        if (h)
                                                huft_free(glbl, u[0]);
                                        return 3;       /* not enough memory */
                                }
                                glbl->gz_hufts += z + 1;        /* track memory usage */
                                *t = q + 1;     /* link to list for huft_free() */
                                *(t = &(q->v.t)) = (struct huft *) NULL;
                                u[h] = ++q;     /* table starts after link */

                                /* connect to last table, if there is one */
                                if (h) {
                                        x[h] = i;       /* save pattern for backing up */
                                        r.b = (uch) l[h - 1];   /* bits to dump before this table */
                                        r.e = (uch) (16 + j);   /* bits in this table */
                                        r.v.t = q;      /* pointer to this table */
                                        j = (i & ((1 << w) - 1)) >> (w - l[h - 1]);
                                        u[h - 1][j] = r;        /* connect to last table */
                                }
                        }

                        /* set up table entry in r */
                        r.b = (uch) (k - w);
                        if (p >= v + n)
                                r.e = 99;       /* out of values--invalid code */
                        else if (*p < s) {
                                r.e = (uch) (*p < 256 ? 16 : 15);       /* 256 is end-of-block code */
                                r.v.n = *p++;   /* simple code is just the value */
                        } else {
                                r.e = (uch) e[*p - s];  /* non-simple--look up in lists */
                                r.v.n = d[*p++ - s];
                        }

                        /* fill code-like entries with r */
                        f = 1 << (k - w);
                        for (j = i >> w; j < z; j += f)
                                q[j] = r;

                        /* backwards increment the k-bit code i */
                        for (j = 1 << (k - 1); i & j; j >>= 1)
                                i ^= j;
                        i ^= j;

                        /* backup over finished tables */
                        while ((i & ((1 << w) - 1)) != x[h])
                                w -= l[--h];    /* don't need to update q */
                }
        }

        /* return actual size of base table */
        *m = l[0];

        /* Return true (1) if we were given an incomplete table */
        return y != 0 && g != 1;
}

static int
huft_free(glbl, t)
        struct inflate *glbl;
        struct huft *t;         /* table to free */
/* Free the malloc'ed tables built by huft_build(), which makes a linked
   list of the tables it made, with the links in a dummy first entry of
   each table. */
{
        struct huft *p, *q;

        /* Go through linked list, freeing from the malloced (t[-1]) address. */
        p = t;
        while (p != (struct huft *) NULL) {
                q = (--p)->v.t;
                free(p, M_GZIP);
                p = q;
        }
        return 0;
}

/* inflate (decompress) the codes in a deflated (compressed) block.
   Return an error code or zero if it all goes ok. */
static int
inflate_codes(glbl, tl, td, bl, bd)
        struct inflate *glbl;
        struct huft *tl, *td;   /* literal/length and distance decoder tables */
        int bl, bd;             /* number of bits decoded by tl[] and td[] */
{
        unsigned e;             /* table entry flag/number of extra bits */
        unsigned n, d;          /* length and index for copy */
        unsigned w;             /* current window position */
        struct huft *t;         /* pointer to table entry */
        unsigned ml, md;        /* masks for bl and bd bits */
        ulg b;                  /* bit buffer */
        unsigned k;             /* number of bits in bit buffer */

        /* make local copies of globals */
        b = glbl->gz_bb;        /* initialize bit buffer */
        k = glbl->gz_bk;
        w = glbl->gz_wp;        /* initialize window position */

        /* inflate the coded data */
        ml = mask[bl];          /* precompute masks for speed */
        md = mask[bd];
        while (1) {             /* do until end of block */
                NEEDBITS(glbl, (unsigned) bl)
                if ((e = (t = tl + ((unsigned) b & ml))->e) > 16)
                        do {
                                if (e == 99)
                                        return 1;
                                DUMPBITS(t->b)
                                e -= 16;
                                NEEDBITS(glbl, e)
                        } while ((e = (t = t->v.t + ((unsigned) b & mask[e]))->e) > 16);
                DUMPBITS(t->b)
                if (e == 16) {  /* then it's a literal */
                        glbl->gz_slide[w++] = (uch) t->v.n;
                        if (w == GZ_WSIZE) {
                                FLUSH(glbl, w);
                                w = 0;
                        }
                } else {        /* it's an EOB or a length */
                        /* exit if end of block */
                        if (e == 15)
                                break;

                        /* get length of block to copy */
                        NEEDBITS(glbl, e)
                        n = t->v.n + ((unsigned) b & mask[e]);
                        DUMPBITS(e);

                        /* decode distance of block to copy */
                        NEEDBITS(glbl, (unsigned) bd)
                        if ((e = (t = td + ((unsigned) b & md))->e) > 16)
                                do {
                                        if (e == 99)
                                                return 1;
                                        DUMPBITS(t->b)
                                        e -= 16;
                                        NEEDBITS(glbl, e)
                                } while ((e = (t = t->v.t + ((unsigned) b & mask[e]))->e) > 16);
                        DUMPBITS(t->b)
                        NEEDBITS(glbl, e)
                        d = w - t->v.n - ((unsigned) b & mask[e]);
                        DUMPBITS(e)
                        /* do the copy */
                        do {
                                n -= (e = (e = GZ_WSIZE - ((d &= GZ_WSIZE - 1) > w ? d : w)) > n ? n : e);
#ifndef NOMEMCPY
                                if (w - d >= e) {       /* (this test assumes unsigned comparison) */
                                        memcpy(glbl->gz_slide + w, glbl->gz_slide + d, e);
                                        w += e;
                                        d += e;
                                } else                  /* do it slow to avoid memcpy() overlap */
#endif                          /* !NOMEMCPY */
                                        do {
                                                glbl->gz_slide[w++] = glbl->gz_slide[d++];
                                        } while (--e);
                                if (w == GZ_WSIZE) {
                                        FLUSH(glbl, w);
                                        w = 0;
                                }
                        } while (n);
                }
        }

        /* restore the globals from the locals */
        glbl->gz_wp = w;        /* restore global window pointer */
        glbl->gz_bb = b;        /* restore global bit buffer */
        glbl->gz_bk = k;

        /* done */
        return 0;
}

/* "decompress" an inflated type 0 (stored) block. */
static int
inflate_stored(glbl)
        struct inflate *glbl;
{
        unsigned n;             /* number of bytes in block */
        unsigned w;             /* current window position */
        ulg b;                  /* bit buffer */
        unsigned k;             /* number of bits in bit buffer */

        /* make local copies of globals */
        b = glbl->gz_bb;        /* initialize bit buffer */
        k = glbl->gz_bk;
        w = glbl->gz_wp;        /* initialize window position */

        /* go to byte boundary */
        n = k & 7;
        DUMPBITS(n);

        /* get the length and its complement */
        NEEDBITS(glbl, 16)
        n = ((unsigned) b & 0xffff);
        DUMPBITS(16)
        NEEDBITS(glbl, 16)
        if (n != (unsigned) ((~b) & 0xffff))
                return 1;       /* error in compressed data */
        DUMPBITS(16)
        /* read and output the compressed data */
        while (n--) {
                NEEDBITS(glbl, 8)
                glbl->gz_slide[w++] = (uch) b;
                if (w == GZ_WSIZE) {
                        FLUSH(glbl, w);
                        w = 0;
                }
                DUMPBITS(8)
        }

        /* restore the globals from the locals */
        glbl->gz_wp = w;        /* restore global window pointer */
        glbl->gz_bb = b;        /* restore global bit buffer */
        glbl->gz_bk = k;
        return 0;
}

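/*
 * Illustrative sketch (not part of the original source): the layout that
 * inflate_stored() above consumes through the bit buffer.  After the three
 * block-header bits, a stored block is byte aligned: a 16-bit little-endian
 * length LEN, its one's complement NLEN as an integrity check, then LEN raw
 * bytes.  The hypothetical parser below reads the same layout directly from
 * a byte array.
 */
#if 0   /* example only, never compiled */
static int
stored_block_example(const unsigned char *in, unsigned char *out)
{
        unsigned len  = in[0] | ((unsigned)in[1] << 8); /* LEN */
        unsigned nlen = in[2] | ((unsigned)in[3] << 8); /* NLEN */
        unsigned i;

        if (len != (~nlen & 0xffff))
                return 1;               /* corrupt block header */
        for (i = 0; i < len; i++)       /* bytes are stored verbatim */
                out[i] = in[4 + i];
        return 0;
}
#endif
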
/* decompress an inflated type 1 (fixed Huffman codes) block.  We should
   either replace this with a custom decoder, or at least precompute the
   Huffman tables. */
static int
inflate_fixed(glbl)
        struct inflate *glbl;
{
        /* if first time, set up tables for fixed blocks */
        if (glbl->gz_fixed_tl == (struct huft *) NULL) {
                int i;                  /* temporary variable */
                static unsigned l[288]; /* length list for huft_build */

                /* literal table */
                for (i = 0; i < 144; i++)
                        l[i] = 8;
                for (; i < 256; i++)
                        l[i] = 9;
                for (; i < 280; i++)
                        l[i] = 7;
                for (; i < 288; i++)    /* make a complete, but wrong code set */
                        l[i] = 8;
                glbl->gz_fixed_bl = 7;
                if ((i = huft_build(glbl, l, 288, 257, cplens, cplext,
                            &glbl->gz_fixed_tl, &glbl->gz_fixed_bl)) != 0) {
                        glbl->gz_fixed_tl = (struct huft *) NULL;
                        return i;
                }
                /* distance table */
                for (i = 0; i < 30; i++)        /* make an incomplete code set */
                        l[i] = 5;
                glbl->gz_fixed_bd = 5;
                if ((i = huft_build(glbl, l, 30, 0, cpdist, cpdext,
                            &glbl->gz_fixed_td, &glbl->gz_fixed_bd)) > 1) {
                        huft_free(glbl, glbl->gz_fixed_tl);
                        glbl->gz_fixed_tl = (struct huft *) NULL;
                        return i;
                }
        }
        /* decompress until an end-of-block code */
        return inflate_codes(glbl, glbl->gz_fixed_tl, glbl->gz_fixed_td, glbl->gz_fixed_bl, glbl->gz_fixed_bd) != 0;
}

/* decompress an inflated type 2 (dynamic Huffman codes) block. */
static int
inflate_dynamic(glbl)
        struct inflate *glbl;
{
        int i;                  /* temporary variables */
        unsigned j;
        unsigned l;             /* last length */
        unsigned m;             /* mask for bit lengths table */
        unsigned n;             /* number of lengths to get */
        struct huft *tl;        /* literal/length code table */
        struct huft *td;        /* distance code table */
        int bl;                 /* lookup bits for tl */
        int bd;                 /* lookup bits for td */
        unsigned nb;            /* number of bit length codes */
        unsigned nl;            /* number of literal/length codes */
        unsigned nd;            /* number of distance codes */
#ifdef PKZIP_BUG_WORKAROUND
        unsigned ll[288 + 32];  /* literal/length and distance code lengths */
#else
        unsigned ll[286 + 30];  /* literal/length and distance code lengths */
#endif
        ulg b;                  /* bit buffer */
        unsigned k;             /* number of bits in bit buffer */

        /* make local bit buffer */
        b = glbl->gz_bb;
        k = glbl->gz_bk;

        /* read in table lengths */
        NEEDBITS(glbl, 5)
        nl = 257 + ((unsigned) b & 0x1f);       /* number of literal/length codes */
        DUMPBITS(5)
        NEEDBITS(glbl, 5)
        nd = 1 + ((unsigned) b & 0x1f); /* number of distance codes */
        DUMPBITS(5)
        NEEDBITS(glbl, 4)
        nb = 4 + ((unsigned) b & 0xf);  /* number of bit length codes */
        DUMPBITS(4)
#ifdef PKZIP_BUG_WORKAROUND
        if (nl > 288 || nd > 32)
#else
        if (nl > 286 || nd > 30)
#endif
                return 1;       /* bad lengths */
        /* read in bit-length-code lengths */
        for (j = 0; j < nb; j++) {
                NEEDBITS(glbl, 3)
                ll[border[j]] = (unsigned) b & 7;
                DUMPBITS(3)
        }
        for (; j < 19; j++)
                ll[border[j]] = 0;

        /* build decoding table for trees--single level, 7 bit lookup */
        bl = 7;
        if ((i = huft_build(glbl, ll, 19, 19, NULL, NULL, &tl, &bl)) != 0) {
                if (i == 1)
                        huft_free(glbl, tl);
                return i;       /* incomplete code set */
        }
        /* read in literal and distance code lengths */
        n = nl + nd;
        m = mask[bl];
        i = l = 0;
        while ((unsigned) i < n) {
                NEEDBITS(glbl, (unsigned) bl)
                j = (td = tl + ((unsigned) b & m))->b;
                DUMPBITS(j)
                j = td->v.n;
                if (j < 16)     /* length of code in bits (0..15) */
                        ll[i++] = l = j;        /* save last length in l */
                else if (j == 16) {     /* repeat last length 3 to 6 times */
                        NEEDBITS(glbl, 2)
                        j = 3 + ((unsigned) b & 3);
                        DUMPBITS(2)
                        if ((unsigned) i + j > n)
                                return 1;
                        while (j--)
                                ll[i++] = l;
                } else if (j == 17) {   /* 3 to 10 zero length codes */
                        NEEDBITS(glbl, 3)
                        j = 3 + ((unsigned) b & 7);
                        DUMPBITS(3)
                        if ((unsigned) i + j > n)
                                return 1;
                        while (j--)
                                ll[i++] = 0;
                        l = 0;
                } else {        /* j == 18: 11 to 138 zero length codes */
                        NEEDBITS(glbl, 7)
                        j = 11 + ((unsigned) b & 0x7f);
                        DUMPBITS(7)
                        if ((unsigned) i + j > n)
                                return 1;
                        while (j--)
                                ll[i++] = 0;
                        l = 0;
                }
        }

        /* free decoding table for trees */
        huft_free(glbl, tl);

        /* restore the global bit buffer */
        glbl->gz_bb = b;
        glbl->gz_bk = k;

        /* build the decoding tables for literal/length and distance codes */
        bl = lbits;
        i = huft_build(glbl, ll, nl, 257, cplens, cplext, &tl, &bl);
        if (i != 0) {
                if (i == 1 && !qflag) {
                        FPRINTF("(incomplete l-tree) ");
                        huft_free(glbl, tl);
                }
                return i;       /* incomplete code set */
        }
        bd = dbits;
        i = huft_build(glbl, ll + nl, nd, 0, cpdist, cpdext, &td, &bd);
        if (i != 0) {
                if (i == 1 && !qflag) {
                        FPRINTF("(incomplete d-tree) ");
#ifdef PKZIP_BUG_WORKAROUND
                        i = 0;
                }
#else
                        huft_free(glbl, td);
                }
                huft_free(glbl, tl);
                return i;       /* incomplete code set */
#endif
        }
        /* decompress until an end-of-block code */
        if (inflate_codes(glbl, tl, td, bl, bd))
                return 1;

        /* free the decoding tables, return */
        huft_free(glbl, tl);
        huft_free(glbl, td);
        return 0;
}

/* decompress an inflated block */
static int
inflate_block(glbl, e)
        struct inflate *glbl;
        int *e;                 /* last block flag */
{
        unsigned t;             /* block type */
        ulg b;                  /* bit buffer */
        unsigned k;             /* number of bits in bit buffer */

        /* make local bit buffer */
        b = glbl->gz_bb;
        k = glbl->gz_bk;

        /* read in last block bit */
        NEEDBITS(glbl, 1)
        *e = (int) b & 1;
        DUMPBITS(1)
        /* read in block type */
        NEEDBITS(glbl, 2)
        t = (unsigned) b & 3;
        DUMPBITS(2)
        /* restore the global bit buffer */
        glbl->gz_bb = b;
        glbl->gz_bk = k;

        /* inflate that block type */
        if (t == 2)
                return inflate_dynamic(glbl);
        if (t == 0)
                return inflate_stored(glbl);
        if (t == 1)
                return inflate_fixed(glbl);
        /* bad block type */
        return 2;
}

/* decompress an inflated entry */
static int
xinflate(glbl)
        struct inflate *glbl;
{
        int e;                  /* last block flag */
        int r;                  /* result code */
        unsigned h;             /* maximum struct huft's malloc'ed */

        glbl->gz_fixed_tl = (struct huft *) NULL;

        /* initialize window, bit buffer */
        glbl->gz_wp = 0;
        glbl->gz_bk = 0;
        glbl->gz_bb = 0;

        /* decompress until the last block */
        h = 0;
        do {
                glbl->gz_hufts = 0;
                if ((r = inflate_block(glbl, &e)) != 0)
                        return r;
                if (glbl->gz_hufts > h)
                        h = glbl->gz_hufts;
        } while (!e);

        /* flush out slide */
        FLUSH(glbl, glbl->gz_wp);

        /* return success */
        return 0;
}

/* Nobody uses this - why not? */
int
inflate(glbl)
        struct inflate *glbl;
{
        int i;
#ifdef _KERNEL
        u_char *p = NULL;

        if (!glbl->gz_slide)
                p = glbl->gz_slide = malloc(GZ_WSIZE, M_GZIP, M_WAITOK);
#endif
        if (!glbl->gz_slide)
#ifdef _KERNEL
                return(ENOMEM);
#else
                return 3;       /* kzip expects 3 */
#endif
        i = xinflate(glbl);

        if (glbl->gz_fixed_td != (struct huft *) NULL) {
                huft_free(glbl, glbl->gz_fixed_td);
                glbl->gz_fixed_td = (struct huft *) NULL;
        }
        if (glbl->gz_fixed_tl != (struct huft *) NULL) {
                huft_free(glbl, glbl->gz_fixed_tl);
                glbl->gz_fixed_tl = (struct huft *) NULL;
        }
#ifdef _KERNEL
        if (p == glbl->gz_slide) {
                free(glbl->gz_slide, M_GZIP);
                glbl->gz_slide = NULL;
        }
#endif
        return i;
}

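/*
 * Illustrative sketch (not part of the original source): how a caller might
 * drive inflate().  From the code above, gz_input returns the next
 * compressed byte (or GZ_EOF), gz_output receives each flushed window-full
 * of output and returns non-zero to abort, gz_private is an opaque cookie
 * passed to both, and gz_slide must point at a GZ_WSIZE-byte window (the
 * kernel path allocates one on demand).  The exact struct inflate layout
 * and callback prototypes live in <sys/inflate.h>; struct example_stream
 * and the callback bodies below are hypothetical.
 */
#if 0   /* example only, never compiled */
struct example_stream {
        u_char  *in;            /* compressed input */
        u_long  inlen, inpos;
        u_char  *out;           /* decompressed output */
        u_long  outpos;
};

static int
example_input(void *priv)
{
        struct example_stream *s = priv;

        return s->inpos < s->inlen ? s->in[s->inpos++] : GZ_EOF;
}

static int
example_output(void *priv, u_char *buf, u_long count)
{
        struct example_stream *s = priv;

        bcopy(buf, s->out + s->outpos, count);  /* collect a window-full */
        s->outpos += count;
        return 0;               /* non-zero would abort inflate() */
}

static int
example_decompress(struct example_stream *s)
{
        struct inflate infl;

        bzero(&infl, sizeof(infl));
        infl.gz_input = example_input;
        infl.gz_output = example_output;
        infl.gz_private = s;
        infl.gz_slide = NULL;   /* _KERNEL: allocated by inflate() itself */
        return inflate(&infl);  /* 0 on success */
}
#endif
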
/* ----------------------- END INFLATE.C */