xref: /netbsd-src/sys/net/zlib.c (revision d0fed6c87ddc40a8bffa6f99e7433ddfc864dd83)
1 /*	$NetBSD: zlib.c,v 1.5 1997/03/13 20:11:53 fvdl Exp $	*/
2 
3 /*
4  * This file is derived from various .h and .c files from the zlib-0.95
5  * distribution by Jean-loup Gailly and Mark Adler, with some additions
6  * by Paul Mackerras to aid in implementing Deflate compression and
7  * decompression for PPP packets.  See zlib.h for conditions of
8  * distribution and use.
9  *
10  * Changes that have been made include:
11  * - changed functions not used outside this file to "local"
12  * - added minCompression parameter to deflateInit2
13  * - added Z_PACKET_FLUSH (see zlib.h for details)
14  * - added inflateIncomp
15  *
16  * Id: zlib.c,v 1.5 1997/03/04 03:26:35 paulus Exp
17  */
18 
19 /*
20  *  ==FILEVERSION 960926==
21  *
22  * This marker is used by the Linux installation script to determine
23  * whether an up-to-date version of this file is already installed.
24  */
25 
26 /*+++++*/
27 /* zutil.h -- internal interface and configuration of the compression library
28  * Copyright (C) 1995 Jean-loup Gailly.
29  * For conditions of distribution and use, see copyright notice in zlib.h
30  */
31 
32 /* WARNING: this file should *not* be used by applications. It is
33    part of the implementation of the compression library and is
34    subject to change. Applications should only use zlib.h.
35  */
36 
37 /* From: zutil.h,v 1.9 1995/05/03 17:27:12 jloup Exp */
38 
39 #define _Z_UTIL_H
40 
41 #include "zlib.h"
42 
43 #ifndef local
44 #  define local static
45 #endif
46 /* compile with -Dlocal if your debugger can't find static symbols */
47 
48 #define FAR
49 
50 typedef unsigned char  uch;
51 typedef uch FAR uchf;
52 typedef unsigned short ush;
53 typedef ush FAR ushf;
54 typedef unsigned long  ulg;
55 
56 extern char *z_errmsg[]; /* indexed by 1-zlib_error */
57 
58 #define ERR_RETURN(strm,err) return (strm->msg=z_errmsg[1-err], err)
59 /* To be used only when the state is known to be valid */
60 
61 #ifndef NULL
62 #define NULL	((void *) 0)
63 #endif
64 
65         /* common constants */
66 
67 #define DEFLATED   8
68 
69 #ifndef DEF_WBITS
70 #  define DEF_WBITS MAX_WBITS
71 #endif
72 /* default windowBits for decompression. MAX_WBITS is for compression only */
73 
74 #if MAX_MEM_LEVEL >= 8
75 #  define DEF_MEM_LEVEL 8
76 #else
77 #  define DEF_MEM_LEVEL  MAX_MEM_LEVEL
78 #endif
79 /* default memLevel */
80 
81 #define STORED_BLOCK 0
82 #define STATIC_TREES 1
83 #define DYN_TREES    2
84 /* The three kinds of block type */
85 
86 #define MIN_MATCH  3
87 #define MAX_MATCH  258
88 /* The minimum and maximum match lengths */
89 
90          /* functions */
91 
92 #if defined(KERNEL) || defined(_KERNEL)
93 #include <sys/types.h>
94 #include <sys/time.h>
95 #include <sys/systm.h>
96 #  define zmemcpy(d, s, n)	bcopy((s), (d), (n))
97 #  define zmemzero		bzero
98 
99 #else
100 #if defined(__KERNEL__)
101 /* Assume this is Linux */
102 #include <linux/string.h>
103 #define zmemcpy memcpy
104 #define zmemzero(dest, len)	memset(dest, 0, len)
105 
106 #else /* not kernel */
107 #if defined(STDC) && !defined(HAVE_MEMCPY) && !defined(NO_MEMCPY)
108 #  define HAVE_MEMCPY
109 #endif
110 #ifdef HAVE_MEMCPY
111 #    define zmemcpy memcpy
112 #    define zmemzero(dest, len) memset(dest, 0, len)
113 #else
114    extern void zmemcpy  OF((Bytef* dest, Bytef* source, uInt len));
115    extern void zmemzero OF((Bytef* dest, uInt len));
116 #endif
117 #endif	/* __KERNEL__ */
118 #endif	/* KERNEL */
119 
120 /* Diagnostic functions */
121 #ifdef DEBUG_ZLIB
122 #  include <stdio.h>
123 #  ifndef verbose
124 #    define verbose 0
125 #  endif
126 #  define Assert(cond,msg) {if(!(cond)) z_error(msg);}
127 #  define Trace(x) fprintf x
128 #  define Tracev(x) {if (verbose) fprintf x ;}
129 #  define Tracevv(x) {if (verbose>1) fprintf x ;}
130 #  define Tracec(c,x) {if (verbose && (c)) fprintf x ;}
131 #  define Tracecv(c,x) {if (verbose>1 && (c)) fprintf x ;}
132 #else
133 #  define Assert(cond,msg)
134 #  define Trace(x)
135 #  define Tracev(x)
136 #  define Tracevv(x)
137 #  define Tracec(c,x)
138 #  define Tracecv(c,x)
139 #endif
140 
141 
142 typedef uLong (*check_func) OF((uLong check, Bytef *buf, uInt len));
143 
144 /* voidpf zcalloc OF((voidpf opaque, unsigned items, unsigned size)); */
145 /* void   zcfree  OF((voidpf opaque, voidpf ptr)); */
146 
147 #define ZALLOC(strm, items, size) \
148            (*((strm)->zalloc))((strm)->opaque, (items), (size))
149 #define ZFREE(strm, addr, size)	\
150 	   (*((strm)->zfree))((strm)->opaque, (voidpf)(addr), (size))
151 #define TRY_FREE(s, p, n) {if (p) ZFREE(s, p, n);}
152 
153 /* deflate.h -- internal compression state
154  * Copyright (C) 1995 Jean-loup Gailly
155  * For conditions of distribution and use, see copyright notice in zlib.h
156  */
157 
158 /* WARNING: this file should *not* be used by applications. It is
159    part of the implementation of the compression library and is
160    subject to change. Applications should only use zlib.h.
161  */
162 
163 
164 /*+++++*/
165 /* From: deflate.h,v 1.5 1995/05/03 17:27:09 jloup Exp */
166 
167 /* ===========================================================================
168  * Internal compression state.
169  */
170 
171 /* Data type */
172 #define BINARY  0
173 #define ASCII   1
174 #define UNKNOWN 2
175 
176 #define LENGTH_CODES 29
177 /* number of length codes, not counting the special END_BLOCK code */
178 
179 #define LITERALS  256
180 /* number of literal bytes 0..255 */
181 
182 #define L_CODES (LITERALS+1+LENGTH_CODES)
183 /* number of Literal or Length codes, including the END_BLOCK code */
184 
185 #define D_CODES   30
186 /* number of distance codes */
187 
188 #define BL_CODES  19
189 /* number of codes used to transfer the bit lengths */
190 
191 #define HEAP_SIZE (2*L_CODES+1)
192 /* maximum heap size */
193 
194 #define MAX_BITS 15
195 /* All codes must not exceed MAX_BITS bits */
196 
197 #define INIT_STATE    42
198 #define BUSY_STATE   113
199 #define FLUSH_STATE  124
200 #define FINISH_STATE 666
201 /* Stream status */
202 
203 
204 /* Data structure describing a single value and its code string. */
205 typedef struct ct_data_s {
206     union {
207         ush  freq;       /* frequency count */
208         ush  code;       /* bit string */
209     } fc;
210     union {
211         ush  dad;        /* father node in Huffman tree */
212         ush  len;        /* length of bit string */
213     } dl;
214 } FAR ct_data;
215 
216 #define Freq fc.freq
217 #define Code fc.code
218 #define Dad  dl.dad
219 #define Len  dl.len
220 
221 typedef struct static_tree_desc_s  static_tree_desc;
222 
223 typedef struct tree_desc_s {
224     ct_data *dyn_tree;           /* the dynamic tree */
225     int     max_code;            /* largest code with non zero frequency */
226     static_tree_desc *stat_desc; /* the corresponding static tree */
227 } FAR tree_desc;
228 
229 typedef ush Pos;
230 typedef Pos FAR Posf;
231 typedef unsigned IPos;
232 
233 /* A Pos is an index in the character window. We use short instead of int to
234  * save space in the various tables. IPos is used only for parameter passing.
235  */
236 
237 typedef struct deflate_state {
238     z_stream *strm;      /* pointer back to this zlib stream */
239     int   status;        /* as the name implies */
240     Bytef *pending_buf;  /* output still pending */
241     Bytef *pending_out;  /* next pending byte to output to the stream */
242     int   pending;       /* nb of bytes in the pending buffer */
243     uLong adler;         /* adler32 of uncompressed data */
244     int   noheader;      /* suppress zlib header and adler32 */
245     Byte  data_type;     /* UNKNOWN, BINARY or ASCII */
246     Byte  method;        /* STORED (for zip only) or DEFLATED */
247     int	  minCompr;	 /* min size decrease for Z_FLUSH_NOSTORE */
248 
249                 /* used by deflate.c: */
250 
251     uInt  w_size;        /* LZ77 window size (32K by default) */
252     uInt  w_bits;        /* log2(w_size)  (8..16) */
253     uInt  w_mask;        /* w_size - 1 */
254 
255     Bytef *window;
256     /* Sliding window. Input bytes are read into the second half of the window,
257      * and move to the first half later to keep a dictionary of at least wSize
258      * bytes. With this organization, matches are limited to a distance of
259      * wSize-MAX_MATCH bytes, but this ensures that IO is always
260      * performed with a length multiple of the block size. Also, it limits
261      * the window size to 64K, which is quite useful on MSDOS.
262      * To do: use the user input buffer as sliding window.
263      */
264 
265     ulg window_size;
266     /* Actual size of window: 2*wSize, except when the user input buffer
267      * is directly used as sliding window.
268      */
269 
270     Posf *prev;
271     /* Link to older string with same hash index. To limit the size of this
272      * array to 64K, this link is maintained only for the last 32K strings.
273      * An index in this array is thus a window index modulo 32K.
274      */
275 
276     Posf *head; /* Heads of the hash chains or NIL. */
277 
278     uInt  ins_h;          /* hash index of string to be inserted */
279     uInt  hash_size;      /* number of elements in hash table */
280     uInt  hash_bits;      /* log2(hash_size) */
281     uInt  hash_mask;      /* hash_size-1 */
282 
283     uInt  hash_shift;
284     /* Number of bits by which ins_h must be shifted at each input
285      * step. It must be such that after MIN_MATCH steps, the oldest
286      * byte no longer takes part in the hash key, that is:
287      *   hash_shift * MIN_MATCH >= hash_bits
288      */
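    /* For concreteness: deflateInit2() below sets hash_bits = memLevel+7 and
     * hash_shift = (hash_bits+MIN_MATCH-1)/MIN_MATCH, so with the default
     * memLevel of 8 we get hash_bits = 15 and hash_shift = (15+3-1)/3 = 5,
     * and 5*3 = 15 >= 15 satisfies the constraint above.
     */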
289 
290     long block_start;
291     /* Window position at the beginning of the current output block. Gets
292      * negative when the window is moved backwards.
293      */
294 
295     uInt match_length;           /* length of best match */
296     IPos prev_match;             /* previous match */
297     int match_available;         /* set if previous match exists */
298     uInt strstart;               /* start of string to insert */
299     uInt match_start;            /* start of matching string */
300     uInt lookahead;              /* number of valid bytes ahead in window */
301 
302     uInt prev_length;
303     /* Length of the best match at previous step. Matches not greater than this
304      * are discarded. This is used in the lazy match evaluation.
305      */
306 
307     uInt max_chain_length;
308     /* To speed up deflation, hash chains are never searched beyond this
309      * length.  A higher limit improves compression ratio but degrades the
310      * speed.
311      */
312 
313     uInt max_lazy_match;
314     /* Attempt to find a better match only when the current match is strictly
315      * smaller than this value. This mechanism is used only for compression
316      * levels >= 4.
317      */
318 #   define max_insert_length  max_lazy_match
319     /* Insert new strings in the hash table only if the match length is not
320      * greater than this length. This saves time but degrades compression.
321      * max_insert_length is used only for compression levels <= 3.
322      */
323 
324     int level;    /* compression level (1..9) */
325     int strategy; /* favor or force Huffman coding*/
326 
327     uInt good_match;
328     /* Use a faster search when the previous match is longer than this */
329 
330      int nice_match; /* Stop searching when current match exceeds this */
331 
332                 /* used by trees.c: */
333     /* Didn't use ct_data typedef below to suppress compiler warning */
334     struct ct_data_s dyn_ltree[HEAP_SIZE];   /* literal and length tree */
335     struct ct_data_s dyn_dtree[2*D_CODES+1]; /* distance tree */
336     struct ct_data_s bl_tree[2*BL_CODES+1];  /* Huffman tree for bit lengths */
337 
338     struct tree_desc_s l_desc;               /* desc. for literal tree */
339     struct tree_desc_s d_desc;               /* desc. for distance tree */
340     struct tree_desc_s bl_desc;              /* desc. for bit length tree */
341 
342     ush bl_count[MAX_BITS+1];
343     /* number of codes at each bit length for an optimal tree */
344 
345     int heap[2*L_CODES+1];      /* heap used to build the Huffman trees */
346     int heap_len;               /* number of elements in the heap */
347     int heap_max;               /* element of largest frequency */
348     /* The sons of heap[n] are heap[2*n] and heap[2*n+1]. heap[0] is not used.
349      * The same heap array is used to build all trees.
350      */
351 
352     uch depth[2*L_CODES+1];
353     /* Depth of each subtree used as tie breaker for trees of equal frequency
354      */
355 
356     uchf *l_buf;          /* buffer for literals or lengths */
357 
358     uInt  lit_bufsize;
359     /* Size of match buffer for literals/lengths.  There are 4 reasons for
360      * limiting lit_bufsize to 64K:
361      *   - frequencies can be kept in 16 bit counters
362      *   - if compression is not successful for the first block, all input
363      *     data is still in the window so we can still emit a stored block even
364      *     when input comes from standard input.  (This can also be done for
365      *     all blocks if lit_bufsize is not greater than 32K.)
366      *   - if compression is not successful for a file smaller than 64K, we can
367      *     even emit a stored file instead of a stored block (saving 5 bytes).
368      *     This is applicable only for zip (not gzip or zlib).
369      *   - creating new Huffman trees less frequently may not provide fast
370      *     adaptation to changes in the input data statistics. (Take for
371      *     example a binary file with poorly compressible code followed by
372      *     a highly compressible string table.) Smaller buffer sizes give
373      *     fast adaptation but have of course the overhead of transmitting
374      *     trees more frequently.
375      *   - I can't count above 4
376      */
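    /* For example, with the default memLevel of 8, deflateInit2() below sets
     * lit_bufsize = 1 << (8+6) = 16384 elements, comfortably below the 64K
     * limit discussed above.
     */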
377 
378     uInt last_lit;      /* running index in l_buf */
379 
380     ushf *d_buf;
381     /* Buffer for distances. To simplify the code, d_buf and l_buf have
382      * the same number of elements. To use different lengths, an extra flag
383      * array would be necessary.
384      */
385 
386     ulg opt_len;        /* bit length of current block with optimal trees */
387     ulg static_len;     /* bit length of current block with static trees */
388     ulg compressed_len; /* total bit length of compressed file */
389     uInt matches;       /* number of string matches in current block */
390     int last_eob_len;   /* bit length of EOB code for last block */
391 
392 #ifdef DEBUG_ZLIB
393     ulg bits_sent;      /* bit length of the compressed data */
394 #endif
395 
396     ush bi_buf;
397     /* Output buffer. Bits are inserted starting at the bottom (least
398      * significant bits).
399      */
400     int bi_valid;
401     /* Number of valid bits in bi_buf.  All bits above the last valid bit
402      * are always zero.
403      */
404 
405     uInt blocks_in_packet;
406     /* Number of blocks produced since the last time Z_PACKET_FLUSH
407      * was used.
408      */
409 
410 } FAR deflate_state;
411 
412 /* Output a byte on the stream.
413  * IN assertion: there is enough room in pending_buf.
414  */
415 #define put_byte(s, c) {s->pending_buf[s->pending++] = (c);}
416 
417 
418 #define MIN_LOOKAHEAD (MAX_MATCH+MIN_MATCH+1)
419 /* Minimum amount of lookahead, except at the end of the input file.
420  * See deflate.c for comments about the MIN_MATCH+1.
421  */
422 
423 #define MAX_DIST(s)  ((s)->w_size-MIN_LOOKAHEAD)
424 /* In order to simplify the code, particularly on 16 bit machines, match
425  * distances are limited to MAX_DIST instead of WSIZE.
426  */
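/* With the values above, MIN_LOOKAHEAD = 258+3+1 = 262, so for the default
 * 32K window (w_size = 32768) matches may reach back at most
 * MAX_DIST(s) = 32768-262 = 32506 bytes.
 */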
427 
428         /* in trees.c */
429 local void ct_init       OF((deflate_state *s));
430 local int  ct_tally      OF((deflate_state *s, int dist, int lc));
431 local ulg ct_flush_block OF((deflate_state *s, charf *buf, ulg stored_len,
432 			     int flush));
433 local void ct_align      OF((deflate_state *s));
434 local void ct_stored_block OF((deflate_state *s, charf *buf, ulg stored_len,
435                           int eof));
436 local void ct_stored_type_only OF((deflate_state *s));
437 
438 
439 /*+++++*/
440 /* deflate.c -- compress data using the deflation algorithm
441  * Copyright (C) 1995 Jean-loup Gailly.
442  * For conditions of distribution and use, see copyright notice in zlib.h
443  */
444 
445 /*
446  *  ALGORITHM
447  *
448  *      The "deflation" process depends on being able to identify portions
449  *      of the input text which are identical to earlier input (within a
450  *      sliding window trailing behind the input currently being processed).
451  *
452  *      The most straightforward technique turns out to be the fastest for
453  *      most input files: try all possible matches and select the longest.
454  *      The key feature of this algorithm is that insertions into the string
455  *      dictionary are very simple and thus fast, and deletions are avoided
456  *      completely. Insertions are performed at each input character, whereas
457  *      string matches are performed only when the previous match ends. So it
458  *      is preferable to spend more time in matches to allow very fast string
459  *      insertions and avoid deletions. The matching algorithm for small
460  *      strings is inspired by that of Rabin & Karp. A brute force approach
461  *      is used to find longer strings when a small match has been found.
462  *      A similar algorithm is used in comic (by Jan-Mark Wams) and freeze
463  *      (by Leonid Broukhis).
464  *         A previous version of this file used a more sophisticated algorithm
465  *      (by Fiala and Greene) which is guaranteed to run in linear amortized
466  *      time, but has a larger average cost, uses more memory and is patented.
467  *      However the F&G algorithm may be faster for some highly redundant
468  *      files if the parameter max_chain_length (described below) is too large.
469  *
470  *  ACKNOWLEDGEMENTS
471  *
472  *      The idea of lazy evaluation of matches is due to Jan-Mark Wams, and
473  *      I found it in 'freeze' written by Leonid Broukhis.
474  *      Thanks to many people for bug reports and testing.
475  *
476  *  REFERENCES
477  *
478  *      Deutsch, L.P.,"'Deflate' Compressed Data Format Specification".
479  *      Available in ftp.uu.net:/pub/archiving/zip/doc/deflate-1.1.doc
480  *
481  *      A description of the Rabin and Karp algorithm is given in the book
482  *         "Algorithms" by R. Sedgewick, Addison-Wesley, p252.
483  *
484  *      Fiala,E.R., and Greene,D.H.
485  *         Data Compression with Finite Windows, Comm.ACM, 32,4 (1989) 490-505
486  *
487  */
488 
489 /* From: deflate.c,v 1.8 1995/05/03 17:27:08 jloup Exp */
490 
491 #if 0
492 local char zlib_copyright[] = " deflate Copyright 1995 Jean-loup Gailly ";
493 #endif
494 /*
495   If you use the zlib library in a product, an acknowledgment is welcome
496   in the documentation of your product. If for some reason you cannot
497   include such an acknowledgment, I would appreciate that you keep this
498   copyright string in the executable of your product.
499  */
500 
501 #define NIL 0
502 /* Tail of hash chains */
503 
504 #ifndef TOO_FAR
505 #  define TOO_FAR 4096
506 #endif
507 /* Matches of length 3 are discarded if their distance exceeds TOO_FAR */
508 
509 #define MIN_LOOKAHEAD (MAX_MATCH+MIN_MATCH+1)
510 /* Minimum amount of lookahead, except at the end of the input file.
511  * See deflate.c for comments about the MIN_MATCH+1.
512  */
513 
514 /* Values for max_lazy_match, good_match and max_chain_length, depending on
515  * the desired pack level (0..9). The values given below have been tuned to
516  * exclude worst case performance for pathological files. Better values may be
517  * found for specific files.
518  */
519 
520 typedef struct config_s {
521    ush good_length; /* reduce lazy search above this match length */
522    ush max_lazy;    /* do not perform lazy search above this match length */
523    ush nice_length; /* quit search above this match length */
524    ush max_chain;
525 } config;
526 
527 local config configuration_table[10] = {
528 /*      good lazy nice chain */
529 /* 0 */ {0,    0,  0,    0},  /* store only */
530 /* 1 */ {4,    4,  8,    4},  /* maximum speed, no lazy matches */
531 /* 2 */ {4,    5, 16,    8},
532 /* 3 */ {4,    6, 32,   32},
533 
534 /* 4 */ {4,    4, 16,   16},  /* lazy matches */
535 /* 5 */ {8,   16, 32,   32},
536 /* 6 */ {8,   16, 128, 128},
537 /* 7 */ {8,   32, 128, 256},
538 /* 8 */ {32, 128, 258, 1024},
539 /* 9 */ {32, 258, 258, 4096}}; /* maximum compression */
540 
541 /* Note: the deflate() code requires max_lazy >= MIN_MATCH and max_chain >= 4
542  * For deflate_fast() (levels <= 3) good is ignored and lazy has a different
543  * meaning.
544  */
545 
546 #define EQUAL 0
547 /* result of memcmp for equal strings */
548 
549 /* ===========================================================================
550  *  Prototypes for local functions.
551  */
552 
553 local void fill_window   OF((deflate_state *s));
554 local int  deflate_fast  OF((deflate_state *s, int flush));
555 local int  deflate_slow  OF((deflate_state *s, int flush));
556 local void lm_init       OF((deflate_state *s));
557 local int longest_match  OF((deflate_state *s, IPos cur_match));
558 local void putShortMSB   OF((deflate_state *s, uInt b));
559 local void flush_pending OF((z_stream *strm));
560 local int read_buf       OF((z_stream *strm, charf *buf, unsigned size));
561 #ifdef ASMV
562       void match_init OF((void)); /* asm code initialization */
563 #endif
564 
565 #ifdef DEBUG_ZLIB
566 local  void check_match OF((deflate_state *s, IPos start, IPos match,
567                             int length));
568 #endif
569 
570 
571 /* ===========================================================================
572  * Update a hash value with the given input byte
573  * IN  assertion: all calls to UPDATE_HASH are made with consecutive
574  *    input characters, so that a running hash key can be computed from the
575  *    previous key instead of complete recalculation each time.
576  */
577 #define UPDATE_HASH(s,h,c) (h = (((h)<<s->hash_shift) ^ (c)) & s->hash_mask)
578 
579 
580 /* ===========================================================================
581  * Insert string str in the dictionary and set match_head to the previous head
582  * of the hash chain (the most recent string with same hash key). Return
583  * the previous length of the hash chain.
584  * IN  assertion: all calls to INSERT_STRING are made with consecutive
585  *    input characters and the first MIN_MATCH bytes of str are valid
586  *    (except for the last MIN_MATCH-1 bytes of the input file).
587  */
588 #define INSERT_STRING(s, str, match_head) \
589    (UPDATE_HASH(s, s->ins_h, s->window[(str) + (MIN_MATCH-1)]), \
590     s->prev[(str) & s->w_mask] = match_head = s->head[s->ins_h], \
591     s->head[s->ins_h] = (str))
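
/* The chains built by INSERT_STRING are the ones longest_match() walks:
 * head[h] holds the window index of the most recent string whose hash is h,
 * and prev[str & w_mask] remembers the previous string with the same hash,
 * so following prev[] visits candidate matches from newest to oldest.
 */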
592 
593 /* ===========================================================================
594  * Initialize the hash table (avoiding 64K overflow for 16 bit systems).
595  * prev[] will be initialized on the fly.
596  */
597 #define CLEAR_HASH(s) \
598     s->head[s->hash_size-1] = NIL; \
599     zmemzero((charf *)s->head, (unsigned)(s->hash_size-1)*sizeof(*s->head));
600 
601 /* ========================================================================= */
602 int deflateInit (strm, level)
603     z_stream *strm;
604     int level;
605 {
606     return deflateInit2 (strm, level, DEFLATED, MAX_WBITS, DEF_MEM_LEVEL,
607 			 0, 0);
608     /* To do: ignore strm->next_in if we use it as window */
609 }
610 
611 /* ========================================================================= */
612 int deflateInit2 (strm, level, method, windowBits, memLevel,
613 		  strategy, minCompression)
614     z_stream *strm;
615     int  level;
616     int  method;
617     int  windowBits;
618     int  memLevel;
619     int  strategy;
620     int  minCompression;
621 {
622     deflate_state *s;
623     int noheader = 0;
624 
625     if (strm == Z_NULL) return Z_STREAM_ERROR;
626 
627     strm->msg = Z_NULL;
628 /*    if (strm->zalloc == Z_NULL) strm->zalloc = zcalloc; */
629 /*    if (strm->zfree == Z_NULL) strm->zfree = zcfree; */
630 
631     if (level == Z_DEFAULT_COMPRESSION) level = 6;
632 
633     if (windowBits < 0) { /* undocumented feature: suppress zlib header */
634         noheader = 1;
635         windowBits = -windowBits;
636     }
637     if (memLevel < 1 || memLevel > MAX_MEM_LEVEL || method != DEFLATED ||
638         windowBits < 8 || windowBits > 15 || level < 1 || level > 9) {
639         return Z_STREAM_ERROR;
640     }
641     s = (deflate_state *) ZALLOC(strm, 1, sizeof(deflate_state));
642     if (s == Z_NULL) return Z_MEM_ERROR;
643     strm->state = (struct internal_state FAR *)s;
644     s->strm = strm;
645 
646     s->noheader = noheader;
647     s->w_bits = windowBits;
648     s->w_size = 1 << s->w_bits;
649     s->w_mask = s->w_size - 1;
650 
651     s->hash_bits = memLevel + 7;
652     s->hash_size = 1 << s->hash_bits;
653     s->hash_mask = s->hash_size - 1;
654     s->hash_shift =  ((s->hash_bits+MIN_MATCH-1)/MIN_MATCH);
655 
656     s->window = (Bytef *) ZALLOC(strm, s->w_size, 2*sizeof(Byte));
657     s->prev   = (Posf *)  ZALLOC(strm, s->w_size, sizeof(Pos));
658     s->head   = (Posf *)  ZALLOC(strm, s->hash_size, sizeof(Pos));
659 
660     s->lit_bufsize = 1 << (memLevel + 6); /* 16K elements by default */
661 
662     s->pending_buf = (uchf *) ZALLOC(strm, s->lit_bufsize, 2*sizeof(ush));
663 
664     if (s->window == Z_NULL || s->prev == Z_NULL || s->head == Z_NULL ||
665         s->pending_buf == Z_NULL) {
666         strm->msg = z_errmsg[1-Z_MEM_ERROR];
667         deflateEnd (strm);
668         return Z_MEM_ERROR;
669     }
670     s->d_buf = (ushf *) &(s->pending_buf[s->lit_bufsize]);
671     s->l_buf = (uchf *) &(s->pending_buf[3*s->lit_bufsize]);
672     /* We overlay pending_buf and d_buf+l_buf. This works since the average
673      * output size for (length,distance) codes is <= 32 bits (worst case
674      * is 15+15+13=43).
675      */
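    /* Concretely: pending_buf is lit_bufsize*2*sizeof(ush) = 4*lit_bufsize
     * bytes; d_buf occupies bytes [lit_bufsize, 3*lit_bufsize) of it and
     * l_buf occupies bytes [3*lit_bufsize, 4*lit_bufsize).
     */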
676 
677     s->level = level;
678     s->strategy = strategy;
679     s->method = (Byte)method;
680     s->minCompr = minCompression;
681     s->blocks_in_packet = 0;
682 
683     return deflateReset(strm);
684 }
685 
686 /* ========================================================================= */
687 int deflateReset (strm)
688     z_stream *strm;
689 {
690     deflate_state *s;
691 
692     if (strm == Z_NULL || strm->state == Z_NULL ||
693         strm->zalloc == Z_NULL || strm->zfree == Z_NULL) return Z_STREAM_ERROR;
694 
695     strm->total_in = strm->total_out = 0;
696     strm->msg = Z_NULL; /* use zfree if we ever allocate msg dynamically */
697     strm->data_type = Z_UNKNOWN;
698 
699     s = (deflate_state *)strm->state;
700     s->pending = 0;
701     s->pending_out = s->pending_buf;
702 
703     if (s->noheader < 0) {
704         s->noheader = 0; /* was set to -1 by deflate(..., Z_FINISH); */
705     }
706     s->status = s->noheader ? BUSY_STATE : INIT_STATE;
707     s->adler = 1;
708 
709     ct_init(s);
710     lm_init(s);
711 
712     return Z_OK;
713 }
714 
715 /* =========================================================================
716  * Put a short in the pending buffer. The 16-bit value is put in MSB order.
717  * IN assertion: the stream state is correct and there is enough room in
718  * pending_buf.
719  */
720 local void putShortMSB (s, b)
721     deflate_state *s;
722     uInt b;
723 {
724     put_byte(s, (Byte)(b >> 8));
725     put_byte(s, (Byte)(b & 0xff));
726 }
727 
728 /* =========================================================================
729  * Flush as much pending output as possible.
730  */
731 local void flush_pending(strm)
732     z_stream *strm;
733 {
734     deflate_state *state = (deflate_state *) strm->state;
735     unsigned len = state->pending;
736 
737     if (len > strm->avail_out) len = strm->avail_out;
738     if (len == 0) return;
739 
740     if (strm->next_out != NULL) {
741 	zmemcpy(strm->next_out, state->pending_out, len);
742 	strm->next_out += len;
743     }
744     state->pending_out += len;
745     strm->total_out += len;
746     strm->avail_out -= len;
747     state->pending -= len;
748     if (state->pending == 0) {
749         state->pending_out = state->pending_buf;
750     }
751 }
752 
753 /* ========================================================================= */
754 int deflate (strm, flush)
755     z_stream *strm;
756     int flush;
757 {
758     deflate_state *state = (deflate_state *) strm->state;
759 
760     if (strm == Z_NULL || state == Z_NULL) return Z_STREAM_ERROR;
761 
762     if (strm->next_in == Z_NULL && strm->avail_in != 0) {
763         ERR_RETURN(strm, Z_STREAM_ERROR);
764     }
765     if (strm->avail_out == 0) ERR_RETURN(strm, Z_BUF_ERROR);
766 
767     state->strm = strm; /* just in case */
768 
769     /* Write the zlib header */
770     if (state->status == INIT_STATE) {
771 
772         uInt header = (DEFLATED + ((state->w_bits-8)<<4)) << 8;
773         uInt level_flags = (state->level-1) >> 1;
774 
775         if (level_flags > 3) level_flags = 3;
776         header |= (level_flags << 6);
777         header += 31 - (header % 31);
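        /* For example, with the default w_bits = 15 and level = 6:
         * header starts as (8 + (7<<4)) << 8 = 0x7800, level_flags = 2
         * raises it to 0x7880 = 30848, and adding 31 - (30848 % 31) = 28
         * gives 30876 = 0x789c, a multiple of 31 as the zlib header check
         * (FCHECK, RFC 1950) requires.
         */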
778 
779         state->status = BUSY_STATE;
780         putShortMSB(state, header);
781     }
782 
783     /* Flush as much pending output as possible */
784     if (state->pending != 0) {
785         flush_pending(strm);
786         if (strm->avail_out == 0) return Z_OK;
787     }
788 
789     /* If we came back in here to get the last output from
790      * a previous flush, we're done for now.
791      */
792     if (state->status == FLUSH_STATE) {
793 	state->status = BUSY_STATE;
794 	if (flush != Z_NO_FLUSH && flush != Z_FINISH)
795 	    return Z_OK;
796     }
797 
798     /* User must not provide more input after the first FINISH: */
799     if (state->status == FINISH_STATE && strm->avail_in != 0) {
800         ERR_RETURN(strm, Z_BUF_ERROR);
801     }
802 
803     /* Start a new block or continue the current one.
804      */
805     if (strm->avail_in != 0 || state->lookahead != 0 ||
806         (flush == Z_FINISH && state->status != FINISH_STATE)) {
807         int quit;
808 
809         if (flush == Z_FINISH) {
810             state->status = FINISH_STATE;
811         }
812         if (state->level <= 3) {
813             quit = deflate_fast(state, flush);
814         } else {
815             quit = deflate_slow(state, flush);
816         }
817         if (quit || strm->avail_out == 0)
818 	    return Z_OK;
819         /* If flush != Z_NO_FLUSH && avail_out == 0, the next call
820          * of deflate should use the same flush parameter to make sure
821          * that the flush is complete. So we don't have to output an
822          * empty block here, this will be done at next call. This also
823          * ensures that for a very small output buffer, we emit at most
824          * one empty block.
825          */
826     }
827 
828     /* If a flush was requested, we have a little more to output now. */
829     if (flush != Z_NO_FLUSH && flush != Z_FINISH
830 	&& state->status != FINISH_STATE) {
831 	switch (flush) {
832 	case Z_PARTIAL_FLUSH:
833 	    ct_align(state);
834 	    break;
835 	case Z_PACKET_FLUSH:
836 	    /* Output just the 3-bit `stored' block type value,
837 	       but not a zero length. */
838 	    ct_stored_type_only(state);
839 	    break;
840 	default:
841 	    ct_stored_block(state, (char*)0, 0L, 0);
842 	    /* For a full flush, this empty block will be recognized
843 	     * as a special marker by inflate_sync().
844 	     */
845 	    if (flush == Z_FULL_FLUSH) {
846 		CLEAR_HASH(state);             /* forget history */
847 	    }
848 	}
849 	flush_pending(strm);
850 	if (strm->avail_out == 0) {
851 	    /* We'll have to come back to get the rest of the output;
852 	     * this ensures we don't output a second zero-length stored
853 	     * block (or whatever).
854 	     */
855 	    state->status = FLUSH_STATE;
856 	    return Z_OK;
857 	}
858     }
859 
860     Assert(strm->avail_out > 0, "bug2");
861 
862     if (flush != Z_FINISH) return Z_OK;
863     if (state->noheader) return Z_STREAM_END;
864 
865     /* Write the zlib trailer (adler32) */
866     putShortMSB(state, (uInt)(state->adler >> 16));
867     putShortMSB(state, (uInt)(state->adler & 0xffff));
868     flush_pending(strm);
869     /* If avail_out is zero, the application will call deflate again
870      * to flush the rest.
871      */
872     state->noheader = -1; /* write the trailer only once! */
873     return state->pending != 0 ? Z_OK : Z_STREAM_END;
874 }
875 
876 /* ========================================================================= */
877 int deflateEnd (strm)
878     z_stream *strm;
879 {
880     deflate_state *state = (deflate_state *) strm->state;
881 
882     if (strm == Z_NULL || state == Z_NULL) return Z_STREAM_ERROR;
883 
884     TRY_FREE(strm, state->window, state->w_size * 2 * sizeof(Byte));
885     TRY_FREE(strm, state->prev, state->w_size * sizeof(Pos));
886     TRY_FREE(strm, state->head, state->hash_size * sizeof(Pos));
887     TRY_FREE(strm, state->pending_buf, state->lit_bufsize * 2 * sizeof(ush));
888 
889     ZFREE(strm, state, sizeof(deflate_state));
890     strm->state = Z_NULL;
891 
892     return Z_OK;
893 }
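
/*
 * Illustrative sketch (not used by this file, and compiled out): one way a
 * user-space caller could drive deflateInit2()/deflate()/deflateEnd() above
 * to compress a single packet with Z_PACKET_FLUSH.  The allocator callbacks
 * follow the calling convention of the ZALLOC/ZFREE macros in this file;
 * the names example_*, in, out, etc. are invented for the example, and a
 * real caller would include <stdlib.h> for malloc()/free().
 */
#if 0
local voidpf example_zalloc(opaque, items, size)
    voidpf opaque;
    uInt items, size;
{
    return (voidpf) malloc(items * size);
}

local void example_zfree(opaque, ptr, size)
    voidpf opaque, ptr;
    uInt size;
{
    free(ptr);
}

local int example_compress_packet(in, inlen, out, outlen)
    Bytef *in, *out;
    uInt inlen, outlen;
{
    z_stream strm;

    strm.zalloc = example_zalloc;
    strm.zfree  = example_zfree;
    strm.opaque = (voidpf) 0;

    /* A negative windowBits here would suppress the zlib header (see
     * deflateInit2 above); the trailing 0, 0 select the default strategy
     * and no minimum-compression requirement, as deflateInit() does.
     */
    if (deflateInit2(&strm, Z_DEFAULT_COMPRESSION, DEFLATED, MAX_WBITS,
                     DEF_MEM_LEVEL, 0, 0) != Z_OK)
        return -1;

    strm.next_in   = in;
    strm.avail_in  = inlen;
    strm.next_out  = out;
    strm.avail_out = outlen;

    /* Z_PACKET_FLUSH flushes what has been produced so far, emitting only
     * the stored-block type bits rather than a complete empty block.
     */
    if (deflate(&strm, Z_PACKET_FLUSH) != Z_OK || strm.avail_in != 0) {
        deflateEnd(&strm);
        return -1;
    }

    deflateEnd(&strm);
    return (int)(outlen - strm.avail_out);	/* compressed length */
}
#endif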
894 
895 /* ===========================================================================
896  * Read a new buffer from the current input stream, update the adler32
897  * and total number of bytes read.
898  */
899 local int read_buf(strm, buf, size)
900     z_stream *strm;
901     charf *buf;
902     unsigned size;
903 {
904     unsigned len = strm->avail_in;
905     deflate_state *state = (deflate_state *) strm->state;
906 
907     if (len > size) len = size;
908     if (len == 0) return 0;
909 
910     strm->avail_in  -= len;
911 
912     if (!state->noheader) {
913         state->adler = adler32(state->adler, strm->next_in, len);
914     }
915     zmemcpy(buf, strm->next_in, len);
916     strm->next_in  += len;
917     strm->total_in += len;
918 
919     return (int)len;
920 }
921 
922 /* ===========================================================================
923  * Initialize the "longest match" routines for a new zlib stream
924  */
925 local void lm_init (s)
926     deflate_state *s;
927 {
928     s->window_size = (ulg)2L*s->w_size;
929 
930     CLEAR_HASH(s);
931 
932     /* Set the default configuration parameters:
933      */
934     s->max_lazy_match   = configuration_table[s->level].max_lazy;
935     s->good_match       = configuration_table[s->level].good_length;
936     s->nice_match       = configuration_table[s->level].nice_length;
937     s->max_chain_length = configuration_table[s->level].max_chain;
938 
939     s->strstart = 0;
940     s->block_start = 0L;
941     s->lookahead = 0;
942     s->match_length = MIN_MATCH-1;
943     s->match_available = 0;
944     s->ins_h = 0;
945 #ifdef ASMV
946     match_init(); /* initialize the asm code */
947 #endif
948 }
949 
950 /* ===========================================================================
951  * Set match_start to the longest match starting at the given string and
952  * return its length. Matches shorter or equal to prev_length are discarded,
953  * in which case the result is equal to prev_length and match_start is
954  * garbage.
955  * IN assertions: cur_match is the head of the hash chain for the current
956  *   string (strstart) and its distance is <= MAX_DIST, and prev_length >= 1
957  */
958 #ifndef ASMV
959 /* For 80x86 and 680x0, an optimized version will be provided in match.asm or
960  * match.S. The code will be functionally equivalent.
961  */
962 local int longest_match(s, cur_match)
963     deflate_state *s;
964     IPos cur_match;                             /* current match */
965 {
966     unsigned chain_length = s->max_chain_length;/* max hash chain length */
967     register Bytef *scan = s->window + s->strstart; /* current string */
968     register Bytef *match;                       /* matched string */
969     register int len;                           /* length of current match */
970     int best_len = s->prev_length;              /* best match length so far */
971     IPos limit = s->strstart > (IPos)MAX_DIST(s) ?
972         s->strstart - (IPos)MAX_DIST(s) : NIL;
973     /* Stop when cur_match becomes <= limit. To simplify the code,
974      * we prevent matches with the string of window index 0.
975      */
976     Posf *prev = s->prev;
977     uInt wmask = s->w_mask;
978 
979 #ifdef UNALIGNED_OK
980     /* Compare two bytes at a time. Note: this is not always beneficial.
981      * Try with and without -DUNALIGNED_OK to check.
982      */
983     register Bytef *strend = s->window + s->strstart + MAX_MATCH - 1;
984     register ush scan_start = *(ushf*)scan;
985     register ush scan_end   = *(ushf*)(scan+best_len-1);
986 #else
987     register Bytef *strend = s->window + s->strstart + MAX_MATCH;
988     register Byte scan_end1  = scan[best_len-1];
989     register Byte scan_end   = scan[best_len];
990 #endif
991 
992     /* The code is optimized for HASH_BITS >= 8 and MAX_MATCH-2 multiple of 16.
993      * It is easy to get rid of this optimization if necessary.
994      */
995     Assert(s->hash_bits >= 8 && MAX_MATCH == 258, "Code too clever");
996 
997     /* Do not waste too much time if we already have a good match: */
998     if (s->prev_length >= s->good_match) {
999         chain_length >>= 2;
1000     }
1001     Assert((ulg)s->strstart <= s->window_size-MIN_LOOKAHEAD, "need lookahead");
1002 
1003     do {
1004         Assert(cur_match < s->strstart, "no future");
1005         match = s->window + cur_match;
1006 
1007         /* Skip to next match if the match length cannot increase
1008          * or if the match length is less than 2:
1009          */
1010 #if (defined(UNALIGNED_OK) && MAX_MATCH == 258)
1011         /* This code assumes sizeof(unsigned short) == 2. Do not use
1012          * UNALIGNED_OK if your compiler uses a different size.
1013          */
1014         if (*(ushf*)(match+best_len-1) != scan_end ||
1015             *(ushf*)match != scan_start) continue;
1016 
1017         /* It is not necessary to compare scan[2] and match[2] since they are
1018          * always equal when the other bytes match, given that the hash keys
1019          * are equal and that HASH_BITS >= 8. Compare 2 bytes at a time at
1020          * strstart+3, +5, ... up to strstart+257. We check for insufficient
1021          * lookahead only every 4th comparison; the 128th check will be made
1022          * at strstart+257. If MAX_MATCH-2 is not a multiple of 8, it is
1023          * necessary to put more guard bytes at the end of the window, or
1024          * to check more often for insufficient lookahead.
1025          */
1026         Assert(scan[2] == match[2], "scan[2]?");
1027         scan++, match++;
1028         do {
1029         } while (*(ushf*)(scan+=2) == *(ushf*)(match+=2) &&
1030                  *(ushf*)(scan+=2) == *(ushf*)(match+=2) &&
1031                  *(ushf*)(scan+=2) == *(ushf*)(match+=2) &&
1032                  *(ushf*)(scan+=2) == *(ushf*)(match+=2) &&
1033                  scan < strend);
1034         /* The funny "do {}" generates better code on most compilers */
1035 
1036         /* Here, scan <= window+strstart+257 */
1037         Assert(scan <= s->window+(unsigned)(s->window_size-1), "wild scan");
1038         if (*scan == *match) scan++;
1039 
1040         len = (MAX_MATCH - 1) - (int)(strend-scan);
1041         scan = strend - (MAX_MATCH-1);
1042 
1043 #else /* UNALIGNED_OK */
1044 
1045         if (match[best_len]   != scan_end  ||
1046             match[best_len-1] != scan_end1 ||
1047             *match            != *scan     ||
1048             *++match          != scan[1])      continue;
1049 
1050         /* The check at best_len-1 can be removed because it will be made
1051          * again later. (This heuristic is not always a win.)
1052          * It is not necessary to compare scan[2] and match[2] since they
1053          * are always equal when the other bytes match, given that
1054          * the hash keys are equal and that HASH_BITS >= 8.
1055          */
1056         scan += 2, match++;
1057         Assert(*scan == *match, "match[2]?");
1058 
1059         /* We check for insufficient lookahead only every 8th comparison;
1060          * the 256th check will be made at strstart+258.
1061          */
1062         do {
1063         } while (*++scan == *++match && *++scan == *++match &&
1064                  *++scan == *++match && *++scan == *++match &&
1065                  *++scan == *++match && *++scan == *++match &&
1066                  *++scan == *++match && *++scan == *++match &&
1067                  scan < strend);
1068 
1069         Assert(scan <= s->window+(unsigned)(s->window_size-1), "wild scan");
1070 
1071         len = MAX_MATCH - (int)(strend - scan);
1072         scan = strend - MAX_MATCH;
1073 
1074 #endif /* UNALIGNED_OK */
1075 
1076         if (len > best_len) {
1077             s->match_start = cur_match;
1078             best_len = len;
1079             if (len >= s->nice_match) break;
1080 #ifdef UNALIGNED_OK
1081             scan_end = *(ushf*)(scan+best_len-1);
1082 #else
1083             scan_end1  = scan[best_len-1];
1084             scan_end   = scan[best_len];
1085 #endif
1086         }
1087     } while ((cur_match = prev[cur_match & wmask]) > limit
1088              && --chain_length != 0);
1089 
1090     return best_len;
1091 }
1092 #endif /* ASMV */
1093 
1094 #ifdef DEBUG_ZLIB
1095 /* ===========================================================================
1096  * Check that the match at match_start is indeed a match.
1097  */
1098 local void check_match(s, start, match, length)
1099     deflate_state *s;
1100     IPos start, match;
1101     int length;
1102 {
1103     /* check that the match is indeed a match */
1104     if (memcmp((charf *)s->window + match,
1105                 (charf *)s->window + start, length) != EQUAL) {
1106         fprintf(stderr,
1107             " start %u, match %u, length %d\n",
1108             start, match, length);
1109         do { fprintf(stderr, "%c%c", s->window[match++],
1110                      s->window[start++]); } while (--length != 0);
1111         z_error("invalid match");
1112     }
1113     if (verbose > 1) {
1114         fprintf(stderr,"\\[%d,%d]", start-match, length);
1115         do { putc(s->window[start++], stderr); } while (--length != 0);
1116     }
1117 }
1118 #else
1119 #  define check_match(s, start, match, length)
1120 #endif
1121 
1122 /* ===========================================================================
1123  * Fill the window when the lookahead becomes insufficient.
1124  * Updates strstart and lookahead.
1125  *
1126  * IN assertion: lookahead < MIN_LOOKAHEAD
1127  * OUT assertions: strstart <= window_size-MIN_LOOKAHEAD
1128  *    At least one byte has been read, or avail_in == 0; reads are
1129  *    performed for at least two bytes (required for the zip translate_eol
1130  *    option -- not supported here).
1131  */
1132 local void fill_window(s)
1133     deflate_state *s;
1134 {
1135     register unsigned n, m;
1136     register Posf *p;
1137     unsigned more;    /* Amount of free space at the end of the window. */
1138     uInt wsize = s->w_size;
1139 
1140     do {
1141         more = (unsigned)(s->window_size -(ulg)s->lookahead -(ulg)s->strstart);
1142 
1143         /* Deal with !@#$% 64K limit: */
1144         if (more == 0 && s->strstart == 0 && s->lookahead == 0) {
1145             more = wsize;
1146         } else if (more == (unsigned)(-1)) {
1147             /* Very unlikely, but possible on 16 bit machine if strstart == 0
1148              * and lookahead == 1 (input done one byte at a time)
1149              */
1150             more--;
1151 
1152         /* If the window is almost full and there is insufficient lookahead,
1153          * move the upper half to the lower one to make room in the upper half.
1154          */
1155         } else if (s->strstart >= wsize+MAX_DIST(s)) {
1156 
1157             /* By the IN assertion, the window is not empty so we can't confuse
1158              * more == 0 with more == 64K on a 16 bit machine.
1159              */
1160             zmemcpy((charf *)s->window, (charf *)s->window+wsize,
1161                    (unsigned)wsize);
1162             s->match_start -= wsize;
1163             s->strstart    -= wsize; /* we now have strstart >= MAX_DIST */
1164 
1165             s->block_start -= (long) wsize;
1166 
1167             /* Slide the hash table (could be avoided with 32 bit values
1168                at the expense of memory usage):
1169              */
1170             n = s->hash_size;
1171             p = &s->head[n];
1172             do {
1173                 m = *--p;
1174                 *p = (Pos)(m >= wsize ? m-wsize : NIL);
1175             } while (--n);
1176 
1177             n = wsize;
1178             p = &s->prev[n];
1179             do {
1180                 m = *--p;
1181                 *p = (Pos)(m >= wsize ? m-wsize : NIL);
1182                 /* If n is not on any hash chain, prev[n] is garbage but
1183                  * its value will never be used.
1184                  */
1185             } while (--n);
1186 
1187             more += wsize;
1188         }
1189         if (s->strm->avail_in == 0) return;
1190 
1191         /* If there was no sliding:
1192          *    strstart <= WSIZE+MAX_DIST-1 && lookahead <= MIN_LOOKAHEAD - 1 &&
1193          *    more == window_size - lookahead - strstart
1194          * => more >= window_size - (MIN_LOOKAHEAD-1 + WSIZE + MAX_DIST-1)
1195          * => more >= window_size - 2*WSIZE + 2
1196          * In the BIG_MEM or MMAP case (not yet supported),
1197          *   window_size == input_size + MIN_LOOKAHEAD  &&
1198          *   strstart + s->lookahead <= input_size => more >= MIN_LOOKAHEAD.
1199          * Otherwise, window_size == 2*WSIZE so more >= 2.
1200          * If there was sliding, more >= WSIZE. So in all cases, more >= 2.
1201          */
1202         Assert(more >= 2, "more < 2");
1203 
1204         n = read_buf(s->strm, (charf *)s->window + s->strstart + s->lookahead,
1205                      more);
1206         s->lookahead += n;
1207 
1208         /* Initialize the hash value now that we have some input: */
1209         if (s->lookahead >= MIN_MATCH) {
1210             s->ins_h = s->window[s->strstart];
1211             UPDATE_HASH(s, s->ins_h, s->window[s->strstart+1]);
1212 #if MIN_MATCH != 3
1213             Call UPDATE_HASH() MIN_MATCH-3 more times
1214 #endif
1215         }
1216         /* If the whole input has less than MIN_MATCH bytes, ins_h is garbage,
1217          * but this is not important since only literal bytes will be emitted.
1218          */
1219 
1220     } while (s->lookahead < MIN_LOOKAHEAD && s->strm->avail_in != 0);
1221 }
1222 
1223 /* ===========================================================================
1224  * Flush the current block, with given end-of-file flag.
1225  * IN assertion: strstart is set to the end of the current match.
1226  */
1227 #define FLUSH_BLOCK_ONLY(s, flush) { \
1228    ct_flush_block(s, (s->block_start >= 0L ? \
1229            (charf *)&s->window[(unsigned)s->block_start] : \
1230            (charf *)Z_NULL), (long)s->strstart - s->block_start, (flush)); \
1231    s->block_start = s->strstart; \
1232    flush_pending(s->strm); \
1233    Tracev((stderr,"[FLUSH]")); \
1234 }
1235 
1236 /* Same but force premature exit if necessary. */
1237 #define FLUSH_BLOCK(s, flush) { \
1238    FLUSH_BLOCK_ONLY(s, flush); \
1239    if (s->strm->avail_out == 0) return 1; \
1240 }
1241 
1242 /* ===========================================================================
1243  * Compress as much as possible from the input stream, return true if
1244  * processing was terminated prematurely (no more input or output space).
1245  * This function does not perform lazy evaluation of matches and inserts
1246  * new strings in the dictionary only for unmatched strings or for short
1247  * matches. It is used only for the fast compression options.
1248  */
1249 local int deflate_fast(s, flush)
1250     deflate_state *s;
1251     int flush;
1252 {
1253     IPos hash_head = NIL; /* head of the hash chain */
1254     int bflush;     /* set if current block must be flushed */
1255 
1256     s->prev_length = MIN_MATCH-1;
1257 
1258     for (;;) {
1259         /* Make sure that we always have enough lookahead, except
1260          * at the end of the input file. We need MAX_MATCH bytes
1261          * for the next match, plus MIN_MATCH bytes to insert the
1262          * string following the next match.
1263          */
1264         if (s->lookahead < MIN_LOOKAHEAD) {
1265             fill_window(s);
1266             if (s->lookahead < MIN_LOOKAHEAD && flush == Z_NO_FLUSH) return 1;
1267 
1268             if (s->lookahead == 0) break; /* flush the current block */
1269         }
1270 
1271         /* Insert the string window[strstart .. strstart+2] in the
1272          * dictionary, and set hash_head to the head of the hash chain:
1273          */
1274         if (s->lookahead >= MIN_MATCH) {
1275             INSERT_STRING(s, s->strstart, hash_head);
1276         }
1277 
1278         /* Find the longest match, discarding those <= prev_length.
1279          * At this point we always have match_length < MIN_MATCH
1280          */
1281         if (hash_head != NIL && s->strstart - hash_head <= MAX_DIST(s)) {
1282             /* To simplify the code, we prevent matches with the string
1283              * of window index 0 (in particular we have to avoid a match
1284              * of the string with itself at the start of the input file).
1285              */
1286             if (s->strategy != Z_HUFFMAN_ONLY) {
1287                 s->match_length = longest_match (s, hash_head);
1288             }
1289             /* longest_match() sets match_start */
1290 
1291             if (s->match_length > s->lookahead) s->match_length = s->lookahead;
1292         }
1293         if (s->match_length >= MIN_MATCH) {
1294             check_match(s, s->strstart, s->match_start, s->match_length);
1295 
1296             bflush = ct_tally(s, s->strstart - s->match_start,
1297                               s->match_length - MIN_MATCH);
1298 
1299             s->lookahead -= s->match_length;
1300 
1301             /* Insert new strings in the hash table only if the match length
1302              * is not too large. This saves time but degrades compression.
1303              */
1304             if (s->match_length <= s->max_insert_length &&
1305                 s->lookahead >= MIN_MATCH) {
1306                 s->match_length--; /* string at strstart already in hash table */
1307                 do {
1308                     s->strstart++;
1309                     INSERT_STRING(s, s->strstart, hash_head);
1310                     /* strstart never exceeds WSIZE-MAX_MATCH, so there are
1311                      * always MIN_MATCH bytes ahead.
1312                      */
1313                 } while (--s->match_length != 0);
1314                 s->strstart++;
1315             } else {
1316                 s->strstart += s->match_length;
1317                 s->match_length = 0;
1318                 s->ins_h = s->window[s->strstart];
1319                 UPDATE_HASH(s, s->ins_h, s->window[s->strstart+1]);
1320 #if MIN_MATCH != 3
1321                 Call UPDATE_HASH() MIN_MATCH-3 more times
1322 #endif
1323                 /* If lookahead < MIN_MATCH, ins_h is garbage, but it does not
1324                  * matter since it will be recomputed at next deflate call.
1325                  */
1326             }
1327         } else {
1328             /* No match, output a literal byte */
1329             Tracevv((stderr,"%c", s->window[s->strstart]));
1330             bflush = ct_tally (s, 0, s->window[s->strstart]);
1331             s->lookahead--;
1332             s->strstart++;
1333         }
1334         if (bflush) FLUSH_BLOCK(s, Z_NO_FLUSH);
1335     }
1336     FLUSH_BLOCK(s, flush);
1337     return 0; /* normal exit */
1338 }
1339 
1340 /* ===========================================================================
1341  * Same as above, but achieves better compression. We use a lazy
1342  * evaluation for matches: a match is finally adopted only if there is
1343  * no better match at the next window position.
1344  */
1345 local int deflate_slow(s, flush)
1346     deflate_state *s;
1347     int flush;
1348 {
1349     IPos hash_head = NIL;    /* head of hash chain */
1350     int bflush;              /* set if current block must be flushed */
1351 
1352     /* Process the input block. */
1353     for (;;) {
1354         /* Make sure that we always have enough lookahead, except
1355          * at the end of the input file. We need MAX_MATCH bytes
1356          * for the next match, plus MIN_MATCH bytes to insert the
1357          * string following the next match.
1358          */
1359         if (s->lookahead < MIN_LOOKAHEAD) {
1360             fill_window(s);
1361             if (s->lookahead < MIN_LOOKAHEAD && flush == Z_NO_FLUSH) return 1;
1362 
1363             if (s->lookahead == 0) break; /* flush the current block */
1364         }
1365 
1366         /* Insert the string window[strstart .. strstart+2] in the
1367          * dictionary, and set hash_head to the head of the hash chain:
1368          */
1369         if (s->lookahead >= MIN_MATCH) {
1370             INSERT_STRING(s, s->strstart, hash_head);
1371         }
1372 
1373         /* Find the longest match, discarding those <= prev_length.
1374          */
1375         s->prev_length = s->match_length, s->prev_match = s->match_start;
1376         s->match_length = MIN_MATCH-1;
1377 
1378         if (hash_head != NIL && s->prev_length < s->max_lazy_match &&
1379             s->strstart - hash_head <= MAX_DIST(s)) {
1380             /* To simplify the code, we prevent matches with the string
1381              * of window index 0 (in particular we have to avoid a match
1382              * of the string with itself at the start of the input file).
1383              */
1384             if (s->strategy != Z_HUFFMAN_ONLY) {
1385                 s->match_length = longest_match (s, hash_head);
1386             }
1387             /* longest_match() sets match_start */
1388             if (s->match_length > s->lookahead) s->match_length = s->lookahead;
1389 
1390             if (s->match_length <= 5 && (s->strategy == Z_FILTERED ||
1391                  (s->match_length == MIN_MATCH &&
1392                   s->strstart - s->match_start > TOO_FAR))) {
1393 
1394                 /* If prev_match is also MIN_MATCH, match_start is garbage
1395                  * but we will ignore the current match anyway.
1396                  */
1397                 s->match_length = MIN_MATCH-1;
1398             }
1399         }
1400         /* If there was a match at the previous step and the current
1401          * match is not better, output the previous match:
1402          */
1403         if (s->prev_length >= MIN_MATCH && s->match_length <= s->prev_length) {
1404             uInt max_insert = s->strstart + s->lookahead - MIN_MATCH;
1405             /* Do not insert strings in hash table beyond this. */
1406 
1407             check_match(s, s->strstart-1, s->prev_match, s->prev_length);
1408 
1409             bflush = ct_tally(s, s->strstart -1 - s->prev_match,
1410                               s->prev_length - MIN_MATCH);
1411 
1412             /* Insert in hash table all strings up to the end of the match.
1413              * strstart-1 and strstart are already inserted. If there is not
1414              * enough lookahead, the last two strings are not inserted in
1415              * the hash table.
1416              */
1417             s->lookahead -= s->prev_length-1;
1418             s->prev_length -= 2;
1419             do {
1420                 if (++s->strstart <= max_insert) {
1421                     INSERT_STRING(s, s->strstart, hash_head);
1422                 }
1423             } while (--s->prev_length != 0);
1424             s->match_available = 0;
1425             s->match_length = MIN_MATCH-1;
1426             s->strstart++;
1427 
1428             if (bflush) FLUSH_BLOCK(s, Z_NO_FLUSH);
1429 
1430         } else if (s->match_available) {
1431             /* If there was no match at the previous position, output a
1432              * single literal. If there was a match but the current match
1433              * is longer, truncate the previous match to a single literal.
1434              */
1435             Tracevv((stderr,"%c", s->window[s->strstart-1]));
1436             if (ct_tally (s, 0, s->window[s->strstart-1])) {
1437                 FLUSH_BLOCK_ONLY(s, Z_NO_FLUSH);
1438             }
1439             s->strstart++;
1440             s->lookahead--;
1441             if (s->strm->avail_out == 0) return 1;
1442         } else {
1443             /* There is no previous match to compare with; wait for
1444              * the next step to decide.
1445              */
1446             s->match_available = 1;
1447             s->strstart++;
1448             s->lookahead--;
1449         }
1450     }
1451     Assert (flush != Z_NO_FLUSH, "no flush?");
1452     if (s->match_available) {
1453         Tracevv((stderr,"%c", s->window[s->strstart-1]));
1454         ct_tally (s, 0, s->window[s->strstart-1]);
1455         s->match_available = 0;
1456     }
1457     FLUSH_BLOCK(s, flush);
1458     return 0;
1459 }
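
/* The loop above implements the "lazy match" rule: a match found at the
 * previous position is emitted only if the match starting at the current
 * position is not longer.  The standalone sketch below (illustrative only,
 * guarded out of compilation, not part of zlib) applies the same decision
 * rule with a naive matcher over a plain byte buffer; naive_longest_match()
 * is a hypothetical helper, not a function of this file.
 */
#if 0
#include <stdio.h>

/* naive longest match of buf[pos..] against any earlier position */
static unsigned naive_longest_match(const unsigned char *buf, unsigned len,
                                    unsigned pos, unsigned *start)
{
    unsigned best = 0, i, j;
    for (i = 0; i < pos; i++) {
        for (j = 0; pos + j < len && buf[i + j] == buf[pos + j]; j++)
            ;
        if (j > best) best = j, *start = i;
    }
    return best;
}

int main(void)
{
    const unsigned char buf[] = "abcabcabd";
    unsigned pos = 3;              /* previous position under consideration */
    unsigned s0 = 0, s1 = 0;
    unsigned m0 = naive_longest_match(buf, sizeof buf - 1, pos, &s0);
    unsigned m1 = naive_longest_match(buf, sizeof buf - 1, pos + 1, &s1);

    if (m1 > m0)     /* lazy rule: the next position matches better, defer */
        printf("emit literal '%c', keep looking\n", buf[pos]);
    else             /* previous match is at least as good: emit it now */
        printf("emit match: length %u, distance %u\n", m0, pos - s0);
    return 0;
}
#endif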
1460 
1461 
1462 /*+++++*/
1463 /* trees.c -- output deflated data using Huffman coding
1464  * Copyright (C) 1995 Jean-loup Gailly
1465  * For conditions of distribution and use, see copyright notice in zlib.h
1466  */
1467 
1468 /*
1469  *  ALGORITHM
1470  *
1471  *      The "deflation" process uses several Huffman trees. The more
1472  *      common source values are represented by shorter bit sequences.
1473  *
1474  *      Each code tree is stored in a compressed form which is itself
1475  * a Huffman encoding of the lengths of all the code strings (in
1476  * ascending order by source values).  The actual code strings are
1477  * reconstructed from the lengths in the inflate process, as described
1478  * in the deflate specification.
1479  *
1480  *  REFERENCES
1481  *
1482  *      Deutsch, L.P.,"'Deflate' Compressed Data Format Specification".
1483  *      Available in ftp.uu.net:/pub/archiving/zip/doc/deflate-1.1.doc
1484  *
1485  *      Storer, James A.
1486  *          Data Compression:  Methods and Theory, pp. 49-50.
1487  *          Computer Science Press, 1988.  ISBN 0-7167-8156-5.
1488  *
1489  *      Sedgewick, R.
1490  *          Algorithms, p290.
1491  *          Addison-Wesley, 1983. ISBN 0-201-06672-6.
1492  */
1493 
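/* A canonical Huffman code is completely determined by its list of code
 * lengths, which is why only the lengths need to be transmitted.  The
 * standalone sketch below (illustrative only, guarded out of compilation)
 * rebuilds codes from lengths with the same counting scheme used by
 * gen_codes() further down; the example lengths {2,1,3,3} yield the codes
 * 2 (10), 0 (0), 6 (110) and 7 (111).
 */
#if 0
#include <stdio.h>

#define EX_MAX_BITS 15

int main(void)
{
    int len[] = {2, 1, 3, 3};               /* code length of each symbol */
    int nsym = 4;
    int bl_count[EX_MAX_BITS+1] = {0};      /* codes per bit length */
    unsigned next_code[EX_MAX_BITS+1];
    unsigned code = 0;
    int bits, n;

    for (n = 0; n < nsym; n++) bl_count[len[n]]++;
    for (bits = 1; bits <= EX_MAX_BITS; bits++) {
        code = (code + bl_count[bits-1]) << 1;
        next_code[bits] = code;
    }
    for (n = 0; n < nsym; n++)              /* assign codes in symbol order */
        printf("symbol %d: len %d code %u\n", n, len[n], next_code[len[n]]++);
    return 0;
}
#endif
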
1494 /* From: trees.c,v 1.5 1995/05/03 17:27:12 jloup Exp */
1495 
1496 #ifdef DEBUG_ZLIB
1497 #  include <ctype.h>
1498 #endif
1499 
1500 /* ===========================================================================
1501  * Constants
1502  */
1503 
1504 #define MAX_BL_BITS 7
1505 /* Bit length codes must not exceed MAX_BL_BITS bits */
1506 
1507 #define END_BLOCK 256
1508 /* end of block literal code */
1509 
1510 #define REP_3_6      16
1511 /* repeat previous bit length 3-6 times (2 bits of repeat count) */
1512 
1513 #define REPZ_3_10    17
1514 /* repeat a zero length 3-10 times  (3 bits of repeat count) */
1515 
1516 #define REPZ_11_138  18
1517 /* repeat a zero length 11-138 times  (7 bits of repeat count) */
1518 
1519 local int extra_lbits[LENGTH_CODES] /* extra bits for each length code */
1520    = {0,0,0,0,0,0,0,0,1,1,1,1,2,2,2,2,3,3,3,3,4,4,4,4,5,5,5,5,0};
1521 
1522 local int extra_dbits[D_CODES] /* extra bits for each distance code */
1523    = {0,0,0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7,8,8,9,9,10,10,11,11,12,12,13,13};
1524 
1525 local int extra_blbits[BL_CODES]/* extra bits for each bit length code */
1526    = {0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,3,7};
1527 
1528 local uch bl_order[BL_CODES]
1529    = {16,17,18,0,8,7,9,6,10,5,11,4,12,3,13,2,14,1,15};
1530 /* The lengths of the bit length codes are sent in order of decreasing
1531  * probability, to avoid transmitting the lengths for unused bit length codes.
1532  */
1533 
1534 #define Buf_size (8 * 2*sizeof(char))
1535 /* Number of bits used within bi_buf. (bi_buf might be implemented on
1536  * more than 16 bits on some systems.)
1537  */
1538 
1539 /* ===========================================================================
1540  * Local data. These are initialized only once.
1541  * To do: initialize at compile time to be completely reentrant. ???
1542  */
1543 
1544 local ct_data static_ltree[L_CODES+2];
1545 /* The static literal tree. Since the bit lengths are imposed, there is no
1546  * need for the L_CODES extra codes used during heap construction. However,
1547  * the codes 286 and 287 are needed to build a canonical tree (see ct_init
1548  * below).
1549  */
1550 
1551 local ct_data static_dtree[D_CODES];
1552 /* The static distance tree. (Actually a trivial tree since all codes use
1553  * 5 bits.)
1554  */
1555 
1556 local uch dist_code[512];
1557 /* distance codes. The first 256 values correspond to the distances
1558  * 3 .. 258, the last 256 values correspond to the top 8 bits of
1559  * the 15 bit distances.
1560  */
1561 
1562 local uch length_code[MAX_MATCH-MIN_MATCH+1];
1563 /* length code for each normalized match length (0 == MIN_MATCH) */
1564 
1565 local int base_length[LENGTH_CODES];
1566 /* First normalized length for each code (0 = MIN_MATCH) */
1567 
1568 local int base_dist[D_CODES];
1569 /* First normalized distance for each code (0 = distance of 1) */
1570 
1571 struct static_tree_desc_s {
1572     ct_data *static_tree;        /* static tree or NULL */
1573     intf    *extra_bits;         /* extra bits for each code or NULL */
1574     int     extra_base;          /* base index for extra_bits */
1575     int     elems;               /* max number of elements in the tree */
1576     int     max_length;          /* max bit length for the codes */
1577 };
1578 
1579 local static_tree_desc  static_l_desc =
1580 {static_ltree, extra_lbits, LITERALS+1, L_CODES, MAX_BITS};
1581 
1582 local static_tree_desc  static_d_desc =
1583 {static_dtree, extra_dbits, 0,          D_CODES, MAX_BITS};
1584 
1585 local static_tree_desc  static_bl_desc =
1586 {(ct_data *)0, extra_blbits, 0,      BL_CODES, MAX_BL_BITS};
1587 
1588 /* ===========================================================================
1589  * Local (static) routines in this file.
1590  */
1591 
1592 local void ct_static_init OF((void));
1593 local void init_block     OF((deflate_state *s));
1594 local void pqdownheap     OF((deflate_state *s, ct_data *tree, int k));
1595 local void gen_bitlen     OF((deflate_state *s, tree_desc *desc));
1596 local void gen_codes      OF((ct_data *tree, int max_code, ushf *bl_count));
1597 local void build_tree     OF((deflate_state *s, tree_desc *desc));
1598 local void scan_tree      OF((deflate_state *s, ct_data *tree, int max_code));
1599 local void send_tree      OF((deflate_state *s, ct_data *tree, int max_code));
1600 local int  build_bl_tree  OF((deflate_state *s));
1601 local void send_all_trees OF((deflate_state *s, int lcodes, int dcodes,
1602                               int blcodes));
1603 local void compress_block OF((deflate_state *s, ct_data *ltree,
1604                               ct_data *dtree));
1605 local void set_data_type  OF((deflate_state *s));
1606 local unsigned bi_reverse OF((unsigned value, int length));
1607 local void bi_windup      OF((deflate_state *s));
1608 local void bi_flush       OF((deflate_state *s));
1609 local void copy_block     OF((deflate_state *s, charf *buf, unsigned len,
1610                               int header));
1611 
1612 #ifndef DEBUG_ZLIB
1613 #  define send_code(s, c, tree) send_bits(s, tree[c].Code, tree[c].Len)
1614    /* Send a code of the given tree. c and tree must not have side effects */
1615 
1616 #else /* DEBUG_ZLIB */
1617 #  define send_code(s, c, tree) \
1618      { if (verbose>1) fprintf(stderr,"\ncd %3d ",(c)); \
1619        send_bits(s, tree[c].Code, tree[c].Len); }
1620 #endif
1621 
1622 #define d_code(dist) \
1623    ((dist) < 256 ? dist_code[dist] : dist_code[256+((dist)>>7)])
1624 /* Mapping from a distance to a distance code. dist is the distance - 1 and
1625  * must not have side effects. dist_code[256] and dist_code[257] are never
1626  * used.
1627  */
1628 
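
/* Worked example of the split lookup above (illustrative only, guarded out
 * of compilation): for a match at actual distance 1000, dist = 999 >= 256,
 * so the macro reads dist_code[256 + (999>>7)] = dist_code[263], which the
 * initialization below (the same scheme as ct_static_init() later in this
 * file) fills with distance code 19 -- base distance 769, 8 extra bits in
 * the DEFLATE format.
 */
#if 0
#include <stdio.h>

static unsigned char ex_dist_code[512];
static const int ex_extra_dbits[30] =
   {0,0,0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7,8,8,9,9,10,10,11,11,12,12,13,13};

#define EX_D_CODE(dist) \
   ((dist) < 256 ? ex_dist_code[dist] : ex_dist_code[256+((dist)>>7)])

int main(void)
{
    int code, n, dist = 0;

    for (code = 0; code < 16; code++)           /* distances 1..256 */
        for (n = 0; n < (1 << ex_extra_dbits[code]); n++)
            ex_dist_code[dist++] = (unsigned char)code;
    dist >>= 7;                                 /* now in units of 128 */
    for ( ; code < 30; code++)
        for (n = 0; n < (1 << (ex_extra_dbits[code]-7)); n++)
            ex_dist_code[256 + dist++] = (unsigned char)code;

    printf("distance    4 -> code %d\n", EX_D_CODE(4 - 1));    /* code 3  */
    printf("distance 1000 -> code %d\n", EX_D_CODE(1000 - 1)); /* code 19 */
    return 0;
}
#endif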
1629 /* ===========================================================================
1630  * Output a short LSB first on the stream.
1631  * IN assertion: there is enough room in pendingBuf.
1632  */
1633 #define put_short(s, w) { \
1634     put_byte(s, (uch)((w) & 0xff)); \
1635     put_byte(s, (uch)((ush)(w) >> 8)); \
1636 }
1637 
1638 /* ===========================================================================
1639  * Send a value on a given number of bits.
1640  * IN assertion: length <= 16 and value fits in length bits.
1641  */
1642 #ifdef DEBUG_ZLIB
1643 local void send_bits      OF((deflate_state *s, int value, int length));
1644 
1645 local void send_bits(s, value, length)
1646     deflate_state *s;
1647     int value;  /* value to send */
1648     int length; /* number of bits */
1649 {
1650     Tracev((stderr," l %2d v %4x ", length, value));
1651     Assert(length > 0 && length <= 15, "invalid length");
1652     s->bits_sent += (ulg)length;
1653 
1654     /* If not enough room in bi_buf, use (valid) bits from bi_buf and
1655      * (16 - bi_valid) bits from value, leaving (width - (16-bi_valid))
1656      * (16 - bi_valid) bits from value, leaving (length - (16-bi_valid))
1657      */
1658     if (s->bi_valid > (int)Buf_size - length) {
1659         s->bi_buf |= (value << s->bi_valid);
1660         put_short(s, s->bi_buf);
1661         s->bi_buf = (ush)value >> (Buf_size - s->bi_valid);
1662         s->bi_valid += length - Buf_size;
1663     } else {
1664         s->bi_buf |= value << s->bi_valid;
1665         s->bi_valid += length;
1666     }
1667 }
1668 #else /* !DEBUG_ZLIB */
1669 
1670 #define send_bits(s, value, length) \
1671 { int len = length;\
1672   if (s->bi_valid > (int)Buf_size - len) {\
1673     int val = value;\
1674     s->bi_buf |= (val << s->bi_valid);\
1675     put_short(s, s->bi_buf);\
1676     s->bi_buf = (ush)val >> (Buf_size - s->bi_valid);\
1677     s->bi_valid += len - Buf_size;\
1678   } else {\
1679     s->bi_buf |= (value) << s->bi_valid;\
1680     s->bi_valid += len;\
1681   }\
1682 }
1683 #endif /* DEBUG_ZLIB */
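
/* The macro and function above accumulate values LSB first in a 16-bit
 * buffer and flush it to the output whenever it would overflow.  Minimal
 * standalone sketch of the same mechanism (illustrative only, guarded out
 * of compilation, not the macro above): sending 5 in 3 bits and then 0x3f
 * in 6 bits leaves 9 bits pending with value 0x1fd (0x3f shifted left by
 * 3, OR'ed with 5).
 */
#if 0
#include <stdio.h>

static unsigned bit_buf = 0;   /* pending output bits, LSB first */
static int bit_valid = 0;      /* number of valid bits in bit_buf */

static void ex_send_bits(unsigned value, int length)
{
    if (bit_valid > 16 - length) {          /* buffer would overflow */
        bit_buf |= value << bit_valid;
        printf("flush 16 bits: %#06x\n", bit_buf & 0xffff);
        bit_buf = value >> (16 - bit_valid);
        bit_valid += length - 16;
    } else {
        bit_buf |= value << bit_valid;
        bit_valid += length;
    }
}

int main(void)
{
    ex_send_bits(5, 3);        /* binary 101    */
    ex_send_bits(0x3f, 6);     /* binary 111111 */
    printf("pending: %d bits, value %#x\n", bit_valid, bit_buf);
    return 0;
}
#endif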
1684 
1685 
1686 #define MAX(a,b) (a >= b ? a : b)
1687 /* the arguments must not have side effects */
1688 
1689 /* ===========================================================================
1690  * Initialize the various 'constant' tables.
1691  * To do: do this at compile time.
1692  */
1693 local void ct_static_init()
1694 {
1695     int n;        /* iterates over tree elements */
1696     int bits;     /* bit counter */
1697     int length;   /* length value */
1698     int code;     /* code value */
1699     int dist;     /* distance index */
1700     ush bl_count[MAX_BITS+1];
1701     /* number of codes at each bit length for an optimal tree */
1702 
1703     /* Initialize the mapping length (0..255) -> length code (0..28) */
1704     length = 0;
1705     for (code = 0; code < LENGTH_CODES-1; code++) {
1706         base_length[code] = length;
1707         for (n = 0; n < (1<<extra_lbits[code]); n++) {
1708             length_code[length++] = (uch)code;
1709         }
1710     }
1711     Assert (length == 256, "ct_static_init: length != 256");
1712     /* Note that the length 255 (match length 258) can be represented
1713      * in two different ways: code 284 + 5 bits or code 285, so we
1714      * overwrite length_code[255] to use the best encoding:
1715      */
1716     length_code[length-1] = (uch)code;
1717 
1718     /* Initialize the mapping dist (0..32K) -> dist code (0..29) */
1719     dist = 0;
1720     for (code = 0 ; code < 16; code++) {
1721         base_dist[code] = dist;
1722         for (n = 0; n < (1<<extra_dbits[code]); n++) {
1723             dist_code[dist++] = (uch)code;
1724         }
1725     }
1726     Assert (dist == 256, "ct_static_init: dist != 256");
1727     dist >>= 7; /* from now on, all distances are divided by 128 */
1728     for ( ; code < D_CODES; code++) {
1729         base_dist[code] = dist << 7;
1730         for (n = 0; n < (1<<(extra_dbits[code]-7)); n++) {
1731             dist_code[256 + dist++] = (uch)code;
1732         }
1733     }
1734     Assert (dist == 256, "ct_static_init: 256+dist != 512");
1735 
1736     /* Construct the codes of the static literal tree */
1737     for (bits = 0; bits <= MAX_BITS; bits++) bl_count[bits] = 0;
1738     n = 0;
1739     while (n <= 143) static_ltree[n++].Len = 8, bl_count[8]++;
1740     while (n <= 255) static_ltree[n++].Len = 9, bl_count[9]++;
1741     while (n <= 279) static_ltree[n++].Len = 7, bl_count[7]++;
1742     while (n <= 287) static_ltree[n++].Len = 8, bl_count[8]++;
1743     /* Codes 286 and 287 do not exist, but we must include them in the
1744      * tree construction to get a canonical Huffman tree (longest code
1745      * all ones)
1746      */
1747     gen_codes((ct_data *)static_ltree, L_CODES+1, bl_count);
1748 
1749     /* The static distance tree is trivial: */
1750     for (n = 0; n < D_CODES; n++) {
1751         static_dtree[n].Len = 5;
1752         static_dtree[n].Code = bi_reverse(n, 5);
1753     }
1754 }
1755 
1756 /* ===========================================================================
1757  * Initialize the tree data structures for a new zlib stream.
1758  */
1759 local void ct_init(s)
1760     deflate_state *s;
1761 {
1762     if (static_dtree[0].Len == 0) {
1763         ct_static_init();              /* To do: at compile time */
1764     }
1765 
1766     s->compressed_len = 0L;
1767 
1768     s->l_desc.dyn_tree = s->dyn_ltree;
1769     s->l_desc.stat_desc = &static_l_desc;
1770 
1771     s->d_desc.dyn_tree = s->dyn_dtree;
1772     s->d_desc.stat_desc = &static_d_desc;
1773 
1774     s->bl_desc.dyn_tree = s->bl_tree;
1775     s->bl_desc.stat_desc = &static_bl_desc;
1776 
1777     s->bi_buf = 0;
1778     s->bi_valid = 0;
1779     s->last_eob_len = 8; /* enough lookahead for inflate */
1780 #ifdef DEBUG_ZLIB
1781     s->bits_sent = 0L;
1782 #endif
1783     s->blocks_in_packet = 0;
1784 
1785     /* Initialize the first block of the first file: */
1786     init_block(s);
1787 }
1788 
1789 /* ===========================================================================
1790  * Initialize a new block.
1791  */
1792 local void init_block(s)
1793     deflate_state *s;
1794 {
1795     int n; /* iterates over tree elements */
1796 
1797     /* Initialize the trees. */
1798     for (n = 0; n < L_CODES;  n++) s->dyn_ltree[n].Freq = 0;
1799     for (n = 0; n < D_CODES;  n++) s->dyn_dtree[n].Freq = 0;
1800     for (n = 0; n < BL_CODES; n++) s->bl_tree[n].Freq = 0;
1801 
1802     s->dyn_ltree[END_BLOCK].Freq = 1;
1803     s->opt_len = s->static_len = 0L;
1804     s->last_lit = s->matches = 0;
1805 }
1806 
1807 #define SMALLEST 1
1808 /* Index within the heap array of least frequent node in the Huffman tree */
1809 
1810 
1811 /* ===========================================================================
1812  * Remove the smallest element from the heap and recreate the heap with
1813  * one less element. Updates heap and heap_len.
1814  */
1815 #define pqremove(s, tree, top) \
1816 {\
1817     top = s->heap[SMALLEST]; \
1818     s->heap[SMALLEST] = s->heap[s->heap_len--]; \
1819     pqdownheap(s, tree, SMALLEST); \
1820 }
1821 
1822 /* ===========================================================================
1823  * Compares two subtrees, using the tree depth as tie breaker when
1824  * the subtrees have equal frequency. This minimizes the worst case length.
1825  */
1826 #define smaller(tree, n, m, depth) \
1827    (tree[n].Freq < tree[m].Freq || \
1828    (tree[n].Freq == tree[m].Freq && depth[n] <= depth[m]))
1829 
1830 /* ===========================================================================
1831  * Restore the heap property by moving down the tree starting at node k,
1832  * exchanging a node with the smallest of its two sons if necessary, stopping
1833  * when the heap property is re-established (each father smaller than its
1834  * two sons).
1835  */
1836 local void pqdownheap(s, tree, k)
1837     deflate_state *s;
1838     ct_data *tree;  /* the tree to restore */
1839     int k;               /* node to move down */
1840 {
1841     int v = s->heap[k];
1842     int j = k << 1;  /* left son of k */
1843     while (j <= s->heap_len) {
1844         /* Set j to the smallest of the two sons: */
1845         if (j < s->heap_len &&
1846             smaller(tree, s->heap[j+1], s->heap[j], s->depth)) {
1847             j++;
1848         }
1849         /* Exit if v is smaller than both sons */
1850         if (smaller(tree, v, s->heap[j], s->depth)) break;
1851 
1852         /* Exchange v with the smallest son */
1853         s->heap[k] = s->heap[j];  k = j;
1854 
1855         /* And continue down the tree, setting j to the left son of k */
1856         j <<= 1;
1857     }
1858     s->heap[k] = v;
1859 }
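
/* pqdownheap() above is a standard binary-heap sift-down; the only twist
 * is the smaller() comparison, which breaks frequency ties by node depth.
 * The standalone sketch below (illustrative only, guarded out of
 * compilation) performs the same sift-down on plain ints, using a 1-based
 * array like s->heap.
 */
#if 0
#include <stdio.h>

static void ex_downheap(int *heap, int heap_len, int k)
{
    int v = heap[k];
    int j = k << 1;                                   /* left son of k */
    while (j <= heap_len) {
        if (j < heap_len && heap[j+1] < heap[j]) j++; /* smaller son */
        if (v <= heap[j]) break;                      /* heap property holds */
        heap[k] = heap[j]; k = j;                     /* move son up, go down */
        j <<= 1;
    }
    heap[k] = v;
}

int main(void)
{
    /* index 0 is unused; the 9 at the root violates the heap property */
    int heap[] = {0, 9, 2, 3, 4, 5, 6, 7};
    int i;

    ex_downheap(heap, 7, 1);
    for (i = 1; i <= 7; i++) printf("%d ", heap[i]);  /* 2 4 3 9 5 6 7 */
    printf("\n");
    return 0;
}
#endif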
1860 
1861 /* ===========================================================================
1862  * Compute the optimal bit lengths for a tree and update the total bit length
1863  * for the current block.
1864  * IN assertion: the fields freq and dad are set, heap[heap_max] and
1865  *    above are the tree nodes sorted by increasing frequency.
1866  * OUT assertions: the field len is set to the optimal bit length, the
1867  *     array bl_count contains the frequencies for each bit length.
1868  *     The length opt_len is updated; static_len is also updated if stree is
1869  *     not null.
1870  */
1871 local void gen_bitlen(s, desc)
1872     deflate_state *s;
1873     tree_desc *desc;    /* the tree descriptor */
1874 {
1875     ct_data *tree  = desc->dyn_tree;
1876     int max_code   = desc->max_code;
1877     ct_data *stree = desc->stat_desc->static_tree;
1878     intf *extra    = desc->stat_desc->extra_bits;
1879     int base       = desc->stat_desc->extra_base;
1880     int max_length = desc->stat_desc->max_length;
1881     int h;              /* heap index */
1882     int n, m;           /* iterate over the tree elements */
1883     int bits;           /* bit length */
1884     int xbits;          /* extra bits */
1885     ush f;              /* frequency */
1886     int overflow = 0;   /* number of elements with bit length too large */
1887 
1888     for (bits = 0; bits <= MAX_BITS; bits++) s->bl_count[bits] = 0;
1889 
1890     /* In a first pass, compute the optimal bit lengths (which may
1891      * overflow in the case of the bit length tree).
1892      */
1893     tree[s->heap[s->heap_max]].Len = 0; /* root of the heap */
1894 
1895     for (h = s->heap_max+1; h < HEAP_SIZE; h++) {
1896         n = s->heap[h];
1897         bits = tree[tree[n].Dad].Len + 1;
1898         if (bits > max_length) bits = max_length, overflow++;
1899         tree[n].Len = (ush)bits;
1900         /* We overwrite tree[n].Dad which is no longer needed */
1901 
1902         if (n > max_code) continue; /* not a leaf node */
1903 
1904         s->bl_count[bits]++;
1905         xbits = 0;
1906         if (n >= base) xbits = extra[n-base];
1907         f = tree[n].Freq;
1908         s->opt_len += (ulg)f * (bits + xbits);
1909         if (stree) s->static_len += (ulg)f * (stree[n].Len + xbits);
1910     }
1911     if (overflow == 0) return;
1912 
1913     Trace((stderr,"\nbit length overflow\n"));
1914     /* This happens for example on obj2 and pic of the Calgary corpus */
1915 
1916     /* Find the first bit length which could increase: */
1917     do {
1918         bits = max_length-1;
1919         while (s->bl_count[bits] == 0) bits--;
1920         s->bl_count[bits]--;      /* move one leaf down the tree */
1921         s->bl_count[bits+1] += 2; /* move one overflow item as its brother */
1922         s->bl_count[max_length]--;
1923         /* The brother of the overflow item also moves one step up,
1924          * but this does not affect bl_count[max_length]
1925          */
1926         overflow -= 2;
1927     } while (overflow > 0);
1928 
1929     /* Now recompute all bit lengths, scanning in increasing frequency.
1930      * h is still equal to HEAP_SIZE. (It is simpler to reconstruct all
1931      * lengths instead of fixing only the wrong ones. This idea is taken
1932      * from 'ar' written by Haruhiko Okumura.)
1933      */
1934     for (bits = max_length; bits != 0; bits--) {
1935         n = s->bl_count[bits];
1936         while (n != 0) {
1937             m = s->heap[--h];
1938             if (m > max_code) continue;
1939             if (tree[m].Len != (unsigned) bits) {
1940                 Trace((stderr,"code %d bits %d->%d\n", m, tree[m].Len, bits));
1941                 s->opt_len += ((long)bits - (long)tree[m].Len)
1942                               *(long)tree[m].Freq;
1943                 tree[m].Len = (ush)bits;
1944             }
1945             n--;
1946         }
1947     }
1948 }
1949 
1950 /* ===========================================================================
1951  * Generate the codes for a given tree and bit counts (which need not be
1952  * optimal).
1953  * IN assertion: the array bl_count contains the bit length statistics for
1954  * the given tree and the field len is set for all tree elements.
1955  * OUT assertion: the field code is set for all tree elements of non
1956  *     zero code length.
1957  */
1958 local void gen_codes (tree, max_code, bl_count)
1959     ct_data *tree;             /* the tree to decorate */
1960     int max_code;              /* largest code with non zero frequency */
1961     ushf *bl_count;            /* number of codes at each bit length */
1962 {
1963     ush next_code[MAX_BITS+1]; /* next code value for each bit length */
1964     ush code = 0;              /* running code value */
1965     int bits;                  /* bit index */
1966     int n;                     /* code index */
1967 
1968     /* The distribution counts are first used to generate the code values
1969      * without bit reversal.
1970      */
1971     for (bits = 1; bits <= MAX_BITS; bits++) {
1972         next_code[bits] = code = (code + bl_count[bits-1]) << 1;
1973     }
1974     /* Check that the bit counts in bl_count are consistent. The last code
1975      * must be all ones.
1976      */
1977     Assert (code + bl_count[MAX_BITS]-1 == (1<<MAX_BITS)-1,
1978             "inconsistent bit counts");
1979     Tracev((stderr,"\ngen_codes: max_code %d ", max_code));
1980 
1981     for (n = 0;  n <= max_code; n++) {
1982         int len = tree[n].Len;
1983         if (len == 0) continue;
1984         /* Now reverse the bits */
1985         tree[n].Code = bi_reverse(next_code[len]++, len);
1986 
1987         Tracec(tree != static_ltree, (stderr,"\nn %3d %c l %2d c %4x (%x) ",
1988              n, (isgraph(n) ? n : ' '), len, tree[n].Code, next_code[len]-1));
1989     }
1990 }
1991 
1992 /* ===========================================================================
1993  * Construct one Huffman tree and assign the code bit strings and lengths.
1994  * Update the total bit length for the current block.
1995  * IN assertion: the field freq is set for all tree elements.
1996  * OUT assertions: the fields len and code are set to the optimal bit length
1997  *     and corresponding code. The length opt_len is updated; static_len is
1998  *     also updated if stree is not null. The field max_code is set.
1999  */
2000 local void build_tree(s, desc)
2001     deflate_state *s;
2002     tree_desc *desc; /* the tree descriptor */
2003 {
2004     ct_data *tree   = desc->dyn_tree;
2005     ct_data *stree  = desc->stat_desc->static_tree;
2006     int elems       = desc->stat_desc->elems;
2007     int n, m;          /* iterate over heap elements */
2008     int max_code = -1; /* largest code with non zero frequency */
2009     int node;          /* new node being created */
2010 
2011     /* Construct the initial heap, with least frequent element in
2012      * heap[SMALLEST]. The sons of heap[n] are heap[2*n] and heap[2*n+1].
2013      * heap[0] is not used.
2014      */
2015     s->heap_len = 0, s->heap_max = HEAP_SIZE;
2016 
2017     for (n = 0; n < elems; n++) {
2018         if (tree[n].Freq != 0) {
2019             s->heap[++(s->heap_len)] = max_code = n;
2020             s->depth[n] = 0;
2021         } else {
2022             tree[n].Len = 0;
2023         }
2024     }
2025 
2026     /* The pkzip format requires that at least one distance code exists,
2027      * and that at least one bit should be sent even if there is only one
2028      * possible code. So to avoid special checks later on we force at least
2029      * two codes of non zero frequency.
2030      */
2031     while (s->heap_len < 2) {
2032         node = s->heap[++(s->heap_len)] = (max_code < 2 ? ++max_code : 0);
2033         tree[node].Freq = 1;
2034         s->depth[node] = 0;
2035         s->opt_len--; if (stree) s->static_len -= stree[node].Len;
2036         /* node is 0 or 1 so it does not have extra bits */
2037     }
2038     desc->max_code = max_code;
2039 
2040     /* The elements heap[heap_len/2+1 .. heap_len] are leaves of the tree,
2041      * establish sub-heaps of increasing lengths:
2042      */
2043     for (n = s->heap_len/2; n >= 1; n--) pqdownheap(s, tree, n);
2044 
2045     /* Construct the Huffman tree by repeatedly combining the least two
2046      * frequent nodes.
2047      */
2048     node = elems;              /* next internal node of the tree */
2049     do {
2050         pqremove(s, tree, n);  /* n = node of least frequency */
2051         m = s->heap[SMALLEST]; /* m = node of next least frequency */
2052 
2053         s->heap[--(s->heap_max)] = n; /* keep the nodes sorted by frequency */
2054         s->heap[--(s->heap_max)] = m;
2055 
2056         /* Create a new node father of n and m */
2057         tree[node].Freq = tree[n].Freq + tree[m].Freq;
2058         s->depth[node] = (uch) (MAX(s->depth[n], s->depth[m]) + 1);
2059         tree[n].Dad = tree[m].Dad = (ush)node;
2060 #ifdef DUMP_BL_TREE
2061         if (tree == s->bl_tree) {
2062             fprintf(stderr,"\nnode %d(%d), sons %d(%d) %d(%d)",
2063                     node, tree[node].Freq, n, tree[n].Freq, m, tree[m].Freq);
2064         }
2065 #endif
2066         /* and insert the new node in the heap */
2067         s->heap[SMALLEST] = node++;
2068         pqdownheap(s, tree, SMALLEST);
2069 
2070     } while (s->heap_len >= 2);
2071 
2072     s->heap[--(s->heap_max)] = s->heap[SMALLEST];
2073 
2074     /* At this point, the fields freq and dad are set. We can now
2075      * generate the bit lengths.
2076      */
2077     gen_bitlen(s, (tree_desc *)desc);
2078 
2079     /* The field len is now set, we can generate the bit codes */
2080     gen_codes ((ct_data *)tree, max_code, s->bl_count);
2081 }
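
/* The construction above repeatedly merges the two least frequent nodes;
 * the bit length of a symbol is then simply the depth of its leaf.  The
 * standalone sketch below (illustrative only, guarded out of compilation)
 * does the same thing with a naive O(n^2) selection instead of the heap:
 * frequencies {1,1,2,4} come out with code lengths {3,3,2,1}.
 */
#if 0
#include <stdio.h>

#define EX_NSYM 4

int main(void)
{
    /* leaves are 0..EX_NSYM-1; internal nodes are appended after them */
    long freq[2*EX_NSYM] = {1, 1, 2, 4};
    int  dad[2*EX_NSYM]  = {0};
    int  used[2*EX_NSYM] = {0};
    int  nodes = EX_NSYM, i, n;

    while (nodes < 2*EX_NSYM - 1) {
        int n1 = -1, n2 = -1;
        for (i = 0; i < nodes; i++) {        /* two least frequent nodes */
            if (used[i]) continue;
            if (n1 < 0 || freq[i] < freq[n1]) { n2 = n1; n1 = i; }
            else if (n2 < 0 || freq[i] < freq[n2]) n2 = i;
        }
        used[n1] = used[n2] = 1;
        freq[nodes] = freq[n1] + freq[n2];   /* father of n1 and n2 */
        dad[n1] = dad[n2] = nodes;
        nodes++;
    }
    for (n = 0; n < EX_NSYM; n++) {          /* bit length = leaf depth */
        int d = 0;
        for (i = n; i != nodes - 1; i = dad[i]) d++;
        printf("symbol %d: freq %ld len %d\n", n, freq[n], d);
    }
    return 0;
}
#endif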
2082 
2083 /* ===========================================================================
2084  * Scan a literal or distance tree to determine the frequencies of the codes
2085  * in the bit length tree.
2086  */
2087 local void scan_tree (s, tree, max_code)
2088     deflate_state *s;
2089     ct_data *tree;   /* the tree to be scanned */
2090     int max_code;    /* and its largest code of non zero frequency */
2091 {
2092     int n;                     /* iterates over all tree elements */
2093     int prevlen = -1;          /* last emitted length */
2094     int curlen;                /* length of current code */
2095     int nextlen = tree[0].Len; /* length of next code */
2096     int count = 0;             /* repeat count of the current code */
2097     int max_count = 7;         /* max repeat count */
2098     int min_count = 4;         /* min repeat count */
2099 
2100     if (nextlen == 0) max_count = 138, min_count = 3;
2101     tree[max_code+1].Len = (ush)0xffff; /* guard */
2102 
2103     for (n = 0; n <= max_code; n++) {
2104         curlen = nextlen; nextlen = tree[n+1].Len;
2105         if (++count < max_count && curlen == nextlen) {
2106             continue;
2107         } else if (count < min_count) {
2108             s->bl_tree[curlen].Freq += count;
2109         } else if (curlen != 0) {
2110             if (curlen != prevlen) s->bl_tree[curlen].Freq++;
2111             s->bl_tree[REP_3_6].Freq++;
2112         } else if (count <= 10) {
2113             s->bl_tree[REPZ_3_10].Freq++;
2114         } else {
2115             s->bl_tree[REPZ_11_138].Freq++;
2116         }
2117         count = 0; prevlen = curlen;
2118         if (nextlen == 0) {
2119             max_count = 138, min_count = 3;
2120         } else if (curlen == nextlen) {
2121             max_count = 6, min_count = 3;
2122         } else {
2123             max_count = 7, min_count = 4;
2124         }
2125     }
2126 }
2127 
2128 /* ===========================================================================
2129  * Send a literal or distance tree in compressed form, using the codes in
2130  * bl_tree.
2131  */
2132 local void send_tree (s, tree, max_code)
2133     deflate_state *s;
2134     ct_data *tree; /* the tree to be scanned */
2135     int max_code;       /* and its largest code of non zero frequency */
2136 {
2137     int n;                     /* iterates over all tree elements */
2138     int prevlen = -1;          /* last emitted length */
2139     int curlen;                /* length of current code */
2140     int nextlen = tree[0].Len; /* length of next code */
2141     int count = 0;             /* repeat count of the current code */
2142     int max_count = 7;         /* max repeat count */
2143     int min_count = 4;         /* min repeat count */
2144 
2145     /* tree[max_code+1].Len = -1; */  /* guard already set */
2146     if (nextlen == 0) max_count = 138, min_count = 3;
2147 
2148     for (n = 0; n <= max_code; n++) {
2149         curlen = nextlen; nextlen = tree[n+1].Len;
2150         if (++count < max_count && curlen == nextlen) {
2151             continue;
2152         } else if (count < min_count) {
2153             do { send_code(s, curlen, s->bl_tree); } while (--count != 0);
2154 
2155         } else if (curlen != 0) {
2156             if (curlen != prevlen) {
2157                 send_code(s, curlen, s->bl_tree); count--;
2158             }
2159             Assert(count >= 3 && count <= 6, " 3_6?");
2160             send_code(s, REP_3_6, s->bl_tree); send_bits(s, count-3, 2);
2161 
2162         } else if (count <= 10) {
2163             send_code(s, REPZ_3_10, s->bl_tree); send_bits(s, count-3, 3);
2164 
2165         } else {
2166             send_code(s, REPZ_11_138, s->bl_tree); send_bits(s, count-11, 7);
2167         }
2168         count = 0; prevlen = curlen;
2169         if (nextlen == 0) {
2170             max_count = 138, min_count = 3;
2171         } else if (curlen == nextlen) {
2172             max_count = 6, min_count = 3;
2173         } else {
2174             max_count = 7, min_count = 4;
2175         }
2176     }
2177 }
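
/* Code lengths are themselves run-length coded with the three repeat
 * symbols defined near the top of this section (REP_3_6, REPZ_3_10,
 * REPZ_11_138).  The standalone sketch below (illustrative only, guarded
 * out of compilation) runs the same state machine as send_tree() above
 * over the length list {3,3,3,3,3,3,3,3,0,0,0,0,0} and prints what would
 * be sent: "len 3", REP_3_6 + 3, "len 3", REPZ_3_10 + 2.
 */
#if 0
#include <stdio.h>

int main(void)
{
    int len[14] = {3,3,3,3,3,3,3,3,0,0,0,0,0, 0xffff};  /* last is a guard */
    int max_code = 12;
    int prevlen = -1, curlen, nextlen = len[0];
    int count = 0, max_count = 7, min_count = 4;
    int n;

    if (nextlen == 0) max_count = 138, min_count = 3;
    for (n = 0; n <= max_code; n++) {
        curlen = nextlen; nextlen = len[n+1];
        if (++count < max_count && curlen == nextlen) continue;
        if (count < min_count) {
            do { printf("len %d\n", curlen); } while (--count != 0);
        } else if (curlen != 0) {
            if (curlen != prevlen) { printf("len %d\n", curlen); count--; }
            printf("REP_3_6 + %d\n", count - 3);
        } else if (count <= 10) {
            printf("REPZ_3_10 + %d\n", count - 3);
        } else {
            printf("REPZ_11_138 + %d\n", count - 11);
        }
        count = 0; prevlen = curlen;
        if (nextlen == 0)           max_count = 138, min_count = 3;
        else if (curlen == nextlen) max_count = 6, min_count = 3;
        else                        max_count = 7, min_count = 4;
    }
    return 0;
}
#endif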
2178 
2179 /* ===========================================================================
2180  * Construct the Huffman tree for the bit lengths and return the index in
2181  * bl_order of the last bit length code to send.
2182  */
2183 local int build_bl_tree(s)
2184     deflate_state *s;
2185 {
2186     int max_blindex;  /* index of last bit length code of non zero freq */
2187 
2188     /* Determine the bit length frequencies for literal and distance trees */
2189     scan_tree(s, (ct_data *)s->dyn_ltree, s->l_desc.max_code);
2190     scan_tree(s, (ct_data *)s->dyn_dtree, s->d_desc.max_code);
2191 
2192     /* Build the bit length tree: */
2193     build_tree(s, (tree_desc *)(&(s->bl_desc)));
2194     /* opt_len now includes the length of the tree representations, except
2195      * the lengths of the bit length codes and the 5+5+4 bits for the counts.
2196      */
2197 
2198     /* Determine the number of bit length codes to send. The pkzip format
2199      * requires that at least 4 bit length codes be sent. (appnote.txt says
2200      * 3 but the actual value used is 4.)
2201      */
2202     for (max_blindex = BL_CODES-1; max_blindex >= 3; max_blindex--) {
2203         if (s->bl_tree[bl_order[max_blindex]].Len != 0) break;
2204     }
2205     /* Update opt_len to include the bit length tree and counts */
2206     s->opt_len += 3*(max_blindex+1) + 5+5+4;
2207     Tracev((stderr, "\ndyn trees: dyn %ld, stat %ld",
2208             s->opt_len, s->static_len));
2209 
2210     return max_blindex;
2211 }
2212 
2213 /* ===========================================================================
2214  * Send the header for a block using dynamic Huffman trees: the counts, the
2215  * lengths of the bit length codes, the literal tree and the distance tree.
2216  * IN assertion: lcodes >= 257, dcodes >= 1, blcodes >= 4.
2217  */
2218 local void send_all_trees(s, lcodes, dcodes, blcodes)
2219     deflate_state *s;
2220     int lcodes, dcodes, blcodes; /* number of codes for each tree */
2221 {
2222     int rank;                    /* index in bl_order */
2223 
2224     Assert (lcodes >= 257 && dcodes >= 1 && blcodes >= 4, "not enough codes");
2225     Assert (lcodes <= L_CODES && dcodes <= D_CODES && blcodes <= BL_CODES,
2226             "too many codes");
2227     Tracev((stderr, "\nbl counts: "));
2228     send_bits(s, lcodes-257, 5); /* not +255 as stated in appnote.txt */
2229     send_bits(s, dcodes-1,   5);
2230     send_bits(s, blcodes-4,  4); /* not -3 as stated in appnote.txt */
2231     for (rank = 0; rank < blcodes; rank++) {
2232         Tracev((stderr, "\nbl code %2d ", bl_order[rank]));
2233         send_bits(s, s->bl_tree[bl_order[rank]].Len, 3);
2234     }
2235     Tracev((stderr, "\nbl tree: sent %ld", s->bits_sent));
2236 
2237     send_tree(s, (ct_data *)s->dyn_ltree, lcodes-1); /* literal tree */
2238     Tracev((stderr, "\nlit tree: sent %ld", s->bits_sent));
2239 
2240     send_tree(s, (ct_data *)s->dyn_dtree, dcodes-1); /* distance tree */
2241     Tracev((stderr, "\ndist tree: sent %ld", s->bits_sent));
2242 }
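
/* The dynamic-block header sent above is, in order: HLIT = lcodes-257 in
 * 5 bits, HDIST = dcodes-1 in 5 bits, HCLEN = blcodes-4 in 4 bits, then
 * blcodes three-bit lengths in bl_order.  A quick sketch of the bit budget
 * for made-up counts (illustrative only, not taken from a real stream):
 */
#if 0
#include <stdio.h>

int main(void)
{
    int lcodes = 280, dcodes = 20, blcodes = 15;    /* hypothetical counts */

    printf("HLIT=%d HDIST=%d HCLEN=%d\n", lcodes-257, dcodes-1, blcodes-4);
    printf("header bits before the trees: %d\n", 5 + 5 + 4 + 3*blcodes); /* 59 */
    return 0;
}
#endif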
2243 
2244 /* ===========================================================================
2245  * Send a stored block
2246  */
2247 local void ct_stored_block(s, buf, stored_len, eof)
2248     deflate_state *s;
2249     charf *buf;       /* input block */
2250     ulg stored_len;   /* length of input block */
2251     int eof;          /* true if this is the last block for a file */
2252 {
2253     send_bits(s, (STORED_BLOCK<<1)+eof, 3);  /* send block type */
2254     s->compressed_len = (s->compressed_len + 3 + 7) & ~7L;
2255     s->compressed_len += (stored_len + 4) << 3;
2256 
2257     copy_block(s, buf, (unsigned)stored_len, 1); /* with header */
2258 }
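
/* The bookkeeping above: 3 bits of block type, padding up to the next byte
 * boundary, then 4 header bytes (LEN and its one's complement NLEN) plus
 * the stored data, all counted in bits.  A quick check of that arithmetic
 * with made-up numbers (illustrative only):
 */
#if 0
#include <stdio.h>

int main(void)
{
    unsigned long compressed_len = 20;  /* bits already sent (example)  */
    unsigned long stored_len = 100;     /* bytes in this stored block   */

    compressed_len = (compressed_len + 3 + 7) & ~7UL;  /* 24: type + pad  */
    compressed_len += (stored_len + 4) << 3;           /* 24 + 832 = 856  */
    printf("%lu bits = %lu bytes\n", compressed_len, compressed_len >> 3);
    return 0;
}
#endif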
2259 
2260 /* Send just the `stored block' type code without any length bytes or data.
2261  */
2262 local void ct_stored_type_only(s)
2263     deflate_state *s;
2264 {
2265     send_bits(s, (STORED_BLOCK << 1), 3);
2266     bi_windup(s);
2267     s->compressed_len = (s->compressed_len + 3) & ~7L;
2268 }
2269 
2270 
2271 /* ===========================================================================
2272  * Send one empty static block to give enough lookahead for inflate.
2273  * This takes 10 bits, of which 7 may remain in the bit buffer.
2274  * The current inflate code requires 9 bits of lookahead. If the EOB
2275  * code for the previous block was coded on 5 bits or less, inflate
2276  * may have only 5+3 bits of lookahead to decode this EOB.
2277  * (There are no problems if the previous block is stored or fixed.)
2278  */
2279 local void ct_align(s)
2280     deflate_state *s;
2281 {
2282     send_bits(s, STATIC_TREES<<1, 3);
2283     send_code(s, END_BLOCK, static_ltree);
2284     s->compressed_len += 10L; /* 3 for block type, 7 for EOB */
2285     bi_flush(s);
2286     /* Of the 10 bits for the empty block, we have already sent
2287      * (10 - bi_valid) bits. The lookahead for the EOB of the previous
2288      * block was thus its length plus what we have just sent.
2289      */
2290     if (s->last_eob_len + 10 - s->bi_valid < 9) {
2291         send_bits(s, STATIC_TREES<<1, 3);
2292         send_code(s, END_BLOCK, static_ltree);
2293         s->compressed_len += 10L;
2294         bi_flush(s);
2295     }
2296     s->last_eob_len = 7;
2297 }
2298 
2299 /* ===========================================================================
2300  * Determine the best encoding for the current block: dynamic trees, static
2301  * trees or store, and output the encoded block to the zip file. This function
2302  * returns the total compressed length for the file so far.
2303  */
2304 local ulg ct_flush_block(s, buf, stored_len, flush)
2305     deflate_state *s;
2306     charf *buf;       /* input block, or NULL if too old */
2307     ulg stored_len;   /* length of input block */
2308     int flush;        /* Z_FINISH if this is the last block for a file */
2309 {
2310     ulg opt_lenb, static_lenb; /* opt_len and static_len in bytes */
2311     int max_blindex;  /* index of last bit length code of non zero freq */
2312     int eof = flush == Z_FINISH;
2313 
2314     ++s->blocks_in_packet;
2315 
2316     /* Check if the file is ascii or binary */
2317     if (s->data_type == UNKNOWN) set_data_type(s);
2318 
2319     /* Construct the literal and distance trees */
2320     build_tree(s, (tree_desc *)(&(s->l_desc)));
2321     Tracev((stderr, "\nlit data: dyn %ld, stat %ld", s->opt_len,
2322             s->static_len));
2323 
2324     build_tree(s, (tree_desc *)(&(s->d_desc)));
2325     Tracev((stderr, "\ndist data: dyn %ld, stat %ld", s->opt_len,
2326             s->static_len));
2327     /* At this point, opt_len and static_len are the total bit lengths of
2328      * the compressed block data, excluding the tree representations.
2329      */
2330 
2331     /* Build the bit length tree for the above two trees, and get the index
2332      * in bl_order of the last bit length code to send.
2333      */
2334     max_blindex = build_bl_tree(s);
2335 
2336     /* Determine the best encoding. Compute first the block length in bytes */
2337     opt_lenb = (s->opt_len+3+7)>>3;
2338     static_lenb = (s->static_len+3+7)>>3;
2339 
2340     Tracev((stderr, "\nopt %lu(%lu) stat %lu(%lu) stored %lu lit %u ",
2341             opt_lenb, s->opt_len, static_lenb, s->static_len, stored_len,
2342             s->last_lit));
2343 
2344     if (static_lenb <= opt_lenb) opt_lenb = static_lenb;
2345 
2346     /* If compression failed and this is the first and last block,
2347      * and if the .zip file can be seeked (to rewrite the local header),
2348      * the whole file is transformed into a stored file:
2349      */
2350 #ifdef STORED_FILE_OK
2351 #  ifdef FORCE_STORED_FILE
2352     if (eof && compressed_len == 0L) /* force stored file */
2353 #  else
2354     if (stored_len <= opt_lenb && eof && s->compressed_len==0L && seekable())
2355 #  endif
2356     {
2357         /* Since LIT_BUFSIZE <= 2*WSIZE, the input data must be there: */
2358         if (buf == (charf*)0) error ("block vanished");
2359 
2360         copy_block(s, buf, (unsigned)stored_len, 0); /* without header */
2361         s->compressed_len = stored_len << 3;
2362         s->method = STORED;
2363     } else
2364 #endif /* STORED_FILE_OK */
2365 
2366     /* For Z_PACKET_FLUSH, if we don't achieve the required minimum
2367      * compression, and this block contains all the data since the last
2368      * time we used Z_PACKET_FLUSH, then just omit this block completely
2369      * from the output.
2370      */
2371     if (flush == Z_PACKET_FLUSH && s->blocks_in_packet == 1
2372 	&& opt_lenb > stored_len - s->minCompr) {
2373 	s->blocks_in_packet = 0;
2374 	/* output nothing */
2375     } else
2376 
2377 #ifdef FORCE_STORED
2378     if (buf != (char*)0) /* force stored block */
2379 #else
2380     if (stored_len+4 <= opt_lenb && buf != (char*)0)
2381                        /* 4: two words for the lengths */
2382 #endif
2383     {
2384         /* The test buf != NULL is only necessary if LIT_BUFSIZE > WSIZE.
2385          * Otherwise we can't have processed more than WSIZE input bytes since
2386          * the last block flush, because compression would have been
2387          * successful. If LIT_BUFSIZE <= WSIZE, it is never too late to
2388          * transform a block into a stored block.
2389          */
2390         ct_stored_block(s, buf, stored_len, eof);
2391     } else
2392 
2393 #ifdef FORCE_STATIC
2394     if (static_lenb >= 0) /* force static trees */
2395 #else
2396     if (static_lenb == opt_lenb)
2397 #endif
2398     {
2399         send_bits(s, (STATIC_TREES<<1)+eof, 3);
2400         compress_block(s, (ct_data *)static_ltree, (ct_data *)static_dtree);
2401         s->compressed_len += 3 + s->static_len;
2402     } else {
2403         send_bits(s, (DYN_TREES<<1)+eof, 3);
2404         send_all_trees(s, s->l_desc.max_code+1, s->d_desc.max_code+1,
2405                        max_blindex+1);
2406         compress_block(s, (ct_data *)s->dyn_ltree, (ct_data *)s->dyn_dtree);
2407         s->compressed_len += 3 + s->opt_len;
2408     }
2409     Assert (s->compressed_len == s->bits_sent, "bad compressed size");
2410     init_block(s);
2411 
2412     if (eof) {
2413         bi_windup(s);
2414         s->compressed_len += 7;  /* align on byte boundary */
2415     }
2416     Tracev((stderr,"\ncomprlen %lu(%lu) ", s->compressed_len>>3,
2417            s->compressed_len-7*eof));
2418 
2419     return s->compressed_len >> 3;
2420 }
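
/* In short, the choice above is: take the cheaper of the dynamic and static
 * encodings, but fall back to a stored block whenever the raw data plus its
 * 4 header bytes is no larger.  The standalone sketch below (illustrative
 * only, guarded out of compilation) mirrors that decision for made-up byte
 * counts; it ignores the Z_PACKET_FLUSH and FORCE_* special cases.
 */
#if 0
#include <stdio.h>

int main(void)
{
    unsigned long opt_lenb = 120, static_lenb = 110, stored_len = 100;

    if (static_lenb <= opt_lenb) opt_lenb = static_lenb;
    if (stored_len + 4 <= opt_lenb)
        printf("stored block: %lu bytes\n", stored_len + 4);
    else if (static_lenb == opt_lenb)
        printf("static trees: %lu bytes\n", static_lenb);
    else
        printf("dynamic trees: %lu bytes\n", opt_lenb);
    return 0;
}
#endif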
2421 
2422 /* ===========================================================================
2423  * Save the match info and tally the frequency counts. Return true if
2424  * the current block must be flushed.
2425  */
2426 local int ct_tally (s, dist, lc)
2427     deflate_state *s;
2428     int dist;  /* distance of matched string */
2429     int lc;    /* match length-MIN_MATCH or unmatched char (if dist==0) */
2430 {
2431     s->d_buf[s->last_lit] = (ush)dist;
2432     s->l_buf[s->last_lit++] = (uch)lc;
2433     if (dist == 0) {
2434         /* lc is the unmatched char */
2435         s->dyn_ltree[lc].Freq++;
2436     } else {
2437         s->matches++;
2438         /* Here, lc is the match length - MIN_MATCH */
2439         dist--;             /* dist = match distance - 1 */
2440         Assert((ush)dist < (ush)MAX_DIST(s) &&
2441                (ush)lc <= (ush)(MAX_MATCH-MIN_MATCH) &&
2442                (ush)d_code(dist) < (ush)D_CODES,  "ct_tally: bad match");
2443 
2444         s->dyn_ltree[length_code[lc]+LITERALS+1].Freq++;
2445         s->dyn_dtree[d_code(dist)].Freq++;
2446     }
2447 
2448     /* Try to guess if it is profitable to stop the current block here */
2449     if (s->level > 2 && (s->last_lit & 0xfff) == 0) {
2450         /* Compute an upper bound for the compressed length */
2451         ulg out_length = (ulg)s->last_lit*8L;
2452         ulg in_length = (ulg)s->strstart - s->block_start;
2453         int dcode;
2454         for (dcode = 0; dcode < D_CODES; dcode++) {
2455             out_length += (ulg)s->dyn_dtree[dcode].Freq *
2456                 (5L+extra_dbits[dcode]);
2457         }
2458         out_length >>= 3;
2459         Tracev((stderr,"\nlast_lit %u, in %ld, out ~%ld(%ld%%) ",
2460                s->last_lit, in_length, out_length,
2461                100L - out_length*100L/in_length));
2462         if (s->matches < s->last_lit/2 && out_length < in_length/2) return 1;
2463     }
2464     return (s->last_lit == s->lit_bufsize-1);
2465     /* We avoid equality with lit_bufsize because of wraparound at 64K
2466      * on 16 bit machines and because stored blocks are restricted to
2467      * 64K-1 bytes.
2468      */
2469 }
2470 
2471 /* ===========================================================================
2472  * Send the block data compressed using the given Huffman trees
2473  */
2474 local void compress_block(s, ltree, dtree)
2475     deflate_state *s;
2476     ct_data *ltree; /* literal tree */
2477     ct_data *dtree; /* distance tree */
2478 {
2479     unsigned dist;      /* distance of matched string */
2480     int lc;             /* match length or unmatched char (if dist == 0) */
2481     unsigned lx = 0;    /* running index in l_buf */
2482     unsigned code;      /* the code to send */
2483     int extra;          /* number of extra bits to send */
2484 
2485     if (s->last_lit != 0) do {
2486         dist = s->d_buf[lx];
2487         lc = s->l_buf[lx++];
2488         if (dist == 0) {
2489             send_code(s, lc, ltree); /* send a literal byte */
2490             Tracecv(isgraph(lc), (stderr," '%c' ", lc));
2491         } else {
2492             /* Here, lc is the match length - MIN_MATCH */
2493             code = length_code[lc];
2494             send_code(s, code+LITERALS+1, ltree); /* send the length code */
2495             extra = extra_lbits[code];
2496             if (extra != 0) {
2497                 lc -= base_length[code];
2498                 send_bits(s, lc, extra);       /* send the extra length bits */
2499             }
2500             dist--; /* dist is now the match distance - 1 */
2501             code = d_code(dist);
2502             Assert (code < D_CODES, "bad d_code");
2503 
2504             send_code(s, code, dtree);       /* send the distance code */
2505             extra = extra_dbits[code];
2506             if (extra != 0) {
2507                 dist -= base_dist[code];
2508                 send_bits(s, dist, extra);   /* send the extra distance bits */
2509             }
2510         } /* literal or match pair ? */
2511 
2512         /* Check that the overlay between pending_buf and d_buf+l_buf is ok: */
2513         Assert(s->pending < s->lit_bufsize + 2*lx, "pendingBuf overflow");
2514 
2515     } while (lx < s->last_lit);
2516 
2517     send_code(s, END_BLOCK, ltree);
2518     s->last_eob_len = ltree[END_BLOCK].Len;
2519 }
2520 
2521 /* ===========================================================================
2522  * Set the data type to ASCII or BINARY, using a crude approximation:
2523  * binary if more than 20% of the bytes are <= 6 or >= 128, ascii otherwise.
2524  * IN assertion: the fields freq of dyn_ltree are set and the total of all
2525  * frequencies does not exceed 64K (to fit in an int on 16 bit machines).
2526  */
2527 local void set_data_type(s)
2528     deflate_state *s;
2529 {
2530     int n = 0;
2531     unsigned ascii_freq = 0;
2532     unsigned bin_freq = 0;
2533     while (n < 7)        bin_freq += s->dyn_ltree[n++].Freq;
2534     while (n < 128)    ascii_freq += s->dyn_ltree[n++].Freq;
2535     while (n < LITERALS) bin_freq += s->dyn_ltree[n++].Freq;
2536     s->data_type = (Byte)(bin_freq > (ascii_freq >> 2) ? BINARY : ASCII);
2537 }
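
/* The test above flags the block as BINARY when bin_freq > ascii_freq/4,
 * i.e. when more than 1/5 (20%) of the tallied bytes fall in the "binary"
 * ranges.  Tiny check of that equivalence with made-up tallies
 * (illustrative only):
 */
#if 0
#include <stdio.h>

int main(void)
{
    unsigned bin_freq = 21, ascii_freq = 79;    /* 21% of 100 bytes binary */

    printf("binary? %s\n",
           bin_freq > (ascii_freq >> 2) ? "yes" : "no");  /* yes: 21 > 19 */
    return 0;
}
#endif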
2538 
2539 /* ===========================================================================
2540  * Reverse the first len bits of a code, using straightforward code (a faster
2541  * method would use a table)
2542  * IN assertion: 1 <= len <= 15
2543  */
2544 local unsigned bi_reverse(code, len)
2545     unsigned code; /* the value to invert */
2546     int len;       /* its bit length */
2547 {
2548     register unsigned res = 0;
2549     do {
2550         res |= code & 1;
2551         code >>= 1, res <<= 1;
2552     } while (--len > 0);
2553     return res >> 1;
2554 }
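
/* Example of the reversal above (illustrative only, guarded out of
 * compilation): bi_reverse(0x06, 5) reverses the low 5 bits 00110 into
 * 01100 = 0x0c.  The same loop as a standalone check:
 */
#if 0
#include <stdio.h>

int main(void)
{
    unsigned code = 0x06, res = 0;
    int len = 5;

    do {
        res |= code & 1;
        code >>= 1, res <<= 1;
    } while (--len > 0);
    printf("%#x\n", res >> 1);      /* prints 0xc */
    return 0;
}
#endif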
2555 
2556 /* ===========================================================================
2557  * Flush the bit buffer, keeping at most 7 bits in it.
2558  */
2559 local void bi_flush(s)
2560     deflate_state *s;
2561 {
2562     if (s->bi_valid == 16) {
2563         put_short(s, s->bi_buf);
2564         s->bi_buf = 0;
2565         s->bi_valid = 0;
2566     } else if (s->bi_valid >= 8) {
2567         put_byte(s, (Byte)s->bi_buf);
2568         s->bi_buf >>= 8;
2569         s->bi_valid -= 8;
2570     }
2571 }
2572 
2573 /* ===========================================================================
2574  * Flush the bit buffer and align the output on a byte boundary
2575  */
2576 local void bi_windup(s)
2577     deflate_state *s;
2578 {
2579     if (s->bi_valid > 8) {
2580         put_short(s, s->bi_buf);
2581     } else if (s->bi_valid > 0) {
2582         put_byte(s, (Byte)s->bi_buf);
2583     }
2584     s->bi_buf = 0;
2585     s->bi_valid = 0;
2586 #ifdef DEBUG_ZLIB
2587     s->bits_sent = (s->bits_sent+7) & ~7;
2588 #endif
2589 }
2590 
2591 /* ===========================================================================
2592  * Copy a stored block, storing first the length and its
2593  * one's complement if requested.
2594  */
2595 local void copy_block(s, buf, len, header)
2596     deflate_state *s;
2597     charf    *buf;    /* the input data */
2598     unsigned len;     /* its length */
2599     int      header;  /* true if block header must be written */
2600 {
2601     bi_windup(s);        /* align on byte boundary */
2602     s->last_eob_len = 8; /* enough lookahead for inflate */
2603 
2604     if (header) {
2605         put_short(s, (ush)len);
2606         put_short(s, (ush)~len);
2607 #ifdef DEBUG_ZLIB
2608         s->bits_sent += 2*16;
2609 #endif
2610     }
2611 #ifdef DEBUG_ZLIB
2612     s->bits_sent += (ulg)len<<3;
2613 #endif
2614     while (len--) {
2615         put_byte(s, *buf++);
2616     }
2617 }
2618 
2619 
2620 /*+++++*/
2621 /* infblock.h -- header to use infblock.c
2622  * Copyright (C) 1995 Mark Adler
2623  * For conditions of distribution and use, see copyright notice in zlib.h
2624  */
2625 
2626 /* WARNING: this file should *not* be used by applications. It is
2627    part of the implementation of the compression library and is
2628    subject to change. Applications should only use zlib.h.
2629  */
2630 
2631 struct inflate_blocks_state;
2632 typedef struct inflate_blocks_state FAR inflate_blocks_statef;
2633 
2634 local inflate_blocks_statef * inflate_blocks_new OF((
2635     z_stream *z,
2636     check_func c,               /* check function */
2637     uInt w));                   /* window size */
2638 
2639 local int inflate_blocks OF((
2640     inflate_blocks_statef *,
2641     z_stream *,
2642     int));                      /* initial return code */
2643 
2644 local void inflate_blocks_reset OF((
2645     inflate_blocks_statef *,
2646     z_stream *,
2647     uLongf *));                  /* check value on output */
2648 
2649 local int inflate_blocks_free OF((
2650     inflate_blocks_statef *,
2651     z_stream *,
2652     uLongf *));                  /* check value on output */
2653 
2654 local int inflate_addhistory OF((
2655     inflate_blocks_statef *,
2656     z_stream *));
2657 
2658 local int inflate_packet_flush OF((
2659     inflate_blocks_statef *));
2660 
2661 /*+++++*/
2662 /* inftrees.h -- header to use inftrees.c
2663  * Copyright (C) 1995 Mark Adler
2664  * For conditions of distribution and use, see copyright notice in zlib.h
2665  */
2666 
2667 /* WARNING: this file should *not* be used by applications. It is
2668    part of the implementation of the compression library and is
2669    subject to change. Applications should only use zlib.h.
2670  */
2671 
2672 /* Huffman code lookup table entry--this entry is four bytes for machines
2673    that have 16-bit pointers (e.g. PC's in the small or medium model). */
2674 
2675 typedef struct inflate_huft_s FAR inflate_huft;
2676 
2677 struct inflate_huft_s {
2678   union {
2679     struct {
2680       Byte Exop;        /* number of extra bits or operation */
2681       Byte Bits;        /* number of bits in this code or subcode */
2682     } what;
2683     uInt Nalloc;	/* number of these allocated here */
2684     Bytef *pad;         /* pad structure to a power of 2 (4 bytes for */
2685   } word;               /*  16-bit, 8 bytes for 32-bit machines) */
2686   union {
2687     uInt Base;          /* literal, length base, or distance base */
2688     inflate_huft *Next; /* pointer to next level of table */
2689   } more;
2690 };
2691 
2692 #ifdef DEBUG_ZLIB
2693   local uInt inflate_hufts;
2694 #endif
2695 
2696 local int inflate_trees_bits OF((
2697     uIntf *,                    /* 19 code lengths */
2698     uIntf *,                    /* bits tree desired/actual depth */
2699     inflate_huft * FAR *,       /* bits tree result */
2700     z_stream *));               /* for zalloc, zfree functions */
2701 
2702 local int inflate_trees_dynamic OF((
2703     uInt,                       /* number of literal/length codes */
2704     uInt,                       /* number of distance codes */
2705     uIntf *,                    /* that many (total) code lengths */
2706     uIntf *,                    /* literal desired/actual bit depth */
2707     uIntf *,                    /* distance desired/actual bit depth */
2708     inflate_huft * FAR *,       /* literal/length tree result */
2709     inflate_huft * FAR *,       /* distance tree result */
2710     z_stream *));               /* for zalloc, zfree functions */
2711 
2712 local int inflate_trees_fixed OF((
2713     uIntf *,                    /* literal desired/actual bit depth */
2714     uIntf *,                    /* distance desired/actual bit depth */
2715     inflate_huft * FAR *,       /* literal/length tree result */
2716     inflate_huft * FAR *));     /* distance tree result */
2717 
2718 local int inflate_trees_free OF((
2719     inflate_huft *,             /* tables to free */
2720     z_stream *));               /* for zfree function */
2721 
2722 
2723 /*+++++*/
2724 /* infcodes.h -- header to use infcodes.c
2725  * Copyright (C) 1995 Mark Adler
2726  * For conditions of distribution and use, see copyright notice in zlib.h
2727  */
2728 
2729 /* WARNING: this file should *not* be used by applications. It is
2730    part of the implementation of the compression library and is
2731    subject to change. Applications should only use zlib.h.
2732  */
2733 
2734 struct inflate_codes_state;
2735 typedef struct inflate_codes_state FAR inflate_codes_statef;
2736 
2737 local inflate_codes_statef *inflate_codes_new OF((
2738     uInt, uInt,
2739     inflate_huft *, inflate_huft *,
2740     z_stream *));
2741 
2742 local int inflate_codes OF((
2743     inflate_blocks_statef *,
2744     z_stream *,
2745     int));
2746 
2747 local void inflate_codes_free OF((
2748     inflate_codes_statef *,
2749     z_stream *));
2750 
2751 
2752 /*+++++*/
2753 /* inflate.c -- zlib interface to inflate modules
2754  * Copyright (C) 1995 Mark Adler
2755  * For conditions of distribution and use, see copyright notice in zlib.h
2756  */
2757 
2758 /* inflate private state */
2759 struct internal_state {
2760 
2761   /* mode */
2762   enum {
2763       METHOD,   /* waiting for method byte */
2764       FLAG,     /* waiting for flag byte */
2765       BLOCKS,   /* decompressing blocks */
2766       CHECK4,   /* four check bytes to go */
2767       CHECK3,   /* three check bytes to go */
2768       CHECK2,   /* two check bytes to go */
2769       CHECK1,   /* one check byte to go */
2770       DONE,     /* finished check, done */
2771       BAD}      /* got an error--stay here */
2772     mode;               /* current inflate mode */
2773 
2774   /* mode dependent information */
2775   union {
2776     uInt method;        /* if FLAGS, method byte */
2777     struct {
2778       uLong was;                /* computed check value */
2779       uLong need;               /* stream check value */
2780     } check;            /* if CHECK, check values to compare */
2781     uInt marker;        /* if BAD, inflateSync's marker bytes count */
2782   } sub;        /* submode */
2783 
2784   /* mode independent information */
2785   int  nowrap;          /* flag for no wrapper */
2786   uInt wbits;           /* log2(window size)  (8..15, defaults to 15) */
2787   inflate_blocks_statef
2788     *blocks;            /* current inflate_blocks state */
2789 
2790 };
2791 
2792 
2793 int inflateReset(z)
2794 z_stream *z;
2795 {
2796   uLong c;
2797 
2798   if (z == Z_NULL || z->state == Z_NULL)
2799     return Z_STREAM_ERROR;
2800   z->total_in = z->total_out = 0;
2801   z->msg = Z_NULL;
2802   z->state->mode = z->state->nowrap ? BLOCKS : METHOD;
2803   inflate_blocks_reset(z->state->blocks, z, &c);
2804   Trace((stderr, "inflate: reset\n"));
2805   return Z_OK;
2806 }
2807 
2808 
2809 int inflateEnd(z)
2810 z_stream *z;
2811 {
2812   uLong c;
2813 
2814   if (z == Z_NULL || z->state == Z_NULL || z->zfree == Z_NULL)
2815     return Z_STREAM_ERROR;
2816   if (z->state->blocks != Z_NULL)
2817     inflate_blocks_free(z->state->blocks, z, &c);
2818   ZFREE(z, z->state, sizeof(struct internal_state));
2819   z->state = Z_NULL;
2820   Trace((stderr, "inflate: end\n"));
2821   return Z_OK;
2822 }
2823 
2824 
2825 int inflateInit2(z, w)
2826 z_stream *z;
2827 int w;
2828 {
2829   /* initialize state */
2830   if (z == Z_NULL)
2831     return Z_STREAM_ERROR;
2832 /*  if (z->zalloc == Z_NULL) z->zalloc = zcalloc; */
2833 /*  if (z->zfree == Z_NULL) z->zfree = zcfree; */
2834   if ((z->state = (struct internal_state FAR *)
2835        ZALLOC(z,1,sizeof(struct internal_state))) == Z_NULL)
2836     return Z_MEM_ERROR;
2837   z->state->blocks = Z_NULL;
2838 
2839   /* handle undocumented nowrap option (no zlib header or check) */
2840   z->state->nowrap = 0;
2841   if (w < 0)
2842   {
2843     w = - w;
2844     z->state->nowrap = 1;
2845   }
2846 
2847   /* set window size */
2848   if (w < 8 || w > 15)
2849   {
2850     inflateEnd(z);
2851     return Z_STREAM_ERROR;
2852   }
2853   z->state->wbits = (uInt)w;
2854 
2855   /* create inflate_blocks state */
2856   if ((z->state->blocks =
2857        inflate_blocks_new(z, z->state->nowrap ? Z_NULL : adler32, 1 << w))
2858       == Z_NULL)
2859   {
2860     inflateEnd(z);
2861     return Z_MEM_ERROR;
2862   }
2863   Trace((stderr, "inflate: allocated\n"));
2864 
2865   /* reset state */
2866   inflateReset(z);
2867   return Z_OK;
2868 }
2869 
2870 
2871 int inflateInit(z)
2872 z_stream *z;
2873 {
2874   return inflateInit2(z, DEF_WBITS);
2875 }
2876 
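/*
 * Illustrative sketch (not part of the original sources): how a caller
 * might use the undocumented negative-windowBits convention handled by
 * inflateInit2() above to set up a raw inflate stream with no zlib
 * header or check value.  The allocator hooks shown (my_zalloc, my_zfree)
 * are hypothetical; in this build the caller must supply them, since the
 * default zcalloc/zcfree hookup is commented out in inflateInit2().
 */
#if 0
local int example_raw_inflate_setup(z)
z_stream *z;
{
    z->zalloc = my_zalloc;      /* caller-supplied allocator */
    z->zfree  = my_zfree;       /* caller-supplied deallocator */
    z->opaque = Z_NULL;
    /* negative windowBits selects nowrap: raw deflate data only */
    return inflateInit2(z, -MAX_WBITS);
}
#endif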
2877 
2878 #define NEEDBYTE {if(z->avail_in==0)goto empty;r=Z_OK;}
2879 #define NEXTBYTE (z->avail_in--,z->total_in++,*z->next_in++)
2880 
2881 int inflate(z, f)
2882 z_stream *z;
2883 int f;
2884 {
2885   int r;
2886   uInt b;
2887 
2888   if (z == Z_NULL || z->state == Z_NULL || z->next_in == Z_NULL)
2889     return Z_STREAM_ERROR;
2890   r = Z_BUF_ERROR;
2891   while (1) switch (z->state->mode)
2892   {
2893     case METHOD:
2894       NEEDBYTE
2895       if (((z->state->sub.method = NEXTBYTE) & 0xf) != DEFLATED)
2896       {
2897         z->state->mode = BAD;
2898         z->msg = "unknown compression method";
2899         z->state->sub.marker = 5;       /* can't try inflateSync */
2900         break;
2901       }
2902       if ((z->state->sub.method >> 4) + 8 > z->state->wbits)
2903       {
2904         z->state->mode = BAD;
2905         z->msg = "invalid window size";
2906         z->state->sub.marker = 5;       /* can't try inflateSync */
2907         break;
2908       }
2909       z->state->mode = FLAG;
2910     case FLAG:
2911       NEEDBYTE
2912       if ((b = NEXTBYTE) & 0x20)
2913       {
2914         z->state->mode = BAD;
2915         z->msg = "invalid reserved bit";
2916         z->state->sub.marker = 5;       /* can't try inflateSync */
2917         break;
2918       }
2919       if (((z->state->sub.method << 8) + b) % 31)
2920       {
2921         z->state->mode = BAD;
2922         z->msg = "incorrect header check";
2923         z->state->sub.marker = 5;       /* can't try inflateSync */
2924         break;
2925       }
2926       Trace((stderr, "inflate: zlib header ok\n"));
2927       z->state->mode = BLOCKS;
2928     case BLOCKS:
2929       r = inflate_blocks(z->state->blocks, z, r);
2930       if (f == Z_PACKET_FLUSH && z->avail_in == 0 && z->avail_out != 0)
2931 	  r = inflate_packet_flush(z->state->blocks);
2932       if (r == Z_DATA_ERROR)
2933       {
2934         z->state->mode = BAD;
2935         z->state->sub.marker = 0;       /* can try inflateSync */
2936         break;
2937       }
2938       if (r != Z_STREAM_END)
2939         return r;
2940       r = Z_OK;
2941       inflate_blocks_reset(z->state->blocks, z, &z->state->sub.check.was);
2942       if (z->state->nowrap)
2943       {
2944         z->state->mode = DONE;
2945         break;
2946       }
2947       z->state->mode = CHECK4;
2948     case CHECK4:
2949       NEEDBYTE
2950       z->state->sub.check.need = (uLong)NEXTBYTE << 24;
2951       z->state->mode = CHECK3;
2952     case CHECK3:
2953       NEEDBYTE
2954       z->state->sub.check.need += (uLong)NEXTBYTE << 16;
2955       z->state->mode = CHECK2;
2956     case CHECK2:
2957       NEEDBYTE
2958       z->state->sub.check.need += (uLong)NEXTBYTE << 8;
2959       z->state->mode = CHECK1;
2960     case CHECK1:
2961       NEEDBYTE
2962       z->state->sub.check.need += (uLong)NEXTBYTE;
2963 
2964       if (z->state->sub.check.was != z->state->sub.check.need)
2965       {
2966         z->state->mode = BAD;
2967         z->msg = "incorrect data check";
2968         z->state->sub.marker = 5;       /* can't try inflateSync */
2969         break;
2970       }
2971       Trace((stderr, "inflate: zlib check ok\n"));
2972       z->state->mode = DONE;
2973     case DONE:
2974       return Z_STREAM_END;
2975     case BAD:
2976       return Z_DATA_ERROR;
2977     default:
2978       return Z_STREAM_ERROR;
2979   }
2980 
2981  empty:
2982   if (f != Z_PACKET_FLUSH)
2983     return r;
2984   z->state->mode = BAD;
2985   z->state->sub.marker = 0;       /* can try inflateSync */
2986   return Z_DATA_ERROR;
2987 }
2988 
2989 /*
2990  * This subroutine adds the data at next_in/avail_in to the output history
2991  * without performing any output.  The output buffer must be "caught up";
2992  * i.e. no pending output (hence s->read equals s->write), and the state must
2993  * be BLOCKS (i.e. we should be willing to see the start of a series of
2994  * BLOCKS).  On exit, the output will also be caught up, and the checksum
2995  * will have been updated if need be.
2996  */
2997 
2998 int inflateIncomp(z)
2999 z_stream *z;
3000 {
3001     if (z->state->mode != BLOCKS)
3002 	return Z_DATA_ERROR;
3003     return inflate_addhistory(z->state->blocks, z);
3004 }
3005 
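/*
 * Illustrative sketch (not part of the original sources): a PPP
 * decompressor that receives a packet which was sent uncompressed can
 * keep its history in step by pushing the packet bytes through
 * inflateIncomp() above.  The parameter names are hypothetical.
 */
#if 0
local int example_add_incompressible(z, pkt, len)
z_stream *z;
Bytef *pkt;
uInt len;
{
    z->next_in = pkt;           /* raw, uncompressed packet data */
    z->avail_in = len;
    return inflateIncomp(z);    /* copies the data into the window and
                                   updates the running check */
}
#endif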
3006 
3007 int inflateSync(z)
3008 z_stream *z;
3009 {
3010   uInt n;       /* number of bytes to look at */
3011   Bytef *p;     /* pointer to bytes */
3012   uInt m;       /* number of marker bytes found in a row */
3013   uLong r, w;   /* temporaries to save total_in and total_out */
3014 
3015   /* set up */
3016   if (z == Z_NULL || z->state == Z_NULL)
3017     return Z_STREAM_ERROR;
3018   if (z->state->mode != BAD)
3019   {
3020     z->state->mode = BAD;
3021     z->state->sub.marker = 0;
3022   }
3023   if ((n = z->avail_in) == 0)
3024     return Z_BUF_ERROR;
3025   p = z->next_in;
3026   m = z->state->sub.marker;
3027 
3028   /* search */
3029   while (n && m < 4)
3030   {
3031     if (*p == (Byte)(m < 2 ? 0 : 0xff))
3032       m++;
3033     else if (*p)
3034       m = 0;
3035     else
3036       m = 4 - m;
3037     p++, n--;
3038   }
3039 
3040   /* restore */
3041   z->total_in += p - z->next_in;
3042   z->next_in = p;
3043   z->avail_in = n;
3044   z->state->sub.marker = m;
3045 
3046   /* return no joy or set up to restart on a new block */
3047   if (m != 4)
3048     return Z_DATA_ERROR;
3049   r = z->total_in;  w = z->total_out;
3050   inflateReset(z);
3051   z->total_in = r;  z->total_out = w;
3052   z->state->mode = BLOCKS;
3053   return Z_OK;
3054 }
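
/*
 * Illustrative sketch (not part of the original sources): recovering from
 * Z_DATA_ERROR by scanning for the next full-flush point.  inflateSync()
 * above looks for the 00 00 FF FF byte pattern that an empty stored block
 * leaves in the stream.  refill_input() is a hypothetical routine that
 * loads more data into next_in/avail_in and returns the amount added.
 */
#if 0
local int example_resync(z)
z_stream *z;
{
    int r;

    for (;;) {
        r = inflateSync(z);
        if (r != Z_DATA_ERROR && r != Z_BUF_ERROR)
            return r;                   /* Z_OK: restart at a new block */
        if (refill_input(z) <= 0)
            return Z_DATA_ERROR;        /* ran out of data while scanning */
    }
}
#endif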
3055 
3056 #undef NEEDBYTE
3057 #undef NEXTBYTE
3058 
3059 /*+++++*/
3060 /* infutil.h -- types and macros common to blocks and codes
3061  * Copyright (C) 1995 Mark Adler
3062  * For conditions of distribution and use, see copyright notice in zlib.h
3063  */
3064 
3065 /* WARNING: this file should *not* be used by applications. It is
3066    part of the implementation of the compression library and is
3067    subject to change. Applications should only use zlib.h.
3068  */
3069 
3070 /* inflate blocks semi-private state */
3071 struct inflate_blocks_state {
3072 
3073   /* mode */
3074   enum {
3075       TYPE,     /* get type bits (3, including end bit) */
3076       LENS,     /* get lengths for stored */
3077       STORED,   /* processing stored block */
3078       TABLE,    /* get table lengths */
3079       BTREE,    /* get bit lengths tree for a dynamic block */
3080       DTREE,    /* get length, distance trees for a dynamic block */
3081       CODES,    /* processing fixed or dynamic block */
3082       DRY,      /* output remaining window bytes */
3083       DONEB,     /* finished last block, done */
3084       BADB}      /* got a data error--stuck here */
3085     mode;               /* current inflate_block mode */
3086 
3087   /* mode dependent information */
3088   union {
3089     uInt left;          /* if STORED, bytes left to copy */
3090     struct {
3091       uInt table;               /* table lengths (14 bits) */
3092       uInt index;               /* index into blens (or border) */
3093       uIntf *blens;             /* bit lengths of codes */
3094       uInt bb;                  /* bit length tree depth */
3095       inflate_huft *tb;         /* bit length decoding tree */
3096       int nblens;		/* # elements allocated at blens */
3097     } trees;            /* if DTREE, decoding info for trees */
3098     struct {
3099       inflate_huft *tl, *td;    /* trees to free */
3100       inflate_codes_statef
3101          *codes;
3102     } decode;           /* if CODES, current state */
3103   } sub;                /* submode */
3104   uInt last;            /* true if this block is the last block */
3105 
3106   /* mode independent information */
3107   uInt bitk;            /* bits in bit buffer */
3108   uLong bitb;           /* bit buffer */
3109   Bytef *window;        /* sliding window */
3110   Bytef *end;           /* one byte after sliding window */
3111   Bytef *read;          /* window read pointer */
3112   Bytef *write;         /* window write pointer */
3113   check_func checkfn;   /* check function */
3114   uLong check;          /* check on output */
3115 
3116 };
3117 
3118 
3119 /* defines for inflate input/output */
3120 /*   update pointers and return */
3121 #define UPDBITS {s->bitb=b;s->bitk=k;}
3122 #define UPDIN {z->avail_in=n;z->total_in+=p-z->next_in;z->next_in=p;}
3123 #define UPDOUT {s->write=q;}
3124 #define UPDATE {UPDBITS UPDIN UPDOUT}
3125 #define LEAVE {UPDATE return inflate_flush(s,z,r);}
3126 /*   get bytes and bits */
3127 #define LOADIN {p=z->next_in;n=z->avail_in;b=s->bitb;k=s->bitk;}
3128 #define NEEDBYTE {if(n)r=Z_OK;else LEAVE}
3129 #define NEXTBYTE (n--,*p++)
3130 #define NEEDBITS(j) {while(k<(j)){NEEDBYTE;b|=((uLong)NEXTBYTE)<<k;k+=8;}}
3131 #define DUMPBITS(j) {b>>=(j);k-=(j);}
3132 /*   output bytes */
3133 #define WAVAIL (q<s->read?s->read-q-1:s->end-q)
3134 #define LOADOUT {q=s->write;m=WAVAIL;}
3135 #define WRAP {if(q==s->end&&s->read!=s->window){q=s->window;m=WAVAIL;}}
3136 #define FLUSH {UPDOUT r=inflate_flush(s,z,r); LOADOUT}
3137 #define NEEDOUT {if(m==0){WRAP if(m==0){FLUSH WRAP if(m==0) LEAVE}}r=Z_OK;}
3138 #define OUTBYTE(a) {*q++=(Byte)(a);m--;}
3139 /*   load local pointers */
3140 #define LOAD {LOADIN LOADOUT}
3141 
3142 /* And'ing with mask[n] masks the lower n bits */
3143 local uInt inflate_mask[] = {
3144     0x0000,
3145     0x0001, 0x0003, 0x0007, 0x000f, 0x001f, 0x003f, 0x007f, 0x00ff,
3146     0x01ff, 0x03ff, 0x07ff, 0x0fff, 0x1fff, 0x3fff, 0x7fff, 0xffff
3147 };
3148 
3149 /* copy as much as possible from the sliding window to the output area */
3150 local int inflate_flush OF((
3151     inflate_blocks_statef *,
3152     z_stream *,
3153     int));
3154 
3155 /*+++++*/
3156 /* inffast.h -- header to use inffast.c
3157  * Copyright (C) 1995 Mark Adler
3158  * For conditions of distribution and use, see copyright notice in zlib.h
3159  */
3160 
3161 /* WARNING: this file should *not* be used by applications. It is
3162    part of the implementation of the compression library and is
3163    subject to change. Applications should only use zlib.h.
3164  */
3165 
3166 local int inflate_fast OF((
3167     uInt,
3168     uInt,
3169     inflate_huft *,
3170     inflate_huft *,
3171     inflate_blocks_statef *,
3172     z_stream *));
3173 
3174 
3175 /*+++++*/
3176 /* infblock.c -- interpret and process block types to last block
3177  * Copyright (C) 1995 Mark Adler
3178  * For conditions of distribution and use, see copyright notice in zlib.h
3179  */
3180 
3181 /* Table for deflate from PKZIP's appnote.txt. */
3182 local uInt border[] = { /* Order of the bit length code lengths */
3183         16, 17, 18, 0, 8, 7, 9, 6, 10, 5, 11, 4, 12, 3, 13, 2, 14, 1, 15};
3184 
3185 /*
3186    Notes beyond the 1.93a appnote.txt:
3187 
3188    1. Distance pointers never point before the beginning of the output
3189       stream.
3190    2. Distance pointers can point back across blocks, up to 32k away.
3191    3. There is an implied maximum of 7 bits for the bit length table and
3192       15 bits for the actual data.
3193    4. If only one code exists, then it is encoded using one bit.  (Zero
3194       would be more efficient, but perhaps a little confusing.)  If two
3195       codes exist, they are coded using one bit each (0 and 1).
3196    5. There is no way of sending zero distance codes--a dummy must be
3197       sent if there are none.  (History: a pre 2.0 version of PKZIP would
3198       store blocks with no distance codes, but this was discovered to be
3199       too harsh a criterion.)  Valid only for 1.93a.  2.04c does allow
3200       zero distance codes, which are sent as one code of zero bits in
3201       length.
3202    6. There are up to 286 literal/length codes.  Code 256 represents the
3203       end-of-block.  Note however that the static length tree defines
3204       288 codes just to fill out the Huffman codes.  Codes 286 and 287
3205       cannot be used though, since there is no length base or extra bits
3206       defined for them.  Similarly, there are up to 30 distance codes.
3207       However, static trees define 32 codes (all 5 bits) to fill out the
3208       Huffman codes, but the last two had better not show up in the data.
3209    7. Unzip can check dynamic Huffman blocks for complete code sets.
3210       The exception is that a single code would not be complete (see #4).
3211    8. The five bits following the block type are really the number of
3212       literal codes sent minus 257.
3213    9. Length codes 8,16,16 are interpreted as 13 length codes of 8 bits
3214       (1+6+6).  Therefore, to output three times the length, you output
3215       three codes (1+1+1), whereas to output four times the same length,
3216       you only need two codes (1+3).  Hmm.
3217   10. In the tree reconstruction algorithm, Code = Code + Increment
3218       only if BitLength(i) is not zero.  (Pretty obvious.)
3219   11. Correction: 4 Bits: # of Bit Length codes - 4     (4 - 19)
3220   12. Note: length code 284 can represent 227-258, but length code 285
3221       really is 258.  The last length deserves its own, short code
3222       since it gets used a lot in very redundant files.  The length
3223       258 is special since 258 - 3 (the min match length) is 255.
3224   13. The literal/length and distance code bit lengths are read as a
3225       single stream of lengths.  It is possible (and advantageous) for
3226       a repeat code (16, 17, or 18) to go across the boundary between
3227       the two sets of lengths.
3228  */
3229 
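/*
 * Worked example (illustrative, values chosen arbitrarily) of the 14-bit
 * dynamic-block header unpacked in the TABLE state below: for a header
 * value t = 0x2caa, the low 5 bits (t & 0x1f = 10) give 257 + 10 = 267
 * literal/length codes, the next 5 bits ((t >> 5) & 0x1f = 5) give
 * 1 + 5 = 6 distance codes, and the top 4 bits (t >> 10 = 11) give
 * 4 + 11 = 15 bit-length code lengths, read in the border[] order above.
 */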
3230 
3231 local void inflate_blocks_reset(s, z, c)
3232 inflate_blocks_statef *s;
3233 z_stream *z;
3234 uLongf *c;
3235 {
3236   if (s->checkfn != Z_NULL)
3237     *c = s->check;
3238   if (s->mode == BTREE || s->mode == DTREE)
3239     ZFREE(z, s->sub.trees.blens, s->sub.trees.nblens * sizeof(uInt));
3240   if (s->mode == CODES)
3241   {
3242     inflate_codes_free(s->sub.decode.codes, z);
3243     inflate_trees_free(s->sub.decode.td, z);
3244     inflate_trees_free(s->sub.decode.tl, z);
3245   }
3246   s->mode = TYPE;
3247   s->bitk = 0;
3248   s->bitb = 0;
3249   s->read = s->write = s->window;
3250   if (s->checkfn != Z_NULL)
3251     s->check = (*s->checkfn)(0L, Z_NULL, 0);
3252   Trace((stderr, "inflate:   blocks reset\n"));
3253 }
3254 
3255 
3256 local inflate_blocks_statef *inflate_blocks_new(z, c, w)
3257 z_stream *z;
3258 check_func c;
3259 uInt w;
3260 {
3261   inflate_blocks_statef *s;
3262 
3263   if ((s = (inflate_blocks_statef *)ZALLOC
3264        (z,1,sizeof(struct inflate_blocks_state))) == Z_NULL)
3265     return s;
3266   if ((s->window = (Bytef *)ZALLOC(z, 1, w)) == Z_NULL)
3267   {
3268     ZFREE(z, s, sizeof(struct inflate_blocks_state));
3269     return Z_NULL;
3270   }
3271   s->end = s->window + w;
3272   s->checkfn = c;
3273   s->mode = TYPE;
3274   Trace((stderr, "inflate:   blocks allocated\n"));
3275   inflate_blocks_reset(s, z, &s->check);
3276   return s;
3277 }
3278 
3279 
3280 local int inflate_blocks(s, z, r)
3281 inflate_blocks_statef *s;
3282 z_stream *z;
3283 int r;
3284 {
3285   uInt t;               /* temporary storage */
3286   uLong b;              /* bit buffer */
3287   uInt k;               /* bits in bit buffer */
3288   Bytef *p;             /* input data pointer */
3289   uInt n;               /* bytes available there */
3290   Bytef *q;             /* output window write pointer */
3291   uInt m;               /* bytes to end of window or read pointer */
3292 
3293   /* copy input/output information to locals (UPDATE macro restores) */
3294   LOAD
3295 
3296   /* process input based on current state */
3297   while (1) switch (s->mode)
3298   {
3299     case TYPE:
3300       NEEDBITS(3)
3301       t = (uInt)b & 7;
3302       s->last = t & 1;
3303       switch (t >> 1)
3304       {
3305         case 0:                         /* stored */
3306           Trace((stderr, "inflate:     stored block%s\n",
3307                  s->last ? " (last)" : ""));
3308           DUMPBITS(3)
3309           t = k & 7;                    /* go to byte boundary */
3310           DUMPBITS(t)
3311           s->mode = LENS;               /* get length of stored block */
3312           break;
3313         case 1:                         /* fixed */
3314           Trace((stderr, "inflate:     fixed codes block%s\n",
3315                  s->last ? " (last)" : ""));
3316           {
3317             uInt bl, bd;
3318             inflate_huft *tl, *td;
3319 
3320             inflate_trees_fixed(&bl, &bd, &tl, &td);
3321             s->sub.decode.codes = inflate_codes_new(bl, bd, tl, td, z);
3322             if (s->sub.decode.codes == Z_NULL)
3323             {
3324               r = Z_MEM_ERROR;
3325               LEAVE
3326             }
3327             s->sub.decode.tl = Z_NULL;  /* don't try to free these */
3328             s->sub.decode.td = Z_NULL;
3329           }
3330           DUMPBITS(3)
3331           s->mode = CODES;
3332           break;
3333         case 2:                         /* dynamic */
3334           Trace((stderr, "inflate:     dynamic codes block%s\n",
3335                  s->last ? " (last)" : ""));
3336           DUMPBITS(3)
3337           s->mode = TABLE;
3338           break;
3339         case 3:                         /* illegal */
3340           DUMPBITS(3)
3341           s->mode = BADB;
3342           z->msg = "invalid block type";
3343           r = Z_DATA_ERROR;
3344           LEAVE
3345       }
3346       break;
3347     case LENS:
3348       NEEDBITS(32)
3349       if (((~b) >> 16) != (b & 0xffff))
3350       {
3351         s->mode = BADB;
3352         z->msg = "invalid stored block lengths";
3353         r = Z_DATA_ERROR;
3354         LEAVE
3355       }
3356       s->sub.left = (uInt)b & 0xffff;
3357       b = k = 0;                      /* dump bits */
3358       Tracev((stderr, "inflate:       stored length %u\n", s->sub.left));
3359       s->mode = s->sub.left ? STORED : TYPE;
3360       break;
3361     case STORED:
3362       if (n == 0)
3363         LEAVE
3364       NEEDOUT
3365       t = s->sub.left;
3366       if (t > n) t = n;
3367       if (t > m) t = m;
3368       zmemcpy(q, p, t);
3369       p += t;  n -= t;
3370       q += t;  m -= t;
3371       if ((s->sub.left -= t) != 0)
3372         break;
3373       Tracev((stderr, "inflate:       stored end, %lu total out\n",
3374               z->total_out + (q >= s->read ? q - s->read :
3375               (s->end - s->read) + (q - s->window))));
3376       s->mode = s->last ? DRY : TYPE;
3377       break;
3378     case TABLE:
3379       NEEDBITS(14)
3380       s->sub.trees.table = t = (uInt)b & 0x3fff;
3381 #ifndef PKZIP_BUG_WORKAROUND
3382       if ((t & 0x1f) > 29 || ((t >> 5) & 0x1f) > 29)
3383       {
3384         s->mode = BADB;
3385         z->msg = "too many length or distance symbols";
3386         r = Z_DATA_ERROR;
3387         LEAVE
3388       }
3389 #endif
3390       t = 258 + (t & 0x1f) + ((t >> 5) & 0x1f);
3391       if (t < 19)
3392         t = 19;
3393       if ((s->sub.trees.blens = (uIntf*)ZALLOC(z, t, sizeof(uInt))) == Z_NULL)
3394       {
3395         r = Z_MEM_ERROR;
3396         LEAVE
3397       }
3398       s->sub.trees.nblens = t;
3399       DUMPBITS(14)
3400       s->sub.trees.index = 0;
3401       Tracev((stderr, "inflate:       table sizes ok\n"));
3402       s->mode = BTREE;
3403     case BTREE:
3404       while (s->sub.trees.index < 4 + (s->sub.trees.table >> 10))
3405       {
3406         NEEDBITS(3)
3407         s->sub.trees.blens[border[s->sub.trees.index++]] = (uInt)b & 7;
3408         DUMPBITS(3)
3409       }
3410       while (s->sub.trees.index < 19)
3411         s->sub.trees.blens[border[s->sub.trees.index++]] = 0;
3412       s->sub.trees.bb = 7;
3413       t = inflate_trees_bits(s->sub.trees.blens, &s->sub.trees.bb,
3414                              &s->sub.trees.tb, z);
3415       if (t != Z_OK)
3416       {
3417         r = t;
3418         if (r == Z_DATA_ERROR)
3419           s->mode = BADB;
3420         LEAVE
3421       }
3422       s->sub.trees.index = 0;
3423       Tracev((stderr, "inflate:       bits tree ok\n"));
3424       s->mode = DTREE;
3425     case DTREE:
3426       while (t = s->sub.trees.table,
3427              s->sub.trees.index < 258 + (t & 0x1f) + ((t >> 5) & 0x1f))
3428       {
3429         inflate_huft *h;
3430         uInt i, j, c;
3431 
3432         t = s->sub.trees.bb;
3433         NEEDBITS(t)
3434         h = s->sub.trees.tb + ((uInt)b & inflate_mask[t]);
3435         t = h->word.what.Bits;
3436         c = h->more.Base;
3437         if (c < 16)
3438         {
3439           DUMPBITS(t)
3440           s->sub.trees.blens[s->sub.trees.index++] = c;
3441         }
3442         else /* c == 16..18 */
3443         {
3444           i = c == 18 ? 7 : c - 14;
3445           j = c == 18 ? 11 : 3;
3446           NEEDBITS(t + i)
3447           DUMPBITS(t)
3448           j += (uInt)b & inflate_mask[i];
3449           DUMPBITS(i)
3450           i = s->sub.trees.index;
3451           t = s->sub.trees.table;
3452           if (i + j > 258 + (t & 0x1f) + ((t >> 5) & 0x1f) ||
3453               (c == 16 && i < 1))
3454           {
3455             s->mode = BADB;
3456             z->msg = "invalid bit length repeat";
3457             r = Z_DATA_ERROR;
3458             LEAVE
3459           }
3460           c = c == 16 ? s->sub.trees.blens[i - 1] : 0;
3461           do {
3462             s->sub.trees.blens[i++] = c;
3463           } while (--j);
3464           s->sub.trees.index = i;
3465         }
3466       }
3467       inflate_trees_free(s->sub.trees.tb, z);
3468       s->sub.trees.tb = Z_NULL;
3469       {
3470         uInt bl, bd;
3471         inflate_huft *tl, *td;
3472         inflate_codes_statef *c;
3473 
3474         bl = 9;         /* must be <= 9 for lookahead assumptions */
3475         bd = 6;         /* must be <= 9 for lookahead assumptions */
3476         t = s->sub.trees.table;
3477         t = inflate_trees_dynamic(257 + (t & 0x1f), 1 + ((t >> 5) & 0x1f),
3478                                   s->sub.trees.blens, &bl, &bd, &tl, &td, z);
3479         if (t != Z_OK)
3480         {
3481           if (t == (uInt)Z_DATA_ERROR)
3482             s->mode = BADB;
3483           r = t;
3484           LEAVE
3485         }
3486         Tracev((stderr, "inflate:       trees ok\n"));
3487         if ((c = inflate_codes_new(bl, bd, tl, td, z)) == Z_NULL)
3488         {
3489           inflate_trees_free(td, z);
3490           inflate_trees_free(tl, z);
3491           r = Z_MEM_ERROR;
3492           LEAVE
3493         }
3494         ZFREE(z, s->sub.trees.blens, s->sub.trees.nblens * sizeof(uInt));
3495         s->sub.decode.codes = c;
3496         s->sub.decode.tl = tl;
3497         s->sub.decode.td = td;
3498       }
3499       s->mode = CODES;
3500     case CODES:
3501       UPDATE
3502       if ((r = inflate_codes(s, z, r)) != Z_STREAM_END)
3503         return inflate_flush(s, z, r);
3504       r = Z_OK;
3505       inflate_codes_free(s->sub.decode.codes, z);
3506       inflate_trees_free(s->sub.decode.td, z);
3507       inflate_trees_free(s->sub.decode.tl, z);
3508       LOAD
3509       Tracev((stderr, "inflate:       codes end, %lu total out\n",
3510               z->total_out + (q >= s->read ? q - s->read :
3511               (s->end - s->read) + (q - s->window))));
3512       if (!s->last)
3513       {
3514         s->mode = TYPE;
3515         break;
3516       }
3517       if (k > 7)              /* return unused byte, if any */
3518       {
3519         Assert(k < 16, "inflate_codes grabbed too many bytes")
3520         k -= 8;
3521         n++;
3522         p--;                    /* can always return one */
3523       }
3524       s->mode = DRY;
3525     case DRY:
3526       FLUSH
3527       if (s->read != s->write)
3528         LEAVE
3529       s->mode = DONEB;
3530     case DONEB:
3531       r = Z_STREAM_END;
3532       LEAVE
3533     case BADB:
3534       r = Z_DATA_ERROR;
3535       LEAVE
3536     default:
3537       r = Z_STREAM_ERROR;
3538       LEAVE
3539   }
3540 }
3541 
3542 
3543 local int inflate_blocks_free(s, z, c)
3544 inflate_blocks_statef *s;
3545 z_stream *z;
3546 uLongf *c;
3547 {
3548   inflate_blocks_reset(s, z, c);
3549   ZFREE(z, s->window, s->end - s->window);
3550   ZFREE(z, s, sizeof(struct inflate_blocks_state));
3551   Trace((stderr, "inflate:   blocks freed\n"));
3552   return Z_OK;
3553 }
3554 
3555 /*
3556  * This subroutine adds the data at next_in/avail_in to the output history
3557  * without performing any output.  The output buffer must be "caught up";
3558  * i.e. no pending output (hence s->read equals s->write), and the state must
3559  * be BLOCKS (i.e. we should be willing to see the start of a series of
3560  * BLOCKS).  On exit, the output will also be caught up, and the checksum
3561  * will have been updated if need be.
3562  */
3563 local int inflate_addhistory(s, z)
3564 inflate_blocks_statef *s;
3565 z_stream *z;
3566 {
3567     uLong b;              /* bit buffer */  /* NOT USED HERE */
3568     uInt k;               /* bits in bit buffer */ /* NOT USED HERE */
3569     uInt t;               /* temporary storage */
3570     Bytef *p;             /* input data pointer */
3571     uInt n;               /* bytes available there */
3572     Bytef *q;             /* output window write pointer */
3573     uInt m;               /* bytes to end of window or read pointer */
3574 
3575     if (s->read != s->write)
3576 	return Z_STREAM_ERROR;
3577     if (s->mode != TYPE)
3578 	return Z_DATA_ERROR;
3579 
3580     /* we're ready to rock */
3581     LOAD
3582     /* while there is input ready, copy to output buffer, moving
3583      * pointers as needed.
3584      */
3585     while (n) {
3586 	t = n;  /* how many to do */
3587 	/* is there room until end of buffer? */
3588 	if (t > m) t = m;
3589 	/* update check information */
3590 	if (s->checkfn != Z_NULL)
3591 	    s->check = (*s->checkfn)(s->check, q, t);
3592 	zmemcpy(q, p, t);
3593 	q += t;
3594 	p += t;
3595 	n -= t;
3596 	z->total_out += t;
3597 	s->read = q;    /* drag read pointer forward */
3598 /*      WRAP  */ 	/* expand WRAP macro by hand to handle s->read */
3599 	if (q == s->end) {
3600 	    s->read = q = s->window;
3601 	    m = WAVAIL;
3602 	}
3603     }
3604     UPDATE
3605     return Z_OK;
3606 }
3607 
3608 
3609 /*
3610  * At the end of a Deflate-compressed PPP packet, we expect to have seen
3611  * a `stored' block type value but not the (zero) length bytes.
3612  */
3613 local int inflate_packet_flush(s)
3614     inflate_blocks_statef *s;
3615 {
3616     if (s->mode != LENS)
3617 	return Z_DATA_ERROR;
3618     s->mode = TYPE;
3619     return Z_OK;
3620 }
3621 
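/*
 * Illustrative sketch (not part of the original sources): decompressing
 * one Deflate-compressed PPP packet using the Z_PACKET_FLUSH extension.
 * The compressor ends each packet with the header of an empty stored
 * block but omits the length bytes; when the input runs dry at exactly
 * that point, inflate(z, Z_PACKET_FLUSH) reaches inflate_packet_flush()
 * above and leaves the stream ready for the next packet.  Buffer names
 * are hypothetical.
 */
#if 0
local int example_decompress_packet(z, in, inlen, out, outlen)
z_stream *z;
Bytef *in, *out;
uInt inlen, outlen;
{
    z->next_in = in;
    z->avail_in = inlen;
    z->next_out = out;
    z->avail_out = outlen;
    return inflate(z, Z_PACKET_FLUSH);  /* Z_OK once the packet is done */
}
#endif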
3622 
3623 /*+++++*/
3624 /* inftrees.c -- generate Huffman trees for efficient decoding
3625  * Copyright (C) 1995 Mark Adler
3626  * For conditions of distribution and use, see copyright notice in zlib.h
3627  */
3628 
3629 /* simplify the use of the inflate_huft type with some defines */
3630 #define base more.Base
3631 #define next more.Next
3632 #define exop word.what.Exop
3633 #define bits word.what.Bits
3634 
3635 
3636 local int huft_build OF((
3637     uIntf *,            /* code lengths in bits */
3638     uInt,               /* number of codes */
3639     uInt,               /* number of "simple" codes */
3640     uIntf *,            /* list of base values for non-simple codes */
3641     uIntf *,            /* list of extra bits for non-simple codes */
3642     inflate_huft * FAR*,/* result: starting table */
3643     uIntf *,            /* maximum lookup bits (returns actual) */
3644     z_stream *));       /* for zalloc function */
3645 
3646 local voidpf falloc OF((
3647     voidpf,             /* opaque pointer (not used) */
3648     uInt,               /* number of items */
3649     uInt));             /* size of item */
3650 
3651 local void ffree OF((
3652     voidpf q,           /* opaque pointer (not used) */
3653     voidpf p,           /* what to free (not used) */
3654     uInt n));		/* number of bytes (not used) */
3655 
3656 /* Tables for deflate from PKZIP's appnote.txt. */
3657 local uInt cplens[] = { /* Copy lengths for literal codes 257..285 */
3658         3, 4, 5, 6, 7, 8, 9, 10, 11, 13, 15, 17, 19, 23, 27, 31,
3659         35, 43, 51, 59, 67, 83, 99, 115, 131, 163, 195, 227, 258, 0, 0};
3660         /* actually lengths - 2; also see note #13 above about 258 */
3661 local uInt cplext[] = { /* Extra bits for literal codes 257..285 */
3662         0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 2, 2, 2, 2,
3663         3, 3, 3, 3, 4, 4, 4, 4, 5, 5, 5, 5, 0, 192, 192}; /* 192==invalid */
3664 local uInt cpdist[] = { /* Copy offsets for distance codes 0..29 */
3665         1, 2, 3, 4, 5, 7, 9, 13, 17, 25, 33, 49, 65, 97, 129, 193,
3666         257, 385, 513, 769, 1025, 1537, 2049, 3073, 4097, 6145,
3667         8193, 12289, 16385, 24577};
3668 local uInt cpdext[] = { /* Extra bits for distance codes */
3669         0, 0, 0, 0, 1, 1, 2, 2, 3, 3, 4, 4, 5, 5, 6, 6,
3670         7, 7, 8, 8, 9, 9, 10, 10, 11, 11,
3671         12, 12, 13, 13};
3672 
3673 /*
3674    Huffman code decoding is performed using a multi-level table lookup.
3675    The fastest way to decode is to simply build a lookup table whose
3676    size is determined by the longest code.  However, the time it takes
3677    to build this table can also be a factor if the data being decoded
3678    is not very long.  The most common codes are necessarily the
3679    shortest codes, so those codes dominate the decoding time, and hence
3680    the speed.  The idea is you can have a shorter table that decodes the
3681    shorter, more probable codes, and then point to subsidiary tables for
3682    the longer codes.  The time it costs to decode the longer codes is
3683    then traded against the time it takes to make longer tables.
3684 
3685    The results of this trade are in the variables lbits and dbits
3686    below.  lbits is the number of bits the first level table for literal/
3687    length codes can decode in one step, and dbits is the same thing for
3688    the distance codes.  Subsequent tables are also less than or equal to
3689    those sizes.  These values may be adjusted either when all of the
3690    codes are shorter than that, in which case the longest code length in
3691    bits is used, or when the shortest code is *longer* than the requested
3692    table size, in which case the length of the shortest code in bits is
3693    used.
3694 
3695    There are two different values for the two tables, since they code a
3696    different number of possibilities each.  The literal/length table
3697    codes 286 possible values, or in a flat code, a little over eight
3698    bits.  The distance table codes 30 possible values, or a little less
3699    than five bits, flat.  The optimum values for speed end up being
3700    about one bit more than those, so lbits is 8+1 and dbits is 5+1.
3701    The optimum values may differ though from machine to machine, and
3702    possibly even between compilers.  Your mileage may vary.
3703  */
3704 
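/*
 * Minimal sketch (not part of the original sources) of the two-level
 * lookup described above, using the accessor defines declared earlier in
 * this section.  The low `lbits' bits of the bit buffer index the root
 * table; an entry with a small nonzero exop is a link, and its Bits count
 * says how many bits to drop before indexing the subsidiary table it
 * points to.  End-of-block, invalid-code, and extra-bits handling for
 * lengths and distances are omitted here.
 */
#if 0
local uInt example_decode(tl, lbits, b)
inflate_huft *tl;
uInt lbits;
uLong b;
{
    inflate_huft *t;
    uInt e;

    t = tl + ((uInt)b & inflate_mask[lbits]);   /* first-level lookup */
    b >>= t->bits;                              /* drop the bits consumed */
    while ((e = t->exop) != 0 && !(e & (16 + 32 + 64)))
    {                                           /* link to a sub-table */
        t = t->next + ((uInt)b & inflate_mask[e]);
        b >>= t->bits;
    }
    return t->base;     /* literal value, or length/distance base */
}
#endif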
3705 
3706 /* If BMAX needs to be larger than 16, then h and x[] should be uLong. */
3707 #define BMAX 15         /* maximum bit length of any code */
3708 #define N_MAX 288       /* maximum number of codes in any set */
3709 
3710 #ifdef DEBUG_ZLIB
3711   uInt inflate_hufts;
3712 #endif
3713 
3714 local int huft_build(b, n, s, d, e, t, m, zs)
3715 uIntf *b;               /* code lengths in bits (all assumed <= BMAX) */
3716 uInt n;                 /* number of codes (assumed <= N_MAX) */
3717 uInt s;                 /* number of simple-valued codes (0..s-1) */
3718 uIntf *d;               /* list of base values for non-simple codes */
3719 uIntf *e;               /* list of extra bits for non-simple codes */
3720 inflate_huft * FAR *t;  /* result: starting table */
3721 uIntf *m;               /* maximum lookup bits, returns actual */
3722 z_stream *zs;           /* for zalloc function */
3723 /* Given a list of code lengths and a maximum table size, make a set of
3724    tables to decode that set of codes.  Return Z_OK on success, Z_BUF_ERROR
3725    if the given code set is incomplete (the tables are still built in this
3726    case), Z_DATA_ERROR if the input is invalid (all zero length codes or an
3727    over-subscribed set of lengths), or Z_MEM_ERROR if not enough memory. */
3728 {
3729 
3730   uInt a;                       /* counter for codes of length k */
3731   uInt c[BMAX+1];               /* bit length count table */
3732   uInt f;                       /* i repeats in table every f entries */
3733   int g;                        /* maximum code length */
3734   int h;                        /* table level */
3735   register uInt i;              /* counter, current code */
3736   register uInt j;              /* counter */
3737   register int k;               /* number of bits in current code */
3738   int l;                        /* bits per table (returned in m) */
3739   register uIntf *p;            /* pointer into c[], b[], or v[] */
3740   inflate_huft *q;              /* points to current table */
3741   struct inflate_huft_s r;      /* table entry for structure assignment */
3742   inflate_huft *u[BMAX];        /* table stack */
3743   uInt v[N_MAX];                /* values in order of bit length */
3744   register int w;               /* bits before this table == (l * h) */
3745   uInt x[BMAX+1];               /* bit offsets, then code stack */
3746   uIntf *xp;                    /* pointer into x */
3747   int y;                        /* number of dummy codes added */
3748   uInt z;                       /* number of entries in current table */
3749 
3750 
3751   /* Generate counts for each bit length */
3752   p = c;
3753 #define C0 *p++ = 0;
3754 #define C2 C0 C0 C0 C0
3755 #define C4 C2 C2 C2 C2
3756   C4                            /* clear c[]--assume BMAX+1 is 16 */
3757   p = b;  i = n;
3758   do {
3759     c[*p++]++;                  /* assume all entries <= BMAX */
3760   } while (--i);
3761   if (c[0] == n)                /* null input--all zero length codes */
3762   {
3763     *t = (inflate_huft *)Z_NULL;
3764     *m = 0;
3765     return Z_OK;
3766   }
3767 
3768 
3769   /* Find minimum and maximum length, bound *m by those */
3770   l = *m;
3771   for (j = 1; j <= BMAX; j++)
3772     if (c[j])
3773       break;
3774   k = j;                        /* minimum code length */
3775   if ((uInt)l < j)
3776     l = j;
3777   for (i = BMAX; i; i--)
3778     if (c[i])
3779       break;
3780   g = i;                        /* maximum code length */
3781   if ((uInt)l > i)
3782     l = i;
3783   *m = l;
3784 
3785 
3786   /* Adjust last length count to fill out codes, if needed */
3787   for (y = 1 << j; j < i; j++, y <<= 1)
3788     if ((y -= c[j]) < 0)
3789       return Z_DATA_ERROR;
3790   if ((y -= c[i]) < 0)
3791     return Z_DATA_ERROR;
3792   c[i] += y;
3793 
3794 
3795   /* Generate starting offsets into the value table for each length */
3796   x[1] = j = 0;
3797   p = c + 1;  xp = x + 2;
3798   while (--i) {                 /* note that i == g from above */
3799     *xp++ = (j += *p++);
3800   }
3801 
3802 
3803   /* Make a table of values in order of bit lengths */
3804   p = b;  i = 0;
3805   do {
3806     if ((j = *p++) != 0)
3807       v[x[j]++] = i;
3808   } while (++i < n);
3809 
3810 
3811   /* Generate the Huffman codes and for each, make the table entries */
3812   x[0] = i = 0;                 /* first Huffman code is zero */
3813   p = v;                        /* grab values in bit order */
3814   h = -1;                       /* no tables yet--level -1 */
3815   w = -l;                       /* bits decoded == (l * h) */
3816   u[0] = (inflate_huft *)Z_NULL;        /* just to keep compilers happy */
3817   q = (inflate_huft *)Z_NULL;   /* ditto */
3818   z = 0;                        /* ditto */
3819 
3820   /* go through the bit lengths (k already is bits in shortest code) */
3821   for (; k <= g; k++)
3822   {
3823     a = c[k];
3824     while (a--)
3825     {
3826       /* here i is the Huffman code of length k bits for value *p */
3827       /* make tables up to required level */
3828       while (k > w + l)
3829       {
3830         h++;
3831         w += l;                 /* previous table always l bits */
3832 
3833         /* compute minimum size table less than or equal to l bits */
3834         z = (z = g - w) > (uInt)l ? l : z;      /* table size upper limit */
3835         if ((f = 1 << (j = k - w)) > a + 1)     /* try a k-w bit table */
3836         {                       /* too few codes for k-w bit table */
3837           f -= a + 1;           /* deduct codes from patterns left */
3838           xp = c + k;
3839           if (j < z)
3840             while (++j < z)     /* try smaller tables up to z bits */
3841             {
3842               if ((f <<= 1) <= *++xp)
3843                 break;          /* enough codes to use up j bits */
3844               f -= *xp;         /* else deduct codes from patterns */
3845             }
3846         }
3847         z = 1 << j;             /* table entries for j-bit table */
3848 
3849         /* allocate and link in new table */
3850         if ((q = (inflate_huft *)ZALLOC
3851              (zs,z + 1,sizeof(inflate_huft))) == Z_NULL)
3852         {
3853           if (h)
3854             inflate_trees_free(u[0], zs);
3855           return Z_MEM_ERROR;   /* not enough memory */
3856         }
3857 	q->word.Nalloc = z + 1;
3858 #ifdef DEBUG_ZLIB
3859         inflate_hufts += z + 1;
3860 #endif
3861         *t = q + 1;             /* link to list for huft_free() */
3862         *(t = &(q->next)) = Z_NULL;
3863         u[h] = ++q;             /* table starts after link */
3864 
3865         /* connect to last table, if there is one */
3866         if (h)
3867         {
3868           x[h] = i;             /* save pattern for backing up */
3869           r.bits = (Byte)l;     /* bits to dump before this table */
3870           r.exop = (Byte)j;     /* bits in this table */
3871           r.next = q;           /* pointer to this table */
3872           j = i >> (w - l);     /* (get around Turbo C bug) */
3873           u[h-1][j] = r;        /* connect to last table */
3874         }
3875       }
3876 
3877       /* set up table entry in r */
3878       r.bits = (Byte)(k - w);
3879       if (p >= v + n)
3880         r.exop = 128 + 64;      /* out of values--invalid code */
3881       else if (*p < s)
3882       {
3883         r.exop = (Byte)(*p < 256 ? 0 : 32 + 64);     /* 256 is end-of-block */
3884         r.base = *p++;          /* simple code is just the value */
3885       }
3886       else
3887       {
3888         r.exop = (Byte)e[*p - s] + 16 + 64; /* non-simple--look up in lists */
3889         r.base = d[*p++ - s];
3890       }
3891 
3892       /* fill code-like entries with r */
3893       f = 1 << (k - w);
3894       for (j = i >> w; j < z; j += f)
3895         q[j] = r;
3896 
3897       /* backwards increment the k-bit code i */
3898       for (j = 1 << (k - 1); i & j; j >>= 1)
3899         i ^= j;
3900       i ^= j;
3901 
3902       /* backup over finished tables */
3903       while ((i & ((1 << w) - 1)) != x[h])
3904       {
3905         h--;                    /* don't need to update q */
3906         w -= l;
3907       }
3908     }
3909   }
3910 
3911 
3912   /* Return Z_BUF_ERROR if we were given an incomplete table */
3913   return y != 0 && g != 1 ? Z_BUF_ERROR : Z_OK;
3914 }
3915 
3916 
3917 local int inflate_trees_bits(c, bb, tb, z)
3918 uIntf *c;               /* 19 code lengths */
3919 uIntf *bb;              /* bits tree desired/actual depth */
3920 inflate_huft * FAR *tb; /* bits tree result */
3921 z_stream *z;            /* for zfree function */
3922 {
3923   int r;
3924 
3925   r = huft_build(c, 19, 19, (uIntf*)Z_NULL, (uIntf*)Z_NULL, tb, bb, z);
3926   if (r == Z_DATA_ERROR)
3927     z->msg = "oversubscribed dynamic bit lengths tree";
3928   else if (r == Z_BUF_ERROR)
3929   {
3930     inflate_trees_free(*tb, z);
3931     z->msg = "incomplete dynamic bit lengths tree";
3932     r = Z_DATA_ERROR;
3933   }
3934   return r;
3935 }
3936 
3937 
3938 local int inflate_trees_dynamic(nl, nd, c, bl, bd, tl, td, z)
3939 uInt nl;                /* number of literal/length codes */
3940 uInt nd;                /* number of distance codes */
3941 uIntf *c;               /* that many (total) code lengths */
3942 uIntf *bl;              /* literal desired/actual bit depth */
3943 uIntf *bd;              /* distance desired/actual bit depth */
3944 inflate_huft * FAR *tl; /* literal/length tree result */
3945 inflate_huft * FAR *td; /* distance tree result */
3946 z_stream *z;            /* for zfree function */
3947 {
3948   int r;
3949 
3950   /* build literal/length tree */
3951   if ((r = huft_build(c, nl, 257, cplens, cplext, tl, bl, z)) != Z_OK)
3952   {
3953     if (r == Z_DATA_ERROR)
3954       z->msg = "oversubscribed literal/length tree";
3955     else if (r == Z_BUF_ERROR)
3956     {
3957       inflate_trees_free(*tl, z);
3958       z->msg = "incomplete literal/length tree";
3959       r = Z_DATA_ERROR;
3960     }
3961     return r;
3962   }
3963 
3964   /* build distance tree */
3965   if ((r = huft_build(c + nl, nd, 0, cpdist, cpdext, td, bd, z)) != Z_OK)
3966   {
3967     if (r == Z_DATA_ERROR)
3968       z->msg = "oversubscribed distance tree";
3969     else if (r == Z_BUF_ERROR) {
3970 #ifdef PKZIP_BUG_WORKAROUND
3971       r = Z_OK;
3972     }
3973 #else
3974       inflate_trees_free(*td, z);
3975       z->msg = "incomplete distance tree";
3976       r = Z_DATA_ERROR;
3977     }
3978     inflate_trees_free(*tl, z);
3979     return r;
3980 #endif
3981   }
3982 
3983   /* done */
3984   return Z_OK;
3985 }
3986 
3987 
3988 /* build fixed tables only once--keep them here */
3989 local int fixed_lock = 0;
3990 local int fixed_built = 0;
3991 #define FIXEDH 530      /* number of hufts used by fixed tables */
3992 local uInt fixed_left = FIXEDH;
3993 local inflate_huft fixed_mem[FIXEDH];
3994 local uInt fixed_bl;
3995 local uInt fixed_bd;
3996 local inflate_huft *fixed_tl;
3997 local inflate_huft *fixed_td;
3998 
3999 
4000 local voidpf falloc(q, n, s)
4001 voidpf q;        /* opaque pointer (not used) */
4002 uInt n;         /* number of items */
4003 uInt s;         /* size of item */
4004 {
4005   Assert(s == sizeof(inflate_huft) && n <= fixed_left,
4006          "inflate_trees falloc overflow");
4007   if (q) s++; /* to make some compilers happy */
4008   fixed_left -= n;
4009   return (voidpf)(fixed_mem + fixed_left);
4010 }
4011 
4012 
4013 local void ffree(q, p, n)
4014 voidpf q;
4015 voidpf p;
4016 uInt n;
4017 {
4018   Assert(0, "inflate_trees ffree called!");
4019   if (q) q = p; /* to make some compilers happy */
4020 }
4021 
4022 
4023 local int inflate_trees_fixed(bl, bd, tl, td)
4024 uIntf *bl;               /* literal desired/actual bit depth */
4025 uIntf *bd;               /* distance desired/actual bit depth */
4026 inflate_huft * FAR *tl;  /* literal/length tree result */
4027 inflate_huft * FAR *td;  /* distance tree result */
4028 {
4029   /* build fixed tables if not built already--lock out other instances */
4030   while (++fixed_lock > 1)
4031     fixed_lock--;
4032   if (!fixed_built)
4033   {
4034     int k;              /* temporary variable */
4035     unsigned c[288];    /* length list for huft_build */
4036     z_stream z;         /* for falloc function */
4037 
4038     /* set up fake z_stream for memory routines */
4039     z.zalloc = falloc;
4040     z.zfree = ffree;
4041     z.opaque = Z_NULL;
4042 
4043     /* literal table */
4044     for (k = 0; k < 144; k++)
4045       c[k] = 8;
4046     for (; k < 256; k++)
4047       c[k] = 9;
4048     for (; k < 280; k++)
4049       c[k] = 7;
4050     for (; k < 288; k++)
4051       c[k] = 8;
4052     fixed_bl = 7;
4053     huft_build(c, 288, 257, cplens, cplext, &fixed_tl, &fixed_bl, &z);
4054 
4055     /* distance table */
4056     for (k = 0; k < 30; k++)
4057       c[k] = 5;
4058     fixed_bd = 5;
4059     huft_build(c, 30, 0, cpdist, cpdext, &fixed_td, &fixed_bd, &z);
4060 
4061     /* done */
4062     fixed_built = 1;
4063   }
4064   fixed_lock--;
4065   *bl = fixed_bl;
4066   *bd = fixed_bd;
4067   *tl = fixed_tl;
4068   *td = fixed_td;
4069   return Z_OK;
4070 }
4071 
4072 
4073 local int inflate_trees_free(t, z)
4074 inflate_huft *t;        /* table to free */
4075 z_stream *z;            /* for zfree function */
4076 /* Free the malloc'ed tables built by huft_build(), which makes a linked
4077    list of the tables it made, with the links in a dummy first entry of
4078    each table. */
4079 {
4080   register inflate_huft *p, *q;
4081 
4082   /* Go through linked list, freeing from the malloced (t[-1]) address. */
4083   p = t;
4084   while (p != Z_NULL)
4085   {
4086     q = (--p)->next;
4087     ZFREE(z, p, p->word.Nalloc * sizeof(inflate_huft));
4088     p = q;
4089   }
4090   return Z_OK;
4091 }
4092 
4093 /*+++++*/
4094 /* infcodes.c -- process literals and length/distance pairs
4095  * Copyright (C) 1995 Mark Adler
4096  * For conditions of distribution and use, see copyright notice in zlib.h
4097  */
4098 
4099 /* simplify the use of the inflate_huft type with some defines */
4100 #define base more.Base
4101 #define next more.Next
4102 #define exop word.what.Exop
4103 #define bits word.what.Bits
4104 
4105 /* inflate codes private state */
4106 struct inflate_codes_state {
4107 
4108   /* mode */
4109   enum {        /* waiting for "i:"=input, "o:"=output, "x:"=nothing */
4110       START,    /* x: set up for LEN */
4111       LEN,      /* i: get length/literal/eob next */
4112       LENEXT,   /* i: getting length extra (have base) */
4113       DIST,     /* i: get distance next */
4114       DISTEXT,  /* i: getting distance extra */
4115       COPY,     /* o: copying bytes in window, waiting for space */
4116       LIT,      /* o: got literal, waiting for output space */
4117       WASH,     /* o: got eob, possibly still output waiting */
4118       END,      /* x: got eob and all data flushed */
4119       BADCODE}  /* x: got error */
4120     mode;               /* current inflate_codes mode */
4121 
4122   /* mode dependent information */
4123   uInt len;
4124   union {
4125     struct {
4126       inflate_huft *tree;       /* pointer into tree */
4127       uInt need;                /* bits needed */
4128     } code;             /* if LEN or DIST, where in tree */
4129     uInt lit;           /* if LIT, literal */
4130     struct {
4131       uInt get;                 /* bits to get for extra */
4132       uInt dist;                /* distance back to copy from */
4133     } copy;             /* if EXT or COPY, where and how much */
4134   } sub;                /* submode */
4135 
4136   /* mode independent information */
4137   Byte lbits;           /* ltree bits decoded per branch */
4138   Byte dbits;           /* dtree bits decoded per branch */
4139   inflate_huft *ltree;          /* literal/length/eob tree */
4140   inflate_huft *dtree;          /* distance tree */
4141 
4142 };
4143 
4144 
4145 local inflate_codes_statef *inflate_codes_new(bl, bd, tl, td, z)
4146 uInt bl, bd;
4147 inflate_huft *tl, *td;
4148 z_stream *z;
4149 {
4150   inflate_codes_statef *c;
4151 
4152   if ((c = (inflate_codes_statef *)
4153        ZALLOC(z,1,sizeof(struct inflate_codes_state))) != Z_NULL)
4154   {
4155     c->mode = START;
4156     c->lbits = (Byte)bl;
4157     c->dbits = (Byte)bd;
4158     c->ltree = tl;
4159     c->dtree = td;
4160     Tracev((stderr, "inflate:       codes new\n"));
4161   }
4162   return c;
4163 }
4164 
4165 
4166 local int inflate_codes(s, z, r)
4167 inflate_blocks_statef *s;
4168 z_stream *z;
4169 int r;
4170 {
4171   uInt j;               /* temporary storage */
4172   inflate_huft *t;      /* temporary pointer */
4173   uInt e;               /* extra bits or operation */
4174   uLong b;              /* bit buffer */
4175   uInt k;               /* bits in bit buffer */
4176   Bytef *p;             /* input data pointer */
4177   uInt n;               /* bytes available there */
4178   Bytef *q;             /* output window write pointer */
4179   uInt m;               /* bytes to end of window or read pointer */
4180   Bytef *f;             /* pointer to copy strings from */
4181   inflate_codes_statef *c = s->sub.decode.codes;  /* codes state */
4182 
4183   /* copy input/output information to locals (UPDATE macro restores) */
4184   LOAD
4185 
4186   /* process input and output based on current state */
4187   while (1) switch (c->mode)
4188   {             /* waiting for "i:"=input, "o:"=output, "x:"=nothing */
4189     case START:         /* x: set up for LEN */
4190 #ifndef SLOW
4191       if (m >= 258 && n >= 10)
4192       {
4193         UPDATE
4194         r = inflate_fast(c->lbits, c->dbits, c->ltree, c->dtree, s, z);
4195         LOAD
4196         if (r != Z_OK)
4197         {
4198           c->mode = r == Z_STREAM_END ? WASH : BADCODE;
4199           break;
4200         }
4201       }
4202 #endif /* !SLOW */
4203       c->sub.code.need = c->lbits;
4204       c->sub.code.tree = c->ltree;
4205       c->mode = LEN;
4206     case LEN:           /* i: get length/literal/eob next */
4207       j = c->sub.code.need;
4208       NEEDBITS(j)
4209       t = c->sub.code.tree + ((uInt)b & inflate_mask[j]);
4210       DUMPBITS(t->bits)
4211       e = (uInt)(t->exop);
4212       if (e == 0)               /* literal */
4213       {
4214         c->sub.lit = t->base;
4215         Tracevv((stderr, t->base >= 0x20 && t->base < 0x7f ?
4216                  "inflate:         literal '%c'\n" :
4217                  "inflate:         literal 0x%02x\n", t->base));
4218         c->mode = LIT;
4219         break;
4220       }
4221       if (e & 16)               /* length */
4222       {
4223         c->sub.copy.get = e & 15;
4224         c->len = t->base;
4225         c->mode = LENEXT;
4226         break;
4227       }
4228       if ((e & 64) == 0)        /* next table */
4229       {
4230         c->sub.code.need = e;
4231         c->sub.code.tree = t->next;
4232         break;
4233       }
4234       if (e & 32)               /* end of block */
4235       {
4236         Tracevv((stderr, "inflate:         end of block\n"));
4237         c->mode = WASH;
4238         break;
4239       }
4240       c->mode = BADCODE;        /* invalid code */
4241       z->msg = "invalid literal/length code";
4242       r = Z_DATA_ERROR;
4243       LEAVE
4244     case LENEXT:        /* i: getting length extra (have base) */
4245       j = c->sub.copy.get;
4246       NEEDBITS(j)
4247       c->len += (uInt)b & inflate_mask[j];
4248       DUMPBITS(j)
4249       c->sub.code.need = c->dbits;
4250       c->sub.code.tree = c->dtree;
4251       Tracevv((stderr, "inflate:         length %u\n", c->len));
4252       c->mode = DIST;
4253     case DIST:          /* i: get distance next */
4254       j = c->sub.code.need;
4255       NEEDBITS(j)
4256       t = c->sub.code.tree + ((uInt)b & inflate_mask[j]);
4257       DUMPBITS(t->bits)
4258       e = (uInt)(t->exop);
4259       if (e & 16)               /* distance */
4260       {
4261         c->sub.copy.get = e & 15;
4262         c->sub.copy.dist = t->base;
4263         c->mode = DISTEXT;
4264         break;
4265       }
4266       if ((e & 64) == 0)        /* next table */
4267       {
4268         c->sub.code.need = e;
4269         c->sub.code.tree = t->next;
4270         break;
4271       }
4272       c->mode = BADCODE;        /* invalid code */
4273       z->msg = "invalid distance code";
4274       r = Z_DATA_ERROR;
4275       LEAVE
4276     case DISTEXT:       /* i: getting distance extra */
4277       j = c->sub.copy.get;
4278       NEEDBITS(j)
4279       c->sub.copy.dist += (uInt)b & inflate_mask[j];
4280       DUMPBITS(j)
4281       Tracevv((stderr, "inflate:         distance %u\n", c->sub.copy.dist));
4282       c->mode = COPY;
4283     case COPY:          /* o: copying bytes in window, waiting for space */
4284 #ifndef __TURBOC__ /* Turbo C bug for following expression */
4285       f = (uInt)(q - s->window) < c->sub.copy.dist ?
4286           s->end - (c->sub.copy.dist - (q - s->window)) :
4287           q - c->sub.copy.dist;
4288 #else
4289       f = q - c->sub.copy.dist;
4290       if ((uInt)(q - s->window) < c->sub.copy.dist)
4291         f = s->end - (c->sub.copy.dist - (q - s->window));
4292 #endif
4293       while (c->len)
4294       {
4295         NEEDOUT
4296         OUTBYTE(*f++)
4297         if (f == s->end)
4298           f = s->window;
4299         c->len--;
4300       }
4301       c->mode = START;
4302       break;
4303     case LIT:           /* o: got literal, waiting for output space */
4304       NEEDOUT
4305       OUTBYTE(c->sub.lit)
4306       c->mode = START;
4307       break;
4308     case WASH:          /* o: got eob, possibly more output */
4309       FLUSH
4310       if (s->read != s->write)
4311         LEAVE
4312       c->mode = END;
4313     case END:
4314       r = Z_STREAM_END;
4315       LEAVE
4316     case BADCODE:       /* x: got error */
4317       r = Z_DATA_ERROR;
4318       LEAVE
4319     default:
4320       r = Z_STREAM_ERROR;
4321       LEAVE
4322   }
4323 }
4324 
4325 
4326 local void inflate_codes_free(c, z)
4327 inflate_codes_statef *c;
4328 z_stream *z;
4329 {
4330   ZFREE(z, c, sizeof(struct inflate_codes_state));
4331   Tracev((stderr, "inflate:       codes free\n"));
4332 }
4333 
4334 /*+++++*/
4335 /* inflate_util.c -- data and routines common to blocks and codes
4336  * Copyright (C) 1995 Mark Adler
4337  * For conditions of distribution and use, see copyright notice in zlib.h
4338  */
4339 
4340 /* copy as much as possible from the sliding window to the output area */
4341 local int inflate_flush(s, z, r)
4342 inflate_blocks_statef *s;
4343 z_stream *z;
4344 int r;
4345 {
4346   uInt n;
4347   Bytef *p, *q;
4348 
4349   /* local copies of source and destination pointers */
4350   p = z->next_out;
4351   q = s->read;
4352 
4353   /* compute number of bytes to copy as far as end of window */
4354   n = (uInt)((q <= s->write ? s->write : s->end) - q);
4355   if (n > z->avail_out) n = z->avail_out;
4356   if (n && r == Z_BUF_ERROR) r = Z_OK;
4357 
4358   /* update counters */
4359   z->avail_out -= n;
4360   z->total_out += n;
4361 
4362   /* update check information */
4363   if (s->checkfn != Z_NULL)
4364     s->check = (*s->checkfn)(s->check, q, n);
4365 
4366   /* copy as far as end of window */
4367   if (p != NULL) {
4368     zmemcpy(p, q, n);
4369     p += n;
4370   }
4371   q += n;
4372 
4373   /* see if more to copy at beginning of window */
4374   if (q == s->end)
4375   {
4376     /* wrap pointers */
4377     q = s->window;
4378     if (s->write == s->end)
4379       s->write = s->window;
4380 
4381     /* compute bytes to copy */
4382     n = (uInt)(s->write - q);
4383     if (n > z->avail_out) n = z->avail_out;
4384     if (n && r == Z_BUF_ERROR) r = Z_OK;
4385 
4386     /* update counters */
4387     z->avail_out -= n;
4388     z->total_out += n;
4389 
4390     /* update check information */
4391     if (s->checkfn != Z_NULL)
4392       s->check = (*s->checkfn)(s->check, q, n);
4393 
4394     /* copy */
4395     if (p != NULL) {
4396       zmemcpy(p, q, n);
4397       p += n;
4398     }
4399     q += n;
4400   }
4401 
4402   /* update pointers */
4403   z->next_out = p;
4404   s->read = q;
4405 
4406   /* done */
4407   return r;
4408 }
4409 
4410 
4411 /*+++++*/
4412 /* inffast.c -- process literals and length/distance pairs fast
4413  * Copyright (C) 1995 Mark Adler
4414  * For conditions of distribution and use, see copyright notice in zlib.h
4415  */
4416 
4417 /* simplify the use of the inflate_huft type with some defines */
4418 #define base more.Base
4419 #define next more.Next
4420 #define exop word.what.Exop
4421 #define bits word.what.Bits
4422 
4423 /* macros for bit input with no checking and for returning unused bytes */
4424 #define GRABBITS(j) {while(k<(j)){b|=((uLong)NEXTBYTE)<<k;k+=8;}}
4425 #define UNGRAB {n+=(c=k>>3);p-=c;k&=7;}
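/* Note: GRABBITS(j) tops the bit buffer b up to at least j valid bits by
   pulling whole bytes from the input; UNGRAB hands the k>>3 completely
   unused bytes back to the input (n += c; p -= c;) and keeps only the
   remaining 0..7 bits, so the caller resumes with exact byte accounting. */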
4426 
4427 /* Called with number of bytes left to write in window at least 258
4428    (the maximum string length) and number of input bytes available
4429    at least ten.  The ten bytes are six bytes for the longest length/
4430    distance pair plus four bytes for overloading the bit buffer. */
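/* As a rough check on those numbers: a length/distance pair needs at most
   15 bits of length code + 5 length extra bits + 15 bits of distance code
   + 13 distance extra bits = 48 bits = 6 bytes, and since the bit buffer b
   is a uLong of at least 32 bits, at most 4 whole unread bytes can be left
   in it for UNGRAB to return. */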
4431 
4432 local int inflate_fast(bl, bd, tl, td, s, z)
4433 uInt bl, bd;
4434 inflate_huft *tl, *td;
4435 inflate_blocks_statef *s;
4436 z_stream *z;
4437 {
4438   inflate_huft *t;      /* temporary pointer */
4439   uInt e;               /* extra bits or operation */
4440   uLong b;              /* bit buffer */
4441   uInt k;               /* bits in bit buffer */
4442   Bytef *p;             /* input data pointer */
4443   uInt n;               /* bytes available there */
4444   Bytef *q;             /* output window write pointer */
4445   uInt m;               /* bytes to end of window or read pointer */
4446   uInt ml;              /* mask for literal/length tree */
4447   uInt md;              /* mask for distance tree */
4448   uInt c;               /* bytes to copy */
4449   uInt d;               /* distance back to copy from */
4450   Bytef *r;             /* copy source pointer */
4451 
4452   /* load input, output, bit values */
4453   LOAD
4454 
4455   /* initialize masks */
4456   ml = inflate_mask[bl];
4457   md = inflate_mask[bd];
4458 
4459   /* do until not enough input or output space for fast loop */
4460   do {                          /* assume called with m >= 258 && n >= 10 */
4461     /* get literal/length code */
4462     GRABBITS(20)                /* max bits for literal/length code */
4463     if ((e = (t = tl + ((uInt)b & ml))->exop) == 0)
4464     {
4465       DUMPBITS(t->bits)
4466       Tracevv((stderr, t->base >= 0x20 && t->base < 0x7f ?
4467                 "inflate:         * literal '%c'\n" :
4468                 "inflate:         * literal 0x%02x\n", t->base));
4469       *q++ = (Byte)t->base;
4470       m--;
4471       continue;
4472     }
4473     do {
4474       DUMPBITS(t->bits)
4475       if (e & 16)
4476       {
4477         /* get extra bits for length */
4478         e &= 15;
4479         c = t->base + ((uInt)b & inflate_mask[e]);
4480         DUMPBITS(e)
4481         Tracevv((stderr, "inflate:         * length %u\n", c));
4482 
4483         /* decode distance base of block to copy */
4484         GRABBITS(15);           /* max bits for distance code */
4485         e = (t = td + ((uInt)b & md))->exop;
4486         do {
4487           DUMPBITS(t->bits)
4488           if (e & 16)
4489           {
4490             /* get extra bits to add to distance base */
4491             e &= 15;
4492             GRABBITS(e)         /* get extra bits (up to 13) */
4493             d = t->base + ((uInt)b & inflate_mask[e]);
4494             DUMPBITS(e)
4495             Tracevv((stderr, "inflate:         * distance %u\n", d));
4496 
4497             /* do the copy */
4498             m -= c;
4499             if ((uInt)(q - s->window) >= d)     /* offset before dest */
4500             {                                   /*  just copy */
4501               r = q - d;
4502               *q++ = *r++;  c--;        /* minimum count is three, */
4503               *q++ = *r++;  c--;        /*  so unroll loop a little */
4504             }
4505             else                        /* else offset after destination */
4506             {
4507               e = d - (q - s->window);  /* bytes from offset to end */
4508               r = s->end - e;           /* pointer to offset */
4509               if (c > e)                /* if source crosses, */
4510               {
4511                 c -= e;                 /* copy to end of window */
4512                 do {
4513                   *q++ = *r++;
4514                 } while (--e);
4515                 r = s->window;          /* copy rest from start of window */
4516               }
4517             }
4518             do {                        /* copy all or what's left */
4519               *q++ = *r++;
4520             } while (--c);
4521             break;
4522           }
4523           else if ((e & 64) == 0)
4524             e = (t = t->next + ((uInt)b & inflate_mask[e]))->exop;
4525           else
4526           {
4527             z->msg = "invalid distance code";
4528             UNGRAB
4529             UPDATE
4530             return Z_DATA_ERROR;
4531           }
4532         } while (1);
4533         break;
4534       }
4535       if ((e & 64) == 0)
4536       {
4537         if ((e = (t = t->next + ((uInt)b & inflate_mask[e]))->exop) == 0)
4538         {
4539           DUMPBITS(t->bits)
4540           Tracevv((stderr, t->base >= 0x20 && t->base < 0x7f ?
4541                     "inflate:         * literal '%c'\n" :
4542                     "inflate:         * literal 0x%02x\n", t->base));
4543           *q++ = (Byte)t->base;
4544           m--;
4545           break;
4546         }
4547       }
4548       else if (e & 32)
4549       {
4550         Tracevv((stderr, "inflate:         * end of block\n"));
4551         UNGRAB
4552         UPDATE
4553         return Z_STREAM_END;
4554       }
4555       else
4556       {
4557         z->msg = "invalid literal/length code";
4558         UNGRAB
4559         UPDATE
4560         return Z_DATA_ERROR;
4561       }
4562     } while (1);
4563   } while (m >= 258 && n >= 10);
4564 
4565   /* not enough input or output--restore pointers and return */
4566   UNGRAB
4567   UPDATE
4568   return Z_OK;
4569 }
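/* Note on the returns above: Z_OK means the fast loop ran low on input or
   output space and the caller must finish the block a byte at a time;
   Z_STREAM_END is returned on the end-of-block code and Z_DATA_ERROR on a
   bad code.  In every case UNGRAB/UPDATE first restore the stream state so
   the caller sees consistent pointers and counts. */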
4570 
4571 
4572 /*+++++*/
4573 /* zutil.c -- target dependent utility functions for the compression library
4574  * Copyright (C) 1995 Jean-loup Gailly.
4575  * For conditions of distribution and use, see copyright notice in zlib.h
4576  */
4577 
4578 /* From: zutil.c,v 1.8 1995/05/03 17:27:12 jloup Exp */
4579 
4580 char *zlib_version = ZLIB_VERSION;
4581 
4582 char *z_errmsg[] = {
4583 "stream end",          /* Z_STREAM_END    1 */
4584 "",                    /* Z_OK            0 */
4585 "file error",          /* Z_ERRNO        (-1) */
4586 "stream error",        /* Z_STREAM_ERROR (-2) */
4587 "data error",          /* Z_DATA_ERROR   (-3) */
4588 "insufficient memory", /* Z_MEM_ERROR    (-4) */
4589 "buffer error",        /* Z_BUF_ERROR    (-5) */
4590 ""};
4591 
4592 
4593 /*+++++*/
4594 /* adler32.c -- compute the Adler-32 checksum of a data stream
4595  * Copyright (C) 1995 Mark Adler
4596  * For conditions of distribution and use, see copyright notice in zlib.h
4597  */
4598 
4599 /* From: adler32.c,v 1.6 1995/05/03 17:27:08 jloup Exp */
4600 
4601 #define BASE 65521L /* largest prime smaller than 65536 */
4602 #define NMAX 5552
4603 /* NMAX is the largest n such that 255n(n+1)/2 + (n+1)(BASE-1) <= 2^32-1 */
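/* A quick check of that bound: in the worst case s1 and s2 both start at
   BASE-1 = 65520 and every byte is 0xff, so after n bytes
   s2 <= (n+1)*65520 + 255*n*(n+1)/2.  For n = 5552 this is 4294690200,
   which still fits in 32 bits (2^32-1 = 4294967295); for n = 5553 it would
   be 4296171735, so 5552 is indeed the largest run allowed between the two
   modulo reductions in adler32() below. */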
4604 
4605 #define DO1(buf)  {s1 += *buf++; s2 += s1;}
4606 #define DO2(buf)  DO1(buf); DO1(buf);
4607 #define DO4(buf)  DO2(buf); DO2(buf);
4608 #define DO8(buf)  DO4(buf); DO4(buf);
4609 #define DO16(buf) DO8(buf); DO8(buf);
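/* DO1 is the basic Adler-32 step (s1 += byte, s2 += s1); DO2..DO16 just
   unroll it so the inner loop below can handle 16 bytes per iteration. */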
4610 
4611 /* ========================================================================= */
4612 uLong adler32(adler, buf, len)
4613     uLong adler;
4614     Bytef *buf;
4615     uInt len;
4616 {
4617     unsigned long s1 = adler & 0xffff;
4618     unsigned long s2 = (adler >> 16) & 0xffff;
4619     int k;
4620 
4621     if (buf == Z_NULL) return 1L;
4622 
4623     while (len > 0) {
4624         k = len < NMAX ? len : NMAX;
4625         len -= k;
4626         while (k >= 16) {
4627             DO16(buf);
4628             k -= 16;
4629         }
4630         if (k != 0) do {
4631             DO1(buf);
4632         } while (--k);
4633         s1 %= BASE;
4634         s2 %= BASE;
4635     }
4636     return (s2 << 16) | s1;
4637 }
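
/*
 * Example use (a sketch, not part of the library): the Adler-32 of a buffer
 * can be computed incrementally.  adler32(0L, Z_NULL, 0) just returns the
 * initial value 1, so the hypothetical helper below starts from scratch.
 */
#if 0
local uLong example_adler32_of(buf, len)
    Bytef *buf;
    uInt len;
{
    uLong a = adler32(0L, Z_NULL, 0);   /* initial value, always 1 */
    return adler32(a, buf, len);        /* fold the data into the sum */
}
#endif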
4638