/*	$NetBSD: zlib.c,v 1.30 2008/05/05 13:41:30 ad Exp $	*/

/*
 * This file is derived from various .h and .c files from the zlib-1.0.4
 * distribution by Jean-loup Gailly and Mark Adler, with some additions
 * by Paul Mackerras to aid in implementing Deflate compression and
 * decompression for PPP packets.  See zlib.h for conditions of
 * distribution and use.
 *
 * Changes that have been made include:
 * - added Z_PACKET_FLUSH (see zlib.h for details)
 * - added inflateIncomp and deflateOutputPending
 * - allow strm->next_out to be NULL, meaning discard the output
 *
 * $Id: zlib.c,v 1.30 2008/05/05 13:41:30 ad Exp $
 */

/*
 *  ==FILEVERSION 020312==
 *
 * This marker is used by the Linux installation script to determine
 * whether an up-to-date version of this file is already installed.
 */

#include <sys/cdefs.h>
__KERNEL_RCSID(0, "$NetBSD: zlib.c,v 1.30 2008/05/05 13:41:30 ad Exp $");

#define NO_DUMMY_DECL
#define NO_ZCFUNCS
#define MY_ZCALLOC

#if defined(__FreeBSD__) && (defined(KERNEL) || defined(_KERNEL))
#define	inflate	inflate_ppp	/* FreeBSD already has an inflate :-( */
#endif


/* +++ zutil.h */

/* zutil.h -- internal interface and configuration of the compression library
 * Copyright (C) 1995-2002 Jean-loup Gailly.
 * For conditions of distribution and use, see copyright notice in zlib.h
 */

/* WARNING: this file should *not* be used by applications. It is
   part of the implementation of the compression library and is
   subject to change. Applications should only use zlib.h.
 */

/* @(#) $Id: zlib.c,v 1.30 2008/05/05 13:41:30 ad Exp $ */

#ifndef _Z_UTIL_H
#define _Z_UTIL_H

#include "zlib.h"

#if defined(KERNEL) || defined(_KERNEL)
/* Assume this is a *BSD or SVR4 kernel */
#include <sys/param.h>
#include <sys/time.h>
#include <sys/systm.h>
#  define HAVE_MEMCPY
#else
#if defined(__KERNEL__)
/* Assume this is a Linux kernel */
#include <linux/string.h>
#define HAVE_MEMCPY

#else /* not kernel */

#if defined(__NetBSD__) && (defined(_KERNEL) || defined(_STANDALONE))

/* XXX doesn't seem to need anything at all, but this is for consistency. */
#  include <lib/libkern/libkern.h>

#else
#  include <sys/types.h>
#  include <sys/param.h>
#ifdef STDC
#  include <stddef.h>
#  include <string.h>
#  include <stdlib.h>
#endif
#ifdef NO_ERRNO_H
    extern int errno;
#else
#  include <errno.h>
#endif
#endif /* __NetBSD__ && _STANDALONE */
#endif /* __KERNEL__ */
#endif /* _KERNEL || KERNEL */

#ifndef local
#  define local static
#endif
/* compile with -Dlocal if your debugger can't find static symbols */

typedef unsigned char  uch;
typedef uch FAR uchf;
typedef unsigned short ush;
typedef ush FAR ushf;
typedef unsigned long  ulg;

extern const char *z_errmsg[10]; /* indexed by 2-zlib_error */
/* (size given to avoid silly warnings with Visual C++) */

#define ERR_MSG(err) z_errmsg[Z_NEED_DICT-(err)]

#define ERR_RETURN(strm,err) \
  return (strm->msg = ERR_MSG(err), (err))
/* To be used only when the state is known to be valid */
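/*
 * Illustrative sketch (added annotation, not part of the original source):
 * how the ERR_MSG() indexing works.  Z_NEED_DICT is 2 and the zlib error
 * codes run from Z_NEED_DICT (2) down to Z_VERSION_ERROR (-6), so
 * Z_NEED_DICT-(err) maps each code onto z_errmsg[0..8].
 */
#if 0
static const char *example_strerror(int err)
{
	/* e.g. err == Z_BUF_ERROR (-5) -> z_errmsg[2 - (-5)] == z_errmsg[7] */
	if (err < Z_VERSION_ERROR || err > Z_NEED_DICT)
		return "unknown zlib error";
	return ERR_MSG(err);
}
#endif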

        /* common constants */

#ifndef DEF_WBITS
#  define DEF_WBITS MAX_WBITS
#endif
/* default windowBits for decompression. MAX_WBITS is for compression only */

#if MAX_MEM_LEVEL >= 8
#  define DEF_MEM_LEVEL 8
#else
#  define DEF_MEM_LEVEL  MAX_MEM_LEVEL
#endif
/* default memLevel */

#define STORED_BLOCK 0
#define STATIC_TREES 1
#define DYN_TREES    2
/* The three kinds of block type */

#define MIN_MATCH  3
#define MAX_MATCH  258
/* The minimum and maximum match lengths */

#define PRESET_DICT 0x20 /* preset dictionary flag in zlib header */

        /* target dependencies */

#ifdef MSDOS
#  define OS_CODE  0x00
#  if defined(__TURBOC__) || defined(__BORLANDC__)
#    if(__STDC__ == 1) && (defined(__LARGE__) || defined(__COMPACT__))
       /* Allow compilation with ANSI keywords only enabled */
       void _Cdecl farfree( void *block );
       void *_Cdecl farmalloc( unsigned long nbytes );
#    else
#     include <alloc.h>
#    endif
#  else /* MSC or DJGPP */
#    include <malloc.h>
#  endif
#endif

#ifdef OS2
#  define OS_CODE  0x06
#endif

#ifdef WIN32 /* Windows 95 & Windows NT */
#  define OS_CODE  0x0b
#endif

#if defined(VAXC) || defined(VMS)
#  define OS_CODE  0x02
#  define F_OPEN(name, mode) \
     fopen((name), (mode), "mbc=60", "ctx=stm", "rfm=fix", "mrs=512")
#endif

#ifdef AMIGA
#  define OS_CODE  0x01
#endif

#if defined(ATARI) || defined(atarist)
#  define OS_CODE  0x05
#endif

#if defined(MACOS) || defined(TARGET_OS_MAC)
#  define OS_CODE  0x07
#  if defined(__MWERKS__) && __dest_os != __be_os && __dest_os != __win32_os
#    include <unix.h> /* for fdopen */
#  else
#    ifndef fdopen
#      define fdopen(fd,mode) NULL /* No fdopen() */
#    endif
#  endif
#endif

#ifdef __50SERIES /* Prime/PRIMOS */
#  define OS_CODE  0x0F
#endif

#ifdef TOPS20
#  define OS_CODE  0x0a
#endif

#if defined(_BEOS_) || defined(RISCOS)
#  define fdopen(fd,mode) NULL /* No fdopen() */
#endif

#if (defined(_MSC_VER) && (_MSC_VER > 600))
#  define fdopen(fd,type)  _fdopen(fd,type)
#endif


        /* Common defaults */

#ifndef OS_CODE
#  define OS_CODE  0x03  /* assume Unix */
#endif

#ifndef F_OPEN
#  define F_OPEN(name, mode) fopen((name), (mode))
#endif

         /* functions */

#ifdef HAVE_STRERROR
   extern char *strerror __P((int));
#  define zstrerror(errnum) strerror(errnum)
#else
#  define zstrerror(errnum) ""
#endif

#if defined(pyr)
#  define NO_MEMCPY
#endif
#if defined(SMALL_MEDIUM) && !defined(_MSC_VER) && !defined(__SC__)
 /* Use our own functions for small and medium model with MSC <= 5.0.
  * You may have to use the same strategy for Borland C (untested).
  * The __SC__ check is for Symantec.
  */
#  define NO_MEMCPY
#endif
#if defined(STDC) && !defined(HAVE_MEMCPY) && !defined(NO_MEMCPY)
#  define HAVE_MEMCPY
#endif
#ifdef HAVE_MEMCPY
#  ifdef SMALL_MEDIUM /* MSDOS small or medium model */
#    define zmemcpy _fmemcpy
#    define zmemcmp _fmemcmp
#    define zmemzero(dest, len) _fmemset(dest, 0, len)
#  else
#    define zmemcpy memcpy
#    define zmemcmp memcmp
#    define zmemzero(dest, len) memset(dest, 0, len)
#  endif
#else
   extern void zmemcpy  __P((Bytef* dest, const Bytef* source, uInt len));
   extern int  zmemcmp  __P((const Bytef* s1, const Bytef* s2, uInt len));
   extern void zmemzero __P((Bytef* dest, uInt len));
#endif
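/*
 * Illustrative sketch (added annotation, not part of the original source):
 * the zmem* wrappers are used throughout this file instead of the bare
 * libc names so that the far-pointer variants can be substituted on
 * segmented targets.
 */
#if 0
static void example_zmem(Bytef *dst, const Bytef *src, uInt len)
{
	zmemzero(dst, len);              /* memset(dst, 0, len) on most targets */
	zmemcpy(dst, src, len);          /* memcpy() on most targets */
	if (zmemcmp(dst, src, len) != 0) /* memcmp() on most targets */
		; /* cannot happen after the copy above */
}
#endif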
/* Diagnostic functions */
#if defined(DEBUG_ZLIB) && !defined(_KERNEL) && !defined(_STANDALONE)
#  include <stdio.h>
   extern int z_verbose;
   extern void z_error    __P((char *m));
#  define Assert(cond,msg) {if(!(cond)) z_error(msg);}
#  define Trace(x) {if (z_verbose>=0) fprintf x ;}
#  define Tracev(x) {if (z_verbose>0) fprintf x ;}
#  define Tracevv(x) {if (z_verbose>1) fprintf x ;}
#  define Tracec(c,x) {if (z_verbose>0 && (c)) fprintf x ;}
#  define Tracecv(c,x) {if (z_verbose>1 && (c)) fprintf x ;}
#else
#  define Assert(cond,msg)
#  define Trace(x)
#  define Tracev(x)
#  define Tracevv(x)
#  define Tracec(c,x)
#  define Tracecv(c,x)
#endif


typedef uLong (ZEXPORT *check_func) __P((uLong check, const Bytef *buf,
					 uInt len));
voidpf zcalloc __P((voidpf opaque, unsigned items, unsigned size));
void   zcfree  __P((voidpf opaque, voidpf ptr));

#define ZALLOC(strm, items, size) \
           (*((strm)->zalloc))((strm)->opaque, (items), (size))
#define ZFREE(strm, addr)  (*((strm)->zfree))((strm)->opaque, (voidpf)(addr))
#define TRY_FREE(s, p) {if (p) ZFREE(s, p);}

#endif /* _Z_UTIL_H */
/* --- zutil.h */

/* +++ deflate.h */

/* deflate.h -- internal compression state
 * Copyright (C) 1995-2002 Jean-loup Gailly
 * For conditions of distribution and use, see copyright notice in zlib.h
 */

/* WARNING: this file should *not* be used by applications. It is
   part of the implementation of the compression library and is
   subject to change. Applications should only use zlib.h.
 */

/* @(#) $Id: zlib.c,v 1.30 2008/05/05 13:41:30 ad Exp $ */

#ifndef _DEFLATE_H
#define _DEFLATE_H

/* #include "zutil.h" */

/* ===========================================================================
 * Internal compression state.
 */

#define LENGTH_CODES 29
/* number of length codes, not counting the special END_BLOCK code */

#define LITERALS  256
/* number of literal bytes 0..255 */

#define L_CODES (LITERALS+1+LENGTH_CODES)
/* number of Literal or Length codes, including the END_BLOCK code */

#define D_CODES   30
/* number of distance codes */

#define BL_CODES  19
/* number of codes used to transfer the bit lengths */

#define HEAP_SIZE (2*L_CODES+1)
/* maximum heap size */

#define MAX_BITS 15
/* All codes must not exceed MAX_BITS bits */

#define INIT_STATE    42
#define BUSY_STATE   113
#define FINISH_STATE 666
/* Stream status */


/* Data structure describing a single value and its code string. */
typedef struct ct_data_s {
    union {
        ush  freq;       /* frequency count */
        ush  code;       /* bit string */
    } fc;
    union {
        ush  dad;        /* father node in Huffman tree */
        ush  len;        /* length of bit string */
    } dl;
} FAR ct_data;

#define Freq fc.freq
#define Code fc.code
#define Dad  dl.dad
#define Len  dl.len

typedef struct static_tree_desc_s  static_tree_desc;

typedef struct tree_desc_s {
    ct_data *dyn_tree;           /* the dynamic tree */
    int     max_code;            /* largest code with non zero frequency */
    static_tree_desc *stat_desc; /* the corresponding static tree */
} FAR tree_desc;

typedef ush Pos;
typedef Pos FAR Posf;
typedef unsigned IPos;

/* A Pos is an index in the character window. We use short instead of int to
 * save space in the various tables. IPos is used only for parameter passing.
 */
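/*
 * Illustrative sketch (added annotation, not part of the original source):
 * the fc union lets the same 16-bit slot hold a symbol's frequency while
 * the Huffman tree is being built, and its bit string once code
 * assignment overwrites it.
 */
#if 0
static void example_ct_data(void)
{
	ct_data node;

	node.Freq = 42;		/* counting phase: fc.freq */
	/* ... tree construction runs here ... */
	node.Code = 0x5;	/* emission phase: fc.code reuses the slot */
	node.Len  = 3;		/* dl.len: the code is 3 bits long */
	(void)node;
}
#endif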
typedef struct deflate_state {
    z_streamp strm;      /* pointer back to this zlib stream */
    int   status;        /* as the name implies */
    Bytef *pending_buf;  /* output still pending */
    ulg   pending_buf_size; /* size of pending_buf */
    Bytef *pending_out;  /* next pending byte to output to the stream */
    int   pending;       /* nb of bytes in the pending buffer */
    int   noheader;      /* suppress zlib header and adler32 */
    Byte  data_type;     /* UNKNOWN, BINARY or ASCII */
    Byte  method;        /* STORED (for zip only) or DEFLATED */
    int   last_flush;    /* value of flush param for previous deflate call */

                /* used by deflate.c: */

    uInt  w_size;        /* LZ77 window size (32K by default) */
    uInt  w_bits;        /* log2(w_size)  (8..16) */
    uInt  w_mask;        /* w_size - 1 */

    Bytef *window;
    /* Sliding window. Input bytes are read into the second half of the window,
     * and move to the first half later to keep a dictionary of at least wSize
     * bytes. With this organization, matches are limited to a distance of
     * wSize-MAX_MATCH bytes, but this ensures that IO is always
     * performed with a length multiple of the block size. Also, it limits
     * the window size to 64K, which is quite useful on MSDOS.
     * To do: use the user input buffer as sliding window.
     */

    ulg window_size;
    /* Actual size of window: 2*wSize, except when the user input buffer
     * is directly used as sliding window.
     */

    Posf *prev;
    /* Link to older string with same hash index. To limit the size of this
     * array to 64K, this link is maintained only for the last 32K strings.
     * An index in this array is thus a window index modulo 32K.
     */

    Posf *head; /* Heads of the hash chains or NIL. */

    uInt  ins_h;          /* hash index of string to be inserted */
    uInt  hash_size;      /* number of elements in hash table */
    uInt  hash_bits;      /* log2(hash_size) */
    uInt  hash_mask;      /* hash_size-1 */

    uInt  hash_shift;
    /* Number of bits by which ins_h must be shifted at each input
     * step. It must be such that after MIN_MATCH steps, the oldest
     * byte no longer takes part in the hash key, that is:
     *   hash_shift * MIN_MATCH >= hash_bits
     */
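    /* Worked example (added annotation, not part of the original source):
     * with the default memLevel of 8, hash_bits = 8+7 = 15, so
     * deflateInit2_() below computes hash_shift = (15+3-1)/3 = 5, and
     * 5*3 = 15 >= 15 holds: after the three shifts of a MIN_MATCH window,
     * the oldest byte's bits have dropped out of the 15-bit hash key.
     */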
    long block_start;
    /* Window position at the beginning of the current output block. Gets
     * negative when the window is moved backwards.
     */

    uInt match_length;           /* length of best match */
    IPos prev_match;             /* previous match */
    int match_available;         /* set if previous match exists */
    uInt strstart;               /* start of string to insert */
    uInt match_start;            /* start of matching string */
    uInt lookahead;              /* number of valid bytes ahead in window */

    uInt prev_length;
    /* Length of the best match at previous step. Matches not greater than this
     * are discarded. This is used in the lazy match evaluation.
     */

    uInt max_chain_length;
    /* To speed up deflation, hash chains are never searched beyond this
     * length.  A higher limit improves compression ratio but degrades the
     * speed.
     */

    uInt max_lazy_match;
    /* Attempt to find a better match only when the current match is strictly
     * smaller than this value. This mechanism is used only for compression
     * levels >= 4.
     */
#   define max_insert_length  max_lazy_match
    /* Insert new strings in the hash table only if the match length is not
     * greater than this length. This saves time but degrades compression.
     * max_insert_length is used only for compression levels <= 3.
     */

    int level;    /* compression level (1..9) */
    int strategy; /* favor or force Huffman coding */

    uInt good_match;
    /* Use a faster search when the previous match is longer than this */

    int nice_match; /* Stop searching when current match exceeds this */

                /* used by trees.c: */
    /* Didn't use ct_data typedef below to suppress compiler warning */
    struct ct_data_s dyn_ltree[HEAP_SIZE];   /* literal and length tree */
    struct ct_data_s dyn_dtree[2*D_CODES+1]; /* distance tree */
    struct ct_data_s bl_tree[2*BL_CODES+1];  /* Huffman tree for bit lengths */

    struct tree_desc_s l_desc;               /* desc. for literal tree */
    struct tree_desc_s d_desc;               /* desc. for distance tree */
    struct tree_desc_s bl_desc;              /* desc. for bit length tree */

    ush bl_count[MAX_BITS+1];
    /* number of codes at each bit length for an optimal tree */

    int heap[2*L_CODES+1];      /* heap used to build the Huffman trees */
    int heap_len;               /* number of elements in the heap */
    int heap_max;               /* element of largest frequency */
    /* The sons of heap[n] are heap[2*n] and heap[2*n+1]. heap[0] is not used.
     * The same heap array is used to build all trees.
     */

    uch depth[2*L_CODES+1];
    /* Depth of each subtree used as tie breaker for trees of equal frequency
     */

    uchf *l_buf;          /* buffer for literals or lengths */

    uInt  lit_bufsize;
    /* Size of match buffer for literals/lengths.  There are 4 reasons for
     * limiting lit_bufsize to 64K:
     *   - frequencies can be kept in 16 bit counters
     *   - if compression is not successful for the first block, all input
     *     data is still in the window so we can still emit a stored block even
     *     when input comes from standard input. (This can also be done for
     *     all blocks if lit_bufsize is not greater than 32K.)
     *   - if compression is not successful for a file smaller than 64K, we can
     *     even emit a stored file instead of a stored block (saving 5 bytes).
     *     This is applicable only for zip (not gzip or zlib).
     *   - creating new Huffman trees less frequently may not provide fast
     *     adaptation to changes in the input data statistics. (Take for
     *     example a binary file with poorly compressible code followed by
     *     a highly compressible string table.) Smaller buffer sizes give
     *     fast adaptation but have of course the overhead of transmitting
     *     trees more frequently.
     *   - I can't count above 4
     */

    uInt last_lit;      /* running index in l_buf */

    ushf *d_buf;
    /* Buffer for distances. To simplify the code, d_buf and l_buf have
     * the same number of elements. To use different lengths, an extra flag
     * array would be necessary.
     */

    ulg opt_len;        /* bit length of current block with optimal trees */
    ulg static_len;     /* bit length of current block with static trees */
    uInt matches;       /* number of string matches in current block */
    int last_eob_len;   /* bit length of EOB code for last block */

#ifdef DEBUG_ZLIB
    ulg compressed_len; /* total bit length of compressed file mod 2^32 */
    ulg bits_sent;      /* bit length of compressed data sent mod 2^32 */
#endif

    ush bi_buf;
    /* Output buffer. bits are inserted starting at the bottom (least
     * significant bits).
     */
    int bi_valid;
    /* Number of valid bits in bi_buf. All bits above the last valid bit
     * are always zero.
     */

} FAR deflate_state;
/* Output a byte on the stream.
 * IN assertion: there is enough room in pending_buf.
 */
#define put_byte(s, c) {s->pending_buf[s->pending++] = (c);}


#define MIN_LOOKAHEAD (MAX_MATCH+MIN_MATCH+1)
/* Minimum amount of lookahead, except at the end of the input file.
 * See deflate.c for comments about the MIN_MATCH+1.
 */

#define MAX_DIST(s)  ((s)->w_size-MIN_LOOKAHEAD)
/* In order to simplify the code, particularly on 16 bit machines, match
 * distances are limited to MAX_DIST instead of WSIZE.
 */
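/* Worked example (added annotation, not part of the original source):
 * for the default 32K window, MIN_LOOKAHEAD = 258+3+1 = 262 bytes, so
 * MAX_DIST(s) = 32768-262 = 32506.  No match may reach further back than
 * that, which leaves room for a full MAX_MATCH comparison plus the
 * insertion of the following string without first sliding the window.
 */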
        /* in trees.c */
void _tr_init         __P((deflate_state *s));
int  _tr_tally        __P((deflate_state *s, unsigned dist, unsigned lc));
void _tr_flush_block  __P((deflate_state *s, charf *buf, ulg stored_len,
			   int eof));
void _tr_align        __P((deflate_state *s));
void _tr_stored_block __P((deflate_state *s, charf *buf, ulg stored_len,
			   int eof));
void _tr_stored_type_only __P((deflate_state *));

#define d_code(dist) \
   ((dist) < 256 ? _dist_code[dist] : _dist_code[256+((dist)>>7)])
/* Mapping from a distance to a distance code. dist is the distance - 1 and
 * must not have side effects. _dist_code[256] and _dist_code[257] are never
 * used.
 */

#ifndef DEBUG_ZLIB
/* Inline versions of _tr_tally for speed: */

#if defined(GEN_TREES_H) || !defined(STDC)
  extern uch _length_code[];
  extern uch _dist_code[];
#else
  extern const uch _length_code[];
  extern const uch _dist_code[];
#endif

# define _tr_tally_lit(s, c, flush) \
  { uch cc = (c); \
    s->d_buf[s->last_lit] = 0; \
    s->l_buf[s->last_lit++] = cc; \
    s->dyn_ltree[cc].Freq++; \
    flush = (s->last_lit == s->lit_bufsize-1); \
  }
# define _tr_tally_dist(s, distance, length, flush) \
  { uch len = (length); \
    ush dist = (distance); \
    s->d_buf[s->last_lit] = dist; \
    s->l_buf[s->last_lit++] = len; \
    dist--; \
    s->dyn_ltree[_length_code[len]+LITERALS+1].Freq++; \
    s->dyn_dtree[d_code(dist)].Freq++; \
    flush = (s->last_lit == s->lit_bufsize-1); \
  }
#else
# define _tr_tally_lit(s, c, flush) flush = _tr_tally(s, 0, c)
# define _tr_tally_dist(s, distance, length, flush) \
              flush = _tr_tally(s, distance, length)
#endif

#endif
/* --- deflate.h */
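/*
 * Illustrative sketch (added annotation, not part of the original source):
 * how the two tally macros record LZ77 output.  A literal byte goes into
 * l_buf with a zero distance; a (length,distance) pair stores the biased
 * length byte and the distance, and bumps the length- and distance-code
 * frequencies that trees.c later turns into Huffman trees.
 */
#if 0
static void example_tally(deflate_state *s)
{
	int bflush;	/* set when the literal buffer is nearly full */

	/* a literal byte: distance 0, the byte itself in l_buf */
	_tr_tally_lit(s, 'A', bflush);

	/* a match of length 10 at distance 4096: the length is stored
	 * biased by MIN_MATCH, exactly as deflate_fast()/deflate_slow()
	 * pass it below */
	_tr_tally_dist(s, 4096, 10 - MIN_MATCH, bflush);
}
#endif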
/* +++ deflate.c */

/* deflate.c -- compress data using the deflation algorithm
 * Copyright (C) 1995-2002 Jean-loup Gailly.
 * For conditions of distribution and use, see copyright notice in zlib.h
 */

/*
 *  ALGORITHM
 *
 *      The "deflation" process depends on being able to identify portions
 *      of the input text which are identical to earlier input (within a
 *      sliding window trailing behind the input currently being processed).
 *
 *      The most straightforward technique turns out to be the fastest for
 *      most input files: try all possible matches and select the longest.
 *      The key feature of this algorithm is that insertions into the string
 *      dictionary are very simple and thus fast, and deletions are avoided
 *      completely. Insertions are performed at each input character, whereas
 *      string matches are performed only when the previous match ends. So it
 *      is preferable to spend more time in matches to allow very fast string
 *      insertions and avoid deletions. The matching algorithm for small
 *      strings is inspired by that of Rabin & Karp. A brute force approach
 *      is used to find longer strings when a small match has been found.
 *      A similar algorithm is used in comic (by Jan-Mark Wams) and freeze
 *      (by Leonid Broukhis).
 *         A previous version of this file used a more sophisticated algorithm
 *      (by Fiala and Greene) which is guaranteed to run in linear amortized
 *      time, but has a larger average cost, uses more memory and is patented.
 *      However the F&G algorithm may be faster for some highly redundant
 *      files if the parameter max_chain_length (described below) is too large.
 *
 *  ACKNOWLEDGEMENTS
 *
 *      The idea of lazy evaluation of matches is due to Jan-Mark Wams, and
 *      I found it in 'freeze' written by Leonid Broukhis.
 *      Thanks to many people for bug reports and testing.
 *
 *  REFERENCES
 *
 *      Deutsch, L.P., "DEFLATE Compressed Data Format Specification".
 *      Available in ftp://ds.internic.net/rfc/rfc1951.txt
 *
 *      A description of the Rabin and Karp algorithm is given in the book
 *         "Algorithms" by R. Sedgewick, Addison-Wesley, p252.
 *
 *      Fiala,E.R., and Greene,D.H.
 *         Data Compression with Finite Windows, Comm.ACM, 32,4 (1989) 490-505
 *
 */

/* @(#) $Id: zlib.c,v 1.30 2008/05/05 13:41:30 ad Exp $ */

/* #include "deflate.h" */

const char deflate_copyright[] =
   " deflate 1.1.4 Copyright 1995-2002 Jean-loup Gailly ";
/*
  If you use the zlib library in a product, an acknowledgment is welcome
  in the documentation of your product. If for some reason you cannot
  include such an acknowledgment, I would appreciate that you keep this
  copyright string in the executable of your product.
 */

/* ===========================================================================
 *  Function prototypes.
 */
typedef enum {
    need_more,      /* block not completed, need more input or more output */
    block_done,     /* block flush performed */
    finish_started, /* finish started, need only more output at next deflate */
    finish_done     /* finish done, accept no more input or output */
} block_state;

typedef block_state (*compress_func) __P((deflate_state *s, int flush));
/* Compression function. Returns the block state after the call. */

local void fill_window    __P((deflate_state *s));
local block_state deflate_stored __P((deflate_state *s, int flush));
local block_state deflate_fast   __P((deflate_state *s, int flush));
local block_state deflate_slow   __P((deflate_state *s, int flush));
local void lm_init        __P((deflate_state *s));
local void putShortMSB    __P((deflate_state *s, uInt b));
local void flush_pending  __P((z_streamp strm));
local int read_buf        __P((z_streamp strm, Bytef *buf, unsigned size));
#ifdef ASMV
      void match_init __P((void)); /* asm code initialization */
      uInt longest_match  __P((deflate_state *s, IPos cur_match));
#else
local uInt longest_match  __P((deflate_state *s, IPos cur_match));
#endif

#ifdef DEBUG_ZLIB
local  void check_match __P((deflate_state *s, IPos start, IPos match,
                             int length));
#endif

/* ===========================================================================
 * Local data
 */

#define NIL 0
/* Tail of hash chains */

#ifndef TOO_FAR
#  define TOO_FAR 4096
#endif
/* Matches of length 3 are discarded if their distance exceeds TOO_FAR */

#define MIN_LOOKAHEAD (MAX_MATCH+MIN_MATCH+1)
/* Minimum amount of lookahead, except at the end of the input file.
 * See deflate.c for comments about the MIN_MATCH+1.
 */

/* Values for max_lazy_match, good_match and max_chain_length, depending on
 * the desired pack level (0..9). The values given below have been tuned to
 * exclude worst case performance for pathological files. Better values may be
 * found for specific files.
 */
typedef struct config_s {
   ush good_length; /* reduce lazy search above this match length */
   ush max_lazy;    /* do not perform lazy search above this match length */
   ush nice_length; /* quit search above this match length */
   ush max_chain;
   compress_func func;
} config;

local const config configuration_table[10] = {
/*      good lazy nice chain */
/* 0 */ {0,    0,  0,    0, deflate_stored},  /* store only */
/* 1 */ {4,    4,  8,    4, deflate_fast}, /* maximum speed, no lazy matches */
/* 2 */ {4,    5, 16,    8, deflate_fast},
/* 3 */ {4,    6, 32,   32, deflate_fast},

/* 4 */ {4,    4, 16,   16, deflate_slow},  /* lazy matches */
/* 5 */ {8,   16, 32,   32, deflate_slow},
/* 6 */ {8,   16, 128, 128, deflate_slow},
/* 7 */ {8,   32, 128, 256, deflate_slow},
/* 8 */ {32, 128, 258, 1024, deflate_slow},
/* 9 */ {32, 258, 258, 4096, deflate_slow}}; /* maximum compression */

/* Note: the deflate() code requires max_lazy >= MIN_MATCH and max_chain >= 4
 * For deflate_fast() (levels <= 3) good is ignored and lazy has a different
 * meaning.
 */
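/* Reading example (added annotation, not part of the original source):
 * at the default level 6, the row {8, 16, 128, 128, deflate_slow} means:
 * follow a hash chain at most 128 links (max_chain); stop searching as
 * soon as a match of 128 bytes is found (nice_length); skip the lazy
 * search entirely when the current match is already 16 bytes or longer
 * (max_lazy); and cut the chain search to a quarter once the previous
 * match reached 8 bytes (good_length).
 */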
#define EQUAL 0
/* result of memcmp for equal strings */

#ifndef NO_DUMMY_DECL
struct static_tree_desc_s {int dummy;}; /* for buggy compilers */
#endif

/* ===========================================================================
 * Update a hash value with the given input byte
 * IN  assertion: all calls to UPDATE_HASH are made with consecutive
 *    input characters, so that a running hash key can be computed from the
 *    previous key instead of complete recalculation each time.
 */
#define UPDATE_HASH(s,h,c) (h = (((h)<<s->hash_shift) ^ (c)) & s->hash_mask)


/* ===========================================================================
 * Insert string str in the dictionary and set match_head to the previous head
 * of the hash chain (the most recent string with same hash key). Return
 * the previous length of the hash chain.
 * If this file is compiled with -DFASTEST, the compression level is forced
 * to 1, and no hash chains are maintained.
 * IN  assertion: all calls to INSERT_STRING are made with consecutive
 *    input characters and the first MIN_MATCH bytes of str are valid
 *    (except for the last MIN_MATCH-1 bytes of the input file).
 */
#ifdef FASTEST
#define INSERT_STRING(s, str, match_head) \
   (UPDATE_HASH(s, s->ins_h, s->window[(str) + (MIN_MATCH-1)]), \
    match_head = s->head[s->ins_h], \
    s->head[s->ins_h] = (Pos)(str))
#else
#define INSERT_STRING(s, str, match_head) \
   (UPDATE_HASH(s, s->ins_h, s->window[(str) + (MIN_MATCH-1)]), \
    s->prev[(str) & s->w_mask] = match_head = s->head[s->ins_h], \
    s->head[s->ins_h] = (Pos)(str))
#endif
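/*
 * Illustrative sketch (added annotation, not part of the original source):
 * the rolling property of UPDATE_HASH.  Feeding MIN_MATCH consecutive
 * bytes into the hash yields the key of the 3-byte string starting at
 * the first byte; one more UPDATE_HASH call rolls the key forward by one
 * position, because hash_shift is chosen so that the oldest byte's
 * contribution is shifted out past hash_mask.
 */
#if 0
static uInt example_rolling_hash(deflate_state *s, const Bytef *p)
{
	uInt h = 0;

	UPDATE_HASH(s, h, p[0]);
	UPDATE_HASH(s, h, p[1]);
	UPDATE_HASH(s, h, p[2]);	/* h is now the key for p[0..2] */
	UPDATE_HASH(s, h, p[3]);	/* h is now the key for p[1..3] */
	return h;
}
#endif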
/* ===========================================================================
 * Initialize the hash table (avoiding 64K overflow for 16 bit systems).
 * prev[] will be initialized on the fly.
 */
#define CLEAR_HASH(s) \
    s->head[s->hash_size-1] = NIL; \
    zmemzero((Bytef *)s->head, (unsigned)(s->hash_size-1)*sizeof(*s->head));

/* ========================================================================= */
#if 0
int ZEXPORT deflateInit_(strm, level, version, stream_size)
    z_streamp strm;
    int level;
    const char *version;
    int stream_size;
{
    return deflateInit2_(strm, level, Z_DEFLATED, MAX_WBITS, DEF_MEM_LEVEL,
			 Z_DEFAULT_STRATEGY, version, stream_size);
    /* To do: ignore strm->next_in if we use it as window */
}
#endif

/* ========================================================================= */
int ZEXPORT deflateInit2_(strm, level, method, windowBits, memLevel, strategy,
		  vers, stream_size)
    z_streamp strm;
    int  level;
    int  method;
    int  windowBits;
    int  memLevel;
    int  strategy;
    const char *vers;
    int stream_size;
{
    deflate_state *s;
    int noheader = 0;
    static const char* my_version = ZLIB_VERSION;

    ushf *overlay;
    /* We overlay pending_buf and d_buf+l_buf. This works since the average
     * output size for (length,distance) codes is <= 24 bits.
     */

    if (vers == Z_NULL || vers[0] != my_version[0] ||
        stream_size != sizeof(z_stream)) {
	return Z_VERSION_ERROR;
    }
    if (strm == Z_NULL) return Z_STREAM_ERROR;

    strm->msg = Z_NULL;
#ifndef NO_ZCFUNCS
    if (strm->zalloc == Z_NULL) {
	strm->zalloc = zcalloc;
	strm->opaque = (voidpf)0;
    }
    if (strm->zfree == Z_NULL) strm->zfree = zcfree;
#endif

    if (level == Z_DEFAULT_COMPRESSION) level = 6;
#ifdef FASTEST
    level = 1;
#endif

    if (windowBits < 0) { /* undocumented feature: suppress zlib header */
        noheader = 1;
        windowBits = -windowBits;
    }
    if (memLevel < 1 || memLevel > MAX_MEM_LEVEL || method != Z_DEFLATED ||
        windowBits < 9 || windowBits > 15 || level < 0 || level > 9 ||
	strategy < 0 || strategy > Z_HUFFMAN_ONLY) {
        return Z_STREAM_ERROR;
    }
    s = (deflate_state *) ZALLOC(strm, 1, sizeof(deflate_state));
    if (s == Z_NULL) return Z_MEM_ERROR;
    strm->state = (struct internal_state FAR *)s;
    s->strm = strm;

    s->noheader = noheader;
    s->w_bits = windowBits;
    s->w_size = 1 << s->w_bits;
    s->w_mask = s->w_size - 1;

    s->hash_bits = memLevel + 7;
    s->hash_size = 1 << s->hash_bits;
    s->hash_mask = s->hash_size - 1;
    s->hash_shift =  ((s->hash_bits+MIN_MATCH-1)/MIN_MATCH);

    s->window = (Bytef *) ZALLOC(strm, s->w_size, 2*sizeof(Byte));
    s->prev   = (Posf *)  ZALLOC(strm, s->w_size, sizeof(Pos));
    s->head   = (Posf *)  ZALLOC(strm, s->hash_size, sizeof(Pos));

    s->lit_bufsize = 1 << (memLevel + 6); /* 16K elements by default */

    overlay = (ushf *) ZALLOC(strm, s->lit_bufsize, sizeof(ush)+2);
    s->pending_buf = (uchf *) overlay;
    s->pending_buf_size = (ulg)s->lit_bufsize * (sizeof(ush)+2L);

    if (s->window == Z_NULL || s->prev == Z_NULL || s->head == Z_NULL ||
        s->pending_buf == Z_NULL) {
        strm->msg = ERR_MSG(Z_MEM_ERROR);
	s->status = INIT_STATE;
        deflateEnd (strm);
        return Z_MEM_ERROR;
    }
    s->d_buf = overlay + s->lit_bufsize/sizeof(ush);
    s->l_buf = s->pending_buf + (1+sizeof(ush))*s->lit_bufsize;

    s->level = level;
    s->strategy = strategy;
    s->method = (Byte)method;

    return deflateReset(strm);
}
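/*
 * Usage sketch (added annotation, not part of the original source): a
 * caller-side view of the initialization above.  Passing a negative
 * windowBits triggers the undocumented raw-deflate mode (no zlib header,
 * no adler32 trailer), which is how the PPP code uses this file.
 * my_alloc/my_free are hypothetical caller-supplied allocators; with
 * NO_ZCFUNCS defined at the top of this file, the zalloc/zfree defaults
 * are compiled out and the caller must provide them.
 */
#if 0
static int example_init(z_stream *strm)
{
	strm->zalloc = my_alloc;	/* hypothetical allocator */
	strm->zfree  = my_free;		/* hypothetical deallocator */
	strm->opaque = NULL;

	/* level 6, raw deflate with a 32K (2^15) window, default memLevel */
	return deflateInit2_(strm, 6, Z_DEFLATED, -15, DEF_MEM_LEVEL,
			     Z_DEFAULT_STRATEGY, ZLIB_VERSION,
			     sizeof(z_stream));
}
#endif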
/* ========================================================================= */
#if 0
int ZEXPORT deflateSetDictionary (strm, dictionary, dictLength)
    z_streamp strm;
    const Bytef *dictionary;
    uInt  dictLength;
{
    deflate_state *s;
    uInt length = dictLength;
    uInt n;
    IPos hash_head = 0;

    if (strm == Z_NULL || strm->state == Z_NULL || dictionary == Z_NULL)
	return Z_STREAM_ERROR;

    s = (deflate_state *)strm->state;
    if (s->status != INIT_STATE) return Z_STREAM_ERROR;

    strm->adler = adler32(strm->adler, dictionary, dictLength);

    if (length < MIN_MATCH) return Z_OK;
    if (length > MAX_DIST(s)) {
	length = MAX_DIST(s);
#ifndef USE_DICT_HEAD
	dictionary += dictLength - length; /* use the tail of the dictionary */
#endif
    }
    zmemcpy(s->window, dictionary, length);
    s->strstart = length;
    s->block_start = (long)length;

    /* Insert all strings in the hash table (except for the last two bytes).
     * s->lookahead stays null, so s->ins_h will be recomputed at the next
     * call of fill_window.
     */
    s->ins_h = s->window[0];
    UPDATE_HASH(s, s->ins_h, s->window[1]);
    for (n = 0; n <= length - MIN_MATCH; n++) {
	INSERT_STRING(s, n, hash_head);
    }
    if (hash_head) hash_head = 0;  /* to make compiler happy */
    return Z_OK;
}
#endif

/* ========================================================================= */
int ZEXPORT deflateReset (strm)
    z_streamp strm;
{
    deflate_state *s;

    if (strm == Z_NULL || strm->state == Z_NULL ||
        strm->zalloc == Z_NULL || strm->zfree == Z_NULL) return Z_STREAM_ERROR;

    strm->total_in = strm->total_out = 0;
    strm->msg = Z_NULL; /* use zfree if we ever allocate msg dynamically */
    strm->data_type = Z_UNKNOWN;

    s = (deflate_state *)strm->state;
    s->pending = 0;
    s->pending_out = s->pending_buf;

    if (s->noheader < 0) {
        s->noheader = 0; /* was set to -1 by deflate(..., Z_FINISH); */
    }
    s->status = s->noheader ? BUSY_STATE : INIT_STATE;
    strm->adler = 1;
    s->last_flush = Z_NO_FLUSH;

    _tr_init(s);
    lm_init(s);

    return Z_OK;
}

/* ========================================================================= */
#if 0
int ZEXPORT deflateParams(strm, level, strategy)
    z_streamp strm;
    int level;
    int strategy;
{
    deflate_state *s;
    compress_func func;
    int err = Z_OK;

    if (strm == Z_NULL || strm->state == Z_NULL) return Z_STREAM_ERROR;
    s = (deflate_state *)strm->state;

    if (level == Z_DEFAULT_COMPRESSION) {
	level = 6;
    }
    if (level < 0 || level > 9 || strategy < 0 || strategy > Z_HUFFMAN_ONLY) {
	return Z_STREAM_ERROR;
    }
    func = configuration_table[s->level].func;

    if (func != configuration_table[level].func && strm->total_in != 0) {
	/* Flush the last buffer: */
	err = deflate(strm, Z_PARTIAL_FLUSH);
    }
    if (s->level != level) {
	s->level = level;
	s->max_lazy_match   = configuration_table[level].max_lazy;
	s->good_match       = configuration_table[level].good_length;
	s->nice_match       = configuration_table[level].nice_length;
	s->max_chain_length = configuration_table[level].max_chain;
    }
    s->strategy = strategy;
    return err;
}
#endif
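/*
 * Usage sketch (added annotation, not part of the original source): for
 * packet-at-a-time compression it is cheaper to deflateReset() a stream
 * between packets than to tear it down and rebuild it, since all of the
 * buffers allocated in deflateInit2_() above are kept.
 */
#if 0
static int example_recycle(z_stream *strm)
{
	/* counters, adler32, hash table and tree state are reset;
	 * window, prev, head and pending_buf allocations are reused */
	return deflateReset(strm);
}
#endif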
/* =========================================================================
 * Put a short in the pending buffer. The 16-bit value is put in MSB order.
 * IN assertion: the stream state is correct and there is enough room in
 * pending_buf.
 */
local void putShortMSB (s, b)
    deflate_state *s;
    uInt b;
{
    put_byte(s, (Byte)(b >> 8));
    put_byte(s, (Byte)(b & 0xff));
}

/* =========================================================================
 * Flush as much pending output as possible. All deflate() output goes
 * through this function so some applications may wish to modify it
 * to avoid allocating a large strm->next_out buffer and copying into it.
 * (See also read_buf()).
 */
local void flush_pending(strm)
    z_streamp strm;
{
    deflate_state *s = (deflate_state *) strm->state;
    unsigned len = s->pending;

    if (len > strm->avail_out) len = strm->avail_out;
    if (len == 0) return;

    if (strm->next_out != Z_NULL) {
	zmemcpy(strm->next_out, s->pending_out, len);
	strm->next_out += len;
    }
    s->pending_out  += len;
    strm->total_out += len;
    strm->avail_out -= len;
    s->pending      -= len;
    if (s->pending == 0) {
        s->pending_out = s->pending_buf;
    }
}

/* ========================================================================= */
int ZEXPORT deflate (strm, flush)
    z_streamp strm;
    int flush;
{
    int old_flush; /* value of flush param for previous deflate call */
    deflate_state *s;

    if (strm == Z_NULL || strm->state == Z_NULL ||
	flush > Z_FINISH || flush < 0) {
        return Z_STREAM_ERROR;
    }
    s = (deflate_state *)strm->state;

    if ((strm->next_in == Z_NULL && strm->avail_in != 0) ||
	(s->status == FINISH_STATE && flush != Z_FINISH)) {
        ERR_RETURN(strm, Z_STREAM_ERROR);
    }
    if (strm->avail_out == 0) ERR_RETURN(strm, Z_BUF_ERROR);

    s->strm = strm; /* just in case */
    old_flush = s->last_flush;
    s->last_flush = flush;

    /* Write the zlib header */
    if (s->status == INIT_STATE) {

        uInt header = (Z_DEFLATED + ((s->w_bits-8)<<4)) << 8;
        uInt level_flags = (s->level-1) >> 1;

        if (level_flags > 3) level_flags = 3;
        header |= (level_flags << 6);
	if (s->strstart != 0) header |= PRESET_DICT;
        header += 31 - (header % 31);

        s->status = BUSY_STATE;
        putShortMSB(s, header);

	/* Save the adler32 of the preset dictionary: */
	if (s->strstart != 0) {
	    putShortMSB(s, (uInt)(strm->adler >> 16));
	    putShortMSB(s, (uInt)(strm->adler & 0xffff));
	}
	strm->adler = 1L;
    }
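    /* Worked example (added annotation, not part of the original source):
     * with w_bits = 15 and level 6, header = (8 + (7<<4)) << 8 = 0x7800,
     * level_flags = 2 so header |= 2<<6 gives 0x7880, and the FCHECK
     * adjustment adds 31 - (0x7880 % 31) = 28, producing the familiar
     * zlib header bytes 0x78 0x9C.
     */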
    /* Flush as much pending output as possible */
    if (s->pending != 0) {
        flush_pending(strm);
        if (strm->avail_out == 0) {
	    /* Since avail_out is 0, deflate will be called again with
	     * more output space, but possibly with both pending and
	     * avail_in equal to zero. There won't be anything to do,
	     * but this is not an error situation so make sure we
	     * return OK instead of BUF_ERROR at next call of deflate:
             */
	    s->last_flush = -1;
	    return Z_OK;
	}

    /* Make sure there is something to do and avoid duplicate consecutive
     * flushes. For repeated and useless calls with Z_FINISH, we keep
     * returning Z_STREAM_END instead of Z_BUF_ERROR.
     */
    } else if (strm->avail_in == 0 && flush <= old_flush &&
	       flush != Z_FINISH) {
        ERR_RETURN(strm, Z_BUF_ERROR);
    }

    /* User must not provide more input after the first FINISH: */
    if (s->status == FINISH_STATE && strm->avail_in != 0) {
        ERR_RETURN(strm, Z_BUF_ERROR);
    }

    /* Start a new block or continue the current one.
     */
    if (strm->avail_in != 0 || s->lookahead != 0 ||
        (flush != Z_NO_FLUSH && s->status != FINISH_STATE)) {
        block_state bstate;

	bstate = (*(configuration_table[s->level].func))(s, flush);

        if (bstate == finish_started || bstate == finish_done) {
            s->status = FINISH_STATE;
        }
        if (bstate == need_more || bstate == finish_started) {
	    if (strm->avail_out == 0) {
	        s->last_flush = -1; /* avoid BUF_ERROR next call, see above */
	    }
	    return Z_OK;
	    /* If flush != Z_NO_FLUSH && avail_out == 0, the next call
	     * of deflate should use the same flush parameter to make sure
	     * that the flush is complete. So we don't have to output an
	     * empty block here, this will be done at next call. This also
	     * ensures that for a very small output buffer, we emit at most
	     * one empty block.
	     */
	}
        if (bstate == block_done) {
            if (flush == Z_PARTIAL_FLUSH) {
                _tr_align(s);
	    } else if (flush == Z_PACKET_FLUSH) {
		/* Output just the 3-bit `stored' block type value,
		   but not a zero length. */
		_tr_stored_type_only(s);
            } else { /* FULL_FLUSH or SYNC_FLUSH */
                _tr_stored_block(s, (char*)0, 0L, 0);
                /* For a full flush, this empty block will be recognized
                 * as a special marker by inflate_sync().
                 */
                if (flush == Z_FULL_FLUSH) {
                    CLEAR_HASH(s);             /* forget history */
                }
            }
            flush_pending(strm);
	    if (strm->avail_out == 0) {
	      s->last_flush = -1; /* avoid BUF_ERROR at next call, see above */
	      return Z_OK;
	    }
        }
    }
    Assert(strm->avail_out > 0, "bug2");

    if (flush != Z_FINISH) return Z_OK;
    if (s->noheader) return Z_STREAM_END;

    /* Write the zlib trailer (adler32) */
    putShortMSB(s, (uInt)(strm->adler >> 16));
    putShortMSB(s, (uInt)(strm->adler & 0xffff));
    flush_pending(strm);
    /* If avail_out is zero, the application will call deflate again
     * to flush the rest.
     */
    s->noheader = -1; /* write the trailer only once! */
    return s->pending != 0 ? Z_OK : Z_STREAM_END;
}

/* ========================================================================= */
int ZEXPORT deflateEnd (strm)
    z_streamp strm;
{
    int status;
    deflate_state *s;

    if (strm == Z_NULL || strm->state == Z_NULL) return Z_STREAM_ERROR;
    s = (deflate_state *) strm->state;

    status = s->status;
    if (status != INIT_STATE && status != BUSY_STATE &&
	status != FINISH_STATE) {
      return Z_STREAM_ERROR;
    }

    /* Deallocate in reverse order of allocations: */
    TRY_FREE(strm, s->pending_buf);
    TRY_FREE(strm, s->head);
    TRY_FREE(strm, s->prev);
    TRY_FREE(strm, s->window);

    ZFREE(strm, s);
    strm->state = Z_NULL;

    return status == BUSY_STATE ? Z_DATA_ERROR : Z_OK;
}
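/*
 * Usage sketch (added annotation, not part of the original source): a
 * minimal one-shot driver for the deflate() call above; buffer names and
 * the single-call assumption are illustrative only.
 */
#if 0
static int example_compress(z_stream *strm, Bytef *in, uInt in_len,
			    Bytef *out, uInt out_len)
{
	int err;

	strm->next_in   = in;
	strm->avail_in  = in_len;
	strm->next_out  = out;
	strm->avail_out = out_len;

	/* with out_len large enough, a single call completes the stream */
	err = deflate(strm, Z_FINISH);
	if (err != Z_STREAM_END)
		return err == Z_OK ? Z_BUF_ERROR : err; /* out of space */
	return Z_OK;
}
#endif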
/* =========================================================================
 * Copy the source state to the destination state.
 * To simplify the source, this is not supported for 16-bit MSDOS (which
 * doesn't have enough memory anyway to duplicate compression states).
 */
#if 0
int ZEXPORT deflateCopy (dest, source)
    z_streamp dest;
    z_streamp source;
{
#ifdef MAXSEG_64K
    return Z_STREAM_ERROR;
#else
    deflate_state *ds;
    deflate_state *ss;
    ushf *overlay;


    if (source == Z_NULL || dest == Z_NULL || source->state == Z_NULL) {
        return Z_STREAM_ERROR;
    }

    ss = (deflate_state *)source->state;

    *dest = *source;

    ds = (deflate_state *) ZALLOC(dest, 1, sizeof(deflate_state));
    if (ds == Z_NULL) return Z_MEM_ERROR;
    dest->state = (void *) ds;
    *ds = *ss;
    ds->strm = dest;

    ds->window = (Bytef *) ZALLOC(dest, ds->w_size, 2*sizeof(Byte));
    ds->prev   = (Posf *)  ZALLOC(dest, ds->w_size, sizeof(Pos));
    ds->head   = (Posf *)  ZALLOC(dest, ds->hash_size, sizeof(Pos));
    overlay = (ushf *) ZALLOC(dest, ds->lit_bufsize, sizeof(ush)+2);
    ds->pending_buf = (uchf *) overlay;

    if (ds->window == Z_NULL || ds->prev == Z_NULL || ds->head == Z_NULL ||
        ds->pending_buf == Z_NULL) {
	ds->status = INIT_STATE;
        deflateEnd (dest);
        return Z_MEM_ERROR;
    }
    /* The following zmemcpy calls do not work for 16-bit MSDOS */
    zmemcpy(ds->window, ss->window, ds->w_size * 2 * sizeof(Byte));
    zmemcpy(ds->prev, ss->prev, ds->w_size * sizeof(Pos));
    zmemcpy(ds->head, ss->head, ds->hash_size * sizeof(Pos));
    zmemcpy(ds->pending_buf, ss->pending_buf, (uInt)ds->pending_buf_size);

    ds->pending_out = ds->pending_buf + (ss->pending_out - ss->pending_buf);
    ds->d_buf = overlay + ds->lit_bufsize/sizeof(ush);
    ds->l_buf = ds->pending_buf + (1+sizeof(ush))*ds->lit_bufsize;

    ds->l_desc.dyn_tree = ds->dyn_ltree;
    ds->d_desc.dyn_tree = ds->dyn_dtree;
    ds->bl_desc.dyn_tree = ds->bl_tree;

    return Z_OK;
#endif
}
#endif

/* ===========================================================================
 * Return the number of bytes of output which are immediately available
 * from the compressor (i.e. still sitting in the pending buffer).
 */
#if 0
int deflateOutputPending (strm)
    z_streamp strm;
{
    if (strm == Z_NULL || strm->state == Z_NULL) return 0;

    return ((deflate_state *)(strm->state))->pending;
}
#endif
/* ===========================================================================
 * Read a new buffer from the current input stream, update the adler32
 * and total number of bytes read.  All deflate() input goes through
 * this function so some applications may wish to modify it to avoid
 * allocating a large strm->next_in buffer and copying from it.
 * (See also flush_pending()).
 */
local int read_buf(strm, buf, size)
    z_streamp strm;
    Bytef *buf;
    unsigned size;
{
    unsigned len = strm->avail_in;

    if (len > size) len = size;
    if (len == 0) return 0;

    strm->avail_in  -= len;

    if (!((deflate_state *)(strm->state))->noheader) {
        strm->adler = adler32(strm->adler, strm->next_in, len);
    }
    zmemcpy(buf, strm->next_in, len);
    strm->next_in  += len;
    strm->total_in += len;

    return (int)len;
}

/* ===========================================================================
 * Initialize the "longest match" routines for a new zlib stream
 */
local void lm_init (s)
    deflate_state *s;
{
    s->window_size = (ulg)2L*s->w_size;

    CLEAR_HASH(s);

    /* Set the default configuration parameters:
     */
    s->max_lazy_match   = configuration_table[s->level].max_lazy;
    s->good_match       = configuration_table[s->level].good_length;
    s->nice_match       = configuration_table[s->level].nice_length;
    s->max_chain_length = configuration_table[s->level].max_chain;

    s->strstart = 0;
    s->block_start = 0L;
    s->lookahead = 0;
    s->match_length = s->prev_length = MIN_MATCH-1;
    s->match_available = 0;
    s->ins_h = 0;
#ifdef ASMV
    match_init(); /* initialize the asm code */
#endif
}
/* ===========================================================================
 * Set match_start to the longest match starting at the given string and
 * return its length. Matches shorter or equal to prev_length are discarded,
 * in which case the result is equal to prev_length and match_start is
 * garbage.
 * IN assertions: cur_match is the head of the hash chain for the current
 *   string (strstart) and its distance is <= MAX_DIST, and prev_length >= 1
 * OUT assertion: the match length is not greater than s->lookahead.
 */
#ifndef ASMV
/* For 80x86 and 680x0, an optimized version will be provided in match.asm or
 * match.S. The code will be functionally equivalent.
 */
#ifndef FASTEST
local uInt longest_match(s, cur_match)
    deflate_state *s;
    IPos cur_match;                             /* current match */
{
    unsigned chain_length = s->max_chain_length;/* max hash chain length */
    Bytef *scan = s->window + s->strstart;      /* current string */
    Bytef *match;                               /* matched string */
    int len;                                    /* length of current match */
    int best_len = s->prev_length;              /* best match length so far */
    int nice_match = s->nice_match;             /* stop if match long enough */
    IPos limit = s->strstart > (IPos)MAX_DIST(s) ?
        s->strstart - (IPos)MAX_DIST(s) : NIL;
    /* Stop when cur_match becomes <= limit. To simplify the code,
     * we prevent matches with the string of window index 0.
     */
    Posf *prev = s->prev;
    uInt wmask = s->w_mask;

#ifdef UNALIGNED_OK
    /* Compare two bytes at a time. Note: this is not always beneficial.
     * Try with and without -DUNALIGNED_OK to check.
     */
    Bytef *strend = s->window + s->strstart + MAX_MATCH - 1;
    ush scan_start = *(ushf*)scan;
    ush scan_end   = *(ushf*)(scan+best_len-1);
#else
    Bytef *strend = s->window + s->strstart + MAX_MATCH;
    Byte scan_end1  = scan[best_len-1];
    Byte scan_end   = scan[best_len];
#endif

    /* The code is optimized for HASH_BITS >= 8 and MAX_MATCH-2 multiple of 16.
     * It is easy to get rid of this optimization if necessary.
     */
    Assert(s->hash_bits >= 8 && MAX_MATCH == 258, "Code too clever");

    /* Do not waste too much time if we already have a good match: */
    if (s->prev_length >= s->good_match) {
        chain_length >>= 2;
    }
    /* Do not look for matches beyond the end of the input. This is necessary
     * to make deflate deterministic.
     */
    if ((uInt)nice_match > s->lookahead) nice_match = s->lookahead;

    Assert((ulg)s->strstart <= s->window_size-MIN_LOOKAHEAD, "need lookahead");

    do {
        Assert(cur_match < s->strstart, "no future");
        match = s->window + cur_match;

        /* Skip to next match if the match length cannot increase
         * or if the match length is less than 2:
         */
#if (defined(UNALIGNED_OK) && MAX_MATCH == 258)
        /* This code assumes sizeof(unsigned short) == 2. Do not use
         * UNALIGNED_OK if your compiler uses a different size.
         */
        if (*(ushf*)(match+best_len-1) != scan_end ||
            *(ushf*)match != scan_start) continue;

        /* It is not necessary to compare scan[2] and match[2] since they are
         * always equal when the other bytes match, given that the hash keys
         * are equal and that HASH_BITS >= 8. Compare 2 bytes at a time at
         * strstart+3, +5, ... up to strstart+257. We check for insufficient
         * lookahead only every 4th comparison; the 128th check will be made
         * at strstart+257. If MAX_MATCH-2 is not a multiple of 8, it is
         * necessary to put more guard bytes at the end of the window, or
         * to check more often for insufficient lookahead.
         */
        Assert(scan[2] == match[2], "scan[2]?");
        scan++, match++;
        do {
        } while (*(ushf*)(scan+=2) == *(ushf*)(match+=2) &&
                 *(ushf*)(scan+=2) == *(ushf*)(match+=2) &&
                 *(ushf*)(scan+=2) == *(ushf*)(match+=2) &&
                 *(ushf*)(scan+=2) == *(ushf*)(match+=2) &&
                 scan < strend);
        /* The funny "do {}" generates better code on most compilers */

        /* Here, scan <= window+strstart+257 */
        Assert(scan <= s->window+(unsigned)(s->window_size-1), "wild scan");
        if (*scan == *match) scan++;

        len = (MAX_MATCH - 1) - (int)(strend-scan);
        scan = strend - (MAX_MATCH-1);

#else /* UNALIGNED_OK */

        if (match[best_len]   != scan_end  ||
            match[best_len-1] != scan_end1 ||
            *match            != *scan     ||
            *++match          != scan[1])      continue;

        /* The check at best_len-1 can be removed because it will be made
         * again later. (This heuristic is not always a win.)
         * It is not necessary to compare scan[2] and match[2] since they
         * are always equal when the other bytes match, given that
         * the hash keys are equal and that HASH_BITS >= 8.
         */
        scan += 2, match++;
        Assert(*scan == *match, "match[2]?");
        /* We check for insufficient lookahead only every 8th comparison;
         * the 256th check will be made at strstart+258.
         */
        do {
        } while (*++scan == *++match && *++scan == *++match &&
                 *++scan == *++match && *++scan == *++match &&
                 *++scan == *++match && *++scan == *++match &&
                 *++scan == *++match && *++scan == *++match &&
                 scan < strend);

        Assert(scan <= s->window+(unsigned)(s->window_size-1), "wild scan");

        len = MAX_MATCH - (int)(strend - scan);
        scan = strend - MAX_MATCH;

#endif /* UNALIGNED_OK */

        if (len > best_len) {
            s->match_start = cur_match;
            best_len = len;
            if (len >= nice_match) break;
#ifdef UNALIGNED_OK
            scan_end = *(ushf*)(scan+best_len-1);
#else
            scan_end1  = scan[best_len-1];
            scan_end   = scan[best_len];
#endif
        }
    } while ((cur_match = prev[cur_match & wmask]) > limit
             && --chain_length != 0);

    if ((uInt)best_len <= s->lookahead) return (uInt)best_len;
    return s->lookahead;
}

#else /* FASTEST */
/* ---------------------------------------------------------------------------
 * Optimized version for level == 1 only
 */
local uInt longest_match(s, cur_match)
    deflate_state *s;
    IPos cur_match;                             /* current match */
{
    register Bytef *scan = s->window + s->strstart; /* current string */
    register Bytef *match;                      /* matched string */
    register int len;                           /* length of current match */
    register Bytef *strend = s->window + s->strstart + MAX_MATCH;

    /* The code is optimized for HASH_BITS >= 8 and MAX_MATCH-2 multiple of 16.
     * It is easy to get rid of this optimization if necessary.
     */
    Assert(s->hash_bits >= 8 && MAX_MATCH == 258, "Code too clever");

    Assert((ulg)s->strstart <= s->window_size-MIN_LOOKAHEAD, "need lookahead");

    Assert(cur_match < s->strstart, "no future");

    match = s->window + cur_match;

    /* Return failure if the match length is less than 2:
     */
    if (match[0] != scan[0] || match[1] != scan[1]) return MIN_MATCH-1;

    /* The check at best_len-1 can be removed because it will be made
     * again later. (This heuristic is not always a win.)
     * It is not necessary to compare scan[2] and match[2] since they
     * are always equal when the other bytes match, given that
     * the hash keys are equal and that HASH_BITS >= 8.
     */
    scan += 2, match += 2;
    Assert(*scan == *match, "match[2]?");

    /* We check for insufficient lookahead only every 8th comparison;
     * the 256th check will be made at strstart+258.
     */
    do {
    } while (*++scan == *++match && *++scan == *++match &&
             *++scan == *++match && *++scan == *++match &&
             *++scan == *++match && *++scan == *++match &&
             *++scan == *++match && *++scan == *++match &&
             scan < strend);

    Assert(scan <= s->window+(unsigned)(s->window_size-1), "wild scan");

    len = MAX_MATCH - (int)(strend - scan);

    if (len < MIN_MATCH) return MIN_MATCH - 1;

    s->match_start = cur_match;
    return len <= s->lookahead ? len : s->lookahead;
}
#endif /* FASTEST */
#endif /* ASMV */
#ifdef DEBUG_ZLIB
/* ===========================================================================
 * Check that the match at match_start is indeed a match.
 */
local void check_match(s, start, match, length)
    deflate_state *s;
    IPos start, match;
    int length;
{
    /* check that the match is indeed a match */
    if (zmemcmp(s->window + match,
                s->window + start, length) != EQUAL) {
        fprintf(stderr, " start %u, match %u, length %d\n",
		start, match, length);
        do {
	    fprintf(stderr, "%c%c", s->window[match++], s->window[start++]);
	} while (--length != 0);
        z_error("invalid match");
    }
    if (z_verbose > 1) {
        fprintf(stderr,"\\[%d,%d]", start-match, length);
        do { putc(s->window[start++], stderr); } while (--length != 0);
    }
}
#else
#  define check_match(s, start, match, length)
#endif

/* ===========================================================================
 * Fill the window when the lookahead becomes insufficient.
 * Updates strstart and lookahead.
 *
 * IN assertion: lookahead < MIN_LOOKAHEAD
 * OUT assertions: strstart <= window_size-MIN_LOOKAHEAD
 *    At least one byte has been read, or avail_in == 0; reads are
 *    performed for at least two bytes (required for the zip translate_eol
 *    option -- not supported here).
 */
local void fill_window(s)
    deflate_state *s;
{
    unsigned n, m;
    Posf *p;
    unsigned more;    /* Amount of free space at the end of the window. */
    uInt wsize = s->w_size;

    do {
        more = (unsigned)(s->window_size -(ulg)s->lookahead -(ulg)s->strstart);

        /* Deal with !@#$% 64K limit: */
        if (more == 0 && s->strstart == 0 && s->lookahead == 0) {
            more = wsize;

        } else if (more == (unsigned)(-1)) {
            /* Very unlikely, but possible on 16 bit machine if strstart == 0
             * and lookahead == 1 (input done one byte at time)
             */
            more--;

        /* If the window is almost full and there is insufficient lookahead,
         * move the upper half to the lower one to make room in the upper half.
         */
        } else if (s->strstart >= wsize+MAX_DIST(s)) {

            zmemcpy(s->window, s->window+wsize, (unsigned)wsize);
            s->match_start -= wsize;
            s->strstart    -= wsize; /* we now have strstart >= MAX_DIST */
            s->block_start -= (long) wsize;

            /* Slide the hash table (could be avoided with 32 bit values
               at the expense of memory usage). We slide even when level == 0
               to keep the hash table consistent if we switch back to level > 0
               later. (Using level 0 permanently is not an optimal usage of
               zlib, so we don't care about this pathological case.)
             */
	    n = s->hash_size;
	    p = &s->head[n];
	    do {
		m = *--p;
		*p = (Pos)(m >= wsize ? m-wsize : NIL);
	    } while (--n);

	    n = wsize;
#ifndef FASTEST
	    p = &s->prev[n];
	    do {
		m = *--p;
		*p = (Pos)(m >= wsize ? m-wsize : NIL);
		/* If n is not on any hash chain, prev[n] is garbage but
		 * its value will never be used.
		 */
	    } while (--n);
#endif
            more += wsize;
        }
        if (s->strm->avail_in == 0) return;
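        /* Worked example (added annotation, not part of the original
         * source): with the default 32K window, window_size = 65536 and
         * the slide above triggers once strstart reaches wsize+MAX_DIST(s)
         * = 32768+32506 = 65274; the upper 32K half is copied down and
         * all window-relative indices are rebased by 32768.
         */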
1675 * Otherwise, window_size == 2*WSIZE so more >= 2. 1676 * If there was sliding, more >= WSIZE. So in all cases, more >= 2. 1677 */ 1678 Assert(more >= 2, "more < 2"); 1679 1680 n = read_buf(s->strm, s->window + s->strstart + s->lookahead, more); 1681 s->lookahead += n; 1682 1683 /* Initialize the hash value now that we have some input: */ 1684 if (s->lookahead >= MIN_MATCH) { 1685 s->ins_h = s->window[s->strstart]; 1686 UPDATE_HASH(s, s->ins_h, s->window[s->strstart+1]); 1687 #if MIN_MATCH != 3 1688 Call UPDATE_HASH() MIN_MATCH-3 more times 1689 #endif 1690 } 1691 /* If the whole input has less than MIN_MATCH bytes, ins_h is garbage, 1692 * but this is not important since only literal bytes will be emitted. 1693 */ 1694 1695 } while (s->lookahead < MIN_LOOKAHEAD && s->strm->avail_in != 0); 1696 } 1697 1698 /* =========================================================================== 1699 * Flush the current block, with given end-of-file flag. 1700 * IN assertion: strstart is set to the end of the current match. 1701 */ 1702 #define FLUSH_BLOCK_ONLY(s, eof) { \ 1703 _tr_flush_block(s, (s->block_start >= 0L ? \ 1704 (charf *)&s->window[(unsigned)s->block_start] : \ 1705 (charf *)Z_NULL), \ 1706 (ulg)((long)s->strstart - s->block_start), \ 1707 (eof)); \ 1708 s->block_start = s->strstart; \ 1709 flush_pending(s->strm); \ 1710 Tracev((stderr,"[FLUSH]")); \ 1711 } 1712 1713 /* Same but force premature exit if necessary. */ 1714 #define FLUSH_BLOCK(s, eof) { \ 1715 FLUSH_BLOCK_ONLY(s, eof); \ 1716 if (s->strm->avail_out == 0) return (eof) ? finish_started : need_more; \ 1717 } 1718 1719 /* =========================================================================== 1720 * Copy without compression as much as possible from the input stream, return 1721 * the current block state. 1722 * This function does not insert new strings in the dictionary since 1723 * uncompressible data is probably not useful. This function is used 1724 * only for the level=0 compression option. 1725 * NOTE: this function should be optimized to avoid extra copying from 1726 * window to pending_buf. 
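 *
 * (For reference, the 5 byte overhead per stored block accounted for
 * below is the one-byte block header -- 3 header bits padded out to a
 * byte boundary -- followed by the 16-bit LEN count and its one's
 * complement NLEN, as the deflate format prescribes.)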
1727 */
1728 local block_state deflate_stored(s, flush)
1729 deflate_state *s;
1730 int flush;
1731 {
1732 /* Stored blocks are limited to 0xffff bytes, pending_buf is limited
1733 * to pending_buf_size, and each stored block has a 5 byte header:
1734 */
1735 ulg max_block_size = 0xffff;
1736 ulg max_start;
1737 
1738 if (max_block_size > s->pending_buf_size - 5) {
1739 max_block_size = s->pending_buf_size - 5;
1740 }
1741 
1742 /* Copy as much as possible from input to output: */
1743 for (;;) {
1744 /* Fill the window as much as possible: */
1745 if (s->lookahead <= 1) {
1746 
1747 Assert(s->strstart < s->w_size+MAX_DIST(s) ||
1748 s->block_start >= (long)s->w_size, "slide too late");
1749 
1750 fill_window(s);
1751 if (s->lookahead == 0 && flush == Z_NO_FLUSH) return need_more;
1752 
1753 if (s->lookahead == 0) break; /* flush the current block */
1754 }
1755 Assert(s->block_start >= 0L, "block gone");
1756 
1757 s->strstart += s->lookahead;
1758 s->lookahead = 0;
1759 
1760 /* Emit a stored block if pending_buf will be full: */
1761 max_start = s->block_start + max_block_size;
1762 if (s->strstart == 0 || (ulg)s->strstart >= max_start) {
1763 /* strstart == 0 is possible after wraparound on a 16-bit machine */
1764 s->lookahead = (uInt)(s->strstart - max_start);
1765 s->strstart = (uInt)max_start;
1766 FLUSH_BLOCK(s, 0);
1767 }
1768 /* Flush if we may have to slide, otherwise block_start may become
1769 * negative and the data will be gone:
1770 */
1771 if (s->strstart - (uInt)s->block_start >= MAX_DIST(s)) {
1772 FLUSH_BLOCK(s, 0);
1773 }
1774 }
1775 FLUSH_BLOCK(s, flush == Z_FINISH);
1776 return flush == Z_FINISH ? finish_done : block_done;
1777 }
1778 
1779 /* ===========================================================================
1780 * Compress as much as possible from the input stream, return the current
1781 * block state.
1782 * This function does not perform lazy evaluation of matches and inserts
1783 * new strings in the dictionary only for unmatched strings or for short
1784 * matches. It is used only for the fast compression options.
1785 */
1786 local block_state deflate_fast(s, flush)
1787 deflate_state *s;
1788 int flush;
1789 {
1790 IPos hash_head = NIL; /* head of the hash chain */
1791 int bflush; /* set if current block must be flushed */
1792 
1793 for (;;) {
1794 /* Make sure that we always have enough lookahead, except
1795 * at the end of the input file. We need MAX_MATCH bytes
1796 * for the next match, plus MIN_MATCH bytes to insert the
1797 * string following the next match.
1798 */
1799 if (s->lookahead < MIN_LOOKAHEAD) {
1800 fill_window(s);
1801 if (s->lookahead < MIN_LOOKAHEAD && flush == Z_NO_FLUSH) {
1802 return need_more;
1803 }
1804 if (s->lookahead == 0) break; /* flush the current block */
1805 }
1806 
1807 /* Insert the string window[strstart .. strstart+2] in the
1808 * dictionary, and set hash_head to the head of the hash chain:
1809 */
1810 if (s->lookahead >= MIN_MATCH) {
1811 INSERT_STRING(s, s->strstart, hash_head);
1812 }
1813 
1814 /* Find the longest match, discarding those <= prev_length.
1815 * At this point we always have match_length < MIN_MATCH
1816 */
1817 if (hash_head != NIL && s->strstart - hash_head <= MAX_DIST(s)) {
1818 /* To simplify the code, we prevent matches with the string
1819 * of window index 0 (in particular we have to avoid a match
1820 * of the string with itself at the start of the input file).
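 * (Window position 0 also doubles as NIL, the empty hash chain marker,
 * so a hash head of 0 could not be trusted as a match position anyway.)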
1821 */ 1822 if (s->strategy != Z_HUFFMAN_ONLY) { 1823 s->match_length = longest_match (s, hash_head); 1824 } 1825 /* longest_match() sets match_start */ 1826 } 1827 if (s->match_length >= MIN_MATCH) { 1828 check_match(s, s->strstart, s->match_start, s->match_length); 1829 1830 _tr_tally_dist(s, s->strstart - s->match_start, 1831 s->match_length - MIN_MATCH, bflush); 1832 1833 s->lookahead -= s->match_length; 1834 1835 /* Insert new strings in the hash table only if the match length 1836 * is not too large. This saves time but degrades compression. 1837 */ 1838 #ifndef FASTEST 1839 if (s->match_length <= s->max_insert_length && 1840 s->lookahead >= MIN_MATCH) { 1841 s->match_length--; /* string at strstart already in hash table */ 1842 do { 1843 s->strstart++; 1844 INSERT_STRING(s, s->strstart, hash_head); 1845 /* strstart never exceeds WSIZE-MAX_MATCH, so there are 1846 * always MIN_MATCH bytes ahead. 1847 */ 1848 } while (--s->match_length != 0); 1849 s->strstart++; 1850 } else 1851 #endif 1852 { 1853 s->strstart += s->match_length; 1854 s->match_length = 0; 1855 s->ins_h = s->window[s->strstart]; 1856 UPDATE_HASH(s, s->ins_h, s->window[s->strstart+1]); 1857 #if MIN_MATCH != 3 1858 Call UPDATE_HASH() MIN_MATCH-3 more times 1859 #endif 1860 /* If lookahead < MIN_MATCH, ins_h is garbage, but it does not 1861 * matter since it will be recomputed at next deflate call. 1862 */ 1863 } 1864 } else { 1865 /* No match, output a literal byte */ 1866 Tracevv((stderr,"%c", s->window[s->strstart])); 1867 _tr_tally_lit (s, s->window[s->strstart], bflush); 1868 s->lookahead--; 1869 s->strstart++; 1870 } 1871 if (bflush) FLUSH_BLOCK(s, 0); 1872 } 1873 FLUSH_BLOCK(s, flush == Z_FINISH); 1874 return flush == Z_FINISH ? finish_done : block_done; 1875 } 1876 1877 /* =========================================================================== 1878 * Same as above, but achieves better compression. We use a lazy 1879 * evaluation for matches: a match is finally adopted only if there is 1880 * no better match at the next window position. 1881 */ 1882 local block_state deflate_slow(s, flush) 1883 deflate_state *s; 1884 int flush; 1885 { 1886 IPos hash_head = NIL; /* head of hash chain */ 1887 int bflush; /* set if current block must be flushed */ 1888 1889 /* Process the input block. */ 1890 for (;;) { 1891 /* Make sure that we always have enough lookahead, except 1892 * at the end of the input file. We need MAX_MATCH bytes 1893 * for the next match, plus MIN_MATCH bytes to insert the 1894 * string following the next match. 1895 */ 1896 if (s->lookahead < MIN_LOOKAHEAD) { 1897 fill_window(s); 1898 if (s->lookahead < MIN_LOOKAHEAD && flush == Z_NO_FLUSH) { 1899 return need_more; 1900 } 1901 if (s->lookahead == 0) break; /* flush the current block */ 1902 } 1903 1904 /* Insert the string window[strstart .. strstart+2] in the 1905 * dictionary, and set hash_head to the head of the hash chain: 1906 */ 1907 if (s->lookahead >= MIN_MATCH) { 1908 INSERT_STRING(s, s->strstart, hash_head); 1909 } 1910 1911 /* Find the longest match, discarding those <= prev_length. 1912 */ 1913 s->prev_length = s->match_length, s->prev_match = s->match_start; 1914 s->match_length = MIN_MATCH-1; 1915 1916 if (hash_head != NIL && s->prev_length < s->max_lazy_match && 1917 s->strstart - hash_head <= MAX_DIST(s)) { 1918 /* To simplify the code, we prevent matches with the string 1919 * of window index 0 (in particular we have to avoid a match 1920 * of the string with itself at the start of the input file). 
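 * (Just below, a match of the minimum length is also dropped when it
 * lies more than TOO_FAR bytes back, since coding such a long distance
 * would likely cost more than sending the bytes as literals.)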
1921 */ 1922 if (s->strategy != Z_HUFFMAN_ONLY) { 1923 s->match_length = longest_match (s, hash_head); 1924 } 1925 /* longest_match() sets match_start */ 1926 1927 if (s->match_length <= 5 && (s->strategy == Z_FILTERED || 1928 (s->match_length == MIN_MATCH && 1929 s->strstart - s->match_start > TOO_FAR))) { 1930 1931 /* If prev_match is also MIN_MATCH, match_start is garbage 1932 * but we will ignore the current match anyway. 1933 */ 1934 s->match_length = MIN_MATCH-1; 1935 } 1936 } 1937 /* If there was a match at the previous step and the current 1938 * match is not better, output the previous match: 1939 */ 1940 if (s->prev_length >= MIN_MATCH && s->match_length <= s->prev_length) { 1941 uInt max_insert = s->strstart + s->lookahead - MIN_MATCH; 1942 /* Do not insert strings in hash table beyond this. */ 1943 1944 check_match(s, s->strstart-1, s->prev_match, s->prev_length); 1945 1946 _tr_tally_dist(s, s->strstart -1 - s->prev_match, 1947 s->prev_length - MIN_MATCH, bflush); 1948 1949 /* Insert in hash table all strings up to the end of the match. 1950 * strstart-1 and strstart are already inserted. If there is not 1951 * enough lookahead, the last two strings are not inserted in 1952 * the hash table. 1953 */ 1954 s->lookahead -= s->prev_length-1; 1955 s->prev_length -= 2; 1956 do { 1957 if (++s->strstart <= max_insert) { 1958 INSERT_STRING(s, s->strstart, hash_head); 1959 } 1960 } while (--s->prev_length != 0); 1961 s->match_available = 0; 1962 s->match_length = MIN_MATCH-1; 1963 s->strstart++; 1964 1965 if (bflush) FLUSH_BLOCK(s, 0); 1966 1967 } else if (s->match_available) { 1968 /* If there was no match at the previous position, output a 1969 * single literal. If there was a match but the current match 1970 * is longer, truncate the previous match to a single literal. 1971 */ 1972 Tracevv((stderr,"%c", s->window[s->strstart-1])); 1973 _tr_tally_lit(s, s->window[s->strstart-1], bflush); 1974 if (bflush) { 1975 FLUSH_BLOCK_ONLY(s, 0); 1976 } 1977 s->strstart++; 1978 s->lookahead--; 1979 if (s->strm->avail_out == 0) return need_more; 1980 } else { 1981 /* There is no previous match to compare with, wait for 1982 * the next step to decide. 1983 */ 1984 s->match_available = 1; 1985 s->strstart++; 1986 s->lookahead--; 1987 } 1988 } 1989 Assert (flush != Z_NO_FLUSH, "no flush?"); 1990 if (s->match_available) { 1991 Tracevv((stderr,"%c", s->window[s->strstart-1])); 1992 _tr_tally_lit(s, s->window[s->strstart-1], bflush); 1993 s->match_available = 0; 1994 } 1995 FLUSH_BLOCK(s, flush == Z_FINISH); 1996 return flush == Z_FINISH ? finish_done : block_done; 1997 } 1998 /* --- deflate.c */ 1999 2000 /* +++ trees.c */ 2001 2002 /* trees.c -- output deflated data using Huffman coding 2003 * Copyright (C) 1995-2002 Jean-loup Gailly 2004 * For conditions of distribution and use, see copyright notice in zlib.h 2005 */ 2006 2007 /* 2008 * ALGORITHM 2009 * 2010 * The "deflation" process uses several Huffman trees. The more 2011 * common source values are represented by shorter bit sequences. 2012 * 2013 * Each code tree is stored in a compressed form which is itself 2014 * a Huffman encoding of the lengths of all the code strings (in 2015 * ascending order by source values). The actual code strings are 2016 * reconstructed from the lengths in the inflate process, as described 2017 * in the deflate specification. 2018 * 2019 * REFERENCES 2020 * 2021 * Deutsch, L.P.,"'Deflate' Compressed Data Format Specification". 
2022 * Available in ftp.uu.net:/pub/archiving/zip/doc/deflate-1.1.doc 2023 * 2024 * Storer, James A. 2025 * Data Compression: Methods and Theory, pp. 49-50. 2026 * Computer Science Press, 1988. ISBN 0-7167-8156-5. 2027 * 2028 * Sedgewick, R. 2029 * Algorithms, p290. 2030 * Addison-Wesley, 1983. ISBN 0-201-06672-6. 2031 */ 2032 2033 /* @(#) $Id: zlib.c,v 1.30 2008/05/05 13:41:30 ad Exp $ */ 2034 2035 /* #define GEN_TREES_H */ 2036 2037 /* #include "deflate.h" */ 2038 2039 #ifdef DEBUG_ZLIB 2040 # include <ctype.h> 2041 #endif 2042 2043 /* =========================================================================== 2044 * Constants 2045 */ 2046 2047 #define MAX_BL_BITS 7 2048 /* Bit length codes must not exceed MAX_BL_BITS bits */ 2049 2050 #define END_BLOCK 256 2051 /* end of block literal code */ 2052 2053 #define REP_3_6 16 2054 /* repeat previous bit length 3-6 times (2 bits of repeat count) */ 2055 2056 #define REPZ_3_10 17 2057 /* repeat a zero length 3-10 times (3 bits of repeat count) */ 2058 2059 #define REPZ_11_138 18 2060 /* repeat a zero length 11-138 times (7 bits of repeat count) */ 2061 2062 local const int extra_lbits[LENGTH_CODES] /* extra bits for each length code */ 2063 = {0,0,0,0,0,0,0,0,1,1,1,1,2,2,2,2,3,3,3,3,4,4,4,4,5,5,5,5,0}; 2064 2065 local const int extra_dbits[D_CODES] /* extra bits for each distance code */ 2066 = {0,0,0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7,8,8,9,9,10,10,11,11,12,12,13,13}; 2067 2068 local const int extra_blbits[BL_CODES]/* extra bits for each bit length code */ 2069 = {0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,3,7}; 2070 2071 local const uch bl_order[BL_CODES] 2072 = {16,17,18,0,8,7,9,6,10,5,11,4,12,3,13,2,14,1,15}; 2073 /* The lengths of the bit length codes are sent in order of decreasing 2074 * probability, to avoid transmitting the lengths for unused bit length codes. 2075 */ 2076 2077 #define Buf_size (8 * 2*sizeof(char)) 2078 /* Number of bits used within bi_buf. (bi_buf might be implemented on 2079 * more than 16 bits on some systems.) 2080 */ 2081 2082 /* =========================================================================== 2083 * Local data. These are initialized only once. 2084 */ 2085 2086 #define DIST_CODE_LEN 512 /* see definition of array dist_code below */ 2087 2088 #if defined(GEN_TREES_H) || !defined(STDC) 2089 /* non ANSI compilers may not accept trees.h */ 2090 2091 local ct_data static_ltree[L_CODES+2]; 2092 /* The static literal tree. Since the bit lengths are imposed, there is no 2093 * need for the L_CODES extra codes used during heap construction. However 2094 * The codes 286 and 287 are needed to build a canonical tree (see _tr_init 2095 * below). 2096 */ 2097 2098 local ct_data static_dtree[D_CODES]; 2099 /* The static distance tree. (Actually a trivial tree since all codes use 2100 * 5 bits.) 2101 */ 2102 2103 uch _dist_code[DIST_CODE_LEN]; 2104 /* Distance codes. The first 256 values correspond to the distances 2105 * 3 .. 258, the last 256 values correspond to the top 8 bits of 2106 * the 15 bit distances. 
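 * (The d_code() macro, defined earlier in this file's deflate.h section,
 * reads this table as dist < 256 ? _dist_code[dist] : _dist_code[256+(dist>>7)],
 * where dist is the match distance minus one; see compress_block() below.)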
2107 */ 2108 2109 uch _length_code[MAX_MATCH-MIN_MATCH+1]; 2110 /* length code for each normalized match length (0 == MIN_MATCH) */ 2111 2112 local int base_length[LENGTH_CODES]; 2113 /* First normalized length for each code (0 = MIN_MATCH) */ 2114 2115 local int base_dist[D_CODES]; 2116 /* First normalized distance for each code (0 = distance of 1) */ 2117 2118 #else 2119 /* +++ trees.h */ 2120 2121 /* header created automatically with -DGEN_TREES_H */ 2122 2123 local const ct_data static_ltree[L_CODES+2] = { 2124 {{ 12},{ 8}}, {{140},{ 8}}, {{ 76},{ 8}}, {{204},{ 8}}, {{ 44},{ 8}}, 2125 {{172},{ 8}}, {{108},{ 8}}, {{236},{ 8}}, {{ 28},{ 8}}, {{156},{ 8}}, 2126 {{ 92},{ 8}}, {{220},{ 8}}, {{ 60},{ 8}}, {{188},{ 8}}, {{124},{ 8}}, 2127 {{252},{ 8}}, {{ 2},{ 8}}, {{130},{ 8}}, {{ 66},{ 8}}, {{194},{ 8}}, 2128 {{ 34},{ 8}}, {{162},{ 8}}, {{ 98},{ 8}}, {{226},{ 8}}, {{ 18},{ 8}}, 2129 {{146},{ 8}}, {{ 82},{ 8}}, {{210},{ 8}}, {{ 50},{ 8}}, {{178},{ 8}}, 2130 {{114},{ 8}}, {{242},{ 8}}, {{ 10},{ 8}}, {{138},{ 8}}, {{ 74},{ 8}}, 2131 {{202},{ 8}}, {{ 42},{ 8}}, {{170},{ 8}}, {{106},{ 8}}, {{234},{ 8}}, 2132 {{ 26},{ 8}}, {{154},{ 8}}, {{ 90},{ 8}}, {{218},{ 8}}, {{ 58},{ 8}}, 2133 {{186},{ 8}}, {{122},{ 8}}, {{250},{ 8}}, {{ 6},{ 8}}, {{134},{ 8}}, 2134 {{ 70},{ 8}}, {{198},{ 8}}, {{ 38},{ 8}}, {{166},{ 8}}, {{102},{ 8}}, 2135 {{230},{ 8}}, {{ 22},{ 8}}, {{150},{ 8}}, {{ 86},{ 8}}, {{214},{ 8}}, 2136 {{ 54},{ 8}}, {{182},{ 8}}, {{118},{ 8}}, {{246},{ 8}}, {{ 14},{ 8}}, 2137 {{142},{ 8}}, {{ 78},{ 8}}, {{206},{ 8}}, {{ 46},{ 8}}, {{174},{ 8}}, 2138 {{110},{ 8}}, {{238},{ 8}}, {{ 30},{ 8}}, {{158},{ 8}}, {{ 94},{ 8}}, 2139 {{222},{ 8}}, {{ 62},{ 8}}, {{190},{ 8}}, {{126},{ 8}}, {{254},{ 8}}, 2140 {{ 1},{ 8}}, {{129},{ 8}}, {{ 65},{ 8}}, {{193},{ 8}}, {{ 33},{ 8}}, 2141 {{161},{ 8}}, {{ 97},{ 8}}, {{225},{ 8}}, {{ 17},{ 8}}, {{145},{ 8}}, 2142 {{ 81},{ 8}}, {{209},{ 8}}, {{ 49},{ 8}}, {{177},{ 8}}, {{113},{ 8}}, 2143 {{241},{ 8}}, {{ 9},{ 8}}, {{137},{ 8}}, {{ 73},{ 8}}, {{201},{ 8}}, 2144 {{ 41},{ 8}}, {{169},{ 8}}, {{105},{ 8}}, {{233},{ 8}}, {{ 25},{ 8}}, 2145 {{153},{ 8}}, {{ 89},{ 8}}, {{217},{ 8}}, {{ 57},{ 8}}, {{185},{ 8}}, 2146 {{121},{ 8}}, {{249},{ 8}}, {{ 5},{ 8}}, {{133},{ 8}}, {{ 69},{ 8}}, 2147 {{197},{ 8}}, {{ 37},{ 8}}, {{165},{ 8}}, {{101},{ 8}}, {{229},{ 8}}, 2148 {{ 21},{ 8}}, {{149},{ 8}}, {{ 85},{ 8}}, {{213},{ 8}}, {{ 53},{ 8}}, 2149 {{181},{ 8}}, {{117},{ 8}}, {{245},{ 8}}, {{ 13},{ 8}}, {{141},{ 8}}, 2150 {{ 77},{ 8}}, {{205},{ 8}}, {{ 45},{ 8}}, {{173},{ 8}}, {{109},{ 8}}, 2151 {{237},{ 8}}, {{ 29},{ 8}}, {{157},{ 8}}, {{ 93},{ 8}}, {{221},{ 8}}, 2152 {{ 61},{ 8}}, {{189},{ 8}}, {{125},{ 8}}, {{253},{ 8}}, {{ 19},{ 9}}, 2153 {{275},{ 9}}, {{147},{ 9}}, {{403},{ 9}}, {{ 83},{ 9}}, {{339},{ 9}}, 2154 {{211},{ 9}}, {{467},{ 9}}, {{ 51},{ 9}}, {{307},{ 9}}, {{179},{ 9}}, 2155 {{435},{ 9}}, {{115},{ 9}}, {{371},{ 9}}, {{243},{ 9}}, {{499},{ 9}}, 2156 {{ 11},{ 9}}, {{267},{ 9}}, {{139},{ 9}}, {{395},{ 9}}, {{ 75},{ 9}}, 2157 {{331},{ 9}}, {{203},{ 9}}, {{459},{ 9}}, {{ 43},{ 9}}, {{299},{ 9}}, 2158 {{171},{ 9}}, {{427},{ 9}}, {{107},{ 9}}, {{363},{ 9}}, {{235},{ 9}}, 2159 {{491},{ 9}}, {{ 27},{ 9}}, {{283},{ 9}}, {{155},{ 9}}, {{411},{ 9}}, 2160 {{ 91},{ 9}}, {{347},{ 9}}, {{219},{ 9}}, {{475},{ 9}}, {{ 59},{ 9}}, 2161 {{315},{ 9}}, {{187},{ 9}}, {{443},{ 9}}, {{123},{ 9}}, {{379},{ 9}}, 2162 {{251},{ 9}}, {{507},{ 9}}, {{ 7},{ 9}}, {{263},{ 9}}, {{135},{ 9}}, 2163 {{391},{ 9}}, {{ 71},{ 9}}, {{327},{ 9}}, {{199},{ 9}}, {{455},{ 9}}, 2164 {{ 39},{ 9}}, {{295},{ 9}}, {{167},{ 
9}}, {{423},{ 9}}, {{103},{ 9}}, 2165 {{359},{ 9}}, {{231},{ 9}}, {{487},{ 9}}, {{ 23},{ 9}}, {{279},{ 9}}, 2166 {{151},{ 9}}, {{407},{ 9}}, {{ 87},{ 9}}, {{343},{ 9}}, {{215},{ 9}}, 2167 {{471},{ 9}}, {{ 55},{ 9}}, {{311},{ 9}}, {{183},{ 9}}, {{439},{ 9}}, 2168 {{119},{ 9}}, {{375},{ 9}}, {{247},{ 9}}, {{503},{ 9}}, {{ 15},{ 9}}, 2169 {{271},{ 9}}, {{143},{ 9}}, {{399},{ 9}}, {{ 79},{ 9}}, {{335},{ 9}}, 2170 {{207},{ 9}}, {{463},{ 9}}, {{ 47},{ 9}}, {{303},{ 9}}, {{175},{ 9}}, 2171 {{431},{ 9}}, {{111},{ 9}}, {{367},{ 9}}, {{239},{ 9}}, {{495},{ 9}}, 2172 {{ 31},{ 9}}, {{287},{ 9}}, {{159},{ 9}}, {{415},{ 9}}, {{ 95},{ 9}}, 2173 {{351},{ 9}}, {{223},{ 9}}, {{479},{ 9}}, {{ 63},{ 9}}, {{319},{ 9}}, 2174 {{191},{ 9}}, {{447},{ 9}}, {{127},{ 9}}, {{383},{ 9}}, {{255},{ 9}}, 2175 {{511},{ 9}}, {{ 0},{ 7}}, {{ 64},{ 7}}, {{ 32},{ 7}}, {{ 96},{ 7}}, 2176 {{ 16},{ 7}}, {{ 80},{ 7}}, {{ 48},{ 7}}, {{112},{ 7}}, {{ 8},{ 7}}, 2177 {{ 72},{ 7}}, {{ 40},{ 7}}, {{104},{ 7}}, {{ 24},{ 7}}, {{ 88},{ 7}}, 2178 {{ 56},{ 7}}, {{120},{ 7}}, {{ 4},{ 7}}, {{ 68},{ 7}}, {{ 36},{ 7}}, 2179 {{100},{ 7}}, {{ 20},{ 7}}, {{ 84},{ 7}}, {{ 52},{ 7}}, {{116},{ 7}}, 2180 {{ 3},{ 8}}, {{131},{ 8}}, {{ 67},{ 8}}, {{195},{ 8}}, {{ 35},{ 8}}, 2181 {{163},{ 8}}, {{ 99},{ 8}}, {{227},{ 8}} 2182 }; 2183 2184 local const ct_data static_dtree[D_CODES] = { 2185 {{ 0},{ 5}}, {{16},{ 5}}, {{ 8},{ 5}}, {{24},{ 5}}, {{ 4},{ 5}}, 2186 {{20},{ 5}}, {{12},{ 5}}, {{28},{ 5}}, {{ 2},{ 5}}, {{18},{ 5}}, 2187 {{10},{ 5}}, {{26},{ 5}}, {{ 6},{ 5}}, {{22},{ 5}}, {{14},{ 5}}, 2188 {{30},{ 5}}, {{ 1},{ 5}}, {{17},{ 5}}, {{ 9},{ 5}}, {{25},{ 5}}, 2189 {{ 5},{ 5}}, {{21},{ 5}}, {{13},{ 5}}, {{29},{ 5}}, {{ 3},{ 5}}, 2190 {{19},{ 5}}, {{11},{ 5}}, {{27},{ 5}}, {{ 7},{ 5}}, {{23},{ 5}} 2191 }; 2192 2193 const uch _dist_code[DIST_CODE_LEN] = { 2194 0, 1, 2, 3, 4, 4, 5, 5, 6, 6, 6, 6, 7, 7, 7, 7, 8, 8, 8, 8, 2195 8, 8, 8, 8, 9, 9, 9, 9, 9, 9, 9, 9, 10, 10, 10, 10, 10, 10, 10, 10, 2196 10, 10, 10, 10, 10, 10, 10, 10, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 2197 11, 11, 11, 11, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 2198 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 13, 13, 13, 13, 2199 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 2200 13, 13, 13, 13, 13, 13, 13, 13, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 2201 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 2202 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 2203 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 15, 15, 15, 15, 15, 15, 15, 15, 2204 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 2205 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 2206 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 0, 0, 16, 17, 2207 18, 18, 19, 19, 20, 20, 20, 20, 21, 21, 21, 21, 22, 22, 22, 22, 22, 22, 22, 22, 2208 23, 23, 23, 23, 23, 23, 23, 23, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 2209 24, 24, 24, 24, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 2210 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 2211 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 27, 27, 27, 27, 27, 27, 27, 27, 2212 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 2213 27, 27, 27, 27, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 2214 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 
28, 28, 28, 2215 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 2216 28, 28, 28, 28, 28, 28, 28, 28, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 2217 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 2218 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 2219 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29 2220 }; 2221 2222 const uch _length_code[MAX_MATCH-MIN_MATCH+1]= { 2223 0, 1, 2, 3, 4, 5, 6, 7, 8, 8, 9, 9, 10, 10, 11, 11, 12, 12, 12, 12, 2224 13, 13, 13, 13, 14, 14, 14, 14, 15, 15, 15, 15, 16, 16, 16, 16, 16, 16, 16, 16, 2225 17, 17, 17, 17, 17, 17, 17, 17, 18, 18, 18, 18, 18, 18, 18, 18, 19, 19, 19, 19, 2226 19, 19, 19, 19, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 2227 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 22, 22, 22, 22, 2228 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 23, 23, 23, 23, 23, 23, 23, 23, 2229 23, 23, 23, 23, 23, 23, 23, 23, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 2230 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 2231 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 2232 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 26, 26, 26, 26, 26, 26, 26, 26, 2233 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 2234 26, 26, 26, 26, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 2235 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 28 2236 }; 2237 2238 local const int base_length[LENGTH_CODES] = { 2239 0, 1, 2, 3, 4, 5, 6, 7, 8, 10, 12, 14, 16, 20, 24, 28, 32, 40, 48, 56, 2240 64, 80, 96, 112, 128, 160, 192, 224, 0 2241 }; 2242 2243 local const int base_dist[D_CODES] = { 2244 0, 1, 2, 3, 4, 6, 8, 12, 16, 24, 2245 32, 48, 64, 96, 128, 192, 256, 384, 512, 768, 2246 1024, 1536, 2048, 3072, 4096, 6144, 8192, 12288, 16384, 24576 2247 }; 2248 /* --- trees.h */ 2249 2250 #endif /* GEN_TREES_H */ 2251 2252 struct static_tree_desc_s { 2253 const ct_data *static_tree; /* static tree or NULL */ 2254 const intf *extra_bits; /* extra bits for each code or NULL */ 2255 int extra_base; /* base index for extra_bits */ 2256 int elems; /* max number of elements in the tree */ 2257 int max_length; /* max bit length for the codes */ 2258 }; 2259 2260 local static_tree_desc static_l_desc = 2261 {static_ltree, extra_lbits, LITERALS+1, L_CODES, MAX_BITS}; 2262 2263 local static_tree_desc static_d_desc = 2264 {static_dtree, extra_dbits, 0, D_CODES, MAX_BITS}; 2265 2266 local static_tree_desc static_bl_desc = 2267 {(const ct_data *)0, extra_blbits, 0, BL_CODES, MAX_BL_BITS}; 2268 2269 /* =========================================================================== 2270 * Local (static) routines in this file. 
2271 */ 2272 2273 local void tr_static_init __P((void)); 2274 local void init_block __P((deflate_state *s)); 2275 local void pqdownheap __P((deflate_state *s, ct_data *tree, int k)); 2276 local void gen_bitlen __P((deflate_state *s, tree_desc *desc)); 2277 local void gen_codes __P((ct_data *tree, int max_code, ushf *bl_count)); 2278 local void build_tree __P((deflate_state *s, tree_desc *desc)); 2279 local void scan_tree __P((deflate_state *s, ct_data *tree, int max_code)); 2280 local void send_tree __P((deflate_state *s, ct_data *tree, int max_code)); 2281 local int build_bl_tree __P((deflate_state *s)); 2282 local void send_all_trees __P((deflate_state *s, int lcodes, int dcodes, 2283 int blcodes)); 2284 local void compress_block __P((deflate_state *s, const ct_data *ltree, 2285 const ct_data *dtree)); 2286 local void set_data_type __P((deflate_state *s)); 2287 local unsigned bi_reverse __P((unsigned value, int length)); 2288 local void bi_windup __P((deflate_state *s)); 2289 local void bi_flush __P((deflate_state *s)); 2290 local void copy_block __P((deflate_state *s, charf *buf, unsigned len, 2291 int header)); 2292 2293 #ifdef GEN_TREES_H 2294 local void gen_trees_header __P((void)); 2295 #endif 2296 2297 #ifndef DEBUG_ZLIB 2298 # define send_code(s, c, tree) send_bits(s, tree[c].Code, tree[c].Len) 2299 /* Send a code of the given tree. c and tree must not have side effects */ 2300 2301 #else /* DEBUG_ZLIB */ 2302 # define send_code(s, c, tree) \ 2303 { if (z_verbose>2) fprintf(stderr,"\ncd %3d ",(c)); \ 2304 send_bits(s, tree[c].Code, tree[c].Len); } 2305 #endif 2306 2307 /* =========================================================================== 2308 * Output a short LSB first on the stream. 2309 * IN assertion: there is enough room in pendingBuf. 2310 */ 2311 #define put_short(s, w) { \ 2312 put_byte(s, (uch)((w) & 0xff)); \ 2313 put_byte(s, (uch)((ush)(w) >> 8)); \ 2314 } 2315 2316 /* =========================================================================== 2317 * Send a value on a given number of bits. 2318 * IN assertion: length <= 16 and value fits in length bits. 2319 */ 2320 #ifdef DEBUG_ZLIB 2321 local void send_bits __P((deflate_state *s, int value, int length)); 2322 2323 local void send_bits(s, value, length) 2324 deflate_state *s; 2325 int value; /* value to send */ 2326 int length; /* number of bits */ 2327 { 2328 Tracevv((stderr," l %2d v %4x ", length, value)); 2329 Assert(length > 0 && length <= 15, "invalid length"); 2330 s->bits_sent += (ulg)length; 2331 2332 /* If not enough room in bi_buf, use (valid) bits from bi_buf and 2333 * (16 - bi_valid) bits from value, leaving (width - (16-bi_valid)) 2334 * unused bits in value. 
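 *
 * Worked example (assuming Buf_size == 16): with bi_valid == 14 and
 * length == 5, the two low-order bits of value top up bi_buf, the full
 * 16-bit buffer is flushed with put_short(), the three remaining high
 * bits become the new bi_buf, and bi_valid ends up at 14 + 5 - 16 == 3.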
2335 */ 2336 if (s->bi_valid > (int)Buf_size - length) { 2337 s->bi_buf |= (value << s->bi_valid); 2338 put_short(s, s->bi_buf); 2339 s->bi_buf = (ush)value >> (Buf_size - s->bi_valid); 2340 s->bi_valid += length - Buf_size; 2341 } else { 2342 s->bi_buf |= value << s->bi_valid; 2343 s->bi_valid += length; 2344 } 2345 } 2346 #else /* !DEBUG_ZLIB */ 2347 2348 #define send_bits(s, value, length) \ 2349 { int len = length;\ 2350 if (s->bi_valid > (int)Buf_size - len) {\ 2351 int val = value;\ 2352 s->bi_buf |= (val << s->bi_valid);\ 2353 put_short(s, s->bi_buf);\ 2354 s->bi_buf = (ush)val >> (Buf_size - s->bi_valid);\ 2355 s->bi_valid += len - Buf_size;\ 2356 } else {\ 2357 s->bi_buf |= (value) << s->bi_valid;\ 2358 s->bi_valid += len;\ 2359 }\ 2360 } 2361 #endif /* DEBUG_ZLIB */ 2362 2363 2364 /* =========================================================================== 2365 * Initialize the various 'constant' tables. 2366 */ 2367 local void tr_static_init() 2368 { 2369 #if defined(GEN_TREES_H) || !defined(STDC) 2370 static int static_init_done = 0; 2371 int n; /* iterates over tree elements */ 2372 int bits; /* bit counter */ 2373 int length; /* length value */ 2374 int code; /* code value */ 2375 int dist; /* distance index */ 2376 ush bl_count[MAX_BITS+1]; 2377 /* number of codes at each bit length for an optimal tree */ 2378 2379 if (static_init_done) return; 2380 2381 /* For some embedded targets, global variables are not initialized: */ 2382 static_l_desc.static_tree = static_ltree; 2383 static_l_desc.extra_bits = extra_lbits; 2384 static_d_desc.static_tree = static_dtree; 2385 static_d_desc.extra_bits = extra_dbits; 2386 static_bl_desc.extra_bits = extra_blbits; 2387 2388 /* Initialize the mapping length (0..255) -> length code (0..28) */ 2389 length = 0; 2390 for (code = 0; code < LENGTH_CODES-1; code++) { 2391 base_length[code] = length; 2392 for (n = 0; n < (1<<extra_lbits[code]); n++) { 2393 _length_code[length++] = (uch)code; 2394 } 2395 } 2396 Assert (length == 256, "tr_static_init: length != 256"); 2397 /* Note that the length 255 (match length 258) can be represented 2398 * in two different ways: code 284 + 5 bits or code 285, so we 2399 * overwrite length_code[255] to use the best encoding: 2400 */ 2401 _length_code[length-1] = (uch)code; 2402 2403 /* Initialize the mapping dist (0..32K) -> dist code (0..29) */ 2404 dist = 0; 2405 for (code = 0 ; code < 16; code++) { 2406 base_dist[code] = dist; 2407 for (n = 0; n < (1<<extra_dbits[code]); n++) { 2408 _dist_code[dist++] = (uch)code; 2409 } 2410 } 2411 Assert (dist == 256, "tr_static_init: dist != 256"); 2412 dist >>= 7; /* from now on, all distances are divided by 128 */ 2413 for ( ; code < D_CODES; code++) { 2414 base_dist[code] = dist << 7; 2415 for (n = 0; n < (1<<(extra_dbits[code]-7)); n++) { 2416 _dist_code[256 + dist++] = (uch)code; 2417 } 2418 } 2419 Assert (dist == 256, "tr_static_init: 256+dist != 512"); 2420 2421 /* Construct the codes of the static literal tree */ 2422 for (bits = 0; bits <= MAX_BITS; bits++) bl_count[bits] = 0; 2423 n = 0; 2424 while (n <= 143) static_ltree[n++].Len = 8, bl_count[8]++; 2425 while (n <= 255) static_ltree[n++].Len = 9, bl_count[9]++; 2426 while (n <= 279) static_ltree[n++].Len = 7, bl_count[7]++; 2427 while (n <= 287) static_ltree[n++].Len = 8, bl_count[8]++; 2428 /* Codes 286 and 287 do not exist, but we must include them in the 2429 * tree construction to get a canonical Huffman tree (longest code 2430 * all ones) 2431 */ 2432 gen_codes((ct_data *)static_ltree, L_CODES+1, 
bl_count);
2433 
2434 /* The static distance tree is trivial: */
2435 for (n = 0; n < D_CODES; n++) {
2436 static_dtree[n].Len = 5;
2437 static_dtree[n].Code = bi_reverse((unsigned)n, 5);
2438 }
2439 static_init_done = 1;
2440 
2441 # ifdef GEN_TREES_H
2442 gen_trees_header();
2443 # endif
2444 #endif /* defined(GEN_TREES_H) || !defined(STDC) */
2445 }
2446 
2447 /* ===========================================================================
2448 * Generate the file trees.h describing the static trees.
2449 */
2450 #ifdef GEN_TREES_H
2451 # ifndef DEBUG_ZLIB
2452 # include <stdio.h>
2453 # endif
2454 
2455 # define SEPARATOR(i, last, width) \
2456 ((i) == (last)? "\n};\n\n" : \
2457 ((i) % (width) == (width)-1 ? ",\n" : ", "))
2458 
2459 void gen_trees_header()
2460 {
2461 FILE *header = fopen("trees.h", "w");
2462 int i;
2463 
2464 Assert (header != NULL, "Can't open trees.h");
2465 fprintf(header,
2466 "/* header created automatically with -DGEN_TREES_H */\n\n");
2467 
2468 fprintf(header, "local const ct_data static_ltree[L_CODES+2] = {\n");
2469 for (i = 0; i < L_CODES+2; i++) {
2470 fprintf(header, "{{%3u},{%3u}}%s", static_ltree[i].Code,
2471 static_ltree[i].Len, SEPARATOR(i, L_CODES+1, 5));
2472 }
2473 
2474 fprintf(header, "local const ct_data static_dtree[D_CODES] = {\n");
2475 for (i = 0; i < D_CODES; i++) {
2476 fprintf(header, "{{%2u},{%2u}}%s", static_dtree[i].Code,
2477 static_dtree[i].Len, SEPARATOR(i, D_CODES-1, 5));
2478 }
2479 
2480 fprintf(header, "const uch _dist_code[DIST_CODE_LEN] = {\n");
2481 for (i = 0; i < DIST_CODE_LEN; i++) {
2482 fprintf(header, "%2u%s", _dist_code[i],
2483 SEPARATOR(i, DIST_CODE_LEN-1, 20));
2484 }
2485 
2486 fprintf(header, "const uch _length_code[MAX_MATCH-MIN_MATCH+1]= {\n");
2487 for (i = 0; i < MAX_MATCH-MIN_MATCH+1; i++) {
2488 fprintf(header, "%2u%s", _length_code[i],
2489 SEPARATOR(i, MAX_MATCH-MIN_MATCH, 20));
2490 }
2491 
2492 fprintf(header, "local const int base_length[LENGTH_CODES] = {\n");
2493 for (i = 0; i < LENGTH_CODES; i++) {
2494 fprintf(header, "%1u%s", base_length[i],
2495 SEPARATOR(i, LENGTH_CODES-1, 20));
2496 }
2497 
2498 fprintf(header, "local const int base_dist[D_CODES] = {\n");
2499 for (i = 0; i < D_CODES; i++) {
2500 fprintf(header, "%5u%s", base_dist[i],
2501 SEPARATOR(i, D_CODES-1, 10));
2502 }
2503 
2504 fclose(header);
2505 }
2506 #endif /* GEN_TREES_H */
2507 
2508 /* ===========================================================================
2509 * Initialize the tree data structures for a new zlib stream.
2510 */
2511 void _tr_init(s)
2512 deflate_state *s;
2513 {
2514 tr_static_init();
2515 
2516 s->l_desc.dyn_tree = s->dyn_ltree;
2517 s->l_desc.stat_desc = &static_l_desc;
2518 
2519 s->d_desc.dyn_tree = s->dyn_dtree;
2520 s->d_desc.stat_desc = &static_d_desc;
2521 
2522 s->bl_desc.dyn_tree = s->bl_tree;
2523 s->bl_desc.stat_desc = &static_bl_desc;
2524 
2525 s->bi_buf = 0;
2526 s->bi_valid = 0;
2527 s->last_eob_len = 8; /* enough lookahead for inflate */
2528 #ifdef DEBUG_ZLIB
2529 s->compressed_len = 0L;
2530 s->bits_sent = 0L;
2531 #endif
2532 
2533 /* Initialize the first block of the first file: */
2534 init_block(s);
2535 }
2536 
2537 /* ===========================================================================
2538 * Initialize a new block.
2539 */
2540 local void init_block(s)
2541 deflate_state *s;
2542 {
2543 int n; /* iterates over tree elements */
2544 
2545 /* Initialize the trees. 
*/
2546 for (n = 0; n < L_CODES; n++) s->dyn_ltree[n].Freq = 0;
2547 for (n = 0; n < D_CODES; n++) s->dyn_dtree[n].Freq = 0;
2548 for (n = 0; n < BL_CODES; n++) s->bl_tree[n].Freq = 0;
2549 
2550 s->dyn_ltree[END_BLOCK].Freq = 1;
2551 s->opt_len = s->static_len = 0L;
2552 s->last_lit = s->matches = 0;
2553 }
2554 
2555 #define SMALLEST 1
2556 /* Index within the heap array of least frequent node in the Huffman tree */
2557 
2558 
2559 /* ===========================================================================
2560 * Remove the smallest element from the heap and recreate the heap with
2561 * one less element. Updates heap and heap_len.
2562 */
2563 #define pqremove(s, tree, top) \
2564 {\
2565 top = s->heap[SMALLEST]; \
2566 s->heap[SMALLEST] = s->heap[s->heap_len--]; \
2567 pqdownheap(s, tree, SMALLEST); \
2568 }
2569 
2570 /* ===========================================================================
2571 * Compares two subtrees, using the tree depth as tie breaker when
2572 * the subtrees have equal frequency. This minimizes the worst case length.
2573 */
2574 #define smaller(tree, n, m, depth) \
2575 (tree[n].Freq < tree[m].Freq || \
2576 (tree[n].Freq == tree[m].Freq && depth[n] <= depth[m]))
2577 
2578 /* ===========================================================================
2579 * Restore the heap property by moving down the tree starting at node k,
2580 * exchanging a node with the smallest of its two sons if necessary, stopping
2581 * when the heap property is re-established (each father smaller than its
2582 * two sons).
2583 */
2584 local void pqdownheap(s, tree, k)
2585 deflate_state *s;
2586 ct_data *tree; /* the tree to restore */
2587 int k; /* node to move down */
2588 {
2589 int v = s->heap[k];
2590 int j = k << 1; /* left son of k */
2591 while (j <= s->heap_len) {
2592 /* Set j to the smallest of the two sons: */
2593 if (j < s->heap_len &&
2594 smaller(tree, s->heap[j+1], s->heap[j], s->depth)) {
2595 j++;
2596 }
2597 /* Exit if v is smaller than both sons */
2598 if (smaller(tree, v, s->heap[j], s->depth)) break;
2599 
2600 /* Exchange v with the smallest son */
2601 s->heap[k] = s->heap[j]; k = j;
2602 
2603 /* And continue down the tree, setting j to the left son of k */
2604 j <<= 1;
2605 }
2606 s->heap[k] = v;
2607 }
2608 
2609 /* ===========================================================================
2610 * Compute the optimal bit lengths for a tree and update the total bit length
2611 * for the current block.
2612 * IN assertion: the fields freq and dad are set, heap[heap_max] and
2613 * above are the tree nodes sorted by increasing frequency.
2614 * OUT assertions: the field len is set to the optimal bit length, the
2615 * array bl_count contains the frequencies for each bit length.
2616 * The length opt_len is updated; static_len is also updated if stree is
2617 * not null.
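 * (Concretely, the main loop below adds freq * (len + extra bits) for
 * every leaf, so opt_len ends up as the exact payload bit count of the
 * block under the dynamic trees.)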
2618 */ 2619 local void gen_bitlen(s, desc) 2620 deflate_state *s; 2621 tree_desc *desc; /* the tree descriptor */ 2622 { 2623 ct_data *tree = desc->dyn_tree; 2624 int max_code = desc->max_code; 2625 const ct_data *stree = desc->stat_desc->static_tree; 2626 const intf *extra = desc->stat_desc->extra_bits; 2627 int base = desc->stat_desc->extra_base; 2628 int max_length = desc->stat_desc->max_length; 2629 int h; /* heap index */ 2630 int n, m; /* iterate over the tree elements */ 2631 int bits; /* bit length */ 2632 int xbits; /* extra bits */ 2633 ush f; /* frequency */ 2634 int overflow = 0; /* number of elements with bit length too large */ 2635 2636 for (bits = 0; bits <= MAX_BITS; bits++) s->bl_count[bits] = 0; 2637 2638 /* In a first pass, compute the optimal bit lengths (which may 2639 * overflow in the case of the bit length tree). 2640 */ 2641 tree[s->heap[s->heap_max]].Len = 0; /* root of the heap */ 2642 2643 for (h = s->heap_max+1; h < HEAP_SIZE; h++) { 2644 n = s->heap[h]; 2645 bits = tree[tree[n].Dad].Len + 1; 2646 if (bits > max_length) bits = max_length, overflow++; 2647 tree[n].Len = (ush)bits; 2648 /* We overwrite tree[n].Dad which is no longer needed */ 2649 2650 if (n > max_code) continue; /* not a leaf node */ 2651 2652 s->bl_count[bits]++; 2653 xbits = 0; 2654 if (n >= base) xbits = extra[n-base]; 2655 f = tree[n].Freq; 2656 s->opt_len += (ulg)f * (bits + xbits); 2657 if (stree) s->static_len += (ulg)f * (stree[n].Len + xbits); 2658 } 2659 if (overflow == 0) return; 2660 2661 Trace((stderr,"\nbit length overflow\n")); 2662 /* This happens for example on obj2 and pic of the Calgary corpus */ 2663 2664 /* Find the first bit length which could increase: */ 2665 do { 2666 bits = max_length-1; 2667 while (s->bl_count[bits] == 0) bits--; 2668 s->bl_count[bits]--; /* move one leaf down the tree */ 2669 s->bl_count[bits+1] += 2; /* move one overflow item as its brother */ 2670 s->bl_count[max_length]--; 2671 /* The brother of the overflow item also moves one step up, 2672 * but this does not affect bl_count[max_length] 2673 */ 2674 overflow -= 2; 2675 } while (overflow > 0); 2676 2677 /* Now recompute all bit lengths, scanning in increasing frequency. 2678 * h is still equal to HEAP_SIZE. (It is simpler to reconstruct all 2679 * lengths instead of fixing only the wrong ones. This idea is taken 2680 * from 'ar' written by Haruhiko Okumura.) 2681 */ 2682 for (bits = max_length; bits != 0; bits--) { 2683 n = s->bl_count[bits]; 2684 while (n != 0) { 2685 m = s->heap[--h]; 2686 if (m > max_code) continue; 2687 if (tree[m].Len != (unsigned) bits) { 2688 Trace((stderr,"code %d bits %d->%d\n", m, tree[m].Len, bits)); 2689 s->opt_len += ((long)bits - (long)tree[m].Len) 2690 *(long)tree[m].Freq; 2691 tree[m].Len = (ush)bits; 2692 } 2693 n--; 2694 } 2695 } 2696 } 2697 2698 /* =========================================================================== 2699 * Generate the codes for a given tree and bit counts (which need not be 2700 * optimal). 2701 * IN assertion: the array bl_count contains the bit length statistics for 2702 * the given tree and the field len is set for all tree elements. 2703 * OUT assertion: the field code is set for all tree elements of non 2704 * zero code length. 
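 *
 * A small worked example (illustrative sketch only; the names are made
 * up and the block is never compiled):
 */
#if 0
local void gen_codes_example()
{
    /* Code lengths B=1, A=2, C=3, D=3 give bl_count[1..3] = {1, 1, 2}. */
    ush bl_count_ex[4] = {0, 1, 1, 2};
    ush next_code_ex[4];
    ush code = 0;
    int bits;

    for (bits = 1; bits <= 3; bits++) {
        next_code_ex[bits] = code = (code + bl_count_ex[bits-1]) << 1;
    }
    /* next_code_ex[1..3] == {0, 2, 6}: before bit reversal the codes are
     * B -> 0, A -> 10, C -> 110, D -> 111, the canonical assignment.
     */
}
#endif
/*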
2705 */
2706 local void gen_codes (tree, max_code, bl_count)
2707 ct_data *tree; /* the tree to decorate */
2708 int max_code; /* largest code with non zero frequency */
2709 ushf *bl_count; /* number of codes at each bit length */
2710 {
2711 ush next_code[MAX_BITS+1]; /* next code value for each bit length */
2712 ush code = 0; /* running code value */
2713 int bits; /* bit index */
2714 int n; /* code index */
2715 
2716 /* The distribution counts are first used to generate the code values
2717 * without bit reversal.
2718 */
2719 for (bits = 1; bits <= MAX_BITS; bits++) {
2720 next_code[bits] = code = (code + bl_count[bits-1]) << 1;
2721 }
2722 /* Check that the bit counts in bl_count are consistent. The last code
2723 * must be all ones.
2724 */
2725 Assert (code + bl_count[MAX_BITS]-1 == (1<<MAX_BITS)-1,
2726 "inconsistent bit counts");
2727 Tracev((stderr,"\ngen_codes: max_code %d ", max_code));
2728 
2729 for (n = 0; n <= max_code; n++) {
2730 int len = tree[n].Len;
2731 if (len == 0) continue;
2732 /* Now reverse the bits */
2733 tree[n].Code = bi_reverse(next_code[len]++, len);
2734 
2735 Tracecv(tree != static_ltree, (stderr,"\nn %3d %c l %2d c %4x (%x) ",
2736 n, (isgraph(n) ? n : ' '), len, tree[n].Code, next_code[len]-1));
2737 }
2738 }
2739 
2740 /* ===========================================================================
2741 * Construct one Huffman tree and assign the code bit strings and lengths.
2742 * Update the total bit length for the current block.
2743 * IN assertion: the field freq is set for all tree elements.
2744 * OUT assertions: the fields len and code are set to the optimal bit length
2745 * and corresponding code. The length opt_len is updated; static_len is
2746 * also updated if stree is not null. The field max_code is set.
2747 */
2748 local void build_tree(s, desc)
2749 deflate_state *s;
2750 tree_desc *desc; /* the tree descriptor */
2751 {
2752 ct_data *tree = desc->dyn_tree;
2753 const ct_data *stree = desc->stat_desc->static_tree;
2754 int elems = desc->stat_desc->elems;
2755 int n, m; /* iterate over heap elements */
2756 int max_code = -1; /* largest code with non zero frequency */
2757 int node; /* new node being created */
2758 
2759 /* Construct the initial heap, with least frequent element in
2760 * heap[SMALLEST]. The sons of heap[n] are heap[2*n] and heap[2*n+1].
2761 * heap[0] is not used.
2762 */
2763 s->heap_len = 0, s->heap_max = HEAP_SIZE;
2764 
2765 for (n = 0; n < elems; n++) {
2766 if (tree[n].Freq != 0) {
2767 s->heap[++(s->heap_len)] = max_code = n;
2768 s->depth[n] = 0;
2769 } else {
2770 tree[n].Len = 0;
2771 }
2772 }
2773 
2774 /* The pkzip format requires that at least one distance code exists,
2775 * and that at least one bit should be sent even if there is only one
2776 * possible code. So to avoid special checks later on we force at least
2777 * two codes of non zero frequency.
2778 */
2779 while (s->heap_len < 2) {
2780 node = s->heap[++(s->heap_len)] = (max_code < 2 ? ++max_code : 0);
2781 tree[node].Freq = 1;
2782 s->depth[node] = 0;
2783 s->opt_len--; if (stree) s->static_len -= stree[node].Len;
2784 /* node is 0 or 1 so it does not have extra bits */
2785 }
2786 desc->max_code = max_code;
2787 
2788 /* The elements heap[heap_len/2+1 .. heap_len] are leaves of the tree,
2789 * establish sub-heaps of increasing lengths:
2790 */
2791 for (n = s->heap_len/2; n >= 1; n--) pqdownheap(s, tree, n);
2792 
2793 /* Construct the Huffman tree by repeatedly combining the least two
2794 * frequent nodes.
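 * (For instance, with frequencies {A:5, B:2, C:1, D:1} the loop joins
 * C and D first, then their parent with B, then that result with A,
 * giving code lengths of 1, 2, 3 and 3 bits respectively.)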
2795 */ 2796 node = elems; /* next internal node of the tree */ 2797 do { 2798 pqremove(s, tree, n); /* n = node of least frequency */ 2799 m = s->heap[SMALLEST]; /* m = node of next least frequency */ 2800 2801 s->heap[--(s->heap_max)] = n; /* keep the nodes sorted by frequency */ 2802 s->heap[--(s->heap_max)] = m; 2803 2804 /* Create a new node father of n and m */ 2805 tree[node].Freq = tree[n].Freq + tree[m].Freq; 2806 s->depth[node] = (uch) (MAX(s->depth[n], s->depth[m]) + 1); 2807 tree[n].Dad = tree[m].Dad = (ush)node; 2808 #ifdef DUMP_BL_TREE 2809 if (tree == s->bl_tree) { 2810 fprintf(stderr,"\nnode %d(%d), sons %d(%d) %d(%d)", 2811 node, tree[node].Freq, n, tree[n].Freq, m, tree[m].Freq); 2812 } 2813 #endif 2814 /* and insert the new node in the heap */ 2815 s->heap[SMALLEST] = node++; 2816 pqdownheap(s, tree, SMALLEST); 2817 2818 } while (s->heap_len >= 2); 2819 2820 s->heap[--(s->heap_max)] = s->heap[SMALLEST]; 2821 2822 /* At this point, the fields freq and dad are set. We can now 2823 * generate the bit lengths. 2824 */ 2825 gen_bitlen(s, (tree_desc *)desc); 2826 2827 /* The field len is now set, we can generate the bit codes */ 2828 gen_codes ((ct_data *)tree, max_code, s->bl_count); 2829 } 2830 2831 /* =========================================================================== 2832 * Scan a literal or distance tree to determine the frequencies of the codes 2833 * in the bit length tree. 2834 */ 2835 local void scan_tree (s, tree, max_code) 2836 deflate_state *s; 2837 ct_data *tree; /* the tree to be scanned */ 2838 int max_code; /* and its largest code of non zero frequency */ 2839 { 2840 int n; /* iterates over all tree elements */ 2841 int prevlen = -1; /* last emitted length */ 2842 int curlen; /* length of current code */ 2843 int nextlen = tree[0].Len; /* length of next code */ 2844 int count = 0; /* repeat count of the current code */ 2845 int max_count = 7; /* max repeat count */ 2846 int min_count = 4; /* min repeat count */ 2847 2848 if (nextlen == 0) max_count = 138, min_count = 3; 2849 tree[max_code+1].Len = (ush)0xffff; /* guard */ 2850 2851 for (n = 0; n <= max_code; n++) { 2852 curlen = nextlen; nextlen = tree[n+1].Len; 2853 if (++count < max_count && curlen == nextlen) { 2854 continue; 2855 } else if (count < min_count) { 2856 s->bl_tree[curlen].Freq += count; 2857 } else if (curlen != 0) { 2858 if (curlen != prevlen) s->bl_tree[curlen].Freq++; 2859 s->bl_tree[REP_3_6].Freq++; 2860 } else if (count <= 10) { 2861 s->bl_tree[REPZ_3_10].Freq++; 2862 } else { 2863 s->bl_tree[REPZ_11_138].Freq++; 2864 } 2865 count = 0; prevlen = curlen; 2866 if (nextlen == 0) { 2867 max_count = 138, min_count = 3; 2868 } else if (curlen == nextlen) { 2869 max_count = 6, min_count = 3; 2870 } else { 2871 max_count = 7, min_count = 4; 2872 } 2873 } 2874 } 2875 2876 /* =========================================================================== 2877 * Send a literal or distance tree in compressed form, using the codes in 2878 * bl_tree. 
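 * (A run of seven zero lengths, for example, goes out as one REPZ_3_10
 * code plus a 3-bit repeat count of 7 - 3 == 4, instead of seven
 * separate zero-length codes.)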
2879 */
2880 local void send_tree (s, tree, max_code)
2881 deflate_state *s;
2882 ct_data *tree; /* the tree to be scanned */
2883 int max_code; /* and its largest code of non zero frequency */
2884 {
2885 int n; /* iterates over all tree elements */
2886 int prevlen = -1; /* last emitted length */
2887 int curlen; /* length of current code */
2888 int nextlen = tree[0].Len; /* length of next code */
2889 int count = 0; /* repeat count of the current code */
2890 int max_count = 7; /* max repeat count */
2891 int min_count = 4; /* min repeat count */
2892 
2893 /* tree[max_code+1].Len = -1; */ /* guard already set */
2894 if (nextlen == 0) max_count = 138, min_count = 3;
2895 
2896 for (n = 0; n <= max_code; n++) {
2897 curlen = nextlen; nextlen = tree[n+1].Len;
2898 if (++count < max_count && curlen == nextlen) {
2899 continue;
2900 } else if (count < min_count) {
2901 do { send_code(s, curlen, s->bl_tree); } while (--count != 0);
2902 
2903 } else if (curlen != 0) {
2904 if (curlen != prevlen) {
2905 send_code(s, curlen, s->bl_tree); count--;
2906 }
2907 Assert(count >= 3 && count <= 6, " 3_6?");
2908 send_code(s, REP_3_6, s->bl_tree); send_bits(s, count-3, 2);
2909 
2910 } else if (count <= 10) {
2911 send_code(s, REPZ_3_10, s->bl_tree); send_bits(s, count-3, 3);
2912 
2913 } else {
2914 send_code(s, REPZ_11_138, s->bl_tree); send_bits(s, count-11, 7);
2915 }
2916 count = 0; prevlen = curlen;
2917 if (nextlen == 0) {
2918 max_count = 138, min_count = 3;
2919 } else if (curlen == nextlen) {
2920 max_count = 6, min_count = 3;
2921 } else {
2922 max_count = 7, min_count = 4;
2923 }
2924 }
2925 }
2926 
2927 /* ===========================================================================
2928 * Construct the Huffman tree for the bit lengths and return the index in
2929 * bl_order of the last bit length code to send.
2930 */
2931 local int build_bl_tree(s)
2932 deflate_state *s;
2933 {
2934 int max_blindex; /* index of last bit length code of non zero freq */
2935 
2936 /* Determine the bit length frequencies for literal and distance trees */
2937 scan_tree(s, (ct_data *)s->dyn_ltree, s->l_desc.max_code);
2938 scan_tree(s, (ct_data *)s->dyn_dtree, s->d_desc.max_code);
2939 
2940 /* Build the bit length tree: */
2941 build_tree(s, (tree_desc *)(&(s->bl_desc)));
2942 /* opt_len now includes the length of the tree representations, except
2943 * the lengths of the bit length codes and the 5+5+4 bits for the counts.
2944 */
2945 
2946 /* Determine the number of bit length codes to send. The pkzip format
2947 * requires that at least 4 bit length codes be sent. (appnote.txt says
2948 * 3 but the actual value used is 4.)
2949 */
2950 for (max_blindex = BL_CODES-1; max_blindex >= 3; max_blindex--) {
2951 if (s->bl_tree[bl_order[max_blindex]].Len != 0) break;
2952 }
2953 /* Update opt_len to include the bit length tree and counts */
2954 s->opt_len += 3*(max_blindex+1) + 5+5+4;
2955 Tracev((stderr, "\ndyn trees: dyn %ld, stat %ld",
2956 s->opt_len, s->static_len));
2957 
2958 return max_blindex;
2959 }
2960 
2961 /* ===========================================================================
2962 * Send the header for a block using dynamic Huffman trees: the counts, the
2963 * lengths of the bit length codes, the literal tree and the distance tree.
2964 * IN assertion: lcodes >= 257, dcodes >= 1, blcodes >= 4.
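 * (On the wire this is the usual dynamic-block header: 5 bits holding
 * lcodes-257, 5 bits holding dcodes-1, 4 bits holding blcodes-4, then
 * 3 bits for the length of each bit length code, taken in bl_order.)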
2965 */ 2966 local void send_all_trees(s, lcodes, dcodes, blcodes) 2967 deflate_state *s; 2968 int lcodes, dcodes, blcodes; /* number of codes for each tree */ 2969 { 2970 int rank; /* index in bl_order */ 2971 2972 Assert (lcodes >= 257 && dcodes >= 1 && blcodes >= 4, "not enough codes"); 2973 Assert (lcodes <= L_CODES && dcodes <= D_CODES && blcodes <= BL_CODES, 2974 "too many codes"); 2975 Tracev((stderr, "\nbl counts: ")); 2976 send_bits(s, lcodes-257, 5); /* not +255 as stated in appnote.txt */ 2977 send_bits(s, dcodes-1, 5); 2978 send_bits(s, blcodes-4, 4); /* not -3 as stated in appnote.txt */ 2979 for (rank = 0; rank < blcodes; rank++) { 2980 Tracev((stderr, "\nbl code %2d ", bl_order[rank])); 2981 send_bits(s, s->bl_tree[bl_order[rank]].Len, 3); 2982 } 2983 Tracev((stderr, "\nbl tree: sent %ld", s->bits_sent)); 2984 2985 send_tree(s, (ct_data *)s->dyn_ltree, lcodes-1); /* literal tree */ 2986 Tracev((stderr, "\nlit tree: sent %ld", s->bits_sent)); 2987 2988 send_tree(s, (ct_data *)s->dyn_dtree, dcodes-1); /* distance tree */ 2989 Tracev((stderr, "\ndist tree: sent %ld", s->bits_sent)); 2990 } 2991 2992 /* =========================================================================== 2993 * Send a stored block 2994 */ 2995 void _tr_stored_block(s, buf, stored_len, eof) 2996 deflate_state *s; 2997 charf *buf; /* input block */ 2998 ulg stored_len; /* length of input block */ 2999 int eof; /* true if this is the last block for a file */ 3000 { 3001 send_bits(s, (STORED_BLOCK<<1)+eof, 3); /* send block type */ 3002 #ifdef DEBUG_ZLIB 3003 s->compressed_len = (s->compressed_len + 3 + 7) & (ulg)~7L; 3004 s->compressed_len += (stored_len + 4) << 3; 3005 #endif 3006 copy_block(s, buf, (unsigned)stored_len, 1); /* with header */ 3007 } 3008 3009 /* Send just the `stored block' type code without any length bytes or data. 3010 */ 3011 void _tr_stored_type_only(s) 3012 deflate_state *s; 3013 { 3014 send_bits(s, (STORED_BLOCK << 1), 3); 3015 bi_windup(s); 3016 #ifdef DEBUG_ZLIB 3017 s->compressed_len = (s->compressed_len + 3) & ~7L; 3018 #endif 3019 } 3020 3021 /* =========================================================================== 3022 * Send one empty static block to give enough lookahead for inflate. 3023 * This takes 10 bits, of which 7 may remain in the bit buffer. 3024 * The current inflate code requires 9 bits of lookahead. If the 3025 * last two codes for the previous block (real code plus EOB) were coded 3026 * on 5 bits or less, inflate may have only 5+3 bits of lookahead to decode 3027 * the last real code. In this case we send two empty static blocks instead 3028 * of one. (There are no problems if the previous block is stored or fixed.) 3029 * To simplify the code, we assume the worst case of last real code encoded 3030 * on one bit only. 3031 */ 3032 void _tr_align(s) 3033 deflate_state *s; 3034 { 3035 send_bits(s, STATIC_TREES<<1, 3); 3036 send_code(s, END_BLOCK, static_ltree); 3037 #ifdef DEBUG_ZLIB 3038 s->compressed_len += 10L; /* 3 for block type, 7 for EOB */ 3039 #endif 3040 bi_flush(s); 3041 /* Of the 10 bits for the empty block, we have already sent 3042 * (10 - bi_valid) bits. The lookahead for the last real code (before 3043 * the EOB of the previous block) was thus at least one plus the length 3044 * of the EOB plus what we have just sent of the empty static block. 
3045 */ 3046 if (1 + s->last_eob_len + 10 - s->bi_valid < 9) { 3047 send_bits(s, STATIC_TREES<<1, 3); 3048 send_code(s, END_BLOCK, static_ltree); 3049 #ifdef DEBUG_ZLIB 3050 s->compressed_len += 10L; 3051 #endif 3052 bi_flush(s); 3053 } 3054 s->last_eob_len = 7; 3055 } 3056 3057 /* =========================================================================== 3058 * Determine the best encoding for the current block: dynamic trees, static 3059 * trees or store, and output the encoded block to the zip file. 3060 */ 3061 void _tr_flush_block(s, buf, stored_len, eof) 3062 deflate_state *s; 3063 charf *buf; /* input block, or NULL if too old */ 3064 ulg stored_len; /* length of input block */ 3065 int eof; /* true if this is the last block for a file */ 3066 { 3067 ulg opt_lenb, static_lenb; /* opt_len and static_len in bytes */ 3068 int max_blindex = 0; /* index of last bit length code of non zero freq */ 3069 3070 /* Build the Huffman trees unless a stored block is forced */ 3071 if (s->level > 0) { 3072 3073 /* Check if the file is ascii or binary */ 3074 if (s->data_type == Z_UNKNOWN) set_data_type(s); 3075 3076 /* Construct the literal and distance trees */ 3077 build_tree(s, (tree_desc *)(&(s->l_desc))); 3078 Tracev((stderr, "\nlit data: dyn %ld, stat %ld", s->opt_len, 3079 s->static_len)); 3080 3081 build_tree(s, (tree_desc *)(&(s->d_desc))); 3082 Tracev((stderr, "\ndist data: dyn %ld, stat %ld", s->opt_len, 3083 s->static_len)); 3084 /* At this point, opt_len and static_len are the total bit lengths of 3085 * the compressed block data, excluding the tree representations. 3086 */ 3087 3088 /* Build the bit length tree for the above two trees, and get the index 3089 * in bl_order of the last bit length code to send. 3090 */ 3091 max_blindex = build_bl_tree(s); 3092 3093 /* Determine the best encoding. Compute first the block length in bytes*/ 3094 opt_lenb = (s->opt_len+3+7)>>3; 3095 static_lenb = (s->static_len+3+7)>>3; 3096 3097 Tracev((stderr, "\nopt %lu(%lu) stat %lu(%lu) stored %lu lit %u ", 3098 opt_lenb, s->opt_len, static_lenb, s->static_len, stored_len, 3099 s->last_lit)); 3100 3101 if (static_lenb <= opt_lenb) opt_lenb = static_lenb; 3102 3103 } else { 3104 Assert(buf != (char*)0, "lost buf"); 3105 opt_lenb = static_lenb = stored_len + 5; /* force a stored block */ 3106 } 3107 3108 #ifdef FORCE_STORED 3109 if (buf != (char*)0) { /* force stored block */ 3110 #else 3111 if (stored_len+4 <= opt_lenb && buf != (char*)0) { 3112 /* 4: two words for the lengths */ 3113 #endif 3114 /* The test buf != NULL is only necessary if LIT_BUFSIZE > WSIZE. 3115 * Otherwise we can't have processed more than WSIZE input bytes since 3116 * the last block flush, because compression would have been 3117 * successful. If LIT_BUFSIZE <= WSIZE, it is never too late to 3118 * transform a block into a stored block. 
3119 */ 3120 _tr_stored_block(s, buf, stored_len, eof); 3121 3122 #ifdef FORCE_STATIC 3123 } else if (static_lenb >= 0) { /* force static trees */ 3124 #else 3125 } else if (static_lenb == opt_lenb) { 3126 #endif 3127 send_bits(s, (STATIC_TREES<<1)+eof, 3); 3128 compress_block(s, (const ct_data *)static_ltree, (const ct_data *)static_dtree); 3129 #ifdef DEBUG_ZLIB 3130 s->compressed_len += 3 + s->static_len; 3131 #endif 3132 } else { 3133 send_bits(s, (DYN_TREES<<1)+eof, 3); 3134 send_all_trees(s, s->l_desc.max_code+1, s->d_desc.max_code+1, 3135 max_blindex+1); 3136 compress_block(s, (const ct_data *)s->dyn_ltree, (const ct_data *)s->dyn_dtree); 3137 #ifdef DEBUG_ZLIB 3138 s->compressed_len += 3 + s->opt_len; 3139 #endif 3140 } 3141 Assert (s->compressed_len == s->bits_sent, "bad compressed size"); 3142 /* The above check is made mod 2^32, for files larger than 512 MB 3143 * and uLong implemented on 32 bits. 3144 */ 3145 init_block(s); 3146 3147 if (eof) { 3148 bi_windup(s); 3149 #ifdef DEBUG_ZLIB 3150 s->compressed_len += 7; /* align on byte boundary */ 3151 #endif 3152 } 3153 Tracev((stderr,"\ncomprlen %lu(%lu) ", s->compressed_len>>3, 3154 s->compressed_len-7*eof)); 3155 } 3156 3157 /* =========================================================================== 3158 * Save the match info and tally the frequency counts. Return true if 3159 * the current block must be flushed. 3160 */ 3161 #if 0 3162 int _tr_tally (s, dist, lc) 3163 deflate_state *s; 3164 unsigned dist; /* distance of matched string */ 3165 unsigned lc; /* match length-MIN_MATCH or unmatched char (if dist==0) */ 3166 { 3167 s->d_buf[s->last_lit] = (ush)dist; 3168 s->l_buf[s->last_lit++] = (uch)lc; 3169 if (dist == 0) { 3170 /* lc is the unmatched char */ 3171 s->dyn_ltree[lc].Freq++; 3172 } else { 3173 s->matches++; 3174 /* Here, lc is the match length - MIN_MATCH */ 3175 dist--; /* dist = match distance - 1 */ 3176 Assert((ush)dist < (ush)MAX_DIST(s) && 3177 (ush)lc <= (ush)(MAX_MATCH-MIN_MATCH) && 3178 (ush)d_code(dist) < (ush)D_CODES, "_tr_tally: bad match"); 3179 3180 s->dyn_ltree[_length_code[lc]+LITERALS+1].Freq++; 3181 s->dyn_dtree[d_code(dist)].Freq++; 3182 } 3183 3184 #ifdef TRUNCATE_BLOCK 3185 /* Try to guess if it is profitable to stop the current block here */ 3186 if ((s->last_lit & 0x1fff) == 0 && s->level > 2) { 3187 /* Compute an upper bound for the compressed length */ 3188 ulg out_length = (ulg)s->last_lit*8L; 3189 ulg in_length = (ulg)((long)s->strstart - s->block_start); 3190 int dcode; 3191 for (dcode = 0; dcode < D_CODES; dcode++) { 3192 out_length += (ulg)s->dyn_dtree[dcode].Freq * 3193 (5L+extra_dbits[dcode]); 3194 } 3195 out_length >>= 3; 3196 Tracev((stderr,"\nlast_lit %u, in %ld, out ~%ld(%ld%%) ", 3197 s->last_lit, in_length, out_length, 3198 100L - out_length*100L/in_length)); 3199 if (s->matches < s->last_lit/2 && out_length < in_length/2) return 1; 3200 } 3201 #endif 3202 return (s->last_lit == s->lit_bufsize-1); 3203 /* We avoid equality with lit_bufsize because of wraparound at 64K 3204 * on 16 bit machines and because stored blocks are restricted to 3205 * 64K-1 bytes. 
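 * For example, with the default memLevel of 8, lit_bufsize is
 * 1 << (8 + 6) == 16384, so a flush is requested after 16383 tallies.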
3206 */ 3207 } 3208 #endif 3209 3210 /* =========================================================================== 3211 * Send the block data compressed using the given Huffman trees 3212 */ 3213 local void compress_block(s, ltree, dtree) 3214 deflate_state *s; 3215 const ct_data *ltree; /* literal tree */ 3216 const ct_data *dtree; /* distance tree */ 3217 { 3218 unsigned dist; /* distance of matched string */ 3219 int lc; /* match length or unmatched char (if dist == 0) */ 3220 unsigned lx = 0; /* running index in l_buf */ 3221 unsigned code; /* the code to send */ 3222 int extra; /* number of extra bits to send */ 3223 3224 if (s->last_lit != 0) do { 3225 dist = s->d_buf[lx]; 3226 lc = s->l_buf[lx++]; 3227 if (dist == 0) { 3228 send_code(s, lc, ltree); /* send a literal byte */ 3229 Tracecv(isgraph(lc), (stderr," '%c' ", lc)); 3230 } else { 3231 /* Here, lc is the match length - MIN_MATCH */ 3232 code = _length_code[lc]; 3233 send_code(s, code+LITERALS+1, ltree); /* send the length code */ 3234 extra = extra_lbits[code]; 3235 if (extra != 0) { 3236 lc -= base_length[code]; 3237 send_bits(s, lc, extra); /* send the extra length bits */ 3238 } 3239 dist--; /* dist is now the match distance - 1 */ 3240 code = d_code(dist); 3241 Assert (code < D_CODES, "bad d_code"); 3242 3243 send_code(s, code, dtree); /* send the distance code */ 3244 extra = extra_dbits[code]; 3245 if (extra != 0) { 3246 dist -= base_dist[code]; 3247 send_bits(s, dist, extra); /* send the extra distance bits */ 3248 } 3249 } /* literal or match pair ? */ 3250 3251 /* Check that the overlay between pending_buf and d_buf+l_buf is ok: */ 3252 Assert(s->pending < s->lit_bufsize + 2*lx, "pendingBuf overflow"); 3253 3254 } while (lx < s->last_lit); 3255 3256 send_code(s, END_BLOCK, ltree); 3257 s->last_eob_len = ltree[END_BLOCK].Len; 3258 } 3259 3260 /* =========================================================================== 3261 * Set the data type to ASCII or BINARY, using a crude approximation: 3262 * binary if more than 20% of the bytes are <= 6 or >= 128, ascii otherwise. 3263 * IN assertion: the fields freq of dyn_ltree are set and the total of all 3264 * frequencies does not exceed 64K (to fit in an int on 16 bit machines). 3265 */ 3266 local void set_data_type(s) 3267 deflate_state *s; 3268 { 3269 int n = 0; 3270 unsigned ascii_freq = 0; 3271 unsigned bin_freq = 0; 3272 while (n < 7) bin_freq += s->dyn_ltree[n++].Freq; 3273 while (n < 128) ascii_freq += s->dyn_ltree[n++].Freq; 3274 while (n < LITERALS) bin_freq += s->dyn_ltree[n++].Freq; 3275 s->data_type = (Byte)(bin_freq > (ascii_freq >> 2) ? Z_BINARY : Z_ASCII); 3276 } 3277 3278 /* =========================================================================== 3279 * Reverse the first len bits of a code, using straightforward code (a faster 3280 * method would use a table) 3281 * IN assertion: 1 <= len <= 15 3282 */ 3283 local unsigned bi_reverse(code, len) 3284 unsigned code; /* the value to invert */ 3285 int len; /* its bit length */ 3286 { 3287 unsigned res = 0; 3288 do { 3289 res |= code & 1; 3290 code >>= 1, res <<= 1; 3291 } while (--len > 0); 3292 return res >> 1; 3293 } 3294 3295 /* =========================================================================== 3296 * Flush the bit buffer, keeping at most 7 bits in it. 
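 * For example, with bi_valid == 12 the low byte of bi_buf is written
 * out and 4 bits remain buffered; with bi_valid == 16 both bytes are
 * written and the buffer is emptied.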
3297 */ 3298 local void bi_flush(s) 3299 deflate_state *s; 3300 { 3301 if (s->bi_valid == 16) { 3302 put_short(s, s->bi_buf); 3303 s->bi_buf = 0; 3304 s->bi_valid = 0; 3305 } else if (s->bi_valid >= 8) { 3306 put_byte(s, (Byte)s->bi_buf); 3307 s->bi_buf >>= 8; 3308 s->bi_valid -= 8; 3309 } 3310 } 3311 3312 /* =========================================================================== 3313 * Flush the bit buffer and align the output on a byte boundary 3314 */ 3315 local void bi_windup(s) 3316 deflate_state *s; 3317 { 3318 if (s->bi_valid > 8) { 3319 put_short(s, s->bi_buf); 3320 } else if (s->bi_valid > 0) { 3321 put_byte(s, (Byte)s->bi_buf); 3322 } 3323 s->bi_buf = 0; 3324 s->bi_valid = 0; 3325 #ifdef DEBUG_ZLIB 3326 s->bits_sent = (s->bits_sent+7) & ~7; 3327 #endif 3328 } 3329 3330 /* =========================================================================== 3331 * Copy a stored block, storing first the length and its 3332 * one's complement if requested. 3333 */ 3334 local void copy_block(s, buf, len, header) 3335 deflate_state *s; 3336 charf *buf; /* the input data */ 3337 unsigned len; /* its length */ 3338 int header; /* true if block header must be written */ 3339 { 3340 bi_windup(s); /* align on byte boundary */ 3341 s->last_eob_len = 8; /* enough lookahead for inflate */ 3342 3343 if (header) { 3344 put_short(s, (ush)len); 3345 put_short(s, (ush)~len); 3346 #ifdef DEBUG_ZLIB 3347 s->bits_sent += 2*16; 3348 #endif 3349 } 3350 #ifdef DEBUG_ZLIB 3351 s->bits_sent += (ulg)len<<3; 3352 #endif 3353 /* bundle up the put_byte(s, *buf++) calls */ 3354 zmemcpy(&s->pending_buf[s->pending], buf, len); 3355 s->pending += len; 3356 } 3357 /* --- trees.c */ 3358 3359 /* +++ inflate.c */ 3360 3361 /* inflate.c -- zlib interface to inflate modules 3362 * Copyright (C) 1995-2002 Mark Adler 3363 * For conditions of distribution and use, see copyright notice in zlib.h 3364 */ 3365 3366 /* #include "zutil.h" */ 3367 3368 /* +++ infblock.h */ 3369 3370 /* infblock.h -- header to use infblock.c 3371 * Copyright (C) 1995-2002 Mark Adler 3372 * For conditions of distribution and use, see copyright notice in zlib.h 3373 */ 3374 3375 /* WARNING: this file should *not* be used by applications. It is 3376 part of the implementation of the compression library and is 3377 subject to change. Applications should only use zlib.h. 
3378 */ 3379 3380 struct inflate_blocks_state; 3381 typedef struct inflate_blocks_state FAR inflate_blocks_statef; 3382 3383 extern inflate_blocks_statef * inflate_blocks_new __P(( 3384 z_streamp z, 3385 check_func c, /* check function */ 3386 uInt w)); /* window size */ 3387 3388 extern int inflate_blocks __P(( 3389 inflate_blocks_statef *, 3390 z_streamp , 3391 int)); /* initial return code */ 3392 3393 extern void inflate_blocks_reset __P(( 3394 inflate_blocks_statef *, 3395 z_streamp , 3396 uLongf *)); /* check value on output */ 3397 3398 extern int inflate_blocks_free __P(( 3399 inflate_blocks_statef *, 3400 z_streamp)); 3401 3402 extern void inflate_set_dictionary __P(( 3403 inflate_blocks_statef *s, 3404 const Bytef *d, /* dictionary */ 3405 uInt n)); /* dictionary length */ 3406 3407 extern int inflate_blocks_sync_point __P(( 3408 inflate_blocks_statef *s)); 3409 extern int inflate_addhistory __P(( 3410 inflate_blocks_statef *, 3411 z_streamp)); 3412 3413 extern int inflate_packet_flush __P(( 3414 inflate_blocks_statef *)); 3415 3416 /* --- infblock.h */ 3417 3418 #ifndef NO_DUMMY_DECL 3419 struct inflate_blocks_state {int dummy;}; /* for buggy compilers */ 3420 #endif 3421 3422 typedef enum { 3423 METHOD, /* waiting for method byte */ 3424 FLAG, /* waiting for flag byte */ 3425 DICT4, /* four dictionary check bytes to go */ 3426 DICT3, /* three dictionary check bytes to go */ 3427 DICT2, /* two dictionary check bytes to go */ 3428 DICT1, /* one dictionary check byte to go */ 3429 DICT0, /* waiting for inflateSetDictionary */ 3430 BLOCKS, /* decompressing blocks */ 3431 CHECK4, /* four check bytes to go */ 3432 CHECK3, /* three check bytes to go */ 3433 CHECK2, /* two check bytes to go */ 3434 CHECK1, /* one check byte to go */ 3435 DONE, /* finished check, done */ 3436 BAD} /* got an error--stay here */ 3437 inflate_mode; 3438 3439 /* inflate private state */ 3440 struct internal_state { 3441 3442 /* mode */ 3443 inflate_mode mode; /* current inflate mode */ 3444 3445 /* mode dependent information */ 3446 union { 3447 uInt method; /* if FLAGS, method byte */ 3448 struct { 3449 uLong was; /* computed check value */ 3450 uLong need; /* stream check value */ 3451 } check; /* if CHECK, check values to compare */ 3452 uInt marker; /* if BAD, inflateSync's marker bytes count */ 3453 } sub; /* submode */ 3454 3455 /* mode independent information */ 3456 int nowrap; /* flag for no wrapper */ 3457 uInt wbits; /* log2(window size) (8..15, defaults to 15) */ 3458 inflate_blocks_statef 3459 *blocks; /* current inflate_blocks state */ 3460 3461 }; 3462 3463 3464 int ZEXPORT inflateReset(z) 3465 z_streamp z; 3466 { 3467 if (z == Z_NULL || z->state == Z_NULL) 3468 return Z_STREAM_ERROR; 3469 z->total_in = z->total_out = 0; 3470 z->msg = Z_NULL; 3471 z->state->mode = z->state->nowrap ? 
BLOCKS : METHOD; 3472 inflate_blocks_reset(z->state->blocks, z, Z_NULL); 3473 Tracev((stderr, "inflate: reset\n")); 3474 return Z_OK; 3475 } 3476 3477 3478 int ZEXPORT inflateEnd(z) 3479 z_streamp z; 3480 { 3481 if (z == Z_NULL || z->state == Z_NULL || z->zfree == Z_NULL) 3482 return Z_STREAM_ERROR; 3483 if (z->state->blocks != Z_NULL) 3484 inflate_blocks_free(z->state->blocks, z); 3485 ZFREE(z, z->state); 3486 z->state = Z_NULL; 3487 Tracev((stderr, "inflate: end\n")); 3488 return Z_OK; 3489 } 3490 3491 3492 int ZEXPORT inflateInit2_(z, w, vers, stream_size) 3493 z_streamp z; 3494 int w; 3495 const char *vers; 3496 int stream_size; 3497 { 3498 if (vers == Z_NULL || vers[0] != ZLIB_VERSION[0] || 3499 stream_size != sizeof(z_stream)) 3500 return Z_VERSION_ERROR; 3501 3502 /* initialize state */ 3503 if (z == Z_NULL) 3504 return Z_STREAM_ERROR; 3505 z->msg = Z_NULL; 3506 #ifndef NO_ZCFUNCS 3507 if (z->zalloc == Z_NULL) 3508 { 3509 z->zalloc = zcalloc; 3510 z->opaque = (voidpf)0; 3511 } 3512 if (z->zfree == Z_NULL) z->zfree = zcfree; 3513 #endif 3514 if ((z->state = (struct internal_state FAR *) 3515 ZALLOC(z,1,sizeof(struct internal_state))) == Z_NULL) 3516 return Z_MEM_ERROR; 3517 z->state->blocks = Z_NULL; 3518 3519 /* handle undocumented nowrap option (no zlib header or check) */ 3520 z->state->nowrap = 0; 3521 if (w < 0) 3522 { 3523 w = - w; 3524 z->state->nowrap = 1; 3525 } 3526 3527 /* set window size */ 3528 if (w < 8 || w > 15) 3529 { 3530 inflateEnd(z); 3531 return Z_STREAM_ERROR; 3532 } 3533 z->state->wbits = (uInt)w; 3534 3535 /* create inflate_blocks state */ 3536 if ((z->state->blocks = 3537 inflate_blocks_new(z, z->state->nowrap ? Z_NULL : adler32, (uInt)1 << w)) 3538 == Z_NULL) 3539 { 3540 inflateEnd(z); 3541 return Z_MEM_ERROR; 3542 } 3543 Tracev((stderr, "inflate: allocated\n")); 3544 3545 /* reset state */ 3546 inflateReset(z); 3547 return Z_OK; 3548 } 3549 3550 3551 #if 0 3552 int ZEXPORT inflateInit_(z, vers, stream_size) 3553 z_streamp z; 3554 const char *vers; 3555 int stream_size; 3556 { 3557 return inflateInit2_(z, DEF_WBITS, vers, stream_size); 3558 } 3559 #endif 3560 3561 3562 #define NEEDBYTE {if(z->avail_in==0)goto empty;r=Z_OK;} 3563 #define NEXTBYTE (z->avail_in--,z->total_in++,*z->next_in++) 3564 3565 int ZEXPORT inflate(z, f) 3566 z_streamp z; 3567 int f; 3568 { 3569 int r, r2; 3570 uInt b; 3571 3572 if (z == Z_NULL || z->state == Z_NULL || z->next_in == Z_NULL) 3573 return Z_STREAM_ERROR; 3574 r2 = f == Z_FINISH ? 
Z_BUF_ERROR : Z_OK; 3575 r = Z_BUF_ERROR; 3576 while (1) switch (z->state->mode) 3577 { 3578 case METHOD: 3579 NEEDBYTE 3580 if (((z->state->sub.method = NEXTBYTE) & 0xf) != Z_DEFLATED) 3581 { 3582 z->state->mode = BAD; 3583 z->msg = "unknown compression method"; 3584 z->state->sub.marker = 5; /* can't try inflateSync */ 3585 break; 3586 } 3587 if ((z->state->sub.method >> 4) + 8 > z->state->wbits) 3588 { 3589 z->state->mode = BAD; 3590 z->msg = "invalid window size"; 3591 z->state->sub.marker = 5; /* can't try inflateSync */ 3592 break; 3593 } 3594 z->state->mode = FLAG; 3595 case FLAG: 3596 NEEDBYTE 3597 b = NEXTBYTE; 3598 if (((z->state->sub.method << 8) + b) % 31) 3599 { 3600 z->state->mode = BAD; 3601 z->msg = "incorrect header check"; 3602 z->state->sub.marker = 5; /* can't try inflateSync */ 3603 break; 3604 } 3605 Tracev((stderr, "inflate: zlib header ok\n")); 3606 if (!(b & PRESET_DICT)) 3607 { 3608 z->state->mode = BLOCKS; 3609 break; 3610 } 3611 z->state->mode = DICT4; 3612 case DICT4: 3613 NEEDBYTE 3614 z->state->sub.check.need = (uLong)NEXTBYTE << 24; 3615 z->state->mode = DICT3; 3616 case DICT3: 3617 NEEDBYTE 3618 z->state->sub.check.need += (uLong)NEXTBYTE << 16; 3619 z->state->mode = DICT2; 3620 case DICT2: 3621 NEEDBYTE 3622 z->state->sub.check.need += (uLong)NEXTBYTE << 8; 3623 z->state->mode = DICT1; 3624 case DICT1: 3625 NEEDBYTE 3626 z->state->sub.check.need += (uLong)NEXTBYTE; 3627 z->adler = z->state->sub.check.need; 3628 z->state->mode = DICT0; 3629 return Z_NEED_DICT; 3630 case DICT0: 3631 z->state->mode = BAD; 3632 z->msg = "need dictionary"; 3633 z->state->sub.marker = 0; /* can try inflateSync */ 3634 return Z_STREAM_ERROR; 3635 case BLOCKS: 3636 r = inflate_blocks(z->state->blocks, z, r); 3637 if (f == Z_PACKET_FLUSH && z->avail_in == 0 && z->avail_out != 0) 3638 r = inflate_packet_flush(z->state->blocks); 3639 if (r == Z_DATA_ERROR) 3640 { 3641 z->state->mode = BAD; 3642 z->state->sub.marker = 0; /* can try inflateSync */ 3643 break; 3644 } 3645 if (r == Z_OK) 3646 r = r2; 3647 if (r != Z_STREAM_END) 3648 return r; 3649 r = r2; 3650 inflate_blocks_reset(z->state->blocks, z, &z->state->sub.check.was); 3651 if (z->state->nowrap) 3652 { 3653 z->state->mode = DONE; 3654 break; 3655 } 3656 z->state->mode = CHECK4; 3657 case CHECK4: 3658 NEEDBYTE 3659 z->state->sub.check.need = (uLong)NEXTBYTE << 24; 3660 z->state->mode = CHECK3; 3661 case CHECK3: 3662 NEEDBYTE 3663 z->state->sub.check.need += (uLong)NEXTBYTE << 16; 3664 z->state->mode = CHECK2; 3665 case CHECK2: 3666 NEEDBYTE 3667 z->state->sub.check.need += (uLong)NEXTBYTE << 8; 3668 z->state->mode = CHECK1; 3669 case CHECK1: 3670 NEEDBYTE 3671 z->state->sub.check.need += (uLong)NEXTBYTE; 3672 3673 if (z->state->sub.check.was != z->state->sub.check.need) 3674 { 3675 z->state->mode = BAD; 3676 z->msg = "incorrect data check"; 3677 z->state->sub.marker = 5; /* can't try inflateSync */ 3678 break; 3679 } 3680 Tracev((stderr, "inflate: zlib check ok\n")); 3681 z->state->mode = DONE; 3682 case DONE: 3683 return Z_STREAM_END; 3684 case BAD: 3685 return Z_DATA_ERROR; 3686 default: 3687 return Z_STREAM_ERROR; 3688 } 3689 empty: 3690 if (f != Z_PACKET_FLUSH) 3691 return r; 3692 z->state->mode = BAD; 3693 z->msg = "need more for packet flush"; 3694 z->state->sub.marker = 0; 3695 return Z_DATA_ERROR; 3696 } 3697 3698 3699 #if 0 3700 int ZEXPORT inflateSetDictionary(z, dictionary, dictLength) 3701 z_streamp z; 3702 const Bytef *dictionary; 3703 uInt dictLength; 3704 { 3705 uInt length = dictLength; 3706 3707 if (z == Z_NULL 
|| z->state == Z_NULL || z->state->mode != DICT0) 3708 return Z_STREAM_ERROR; 3709 3710 if (adler32(1L, dictionary, dictLength) != z->adler) return Z_DATA_ERROR; 3711 z->adler = 1L; 3712 3713 if (length >= ((uInt)1<<z->state->wbits)) 3714 { 3715 length = (1<<z->state->wbits)-1; 3716 dictionary += dictLength - length; 3717 } 3718 inflate_set_dictionary(z->state->blocks, dictionary, length); 3719 z->state->mode = BLOCKS; 3720 return Z_OK; 3721 } 3722 #endif 3723 3724 /* 3725 * This subroutine adds the data at next_in/avail_in to the output history 3726 * without performing any output. The output buffer must be "caught up"; 3727 * i.e. no pending output (hence s->read equals s->write), and the state must 3728 * be BLOCKS (i.e. we should be willing to see the start of a series of 3729 * BLOCKS). On exit, the output will also be caught up, and the checksum 3730 * will have been updated if need be. 3731 */ 3732 3733 int inflateIncomp(z) 3734 z_stream *z; 3735 { 3736 if (z->state->mode != BLOCKS) 3737 return Z_DATA_ERROR; 3738 return inflate_addhistory(z->state->blocks, z); 3739 } 3740 3741 #if 0 3742 int ZEXPORT inflateSync(z) 3743 z_streamp z; 3744 { 3745 uInt n; /* number of bytes to look at */ 3746 Bytef *p; /* pointer to bytes */ 3747 uInt m; /* number of marker bytes found in a row */ 3748 uLong r, w; /* temporaries to save total_in and total_out */ 3749 3750 /* set up */ 3751 if (z == Z_NULL || z->state == Z_NULL) 3752 return Z_STREAM_ERROR; 3753 if (z->state->mode != BAD) 3754 { 3755 z->state->mode = BAD; 3756 z->state->sub.marker = 0; 3757 } 3758 if ((n = z->avail_in) == 0) 3759 return Z_BUF_ERROR; 3760 p = z->next_in; 3761 m = z->state->sub.marker; 3762 3763 /* search */ 3764 while (n && m < 4) 3765 { 3766 static const Byte mark[4] = {0, 0, 0xff, 0xff}; 3767 if (*p == mark[m]) 3768 m++; 3769 else if (*p) 3770 m = 0; 3771 else 3772 m = 4 - m; 3773 p++, n--; 3774 } 3775 3776 /* restore */ 3777 z->total_in += p - z->next_in; 3778 z->next_in = p; 3779 z->avail_in = n; 3780 z->state->sub.marker = m; 3781 3782 /* return no joy or set up to restart on a new block */ 3783 if (m != 4) 3784 return Z_DATA_ERROR; 3785 r = z->total_in; w = z->total_out; 3786 inflateReset(z); 3787 z->total_in = r; z->total_out = w; 3788 z->state->mode = BLOCKS; 3789 return Z_OK; 3790 } 3791 #endif 3792 3793 3794 /* Returns true if inflate is currently at the end of a block generated 3795 * by Z_SYNC_FLUSH or Z_FULL_FLUSH. This function is used by one PPP 3796 * implementation to provide an additional safety check. PPP uses Z_SYNC_FLUSH 3797 * but removes the length bytes of the resulting empty stored block. When 3798 * decompressing, PPP checks that at the end of input packet, inflate is 3799 * waiting for these length bytes. 
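 * For example, deflate's Z_SYNC_FLUSH output normally ends with the
 * empty stored block bytes 00 00 ff ff; PPP transmits the packet
 * without those four length bytes, so a well-formed packet leaves
 * inflate waiting for exactly them.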
3800 */ 3801 #if 0 3802 int ZEXPORT inflateSyncPoint(z) 3803 z_streamp z; 3804 { 3805 if (z == Z_NULL || z->state == Z_NULL || z->state->blocks == Z_NULL) 3806 return Z_STREAM_ERROR; 3807 return inflate_blocks_sync_point(z->state->blocks); 3808 } 3809 #endif 3810 #undef NEEDBYTE 3811 #undef NEXTBYTE 3812 /* --- inflate.c */ 3813 3814 /* +++ infblock.c */ 3815 3816 /* infblock.c -- interpret and process block types to last block 3817 * Copyright (C) 1995-2002 Mark Adler 3818 * For conditions of distribution and use, see copyright notice in zlib.h 3819 */ 3820 3821 /* #include "zutil.h" */ 3822 /* #include "infblock.h" */ 3823 3824 /* +++ inftrees.h */ 3825 3826 /* inftrees.h -- header to use inftrees.c 3827 * Copyright (C) 1995-2002 Mark Adler 3828 * For conditions of distribution and use, see copyright notice in zlib.h 3829 */ 3830 3831 /* WARNING: this file should *not* be used by applications. It is 3832 part of the implementation of the compression library and is 3833 subject to change. Applications should only use zlib.h. 3834 */ 3835 3836 /* Huffman code lookup table entry--this entry is four bytes for machines 3837 that have 16-bit pointers (e.g. PC's in the small or medium model). */ 3838 3839 typedef struct inflate_huft_s FAR inflate_huft; 3840 3841 struct inflate_huft_s { 3842 union { 3843 struct { 3844 Byte Exop; /* number of extra bits or operation */ 3845 Byte Bits; /* number of bits in this code or subcode */ 3846 } what; 3847 uInt pad; /* pad structure to a power of 2 (4 bytes for */ 3848 } word; /* 16-bit, 8 bytes for 32-bit int's) */ 3849 uInt base; /* literal, length base, distance base, 3850 or table offset */ 3851 }; 3852 3853 /* Maximum size of dynamic tree. The maximum found in a long but non- 3854 exhaustive search was 1004 huft structures (850 for length/literals 3855 and 154 for distances, the latter actually the result of an 3856 exhaustive search). The actual maximum is not known, but the 3857 value below is more than safe. */ 3858 #define MANY 1440 3859 3860 extern int inflate_trees_bits __P(( 3861 uIntf *, /* 19 code lengths */ 3862 uIntf *, /* bits tree desired/actual depth */ 3863 inflate_huft * FAR *, /* bits tree result */ 3864 inflate_huft *, /* space for trees */ 3865 z_streamp)); /* for messages */ 3866 3867 extern int inflate_trees_dynamic __P(( 3868 uInt, /* number of literal/length codes */ 3869 uInt, /* number of distance codes */ 3870 uIntf *, /* that many (total) code lengths */ 3871 uIntf *, /* literal desired/actual bit depth */ 3872 uIntf *, /* distance desired/actual bit depth */ 3873 inflate_huft * FAR *, /* literal/length tree result */ 3874 inflate_huft * FAR *, /* distance tree result */ 3875 inflate_huft *, /* space for trees */ 3876 z_streamp)); /* for messages */ 3877 3878 extern int inflate_trees_fixed __P(( 3879 uIntf *, /* literal desired/actual bit depth */ 3880 uIntf *, /* distance desired/actual bit depth */ 3881 inflate_huft * FAR *, /* literal/length tree result */ 3882 inflate_huft * FAR *, /* distance tree result */ 3883 z_streamp)); /* for memory allocation */ 3884 /* --- inftrees.h */ 3885 3886 /* +++ infcodes.h */ 3887 3888 /* infcodes.h -- header to use infcodes.c 3889 * Copyright (C) 1995-2002 Mark Adler 3890 * For conditions of distribution and use, see copyright notice in zlib.h 3891 */ 3892 3893 /* WARNING: this file should *not* be used by applications. It is 3894 part of the implementation of the compression library and is 3895 subject to change. Applications should only use zlib.h. 
3896 */ 3897 3898 struct inflate_codes_state; 3899 typedef struct inflate_codes_state FAR inflate_codes_statef; 3900 3901 extern inflate_codes_statef *inflate_codes_new __P(( 3902 uInt, uInt, 3903 inflate_huft *, inflate_huft *, 3904 z_streamp )); 3905 3906 extern int inflate_codes __P(( 3907 inflate_blocks_statef *, 3908 z_streamp , 3909 int)); 3910 3911 extern void inflate_codes_free __P(( 3912 inflate_codes_statef *, 3913 z_streamp )); 3914 3915 /* --- infcodes.h */ 3916 3917 /* +++ infutil.h */ 3918 3919 /* infutil.h -- types and macros common to blocks and codes 3920 * Copyright (C) 1995-2002 Mark Adler 3921 * For conditions of distribution and use, see copyright notice in zlib.h 3922 */ 3923 3924 /* WARNING: this file should *not* be used by applications. It is 3925 part of the implementation of the compression library and is 3926 subject to change. Applications should only use zlib.h. 3927 */ 3928 3929 #ifndef _INFUTIL_H 3930 #define _INFUTIL_H 3931 3932 typedef enum { 3933 TYPE, /* get type bits (3, including end bit) */ 3934 LENS, /* get lengths for stored */ 3935 STORED, /* processing stored block */ 3936 TABLE, /* get table lengths */ 3937 BTREE, /* get bit lengths tree for a dynamic block */ 3938 DTREE, /* get length, distance trees for a dynamic block */ 3939 CODES, /* processing fixed or dynamic block */ 3940 DRY, /* output remaining window bytes */ 3941 DONEB, /* finished last block, done */ 3942 BADB} /* got a data error--stuck here */ 3943 inflate_block_mode; 3944 3945 /* inflate blocks semi-private state */ 3946 struct inflate_blocks_state { 3947 3948 /* mode */ 3949 inflate_block_mode mode; /* current inflate_block mode */ 3950 3951 /* mode dependent information */ 3952 union { 3953 uInt left; /* if STORED, bytes left to copy */ 3954 struct { 3955 uInt table; /* table lengths (14 bits) */ 3956 uInt index; /* index into blens (or border) */ 3957 uIntf *blens; /* bit lengths of codes */ 3958 uInt bb; /* bit length tree depth */ 3959 inflate_huft *tb; /* bit length decoding tree */ 3960 } trees; /* if DTREE, decoding info for trees */ 3961 struct { 3962 inflate_codes_statef 3963 *codes; 3964 } decode; /* if CODES, current state */ 3965 } sub; /* submode */ 3966 uInt last; /* true if this block is the last block */ 3967 3968 /* mode independent information */ 3969 uInt bitk; /* bits in bit buffer */ 3970 uLong bitb; /* bit buffer */ 3971 inflate_huft *hufts; /* single malloc for tree space */ 3972 Bytef *window; /* sliding window */ 3973 Bytef *end; /* one byte after sliding window */ 3974 Bytef *read; /* window read pointer */ 3975 Bytef *write; /* window write pointer */ 3976 check_func checkfn; /* check function */ 3977 uLong check; /* check on output */ 3978 3979 }; 3980 3981 3982 /* defines for inflate input/output */ 3983 /* update pointers and return */ 3984 #define UPDBITS {s->bitb=b;s->bitk=k;} 3985 #define UPDIN {z->avail_in=n;z->total_in+=p-z->next_in;z->next_in=p;} 3986 #define UPDOUT {s->write=q;} 3987 #define UPDATE {UPDBITS UPDIN UPDOUT} 3988 #define LEAVE {UPDATE return inflate_flush(s,z,r);} 3989 /* get bytes and bits */ 3990 #define LOADIN {p=z->next_in;n=z->avail_in;b=s->bitb;k=s->bitk;} 3991 #define NEEDBYTE {if(n)r=Z_OK;else LEAVE} 3992 #define NEXTBYTE (n--,*p++) 3993 #define NEEDBITS(j) {while(k<(j)){NEEDBYTE;b|=((uLong)NEXTBYTE)<<k;k+=8;}} 3994 #define DUMPBITS(j) {b>>=(j);k-=(j);} 3995 /* output bytes */ 3996 #define WAVAIL (uInt)(q<s->read?s->read-q-1:s->end-q) 3997 #define LOADOUT {q=s->write;m=(uInt)WAVAIL;} 3998 #define WRAP 
{if(q==s->end&&s->read!=s->window){q=s->window;m=(uInt)WAVAIL;}}
3999 #define FLUSH {UPDOUT r=inflate_flush(s,z,r); LOADOUT}
4000 #define NEEDOUT {if(m==0){WRAP if(m==0){FLUSH WRAP if(m==0) LEAVE}}r=Z_OK;}
4001 #define OUTBYTE(a) {*q++=(Byte)(a);m--;}
4002 /* load local pointers */
4003 #define LOAD {LOADIN LOADOUT}
4004
4005 /* masks for lower bits (size given to avoid silly warnings with Visual C++) */
4006 extern uInt inflate_mask[17];
4007
4008 /* copy as much as possible from the sliding window to the output area */
4009 extern int inflate_flush __P((
4010 inflate_blocks_statef *,
4011 z_streamp ,
4012 int));
4013
4014 #ifndef NO_DUMMY_DECL
4015 struct internal_state {int dummy;}; /* for buggy compilers */
4016 #endif
4017
4018 #endif
4019 /* --- infutil.h */
4020
4021 #ifndef NO_DUMMY_DECL
4022 struct inflate_codes_state {int dummy;}; /* for buggy compilers */
4023 #endif
4024
4025 /* simplify the use of the inflate_huft type with some defines */
4026 #define exop word.what.Exop
4027 #define bits word.what.Bits
4028
4029 /* Table for deflate from PKZIP's appnote.txt. */
4030 local const uInt border[] = { /* Order of the bit length code lengths */
4031 16, 17, 18, 0, 8, 7, 9, 6, 10, 5, 11, 4, 12, 3, 13, 2, 14, 1, 15};
4032
4033 /*
4034 Notes beyond the 1.93a appnote.txt:
4035
4036 1. Distance pointers never point before the beginning of the output
4037 stream.
4038 2. Distance pointers can point back across blocks, up to 32k away.
4039 3. There is an implied maximum of 7 bits for the bit length table and
4040 15 bits for the actual data.
4041 4. If only one code exists, then it is encoded using one bit. (Zero
4042 would be more efficient, but perhaps a little confusing.) If two
4043 codes exist, they are coded using one bit each (0 and 1).
4044 5. There is no way of sending zero distance codes--a dummy must be
4045 sent if there are none. (History: a pre 2.0 version of PKZIP would
4046 store blocks with no distance codes, but this was discovered to be
4047 too harsh a criterion.) Valid only for 1.93a. 2.04c does allow
4048 zero distance codes, which are sent as one code of zero bits in
4049 length.
4050 6. There are up to 286 literal/length codes. Code 256 represents the
4051 end-of-block. Note however that the static length tree defines
4052 288 codes just to fill out the Huffman codes. Codes 286 and 287
4053 cannot be used though, since there is no length base or extra bits
4054 defined for them. Similarly, there are up to 30 distance codes.
4055 However, static trees define 32 codes (all 5 bits) to fill out the
4056 Huffman codes, but the last two had better not show up in the data.
4057 7. Unzip can check dynamic Huffman blocks for complete code sets.
4058 The exception is that a single code would not be complete (see #4).
4059 8. The five bits following the block type are really the number of
4060 literal codes sent minus 257.
4061 9. Length codes 8,16,16 are interpreted as 13 length codes of 8 bits
4062 (1+6+6). Therefore, to output three times the length, you output
4063 three codes (1+1+1), whereas to output four times the same length,
4064 you only need two codes (1+3). Hmm.
4065 10. In the tree reconstruction algorithm, Code = Code + Increment
4066 only if BitLength(i) is not zero. (Pretty obvious.)
4067 11. Correction: 4 Bits: # of Bit Length codes - 4 (4 - 19)
4068 12. Note: length code 284 can represent 227-258, but length code 285
4069 really is 258. The last length deserves its own, short code
4070 since it gets used a lot in very redundant files.
The length 4071 258 is special since 258 - 3 (the min match length) is 255. 4072 13. The literal/length and distance code bit lengths are read as a 4073 single stream of lengths. It is possible (and advantageous) for 4074 a repeat code (16, 17, or 18) to go across the boundary between 4075 the two sets of lengths. 4076 */ 4077 4078 4079 void inflate_blocks_reset(s, z, c) 4080 inflate_blocks_statef *s; 4081 z_streamp z; 4082 uLongf *c; 4083 { 4084 if (c != Z_NULL) 4085 *c = s->check; 4086 if (s->mode == BTREE || s->mode == DTREE) 4087 ZFREE(z, s->sub.trees.blens); 4088 if (s->mode == CODES) 4089 inflate_codes_free(s->sub.decode.codes, z); 4090 s->mode = TYPE; 4091 s->bitk = 0; 4092 s->bitb = 0; 4093 s->read = s->write = s->window; 4094 if (s->checkfn != Z_NULL) 4095 z->adler = s->check = (*s->checkfn)(0L, (const Bytef *)Z_NULL, 0); 4096 Tracev((stderr, "inflate: blocks reset\n")); 4097 } 4098 4099 4100 inflate_blocks_statef *inflate_blocks_new(z, c, w) 4101 z_streamp z; 4102 check_func c; 4103 uInt w; 4104 { 4105 inflate_blocks_statef *s; 4106 4107 if ((s = (inflate_blocks_statef *)ZALLOC 4108 (z,1,sizeof(struct inflate_blocks_state))) == Z_NULL) 4109 return s; 4110 if ((s->hufts = 4111 (inflate_huft *)ZALLOC(z, sizeof(inflate_huft), MANY)) == Z_NULL) 4112 { 4113 ZFREE(z, s); 4114 return Z_NULL; 4115 } 4116 if ((s->window = (Bytef *)ZALLOC(z, 1, w)) == Z_NULL) 4117 { 4118 ZFREE(z, s->hufts); 4119 ZFREE(z, s); 4120 return Z_NULL; 4121 } 4122 s->end = s->window + w; 4123 s->checkfn = c; 4124 s->mode = TYPE; 4125 Tracev((stderr, "inflate: blocks allocated\n")); 4126 inflate_blocks_reset(s, z, Z_NULL); 4127 return s; 4128 } 4129 4130 4131 int inflate_blocks(s, z, r) 4132 inflate_blocks_statef *s; 4133 z_streamp z; 4134 int r; 4135 { 4136 uInt t; /* temporary storage */ 4137 uLong b; /* bit buffer */ 4138 uInt k; /* bits in bit buffer */ 4139 Bytef *p; /* input data pointer */ 4140 uInt n; /* bytes available there */ 4141 Bytef *q; /* output window write pointer */ 4142 uInt m; /* bytes to end of window or read pointer */ 4143 4144 /* copy input/output information to locals (UPDATE macro restores) */ 4145 LOAD 4146 4147 /* process input based on current state */ 4148 while (1) switch (s->mode) 4149 { 4150 case TYPE: 4151 NEEDBITS(3) 4152 t = (uInt)b & 7; 4153 s->last = t & 1; 4154 switch (t >> 1) 4155 { 4156 case 0: /* stored */ 4157 Tracev((stderr, "inflate: stored block%s\n", 4158 s->last ? " (last)" : "")); 4159 DUMPBITS(3) 4160 t = k & 7; /* go to byte boundary */ 4161 DUMPBITS(t) 4162 s->mode = LENS; /* get length of stored block */ 4163 break; 4164 case 1: /* fixed */ 4165 Tracev((stderr, "inflate: fixed codes block%s\n", 4166 s->last ? " (last)" : "")); 4167 { 4168 uInt bl, bd; 4169 inflate_huft *tl, *td; 4170 4171 inflate_trees_fixed(&bl, &bd, &tl, &td, z); 4172 s->sub.decode.codes = inflate_codes_new(bl, bd, tl, td, z); 4173 if (s->sub.decode.codes == Z_NULL) 4174 { 4175 r = Z_MEM_ERROR; 4176 LEAVE 4177 } 4178 } 4179 DUMPBITS(3) 4180 s->mode = CODES; 4181 break; 4182 case 2: /* dynamic */ 4183 Tracev((stderr, "inflate: dynamic codes block%s\n", 4184 s->last ? 
" (last)" : "")); 4185 DUMPBITS(3) 4186 s->mode = TABLE; 4187 break; 4188 case 3: /* illegal */ 4189 DUMPBITS(3) 4190 s->mode = BADB; 4191 z->msg = "invalid block type"; 4192 r = Z_DATA_ERROR; 4193 LEAVE 4194 } 4195 break; 4196 case LENS: 4197 NEEDBITS(32) 4198 if ((((~b) >> 16) & 0xffff) != (b & 0xffff)) 4199 { 4200 s->mode = BADB; 4201 z->msg = "invalid stored block lengths"; 4202 r = Z_DATA_ERROR; 4203 LEAVE 4204 } 4205 s->sub.left = (uInt)b & 0xffff; 4206 b = k = 0; /* dump bits */ 4207 Tracev((stderr, "inflate: stored length %u\n", s->sub.left)); 4208 s->mode = s->sub.left ? STORED : (s->last ? DRY : TYPE); 4209 break; 4210 case STORED: 4211 if (n == 0) 4212 LEAVE 4213 NEEDOUT 4214 t = s->sub.left; 4215 if (t > n) t = n; 4216 if (t > m) t = m; 4217 zmemcpy(q, p, t); 4218 p += t; n -= t; 4219 q += t; m -= t; 4220 if ((s->sub.left -= t) != 0) 4221 break; 4222 Tracev((stderr, "inflate: stored end, %lu total out\n", 4223 z->total_out + (q >= s->read ? q - s->read : 4224 (s->end - s->read) + (q - s->window)))); 4225 s->mode = s->last ? DRY : TYPE; 4226 break; 4227 case TABLE: 4228 NEEDBITS(14) 4229 s->sub.trees.table = t = (uInt)b & 0x3fff; 4230 #ifndef PKZIP_BUG_WORKAROUND 4231 if ((t & 0x1f) > 29 || ((t >> 5) & 0x1f) > 29) 4232 { 4233 s->mode = BADB; 4234 z->msg = "too many length or distance symbols"; 4235 r = Z_DATA_ERROR; 4236 LEAVE 4237 } 4238 #endif 4239 t = 258 + (t & 0x1f) + ((t >> 5) & 0x1f); 4240 if ((s->sub.trees.blens = (uIntf*)ZALLOC(z, t, sizeof(uInt))) == Z_NULL) 4241 { 4242 r = Z_MEM_ERROR; 4243 LEAVE 4244 } 4245 DUMPBITS(14) 4246 s->sub.trees.index = 0; 4247 Tracev((stderr, "inflate: table sizes ok\n")); 4248 s->mode = BTREE; 4249 case BTREE: 4250 while (s->sub.trees.index < 4 + (s->sub.trees.table >> 10)) 4251 { 4252 NEEDBITS(3) 4253 s->sub.trees.blens[border[s->sub.trees.index++]] = (uInt)b & 7; 4254 DUMPBITS(3) 4255 } 4256 while (s->sub.trees.index < 19) 4257 s->sub.trees.blens[border[s->sub.trees.index++]] = 0; 4258 s->sub.trees.bb = 7; 4259 t = inflate_trees_bits(s->sub.trees.blens, &s->sub.trees.bb, 4260 &s->sub.trees.tb, s->hufts, z); 4261 if (t != Z_OK) 4262 { 4263 r = t; 4264 if (r == Z_DATA_ERROR) 4265 { 4266 ZFREE(z, s->sub.trees.blens); 4267 s->mode = BADB; 4268 } 4269 LEAVE 4270 } 4271 s->sub.trees.index = 0; 4272 Tracev((stderr, "inflate: bits tree ok\n")); 4273 s->mode = DTREE; 4274 case DTREE: 4275 while (t = s->sub.trees.table, 4276 s->sub.trees.index < 258 + (t & 0x1f) + ((t >> 5) & 0x1f)) 4277 { 4278 inflate_huft *h; 4279 uInt i, j, c; 4280 4281 t = s->sub.trees.bb; 4282 NEEDBITS(t) 4283 h = s->sub.trees.tb + ((uInt)b & inflate_mask[t]); 4284 t = h->bits; 4285 c = h->base; 4286 if (c < 16) 4287 { 4288 DUMPBITS(t) 4289 s->sub.trees.blens[s->sub.trees.index++] = c; 4290 } 4291 else /* c == 16..18 */ 4292 { 4293 i = c == 18 ? 7 : c - 14; 4294 j = c == 18 ? 11 : 3; 4295 NEEDBITS(t + i) 4296 DUMPBITS(t) 4297 j += (uInt)b & inflate_mask[i]; 4298 DUMPBITS(i) 4299 i = s->sub.trees.index; 4300 t = s->sub.trees.table; 4301 if (i + j > 258 + (t & 0x1f) + ((t >> 5) & 0x1f) || 4302 (c == 16 && i < 1)) 4303 { 4304 ZFREE(z, s->sub.trees.blens); 4305 s->mode = BADB; 4306 z->msg = "invalid bit length repeat"; 4307 r = Z_DATA_ERROR; 4308 LEAVE 4309 } 4310 c = c == 16 ? 
s->sub.trees.blens[i - 1] : 0; 4311 do { 4312 s->sub.trees.blens[i++] = c; 4313 } while (--j); 4314 s->sub.trees.index = i; 4315 } 4316 } 4317 s->sub.trees.tb = Z_NULL; 4318 { 4319 uInt bl, bd; 4320 inflate_huft *tl, *td; 4321 inflate_codes_statef *c; 4322 4323 bl = 9; /* must be <= 9 for lookahead assumptions */ 4324 bd = 6; /* must be <= 9 for lookahead assumptions */ 4325 t = s->sub.trees.table; 4326 t = inflate_trees_dynamic(257 + (t & 0x1f), 1 + ((t >> 5) & 0x1f), 4327 s->sub.trees.blens, &bl, &bd, &tl, &td, 4328 s->hufts, z); 4329 if (t != Z_OK) 4330 { 4331 if (t == (uInt)Z_DATA_ERROR) 4332 { 4333 ZFREE(z, s->sub.trees.blens); 4334 s->mode = BADB; 4335 } 4336 r = t; 4337 LEAVE 4338 } 4339 Tracev((stderr, "inflate: trees ok\n")); 4340 if ((c = inflate_codes_new(bl, bd, tl, td, z)) == Z_NULL) 4341 { 4342 r = Z_MEM_ERROR; 4343 LEAVE 4344 } 4345 s->sub.decode.codes = c; 4346 } 4347 ZFREE(z, s->sub.trees.blens); 4348 s->mode = CODES; 4349 case CODES: 4350 UPDATE 4351 if ((r = inflate_codes(s, z, r)) != Z_STREAM_END) 4352 return inflate_flush(s, z, r); 4353 r = Z_OK; 4354 inflate_codes_free(s->sub.decode.codes, z); 4355 LOAD 4356 Tracev((stderr, "inflate: codes end, %lu total out\n", 4357 z->total_out + (q >= s->read ? q - s->read : 4358 (s->end - s->read) + (q - s->window)))); 4359 if (!s->last) 4360 { 4361 s->mode = TYPE; 4362 break; 4363 } 4364 s->mode = DRY; 4365 case DRY: 4366 FLUSH 4367 if (s->read != s->write) 4368 LEAVE 4369 s->mode = DONEB; 4370 case DONEB: 4371 r = Z_STREAM_END; 4372 LEAVE 4373 case BADB: 4374 r = Z_DATA_ERROR; 4375 LEAVE 4376 default: 4377 r = Z_STREAM_ERROR; 4378 LEAVE 4379 } 4380 } 4381 4382 4383 int inflate_blocks_free(s, z) 4384 inflate_blocks_statef *s; 4385 z_streamp z; 4386 { 4387 inflate_blocks_reset(s, z, Z_NULL); 4388 ZFREE(z, s->window); 4389 ZFREE(z, s->hufts); 4390 ZFREE(z, s); 4391 Tracev((stderr, "inflate: blocks freed\n")); 4392 return Z_OK; 4393 } 4394 4395 4396 #if 0 4397 void inflate_set_dictionary(s, d, n) 4398 inflate_blocks_statef *s; 4399 const Bytef *d; 4400 uInt n; 4401 { 4402 zmemcpy(s->window, d, n); 4403 s->read = s->write = s->window + n; 4404 } 4405 #endif 4406 4407 /* 4408 * This subroutine adds the data at next_in/avail_in to the output history 4409 * without performing any output. The output buffer must be "caught up"; 4410 * i.e. no pending output (hence s->read equals s->write), and the state must 4411 * be BLOCKS (i.e. we should be willing to see the start of a series of 4412 * BLOCKS). On exit, the output will also be caught up, and the checksum 4413 * will have been updated if need be. 4414 */ 4415 int inflate_addhistory(s, z) 4416 inflate_blocks_statef *s; 4417 z_stream *z; 4418 { 4419 uLong b; /* bit buffer */ /* NOT USED HERE */ 4420 uInt k; /* bits in bit buffer */ /* NOT USED HERE */ 4421 uInt t; /* temporary storage */ 4422 Bytef *p; /* input data pointer */ 4423 uInt n; /* bytes available there */ 4424 Bytef *q; /* output window write pointer */ 4425 uInt m; /* bytes to end of window or read pointer */ 4426 4427 if (s->read != s->write) 4428 return Z_STREAM_ERROR; 4429 if (s->mode != TYPE) 4430 return Z_DATA_ERROR; 4431 4432 /* we're ready to rock */ 4433 LOAD 4434 /* while there is input ready, copy to output buffer, moving 4435 * pointers as needed. 4436 */ 4437 while (n) { 4438 t = n; /* how many to do */ 4439 /* is there room until end of buffer? 
*/
4440 if (t > m) t = m;
4441 /* update check information */
4442 if (s->checkfn != Z_NULL)
4443 s->check = (*s->checkfn)(s->check, q, t);
4444 zmemcpy(q, p, t);
4445 q += t;
4446 p += t;
4447 n -= t;
4448 z->total_out += t;
4449 s->read = q; /* drag read pointer forward */
4450 /* WRAP */ /* expand the WRAP macro by hand to handle s->read */
4451 if (q == s->end) {
4452 s->read = q = s->window;
4453 m = WAVAIL;
4454 }
4455 }
4456 UPDATE
4457 return Z_OK;
4458 }
4459
4460
4461 /*
4462 * At the end of a Deflate-compressed PPP packet, we expect to have seen
4463 * a `stored' block type value but not the (zero) length bytes.
4464 */
4465 int inflate_packet_flush(s)
4466 inflate_blocks_statef *s;
4467 {
4468 if (s->mode != LENS)
4469 return Z_DATA_ERROR;
4470 s->mode = TYPE;
4471 return Z_OK;
4472 }
4473
4474 /* Returns true if inflate is currently at the end of a block generated
4475 * by Z_SYNC_FLUSH or Z_FULL_FLUSH.
4476 * IN assertion: s != Z_NULL
4477 */
4478 #if 0
4479 int inflate_blocks_sync_point(s)
4480 inflate_blocks_statef *s;
4481 {
4482 return s->mode == LENS;
4483 }
4484 #endif
4485 /* --- infblock.c */
4486
4487
4488 /* +++ inftrees.c */
4489
4490 /* inftrees.c -- generate Huffman trees for efficient decoding
4491 * Copyright (C) 1995-2002 Mark Adler
4492 * For conditions of distribution and use, see copyright notice in zlib.h
4493 */
4494
4495 /* #include "zutil.h" */
4496 /* #include "inftrees.h" */
4497
4498 #if !defined(BUILDFIXED) && !defined(STDC)
4499 # define BUILDFIXED /* non ANSI compilers may not accept inffixed.h */
4500 #endif
4501
4502 const char inflate_copyright[] =
4503 " inflate 1.1.4 Copyright 1995-2002 Mark Adler ";
4504 /*
4505 If you use the zlib library in a product, an acknowledgment is welcome
4506 in the documentation of your product. If for some reason you cannot
4507 include such an acknowledgment, I would appreciate that you keep this
4508 copyright string in the executable of your product.
4509 */
4510
4511 #ifndef NO_DUMMY_DECL
4512 struct internal_state {int dummy;}; /* for buggy compilers */
4513 #endif
4514
4515 /* simplify the use of the inflate_huft type with some defines */
4516 #define exop word.what.Exop
4517 #define bits word.what.Bits
4518
4519
4520 local int huft_build __P((
4521 uIntf *, /* code lengths in bits */
4522 uInt, /* number of codes */
4523 uInt, /* number of "simple" codes */
4524 const uIntf *, /* list of base values for non-simple codes */
4525 const uIntf *, /* list of extra bits for non-simple codes */
4526 inflate_huft * FAR*,/* result: starting table */
4527 uIntf *, /* maximum lookup bits (returns actual) */
4528 inflate_huft *, /* space for trees */
4529 uInt *, /* hufts used in space */
4530 uIntf * )); /* space for values */
4531
4532 /* Tables for deflate from PKZIP's appnote.txt.
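   For example, length code 266 maps to cplens[266 - 257] == 13 with
   cplext[266 - 257] == 1 extra bit, so it covers match lengths 13..14.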
*/
4533 local const uInt cplens[31] = { /* Copy lengths for literal codes 257..285 */
4534 3, 4, 5, 6, 7, 8, 9, 10, 11, 13, 15, 17, 19, 23, 27, 31,
4535 35, 43, 51, 59, 67, 83, 99, 115, 131, 163, 195, 227, 258, 0, 0};
4536 /* see note #12 above about 258 */
4537 local const uInt cplext[31] = { /* Extra bits for literal codes 257..285 */
4538 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 2, 2, 2, 2,
4539 3, 3, 3, 3, 4, 4, 4, 4, 5, 5, 5, 5, 0, 112, 112}; /* 112==invalid */
4540 local const uInt cpdist[30] = { /* Copy offsets for distance codes 0..29 */
4541 1, 2, 3, 4, 5, 7, 9, 13, 17, 25, 33, 49, 65, 97, 129, 193,
4542 257, 385, 513, 769, 1025, 1537, 2049, 3073, 4097, 6145,
4543 8193, 12289, 16385, 24577};
4544 local const uInt cpdext[30] = { /* Extra bits for distance codes */
4545 0, 0, 0, 0, 1, 1, 2, 2, 3, 3, 4, 4, 5, 5, 6, 6,
4546 7, 7, 8, 8, 9, 9, 10, 10, 11, 11,
4547 12, 12, 13, 13};
4548
4549 /*
4550 Huffman code decoding is performed using a multi-level table lookup.
4551 The fastest way to decode is to simply build a lookup table whose
4552 size is determined by the longest code. However, the time it takes
4553 to build this table can also be a factor if the data being decoded
4554 is not very long. The most common codes are necessarily the
4555 shortest codes, so those codes dominate the decoding time, and hence
4556 the speed. The idea is you can have a shorter table that decodes the
4557 shorter, more probable codes, and then point to subsidiary tables for
4558 the longer codes. The time it costs to decode the longer codes is
4559 then traded against the time it takes to make longer tables.
4560
4561 The results of this trade are in the variables lbits and dbits
4562 below. lbits is the number of bits the first level table for literal/
4563 length codes can decode in one step, and dbits is the same thing for
4564 the distance codes. Subsequent tables are also less than or equal to
4565 those sizes. These values may be adjusted either when all of the
4566 codes are shorter than that, in which case the longest code length in
4567 bits is used, or when the shortest code is *longer* than the requested
4568 table size, in which case the length of the shortest code in bits is
4569 used.
4570
4571 There are two different values for the two tables, since they code a
4572 different number of possibilities each. The literal/length table
4573 codes 286 possible values, or in a flat code, a little over eight
4574 bits. The distance table codes 30 possible values, or a little less
4575 than five bits, flat. The optimum values for speed end up being
4576 about one bit more than those, so lbits is 8+1 and dbits is 5+1.
4577 The optimum values may differ though from machine to machine, and
4578 possibly even between compilers. Your mileage may vary.
4579 */
4580
4581
4582 /* If BMAX needs to be larger than 16, then h and x[] should be uLong.
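   With BMAX == 15 and lbits == 9, a first-level literal/length table
   has at most 1 << 9 == 512 entries, whereas a single flat table
   indexed by the longest possible code would need 1 << 15.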
*/ 4583 #define BMAX 15 /* maximum bit length of any code */ 4584 4585 local int huft_build(b, n, s, d, e, t, m, hp, hn, v) 4586 uIntf *b; /* code lengths in bits (all assumed <= BMAX) */ 4587 uInt n; /* number of codes (assumed <= 288) */ 4588 uInt s; /* number of simple-valued codes (0..s-1) */ 4589 const uIntf *d; /* list of base values for non-simple codes */ 4590 const uIntf *e; /* list of extra bits for non-simple codes */ 4591 inflate_huft * FAR *t; /* result: starting table */ 4592 uIntf *m; /* maximum lookup bits, returns actual */ 4593 inflate_huft *hp; /* space for trees */ 4594 uInt *hn; /* hufts used in space */ 4595 uIntf *v; /* working area: values in order of bit length */ 4596 /* Given a list of code lengths and a maximum table size, make a set of 4597 tables to decode that set of codes. Return Z_OK on success, Z_BUF_ERROR 4598 if the given code set is incomplete (the tables are still built in this 4599 case), or Z_DATA_ERROR if the input is invalid. */ 4600 { 4601 4602 uInt a; /* counter for codes of length k */ 4603 uInt c[BMAX+1]; /* bit length count table */ 4604 uInt f; /* i repeats in table every f entries */ 4605 int g; /* maximum code length */ 4606 int h; /* table level */ 4607 uInt i; /* counter, current code */ 4608 uInt j; /* counter */ 4609 int k; /* number of bits in current code */ 4610 int l; /* bits per table (returned in m) */ 4611 uInt mask; /* (1 << w) - 1, to avoid cc -O bug on HP */ 4612 uIntf *p; /* pointer into c[], b[], or v[] */ 4613 inflate_huft *q; /* points to current table */ 4614 struct inflate_huft_s r; /* table entry for structure assignment */ 4615 inflate_huft *u[BMAX]; /* table stack */ 4616 int w; /* bits before this table == (l * h) */ 4617 uInt x[BMAX+1]; /* bit offsets, then code stack */ 4618 uIntf *xp; /* pointer into x */ 4619 int y; /* number of dummy codes added */ 4620 uInt z; /* number of entries in current table */ 4621 4622 r.base = 0; /* XXX gcc */ 4623 4624 /* Generate counts for each bit length */ 4625 p = c; 4626 #define C0 *p++ = 0; 4627 #define C2 C0 C0 C0 C0 4628 #define C4 C2 C2 C2 C2 4629 C4 /* clear c[]--assume BMAX+1 is 16 */ 4630 p = b; i = n; 4631 do { 4632 c[*p++]++; /* assume all entries <= BMAX */ 4633 } while (--i); 4634 if (c[0] == n) /* null input--all zero length codes */ 4635 { 4636 *t = (inflate_huft *)Z_NULL; 4637 *m = 0; 4638 return Z_OK; 4639 } 4640 4641 4642 /* Find minimum and maximum length, bound *m by those */ 4643 l = *m; 4644 for (j = 1; j <= BMAX; j++) 4645 if (c[j]) 4646 break; 4647 k = j; /* minimum code length */ 4648 if ((uInt)l < j) 4649 l = j; 4650 for (i = BMAX; i; i--) 4651 if (c[i]) 4652 break; 4653 g = i; /* maximum code length */ 4654 if ((uInt)l > i) 4655 l = i; 4656 *m = l; 4657 4658 4659 /* Adjust last length count to fill out codes, if needed */ 4660 for (y = 1 << j; j < i; j++, y <<= 1) 4661 if ((y -= c[j]) < 0) 4662 return Z_DATA_ERROR; 4663 if ((y -= c[i]) < 0) 4664 return Z_DATA_ERROR; 4665 c[i] += y; 4666 4667 4668 /* Generate starting offsets into the value table for each length */ 4669 x[1] = j = 0; 4670 p = c + 1; xp = x + 2; 4671 while (--i) { /* note that i == g from above */ 4672 *xp++ = (j += *p++); 4673 } 4674 4675 4676 /* Make a table of values in order of bit lengths */ 4677 p = b; i = 0; 4678 do { 4679 if ((j = *p++) != 0) 4680 v[x[j]++] = i; 4681 } while (++i < n); 4682 n = x[g]; /* set n to length of v */ 4683 4684 4685 /* Generate the Huffman codes and for each, make the table entries */ 4686 x[0] = i = 0; /* first Huffman code is zero */ 4687 p = v; /* grab 
values in bit order */ 4688 h = -1; /* no tables yet--level -1 */ 4689 w = -l; /* bits decoded == (l * h) */ 4690 u[0] = (inflate_huft *)Z_NULL; /* just to keep compilers happy */ 4691 q = (inflate_huft *)Z_NULL; /* ditto */ 4692 z = 0; /* ditto */ 4693 4694 /* go through the bit lengths (k already is bits in shortest code) */ 4695 for (; k <= g; k++) 4696 { 4697 a = c[k]; 4698 while (a--) 4699 { 4700 /* here i is the Huffman code of length k bits for value *p */ 4701 /* make tables up to required level */ 4702 while (k > w + l) 4703 { 4704 h++; 4705 w += l; /* previous table always l bits */ 4706 4707 /* compute minimum size table less than or equal to l bits */ 4708 z = g - w; 4709 z = z > (uInt)l ? l : z; /* table size upper limit */ 4710 if ((f = 1 << (j = k - w)) > a + 1) /* try a k-w bit table */ 4711 { /* too few codes for k-w bit table */ 4712 f -= a + 1; /* deduct codes from patterns left */ 4713 xp = c + k; 4714 if (j < z) 4715 while (++j < z) /* try smaller tables up to z bits */ 4716 { 4717 if ((f <<= 1) <= *++xp) 4718 break; /* enough codes to use up j bits */ 4719 f -= *xp; /* else deduct codes from patterns */ 4720 } 4721 } 4722 z = 1 << j; /* table entries for j-bit table */ 4723 4724 /* allocate new table */ 4725 if (*hn + z > MANY) /* (note: doesn't matter for fixed) */ 4726 return Z_DATA_ERROR; /* overflow of MANY */ 4727 u[h] = q = hp + *hn; 4728 *hn += z; 4729 4730 /* connect to last table, if there is one */ 4731 if (h) 4732 { 4733 x[h] = i; /* save pattern for backing up */ 4734 r.bits = (Byte)l; /* bits to dump before this table */ 4735 r.exop = (Byte)j; /* bits in this table */ 4736 j = i >> (w - l); 4737 r.base = (uInt)(q - u[h-1] - j); /* offset to this table */ 4738 u[h-1][j] = r; /* connect to last table */ 4739 } 4740 else 4741 *t = q; /* first table is returned result */ 4742 } 4743 4744 /* set up table entry in r */ 4745 r.bits = (Byte)(k - w); 4746 if (p >= v + n) 4747 r.exop = 128 + 64; /* out of values--invalid code */ 4748 else if (*p < s) 4749 { 4750 r.exop = (Byte)(*p < 256 ? 0 : 32 + 64); /* 256 is end-of-block */ 4751 r.base = *p++; /* simple code is just the value */ 4752 } 4753 else 4754 { 4755 r.exop = (Byte)(e[*p - s] + 16 + 64);/* non-simple--look up in lists */ 4756 r.base = d[*p++ - s]; 4757 } 4758 4759 /* fill code-like entries with r */ 4760 f = 1 << (k - w); 4761 for (j = i >> w; j < z; j += f) 4762 q[j] = r; 4763 4764 /* backwards increment the k-bit code i */ 4765 for (j = 1 << (k - 1); i & j; j >>= 1) 4766 i ^= j; 4767 i ^= j; 4768 4769 /* backup over finished tables */ 4770 mask = (1 << w) - 1; /* needed on HP, cc -O bug */ 4771 if (h == -1) 4772 return Z_BUF_ERROR; 4773 while ((i & mask) != x[h]) 4774 { 4775 h--; /* don't need to update q */ 4776 w -= l; 4777 mask = (1 << w) - 1; 4778 } 4779 } 4780 } 4781 4782 4783 /* Return Z_BUF_ERROR if we were given an incomplete table */ 4784 return y != 0 && g != 1 ? 
Z_BUF_ERROR : Z_OK; 4785 } 4786 4787 4788 int inflate_trees_bits(c, bb, tb, hp, z) 4789 uIntf *c; /* 19 code lengths */ 4790 uIntf *bb; /* bits tree desired/actual depth */ 4791 inflate_huft * FAR *tb; /* bits tree result */ 4792 inflate_huft *hp; /* space for trees */ 4793 z_streamp z; /* for messages */ 4794 { 4795 int r; 4796 uInt hn = 0; /* hufts used in space */ 4797 uIntf *v; /* work area for huft_build */ 4798 4799 if ((v = (uIntf*)ZALLOC(z, 19, sizeof(uInt))) == Z_NULL) 4800 return Z_MEM_ERROR; 4801 r = huft_build(c, 19, 19, (uIntf*)Z_NULL, (uIntf*)Z_NULL, 4802 tb, bb, hp, &hn, v); 4803 if (r == Z_DATA_ERROR) 4804 z->msg = "oversubscribed dynamic bit lengths tree"; 4805 else if (r == Z_BUF_ERROR || *bb == 0) 4806 { 4807 z->msg = "incomplete dynamic bit lengths tree"; 4808 r = Z_DATA_ERROR; 4809 } 4810 ZFREE(z, v); 4811 return r; 4812 } 4813 4814 4815 int inflate_trees_dynamic(nl, nd, c, bl, bd, tl, td, hp, z) 4816 uInt nl; /* number of literal/length codes */ 4817 uInt nd; /* number of distance codes */ 4818 uIntf *c; /* that many (total) code lengths */ 4819 uIntf *bl; /* literal desired/actual bit depth */ 4820 uIntf *bd; /* distance desired/actual bit depth */ 4821 inflate_huft * FAR *tl; /* literal/length tree result */ 4822 inflate_huft * FAR *td; /* distance tree result */ 4823 inflate_huft *hp; /* space for trees */ 4824 z_streamp z; /* for messages */ 4825 { 4826 int r; 4827 uInt hn = 0; /* hufts used in space */ 4828 uIntf *v; /* work area for huft_build */ 4829 4830 /* allocate work area */ 4831 if ((v = (uIntf*)ZALLOC(z, 288, sizeof(uInt))) == Z_NULL) 4832 return Z_MEM_ERROR; 4833 4834 /* build literal/length tree */ 4835 r = huft_build(c, nl, 257, cplens, cplext, tl, bl, hp, &hn, v); 4836 if (r != Z_OK || *bl == 0) 4837 { 4838 if (r == Z_DATA_ERROR) 4839 z->msg = "oversubscribed literal/length tree"; 4840 else if (r != Z_MEM_ERROR) 4841 { 4842 z->msg = "incomplete literal/length tree"; 4843 r = Z_DATA_ERROR; 4844 } 4845 ZFREE(z, v); 4846 return r; 4847 } 4848 4849 /* build distance tree */ 4850 r = huft_build(c + nl, nd, 0, cpdist, cpdext, td, bd, hp, &hn, v); 4851 if (r != Z_OK || (*bd == 0 && nl > 257)) 4852 { 4853 if (r == Z_DATA_ERROR) 4854 z->msg = "oversubscribed distance tree"; 4855 else if (r == Z_BUF_ERROR) { 4856 #ifdef PKZIP_BUG_WORKAROUND 4857 r = Z_OK; 4858 } 4859 #else 4860 z->msg = "incomplete distance tree"; 4861 r = Z_DATA_ERROR; 4862 } 4863 else if (r != Z_MEM_ERROR) 4864 { 4865 z->msg = "empty distance tree with lengths"; 4866 r = Z_DATA_ERROR; 4867 } 4868 ZFREE(z, v); 4869 return r; 4870 #endif 4871 } 4872 4873 /* done */ 4874 ZFREE(z, v); 4875 return Z_OK; 4876 } 4877 4878 4879 /* build fixed tables only once--keep them here */ 4880 #ifdef BUILDFIXED 4881 local int fixed_built = 0; 4882 #define FIXEDH 544 /* number of hufts used by fixed tables */ 4883 local inflate_huft fixed_mem[FIXEDH]; 4884 local uInt fixed_bl; 4885 local uInt fixed_bd; 4886 local inflate_huft *fixed_tl; 4887 local inflate_huft *fixed_td; 4888 #else 4889 4890 /* +++ inffixed.h */ 4891 /* inffixed.h -- table for decoding fixed codes 4892 * Generated automatically by the maketree.c program 4893 */ 4894 4895 /* WARNING: this file should *not* be used by applications. It is 4896 part of the implementation of the compression library and is 4897 subject to change. Applications should only use zlib.h. 
4898 */ 4899 4900 local uInt fixed_bl = 9; 4901 local uInt fixed_bd = 5; 4902 local inflate_huft fixed_tl[] = { 4903 {{{96,7}},256}, {{{0,8}},80}, {{{0,8}},16}, {{{84,8}},115}, 4904 {{{82,7}},31}, {{{0,8}},112}, {{{0,8}},48}, {{{0,9}},192}, 4905 {{{80,7}},10}, {{{0,8}},96}, {{{0,8}},32}, {{{0,9}},160}, 4906 {{{0,8}},0}, {{{0,8}},128}, {{{0,8}},64}, {{{0,9}},224}, 4907 {{{80,7}},6}, {{{0,8}},88}, {{{0,8}},24}, {{{0,9}},144}, 4908 {{{83,7}},59}, {{{0,8}},120}, {{{0,8}},56}, {{{0,9}},208}, 4909 {{{81,7}},17}, {{{0,8}},104}, {{{0,8}},40}, {{{0,9}},176}, 4910 {{{0,8}},8}, {{{0,8}},136}, {{{0,8}},72}, {{{0,9}},240}, 4911 {{{80,7}},4}, {{{0,8}},84}, {{{0,8}},20}, {{{85,8}},227}, 4912 {{{83,7}},43}, {{{0,8}},116}, {{{0,8}},52}, {{{0,9}},200}, 4913 {{{81,7}},13}, {{{0,8}},100}, {{{0,8}},36}, {{{0,9}},168}, 4914 {{{0,8}},4}, {{{0,8}},132}, {{{0,8}},68}, {{{0,9}},232}, 4915 {{{80,7}},8}, {{{0,8}},92}, {{{0,8}},28}, {{{0,9}},152}, 4916 {{{84,7}},83}, {{{0,8}},124}, {{{0,8}},60}, {{{0,9}},216}, 4917 {{{82,7}},23}, {{{0,8}},108}, {{{0,8}},44}, {{{0,9}},184}, 4918 {{{0,8}},12}, {{{0,8}},140}, {{{0,8}},76}, {{{0,9}},248}, 4919 {{{80,7}},3}, {{{0,8}},82}, {{{0,8}},18}, {{{85,8}},163}, 4920 {{{83,7}},35}, {{{0,8}},114}, {{{0,8}},50}, {{{0,9}},196}, 4921 {{{81,7}},11}, {{{0,8}},98}, {{{0,8}},34}, {{{0,9}},164}, 4922 {{{0,8}},2}, {{{0,8}},130}, {{{0,8}},66}, {{{0,9}},228}, 4923 {{{80,7}},7}, {{{0,8}},90}, {{{0,8}},26}, {{{0,9}},148}, 4924 {{{84,7}},67}, {{{0,8}},122}, {{{0,8}},58}, {{{0,9}},212}, 4925 {{{82,7}},19}, {{{0,8}},106}, {{{0,8}},42}, {{{0,9}},180}, 4926 {{{0,8}},10}, {{{0,8}},138}, {{{0,8}},74}, {{{0,9}},244}, 4927 {{{80,7}},5}, {{{0,8}},86}, {{{0,8}},22}, {{{192,8}},0}, 4928 {{{83,7}},51}, {{{0,8}},118}, {{{0,8}},54}, {{{0,9}},204}, 4929 {{{81,7}},15}, {{{0,8}},102}, {{{0,8}},38}, {{{0,9}},172}, 4930 {{{0,8}},6}, {{{0,8}},134}, {{{0,8}},70}, {{{0,9}},236}, 4931 {{{80,7}},9}, {{{0,8}},94}, {{{0,8}},30}, {{{0,9}},156}, 4932 {{{84,7}},99}, {{{0,8}},126}, {{{0,8}},62}, {{{0,9}},220}, 4933 {{{82,7}},27}, {{{0,8}},110}, {{{0,8}},46}, {{{0,9}},188}, 4934 {{{0,8}},14}, {{{0,8}},142}, {{{0,8}},78}, {{{0,9}},252}, 4935 {{{96,7}},256}, {{{0,8}},81}, {{{0,8}},17}, {{{85,8}},131}, 4936 {{{82,7}},31}, {{{0,8}},113}, {{{0,8}},49}, {{{0,9}},194}, 4937 {{{80,7}},10}, {{{0,8}},97}, {{{0,8}},33}, {{{0,9}},162}, 4938 {{{0,8}},1}, {{{0,8}},129}, {{{0,8}},65}, {{{0,9}},226}, 4939 {{{80,7}},6}, {{{0,8}},89}, {{{0,8}},25}, {{{0,9}},146}, 4940 {{{83,7}},59}, {{{0,8}},121}, {{{0,8}},57}, {{{0,9}},210}, 4941 {{{81,7}},17}, {{{0,8}},105}, {{{0,8}},41}, {{{0,9}},178}, 4942 {{{0,8}},9}, {{{0,8}},137}, {{{0,8}},73}, {{{0,9}},242}, 4943 {{{80,7}},4}, {{{0,8}},85}, {{{0,8}},21}, {{{80,8}},258}, 4944 {{{83,7}},43}, {{{0,8}},117}, {{{0,8}},53}, {{{0,9}},202}, 4945 {{{81,7}},13}, {{{0,8}},101}, {{{0,8}},37}, {{{0,9}},170}, 4946 {{{0,8}},5}, {{{0,8}},133}, {{{0,8}},69}, {{{0,9}},234}, 4947 {{{80,7}},8}, {{{0,8}},93}, {{{0,8}},29}, {{{0,9}},154}, 4948 {{{84,7}},83}, {{{0,8}},125}, {{{0,8}},61}, {{{0,9}},218}, 4949 {{{82,7}},23}, {{{0,8}},109}, {{{0,8}},45}, {{{0,9}},186}, 4950 {{{0,8}},13}, {{{0,8}},141}, {{{0,8}},77}, {{{0,9}},250}, 4951 {{{80,7}},3}, {{{0,8}},83}, {{{0,8}},19}, {{{85,8}},195}, 4952 {{{83,7}},35}, {{{0,8}},115}, {{{0,8}},51}, {{{0,9}},198}, 4953 {{{81,7}},11}, {{{0,8}},99}, {{{0,8}},35}, {{{0,9}},166}, 4954 {{{0,8}},3}, {{{0,8}},131}, {{{0,8}},67}, {{{0,9}},230}, 4955 {{{80,7}},7}, {{{0,8}},91}, {{{0,8}},27}, {{{0,9}},150}, 4956 {{{84,7}},67}, {{{0,8}},123}, {{{0,8}},59}, {{{0,9}},214}, 4957 {{{82,7}},19}, 
    {{{82,7}},19}, {{{0,8}},107}, {{{0,8}},43}, {{{0,9}},182},
    {{{0,8}},11}, {{{0,8}},139}, {{{0,8}},75}, {{{0,9}},246},
    {{{80,7}},5}, {{{0,8}},87}, {{{0,8}},23}, {{{192,8}},0},
    {{{83,7}},51}, {{{0,8}},119}, {{{0,8}},55}, {{{0,9}},206},
    {{{81,7}},15}, {{{0,8}},103}, {{{0,8}},39}, {{{0,9}},174},
    {{{0,8}},7}, {{{0,8}},135}, {{{0,8}},71}, {{{0,9}},238},
    {{{80,7}},9}, {{{0,8}},95}, {{{0,8}},31}, {{{0,9}},158},
    {{{84,7}},99}, {{{0,8}},127}, {{{0,8}},63}, {{{0,9}},222},
    {{{82,7}},27}, {{{0,8}},111}, {{{0,8}},47}, {{{0,9}},190},
    {{{0,8}},15}, {{{0,8}},143}, {{{0,8}},79}, {{{0,9}},254},
    {{{96,7}},256}, {{{0,8}},80}, {{{0,8}},16}, {{{84,8}},115},
    {{{82,7}},31}, {{{0,8}},112}, {{{0,8}},48}, {{{0,9}},193},
    {{{80,7}},10}, {{{0,8}},96}, {{{0,8}},32}, {{{0,9}},161},
    {{{0,8}},0}, {{{0,8}},128}, {{{0,8}},64}, {{{0,9}},225},
    {{{80,7}},6}, {{{0,8}},88}, {{{0,8}},24}, {{{0,9}},145},
    {{{83,7}},59}, {{{0,8}},120}, {{{0,8}},56}, {{{0,9}},209},
    {{{81,7}},17}, {{{0,8}},104}, {{{0,8}},40}, {{{0,9}},177},
    {{{0,8}},8}, {{{0,8}},136}, {{{0,8}},72}, {{{0,9}},241},
    {{{80,7}},4}, {{{0,8}},84}, {{{0,8}},20}, {{{85,8}},227},
    {{{83,7}},43}, {{{0,8}},116}, {{{0,8}},52}, {{{0,9}},201},
    {{{81,7}},13}, {{{0,8}},100}, {{{0,8}},36}, {{{0,9}},169},
    {{{0,8}},4}, {{{0,8}},132}, {{{0,8}},68}, {{{0,9}},233},
    {{{80,7}},8}, {{{0,8}},92}, {{{0,8}},28}, {{{0,9}},153},
    {{{84,7}},83}, {{{0,8}},124}, {{{0,8}},60}, {{{0,9}},217},
    {{{82,7}},23}, {{{0,8}},108}, {{{0,8}},44}, {{{0,9}},185},
    {{{0,8}},12}, {{{0,8}},140}, {{{0,8}},76}, {{{0,9}},249},
    {{{80,7}},3}, {{{0,8}},82}, {{{0,8}},18}, {{{85,8}},163},
    {{{83,7}},35}, {{{0,8}},114}, {{{0,8}},50}, {{{0,9}},197},
    {{{81,7}},11}, {{{0,8}},98}, {{{0,8}},34}, {{{0,9}},165},
    {{{0,8}},2}, {{{0,8}},130}, {{{0,8}},66}, {{{0,9}},229},
    {{{80,7}},7}, {{{0,8}},90}, {{{0,8}},26}, {{{0,9}},149},
    {{{84,7}},67}, {{{0,8}},122}, {{{0,8}},58}, {{{0,9}},213},
    {{{82,7}},19}, {{{0,8}},106}, {{{0,8}},42}, {{{0,9}},181},
    {{{0,8}},10}, {{{0,8}},138}, {{{0,8}},74}, {{{0,9}},245},
    {{{80,7}},5}, {{{0,8}},86}, {{{0,8}},22}, {{{192,8}},0},
    {{{83,7}},51}, {{{0,8}},118}, {{{0,8}},54}, {{{0,9}},205},
    {{{81,7}},15}, {{{0,8}},102}, {{{0,8}},38}, {{{0,9}},173},
    {{{0,8}},6}, {{{0,8}},134}, {{{0,8}},70}, {{{0,9}},237},
    {{{80,7}},9}, {{{0,8}},94}, {{{0,8}},30}, {{{0,9}},157},
    {{{84,7}},99}, {{{0,8}},126}, {{{0,8}},62}, {{{0,9}},221},
    {{{82,7}},27}, {{{0,8}},110}, {{{0,8}},46}, {{{0,9}},189},
    {{{0,8}},14}, {{{0,8}},142}, {{{0,8}},78}, {{{0,9}},253},
    {{{96,7}},256}, {{{0,8}},81}, {{{0,8}},17}, {{{85,8}},131},
    {{{82,7}},31}, {{{0,8}},113}, {{{0,8}},49}, {{{0,9}},195},
    {{{80,7}},10}, {{{0,8}},97}, {{{0,8}},33}, {{{0,9}},163},
    {{{0,8}},1}, {{{0,8}},129}, {{{0,8}},65}, {{{0,9}},227},
    {{{80,7}},6}, {{{0,8}},89}, {{{0,8}},25}, {{{0,9}},147},
    {{{83,7}},59}, {{{0,8}},121}, {{{0,8}},57}, {{{0,9}},211},
    {{{81,7}},17}, {{{0,8}},105}, {{{0,8}},41}, {{{0,9}},179},
    {{{0,8}},9}, {{{0,8}},137}, {{{0,8}},73}, {{{0,9}},243},
    {{{80,7}},4}, {{{0,8}},85}, {{{0,8}},21}, {{{80,8}},258},
    {{{83,7}},43}, {{{0,8}},117}, {{{0,8}},53}, {{{0,9}},203},
    {{{81,7}},13}, {{{0,8}},101}, {{{0,8}},37}, {{{0,9}},171},
    {{{0,8}},5}, {{{0,8}},133}, {{{0,8}},69}, {{{0,9}},235},
    {{{80,7}},8}, {{{0,8}},93}, {{{0,8}},29}, {{{0,9}},155},
    {{{84,7}},83}, {{{0,8}},125}, {{{0,8}},61}, {{{0,9}},219},
    {{{82,7}},23}, {{{0,8}},109}, {{{0,8}},45}, {{{0,9}},187},
    {{{0,8}},13}, {{{0,8}},141}, {{{0,8}},77}, {{{0,9}},251},
    {{{80,7}},3}, {{{0,8}},83}, {{{0,8}},19}, {{{85,8}},195},
    {{{83,7}},35}, {{{0,8}},115}, {{{0,8}},51}, {{{0,9}},199},
    {{{81,7}},11}, {{{0,8}},99}, {{{0,8}},35}, {{{0,9}},167},
    {{{0,8}},3}, {{{0,8}},131}, {{{0,8}},67}, {{{0,9}},231},
    {{{80,7}},7}, {{{0,8}},91}, {{{0,8}},27}, {{{0,9}},151},
    {{{84,7}},67}, {{{0,8}},123}, {{{0,8}},59}, {{{0,9}},215},
    {{{82,7}},19}, {{{0,8}},107}, {{{0,8}},43}, {{{0,9}},183},
    {{{0,8}},11}, {{{0,8}},139}, {{{0,8}},75}, {{{0,9}},247},
    {{{80,7}},5}, {{{0,8}},87}, {{{0,8}},23}, {{{192,8}},0},
    {{{83,7}},51}, {{{0,8}},119}, {{{0,8}},55}, {{{0,9}},207},
    {{{81,7}},15}, {{{0,8}},103}, {{{0,8}},39}, {{{0,9}},175},
    {{{0,8}},7}, {{{0,8}},135}, {{{0,8}},71}, {{{0,9}},239},
    {{{80,7}},9}, {{{0,8}},95}, {{{0,8}},31}, {{{0,9}},159},
    {{{84,7}},99}, {{{0,8}},127}, {{{0,8}},63}, {{{0,9}},223},
    {{{82,7}},27}, {{{0,8}},111}, {{{0,8}},47}, {{{0,9}},191},
    {{{0,8}},15}, {{{0,8}},143}, {{{0,8}},79}, {{{0,9}},255}
};
local inflate_huft fixed_td[] = {
    {{{80,5}},1}, {{{87,5}},257}, {{{83,5}},17}, {{{91,5}},4097},
    {{{81,5}},5}, {{{89,5}},1025}, {{{85,5}},65}, {{{93,5}},16385},
    {{{80,5}},3}, {{{88,5}},513}, {{{84,5}},33}, {{{92,5}},8193},
    {{{82,5}},9}, {{{90,5}},2049}, {{{86,5}},129}, {{{192,5}},24577},
    {{{80,5}},2}, {{{87,5}},385}, {{{83,5}},25}, {{{91,5}},6145},
    {{{81,5}},7}, {{{89,5}},1537}, {{{85,5}},97}, {{{93,5}},24577},
    {{{80,5}},4}, {{{88,5}},769}, {{{84,5}},49}, {{{92,5}},12289},
    {{{82,5}},13}, {{{90,5}},3073}, {{{86,5}},193}, {{{192,5}},24577}
};
/* --- inffixed.h */

#endif


int inflate_trees_fixed(
uIntf *bl,               /* literal desired/actual bit depth */
uIntf *bd,               /* distance desired/actual bit depth */
inflate_huft * FAR *tl,  /* literal/length tree result */
inflate_huft * FAR *td,  /* distance tree result */
z_streamp z)             /* for memory allocation */
{
#ifdef BUILDFIXED
  /* build fixed tables if not already */
  if (!fixed_built)
  {
    int k;              /* temporary variable */
    uInt f = 0;         /* number of hufts used in fixed_mem */
    uIntf *c;           /* length list for huft_build */
    uIntf *v;           /* work area for huft_build */

    /* allocate memory */
    if ((c = (uIntf*)ZALLOC(z, 288, sizeof(uInt))) == Z_NULL)
      return Z_MEM_ERROR;
    if ((v = (uIntf*)ZALLOC(z, 288, sizeof(uInt))) == Z_NULL)
    {
      ZFREE(z, c);
      return Z_MEM_ERROR;
    }

    /* literal table */
    for (k = 0; k < 144; k++)
      c[k] = 8;
    for (; k < 256; k++)
      c[k] = 9;
    for (; k < 280; k++)
      c[k] = 7;
    for (; k < 288; k++)
      c[k] = 8;
    fixed_bl = 9;
    huft_build(c, 288, 257, cplens, cplext, &fixed_tl, &fixed_bl,
               fixed_mem, &f, v);

    /* distance table */
    for (k = 0; k < 30; k++)
      c[k] = 5;
    fixed_bd = 5;
    huft_build(c, 30, 0, cpdist, cpdext, &fixed_td, &fixed_bd,
               fixed_mem, &f, v);

    /* done */
    ZFREE(z, v);
    ZFREE(z, c);
    fixed_built = 1;
  }
#endif
  *bl = fixed_bl;
  *bd = fixed_bd;
  *tl = fixed_tl;
  *td = fixed_td;
  return Z_OK;
}
/* --- inftrees.c */
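
/*
 * Illustrative sketch of how a block decoder drives the entry points
 * above; it is not compiled in, and blens[], hufts, nl, nd and z stand
 * in for the caller's own storage.  The starting depths (7 for the
 * bit-length tree, 9 and 6 for the literal/length and distance trees)
 * are the conventional values, assumed here rather than taken from
 * this file.
 */
#if 0
{
  uInt bb = 7;                  /* bit-length tree depth (in/out) */
  inflate_huft *tb;             /* bit-length tree */
  uInt bl = 9, bd = 6;          /* lit/len and distance depths (in/out) */
  inflate_huft *tl, *td;        /* lit/len and distance trees */

  /* fixed-codes block: hand back the shared, prebuilt tables */
  inflate_trees_fixed(&bl, &bd, &tl, &td, z);

  /* dynamic-codes block: first the 19 code lengths in blens[0..18]... */
  if (inflate_trees_bits(blens, &bb, &tb, hufts, z) == Z_OK)
  {
    /* ...then nl lit/len plus nd distance lengths, again in blens[] */
    if (inflate_trees_dynamic(nl, nd, blens, &bl, &bd,
                              &tl, &td, hufts, z) != Z_OK)
      ;                         /* z->msg then names the bad tree */
  }
}
#endif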

/* +++ infcodes.c */

/* infcodes.c -- process literals and length/distance pairs
 * Copyright (C) 1995-2002 Mark Adler
 * For conditions of distribution and use, see copyright notice in zlib.h
 */

/* #include "zutil.h" */
/* #include "inftrees.h" */
/* #include "infblock.h" */
/* #include "infcodes.h" */
/* #include "infutil.h" */

/* +++ inffast.h */

/* inffast.h -- header to use inffast.c
 * Copyright (C) 1995-2002 Mark Adler
 * For conditions of distribution and use, see copyright notice in zlib.h
 */

/* WARNING: this file should *not* be used by applications. It is
   part of the implementation of the compression library and is
   subject to change. Applications should only use zlib.h.
 */

extern int inflate_fast __P((
    uInt,
    uInt,
    inflate_huft *,
    inflate_huft *,
    inflate_blocks_statef *,
    z_streamp ));
/* --- inffast.h */

/* simplify the use of the inflate_huft type with some defines */
#define exop word.what.Exop
#define bits word.what.Bits

typedef enum {        /* waiting for "i:"=input, "o:"=output, "x:"=nothing */
      START,    /* x: set up for LEN */
      LEN,      /* i: get length/literal/eob next */
      LENEXT,   /* i: getting length extra (have base) */
      DIST,     /* i: get distance next */
      DISTEXT,  /* i: getting distance extra */
      COPY,     /* o: copying bytes in window, waiting for space */
      LIT,      /* o: got literal, waiting for output space */
      WASH,     /* o: got eob, possibly still output waiting */
      END,      /* x: got eob and all data flushed */
      BADCODE}  /* x: got error */
inflate_codes_mode;

/* inflate codes private state */
struct inflate_codes_state {

  /* mode */
  inflate_codes_mode mode;      /* current inflate_codes mode */

  /* mode dependent information */
  uInt len;
  union {
    struct {
      inflate_huft *tree;       /* pointer into tree */
      uInt need;                /* bits needed */
    } code;             /* if LEN or DIST, where in tree */
    uInt lit;           /* if LIT, literal */
    struct {
      uInt get;                 /* bits to get for extra */
      uInt dist;                /* distance back to copy from */
    } copy;             /* if EXT or COPY, where and how much */
  } sub;                /* submode */

  /* mode independent information */
  Byte lbits;           /* ltree bits decoded per branch */
  Byte dbits;           /* dtree bits decoded per branch */
  inflate_huft *ltree;          /* literal/length/eob tree */
  inflate_huft *dtree;          /* distance tree */

};


inflate_codes_statef *inflate_codes_new(bl, bd, tl, td, z)
uInt bl, bd;
inflate_huft *tl;
inflate_huft *td; /* need separate declaration for Borland C++ */
z_streamp z;
{
  inflate_codes_statef *c;

  if ((c = (inflate_codes_statef *)
       ZALLOC(z,1,sizeof(struct inflate_codes_state))) != Z_NULL)
  {
    c->mode = START;
    c->lbits = (Byte)bl;
    c->dbits = (Byte)bd;
    c->ltree = tl;
    c->dtree = td;
    Tracev((stderr, "inflate: codes new\n"));
  }
  return c;
}
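
/*
 * Road map for the state machine in inflate_codes() below: START
 * primes the literal/length tree and enters LEN; LEN decodes a code
 * and moves to LIT for a literal, LENEXT for a length needing extra
 * bits, or WASH at end of block.  LENEXT advances to DIST, DIST to
 * DISTEXT, DISTEXT to COPY; COPY and LIT return to START once output
 * space allows.  WASH drains pending output and finishes in END, and
 * any invalid code lands in BADCODE.
 */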

int inflate_codes(s, z, r)
inflate_blocks_statef *s;
z_streamp z;
int r;
{
  uInt j;               /* temporary storage */
  inflate_huft *t;      /* temporary pointer */
  uInt e;               /* extra bits or operation */
  uLong b;              /* bit buffer */
  uInt k;               /* bits in bit buffer */
  Bytef *p;             /* input data pointer */
  uInt n;               /* bytes available there */
  Bytef *q;             /* output window write pointer */
  uInt m;               /* bytes to end of window or read pointer */
  Bytef *f;             /* pointer to copy strings from */
  inflate_codes_statef *c = s->sub.decode.codes;  /* codes state */

  /* copy input/output information to locals (UPDATE macro restores) */
  LOAD

  /* process input and output based on current state */
  while (1) switch (c->mode)
  {             /* waiting for "i:"=input, "o:"=output, "x:"=nothing */
    case START:         /* x: set up for LEN */
#ifndef SLOW
      if (m >= 258 && n >= 10)
      {
        UPDATE
        r = inflate_fast(c->lbits, c->dbits, c->ltree, c->dtree, s, z);
        LOAD
        if (r != Z_OK)
        {
          c->mode = r == Z_STREAM_END ? WASH : BADCODE;
          break;
        }
      }
#endif /* !SLOW */
      c->sub.code.need = c->lbits;
      c->sub.code.tree = c->ltree;
      c->mode = LEN;
    case LEN:           /* i: get length/literal/eob next */
      j = c->sub.code.need;
      NEEDBITS(j)
      t = c->sub.code.tree + ((uInt)b & inflate_mask[j]);
      DUMPBITS(t->bits)
      e = (uInt)(t->exop);
      if (e == 0)               /* literal */
      {
        c->sub.lit = t->base;
        Tracevv((stderr, t->base >= 0x20 && t->base < 0x7f ?
                 "inflate: literal '%c'\n" :
                 "inflate: literal 0x%02x\n", t->base));
        c->mode = LIT;
        break;
      }
      if (e & 16)               /* length */
      {
        c->sub.copy.get = e & 15;
        c->len = t->base;
        c->mode = LENEXT;
        break;
      }
      if ((e & 64) == 0)        /* next table */
      {
        c->sub.code.need = e;
        c->sub.code.tree = t + t->base;
        break;
      }
      if (e & 32)               /* end of block */
      {
        Tracevv((stderr, "inflate: end of block\n"));
        c->mode = WASH;
        break;
      }
      c->mode = BADCODE;        /* invalid code */
      z->msg = "invalid literal/length code";
      r = Z_DATA_ERROR;
      LEAVE
    case LENEXT:        /* i: getting length extra (have base) */
      j = c->sub.copy.get;
      NEEDBITS(j)
      c->len += (uInt)b & inflate_mask[j];
      DUMPBITS(j)
      c->sub.code.need = c->dbits;
      c->sub.code.tree = c->dtree;
      Tracevv((stderr, "inflate: length %u\n", c->len));
      c->mode = DIST;
    case DIST:          /* i: get distance next */
      j = c->sub.code.need;
      NEEDBITS(j)
      t = c->sub.code.tree + ((uInt)b & inflate_mask[j]);
      DUMPBITS(t->bits)
      e = (uInt)(t->exop);
      if (e & 16)               /* distance */
      {
        c->sub.copy.get = e & 15;
        c->sub.copy.dist = t->base;
        c->mode = DISTEXT;
        break;
      }
      if ((e & 64) == 0)        /* next table */
      {
        c->sub.code.need = e;
        c->sub.code.tree = t + t->base;
        break;
      }
      c->mode = BADCODE;        /* invalid code */
      z->msg = "invalid distance code";
      r = Z_DATA_ERROR;
      LEAVE
    case DISTEXT:       /* i: getting distance extra */
      j = c->sub.copy.get;
      NEEDBITS(j)
      c->sub.copy.dist += (uInt)b & inflate_mask[j];
      DUMPBITS(j)
      Tracevv((stderr, "inflate: distance %u\n", c->sub.copy.dist));
      c->mode = COPY;
    case COPY:          /* o: copying bytes in window, waiting for space */
      f = q - c->sub.copy.dist;
      while (f < s->window)             /* modulo window size-"while" instead */
        f += s->end - s->window;        /* of "if" handles invalid distances */
      while (c->len)
      {
        NEEDOUT
        OUTBYTE(*f++)
        if (f == s->end)
          f = s->window;
        c->len--;
      }
      c->mode = START;
      break;
    case LIT:           /* o: got literal, waiting for output space */
      NEEDOUT
      OUTBYTE(c->sub.lit)
      c->mode = START;
      break;
    case WASH:          /* o: got eob, possibly more output */
      if (k > 7)        /* return unused byte, if any */
      {
        Assert(k < 16, "inflate_codes grabbed too many bytes")
        k -= 8;
        n++;
        p--;            /* can always return one */
      }
      FLUSH
      if (s->read != s->write)
        LEAVE
      c->mode = END;
    case END:
      r = Z_STREAM_END;
      LEAVE
    case BADCODE:       /* x: got error */
      r = Z_DATA_ERROR;
      LEAVE
    default:
      r = Z_STREAM_ERROR;
      LEAVE
  }
#ifdef NEED_DUMMY_RETURN
  return Z_STREAM_ERROR;  /* Some dumb compilers complain without this */
#endif
}


void inflate_codes_free(c, z)
inflate_codes_statef *c;
z_streamp z;
{
  ZFREE(z, c);
  Tracev((stderr, "inflate: codes free\n"));
}
/* --- infcodes.c */

/* +++ infutil.c */

/* inflate_util.c -- data and routines common to blocks and codes
 * Copyright (C) 1995-2002 Mark Adler
 * For conditions of distribution and use, see copyright notice in zlib.h
 */

/* #include "zutil.h" */
/* #include "infblock.h" */
/* #include "inftrees.h" */
/* #include "infcodes.h" */
/* #include "infutil.h" */

#ifndef NO_DUMMY_DECL
struct inflate_codes_state {int dummy;}; /* for buggy compilers */
#endif

/* And'ing with mask[n] masks the lower n bits */
uInt inflate_mask[17] = {
    0x0000,
    0x0001, 0x0003, 0x0007, 0x000f, 0x001f, 0x003f, 0x007f, 0x00ff,
    0x01ff, 0x03ff, 0x07ff, 0x0fff, 0x1fff, 0x3fff, 0x7fff, 0xffff
};


/* copy as much as possible from the sliding window to the output area */
int inflate_flush(s, z, r)
inflate_blocks_statef *s;
z_streamp z;
int r;
{
  uInt n;
  Bytef *p;
  Bytef *q;

  /* local copies of source and destination pointers */
  p = z->next_out;
  q = s->read;

  /* compute number of bytes to copy as far as end of window */
  n = (uInt)((q <= s->write ? s->write : s->end) - q);
  if (n > z->avail_out) n = z->avail_out;
  if (n && r == Z_BUF_ERROR) r = Z_OK;

  /* update counters */
  z->avail_out -= n;
  z->total_out += n;

  /* update check information */
  if (s->checkfn != Z_NULL)
    z->adler = s->check = (*s->checkfn)(s->check, q, n);

  /* copy as far as end of window */
  if (p != Z_NULL) {
    zmemcpy(p, q, n);
    p += n;
  }
  q += n;

  /* see if more to copy at beginning of window */
  if (q == s->end)
  {
    /* wrap pointers */
    q = s->window;
    if (s->write == s->end)
      s->write = s->window;

    /* compute bytes to copy */
    n = (uInt)(s->write - q);
    if (n > z->avail_out) n = z->avail_out;
    if (n && r == Z_BUF_ERROR) r = Z_OK;

    /* update counters */
    z->avail_out -= n;
    z->total_out += n;

    /* update check information */
    if (s->checkfn != Z_NULL)
      z->adler = s->check = (*s->checkfn)(s->check, q, n);

    /* copy */
    if (p != Z_NULL) {
      zmemcpy(p, q, n);
      p += n;
    }
    q += n;
  }

  /* update pointers */
  z->next_out = p;
  s->read = q;

  /* done */
  return r;
}
/* --- infutil.c */

/* +++ inffast.c */

/* inffast.c -- process literals and length/distance pairs fast
 * Copyright (C) 1995-2002 Mark Adler
 * For conditions of distribution and use, see copyright notice in zlib.h
 */

/* #include "zutil.h" */
/* #include "inftrees.h" */
/* #include "infblock.h" */
/* #include "infcodes.h" */
/* #include "infutil.h" */
/* #include "inffast.h" */

#ifndef NO_DUMMY_DECL
struct inflate_codes_state {int dummy;}; /* for buggy compilers */
#endif
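
/*
 * Minimal sketch (not compiled in) of the bit-buffer discipline shared
 * by inflate_codes() and inflate_fast(): bytes enter "b" least
 * significant bit first, "k" counts the valid bits, and inflate_mask[]
 * peeks at the low j bits.  "p" stands in for the input pointer;
 * NEEDBITS/DUMPBITS from infutil.h follow the same shape.
 */
#if 0
{
  uLong b = 0;          /* bit buffer */
  uInt k = 0;           /* bits in bit buffer */
  uInt j = 5;           /* say, a five-bit field */
  uInt val;

  while (k < j)         /* cf. GRABBITS(j) below */
  {
    b |= ((uLong)(*p++)) << k;
    k += 8;
  }
  val = (uInt)b & inflate_mask[j];      /* peek at the low j bits */
  b >>= j;              /* cf. DUMPBITS(j): drop them */
  k -= j;
}
#endif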

/* simplify the use of the inflate_huft type with some defines */
#define exop word.what.Exop
#define bits word.what.Bits

/* macros for bit input with no checking and for returning unused bytes */
#define GRABBITS(j) {while(k<(j)){b|=((uLong)NEXTBYTE)<<k;k+=8;}}
#define UNGRAB {c=z->avail_in-n;c=(k>>3)<c?k>>3:c;n+=c;p-=c;k-=c<<3;}

/* Called with number of bytes left to write in window at least 258
   (the maximum string length) and number of input bytes available
   at least ten.  The ten bytes are six bytes for the longest length/
   distance pair plus four bytes for overloading the bit buffer.
   (In bits: at most 20 for a literal/length code with its extra bits,
   15 for a distance code and 13 for distance extra bits, 48 bits or
   six bytes in all, matching the GRABBITS calls below; the bit buffer
   can additionally be holding up to 32 unconsumed bits, four bytes.) */

int inflate_fast(bl, bd, tl, td, s, z)
uInt bl, bd;
inflate_huft *tl;
inflate_huft *td; /* need separate declaration for Borland C++ */
inflate_blocks_statef *s;
z_streamp z;
{
  inflate_huft *t;      /* temporary pointer */
  uInt e;               /* extra bits or operation */
  uLong b;              /* bit buffer */
  uInt k;               /* bits in bit buffer */
  Bytef *p;             /* input data pointer */
  uInt n;               /* bytes available there */
  Bytef *q;             /* output window write pointer */
  uInt m;               /* bytes to end of window or read pointer */
  uInt ml;              /* mask for literal/length tree */
  uInt md;              /* mask for distance tree */
  uInt c;               /* bytes to copy */
  uInt d;               /* distance back to copy from */
  Bytef *r;             /* copy source pointer */

  /* load input, output, bit values */
  LOAD

  /* initialize masks */
  ml = inflate_mask[bl];
  md = inflate_mask[bd];

  /* do until not enough input or output space for fast loop */
  do {                          /* assume called with m >= 258 && n >= 10 */
    /* get literal/length code */
    GRABBITS(20)                /* max bits for literal/length code */
    if ((e = (t = tl + ((uInt)b & ml))->exop) == 0)
    {
      DUMPBITS(t->bits)
5545 "inflate: * literal '%c'\n" : 5546 "inflate: * literal 0x%02x\n", t->base)); 5547 *q++ = (Byte)t->base; 5548 m--; 5549 continue; 5550 } 5551 do { 5552 DUMPBITS(t->bits) 5553 if (e & 16) 5554 { 5555 /* get extra bits for length */ 5556 e &= 15; 5557 c = t->base + ((uInt)b & inflate_mask[e]); 5558 DUMPBITS(e) 5559 Tracevv((stderr, "inflate: * length %u\n", c)); 5560 5561 /* decode distance base of block to copy */ 5562 GRABBITS(15); /* max bits for distance code */ 5563 e = (t = td + ((uInt)b & md))->exop; 5564 do { 5565 DUMPBITS(t->bits) 5566 if (e & 16) 5567 { 5568 /* get extra bits to add to distance base */ 5569 e &= 15; 5570 GRABBITS(e) /* get extra bits (up to 13) */ 5571 d = t->base + ((uInt)b & inflate_mask[e]); 5572 DUMPBITS(e) 5573 Tracevv((stderr, "inflate: * distance %u\n", d)); 5574 5575 /* do the copy */ 5576 m -= c; 5577 r = q - d; 5578 if (r < s->window) /* wrap if needed */ 5579 { 5580 do { 5581 r += s->end - s->window; /* force pointer in window */ 5582 } while (r < s->window); /* covers invalid distances */ 5583 e = s->end - r; 5584 if (c > e) 5585 { 5586 c -= e; /* wrapped copy */ 5587 do { 5588 *q++ = *r++; 5589 } while (--e); 5590 r = s->window; 5591 do { 5592 *q++ = *r++; 5593 } while (--c); 5594 } 5595 else /* normal copy */ 5596 { 5597 *q++ = *r++; c--; 5598 *q++ = *r++; c--; 5599 do { 5600 *q++ = *r++; 5601 } while (--c); 5602 } 5603 } 5604 else /* normal copy */ 5605 { 5606 *q++ = *r++; c--; 5607 *q++ = *r++; c--; 5608 do { 5609 *q++ = *r++; 5610 } while (--c); 5611 } 5612 break; 5613 } 5614 else if ((e & 64) == 0) 5615 { 5616 t += t->base; 5617 e = (t += ((uInt)b & inflate_mask[e]))->exop; 5618 } 5619 else 5620 { 5621 z->msg = "invalid distance code"; 5622 UNGRAB 5623 UPDATE 5624 return Z_DATA_ERROR; 5625 } 5626 } while (1); 5627 break; 5628 } 5629 if ((e & 64) == 0) 5630 { 5631 t += t->base; 5632 if ((e = (t += ((uInt)b & inflate_mask[e]))->exop) == 0) 5633 { 5634 DUMPBITS(t->bits) 5635 Tracevv((stderr, t->base >= 0x20 && t->base < 0x7f ? 5636 "inflate: * literal '%c'\n" : 5637 "inflate: * literal 0x%02x\n", t->base)); 5638 *q++ = (Byte)t->base; 5639 m--; 5640 break; 5641 } 5642 } 5643 else if (e & 32) 5644 { 5645 Tracevv((stderr, "inflate: * end of block\n")); 5646 UNGRAB 5647 UPDATE 5648 return Z_STREAM_END; 5649 } 5650 else 5651 { 5652 z->msg = "invalid literal/length code"; 5653 UNGRAB 5654 UPDATE 5655 return Z_DATA_ERROR; 5656 } 5657 } while (1); 5658 } while (m >= 258 && n >= 10); 5659 5660 /* not enough input or output--restore pointers and return */ 5661 UNGRAB 5662 UPDATE 5663 return Z_OK; 5664 } 5665 /* --- inffast.c */ 5666 5667 /* +++ zutil.c */ 5668 5669 /* zutil.c -- target dependent utility functions for the compression library 5670 * Copyright (C) 1995-2002 Jean-loup Gailly. 

/* +++ zutil.c */

/* zutil.c -- target dependent utility functions for the compression library
 * Copyright (C) 1995-2002 Jean-loup Gailly.
 * For conditions of distribution and use, see copyright notice in zlib.h
 */

/* @(#) Id */

#ifdef DEBUG_ZLIB
#include <stdio.h>
#endif

/* #include "zutil.h" */

#ifndef NO_DUMMY_DECL
struct internal_state {int dummy;}; /* for buggy compilers */
#endif

#ifndef STDC
extern void exit __P((int));
#endif

const char *z_errmsg[10] = {
"need dictionary",      /* Z_NEED_DICT       2  */
"stream end",           /* Z_STREAM_END      1  */
"",                     /* Z_OK              0  */
"file error",           /* Z_ERRNO         (-1) */
"stream error",         /* Z_STREAM_ERROR  (-2) */
"data error",           /* Z_DATA_ERROR    (-3) */
"insufficient memory",  /* Z_MEM_ERROR     (-4) */
"buffer error",         /* Z_BUF_ERROR     (-5) */
"incompatible version", /* Z_VERSION_ERROR (-6) */
""};


#if 0
const char * ZEXPORT zlibVersion()
{
    return ZLIB_VERSION;
}
#endif

#ifdef DEBUG_ZLIB

#  ifndef verbose
#    define verbose 0
#  endif
int z_verbose = verbose;

void z_error (m)
    char *m;
{
    fprintf(stderr, "%s\n", m);
    exit(1);
}
#endif

/* exported to allow conversion of error code to string for compress() and
 * uncompress()
 */
#if 0
const char * ZEXPORT zError(err)
    int err;
{
    return ERR_MSG(err);
}
#endif


#ifndef HAVE_MEMCPY

void zmemcpy(dest, source, len)
    Bytef* dest;
    const Bytef* source;
    uInt len;
{
    if (len == 0) return;
    do {
        *dest++ = *source++; /* ??? to be unrolled */
    } while (--len != 0);
}

int zmemcmp(s1, s2, len)
    const Bytef* s1;
    const Bytef* s2;
    uInt len;
{
    uInt j;

    for (j = 0; j < len; j++) {
        if (s1[j] != s2[j]) return 2*(s1[j] > s2[j])-1;
    }
    return 0;
}

void zmemzero(dest, len)
    Bytef* dest;
    uInt len;
{
    if (len == 0) return;
    do {
        *dest++ = 0;  /* ??? to be unrolled */
    } while (--len != 0);
}
#endif

#ifdef __TURBOC__
#if (defined( __BORLANDC__) || !defined(SMALL_MEDIUM)) && !defined(__32BIT__)
/* Small and medium model in Turbo C are for now limited to near allocation
 * with reduced MAX_WBITS and MAX_MEM_LEVEL
 */
#  define MY_ZCALLOC

/* Turbo C malloc() does not allow dynamic allocation of 64K bytes
 * and farmalloc(64K) returns a pointer with an offset of 8, so we
 * must fix the pointer. Warning: the pointer must be put back to its
 * original form in order to free it, use zcfree().
 */

#define MAX_PTR 10
/* 10*64K = 640K */

local int next_ptr = 0;

typedef struct ptr_table_s {
    voidpf org_ptr;
    voidpf new_ptr;
} ptr_table;

local ptr_table table[MAX_PTR];
/* This table is used to remember the original form of pointers
 * to large buffers (64K). Such pointers are normalized with a zero offset.
 * Since MSDOS is not a preemptive multitasking OS, this table is not
 * protected from concurrent access. This hack doesn't work anyway on
 * a protected system like OS/2. Use Microsoft C instead.
 */
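
/*
 * Note on the normalization in zcalloc() below: a real-mode pointer
 * addresses seg*16+off, so rounding the offset up to a paragraph
 * boundary, folding the paragraph count into the segment word and
 * zeroing the offset yields a seg:0 pointer at most 15 bytes above
 * the original block, which is why 16 extra bytes are allocated.
 */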
5803 */ 5804 5805 voidpf zcalloc (voidpf opaque, unsigned items, unsigned size) 5806 { 5807 voidpf buf = opaque; /* just to make some compilers happy */ 5808 ulg bsize = (ulg)items*size; 5809 5810 /* If we allocate less than 65520 bytes, we assume that farmalloc 5811 * will return a usable pointer which doesn't have to be normalized. 5812 */ 5813 if (bsize < 65520L) { 5814 buf = farmalloc(bsize); 5815 if (*(ush*)&buf != 0) return buf; 5816 } else { 5817 buf = farmalloc(bsize + 16L); 5818 } 5819 if (buf == NULL || next_ptr >= MAX_PTR) return NULL; 5820 table[next_ptr].org_ptr = buf; 5821 5822 /* Normalize the pointer to seg:0 */ 5823 *((ush*)&buf+1) += ((ush)((uch*)buf-0) + 15) >> 4; 5824 *(ush*)&buf = 0; 5825 table[next_ptr++].new_ptr = buf; 5826 return buf; 5827 } 5828 5829 void zcfree (voidpf opaque, voidpf ptr) 5830 { 5831 int n; 5832 if (*(ush*)&ptr != 0) { /* object < 64K */ 5833 farfree(ptr); 5834 return; 5835 } 5836 /* Find the original pointer */ 5837 for (n = 0; n < next_ptr; n++) { 5838 if (ptr != table[n].new_ptr) continue; 5839 5840 farfree(table[n].org_ptr); 5841 while (++n < next_ptr) { 5842 table[n-1] = table[n]; 5843 } 5844 next_ptr--; 5845 return; 5846 } 5847 ptr = opaque; /* just to make some compilers happy */ 5848 Assert(0, "zcfree: ptr not found"); 5849 } 5850 #endif 5851 #endif /* __TURBOC__ */ 5852 5853 5854 #if defined(M_I86) && !defined(__32BIT__) 5855 /* Microsoft C in 16-bit mode */ 5856 5857 # define MY_ZCALLOC 5858 5859 #if (!defined(_MSC_VER) || (_MSC_VER <= 600)) 5860 # define _halloc halloc 5861 # define _hfree hfree 5862 #endif 5863 5864 voidpf zcalloc (voidpf opaque, unsigned items, unsigned size) 5865 { 5866 if (opaque) opaque = 0; /* to make compiler happy */ 5867 return _halloc((long)items, size); 5868 } 5869 5870 void zcfree (voidpf opaque, voidpf ptr) 5871 { 5872 if (opaque) opaque = 0; /* to make compiler happy */ 5873 _hfree(ptr); 5874 } 5875 5876 #endif /* MSC */ 5877 5878 5879 #ifndef MY_ZCALLOC /* Any system without a special alloc function */ 5880 5881 #ifndef STDC 5882 extern voidp calloc __P((uInt items, uInt size)); 5883 extern void free __P((voidpf ptr)); 5884 #endif 5885 5886 voidpf zcalloc (opaque, items, size) 5887 voidpf opaque; 5888 unsigned items; 5889 unsigned size; 5890 { 5891 if (opaque) items += size - size; /* make compiler happy */ 5892 return (voidpf)calloc(items, size); 5893 } 5894 5895 void zcfree (opaque, ptr) 5896 voidpf opaque; 5897 voidpf ptr; 5898 { 5899 free(ptr); 5900 if (opaque) return; /* make compiler happy */ 5901 } 5902 5903 #endif /* MY_ZCALLOC */ 5904 /* --- zutil.c */ 5905 5906 /* +++ adler32.c */ 5907 /* adler32.c -- compute the Adler-32 checksum of a data stream 5908 * Copyright (C) 1995-2002 Mark Adler 5909 * For conditions of distribution and use, see copyright notice in zlib.h 5910 */ 5911 5912 /* @(#) $Id: zlib.c,v 1.30 2008/05/05 13:41:30 ad Exp $ */ 5913 5914 /* #include "zlib.h" */ 5915 5916 #define BASE 65521L /* largest prime smaller than 65536 */ 5917 #define NMAX 5552 5918 /* NMAX is the largest n such that 255n(n+1)/2 + (n+1)(BASE-1) <= 2^32-1 */ 5919 5920 #define DO1(buf,i) {s1 += buf[i]; s2 += s1;} 5921 #define DO2(buf,i) DO1(buf,i); DO1(buf,i+1); 5922 #define DO4(buf,i) DO2(buf,i); DO2(buf,i+2); 5923 #define DO8(buf,i) DO4(buf,i); DO4(buf,i+4); 5924 #define DO16(buf) DO8(buf,0); DO8(buf,8); 5925 5926 /* ========================================================================= */ 5927 uLong ZEXPORT adler32(adler, buf, len) 5928 uLong adler; 5929 const Bytef *buf; 5930 uInt len; 5931 { 5932 

/* +++ adler32.c */
/* adler32.c -- compute the Adler-32 checksum of a data stream
 * Copyright (C) 1995-2002 Mark Adler
 * For conditions of distribution and use, see copyright notice in zlib.h
 */

/* @(#) $Id: zlib.c,v 1.30 2008/05/05 13:41:30 ad Exp $ */

/* #include "zlib.h" */

#define BASE 65521L /* largest prime smaller than 65536 */
#define NMAX 5552
/* NMAX is the largest n such that 255n(n+1)/2 + (n+1)(BASE-1) <= 2^32-1 */

#define DO1(buf,i)  {s1 += buf[i]; s2 += s1;}
#define DO2(buf,i)  DO1(buf,i); DO1(buf,i+1);
#define DO4(buf,i)  DO2(buf,i); DO2(buf,i+2);
#define DO8(buf,i)  DO4(buf,i); DO4(buf,i+4);
#define DO16(buf)   DO8(buf,0); DO8(buf,8);

/* ========================================================================= */
uLong ZEXPORT adler32(adler, buf, len)
    uLong adler;
    const Bytef *buf;
    uInt len;
{
    unsigned long s1 = adler & 0xffff;
    unsigned long s2 = (adler >> 16) & 0xffff;
    int k;

    if (buf == Z_NULL) return 1L;

    while (len > 0) {
        k = len < NMAX ? len : NMAX;
        len -= k;
        while (k >= 16) {
            DO16(buf);
            buf += 16;
            k -= 16;
        }
        if (k != 0) do {
            s1 += *buf++;
            s2 += s1;
        } while (--k);
        s1 %= BASE;
        s2 %= BASE;
    }
    return (s2 << 16) | s1;
}
/* --- adler32.c */
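
/*
 * Usage sketch for adler32() above (not compiled in): a running
 * checksum starts from adler32(0L, Z_NULL, 0), which the Z_NULL test
 * above turns into the initial value 1, and is then folded over each
 * buffer in turn.  read_more(), buf and len are hypothetical.
 */
#if 0
{
    uLong adler = adler32(0L, Z_NULL, 0);

    while (read_more(&buf, &len))
        adler = adler32(adler, buf, len);
}
#endif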