/*
 * Copyright 2007 Sun Microsystems, Inc. All rights reserved.
 * Use is subject to license terms.
 */

/*
 * Updated from zlib-1.0.4 to zlib-1.1.3 by James Carlson.
 *
 * This file is derived from various .h and .c files from the zlib-1.0.4
 * distribution by Jean-loup Gailly and Mark Adler, with some additions
 * by Paul Mackerras to aid in implementing Deflate compression and
 * decompression for PPP packets. See zlib.h for conditions of
 * distribution and use.
 *
 * Changes that have been made include:
 * - added Z_PACKET_FLUSH (see zlib.h for details)
 * - added inflateIncomp and deflateOutputPending
 * - allow strm->next_out to be NULL, meaning discard the output
 *
 * $Id: zlib.c,v 1.11 1998/09/13 23:37:12 paulus Exp $
 */

#pragma ident	"%Z%%M%	%I%	%E% SMI"

/*
 * ==FILEVERSION 971210==
 *
 * This marker is used by the Linux installation script to determine
 * whether an up-to-date version of this file is already installed.
 */

#define	NO_DUMMY_DECL
#define	NO_ZCFUNCS
#define	MY_ZCALLOC

#if defined(__FreeBSD__) && (defined(KERNEL) || defined(_KERNEL))
#define	inflate	inflate_ppp	/* FreeBSD already has an inflate :-( */
#endif


/* +++ zutil.h */
/*
 * zutil.h -- internal interface and configuration of the compression library
 * Copyright (C) 1995-1998 Jean-loup Gailly.
 * For conditions of distribution and use, see copyright notice in zlib.h
 */

/*
 * WARNING: this file should *not* be used by applications. It is part
 * of the implementation of the compression library and is subject to
 * change. Applications should only use zlib.h.
 */

/* From: zutil.h,v 1.16 1996/07/24 13:41:13 me Exp $ */

#ifndef _Z_UTIL_H
#define	_Z_UTIL_H

#include "zlib.h"

#if defined(KERNEL) || defined(_KERNEL)
/* Assume this is a *BSD or SVR4 kernel */
#include <sys/types.h>
#include <sys/time.h>
#include <sys/systm.h>
#ifdef SOL2
#include <sys/cmn_err.h>
#endif
#undef u
#define	HAVE_MEMCPY
#define	memcmp	bcmp

#else
#if defined(__KERNEL__)
/* Assume this is a Linux kernel */
#include <linux/string.h>
#define	HAVE_MEMCPY

#else /* not kernel */

#include <stddef.h>
#ifdef NO_ERRNO_H
extern int errno;
#else
#include <errno.h>
#endif
#ifdef STDC
#include <string.h>
#include <stdlib.h>
#endif
#endif /* __KERNEL__ */
#endif /* _KERNEL || KERNEL */

#ifndef local
#define	local	static
#endif
/* compile with -Dlocal if your debugger can't find static symbols */

typedef unsigned char uch;
typedef uch FAR uchf;
typedef unsigned short ush;
typedef ush FAR ushf;
typedef unsigned long ulg;

static const char *z_errmsg[10];	/* indexed by 2-zlib_error */
/* (size given to avoid silly warnings with Visual C++) */

#define	ERR_MSG(err) z_errmsg[Z_NEED_DICT-(err)]

#define	ERR_RETURN(strm, err) \
	return (strm->msg = ERR_MSG(err), (err))
/* To be used only when the state is known to be valid */

/* common constants */

#ifndef DEF_WBITS
#define	DEF_WBITS MAX_WBITS
#endif
/* default windowBits for decompression. MAX_WBITS is for compression only */
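
/*
 * Added illustration (not part of the original sources): the z_errmsg
 * table above is indexed by Z_NEED_DICT-(err), i.e. 2-(err), so the
 * zlib return codes, which run from Z_NEED_DICT == 2 down to
 * Z_VERSION_ERROR == -6, map onto indices 0..8.  For example:
 *
 *	ERR_MSG(Z_OK)            == z_errmsg[2]   (Z_OK == 0)
 *	ERR_MSG(Z_STREAM_ERROR)  == z_errmsg[4]   (Z_STREAM_ERROR == -2)
 *	ERR_MSG(Z_VERSION_ERROR) == z_errmsg[8]   (Z_VERSION_ERROR == -6)
 */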

#if MAX_MEM_LEVEL >= 8
#define	DEF_MEM_LEVEL 8
#else
#define	DEF_MEM_LEVEL MAX_MEM_LEVEL
#endif
/* default memLevel */

#define	STORED_BLOCK 0
#define	STATIC_TREES 1
#define	DYN_TREES    2
/* The three kinds of block type */

#define	MIN_MATCH  3
#define	MAX_MATCH  258
/* The minimum and maximum match lengths */

#define	PRESET_DICT 0x20	/* preset dictionary flag in zlib header */

/* target dependencies */

#ifdef MSDOS
#define	OS_CODE  0x00
#ifdef __TURBOC__
#include <alloc.h>
#else /* MSC or DJGPP */
#include <malloc.h>
#endif
#endif

#ifdef OS2
#define	OS_CODE  0x06
#endif

#ifdef WIN32 /* Windows 95 & Windows NT */
#define	OS_CODE  0x0b
#endif

#if defined(VAXC) || defined(VMS)
#define	OS_CODE  0x02
#define	F_OPEN(name, mode) \
	fopen((name), (mode), "mbc=60", "ctx=stm", "rfm=fix", "mrs=512")
#endif

#ifdef AMIGA
#define	OS_CODE  0x01
#endif

#if defined(ATARI) || defined(atarist)
#define	OS_CODE  0x05
#endif

#ifdef MACOS
#define	OS_CODE  0x07
#endif

#ifdef __50SERIES /* Prime/PRIMOS */
#define	OS_CODE  0x0F
#endif

#ifdef TOPS20
#define	OS_CODE  0x0a
#endif

#if defined(_BEOS_) || defined(RISCOS)
#define	fdopen(fd, mode) NULL	/* No fdopen() */
#endif

/* Common defaults */

#ifndef OS_CODE
#define	OS_CODE  0x03	/* assume Unix */
#endif

#ifndef F_OPEN
#define	F_OPEN(name, mode) fopen((name), (mode))
#endif

/* functions */

#ifdef HAVE_STRERROR
extern char *strerror OF((int));
#define	zstrerror(errnum) strerror(errnum)
#else
#define	zstrerror(errnum) ""
#endif

#if defined(pyr)
#define	NO_MEMCPY
#endif
#if (defined(M_I86SM) || defined(M_I86MM)) && !defined(_MSC_VER)
/*
 * Use our own functions for small and medium model with MSC <= 5.0.
 * You may have to use the same strategy for Borland C (untested).
 */
#define	NO_MEMCPY
#endif
#if defined(STDC) && !defined(HAVE_MEMCPY) && !defined(NO_MEMCPY)
#define	HAVE_MEMCPY
#endif
#ifdef HAVE_MEMCPY
#ifdef SMALL_MEDIUM /* MSDOS small or medium model */
#define	zmemcpy _fmemcpy
#define	zmemcmp _fmemcmp
#define	zmemzero(dest, len) _fmemset(dest, 0, len)
#else
#define	zmemcpy (void) memcpy
#define	zmemcmp memcmp
#define	zmemzero(dest, len) (void) memset(dest, 0, len)
#endif
#else
extern void zmemcpy  OF((Bytef* dest, const Bytef* source, uInt len));
extern int  zmemcmp  OF((const Bytef* s1, const Bytef* s2, uInt len));
extern void zmemzero OF((Bytef* dest, uInt len));
#endif

/* Diagnostic functions */
#ifdef DEBUG_ZLIB
#include <stdio.h>
#ifndef verbose
#define	verbose 0
#endif
extern void z_error OF((char *m));
#define	Assert(cond, msg) { if (!(cond)) z_error(msg); }
#define	Trace(x) { if (z_verbose >= 0) fprintf x; }
#define	Tracev(x) { if (z_verbose > 0) fprintf x; }
#define	Tracevv(x) { if (z_verbose > 1) fprintf x; }
#define	Tracec(c, x) { if (z_verbose > 0 && (c)) fprintf x; }
#define	Tracecv(c, x) { if (z_verbose > 1 && (c)) fprintf x; }
#else
#if defined(SOL2) && defined(DEBUG)
#define	Assert(cond, msg) ((cond) ? ((void)0) : panic(msg))
#else
#define	Assert(cond, msg) ((void)0)
#endif
#define	Trace(x) ((void)0)
#define	Tracev(x) ((void)0)
#define	Tracevv(x) ((void)0)
#define	Tracec(c, x) ((void)0)
#define	Tracecv(c, x) ((void)0)
#endif


typedef uLong (*check_func) OF((uLong check, const Bytef *buf, uInt len));

/* voidpf zcalloc OF((voidpf opaque, unsigned items, unsigned size)); */
/* void zcfree OF((voidpf opaque, voidpf ptr)); */

#define	ZALLOC(strm, items, size) \
	(*((strm)->zalloc))((strm)->opaque, (items), (size))
#define	ZFREE(strm, addr)	(*((strm)->zfree))((strm)->opaque, (voidpf)(addr))
#define	TRY_FREE(s, p) { if (p) ZFREE(s, p); }

#endif /* _Z_UTIL_H */
/* --- zutil.h */

/* +++ deflate.h */
/*
 * deflate.h -- internal compression state
 * Copyright (C) 1995-1998 Jean-loup Gailly
 * For conditions of distribution and use, see copyright notice in zlib.h
 */

/*
 * WARNING: this file should *not* be used by applications. It is part
 * of the implementation of the compression library and is subject to
 * change. Applications should only use zlib.h.
 */

/* From: deflate.h,v 1.10 1996/07/02 12:41:00 me Exp $ */

#ifndef _DEFLATE_H
#define	_DEFLATE_H

/* #include "zutil.h" */

/*
 * ===========================================================================
 * Internal compression state.
 */

#define	LENGTH_CODES 29
/* number of length codes, not counting the special END_BLOCK code */

#define	LITERALS  256
/* number of literal bytes 0..255 */

#define	L_CODES (LITERALS+1+LENGTH_CODES)
/* number of Literal or Length codes, including the END_BLOCK code */

#define	D_CODES   30
/* number of distance codes */

#define	BL_CODES  19
/* number of codes used to transfer the bit lengths */

#define	HEAP_SIZE (2*L_CODES+1)
/* maximum heap size */

#define	MAX_BITS 15
/* All codes must not exceed MAX_BITS bits */

#define	INIT_STATE    42
#define	BUSY_STATE   113
#define	FINISH_STATE 666
/* Stream status */


/* Data structure describing a single value and its code string. */
typedef struct ct_data_s {
	union {
		ush freq;	/* frequency count */
		ush code;	/* bit string */
	} fc;
	union {
		ush dad;	/* father node in Huffman tree */
		ush len;	/* length of bit string */
	} dl;
} FAR ct_data;

#define	Freq fc.freq
#define	Code fc.code
#define	Dad  dl.dad
#define	Len  dl.len

typedef struct static_tree_desc_s static_tree_desc;

typedef struct tree_desc_s {
	ct_data *dyn_tree;	/* the dynamic tree */
	int max_code;		/* largest code with non zero frequency */
	static_tree_desc *stat_desc;	/* the corresponding static tree */
} FAR tree_desc;

typedef ush Pos;
typedef Pos FAR Posf;
typedef unsigned IPos;

/*
 * A Pos is an index in the character window. We use short instead of
 * int to save space in the various tables. IPos is used only for
 * parameter passing.
 */
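
/*
 * Added sketch (not in the original sources): the fc/dl unions in
 * ct_data above let the tree builder reuse the same two bytes twice.
 * While frequencies are being counted, fc holds Freq; once codes have
 * been generated, the same storage holds Code (and dl switches from
 * Dad to Len).  The ZLIB_EXAMPLES guard is hypothetical and exists
 * only so these illustrations are not compiled by default.
 */
#ifdef ZLIB_EXAMPLES
static void
example_ct_data_reuse(void)
{
	ct_data d;

	d.Freq = 42;	/* counting phase: the union holds a frequency */
	/* ... the tree would be built here ... */
	d.Code = 0x5;	/* emission phase: the same bytes now hold the code */
	d.Len = 3;	/* dl likewise switches from Dad to Len */
}
#endif /* ZLIB_EXAMPLES */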

typedef struct deflate_state {
	z_streamp strm;	/* pointer back to this zlib stream */
	int status;	/* as the name implies */
	Bytef *pending_buf;	/* output still pending */
	ulg pending_buf_size;	/* size of pending_buf */
	Bytef *pending_out;	/* next pending byte to output to the stream */
	int pending;	/* nb of bytes in the pending buffer */
	int noheader;	/* suppress zlib header and adler32 */
	Byte data_type;	/* UNKNOWN, BINARY or ASCII */
	Byte method;	/* STORED (for zip only) or DEFLATED */
	/* value of flush param for previous deflate call */
	int last_flush;

	/* used by deflate.c: */

	uInt w_size;	/* LZ77 window size (32K by default) */
	uInt w_bits;	/* log2(w_size)  (8..16) */
	uInt w_mask;	/* w_size - 1 */

	Bytef *window;
	/*
	 * Sliding window. Input bytes are read into the second half
	 * of the window, and move to the first half later to keep a
	 * dictionary of at least wSize bytes. With this organization,
	 * matches are limited to a distance of wSize-MAX_MATCH bytes,
	 * but this ensures that IO is always performed with a length
	 * multiple of the block size. Also, it limits the window size
	 * to 64K, which is quite useful on MSDOS. To do: use the
	 * user input buffer as sliding window.
	 */

	ulg window_size;
	/*
	 * Actual size of window: 2*wSize, except when the user input
	 * buffer is directly used as sliding window.
	 */

	Posf *prev;
	/*
	 * Link to older string with same hash index. To limit the
	 * size of this array to 64K, this link is maintained only for
	 * the last 32K strings. An index in this array is thus a
	 * window index modulo 32K.
	 */

	Posf *head;	/* Heads of the hash chains or NIL. */

	uInt ins_h;	/* hash index of string to be inserted */
	uInt hash_size;	/* number of elements in hash table */
	uInt hash_bits;	/* log2(hash_size) */
	uInt hash_mask;	/* hash_size-1 */

	uInt hash_shift;
	/*
	 * Number of bits by which ins_h must be shifted at each input
	 * step. It must be such that after MIN_MATCH steps, the
	 * oldest byte no longer takes part in the hash key, that is:
	 * hash_shift * MIN_MATCH >= hash_bits
	 */

	long block_start;
	/*
	 * Window position at the beginning of the current output
	 * block. Gets negative when the window is moved backwards.
	 */

	uInt match_length;	/* length of best match */
	IPos prev_match;	/* previous match */
	int match_available;	/* set if previous match exists */
	uInt strstart;	/* start of string to insert */
	uInt match_start;	/* start of matching string */
	uInt lookahead;	/* number of valid bytes ahead in window */

	uInt prev_length;
	/*
	 * Length of the best match at previous step. Matches not
	 * greater than this are discarded. This is used in the lazy
	 * match evaluation.
	 */

	uInt max_chain_length;
	/*
	 * To speed up deflation, hash chains are never searched
	 * beyond this length. A higher limit improves compression
	 * ratio but degrades the speed.
	 */

	uInt max_lazy_match;
	/*
	 * Attempt to find a better match only when the current match
	 * is strictly smaller than this value. This mechanism is used
	 * only for compression levels >= 4.
	 */
#define	max_insert_length  max_lazy_match
	/*
	 * Insert new strings in the hash table only if the match
	 * length is not greater than this length.
	 * This saves time but degrades compression. max_insert_length
	 * is used only for compression levels <= 3.
	 */

	int level;	/* compression level (1..9) */
	int strategy;	/* favor or force Huffman coding */

	uInt good_match;
	/* Use a faster search when the previous match is longer than this */

	int nice_match;	/* Stop searching when current match exceeds this */

	/* used by trees.c: */
	/* Didn't use the ct_data typedef below to suppress compiler warning */
	struct ct_data_s dyn_ltree[HEAP_SIZE];	/* literal and length tree */
	struct ct_data_s dyn_dtree[2*D_CODES+1];	/* distance tree */
	/* Huffman tree for bit lengths */
	struct ct_data_s bl_tree[2*BL_CODES+1];

	struct tree_desc_s l_desc;	/* desc. for literal tree */
	struct tree_desc_s d_desc;	/* desc. for distance tree */
	struct tree_desc_s bl_desc;	/* desc. for bit length tree */

	ush bl_count[MAX_BITS+1];
	/* number of codes at each bit length for an optimal tree */

	int heap[2*L_CODES+1];	/* heap used to build the Huffman trees */
	int heap_len;	/* number of elements in the heap */
	int heap_max;	/* element of largest frequency */
	/*
	 * The sons of heap[n] are heap[2*n] and heap[2*n+1]. heap[0]
	 * is not used. The same heap array is used to build all
	 * trees.
	 */

	uch depth[2*L_CODES+1];
	/*
	 * Depth of each subtree used as tie breaker for trees of
	 * equal frequency
	 */

	uchf *l_buf;	/* buffer for literals or lengths */

	uInt lit_bufsize;
	/*
	 * Size of match buffer for literals/lengths. There are 4
	 * reasons for limiting lit_bufsize to 64K:
	 *
	 *   - frequencies can be kept in 16 bit counters
	 *
	 *   - if compression is not successful for the first block,
	 *     all input data is still in the window so we can still
	 *     emit a stored block even when input comes from standard
	 *     input. (This can also be done for all blocks if
	 *     lit_bufsize is not greater than 32K.)
	 *
	 *   - if compression is not successful for a file smaller
	 *     than 64K, we can even emit a stored file instead of a
	 *     stored block (saving 5 bytes). This is applicable only
	 *     for zip (not gzip or zlib).
	 *
	 *   - creating new Huffman trees less frequently may not
	 *     provide fast adaptation to changes in the input data
	 *     statistics. (Take for example a binary file with poorly
	 *     compressible code followed by a highly compressible
	 *     string table.) Smaller buffer sizes give fast adaptation
	 *     but have of course the overhead of transmitting trees
	 *     more frequently.
	 *
	 *   - I can't count above 4
	 */

	uInt last_lit;	/* running index in l_buf */

	ushf *d_buf;
	/*
	 * Buffer for distances. To simplify the code, d_buf and l_buf
	 * have the same number of elements. To use different lengths,
	 * an extra flag array would be necessary.
	 */

	ulg opt_len;	/* bit length of current block with optimal trees */
	ulg static_len;	/* bit length of current block with static trees */
	uInt matches;	/* number of string matches in current block */
	int last_eob_len;	/* bit length of EOB code for last block */

	ulg compressed_len;	/* total bit length of compressed file PPP */
#ifdef DEBUG_ZLIB
	ulg bits_sent;	/* bit length of the compressed data */
#endif

	ush bi_buf;
	/*
	 * Output buffer. bits are inserted starting at the bottom
	 * (least significant bits).
	 */
	int bi_valid;
	/*
	 * Number of valid bits in bi_buf. All bits above the last
	 * valid bit are always zero.
	 */

} FAR deflate_state;

/*
 * Output a byte on the stream. IN assertion: there is enough room in
 * pending_buf.
 */
#define	put_byte(s, c) { s->pending_buf[s->pending++] = (c); }


#define	MIN_LOOKAHEAD (MAX_MATCH+MIN_MATCH+1)
/*
 * Minimum amount of lookahead, except at the end of the input file.
 * See deflate.c for comments about the MIN_MATCH+1.
 */

#define	MAX_DIST(s)  ((s)->w_size-MIN_LOOKAHEAD)
/*
 * In order to simplify the code, particularly on 16 bit machines,
 * match distances are limited to MAX_DIST instead of WSIZE.
 */

/* in trees.c */
void _tr_init		OF((deflate_state *s));
int _tr_tally		OF((deflate_state *s, unsigned dist, unsigned lc));
void _tr_flush_block	OF((deflate_state *s, charf *buf, ulg stored_len,
    int eof));
void _tr_align		OF((deflate_state *s));
void _tr_stored_block	OF((deflate_state *s, charf *buf, ulg stored_len,
    int eof));
void _tr_stored_type_only OF((deflate_state *));	/* PPP */

#define	d_code(dist) \
	((dist) < 256 ? _dist_code[dist] : _dist_code[256+((dist)>>7)])
/*
 * Mapping from a distance to a distance code. dist is the distance - 1 and
 * must not have side effects. _dist_code[256] and _dist_code[257] are never
 * used.
 */

#ifndef DEBUG_ZLIB
/* Inline versions of _tr_tally for speed: */

local uch _length_code[];
local uch _dist_code[];

#define	_tr_tally_lit(s, c, flush) \
	{ uch cc = (c); \
	s->d_buf[s->last_lit] = 0; \
	s->l_buf[s->last_lit++] = cc; \
	s->dyn_ltree[cc].Freq++; \
	flush = (s->last_lit == s->lit_bufsize-1); \
	}
#define	_tr_tally_dist(s, distance, length, flush) \
	{ uch len = (length); \
	ush dist = (distance); \
	s->d_buf[s->last_lit] = dist; \
	s->l_buf[s->last_lit++] = len; \
	dist--; \
	s->dyn_ltree[_length_code[len]+LITERALS+1].Freq++; \
	s->dyn_dtree[d_code(dist)].Freq++; \
	flush = (s->last_lit == s->lit_bufsize-1); \
	}
#else
#define	_tr_tally_lit(s, c, flush) flush = _tr_tally(s, 0, c)
#define	_tr_tally_dist(s, distance, length, flush) \
	flush = _tr_tally(s, distance, length)
#endif

#endif
/* --- deflate.h */
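
/*
 * Added sketch (not in the original sources): how the _tr_tally_*
 * macros above are meant to be driven by the compressor.  A literal
 * byte is recorded with _tr_tally_lit(); a back-reference with
 * _tr_tally_dist(), whose length argument is already biased by
 * MIN_MATCH.  Both set `bflush' when l_buf/d_buf fill up and the
 * current block must be flushed.  ZLIB_EXAMPLES is a hypothetical
 * guard used only for these illustrations.
 */
#ifdef ZLIB_EXAMPLES
static void
example_tally(deflate_state *s)
{
	int bflush;

	_tr_tally_lit(s, 'A', bflush);	/* record the literal byte 'A' */
	if (bflush)
		;	/* the caller would flush the block here */
	/* record a match of length 42+MIN_MATCH at distance 300 */
	_tr_tally_dist(s, 300, 42, bflush);
	if (bflush)
		;	/* likewise */
}
#endif /* ZLIB_EXAMPLES */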

/* +++ deflate.c */
/*
 * deflate.c -- compress data using the deflation algorithm
 * Copyright (C) 1995-1998 Jean-loup Gailly.
 * For conditions of distribution and use, see copyright notice in zlib.h
 */

/*
 * ALGORITHM
 *
 * The "deflation" process depends on being able to identify portions
 * of the input text which are identical to earlier input (within a
 * sliding window trailing behind the input currently being processed).
 *
 * The most straightforward technique turns out to be the fastest for
 * most input files: try all possible matches and select the longest.
 * The key feature of this algorithm is that insertions into the string
 * dictionary are very simple and thus fast, and deletions are avoided
 * completely. Insertions are performed at each input character, whereas
 * string matches are performed only when the previous match ends. So it
 * is preferable to spend more time in matches to allow very fast string
 * insertions and avoid deletions. The matching algorithm for small
 * strings is inspired from that of Rabin & Karp. A brute force approach
 * is used to find longer strings when a small match has been found.
 * A similar algorithm is used in comic (by Jan-Mark Wams) and freeze
 * (by Leonid Broukhis).
 * A previous version of this file used a more sophisticated algorithm
 * (by Fiala and Greene) which is guaranteed to run in linear amortized
 * time, but has a larger average cost, uses more memory and is patented.
 * However the F&G algorithm may be faster for some highly redundant
 * files if the parameter max_chain_length (described below) is too large.
 *
 * ACKNOWLEDGEMENTS
 *
 * The idea of lazy evaluation of matches is due to Jan-Mark Wams, and
 * I found it in 'freeze' written by Leonid Broukhis.
 * Thanks to many people for bug reports and testing.
 *
 * REFERENCES
 *
 * Deutsch, L.P., "DEFLATE Compressed Data Format Specification".
 * Available in ftp://ds.internic.net/rfc/rfc1951.txt
 *
 * A description of the Rabin and Karp algorithm is given in the book
 * "Algorithms" by R. Sedgewick, Addison-Wesley, p252.
 *
 * Fiala,E.R., and Greene,D.H.
 * Data Compression with Finite Windows, Comm.ACM, 32,4 (1989) 490-595
 *
 */

/* From: deflate.c,v 1.15 1996/07/24 13:40:58 me Exp $ */

/* #include "deflate.h" */

const char deflate_copyright[] =
    " deflate 1.1.3 Copyright 1995-1998 Jean-loup Gailly ";
/*
 * If you use the zlib library in a product, an acknowledgment is
 * welcome in the documentation of your product. If for some reason
 * you cannot include such an acknowledgment, I would appreciate that
 * you keep this copyright string in the executable of your product.
 */

/*
 * ===========================================================================
 * Function prototypes.
 */
typedef enum {
	/* block not completed, need more input or more output */
	need_more,
	block_done,	/* block flush performed */
	/* finish started, need only more output at next deflate */
	finish_started,
	finish_done	/* finish done, accept no more input or output */
} block_state;

typedef block_state (*compress_func) OF((deflate_state *s, int flush));
/* Compression function. Returns the block state after the call. */

local void fill_window	OF((deflate_state *s));
local block_state deflate_stored OF((deflate_state *s, int flush));
local block_state deflate_fast	OF((deflate_state *s, int flush));
local block_state deflate_slow	OF((deflate_state *s, int flush));
local void lm_init	OF((deflate_state *s));
local void putShortMSB	OF((deflate_state *s, uInt b));
local void flush_pending	OF((z_streamp strm));
local int read_buf	OF((z_streamp strm, Bytef *buf, unsigned size));
#ifdef ASMV
void match_init	OF((void));	/* asm code initialization */
uInt longest_match	OF((deflate_state *s, IPos cur_match));
#else
local uInt longest_match	OF((deflate_state *s, IPos cur_match));
#endif

#ifdef DEBUG_ZLIB
local void check_match OF((deflate_state *s, IPos start, IPos match,
    int length));
#endif

/*
 * ===========================================================================
 * Local data
 */

#define	NIL 0
/* Tail of hash chains */

#ifndef TOO_FAR
#define	TOO_FAR 4096
#endif
/* Matches of length 3 are discarded if their distance exceeds TOO_FAR */

#define	MIN_LOOKAHEAD (MAX_MATCH+MIN_MATCH+1)
/*
 * Minimum amount of lookahead, except at the end of the input file.
 * See deflate.c for comments about the MIN_MATCH+1.
 */

/*
 * Values for max_lazy_match, good_match and max_chain_length,
 * depending on the desired pack level (0..9). The values given below
 * have been tuned to exclude worst case performance for pathological
 * files. Better values may be found for specific files.
 */
typedef struct config_s {
	ush good_length;	/* reduce lazy search above this match length */
	ush max_lazy;	/* do not perform lazy search above this match length */
	ush nice_length;	/* quit search above this match length */
	ush max_chain;
	compress_func func;
} config;

local const config configuration_table[10] = {
/*	  good lazy nice chain */
/* 0 */	{0,    0,   0,    0, deflate_stored},	/* store only */
/* 1 */	{4,    4,   8,    4, deflate_fast},	/* maximum speed, no lazy matches */
/* 2 */	{4,    5,  16,    8, deflate_fast},
/* 3 */	{4,    6,  32,   32, deflate_fast},

/* 4 */	{4,    4,  16,   16, deflate_slow},	/* lazy matches */
/* 5 */	{8,   16,  32,   32, deflate_slow},
/* 6 */	{8,   16, 128,  128, deflate_slow},
/* 7 */	{8,   32, 128,  256, deflate_slow},
/* 8 */	{32, 128, 258, 1024, deflate_slow},
/* 9 */	{32, 258, 258, 4096, deflate_slow}};	/* maximum compression */

/*
 * Note: the deflate() code requires max_lazy >= MIN_MATCH and max_chain >= 4
 * For deflate_fast() (levels <= 3) good is ignored and lazy has a different
 * meaning.
 */

#define	EQUAL 0
/* result of memcmp for equal strings */

#ifndef NO_DUMMY_DECL
struct static_tree_desc_s { int dummy; };	/* for buggy compilers */
#endif

/*
 * ===========================================================================
 * Update a hash value with the given input byte
 * IN assertion: all calls to UPDATE_HASH are made with consecutive
 * input characters, so that a running hash key can be computed from the
 * previous key instead of complete recalculation each time.
 */
#define	UPDATE_HASH(s, h, c) (h = (((h)<<s->hash_shift) ^ (c)) & s->hash_mask)


/*
 * ===========================================================================
 * Insert string str in the dictionary and set match_head to the previous head
 * of the hash chain (the most recent string with same hash key). Return
 * the previous length of the hash chain.
 * If this file is compiled with -DFASTEST, the compression level is forced
 * to 1, and no hash chains are maintained.
 * IN assertion: all calls to INSERT_STRING are made with consecutive
 * input characters and the first MIN_MATCH bytes of str are valid
 * (except for the last MIN_MATCH-1 bytes of the input file).
 */
#ifdef FASTEST
#define	INSERT_STRING(s, str, match_head) \
	(UPDATE_HASH(s, s->ins_h, s->window[(str) + (MIN_MATCH-1)]), \
	match_head = s->head[s->ins_h], \
	s->head[s->ins_h] = (Pos)(str))
#else
#define	INSERT_STRING(s, str, match_head) \
	(UPDATE_HASH(s, s->ins_h, s->window[(str) + (MIN_MATCH-1)]), \
	s->prev[(str) & s->w_mask] = match_head = s->head[s->ins_h], \
	s->head[s->ins_h] = (Pos)(str))
#endif

/*
 * ===========================================================================
 * Initialize the hash table (avoiding 64K overflow for 16 bit systems).
 * prev[] will be initialized on the fly.
 */
#define	CLEAR_HASH(s) \
	s->head[s->hash_size-1] = NIL; \
	zmemzero((Bytef *)s->head, (unsigned)(s->hash_size-1)*sizeof (*s->head));
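
/*
 * Added sketch (not in the original sources): the rolling property of
 * UPDATE_HASH above.  With hash_shift == (hash_bits+MIN_MATCH-1)/MIN_MATCH,
 * after MIN_MATCH (3) updates the oldest byte has been shifted out of
 * the masked key entirely, so the hash of each MIN_MATCH-byte string
 * can be maintained with one UPDATE_HASH per input byte instead of
 * being recomputed from scratch.  ZLIB_EXAMPLES is a hypothetical
 * guard for these illustrations.
 */
#ifdef ZLIB_EXAMPLES
static uInt
example_rolling_hash(deflate_state *s, Bytef *p)
{
	uInt h = 0;

	/* hash of p[0..2], built incrementally */
	UPDATE_HASH(s, h, p[0]);
	UPDATE_HASH(s, h, p[1]);
	UPDATE_HASH(s, h, p[2]);
	/* one more step rolls the key forward to cover p[1..3] */
	UPDATE_HASH(s, h, p[3]);
	return (h);	/* p[0] no longer contributes to the key */
}
#endif /* ZLIB_EXAMPLES */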

/* ========================================================================= */
int
deflateInit_(strm, level, version, stream_size)
	z_streamp strm;
	int level;
	const char *version;
	int stream_size;
{
	(void) deflate_copyright;
	return deflateInit2_(strm, level, Z_DEFLATED, MAX_WBITS, DEF_MEM_LEVEL,
	    Z_DEFAULT_STRATEGY, version, stream_size);
	/* To do: ignore strm->next_in if we use it as window */
}

/* ========================================================================= */
int deflateInit2_(strm, level, method, windowBits, memLevel, strategy,
    version, stream_size)
	z_streamp strm;
	int level;
	int method;
	int windowBits;
	int memLevel;
	int strategy;
	const char *version;
	int stream_size;
{
	deflate_state *s;
	int noheader = 0;
	static const char *my_version = ZLIB_VERSION;

	ushf *overlay;
	/*
	 * We overlay pending_buf and d_buf+l_buf. This works since
	 * the average output size for (length, distance) codes is <=
	 * 24 bits.
	 */

	if (version == Z_NULL || version[0] != my_version[0] ||
	    stream_size != sizeof (z_stream)) {
		return (Z_VERSION_ERROR);
	}
	if (strm == Z_NULL)
		return (Z_STREAM_ERROR);

	strm->msg = Z_NULL;
#ifndef NO_ZCFUNCS
	if (strm->zalloc == Z_NULL) {
		strm->zalloc = zcalloc;
		strm->opaque = (voidpf)0;
	}
	if (strm->zfree == Z_NULL) strm->zfree = zcfree;
#endif

	if (level == Z_DEFAULT_COMPRESSION) level = 6;
#ifdef FASTEST
	level = 1;
#endif

	if (windowBits < 0) {	/* undocumented feature: suppress zlib header */
		noheader = 1;
		windowBits = -windowBits;
	}
	if (memLevel < 1 || memLevel > MAX_MEM_LEVEL || method != Z_DEFLATED ||
	    windowBits <= 8 || windowBits > 15 || level < 0 || level > 9 ||
	    strategy < 0 || strategy > Z_HUFFMAN_ONLY) {
		return (Z_STREAM_ERROR);
	}
	s = (deflate_state *) ZALLOC(strm, 1, sizeof (deflate_state));
	if (s == Z_NULL)
		return (Z_MEM_ERROR);
	strm->state = (struct internal_state FAR *)s;
	s->strm = strm;

	s->noheader = noheader;
	s->w_bits = windowBits;
	s->w_size = 1 << s->w_bits;
	s->w_mask = s->w_size - 1;

	s->hash_bits = memLevel + 7;
	s->hash_size = 1 << s->hash_bits;
	s->hash_mask = s->hash_size - 1;
	s->hash_shift = ((s->hash_bits+MIN_MATCH-1)/MIN_MATCH);

	s->window = (Bytef *) ZALLOC(strm, s->w_size, 2*sizeof (Byte));
	s->prev = (Posf *) ZALLOC(strm, s->w_size, sizeof (Pos));
	s->head = (Posf *) ZALLOC(strm, s->hash_size, sizeof (Pos));

	s->lit_bufsize = 1 << (memLevel + 6);	/* 16K elements by default */

	overlay = (ushf *) ZALLOC(strm, s->lit_bufsize, sizeof (ush)+2);
	s->pending_buf = (uchf *) overlay;
	s->pending_buf_size = (ulg)s->lit_bufsize * (sizeof (ush)+2L);

	if (s->window == Z_NULL || s->prev == Z_NULL || s->head == Z_NULL ||
	    s->pending_buf == Z_NULL) {
		strm->msg = ERR_MSG(Z_MEM_ERROR);
		s->status = INIT_STATE;
		(void) deflateEnd(strm);
		return (Z_MEM_ERROR);
	}
	s->d_buf = overlay + s->lit_bufsize/sizeof (ush);
	s->l_buf = s->pending_buf + (1+sizeof (ush))*s->lit_bufsize;

	s->level = level;
	s->strategy = strategy;
	s->method = (Byte)method;

	return (deflateReset(strm));
}
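
/*
 * Added usage sketch (not in the original sources): a typical
 * initialization of this compressor.  Passing a negative windowBits
 * triggers the undocumented feature noted above: the zlib header and
 * trailing adler32 are suppressed, producing raw deflate output.
 * Because NO_ZCFUNCS is defined at the top of this file, no default
 * allocator is compiled in, so the caller must supply zalloc/zfree.
 * ZLIB_EXAMPLES is a hypothetical guard.
 */
#ifdef ZLIB_EXAMPLES
static int
example_init_raw(z_stream *strm, alloc_func za, free_func zf)
{
	strm->zalloc = za;	/* caller-supplied allocator */
	strm->zfree = zf;
	strm->opaque = (voidpf)0;
	/* -15: 32K window, and suppress the zlib header/adler32 */
	return (deflateInit2(strm, Z_DEFAULT_COMPRESSION, Z_DEFLATED,
	    -15, DEF_MEM_LEVEL, Z_DEFAULT_STRATEGY));
}
#endif /* ZLIB_EXAMPLES */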

/* ========================================================================= */
int
deflateSetDictionary(strm, dictionary, dictLength)
	z_streamp strm;
	const Bytef *dictionary;
	uInt dictLength;
{
	deflate_state *s;
	uInt length = dictLength;
	uInt n;
	IPos hash_head = 0;

	if (strm == Z_NULL || strm->state == Z_NULL || dictionary == Z_NULL)
		return (Z_STREAM_ERROR);

	s = (deflate_state *) strm->state;
	if (s->status != INIT_STATE)
		return (Z_STREAM_ERROR);

	strm->adler = adler32(strm->adler, dictionary, dictLength);

	if (length < MIN_MATCH)
		return (Z_OK);
	if (length > MAX_DIST(s)) {
		length = MAX_DIST(s);
#ifndef USE_DICT_HEAD
		/* use the tail of the dictionary */
		dictionary += dictLength - length;
#endif
	}
	Assert(length <= s->window_size, "dict copy");
	zmemcpy(s->window, dictionary, length);
	s->strstart = length;
	s->block_start = (long)length;

	/*
	 * Insert all strings in the hash table (except for the last
	 * two bytes). s->lookahead stays null, so s->ins_h will be
	 * recomputed at the next call of fill_window.
	 */
	s->ins_h = s->window[0];
	UPDATE_HASH(s, s->ins_h, s->window[1]);
	for (n = 0; n <= length - MIN_MATCH; n++) {
		INSERT_STRING(s, n, hash_head);
	}
	if (hash_head) hash_head = 0;	/* to make compiler happy */
	return (Z_OK);
}

/* ========================================================================= */
int
deflateReset(strm)
	z_streamp strm;
{
	deflate_state *s;

	if (strm == Z_NULL || strm->state == Z_NULL ||
	    strm->zalloc == Z_NULL || strm->zfree == Z_NULL)
		return (Z_STREAM_ERROR);

	strm->total_in = strm->total_out = 0;
	/* use zfree if we ever allocate msg dynamically */
	strm->msg = Z_NULL;
	strm->data_type = Z_UNKNOWN;

	s = (deflate_state *)strm->state;
	s->pending = 0;
	s->pending_out = s->pending_buf;

	if (s->noheader < 0) {
		/* was set to -1 by deflate(..., Z_FINISH); */
		s->noheader = 0;
	}
	s->status = s->noheader ? BUSY_STATE : INIT_STATE;
	strm->adler = 1;
	s->last_flush = Z_NO_FLUSH;

	_tr_init(s);
	lm_init(s);

	return (Z_OK);
}

/* ========================================================================= */
int
deflateParams(strm, level, strategy)
	z_streamp strm;
	int level;
	int strategy;
{
	deflate_state *s;
	compress_func func;
	int err = Z_OK;

	if (strm == Z_NULL || strm->state == Z_NULL)
		return (Z_STREAM_ERROR);
	s = (deflate_state *) strm->state;

	if (level == Z_DEFAULT_COMPRESSION) {
		level = 6;
	}
	if (level < 0 || level > 9 || strategy < 0 ||
	    strategy > Z_HUFFMAN_ONLY) {
		return (Z_STREAM_ERROR);
	}
	func = configuration_table[s->level].func;

	if (func != configuration_table[level].func && strm->total_in != 0) {
		/* Flush the last buffer: */
		err = deflate(strm, Z_PARTIAL_FLUSH);
	}
	if (s->level != level) {
		s->level = level;
		s->max_lazy_match = configuration_table[level].max_lazy;
		s->good_match = configuration_table[level].good_length;
		s->nice_match = configuration_table[level].nice_length;
		s->max_chain_length = configuration_table[level].max_chain;
	}
	s->strategy = strategy;
	return (err);
}

/*
 * =========================================================================
 * Put a short in the pending buffer. The 16-bit value is put in MSB order.
 * IN assertion: the stream state is correct and there is enough room in
 * pending_buf.
 */
local void
putShortMSB(s, b)
	deflate_state *s;
	uInt b;
{
	put_byte(s, (Byte)(b >> 8));
	put_byte(s, (Byte)(b & 0xff));
}
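
/*
 * Added note (not in the original sources): putShortMSB() emits the
 * high byte first, e.g. putShortMSB(s, 0x789c) appends 0x78 and then
 * 0x9c to pending_buf.  This big-endian byte order is what the zlib
 * header and trailer format (RFC 1950) requires.
 */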

/*
 * =========================================================================
 * Flush as much pending output as possible. All deflate() output goes
 * through this function so some applications may wish to modify it
 * to avoid allocating a large strm->next_out buffer and copying into it.
 * (See also read_buf()).
 */
local void
flush_pending(strm)
	z_streamp strm;
{
	deflate_state *s = (deflate_state *) strm->state;
	unsigned len = s->pending;

	if (len > strm->avail_out) len = strm->avail_out;
	if (len == 0)
		return;

	if (strm->next_out != Z_NULL) {	/* PPP */
		zmemcpy(strm->next_out, s->pending_out, len);
		strm->next_out += len;
	}	/* PPP */
	s->pending_out += len;
	strm->total_out += len;
	strm->avail_out -= len;
	s->pending -= len;
	if (s->pending == 0) {
		s->pending_out = s->pending_buf;
	}
}

/* ========================================================================= */
int
deflate(strm, flush)
	z_streamp strm;
	int flush;
{
	int old_flush;	/* value of flush param for previous deflate call */
	deflate_state *s;

	if (strm == Z_NULL || strm->state == Z_NULL ||
	    flush > Z_FINISH || flush < 0) {
		return (Z_STREAM_ERROR);
	}
	s = (deflate_state *) strm->state;

	if (/* strm->next_out == Z_NULL || --- we allow null --- PPP */
	    (strm->next_in == Z_NULL && strm->avail_in != 0) ||
	    (s->status == FINISH_STATE && flush != Z_FINISH)) {
		ERR_RETURN(strm, Z_STREAM_ERROR);
	}
	if (strm->avail_out == 0) ERR_RETURN(strm, Z_BUF_ERROR);

	s->strm = strm;	/* just in case */
	old_flush = s->last_flush;
	s->last_flush = flush;

	/* Write the zlib header */
	if (s->status == INIT_STATE) {

		uInt header = (Z_DEFLATED + ((s->w_bits-8)<<4)) << 8;
		uInt level_flags = (s->level-1) >> 1;

		if (level_flags > 3) level_flags = 3;
		header |= (level_flags << 6);
		if (s->strstart != 0) header |= PRESET_DICT;
		header += 31 - (header % 31);

		s->status = BUSY_STATE;
		putShortMSB(s, header);

		/* Save the adler32 of the preset dictionary: */
		if (s->strstart != 0) {
			putShortMSB(s, (uInt)(strm->adler >> 16));
			putShortMSB(s, (uInt)(strm->adler & 0xffff));
		}
		strm->adler = 1L;
	}
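
	/*
	 * Added worked example (not in the original sources): for the
	 * defaults (w_bits == 15, level == 6, no preset dictionary),
	 * the computation above yields CMF = 0x78 (Z_DEFLATED +
	 * (15-8)<<4), FLG level bits 2<<6 = 0x80, and rounding the
	 * 16-bit value 0x7880 up to a multiple of 31 adds 0x1c,
	 * giving the familiar two-byte zlib header 0x78 0x9c.
	 */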

	/* Flush as much pending output as possible */
	if (s->pending != 0) {
		flush_pending(strm);
		if (strm->avail_out == 0) {
			/*
			 * Since avail_out is 0, deflate will be
			 * called again with more output space, but
			 * possibly with both pending and avail_in
			 * equal to zero. There won't be anything to
			 * do, but this is not an error situation so
			 * make sure we return OK instead of BUF_ERROR
			 * at next call of deflate:
			 */
			s->last_flush = -1;
			return (Z_OK);
		}

	/*
	 * Make sure there is something to do and avoid
	 * duplicate consecutive flushes. For repeated and
	 * useless calls with Z_FINISH, we keep returning
	 * Z_STREAM_END instead of Z_BUF_ERROR.
	 */
	} else if (strm->avail_in == 0 && flush <= old_flush &&
	    flush != Z_FINISH) {
		ERR_RETURN(strm, Z_BUF_ERROR);
	}

	/* User must not provide more input after the first FINISH: */
	if (s->status == FINISH_STATE && strm->avail_in != 0) {
		ERR_RETURN(strm, Z_BUF_ERROR);
	}

	/* Start a new block or continue the current one. */
	if (strm->avail_in != 0 || s->lookahead != 0 ||
	    (flush != Z_NO_FLUSH && s->status != FINISH_STATE)) {
		block_state bstate;

		bstate = (*(configuration_table[s->level].func))(s, flush);

		if (bstate == finish_started || bstate == finish_done) {
			s->status = FINISH_STATE;
		}
		if (bstate == need_more || bstate == finish_started) {
			if (strm->avail_out == 0) {
				/* avoid BUF_ERROR next call, see above */
				s->last_flush = -1;
			}
			return (Z_OK);
			/*
			 * If flush != Z_NO_FLUSH && avail_out == 0,
			 * the next call of deflate should use the
			 * same flush parameter to make sure that the
			 * flush is complete. So we don't have to
			 * output an empty block here, this will be
			 * done at next call. This also ensures that
			 * for a very small output buffer, we emit at
			 * most one empty block.
			 */
		}
		if (bstate == block_done) {
			if (flush == Z_PARTIAL_FLUSH) {
				_tr_align(s);
			} else if (flush == Z_PACKET_FLUSH) {	/* PPP */
				/*
				 * Output just the 3-bit `stored'
				 * block type value, but not a zero
				 * length. Added for PPP.
				 */
				_tr_stored_type_only(s);	/* PPP */
			} else {	/* FULL_FLUSH or SYNC_FLUSH */
				_tr_stored_block(s, (char *)0, 0L, 0);
				/*
				 * For a full flush, this empty block
				 * will be recognized as a special
				 * marker by inflate_sync().
				 */
				if (flush == Z_FULL_FLUSH) {
					CLEAR_HASH(s);	/* forget history */
				}
			}
			flush_pending(strm);
			if (strm->avail_out == 0) {
				/* avoid BUF_ERROR at next call, see above */
				s->last_flush = -1;
				return (Z_OK);
			}
		}
	}
	Assert(strm->avail_out > 0, "bug2");

	if (flush != Z_FINISH)
		return (Z_OK);
	if (s->noheader)
		return (Z_STREAM_END);

	/* Write the zlib trailer (adler32) */
	putShortMSB(s, (uInt)(strm->adler >> 16));
	putShortMSB(s, (uInt)(strm->adler & 0xffff));
	flush_pending(strm);
	/*
	 * If avail_out is zero, the application will call deflate
	 * again to flush the rest.
	 */
	s->noheader = -1;	/* write the trailer only once! */
	return (s->pending != 0 ? Z_OK : Z_STREAM_END);
}

/* ========================================================================= */
int
deflateEnd(strm)
	z_streamp strm;
{
	int status;
	deflate_state *s;

	if (strm == Z_NULL || strm->state == Z_NULL)
		return (Z_STREAM_ERROR);
	s = (deflate_state *) strm->state;

	status = s->status;
	if (status != INIT_STATE && status != BUSY_STATE &&
	    status != FINISH_STATE) {
		return (Z_STREAM_ERROR);
	}

	/* Deallocate in reverse order of allocations: */
	TRY_FREE(strm, s->pending_buf);
	TRY_FREE(strm, s->head);
	TRY_FREE(strm, s->prev);
	TRY_FREE(strm, s->window);

	ZFREE(strm, s);
	strm->state = Z_NULL;

	return (status == BUSY_STATE ? Z_DATA_ERROR : Z_OK);
}

/*
 * =========================================================================
 * Copy the source state to the destination state.
 * To simplify the source, this is not supported for 16-bit MSDOS (which
 * doesn't have enough memory anyway to duplicate compression states).
 */
int
deflateCopy(dest, source)
	z_streamp dest;
	z_streamp source;
{
#ifdef MAXSEG_64K
	return (Z_STREAM_ERROR);
#else
	deflate_state *ds;
	deflate_state *ss;
	ushf *overlay;

	if (source == Z_NULL || dest == Z_NULL || source->state == Z_NULL)
		return (Z_STREAM_ERROR);
	ss = (deflate_state *) source->state;

	zmemcpy(dest, source, sizeof (*dest));

	ds = (deflate_state *) ZALLOC(dest, 1, sizeof (deflate_state));
	if (ds == Z_NULL)
		return (Z_MEM_ERROR);
	dest->state = (struct internal_state FAR *) ds;
	zmemcpy(ds, ss, sizeof (*ds));
	ds->strm = dest;

	ds->window = (Bytef *) ZALLOC(dest, ds->w_size, 2*sizeof (Byte));
	ds->prev = (Posf *) ZALLOC(dest, ds->w_size, sizeof (Pos));
	ds->head = (Posf *) ZALLOC(dest, ds->hash_size, sizeof (Pos));
	overlay = (ushf *) ZALLOC(dest, ds->lit_bufsize, sizeof (ush)+2);
	ds->pending_buf = (uchf *) overlay;

	if (ds->window == Z_NULL || ds->prev == Z_NULL || ds->head == Z_NULL ||
	    ds->pending_buf == Z_NULL) {
		ds->status = INIT_STATE;
		(void) deflateEnd(dest);
		return (Z_MEM_ERROR);
	}
	/* following zmemcpy doesn't work for 16-bit MSDOS */
	zmemcpy(ds->window, ss->window, ds->w_size * 2 * sizeof (Byte));
	zmemcpy(ds->prev, ss->prev, ds->w_size * sizeof (Pos));
	zmemcpy(ds->head, ss->head, ds->hash_size * sizeof (Pos));
	zmemcpy(ds->pending_buf, ss->pending_buf, (uInt)ds->pending_buf_size);

	ds->pending_out = ds->pending_buf + (ss->pending_out - ss->pending_buf);
	ds->d_buf = overlay + ds->lit_bufsize/sizeof (ush);
	ds->l_buf = ds->pending_buf + (1+sizeof (ush))*ds->lit_bufsize;

	ds->l_desc.dyn_tree = ds->dyn_ltree;
	ds->d_desc.dyn_tree = ds->dyn_dtree;
	ds->bl_desc.dyn_tree = ds->bl_tree;

	return (Z_OK);
#endif
}

/*
 * ===========================================================================
 * Return the number of bytes of output which are immediately available
 * for output from the compressor. ---PPP---
 */
int
deflateOutputPending(strm)
	z_streamp strm;
{
	if (strm == Z_NULL || strm->state == Z_NULL)
		return (0);

	return (((deflate_state *)(strm->state))->pending);
}
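
/*
 * Added usage sketch (not in the original sources): how a PPP-style
 * caller might use the deflateOutputPending() addition above -- after
 * a Z_PACKET_FLUSH it reports how many compressed bytes are buffered,
 * which the caller can compare against the raw packet size before
 * deciding to send the compressed form.  ZLIB_EXAMPLES is a
 * hypothetical guard.
 */
#ifdef ZLIB_EXAMPLES
static int
example_pending_after_flush(z_streamp strm)
{
	if (deflate(strm, Z_PACKET_FLUSH) != Z_OK)
		return (-1);
	/* bytes generated but not yet copied out through next_out */
	return (deflateOutputPending(strm));
}
#endif /* ZLIB_EXAMPLES */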

/*
 * ===========================================================================
 * Read a new buffer from the current input stream, update the adler32
 * and total number of bytes read. All deflate() input goes through
 * this function so some applications may wish to modify it to avoid
 * allocating a large strm->next_in buffer and copying from it.
 * (See also flush_pending()).
 */
local int
read_buf(strm, buf, size)
	z_streamp strm;
	Bytef *buf;
	unsigned size;
{
	unsigned len = strm->avail_in;

	if (len > size) len = size;
	if (len == 0)
		return (0);

	strm->avail_in -= len;

	if (!((deflate_state *)(strm->state))->noheader) {
		strm->adler = adler32(strm->adler, strm->next_in, len);
	}
	zmemcpy(buf, strm->next_in, len);
	strm->next_in += len;
	strm->total_in += len;

	return ((int)len);
}

/*
 * ===========================================================================
 * Initialize the "longest match" routines for a new zlib stream
 */
local void
lm_init(s)
	deflate_state *s;
{
	s->window_size = (ulg)2L*s->w_size;

	CLEAR_HASH(s);

	/* Set the default configuration parameters: */
	s->max_lazy_match = configuration_table[s->level].max_lazy;
	s->good_match = configuration_table[s->level].good_length;
	s->nice_match = configuration_table[s->level].nice_length;
	s->max_chain_length = configuration_table[s->level].max_chain;

	s->strstart = 0;
	s->block_start = 0L;
	s->lookahead = 0;
	s->match_length = s->prev_length = MIN_MATCH-1;
	s->match_available = 0;
	s->ins_h = 0;
#ifdef ASMV
	match_init();	/* initialize the asm code */
#endif
}
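
/*
 * Added note (not in the original sources): read_buf() above folds
 * every consumed input byte into strm->adler unless the zlib header
 * was suppressed.  adler32() accumulates incrementally, so a checksum
 * computed one buffer at a time equals a single pass over the whole
 * input, which is what lets deflate() checksum piecemeal input.
 * ZLIB_EXAMPLES is a hypothetical guard.
 */
#ifdef ZLIB_EXAMPLES
static int
example_adler_incremental(const Bytef *buf, uInt n1, uInt n2)
{
	uLong a = adler32(0L, Z_NULL, 0);	/* initial value, 1 */

	a = adler32(a, buf, n1);		/* first chunk */
	a = adler32(a, buf + n1, n2);		/* second chunk */
	return (a == adler32(adler32(0L, Z_NULL, 0), buf, n1 + n2));
}
#endif /* ZLIB_EXAMPLES */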

/*
 * ===========================================================================
 * Set match_start to the longest match starting at the given string and
 * return its length. Matches shorter or equal to prev_length are discarded,
 * in which case the result is equal to prev_length and match_start is
 * garbage.
 * IN assertions: cur_match is the head of the hash chain for the current
 * string (strstart) and its distance is <= MAX_DIST, and prev_length >= 1
 * OUT assertion: the match length is not greater than s->lookahead.
 */
#ifndef ASMV
/*
 * For 80x86 and 680x0, an optimized version will be provided in
 * match.asm or match.S. The code will be functionally equivalent.
 */
#ifndef FASTEST
local uInt
longest_match(s, cur_match)
	deflate_state *s;
	IPos cur_match;	/* current match */
{
	/* max hash chain length */
	unsigned chain_length = s->max_chain_length;
	register Bytef *scan = s->window + s->strstart;	/* current string */
	register Bytef *match;	/* matched string */
	register int len;	/* length of current match */
	int best_len = s->prev_length;	/* best match length so far */
	int nice_match = s->nice_match;	/* stop if match long enough */
	IPos limit = s->strstart > (IPos)MAX_DIST(s) ?
	    s->strstart - (IPos)MAX_DIST(s) : NIL;
	/*
	 * Stop when cur_match becomes <= limit. To simplify the code,
	 * we prevent matches with the string of window index 0.
	 */
	Posf *prev = s->prev;
	uInt wmask = s->w_mask;

#ifdef UNALIGNED_OK
	/*
	 * Compare two bytes at a time. Note: this is not always
	 * beneficial. Try with and without -DUNALIGNED_OK to check.
	 */
	register Bytef *strend = s->window + s->strstart + MAX_MATCH - 1;
	register ush scan_start = *(ushf*)scan;
	register ush scan_end = *(ushf*)(scan+best_len-1);
#else
	register Bytef *strend = s->window + s->strstart + MAX_MATCH;
	register Byte scan_end1 = scan[best_len-1];
	register Byte scan_end = scan[best_len];
#endif

	/*
	 * The code is optimized for HASH_BITS >= 8 and MAX_MATCH-2
	 * multiple of 16. It is easy to get rid of this optimization
	 * if necessary.
	 */
	Assert(s->hash_bits >= 8 && MAX_MATCH == 258, "Code too clever");

	/* Do not waste too much time if we already have a good match: */
	if (s->prev_length >= s->good_match) {
		chain_length >>= 2;
	}
	/*
	 * Do not look for matches beyond the end of the input. This
	 * is necessary to make deflate deterministic.
	 */
	if ((uInt)nice_match > s->lookahead) nice_match = s->lookahead;

	Assert((ulg)s->strstart <= s->window_size-MIN_LOOKAHEAD,
	    "need lookahead");

	do {
		Assert(cur_match <= s->strstart, "no future");
		match = s->window + cur_match;

		/*
		 * Skip to next match if the match length cannot
		 * increase or if the match length is less than 2:
		 */
#if (defined(UNALIGNED_OK) && MAX_MATCH == 258)
		/*
		 * This code assumes sizeof (unsigned short) == 2. Do
		 * not use UNALIGNED_OK if your compiler uses a
		 * different size.
		 */
		if (*(ushf*)(match+best_len-1) != scan_end ||
		    *(ushf*)match != scan_start) continue;

		/*
		 * It is not necessary to compare scan[2] and match[2]
		 * since they are always equal when the other bytes
		 * match, given that the hash keys are equal and that
		 * HASH_BITS >= 8. Compare 2 bytes at a time at
		 * strstart+3, +5, ... up to strstart+257. We check
		 * for insufficient lookahead only every 4th
		 * comparison; the 128th check will be made at
		 * strstart+257. If MAX_MATCH-2 is not a multiple of
		 * 8, it is necessary to put more guard bytes at the
		 * end of the window, or to check more often for
		 * insufficient lookahead.
		 */
		Assert(scan[2] == match[2], "scan[2]?");
		scan++, match++;
		do {
		} while (*(ushf *)(scan += 2) == *(ushf *)(match += 2) &&
		    *(ushf *)(scan += 2) == *(ushf *)(match += 2) &&
		    *(ushf *)(scan += 2) == *(ushf *)(match += 2) &&
		    *(ushf *)(scan += 2) == *(ushf *)(match += 2) &&
		    scan < strend);
		/* The funny "do {}" generates better code on most compilers */

		/* Here, scan <= window+strstart+257 */
		Assert(scan <= s->window+(unsigned)(s->window_size-1),
		    "wild scan");
		if (*scan == *match) scan++;

		len = (MAX_MATCH - 1) - (int)(strend-scan);
		scan = strend - (MAX_MATCH-1);

#else /* UNALIGNED_OK */

		if (match[best_len] != scan_end ||
		    match[best_len-1] != scan_end1 ||
		    *match != *scan ||
		    *++match != scan[1])
			continue;

		/*
		 * The check at best_len-1 can be removed because it
		 * will be made again later. (This heuristic is not
		 * always a win.) It is not necessary to compare
		 * scan[2] and match[2] since they are always equal
		 * when the other bytes match, given that the hash
		 * keys are equal and that HASH_BITS >= 8.
		 */
		scan += 2, match++;
		Assert(*scan == *match, "match[2]?");

		/*
		 * We check for insufficient lookahead only every 8th
		 * comparison; the 256th check will be made at
		 * strstart+258.
		 */
		do {
		} while (*++scan == *++match && *++scan == *++match &&
		    *++scan == *++match && *++scan == *++match &&
		    *++scan == *++match && *++scan == *++match &&
		    *++scan == *++match && *++scan == *++match &&
		    scan < strend);

		Assert(scan <= s->window+(unsigned)(s->window_size-1),
		    "wild scan");

		len = MAX_MATCH - (int)(strend - scan);
		scan = strend - MAX_MATCH;

#endif /* UNALIGNED_OK */

		if (len > best_len) {
			s->match_start = cur_match;
			best_len = len;
			if (len >= nice_match) break;
#ifdef UNALIGNED_OK
			scan_end = *(ushf*)(scan+best_len-1);
#else
			scan_end1 = scan[best_len-1];
			scan_end = scan[best_len];
#endif
		}
	} while ((cur_match = prev[cur_match & wmask]) > limit &&
	    --chain_length != 0);

	if ((uInt)best_len <= s->lookahead)
		return (best_len);
	return (s->lookahead);
}
#else /* FASTEST */
/*
 * ---------------------------------------------------------------------------
 * Optimized version for level == 1 only
 */
local uInt
longest_match(s, cur_match)
	deflate_state *s;
	IPos cur_match;	/* current match */
{
	register Bytef *scan = s->window + s->strstart;	/* current string */
	register Bytef *match;	/* matched string */
	register int len;	/* length of current match */
	register Bytef *strend = s->window + s->strstart + MAX_MATCH;

	/*
	 * The code is optimized for HASH_BITS >= 8 and MAX_MATCH-2
	 * multiple of 16. It is easy to get rid of this optimization
	 * if necessary.
	 */
	Assert(s->hash_bits >= 8 && MAX_MATCH == 258, "Code too clever");

	Assert((ulg)s->strstart <= s->window_size-MIN_LOOKAHEAD,
	    "need lookahead");

	Assert(cur_match <= s->strstart, "no future");

	match = s->window + cur_match;

	/* Return failure if the match length is less than 2: */
	if (match[0] != scan[0] || match[1] != scan[1])
		return (MIN_MATCH-1);

	/*
	 * The check at best_len-1 can be removed because it will be
	 * made again later. (This heuristic is not always a win.) It
	 * is not necessary to compare scan[2] and match[2] since they
	 * are always equal when the other bytes match, given that the
	 * hash keys are equal and that HASH_BITS >= 8.
	 */
	scan += 2, match += 2;
	Assert(*scan == *match, "match[2]?");

	/*
	 * We check for insufficient lookahead only every 8th comparison;
	 * the 256th check will be made at strstart+258.
	 */
	do {
	} while (*++scan == *++match && *++scan == *++match &&
	    *++scan == *++match && *++scan == *++match &&
	    *++scan == *++match && *++scan == *++match &&
	    *++scan == *++match && *++scan == *++match &&
	    scan < strend);

	Assert(scan <= s->window+(unsigned)(s->window_size-1), "wild scan");

	len = MAX_MATCH - (int)(strend - scan);

	if (len < MIN_MATCH)
		return (MIN_MATCH - 1);

	s->match_start = cur_match;
	return (len <= s->lookahead ? len : s->lookahead);
}
#endif /* FASTEST */
#endif /* ASMV */

#ifdef DEBUG_ZLIB
/*
 * ===========================================================================
 * Check that the match at match_start is indeed a match.
 */
local void
check_match(s, start, match, length)
	deflate_state *s;
	IPos start, match;
	int length;
{
	/* check that the match is indeed a match */
	if (zmemcmp(s->window + match, s->window + start, length) != EQUAL) {
		fprintf(stderr, " start %u, match %u, length %d\n",
		    start, match, length);
		do {
			fprintf(stderr, "%c%c", s->window[match++],
			    s->window[start++]);
		} while (--length != 0);
		z_error("invalid match");
	}
	if (z_verbose > 1) {
		fprintf(stderr, "\\[%d,%d]", start-match, length);
		do { putc(s->window[start++], stderr); } while (--length != 0);
	}
}
#else
#define	check_match(s, start, match, length)
#endif

/*
 * ===========================================================================
 * Fill the window when the lookahead becomes insufficient.
 * Updates strstart and lookahead.
 *
 * IN assertion: lookahead < MIN_LOOKAHEAD
 * OUT assertions: strstart <= window_size-MIN_LOOKAHEAD
 *    At least one byte has been read, or avail_in == 0; reads are
 *    performed for at least two bytes (required for the zip translate_eol
 *    option -- not supported here).
 */
local void
fill_window(s)
	deflate_state *s;
{
	register unsigned n, m;
	register Posf *p;
	unsigned more;	/* Amount of free space at the end of the window. */
	uInt wsize = s->w_size;

	do {
		more = (unsigned)(s->window_size -(ulg)s->lookahead -
		    (ulg)s->strstart);

		/* Deal with !@#$% 64K limit: */
		if (more == 0 && s->strstart == 0 && s->lookahead == 0) {
			more = wsize;

		} else if (more == (unsigned)(-1)) {
			/*
			 * Very unlikely, but possible on 16 bit
			 * machine if strstart == 0 and lookahead == 1
			 * (input done one byte at time)
			 */
			more--;

		/*
		 * If the window is almost full and there is
		 * insufficient lookahead, move the upper half
		 * to the lower one to make room in the upper
		 * half.
		 */
		} else if (s->strstart >= wsize+MAX_DIST(s)) {

			Assert(wsize+wsize <= s->window_size, "wsize*2");
			zmemcpy(s->window, s->window+wsize, (unsigned)wsize);
			s->match_start -= wsize;
			/* we now have strstart >= MAX_DIST */
			s->strstart -= wsize;
			s->block_start -= (long)wsize;

			/*
			 * Slide the hash table (could be avoided with
			 * 32 bit values at the expense of memory
			 * usage). We slide even when level == 0 to
			 * keep the hash table consistent if we switch
			 * back to level > 0 later. (Using level 0
			 * permanently is not an optimal usage of
			 * zlib, so we don't care about this
			 * pathological case.)
			 */
			n = s->hash_size;
			p = &s->head[n];
			do {
				m = *--p;
				*p = (Pos)(m >= wsize ? m-wsize : NIL);
			} while (--n);

			n = wsize;
#ifndef FASTEST
			p = &s->prev[n];
			do {
				m = *--p;
				*p = (Pos)(m >= wsize ? m-wsize : NIL);
				/*
				 * If n is not on any hash chain,
				 * prev[n] is garbage but its value
				 * will never be used.
				 */
1788 */ 1789 } while (--n); 1790 #endif 1791 more += wsize; 1792 } 1793 if (s->strm->avail_in == 0) 1794 return; 1795 1796 /* 1797 * If there was no sliding: 1798 * strstart <= WSIZE+MAX_DIST-1 && 1799 * lookahead <= MIN_LOOKAHEAD - 1 && 1800 * more == window_size - lookahead - strstart 1801 * => more >= window_size - (MIN_LOOKAHEAD-1 + WSIZE + 1802 * MAX_DIST-1) 1803 * => more >= window_size - 2*WSIZE + 2 1804 * In the BIG_MEM or MMAP case (not yet supported), 1805 * window_size == input_size + MIN_LOOKAHEAD && 1806 * strstart + s->lookahead <= input_size => 1807 * more >= MIN_LOOKAHEAD. 1808 * Otherwise, window_size == 2*WSIZE so more >= 2. 1809 * If there was sliding, more >= WSIZE. So in all cases, 1810 * more >= 2. 1811 */ 1812 Assert(more >= 2, "more < 2"); 1813 Assert(s->strstart + s->lookahead + more <= s->window_size, 1814 "read too much"); 1815 1816 n = read_buf(s->strm, s->window + s->strstart + s->lookahead, 1817 more); 1818 s->lookahead += n; 1819 1820 /* Initialize the hash value now that we have some input: */ 1821 if (s->lookahead >= MIN_MATCH) { 1822 s->ins_h = s->window[s->strstart]; 1823 UPDATE_HASH(s, s->ins_h, s->window[s->strstart+1]); 1824 #if MIN_MATCH != 3 1825 Call UPDATE_HASH() MIN_MATCH-3 more times 1826 #endif 1827 } 1828 /* 1829 * If the whole input has less than MIN_MATCH bytes, 1830 * ins_h is garbage, but this is not important since 1831 * only literal bytes will be emitted. 1832 */ 1833 1834 } while (s->lookahead < MIN_LOOKAHEAD && s->strm->avail_in != 0); 1835 } 1836 1837 /* 1838 * =========================================================================== 1839 * Flush the current block, with given end-of-file flag. 1840 * IN assertion: strstart is set to the end of the current match. 1841 */ 1842 #define FLUSH_BLOCK_ONLY(s, eof) { \ 1843 _tr_flush_block(s, (s->block_start >= 0L ? \ 1844 (charf *)&s->window[(unsigned)s->block_start] : \ 1845 (charf *)Z_NULL), \ 1846 (ulg)((long)s->strstart - s->block_start), \ 1847 (eof)); \ 1848 s->block_start = s->strstart; \ 1849 flush_pending(s->strm); \ 1850 Tracev((stderr, "[FLUSH]")); \ 1851 } 1852 1853 /* Same but force premature exit if necessary. */ 1854 #define FLUSH_BLOCK(s, eof) { \ 1855 FLUSH_BLOCK_ONLY(s, eof); \ 1856 if (s->strm->avail_out == 0) \ 1857 return ((eof) ? finish_started : need_more); \ 1858 } 1859 1860 /* 1861 * =========================================================================== 1862 * Copy without compression as much as possible from the input stream, return 1863 * the current block state. 1864 * This function does not insert new strings in the dictionary since 1865 * uncompressible data is probably not useful. This function is used 1866 * only for the level=0 compression option. 1867 * NOTE: this function should be optimized to avoid extra copying from 1868 * window to pending_buf. 
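 *
 * Each stored block carries, after bit alignment, a 4-byte LEN/NLEN
 * header, where NLEN is the one's complement of LEN (emitted LSB
 * first by copy_block() below).
 */

/*
 * Illustration only -- not part of zlib.  A minimal sketch of that
 * header layout, guarded by the hypothetical ZLIB_EXAMPLE_SKETCHES
 * macro so it is normally compiled out:
 */
#ifdef ZLIB_EXAMPLE_SKETCHES
static void
example_stored_header(uch *dst, unsigned len)
{
	dst[0] = (uch)(len & 0xff);		/* LEN, low byte first */
	dst[1] = (uch)((len >> 8) & 0xff);
	dst[2] = (uch)~dst[0];			/* NLEN == one's complement */
	dst[3] = (uch)~dst[1];
}
#endif /* ZLIB_EXAMPLE_SKETCHES */

/*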
1869 */ 1870 local block_state 1871 deflate_stored(s, flush) 1872 deflate_state *s; 1873 int flush; 1874 { 1875 /* 1876 * Stored blocks are limited to 0xffff bytes, pending_buf is 1877 * limited to pending_buf_size, and each stored block has a 5 1878 * byte header: 1879 */ 1880 ulg max_block_size = 0xffff; 1881 ulg max_start; 1882 1883 if (max_block_size > s->pending_buf_size - 5) { 1884 max_block_size = s->pending_buf_size - 5; 1885 } 1886 1887 /* Copy as much as possible from input to output: */ 1888 for (;;) { 1889 /* Fill the window as much as possible: */ 1890 if (s->lookahead <= 1) { 1891 1892 Assert(s->strstart < s->w_size+MAX_DIST(s) || 1893 s->block_start >= (long)s->w_size, 1894 "slide too late"); 1895 1896 fill_window(s); 1897 if (s->lookahead == 0 && flush == Z_NO_FLUSH) 1898 return (need_more); 1899 1900 if (s->lookahead == 0) 1901 break; /* flush the current block */ 1902 } 1903 Assert(s->block_start >= 0L, "block gone"); 1904 1905 s->strstart += s->lookahead; 1906 s->lookahead = 0; 1907 1908 /* Emit a stored block if pending_buf will be full: */ 1909 max_start = s->block_start + max_block_size; 1910 if (s->strstart == 0 || (ulg)s->strstart >= max_start) { 1911 /* 1912 * strstart == 0 is possible when wraparound 1913 * on 16-bit machine 1914 */ 1915 s->lookahead = (uInt)(s->strstart - max_start); 1916 s->strstart = (uInt)max_start; 1917 FLUSH_BLOCK(s, 0); 1918 } 1919 /* 1920 * Flush if we may have to slide, otherwise 1921 * block_start may become negative and the data will 1922 * be gone: 1923 */ 1924 if (s->strstart - (uInt)s->block_start >= MAX_DIST(s)) { 1925 FLUSH_BLOCK(s, 0); 1926 } 1927 } 1928 FLUSH_BLOCK(s, flush == Z_FINISH); 1929 return (flush == Z_FINISH ? finish_done : block_done); 1930 } 1931 1932 /* 1933 * =========================================================================== 1934 * Compress as much as possible from the input stream, return the current 1935 * block state. 1936 * This function does not perform lazy evaluation of matches and inserts 1937 * new strings in the dictionary only for unmatched strings or for short 1938 * matches. It is used only for the fast compression options. 1939 */ 1940 local block_state 1941 deflate_fast(s, flush) 1942 deflate_state *s; 1943 int flush; 1944 { 1945 IPos hash_head = NIL; /* head of the hash chain */ 1946 int bflush; /* set if current block must be flushed */ 1947 1948 for (;;) { 1949 /* 1950 * Make sure that we always have enough lookahead, 1951 * except at the end of the input file. We need 1952 * MAX_MATCH bytes for the next match, plus MIN_MATCH 1953 * bytes to insert the string following the next 1954 * match. 1955 */ 1956 if (s->lookahead < MIN_LOOKAHEAD) { 1957 fill_window(s); 1958 if (s->lookahead < MIN_LOOKAHEAD && 1959 flush == Z_NO_FLUSH) { 1960 return (need_more); 1961 } 1962 if (s->lookahead == 0) 1963 break; /* flush the current block */ 1964 } 1965 1966 /* 1967 * Insert the string window[strstart .. strstart+2] in 1968 * the dictionary, and set hash_head to the head of 1969 * the hash chain: 1970 */ 1971 if (s->lookahead >= MIN_MATCH) { 1972 INSERT_STRING(s, s->strstart, hash_head); 1973 } 1974 1975 /* 1976 * Find the longest match, discarding those <= 1977 * prev_length. 
At this point we have always 1978 * match_length < MIN_MATCH 1979 */ 1980 if (hash_head != NIL && s->strstart - hash_head <= 1981 MAX_DIST(s)) { 1982 /* 1983 * To simplify the code, we prevent matches 1984 * with the string of window index 0 (in 1985 * particular we have to avoid a match of the 1986 * string with itself at the start of the 1987 * input file). 1988 */ 1989 if (s->strategy != Z_HUFFMAN_ONLY) { 1990 s->match_length = longest_match(s, hash_head); 1991 } 1992 /* longest_match() sets match_start */ 1993 } 1994 if (s->match_length >= MIN_MATCH) { 1995 check_match(s, s->strstart, s->match_start, 1996 s->match_length); 1997 1998 _tr_tally_dist(s, s->strstart - s->match_start, 1999 s->match_length - MIN_MATCH, bflush); 2000 2001 s->lookahead -= s->match_length; 2002 2003 /* 2004 * Insert new strings in the hash table only 2005 * if the match length is not too large. This 2006 * saves time but degrades compression. 2007 */ 2008 #ifndef FASTEST 2009 if (s->match_length <= s->max_insert_length && 2010 s->lookahead >= MIN_MATCH) { 2011 /* string at strstart already in hash table */ 2012 s->match_length--; 2013 do { 2014 s->strstart++; 2015 INSERT_STRING(s, s->strstart, 2016 hash_head); 2017 /* 2018 * strstart never exceeds 2019 * WSIZE-MAX_MATCH, so there 2020 * are always MIN_MATCH bytes 2021 * ahead. 2022 */ 2023 } while (--s->match_length != 0); 2024 s->strstart++; 2025 } else 2026 #endif 2027 { 2028 s->strstart += s->match_length; 2029 s->match_length = 0; 2030 s->ins_h = s->window[s->strstart]; 2031 UPDATE_HASH(s, s->ins_h, 2032 s->window[s->strstart+1]); 2033 #if MIN_MATCH != 3 2034 Call UPDATE_HASH() MIN_MATCH-3 more times 2035 #endif 2036 /* 2037 * If lookahead < MIN_MATCH, ins_h is 2038 * garbage, but it does not matter 2039 * since it will be recomputed at next 2040 * deflate call. 2041 */ 2042 } 2043 } else { 2044 /* No match, output a literal byte */ 2045 Tracevv((stderr, "%c", s->window[s->strstart])); 2046 _tr_tally_lit(s, s->window[s->strstart], bflush); 2047 s->lookahead--; 2048 s->strstart++; 2049 } 2050 if (bflush) FLUSH_BLOCK(s, 0); 2051 } 2052 FLUSH_BLOCK(s, flush == Z_FINISH); 2053 return (flush == Z_FINISH ? finish_done : block_done); 2054 } 2055 2056 /* 2057 * =========================================================================== 2058 * Same as above, but achieves better compression. We use a lazy 2059 * evaluation for matches: a match is finally adopted only if there is 2060 * no better match at the next window position. 2061 */ 2062 local block_state 2063 deflate_slow(s, flush) 2064 deflate_state *s; 2065 int flush; 2066 { 2067 IPos hash_head = NIL; /* head of hash chain */ 2068 int bflush; /* set if current block must be flushed */ 2069 2070 /* Process the input block. */ 2071 for (;;) { 2072 /* 2073 * Make sure that we always have enough lookahead, 2074 * except at the end of the input file. We need 2075 * MAX_MATCH bytes for the next match, plus MIN_MATCH 2076 * bytes to insert the string following the next 2077 * match. 2078 */ 2079 if (s->lookahead < MIN_LOOKAHEAD) { 2080 fill_window(s); 2081 if (s->lookahead < MIN_LOOKAHEAD && 2082 flush == Z_NO_FLUSH) { 2083 return (need_more); 2084 } 2085 /* flush the current block */ 2086 if (s->lookahead == 0) 2087 break; 2088 } 2089 2090 /* 2091 * Insert the string window[strstart .. 
strstart+2] in 2092 * the dictionary, and set hash_head to the head of 2093 * the hash chain: 2094 */ 2095 if (s->lookahead >= MIN_MATCH) { 2096 INSERT_STRING(s, s->strstart, hash_head); 2097 } 2098 2099 /* 2100 * Find the longest match, discarding those <= 2101 * prev_length. 2102 */ 2103 s->prev_length = s->match_length; 2104 s->prev_match = s->match_start; 2105 s->match_length = MIN_MATCH-1; 2106 2107 if (hash_head != NIL && s->prev_length < s->max_lazy_match && 2108 s->strstart - hash_head <= MAX_DIST(s)) { 2109 /* 2110 * To simplify the code, we prevent matches 2111 * with the string of window index 0 (in 2112 * particular we have to avoid a match of the 2113 * string with itself at the start of the 2114 * input file). 2115 */ 2116 if (s->strategy != Z_HUFFMAN_ONLY) { 2117 s->match_length = longest_match(s, hash_head); 2118 } 2119 /* longest_match() sets match_start */ 2120 2121 if (s->match_length <= 5 && 2122 (s->strategy == Z_FILTERED || 2123 (s->match_length == MIN_MATCH && 2124 s->strstart - s->match_start > TOO_FAR))) { 2125 2126 /* 2127 * If prev_match is also MIN_MATCH, 2128 * match_start is garbage but we will 2129 * ignore the current match anyway. 2130 */ 2131 s->match_length = MIN_MATCH-1; 2132 } 2133 } 2134 /* 2135 * If there was a match at the previous step and the 2136 * current match is not better, output the previous 2137 * match: 2138 */ 2139 if (s->prev_length >= MIN_MATCH && 2140 s->match_length <= s->prev_length) { 2141 uInt max_insert = s->strstart + s->lookahead - 2142 MIN_MATCH; 2143 /* Do not insert strings in hash table beyond this. */ 2144 2145 check_match(s, s->strstart-1, s->prev_match, 2146 s->prev_length); 2147 2148 _tr_tally_dist(s, s->strstart -1 - s->prev_match, 2149 s->prev_length - MIN_MATCH, bflush); 2150 2151 /* 2152 * Insert in hash table all strings up to the 2153 * end of the match. strstart-1 and strstart 2154 * are already inserted. If there is not 2155 * enough lookahead, the last two strings are 2156 * not inserted in the hash table. 2157 */ 2158 s->lookahead -= s->prev_length-1; 2159 s->prev_length -= 2; 2160 do { 2161 if (++s->strstart <= max_insert) { 2162 INSERT_STRING(s, s->strstart, 2163 hash_head); 2164 } 2165 } while (--s->prev_length != 0); 2166 s->match_available = 0; 2167 s->match_length = MIN_MATCH-1; 2168 s->strstart++; 2169 2170 if (bflush) FLUSH_BLOCK(s, 0); 2171 2172 } else if (s->match_available) { 2173 /* 2174 * If there was no match at the previous 2175 * position, output a single literal. If there 2176 * was a match but the current match is 2177 * longer, truncate the previous match to a 2178 * single literal. 2179 */ 2180 Tracevv((stderr, "%c", s->window[s->strstart-1])); 2181 _tr_tally_lit(s, s->window[s->strstart-1], bflush); 2182 if (bflush) { 2183 FLUSH_BLOCK_ONLY(s, 0); 2184 } 2185 s->strstart++; 2186 s->lookahead--; 2187 if (s->strm->avail_out == 0) 2188 return (need_more); 2189 } else { 2190 /* 2191 * There is no previous match to compare with, 2192 * wait for the next step to decide. 2193 */ 2194 s->match_available = 1; 2195 s->strstart++; 2196 s->lookahead--; 2197 } 2198 } 2199 Assert(flush != Z_NO_FLUSH, "no flush?"); 2200 if (s->match_available) { 2201 Tracevv((stderr, "%c", s->window[s->strstart-1])); 2202 _tr_tally_lit(s, s->window[s->strstart-1], bflush); 2203 s->match_available = 0; 2204 } 2205 FLUSH_BLOCK(s, flush == Z_FINISH); 2206 return (flush == Z_FINISH ? 
finish_done : block_done); 2207 } 2208 /* --- deflate.c */ 2209 2210 /* +++ trees.c */ 2211 /* 2212 * trees.c -- output deflated data using Huffman coding 2213 * Copyright (C) 1995-1998 Jean-loup Gailly 2214 * For conditions of distribution and use, see copyright notice in zlib.h 2215 */ 2216 2217 /* 2218 * ALGORITHM 2219 * 2220 * The "deflation" process uses several Huffman trees. The more 2221 * common source values are represented by shorter bit sequences. 2222 * 2223 * Each code tree is stored in a compressed form which is itself 2224 * a Huffman encoding of the lengths of all the code strings (in 2225 * ascending order by source values). The actual code strings are 2226 * reconstructed from the lengths in the inflate process, as described 2227 * in the deflate specification. 2228 * 2229 * REFERENCES 2230 * 2231 * Deutsch, L.P.,"'Deflate' Compressed Data Format Specification". 2232 * Available in ftp.uu.net:/pub/archiving/zip/doc/deflate-1.1.doc 2233 * 2234 * Storer, James A. 2235 * Data Compression: Methods and Theory, pp. 49-50. 2236 * Computer Science Press, 1988. ISBN 0-7167-8156-5. 2237 * 2238 * Sedgewick, R. 2239 * Algorithms, p290. 2240 * Addison-Wesley, 1983. ISBN 0-201-06672-6. 2241 */ 2242 2243 /* From: trees.c,v 1.11 1996/07/24 13:41:06 me Exp $ */ 2244 2245 /* #include "deflate.h" */ 2246 2247 #ifdef DEBUG_ZLIB 2248 #include <ctype.h> 2249 #endif 2250 2251 /* 2252 * =========================================================================== 2253 * Constants 2254 */ 2255 2256 #define MAX_BL_BITS 7 2257 /* Bit length codes must not exceed MAX_BL_BITS bits */ 2258 2259 #define END_BLOCK 256 2260 /* end of block literal code */ 2261 2262 #define REP_3_6 16 2263 /* repeat previous bit length 3-6 times (2 bits of repeat count) */ 2264 2265 #define REPZ_3_10 17 2266 /* repeat a zero length 3-10 times (3 bits of repeat count) */ 2267 2268 #define REPZ_11_138 18 2269 /* repeat a zero length 11-138 times (7 bits of repeat count) */ 2270 2271 /* extra bits for each length code */ 2272 local const int extra_lbits[LENGTH_CODES] = { 2273 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 2, 2, 2, 2, 3, 3, 3, 3, 4, 2274 4, 4, 4, 5, 5, 5, 5, 0}; 2275 2276 /* extra bits for each distance code */ 2277 local const int extra_dbits[D_CODES] = { 2278 0, 0, 0, 0, 1, 1, 2, 2, 3, 3, 4, 4, 5, 5, 6, 6, 7, 7, 8, 8, 9, 2279 9, 10, 10, 11, 11, 12, 12, 13, 13}; 2280 2281 /* extra bits for each bit length code */ 2282 local const int extra_blbits[BL_CODES] = { 2283 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 3, 7}; 2284 2285 local const uch bl_order[BL_CODES] = { 2286 16, 17, 18, 0, 8, 7, 9, 6, 10, 5, 11, 4, 12, 3, 13, 2, 14, 1, 15}; 2287 2288 /* 2289 * The lengths of the bit length codes are sent in order of decreasing 2290 * probability, to avoid transmitting the lengths for unused bit 2291 * length codes. 2292 */ 2293 2294 #define Buf_size (8 * 2*sizeof (char)) 2295 /* 2296 * Number of bits used within bi_buf. (bi_buf might be implemented on 2297 * more than 16 bits on some systems.) 2298 */ 2299 2300 /* 2301 * =========================================================================== 2302 * Local data. These are initialized only once. 2303 */ 2304 #define DIST_CODE_LEN 512 /* see definition of array dist_code below */ 2305 2306 local ct_data static_ltree[L_CODES+2]; 2307 /* 2308 * The static literal tree. Since the bit lengths are imposed, there 2309 * is no need for the L_CODES extra codes used during heap 2310 * construction. 
However The codes 286 and 287 are needed to build a 2311 * canonical tree (see _tr_init below). 2312 */ 2313 2314 local ct_data static_dtree[D_CODES]; 2315 /* 2316 * The static distance tree. (Actually a trivial tree since all codes 2317 * use 5 bits.) 2318 */ 2319 2320 local uch _dist_code[512]; 2321 /* 2322 * distance codes. The first 256 values correspond to the distances 3 2323 * .. 258, the last 256 values correspond to the top 8 bits of the 15 2324 * bit distances. 2325 */ 2326 2327 local uch _length_code[MAX_MATCH-MIN_MATCH+1]; 2328 /* length code for each normalized match length (0 == MIN_MATCH) */ 2329 2330 local int base_length[LENGTH_CODES]; 2331 /* First normalized length for each code (0 = MIN_MATCH) */ 2332 2333 local int base_dist[D_CODES]; 2334 /* First normalized distance for each code (0 = distance of 1) */ 2335 2336 struct static_tree_desc_s { 2337 const ct_data *static_tree; /* static tree or NULL */ 2338 const intf *extra_bits; /* extra bits for each code or NULL */ 2339 int extra_base; /* base index for extra_bits */ 2340 int elems; /* max number of elements in the tree */ 2341 int max_length; /* max bit length for the codes */ 2342 }; 2343 2344 local static_tree_desc static_l_desc = { 2345 static_ltree, extra_lbits, LITERALS+1, L_CODES, MAX_BITS}; 2346 2347 local static_tree_desc static_d_desc = { 2348 static_dtree, extra_dbits, 0, D_CODES, MAX_BITS}; 2349 2350 local static_tree_desc static_bl_desc = { 2351 (const ct_data *)0, extra_blbits, 0, BL_CODES, MAX_BL_BITS}; 2352 2353 /* 2354 * =========================================================================== 2355 * Local (static) routines in this file. 2356 */ 2357 2358 local void tr_static_init OF((void)); 2359 local void init_block OF((deflate_state *s)); 2360 local void pqdownheap OF((deflate_state *s, ct_data *tree, int k)); 2361 local void gen_bitlen OF((deflate_state *s, tree_desc *desc)); 2362 local void gen_codes OF((ct_data *tree, int max_code, ushf *bl_count)); 2363 local void build_tree OF((deflate_state *s, tree_desc *desc)); 2364 local void scan_tree OF((deflate_state *s, ct_data *tree, int max_code)); 2365 local void send_tree OF((deflate_state *s, ct_data *tree, int max_code)); 2366 local int build_bl_tree OF((deflate_state *s)); 2367 local void send_all_trees OF((deflate_state *s, int lcodes, int dcodes, 2368 int blcodes)); 2369 local void compress_block OF((deflate_state *s, ct_data *ltree, 2370 ct_data *dtree)); 2371 local void set_data_type OF((deflate_state *s)); 2372 local unsigned bi_reverse OF((unsigned value, int length)); 2373 local void bi_windup OF((deflate_state *s)); 2374 local void bi_flush OF((deflate_state *s)); 2375 local void copy_block OF((deflate_state *s, charf *buf, unsigned len, 2376 int header)); 2377 2378 #ifndef DEBUG_ZLIB 2379 #define send_code(s, c, tree) send_bits(s, tree[c].Code, tree[c].Len) 2380 /* Send a code of the given tree. c and tree must not have side effects */ 2381 2382 #else /* DEBUG_ZLIB */ 2383 #define send_code(s, c, tree) \ 2384 { if (z_verbose > 2) fprintf(stderr, "\ncd %3d ", (c)); \ 2385 send_bits(s, tree[c].Code, tree[c].Len); } 2386 #endif 2387 2388 /* 2389 * =========================================================================== 2390 * Output a short LSB first on the stream. 2391 * IN assertion: there is enough room in pendingBuf. 
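 * For example, put_short(s, 0x1234) appends the byte 0x34 and then
 * the byte 0x12 to the pending buffer.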
2392 */ 2393 #define put_short(s, w) { \ 2394 put_byte(s, (uch)((w) & 0xff)); \ 2395 put_byte(s, (uch)((ush)(w) >> 8)); \ 2396 } 2397 2398 /* 2399 * =========================================================================== 2400 * Send a value on a given number of bits. 2401 * IN assertion: length <= 16 and value fits in length bits. 2402 */ 2403 #ifdef DEBUG_ZLIB 2404 local void send_bits OF((deflate_state *s, int value, int length)); 2405 2406 local void 2407 send_bits(s, value, length) 2408 deflate_state *s; 2409 int value; /* value to send */ 2410 int length; /* number of bits */ 2411 { 2412 Tracevv((stderr, " l %2d v %4x ", length, value)); 2413 Assert(length > 0 && length <= 15, "invalid length"); 2414 s->bits_sent += (ulg)length; 2415 2416 /* 2417 * If not enough room in bi_buf, use (valid) bits from bi_buf 2418 * and (16 - bi_valid) bits from value, leaving (width - 2419 * (16-bi_valid)) unused bits in value. 2420 */ 2421 if (s->bi_valid > (int)Buf_size - length) { 2422 s->bi_buf |= (value << s->bi_valid); 2423 put_short(s, s->bi_buf); 2424 s->bi_buf = (ush)value >> (Buf_size - s->bi_valid); 2425 s->bi_valid += length - Buf_size; 2426 } else { 2427 s->bi_buf |= value << s->bi_valid; 2428 s->bi_valid += length; 2429 } 2430 } 2431 #else /* !DEBUG_ZLIB */ 2432 2433 #define send_bits(s, value, length) \ 2434 { int len = length; \ 2435 if (s->bi_valid > (int)Buf_size - len) {\ 2436 int val = value; \ 2437 s->bi_buf |= (val << s->bi_valid); \ 2438 put_short(s, s->bi_buf); \ 2439 s->bi_buf = (ush)val >> (Buf_size - s->bi_valid); \ 2440 s->bi_valid += len - Buf_size; \ 2441 } else {\ 2442 s->bi_buf |= (value) << s->bi_valid; \ 2443 s->bi_valid += len; \ 2444 }\ 2445 } 2446 #endif /* DEBUG_ZLIB */ 2447 2448 2449 #define MAX(a, b) (a >= b ? a : b) 2450 /* the arguments must not have side effects */ 2451 2452 /* 2453 * =========================================================================== 2454 * Initialize the various 'constant' tables. In a multi-threaded environment, 2455 * this function may be called by two threads concurrently, but this is 2456 * harmless since both invocations do exactly the same thing. 
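 */

/*
 * Worked example for send_bits() above (illustration only): with
 * Buf_size == 16 and bi_valid == 14, sending a 5-bit value v ORs
 * (v << 14) into bi_buf, flushes the filled 16-bit buffer with
 * put_short() -- the low 2 bits of v go out with it -- and keeps
 * the high 3 bits of v in bi_buf, leaving bi_valid == 3.
 */

/*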
2457 */ 2458 local void 2459 tr_static_init() 2460 { 2461 static int static_init_done = 0; 2462 int n; /* iterates over tree elements */ 2463 int bits; /* bit counter */ 2464 int length; /* length value */ 2465 int code; /* code value */ 2466 int dist; /* distance index */ 2467 ush bl_count[MAX_BITS+1]; 2468 /* number of codes at each bit length for an optimal tree */ 2469 2470 if (static_init_done) 2471 return; 2472 2473 /* For some embedded targets, global variables are not initialized: */ 2474 static_l_desc.static_tree = static_ltree; 2475 static_l_desc.extra_bits = extra_lbits; 2476 static_d_desc.static_tree = static_dtree; 2477 static_d_desc.extra_bits = extra_dbits; 2478 static_bl_desc.extra_bits = extra_blbits; 2479 2480 /* Initialize the mapping length (0..255) -> length code (0..28) */ 2481 length = 0; 2482 for (code = 0; code < LENGTH_CODES-1; code++) { 2483 base_length[code] = length; 2484 for (n = 0; n < (1<<extra_lbits[code]); n++) { 2485 _length_code[length++] = (uch)code; 2486 } 2487 } 2488 Assert(length == 256, "tr_static_init: length != 256"); 2489 /* 2490 * Note that the length 255 (match length 258) can be 2491 * represented in two different ways: code 284 + 5 bits or 2492 * code 285, so we overwrite _length_code[255] to use the best 2493 * encoding: 2494 */ 2495 _length_code[length-1] = (uch)code; 2496 2497 /* Initialize the mapping dist (0..32K) -> dist code (0..29) */ 2498 dist = 0; 2499 for (code = 0; code < 16; code++) { 2500 base_dist[code] = dist; 2501 for (n = 0; n < (1<<extra_dbits[code]); n++) { 2502 _dist_code[dist++] = (uch)code; 2503 } 2504 } 2505 Assert(dist == 256, "tr_static_init: dist != 256"); 2506 dist >>= 7; /* from now on, all distances are divided by 128 */ 2507 for (; code < D_CODES; code++) { 2508 base_dist[code] = dist << 7; 2509 for (n = 0; n < (1<<(extra_dbits[code]-7)); n++) { 2510 _dist_code[256 + dist++] = (uch)code; 2511 } 2512 } 2513 Assert(dist == 256, "tr_static_init: 256+dist != 512"); 2514 2515 /* Construct the codes of the static literal tree */ 2516 for (bits = 0; bits <= MAX_BITS; bits++) bl_count[bits] = 0; 2517 n = 0; 2518 while (n <= 143) static_ltree[n++].Len = 8, bl_count[8]++; 2519 while (n <= 255) static_ltree[n++].Len = 9, bl_count[9]++; 2520 while (n <= 279) static_ltree[n++].Len = 7, bl_count[7]++; 2521 while (n <= 287) static_ltree[n++].Len = 8, bl_count[8]++; 2522 /* 2523 * Codes 286 and 287 do not exist, but we must include them in the 2524 * tree construction to get a canonical Huffman tree (longest code 2525 * all ones) 2526 */ 2527 gen_codes((ct_data *)static_ltree, L_CODES+1, bl_count); 2528 2529 /* The static distance tree is trivial: */ 2530 for (n = 0; n < D_CODES; n++) { 2531 static_dtree[n].Len = 5; 2532 static_dtree[n].Code = bi_reverse((unsigned)n, 5); 2533 } 2534 static_init_done = 1; 2535 } 2536 2537 /* 2538 * =========================================================================== 2539 * Initialize the tree data structures for a new zlib stream. 
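 *
 * For reference (RFC 1951), the static literal/length codes that
 * tr_static_init() just built are, after gen_codes():
 *
 *	lit/len values	bits	codes
 *	  0 - 143	 8	00110000 through 10111111
 *	144 - 255	 9	110010000 through 111111111
 *	256 - 279	 7	0000000 through 0010111
 *	280 - 287	 8	11000000 through 11000111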
2540 */ 2541 void 2542 _tr_init(s) 2543 deflate_state *s; 2544 { 2545 tr_static_init(); 2546 2547 s->l_desc.dyn_tree = s->dyn_ltree; 2548 s->l_desc.stat_desc = &static_l_desc; 2549 2550 s->d_desc.dyn_tree = s->dyn_dtree; 2551 s->d_desc.stat_desc = &static_d_desc; 2552 2553 s->bl_desc.dyn_tree = s->bl_tree; 2554 s->bl_desc.stat_desc = &static_bl_desc; 2555 2556 s->bi_buf = 0; 2557 s->bi_valid = 0; 2558 s->last_eob_len = 8; /* enough lookahead for inflate */ 2559 s->compressed_len = 0L; /* PPP */ 2560 #ifdef DEBUG_ZLIB 2561 s->bits_sent = 0L; 2562 #endif 2563 2564 /* Initialize the first block of the first file: */ 2565 init_block(s); 2566 } 2567 2568 /* 2569 * =========================================================================== 2570 * Initialize a new block. 2571 */ 2572 local void 2573 init_block(s) 2574 deflate_state *s; 2575 { 2576 int n; /* iterates over tree elements */ 2577 2578 /* Initialize the trees. */ 2579 for (n = 0; n < L_CODES; n++) s->dyn_ltree[n].Freq = 0; 2580 for (n = 0; n < D_CODES; n++) s->dyn_dtree[n].Freq = 0; 2581 for (n = 0; n < BL_CODES; n++) s->bl_tree[n].Freq = 0; 2582 2583 s->dyn_ltree[END_BLOCK].Freq = 1; 2584 s->opt_len = s->static_len = 0L; 2585 s->last_lit = s->matches = 0; 2586 } 2587 2588 #define SMALLEST 1 2589 /* Index within the heap array of least frequent node in the Huffman tree */ 2590 2591 2592 /* 2593 * =========================================================================== 2594 * Remove the smallest element from the heap and recreate the heap with 2595 * one less element. Updates heap and heap_len. 2596 */ 2597 #define pqremove(s, tree, top) \ 2598 {\ 2599 top = s->heap[SMALLEST]; \ 2600 s->heap[SMALLEST] = s->heap[s->heap_len--]; \ 2601 pqdownheap(s, tree, SMALLEST); \ 2602 } 2603 2604 /* 2605 * =========================================================================== 2606 * Compares to subtrees, using the tree depth as tie breaker when 2607 * the subtrees have equal frequency. This minimizes the worst case length. 2608 */ 2609 #define smaller(tree, n, m, depth) \ 2610 (tree[n].Freq < tree[m].Freq || \ 2611 (tree[n].Freq == tree[m].Freq && depth[n] <= depth[m])) 2612 /* 2613 * =========================================================================== 2614 * Restore the heap property by moving down the tree starting at node k, 2615 * exchanging a node with the smallest of its two sons if necessary, stopping 2616 * when the heap property is re-established (each father smaller than its 2617 * two sons). 2618 */ 2619 local void 2620 pqdownheap(s, tree, k) 2621 deflate_state *s; 2622 ct_data *tree; /* the tree to restore */ 2623 int k; /* node to move down */ 2624 { 2625 int v = s->heap[k]; 2626 int j = k << 1; /* left son of k */ 2627 while (j <= s->heap_len) { 2628 /* Set j to the smallest of the two sons: */ 2629 if (j < s->heap_len && 2630 smaller(tree, s->heap[j+1], s->heap[j], s->depth)) { 2631 j++; 2632 } 2633 /* Exit if v is smaller than both sons */ 2634 if (smaller(tree, v, s->heap[j], s->depth)) break; 2635 2636 /* Exchange v with the smallest son */ 2637 s->heap[k] = s->heap[j]; k = j; 2638 2639 /* And continue down the tree, setting j to the left son of k */ 2640 j <<= 1; 2641 } 2642 s->heap[k] = v; 2643 } 2644 2645 /* 2646 * =========================================================================== 2647 * Compute the optimal bit lengths for a tree and update the total bit length 2648 * for the current block. 
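2648 * for the current block.
 * Codes that would come out longer than max_length are forced down
 * to max_length, and the Kraft inequality is then repaired by
 * moving other leaves deeper (the overflow loop below).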
2649 * IN assertion: the fields freq and dad are set, heap[heap_max] and 2650 * above are the tree nodes sorted by increasing frequency. 2651 * OUT assertions: the field len is set to the optimal bit length, the 2652 * array bl_count contains the frequencies for each bit length. 2653 * The length opt_len is updated; static_len is also updated if stree is 2654 * not null. 2655 */ 2656 local void 2657 gen_bitlen(s, desc) 2658 deflate_state *s; 2659 tree_desc *desc; /* the tree descriptor */ 2660 { 2661 ct_data *tree = desc->dyn_tree; 2662 int max_code = desc->max_code; 2663 const ct_data *stree = desc->stat_desc->static_tree; 2664 const intf *extra = desc->stat_desc->extra_bits; 2665 int base = desc->stat_desc->extra_base; 2666 int max_length = desc->stat_desc->max_length; 2667 int h; /* heap index */ 2668 int n, m; /* iterate over the tree elements */ 2669 int bits; /* bit length */ 2670 int xbits; /* extra bits */ 2671 ush f; /* frequency */ 2672 /* number of elements with bit length too large */ 2673 int overflow = 0; 2674 2675 for (bits = 0; bits <= MAX_BITS; bits++) s->bl_count[bits] = 0; 2676 2677 /* 2678 * In a first pass, compute the optimal bit lengths (which may 2679 * overflow in the case of the bit length tree). 2680 */ 2681 tree[s->heap[s->heap_max]].Len = 0; /* root of the heap */ 2682 2683 for (h = s->heap_max+1; h < HEAP_SIZE; h++) { 2684 n = s->heap[h]; 2685 bits = tree[tree[n].Dad].Len + 1; 2686 if (bits > max_length) bits = max_length, overflow++; 2687 tree[n].Len = (ush)bits; 2688 /* We overwrite tree[n].Dad which is no longer needed */ 2689 2690 if (n > max_code) continue; /* not a leaf node */ 2691 2692 s->bl_count[bits]++; 2693 xbits = 0; 2694 if (n >= base) xbits = extra[n-base]; 2695 f = tree[n].Freq; 2696 s->opt_len += (ulg)f * (bits + xbits); 2697 if (stree) s->static_len += (ulg)f * (stree[n].Len + xbits); 2698 } 2699 if (overflow == 0) 2700 return; 2701 2702 Trace((stderr, "\nbit length overflow\n")); 2703 /* This happens for example on obj2 and pic of the Calgary corpus */ 2704 2705 /* Find the first bit length which could increase: */ 2706 do { 2707 bits = max_length-1; 2708 while (s->bl_count[bits] == 0) bits--; 2709 s->bl_count[bits]--; /* move one leaf down the tree */ 2710 /* move one overflow item as its brother */ 2711 s->bl_count[bits+1] += 2; 2712 s->bl_count[max_length]--; 2713 /* 2714 * The brother of the overflow item also moves one 2715 * step up, but this does not affect 2716 * bl_count[max_length] 2717 */ 2718 overflow -= 2; 2719 } while (overflow > 0); 2720 2721 /* 2722 * Now recompute all bit lengths, scanning in increasing 2723 * frequency. h is still equal to HEAP_SIZE. (It is simpler 2724 * to reconstruct all lengths instead of fixing only the wrong 2725 * ones. This idea is taken from 'ar' written by Haruhiko 2726 * Okumura.) 2727 */ 2728 for (bits = max_length; bits != 0; bits--) { 2729 n = s->bl_count[bits]; 2730 while (n != 0) { 2731 m = s->heap[--h]; 2732 if (m > max_code) continue; 2733 if (tree[m].Len != (unsigned)bits) { 2734 Trace((stderr, "code %d bits %d->%d\n", m, 2735 tree[m].Len, bits)); 2736 s->opt_len += ((long)bits - (long)tree[m].Len) 2737 *(long)tree[m].Freq; 2738 tree[m].Len = (ush)bits; 2739 } 2740 n--; 2741 } 2742 } 2743 } 2744 2745 /* 2746 * =========================================================================== 2747 * Generate the codes for a given tree and bit counts (which need not be 2748 * optimal). 
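 * For example, with bl_count[1] == 1 and bl_count[2] == 2 the loop
 * below computes next_code[1] == 0 and next_code[2] == 2, so the
 * three codes of lengths 1, 2, 2 become 0, 10 and 11 in binary
 * (before bit reversal).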
2749 * IN assertion: the array bl_count contains the bit length statistics for 2750 * the given tree and the field len is set for all tree elements. 2751 * OUT assertion: the field code is set for all tree elements of non 2752 * zero code length. 2753 */ 2754 local void 2755 gen_codes(tree, max_code, bl_count) 2756 ct_data *tree; /* the tree to decorate */ 2757 int max_code; /* largest code with non zero frequency */ 2758 ushf *bl_count; /* number of codes at each bit length */ 2759 { 2760 /* next code value for each bit length */ 2761 ush next_code[MAX_BITS+1]; 2762 ush code = 0; /* running code value */ 2763 int bits; /* bit index */ 2764 int n; /* code index */ 2765 2766 /* 2767 * The distribution counts are first used to generate the code 2768 * values without bit reversal. 2769 */ 2770 for (bits = 1; bits <= MAX_BITS; bits++) { 2771 next_code[bits] = code = (code + bl_count[bits-1]) << 1; 2772 } 2773 /* 2774 * Check that the bit counts in bl_count are consistent. The 2775 * last code must be all ones. 2776 */ 2777 Assert(code + bl_count[MAX_BITS]-1 == (1<<MAX_BITS)-1, 2778 "inconsistent bit counts"); 2779 Tracev((stderr, "\ngen_codes: max_code %d ", max_code)); 2780 2781 for (n = 0; n <= max_code; n++) { 2782 int len = tree[n].Len; 2783 if (len == 0) continue; 2784 /* Now reverse the bits */ 2785 tree[n].Code = bi_reverse(next_code[len]++, len); 2786 2787 Tracecv(tree != static_ltree, 2788 (stderr, "\nn %3d %c l %2d c %4x (%x) ", 2789 n, (isgraph(n) ? n : ' '), len, tree[n].Code, 2790 next_code[len]-1)); 2791 } 2792 } 2793 2794 /* 2795 * =========================================================================== 2796 * Construct one Huffman tree and assigns the code bit strings and lengths. 2797 * Update the total bit length for the current block. 2798 * IN assertion: the field freq is set for all tree elements. 2799 * OUT assertions: the fields len and code are set to the optimal bit length 2800 * and corresponding code. The length opt_len is updated; static_len is 2801 * also updated if stree is not null. The field max_code is set. 2802 */ 2803 local void 2804 build_tree(s, desc) 2805 deflate_state *s; 2806 tree_desc *desc; /* the tree descriptor */ 2807 { 2808 ct_data *tree = desc->dyn_tree; 2809 const ct_data *stree = desc->stat_desc->static_tree; 2810 int elems = desc->stat_desc->elems; 2811 int n, m; /* iterate over heap elements */ 2812 int max_code = -1; /* largest code with non zero frequency */ 2813 int node; /* new node being created */ 2814 2815 /* 2816 * Construct the initial heap, with least frequent element in 2817 * heap[SMALLEST]. The sons of heap[n] are heap[2*n] and 2818 * heap[2*n+1]. heap[0] is not used. 2819 */ 2820 s->heap_len = 0, s->heap_max = HEAP_SIZE; 2821 2822 for (n = 0; n < elems; n++) { 2823 if (tree[n].Freq != 0) { 2824 s->heap[++(s->heap_len)] = max_code = n; 2825 s->depth[n] = 0; 2826 } else { 2827 tree[n].Len = 0; 2828 } 2829 } 2830 2831 /* 2832 * The pkzip format requires that at least one distance code 2833 * exists, and that at least one bit should be sent even if 2834 * there is only one possible code. So to avoid special checks 2835 * later on we force at least two codes of non zero frequency. 2836 */ 2837 while (s->heap_len < 2) { 2838 node = s->heap[++(s->heap_len)] = (max_code < 2 ? 
2839 ++max_code : 0); 2840 tree[node].Freq = 1; 2841 s->depth[node] = 0; 2842 s->opt_len--; if (stree) s->static_len -= stree[node].Len; 2843 /* node is 0 or 1 so it does not have extra bits */ 2844 } 2845 desc->max_code = max_code; 2846 2847 /* 2848 * The elements heap[heap_len/2+1 .. heap_len] are leaves of 2849 * the tree, establish sub-heaps of increasing lengths: 2850 */ 2851 for (n = s->heap_len/2; n >= 1; n--) pqdownheap(s, tree, n); 2852 2853 /* 2854 * Construct the Huffman tree by repeatedly combining the 2855 * least two frequent nodes. 2856 */ 2857 node = elems; /* next internal node of the tree */ 2858 do { 2859 pqremove(s, tree, n); /* n = node of least frequency */ 2860 m = s->heap[SMALLEST]; /* m = node of next least frequency */ 2861 2862 /* keep the nodes sorted by frequency */ 2863 s->heap[--(s->heap_max)] = n; 2864 s->heap[--(s->heap_max)] = m; 2865 2866 /* Create a new node father of n and m */ 2867 tree[node].Freq = tree[n].Freq + tree[m].Freq; 2868 s->depth[node] = (uch) (MAX(s->depth[n], s->depth[m]) + 1); 2869 tree[n].Dad = tree[m].Dad = (ush)node; 2870 #ifdef DUMP_BL_TREE 2871 if (tree == s->bl_tree) { 2872 fprintf(stderr, "\nnode %d(%d), sons %d(%d) %d(%d)", 2873 node, tree[node].Freq, n, tree[n].Freq, m, 2874 tree[m].Freq); 2875 } 2876 #endif 2877 /* and insert the new node in the heap */ 2878 s->heap[SMALLEST] = node++; 2879 pqdownheap(s, tree, SMALLEST); 2880 2881 } while (s->heap_len >= 2); 2882 2883 s->heap[--(s->heap_max)] = s->heap[SMALLEST]; 2884 2885 /* 2886 * At this point, the fields freq and dad are set. We can now 2887 * generate the bit lengths. 2888 */ 2889 gen_bitlen(s, (tree_desc *)desc); 2890 2891 /* The field len is now set, we can generate the bit codes */ 2892 gen_codes((ct_data *)tree, max_code, s->bl_count); 2893 } 2894 2895 /* 2896 * =========================================================================== 2897 * Scan a literal or distance tree to determine the frequencies of the codes 2898 * in the bit length tree. 
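 * The lengths are run-length encoded: REP_3_6 repeats the previous
 * nonzero length 3 to 6 times, while REPZ_3_10 and REPZ_11_138
 * repeat a zero length.  For example, five consecutive codes of
 * length 5 are sent as the bit length code 5 followed by REP_3_6
 * with a repeat count of 4 (see send_tree() below).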
2899 */ 2900 local void 2901 scan_tree(s, tree, max_code) 2902 deflate_state *s; 2903 ct_data *tree; /* the tree to be scanned */ 2904 int max_code; /* and its largest code of non zero frequency */ 2905 { 2906 int n; /* iterates over all tree elements */ 2907 int prevlen = -1; /* last emitted length */ 2908 int curlen; /* length of current code */ 2909 int nextlen = tree[0].Len; /* length of next code */ 2910 int count = 0; /* repeat count of the current code */ 2911 int max_count = 7; /* max repeat count */ 2912 int min_count = 4; /* min repeat count */ 2913 2914 if (nextlen == 0) max_count = 138, min_count = 3; 2915 tree[max_code+1].Len = (ush)0xffff; /* guard */ 2916 2917 for (n = 0; n <= max_code; n++) { 2918 curlen = nextlen; nextlen = tree[n+1].Len; 2919 if (++count < max_count && curlen == nextlen) { 2920 continue; 2921 } else if (count < min_count) { 2922 s->bl_tree[curlen].Freq += count; 2923 } else if (curlen != 0) { 2924 if (curlen != prevlen) s->bl_tree[curlen].Freq++; 2925 s->bl_tree[REP_3_6].Freq++; 2926 } else if (count <= 10) { 2927 s->bl_tree[REPZ_3_10].Freq++; 2928 } else { 2929 s->bl_tree[REPZ_11_138].Freq++; 2930 } 2931 count = 0; prevlen = curlen; 2932 if (nextlen == 0) { 2933 max_count = 138, min_count = 3; 2934 } else if (curlen == nextlen) { 2935 max_count = 6, min_count = 3; 2936 } else { 2937 max_count = 7, min_count = 4; 2938 } 2939 } 2940 } 2941 2942 /* 2943 * =========================================================================== 2944 * Send a literal or distance tree in compressed form, using the codes in 2945 * bl_tree. 2946 */ 2947 local void 2948 send_tree(s, tree, max_code) 2949 deflate_state *s; 2950 ct_data *tree; /* the tree to be scanned */ 2951 int max_code; /* and its largest code of non zero frequency */ 2952 { 2953 int n; /* iterates over all tree elements */ 2954 int prevlen = -1; /* last emitted length */ 2955 int curlen; /* length of current code */ 2956 int nextlen = tree[0].Len; /* length of next code */ 2957 int count = 0; /* repeat count of the current code */ 2958 int max_count = 7; /* max repeat count */ 2959 int min_count = 4; /* min repeat count */ 2960 2961 /* tree[max_code+1].Len = -1; */ /* guard already set */ 2962 if (nextlen == 0) max_count = 138, min_count = 3; 2963 2964 for (n = 0; n <= max_code; n++) { 2965 curlen = nextlen; nextlen = tree[n+1].Len; 2966 if (++count < max_count && curlen == nextlen) { 2967 continue; 2968 } else if (count < min_count) { 2969 do { send_code(s, curlen, s->bl_tree); } 2970 while (--count != 0); 2971 2972 } else if (curlen != 0) { 2973 if (curlen != prevlen) { 2974 send_code(s, curlen, s->bl_tree); count--; 2975 } 2976 Assert(count >= 3 && count <= 6, " 3_6?"); 2977 send_code(s, REP_3_6, s->bl_tree); 2978 send_bits(s, count-3, 2); 2979 2980 } else if (count <= 10) { 2981 send_code(s, REPZ_3_10, s->bl_tree); 2982 send_bits(s, count-3, 3); 2983 2984 } else { 2985 send_code(s, REPZ_11_138, s->bl_tree); 2986 send_bits(s, count-11, 7); 2987 } 2988 count = 0; prevlen = curlen; 2989 if (nextlen == 0) { 2990 max_count = 138, min_count = 3; 2991 } else if (curlen == nextlen) { 2992 max_count = 6, min_count = 3; 2993 } else { 2994 max_count = 7, min_count = 4; 2995 } 2996 } 2997 } 2998 2999 /* 3000 * =========================================================================== 3001 * Construct the Huffman tree for the bit lengths and return the index in 3002 * bl_order of the last bit length code to send. 
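 * The header cost added to opt_len below is 5+5+4 bits for the
 * lcodes/dcodes/blcodes counts plus 3 bits per bit length code
 * sent; for example, max_blindex == 18 (all 19 codes sent) costs
 * 3*19 + 14 == 71 bits.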
3003 */ 3004 local int 3005 build_bl_tree(s) 3006 deflate_state *s; 3007 { 3008 /* index of last bit length code of non zero freq */ 3009 int max_blindex; 3010 3011 /* 3012 * Determine the bit length frequencies for literal and 3013 * distance trees 3014 */ 3015 scan_tree(s, (ct_data *)s->dyn_ltree, s->l_desc.max_code); 3016 scan_tree(s, (ct_data *)s->dyn_dtree, s->d_desc.max_code); 3017 3018 /* Build the bit length tree: */ 3019 build_tree(s, (tree_desc *)(&(s->bl_desc))); 3020 /* 3021 * opt_len now includes the length of the tree 3022 * representations, except the lengths of the bit lengths 3023 * codes and the 5+5+4 bits for the counts. 3024 */ 3025 3026 /* 3027 * Determine the number of bit length codes to send. The pkzip 3028 * format requires that at least 4 bit length codes be 3029 * sent. (appnote.txt says 3 but the actual value used is 4.) 3030 */ 3031 for (max_blindex = BL_CODES-1; max_blindex >= 3; max_blindex--) { 3032 if (s->bl_tree[bl_order[max_blindex]].Len != 0) break; 3033 } 3034 /* Update opt_len to include the bit length tree and counts */ 3035 s->opt_len += 3*(max_blindex+1) + 5+5+4; 3036 Tracev((stderr, "\ndyn trees: dyn %ld, stat %ld", 3037 s->opt_len, s->static_len)); 3038 3039 return (max_blindex); 3040 } 3041 3042 /* 3043 * =========================================================================== 3044 * Send the header for a block using dynamic Huffman trees: the counts, the 3045 * lengths of the bit length codes, the literal tree and the distance tree. 3046 * IN assertion: lcodes >= 257, dcodes >= 1, blcodes >= 4. 3047 */ 3048 local void 3049 send_all_trees(s, lcodes, dcodes, blcodes) 3050 deflate_state *s; 3051 int lcodes, dcodes, blcodes; /* number of codes for each tree */ 3052 { 3053 int rank; /* index in bl_order */ 3054 3055 Assert(lcodes >= 257 && dcodes >= 1 && blcodes >= 4, 3056 "not enough codes"); 3057 Assert(lcodes <= L_CODES && dcodes <= D_CODES && blcodes <= BL_CODES, 3058 "too many codes"); 3059 Tracev((stderr, "\nbl counts: ")); 3060 send_bits(s, lcodes-257, 5); /* not +255 as stated in appnote.txt */ 3061 send_bits(s, dcodes-1, 5); 3062 send_bits(s, blcodes-4, 4); /* not -3 as stated in appnote.txt */ 3063 for (rank = 0; rank < blcodes; rank++) { 3064 Tracev((stderr, "\nbl code %2d ", bl_order[rank])); 3065 send_bits(s, s->bl_tree[bl_order[rank]].Len, 3); 3066 } 3067 #ifdef DEBUG_ZLIB 3068 Tracev((stderr, "\nbl tree: sent %ld", s->bits_sent)); 3069 #endif 3070 3071 /* literal tree */ 3072 send_tree(s, (ct_data *)s->dyn_ltree, lcodes-1); 3073 #ifdef DEBUG_ZLIB 3074 Tracev((stderr, "\nlit tree: sent %ld", s->bits_sent)); 3075 #endif 3076 3077 /* distance tree */ 3078 send_tree(s, (ct_data *)s->dyn_dtree, dcodes-1); 3079 #ifdef DEBUG_ZLIB 3080 Tracev((stderr, "\ndist tree: sent %ld", s->bits_sent)); 3081 #endif 3082 } 3083 3084 /* 3085 * =========================================================================== 3086 * Send a stored block 3087 */ 3088 void 3089 _tr_stored_block(s, buf, stored_len, eof) 3090 deflate_state *s; 3091 charf *buf; /* input block */ 3092 ulg stored_len; /* length of input block */ 3093 int eof; /* true if this is the last block for a file */ 3094 { 3095 send_bits(s, (STORED_BLOCK<<1)+eof, 3); /* send block type */ 3096 s->compressed_len = (s->compressed_len + 3 + 7) & (ulg)~7L; /* PPP */ 3097 s->compressed_len += (stored_len + 4) << 3; /* PPP */ 3098 3099 copy_block(s, buf, (unsigned)stored_len, 1); /* with header */ 3100 } 3101 3102 /* 3103 * Send just the `stored block' type code without any length bytes or data. 
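 * PPP uses this when flushing a packet: the packet framing, rather
 * than the usual LEN/NLEN words, tells the peer where the stored
 * data ends.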
3104 * ---PPP--- 3105 */ 3106 void 3107 _tr_stored_type_only(s) 3108 deflate_state *s; 3109 { 3110 send_bits(s, (STORED_BLOCK << 1), 3); 3111 bi_windup(s); 3112 s->compressed_len = (s->compressed_len + 3) & ~7L; /* PPP */ 3113 } 3114 3115 3116 /* 3117 * =========================================================================== 3118 * Send one empty static block to give enough lookahead for inflate. 3119 * This takes 10 bits, of which 7 may remain in the bit buffer. 3120 * The current inflate code requires 9 bits of lookahead. If the 3121 * last two codes for the previous block (real code plus EOB) were coded 3122 * on 5 bits or less, inflate may have only 5+3 bits of lookahead to decode 3123 * the last real code. In this case we send two empty static blocks instead 3124 * of one. (There are no problems if the previous block is stored or fixed.) 3125 * To simplify the code, we assume the worst case of last real code encoded 3126 * on one bit only. 3127 */ 3128 void 3129 _tr_align(s) 3130 deflate_state *s; 3131 { 3132 send_bits(s, STATIC_TREES<<1, 3); 3133 send_code(s, END_BLOCK, static_ltree); 3134 s->compressed_len += 10L; /* 3 for block type, 7 for EOB */ 3135 bi_flush(s); 3136 /* 3137 * Of the 10 bits for the empty block, we have already sent 3138 * (10 - bi_valid) bits. The lookahead for the last real code 3139 * (before the EOB of the previous block) was thus at least 3140 * one plus the length of the EOB plus what we have just sent 3141 * of the empty static block. 3142 */ 3143 if (1 + s->last_eob_len + 10 - s->bi_valid < 9) { 3144 send_bits(s, STATIC_TREES<<1, 3); 3145 send_code(s, END_BLOCK, static_ltree); 3146 s->compressed_len += 10L; 3147 bi_flush(s); 3148 } 3149 s->last_eob_len = 7; 3150 } 3151 3152 /* 3153 * =========================================================================== 3154 * Determine the best encoding for the current block: dynamic trees, static 3155 * trees or store, and output the encoded block to the zip file. 3156 */ 3157 void 3158 _tr_flush_block(s, buf, stored_len, eof) 3159 deflate_state *s; 3160 charf *buf; /* input block, or NULL if too old */ 3161 ulg stored_len; /* length of input block */ 3162 int eof; /* true if this is the last block for a file */ 3163 { 3164 ulg opt_lenb, static_lenb; /* opt_len and static_len in bytes */ 3165 /* index of last bit length code of non zero freq */ 3166 int max_blindex = 0; 3167 3168 /* Build the Huffman trees unless a stored block is forced */ 3169 if (s->level > 0) { 3170 3171 /* Check if the file is ascii or binary */ 3172 if (s->data_type == Z_UNKNOWN) set_data_type(s); 3173 3174 /* Construct the literal and distance trees */ 3175 build_tree(s, (tree_desc *)(&(s->l_desc))); 3176 Tracev((stderr, "\nlit data: dyn %ld, stat %ld", s->opt_len, 3177 s->static_len)); 3178 3179 build_tree(s, (tree_desc *)(&(s->d_desc))); 3180 Tracev((stderr, "\ndist data: dyn %ld, stat %ld", s->opt_len, 3181 s->static_len)); 3182 /* 3183 * At this point, opt_len and static_len are the total 3184 * bit lengths of the compressed block data, excluding 3185 * the tree representations. 3186 */ 3187 3188 /* 3189 * Build the bit length tree for the above two trees, 3190 * and get the index in bl_order of the last bit 3191 * length code to send. 3192 */ 3193 max_blindex = build_bl_tree(s); 3194 3195 /* 3196 * Determine the best encoding. 
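 * A stored block wins whenever the raw bytes (plus length words)
 * cost no more than the compressed form, and static trees win over
 * dynamic trees when they cost no more bits; for example, opt_len
 * == 1234 bits gives opt_lenb == (1234+3+7)>>3 == 155 bytes.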
Compute first the 3197 * block length in bytes 3198 */ 3199 opt_lenb = (s->opt_len+3+7)>>3; 3200 static_lenb = (s->static_len+3+7)>>3; 3201 3202 Tracev((stderr, 3203 "\nopt %lu(%lu) stat %lu(%lu) stored %lu lit %u ", 3204 opt_lenb, s->opt_len, static_lenb, s->static_len, 3205 stored_len, s->last_lit)); 3206 3207 if (static_lenb <= opt_lenb) opt_lenb = static_lenb; 3208 3209 } else { 3210 Assert(buf != (char *)0, "lost buf"); 3211 /* force a stored block */ 3212 opt_lenb = static_lenb = stored_len + 5; 3213 } 3214 3215 /* 3216 * If compression failed and this is the first and last block, 3217 * and if the .zip file can be seeked (to rewrite the local 3218 * header), the whole file is transformed into a stored file: 3219 */ 3220 #ifdef STORED_FILE_OK 3221 #ifdef FORCE_STORED_FILE 3222 #define FRC_STR_COND eof && s->compressed_len == 0L /* force stored file */ 3223 #else 3224 #define FRC_STR_COND stored_len <= opt_lenb && eof && \ 3225 s->compressed_len == 0L && seekable() 3226 #endif 3227 if (FRC_STR_COND) { 3228 #undef FRC_STR_COND 3229 /* 3230 * Since LIT_BUFSIZE <= 2*WSIZE, the input data must 3231 * be there: 3232 */ 3233 if (buf == (charf*)0) error("block vanished"); 3234 3235 /* without header */ 3236 copy_block(s, buf, (unsigned)stored_len, 0); 3237 s->compressed_len = stored_len << 3; 3238 s->method = STORED; 3239 } else 3240 #endif /* STORED_FILE_OK */ 3241 3242 #ifdef FORCE_STORED 3243 #define FRC_STR_COND buf != (char *)0 /* force stored block */ 3244 #else 3245 /* 4: two words for the lengths */ 3246 #define FRC_STR_COND stored_len+4 <= opt_lenb && buf != (char *)0 3247 #endif 3248 if (FRC_STR_COND) { 3249 #undef FRC_STR_COND 3250 /* 3251 * The test buf != NULL is only necessary if 3252 * LIT_BUFSIZE > WSIZE. Otherwise we can't 3253 * have processed more than WSIZE input bytes 3254 * since the last block flush, because 3255 * compression would have been successful. If 3256 * LIT_BUFSIZE <= WSIZE, it is never too late 3257 * to transform a block into a stored block. 3258 */ 3259 _tr_stored_block(s, buf, stored_len, eof); 3260 #ifdef FORCE_STATIC 3261 #define FRC_STAT_COND static_lenb >= 0 /* force static trees */ 3262 #else 3263 #define FRC_STAT_COND static_lenb == opt_lenb 3264 #endif 3265 } else if (FRC_STAT_COND) { 3266 #undef FRC_STAT_COND 3267 send_bits(s, (STATIC_TREES<<1)+eof, 3); 3268 compress_block(s, (ct_data *)static_ltree, 3269 (ct_data *)static_dtree); 3270 s->compressed_len += 3 + s->static_len; /* PPP */ 3271 } else { 3272 send_bits(s, (DYN_TREES<<1)+eof, 3); 3273 send_all_trees(s, s->l_desc.max_code+1, 3274 s->d_desc.max_code+1, 3275 max_blindex+1); 3276 compress_block(s, (ct_data *)s->dyn_ltree, 3277 (ct_data *)s->dyn_dtree); 3278 s->compressed_len += 3 + s->opt_len; /* PPP */ 3279 } 3280 #ifdef DEBUG_ZLIB 3281 Assert(s->compressed_len == s->bits_sent, "bad compressed size"); 3282 #endif 3283 /* 3284 * The above check is made mod 2^32, for files larger than 512 3285 * MB and uLong implemented on 32 bits. 3286 */ 3287 init_block(s); 3288 3289 if (eof) { 3290 bi_windup(s); 3291 s->compressed_len += 7; /* align on byte boundary PPP */ 3292 } 3293 Tracev((stderr, "\ncomprlen %lu(%lu) ", s->compressed_len>>3, 3294 s->compressed_len-7*eof)); 3295 3296 /* return (s->compressed_len >> 3); */ 3297 } 3298 3299 /* 3300 * =========================================================================== 3301 * Save the match info and tally the frequency counts. Return true if 3302 * the current block must be flushed. 
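 * For example, a literal byte c is recorded as _tr_tally(s, 0, c),
 * while a match of length 10 at distance 300 is recorded as
 * _tr_tally(s, 300, 10 - MIN_MATCH), i.e. _tr_tally(s, 300, 7).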
3303 */ 3304 int 3305 _tr_tally(s, dist, lc) 3306 deflate_state *s; 3307 unsigned dist; /* distance of matched string */ 3308 /* match length-MIN_MATCH or unmatched char (if dist==0) */ 3309 unsigned lc; 3310 { 3311 s->d_buf[s->last_lit] = (ush)dist; 3312 s->l_buf[s->last_lit++] = (uch)lc; 3313 if (dist == 0) { 3314 /* lc is the unmatched char */ 3315 s->dyn_ltree[lc].Freq++; 3316 } else { 3317 s->matches++; 3318 /* Here, lc is the match length - MIN_MATCH */ 3319 dist--; /* dist = match distance - 1 */ 3320 Assert((ush)dist < (ush)MAX_DIST(s) && 3321 (ush)lc <= (ush)(MAX_MATCH-MIN_MATCH) && 3322 (ush)d_code(dist) < (ush)D_CODES, "_tr_tally: bad match"); 3323 3324 s->dyn_ltree[_length_code[lc]+LITERALS+1].Freq++; 3325 s->dyn_dtree[d_code(dist)].Freq++; 3326 } 3327 3328 #ifdef TRUNCATE_BLOCK 3329 /* Try to guess if it is profitable to stop the current block here */ 3330 if ((s->last_lit & 0x1fff) == 0 && s->level > 2) { 3331 /* Compute an upper bound for the compressed length */ 3332 ulg out_length = (ulg)s->last_lit*8L; 3333 ulg in_length = (ulg)((long)s->strstart - s->block_start); 3334 int dcode; 3335 for (dcode = 0; dcode < D_CODES; dcode++) { 3336 out_length += (ulg)s->dyn_dtree[dcode].Freq * 3337 (5L+extra_dbits[dcode]); 3338 } 3339 out_length >>= 3; 3340 Tracev((stderr, "\nlast_lit %u, in %ld, out ~%ld(%ld%%) ", 3341 s->last_lit, in_length, out_length, 3342 100L - out_length*100L/in_length)); 3343 if (s->matches < s->last_lit/2 && out_length < in_length/2) 3344 return (1); 3345 } 3346 #endif 3347 return (s->last_lit == s->lit_bufsize-1); 3348 /* 3349 * We avoid equality with lit_bufsize because of wraparound at 64K 3350 * on 16 bit machines and because stored blocks are restricted to 3351 * 64K-1 bytes. 3352 */ 3353 } 3354 3355 /* 3356 * =========================================================================== 3357 * Send the block data compressed using the given Huffman trees 3358 */ 3359 local void 3360 compress_block(s, ltree, dtree) 3361 deflate_state *s; 3362 ct_data *ltree; /* literal tree */ 3363 ct_data *dtree; /* distance tree */ 3364 { 3365 unsigned dist; /* distance of matched string */ 3366 int lc; /* match length or unmatched char (if dist == 0) */ 3367 unsigned lx = 0; /* running index in l_buf */ 3368 unsigned code; /* the code to send */ 3369 int extra; /* number of extra bits to send */ 3370 3371 if (s->last_lit != 0) do { 3372 dist = s->d_buf[lx]; 3373 lc = s->l_buf[lx++]; 3374 if (dist == 0) { 3375 /* send a literal byte */ 3376 send_code(s, lc, ltree); 3377 Tracecv(isgraph(lc), (stderr, " '%c' ", lc)); 3378 } else { 3379 /* Here, lc is the match length - MIN_MATCH */ 3380 code = _length_code[lc]; 3381 /* send the length code */ 3382 send_code(s, code+LITERALS+1, ltree); 3383 extra = extra_lbits[code]; 3384 if (extra != 0) { 3385 lc -= base_length[code]; 3386 /* send the extra length bits */ 3387 send_bits(s, lc, extra); 3388 } 3389 /* dist is now the match distance - 1 */ 3390 dist--; 3391 code = d_code(dist); 3392 Assert(code < D_CODES, "bad d_code"); 3393 3394 /* send the distance code */ 3395 send_code(s, code, dtree); 3396 extra = extra_dbits[code]; 3397 if (extra != 0) { 3398 dist -= base_dist[code]; 3399 /* send the extra distance bits */ 3400 send_bits(s, dist, extra); 3401 } 3402 } /* literal or match pair ? 
*/ 3403 3404 /* 3405 * Check that the overlay between pending_buf and 3406 * d_buf+l_buf is ok: 3407 */ 3408 Assert(s->pending < s->lit_bufsize + 2*lx, 3409 "pendingBuf overflow"); 3410 3411 } while (lx < s->last_lit); 3412 3413 send_code(s, END_BLOCK, ltree); 3414 s->last_eob_len = ltree[END_BLOCK].Len; 3415 } 3416 3417 /* 3418 * =========================================================================== 3419 * Set the data type to ASCII or BINARY, using a crude approximation: 3420 * binary if more than 20% of the bytes are <= 6 or >= 128, ascii otherwise. 3421 * IN assertion: the fields freq of dyn_ltree are set and the total of all 3422 * frequencies does not exceed 64K (to fit in an int on 16 bit machines). 3423 */ 3424 local void 3425 set_data_type(s) 3426 deflate_state *s; 3427 { 3428 int n = 0; 3429 unsigned ascii_freq = 0; 3430 unsigned bin_freq = 0; 3431 while (n < 7) bin_freq += s->dyn_ltree[n++].Freq; 3432 while (n < 128) ascii_freq += s->dyn_ltree[n++].Freq; 3433 while (n < LITERALS) bin_freq += s->dyn_ltree[n++].Freq; 3434 s->data_type = (Byte)(bin_freq > (ascii_freq >> 2) ? 3435 Z_BINARY : Z_ASCII); 3436 } 3437 3438 /* 3439 * =========================================================================== 3440 * Reverse the first len bits of a code, using straightforward code (a faster 3441 * method would use a table) 3442 * IN assertion: 1 <= len <= 15 3443 */ 3444 local unsigned 3445 bi_reverse(code, len) 3446 unsigned code; /* the value to invert */ 3447 int len; /* its bit length */ 3448 { 3449 register unsigned res = 0; 3450 do { 3451 res |= code & 1; 3452 code >>= 1, res <<= 1; 3453 } while (--len > 0); 3454 return (res >> 1); 3455 } 3456 3457 /* 3458 * =========================================================================== 3459 * Flush the bit buffer, keeping at most 7 bits in it. 3460 */ 3461 local void 3462 bi_flush(s) 3463 deflate_state *s; 3464 { 3465 if (s->bi_valid == 16) { 3466 put_short(s, s->bi_buf); 3467 s->bi_buf = 0; 3468 s->bi_valid = 0; 3469 } else if (s->bi_valid >= 8) { 3470 put_byte(s, (Byte)s->bi_buf); 3471 s->bi_buf >>= 8; 3472 s->bi_valid -= 8; 3473 } 3474 } 3475 3476 /* 3477 * =========================================================================== 3478 * Flush the bit buffer and align the output on a byte boundary 3479 */ 3480 local void 3481 bi_windup(s) 3482 deflate_state *s; 3483 { 3484 if (s->bi_valid > 8) { 3485 put_short(s, s->bi_buf); 3486 } else if (s->bi_valid > 0) { 3487 put_byte(s, (Byte)s->bi_buf); 3488 } 3489 s->bi_buf = 0; 3490 s->bi_valid = 0; 3491 #ifdef DEBUG_ZLIB 3492 s->bits_sent = (s->bits_sent+7) & ~7; 3493 #endif 3494 } 3495 3496 /* 3497 * =========================================================================== 3498 * Copy a stored block, storing first the length and its 3499 * one's complement if requested. 
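 * For example, copy_block(s, buf, 1000, 1) first emits the bytes
 * 0xe8 0x03 (LEN == 1000, LSB first) and 0x17 0xfc (NLEN == ~LEN),
 * then copies the 1000 data bytes into pending_buf.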
3500 */ 3501 local void 3502 copy_block(s, buf, len, header) 3503 deflate_state *s; 3504 charf *buf; /* the input data */ 3505 unsigned len; /* its length */ 3506 int header; /* true if block header must be written */ 3507 { 3508 bi_windup(s); /* align on byte boundary */ 3509 s->last_eob_len = 8; /* enough lookahead for inflate */ 3510 3511 if (header) { 3512 put_short(s, (ush)len); 3513 put_short(s, (ush)~len); 3514 #ifdef DEBUG_ZLIB 3515 s->bits_sent += 2*16; 3516 #endif 3517 } 3518 #ifdef DEBUG_ZLIB 3519 s->bits_sent += (ulg)len<<3; 3520 #endif 3521 /* bundle up the put_byte(s, *buf++) calls PPP */ 3522 Assert(s->pending + len < s->pending_buf_size, "pending_buf overrun"); 3523 zmemcpy(&s->pending_buf[s->pending], buf, len); /* PPP */ 3524 s->pending += len; /* PPP */ 3525 } 3526 /* --- trees.c */ 3527 3528 /* +++ inflate.c */ 3529 /* 3530 * inflate.c -- zlib interface to inflate modules 3531 * Copyright (C) 1995-1998 Mark Adler 3532 * For conditions of distribution and use, see copyright notice in zlib.h 3533 */ 3534 3535 /* #include "zutil.h" */ 3536 3537 /* +++ infblock.h */ 3538 /* 3539 * infblock.h -- header to use infblock.c 3540 * Copyright (C) 1995-1998 Mark Adler 3541 * For conditions of distribution and use, see copyright notice in zlib.h 3542 */ 3543 3544 /* 3545 * WARNING: this file should *not* be used by applications. It is part 3546 * of the implementation of the compression library and is subject to 3547 * change. Applications should only use zlib.h. 3548 */ 3549 3550 struct inflate_blocks_state; 3551 typedef struct inflate_blocks_state FAR inflate_blocks_statef; 3552 3553 extern inflate_blocks_statef * inflate_blocks_new OF(( 3554 z_streamp z, 3555 check_func c, /* check function */ 3556 uInt w)); /* window size */ 3557 3558 extern int inflate_blocks OF(( 3559 inflate_blocks_statef *, 3560 z_streamp, 3561 int)); /* initial return code */ 3562 3563 extern void inflate_blocks_reset OF(( 3564 inflate_blocks_statef *, 3565 z_streamp, 3566 uLongf *)); /* check value on output */ 3567 3568 extern int inflate_blocks_free OF(( 3569 inflate_blocks_statef *, 3570 z_streamp)); 3571 3572 extern void inflate_set_dictionary OF(( 3573 inflate_blocks_statef *s, 3574 const Bytef *d, /* dictionary */ 3575 uInt n)); /* dictionary length */ 3576 3577 extern int inflate_blocks_sync_point OF(( 3578 inflate_blocks_statef *s)); 3579 3580 /* PPP -- added function */ 3581 extern int inflate_addhistory OF(( 3582 inflate_blocks_statef *, 3583 z_streamp)); 3584 3585 /* PPP -- added function */ 3586 extern int inflate_packet_flush OF(( 3587 inflate_blocks_statef *)); 3588 /* --- infblock.h */ 3589 3590 #ifndef NO_DUMMY_DECL 3591 struct inflate_blocks_state {int dummy; }; /* for buggy compilers */ 3592 #endif 3593 3594 /* inflate private state */ 3595 struct internal_state { 3596 3597 /* mode */ 3598 enum { 3599 METHOD, /* waiting for method byte */ 3600 FLAG, /* waiting for flag byte */ 3601 DICT4, /* four dictionary check bytes to go */ 3602 DICT3, /* three dictionary check bytes to go */ 3603 DICT2, /* two dictionary check bytes to go */ 3604 DICT1, /* one dictionary check byte to go */ 3605 DICT0, /* waiting for inflateSetDictionary */ 3606 BLOCKS, /* decompressing blocks */ 3607 CHECK4, /* four check bytes to go */ 3608 CHECK3, /* three check bytes to go */ 3609 CHECK2, /* two check bytes to go */ 3610 CHECK1, /* one check byte to go */ 3611 DONE, /* finished check, done */ 3612 BAD} /* got an error--stay here */ 3613 mode; /* current inflate mode */ 3614 3615 /* mode dependent information */ 
3616 union { 3617 uInt method; /* if FLAGS, method byte */ 3618 struct { 3619 uLong was; /* computed check value */ 3620 uLong need; /* stream check value */ 3621 } check; /* if CHECK, check values to compare */ 3622 uInt marker; /* if BAD, inflateSync's marker bytes count */ 3623 } sub; /* submode */ 3624 3625 /* mode independent information */ 3626 int nowrap; /* flag for no wrapper */ 3627 uInt wbits; /* log2(window size) (8..15, defaults to 15) */ 3628 /* current inflate_blocks state */ 3629 inflate_blocks_statef *blocks; 3630 }; 3631 3632 3633 int 3634 inflateReset(z) 3635 z_streamp z; 3636 { 3637 if (z == Z_NULL || z->state == Z_NULL) 3638 return (Z_STREAM_ERROR); 3639 z->total_in = z->total_out = 0; 3640 z->msg = Z_NULL; 3641 z->state->mode = z->state->nowrap ? BLOCKS : METHOD; 3642 inflate_blocks_reset(z->state->blocks, z, Z_NULL); 3643 Trace((stderr, "inflate: reset\n")); 3644 return (Z_OK); 3645 } 3646 3647 3648 int 3649 inflateEnd(z) 3650 z_streamp z; 3651 { 3652 if (z == Z_NULL || z->state == Z_NULL || z->zfree == Z_NULL) 3653 return (Z_STREAM_ERROR); 3654 if (z->state->blocks != Z_NULL) { 3655 (void) inflate_blocks_free(z->state->blocks, z); 3656 z->state->blocks = Z_NULL; 3657 } 3658 ZFREE(z, z->state); 3659 z->state = Z_NULL; 3660 Trace((stderr, "inflate: end\n")); 3661 return (Z_OK); 3662 } 3663 3664 3665 int 3666 inflateInit2_(z, w, version, stream_size) 3667 z_streamp z; 3668 int w; 3669 const char *version; 3670 int stream_size; 3671 { 3672 if (version == Z_NULL || version[0] != ZLIB_VERSION[0] || 3673 stream_size != sizeof (z_stream)) 3674 return (Z_VERSION_ERROR); 3675 3676 /* initialize state */ 3677 if (z == Z_NULL) 3678 return (Z_STREAM_ERROR); 3679 z->msg = Z_NULL; 3680 #ifndef NO_ZCFUNCS 3681 if (z->zalloc == Z_NULL) 3682 { 3683 z->zalloc = zcalloc; 3684 z->opaque = (voidpf)0; 3685 } 3686 if (z->zfree == Z_NULL) z->zfree = zcfree; 3687 #endif 3688 if ((z->state = (struct internal_state FAR *) 3689 ZALLOC(z, 1, sizeof (struct internal_state))) == Z_NULL) 3690 return (Z_MEM_ERROR); 3691 z->state->blocks = Z_NULL; 3692 3693 /* handle undocumented nowrap option (no zlib header or check) */ 3694 z->state->nowrap = 0; 3695 if (w < 0) 3696 { 3697 w = - w; 3698 z->state->nowrap = 1; 3699 } 3700 3701 /* set window size */ 3702 if (w < 8 || w > 15) 3703 { 3704 (void) inflateEnd(z); 3705 return (Z_STREAM_ERROR); 3706 } 3707 z->state->wbits = (uInt)w; 3708 3709 /* create inflate_blocks state */ 3710 if ((z->state->blocks = 3711 inflate_blocks_new(z, z->state->nowrap ? 3712 Z_NULL : adler32, (uInt)1 << w)) 3713 == Z_NULL) 3714 { 3715 (void) inflateEnd(z); 3716 return (Z_MEM_ERROR); 3717 } 3718 Trace((stderr, "inflate: allocated\n")); 3719 3720 /* reset state */ 3721 (void) inflateReset(z); 3722 return (Z_OK); 3723 } 3724 3725 3726 int 3727 inflateInit_(z, version, stream_size) 3728 z_streamp z; 3729 const char *version; 3730 int stream_size; 3731 { 3732 return (inflateInit2_(z, DEF_WBITS, version, stream_size)); 3733 } 3734 3735 /* PPP -- added "empty" label and changed f to Z_OK */ 3736 #define NEEDBYTE {if (z->avail_in == 0) goto empty; r = Z_OK; } ((void)0) 3737 #define NEXTBYTE (z->avail_in--, z->total_in++, *z->next_in++) 3738 3739 int 3740 inflate(z, f) 3741 z_streamp z; 3742 int f; 3743 { 3744 int r; 3745 uInt b; 3746 3747 if (z == Z_NULL || z->state == Z_NULL || z->next_in == Z_NULL) 3748 return (Z_STREAM_ERROR); 3749 /* f = f == Z_FINISH ? 
Z_BUF_ERROR : Z_OK; -- PPP; Z_FINISH unused */ 3750 r = Z_BUF_ERROR; 3751 /* CONSTCOND */ 3752 while (1) 3753 switch (z->state->mode) 3754 { 3755 case METHOD: 3756 NEEDBYTE; 3757 if (((z->state->sub.method = NEXTBYTE) & 0xf) != Z_DEFLATED) 3758 { 3759 z->state->mode = BAD; 3760 z->msg = "unknown compression method"; 3761 /* can't try inflateSync */ 3762 z->state->sub.marker = 5; 3763 break; 3764 } 3765 if ((z->state->sub.method >> 4) + 8 > z->state->wbits) 3766 { 3767 z->state->mode = BAD; 3768 z->msg = "invalid window size"; 3769 /* can't try inflateSync */ 3770 z->state->sub.marker = 5; 3771 break; 3772 } 3773 z->state->mode = FLAG; 3774 /* FALLTHRU */ 3775 case FLAG: 3776 NEEDBYTE; 3777 b = NEXTBYTE; 3778 if (((z->state->sub.method << 8) + b) % 31) 3779 { 3780 z->state->mode = BAD; 3781 z->msg = "incorrect header check"; 3782 /* can't try inflateSync */ 3783 z->state->sub.marker = 5; 3784 break; 3785 } 3786 Trace((stderr, "inflate: zlib header ok\n")); 3787 if (!(b & PRESET_DICT)) 3788 { 3789 z->state->mode = BLOCKS; 3790 break; 3791 } 3792 z->state->mode = DICT4; 3793 /* FALLTHRU */ 3794 case DICT4: 3795 NEEDBYTE; 3796 z->state->sub.check.need = (uLong)NEXTBYTE << 24; 3797 z->state->mode = DICT3; 3798 /* FALLTHRU */ 3799 case DICT3: 3800 NEEDBYTE; 3801 z->state->sub.check.need += (uLong)NEXTBYTE << 16; 3802 z->state->mode = DICT2; 3803 /* FALLTHRU */ 3804 case DICT2: 3805 NEEDBYTE; 3806 z->state->sub.check.need += (uLong)NEXTBYTE << 8; 3807 z->state->mode = DICT1; 3808 /* FALLTHRU */ 3809 case DICT1: 3810 NEEDBYTE; 3811 z->state->sub.check.need += (uLong)NEXTBYTE; 3812 z->adler = z->state->sub.check.need; 3813 z->state->mode = DICT0; 3814 return (Z_NEED_DICT); 3815 case DICT0: 3816 z->state->mode = BAD; 3817 z->msg = "need dictionary"; 3818 z->state->sub.marker = 0; /* can try inflateSync */ 3819 return (Z_STREAM_ERROR); 3820 case BLOCKS: 3821 r = inflate_blocks(z->state->blocks, z, r); 3822 if (f == Z_PACKET_FLUSH && z->avail_in == 0 && /* PPP */ 3823 z->avail_out != 0) /* PPP */ 3824 r = inflate_packet_flush(z->state->blocks); /* PPP */ 3825 if (r == Z_DATA_ERROR) 3826 { 3827 z->state->mode = BAD; 3828 /* can try inflateSync */ 3829 z->state->sub.marker = 0; 3830 break; 3831 } 3832 /* PPP */ 3833 if (r != Z_STREAM_END) 3834 return (r); 3835 r = Z_OK; /* PPP */ 3836 inflate_blocks_reset(z->state->blocks, z, 3837 &z->state->sub.check.was); 3838 if (z->state->nowrap) 3839 { 3840 z->state->mode = DONE; 3841 break; 3842 } 3843 z->state->mode = CHECK4; 3844 /* FALLTHRU */ 3845 case CHECK4: 3846 NEEDBYTE; 3847 z->state->sub.check.need = (uLong)NEXTBYTE << 24; 3848 z->state->mode = CHECK3; 3849 /* FALLTHRU */ 3850 case CHECK3: 3851 NEEDBYTE; 3852 z->state->sub.check.need += (uLong)NEXTBYTE << 16; 3853 z->state->mode = CHECK2; 3854 /* FALLTHRU */ 3855 case CHECK2: 3856 NEEDBYTE; 3857 z->state->sub.check.need += (uLong)NEXTBYTE << 8; 3858 z->state->mode = CHECK1; 3859 /* FALLTHRU */ 3860 case CHECK1: 3861 NEEDBYTE; 3862 z->state->sub.check.need += (uLong)NEXTBYTE; 3863 3864 if (z->state->sub.check.was != z->state->sub.check.need) 3865 { 3866 z->state->mode = BAD; 3867 z->msg = "incorrect data check"; 3868 /* can't try inflateSync */ 3869 z->state->sub.marker = 5; 3870 break; 3871 } 3872 Trace((stderr, "inflate: zlib check ok\n")); 3873 z->state->mode = DONE; 3874 /* FALLTHRU */ 3875 case DONE: 3876 return (Z_STREAM_END); 3877 case BAD: 3878 return (Z_DATA_ERROR); 3879 default: 3880 return (Z_STREAM_ERROR); 3881 } 3882 3883 /* PPP -- packet flush handling */ 3884 empty: 3885 if (f != 
Z_PACKET_FLUSH) 3886 return (r); 3887 z->state->mode = BAD; 3888 z->msg = "need more for packet flush"; 3889 z->state->sub.marker = 0; /* can try inflateSync */ 3890 return (Z_DATA_ERROR); 3891 } 3892 3893 3894 int 3895 inflateSetDictionary(z, dictionary, dictLength) 3896 z_streamp z; 3897 const Bytef *dictionary; 3898 uInt dictLength; 3899 { 3900 uInt length = dictLength; 3901 3902 if (z == Z_NULL || z->state == Z_NULL || z->state->mode != DICT0) 3903 return (Z_STREAM_ERROR); 3904 3905 if (adler32(1L, dictionary, dictLength) != z->adler) 3906 return (Z_DATA_ERROR); 3907 z->adler = 1L; 3908 3909 if (length >= ((uInt)1<<z->state->wbits)) 3910 { 3911 length = (1<<z->state->wbits)-1; 3912 dictionary += dictLength - length; 3913 } 3914 inflate_set_dictionary(z->state->blocks, dictionary, length); 3915 z->state->mode = BLOCKS; 3916 return (Z_OK); 3917 } 3918 3919 /* 3920 * This subroutine adds the data at next_in/avail_in to the output history 3921 * without performing any output. The output buffer must be "caught up"; 3922 * i.e. no pending output (hence s->read equals s->write), and the state must 3923 * be BLOCKS (i.e. we should be willing to see the start of a series of 3924 * BLOCKS). On exit, the output will also be caught up, and the checksum 3925 * will have been updated if need be. 3926 * 3927 * Added for PPP. 3928 */ 3929 3930 int 3931 inflateIncomp(z) 3932 z_stream *z; 3933 { 3934 if (z->state->mode != BLOCKS) 3935 return (Z_DATA_ERROR); 3936 return (inflate_addhistory(z->state->blocks, z)); 3937 } 3938 3939 3940 int 3941 inflateSync(z) 3942 z_streamp z; 3943 { 3944 uInt n; /* number of bytes to look at */ 3945 Bytef *p; /* pointer to bytes */ 3946 uInt m; /* number of marker bytes found in a row */ 3947 uLong r, w; /* temporaries to save total_in and total_out */ 3948 3949 /* set up */ 3950 if (z == Z_NULL || z->state == Z_NULL) 3951 return (Z_STREAM_ERROR); 3952 if (z->state->mode != BAD) 3953 { 3954 z->state->mode = BAD; 3955 z->state->sub.marker = 0; 3956 } 3957 if ((n = z->avail_in) == 0) 3958 return (Z_BUF_ERROR); 3959 p = z->next_in; 3960 m = z->state->sub.marker; 3961 3962 /* search */ 3963 while (n && m < 4) 3964 { 3965 static const Byte mark[4] = { 0, 0, 0xff, 0xff }; 3966 if (*p == mark[m]) 3967 m++; 3968 else if (*p) 3969 m = 0; 3970 else 3971 /* 3972 * This statement maps 2->2 and 3->1 because a 3973 * mismatch with input byte 0x00 on the first 3974 * 0xFF in the pattern means that we still 3975 * have two contiguous zeros matched (thus 3976 * offset 2 is kept), but a mismatch on the 3977 * second 0xFF means that only one 0x00 byte 3978 * has been matched. (Boyer-Moore-like 3979 * search.) 3980 */ 3981 m = 4 - m; 3982 p++, n--; 3983 } 3984 3985 /* restore */ 3986 z->total_in += p - z->next_in; 3987 z->next_in = p; 3988 z->avail_in = n; 3989 z->state->sub.marker = m; 3990 3991 /* return no joy or set up to restart on a new block */ 3992 if (m != 4) 3993 return (Z_DATA_ERROR); 3994 r = z->total_in; w = z->total_out; 3995 (void) inflateReset(z); 3996 z->total_in = r; z->total_out = w; 3997 z->state->mode = BLOCKS; 3998 return (Z_OK); 3999 } 4000 4001 /* 4002 * Returns true if inflate is currently at the end of a block 4003 * generated by Z_SYNC_FLUSH or Z_FULL_FLUSH. This function is used by 4004 * one PPP implementation to provide an additional safety check. PPP 4005 * uses Z_SYNC_FLUSH but removes the length bytes of the resulting 4006 * empty stored block. When decompressing, PPP checks that at the end 4007 * of the input packet, inflate is waiting for these length bytes. 
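 *
 * For example, deflate(&strm, Z_SYNC_FLUSH) ends a packet with an
 * empty stored block, so the compressed data finishes with the four
 * length bytes 00 00 ff ff. PPP strips those four bytes before
 * transmission, and a correctly framed packet therefore leaves
 * inflate waiting in the LENS state, which is the condition that
 * inflate_blocks_sync_point() tests.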
4008 */ 4009 int 4010 inflateSyncPoint(z) 4011 z_streamp z; 4012 { 4013 if (z == Z_NULL || z->state == Z_NULL || z->state->blocks == Z_NULL) 4014 return (Z_STREAM_ERROR); 4015 return (inflate_blocks_sync_point(z->state->blocks)); 4016 } 4017 4018 #undef NEEDBYTE 4019 #undef NEXTBYTE 4020 /* --- inflate.c */ 4021 4022 /* +++ infblock.c */ 4023 /* 4024 * infblock.c -- interpret and process block types to last block 4025 * Copyright (C) 1995-1998 Mark Adler 4026 * For conditions of distribution and use, see copyright notice in zlib.h 4027 */ 4028 4029 /* #include "zutil.h" */ 4030 /* #include "infblock.h" */ 4031 4032 /* +++ inftrees.h */ 4033 /* 4034 * inftrees.h -- header to use inftrees.c 4035 * Copyright (C) 1995-1998 Mark Adler 4036 * For conditions of distribution and use, see copyright notice in zlib.h 4037 */ 4038 4039 /* 4040 * WARNING: this file should *not* be used by applications. It is part 4041 * of the implementation of the compression library and is subject to 4042 * change. Applications should only use zlib.h. 4043 */ 4044 4045 /* 4046 * Huffman code lookup table entry--this entry is four bytes for 4047 * machines that have 16-bit pointers (e.g. PC's in the small or 4048 * medium model). 4049 */ 4050 4051 typedef struct inflate_huft_s FAR inflate_huft; 4052 4053 struct inflate_huft_s { 4054 union { 4055 struct { 4056 Byte Exop; /* number of extra bits or operation */ 4057 /* number of bits in this code or subcode */ 4058 Byte Bits; 4059 } what; 4060 Bytef *pad; /* pad structure to a power of 2 (4 bytes for */ 4061 } word; /* 16-bit, 8 bytes for 32-bit machines) */ 4062 /* literal, length base, distance base, or table offset */ 4063 uInt base; 4064 }; 4065 4066 /* 4067 * Maximum size of dynamic tree. The maximum found in a long but non- 4068 * exhaustive search was 1004 huft structures (850 for length/literals 4069 * and 154 for distances, the latter actually the result of an 4070 * exhaustive search). The actual maximum is not known, but the value 4071 * below is more than safe. 4072 */ 4073 #define MANY 1440 4074 4075 extern int inflate_trees_bits OF(( 4076 uIntf *, /* 19 code lengths */ 4077 uIntf *, /* bits tree desired/actual depth */ 4078 inflate_huft * FAR *, /* bits tree result */ 4079 inflate_huft *, /* space for trees */ 4080 z_streamp)); /* for zalloc, zfree functions */ 4081 4082 extern int inflate_trees_dynamic OF(( 4083 uInt, /* number of literal/length codes */ 4084 uInt, /* number of distance codes */ 4085 uIntf *, /* that many (total) code lengths */ 4086 uIntf *, /* literal desired/actual bit depth */ 4087 uIntf *, /* distance desired/actual bit depth */ 4088 inflate_huft * FAR *, /* literal/length tree result */ 4089 inflate_huft * FAR *, /* distance tree result */ 4090 inflate_huft *, /* space for trees */ 4091 z_streamp)); /* for zalloc, zfree functions */ 4092 4093 extern int inflate_trees_fixed OF(( 4094 uIntf *, /* literal desired/actual bit depth */ 4095 uIntf *, /* distance desired/actual bit depth */ 4096 const inflate_huft * FAR *, /* literal/length tree result */ 4097 const inflate_huft * FAR *, /* distance tree result */ 4098 z_streamp)); 4099 4100 /* --- inftrees.h */ 4101 4102 /* +++ infcodes.h */ 4103 /* 4104 * infcodes.h -- header to use infcodes.c 4105 * Copyright (C) 1995-1998 Mark Adler 4106 * For conditions of distribution and use, see copyright notice in zlib.h 4107 */ 4108 4109 /* 4110 * WARNING: this file should *not* be used by applications. 
It is part 4111 * of the implementation of the compression library and is subject to 4112 * change. Applications should only use zlib.h. 4113 */ 4114 4115 struct inflate_codes_state; 4116 typedef struct inflate_codes_state FAR inflate_codes_statef; 4117 4118 extern inflate_codes_statef *inflate_codes_new OF(( 4119 uInt, uInt, 4120 const inflate_huft *, const inflate_huft *, 4121 z_streamp)); 4122 4123 extern int inflate_codes OF(( 4124 inflate_blocks_statef *, 4125 z_streamp, 4126 int)); 4127 4128 extern void inflate_codes_free OF(( 4129 inflate_codes_statef *, 4130 z_streamp)); 4131 4132 /* --- infcodes.h */ 4133 4134 /* +++ infutil.h */ 4135 /* 4136 * infutil.h -- types and macros common to blocks and codes 4137 * Copyright (C) 1995-1998 Mark Adler 4138 * For conditions of distribution and use, see copyright notice in zlib.h 4139 */ 4140 4141 /* 4142 * WARNING: this file should *not* be used by applications. It is part 4143 * of the implementation of the compression library and is subject to 4144 * change. Applications should only use zlib.h. 4145 */ 4146 4147 #ifndef _INFUTIL_H 4148 #define _INFUTIL_H 4149 4150 typedef enum { 4151 TYPE, /* get type bits (3, including end bit) */ 4152 LENS, /* get lengths for stored */ 4153 STORED, /* processing stored block */ 4154 TABLE, /* get table lengths */ 4155 BTREE, /* get bit lengths tree for a dynamic block */ 4156 DTREE, /* get length, distance trees for a dynamic block */ 4157 CODES, /* processing fixed or dynamic block */ 4158 DRY, /* output remaining window bytes */ 4159 DONEB, /* finished last block, done */ 4160 BADB} /* got a data error--stuck here */ 4161 inflate_block_mode; 4162 4163 /* inflate blocks semi-private state */ 4164 struct inflate_blocks_state { 4165 4166 /* mode */ 4167 inflate_block_mode mode; /* current inflate_block mode */ 4168 4169 /* mode dependent information */ 4170 union { 4171 uInt left; /* if STORED, bytes left to copy */ 4172 struct { 4173 uInt table; /* table lengths (14 bits) */ 4174 uInt index; /* index into blens (or border) */ 4175 uIntf *blens; /* bit lengths of codes */ 4176 uInt bb; /* bit length tree depth */ 4177 inflate_huft *tb; /* bit length decoding tree */ 4178 } trees; /* if DTREE, decoding info for trees */ 4179 struct { 4180 inflate_codes_statef *codes; 4181 } decode; /* if CODES, current state */ 4182 } sub; /* submode */ 4183 uInt last; /* true if this block is the last block */ 4184 4185 /* mode independent information */ 4186 uInt bitk; /* bits in bit buffer */ 4187 uLong bitb; /* bit buffer */ 4188 inflate_huft *hufts; /* single malloc for tree space */ 4189 Bytef *window; /* sliding window */ 4190 Bytef *end; /* one byte after sliding window */ 4191 Bytef *read; /* window read pointer */ 4192 Bytef *write; /* window write pointer */ 4193 check_func checkfn; /* check function */ 4194 uLong check; /* check on output */ 4195 4196 }; 4197 4198 4199 /* defines for inflate input/output */ 4200 /* update pointers and return */ 4201 #define UPDBITS {s->bitb = b; s->bitk = k; } 4202 #define UPDIN {z->avail_in = n; z->total_in += p-z->next_in; z->next_in = p; } 4203 #define UPDOUT {s->write = q; } 4204 #define UPDATE {UPDBITS UPDIN UPDOUT} 4205 #define LEAVE {UPDATE return (inflate_flush(s, z, r)); } 4206 /* get bytes and bits */ 4207 #define LOADIN {p = z->next_in; n = z->avail_in; b = s->bitb; k = s->bitk; } 4208 #define NEEDBYTE { if (n) r = Z_OK; else LEAVE } 4209 #define NEXTBYTE (n--, *p++) 4210 #define NEEDBITS(j) { while (k < (j)) { NEEDBYTE; b |= ((uLong)NEXTBYTE)<<k; \ 4211 k += 8; }} 
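/*
 * For instance, if the bit buffer is empty (k == 0) and the next input
 * byte is 0x2d (00101101), NEEDBITS(3) loads it into the buffer least
 * significant bit first, giving b == 0x2d and k == 8; the code is the
 * low three bits, (uInt)b & inflate_mask[3] == 5 (101), and DUMPBITS(3)
 * below then leaves b == 0x05 and k == 5. DEFLATE packs its bit fields
 * into bytes starting at the least significant bit, which is why the
 * buffer is consumed by shifting right.
 */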
4212 #define DUMPBITS(j) {b >>= (j); k -= (j); } 4213 /* output bytes */ 4214 #define WAVAIL (uInt)(q < s->read ? s->read-q-1 : s->end-q) 4215 #define LOADOUT {q = s->write; m = (uInt)WAVAIL; } 4216 #define WWRAP {if (q == s->end && s->read != s->window) {q = s->window; \ 4217 m = (uInt)WAVAIL; }} 4218 #define FLUSH {UPDOUT r = inflate_flush(s, z, r); LOADOUT} 4219 #define NEEDOUT {if (m == 0) {WWRAP if (m == 0) { FLUSH WWRAP \ 4220 if (m == 0) LEAVE }} r = Z_OK; } 4221 #define OUTBYTE(a) {*q++ = (Byte)(a); m--; } 4222 /* load local pointers */ 4223 #define LOAD {LOADIN LOADOUT} 4224 4225 /* masks for lower bits (size given to avoid silly warnings with Visual C++) */ 4226 extern uInt inflate_mask[17]; 4227 4228 /* copy as much as possible from the sliding window to the output area */ 4229 extern int inflate_flush OF(( 4230 inflate_blocks_statef *, 4231 z_streamp, 4232 int)); 4233 4234 #ifndef NO_DUMMY_DECL 4235 struct internal_state {int dummy; }; /* for buggy compilers */ 4236 #endif 4237 4238 #endif 4239 /* --- infutil.h */ 4240 4241 #ifndef NO_DUMMY_DECL 4242 struct inflate_codes_state {int dummy; }; /* for buggy compilers */ 4243 #endif 4244 4245 /* Table for deflate from PKZIP's appnote.txt. */ 4246 local const uInt border[] = { /* Order of the bit length code lengths */ 4247 16, 17, 18, 0, 8, 7, 9, 6, 10, 5, 11, 4, 12, 3, 13, 2, 14, 1, 15}; 4248 4249 /* 4250 * Notes beyond the 1.93a appnote.txt: 4251 * 4252 * 1. Distance pointers never point before the beginning of the output 4253 * stream. 4254 * 2. Distance pointers can point back across blocks, up to 32k away. 4255 * 3. There is an implied maximum of 7 bits for the bit length table and 4256 * 15 bits for the actual data. 4257 * 4. If only one code exists, then it is encoded using one bit. (Zero 4258 * would be more efficient, but perhaps a little confusing.) If two 4259 * codes exist, they are coded using one bit each (0 and 1). 4260 * 5. There is no way of sending zero distance codes--a dummy must be 4261 * sent if there are none. (History: a pre 2.0 version of PKZIP would 4262 * store blocks with no distance codes, but this was discovered to be 4263 * too harsh a criterion.) Valid only for 1.93a. 2.04c does allow 4264 * zero distance codes, which are sent as one code of zero bits in 4265 * length. 4266 * 6. There are up to 286 literal/length codes. Code 256 represents the 4267 * end-of-block. Note however that the static length tree defines 4268 * 288 codes just to fill out the Huffman codes. Codes 286 and 287 4269 * cannot be used though, since there is no length base or extra bits 4270 * defined for them. Similarly, there are up to 30 distance codes. 4271 * However, static trees define 32 codes (all 5 bits) to fill out the 4272 * Huffman codes, but the last two had better not show up in the data. 4273 * 7. Unzip can check dynamic Huffman blocks for complete code sets. 4274 * The exception is that a single code would not be complete (see #4). 4275 * 8. The five bits following the block type are really the number of 4276 * literal codes sent minus 257. 4277 * 9. Length codes 8,16,16 are interpreted as 13 length codes of 8 bits 4278 * (1+6+6). Therefore, to output three times the length, you output 4279 * three codes (1+1+1), whereas to output four times the same length, 4280 * you only need two codes (1+3). Hmm. 4281 * 10. In the tree reconstruction algorithm, Code = Code + Increment 4282 * only if BitLength(i) is not zero. (Pretty obvious.) 4283 * 11. Correction: 4 Bits: #of Bit Length codes - 4 (4 - 19) 4284 * 12. 
Note: length code 284 can represent 227-258, but length code 285 4285 * really is 258. The last length deserves its own, short code 4286 * since it gets used a lot in very redundant files. The length 4287 * 258 is special since 258 - 3 (the min match length) is 255. 4288 * 13. The literal/length and distance code bit lengths are read as a 4289 * single stream of lengths. It is possible (and advantageous) for 4290 * a repeat code (16, 17, or 18) to go across the boundary between 4291 * the two sets of lengths. 4292 */ 4293 4294 4295 void 4296 inflate_blocks_reset(s, z, c) 4297 inflate_blocks_statef *s; 4298 z_streamp z; 4299 uLongf *c; 4300 { 4301 if (c != Z_NULL) 4302 *c = s->check; 4303 if ((s->mode == BTREE || s->mode == DTREE) && 4304 s->sub.trees.blens != Z_NULL) { 4305 ZFREE(z, s->sub.trees.blens); 4306 s->sub.trees.blens = Z_NULL; 4307 } 4308 if (s->mode == CODES && s->sub.decode.codes != Z_NULL) { 4309 (void) inflate_codes_free(s->sub.decode.codes, z); 4310 s->sub.decode.codes = Z_NULL; 4311 } 4312 s->mode = TYPE; 4313 s->bitk = 0; 4314 s->bitb = 0; 4315 s->read = s->write = s->window; 4316 if (s->checkfn != Z_NULL) 4317 z->adler = s->check = (*s->checkfn)(0L, Z_NULL, 0); 4318 Trace((stderr, "inflate: blocks reset\n")); 4319 } 4320 4321 inflate_blocks_statef * 4322 inflate_blocks_new(z, c, w) 4323 z_streamp z; 4324 check_func c; 4325 uInt w; 4326 { 4327 inflate_blocks_statef *s; 4328 4329 if ((s = (inflate_blocks_statef *)ZALLOC 4330 (z, 1, sizeof (struct inflate_blocks_state))) == Z_NULL) 4331 return (s); 4332 s->hufts = (inflate_huft *)ZALLOC(z, MANY, sizeof (inflate_huft)); 4333 if (s->hufts == Z_NULL) { 4334 ZFREE(z, s); 4335 return (Z_NULL); 4336 } 4337 if ((s->window = (Bytef *)ZALLOC(z, 1, w)) == Z_NULL) 4338 { 4339 ZFREE(z, s->hufts); 4340 ZFREE(z, s); 4341 return (Z_NULL); 4342 } 4343 s->end = s->window + w; 4344 s->checkfn = c; 4345 s->mode = TYPE; 4346 Trace((stderr, "inflate: blocks allocated\n")); 4347 inflate_blocks_reset(s, z, Z_NULL); 4348 return (s); 4349 } 4350 4351 4352 int 4353 inflate_blocks(s, z, r) 4354 inflate_blocks_statef *s; 4355 z_streamp z; 4356 int r; 4357 { 4358 uInt t; /* temporary storage */ 4359 uLong b; /* bit buffer */ 4360 uInt k; /* bits in bit buffer */ 4361 Bytef *p; /* input data pointer */ 4362 uInt n; /* bytes available there */ 4363 Bytef *q; /* output window write pointer */ 4364 uInt m; /* bytes to end of window or read pointer */ 4365 4366 /* copy input/output information to locals (UPDATE macro restores) */ 4367 LOAD; 4368 4369 /* process input based on current state */ 4370 /* CONSTCOND */ 4371 while (1) 4372 switch (s->mode) 4373 { 4374 case TYPE: 4375 NEEDBITS(3); 4376 t = (uInt)b & 7; 4377 s->last = t & 1; 4378 switch (t >> 1) 4379 { 4380 case 0: /* stored */ 4381 Trace((stderr, "inflate: stored block%s\n", 4382 s->last ? " (last)" : "")); 4383 DUMPBITS(3); 4384 t = k & 7; /* go to byte boundary */ 4385 DUMPBITS(t); 4386 s->mode = LENS; /* get length of stored block */ 4387 break; 4388 case 1: /* fixed */ 4389 Trace((stderr, "inflate: fixed codes block%s\n", 4390 s->last ? " (last)" : "")); 4391 { 4392 uInt bl, bd; 4393 const inflate_huft *tl, *td; 4394 4395 (void) inflate_trees_fixed(&bl, &bd, &tl, &td, 4396 z); 4397 s->sub.decode.codes = inflate_codes_new(bl, 4398 bd, tl, td, z); 4399 if (s->sub.decode.codes == Z_NULL) 4400 { 4401 r = Z_MEM_ERROR; 4402 LEAVE 4403 } 4404 } 4405 DUMPBITS(3); 4406 s->mode = CODES; 4407 break; 4408 case 2: /* dynamic */ 4409 Trace((stderr, "inflate: dynamic codes block%s\n", 4410 s->last ? 
" (last)" : "")); 4411 DUMPBITS(3); 4412 s->mode = TABLE; 4413 break; 4414 case 3: /* illegal */ 4415 DUMPBITS(3); 4416 s->mode = BADB; 4417 z->msg = "invalid block type"; 4418 r = Z_DATA_ERROR; 4419 LEAVE 4420 } 4421 break; 4422 case LENS: 4423 NEEDBITS(32); 4424 if ((((~b) >> 16) & 0xffff) != (b & 0xffff)) 4425 { 4426 s->mode = BADB; 4427 z->msg = "invalid stored block lengths"; 4428 r = Z_DATA_ERROR; 4429 LEAVE 4430 } 4431 s->sub.left = (uInt)b & 0xffff; 4432 b = k = 0; /* dump bits */ 4433 Tracev((stderr, "inflate: stored length %u\n", 4434 s->sub.left)); 4435 s->mode = s->sub.left ? STORED : (s->last ? DRY : TYPE); 4436 break; 4437 case STORED: 4438 if (n == 0) 4439 LEAVE 4440 NEEDOUT; 4441 t = s->sub.left; 4442 if (t > n) t = n; 4443 if (t > m) t = m; 4444 zmemcpy(q, p, t); 4445 p += t; n -= t; 4446 q += t; m -= t; 4447 if ((s->sub.left -= t) != 0) 4448 break; 4449 Tracev((stderr, 4450 "inflate: stored end, %lu total out\n", 4451 z->total_out + (q >= s->read ? q - s->read : 4452 (s->end - s->read) + (q - s->window)))); 4453 s->mode = s->last ? DRY : TYPE; 4454 break; 4455 case TABLE: 4456 NEEDBITS(14); 4457 s->sub.trees.table = t = (uInt)b & 0x3fff; 4458 #ifndef PKZIP_BUG_WORKAROUND 4459 if ((t & 0x1f) > 29 || ((t >> 5) & 0x1f) > 29) 4460 { 4461 s->mode = BADB; 4462 z->msg = 4463 (char *)"too many length or distance symbols"; 4464 r = Z_DATA_ERROR; 4465 LEAVE 4466 } 4467 #endif 4468 t = 258 + (t & 0x1f) + ((t >> 5) & 0x1f); 4469 /* if (t < 19) t = 19; */ 4470 if ((s->sub.trees.blens = (uIntf*)ZALLOC(z, t, 4471 sizeof (uInt))) == Z_NULL) 4472 { 4473 r = Z_MEM_ERROR; 4474 LEAVE 4475 } 4476 DUMPBITS(14); 4477 s->sub.trees.index = 0; 4478 Tracev((stderr, "inflate: table sizes ok\n")); 4479 s->mode = BTREE; 4480 /* FALLTHRU */ 4481 case BTREE: 4482 while (s->sub.trees.index < 4 + (s->sub.trees.table >> 10)) 4483 { 4484 NEEDBITS(3); 4485 s->sub.trees.blens[border[s->sub.trees.index++]] = 4486 (uInt)b & 7; 4487 DUMPBITS(3); 4488 } 4489 while (s->sub.trees.index < 19) 4490 s->sub.trees.blens[border[s->sub.trees.index++]] = 4491 0; 4492 s->sub.trees.bb = 7; 4493 t = inflate_trees_bits(s->sub.trees.blens, &s->sub.trees.bb, 4494 &s->sub.trees.tb, s->hufts, z); 4495 if (t != Z_OK) 4496 { 4497 ZFREE(z, s->sub.trees.blens); 4498 s->sub.trees.blens = Z_NULL; 4499 r = t; 4500 if (r == Z_DATA_ERROR) 4501 s->mode = BADB; 4502 LEAVE 4503 } 4504 s->sub.trees.index = 0; 4505 Tracev((stderr, "inflate: bits tree ok\n")); 4506 s->mode = DTREE; 4507 /* FALLTHRU */ 4508 case DTREE: 4509 while (t = s->sub.trees.table, 4510 s->sub.trees.index < 258 + (t & 0x1f) + 4511 ((t >> 5) & 0x1f)) 4512 { 4513 inflate_huft *h; 4514 uInt i, j, c; 4515 4516 t = s->sub.trees.bb; 4517 NEEDBITS(t); 4518 h = s->sub.trees.tb + ((uInt)b & inflate_mask[t]); 4519 t = h->word.what.Bits; 4520 c = h->base; 4521 if (c < 16) 4522 { 4523 DUMPBITS(t); 4524 s->sub.trees.blens[s->sub.trees.index++] = 4525 c; 4526 } else { /* c == 16..18 */ 4527 i = c == 18 ? 7 : c - 14; 4528 j = c == 18 ? 11 : 3; 4529 NEEDBITS(t + i); 4530 DUMPBITS(t); 4531 j += (uInt)b & inflate_mask[i]; 4532 DUMPBITS(i); 4533 i = s->sub.trees.index; 4534 t = s->sub.trees.table; 4535 if (i + j > 258 + (t & 0x1f) + 4536 ((t >> 5) & 0x1f) || 4537 (c == 16 && i < 1)) 4538 { 4539 ZFREE(z, s->sub.trees.blens); 4540 s->sub.trees.blens = Z_NULL; 4541 s->mode = BADB; 4542 z->msg = "invalid bit length repeat"; 4543 r = Z_DATA_ERROR; 4544 LEAVE 4545 } 4546 c = c == 16 ? 
s->sub.trees.blens[i - 1] : 0; 4547 do { 4548 s->sub.trees.blens[i++] = c; 4549 } while (--j); 4550 s->sub.trees.index = i; 4551 } 4552 } 4553 s->sub.trees.tb = Z_NULL; 4554 { 4555 uInt bl, bd; 4556 inflate_huft *tl, *td; 4557 inflate_codes_statef *c; 4558 4559 /* must be <= 9 for lookahead assumptions */ 4560 bl = 9; 4561 /* must be <= 9 for lookahead assumptions */ 4562 bd = 6; 4563 t = s->sub.trees.table; 4564 t = inflate_trees_dynamic(257 + (t & 0x1f), 4565 1 + ((t >> 5) & 0x1f), 4566 s->sub.trees.blens, &bl, &bd, &tl, &td, 4567 s->hufts, z); 4568 ZFREE(z, s->sub.trees.blens); 4569 s->sub.trees.blens = Z_NULL; 4570 if (t != Z_OK) 4571 { 4572 if (t == (uInt)Z_DATA_ERROR) 4573 s->mode = BADB; 4574 r = t; 4575 LEAVE 4576 } 4577 Tracev((stderr, "inflate: trees ok\n")); 4578 if ((c = inflate_codes_new(bl, bd, tl, td, z)) == 4579 Z_NULL) 4580 { 4581 r = Z_MEM_ERROR; 4582 LEAVE 4583 } 4584 s->sub.decode.codes = c; 4585 } 4586 s->mode = CODES; 4587 /* FALLTHRU */ 4588 case CODES: 4589 UPDATE; 4590 if ((r = inflate_codes(s, z, r)) != Z_STREAM_END) 4591 return (inflate_flush(s, z, r)); 4592 r = Z_OK; 4593 (void) inflate_codes_free(s->sub.decode.codes, z); 4594 LOAD; 4595 Tracev((stderr, "inflate: codes end, %lu total out\n", 4596 z->total_out + (q >= s->read ? q - s->read : 4597 (s->end - s->read) + (q - s->window)))); 4598 if (!s->last) 4599 { 4600 s->mode = TYPE; 4601 break; 4602 } 4603 s->mode = DRY; 4604 /* FALLTHRU */ 4605 case DRY: 4606 FLUSH; 4607 if (s->read != s->write) 4608 LEAVE 4609 s->mode = DONEB; 4610 /* FALLTHRU */ 4611 case DONEB: 4612 r = Z_STREAM_END; 4613 LEAVE 4614 case BADB: 4615 r = Z_DATA_ERROR; 4616 LEAVE 4617 default: 4618 r = Z_STREAM_ERROR; 4619 LEAVE 4620 } 4621 /* NOTREACHED */ 4622 /* otherwise lint complains */ 4623 } 4624 4625 4626 int 4627 inflate_blocks_free(s, z) 4628 inflate_blocks_statef *s; 4629 z_streamp z; 4630 { 4631 inflate_blocks_reset(s, z, Z_NULL); 4632 ZFREE(z, s->window); 4633 s->window = Z_NULL; 4634 ZFREE(z, s->hufts); 4635 s->hufts = Z_NULL; 4636 ZFREE(z, s); 4637 Trace((stderr, "inflate: blocks freed\n")); 4638 return (Z_OK); 4639 } 4640 4641 4642 void 4643 inflate_set_dictionary(s, d, n) 4644 inflate_blocks_statef *s; 4645 const Bytef *d; 4646 uInt n; 4647 { 4648 Assert(s->window + n <= s->end, "set dict"); 4649 zmemcpy((charf *)s->window, d, n); 4650 s->read = s->write = s->window + n; 4651 } 4652 4653 /* 4654 * Returns true if inflate is currently at the end of a block 4655 * generated by Z_SYNC_FLUSH or Z_FULL_FLUSH. 4656 * IN assertion: s != Z_NULL 4657 */ 4658 int 4659 inflate_blocks_sync_point(s) 4660 inflate_blocks_statef *s; 4661 { 4662 return (s->mode == LENS); 4663 } 4664 4665 /* 4666 * This subroutine adds the data at next_in/avail_in to the output history 4667 * without performing any output. The output buffer must be "caught up"; 4668 * i.e. no pending output (hence s->read equals s->write), and the state must 4669 * be BLOCKS (i.e. we should be willing to see the start of a series of 4670 * BLOCKS). On exit, the output will also be caught up, and the checksum 4671 * will have been updated if need be. 
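 *
 * In the PPP code this is reached through inflateIncomp() above: when
 * a peer sends a packet uncompressed (for instance because compression
 * would have expanded it), the receiver still feeds the packet's bytes
 * through here so that its history window stays in step with the
 * sender's compressor.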
4672 */ 4673 int 4674 inflate_addhistory(s, z) 4675 inflate_blocks_statef *s; 4676 z_stream *z; 4677 { 4678 uLong b; /* bit buffer */ /* NOT USED HERE */ 4679 uInt k; /* bits in bit buffer */ /* NOT USED HERE */ 4680 uInt t; /* temporary storage */ 4681 Bytef *p; /* input data pointer */ 4682 uInt n; /* bytes available there */ 4683 Bytef *q; /* output window write pointer */ 4684 uInt m; /* bytes to end of window or read pointer */ 4685 4686 if (s->read != s->write) 4687 return (Z_STREAM_ERROR); 4688 if (s->mode != TYPE) 4689 return (Z_DATA_ERROR); 4690 4691 /* we're ready to rock */ 4692 LOAD; 4693 /* 4694 * while there is input ready, copy to output buffer, moving 4695 * pointers as needed. 4696 */ 4697 while (n) { 4698 t = n; /* how many to do */ 4699 /* is there room until end of buffer? */ 4700 if (t > m) t = m; 4701 /* update check information */ 4702 if (s->checkfn != Z_NULL) 4703 s->check = (*s->checkfn)(s->check, q, t); 4704 zmemcpy(q, p, t); 4705 q += t; 4706 p += t; 4707 n -= t; 4708 z->total_out += t; 4709 s->read = q; /* drag read pointer forward */ 4710 /* WWRAP */ /* expand WWRAP macro by hand to handle s->read */ 4711 if (q == s->end) { 4712 s->read = q = s->window; 4713 m = WAVAIL; 4714 } 4715 } 4716 UPDATE; 4717 return (Z_OK); 4718 } 4719 4720 4721 /* 4722 * At the end of a Deflate-compressed PPP packet, we expect to have seen 4723 * a `stored' block type value but not the (zero) length bytes. 4724 */ 4725 int 4726 inflate_packet_flush(s) 4727 inflate_blocks_statef *s; 4728 { 4729 if (s->mode != LENS) 4730 return (Z_DATA_ERROR); 4731 s->mode = TYPE; 4732 return (Z_OK); 4733 } 4734 /* --- infblock.c */ 4735 4736 /* +++ inftrees.c */ 4737 /* 4738 * inftrees.c -- generate Huffman trees for efficient decoding 4739 * Copyright (C) 1995-1998 Mark Adler 4740 * For conditions of distribution and use, see copyright notice in zlib.h 4741 */ 4742 4743 /* #include "zutil.h" */ 4744 /* #include "inftrees.h" */ 4745 4746 const char inflate_copyright[] = 4747 " inflate 1.1.3 Copyright 1995-1998 Mark Adler "; 4748 /* 4749 * If you use the zlib library in a product, an acknowledgment is 4750 * welcome in the documentation of your product. If for some reason 4751 * you cannot include such an acknowledgment, I would appreciate that 4752 * you keep this copyright string in the executable of your product. 4753 */ 4754 4755 #ifndef NO_DUMMY_DECL 4756 struct internal_state {int dummy; }; /* for buggy compilers */ 4757 #endif 4758 4759 /* simplify the use of the inflate_huft type with some defines */ 4760 #define exop word.what.Exop 4761 #define bits word.what.Bits 4762 4763 4764 local int huft_build OF(( 4765 uIntf *, /* code lengths in bits */ 4766 uInt, /* number of codes */ 4767 uInt, /* number of "simple" codes */ 4768 const uIntf *, /* list of base values for non-simple codes */ 4769 const uIntf *, /* list of extra bits for non-simple codes */ 4770 inflate_huft * FAR*, /* result: starting table */ 4771 uIntf *, /* maximum lookup bits (returns actual) */ 4772 inflate_huft *hp, /* space for trees */ 4773 uInt *hn, /* hufts used in space */ 4774 uIntf *v)); /* working area: values in order of bit length */ 4775 4776 /* Tables for deflate from PKZIP's appnote.txt. 
*/ 4777 local const uInt cplens[31] = { /* Copy lengths for literal codes 257..285 */ 4778 3, 4, 5, 6, 7, 8, 9, 10, 11, 13, 15, 17, 19, 23, 27, 31, 4779 35, 43, 51, 59, 67, 83, 99, 115, 131, 163, 195, 227, 258, 0, 0}; 4780 /* see note #13 above about 258 */ 4781 local const uInt cplext[31] = { /* Extra bits for literal codes 257..285 */ 4782 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 2, 2, 2, 2, 4783 3, 3, 3, 3, 4, 4, 4, 4, 5, 5, 5, 5, 0, 112, 112}; 4784 /* 112==invalid */ 4785 local const uInt cpdist[30] = { /* Copy offsets for distance codes 0..29 */ 4786 1, 2, 3, 4, 5, 7, 9, 13, 17, 25, 33, 49, 65, 97, 129, 193, 4787 257, 385, 513, 769, 1025, 1537, 2049, 3073, 4097, 6145, 4788 8193, 12289, 16385, 24577}; 4789 local const uInt cpdext[30] = { /* Extra bits for distance codes */ 4790 0, 0, 0, 0, 1, 1, 2, 2, 3, 3, 4, 4, 5, 5, 6, 6, 4791 7, 7, 8, 8, 9, 9, 10, 10, 11, 11, 4792 12, 12, 13, 13}; 4793 4794 /* 4795 * Huffman code decoding is performed using a multi-level table 4796 * lookup. The fastest way to decode is to simply build a lookup 4797 * table whose size is determined by the longest code. However, the 4798 * time it takes to build this table can also be a factor if the data 4799 * being decoded is not very long. The most common codes are 4800 * necessarily the shortest codes, so those codes dominate the 4801 * decoding time, and hence the speed. The idea is that you can have a 4802 * shorter table that decodes the shorter, more probable codes, and 4803 * then point to subsidiary tables for the longer codes. The time it 4804 * costs to decode the longer codes is then traded against the time it 4805 * takes to make longer tables. 4806 * 4807 * The results of this trade-off are in the variables lbits and dbits 4808 * below. lbits is the number of bits the first level table for 4809 * literal/length codes can decode in one step, and dbits is the same 4810 * thing for the distance codes. Subsequent tables are also less than 4811 * or equal to those sizes. These values may be adjusted either when 4812 * all of the codes are shorter than that, in which case the longest 4813 * code length in bits is used, or when the shortest code is *longer* 4814 * than the requested table size, in which case the length of the 4815 * shortest code in bits is used. 4816 * 4817 * There are two different values for the two tables, since they code 4818 * a different number of possibilities each. The literal/length table 4819 * codes 286 possible values, or in a flat code, a little over eight 4820 * bits. The distance table codes 30 possible values, or a little 4821 * less than five bits, flat. The optimum values for speed end up 4822 * being about one bit more than those, so lbits is 8+1 and dbits is 4823 * 5+1. The optimum values may differ, though, from machine to machine, 4824 * and possibly even between compilers. Your mileage may vary. 4825 */ 4826 4827 4828 /* If BMAX needs to be larger than 16, then h and x[] should be uLong. 
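 * DEFLATE never emits a code longer than 15 bits, hence BMAX == 15
 * below. With the default lbits == 9, for example, the first-level
 * literal/length table has 1 << 9 == 512 entries, and longer codes
 * chain through smaller second-level tables; MANY bounds the total
 * number of hufts allocated either way.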
*/ 4829 #define BMAX 15 /* maximum bit length of any code */ 4830 4831 4832 local int 4833 huft_build(b, n, s, d, e, t, m, hp, hn, v) 4834 uIntf *b; /* code lengths in bits (all assumed <= BMAX) */ 4835 uInt n; /* number of codes (assumed <= 288) */ 4836 uInt s; /* number of simple-valued codes (0..s-1) */ 4837 const uIntf *d; /* list of base values for non-simple codes */ 4838 const uIntf *e; /* list of extra bits for non-simple codes */ 4839 inflate_huft * FAR *t; /* result: starting table */ 4840 uIntf *m; /* maximum lookup bits, returns actual */ 4841 inflate_huft *hp; /* space for trees */ 4842 uInt *hn; /* hufts used in space */ 4843 uIntf *v; /* working area: values in order of bit length */ 4844 /* 4845 * Given a list of code lengths and a maximum table size, make a set 4846 * of tables to decode that set of codes. Return Z_OK on success, 4847 * Z_BUF_ERROR if the given code set is incomplete (the tables are 4848 * still built in this case), Z_DATA_ERROR if the input is invalid (an 4849 * over-subscribed set of lengths), or Z_MEM_ERROR if not enough 4850 * memory. 4851 */ 4852 { 4853 4854 uInt a; /* counter for codes of length k */ 4855 uInt c[BMAX+1]; /* bit length count table */ 4856 uInt f; /* i repeats in table every f entries */ 4857 int g; /* maximum code length */ 4858 int h; /* table level */ 4859 register uInt i; /* counter, current code */ 4860 register uInt j; /* counter */ 4861 register int k; /* number of bits in current code */ 4862 int l; /* bits per table (returned in m) */ 4863 register uIntf *p; /* pointer into c[], b[], or v[] */ 4864 inflate_huft *q; /* points to current table */ 4865 struct inflate_huft_s r; /* table entry for structure assignment */ 4866 inflate_huft *u[BMAX]; /* table stack */ 4867 uInt mask; /* (1 << w) - 1, to avoid cc -O bug on HP */ 4868 register int w; /* bits before this table == (l * h) */ 4869 uInt x[BMAX+1]; /* bit offsets, then code stack */ 4870 uIntf *xp; /* pointer into x */ 4871 int y; /* number of dummy codes added */ 4872 uInt z; /* number of entries in current table */ 4873 4874 (void) inflate_copyright; 4875 /* Generate counts for each bit length */ 4876 p = c; 4877 #define C0 *p++ = 0; 4878 #define C2 C0 C0 C0 C0 4879 #define C4 C2 C2 C2 C2 4880 C4 /* clear c[]--assume BMAX+1 is 16 */ 4881 p = b; i = n; 4882 do { 4883 c[*p++]++; /* assume all entries <= BMAX */ 4884 } while (--i); 4885 if (c[0] == n) /* null input--all zero length codes */ 4886 { 4887 *t = (inflate_huft *)Z_NULL; 4888 *m = 0; 4889 return (Z_OK); 4890 } 4891 4892 4893 /* Find minimum and maximum length, bound *m by those */ 4894 l = *m; 4895 for (j = 1; j <= BMAX; j++) 4896 if (c[j]) 4897 break; 4898 k = j; /* minimum code length */ 4899 if ((uInt)l < j) 4900 l = j; 4901 for (i = BMAX; i; i--) 4902 if (c[i]) 4903 break; 4904 g = i; /* maximum code length */ 4905 if ((uInt)l > i) 4906 l = i; 4907 *m = l; 4908 4909 4910 /* Adjust last length count to fill out codes, if needed */ 4911 for (y = 1 << j; j < i; j++, y <<= 1) 4912 if ((y -= c[j]) < 0) 4913 return (Z_DATA_ERROR); 4914 if ((y -= c[i]) < 0) 4915 return (Z_DATA_ERROR); 4916 c[i] += y; 4917 4918 4919 /* Generate starting offsets into the value table for each length */ 4920 x[1] = j = 0; 4921 p = c + 1; xp = x + 2; 4922 while (--i) { /* note that i == g from above */ 4923 *xp++ = (j += *p++); 4924 } 4925 4926 4927 /* Make a table of values in order of bit lengths */ 4928 p = b; i = 0; 4929 do { 4930 if ((j = *p++) != 0) 4931 v[x[j]++] = i; 4932 } while (++i < n); 4933 n = x[g]; /* set n to length of v */ 
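	/*
	 * For instance, for code lengths b[] = {1, 2, 3, 3} the counts
	 * are c[1] = 1, c[2] = 1, c[3] = 2, the offsets come out as
	 * x[1] = 0, x[2] = 1, x[3] = 2, and the canonical codes built
	 * below are 0, 10, 110 and 111 (written most significant bit
	 * first). The tables are indexed by bit-reversed codes, because
	 * DEFLATE delivers the most significant code bit first out of a
	 * least-significant-bit-first byte stream; that is what the
	 * "backwards increment" of i below implements.
	 */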
4934 4935 4936 /* Generate the Huffman codes and for each, make the table entries */ 4937 x[0] = i = 0; /* first Huffman code is zero */ 4938 p = v; /* grab values in bit order */ 4939 h = -1; /* no tables yet--level -1 */ 4940 w = -l; /* bits decoded == (l * h) */ 4941 u[0] = (inflate_huft *)Z_NULL; /* just to keep compilers happy */ 4942 q = (inflate_huft *)Z_NULL; /* ditto */ 4943 z = 0; /* ditto */ 4944 4945 /* go through the bit lengths (k already is bits in shortest code) */ 4946 for (; k <= g; k++) { 4947 a = c[k]; 4948 while (a--) { 4949 /* 4950 * here i is the Huffman code of length k bits 4951 * for value *p. make tables up to required 4952 * level. 4953 */ 4954 while (k > w + l) { 4955 h++; 4956 w += l; /* previous table always l bits */ 4957 4958 /* 4959 * compute minimum size table less 4960 * than or equal to l bits 4961 */ 4962 z = g - w; 4963 /* table size upper limit */ 4964 z = z > (uInt)l ? l : z; 4965 /* try a k-w bit table */ 4966 if ((f = 1 << (j = k - w)) > a + 1) { 4967 /* too few codes for k-w bit table */ 4968 /* deduct codes from patterns left */ 4969 f -= a + 1; 4970 xp = c + k; 4971 if (j < z) 4972 /* 4973 * try smaller tables 4974 * up to z bits 4975 */ 4976 while (++j < z) { 4977 /* 4978 * enough 4979 * codes to 4980 * use up j 4981 * bits 4982 */ 4983 if ((f <<= 1) <= *++xp) 4984 break; 4985 f -= *xp; 4986 /* 4987 * else deduct 4988 * codes from 4989 * patterns 4990 */ 4991 } 4992 } 4993 /* table entries for j-bit table */ 4994 z = 1 << j; 4995 4996 /* allocate new table */ 4997 /* (note: doesn't matter for fixed) */ 4998 /* not enough memory */ 4999 if (*hn + z > MANY) 5000 return (Z_MEM_ERROR); 5001 u[h] = q = hp + *hn; 5002 *hn += z; 5003 5004 /* connect to last table, if there is one */ 5005 if (h) { 5006 /* save pattern for backing up */ 5007 x[h] = i; 5008 /* bits to dump before this table */ 5009 r.bits = (Byte)l; 5010 /* bits in this table */ 5011 r.exop = (Byte)j; 5012 j = i >> (w - l); 5013 /* offset to this table */ 5014 r.base = (uInt)(q - u[h-1] - j); 5015 /* connect to last table */ 5016 u[h-1][j] = r; 5017 } else 5018 /* first table is returned result */ 5019 *t = q; 5020 } 5021 5022 /* set up table entry in r */ 5023 r.bits = (Byte)(k - w); 5024 if (p >= v + n) 5025 /* out of values--invalid code */ 5026 r.exop = 128 + 64; 5027 else if (*p < s) 5028 { 5029 /* 256 is end-of-block */ 5030 r.exop = (Byte)(*p < 256 ? 0 : 32 + 64); 5031 /* simple code is just the value */ 5032 r.base = *p++; 5033 } 5034 else 5035 { 5036 /* non-simple--look up in lists */ 5037 r.exop = (Byte)(e[*p - s] + 16 + 64); 5038 r.base = d[*p++ - s]; 5039 } 5040 5041 /* fill code-like entries with r */ 5042 f = 1 << (k - w); 5043 for (j = i >> w; j < z; j += f) 5044 q[j] = r; 5045 5046 /* backwards increment the k-bit code i */ 5047 for (j = 1 << (k - 1); i & j; j >>= 1) 5048 i ^= j; 5049 i ^= j; 5050 5051 /* backup over finished tables */ 5052 mask = (1 << w) - 1; /* needed on HP, cc -O bug */ 5053 while ((i & mask) != x[h]) 5054 { 5055 h--; /* don't need to update q */ 5056 w -= l; 5057 mask = (1 << w) - 1; 5058 } 5059 } 5060 } 5061 5062 5063 /* Return Z_BUF_ERROR if we were given an incomplete table */ 5064 return (y != 0 && g != 1 ? 
Z_BUF_ERROR : Z_OK); 5065 } 5066 5067 5068 int 5069 inflate_trees_bits(c, bb, tb, hp, z) 5070 uIntf *c; /* 19 code lengths */ 5071 uIntf *bb; /* bits tree desired/actual depth */ 5072 inflate_huft * FAR *tb; /* bits tree result */ 5073 inflate_huft *hp; /* space for trees */ 5074 z_streamp z; /* for zfree function */ 5075 { 5076 int r; 5077 uInt hn = 0; /* hufts used in space */ 5078 uIntf v[19]; /* work area for huft_build */ 5079 5080 r = huft_build(c, 19, 19, (uIntf*)Z_NULL, (uIntf*)Z_NULL, tb, bb, 5081 hp, &hn, v); 5082 if (r == Z_DATA_ERROR) 5083 z->msg = "oversubscribed dynamic bit lengths tree"; 5084 else if (r == Z_BUF_ERROR || *bb == 0) 5085 { 5086 z->msg = "incomplete dynamic bit lengths tree"; 5087 r = Z_DATA_ERROR; 5088 } 5089 return (r); 5090 } 5091 5092 5093 int 5094 inflate_trees_dynamic(nl, nd, c, bl, bd, tl, td, hp, z) 5095 uInt nl; /* number of literal/length codes */ 5096 uInt nd; /* number of distance codes */ 5097 uIntf *c; /* that many (total) code lengths */ 5098 uIntf *bl; /* literal desired/actual bit depth */ 5099 uIntf *bd; /* distance desired/actual bit depth */ 5100 inflate_huft * FAR *tl; /* literal/length tree result */ 5101 inflate_huft * FAR *td; /* distance tree result */ 5102 inflate_huft *hp; /* space for trees */ 5103 z_streamp z; /* for zfree function */ 5104 { 5105 int r; 5106 uInt hn = 0; /* hufts used in space */ 5107 uIntf v[288]; /* work area for huft_build */ 5108 5109 /* build literal/length tree */ 5110 r = huft_build(c, nl, 257, cplens, cplext, tl, bl, hp, &hn, v); 5111 if (r != Z_OK || *bl == 0) 5112 { 5113 if (r == Z_DATA_ERROR) 5114 z->msg = "oversubscribed literal/length tree"; 5115 else if (r != Z_MEM_ERROR) 5116 { 5117 z->msg = "incomplete literal/length tree"; 5118 r = Z_DATA_ERROR; 5119 } 5120 return (r); 5121 } 5122 5123 /* build distance tree */ 5124 r = huft_build(c + nl, nd, 0, cpdist, cpdext, td, bd, hp, &hn, v); 5125 if (r != Z_OK || (*bd == 0 && nl > 257)) 5126 { 5127 if (r == Z_DATA_ERROR) 5128 z->msg = "oversubscribed distance tree"; 5129 else if (r == Z_BUF_ERROR) { 5130 #ifdef PKZIP_BUG_WORKAROUND 5131 r = Z_OK; 5132 #else 5133 z->msg = "incomplete distance tree"; 5134 r = Z_DATA_ERROR; 5135 } else if (r != Z_MEM_ERROR) { 5136 z->msg = "empty distance tree with lengths"; 5137 r = Z_DATA_ERROR; 5138 #endif 5139 } 5140 return (r); 5141 } 5142 5143 /* done */ 5144 return (Z_OK); 5145 } 5146 5147 5148 /* build fixed tables only once--keep them here */ 5149 /* #define BUILDFIXED */ 5150 #ifdef BUILDFIXED 5151 local int fixed_built = 0; 5152 #define FIXEDH 544 /* number of hufts used by fixed tables */ 5153 local inflate_huft fixed_mem[FIXEDH]; 5154 local uInt fixed_bl; 5155 local uInt fixed_bd; 5156 local inflate_huft *fixed_tl; 5157 local inflate_huft *fixed_td; 5158 #else 5159 #include "inffixed.h" 5160 #endif 5161 5162 /*ARGSUSED*/ 5163 int 5164 inflate_trees_fixed(bl, bd, tl, td, z) 5165 uIntf *bl; /* literal desired/actual bit depth */ 5166 uIntf *bd; /* distance desired/actual bit depth */ 5167 const inflate_huft * FAR *tl; /* literal/length tree result */ 5168 const inflate_huft * FAR *td; /* distance tree result */ 5169 z_streamp z; /* for memory allocation */ 5170 { 5171 #ifdef BUILDFIXED 5172 /* 5173 * build fixed tables if not already (multiple overlapped 5174 * executions ok) 5175 */ 5176 if (!fixed_built) 5177 { 5178 int k; /* temporary variable */ 5179 uInt f = 0; /* number of hufts used in fixed_mem */ 5180 uIntf *c; /* length list for huft_build */ 5181 uIntf *v; 5182 5183 /* allocate memory */ 5184 if ((c = 
(uIntf*)ZALLOC(z, 288, sizeof (uInt))) == Z_NULL) 5185 return (Z_MEM_ERROR); 5186 if ((v = (uIntf*)ZALLOC(z, 288, sizeof (uInt))) == Z_NULL) 5187 { 5188 ZFREE(z, c); 5189 return (Z_MEM_ERROR); 5190 } 5191 /* literal table */ 5192 for (k = 0; k < 144; k++) 5193 c[k] = 8; 5194 for (; k < 256; k++) 5195 c[k] = 9; 5196 for (; k < 280; k++) 5197 c[k] = 7; 5198 for (; k < 288; k++) 5199 c[k] = 8; 5200 fixed_bl = 9; 5201 (void) huft_build(c, 288, 257, cplens, cplext, &fixed_tl, 5202 &fixed_bl, fixed_mem, &f, v); 5203 5204 /* distance table */ 5205 for (k = 0; k < 30; k++) 5206 c[k] = 5; 5207 fixed_bd = 5; 5208 (void) huft_build(c, 30, 0, cpdist, cpdext, &fixed_td, 5209 &fixed_bd, fixed_mem, &f, v); 5210 5211 /* done */ 5212 ZFREE(z, v); 5213 ZFREE(z, c); 5214 fixed_built = 1; 5215 } 5216 #endif 5217 *bl = fixed_bl; 5218 *bd = fixed_bd; 5219 *tl = fixed_tl; 5220 *td = fixed_td; 5221 return (Z_OK); 5222 } 5223 /* --- inftrees.c */ 5224 5225 /* +++ infcodes.c */ 5226 /* 5227 * infcodes.c -- process literals and length/distance pairs 5228 * Copyright (C) 1995-1998 Mark Adler 5229 * For conditions of distribution and use, see copyright notice in zlib.h 5230 */ 5231 5232 /* #include "zutil.h" */ 5233 /* #include "inftrees.h" */ 5234 /* #include "infblock.h" */ 5235 /* #include "infcodes.h" */ 5236 /* #include "infutil.h" */ 5237 5238 /* +++ inffast.h */ 5239 /* 5240 * inffast.h -- header to use inffast.c 5241 * Copyright (C) 1995-1998 Mark Adler 5242 * For conditions of distribution and use, see copyright notice in zlib.h 5243 */ 5244 5245 /* 5246 * WARNING: this file should *not* be used by applications. It is part 5247 * of the implementation of the compression library and is subject to 5248 * change. Applications should only use zlib.h. 5249 */ 5250 5251 extern int inflate_fast OF(( 5252 uInt, 5253 uInt, 5254 const inflate_huft *, 5255 const inflate_huft *, 5256 inflate_blocks_statef *, 5257 z_streamp)); 5258 /* --- inffast.h */ 5259 5260 /* simplify the use of the inflate_huft type with some defines */ 5261 #define exop word.what.Exop 5262 #define bits word.what.Bits 5263 5264 /* inflate codes private state */ 5265 struct inflate_codes_state { 5266 5267 /* mode */ 5268 enum { /* waiting for "i:"=input, "o:"=output, "x:"=nothing */ 5269 START, /* x: set up for LEN */ 5270 LEN, /* i: get length/literal/eob next */ 5271 LENEXT, /* i: getting length extra (have base) */ 5272 DIST, /* i: get distance next */ 5273 DISTEXT, /* i: getting distance extra */ 5274 COPY, /* o: copying bytes in window, waiting for space */ 5275 LIT, /* o: got literal, waiting for output space */ 5276 WASH, /* o: got eob, possibly still output waiting */ 5277 END, /* x: got eob and all data flushed */ 5278 BADCODE} /* x: got error */ 5279 mode; /* current inflate_codes mode */ 5280 5281 /* mode dependent information */ 5282 uInt len; 5283 union { 5284 struct { 5285 const inflate_huft *tree; /* pointer into tree */ 5286 uInt need; /* bits needed */ 5287 } code; /* if LEN or DIST, where in tree */ 5288 uInt lit; /* if LIT, literal */ 5289 struct { 5290 uInt get; /* bits to get for extra */ 5291 uInt dist; /* distance back to copy from */ 5292 } copy; /* if EXT or COPY, where and how much */ 5293 } sub; /* submode */ 5294 5295 /* mode independent information */ 5296 Byte lbits; /* ltree bits decoded per branch */ 5297 Byte dbits; /* dtree bits decoded per branch */ 5298 const inflate_huft *ltree; /* literal/length/eob tree */ 5299 const inflate_huft *dtree; /* distance tree */ 5300 5301 }; 5302 5303 5304 inflate_codes_statef * 5305 
inflate_codes_new(bl, bd, tl, td, z) 5306 uInt bl, bd; 5307 const inflate_huft *tl; 5308 const inflate_huft *td; /* need separate declaration for Borland C++ */ 5309 z_streamp z; 5310 { 5311 inflate_codes_statef *c; 5312 5313 if ((c = (inflate_codes_statef *) 5314 ZALLOC(z, 1, sizeof (struct inflate_codes_state))) != Z_NULL) 5315 { 5316 c->mode = START; 5317 c->lbits = (Byte)bl; 5318 c->dbits = (Byte)bd; 5319 c->ltree = tl; 5320 c->dtree = td; 5321 Tracev((stderr, "inflate: codes new\n")); 5322 } 5323 return (c); 5324 } 5325 5326 5327 int 5328 inflate_codes(s, z, r) 5329 inflate_blocks_statef *s; 5330 z_streamp z; 5331 int r; 5332 { 5333 uInt j; /* temporary storage */ 5334 const inflate_huft *t; /* temporary pointer */ 5335 uInt e; /* extra bits or operation */ 5336 uLong b; /* bit buffer */ 5337 uInt k; /* bits in bit buffer */ 5338 Bytef *p; /* input data pointer */ 5339 uInt n; /* bytes available there */ 5340 Bytef *q; /* output window write pointer */ 5341 uInt m; /* bytes to end of window or read pointer */ 5342 Bytef *f; /* pointer to copy strings from */ 5343 inflate_codes_statef *c = s->sub.decode.codes; /* codes state */ 5344 5345 /* copy input/output information to locals (UPDATE macro restores) */ 5346 LOAD; 5347 5348 /* process input and output based on current state */ 5349 /* CONSTCOND */ 5350 while (1) 5351 /* waiting for "i:"=input, "o:"=output, "x:"=nothing */ 5352 switch (c->mode) { 5353 case START: /* x: set up for LEN */ 5354 #ifndef SLOW 5355 if (m >= 258 && n >= 10) 5356 { 5357 UPDATE; 5358 r = inflate_fast(c->lbits, c->dbits, 5359 c->ltree, c->dtree, s, z); 5360 LOAD; 5361 if (r != Z_OK) { 5362 c->mode = r == Z_STREAM_END ? 5363 WASH : BADCODE; 5364 break; 5365 } 5366 } 5367 #endif /* !SLOW */ 5368 c->sub.code.need = c->lbits; 5369 c->sub.code.tree = c->ltree; 5370 c->mode = LEN; 5371 /* FALLTHRU */ 5372 case LEN: /* i: get length/literal/eob next */ 5373 j = c->sub.code.need; 5374 NEEDBITS(j); 5375 t = c->sub.code.tree + 5376 ((uInt)b & inflate_mask[j]); 5377 DUMPBITS(t->bits); 5378 e = (uInt)(t->exop); 5379 if (e == 0) { /* literal */ 5380 c->sub.lit = t->base; 5381 Tracevv((stderr, t->base >= 0x20 && 5382 t->base < 0x7f ? 
int
inflate_codes(s, z, r)
inflate_blocks_statef *s;
z_streamp z;
int r;
{
    uInt j;                 /* temporary storage */
    const inflate_huft *t;  /* temporary pointer */
    uInt e;                 /* extra bits or operation */
    uLong b;                /* bit buffer */
    uInt k;                 /* bits in bit buffer */
    Bytef *p;               /* input data pointer */
    uInt n;                 /* bytes available there */
    Bytef *q;               /* output window write pointer */
    uInt m;                 /* bytes to end of window or read pointer */
    Bytef *f;               /* pointer to copy strings from */
    inflate_codes_statef *c = s->sub.decode.codes;  /* codes state */

    /* copy input/output information to locals (UPDATE macro restores) */
    LOAD;

    /* process input and output based on current state */
    /* CONSTCOND */
    while (1)
        /* waiting for "i:"=input, "o:"=output, "x:"=nothing */
        switch (c->mode) {
        case START:     /* x: set up for LEN */
#ifndef SLOW
            if (m >= 258 && n >= 10)
            {
                UPDATE;
                r = inflate_fast(c->lbits, c->dbits,
                    c->ltree, c->dtree, s, z);
                LOAD;
                if (r != Z_OK) {
                    c->mode = r == Z_STREAM_END ?
                        WASH : BADCODE;
                    break;
                }
            }
#endif /* !SLOW */
            c->sub.code.need = c->lbits;
            c->sub.code.tree = c->ltree;
            c->mode = LEN;
            /* FALLTHRU */
        case LEN:       /* i: get length/literal/eob next */
            j = c->sub.code.need;
            NEEDBITS(j);
            t = c->sub.code.tree +
                ((uInt)b & inflate_mask[j]);
            DUMPBITS(t->bits);
            e = (uInt)(t->exop);
            if (e == 0) {   /* literal */
                c->sub.lit = t->base;
                Tracevv((stderr, t->base >= 0x20 &&
                    t->base < 0x7f ?
                    "inflate: literal '%c'\n" :
                    "inflate: literal 0x%02x\n",
                    t->base));
                c->mode = LIT;
                break;
            }
            if (e & 16) {   /* length */
                c->sub.copy.get = e & 15;
                c->len = t->base;
                c->mode = LENEXT;
                break;
            }
            if ((e & 64) == 0) {    /* next table */
                c->sub.code.need = e;
                c->sub.code.tree = t + t->base;
                break;
            }
            if (e & 32) {   /* end of block */
                Tracevv((stderr,
                    "inflate: end of block\n"));
                c->mode = WASH;
                break;
            }
            c->mode = BADCODE;  /* invalid code */
            z->msg = "invalid literal/length code";
            r = Z_DATA_ERROR;
            LEAVE
        case LENEXT:    /* i: getting length extra (have base) */
            j = c->sub.copy.get;
            NEEDBITS(j);
            c->len += (uInt)b & inflate_mask[j];
            DUMPBITS(j);
            c->sub.code.need = c->dbits;
            c->sub.code.tree = c->dtree;
            Tracevv((stderr,
                "inflate: length %u\n", c->len));
            c->mode = DIST;
            /* FALLTHRU */
        case DIST:      /* i: get distance next */
            j = c->sub.code.need;
            NEEDBITS(j);
            t = c->sub.code.tree + ((uInt)b & inflate_mask[j]);
            DUMPBITS(t->bits);
            e = (uInt)(t->exop);
            if (e & 16) {   /* distance */
                c->sub.copy.get = e & 15;
                c->sub.copy.dist = t->base;
                c->mode = DISTEXT;
                break;
            }
            if ((e & 64) == 0) {    /* next table */
                c->sub.code.need = e;
                c->sub.code.tree = t + t->base;
                break;
            }
            c->mode = BADCODE;  /* invalid code */
            z->msg = "invalid distance code";
            r = Z_DATA_ERROR;
            LEAVE
        case DISTEXT:   /* i: getting distance extra */
            j = c->sub.copy.get;
            NEEDBITS(j);
            c->sub.copy.dist += (uInt)b & inflate_mask[j];
            DUMPBITS(j);
            Tracevv((stderr,
                "inflate: distance %u\n",
                c->sub.copy.dist));
            c->mode = COPY;
            /* FALLTHRU */
        case COPY:
            /* o: copying bytes in window, waiting for space */
#ifndef __TURBOC__ /* Turbo C bug for following expression */
            f = (uInt)(q - s->window) < c->sub.copy.dist ?
                s->end - (c->sub.copy.dist - (q - s->window)) :
                q - c->sub.copy.dist;
#else
            f = q - c->sub.copy.dist;
            if ((uInt)(q - s->window) < c->sub.copy.dist)
                f = s->end - (c->sub.copy.dist -
                    (uInt)(q - s->window));
#endif
            while (c->len)
            {
                NEEDOUT;
                OUTBYTE(*f++);
                if (f == s->end)
                    f = s->window;
                c->len--;
            }
            c->mode = START;
            break;
        case LIT:       /* o: got literal, waiting for output space */
            NEEDOUT;
            OUTBYTE(c->sub.lit);
            c->mode = START;
            break;
        case WASH:      /* o: got eob, possibly more output */
            if (k > 7) {    /* return unused byte, if any */
                Assert(k < 16,
                    "inflate_codes grabbed too many bytes");
                k -= 8;
                n++;
                p--;    /* can always return one */
            }
            FLUSH;
            if (s->read != s->write)
                LEAVE
            c->mode = END;
            /* FALLTHRU */
        case END:
            r = Z_STREAM_END;
            LEAVE
        case BADCODE:   /* x: got error */
            r = Z_DATA_ERROR;
            LEAVE
        default:
            r = Z_STREAM_ERROR;
            LEAVE
        }
    /* NOTREACHED */
    /* otherwise lint complains */
}
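
/*
 * Editorial note, not part of the original source: the COPY state
 * above reads c->len bytes starting c->sub.copy.dist bytes behind the
 * write pointer, wrapping from s->end back to s->window when the
 * source runs off the end of the circular window. A stand-alone
 * sketch of that addressing (hypothetical names; assumes the caller
 * guarantees writepos + len stays inside the window):
 */
#ifdef ZLIB_EXAMPLE_NOTES
static void
ex_window_copy(unsigned char *win, unsigned winsize,
    unsigned writepos, unsigned dist, unsigned len)
{
    unsigned char *q = win + writepos;      /* write pointer */
    unsigned char *f = (writepos < dist) ?
        win + winsize - (dist - writepos) : /* source wrapped */
        q - dist;                           /* source behind us */

    while (len--) {
        *q++ = *f++;                        /* overlap-safe, byte-wise */
        if (f == win + winsize)
            f = win;                        /* wrap source pointer */
    }
}
#endif /* ZLIB_EXAMPLE_NOTES */
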
void
inflate_codes_free(c, z)
inflate_codes_statef *c;
z_streamp z;
{
    ZFREE(z, c);
    Tracev((stderr, "inflate: codes free\n"));
}
/* --- infcodes.c */

/* +++ infutil.c */
/*
 * inflate_util.c -- data and routines common to blocks and codes
 * Copyright (C) 1995-1998 Mark Adler
 * For conditions of distribution and use, see copyright notice in zlib.h
 */

/* #include "zutil.h" */
/* #include "infblock.h" */
/* #include "inftrees.h" */
/* #include "infcodes.h" */
/* #include "infutil.h" */

#ifndef NO_DUMMY_DECL
struct inflate_codes_state {int dummy; };   /* for buggy compilers */
#endif

/* And'ing with mask[n] masks the lower n bits */
uInt inflate_mask[17] = {
    0x0000,
    0x0001, 0x0003, 0x0007, 0x000f, 0x001f, 0x003f, 0x007f, 0x00ff,
    0x01ff, 0x03ff, 0x07ff, 0x0fff, 0x1fff, 0x3fff, 0x7fff, 0xffff
};
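
/*
 * Editorial note, not part of the original source: inflate_mask[n] is
 * just (1 << n) - 1 in tabulated form, so the decoders can peek the
 * low n bits of the bit buffer without computing a shift. Equivalent
 * helper, for illustration only:
 */
#ifdef ZLIB_EXAMPLE_NOTES
/* peek the low n bits (n <= 16) of bit buffer b, as the decoders do */
static uInt
ex_peek_bits(uLong b, uInt n)
{
    return ((uInt)b & inflate_mask[n]); /* == (uInt)b & ((1 << n) - 1) */
}
#endif /* ZLIB_EXAMPLE_NOTES */
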
/* copy as much as possible from the sliding window to the output area */
int
inflate_flush(s, z, r)
inflate_blocks_statef *s;
z_streamp z;
int r;
{
    uInt n;
    Bytef *p;
    Bytef *q;

    /* local copies of source and destination pointers */
    p = z->next_out;
    q = s->read;

    /* compute number of bytes to copy as far as end of window */
    n = (uInt)((q <= s->write ? s->write : s->end) - q);
    if (n > z->avail_out) n = z->avail_out;
    if (n && r == Z_BUF_ERROR) r = Z_OK;

    /* update counters */
    z->avail_out -= n;
    z->total_out += n;

    /* update check information */
    if (s->checkfn != Z_NULL)
        z->adler = s->check = (*s->checkfn)(s->check, q, n);

    /* copy as far as end of window */
    if (p != Z_NULL) {  /* PPP */
        zmemcpy(p, q, n);
        p += n;
    }                   /* PPP */
    q += n;

    /* see if more to copy at beginning of window */
    if (q == s->end)
    {
        /* wrap pointers */
        q = s->window;
        if (s->write == s->end)
            s->write = s->window;

        /* compute bytes to copy */
        n = (uInt)(s->write - q);
        if (n > z->avail_out) n = z->avail_out;
        if (n && r == Z_BUF_ERROR) r = Z_OK;

        /* update counters */
        z->avail_out -= n;
        z->total_out += n;

        /* update check information */
        if (s->checkfn != Z_NULL)
            z->adler = s->check = (*s->checkfn)(s->check, q, n);

        /* copy */
        if (p != Z_NULL) {  /* PPP */
            zmemcpy(p, q, n);
            p += n;
        }                   /* PPP */
        q += n;
    }

    /* update pointers */
    z->next_out = p;
    s->read = q;

    /* done */
    return (r);
}
/* --- infutil.c */

/* +++ inffast.c */
/*
 * inffast.c -- process literals and length/distance pairs fast
 * Copyright (C) 1995-1998 Mark Adler
 * For conditions of distribution and use, see copyright notice in zlib.h
 */

/* #include "zutil.h" */
/* #include "inftrees.h" */
/* #include "infblock.h" */
/* #include "infcodes.h" */
/* #include "infutil.h" */
/* #include "inffast.h" */

#ifndef NO_DUMMY_DECL
struct inflate_codes_state {int dummy; };   /* for buggy compilers */
#endif

/* simplify the use of the inflate_huft type with some defines */
#define exop word.what.Exop
#define bits word.what.Bits

/* macros for bit input with no checking and for returning unused bytes */
#define GRABBITS(j) { while (k < (j)) {b |= ((uLong)NEXTBYTE)<<k; k += 8; }}
#define UNGRAB {c = z->avail_in-n; c = (k>>3) < c?k>>3:c; n += c; p -= c; \
    k -= c<<3; }
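
/*
 * Editorial note, not part of the original source: UNGRAB gives whole
 * unused bytes in the bit buffer back to the input stream before
 * returning -- k >> 3 is the number of complete buffered bytes, capped
 * at the bytes actually consumed this call (z->avail_in - n). The
 * macro's effect, written out as a function with hypothetical names:
 */
#ifdef ZLIB_EXAMPLE_NOTES
static void
ex_ungrab(uInt avail_in, uInt *n, Bytef **p, uInt *k)
{
    uInt c = avail_in - *n;     /* bytes consumed so far */

    if ((*k >> 3) < c)
        c = *k >> 3;            /* no more than the buffer holds */
    *n += c;                    /* put them back in the count */
    *p -= c;                    /* rewind the input pointer */
    *k -= c << 3;               /* drop them from the bit buffer */
}
#endif /* ZLIB_EXAMPLE_NOTES */
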
/*
 * Called with number of bytes left to write in window at least 258
 * (the maximum string length) and number of input bytes available at
 * least ten. The ten bytes are six bytes for the longest length/
 * distance pair plus four bytes for overloading the bit buffer.
 */

int
inflate_fast(bl, bd, tl, td, s, z)
uInt bl, bd;
const inflate_huft *tl;
const inflate_huft *td; /* need separate declaration for Borland C++ */
inflate_blocks_statef *s;
z_streamp z;
{
    const inflate_huft *t;  /* temporary pointer */
    uInt e;                 /* extra bits or operation */
    uLong b;                /* bit buffer */
    uInt k;                 /* bits in bit buffer */
    Bytef *p;               /* input data pointer */
    uInt n;                 /* bytes available there */
    Bytef *q;               /* output window write pointer */
    uInt m;                 /* bytes to end of window or read pointer */
    uInt ml;                /* mask for literal/length tree */
    uInt md;                /* mask for distance tree */
    uInt c;                 /* bytes to copy */
    uInt d;                 /* distance back to copy from */
    Bytef *r;               /* copy source pointer */

    /* load input, output, bit values */
    LOAD;

    /* initialize masks */
    ml = inflate_mask[bl];
    md = inflate_mask[bd];

    /* do until not enough input or output space for fast loop */
    do {    /* assume called with m >= 258 && n >= 10 */
        /* get literal/length code */
        /* max bits for literal/length code */
        GRABBITS(20);
        if ((e = (t = tl + ((uInt)b & ml))->exop) == 0) {
            DUMPBITS(t->bits);
            Tracevv((stderr, t->base >= 0x20 && t->base < 0x7f ?
                "inflate: * literal '%c'\n" :
                "inflate: * literal 0x%02x\n", t->base));
            *q++ = (Byte)t->base;
            m--;
            continue;
        }
        do {
            DUMPBITS(t->bits);
            if (e & 16) {
                /* get extra bits for length */
                e &= 15;
                c = t->base + ((uInt)b & inflate_mask[e]);
                DUMPBITS(e);
                Tracevv((stderr,
                    "inflate: * length %u\n", c));

                /* decode distance base of block to copy */
                GRABBITS(15);   /* max bits for distance code */
                e = (t = td + ((uInt)b & md))->exop;
                do {
                    DUMPBITS(t->bits);
                    if (e & 16) {
                        /*
                         * get extra bits to add
                         * to distance base
                         */
                        e &= 15;
                        /* get extra bits (up to 13) */
                        GRABBITS(e);
                        d = t->base + ((uInt)b &
                            inflate_mask[e]);
                        DUMPBITS(e);
                        Tracevv((stderr,
                            "inflate: * "
                            "distance %u\n", d));

                        /* do the copy */
                        m -= c;
                        /* offset before dest */
                        if ((uInt)(q - s->window) >= d)
                        /* just copy */
                        {
                            r = q - d;
                            /*
                             * minimum count is
                             * three, so unroll
                             * loop a little
                             */
                            *q++ = *r++; c--;
                            *q++ = *r++; c--;
                        }
                        /* else offset after destination */
                        else {
                            /* bytes from offset to end */
                            e = d - (uInt)(q -
                                s->window);
                            /* pointer to offset */
                            r = s->end - e;
                            /* if source crosses */
                            if (c > e) {
                                /* copy to end of window */
                                c -= e;
                                do {
                                    *q++ = *r++;
                                } while (--e);
                                /* copy rest from start of window */
                                r = s->window;
                            }
                        }
                        /* copy all or what's left */
                        do {
                            *q++ = *r++;
                        } while (--c);
                        break;
                    } else if ((e & 64) == 0) {
                        t += t->base;
                        e = (t += ((uInt)b &
                            inflate_mask[e]))->exop;
                    } else {
                        z->msg =
                            "invalid distance code";
                        UNGRAB;
                        UPDATE;
                        return (Z_DATA_ERROR);
                    }
                    /* CONSTCOND */
                } while (1);
                break;
            }
            if ((e & 64) == 0)
            {
                t += t->base;
                if ((e = (t += ((uInt)b &
                    inflate_mask[e]))->exop) == 0)
                {
                    DUMPBITS(t->bits);
                    Tracevv((stderr, t->base >= 0x20 &&
                        t->base < 0x7f ?
                        "inflate: * literal '%c'\n" :
                        "inflate: * literal "
                        "0x%02x\n", t->base));
                    *q++ = (Byte)t->base;
                    m--;
                    break;
                }
            } else if (e & 32) {
                Tracevv((stderr,
                    "inflate: * end of block\n"));
                UNGRAB;
                UPDATE;
                return (Z_STREAM_END);
            } else {
                z->msg = "invalid literal/length code";
                UNGRAB;
                UPDATE;
                return (Z_DATA_ERROR);
            }
            /* CONSTCOND */
        } while (1);
    } while (m >= 258 && n >= 10);

    /* not enough input or output--restore pointers and return */
    UNGRAB;
    UPDATE;
    return (Z_OK);
}
/* --- inffast.c */
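
/*
 * Editorial note, not part of the original source: the "258 and 10"
 * loop guards above come from DEFLATE limits -- a match can emit at
 * most 258 bytes, and a worst-case length/distance pair costs 15 bits
 * (length code) + 5 (length extra) + 15 (distance code) + 13 (distance
 * extra) = 48 bits = 6 bytes, while the bit buffer may hold up to 4
 * more bytes it has not consumed yet. Spelled out:
 */
#ifdef ZLIB_EXAMPLE_NOTES
enum {
    EX_MAX_PAIR_BITS = 15 + 5 + 15 + 13,        /* == 48 */
    EX_MAX_PAIR_BYTES = EX_MAX_PAIR_BITS / 8,   /* == 6 */
    EX_MIN_INPUT = EX_MAX_PAIR_BYTES + 4        /* == 10 */
};
#endif /* ZLIB_EXAMPLE_NOTES */
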
/* +++ zutil.c */
/*
 * zutil.c -- target dependent utility functions for the compression library
 * Copyright (C) 1995-1998 Jean-loup Gailly.
 * For conditions of distribution and use, see copyright notice in zlib.h
 */

/* From: zutil.c,v 1.17 1996/07/24 13:41:12 me Exp $ */

#ifdef DEBUG_ZLIB
#include <stdio.h>
#endif

/* #include "zutil.h" */

#ifndef NO_DUMMY_DECL
struct internal_state {int dummy; };    /* for buggy compilers */
#endif

#ifndef STDC
extern void exit OF((int));
#endif

static const char *z_errmsg[10] = {
    "need dictionary",      /* Z_NEED_DICT      2  */
    "stream end",           /* Z_STREAM_END     1  */
    "",                     /* Z_OK             0  */
    "file error",           /* Z_ERRNO        (-1) */
    "stream error",         /* Z_STREAM_ERROR (-2) */
    "data error",           /* Z_DATA_ERROR   (-3) */
    "insufficient memory",  /* Z_MEM_ERROR    (-4) */
    "buffer error",         /* Z_BUF_ERROR    (-5) */
    "incompatible version", /* Z_VERSION_ERROR (-6) */
    ""};


const char *
zlibVersion()
{
    return (ZLIB_VERSION);
}
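
/*
 * Editorial note, not part of the original source: callers normally
 * compare the library's run-time version against the ZLIB_VERSION they
 * were compiled with; the inflateInit()/deflateInit() macros in zlib.h
 * pass ZLIB_VERSION for exactly this check. A direct sketch:
 */
#ifdef ZLIB_EXAMPLE_NOTES
static int
ex_version_ok(void)
{
    /* matching the major version digit is what zlib itself requires */
    return (zlibVersion()[0] == ZLIB_VERSION[0]);
}
#endif /* ZLIB_EXAMPLE_NOTES */
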
#ifdef DEBUG_ZLIB
void
z_error(m)
char *m;
{
    fprintf(stderr, "%s\n", m);
    exit(1);
}
#endif

#ifndef HAVE_MEMCPY

void
zmemcpy(dest, source, len)
Bytef* dest;
const Bytef* source;
uInt len;
{
    if (len == 0)
        return;
    do {
        *dest++ = *source++;    /* ??? to be unrolled */
    } while (--len != 0);
}

int
zmemcmp(s1, s2, len)
const Bytef* s1;
const Bytef* s2;
uInt len;
{
    uInt j;

    for (j = 0; j < len; j++) {
        if (s1[j] != s2[j])
            return (2*(s1[j] > s2[j])-1);
    }
    return (0);
}

void
zmemzero(dest, len)
Bytef* dest;
uInt len;
{
    if (len == 0)
        return;
    do {
        *dest++ = 0;    /* ??? to be unrolled */
    } while (--len != 0);
}
#endif

#ifdef __TURBOC__
#if (defined(__BORLANDC__) || !defined(SMALL_MEDIUM)) && !defined(__32BIT__)
/*
 * Small and medium model in Turbo C are for now limited to near
 * allocation with reduced MAX_WBITS and MAX_MEM_LEVEL
 */
#define MY_ZCALLOC

/*
 * Turbo C malloc() does not allow dynamic allocation of 64K bytes and
 * farmalloc(64K) returns a pointer with an offset of 8, so we must
 * fix the pointer. Warning: the pointer must be put back to its
 * original form in order to free it, use zcfree().
 */

#define MAX_PTR 10
/* 10*64K = 640K */

local int next_ptr = 0;

typedef struct ptr_table_s {
    voidpf org_ptr;
    voidpf new_ptr;
} ptr_table;

local ptr_table table[MAX_PTR];
/*
 * This table is used to remember the original form of pointers to
 * large buffers (64K). Such pointers are normalized with a zero
 * offset. Since MSDOS is not a preemptive multitasking OS, this
 * table is not protected from concurrent access. This hack doesn't
 * work anyway on a protected system like OS/2. Use Microsoft C
 * instead.
 */

voidpf
zcalloc(voidpf opaque, unsigned items, unsigned size)
{
    voidpf buf = opaque;    /* just to make some compilers happy */
    ulg bsize = (ulg)items*size;

    /*
     * If we allocate less than 65520 bytes, we assume that
     * farmalloc will return a usable pointer which doesn't have
     * to be normalized.
     */
    if (bsize < 65520L) {
        buf = farmalloc(bsize);
        if (*(ush *)&buf != 0)
            return (buf);
    } else {
        buf = farmalloc(bsize + 16L);
    }
    if (buf == NULL || next_ptr >= MAX_PTR)
        return (NULL);
    table[next_ptr].org_ptr = buf;

    /* Normalize the pointer to seg:0 */
    *((ush *)&buf+1) += ((ush)((uch *)buf-0) + 15) >> 4;
    *(ush *)&buf = 0;
    table[next_ptr++].new_ptr = buf;
    return (buf);
}

void
zcfree(voidpf opaque, voidpf ptr)
{
    int n;
    if (*(ush*)&ptr != 0) { /* object < 64K */
        farfree(ptr);
        return;
    }
    /* Find the original pointer */
    for (n = 0; n < next_ptr; n++) {
        if (ptr != table[n].new_ptr)
            continue;

        farfree(table[n].org_ptr);
        while (++n < next_ptr) {
            table[n-1] = table[n];
        }
        next_ptr--;
        return;
    }
    ptr = opaque;   /* just to make some compilers happy */
    Assert(0, "zcfree: ptr not found");
}
#endif
#endif /* __TURBOC__ */


#if defined(M_I86) && !defined(__32BIT__)
/* Microsoft C in 16-bit mode */

#define MY_ZCALLOC

#if (!defined(_MSC_VER) || (_MSC_VER <= 600))
#define _halloc halloc
#define _hfree hfree
#endif

voidpf
zcalloc(voidpf opaque, unsigned items, unsigned size)
{
    if (opaque) opaque = 0; /* to make compiler happy */
    return (_halloc((long)items, size));
}

void
zcfree(voidpf opaque, voidpf ptr)
{
    if (opaque) opaque = 0; /* to make compiler happy */
    _hfree(ptr);
}

#endif /* MSC */


#ifndef MY_ZCALLOC /* Any system without a special alloc function */

#ifndef STDC
extern voidp calloc OF((uInt items, uInt size));
extern void free OF((voidpf ptr));
#endif

voidpf
zcalloc(opaque, items, size)
voidpf opaque;
unsigned items;
unsigned size;
{
    if (opaque) items += size - size;   /* make compiler happy */
    return ((voidpf)calloc(items, size));
}

/*ARGSUSED*/
void
zcfree(opaque, ptr)
voidpf opaque;
voidpf ptr;
{
    free(ptr);
}

#endif /* MY_ZCALLOC */
/* --- zutil.c */

/* +++ adler32.c */
/*
 * adler32.c -- compute the Adler-32 checksum of a data stream
 * Copyright (C) 1995-1998 Mark Adler
 * For conditions of distribution and use, see copyright notice in zlib.h
 */

/* From: adler32.c,v 1.10 1996/05/22 11:52:18 me Exp $ */

/* #include "zlib.h" */

#define BASE 65521L /* largest prime smaller than 65536 */
#define NMAX 5552
/* NMAX is the largest n such that 255n(n+1)/2 + (n+1)(BASE-1) <= 2^32-1 */
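
/*
 * Editorial note, not part of the original source, verifying the NMAX
 * bound above: starting from the worst case s1 = s2 = BASE-1 and n
 * bytes of 0xff, s2 grows to at most 255*n*(n+1)/2 + (n+1)*(BASE-1).
 * For n = 5552 that is 4,294,690,200 <= 2^32-1, while n = 5553 already
 * exceeds it, so 5552 is the largest safe run between modulo
 * reductions. (Evaluate in 64-bit arithmetic when probing n > NMAX.)
 */
#ifdef ZLIB_EXAMPLE_NOTES
static unsigned long
ex_adler_worst_growth(unsigned long n)
{
    /* n*(n+1) is even, so divide before multiplying by 255 */
    return (255UL * (n * (n + 1) / 2) + (n + 1) * (BASE - 1));
}
#endif /* ZLIB_EXAMPLE_NOTES */
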
#define DO1(buf, i)  {s1 += buf[i]; s2 += s1; }
#define DO2(buf, i)  DO1(buf, i); DO1(buf, i+1);
#define DO4(buf, i)  DO2(buf, i); DO2(buf, i+2);
#define DO8(buf, i)  DO4(buf, i); DO4(buf, i+4);
#define DO16(buf)    DO8(buf, 0); DO8(buf, 8);

/* ========================================================================= */
uLong
adler32(adler, buf, len)
uLong adler;
const Bytef *buf;
uInt len;
{
    unsigned long s1 = adler & 0xffff;
    unsigned long s2 = (adler >> 16) & 0xffff;
    int k;

    if (buf == Z_NULL)
        return (1L);

    while (len > 0) {
        k = len < NMAX ? len : NMAX;
        len -= k;
        while (k >= 16) {
            DO16(buf);
            buf += 16;
            k -= 16;
        }
        if (k != 0) do {
            s1 += *buf++;
            s2 += s1;
        } while (--k);
        s1 %= BASE;
        s2 %= BASE;
    }
    return ((s2 << 16) | s1);
}
/* --- adler32.c */
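
/*
 * Editorial note, not part of the original source: the conventional
 * way to use the function above incrementally -- seed with
 * adler32(0L, Z_NULL, 0), which returns the initial value 1, then
 * fold in each successive chunk of data:
 */
#ifdef ZLIB_EXAMPLE_NOTES
static uLong
ex_checksum(const Bytef *buf, uInt len)
{
    uLong adler = adler32(0L, Z_NULL, 0);   /* initial value 1 */

    adler = adler32(adler, buf, len);       /* repeat per chunk */
    return (adler);
}
#endif /* ZLIB_EXAMPLE_NOTES */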