/* * Copyright 2004 Sun Microsystems, Inc. All rights reserved. * Use is subject to license terms. */ /* * Updated from zlib-1.0.4 to zlib-1.1.3 by James Carlson. * * This file is derived from various .h and .c files from the zlib-1.0.4 * distribution by Jean-loup Gailly and Mark Adler, with some additions * by Paul Mackerras to aid in implementing Deflate compression and * decompression for PPP packets. See zlib.h for conditions of * distribution and use. * * Changes that have been made include: * - added Z_PACKET_FLUSH (see zlib.h for details) * - added inflateIncomp and deflateOutputPending * - allow strm->next_out to be NULL, meaning discard the output * * $Id: zlib.c,v 1.11 1998/09/13 23:37:12 paulus Exp $ */ #pragma ident "%Z%%M% %I% %E% SMI" /* * ==FILEVERSION 971210== * * This marker is used by the Linux installation script to determine * whether an up-to-date version of this file is already installed. */ #define NO_DUMMY_DECL #define NO_ZCFUNCS #define MY_ZCALLOC #if defined(__FreeBSD__) && (defined(KERNEL) || defined(_KERNEL)) #define inflate inflate_ppp /* FreeBSD already has an inflate :-( */ #endif /* +++ zutil.h */ /* * * zutil.h -- internal interface and configuration of the compression library * Copyright (C) 1995-1998 Jean-loup Gailly. * For conditions of distribution and use, see copyright notice in zlib.h */ /* * WARNING: this file should *not* be used by applications. It is part * of the implementation of the compression library and is subject to * change. Applications should only use zlib.h. 
*/ /* From: zutil.h,v 1.16 1996/07/24 13:41:13 me Exp $ */ #ifndef _Z_UTIL_H #define _Z_UTIL_H #include "zlib.h" #if defined(KERNEL) || defined(_KERNEL) /* Assume this is a *BSD or SVR4 kernel */ #include #include #include #ifdef SOL2 #include #endif #undef u #define HAVE_MEMCPY #define memcmp bcmp #else #if defined(__KERNEL__) /* Assume this is a Linux kernel */ #include #define HAVE_MEMCPY #else /* not kernel */ #include #ifdef NO_ERRNO_H extern int errno; #else #include #endif #ifdef STDC #include #include #endif #endif /* __KERNEL__ */ #endif /* _KERNEL || KERNEL */ #ifndef local #define local static #endif /* compile with -Dlocal if your debugger can't find static symbols */ typedef unsigned char uch; typedef uch FAR uchf; typedef unsigned short ush; typedef ush FAR ushf; typedef unsigned long ulg; extern const char *z_errmsg[10]; /* indexed by 2-zlib_error */ /* (size given to avoid silly warnings with Visual C++) */ #define ERR_MSG(err) z_errmsg[Z_NEED_DICT-(err)] #define ERR_RETURN(strm, err) \ return (strm->msg = ERR_MSG(err), (err)) /* To be used only when the state is known to be valid */ /* common constants */ #ifndef DEF_WBITS #define DEF_WBITS MAX_WBITS #endif /* default windowBits for decompression. 
MAX_WBITS is for compression only */ #if MAX_MEM_LEVEL >= 8 #define DEF_MEM_LEVEL 8 #else #define DEF_MEM_LEVEL MAX_MEM_LEVEL #endif /* default memLevel */ #define STORED_BLOCK 0 #define STATIC_TREES 1 #define DYN_TREES 2 /* The three kinds of block type */ #define MIN_MATCH 3 #define MAX_MATCH 258 /* The minimum and maximum match lengths */ #define PRESET_DICT 0x20 /* preset dictionary flag in zlib header */ /* target dependencies */ #ifdef MSDOS #define OS_CODE 0x00 #ifdef __TURBOC__ #include #else /* MSC or DJGPP */ #include #endif #endif #ifdef OS2 #define OS_CODE 0x06 #endif #ifdef WIN32 /* Window 95 & Windows NT */ #define OS_CODE 0x0b #endif #if defined(VAXC) || defined(VMS) #define OS_CODE 0x02 #define F_OPEN(name, mode) \ fopen((name), (mode), "mbc=60", "ctx=stm", "rfm=fix", "mrs=512") #endif #ifdef AMIGA #define OS_CODE 0x01 #endif #if defined(ATARI) || defined(atarist) #define OS_CODE 0x05 #endif #ifdef MACOS #define OS_CODE 0x07 #endif #ifdef __50SERIES /* Prime/PRIMOS */ #define OS_CODE 0x0F #endif #ifdef TOPS20 #define OS_CODE 0x0a #endif #if defined(_BEOS_) || defined(RISCOS) #define fdopen(fd, mode) NULL /* No fdopen() */ #endif /* Common defaults */ #ifndef OS_CODE #define OS_CODE 0x03 /* assume Unix */ #endif #ifndef F_OPEN #define F_OPEN(name, mode) fopen((name), (mode)) #endif /* functions */ #ifdef HAVE_STRERROR extern char *strerror OF((int)); #define zstrerror(errnum) strerror(errnum) #else #define zstrerror(errnum) "" #endif #if defined(pyr) #define NO_MEMCPY #endif #if (defined(M_I86SM) || defined(M_I86MM)) && !defined(_MSC_VER) /* * Use our own functions for small and medium model with MSC <= 5.0. * You may have to use the same strategy for Borland C (untested). 
*/ #define NO_MEMCPY #endif #if defined(STDC) && !defined(HAVE_MEMCPY) && !defined(NO_MEMCPY) #define HAVE_MEMCPY #endif #ifdef HAVE_MEMCPY #ifdef SMALL_MEDIUM /* MSDOS small or medium model */ #define zmemcpy _fmemcpy #define zmemcmp _fmemcmp #define zmemzero(dest, len) _fmemset(dest, 0, len) #else #define zmemcpy (void) memcpy #define zmemcmp memcmp #define zmemzero(dest, len) (void) memset(dest, 0, len) #endif #else extern void zmemcpy OF((Bytef* dest, const Bytef* source, uInt len)); extern int zmemcmp OF((const Bytef* s1, const Bytef* s2, uInt len)); extern void zmemzero OF((Bytef* dest, uInt len)); #endif /* Diagnostic functions */ #ifdef DEBUG_ZLIB #include #ifndef verbose #define verbose 0 #endif extern void z_error OF((char *m)); #define Assert(cond, msg) { if (!(cond)) z_error(msg); } #define Trace(x) {if (z_verbose >= 0) fprintf x; } #define Tracev(x) {if (z_verbose > 0) fprintf x; } #define Tracevv(x) {if (z_verbose > 1) fprintf x; } #define Tracec(c, x) {if (z_verbose > 0 && (c)) fprintf x; } #define Tracecv(c, x) {if (z_verbose > 1 && (c)) fprintf x; } #else #if defined(SOL2) && defined(DEBUG) #define Assert(cond, msg) ((cond) ? 
((void)0) : panic(msg)) #else #define Assert(cond, msg) ((void)0) #endif #define Trace(x) ((void)0) #define Tracev(x) ((void)0) #define Tracevv(x) ((void)0) #define Tracec(c, x) ((void)0) #define Tracecv(c, x) ((void)0) #endif typedef uLong (*check_func) OF((uLong check, const Bytef *buf, uInt len)); /* voidpf zcalloc OF((voidpf opaque, unsigned items, unsigned size)); */ /* void zcfree OF((voidpf opaque, voidpf ptr)); */ #define ZALLOC(strm, items, size) \ (*((strm)->zalloc))((strm)->opaque, (items), (size)) #define ZFREE(strm, addr) (*((strm)->zfree))((strm)->opaque, (voidpf)(addr)) #define TRY_FREE(s, p) {if (p) ZFREE(s, p); } #endif /* _Z_UTIL_H */ /* --- zutil.h */ /* +++ deflate.h */ /* * deflate.h -- internal compression state * Copyright (C) 1995-1998 Jean-loup Gailly * For conditions of distribution and use, see copyright notice in zlib.h */ /* * WARNING: this file should *not* be used by applications. It is part * of the implementation of the compression library and is subject to * change. Applications should only use zlib.h. */ /* From: deflate.h,v 1.10 1996/07/02 12:41:00 me Exp $ */ #ifndef _DEFLATE_H #define _DEFLATE_H /* #include "zutil.h" */ /* * =========================================================================== * Internal compression state. */ #define LENGTH_CODES 29 /* number of length codes, not counting the special END_BLOCK code */ #define LITERALS 256 /* number of literal bytes 0..255 */ #define L_CODES (LITERALS+1+LENGTH_CODES) /* number of Literal or Length codes, including the END_BLOCK code */ #define D_CODES 30 /* number of distance codes */ #define BL_CODES 19 /* number of codes used to transfer the bit lengths */ #define HEAP_SIZE (2*L_CODES+1) /* maximum heap size */ #define MAX_BITS 15 /* All codes must not exceed MAX_BITS bits */ #define INIT_STATE 42 #define BUSY_STATE 113 #define FINISH_STATE 666 /* Stream status */ /* Data structure describing a single value and its code string. 
 */
typedef struct ct_data_s {
	union {
		ush freq;	/* frequency count */
		ush code;	/* bit string */
	} fc;
	union {
		ush dad;	/* father node in Huffman tree */
		ush len;	/* length of bit string */
	} dl;
} FAR ct_data;

/* Shorthand accessors for the unions above. */
#define	Freq	fc.freq
#define	Code	fc.code
#define	Dad	dl.dad
#define	Len	dl.len

typedef struct static_tree_desc_s static_tree_desc;

typedef struct tree_desc_s {
	ct_data *dyn_tree;		/* the dynamic tree */
	int max_code;			/* largest code with non zero frequency */
	static_tree_desc *stat_desc;	/* the corresponding static tree */
} FAR tree_desc;

typedef ush Pos;
typedef Pos FAR Posf;
typedef unsigned IPos;
/*
 * A Pos is an index in the character window. We use short instead of
 * int to save space in the various tables. IPos is used only for
 * parameter passing.
 */

typedef struct deflate_state {
	z_streamp strm;		/* pointer back to this zlib stream */
	int status;		/* as the name implies */
	Bytef *pending_buf;	/* output still pending */
	ulg pending_buf_size;	/* size of pending_buf */
	Bytef *pending_out;	/* next pending byte to output to the stream */
	int pending;		/* nb of bytes in the pending buffer */
	int noheader;		/* suppress zlib header and adler32 */
	Byte data_type;		/* UNKNOWN, BINARY or ASCII */
	Byte method;		/* STORED (for zip only) or DEFLATED */
	/* value of flush param for previous deflate call */
	int last_flush;

	/* used by deflate.c: */

	uInt w_size;		/* LZ77 window size (32K by default) */
	uInt w_bits;		/* log2(w_size) (8..16) */
	uInt w_mask;		/* w_size - 1 */

	Bytef *window;
	/*
	 * Sliding window. Input bytes are read into the second half
	 * of the window, and move to the first half later to keep a
	 * dictionary of at least wSize bytes. With this organization,
	 * matches are limited to a distance of wSize-MAX_MATCH bytes,
	 * but this ensures that IO is always performed with a length
	 * multiple of the block size. Also, it limits the window size
	 * to 64K, which is quite useful on MSDOS. To do: use the
	 * user input buffer as sliding window.
	 */

	ulg window_size;
	/*
	 * Actual size of window: 2*wSize, except when the user input
	 * buffer is directly used as sliding window.
	 */

	Posf *prev;
	/*
	 * Link to older string with same hash index. To limit the
	 * size of this array to 64K, this link is maintained only for
	 * the last 32K strings. An index in this array is thus a
	 * window index modulo 32K.
	 */

	Posf *head;		/* Heads of the hash chains or NIL. */

	uInt ins_h;		/* hash index of string to be inserted */
	uInt hash_size;		/* number of elements in hash table */
	uInt hash_bits;		/* log2(hash_size) */
	uInt hash_mask;		/* hash_size-1 */

	uInt hash_shift;
	/*
	 * Number of bits by which ins_h must be shifted at each input
	 * step. It must be such that after MIN_MATCH steps, the
	 * oldest byte no longer takes part in the hash key, that is:
	 * hash_shift * MIN_MATCH >= hash_bits
	 */

	long block_start;
	/*
	 * Window position at the beginning of the current output
	 * block. Gets negative when the window is moved backwards.
	 */

	uInt match_length;	/* length of best match */
	IPos prev_match;	/* previous match */
	int match_available;	/* set if previous match exists */
	uInt strstart;		/* start of string to insert */
	uInt match_start;	/* start of matching string */
	uInt lookahead;		/* number of valid bytes ahead in window */

	uInt prev_length;
	/*
	 * Length of the best match at previous step. Matches not
	 * greater than this are discarded. This is used in the lazy
	 * match evaluation.
	 */

	uInt max_chain_length;
	/*
	 * To speed up deflation, hash chains are never searched
	 * beyond *this length. A higher limit improves compression
	 * ratio but *degrades the speed.
	 */

	uInt max_lazy_match;
	/*
	 * Attempt to find a better match only when the current match
	 * is strictly smaller than this value. This mechanism is used
	 * only for compression levels >= 4.
	 */
#define	max_insert_length	max_lazy_match
	/*
	 * Insert new strings in the hash table only if the match
	 * length is not greater than this length. This saves time but
	 * degrades compression. max_insert_length is used only for
	 * compression levels <= 3.
	 */

	int level;		/* compression level (1..9) */
	int strategy;		/* favor or force Huffman coding */

	uInt good_match;
	/* Use a faster search when the previous match is longer than this */

	int nice_match;		/* Stop searching when current match exceeds this */

	/* used by trees.c: */

	/* Didn't use ct_data typedef below to suppress compiler warning */
	struct ct_data_s dyn_ltree[HEAP_SIZE];	/* literal and length tree */
	struct ct_data_s dyn_dtree[2*D_CODES+1]; /* distance tree */
	/* Huffman tree for bit lengths */
	struct ct_data_s bl_tree[2*BL_CODES+1];

	struct tree_desc_s l_desc;	/* desc. for literal tree */
	struct tree_desc_s d_desc;	/* desc. for distance tree */
	struct tree_desc_s bl_desc;	/* desc. for bit length tree */

	ush bl_count[MAX_BITS+1];
	/* number of codes at each bit length for an optimal tree */

	int heap[2*L_CODES+1];	/* heap used to build the Huffman trees */
	int heap_len;		/* number of elements in the heap */
	int heap_max;		/* element of largest frequency */
	/*
	 * The sons of heap[n] are heap[2*n] and heap[2*n+1]. heap[0]
	 * is not used. The same heap array is used to build all
	 * trees.
	 */

	uch depth[2*L_CODES+1];
	/*
	 * Depth of each subtree used as tie breaker for trees of
	 * equal frequency
	 */

	uchf *l_buf;		/* buffer for literals or lengths */

	uInt lit_bufsize;
	/*
	 * Size of match buffer for literals/lengths. There are 4
	 * reasons for limiting lit_bufsize to 64K:
	 *
	 * - frequencies can be kept in 16 bit counters
	 *
	 * - if compression is not successful for the first block,
	 *   all input data is still in the window so we can still
	 *   emit a stored block even when input comes from standard
	 *   input. (This can also be done for all blocks if
	 *   lit_bufsize is not greater than 32K.)
	 *
	 * - if compression is not successful for a file smaller
	 *   than 64K, we can even emit a stored file instead of a
	 *   stored block (saving 5 bytes). This is applicable only
	 *   for zip (not gzip or zlib).
	 *
	 * - creating new Huffman trees less frequently may not
	 *   provide fast adaptation to changes in the input data
	 *   statistics. (Take for example a binary file with poorly
	 *   compressible code followed by a highly compressible
	 *   string table.) Smaller buffer sizes give fast adaptation
	 *   but have of course the overhead of transmitting trees
	 *   more frequently.
	 *
	 * - I can't count above 4
	 */

	uInt last_lit;		/* running index in l_buf */

	ushf *d_buf;
	/*
	 * Buffer for distances. To simplify the code, d_buf and l_buf
	 * have the same number of elements. To use different lengths,
	 * an extra flag array would be necessary.
	 */

	ulg opt_len;	/* bit length of current block with optimal trees */
	ulg static_len;	/* bit length of current block with static trees */
	uInt matches;	/* number of string matches in current block */
	int last_eob_len;	/* bit length of EOB code for last block */
	ulg compressed_len;	/* total bit length of compressed file PPP */
#ifdef DEBUG_ZLIB
	ulg bits_sent;	/* bit length of the compressed data */
#endif

	ush bi_buf;
	/*
	 * Output buffer. bits are inserted starting at the bottom
	 * (least significant bits).
	 */
	int bi_valid;
	/*
	 * Number of valid bits in bi_buf. All bits above the last
	 * valid bit are always zero.
	 */
} FAR deflate_state;

/*
 * Output a byte on the stream. IN assertion: there is enough room in
 * pending_buf.
 */
#define	put_byte(s, c) {s->pending_buf[s->pending++] = (c); }

#define	MIN_LOOKAHEAD	(MAX_MATCH+MIN_MATCH+1)
/*
 * Minimum amount of lookahead, except at the end of the input file.
 * See deflate.c for comments about the MIN_MATCH+1.
 */

#define	MAX_DIST(s)	((s)->w_size-MIN_LOOKAHEAD)
/*
 * In order to simplify the code, particularly on 16 bit machines,
 * match distances are limited to MAX_DIST instead of WSIZE.
 */

/* in trees.c -- entry points the deflate side calls into the tree coder */
void _tr_init OF((deflate_state *s));
int _tr_tally OF((deflate_state *s, unsigned dist, unsigned lc));
void _tr_flush_block OF((deflate_state *s, charf *buf, ulg stored_len,
    int eof));
void _tr_align OF((deflate_state *s));
void _tr_stored_block OF((deflate_state *s, charf *buf, ulg stored_len,
    int eof));
void _tr_stored_type_only OF((deflate_state *));	/* PPP */

#define	d_code(dist) \
	((dist) < 256 ? _dist_code[dist] : _dist_code[256+((dist)>>7)])
/*
 * Mapping from a distance to a distance code. dist is the distance - 1 and
 * must not have side effects. _dist_code[256] and _dist_code[257] are never
 * used.
 */

#ifndef DEBUG_ZLIB
/* Inline versions of _tr_tally for speed: */
/* (arrays are completed later in this amalgamated file's trees.c section) */
local uch _length_code[];
local uch _dist_code[];

/* Record a literal byte and report whether the match buffer is full. */
#define	_tr_tally_lit(s, c, flush) \
{	uch cc = (c); \
	s->d_buf[s->last_lit] = 0; \
	s->l_buf[s->last_lit++] = cc; \
	s->dyn_ltree[cc].Freq++; \
	flush = (s->last_lit == s->lit_bufsize-1); \
}
/* Record a (distance, length) pair and report whether the buffer is full. */
#define	_tr_tally_dist(s, distance, length, flush) \
{	uch len = (length); \
	ush dist = (distance); \
	s->d_buf[s->last_lit] = dist; \
	s->l_buf[s->last_lit++] = len; \
	dist--; \
	s->dyn_ltree[_length_code[len]+LITERALS+1].Freq++; \
	s->dyn_dtree[d_code(dist)].Freq++; \
	flush = (s->last_lit == s->lit_bufsize-1); \
}
#else
#define	_tr_tally_lit(s, c, flush)	flush = _tr_tally(s, 0, c)
#define	_tr_tally_dist(s, distance, length, flush) \
	flush = _tr_tally(s, distance, length)
#endif

#endif
/* --- deflate.h */

/* +++ deflate.c */
/*
 * deflate.c -- compress data using the deflation algorithm
 * Copyright (C) 1995-1998 Jean-loup Gailly.
 * For conditions of distribution and use, see copyright notice in zlib.h
 */

/*
 * ALGORITHM
 *
 * The "deflation" process depends on being able to identify portions
 * of the input text which are identical to earlier input (within a
 * sliding window trailing behind the input currently being processed).
 *
 * The most straightforward technique turns out to be the fastest for
 * most input files: try all possible matches and select the longest.
 * The key feature of this algorithm is that insertions into the string
 * dictionary are very simple and thus fast, and deletions are avoided
 * completely. Insertions are performed at each input character, whereas
 * string matches are performed only when the previous match ends. So it
 * is preferable to spend more time in matches to allow very fast string
 * insertions and avoid deletions. The matching algorithm for small
 * strings is inspired from that of Rabin & Karp. A brute force approach
 * is used to find longer strings when a small match has been found.
 * A similar algorithm is used in comic (by Jan-Mark Wams) and freeze
 * (by Leonid Broukhis).
 * A previous version of this file used a more sophisticated algorithm
 * (by Fiala and Greene) which is guaranteed to run in linear amortized
 * time, but has a larger average cost, uses more memory and is patented.
 * However the F&G algorithm may be faster for some highly redundant
 * files if the parameter max_chain_length (described below) is too large.
 *
 * ACKNOWLEDGEMENTS
 *
 * The idea of lazy evaluation of matches is due to Jan-Mark Wams, and
 * I found it in 'freeze' written by Leonid Broukhis.
 * Thanks to many people for bug reports and testing.
 *
 * REFERENCES
 *
 * Deutsch, L.P.,"DEFLATE Compressed Data Format Specification".
 * Available in ftp://ds.internic.net/rfc/rfc1951.txt
 *
 * A description of the Rabin and Karp algorithm is given in the book
 * "Algorithms" by R. Sedgewick, Addison-Wesley, p252.
 *
 * Fiala,E.R., and Greene,D.H.
 * Data Compression with Finite Windows, Comm.ACM, 32,4 (1989) 490-595
 *
 */

/* From: deflate.c,v 1.15 1996/07/24 13:40:58 me Exp $ */

/* #include "deflate.h" */

const char deflate_copyright[] =
	" deflate 1.1.3 Copyright 1995-1998 Jean-loup Gailly ";
/*
 * If you use the zlib library in a product, an acknowledgment is
 * welcome in the documentation of your product. If for some reason
 * you cannot include such an acknowledgment, I would appreciate that
 * you keep this copyright string in the executable of your product.
 */

/*
 * ===========================================================================
 * Function prototypes.
 */

typedef enum {
	/* block not completed, need more input or more output */
	need_more,
	block_done,	/* block flush performed */
	/* finish started, need only more output at next deflate */
	finish_started,
	finish_done	/* finish done, accept no more input or output */
} block_state;

typedef block_state (*compress_func) OF((deflate_state *s, int flush));
/* Compression function. Returns the block state after the call.
 */

/* Forward declarations for the static helpers defined later in this file. */
local void fill_window OF((deflate_state *s));
local block_state deflate_stored OF((deflate_state *s, int flush));
local block_state deflate_fast OF((deflate_state *s, int flush));
local block_state deflate_slow OF((deflate_state *s, int flush));
local void lm_init OF((deflate_state *s));
local void putShortMSB OF((deflate_state *s, uInt b));
local void flush_pending OF((z_streamp strm));
local int read_buf OF((z_streamp strm, Bytef *buf, unsigned size));
#ifdef ASMV
void match_init OF((void));	/* asm code initialization */
uInt longest_match OF((deflate_state *s, IPos cur_match));
#else
local uInt longest_match OF((deflate_state *s, IPos cur_match));
#endif

#ifdef DEBUG_ZLIB
local void check_match OF((deflate_state *s, IPos start, IPos match,
    int length));
#endif

/*
 * ===========================================================================
 * Local data
 */

#define	NIL	0
/* Tail of hash chains */

#ifndef TOO_FAR
#define	TOO_FAR	4096
#endif
/* Matches of length 3 are discarded if their distance exceeds TOO_FAR */

#define	MIN_LOOKAHEAD	(MAX_MATCH+MIN_MATCH+1)
/*
 * Minimum amount of lookahead, except at the end of the input file.
 * See deflate.c for comments about the MIN_MATCH+1.
 */

/*
 * Values for max_lazy_match, good_match and max_chain_length,
 * depending on the desired pack level (0..9). The values given below
 * have been tuned to exclude worst case performance for pathological
 * files. Better values may be found for specific files.
*/ typedef struct config_s { ush good_length; /* reduce lazy search above this match length */ ush max_lazy; /* do not perform lazy search above this match length */ ush nice_length; /* quit search above this match length */ ush max_chain; compress_func func; } config; local const config configuration_table[10] = { /* good lazy nice chain */ /* 0 */ {0, 0, 0, 0, deflate_stored}, /* store only */ /* 1 */ {4, 4, 8, 4, deflate_fast}, /* maximum speed, no lazy matches */ /* 2 */ {4, 5, 16, 8, deflate_fast}, /* 3 */ {4, 6, 32, 32, deflate_fast}, /* 4 */ {4, 4, 16, 16, deflate_slow}, /* lazy matches */ /* 5 */ {8, 16, 32, 32, deflate_slow}, /* 6 */ {8, 16, 128, 128, deflate_slow}, /* 7 */ {8, 32, 128, 256, deflate_slow}, /* 8 */ {32, 128, 258, 1024, deflate_slow}, /* 9 */ {32, 258, 258, 4096, deflate_slow}}; /* maximum compression */ /* * Note: the deflate() code requires max_lazy >= MIN_MATCH and max_chain >= 4 * For deflate_fast() (levels <= 3) good is ignored and lazy has a different * meaning. */ #define EQUAL 0 /* result of memcmp for equal strings */ #ifndef NO_DUMMY_DECL struct static_tree_desc_s {int dummy; }; /* for buggy compilers */ #endif /* * =========================================================================== * Update a hash value with the given input byte * IN assertion: all calls to to UPDATE_HASH are made with consecutive * input characters, so that a running hash key can be computed from the * previous key instead of complete recalculation each time. */ #define UPDATE_HASH(s, h, c) (h = (((h)<hash_shift) ^ (c)) & s->hash_mask) /* * =========================================================================== * Insert string str in the dictionary and set match_head to the previous head * of the hash chain (the most recent string with same hash key). Return * the previous length of the hash chain. * If this file is compiled with -DFASTEST, the compression level is forced * to 1, and no hash chains are maintained. 
 * IN assertion: all calls to INSERT_STRING are made with consecutive
 * input characters and the first MIN_MATCH bytes of str are valid
 * (except for the last MIN_MATCH-1 bytes of the input file).
 */
#ifdef FASTEST
#define	INSERT_STRING(s, str, match_head) \
	(UPDATE_HASH(s, s->ins_h, s->window[(str) + (MIN_MATCH-1)]), \
	match_head = s->head[s->ins_h], \
	s->head[s->ins_h] = (Pos)(str))
#else
#define	INSERT_STRING(s, str, match_head) \
	(UPDATE_HASH(s, s->ins_h, s->window[(str) + (MIN_MATCH-1)]), \
	s->prev[(str) & s->w_mask] = match_head = s->head[s->ins_h], \
	s->head[s->ins_h] = (Pos)(str))
#endif

/*
 * ===========================================================================
 * Initialize the hash table (avoiding 64K overflow for 16 bit systems).
 * prev[] will be initialized on the fly.
 */
#define	CLEAR_HASH(s) \
	s->head[s->hash_size-1] = NIL; \
	zmemzero((Bytef *)s->head, \
	    (unsigned)(s->hash_size-1)*sizeof (*s->head));

/* ========================================================================= */
/*
 * Convenience initializer: delegates to deflateInit2_() with the
 * documented defaults (zlib header, MAX_WBITS window, default memLevel
 * and strategy).
 */
int deflateInit_(strm, level, version, stream_size)
	z_streamp strm;
	int level;
	const char *version;
	int stream_size;
{
	/* reference the copyright string so it is kept in the binary */
	(void) deflate_copyright;
	return deflateInit2_(strm, level, Z_DEFLATED, MAX_WBITS,
	    DEF_MEM_LEVEL, Z_DEFAULT_STRATEGY, version, stream_size);
	/* To do: ignore strm->next_in if we use it as window */
}

/* ========================================================================= */
/*
 * Full-control initializer: validates all parameters, allocates the
 * sliding window, hash tables and the overlaid pending/l_buf/d_buf
 * buffers, then resets the stream. Returns Z_OK, or Z_VERSION_ERROR /
 * Z_STREAM_ERROR / Z_MEM_ERROR on failure.
 */
int deflateInit2_(strm, level, method, windowBits, memLevel, strategy,
    version, stream_size)
	z_streamp strm;
	int level;
	int method;
	int windowBits;
	int memLevel;
	int strategy;
	const char *version;
	int stream_size;
{
	deflate_state *s;
	int noheader = 0;
	static const char *my_version = ZLIB_VERSION;

	ushf *overlay;
	/*
	 * We overlay pending_buf and d_buf+l_buf. This works since
	 * the average output size for (length, distance) codes is <=
	 * 24 bits.
	 */

	if (version == Z_NULL || version[0] != my_version[0] ||
	    stream_size != sizeof (z_stream)) {
		return (Z_VERSION_ERROR);
	}
	if (strm == Z_NULL)
		return (Z_STREAM_ERROR);

	strm->msg = Z_NULL;
#ifndef NO_ZCFUNCS
	if (strm->zalloc == Z_NULL) {
		strm->zalloc = zcalloc;
		strm->opaque = (voidpf)0;
	}
	if (strm->zfree == Z_NULL)
		strm->zfree = zcfree;
#endif

	if (level == Z_DEFAULT_COMPRESSION)
		level = 6;
#ifdef FASTEST
	level = 1;
#endif

	if (windowBits < 0) {	/* undocumented feature: suppress zlib header */
		noheader = 1;
		windowBits = -windowBits;
	}
	if (memLevel < 1 || memLevel > MAX_MEM_LEVEL || method != Z_DEFLATED ||
	    windowBits <= 8 || windowBits > 15 || level < 0 || level > 9 ||
	    strategy < 0 || strategy > Z_HUFFMAN_ONLY) {
		return (Z_STREAM_ERROR);
	}
	s = (deflate_state *) ZALLOC(strm, 1, sizeof (deflate_state));
	if (s == Z_NULL)
		return (Z_MEM_ERROR);
	strm->state = (struct internal_state FAR *)s;
	s->strm = strm;

	s->noheader = noheader;
	s->w_bits = windowBits;
	s->w_size = 1 << s->w_bits;
	s->w_mask = s->w_size - 1;

	s->hash_bits = memLevel + 7;
	s->hash_size = 1 << s->hash_bits;
	s->hash_mask = s->hash_size - 1;
	/* smallest shift such that MIN_MATCH steps cover hash_bits bits */
	s->hash_shift = ((s->hash_bits+MIN_MATCH-1)/MIN_MATCH);

	s->window = (Bytef *) ZALLOC(strm, s->w_size, 2*sizeof (Byte));
	s->prev = (Posf *) ZALLOC(strm, s->w_size, sizeof (Pos));
	s->head = (Posf *) ZALLOC(strm, s->hash_size, sizeof (Pos));

	s->lit_bufsize = 1 << (memLevel + 6);	/* 16K elements by default */

	overlay = (ushf *) ZALLOC(strm, s->lit_bufsize, sizeof (ush)+2);
	s->pending_buf = (uchf *) overlay;
	s->pending_buf_size = (ulg)s->lit_bufsize * (sizeof (ush)+2L);

	if (s->window == Z_NULL || s->prev == Z_NULL || s->head == Z_NULL ||
	    s->pending_buf == Z_NULL) {
		strm->msg = ERR_MSG(Z_MEM_ERROR);
		s->status = INIT_STATE;
		(void) deflateEnd(strm);
		return (Z_MEM_ERROR);
	}
	s->d_buf = overlay + s->lit_bufsize/sizeof (ush);
	s->l_buf = s->pending_buf + (1+sizeof (ush))*s->lit_bufsize;

	s->level = level;
	s->strategy = strategy;
	s->method = (Byte)method;

	return (deflateReset(strm));
}

/*
 ========================================================================= */
/*
 * Prime the compressor with a preset dictionary. Only valid directly
 * after init (INIT_STATE): copies (at most MAX_DIST) trailing bytes of
 * the dictionary into the window, inserts its strings into the hash
 * table, and folds it into strm->adler.
 */
int deflateSetDictionary(strm, dictionary, dictLength)
	z_streamp strm;
	const Bytef *dictionary;
	uInt dictLength;
{
	deflate_state *s;
	uInt length = dictLength;
	uInt n;
	IPos hash_head = 0;

	if (strm == Z_NULL || strm->state == Z_NULL || dictionary == Z_NULL)
		return (Z_STREAM_ERROR);

	s = (deflate_state *) strm->state;
	if (s->status != INIT_STATE)
		return (Z_STREAM_ERROR);

	strm->adler = adler32(strm->adler, dictionary, dictLength);

	if (length < MIN_MATCH)
		return (Z_OK);
	if (length > MAX_DIST(s)) {
		length = MAX_DIST(s);
#ifndef USE_DICT_HEAD
		/* use the tail of the dictionary */
		dictionary += dictLength - length;
#endif
	}
	Assert(length <= s->window_size, "dict copy");
	zmemcpy(s->window, dictionary, length);
	s->strstart = length;
	s->block_start = (long)length;

	/*
	 * Insert all strings in the hash table (except for the last
	 * two bytes). s->lookahead stays null, so s->ins_h will be
	 * recomputed at the next call of fill_window.
	 */
	s->ins_h = s->window[0];
	UPDATE_HASH(s, s->ins_h, s->window[1]);
	for (n = 0; n <= length - MIN_MATCH; n++) {
		INSERT_STRING(s, n, hash_head);
	}
	if (hash_head)
		hash_head = 0;	/* to make compiler happy */
	return (Z_OK);
}

/* ========================================================================= */
/*
 * Reset the stream to the state it had after deflateInit, keeping the
 * allocated buffers and the chosen compression parameters.
 */
int deflateReset(strm)
	z_streamp strm;
{
	deflate_state *s;

	if (strm == Z_NULL || strm->state == Z_NULL ||
	    strm->zalloc == Z_NULL || strm->zfree == Z_NULL)
		return (Z_STREAM_ERROR);

	strm->total_in = strm->total_out = 0;
	/* use zfree if we ever allocate msg dynamically */
	strm->msg = Z_NULL;
	strm->data_type = Z_UNKNOWN;

	s = (deflate_state *)strm->state;
	s->pending = 0;
	s->pending_out = s->pending_buf;

	if (s->noheader < 0) {
		/* was set to -1 by deflate(..., Z_FINISH); */
		s->noheader = 0;
	}
	s->status = s->noheader ? BUSY_STATE : INIT_STATE;
	strm->adler = 1;
	s->last_flush = Z_NO_FLUSH;

	_tr_init(s);
	lm_init(s);

	return (Z_OK);
}

/* ========================================================================= */
/*
 * Change compression level and strategy on a live stream. If the
 * change switches the compression function, any buffered input is
 * flushed first with Z_PARTIAL_FLUSH.
 */
int deflateParams(strm, level, strategy)
	z_streamp strm;
	int level;
	int strategy;
{
	deflate_state *s;
	compress_func func;
	int err = Z_OK;

	if (strm == Z_NULL || strm->state == Z_NULL)
		return (Z_STREAM_ERROR);
	s = (deflate_state *) strm->state;

	if (level == Z_DEFAULT_COMPRESSION) {
		level = 6;
	}
	if (level < 0 || level > 9 || strategy < 0 ||
	    strategy > Z_HUFFMAN_ONLY) {
		return (Z_STREAM_ERROR);
	}
	func = configuration_table[s->level].func;

	if (func != configuration_table[level].func && strm->total_in != 0) {
		/* Flush the last buffer: */
		err = deflate(strm, Z_PARTIAL_FLUSH);
	}
	if (s->level != level) {
		s->level = level;
		s->max_lazy_match = configuration_table[level].max_lazy;
		s->good_match = configuration_table[level].good_length;
		s->nice_match = configuration_table[level].nice_length;
		s->max_chain_length = configuration_table[level].max_chain;
	}
	s->strategy = strategy;
	return (err);
}

/*
 * =========================================================================
 * Put a short in the pending buffer. The 16-bit value is put in MSB order.
 * IN assertion: the stream state is correct and there is enough room in
 * pending_buf.
 */
local void putShortMSB(s, b)
	deflate_state *s;
	uInt b;
{
	put_byte(s, (Byte)(b >> 8));
	put_byte(s, (Byte)(b & 0xff));
}

/*
 * =========================================================================
 * Flush as much pending output as possible. All deflate() output goes
 * through this function so some applications may wish to modify it
 * to avoid allocating a large strm->next_out buffer and copying into it.
 * (See also read_buf()).
 */
local void
flush_pending(strm)
	z_streamp strm;
{
	deflate_state *s = (deflate_state *) strm->state;
	unsigned len = s->pending;	/* bytes waiting in pending_buf */

	/* Never copy more than the caller's remaining output space. */
	if (len > strm->avail_out) len = strm->avail_out;
	if (len == 0)
		return;

	/* PPP change: a NULL next_out means "discard the output". */
	if (strm->next_out != Z_NULL) {				/* PPP */
		zmemcpy(strm->next_out, s->pending_out, len);
		strm->next_out += len;
	}							/* PPP */
	s->pending_out += len;
	strm->total_out += len;
	strm->avail_out -= len;
	s->pending -= len;
	if (s->pending == 0) {
		/* Buffer fully drained; reset read pointer to its start. */
		s->pending_out = s->pending_buf;
	}
}

/* ========================================================================= */
/*
 * deflate() -- main compression entry point.  Consumes input from
 * strm->next_in/avail_in and produces output into strm->next_out/avail_out
 * according to the flush mode (Z_NO_FLUSH .. Z_FINISH, plus the PPP
 * addition Z_PACKET_FLUSH).  Emits the zlib header on the first call,
 * dispatches to the configured compress function, and writes the adler32
 * trailer when finishing.  Returns Z_OK, Z_STREAM_END, or an error code.
 */
int
deflate(strm, flush)
	z_streamp strm;
	int flush;
{
	int old_flush;	/* value of flush param for previous deflate call */
	deflate_state *s;

	if (strm == Z_NULL || strm->state == Z_NULL ||
	    flush > Z_FINISH || flush < 0) {
		return (Z_STREAM_ERROR);
	}
	s = (deflate_state *) strm->state;

	if (/* strm->next_out == Z_NULL || --- we allow null --- PPP */
	    (strm->next_in == Z_NULL && strm->avail_in != 0) ||
	    (s->status == FINISH_STATE && flush != Z_FINISH)) {
		ERR_RETURN(strm, Z_STREAM_ERROR);
	}
	if (strm->avail_out == 0)
		ERR_RETURN(strm, Z_BUF_ERROR);

	s->strm = strm;	/* just in case */
	old_flush = s->last_flush;
	s->last_flush = flush;

	/* Write the zlib header */
	if (s->status == INIT_STATE) {
		/* CMF byte (method + window size) in the high 8 bits. */
		uInt header = (Z_DEFLATED + ((s->w_bits-8)<<4)) << 8;
		uInt level_flags = (s->level-1) >> 1;

		if (level_flags > 3) level_flags = 3;
		header |= (level_flags << 6);
		if (s->strstart != 0) header |= PRESET_DICT;
		/* Make the 16-bit header a multiple of 31 (FCHECK). */
		header += 31 - (header % 31);

		s->status = BUSY_STATE;
		putShortMSB(s, header);

		/* Save the adler32 of the preset dictionary: */
		if (s->strstart != 0) {
			putShortMSB(s, (uInt)(strm->adler >> 16));
			putShortMSB(s, (uInt)(strm->adler & 0xffff));
		}
		strm->adler = 1L;
	}

	/* Flush as much pending output as possible */
	if (s->pending != 0) {
		flush_pending(strm);
		if (strm->avail_out == 0) {
			/*
			 * Since avail_out is 0, deflate will be
			 * called again with more output space, but
			 * possibly with both pending and avail_in
			 * equal to zero. There won't be anything to
			 * do, but this is not an error situation so
			 * make sure we return OK instead of BUF_ERROR
			 * at next call of deflate:
			 */
			s->last_flush = -1;
			return (Z_OK);
		}
		/*
		 * Make sure there is something to do and avoid
		 * duplicate consecutive flushes. For repeated and
		 * useless calls with Z_FINISH, we keep returning
		 * Z_STREAM_END instead of Z_BUF_ERROR.
		 */
	} else if (strm->avail_in == 0 && flush <= old_flush &&
	    flush != Z_FINISH) {
		ERR_RETURN(strm, Z_BUF_ERROR);
	}

	/* User must not provide more input after the first FINISH: */
	if (s->status == FINISH_STATE && strm->avail_in != 0) {
		ERR_RETURN(strm, Z_BUF_ERROR);
	}

	/* Start a new block or continue the current one. */
	if (strm->avail_in != 0 || s->lookahead != 0 ||
	    (flush != Z_NO_FLUSH && s->status != FINISH_STATE)) {
		block_state bstate;

		/* Dispatch to deflate_stored/fast/slow per the level. */
		bstate = (*(configuration_table[s->level].func))(s, flush);

		if (bstate == finish_started || bstate == finish_done) {
			s->status = FINISH_STATE;
		}
		if (bstate == need_more || bstate == finish_started) {
			if (strm->avail_out == 0) {
				/* avoid BUF_ERROR next call, see above */
				s->last_flush = -1;
			}
			return (Z_OK);
			/*
			 * If flush != Z_NO_FLUSH && avail_out == 0,
			 * the next call of deflate should use the
			 * same flush parameter to make sure that the
			 * flush is complete. So we don't have to
			 * output an empty block here, this will be
			 * done at next call. This also ensures that
			 * for a very small output buffer, we emit at
			 * most one empty block.
			 */
		}
		if (bstate == block_done) {
			if (flush == Z_PARTIAL_FLUSH) {
				_tr_align(s);
			} else if (flush == Z_PACKET_FLUSH) {	/* PPP */
				/*
				 * Output just the 3-bit `stored'
				 * block type value, but not a zero
				 * length. Added for PPP.
				 */
				_tr_stored_type_only(s);	/* PPP */
			} else {	/* FULL_FLUSH or SYNC_FLUSH */
				_tr_stored_block(s, (char *)0, 0L, 0);
				/*
				 * For a full flush, this empty block
				 * will be recognized as a special
				 * marker by inflate_sync().
				 */
				if (flush == Z_FULL_FLUSH) {
					CLEAR_HASH(s); /* forget history */
				}
			}
			flush_pending(strm);
			if (strm->avail_out == 0) {
				/* avoid BUF_ERROR at next call, see above */
				s->last_flush = -1;
				return (Z_OK);
			}
		}
	}
	Assert(strm->avail_out > 0, "bug2");

	if (flush != Z_FINISH)
		return (Z_OK);
	/* noheader (raw deflate) streams carry no adler32 trailer. */
	if (s->noheader)
		return (Z_STREAM_END);

	/* Write the zlib trailer (adler32) */
	putShortMSB(s, (uInt)(strm->adler >> 16));
	putShortMSB(s, (uInt)(strm->adler & 0xffff));
	flush_pending(strm);
	/*
	 * If avail_out is zero, the application will call deflate
	 * again to flush the rest.
	 */
	s->noheader = -1;	/* write the trailer only once! */
	return (s->pending != 0 ? Z_OK : Z_STREAM_END);
}

/* ========================================================================= */
/*
 * deflateEnd() -- free all state associated with a deflate stream.
 * Returns Z_DATA_ERROR if the stream was freed mid-compression
 * (some output may have been discarded), Z_OK otherwise.
 */
int
deflateEnd(strm)
	z_streamp strm;
{
	int status;
	deflate_state *s;

	if (strm == Z_NULL || strm->state == Z_NULL)
		return (Z_STREAM_ERROR);
	s = (deflate_state *) strm->state;

	status = s->status;
	if (status != INIT_STATE && status != BUSY_STATE &&
	    status != FINISH_STATE) {
		return (Z_STREAM_ERROR);
	}

	/* Deallocate in reverse order of allocations: */
	TRY_FREE(strm, s->pending_buf);
	TRY_FREE(strm, s->head);
	TRY_FREE(strm, s->prev);
	TRY_FREE(strm, s->window);

	ZFREE(strm, s);
	strm->state = Z_NULL;

	return (status == BUSY_STATE ? Z_DATA_ERROR : Z_OK);
}

/*
 * =========================================================================
 * Copy the source state to the destination state.
 * To simplify the source, this is not supported for 16-bit MSDOS (which
 * doesn't have enough memory anyway to duplicate compression states).
 */
int
deflateCopy(dest, source)
	z_streamp dest;
	z_streamp source;
{
#ifdef MAXSEG_64K
	return (Z_STREAM_ERROR);
#else
	deflate_state *ds;
	deflate_state *ss;
	ushf *overlay;	/* d_buf/l_buf overlay inside pending_buf */

	if (source == Z_NULL || dest == Z_NULL || source->state == Z_NULL)
		return (Z_STREAM_ERROR);
	ss = (deflate_state *) source->state;

	/* Shallow-copy the stream struct first, then deep-copy state. */
	zmemcpy(dest, source, sizeof (*dest));

	ds = (deflate_state *) ZALLOC(dest, 1, sizeof (deflate_state));
	if (ds == Z_NULL)
		return (Z_MEM_ERROR);
	dest->state = (struct internal_state FAR *) ds;
	zmemcpy(ds, ss, sizeof (*ds));
	ds->strm = dest;

	/* Same allocation layout as deflateInit2. */
	ds->window = (Bytef *) ZALLOC(dest, ds->w_size, 2*sizeof (Byte));
	ds->prev = (Posf *) ZALLOC(dest, ds->w_size, sizeof (Pos));
	ds->head = (Posf *) ZALLOC(dest, ds->hash_size, sizeof (Pos));
	overlay = (ushf *) ZALLOC(dest, ds->lit_bufsize, sizeof (ush)+2);
	ds->pending_buf = (uchf *) overlay;

	if (ds->window == Z_NULL || ds->prev == Z_NULL || ds->head == Z_NULL ||
	    ds->pending_buf == Z_NULL) {
		/* Mark state so deflateEnd reports Z_OK, then bail out. */
		ds->status = INIT_STATE;
		(void) deflateEnd(dest);
		return (Z_MEM_ERROR);
	}
	/* following zmemcpy doesn't work for 16-bit MSDOS */
	zmemcpy(ds->window, ss->window, ds->w_size * 2 * sizeof (Byte));
	zmemcpy(ds->prev, ss->prev, ds->w_size * sizeof (Pos));
	zmemcpy(ds->head, ss->head, ds->hash_size * sizeof (Pos));
	zmemcpy(ds->pending_buf, ss->pending_buf, (uInt)ds->pending_buf_size);

	/* Rebase the pointers that pointed into the source's buffers. */
	ds->pending_out = ds->pending_buf + (ss->pending_out - ss->pending_buf);
	ds->d_buf = overlay + ds->lit_bufsize/sizeof (ush);
	ds->l_buf = ds->pending_buf + (1+sizeof (ush))*ds->lit_bufsize;

	ds->l_desc.dyn_tree = ds->dyn_ltree;
	ds->d_desc.dyn_tree = ds->dyn_dtree;
	ds->bl_desc.dyn_tree = ds->bl_tree;

	return (Z_OK);
#endif
}

/*
 * ===========================================================================
 * Return the number of bytes of output which are immediately available
 * for output from the decompressor.
 * ---PPP---
 * NOTE(review): despite the wording above, this reports the *compressor's*
 * pending output (deflate side) -- confirm against the PPP callers.
 */
int
deflateOutputPending(strm)
	z_streamp strm;
{
	if (strm == Z_NULL || strm->state == Z_NULL)
		return (0);

	return (((deflate_state *)(strm->state))->pending);
}

/*
 * ===========================================================================
 * Read a new buffer from the current input stream, update the adler32
 * and total number of bytes read. All deflate() input goes through
 * this function so some applications may wish to modify it to avoid
 * allocating a large strm->next_in buffer and copying from it.
 * (See also flush_pending()).
 */
local int
read_buf(strm, buf, size)
	z_streamp strm;
	Bytef *buf;
	unsigned size;
{
	unsigned len = strm->avail_in;

	if (len > size) len = size;
	if (len == 0)
		return (0);

	strm->avail_in -= len;

	/* The adler32 covers the raw input only for zlib-wrapped streams. */
	if (!((deflate_state *)(strm->state))->noheader) {
		strm->adler = adler32(strm->adler, strm->next_in, len);
	}
	zmemcpy(buf, strm->next_in, len);
	strm->next_in += len;
	strm->total_in += len;

	return ((int)len);
}

/*
 * ===========================================================================
 * Initialize the "longest match" routines for a new zlib stream
 */
local void
lm_init(s)
	deflate_state *s;
{
	s->window_size = (ulg)2L*s->w_size;

	CLEAR_HASH(s);

	/* Set the default configuration parameters: */
	s->max_lazy_match = configuration_table[s->level].max_lazy;
	s->good_match = configuration_table[s->level].good_length;
	s->nice_match = configuration_table[s->level].nice_length;
	s->max_chain_length = configuration_table[s->level].max_chain;

	s->strstart = 0;
	s->block_start = 0L;
	s->lookahead = 0;
	s->match_length = s->prev_length = MIN_MATCH-1;
	s->match_available = 0;
	s->ins_h = 0;
#ifdef ASMV
	match_init(); /* initialize the asm code */
#endif
}

/*
 * ===========================================================================
 * Set match_start to the longest match starting at the given string and
 * return its length. Matches shorter or equal to prev_length are discarded,
 * in which case the result is equal to prev_length and match_start is
 * garbage.
 * IN assertions: cur_match is the head of the hash chain for the current
 * string (strstart) and its distance is <= MAX_DIST, and prev_length >= 1
 * OUT assertion: the match length is not greater than s->lookahead.
 */
#ifndef ASMV
/*
 * For 80x86 and 680x0, an optimized version will be provided in
 * match.asm or match.S. The code will be functionally equivalent.
 */
#ifndef FASTEST
local uInt
longest_match(s, cur_match)
	deflate_state *s;
	IPos cur_match;				/* current match */
{
	/* max hash chain length */
	unsigned chain_length = s->max_chain_length;
	register Bytef *scan = s->window + s->strstart;	/* current string */
	register Bytef *match;			/* matched string */
	register int len;			/* length of current match */
	int best_len = s->prev_length;		/* best match length so far */
	int nice_match = s->nice_match;		/* stop if match long enough */
	IPos limit = s->strstart > (IPos)MAX_DIST(s) ?
	    s->strstart - (IPos)MAX_DIST(s) : NIL;
	/*
	 * Stop when cur_match becomes <= limit. To simplify the code,
	 * we prevent matches with the string of window index 0.
	 */
	Posf *prev = s->prev;
	uInt wmask = s->w_mask;

#ifdef UNALIGNED_OK
	/*
	 * Compare two bytes at a time. Note: this is not always
	 * beneficial. Try with and without -DUNALIGNED_OK to check.
	 */
	register Bytef *strend = s->window + s->strstart + MAX_MATCH - 1;
	register ush scan_start = *(ushf*)scan;
	register ush scan_end = *(ushf*)(scan+best_len-1);
#else
	register Bytef *strend = s->window + s->strstart + MAX_MATCH;
	register Byte scan_end1 = scan[best_len-1];
	register Byte scan_end = scan[best_len];
#endif

	/*
	 * The code is optimized for HASH_BITS >= 8 and MAX_MATCH-2
	 * multiple of 16. It is easy to get rid of this optimization
	 * if necessary.
	 */
	Assert(s->hash_bits >= 8 && MAX_MATCH == 258, "Code too clever");

	/* Do not waste too much time if we already have a good match: */
	if (s->prev_length >= s->good_match) {
		chain_length >>= 2;
	}
	/*
	 * Do not look for matches beyond the end of the input. This
	 * is necessary to make deflate deterministic.
	 */
	if ((uInt)nice_match > s->lookahead) nice_match = s->lookahead;

	Assert((ulg)s->strstart <= s->window_size-MIN_LOOKAHEAD,
	    "need lookahead");

	/* Walk the hash chain, remembering the longest match found. */
	do {
		Assert(cur_match <= s->strstart, "no future");
		match = s->window + cur_match;

		/*
		 * Skip to next match if the match length cannot
		 * increase or if the match length is less than 2:
		 */
#if (defined(UNALIGNED_OK) && MAX_MATCH == 258)
		/*
		 * This code assumes sizeof (unsigned short) == 2. Do
		 * not use UNALIGNED_OK if your compiler uses a
		 * different size.
		 */
		if (*(ushf*)(match+best_len-1) != scan_end ||
		    *(ushf*)match != scan_start) continue;

		/*
		 * It is not necessary to compare scan[2] and match[2]
		 * since they are always equal when the other bytes
		 * match, given that the hash keys are equal and that
		 * HASH_BITS >= 8. Compare 2 bytes at a time at
		 * strstart+3, +5, ... up to strstart+257. We check
		 * for insufficient lookahead only every 4th
		 * comparison; the 128th check will be made at
		 * strstart+257. If MAX_MATCH-2 is not a multiple of
		 * 8, it is necessary to put more guard bytes at the
		 * end of the window, or to check more often for
		 * insufficient lookahead.
		 */
		Assert(scan[2] == match[2], "scan[2]?");
		scan++, match++;
		do {
		} while (*(ushf *)(scan += 2) == *(ushf *)(match += 2) &&
		    *(ushf *)(scan += 2) == *(ushf *)(match += 2) &&
		    *(ushf *)(scan += 2) == *(ushf *)(match += 2) &&
		    *(ushf *)(scan += 2) == *(ushf *)(match += 2) &&
		    scan < strend);
		/* The funny "do {}" generates better code on most compilers */

		/* Here, scan <= window+strstart+257 */
		Assert(scan <= s->window+(unsigned)(s->window_size-1),
		    "wild scan");
		if (*scan == *match) scan++;

		len = (MAX_MATCH - 1) - (int)(strend-scan);
		scan = strend - (MAX_MATCH-1);

#else /* UNALIGNED_OK */

		if (match[best_len] != scan_end ||
		    match[best_len-1] != scan_end1 ||
		    *match != *scan ||
		    *++match != scan[1]) continue;

		/*
		 * The check at best_len-1 can be removed because it
		 * will be made again later. (This heuristic is not
		 * always a win.) It is not necessary to compare
		 * scan[2] and match[2] since they are always equal
		 * when the other bytes match, given that the hash
		 * keys are equal and that HASH_BITS >= 8.
		 */
		scan += 2, match++;
		Assert(*scan == *match, "match[2]?");

		/*
		 * We check for insufficient lookahead only every 8th
		 * comparison; the 256th check will be made at
		 * strstart+258.
		 */
		do {
		} while (*++scan == *++match && *++scan == *++match &&
		    *++scan == *++match && *++scan == *++match &&
		    *++scan == *++match && *++scan == *++match &&
		    *++scan == *++match && *++scan == *++match &&
		    scan < strend);

		Assert(scan <= s->window+(unsigned)(s->window_size-1),
		    "wild scan");

		len = MAX_MATCH - (int)(strend - scan);
		scan = strend - MAX_MATCH;

#endif /* UNALIGNED_OK */

		if (len > best_len) {
			s->match_start = cur_match;
			best_len = len;
			if (len >= nice_match) break;
#ifdef UNALIGNED_OK
			scan_end = *(ushf*)(scan+best_len-1);
#else
			scan_end1 = scan[best_len-1];
			scan_end = scan[best_len];
#endif
		}
	} while ((cur_match = prev[cur_match & wmask]) > limit &&
	    --chain_length != 0);

	/* Never report a match longer than the available lookahead. */
	if ((uInt)best_len <= s->lookahead)
		return (best_len);
	return (s->lookahead);
}
#else /* FASTEST */
/*
 * ---------------------------------------------------------------------------
 * Optimized version for level == 1 only: take the first match on the
 * chain head, or fail; no chain walking, no best-so-far bookkeeping.
 */
local uInt
longest_match(s, cur_match)
	deflate_state *s;
	IPos cur_match;				/* current match */
{
	register Bytef *scan = s->window + s->strstart;	/* current string */
	register Bytef *match;			/* matched string */
	register int len;			/* length of current match */
	register Bytef *strend = s->window + s->strstart + MAX_MATCH;

	/*
	 * The code is optimized for HASH_BITS >= 8 and MAX_MATCH-2
	 * multiple of 16. It is easy to get rid of this optimization
	 * if necessary.
	 */
	Assert(s->hash_bits >= 8 && MAX_MATCH == 258, "Code too clever");

	Assert((ulg)s->strstart <= s->window_size-MIN_LOOKAHEAD,
	    "need lookahead");

	Assert(cur_match <= s->strstart, "no future");

	match = s->window + cur_match;

	/* Return failure if the match length is less than 2: */
	if (match[0] != scan[0] || match[1] != scan[1])
		return (MIN_MATCH-1);

	/*
	 * The check at best_len-1 can be removed because it will be
	 * made again later. (This heuristic is not always a win.) It
	 * is not necessary to compare scan[2] and match[2] since they
	 * are always equal when the other bytes match, given that the
	 * hash keys are equal and that HASH_BITS >= 8.
	 */
	scan += 2, match += 2;
	Assert(*scan == *match, "match[2]?");

	/*
	 * We check for insufficient lookahead only every 8th comparison;
	 * the 256th check will be made at strstart+258.
	 */
	do {
	} while (*++scan == *++match && *++scan == *++match &&
	    *++scan == *++match && *++scan == *++match &&
	    *++scan == *++match && *++scan == *++match &&
	    *++scan == *++match && *++scan == *++match &&
	    scan < strend);

	Assert(scan <= s->window+(unsigned)(s->window_size-1), "wild scan");

	len = MAX_MATCH - (int)(strend - scan);

	if (len < MIN_MATCH)
		return (MIN_MATCH - 1);

	s->match_start = cur_match;
	return (len <= s->lookahead ? len : s->lookahead);
}
#endif /* FASTEST */
#endif /* ASMV */

#ifdef DEBUG_ZLIB
/*
 * ===========================================================================
 * Check that the match at match_start is indeed a match.
 */
local void
check_match(s, start, match, length)
	deflate_state *s;
	IPos start, match;
	int length;
{
	/* check that the match is indeed a match */
	if (zmemcmp(s->window + match, s->window + start, length) != EQUAL) {
		fprintf(stderr, " start %u, match %u, length %d\n",
		    start, match, length);
		do {
			fprintf(stderr, "%c%c", s->window[match++],
			    s->window[start++]);
		} while (--length != 0);
		z_error("invalid match");
	}
	if (z_verbose > 1) {
		fprintf(stderr, "\\[%d,%d]", start-match, length);
		do {
			putc(s->window[start++], stderr);
		} while (--length != 0);
	}
}
#else
/* Non-debug builds: match verification compiles away to nothing. */
#define	check_match(s, start, match, length)
#endif

/*
 * ===========================================================================
 * Fill the window when the lookahead becomes insufficient.
 * Updates strstart and lookahead.
 *
 * IN assertion: lookahead < MIN_LOOKAHEAD
 * OUT assertions: strstart <= window_size-MIN_LOOKAHEAD
 *	At least one byte has been read, or avail_in == 0; reads are
 *	performed for at least two bytes (required for the zip translate_eol
 *	option -- not supported here).
 */
local void
fill_window(s)
	deflate_state *s;
{
	register unsigned n, m;
	register Posf *p;
	unsigned more;	/* Amount of free space at the end of the window. */
	uInt wsize = s->w_size;

	do {
		more = (unsigned)(s->window_size -
		    (ulg)s->lookahead - (ulg)s->strstart);

		/* Deal with !@#$% 64K limit: */
		if (more == 0 && s->strstart == 0 && s->lookahead == 0) {
			more = wsize;

		} else if (more == (unsigned)(-1)) {
			/*
			 * Very unlikely, but possible on 16 bit
			 * machine if strstart == 0 and lookahead == 1
			 * (input done one byte at time)
			 */
			more--;

			/*
			 * If the window is almost full and there is
			 * insufficient lookahead, move the upper half
			 * to the lower one to make room in the upper
			 * half.
			 */
		} else if (s->strstart >= wsize+MAX_DIST(s)) {
			Assert(wsize+wsize <= s->window_size, "wsize*2");

			zmemcpy(s->window, s->window+wsize, (unsigned)wsize);
			s->match_start -= wsize;
			/* we now have strstart >= MAX_DIST */
			s->strstart -= wsize;
			s->block_start -= (long)wsize;

			/*
			 * Slide the hash table (could be avoided with
			 * 32 bit values at the expense of memory
			 * usage). We slide even when level == 0 to
			 * keep the hash table consistent if we switch
			 * back to level > 0 later. (Using level 0
			 * permanently is not an optimal usage of
			 * zlib, so we don't care about this
			 * pathological case.)
			 */
			n = s->hash_size;
			p = &s->head[n];
			do {
				m = *--p;
				*p = (Pos)(m >= wsize ? m-wsize : NIL);
			} while (--n);

			n = wsize;
#ifndef FASTEST
			p = &s->prev[n];
			do {
				m = *--p;
				*p = (Pos)(m >= wsize ? m-wsize : NIL);
				/*
				 * If n is not on any hash chain,
				 * prev[n] is garbage but its value
				 * will never be used.
				 */
			} while (--n);
#endif
			more += wsize;
		}
		if (s->strm->avail_in == 0)
			return;

		/*
		 * If there was no sliding:
		 *    strstart <= WSIZE+MAX_DIST-1 &&
		 *	lookahead <= MIN_LOOKAHEAD - 1 &&
		 *    more == window_size - lookahead - strstart
		 * => more >= window_size - (MIN_LOOKAHEAD-1 + WSIZE +
		 *	MAX_DIST-1)
		 * => more >= window_size - 2*WSIZE + 2
		 * In the BIG_MEM or MMAP case (not yet supported),
		 *   window_size == input_size + MIN_LOOKAHEAD &&
		 *   strstart + s->lookahead <= input_size =>
		 *	more >= MIN_LOOKAHEAD.
		 * Otherwise, window_size == 2*WSIZE so more >= 2.
		 * If there was sliding, more >= WSIZE. So in all cases,
		 * more >= 2.
		 */
		Assert(more >= 2, "more < 2");
		Assert(s->strstart + s->lookahead + more <= s->window_size,
		    "read too much");

		n = read_buf(s->strm, s->window + s->strstart + s->lookahead,
		    more);
		s->lookahead += n;

		/* Initialize the hash value now that we have some input: */
		if (s->lookahead >= MIN_MATCH) {
			s->ins_h = s->window[s->strstart];
			UPDATE_HASH(s, s->ins_h, s->window[s->strstart+1]);
#if MIN_MATCH != 3
			Call UPDATE_HASH() MIN_MATCH-3 more times
#endif
		}
		/*
		 * If the whole input has less than MIN_MATCH bytes,
		 * ins_h is garbage, but this is not important since
		 * only literal bytes will be emitted.
		 */
	} while (s->lookahead < MIN_LOOKAHEAD && s->strm->avail_in != 0);
}

/*
 * ===========================================================================
 * Flush the current block, with given end-of-file flag.
 * IN assertion: strstart is set to the end of the current match.
 */
#define	FLUSH_BLOCK_ONLY(s, eof) { \
	_tr_flush_block(s, (s->block_start >= 0L ? \
	    (charf *)&s->window[(unsigned)s->block_start] : \
	    (charf *)Z_NULL), \
	    (ulg)((long)s->strstart - s->block_start), \
	    (eof)); \
	s->block_start = s->strstart; \
	flush_pending(s->strm); \
	Tracev((stderr, "[FLUSH]")); \
}

/* Same but force premature exit if necessary. */
#define	FLUSH_BLOCK(s, eof) { \
	FLUSH_BLOCK_ONLY(s, eof); \
	if (s->strm->avail_out == 0) \
		return ((eof) ? finish_started : need_more); \
}

/*
 * ===========================================================================
 * Copy without compression as much as possible from the input stream, return
 * the current block state.
 * This function does not insert new strings in the dictionary since
 * uncompressible data is probably not useful. This function is used
 * only for the level=0 compression option.
 * NOTE: this function should be optimized to avoid extra copying from
 * window to pending_buf.
 */
local block_state
deflate_stored(s, flush)
	deflate_state *s;
	int flush;
{
	/*
	 * Stored blocks are limited to 0xffff bytes, pending_buf is
	 * limited to pending_buf_size, and each stored block has a 5
	 * byte header:
	 */
	ulg max_block_size = 0xffff;
	ulg max_start;

	if (max_block_size > s->pending_buf_size - 5) {
		max_block_size = s->pending_buf_size - 5;
	}

	/* Copy as much as possible from input to output: */
	for (;;) {
		/* Fill the window as much as possible: */
		if (s->lookahead <= 1) {

			Assert(s->strstart < s->w_size+MAX_DIST(s) ||
			    s->block_start >= (long)s->w_size,
			    "slide too late");

			fill_window(s);
			if (s->lookahead == 0 && flush == Z_NO_FLUSH)
				return (need_more);

			if (s->lookahead == 0)
				break;	/* flush the current block */
		}
		Assert(s->block_start >= 0L, "block gone");

		/* Consume all buffered input as one run of stored data. */
		s->strstart += s->lookahead;
		s->lookahead = 0;

		/* Emit a stored block if pending_buf will be full: */
		max_start = s->block_start + max_block_size;
		if (s->strstart == 0 || (ulg)s->strstart >= max_start) {
			/*
			 * strstart == 0 is possible when wraparound
			 * on 16-bit machine
			 */
			s->lookahead = (uInt)(s->strstart - max_start);
			s->strstart = (uInt)max_start;
			FLUSH_BLOCK(s, 0);
		}
		/*
		 * Flush if we may have to slide, otherwise
		 * block_start may become negative and the data will
		 * be gone:
		 */
		if (s->strstart - (uInt)s->block_start >= MAX_DIST(s)) {
			FLUSH_BLOCK(s, 0);
		}
	}
	FLUSH_BLOCK(s, flush == Z_FINISH);
	return (flush == Z_FINISH ? finish_done : block_done);
}

/*
 * ===========================================================================
 * Compress as much as possible from the input stream, return the current
 * block state.
 * This function does not perform lazy evaluation of matches and inserts
 * new strings in the dictionary only for unmatched strings or for short
 * matches. It is used only for the fast compression options.
 */
local block_state
deflate_fast(s, flush)
	deflate_state *s;
	int flush;
{
	IPos hash_head = NIL;	/* head of the hash chain */
	int bflush;		/* set if current block must be flushed */

	for (;;) {
		/*
		 * Make sure that we always have enough lookahead,
		 * except at the end of the input file. We need
		 * MAX_MATCH bytes for the next match, plus MIN_MATCH
		 * bytes to insert the string following the next
		 * match.
		 */
		if (s->lookahead < MIN_LOOKAHEAD) {
			fill_window(s);
			if (s->lookahead < MIN_LOOKAHEAD &&
			    flush == Z_NO_FLUSH) {
				return (need_more);
			}
			if (s->lookahead == 0)
				break;	/* flush the current block */
		}

		/*
		 * Insert the string window[strstart .. strstart+2] in
		 * the dictionary, and set hash_head to the head of
		 * the hash chain:
		 */
		if (s->lookahead >= MIN_MATCH) {
			INSERT_STRING(s, s->strstart, hash_head);
		}

		/*
		 * Find the longest match, discarding those <=
		 * prev_length. At this point we have always
		 * match_length < MIN_MATCH
		 */
		if (hash_head != NIL && s->strstart - hash_head <=
		    MAX_DIST(s)) {
			/*
			 * To simplify the code, we prevent matches
			 * with the string of window index 0 (in
			 * particular we have to avoid a match of the
			 * string with itself at the start of the
			 * input file).
			 */
			if (s->strategy != Z_HUFFMAN_ONLY) {
				s->match_length = longest_match(s, hash_head);
			}
			/* longest_match() sets match_start */
		}
		if (s->match_length >= MIN_MATCH) {
			check_match(s, s->strstart, s->match_start,
			    s->match_length);

			/* Emit the (distance, length) pair. */
			_tr_tally_dist(s, s->strstart - s->match_start,
			    s->match_length - MIN_MATCH, bflush);

			s->lookahead -= s->match_length;

			/*
			 * Insert new strings in the hash table only
			 * if the match length is not too large. This
			 * saves time but degrades compression.
			 */
#ifndef FASTEST
			if (s->match_length <= s->max_insert_length &&
			    s->lookahead >= MIN_MATCH) {
				/* string at strstart already in hash table */
				s->match_length--;
				do {
					s->strstart++;
					INSERT_STRING(s, s->strstart,
					    hash_head);
					/*
					 * strstart never exceeds
					 * WSIZE-MAX_MATCH, so there
					 * are always MIN_MATCH bytes
					 * ahead.
					 */
				} while (--s->match_length != 0);
				s->strstart++;
			} else
#endif
			{
				s->strstart += s->match_length;
				s->match_length = 0;
				s->ins_h = s->window[s->strstart];
				UPDATE_HASH(s, s->ins_h,
				    s->window[s->strstart+1]);
#if MIN_MATCH != 3
				Call UPDATE_HASH() MIN_MATCH-3 more times
#endif
				/*
				 * If lookahead < MIN_MATCH, ins_h is
				 * garbage, but it does not matter
				 * since it will be recomputed at next
				 * deflate call.
				 */
			}
		} else {
			/* No match, output a literal byte */
			Tracevv((stderr, "%c", s->window[s->strstart]));
			_tr_tally_lit(s, s->window[s->strstart], bflush);
			s->lookahead--;
			s->strstart++;
		}
		if (bflush) FLUSH_BLOCK(s, 0);
	}
	FLUSH_BLOCK(s, flush == Z_FINISH);
	return (flush == Z_FINISH ? finish_done : block_done);
}

/*
 * ===========================================================================
 * Same as above, but achieves better compression. We use a lazy
 * evaluation for matches: a match is finally adopted only if there is
 * no better match at the next window position.
 */
local block_state
deflate_slow(s, flush)
	deflate_state *s;
	int flush;
{
	IPos hash_head = NIL;	/* head of hash chain */
	int bflush;		/* set if current block must be flushed */

	/* Process the input block. */
	for (;;) {
		/*
		 * Make sure that we always have enough lookahead,
		 * except at the end of the input file. We need
		 * MAX_MATCH bytes for the next match, plus MIN_MATCH
		 * bytes to insert the string following the next
		 * match.
		 */
		if (s->lookahead < MIN_LOOKAHEAD) {
			fill_window(s);
			if (s->lookahead < MIN_LOOKAHEAD &&
			    flush == Z_NO_FLUSH) {
				return (need_more);
			}
			/* flush the current block */
			if (s->lookahead == 0)
				break;
		}

		/*
		 * Insert the string window[strstart .. strstart+2] in
		 * the dictionary, and set hash_head to the head of
		 * the hash chain:
		 */
		if (s->lookahead >= MIN_MATCH) {
			INSERT_STRING(s, s->strstart, hash_head);
		}

		/*
		 * Find the longest match, discarding those <=
		 * prev_length.
		 */
		s->prev_length = s->match_length;
		s->prev_match = s->match_start;
		s->match_length = MIN_MATCH-1;

		if (hash_head != NIL && s->prev_length < s->max_lazy_match &&
		    s->strstart - hash_head <= MAX_DIST(s)) {
			/*
			 * To simplify the code, we prevent matches
			 * with the string of window index 0 (in
			 * particular we have to avoid a match of the
			 * string with itself at the start of the
			 * input file).
			 */
			if (s->strategy != Z_HUFFMAN_ONLY) {
				s->match_length = longest_match(s, hash_head);
			}
			/* longest_match() sets match_start */

			if (s->match_length <= 5 &&
			    (s->strategy == Z_FILTERED ||
			    (s->match_length == MIN_MATCH &&
			    s->strstart - s->match_start > TOO_FAR))) {
				/*
				 * If prev_match is also MIN_MATCH,
				 * match_start is garbage but we will
				 * ignore the current match anyway.
				 */
				s->match_length = MIN_MATCH-1;
			}
		}
		/*
		 * If there was a match at the previous step and the
		 * current match is not better, output the previous
		 * match:
		 */
		if (s->prev_length >= MIN_MATCH &&
		    s->match_length <= s->prev_length) {
			uInt max_insert = s->strstart + s->lookahead -
			    MIN_MATCH;
			/* Do not insert strings in hash table beyond this. */

			check_match(s, s->strstart-1, s->prev_match,
			    s->prev_length);

			_tr_tally_dist(s, s->strstart -1 - s->prev_match,
			    s->prev_length - MIN_MATCH, bflush);

			/*
			 * Insert in hash table all strings up to the
			 * end of the match. strstart-1 and strstart
			 * are already inserted. If there is not
			 * enough lookahead, the last two strings are
			 * not inserted in the hash table.
			 */
			s->lookahead -= s->prev_length-1;
			s->prev_length -= 2;
			do {
				if (++s->strstart <= max_insert) {
					INSERT_STRING(s, s->strstart,
					    hash_head);
				}
			} while (--s->prev_length != 0);
			s->match_available = 0;
			s->match_length = MIN_MATCH-1;
			s->strstart++;

			if (bflush) FLUSH_BLOCK(s, 0);

		} else if (s->match_available) {
			/*
			 * If there was no match at the previous
			 * position, output a single literal. If there
			 * was a match but the current match is
			 * longer, truncate the previous match to a
			 * single literal.
			 */
			Tracevv((stderr, "%c", s->window[s->strstart-1]));
			_tr_tally_lit(s, s->window[s->strstart-1], bflush);
			if (bflush) {
				FLUSH_BLOCK_ONLY(s, 0);
			}
			s->strstart++;
			s->lookahead--;
			if (s->strm->avail_out == 0)
				return (need_more);
		} else {
			/*
			 * There is no previous match to compare with,
			 * wait for the next step to decide.
			 */
			s->match_available = 1;
			s->strstart++;
			s->lookahead--;
		}
	}
	Assert(flush != Z_NO_FLUSH, "no flush?");
	if (s->match_available) {
		/* Flush the last deferred literal, if any. */
		Tracevv((stderr, "%c", s->window[s->strstart-1]));
		_tr_tally_lit(s, s->window[s->strstart-1], bflush);
		s->match_available = 0;
	}
	FLUSH_BLOCK(s, flush == Z_FINISH);
	return (flush == Z_FINISH ? finish_done : block_done);
}
/* --- deflate.c */

/* +++ trees.c */
/*
 * trees.c -- output deflated data using Huffman coding
 * Copyright (C) 1995-1998 Jean-loup Gailly
 * For conditions of distribution and use, see copyright notice in zlib.h
 */

/*
 * ALGORITHM
 *
 *	The "deflation" process uses several Huffman trees. The more
 *	common source values are represented by shorter bit sequences.
 *
 *	Each code tree is stored in a compressed form which is itself
 * a Huffman encoding of the lengths of all the code strings (in
 * ascending order by source values). The actual code strings are
 * reconstructed from the lengths in the inflate process, as described
 * in the deflate specification.
 *
 * REFERENCES
 *
 *	Deutsch, L.P.,"'Deflate' Compressed Data Format Specification".
 *	Available in ftp.uu.net:/pub/archiving/zip/doc/deflate-1.1.doc
 *
 *	Storer, James A.
 *	Data Compression: Methods and Theory, pp. 49-50.
 *	Computer Science Press, 1988. ISBN 0-7167-8156-5.
 *
 *	Sedgewick, R.
 *	Algorithms, p290.
 *	Addison-Wesley, 1983. ISBN 0-201-06672-6.
*/ /* From: trees.c,v 1.11 1996/07/24 13:41:06 me Exp $ */ /* #include "deflate.h" */ #ifdef DEBUG_ZLIB #include #endif /* * =========================================================================== * Constants */ #define MAX_BL_BITS 7 /* Bit length codes must not exceed MAX_BL_BITS bits */ #define END_BLOCK 256 /* end of block literal code */ #define REP_3_6 16 /* repeat previous bit length 3-6 times (2 bits of repeat count) */ #define REPZ_3_10 17 /* repeat a zero length 3-10 times (3 bits of repeat count) */ #define REPZ_11_138 18 /* repeat a zero length 11-138 times (7 bits of repeat count) */ /* extra bits for each length code */ local const int extra_lbits[LENGTH_CODES] = { 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 2, 2, 2, 2, 3, 3, 3, 3, 4, 4, 4, 4, 5, 5, 5, 5, 0}; /* extra bits for each distance code */ local const int extra_dbits[D_CODES] = { 0, 0, 0, 0, 1, 1, 2, 2, 3, 3, 4, 4, 5, 5, 6, 6, 7, 7, 8, 8, 9, 9, 10, 10, 11, 11, 12, 12, 13, 13}; /* extra bits for each bit length code */ local const int extra_blbits[BL_CODES] = { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 3, 7}; local const uch bl_order[BL_CODES] = { 16, 17, 18, 0, 8, 7, 9, 6, 10, 5, 11, 4, 12, 3, 13, 2, 14, 1, 15}; /* * The lengths of the bit length codes are sent in order of decreasing * probability, to avoid transmitting the lengths for unused bit * length codes. */ #define Buf_size (8 * 2*sizeof (char)) /* * Number of bits used within bi_buf. (bi_buf might be implemented on * more than 16 bits on some systems.) */ /* * =========================================================================== * Local data. These are initialized only once. */ #define DIST_CODE_LEN 512 /* see definition of array dist_code below */ local ct_data static_ltree[L_CODES+2]; /* * The static literal tree. Since the bit lengths are imposed, there * is no need for the L_CODES extra codes used during heap * construction. However The codes 286 and 287 are needed to build a * canonical tree (see _tr_init below). 
*/ local ct_data static_dtree[D_CODES]; /* * The static distance tree. (Actually a trivial tree since all codes * use 5 bits.) */ local uch _dist_code[512]; /* * distance codes. The first 256 values correspond to the distances 3 * .. 258, the last 256 values correspond to the top 8 bits of the 15 * bit distances. */ local uch _length_code[MAX_MATCH-MIN_MATCH+1]; /* length code for each normalized match length (0 == MIN_MATCH) */ local int base_length[LENGTH_CODES]; /* First normalized length for each code (0 = MIN_MATCH) */ local int base_dist[D_CODES]; /* First normalized distance for each code (0 = distance of 1) */ struct static_tree_desc_s { const ct_data *static_tree; /* static tree or NULL */ const intf *extra_bits; /* extra bits for each code or NULL */ int extra_base; /* base index for extra_bits */ int elems; /* max number of elements in the tree */ int max_length; /* max bit length for the codes */ }; local static_tree_desc static_l_desc = { static_ltree, extra_lbits, LITERALS+1, L_CODES, MAX_BITS}; local static_tree_desc static_d_desc = { static_dtree, extra_dbits, 0, D_CODES, MAX_BITS}; local static_tree_desc static_bl_desc = { (const ct_data *)0, extra_blbits, 0, BL_CODES, MAX_BL_BITS}; /* * =========================================================================== * Local (static) routines in this file. 
*/
local void tr_static_init OF((void));
local void init_block OF((deflate_state *s));
local void pqdownheap OF((deflate_state *s, ct_data *tree, int k));
local void gen_bitlen OF((deflate_state *s, tree_desc *desc));
local void gen_codes OF((ct_data *tree, int max_code, ushf *bl_count));
local void build_tree OF((deflate_state *s, tree_desc *desc));
local void scan_tree OF((deflate_state *s, ct_data *tree, int max_code));
local void send_tree OF((deflate_state *s, ct_data *tree, int max_code));
local int build_bl_tree OF((deflate_state *s));
local void send_all_trees OF((deflate_state *s, int lcodes, int dcodes,
    int blcodes));
local void compress_block OF((deflate_state *s, ct_data *ltree,
    ct_data *dtree));
local void set_data_type OF((deflate_state *s));
local unsigned bi_reverse OF((unsigned value, int length));
local void bi_windup OF((deflate_state *s));
local void bi_flush OF((deflate_state *s));
local void copy_block OF((deflate_state *s, charf *buf, unsigned len,
    int header));

#ifndef DEBUG_ZLIB
#define	send_code(s, c, tree) send_bits(s, tree[c].Code, tree[c].Len)
/* Send a code of the given tree. c and tree must not have side effects */

#else /* DEBUG_ZLIB */
#define	send_code(s, c, tree) \
	{ if (z_verbose > 2) fprintf(stderr, "\ncd %3d ", (c)); \
	send_bits(s, tree[c].Code, tree[c].Len); }
#endif

/*
 * ===========================================================================
 * Output a short LSB first on the stream.
 * IN assertion: there is enough room in pendingBuf.
 */
#define	put_short(s, w) { \
	put_byte(s, (uch)((w) & 0xff)); \
	put_byte(s, (uch)((ush)(w) >> 8)); \
}

/*
 * ===========================================================================
 * Send a value on a given number of bits.
 * IN assertion: length <= 16 and value fits in length bits.
*/ #ifdef DEBUG_ZLIB local void send_bits OF((deflate_state *s, int value, int length)); local void send_bits(s, value, length) deflate_state *s; int value; /* value to send */ int length; /* number of bits */ { Tracevv((stderr, " l %2d v %4x ", length, value)); Assert(length > 0 && length <= 15, "invalid length"); s->bits_sent += (ulg)length; /* * If not enough room in bi_buf, use (valid) bits from bi_buf * and (16 - bi_valid) bits from value, leaving (width - * (16-bi_valid)) unused bits in value. */ if (s->bi_valid > (int)Buf_size - length) { s->bi_buf |= (value << s->bi_valid); put_short(s, s->bi_buf); s->bi_buf = (ush)value >> (Buf_size - s->bi_valid); s->bi_valid += length - Buf_size; } else { s->bi_buf |= value << s->bi_valid; s->bi_valid += length; } } #else /* !DEBUG_ZLIB */ #define send_bits(s, value, length) \ { int len = length; \ if (s->bi_valid > (int)Buf_size - len) {\ int val = value; \ s->bi_buf |= (val << s->bi_valid); \ put_short(s, s->bi_buf); \ s->bi_buf = (ush)val >> (Buf_size - s->bi_valid); \ s->bi_valid += len - Buf_size; \ } else {\ s->bi_buf |= (value) << s->bi_valid; \ s->bi_valid += len; \ }\ } #endif /* DEBUG_ZLIB */ #define MAX(a, b) (a >= b ? a : b) /* the arguments must not have side effects */ /* * =========================================================================== * Initialize the various 'constant' tables. In a multi-threaded environment, * this function may be called by two threads concurrently, but this is * harmless since both invocations do exactly the same thing. 
*/
local void
tr_static_init()
{
	static int static_init_done = 0;
	int n;		/* iterates over tree elements */
	int bits;	/* bit counter */
	int length;	/* length value */
	int code;	/* code value */
	int dist;	/* distance index */
	ush bl_count[MAX_BITS+1];
	/* number of codes at each bit length for an optimal tree */

	if (static_init_done)
		return;

	/* For some embedded targets, global variables are not initialized: */
	static_l_desc.static_tree = static_ltree;
	static_l_desc.extra_bits = extra_lbits;
	static_d_desc.static_tree = static_dtree;
	static_d_desc.extra_bits = extra_dbits;
	static_bl_desc.extra_bits = extra_blbits;

	/*
	 * Initialize the mapping length (0..255) -> length code (0..28).
	 * (Reconstructed: the inner loop below had been garbled by
	 * text extraction; this is the canonical table-fill.)
	 */
	length = 0;
	for (code = 0; code < LENGTH_CODES-1; code++) {
		base_length[code] = length;
		for (n = 0; n < (1<<extra_lbits[code]); n++) {
			_length_code[length++] = (uch)code;
		}
	}
	Assert(length == 256, "tr_static_init: length != 256");
	/*
	 * Note that the length 255 (match length 258) can be represented
	 * in two different ways: code 284 + 5 bits or code 285, so we
	 * overwrite length_code[255] to use the best encoding:
	 */
	_length_code[length-1] = (uch)code;

	/* Initialize the mapping dist (0..32K) -> dist code (0..29) */
	dist = 0;
	for (code = 0; code < 16; code++) {
		base_dist[code] = dist;
		for (n = 0; n < (1<<extra_dbits[code]); n++) {
			_dist_code[dist++] = (uch)code;
		}
	}
	Assert(dist == 256, "tr_static_init: dist != 256");
	dist >>= 7; /* from now on, all distances are divided by 128 */
	for (; code < D_CODES; code++) {
		base_dist[code] = dist << 7;
		for (n = 0; n < (1<<(extra_dbits[code]-7)); n++) {
			_dist_code[256 + dist++] = (uch)code;
		}
	}
	Assert(dist == 256, "tr_static_init: 256+dist != 512");

	/* Construct the codes of the static literal tree */
	for (bits = 0; bits <= MAX_BITS; bits++)
		bl_count[bits] = 0;
	n = 0;
	while (n <= 143) static_ltree[n++].Len = 8, bl_count[8]++;
	while (n <= 255) static_ltree[n++].Len = 9, bl_count[9]++;
	while (n <= 279) static_ltree[n++].Len = 7, bl_count[7]++;
	while (n <= 287) static_ltree[n++].Len = 8, bl_count[8]++;
	/*
	 * Codes 286 and 287 do not exist, but we must include them in the
	 * tree construction to get a canonical Huffman tree (longest code
	 * all ones)
	 */
	gen_codes((ct_data *)static_ltree, L_CODES+1, bl_count);

	/* The static distance tree is trivial: */
	for (n = 0; n < D_CODES; n++) {
		static_dtree[n].Len = 5;
		static_dtree[n].Code = bi_reverse((unsigned)n, 5);
	}
	static_init_done = 1;
}

/*
 * ===========================================================================
 *
Initialize the tree data structures for a new zlib stream.
 */
void
_tr_init(s)
	deflate_state *s;
{
	tr_static_init();

	/* Hook the per-stream dynamic trees up to their static descriptors */
	s->l_desc.dyn_tree = s->dyn_ltree;
	s->l_desc.stat_desc = &static_l_desc;

	s->d_desc.dyn_tree = s->dyn_dtree;
	s->d_desc.stat_desc = &static_d_desc;

	s->bl_desc.dyn_tree = s->bl_tree;
	s->bl_desc.stat_desc = &static_bl_desc;

	s->bi_buf = 0;
	s->bi_valid = 0;
	s->last_eob_len = 8; /* enough lookahead for inflate */
	s->compressed_len = 0L; /* PPP */
#ifdef DEBUG_ZLIB
	s->bits_sent = 0L;
#endif

	/* Initialize the first block of the first file: */
	init_block(s);
}

/*
 * ===========================================================================
 * Initialize a new block.
 */
local void
init_block(s)
	deflate_state *s;
{
	int n; /* iterates over tree elements */

	/* Initialize the trees. */
	for (n = 0; n < L_CODES; n++) s->dyn_ltree[n].Freq = 0;
	for (n = 0; n < D_CODES; n++) s->dyn_dtree[n].Freq = 0;
	for (n = 0; n < BL_CODES; n++) s->bl_tree[n].Freq = 0;

	/* every block ends with an end-of-block code */
	s->dyn_ltree[END_BLOCK].Freq = 1;
	s->opt_len = s->static_len = 0L;
	s->last_lit = s->matches = 0;
}

#define	SMALLEST 1
/* Index within the heap array of least frequent node in the Huffman tree */

/*
 * ===========================================================================
 * Remove the smallest element from the heap and recreate the heap with
 * one less element. Updates heap and heap_len.
 */
#define	pqremove(s, tree, top) \
{\
	top = s->heap[SMALLEST]; \
	s->heap[SMALLEST] = s->heap[s->heap_len--]; \
	pqdownheap(s, tree, SMALLEST); \
}

/*
 * ===========================================================================
 * Compares two subtrees, using the tree depth as tie breaker when
 * the subtrees have equal frequency. This minimizes the worst case length.
*/
#define	smaller(tree, n, m, depth) \
	(tree[n].Freq < tree[m].Freq || \
	(tree[n].Freq == tree[m].Freq && depth[n] <= depth[m]))

/*
 * ===========================================================================
 * Restore the heap property by moving down the tree starting at node k,
 * exchanging a node with the smallest of its two sons if necessary, stopping
 * when the heap property is re-established (each father smaller than its
 * two sons).
 */
local void
pqdownheap(s, tree, k)
	deflate_state *s;
	ct_data *tree;	/* the tree to restore */
	int k;		/* node to move down */
{
	int v = s->heap[k];
	int j = k << 1; /* left son of k */

	while (j <= s->heap_len) {
		/* Set j to the smallest of the two sons: */
		if (j < s->heap_len &&
		    smaller(tree, s->heap[j+1], s->heap[j], s->depth)) {
			j++;
		}
		/* Exit if v is smaller than both sons */
		if (smaller(tree, v, s->heap[j], s->depth))
			break;

		/* Exchange v with the smallest son */
		s->heap[k] = s->heap[j];
		k = j;

		/* And continue down the tree, setting j to the left son of k */
		j <<= 1;
	}
	s->heap[k] = v;
}

/*
 * ===========================================================================
 * Compute the optimal bit lengths for a tree and update the total bit length
 * for the current block.
 * IN assertion: the fields freq and dad are set, heap[heap_max] and
 * above are the tree nodes sorted by increasing frequency.
 * OUT assertions: the field len is set to the optimal bit length, the
 * array bl_count contains the frequencies for each bit length.
 * The length opt_len is updated; static_len is also updated if stree is
 * not null.
*/ local void gen_bitlen(s, desc) deflate_state *s; tree_desc *desc; /* the tree descriptor */ { ct_data *tree = desc->dyn_tree; int max_code = desc->max_code; const ct_data *stree = desc->stat_desc->static_tree; const intf *extra = desc->stat_desc->extra_bits; int base = desc->stat_desc->extra_base; int max_length = desc->stat_desc->max_length; int h; /* heap index */ int n, m; /* iterate over the tree elements */ int bits; /* bit length */ int xbits; /* extra bits */ ush f; /* frequency */ /* number of elements with bit length too large */ int overflow = 0; for (bits = 0; bits <= MAX_BITS; bits++) s->bl_count[bits] = 0; /* * In a first pass, compute the optimal bit lengths (which may * overflow in the case of the bit length tree). */ tree[s->heap[s->heap_max]].Len = 0; /* root of the heap */ for (h = s->heap_max+1; h < HEAP_SIZE; h++) { n = s->heap[h]; bits = tree[tree[n].Dad].Len + 1; if (bits > max_length) bits = max_length, overflow++; tree[n].Len = (ush)bits; /* We overwrite tree[n].Dad which is no longer needed */ if (n > max_code) continue; /* not a leaf node */ s->bl_count[bits]++; xbits = 0; if (n >= base) xbits = extra[n-base]; f = tree[n].Freq; s->opt_len += (ulg)f * (bits + xbits); if (stree) s->static_len += (ulg)f * (stree[n].Len + xbits); } if (overflow == 0) return; Trace((stderr, "\nbit length overflow\n")); /* This happens for example on obj2 and pic of the Calgary corpus */ /* Find the first bit length which could increase: */ do { bits = max_length-1; while (s->bl_count[bits] == 0) bits--; s->bl_count[bits]--; /* move one leaf down the tree */ /* move one overflow item as its brother */ s->bl_count[bits+1] += 2; s->bl_count[max_length]--; /* * The brother of the overflow item also moves one * step up, but this does not affect * bl_count[max_length] */ overflow -= 2; } while (overflow > 0); /* * Now recompute all bit lengths, scanning in increasing * frequency. h is still equal to HEAP_SIZE. 
(It is simpler * to reconstruct all lengths instead of fixing only the wrong * ones. This idea is taken from 'ar' written by Haruhiko * Okumura.) */ for (bits = max_length; bits != 0; bits--) { n = s->bl_count[bits]; while (n != 0) { m = s->heap[--h]; if (m > max_code) continue; if (tree[m].Len != (unsigned)bits) { Trace((stderr, "code %d bits %d->%d\n", m, tree[m].Len, bits)); s->opt_len += ((long)bits - (long)tree[m].Len) *(long)tree[m].Freq; tree[m].Len = (ush)bits; } n--; } } } /* * =========================================================================== * Generate the codes for a given tree and bit counts (which need not be * optimal). * IN assertion: the array bl_count contains the bit length statistics for * the given tree and the field len is set for all tree elements. * OUT assertion: the field code is set for all tree elements of non * zero code length. */ local void gen_codes(tree, max_code, bl_count) ct_data *tree; /* the tree to decorate */ int max_code; /* largest code with non zero frequency */ ushf *bl_count; /* number of codes at each bit length */ { /* next code value for each bit length */ ush next_code[MAX_BITS+1]; ush code = 0; /* running code value */ int bits; /* bit index */ int n; /* code index */ /* * The distribution counts are first used to generate the code * values without bit reversal. */ for (bits = 1; bits <= MAX_BITS; bits++) { next_code[bits] = code = (code + bl_count[bits-1]) << 1; } /* * Check that the bit counts in bl_count are consistent. The * last code must be all ones. */ Assert(code + bl_count[MAX_BITS]-1 == (1<dyn_tree; const ct_data *stree = desc->stat_desc->static_tree; int elems = desc->stat_desc->elems; int n, m; /* iterate over heap elements */ int max_code = -1; /* largest code with non zero frequency */ int node; /* new node being created */ /* * Construct the initial heap, with least frequent element in * heap[SMALLEST]. The sons of heap[n] are heap[2*n] and * heap[2*n+1]. heap[0] is not used. 
*/ s->heap_len = 0, s->heap_max = HEAP_SIZE; for (n = 0; n < elems; n++) { if (tree[n].Freq != 0) { s->heap[++(s->heap_len)] = max_code = n; s->depth[n] = 0; } else { tree[n].Len = 0; } } /* * The pkzip format requires that at least one distance code * exists, and that at least one bit should be sent even if * there is only one possible code. So to avoid special checks * later on we force at least two codes of non zero frequency. */ while (s->heap_len < 2) { node = s->heap[++(s->heap_len)] = (max_code < 2 ? ++max_code : 0); tree[node].Freq = 1; s->depth[node] = 0; s->opt_len--; if (stree) s->static_len -= stree[node].Len; /* node is 0 or 1 so it does not have extra bits */ } desc->max_code = max_code; /* * The elements heap[heap_len/2+1 .. heap_len] are leaves of * the tree, establish sub-heaps of increasing lengths: */ for (n = s->heap_len/2; n >= 1; n--) pqdownheap(s, tree, n); /* * Construct the Huffman tree by repeatedly combining the * least two frequent nodes. */ node = elems; /* next internal node of the tree */ do { pqremove(s, tree, n); /* n = node of least frequency */ m = s->heap[SMALLEST]; /* m = node of next least frequency */ /* keep the nodes sorted by frequency */ s->heap[--(s->heap_max)] = n; s->heap[--(s->heap_max)] = m; /* Create a new node father of n and m */ tree[node].Freq = tree[n].Freq + tree[m].Freq; s->depth[node] = (uch) (MAX(s->depth[n], s->depth[m]) + 1); tree[n].Dad = tree[m].Dad = (ush)node; #ifdef DUMP_BL_TREE if (tree == s->bl_tree) { fprintf(stderr, "\nnode %d(%d), sons %d(%d) %d(%d)", node, tree[node].Freq, n, tree[n].Freq, m, tree[m].Freq); } #endif /* and insert the new node in the heap */ s->heap[SMALLEST] = node++; pqdownheap(s, tree, SMALLEST); } while (s->heap_len >= 2); s->heap[--(s->heap_max)] = s->heap[SMALLEST]; /* * At this point, the fields freq and dad are set. We can now * generate the bit lengths. 
*/ gen_bitlen(s, (tree_desc *)desc); /* The field len is now set, we can generate the bit codes */ gen_codes((ct_data *)tree, max_code, s->bl_count); } /* * =========================================================================== * Scan a literal or distance tree to determine the frequencies of the codes * in the bit length tree. */ local void scan_tree(s, tree, max_code) deflate_state *s; ct_data *tree; /* the tree to be scanned */ int max_code; /* and its largest code of non zero frequency */ { int n; /* iterates over all tree elements */ int prevlen = -1; /* last emitted length */ int curlen; /* length of current code */ int nextlen = tree[0].Len; /* length of next code */ int count = 0; /* repeat count of the current code */ int max_count = 7; /* max repeat count */ int min_count = 4; /* min repeat count */ if (nextlen == 0) max_count = 138, min_count = 3; tree[max_code+1].Len = (ush)0xffff; /* guard */ for (n = 0; n <= max_code; n++) { curlen = nextlen; nextlen = tree[n+1].Len; if (++count < max_count && curlen == nextlen) { continue; } else if (count < min_count) { s->bl_tree[curlen].Freq += count; } else if (curlen != 0) { if (curlen != prevlen) s->bl_tree[curlen].Freq++; s->bl_tree[REP_3_6].Freq++; } else if (count <= 10) { s->bl_tree[REPZ_3_10].Freq++; } else { s->bl_tree[REPZ_11_138].Freq++; } count = 0; prevlen = curlen; if (nextlen == 0) { max_count = 138, min_count = 3; } else if (curlen == nextlen) { max_count = 6, min_count = 3; } else { max_count = 7, min_count = 4; } } } /* * =========================================================================== * Send a literal or distance tree in compressed form, using the codes in * bl_tree. 
*/
local void
send_tree(s, tree, max_code)
	deflate_state *s;
	ct_data *tree;	/* the tree to be scanned */
	int max_code;	/* and its largest code of non zero frequency */
{
	int n;			/* iterates over all tree elements */
	int prevlen = -1;	/* last emitted length */
	int curlen;		/* length of current code */
	int nextlen = tree[0].Len; /* length of next code */
	int count = 0;		/* repeat count of the current code */
	int max_count = 7;	/* max repeat count */
	int min_count = 4;	/* min repeat count */

	/* tree[max_code+1].Len = -1; */ /* guard already set */
	if (nextlen == 0)
		max_count = 138, min_count = 3;

	for (n = 0; n <= max_code; n++) {
		curlen = nextlen;
		nextlen = tree[n+1].Len;
		if (++count < max_count && curlen == nextlen) {
			continue;
		} else if (count < min_count) {
			/* too short a run: emit each length literally */
			do {
				send_code(s, curlen, s->bl_tree);
			} while (--count != 0);
		} else if (curlen != 0) {
			if (curlen != prevlen) {
				send_code(s, curlen, s->bl_tree);
				count--;
			}
			Assert(count >= 3 && count <= 6, " 3_6?");
			send_code(s, REP_3_6, s->bl_tree);
			send_bits(s, count-3, 2);
		} else if (count <= 10) {
			send_code(s, REPZ_3_10, s->bl_tree);
			send_bits(s, count-3, 3);
		} else {
			send_code(s, REPZ_11_138, s->bl_tree);
			send_bits(s, count-11, 7);
		}
		count = 0;
		prevlen = curlen;
		if (nextlen == 0) {
			max_count = 138, min_count = 3;
		} else if (curlen == nextlen) {
			max_count = 6, min_count = 3;
		} else {
			max_count = 7, min_count = 4;
		}
	}
}

/*
 * ===========================================================================
 * Construct the Huffman tree for the bit lengths and return the index in
 * bl_order of the last bit length code to send.
*/
local int
build_bl_tree(s)
	deflate_state *s;
{
	/* index of last bit length code of non zero freq */
	int max_blindex;

	/*
	 * Determine the bit length frequencies for literal and
	 * distance trees
	 */
	scan_tree(s, (ct_data *)s->dyn_ltree, s->l_desc.max_code);
	scan_tree(s, (ct_data *)s->dyn_dtree, s->d_desc.max_code);

	/* Build the bit length tree: */
	build_tree(s, (tree_desc *)(&(s->bl_desc)));
	/*
	 * opt_len now includes the length of the tree
	 * representations, except the lengths of the bit lengths
	 * codes and the 5+5+4 bits for the counts.
	 */

	/*
	 * Determine the number of bit length codes to send. The pkzip
	 * format requires that at least 4 bit length codes be
	 * sent. (appnote.txt says 3 but the actual value used is 4.)
	 */
	for (max_blindex = BL_CODES-1; max_blindex >= 3; max_blindex--) {
		if (s->bl_tree[bl_order[max_blindex]].Len != 0)
			break;
	}
	/* Update opt_len to include the bit length tree and counts */
	s->opt_len += 3*(max_blindex+1) + 5+5+4;
	Tracev((stderr, "\ndyn trees: dyn %ld, stat %ld",
	    s->opt_len, s->static_len));

	return (max_blindex);
}

/*
 * ===========================================================================
 * Send the header for a block using dynamic Huffman trees: the counts, the
 * lengths of the bit length codes, the literal tree and the distance tree.
 * IN assertion: lcodes >= 257, dcodes >= 1, blcodes >= 4.
*/
local void
send_all_trees(s, lcodes, dcodes, blcodes)
	deflate_state *s;
	int lcodes, dcodes, blcodes; /* number of codes for each tree */
{
	int rank;	/* index in bl_order */

	Assert(lcodes >= 257 && dcodes >= 1 && blcodes >= 4,
	    "not enough codes");
	Assert(lcodes <= L_CODES && dcodes <= D_CODES && blcodes <= BL_CODES,
	    "too many codes");
	Tracev((stderr, "\nbl counts: "));
	send_bits(s, lcodes-257, 5); /* not +255 as stated in appnote.txt */
	send_bits(s, dcodes-1, 5);
	send_bits(s, blcodes-4, 4); /* not -3 as stated in appnote.txt */
	/* bit length code lengths, in bl_order, 3 bits each */
	for (rank = 0; rank < blcodes; rank++) {
		Tracev((stderr, "\nbl code %2d ", bl_order[rank]));
		send_bits(s, s->bl_tree[bl_order[rank]].Len, 3);
	}
#ifdef DEBUG_ZLIB
	Tracev((stderr, "\nbl tree: sent %ld", s->bits_sent));
#endif

	/* literal tree */
	send_tree(s, (ct_data *)s->dyn_ltree, lcodes-1);
#ifdef DEBUG_ZLIB
	Tracev((stderr, "\nlit tree: sent %ld", s->bits_sent));
#endif

	/* distance tree */
	send_tree(s, (ct_data *)s->dyn_dtree, dcodes-1);
#ifdef DEBUG_ZLIB
	Tracev((stderr, "\ndist tree: sent %ld", s->bits_sent));
#endif
}

/*
 * ===========================================================================
 * Send a stored block
 */
void
_tr_stored_block(s, buf, stored_len, eof)
	deflate_state *s;
	charf *buf;	/* input block */
	ulg stored_len;	/* length of input block */
	int eof;	/* true if this is the last block for a file */
{
	send_bits(s, (STORED_BLOCK<<1)+eof, 3); /* send block type */
	s->compressed_len = (s->compressed_len + 3 + 7) & (ulg)~7L; /* PPP */
	s->compressed_len += (stored_len + 4) << 3; /* PPP */

	copy_block(s, buf, (unsigned)stored_len, 1); /* with header */
}

/*
 * Send just the `stored block' type code without any length bytes or data.
* ---PPP---
 */
void
_tr_stored_type_only(s)
	deflate_state *s;
{
	send_bits(s, (STORED_BLOCK << 1), 3);
	bi_windup(s);
	s->compressed_len = (s->compressed_len + 3) & ~7L; /* PPP */
}

/*
 * ===========================================================================
 * Send one empty static block to give enough lookahead for inflate.
 * This takes 10 bits, of which 7 may remain in the bit buffer.
 * The current inflate code requires 9 bits of lookahead. If the
 * last two codes for the previous block (real code plus EOB) were coded
 * on 5 bits or less, inflate may have only 5+3 bits of lookahead to decode
 * the last real code. In this case we send two empty static blocks instead
 * of one. (There are no problems if the previous block is stored or fixed.)
 * To simplify the code, we assume the worst case of last real code encoded
 * on one bit only.
 */
void
_tr_align(s)
	deflate_state *s;
{
	send_bits(s, STATIC_TREES<<1, 3);
	send_code(s, END_BLOCK, static_ltree);
	s->compressed_len += 10L; /* 3 for block type, 7 for EOB */
	bi_flush(s);
	/*
	 * Of the 10 bits for the empty block, we have already sent
	 * (10 - bi_valid) bits. The lookahead for the last real code
	 * (before the EOB of the previous block) was thus at least
	 * one plus the length of the EOB plus what we have just sent
	 * of the empty static block.
	 */
	if (1 + s->last_eob_len + 10 - s->bi_valid < 9) {
		send_bits(s, STATIC_TREES<<1, 3);
		send_code(s, END_BLOCK, static_ltree);
		s->compressed_len += 10L;
		bi_flush(s);
	}
	s->last_eob_len = 7;
}

/*
 * ===========================================================================
 * Determine the best encoding for the current block: dynamic trees, static
 * trees or store, and output the encoded block to the zip file.
*/
void
_tr_flush_block(s, buf, stored_len, eof)
	deflate_state *s;
	charf *buf;	/* input block, or NULL if too old */
	ulg stored_len;	/* length of input block */
	int eof;	/* true if this is the last block for a file */
{
	ulg opt_lenb, static_lenb; /* opt_len and static_len in bytes */
	/* index of last bit length code of non zero freq */
	int max_blindex = 0;

	/* Build the Huffman trees unless a stored block is forced */
	if (s->level > 0) {
		/* Check if the file is ascii or binary */
		if (s->data_type == Z_UNKNOWN)
			set_data_type(s);

		/* Construct the literal and distance trees */
		build_tree(s, (tree_desc *)(&(s->l_desc)));
		Tracev((stderr, "\nlit data: dyn %ld, stat %ld",
		    s->opt_len, s->static_len));

		build_tree(s, (tree_desc *)(&(s->d_desc)));
		Tracev((stderr, "\ndist data: dyn %ld, stat %ld",
		    s->opt_len, s->static_len));
		/*
		 * At this point, opt_len and static_len are the total
		 * bit lengths of the compressed block data, excluding
		 * the tree representations.
		 */

		/*
		 * Build the bit length tree for the above two trees,
		 * and get the index in bl_order of the last bit
		 * length code to send.
		 */
		max_blindex = build_bl_tree(s);

		/*
		 * Determine the best encoding. Compute first the
		 * block length in bytes (+3 for the block type bits,
		 * +7 to round up to a whole byte).
		 */
		opt_lenb = (s->opt_len+3+7)>>3;
		static_lenb = (s->static_len+3+7)>>3;

		Tracev((stderr,
		    "\nopt %lu(%lu) stat %lu(%lu) stored %lu lit %u ",
		    opt_lenb, s->opt_len, static_lenb, s->static_len,
		    stored_len, s->last_lit));

		if (static_lenb <= opt_lenb)
			opt_lenb = static_lenb;

	} else {
		Assert(buf != (char *)0, "lost buf");
		/* force a stored block */
		opt_lenb = static_lenb = stored_len + 5;
	}

	/*
	 * If compression failed and this is the first and last block,
	 * and if the .zip file can be seeked (to rewrite the local
	 * header), the whole file is transformed into a stored file:
	 */
#ifdef STORED_FILE_OK
#ifdef FORCE_STORED_FILE
#define	FRC_STR_COND	eof && s->compressed_len == 0L /* force stored file */
#else
#define	FRC_STR_COND	stored_len <= opt_lenb && eof && \
			s->compressed_len == 0L && seekable()
#endif
	if (FRC_STR_COND) {
#undef FRC_STR_COND
		/*
		 * Since LIT_BUFSIZE <= 2*WSIZE, the input data must
		 * be there:
		 */
		if (buf == (charf*)0)
			error("block vanished");

		/* without header */
		copy_block(s, buf, (unsigned)stored_len, 0);
		s->compressed_len = stored_len << 3;
		s->method = STORED;
	} else
#endif /* STORED_FILE_OK */

	/* Otherwise pick the cheapest of stored / static / dynamic: */
#ifdef FORCE_STORED
#define	FRC_STR_COND	buf != (char *)0 /* force stored block */
#else
	/* 4: two words for the lengths */
#define	FRC_STR_COND	stored_len+4 <= opt_lenb && buf != (char *)0
#endif
	if (FRC_STR_COND) {
#undef FRC_STR_COND
		/*
		 * The test buf != NULL is only necessary if
		 * LIT_BUFSIZE > WSIZE. Otherwise we can't
		 * have processed more than WSIZE input bytes
		 * since the last block flush, because
		 * compression would have been successful. If
		 * LIT_BUFSIZE <= WSIZE, it is never too late
		 * to transform a block into a stored block.
		 */
		_tr_stored_block(s, buf, stored_len, eof);

#ifdef FORCE_STATIC
#define	FRC_STAT_COND	static_lenb >= 0 /* force static trees */
#else
#define	FRC_STAT_COND	static_lenb == opt_lenb
#endif
	} else if (FRC_STAT_COND) {
#undef FRC_STAT_COND
		send_bits(s, (STATIC_TREES<<1)+eof, 3);
		compress_block(s, (ct_data *)static_ltree,
		    (ct_data *)static_dtree);
		s->compressed_len += 3 + s->static_len; /* PPP */
	} else {
		send_bits(s, (DYN_TREES<<1)+eof, 3);
		send_all_trees(s, s->l_desc.max_code+1, s->d_desc.max_code+1,
		    max_blindex+1);
		compress_block(s, (ct_data *)s->dyn_ltree,
		    (ct_data *)s->dyn_dtree);
		s->compressed_len += 3 + s->opt_len; /* PPP */
	}
#ifdef DEBUG_ZLIB
	Assert(s->compressed_len == s->bits_sent, "bad compressed size");
#endif
	/*
	 * The above check is made mod 2^32, for files larger than 512
	 * MB and uLong implemented on 32 bits.
	 */
	init_block(s);

	if (eof) {
		bi_windup(s);
		s->compressed_len += 7; /* align on byte boundary PPP */
	}
	Tracev((stderr, "\ncomprlen %lu(%lu) ", s->compressed_len>>3,
	    s->compressed_len-7*eof));

	/* return (s->compressed_len >> 3); */
}

/*
 * ===========================================================================
 * Save the match info and tally the frequency counts. Return true if
 * the current block must be flushed.
*/
int
_tr_tally(s, dist, lc)
	deflate_state *s;
	unsigned dist;	/* distance of matched string */
	/* match length-MIN_MATCH or unmatched char (if dist==0) */
	unsigned lc;
{
	/* record the pair in the pending-symbol buffers */
	s->d_buf[s->last_lit] = (ush)dist;
	s->l_buf[s->last_lit++] = (uch)lc;
	if (dist == 0) {
		/* lc is the unmatched char */
		s->dyn_ltree[lc].Freq++;
	} else {
		s->matches++;
		/* Here, lc is the match length - MIN_MATCH */
		dist--; /* dist = match distance - 1 */
		Assert((ush)dist < (ush)MAX_DIST(s) &&
		    (ush)lc <= (ush)(MAX_MATCH-MIN_MATCH) &&
		    (ush)d_code(dist) < (ush)D_CODES, "_tr_tally: bad match");

		s->dyn_ltree[_length_code[lc]+LITERALS+1].Freq++;
		s->dyn_dtree[d_code(dist)].Freq++;
	}

#ifdef TRUNCATE_BLOCK
	/* Try to guess if it is profitable to stop the current block here */
	if ((s->last_lit & 0x1fff) == 0 && s->level > 2) {
		/* Compute an upper bound for the compressed length */
		ulg out_length = (ulg)s->last_lit*8L;
		ulg in_length = (ulg)((long)s->strstart - s->block_start);
		int dcode;
		for (dcode = 0; dcode < D_CODES; dcode++) {
			out_length += (ulg)s->dyn_dtree[dcode].Freq *
			    (5L+extra_dbits[dcode]);
		}
		out_length >>= 3;
		Tracev((stderr, "\nlast_lit %u, in %ld, out ~%ld(%ld%%) ",
		    s->last_lit, in_length, out_length,
		    100L - out_length*100L/in_length));
		if (s->matches < s->last_lit/2 && out_length < in_length/2)
			return (1);
	}
#endif
	return (s->last_lit == s->lit_bufsize-1);
	/*
	 * We avoid equality with lit_bufsize because of wraparound at 64K
	 * on 16 bit machines and because stored blocks are restricted to
	 * 64K-1 bytes.
	 */
}

/*
 * ===========================================================================
 * Send the block data compressed using the given Huffman trees
 */
local void
compress_block(s, ltree, dtree)
	deflate_state *s;
	ct_data *ltree;	/* literal tree */
	ct_data *dtree;	/* distance tree */
{
	unsigned dist;	/* distance of matched string */
	int lc;		/* match length or unmatched char (if dist == 0) */
	unsigned lx = 0; /* running index in l_buf */
	unsigned code;	/* the code to send */
	int extra;	/* number of extra bits to send */

	if (s->last_lit != 0) do {
		dist = s->d_buf[lx];
		lc = s->l_buf[lx++];
		if (dist == 0) {
			/* send a literal byte */
			send_code(s, lc, ltree);
			Tracecv(isgraph(lc), (stderr, " '%c' ", lc));
		} else {
			/* Here, lc is the match length - MIN_MATCH */
			code = _length_code[lc];
			/* send the length code */
			send_code(s, code+LITERALS+1, ltree);
			extra = extra_lbits[code];
			if (extra != 0) {
				lc -= base_length[code];
				/* send the extra length bits */
				send_bits(s, lc, extra);
			}
			/* dist is now the match distance - 1 */
			dist--;
			code = d_code(dist);
			Assert(code < D_CODES, "bad d_code");

			/* send the distance code */
			send_code(s, code, dtree);
			extra = extra_dbits[code];
			if (extra != 0) {
				dist -= base_dist[code];
				/* send the extra distance bits */
				send_bits(s, dist, extra);
			}
		} /* literal or match pair ? */

		/*
		 * Check that the overlay between pending_buf and
		 * d_buf+l_buf is ok:
		 */
		Assert(s->pending < s->lit_bufsize + 2*lx,
		    "pendingBuf overflow");

	} while (lx < s->last_lit);

	send_code(s, END_BLOCK, ltree);
	s->last_eob_len = ltree[END_BLOCK].Len;
}

/*
 * ===========================================================================
 * Set the data type to ASCII or BINARY, using a crude approximation:
 * binary if more than 20% of the bytes are <= 6 or >= 128, ascii otherwise.
 * IN assertion: the fields freq of dyn_ltree are set and the total of all
 * frequencies does not exceed 64K (to fit in an int on 16 bit machines).
*/
local void
set_data_type(s)
	deflate_state *s;
{
	int n = 0;
	unsigned ascii_freq = 0;
	unsigned bin_freq = 0;

	/* literals 0..6 and 128..LITERALS-1 count as binary */
	while (n < 7)		bin_freq += s->dyn_ltree[n++].Freq;
	while (n < 128)		ascii_freq += s->dyn_ltree[n++].Freq;
	while (n < LITERALS)	bin_freq += s->dyn_ltree[n++].Freq;
	s->data_type = (Byte)(bin_freq > (ascii_freq >> 2) ?
	    Z_BINARY : Z_ASCII);
}

/*
 * ===========================================================================
 * Reverse the first len bits of a code, using straightforward code (a faster
 * method would use a table)
 * IN assertion: 1 <= len <= 15
 */
local unsigned
bi_reverse(code, len)
	unsigned code;	/* the value to invert */
	int len;	/* its bit length */
{
	register unsigned res = 0;

	do {
		res |= code & 1;
		code >>= 1, res <<= 1;
	} while (--len > 0);
	return (res >> 1);
}

/*
 * ===========================================================================
 * Flush the bit buffer, keeping at most 7 bits in it.
 */
local void
bi_flush(s)
	deflate_state *s;
{
	if (s->bi_valid == 16) {
		put_short(s, s->bi_buf);
		s->bi_buf = 0;
		s->bi_valid = 0;
	} else if (s->bi_valid >= 8) {
		put_byte(s, (Byte)s->bi_buf);
		s->bi_buf >>= 8;
		s->bi_valid -= 8;
	}
}

/*
 * ===========================================================================
 * Flush the bit buffer and align the output on a byte boundary
 */
local void
bi_windup(s)
	deflate_state *s;
{
	if (s->bi_valid > 8) {
		put_short(s, s->bi_buf);
	} else if (s->bi_valid > 0) {
		put_byte(s, (Byte)s->bi_buf);
	}
	s->bi_buf = 0;
	s->bi_valid = 0;
#ifdef DEBUG_ZLIB
	s->bits_sent = (s->bits_sent+7) & ~7;
#endif
}

/*
 * ===========================================================================
 * Copy a stored block, storing first the length and its
 * one's complement if requested.
*/
local void
copy_block(s, buf, len, header)
	deflate_state *s;
	charf *buf;	/* the input data */
	unsigned len;	/* its length */
	int header;	/* true if block header must be written */
{
	bi_windup(s); /* align on byte boundary */
	s->last_eob_len = 8; /* enough lookahead for inflate */

	if (header) {
		/* stored-block header: LEN and its one's complement NLEN */
		put_short(s, (ush)len);
		put_short(s, (ush)~len);
#ifdef DEBUG_ZLIB
		s->bits_sent += 2*16;
#endif
	}
#ifdef DEBUG_ZLIB
	s->bits_sent += (ulg)len<<3;
#endif
	/* bundle up the put_byte(s, *buf++) calls PPP */
	Assert(s->pending + len < s->pending_buf_size, "pending_buf overrun");
	zmemcpy(&s->pending_buf[s->pending], buf, len); /* PPP */
	s->pending += len; /* PPP */
}
/* --- trees.c */

/* +++ inflate.c */
/*
 * inflate.c -- zlib interface to inflate modules
 * Copyright (C) 1995-1998 Mark Adler
 * For conditions of distribution and use, see copyright notice in zlib.h
 */

/* #include "zutil.h" */

/* +++ infblock.h */
/*
 * infblock.h -- header to use infblock.c
 * Copyright (C) 1995-1998 Mark Adler
 * For conditions of distribution and use, see copyright notice in zlib.h
 */

/*
 * WARNING: this file should *not* be used by applications. It is part
 * of the implementation of the compression library and is subject to
 * change. Applications should only use zlib.h.
*/ struct inflate_blocks_state; typedef struct inflate_blocks_state FAR inflate_blocks_statef; extern inflate_blocks_statef * inflate_blocks_new OF(( z_streamp z, check_func c, /* check function */ uInt w)); /* window size */ extern int inflate_blocks OF(( inflate_blocks_statef *, z_streamp, int)); /* initial return code */ extern void inflate_blocks_reset OF(( inflate_blocks_statef *, z_streamp, uLongf *)); /* check value on output */ extern int inflate_blocks_free OF(( inflate_blocks_statef *, z_streamp)); extern void inflate_set_dictionary OF(( inflate_blocks_statef *s, const Bytef *d, /* dictionary */ uInt n)); /* dictionary length */ extern int inflate_blocks_sync_point OF(( inflate_blocks_statef *s)); /* PPP -- added function */ extern int inflate_addhistory OF(( inflate_blocks_statef *, z_streamp)); /* PPP -- added function */ extern int inflate_packet_flush OF(( inflate_blocks_statef *)); /* --- infblock.h */ #ifndef NO_DUMMY_DECL struct inflate_blocks_state {int dummy; }; /* for buggy compilers */ #endif /* inflate private state */ struct internal_state { /* mode */ enum { METHOD, /* waiting for method byte */ FLAG, /* waiting for flag byte */ DICT4, /* four dictionary check bytes to go */ DICT3, /* three dictionary check bytes to go */ DICT2, /* two dictionary check bytes to go */ DICT1, /* one dictionary check byte to go */ DICT0, /* waiting for inflateSetDictionary */ BLOCKS, /* decompressing blocks */ CHECK4, /* four check bytes to go */ CHECK3, /* three check bytes to go */ CHECK2, /* two check bytes to go */ CHECK1, /* one check byte to go */ DONE, /* finished check, done */ BAD} /* got an error--stay here */ mode; /* current inflate mode */ /* mode dependent information */ union { uInt method; /* if FLAGS, method byte */ struct { uLong was; /* computed check value */ uLong need; /* stream check value */ } check; /* if CHECK, check values to compare */ uInt marker; /* if BAD, inflateSync's marker bytes count */ } sub; /* submode */ /* mode 
independent information */ int nowrap; /* flag for no wrapper */ uInt wbits; /* log2(window size) (8..15, defaults to 15) */ /* current inflate_blocks state */ inflate_blocks_statef *blocks; }; int inflateReset(z) z_streamp z; { if (z == Z_NULL || z->state == Z_NULL) return (Z_STREAM_ERROR); z->total_in = z->total_out = 0; z->msg = Z_NULL; z->state->mode = z->state->nowrap ? BLOCKS : METHOD; inflate_blocks_reset(z->state->blocks, z, Z_NULL); Trace((stderr, "inflate: reset\n")); return (Z_OK); } int inflateEnd(z) z_streamp z; { if (z == Z_NULL || z->state == Z_NULL || z->zfree == Z_NULL) return (Z_STREAM_ERROR); if (z->state->blocks != Z_NULL) { (void) inflate_blocks_free(z->state->blocks, z); z->state->blocks = Z_NULL; } ZFREE(z, z->state); z->state = Z_NULL; Trace((stderr, "inflate: end\n")); return (Z_OK); } int inflateInit2_(z, w, version, stream_size) z_streamp z; int w; const char *version; int stream_size; { if (version == Z_NULL || version[0] != ZLIB_VERSION[0] || stream_size != sizeof (z_stream)) return (Z_VERSION_ERROR); /* initialize state */ if (z == Z_NULL) return (Z_STREAM_ERROR); z->msg = Z_NULL; #ifndef NO_ZCFUNCS if (z->zalloc == Z_NULL) { z->zalloc = zcalloc; z->opaque = (voidpf)0; } if (z->zfree == Z_NULL) z->zfree = zcfree; #endif if ((z->state = (struct internal_state FAR *) ZALLOC(z, 1, sizeof (struct internal_state))) == Z_NULL) return (Z_MEM_ERROR); z->state->blocks = Z_NULL; /* handle undocumented nowrap option (no zlib header or check) */ z->state->nowrap = 0; if (w < 0) { w = - w; z->state->nowrap = 1; } /* set window size */ if (w < 8 || w > 15) { (void) inflateEnd(z); return (Z_STREAM_ERROR); } z->state->wbits = (uInt)w; /* create inflate_blocks state */ if ((z->state->blocks = inflate_blocks_new(z, z->state->nowrap ? 
Z_NULL : adler32, (uInt)1 << w)) == Z_NULL) { (void) inflateEnd(z); return (Z_MEM_ERROR); } Trace((stderr, "inflate: allocated\n")); /* reset state */ (void) inflateReset(z); return (Z_OK); } int inflateInit_(z, version, stream_size) z_streamp z; const char *version; int stream_size; { return (inflateInit2_(z, DEF_WBITS, version, stream_size)); } /* PPP -- added "empty" label and changed f to Z_OK */ #define NEEDBYTE {if (z->avail_in == 0) goto empty; r = Z_OK; } ((void)0) #define NEXTBYTE (z->avail_in--, z->total_in++, *z->next_in++) int inflate(z, f) z_streamp z; int f; { int r; uInt b; if (z == Z_NULL || z->state == Z_NULL || z->next_in == Z_NULL) return (Z_STREAM_ERROR); /* f = f == Z_FINISH ? Z_BUF_ERROR : Z_OK; -- PPP; Z_FINISH unused */ r = Z_BUF_ERROR; /* CONSTCOND */ while (1) switch (z->state->mode) { case METHOD: NEEDBYTE; if (((z->state->sub.method = NEXTBYTE) & 0xf) != Z_DEFLATED) { z->state->mode = BAD; z->msg = "unknown compression method"; /* can't try inflateSync */ z->state->sub.marker = 5; break; } if ((z->state->sub.method >> 4) + 8 > z->state->wbits) { z->state->mode = BAD; z->msg = "invalid window size"; /* can't try inflateSync */ z->state->sub.marker = 5; break; } z->state->mode = FLAG; /* FALLTHRU */ case FLAG: NEEDBYTE; b = NEXTBYTE; if (((z->state->sub.method << 8) + b) % 31) { z->state->mode = BAD; z->msg = "incorrect header check"; /* can't try inflateSync */ z->state->sub.marker = 5; break; } Trace((stderr, "inflate: zlib header ok\n")); if (!(b & PRESET_DICT)) { z->state->mode = BLOCKS; break; } z->state->mode = DICT4; /* FALLTHRU */ case DICT4: NEEDBYTE; z->state->sub.check.need = (uLong)NEXTBYTE << 24; z->state->mode = DICT3; /* FALLTHRU */ case DICT3: NEEDBYTE; z->state->sub.check.need += (uLong)NEXTBYTE << 16; z->state->mode = DICT2; /* FALLTHRU */ case DICT2: NEEDBYTE; z->state->sub.check.need += (uLong)NEXTBYTE << 8; z->state->mode = DICT1; /* FALLTHRU */ case DICT1: NEEDBYTE; z->state->sub.check.need += (uLong)NEXTBYTE; 
z->adler = z->state->sub.check.need; z->state->mode = DICT0; return (Z_NEED_DICT); case DICT0: z->state->mode = BAD; z->msg = "need dictionary"; z->state->sub.marker = 0; /* can try inflateSync */ return (Z_STREAM_ERROR); case BLOCKS: r = inflate_blocks(z->state->blocks, z, r); if (f == Z_PACKET_FLUSH && z->avail_in == 0 && /* PPP */ z->avail_out != 0) /* PPP */ r = inflate_packet_flush(z->state->blocks); /* PPP */ if (r == Z_DATA_ERROR) { z->state->mode = BAD; /* can try inflateSync */ z->state->sub.marker = 0; break; } /* PPP */ if (r != Z_STREAM_END) return (r); r = Z_OK; /* PPP */ inflate_blocks_reset(z->state->blocks, z, &z->state->sub.check.was); if (z->state->nowrap) { z->state->mode = DONE; break; } z->state->mode = CHECK4; /* FALLTHRU */ case CHECK4: NEEDBYTE; z->state->sub.check.need = (uLong)NEXTBYTE << 24; z->state->mode = CHECK3; /* FALLTHRU */ case CHECK3: NEEDBYTE; z->state->sub.check.need += (uLong)NEXTBYTE << 16; z->state->mode = CHECK2; /* FALLTHRU */ case CHECK2: NEEDBYTE; z->state->sub.check.need += (uLong)NEXTBYTE << 8; z->state->mode = CHECK1; /* FALLTHRU */ case CHECK1: NEEDBYTE; z->state->sub.check.need += (uLong)NEXTBYTE; if (z->state->sub.check.was != z->state->sub.check.need) { z->state->mode = BAD; z->msg = "incorrect data check"; /* can't try inflateSync */ z->state->sub.marker = 5; break; } Trace((stderr, "inflate: zlib check ok\n")); z->state->mode = DONE; /* FALLTHRU */ case DONE: return (Z_STREAM_END); case BAD: return (Z_DATA_ERROR); default: return (Z_STREAM_ERROR); } /* PPP -- packet flush handling */ empty: if (f != Z_PACKET_FLUSH) return (r); z->state->mode = BAD; z->msg = "need more for packet flush"; z->state->sub.marker = 0; /* can try inflateSync */ return (Z_DATA_ERROR); } int inflateSetDictionary(z, dictionary, dictLength) z_streamp z; const Bytef *dictionary; uInt dictLength; { uInt length = dictLength; if (z == Z_NULL || z->state == Z_NULL || z->state->mode != DICT0) return (Z_STREAM_ERROR); if (adler32(1L, dictionary, 
dictLength) != z->adler) return (Z_DATA_ERROR); z->adler = 1L; if (length >= ((uInt)1<state->wbits)) { length = (1<state->wbits)-1; dictionary += dictLength - length; } inflate_set_dictionary(z->state->blocks, dictionary, length); z->state->mode = BLOCKS; return (Z_OK); } /* * This subroutine adds the data at next_in/avail_in to the output history * without performing any output. The output buffer must be "caught up"; * i.e. no pending output (hence s->read equals s->write), and the state must * be BLOCKS (i.e. we should be willing to see the start of a series of * BLOCKS). On exit, the output will also be caught up, and the checksum * will have been updated if need be. * * Added for PPP. */ int inflateIncomp(z) z_stream *z; { if (z->state->mode != BLOCKS) return (Z_DATA_ERROR); return (inflate_addhistory(z->state->blocks, z)); } int inflateSync(z) z_streamp z; { uInt n; /* number of bytes to look at */ Bytef *p; /* pointer to bytes */ uInt m; /* number of marker bytes found in a row */ uLong r, w; /* temporaries to save total_in and total_out */ /* set up */ if (z == Z_NULL || z->state == Z_NULL) return (Z_STREAM_ERROR); if (z->state->mode != BAD) { z->state->mode = BAD; z->state->sub.marker = 0; } if ((n = z->avail_in) == 0) return (Z_BUF_ERROR); p = z->next_in; m = z->state->sub.marker; /* search */ while (n && m < 4) { static const Byte mark[4] = { 0, 0, 0xff, 0xff }; if (*p == mark[m]) m++; else if (*p) m = 0; else /* * This statement maps 2->2 and 3->1 because a * mismatch with input byte 0x00 on the first * 0xFF in the pattern means that we still * have two contiguous zeros matched (thus * offset 2 is kept), but a mismatch on the * second 0xFF means that only one 0x00 byte * has been matched. (Boyer-Moore like * search.) 
*/ m = 4 - m; p++, n--; } /* restore */ z->total_in += p - z->next_in; z->next_in = p; z->avail_in = n; z->state->sub.marker = m; /* return no joy or set up to restart on a new block */ if (m != 4) return (Z_DATA_ERROR); r = z->total_in; w = z->total_out; (void) inflateReset(z); z->total_in = r; z->total_out = w; z->state->mode = BLOCKS; return (Z_OK); } /* * Returns true if inflate is currently at the end of a block * generated by Z_SYNC_FLUSH or Z_FULL_FLUSH. This function is used by * one PPP implementation to provide an additional safety check. PPP * uses Z_SYNC_FLUSH but removes the length bytes of the resulting * empty stored block. When decompressing, PPP checks that at the end * of input packet, inflate is waiting for these length bytes. */ int inflateSyncPoint(z) z_streamp z; { if (z == Z_NULL || z->state == Z_NULL || z->state->blocks == Z_NULL) return (Z_STREAM_ERROR); return (inflate_blocks_sync_point(z->state->blocks)); } #undef NEEDBYTE #undef NEXTBYTE /* --- inflate.c */ /* +++ infblock.c */ /* * infblock.c -- interpret and process block types to last block * Copyright (C) 1995-1998 Mark Adler * For conditions of distribution and use, see copyright notice in zlib.h */ /* #include "zutil.h" */ /* #include "infblock.h" */ /* +++ inftrees.h */ /* * inftrees.h -- header to use inftrees.c * Copyright (C) 1995-1998 Mark Adler * For conditions of distribution and use, see copyright notice in zlib.h */ /* * WARNING: this file should *not* be used by applications. It is part * of the implementation of the compression library and is subject to * change. Applications should only use zlib.h. */ /* * Huffman code lookup table entry--this entry is four bytes for * machines that have 16-bit pointers (e.g. PC's in the small or * medium model). 
 */
typedef struct inflate_huft_s FAR inflate_huft;
struct inflate_huft_s {
	union {
		struct {
			Byte Exop;	/* number of extra bits or operation */
			/* number of bits in this code or subcode */
			Byte Bits;
		} what;
		/* pad structure to a power of 2 (4 bytes for */
		Bytef *pad;
	} word;		/* 16-bit, 8 bytes for 32-bit machines) */
	/* literal, length base, distance base, or table offset */
	uInt base;
};

/*
 * Maximum size of dynamic tree. The maximum found in a long but non-
 * exhaustive search was 1004 huft structures (850 for length/literals
 * and 154 for distances, the latter actually the result of an
 * exhaustive search). The actual maximum is not known, but the value
 * below is more than safe.
 */
#define	MANY 1440

extern int inflate_trees_bits OF((
    uIntf *,			/* 19 code lengths */
    uIntf *,			/* bits tree desired/actual depth */
    inflate_huft * FAR *,	/* bits tree result */
    inflate_huft *,		/* space for trees */
    z_streamp));		/* for zalloc, zfree functions */

extern int inflate_trees_dynamic OF((
    uInt,			/* number of literal/length codes */
    uInt,			/* number of distance codes */
    uIntf *,			/* that many (total) code lengths */
    uIntf *,			/* literal desired/actual bit depth */
    uIntf *,			/* distance desired/actual bit depth */
    inflate_huft * FAR *,	/* literal/length tree result */
    inflate_huft * FAR *,	/* distance tree result */
    inflate_huft *,		/* space for trees */
    z_streamp));		/* for zalloc, zfree functions */

extern int inflate_trees_fixed OF((
    uIntf *,			/* literal desired/actual bit depth */
    uIntf *,			/* distance desired/actual bit depth */
    const inflate_huft * FAR *,	/* literal/length tree result */
    const inflate_huft * FAR *,	/* distance tree result */
    z_streamp));
/* --- inftrees.h */

/* +++ infcodes.h */
/*
 * infcodes.h -- header to use infcodes.c
 * Copyright (C) 1995-1998 Mark Adler
 * For conditions of distribution and use, see copyright notice in zlib.h
 */
/*
 * WARNING: this file should *not* be used by applications.
It is part * of the implementation of the compression library and is subject to * change. Applications should only use zlib.h. */ struct inflate_codes_state; typedef struct inflate_codes_state FAR inflate_codes_statef; extern inflate_codes_statef *inflate_codes_new OF(( uInt, uInt, const inflate_huft *, const inflate_huft *, z_streamp)); extern int inflate_codes OF(( inflate_blocks_statef *, z_streamp, int)); extern void inflate_codes_free OF(( inflate_codes_statef *, z_streamp)); /* --- infcodes.h */ /* +++ infutil.h */ /* * infutil.h -- types and macros common to blocks and codes * Copyright (C) 1995-1998 Mark Adler * For conditions of distribution and use, see copyright notice in zlib.h */ /* * WARNING: this file should *not* be used by applications. It is part * of the implementation of the compression library and is subject to * change. Applications should only use zlib.h. */ #ifndef _INFUTIL_H #define _INFUTIL_H typedef enum { TYPE, /* get type bits (3, including end bit) */ LENS, /* get lengths for stored */ STORED, /* processing stored block */ TABLE, /* get table lengths */ BTREE, /* get bit lengths tree for a dynamic block */ DTREE, /* get length, distance trees for a dynamic block */ CODES, /* processing fixed or dynamic block */ DRY, /* output remaining window bytes */ DONEB, /* finished last block, done */ BADB} /* got a data error--stuck here */ inflate_block_mode; /* inflate blocks semi-private state */ struct inflate_blocks_state { /* mode */ inflate_block_mode mode; /* current inflate_block mode */ /* mode dependent information */ union { uInt left; /* if STORED, bytes left to copy */ struct { uInt table; /* table lengths (14 bits) */ uInt index; /* index into blens (or border) */ uIntf *blens; /* bit lengths of codes */ uInt bb; /* bit length tree depth */ inflate_huft *tb; /* bit length decoding tree */ } trees; /* if DTREE, decoding info for trees */ struct { inflate_codes_statef *codes; } decode; /* if CODES, current state */ } sub; /* submode 
*/ uInt last; /* true if this block is the last block */ /* mode independent information */ uInt bitk; /* bits in bit buffer */ uLong bitb; /* bit buffer */ inflate_huft *hufts; /* single malloc for tree space */ Bytef *window; /* sliding window */ Bytef *end; /* one byte after sliding window */ Bytef *read; /* window read pointer */ Bytef *write; /* window write pointer */ check_func checkfn; /* check function */ uLong check; /* check on output */ }; /* defines for inflate input/output */ /* update pointers and return */ #define UPDBITS {s->bitb = b; s->bitk = k; } #define UPDIN {z->avail_in = n; z->total_in += p-z->next_in; z->next_in = p; } #define UPDOUT {s->write = q; } #define UPDATE {UPDBITS UPDIN UPDOUT} #define LEAVE {UPDATE return (inflate_flush(s, z, r)); } /* get bytes and bits */ #define LOADIN {p = z->next_in; n = z->avail_in; b = s->bitb; k = s->bitk; } #define NEEDBYTE { if (n) r = Z_OK; else LEAVE } #define NEXTBYTE (n--, *p++) #define NEEDBITS(j) { while (k < (j)) { NEEDBYTE; b |= ((uLong)NEXTBYTE)<>= (j); k -= (j); } /* output bytes */ #define WAVAIL (uInt)(q < s->read ? 
s->read-q-1 : s->end-q) #define LOADOUT {q = s->write; m = (uInt)WAVAIL; } #define WWRAP {if (q == s->end && s->read != s->window) {q = s->window; \ m = (uInt)WAVAIL; }} #define FLUSH {UPDOUT r = inflate_flush(s, z, r); LOADOUT} #define NEEDOUT {if (m == 0) {WWRAP if (m == 0) { FLUSH WWRAP \ if (m == 0) LEAVE }} r = Z_OK; } #define OUTBYTE(a) {*q++ = (Byte)(a); m--; } /* load local pointers */ #define LOAD {LOADIN LOADOUT} /* masks for lower bits (size given to avoid silly warnings with Visual C++) */ extern uInt inflate_mask[17]; /* copy as much as possible from the sliding window to the output area */ extern int inflate_flush OF(( inflate_blocks_statef *, z_streamp, int)); #ifndef NO_DUMMY_DECL struct internal_state {int dummy; }; /* for buggy compilers */ #endif #endif /* --- infutil.h */ #ifndef NO_DUMMY_DECL struct inflate_codes_state {int dummy; }; /* for buggy compilers */ #endif /* Table for deflate from PKZIP's appnote.txt. */ local const uInt border[] = { /* Order of the bit length code lengths */ 16, 17, 18, 0, 8, 7, 9, 6, 10, 5, 11, 4, 12, 3, 13, 2, 14, 1, 15}; /* * Notes beyond the 1.93a appnote.txt: * * 1. Distance pointers never point before the beginning of the output * stream. * 2. Distance pointers can point back across blocks, up to 32k away. * 3. There is an implied maximum of 7 bits for the bit length table and * 15 bits for the actual data. * 4. If only one code exists, then it is encoded using one bit. (Zero * would be more efficient, but perhaps a little confusing.) If two * codes exist, they are coded using one bit each (0 and 1). * 5. There is no way of sending zero distance codes--a dummy must be * sent if there are none. (History: a pre 2.0 version of PKZIP would * store blocks with no distance codes, but this was discovered to be * too harsh a criterion.) Valid only for 1.93a. 2.04c does allow * zero distance codes, which is sent as one code of zero bits in * length. * 6. There are up to 286 literal/length codes. 
Code 256 represents the * end-of-block. Note however that the static length tree defines * 288 codes just to fill out the Huffman codes. Codes 286 and 287 * cannot be used though, since there is no length base or extra bits * defined for them. Similarily, there are up to 30 distance codes. * However, static trees define 32 codes (all 5 bits) to fill out the * Huffman codes, but the last two had better not show up in the data. * 7. Unzip can check dynamic Huffman blocks for complete code sets. * The exception is that a single code would not be complete (see #4). * 8. The five bits following the block type is really the number of * literal codes sent minus 257. * 9. Length codes 8,16,16 are interpreted as 13 length codes of 8 bits * (1+6+6). Therefore, to output three times the length, you output * three codes (1+1+1), whereas to output four times the same length, * you only need two codes (1+3). Hmm. * 10. In the tree reconstruction algorithm, Code = Code + Increment * only if BitLength(i) is not zero. (Pretty obvious.) * 11. Correction: 4 Bits: #of Bit Length codes - 4 (4 - 19) * 12. Note: length code 284 can represent 227-258, but length code 285 * really is 258. The last length deserves its own, short code * since it gets used a lot in very redundant files. The length * 258 is special since 258 - 3 (the min match length) is 255. * 13. The literal/length and distance code bit lengths are read as a * single stream of lengths. It is possible (and advantageous) for * a repeat code (16, 17, or 18) to go across the boundary between * the two sets of lengths. 
 */

/*
 * Reset the block-decoding state machine. When c is non-NULL the current
 * check value is stored through it first. Frees any allocations a
 * partially decoded block left behind (bit-length array, codes state).
 */
void
inflate_blocks_reset(s, z, c)
	inflate_blocks_statef *s;
	z_streamp z;
	uLongf *c;
{
	if (c != Z_NULL)
		*c = s->check;
	if ((s->mode == BTREE || s->mode == DTREE) &&
	    s->sub.trees.blens != Z_NULL) {
		ZFREE(z, s->sub.trees.blens);
		s->sub.trees.blens = Z_NULL;
	}
	if (s->mode == CODES && s->sub.decode.codes != Z_NULL) {
		(void) inflate_codes_free(s->sub.decode.codes, z);
		s->sub.decode.codes = Z_NULL;
	}
	s->mode = TYPE;
	s->bitk = 0;
	s->bitb = 0;
	/* empty the sliding window */
	s->read = s->write = s->window;
	if (s->checkfn != Z_NULL)
		z->adler = s->check = (*s->checkfn)(0L, Z_NULL, 0);
	Trace((stderr, "inflate: blocks reset\n"));
}

/*
 * Allocate a new inflate-blocks state with a w-byte sliding window and
 * MANY hufts of tree space; c is the running check function (may be
 * Z_NULL). Returns Z_NULL on allocation failure.
 */
inflate_blocks_statef *
inflate_blocks_new(z, c, w)
	z_streamp z;
	check_func c;
	uInt w;
{
	inflate_blocks_statef *s;

	if ((s = (inflate_blocks_statef *)ZALLOC
	    (z, 1, sizeof (struct inflate_blocks_state))) == Z_NULL)
		return (s);
	s->hufts = (inflate_huft *)ZALLOC(z, MANY, sizeof (inflate_huft));
	if (s->hufts == Z_NULL) {
		ZFREE(z, s);
		return (Z_NULL);
	}
	if ((s->window = (Bytef *)ZALLOC(z, 1, w)) == Z_NULL) {
		ZFREE(z, s->hufts);
		ZFREE(z, s);
		return (Z_NULL);
	}
	s->end = s->window + w;
	s->checkfn = c;
	s->mode = TYPE;
	Trace((stderr, "inflate: blocks allocated\n"));
	inflate_blocks_reset(s, z, Z_NULL);
	return (s);
}

/*
 * Decode deflate blocks until input or output is exhausted or the last
 * block completes. Control flow relies on the infutil.h macros: NEEDBITS/
 * NEEDOUT/LEAVE hide returns via inflate_flush, so statement order here
 * is load-bearing.
 */
int
inflate_blocks(s, z, r)
	inflate_blocks_statef *s;
	z_streamp z;
	int r;
{
	uInt t;		/* temporary storage */
	uLong b;	/* bit buffer */
	uInt k;		/* bits in bit buffer */
	Bytef *p;	/* input data pointer */
	uInt n;		/* bytes available there */
	Bytef *q;	/* output window write pointer */
	uInt m;		/* bytes to end of window or read pointer */

	/* copy input/output information to locals (UPDATE macro restores) */
	LOAD;

	/* process input based on current state */
	/* CONSTCOND */
	while (1)
		switch (s->mode) {
		case TYPE:
			NEEDBITS(3);
			t = (uInt)b & 7;
			s->last = t & 1;
			switch (t >> 1) {
			case 0:	/* stored */
				Trace((stderr, "inflate: stored block%s\n",
				    s->last ? " (last)" : ""));
				DUMPBITS(3);
				t = k & 7;	/* go to byte boundary */
				DUMPBITS(t);
				/* get length of stored block */
				s->mode = LENS;
				break;
			case 1:	/* fixed */
				Trace((stderr,
				    "inflate: fixed codes block%s\n",
				    s->last ? " (last)" : ""));
				{
					uInt bl, bd;
					const inflate_huft *tl, *td;

					(void) inflate_trees_fixed(&bl, &bd,
					    &tl, &td, z);
					s->sub.decode.codes =
					    inflate_codes_new(bl, bd,
					    tl, td, z);
					if (s->sub.decode.codes == Z_NULL) {
						r = Z_MEM_ERROR;
						LEAVE
					}
				}
				DUMPBITS(3);
				s->mode = CODES;
				break;
			case 2:	/* dynamic */
				Trace((stderr,
				    "inflate: dynamic codes block%s\n",
				    s->last ? " (last)" : ""));
				DUMPBITS(3);
				s->mode = TABLE;
				break;
			case 3:	/* illegal */
				DUMPBITS(3);
				s->mode = BADB;
				z->msg = "invalid block type";
				r = Z_DATA_ERROR;
				LEAVE
			}
			break;
		case LENS:
			NEEDBITS(32);
			/* NLEN must be the one's complement of LEN */
			if ((((~b) >> 16) & 0xffff) != (b & 0xffff)) {
				s->mode = BADB;
				z->msg = "invalid stored block lengths";
				r = Z_DATA_ERROR;
				LEAVE
			}
			s->sub.left = (uInt)b & 0xffff;
			b = k = 0;	/* dump bits */
			Tracev((stderr, "inflate: stored length %u\n",
			    s->sub.left));
			s->mode = s->sub.left ? STORED :
			    (s->last ? DRY : TYPE);
			break;
		case STORED:
			if (n == 0)
				LEAVE
			NEEDOUT;
			/* copy min(left, input, window room) bytes through */
			t = s->sub.left;
			if (t > n)
				t = n;
			if (t > m)
				t = m;
			zmemcpy(q, p, t);
			p += t;
			n -= t;
			q += t;
			m -= t;
			if ((s->sub.left -= t) != 0)
				break;
			Tracev((stderr,
			    "inflate: stored end, %lu total out\n",
			    z->total_out + (q >= s->read ? q - s->read :
			    (s->end - s->read) + (q - s->window))));
			s->mode = s->last ? DRY : TYPE;
			break;
		case TABLE:
			NEEDBITS(14);
			s->sub.trees.table = t = (uInt)b & 0x3fff;
#ifndef PKZIP_BUG_WORKAROUND
			if ((t & 0x1f) > 29 || ((t >> 5) & 0x1f) > 29) {
				s->mode = BADB;
				z->msg = (char *)
				    "too many length or distance symbols";
				r = Z_DATA_ERROR;
				LEAVE
			}
#endif
			/* 258 literal/length + distance code lengths total */
			t = 258 + (t & 0x1f) + ((t >> 5) & 0x1f);
			/* if (t < 19) t = 19; */
			if ((s->sub.trees.blens = (uIntf*)ZALLOC(z, t,
			    sizeof (uInt))) == Z_NULL) {
				r = Z_MEM_ERROR;
				LEAVE
			}
			DUMPBITS(14);
			s->sub.trees.index = 0;
			Tracev((stderr, "inflate: table sizes ok\n"));
			s->mode = BTREE;
			/* FALLTHRU */
		case BTREE:
			/* read 3-bit code lengths in "border" order */
			while (s->sub.trees.index < 4 +
			    (s->sub.trees.table >> 10)) {
				NEEDBITS(3);
				s->sub.trees.blens[border[
				    s->sub.trees.index++]] = (uInt)b & 7;
				DUMPBITS(3);
			}
			while (s->sub.trees.index < 19)
				s->sub.trees.blens[border[
				    s->sub.trees.index++]] = 0;
			s->sub.trees.bb = 7;
			t = inflate_trees_bits(s->sub.trees.blens,
			    &s->sub.trees.bb, &s->sub.trees.tb,
			    s->hufts, z);
			if (t != Z_OK) {
				ZFREE(z, s->sub.trees.blens);
				s->sub.trees.blens = Z_NULL;
				r = t;
				if (r == Z_DATA_ERROR)
					s->mode = BADB;
				LEAVE
			}
			s->sub.trees.index = 0;
			Tracev((stderr, "inflate: bits tree ok\n"));
			s->mode = DTREE;
			/* FALLTHRU */
		case DTREE:
			/* decode literal/length + distance code lengths */
			while (t = s->sub.trees.table,
			    s->sub.trees.index < 258 + (t & 0x1f) +
			    ((t >> 5) & 0x1f)) {
				inflate_huft *h;
				uInt i, j, c;

				t = s->sub.trees.bb;
				NEEDBITS(t);
				h = s->sub.trees.tb +
				    ((uInt)b & inflate_mask[t]);
				t = h->word.what.Bits;
				c = h->base;
				if (c < 16) {
					/* literal length value */
					DUMPBITS(t);
					s->sub.trees.blens[
					    s->sub.trees.index++] = c;
				} else {
					/* c == 16..18: repeat codes */
					i = c == 18 ? 7 : c - 14;
					j = c == 18 ? 11 : 3;
					NEEDBITS(t + i);
					DUMPBITS(t);
					j += (uInt)b & inflate_mask[i];
					DUMPBITS(i);
					i = s->sub.trees.index;
					t = s->sub.trees.table;
					if (i + j > 258 + (t & 0x1f) +
					    ((t >> 5) & 0x1f) ||
					    (c == 16 && i < 1)) {
						ZFREE(z,
						    s->sub.trees.blens);
						s->sub.trees.blens = Z_NULL;
						s->mode = BADB;
						z->msg =
						    "invalid bit length repeat";
						r = Z_DATA_ERROR;
						LEAVE
					}
					/* 16 repeats previous length */
					c = c == 16 ?
					    s->sub.trees.blens[i - 1] : 0;
					do {
						s->sub.trees.blens[i++] = c;
					} while (--j);
					s->sub.trees.index = i;
				}
			}
			s->sub.trees.tb = Z_NULL;
			{
				uInt bl, bd;
				inflate_huft *tl, *td;
				inflate_codes_statef *c;

				/* must be <= 9 for lookahead assumptions */
				bl = 9;
				/* must be <= 9 for lookahead assumptions */
				bd = 6;
				t = s->sub.trees.table;
				t = inflate_trees_dynamic(257 + (t & 0x1f),
				    1 + ((t >> 5) & 0x1f),
				    s->sub.trees.blens, &bl, &bd, &tl, &td,
				    s->hufts, z);
				ZFREE(z, s->sub.trees.blens);
				s->sub.trees.blens = Z_NULL;
				if (t != Z_OK) {
					if (t == (uInt)Z_DATA_ERROR)
						s->mode = BADB;
					r = t;
					LEAVE
				}
				Tracev((stderr, "inflate: trees ok\n"));
				if ((c = inflate_codes_new(bl, bd, tl, td,
				    z)) == Z_NULL) {
					r = Z_MEM_ERROR;
					LEAVE
				}
				s->sub.decode.codes = c;
			}
			s->mode = CODES;
			/* FALLTHRU */
		case CODES:
			UPDATE;
			if ((r = inflate_codes(s, z, r)) != Z_STREAM_END)
				return (inflate_flush(s, z, r));
			r = Z_OK;
			(void) inflate_codes_free(s->sub.decode.codes, z);
			LOAD;
			Tracev((stderr,
			    "inflate: codes end, %lu total out\n",
			    z->total_out + (q >= s->read ? q - s->read :
			    (s->end - s->read) + (q - s->window))));
			if (!s->last) {
				s->mode = TYPE;
				break;
			}
			s->mode = DRY;
			/* FALLTHRU */
		case DRY:
			/* flush what remains in the window */
			FLUSH;
			if (s->read != s->write)
				LEAVE
			s->mode = DONEB;
			/* FALLTHRU */
		case DONEB:
			r = Z_STREAM_END;
			LEAVE
		case BADB:
			r = Z_DATA_ERROR;
			LEAVE
		default:
			r = Z_STREAM_ERROR;
			LEAVE
		}
	/* NOTREACHED */
	/* otherwise lint complains */
}

/*
 * Free the window, the huft space, and the state itself.
 */
int
inflate_blocks_free(s, z)
	inflate_blocks_statef *s;
	z_streamp z;
{
	inflate_blocks_reset(s, z, Z_NULL);
	ZFREE(z, s->window);
	s->window = Z_NULL;
	ZFREE(z, s->hufts);
	s->hufts = Z_NULL;
	ZFREE(z, s);
	Trace((stderr, "inflate: blocks freed\n"));
	return (Z_OK);
}

/*
 * Preload the sliding window with n bytes of dictionary d.
 */
void
inflate_set_dictionary(s, d, n)
	inflate_blocks_statef *s;
	const Bytef *d;
	uInt n;
{
	Assert(s->window + n <= s->end, "set dict");
	zmemcpy((charf *)s->window, d, n);
	s->read = s->write = s->window + n;
}

/*
 * Returns true if inflate is currently at the end of a block
 * generated by Z_SYNC_FLUSH or Z_FULL_FLUSH.
 * IN assertion: s != Z_NULL
 */
int
inflate_blocks_sync_point(s)
	inflate_blocks_statef *s;
{
	/* LENS: waiting for the length bytes of an (empty) stored block */
	return (s->mode == LENS);
}

/*
 * This subroutine adds the data at next_in/avail_in to the output history
 * without performing any output. The output buffer must be "caught up";
 * i.e. no pending output (hence s->read equals s->write), and the state must
 * be BLOCKS (i.e. we should be willing to see the start of a series of
 * BLOCKS). On exit, the output will also be caught up, and the checksum
 * will have been updated if need be.
 */
int
inflate_addhistory(s, z)
	inflate_blocks_statef *s;
	z_stream *z;
{
	uLong b;	/* bit buffer */	/* NOT USED HERE */
	uInt k;		/* bits in bit buffer */ /* NOT USED HERE */
	uInt t;		/* temporary storage */
	Bytef *p;	/* input data pointer */
	uInt n;		/* bytes available there */
	Bytef *q;	/* output window write pointer */
	uInt m;		/* bytes to end of window or read pointer */

	if (s->read != s->write)
		return (Z_STREAM_ERROR);
	if (s->mode != TYPE)
		return (Z_DATA_ERROR);

	/* we're ready to rock */
	LOAD;
	/*
	 * while there is input ready, copy to output buffer, moving
	 * pointers as needed.
	 */
	while (n) {
		t = n;	/* how many to do */
		/* is there room until end of buffer? */
		if (t > m)
			t = m;
		/* update check information */
		if (s->checkfn != Z_NULL)
			s->check = (*s->checkfn)(s->check, q, t);
		zmemcpy(q, p, t);
		q += t;
		p += t;
		n -= t;
		z->total_out += t;
		s->read = q;	/* drag read pointer forward */
		/* WWRAP */	/* expand WWRAP macro by hand to handle s->read */
		if (q == s->end) {
			s->read = q = s->window;
			m = WAVAIL;
		}
	}
	UPDATE;
	return (Z_OK);
}

/*
 * At the end of a Deflate-compressed PPP packet, we expect to have seen
 * a `stored' block type value but not the (zero) length bytes.
 */
int
inflate_packet_flush(s)
	inflate_blocks_statef *s;
{
	/* only valid while waiting for the stored-block length bytes */
	if (s->mode != LENS)
		return (Z_DATA_ERROR);
	/* pretend the (omitted) zero length bytes were seen */
	s->mode = TYPE;
	return (Z_OK);
}
/* --- infblock.c */

/* +++ inftrees.c */
/*
 * inftrees.c -- generate Huffman trees for efficient decoding
 * Copyright (C) 1995-1998 Mark Adler
 * For conditions of distribution and use, see copyright notice in zlib.h
 */

/* #include "zutil.h" */
/* #include "inftrees.h" */

const char inflate_copyright[] =
	" inflate 1.1.3 Copyright 1995-1998 Mark Adler ";
/*
 * If you use the zlib library in a product, an acknowledgment is
 * welcome in the documentation of your product. If for some reason
 * you cannot include such an acknowledgment, I would appreciate that
 * you keep this copyright string in the executable of your product.
 */

#ifndef NO_DUMMY_DECL
struct internal_state {int dummy; };	/* for buggy compilers */
#endif

/* simplify the use of the inflate_huft type with some defines */
#define	exop word.what.Exop
#define	bits word.what.Bits

local int huft_build OF((
    uIntf *,		/* code lengths in bits */
    uInt,		/* number of codes */
    uInt,		/* number of "simple" codes */
    const uIntf *,	/* list of base values for non-simple codes */
    const uIntf *,	/* list of extra bits for non-simple codes */
    inflate_huft * FAR*,	/* result: starting table */
    uIntf *,		/* maximum lookup bits (returns actual) */
    inflate_huft *hp,	/* space for trees */
    uInt *hn,		/* hufts used in space */
    uIntf *v));		/* working area: values in order of bit length */

/*
 * Tables for deflate from PKZIP's appnote.txt.
 */
local const uInt cplens[31] = { /* Copy lengths for literal codes 257..285 */
	3, 4, 5, 6, 7, 8, 9, 10, 11, 13, 15, 17, 19, 23, 27, 31,
	35, 43, 51, 59, 67, 83, 99, 115, 131, 163, 195, 227, 258, 0, 0};
	/* see note #13 above about 258 */
local const uInt cplext[31] = { /* Extra bits for literal codes 257..285 */
	0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 2, 2, 2, 2,
	3, 3, 3, 3, 4, 4, 4, 4, 5, 5, 5, 5, 0, 112, 112};
	/* 112==invalid */
local const uInt cpdist[30] = { /* Copy offsets for distance codes 0..29 */
	1, 2, 3, 4, 5, 7, 9, 13, 17, 25, 33, 49, 65, 97, 129, 193,
	257, 385, 513, 769, 1025, 1537, 2049, 3073, 4097, 6145,
	8193, 12289, 16385, 24577};
local const uInt cpdext[30] = { /* Extra bits for distance codes */
	0, 0, 0, 0, 1, 1, 2, 2, 3, 3, 4, 4, 5, 5, 6, 6,
	7, 7, 8, 8, 9, 9, 10, 10, 11, 11, 12, 12, 13, 13};

/*
 * Huffman code decoding is performed using a multi-level table
 * lookup. The fastest way to decode is to simply build a lookup
 * table whose size is determined by the longest code. However, the
 * time it takes to build this table can also be a factor if the data
 * being decoded is not very long. The most common codes are
 * necessarily the shortest codes, so those codes dominate the
 * decoding time, and hence the speed. The idea is you can have a
 * shorter table that decodes the shorter, more probable codes, and
 * then point to subsidiary tables for the longer codes. The time it
 * costs to decode the longer codes is then traded against the time it
 * takes to make longer tables.
 *
 * The results of this trade are in the variables lbits and dbits
 * below. lbits is the number of bits the first level table for
 * literal/ length codes can decode in one step, and dbits is the same
 * thing for the distance codes. Subsequent tables are also less than
 * or equal to those sizes.
These values may be adjusted either when * all of the codes are shorter than that, in which case the longest * code length in bits is used, or when the shortest code is *longer* * than the requested table size, in which case the length of the * shortest code in bits is used. * * There are two different values for the two tables, since they code * a different number of possibilities each. The literal/length table * codes 286 possible values, or in a flat code, a little over eight * bits. The distance table codes 30 possible values, or a little * less than five bits, flat. The optimum values for speed end up * being about one bit more than those, so lbits is 8+1 and dbits is * 5+1. The optimum values may differ though from machine to machine, * and possibly even between compilers. Your mileage may vary. */ /* If BMAX needs to be larger than 16, then h and x[] should be uLong. */ #define BMAX 15 /* maximum bit length of any code */ local int huft_build(b, n, s, d, e, t, m, hp, hn, v) uIntf *b; /* code lengths in bits (all assumed <= BMAX) */ uInt n; /* number of codes (assumed <= 288) */ uInt s; /* number of simple-valued codes (0..s-1) */ const uIntf *d; /* list of base values for non-simple codes */ const uIntf *e; /* list of extra bits for non-simple codes */ inflate_huft * FAR *t; /* result: starting table */ uIntf *m; /* maximum lookup bits, returns actual */ inflate_huft *hp; /* space for trees */ uInt *hn; /* hufts used in space */ uIntf *v; /* working area: values in order of bit length */ /* * Given a list of code lengths and a maximum table size, make a set * of tables to decode that set of codes. Return Z_OK on success, * Z_BUF_ERROR if the given code set is incomplete (the tables are * still built in this case), Z_DATA_ERROR if the input is invalid (an * over-subscribed set of lengths), or Z_MEM_ERROR if not enough * memory. 
*/ { uInt a; /* counter for codes of length k */ uInt c[BMAX+1]; /* bit length count table */ uInt f; /* i repeats in table every f entries */ int g; /* maximum code length */ int h; /* table level */ register uInt i; /* counter, current code */ register uInt j; /* counter */ register int k; /* number of bits in current code */ int l; /* bits per table (returned in m) */ register uIntf *p; /* pointer into c[], b[], or v[] */ inflate_huft *q; /* points to current table */ struct inflate_huft_s r; /* table entry for structure assignment */ inflate_huft *u[BMAX]; /* table stack */ uInt mask; /* (1 << w) - 1, to avoid cc -O bug on HP */ register int w; /* bits before this table == (l * h) */ uInt x[BMAX+1]; /* bit offsets, then code stack */ uIntf *xp; /* pointer into x */ int y; /* number of dummy codes added */ uInt z; /* number of entries in current table */ (void) inflate_copyright; /* Generate counts for each bit length */ p = c; #define C0 *p++ = 0; #define C2 C0 C0 C0 C0 #define C4 C2 C2 C2 C2 C4 /* clear c[]--assume BMAX+1 is 16 */ p = b; i = n; do { c[*p++]++; /* assume all entries <= BMAX */ } while (--i); if (c[0] == n) /* null input--all zero length codes */ { *t = (inflate_huft *)Z_NULL; *m = 0; return (Z_OK); } /* Find minimum and maximum length, bound *m by those */ l = *m; for (j = 1; j <= BMAX; j++) if (c[j]) break; k = j; /* minimum code length */ if ((uInt)l < j) l = j; for (i = BMAX; i; i--) if (c[i]) break; g = i; /* maximum code length */ if ((uInt)l > i) l = i; *m = l; /* Adjust last length count to fill out codes, if needed */ for (y = 1 << j; j < i; j++, y <<= 1) if ((y -= c[j]) < 0) return (Z_DATA_ERROR); if ((y -= c[i]) < 0) return (Z_DATA_ERROR); c[i] += y; /* Generate starting offsets into the value table for each length */ x[1] = j = 0; p = c + 1; xp = x + 2; while (--i) { /* note that i == g from above */ *xp++ = (j += *p++); } /* Make a table of values in order of bit lengths */ p = b; i = 0; do { if ((j = *p++) != 0) v[x[j]++] = i; } 
while (++i < n); n = x[g]; /* set n to length of v */ /* Generate the Huffman codes and for each, make the table entries */ x[0] = i = 0; /* first Huffman code is zero */ p = v; /* grab values in bit order */ h = -1; /* no tables yet--level -1 */ w = -l; /* bits decoded == (l * h) */ u[0] = (inflate_huft *)Z_NULL; /* just to keep compilers happy */ q = (inflate_huft *)Z_NULL; /* ditto */ z = 0; /* ditto */ /* go through the bit lengths (k already is bits in shortest code) */ for (; k <= g; k++) { a = c[k]; while (a--) { /* * here i is the Huffman code of length k bits * for value *p. make tables up to required * level. */ while (k > w + l) { h++; w += l; /* previous table always l bits */ /* * compute minimum size table less * than or equal to l bits */ z = g - w; /* table size upper limit */ z = z > (uInt)l ? l : z; /* try a k-w bit table */ if ((f = 1 << (j = k - w)) > a + 1) { /* too few codes for k-w bit table */ /* deduct codes from patterns left */ f -= a + 1; xp = c + k; if (j < z) /* * try smaller tables * up to z bits */ while (++j < z) { /* * enough * codes to * use up j * bits */ if ((f <<= 1) <= *++xp) break; f -= *xp; /* * else deduct * codes from * patterns */ } } /* table entries for j-bit table */ z = 1 << j; /* allocate new table */ /* (note: doesn't matter for fixed) */ /* not enough memory */ if (*hn + z > MANY) return (Z_MEM_ERROR); u[h] = q = hp + *hn; *hn += z; /* connect to last table, if there is one */ if (h) { /* save pattern for backing up */ x[h] = i; /* bits to dump before this table */ r.bits = (Byte)l; /* bits in this table */ r.exop = (Byte)j; j = i >> (w - l); /* offset to this table */ r.base = (uInt)(q - u[h-1] - j); /* connect to last table */ u[h-1][j] = r; } else /* first table is returned result */ *t = q; } /* set up table entry in r */ r.bits = (Byte)(k - w); if (p >= v + n) /* out of values--invalid code */ r.exop = 128 + 64; else if (*p < s) { /* 256 is end-of-block */ r.exop = (Byte)(*p < 256 ? 
0 : 32 + 64); /* simple code is just the value */ r.base = *p++; } else { /* non-simple--look up in lists */ r.exop = (Byte)(e[*p - s] + 16 + 64); r.base = d[*p++ - s]; } /* fill code-like entries with r */ f = 1 << (k - w); for (j = i >> w; j < z; j += f) q[j] = r; /* backwards increment the k-bit code i */ for (j = 1 << (k - 1); i & j; j >>= 1) i ^= j; i ^= j; /* backup over finished tables */ mask = (1 << w) - 1; /* needed on HP, cc -O bug */ while ((i & mask) != x[h]) { h--; /* don't need to update q */ w -= l; mask = (1 << w) - 1; } } } /* Return Z_BUF_ERROR if we were given an incomplete table */ return (y != 0 && g != 1 ? Z_BUF_ERROR : Z_OK); } int inflate_trees_bits(c, bb, tb, hp, z) uIntf *c; /* 19 code lengths */ uIntf *bb; /* bits tree desired/actual depth */ inflate_huft * FAR *tb; /* bits tree result */ inflate_huft *hp; /* space for trees */ z_streamp z; /* for zfree function */ { int r; uInt hn = 0; /* hufts used in space */ uIntf v[19]; /* work area for huft_build */ r = huft_build(c, 19, 19, (uIntf*)Z_NULL, (uIntf*)Z_NULL, tb, bb, hp, &hn, v); if (r == Z_DATA_ERROR) z->msg = "oversubscribed dynamic bit lengths tree"; else if (r == Z_BUF_ERROR || *bb == 0) { z->msg = "incomplete dynamic bit lengths tree"; r = Z_DATA_ERROR; } return (r); } int inflate_trees_dynamic(nl, nd, c, bl, bd, tl, td, hp, z) uInt nl; /* number of literal/length codes */ uInt nd; /* number of distance codes */ uIntf *c; /* that many (total) code lengths */ uIntf *bl; /* literal desired/actual bit depth */ uIntf *bd; /* distance desired/actual bit depth */ inflate_huft * FAR *tl; /* literal/length tree result */ inflate_huft * FAR *td; /* distance tree result */ inflate_huft *hp; /* space for trees */ z_streamp z; /* for zfree function */ { int r; uInt hn = 0; /* hufts used in space */ uIntf v[288]; /* work area for huft_build */ /* build literal/length tree */ r = huft_build(c, nl, 257, cplens, cplext, tl, bl, hp, &hn, v); if (r != Z_OK || *bl == 0) { if (r == Z_DATA_ERROR) 
z->msg = "oversubscribed literal/length tree"; else if (r != Z_MEM_ERROR) { z->msg = "incomplete literal/length tree"; r = Z_DATA_ERROR; } return (r); } /* build distance tree */ r = huft_build(c + nl, nd, 0, cpdist, cpdext, td, bd, hp, &hn, v); if (r != Z_OK || (*bd == 0 && nl > 257)) { if (r == Z_DATA_ERROR) z->msg = "oversubscribed distance tree"; else if (r == Z_BUF_ERROR) { #ifdef PKZIP_BUG_WORKAROUND r = Z_OK; #else z->msg = "incomplete distance tree"; r = Z_DATA_ERROR; } else if (r != Z_MEM_ERROR) { z->msg = "empty distance tree with lengths"; r = Z_DATA_ERROR; #endif } return (r); } /* done */ return (Z_OK); } /* build fixed tables only once--keep them here */ /* #define BUILDFIXED */ #ifdef BUILDFIXED local int fixed_built = 0; #define FIXEDH 544 /* number of hufts used by fixed tables */ local inflate_huft fixed_mem[FIXEDH]; local uInt fixed_bl; local uInt fixed_bd; local inflate_huft *fixed_tl; local inflate_huft *fixed_td; #else #include "inffixed.h" #endif /*ARGSUSED*/ int inflate_trees_fixed(bl, bd, tl, td, z) uIntf *bl; /* literal desired/actual bit depth */ uIntf *bd; /* distance desired/actual bit depth */ const inflate_huft * FAR *tl; /* literal/length tree result */ const inflate_huft * FAR *td; /* distance tree result */ z_streamp z; /* for memory allocation */ { #ifdef BUILDFIXED /* * build fixed tables if not already (multiple overlapped * executions ok) */ if (!fixed_built) { int k; /* temporary variable */ uInt f = 0; /* number of hufts used in fixed_mem */ uIntf *c; /* length list for huft_build */ uIntf *v; /* allocate memory */ if ((c = (uIntf*)ZALLOC(z, 288, sizeof (uInt))) == Z_NULL) return (Z_MEM_ERROR); if ((v = (uIntf*)ZALLOC(z, 288, sizeof (uInt))) == Z_NULL) { ZFREE(z, c); return (Z_MEM_ERROR); } /* literal table */ for (k = 0; k < 144; k++) c[k] = 8; for (; k < 256; k++) c[k] = 9; for (; k < 280; k++) c[k] = 7; for (; k < 288; k++) c[k] = 8; fixed_bl = 9; (void) huft_build(c, 288, 257, cplens, cplext, &fixed_tl, &fixed_bl, 
fixed_mem, &f, v); /* distance table */ for (k = 0; k < 30; k++) c[k] = 5; fixed_bd = 5; (void) huft_build(c, 30, 0, cpdist, cpdext, &fixed_td, &fixed_bd, fixed_mem, &f, v); /* done */ ZFREE(z, v); ZFREE(z, c); fixed_built = 1; } #endif *bl = fixed_bl; *bd = fixed_bd; *tl = fixed_tl; *td = fixed_td; return (Z_OK); } /* --- inftrees.c */ /* +++ infcodes.c */ /* * infcodes.c -- process literals and length/distance pairs * Copyright (C) 1995-1998 Mark Adler * For conditions of distribution and use, see copyright notice in zlib.h */ /* #include "zutil.h" */ /* #include "inftrees.h" */ /* #include "infblock.h" */ /* #include "infcodes.h" */ /* #include "infutil.h" */ /* +++ inffast.h */ /* * inffast.h -- header to use inffast.c * Copyright (C) 1995-1998 Mark Adler * For conditions of distribution and use, see copyright notice in zlib.h */ /* * WARNING: this file should *not* be used by applications. It is part * of the implementation of the compression library and is subject to * change. Applications should only use zlib.h. 
*/ extern int inflate_fast OF(( uInt, uInt, const inflate_huft *, const inflate_huft *, inflate_blocks_statef *, z_streamp)); /* --- inffast.h */ /* simplify the use of the inflate_huft type with some defines */ #define exop word.what.Exop #define bits word.what.Bits /* inflate codes private state */ struct inflate_codes_state { /* mode */ enum { /* waiting for "i:"=input, "o:"=output, "x:"=nothing */ START, /* x: set up for LEN */ LEN, /* i: get length/literal/eob next */ LENEXT, /* i: getting length extra (have base) */ DIST, /* i: get distance next */ DISTEXT, /* i: getting distance extra */ COPY, /* o: copying bytes in window, waiting for space */ LIT, /* o: got literal, waiting for output space */ WASH, /* o: got eob, possibly still output waiting */ END, /* x: got eob and all data flushed */ BADCODE} /* x: got error */ mode; /* current inflate_codes mode */ /* mode dependent information */ uInt len; union { struct { const inflate_huft *tree; /* pointer into tree */ uInt need; /* bits needed */ } code; /* if LEN or DIST, where in tree */ uInt lit; /* if LIT, literal */ struct { uInt get; /* bits to get for extra */ uInt dist; /* distance back to copy from */ } copy; /* if EXT or COPY, where and how much */ } sub; /* submode */ /* mode independent information */ Byte lbits; /* ltree bits decoded per branch */ Byte dbits; /* dtree bits decoder per branch */ const inflate_huft *ltree; /* literal/length/eob tree */ const inflate_huft *dtree; /* distance tree */ }; inflate_codes_statef * inflate_codes_new(bl, bd, tl, td, z) uInt bl, bd; const inflate_huft *tl; const inflate_huft *td; /* need separate declaration for Borland C++ */ z_streamp z; { inflate_codes_statef *c; if ((c = (inflate_codes_statef *) ZALLOC(z, 1, sizeof (struct inflate_codes_state))) != Z_NULL) { c->mode = START; c->lbits = (Byte)bl; c->dbits = (Byte)bd; c->ltree = tl; c->dtree = td; Tracev((stderr, "inflate: codes new\n")); } return (c); } int inflate_codes(s, z, r) inflate_blocks_statef *s; 
z_streamp z; int r; { uInt j; /* temporary storage */ const inflate_huft *t; /* temporary pointer */ uInt e; /* extra bits or operation */ uLong b; /* bit buffer */ uInt k; /* bits in bit buffer */ Bytef *p; /* input data pointer */ uInt n; /* bytes available there */ Bytef *q; /* output window write pointer */ uInt m; /* bytes to end of window or read pointer */ Bytef *f; /* pointer to copy strings from */ inflate_codes_statef *c = s->sub.decode.codes; /* codes state */ /* copy input/output information to locals (UPDATE macro restores) */ LOAD; /* process input and output based on current state */ /* CONSTCOND */ while (1) /* waiting for "i:"=input, "o:"=output, "x:"=nothing */ switch (c->mode) { case START: /* x: set up for LEN */ #ifndef SLOW if (m >= 258 && n >= 10) { UPDATE; r = inflate_fast(c->lbits, c->dbits, c->ltree, c->dtree, s, z); LOAD; if (r != Z_OK) { c->mode = r == Z_STREAM_END ? WASH : BADCODE; break; } } #endif /* !SLOW */ c->sub.code.need = c->lbits; c->sub.code.tree = c->ltree; c->mode = LEN; /* FALLTHRU */ case LEN: /* i: get length/literal/eob next */ j = c->sub.code.need; NEEDBITS(j); t = c->sub.code.tree + ((uInt)b & inflate_mask[j]); DUMPBITS(t->bits); e = (uInt)(t->exop); if (e == 0) { /* literal */ c->sub.lit = t->base; Tracevv((stderr, t->base >= 0x20 && t->base < 0x7f ? 
"inflate: literal '%c'\n" : "inflate: literal 0x%02x\n", t->base)); c->mode = LIT; break; } if (e & 16) { /* length */ c->sub.copy.get = e & 15; c->len = t->base; c->mode = LENEXT; break; } if ((e & 64) == 0) { /* next table */ c->sub.code.need = e; c->sub.code.tree = t + t->base; break; } if (e & 32) { /* end of block */ Tracevv((stderr, "inflate: end of block\n")); c->mode = WASH; break; } c->mode = BADCODE; /* invalid code */ z->msg = "invalid literal/length code"; r = Z_DATA_ERROR; LEAVE case LENEXT: /* i: getting length extra (have base) */ j = c->sub.copy.get; NEEDBITS(j); c->len += (uInt)b & inflate_mask[j]; DUMPBITS(j); c->sub.code.need = c->dbits; c->sub.code.tree = c->dtree; Tracevv((stderr, "inflate: length %u\n", c->len)); c->mode = DIST; /* FALLTHRU */ case DIST: /* i: get distance next */ j = c->sub.code.need; NEEDBITS(j); t = c->sub.code.tree + ((uInt)b & inflate_mask[j]); DUMPBITS(t->bits); e = (uInt)(t->exop); if (e & 16) { /* distance */ c->sub.copy.get = e & 15; c->sub.copy.dist = t->base; c->mode = DISTEXT; break; } if ((e & 64) == 0) { /* next table */ c->sub.code.need = e; c->sub.code.tree = t + t->base; break; } c->mode = BADCODE; /* invalid code */ z->msg = "invalid distance code"; r = Z_DATA_ERROR; LEAVE case DISTEXT: /* i: getting distance extra */ j = c->sub.copy.get; NEEDBITS(j); c->sub.copy.dist += (uInt)b & inflate_mask[j]; DUMPBITS(j); Tracevv((stderr, "inflate: distance %u\n", c->sub.copy.dist)); c->mode = COPY; /* FALLTHRU */ case COPY: /* o: copying bytes in window, waiting for space */ #ifndef __TURBOC__ /* Turbo C bug for following expression */ f = (uInt)(q - s->window) < c->sub.copy.dist ? 
s->end - (c->sub.copy.dist - (q - s->window)) : q - c->sub.copy.dist; #else f = q - c->sub.copy.dist; if ((uInt)(q - s->window) < c->sub.copy.dist) f = s->end - (c->sub.copy.dist - (uInt)(q - s->window)); #endif while (c->len) { NEEDOUT; OUTBYTE(*f++); if (f == s->end) f = s->window; c->len--; } c->mode = START; break; case LIT: /* o: got literal, waiting for output space */ NEEDOUT; OUTBYTE(c->sub.lit); c->mode = START; break; case WASH: /* o: got eob, possibly more output */ if (k > 7) { /* return unused byte, if any */ Assert(k < 16, "inflate_codes grabbed too many bytes"); k -= 8; n++; p--; /* can always return one */ } FLUSH; if (s->read != s->write) LEAVE c->mode = END; /* FALLTHRU */ case END: r = Z_STREAM_END; LEAVE case BADCODE: /* x: got error */ r = Z_DATA_ERROR; LEAVE default: r = Z_STREAM_ERROR; LEAVE } /* NOTREACHED */ /* otherwise lint complains */ } void inflate_codes_free(c, z) inflate_codes_statef *c; z_streamp z; { ZFREE(z, c); Tracev((stderr, "inflate: codes free\n")); } /* --- infcodes.c */ /* +++ infutil.c */ /* * inflate_util.c -- data and routines common to blocks and codes * Copyright (C) 1995-1998 Mark Adler * For conditions of distribution and use, see copyright notice in zlib.h */ /* #include "zutil.h" */ /* #include "infblock.h" */ /* #include "inftrees.h" */ /* #include "infcodes.h" */ /* #include "infutil.h" */ #ifndef NO_DUMMY_DECL struct inflate_codes_state {int dummy; }; /* for buggy compilers */ #endif /* And'ing with mask[n] masks the lower n bits */ uInt inflate_mask[17] = { 0x0000, 0x0001, 0x0003, 0x0007, 0x000f, 0x001f, 0x003f, 0x007f, 0x00ff, 0x01ff, 0x03ff, 0x07ff, 0x0fff, 0x1fff, 0x3fff, 0x7fff, 0xffff }; /* copy as much as possible from the sliding window to the output area */ int inflate_flush(s, z, r) inflate_blocks_statef *s; z_streamp z; int r; { uInt n; Bytef *p; Bytef *q; /* local copies of source and destination pointers */ p = z->next_out; q = s->read; /* compute number of bytes to copy as far as end of window */ n 
= (uInt)((q <= s->write ? s->write : s->end) - q); if (n > z->avail_out) n = z->avail_out; if (n && r == Z_BUF_ERROR) r = Z_OK; /* update counters */ z->avail_out -= n; z->total_out += n; /* update check information */ if (s->checkfn != Z_NULL) z->adler = s->check = (*s->checkfn)(s->check, q, n); /* copy as far as end of window */ if (p != Z_NULL) { /* PPP */ zmemcpy(p, q, n); p += n; } /* PPP */ q += n; /* see if more to copy at beginning of window */ if (q == s->end) { /* wrap pointers */ q = s->window; if (s->write == s->end) s->write = s->window; /* compute bytes to copy */ n = (uInt)(s->write - q); if (n > z->avail_out) n = z->avail_out; if (n && r == Z_BUF_ERROR) r = Z_OK; /* update counters */ z->avail_out -= n; z->total_out += n; /* update check information */ if (s->checkfn != Z_NULL) z->adler = s->check = (*s->checkfn)(s->check, q, n); /* copy */ if (p != Z_NULL) { /* PPP */ zmemcpy(p, q, n); p += n; } /* PPP */ q += n; } /* update pointers */ z->next_out = p; s->read = q; /* done */ return (r); } /* --- infutil.c */ /* +++ inffast.c */ /* * inffast.c -- process literals and length/distance pairs fast * Copyright (C) 1995-1998 Mark Adler * For conditions of distribution and use, see copyright notice in zlib.h */ /* #include "zutil.h" */ /* #include "inftrees.h" */ /* #include "infblock.h" */ /* #include "infcodes.h" */ /* #include "infutil.h" */ /* #include "inffast.h" */ #ifndef NO_DUMMY_DECL struct inflate_codes_state {int dummy; }; /* for buggy compilers */ #endif /* simplify the use of the inflate_huft type with some defines */ #define exop word.what.Exop #define bits word.what.Bits /* macros for bit input with no checking and for returning unused bytes */ #define GRABBITS(j) { while (k < (j)) {b |= ((uLong)NEXTBYTE)<avail_in-n; c = (k>>3) < c?k>>3:c; n += c; p -= c; \ k -= c<<3; } /* * Called with number of bytes left to write in window at least 258 * (the maximum string length) and number of input bytes available at * least ten. 
The ten bytes are six bytes for the longest length/ * distance pair plus four bytes for overloading the bit buffer. */ int inflate_fast(bl, bd, tl, td, s, z) uInt bl, bd; const inflate_huft *tl; const inflate_huft *td; /* need separate declaration for Borland C++ */ inflate_blocks_statef *s; z_streamp z; { const inflate_huft *t; /* temporary pointer */ uInt e; /* extra bits or operation */ uLong b; /* bit buffer */ uInt k; /* bits in bit buffer */ Bytef *p; /* input data pointer */ uInt n; /* bytes available there */ Bytef *q; /* output window write pointer */ uInt m; /* bytes to end of window or read pointer */ uInt ml; /* mask for literal/length tree */ uInt md; /* mask for distance tree */ uInt c; /* bytes to copy */ uInt d; /* distance back to copy from */ Bytef *r; /* copy source pointer */ /* load input, output, bit values */ LOAD; /* initialize masks */ ml = inflate_mask[bl]; md = inflate_mask[bd]; /* do until not enough input or output space for fast loop */ do { /* assume called with m >= 258 && n >= 10 */ /* get literal/length code */ /* max bits for literal/length code */ GRABBITS(20); if ((e = (t = tl + ((uInt)b & ml))->exop) == 0) { DUMPBITS(t->bits); Tracevv((stderr, t->base >= 0x20 && t->base < 0x7f ? 
"inflate: * literal '%c'\n" : "inflate: * literal 0x%02x\n", t->base)); *q++ = (Byte)t->base; m--; continue; } do { DUMPBITS(t->bits); if (e & 16) { /* get extra bits for length */ e &= 15; c = t->base + ((uInt)b & inflate_mask[e]); DUMPBITS(e); Tracevv((stderr, "inflate: * length %u\n", c)); /* decode distance base of block to copy */ GRABBITS(15); /* max bits for distance code */ e = (t = td + ((uInt)b & md))->exop; do { DUMPBITS(t->bits); if (e & 16) { /* * get extra bits to * add to distance * base */ e &= 15; /* get extra bits (up to 13) */ GRABBITS(e); d = t->base + ((uInt)b & inflate_mask[e]); DUMPBITS(e); Tracevv((stderr, "inflate: * " "distance %u\n", d)); /* do the copy */ m -= c; /* offset before dest */ if ((uInt)(q - s->window) >= d) /* just copy */ { r = q - d; /* * minimum * count is * three, so * unroll loop * a little */ *q++ = *r++; c--; *q++ = *r++; c--; } /* else offset after destination */ else { /* bytes from offset to end */ e = d - (uInt)(q - s->window); /* pointer to offset */ r = s->end - e; /* if source crosses */ if (c > e) { /* copy to end of window */ c -= e; do { *q++ = *r ++; } while (--e); /* copy rest from start of window */ r = s->window; } } /* copy all or what's left */ do { *q++ = *r++; } while (--c); break; } else if ((e & 64) == 0) { t += t->base; e = (t += ((uInt)b & inflate_mask[e]))->exop; } else { z->msg = "invalid distance code"; UNGRAB; UPDATE; return (Z_DATA_ERROR); } /* CONSTCOND */ } while (1); break; } if ((e & 64) == 0) { t += t->base; if ((e = (t += ((uInt)b & inflate_mask[e]))->exop) == 0) { DUMPBITS(t->bits); Tracevv((stderr, t->base >= 0x20 && t->base < 0x7f ? 
"inflate: * literal '%c'\n" : "inflate: * literal " "0x%02x\n", t->base)); *q++ = (Byte)t->base; m--; break; } } else if (e & 32) { Tracevv((stderr, "inflate: * end of block\n")); UNGRAB; UPDATE; return (Z_STREAM_END); } else { z->msg = "invalid literal/length code"; UNGRAB; UPDATE; return (Z_DATA_ERROR); } /* CONSTCOND */ } while (1); } while (m >= 258 && n >= 10); /* not enough input or output--restore pointers and return */ UNGRAB; UPDATE; return (Z_OK); } /* --- inffast.c */ /* +++ zutil.c */ /* * zutil.c -- target dependent utility functions for the compression library * Copyright (C) 1995-1998 Jean-loup Gailly. * For conditions of distribution and use, see copyright notice in zlib.h */ /* From: zutil.c,v 1.17 1996/07/24 13:41:12 me Exp $ */ #ifdef DEBUG_ZLIB #include #endif /* #include "zutil.h" */ #ifndef NO_DUMMY_DECL struct internal_state {int dummy; }; /* for buggy compilers */ #endif #ifndef STDC extern void exit OF((int)); #endif const char *z_errmsg[10] = { "need dictionary", /* Z_NEED_DICT 2 */ "stream end", /* Z_STREAM_END 1 */ "", /* Z_OK 0 */ "file error", /* Z_ERRNO (-1) */ "stream error", /* Z_STREAM_ERROR (-2) */ "data error", /* Z_DATA_ERROR (-3) */ "insufficient memory", /* Z_MEM_ERROR (-4) */ "buffer error", /* Z_BUF_ERROR (-5) */ "incompatible version", /* Z_VERSION_ERROR (-6) */ ""}; const char * zlibVersion() { return (ZLIB_VERSION); } #ifdef DEBUG_ZLIB void z_error(m) char *m; { fprintf(stderr, "%s\n", m); exit(1); } #endif #ifndef HAVE_MEMCPY void zmemcpy(dest, source, len) Bytef* dest; const Bytef* source; uInt len; { if (len == 0) return; do { *dest++ = *source++; /* ??? to be unrolled */ } while (--len != 0); } int zmemcmp(s1, s2, len) const Bytef* s1; const Bytef* s2; uInt len; { uInt j; for (j = 0; j < len; j++) { if (s1[j] != s2[j]) return (2*(s1[j] > s2[j])-1); } return (0); } void zmemzero(dest, len) Bytef* dest; uInt len; { if (len == 0) return; do { *dest++ = 0; /* ??? 
to be unrolled */ } while (--len != 0); } #endif #ifdef __TURBOC__ #if (defined(__BORLANDC__) || !defined(SMALL_MEDIUM)) && !defined(__32BIT__) /* * Small and medium model in Turbo C are for now limited to near * allocation with reduced MAX_WBITS and MAX_MEM_LEVEL */ #define MY_ZCALLOC /* * Turbo C malloc() does not allow dynamic allocation of 64K bytes and * farmalloc(64K) returns a pointer with an offset of 8, so we must * fix the pointer. Warning: the pointer must be put back to its * original form in order to free it, use zcfree(). */ #define MAX_PTR 10 /* 10*64K = 640K */ local int next_ptr = 0; typedef struct ptr_table_s { voidpf org_ptr; voidpf new_ptr; } ptr_table; local ptr_table table[MAX_PTR]; /* * This table is used to remember the original form of pointers to * large buffers (64K). Such pointers are normalized with a zero * offset. Since MSDOS is not a preemptive multitasking OS, this * table is not protected from concurrent access. This hack doesn't * work anyway on a protected system like OS/2. Use Microsoft C * instead. */ voidpf zcalloc(voidpf opaque, unsigned items, unsigned size) { voidpf buf = opaque; /* just to make some compilers happy */ ulg bsize = (ulg)items*size; /* * If we allocate less than 65520 bytes, we assume that * farmalloc will return a usable pointer which doesn't have * to be normalized. 
*/ if (bsize < 65520L) { buf = farmalloc(bsize); if (*(ush *)&buf != 0) return (buf); } else { buf = farmalloc(bsize + 16L); } if (buf == NULL || next_ptr >= MAX_PTR) return (NULL); table[next_ptr].org_ptr = buf; /* Normalize the pointer to seg:0 */ *((ush *)&buf+1) += ((ush)((uch *)buf-0) + 15) >> 4; *(ush *)&buf = 0; table[next_ptr++].new_ptr = buf; return (buf); } void zcfree(voidpf opaque, voidpf ptr) { int n; if (*(ush*)&ptr != 0) { /* object < 64K */ farfree(ptr); return; } /* Find the original pointer */ for (n = 0; n < next_ptr; n++) { if (ptr != table[n].new_ptr) continue; farfree(table[n].org_ptr); while (++n < next_ptr) { table[n-1] = table[n]; } next_ptr--; return; } ptr = opaque; /* just to make some compilers happy */ Assert(0, "zcfree: ptr not found"); } #endif #endif /* __TURBOC__ */ #if defined(M_I86) && !defined(__32BIT__) /* Microsoft C in 16-bit mode */ #define MY_ZCALLOC #if (!defined(_MSC_VER) || (_MSC_VER <= 600)) #define _halloc halloc #define _hfree hfree #endif voidpf zcalloc(voidpf opaque, unsigned items, unsigned size) { if (opaque) opaque = 0; /* to make compiler happy */ return (_halloc((long)items, size)); } void zcfree(voidpf opaque, voidpf ptr) { if (opaque) opaque = 0; /* to make compiler happy */ _hfree(ptr); } #endif /* MSC */ #ifndef MY_ZCALLOC /* Any system without a special alloc function */ #ifndef STDC extern voidp calloc OF((uInt items, uInt size)); extern void free OF((voidpf ptr)); #endif voidpf zcalloc(opaque, items, size) voidpf opaque; unsigned items; unsigned size; { if (opaque) items += size - size; /* make compiler happy */ return ((voidpf)calloc(items, size)); } /*ARGSUSED*/ void zcfree(opaque, ptr) voidpf opaque; voidpf ptr; { free(ptr); } #endif /* MY_ZCALLOC */ /* --- zutil.c */ /* +++ adler32.c */ /* * adler32.c -- compute the Adler-32 checksum of a data stream * Copyright (C) 1995-1998 Mark Adler * For conditions of distribution and use, see copyright notice in zlib.h */ /* From: adler32.c,v 1.10 1996/05/22 
11:52:18 me Exp $ */ /* #include "zlib.h" */ #define BASE 65521L /* largest prime smaller than 65536 */ #define NMAX 5552 /* NMAX is the largest n such that 255n(n+1)/2 + (n+1)(BASE-1) <= 2^32-1 */ #define DO1(buf, i) {s1 += buf[i]; s2 += s1; } #define DO2(buf, i) DO1(buf, i); DO1(buf, i+1); #define DO4(buf, i) DO2(buf, i); DO2(buf, i+2); #define DO8(buf, i) DO4(buf, i); DO4(buf, i+4); #define DO16(buf) DO8(buf, 0); DO8(buf, 8); /* ========================================================================= */ uLong adler32(adler, buf, len) uLong adler; const Bytef *buf; uInt len; { unsigned long s1 = adler & 0xffff; unsigned long s2 = (adler >> 16) & 0xffff; int k; if (buf == Z_NULL) return (1L); while (len > 0) { k = len < NMAX ? len : NMAX; len -= k; while (k >= 16) { DO16(buf); buf += 16; k -= 16; } if (k != 0) do { s1 += *buf++; s2 += s1; } while (--k); s1 %= BASE; s2 %= BASE; } return ((s2 << 16) | s1); } /* --- adler32.c */